text stringlengths 38 1.54M |
|---|
# -*- coding: utf-8 -*-
"""
This code creates a database with a list of publications data from Google
Scholar.
The data acquired from GS is Title, Citations, Links and Rank.
It is useful for finding relevant papers by sorting by the number of citations
This example will look for the top 100 papers related to the keyword
'non intrusive load monitoring', so that you can rank them by the number of citations
As output this program will plot the number of citations in the Y axis and the
rank of the result in the X axis. It also, optionally, export the database to
a .csv file.
Before using it, please update the initial variables
"""
import requests
from bs4 import BeautifulSoup
import matplotlib.pyplot as plt
import pandas as pd
def get_citations(content):
    """Extract the citation count from a Google Scholar result snippet.

    Looks for the last occurrence of 'Cited by ' in *content* and returns
    the integer that immediately follows it, or 0 when none is present.
    The original scanned for a terminating '<' within five characters and
    crashed with ValueError when the digits were followed by anything else;
    scanning digits directly handles any terminator and any count length.
    """
    marker = 'Cited by '
    pos = content.rfind(marker)  # original kept overwriting → last match wins
    if pos == -1:
        return 0
    start = pos + len(marker)
    end = start
    while end < len(content) and content[end].isdigit():
        end += 1
    if end == start:  # 'Cited by ' not followed by digits
        return 0
    return int(content[start:end])
def get_year(content):
    """Extract the 4-digit year from a Google Scholar 'gs_a' author line.

    Lines look like 'Authors - 2015 - Publisher': the year sits in the four
    characters ending two before a '-'. As in the original, the last '-'
    that yields digits wins. Returns 0 when nothing parses — the original
    raised UnboundLocalError when the line contained no '-' at all.
    """
    out = 0
    for idx, ch in enumerate(content):
        if ch == '-':
            candidate = content[idx - 5:idx - 1]
            out = int(candidate) if candidate.isdigit() else 0
    return out
def get_author(content):
    """Return the author substring of a Google Scholar 'gs_a' line.

    The line looks like '??Authors - Year - Publisher'; the authors are the
    text from index 2 up to the character before the first '-'. Returns ''
    when no '-' exists (the original raised UnboundLocalError there).
    """
    idx = content.find('-')
    if idx == -1:
        return ''
    return content[2:idx - 1]
# Update these variables according to your requirement
keyword = "'non intrusive load monitoring'"  # double quotes = exact phrase,
                                             # single quotes also match similar keywords
number_of_results = 100  # number of results to look for on Google Scholar (10 per page)
save_database = False    # choose if you would like to save the database to .csv
path = 'C:/_wittmann/nilm_100_exact_author.csv'  # path to save the data

# Start new session
session = requests.Session()

# Accumulators, one entry per scraped result
links = []
title = []
citations = []
year = []
author = []
rank = [0]  # seed value so rank[-1] + 1 works for the first hit

# Fetch result pages, 10 hits per page
for n in range(0, number_of_results, 10):
    url = ('https://scholar.google.com/scholar?start=' + str(n)
           + '&q=' + keyword.replace(' ', '+'))
    page = session.get(url)
    soup = BeautifulSoup(page.content, 'html.parser')
    # fixed: findAll is the deprecated bs4 spelling of find_all
    for div in soup.find_all("div", {"class": "gs_r"}):
        try:
            links.append(div.find('h3').find('a').get('href'))
        except Exception:  # fixed: bare except also swallowed KeyboardInterrupt
            links.append('Look manually at: https://scholar.google.com/scholar?start=' + str(n) + '&q=non+intrusive+load+monitoring')
        try:
            title.append(div.find('h3').find('a').text)
        except Exception:
            title.append('Could not catch title')
        # fixed: div.format_string is not a bs4 Tag attribute (it resolved to
        # None, so every count came back 0); str(div) is the snippet HTML.
        # TODO confirm against live markup.
        citations.append(get_citations(str(div)))
        year.append(get_year(div.find('div', {'class': 'gs_a'}).text))
        author.append(get_author(div.find('div', {'class': 'gs_a'}).text))
        rank.append(rank[-1] + 1)

# Create a dataset and sort by the number of citations
data = pd.DataFrame(list(zip(author, title, citations, year, links)),
                    index=rank[1:],
                    columns=['Author', 'Title', 'Citations', 'Year', 'Source'])
data.index.name = 'Rank'
data_ranked = data.sort_values(by='Citations', ascending=False)
print(data_ranked)  # fixed: Python-2 print statement

# Plot by citation number
plt.plot(rank[1:], citations, '*')
plt.ylabel('Number of Citations')
plt.xlabel('Rank of the keyword on Google Scholar')
plt.title('Keyword: ' + keyword)

# Save results
if save_database:
    data_ranked.to_csv(path, encoding='utf-8')  # Change the path
|
#!/usr/bin/env python3
import json
from bson.objectid import ObjectId
import time
from db import db
from logger import Logger
# Shared logger for this backup script; log_f=True also writes to 'backup.log'.
l = Logger('backup.log', log_f=True)
trace = l.trace     # convenience alias for the tracing decorator/helper
log = l.getlog()    # module-level log handle
def main():
    """Restore weather records from 'weather.json' (one JSON document per line)."""
    with open('weather.json') as dump:
        for line in dump:
            record = json.loads(line)
            # Re-hydrate the Mongo extended-JSON id into a real ObjectId.
            record['_id'] = ObjectId(record['_id']['$oid'])
            db.weather.save(record)
            print('saved [{0}]'.format(record['_id']))


if __name__ == '__main__':
    main()
|
import urllib
import urllib.request
import urllib.parse
# ajax post
url = 'http://www.kfc.com.cn/kfccda/ashx/GetStoreList.ashx?op=pid'
headers = {'User-Agent':'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.139 Safari/537.36'}
# get
url_get = 'http://www.kfc.com.cn/kfccda/ashx/GetStoreList.ashx?op=pid&cname=&pid=31&pageIndex=8&pageSize=10'
'''
cname=&pid=31&pageIndex=8&pageSize=10
'''
if __name__ == '__main__':
    import os

    city = input('请输入查询城市:')
    page = int(input('请输入查询多少页:'))
    # fixed: open() fails with FileNotFoundError when ./download is missing
    os.makedirs('./download', exist_ok=True)
    for p in range(1, page + 1):
        # POST body for the store-list AJAX endpoint
        form = {'cname': city,
                'pid': 31,
                'pageIndex': p,
                'pageSize': 50}
        payload = urllib.parse.urlencode(form).encode('utf-8')
        request = urllib.request.Request(url=url, data=payload, headers=headers)
        response = urllib.request.urlopen(request)
        content = response.read().decode('utf-8')
        # write bytes so the on-disk file is exactly the UTF-8 payload
        with open('./download/肯德基第%d页.json' % (p), mode='wb') as fp:
            fp.write(content.encode('utf-8'))
import numpy as np
import os.path as osp
import os
import sys
from util import *
import torch
import torchvision
import torchvision.transforms as transforms
import matplotlib.pyplot as plt
import torch.optim as optim
from torch.autograd import Variable
import torch.nn as nn
import torch.nn.functional as F
input_file = 'CSE_253_P4/data/input.txt'
# input_file = 'data/input.txt'
# sample_music = 'data/sample-music.txt'

# music_book contains one music() per song: beats, name, key, etc.
music_book = []
# sheet contains only the note bodies of each song
sheet = []

with open(input_file, 'r') as f:
    body = ''  # fixed: body was unbound if a note line preceded the first <start>
    for i, line in enumerate(f):
        if len(line) <= 3:
            pass  # blank/too-short line: ignore
        elif line[0:7] == '<start>':
            # begin a new song and reset its body accumulator
            music_book.append(music())
            body = ''
        elif line[0] == 'X':
            # reference number; keep the raw char when it is not a digit
            try:
                music_book[-1].ref_num = int(line[2])
            except ValueError:
                music_book[-1].ref_num = line[2]
        elif line[0] == 'T':
            music_book[-1].tune = line[2:-1]
        elif line[0] == 'M':
            music_book[-1].beats = line[2:-1]
        elif line[0] == 'L':
            # fixed: the original had this branch duplicated later in the
            # chain, where it was unreachable dead code
            music_book[-1].note_len = line[2:-1]
        elif line[0] == 'K':
            music_book[-1].key = line[2:-1]
        # store body (fixed: condition repeated "line[0] != '<'" twice)
        elif line[1] != ':' and line[0] != '<':
            body += line[:-1]
        elif line[0:5] == '<end>':
            music_book[-1].body = body
            sheet.append(body)

for piece in sheet:
    # fixed: lineToTensor was called twice per piece with the first result
    # discarded; also renamed 'input', which shadowed the builtin
    piece_tensor = Variable(lineToTensor(piece))

a = 5
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Mar 5 10:38:22 2017
@author: cyril
"""
import numpy as np
def load_data(data_path, data_name):
    """Load the numpy array stored at data_path/data_name.

    Joins the parts with os.path.join, so *data_path* works with or without
    a trailing separator (the original plain concatenation required one).
    """
    import os
    return np.load(os.path.join(data_path, data_name))
def gen_batch(raw_data, batch_size, shuffle=True):
    """Yield (x, y) mini-batches drawn from *raw_data*.

    raw_data: numpy array (data_length, num_steps, num_features + 1) with
    the label in the last feature column. With shuffle=True rows are drawn
    in a random order, each at most once per pass; a trailing partial batch
    is dropped either way.

    Yields:
        x: numpy array (batch_size, num_steps, num_features)
        y: numpy array (batch_size, num_steps)
    """
    n = raw_data.shape[0]
    if shuffle:
        order = np.arange(n)
        np.random.shuffle(order)
    for lo in range(0, n - batch_size + 1, batch_size):
        if shuffle:
            rows = order[lo:lo + batch_size]
        else:
            rows = slice(lo, lo + batch_size)
        chunk = raw_data[rows]  # (batch_size, num_steps, num_features + 1)
        yield chunk[:, :, :-1], chunk[:, :, -1]
def gen_epochs(max_epoch, batch_size, data):
    """Yield one fresh shuffled batch generator per epoch (data is padded)."""
    for _ in range(max_epoch):
        yield gen_batch(data, batch_size, shuffle=True)
# coding: utf-8
#
# Copyright 2016 The MyCleanIndia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.conf.urls import url, include
from django.contrib import admin
import django.contrib.auth.views
from django.conf import settings
# Branding for the Django admin, pulled from settings.
admin.site.site_header = settings.ADMIN_SITE_HEADER
admin.site.site_title = settings.ADMIN_SITE_TITLE

# NOTE(review): the two r'' patterns match every request path, so the later
# ^app/ and ^home/ entries duplicate includes already reachable through the
# catch-alls. Confirm the duplication is intentional before removing.
urlpatterns = [
    url(r'', include('home.urls')),
    url(r'', include('app.urls')),
    url(r'^admin/', admin.site.urls),
    url(r'^app/', include('app.urls')),
    url(r'^home/', include('home.urls')),
    url('', include('social.apps.django_app.urls', namespace='social')),
]
|
"""Testing the modules from script1."""
from unittest import TestCase
from my_python_learning_scripts.script1 import parse_file, get_numbers
class TestScript1(TestCase):
    """Class holds the test functions for get_numbers method."""

    def test_get_numbers(self):
        """Function should receive a list and return only the numbers."""
        self.assertEqual([1, 2, 3], get_numbers(["1", "2", "3"]))

    def test_get_numbers_intercalated(self):
        """Checks functions for numbers and non_numbers."""
        self.assertEqual([1, 2, 3], get_numbers(["1", "abc", "2", "abc2", "3"]))

    def test_get_numbers_none(self):
        """Checks function with only strings."""
        self.assertEqual([], get_numbers(["abc", "abc2"]))

    def test_get_numbers_ints(self):
        """Checks function with numbers, non_numbers and strings."""
        # Already-numeric items are expected to pass through unchanged.
        self.assertEqual([1, 2, 3], get_numbers(["abc", "abc2", 1, 2, 3]))

    def test_get_numbers_floats(self):
        """Checks function with floats."""
        # "1.0" is expected to parse to the float 1.0; bare ints stay ints.
        self.assertEqual([1.0, 2.0, 3], get_numbers(["abc", "1.0", "abc2", 2.0, 3]))
class TestFiles(TestCase):
    """End-to-end tests: write input.txt, run parse_file, compare out.txt."""

    def assert_from_file(self, values, expected):
        """Write *values* (one per line) to input.txt, run parse_file,
        and compare the lines of out.txt against *expected*.
        """
        # fixed: the file handle leaked if a write raised before close()
        with open("input.txt", "w") as handle:
            for item in values:
                handle.write(str(item))
                handle.write("\n")
        parse_file("input.txt")
        with open("out.txt", "r") as lines:
            produced = [line.rstrip("\n") for line in lines]
        self.assertEqual(expected, produced)

    def test_get_numbers(self):
        """Tests the get_number function with only integers."""
        self.assert_from_file(["1", "2", "3"], ["1", "2", "3"])

    def test_get_numbers_intercalated(self):
        """Tests get_number function with integers, strings, non_numbers."""
        self.assert_from_file(["1", "abc", "2", "abc2", "3"], ["1", "2", "3"])

    def test_get_numbers_none(self):
        """Tests get_number function with string and non_number."""
        self.assert_from_file(["abc", "abc2"], [])

    def test_get_numbers_ints(self):
        """Tests get_number function with string, non_number and integers."""
        self.assert_from_file(["abc", "abc2", 1, 2, 3], ["1", "2", "3"])

    def test_get_numbers_floats(self):
        """Tests get_number function with string, non_number, and float numbers."""
        self.assert_from_file(["abc", "1.0", "abc2", 2.0, 3], ["1.0", "2.0", "3"])
|
# Queue implementation using Linked List - preferred implementation
# To use a class of another file in current file, import it
from doubly_linked_list import DoublyLinkedList
class Queue:
    """FIFO queue backed by a doubly linked list.

    New items go to the head of the list, so the oldest item sits at the
    tail; dequeue/front therefore operate on the tail. (The concept can be
    reversed — insert_at_end / remove_at_beginning — with the same effect.)
    """

    def __init__(self):
        # Underlying storage: a doubly linked list of nodes.
        self.queue_list = DoublyLinkedList()

    def enqueue(self, item):
        """Add *item* to the back of the queue (list head = first in)."""
        self.queue_list.insert_at_beginning(item)

    def dequeue(self):
        """Remove and return the oldest item (list tail = first out).

        Raises:
            IndexError: when the queue is empty.
        """
        if self.is_empty():
            # fixed: message said "Stack is Empty!" in a Queue class
            raise IndexError("Queue is Empty!")
        front_item = self.queue_list.last_element_value()  # grab before removal
        self.queue_list.remove_at_end()
        return front_item

    def front(self):
        """Return (without removing) the oldest item — the tail value."""
        return self.queue_list.last_element_value()

    def is_empty(self):
        """True when the queue holds no items."""
        return self.queue_list.get_length() == 0

    def __len__(self):
        # Let callers use len(queue) directly.
        return self.queue_list.get_length()
def main():
    """Small demo exercising every Queue operation."""
    demo = Queue()
    for value in (10, 20, 30):
        demo.enqueue(value)
    demo.queue_list.print_list_forward()
    print(demo.front())
    print(f"Length = {len(demo)}")  # len() works because __len__ is overridden
    print()
    demo.dequeue()
    print(demo.dequeue())
    print(demo.front())
    print(f"Length = {len(demo)}")
    demo.queue_list.print_list_forward()
    print(demo.is_empty())


if __name__ == "__main__":
    main()
|
from pydub import AudioSegment,playback
AudioSegment.converter='ffmpeg\\ffmpeg.exe'
songlist=['1.mp3','2.mp3']
class mediaplayer:
    """Tiny mp3 player built on pydub: play one track or overlay two."""

    def __init__(self):
        # Playlist of local mp3 files, addressed by list index.
        self.songlist = ['1.mp3', '2.mp3']

    def play(self, index):
        """Decode songlist[index] and play it (blocking until done)."""
        sound = AudioSegment.from_mp3(self.songlist[index])
        playback.play(sound)

    def mixandplay(self, index1, index2):
        """Overlay the two indexed tracks and play the mix (blocking)."""
        sound1 = AudioSegment.from_mp3(self.songlist[index1])
        sound2 = AudioSegment.from_mp3(self.songlist[index2])
        output = sound1.overlay(sound2)
        playback.play(output)
c = mediaplayer()
c.play(1)
# fixed: songlist has two entries (indices 0 and 1), so the original
# c.mixandplay(1, 2) always raised IndexError
c.mixandplay(0, 1)
import os
import sys
import codecs
import requests
from itertools import groupby
from wikibench.dataset import *
class ERDConvert(object):
    """Convert ERD golden-annotation files into a wikibench Dataset."""

    def __init__(self, goldenfile):
        # Parsed golden annotations: list of (doc_id, [Mention, ...]).
        self.golden = self.read_file(goldenfile)

    def get_wid(self, mid):
        """Resolve a Freebase mid to a Wikipedia id via the wikisense service."""
        return requests.get(
            'http://wikisense.mkapp.it/wiki/freebase/id/' + mid).json()

    def get_title(self, wid):
        """Resolve a Wikipedia id to its title; '' when the service has none."""
        title = requests.get(
            'http://wikisense.mkapp.it/wiki/title/' + str(wid)).json()
        if not title:
            return ''
        return title

    def export_dataset(self, results, output):
        """Align golden mentions against the document texts under *results*
        and write the dataset as TSV to *output*.

        Mentions whose spot text cannot be matched (after a small backward
        shift) are dropped with a diagnostic print.
        """
        instances = []
        for docid, (docname, mentions) in enumerate(self.golden):
            # fixed: file handle was never closed
            with codecs.open(os.path.join(results, docname + ".txt"),
                             'r', 'utf8') as fh:
                text = fh.read()
            # Map each character index to its UTF-8 byte offset.
            # fixed: this table is invariant per document but was rebuilt
            # for every mention (O(len(text) * len(mentions))).
            cum = 0
            offsets = []
            for c in text:
                offsets.append(cum)
                cum += len(c.encode('utf8'))
            filtered = []
            prevend = 0
            for m in mentions:
                m.start = offsets[m.start]
                m.end = m.start + len(m.spot)  # offsets[min(m.end, len(offsets) - 1)]
                # Nudge the span left (max 10 chars, not past the previous
                # mention) until it matches the annotated spot text.
                shiftback = min(10, max(0, m.start + 1 - prevend))
                while shiftback >= 0 and text[m.start:m.end] != m.spot:
                    m.start -= 1
                    m.end -= 1
                    shiftback -= 1
                if text[m.start:m.end] != m.spot:
                    # fixed: Python-2 print statement
                    print(docid, type(text), type(m.spot),
                          text[m.start:m.end], '==', m.spot, docname)
                else:
                    filtered.append(m)
                    prevend = m.end
            instances.append(Instance(text, filtered, docid))
        dataset = Dataset('ERD', instances)
        Dataset.save_tsv(dataset, output)

    def read_file(self, filename):
        """Parse the golden TSV into [(doc_id, [Mention, ...]), ...].

        Returns [(None, [])] when the file is missing or empty so callers
        always get an iterable of pairs.
        """
        if not os.path.exists(filename):
            return [(None, [])]

        def iterate_golden():
            # Each line: doc id, span, freebase id, title, mention, scores.
            for line in codecs.open(filename, 'r', 'utf8'):
                text_id, left, right, fid, title, mention, score1, score2 = \
                    line.strip().split('\t')
                wid = self.get_wid('m.' + fid[3:])
                title = self.get_title(wid)
                yield text_id, mention, int(left), int(right), title, wid

        results = []
        for docid, items in groupby(iterate_golden(), key=lambda x: x[0]):
            results.append((docid, [Mention(*x[1:]) for x in items]))
        if not results:
            return [(None, [])]
        return results
# fixed: guard so importing this module does not immediately run the export
if __name__ == '__main__':
    conv = ERDConvert(sys.argv[1])
    conv.export_dataset(sys.argv[2], sys.argv[3])
|
# Copyright 2018 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ================================================================================================
from __future__ import print_function
import dwave_networkx as dnx
import networkx as nx
import dimod
# Use basic simulated annealer
sampler = dimod.SimulatedAnnealingSampler()
# The definition of a minimum vertex cover set is that each edge in the graph
# must have a vertex in the minimum vertex cover set, and we also want the
# vertex cover set to be as small as possible.
# Set up a Networkx Graph
G = nx.Graph()
G.add_edges_from([(0, 1), (0, 2), (1, 3), (2, 3), (3, 5), (4, 5), (3, 6), (4, 7), (5, 6), (5, 7), (6, 8), (7, 8), (8, 9)])
# Get the minimum vertex cover, which is known in this case to be of
# length 5
candidate = dnx.min_vertex_cover(G, sampler)
# The annealer is stochastic, so verify the candidate is a cover of the
# known-minimum size before claiming minimality.
if dnx.is_vertex_cover(G, candidate) and len(candidate) == 5:
    print (candidate, " is a minimum vertex cover")
else:
    print (candidate, " is not a minimum vertex cover")
|
#!/usr/bin/env python3
import sys
import os
import subprocess
from subprocess import call as _call
from glob import glob
import shutil
if sys.version_info.major == 3:
from pathlib import Path
def prntfail(*args, **kwargs):
    """Print the given diagnostics and abort the process with status 1."""
    print(args)
    print(kwargs)
    # fixed: bare exit() is a site-module convenience that may be absent
    # in non-interactive interpreters; sys.exit is the reliable form
    sys.exit(1)
# TODO: more robust check for dry run
# Wrap functions with 'ifdbg' to print useful
# message when it is called.
dry = len(sys.argv) > 1 and ("-n" in sys.argv[1:]) # dry run? (-n anywhere in argv)
isdry = lambda: dry  # accessor so callers can query dry mode after import
ID = lambda x: x     # identity; default arg/kwarg formatter for ifdbg
def ifdbg(fncn, arg_fmt=ID, kwarg_fmt=ID, run_anyways=True):
    """Wrap *fncn* so dry-run mode prints a call trace.

    Outside dry-run mode the wrapper is transparent. In dry-run mode it
    prints '<name>(<args>, <kwargs>)' and, when run_anyways is true, still
    invokes *fncn* and returns its result.
    """
    def wrapped(*args, **kwargs):
        if not dry:
            return fncn(*args, **kwargs)
        print("%s(%s, %s)" % (fncn.__name__, arg_fmt(args), kwarg_fmt(kwargs)))
        if run_anyways:
            return fncn(*args, **kwargs)
    return wrapped
# Like ifdbg, but for IO functions that should not be executed
# (just printf-debugged).
def ifdry(*args, **kwargs):
    """ifdbg variant for IO actions: in dry-run mode only print, never run."""
    # Force run_anyways=False regardless of what the caller passed.
    return ifdbg(*args, **dict(kwargs, run_anyways=False))
# Try to run the given function, catching (and ignoring / warning)
# any exceptions listed in the remaining args *exceptions.
def trywarn(fncn, *exceptions):
    """Return a wrapper that calls *fncn*, downgrading the listed exception
    types to a printed WARNING (other exceptions propagate unchanged)."""
    def guarded(*args, **kwargs):
        try:
            return fncn(*args, **kwargs)
        except exceptions as err:
            print("WARNING: " + str(err))
    return guarded
# Shell-style aliases; destructive ones are wrapped so dry-run only prints.
join = os.path.join
basename = os.path.basename
ln = ifdry(os.symlink)
# Format the command list readably in dry-run output.
call = ifdry(_call, arg_fmt=lambda x: ' '.join(x[0]) + ','.join(x[1:]))
co = ifdbg(subprocess.check_output)  # ifdbg: check_output still runs when dry
chmod = ifdry(os.chmod)
if sys.version_info.major == 3:
    chown = ifdry(shutil.chown)      # py3 shutil.chown accepts names, not just ids
else:
    chown = ifdry(os.chown)
# mkdir -p semantics by default; explicit kwargs override parents/exist_ok.
mkdir = ifdry(lambda p, *args, **kwargs: Path(p).mkdir(*args, **(dict({"parents": True, "exist_ok": True}, **kwargs))))
def _rm(path, force=True):
    """Remove *path*; with force=True a missing file is not an error (rm -f)."""
    try:
        ifdry(os.remove)(path)
    except FileNotFoundError:
        if force:
            return
        raise
# NOTE(review): _rm already routes os.remove through ifdry, so wrapping it
# again here double-prints in dry-run mode — confirm that is intended.
rm = ifdry(_rm)
pwd = os.getcwd
def echo(*args):
    """Print each argument on its own line (shell-echo lookalike)."""
    for value in args:
        print(value)
# Map over a list (python3 workaround...)
def lmap(fncn, lst):
    """Eagerly map *fncn* over *lst*, returning a list.

    (Python 3's map() is lazy; this forces the list, replacing the original
    hand-rolled append loop with a comprehension.)
    """
    return [fncn(e) for e in lst]
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'main.ui'
#
# Created by: PyQt5 UI code generator 5.15.4
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_main(object):
def setupUi(self, main):
main.setObjectName("main")
main.resize(800, 600)
self.centralwidget = QtWidgets.QWidget(main)
self.centralwidget.setObjectName("centralwidget")
self.allBooksTab = QtWidgets.QTabWidget(self.centralwidget)
self.allBooksTab.setGeometry(QtCore.QRect(0, 0, 801, 726))
self.allBooksTab.setObjectName("allBooksTab")
self.tab = QtWidgets.QWidget()
self.tab.setObjectName("tab")
self.line = QtWidgets.QFrame(self.tab)
self.line.setGeometry(QtCore.QRect(10, 60, 771, 16))
self.line.setFrameShape(QtWidgets.QFrame.HLine)
self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line.setObjectName("line")
self.layoutWidget = QtWidgets.QWidget(self.tab)
self.layoutWidget.setGeometry(QtCore.QRect(10, 20, 771, 25))
self.layoutWidget.setObjectName("layoutWidget")
self.horizontalLayout_3 = QtWidgets.QHBoxLayout(self.layoutWidget)
self.horizontalLayout_3.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
self.label = QtWidgets.QLabel(self.layoutWidget)
self.label.setObjectName("label")
self.horizontalLayout_3.addWidget(self.label)
self.newBookButton = QtWidgets.QPushButton(self.layoutWidget)
self.newBookButton.setObjectName("newBookButton")
self.horizontalLayout_3.addWidget(self.newBookButton)
self.layoutWidget1 = QtWidgets.QWidget(self.tab)
self.layoutWidget1.setGeometry(QtCore.QRect(10, 90, 771, 471))
self.layoutWidget1.setObjectName("layoutWidget1")
self.verticalLayout_7 = QtWidgets.QVBoxLayout(self.layoutWidget1)
self.verticalLayout_7.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_7.setObjectName("verticalLayout_7")
self.label_2 = QtWidgets.QLabel(self.layoutWidget1)
font = QtGui.QFont()
font.setPointSize(10)
self.label_2.setFont(font)
self.label_2.setObjectName("label_2")
self.verticalLayout_7.addWidget(self.label_2)
self.horizontalLayout_6 = QtWidgets.QHBoxLayout()
self.horizontalLayout_6.setObjectName("horizontalLayout_6")
self.horizontalLayout_5 = QtWidgets.QHBoxLayout()
self.horizontalLayout_5.setObjectName("horizontalLayout_5")
self.verticalLayout_6 = QtWidgets.QVBoxLayout()
self.verticalLayout_6.setObjectName("verticalLayout_6")
self.label_3 = QtWidgets.QLabel(self.layoutWidget1)
self.label_3.setObjectName("label_3")
self.verticalLayout_6.addWidget(self.label_3)
self.issuedTable = QtWidgets.QTableWidget(self.layoutWidget1)
self.issuedTable.setColumnCount(8)
self.issuedTable.setObjectName("issuedTable")
self.issuedTable.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.issuedTable.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.issuedTable.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.issuedTable.setHorizontalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.issuedTable.setHorizontalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.issuedTable.setHorizontalHeaderItem(4, item)
item = QtWidgets.QTableWidgetItem()
self.issuedTable.setHorizontalHeaderItem(5, item)
item = QtWidgets.QTableWidgetItem()
self.issuedTable.setHorizontalHeaderItem(6, item)
item = QtWidgets.QTableWidgetItem()
self.issuedTable.setHorizontalHeaderItem(7, item)
self.verticalLayout_6.addWidget(self.issuedTable)
self.horizontalLayout_5.addLayout(self.verticalLayout_6)
self.verticalLayout_3 = QtWidgets.QVBoxLayout()
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.deleteIssuedButton = QtWidgets.QPushButton(self.layoutWidget1)
self.deleteIssuedButton.setObjectName("deleteIssuedButton")
self.verticalLayout_3.addWidget(self.deleteIssuedButton)
self.editIssuedButton = QtWidgets.QPushButton(self.layoutWidget1)
self.editIssuedButton.setObjectName("editIssuedButton")
self.verticalLayout_3.addWidget(self.editIssuedButton)
self.refreshIssuedButton = QtWidgets.QPushButton(self.layoutWidget1)
self.refreshIssuedButton.setObjectName("refreshIssuedButton")
self.verticalLayout_3.addWidget(self.refreshIssuedButton)
spacerItem = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.verticalLayout_3.addItem(spacerItem)
self.horizontalLayout_5.addLayout(self.verticalLayout_3)
self.horizontalLayout_6.addLayout(self.horizontalLayout_5)
self.horizontalLayout_4 = QtWidgets.QHBoxLayout()
self.horizontalLayout_4.setObjectName("horizontalLayout_4")
self.verticalLayout_5 = QtWidgets.QVBoxLayout()
self.verticalLayout_5.setObjectName("verticalLayout_5")
self.label_4 = QtWidgets.QLabel(self.layoutWidget1)
self.label_4.setObjectName("label_4")
self.verticalLayout_5.addWidget(self.label_4)
self.unissuedTable = QtWidgets.QTableWidget(self.layoutWidget1)
self.unissuedTable.setColumnCount(8)
self.unissuedTable.setObjectName("unissuedTable")
self.unissuedTable.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.unissuedTable.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.unissuedTable.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.unissuedTable.setHorizontalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.unissuedTable.setHorizontalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.unissuedTable.setHorizontalHeaderItem(4, item)
item = QtWidgets.QTableWidgetItem()
self.unissuedTable.setHorizontalHeaderItem(5, item)
item = QtWidgets.QTableWidgetItem()
self.unissuedTable.setHorizontalHeaderItem(6, item)
item = QtWidgets.QTableWidgetItem()
self.unissuedTable.setHorizontalHeaderItem(7, item)
self.verticalLayout_5.addWidget(self.unissuedTable)
self.horizontalLayout_4.addLayout(self.verticalLayout_5)
self.verticalLayout_4 = QtWidgets.QVBoxLayout()
self.verticalLayout_4.setObjectName("verticalLayout_4")
self.deleteUnissuedButton = QtWidgets.QPushButton(self.layoutWidget1)
self.deleteUnissuedButton.setObjectName("deleteUnissuedButton")
self.verticalLayout_4.addWidget(self.deleteUnissuedButton)
self.editUnissuedButton = QtWidgets.QPushButton(self.layoutWidget1)
self.editUnissuedButton.setObjectName("editUnissuedButton")
self.verticalLayout_4.addWidget(self.editUnissuedButton)
self.refreshUnissuedButton = QtWidgets.QPushButton(self.layoutWidget1)
self.refreshUnissuedButton.setObjectName("refreshUnissuedButton")
self.verticalLayout_4.addWidget(self.refreshUnissuedButton)
spacerItem1 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.verticalLayout_4.addItem(spacerItem1)
self.horizontalLayout_4.addLayout(self.verticalLayout_4)
self.horizontalLayout_6.addLayout(self.horizontalLayout_4)
self.verticalLayout_7.addLayout(self.horizontalLayout_6)
self.allBooksTab.addTab(self.tab, "")
self.tab_2 = QtWidgets.QWidget()
self.tab_2.setObjectName("tab_2")
self.layoutWidget2 = QtWidgets.QWidget(self.tab_2)
self.layoutWidget2.setGeometry(QtCore.QRect(10, 10, 771, 551))
self.layoutWidget2.setObjectName("layoutWidget2")
self.verticalLayout = QtWidgets.QVBoxLayout(self.layoutWidget2)
self.verticalLayout.setContentsMargins(0, 0, 0, 0)
self.verticalLayout.setObjectName("verticalLayout")
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.label_5 = QtWidgets.QLabel(self.layoutWidget2)
self.label_5.setObjectName("label_5")
self.horizontalLayout.addWidget(self.label_5)
self.searchInput = QtWidgets.QLineEdit(self.layoutWidget2)
self.searchInput.setObjectName("searchInput")
self.horizontalLayout.addWidget(self.searchInput)
self.searchButton = QtWidgets.QPushButton(self.layoutWidget2)
self.searchButton.setObjectName("searchButton")
self.horizontalLayout.addWidget(self.searchButton)
self.verticalLayout.addLayout(self.horizontalLayout)
self.findBooksTable = QtWidgets.QTableWidget(self.layoutWidget2)
self.findBooksTable.setColumnCount(8)
self.findBooksTable.setObjectName("findBooksTable")
self.findBooksTable.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.findBooksTable.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.findBooksTable.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.findBooksTable.setHorizontalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.findBooksTable.setHorizontalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.findBooksTable.setHorizontalHeaderItem(4, item)
item = QtWidgets.QTableWidgetItem()
self.findBooksTable.setHorizontalHeaderItem(5, item)
item = QtWidgets.QTableWidgetItem()
self.findBooksTable.setHorizontalHeaderItem(6, item)
item = QtWidgets.QTableWidgetItem()
self.findBooksTable.setHorizontalHeaderItem(7, item)
self.verticalLayout.addWidget(self.findBooksTable)
self.allBooksTab.addTab(self.tab_2, "")
self.tab_3 = QtWidgets.QWidget()
self.tab_3.setObjectName("tab_3")
self.layoutWidget3 = QtWidgets.QWidget(self.tab_3)
self.layoutWidget3.setGeometry(QtCore.QRect(10, 10, 771, 551))
self.layoutWidget3.setObjectName("layoutWidget3")
self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.layoutWidget3)
self.verticalLayout_2.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.label_6 = QtWidgets.QLabel(self.layoutWidget3)
self.label_6.setObjectName("label_6")
self.horizontalLayout_2.addWidget(self.label_6)
self.refreshButton = QtWidgets.QPushButton(self.layoutWidget3)
self.refreshButton.setObjectName("refreshButton")
self.horizontalLayout_2.addWidget(self.refreshButton)
self.verticalLayout_2.addLayout(self.horizontalLayout_2)
self.allBooksTable = QtWidgets.QTableWidget(self.layoutWidget3)
self.allBooksTable.setColumnCount(8)
self.allBooksTable.setObjectName("allBooksTable")
self.allBooksTable.setRowCount(0)
item = QtWidgets.QTableWidgetItem()
self.allBooksTable.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.allBooksTable.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.allBooksTable.setHorizontalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.allBooksTable.setHorizontalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.allBooksTable.setHorizontalHeaderItem(4, item)
item = QtWidgets.QTableWidgetItem()
self.allBooksTable.setHorizontalHeaderItem(5, item)
item = QtWidgets.QTableWidgetItem()
self.allBooksTable.setHorizontalHeaderItem(6, item)
item = QtWidgets.QTableWidgetItem()
self.allBooksTable.setHorizontalHeaderItem(7, item)
self.verticalLayout_2.addWidget(self.allBooksTable)
self.allBooksTab.addTab(self.tab_3, "")
main.setCentralWidget(self.centralwidget)
self.retranslateUi(main)
self.allBooksTab.setCurrentIndex(1)
QtCore.QMetaObject.connectSlotsByName(main)
def retranslateUi(self, main):
_translate = QtCore.QCoreApplication.translate
main.setWindowTitle(_translate("main", "Library system"))
self.label.setText(_translate("main", "Actions:"))
self.newBookButton.setText(_translate("main", "New Book"))
self.label_2.setText(_translate("main", "Books"))
self.label_3.setText(_translate("main", "Issued books:"))
item = self.issuedTable.horizontalHeaderItem(0)
item.setText(_translate("main", "Id"))
item = self.issuedTable.horizontalHeaderItem(1)
item.setText(_translate("main", "Name"))
item = self.issuedTable.horizontalHeaderItem(2)
item.setText(_translate("main", "Description"))
item = self.issuedTable.horizontalHeaderItem(3)
item.setText(_translate("main", "ISBN"))
item = self.issuedTable.horizontalHeaderItem(4)
item.setText(_translate("main", "Page count"))
item = self.issuedTable.horizontalHeaderItem(5)
item.setText(_translate("main", "Issued"))
item = self.issuedTable.horizontalHeaderItem(6)
item.setText(_translate("main", "Author"))
item = self.issuedTable.horizontalHeaderItem(7)
item.setText(_translate("main", "Year"))
self.deleteIssuedButton.setText(_translate("main", "Delete"))
self.editIssuedButton.setText(_translate("main", "Edit"))
self.refreshIssuedButton.setText(_translate("main", "Refresh"))
self.label_4.setText(_translate("main", "Un Issued books:"))
item = self.unissuedTable.horizontalHeaderItem(0)
item.setText(_translate("main", "Id"))
item = self.unissuedTable.horizontalHeaderItem(1)
item.setText(_translate("main", "Name"))
item = self.unissuedTable.horizontalHeaderItem(2)
item.setText(_translate("main", "Description"))
item = self.unissuedTable.horizontalHeaderItem(3)
item.setText(_translate("main", "ISBN"))
item = self.unissuedTable.horizontalHeaderItem(4)
item.setText(_translate("main", "Page count"))
item = self.unissuedTable.horizontalHeaderItem(5)
item.setText(_translate("main", "Issued"))
item = self.unissuedTable.horizontalHeaderItem(6)
item.setText(_translate("main", "Author"))
item = self.unissuedTable.horizontalHeaderItem(7)
item.setText(_translate("main", "Year"))
self.deleteUnissuedButton.setText(_translate("main", "Delete"))
self.editUnissuedButton.setText(_translate("main", "Edit"))
self.refreshUnissuedButton.setText(_translate("main", "Refresh"))
self.allBooksTab.setTabText(self.allBooksTab.indexOf(self.tab), _translate("main", "Dashboard"))
self.label_5.setText(_translate("main", "Search books by id:"))
self.searchButton.setText(_translate("main", "Find"))
item = self.findBooksTable.horizontalHeaderItem(0)
item.setText(_translate("main", "Id"))
item = self.findBooksTable.horizontalHeaderItem(1)
item.setText(_translate("main", "Name"))
item = self.findBooksTable.horizontalHeaderItem(2)
item.setText(_translate("main", "Description"))
item = self.findBooksTable.horizontalHeaderItem(3)
item.setText(_translate("main", "ISBN"))
item = self.findBooksTable.horizontalHeaderItem(4)
item.setText(_translate("main", "Page count"))
item = self.findBooksTable.horizontalHeaderItem(5)
item.setText(_translate("main", "Issued"))
item = self.findBooksTable.horizontalHeaderItem(6)
item.setText(_translate("main", "Author"))
item = self.findBooksTable.horizontalHeaderItem(7)
item.setText(_translate("main", "Year"))
self.allBooksTab.setTabText(self.allBooksTab.indexOf(self.tab_2), _translate("main", "Find"))
self.label_6.setText(_translate("main", "All books"))
self.refreshButton.setText(_translate("main", "Refresh"))
item = self.allBooksTable.horizontalHeaderItem(0)
item.setText(_translate("main", "Id"))
item = self.allBooksTable.horizontalHeaderItem(1)
item.setText(_translate("main", "Name"))
item = self.allBooksTable.horizontalHeaderItem(2)
item.setText(_translate("main", "Description"))
item = self.allBooksTable.horizontalHeaderItem(3)
item.setText(_translate("main", "ISBN"))
item = self.allBooksTable.horizontalHeaderItem(4)
item.setText(_translate("main", "Page count"))
item = self.allBooksTable.horizontalHeaderItem(5)
item.setText(_translate("main", "Issued"))
item = self.allBooksTable.horizontalHeaderItem(6)
item.setText(_translate("main", "Author"))
item = self.allBooksTable.horizontalHeaderItem(7)
item.setText(_translate("main", "Year"))
self.allBooksTab.setTabText(self.allBooksTab.indexOf(self.tab_3), _translate("main", "All books"))
|
from XYZUtil4.customclass.Signal import Signal
from .Singleton import singleton
@singleton
class Signals:
    """Process-wide registry of pub/sub signals (one shared instance via @singleton).

    Each attribute is a Signal; the positional payload types are documented
    in the trailing comment of each line.
    """
    pump_ena = Signal(int, bool)  # (bot_id, enabled) - pump on/off request
    channel_switch = Signal(int, int, bool)  # (bot_id, chem_id, is_open) - valve state
    cur_level = Signal(int, int, int)  # (bot_id, chem_id, level) - current fill level
    cur_flow = Signal(int, int, float)  # (bot_id, chem_id, flow) - current flow rate
    cur_pressure = Signal(int, int, int)  # (bot_id, chem_id, pressure) - current pressure
    temp_humi_data = Signal(int, float, float)  # (bot_id, temp, humi) - environment reading
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    def countNodes(self, root):
        """Count the nodes of a complete binary tree.

        Exploits completeness: measure the depth along the left-most and the
        right-most paths. When the two depths agree the tree is perfect and
        holds 2**depth - 1 nodes; otherwise recurse into both children. At
        every level one child is perfect and returns immediately, so the
        total work is O(log^2 n).

        :type root: TreeNode
        :rtype: int
        """
        if root is None:
            return 0
        # depth following only left links
        left_depth = 1
        node = root
        while node.left:
            node = node.left
            left_depth += 1
        # depth following only right links
        right_depth = 1
        node = root
        while node.right:
            node = node.right
            right_depth += 1
        if left_depth == right_depth:
            # Perfect subtree: closed-form node count.
            return 2 ** left_depth - 1
        return 1 + self.countNodes(root.left) + self.countNodes(root.right)
|
'''
keys:
Solutions:
Similar:
T:
S:
'''
from typing import List
# https://leetcode.com/problems/least-number-of-unique-integers-after-k-removals/discuss/686335/JavaPython-3-Greedy-Alg.%3A-3-methods-from-O(nlogn)-to-O(n)-w-brief-explanation-and-analysis.
class Solution:
    # https://leetcode.com/problems/least-number-of-unique-integers-after-k-removals/discuss/686335/JavaPython-3-Greedy-Alg.%3A-3-methods-from-O(nlogn)-to-O(n)-w-brief-explanation-and-analysis.
    # Greedy invariant for all three methods: spending the removal budget on
    # the rarest values first eliminates the most distinct values.
    # Fix vs. original: Counter/heapq were used without being imported
    # anywhere in this file (NameError at call time); imports are now local.

    # T: O(nlogn), S: O(n)
    def findLeastNumOfUniqueInts(self, arr: List[int], k: int) -> int:
        """Return the least number of distinct values left after removing
        exactly k elements, using a min-heap keyed on frequency."""
        from collections import Counter
        from heapq import heappush, heappop
        distinct_ele_ct = 0
        if len(arr) <= k:
            return distinct_ele_ct  # budget covers every element
        ct = Counter(arr)
        minHeap = []
        for num, freq in ct.items():
            heappush(minHeap, (freq, num))
        distinct_ele_ct = len(ct.keys())
        while k > 0 and minHeap:
            freq, num = heappop(minHeap)
            k -= freq
            if k >= 0:
                # the whole value was removed
                distinct_ele_ct -= 1
        if k > 0:
            # defensive; unreachable because sum(freqs) == len(arr) > k
            distinct_ele_ct -= k
        return distinct_ele_ct

    # shorter version of heapq
    def findLeastNumOfUniqueInts1(self, arr: List[int], k: int) -> int:
        """Same greedy, compact form; (k < 0) re-counts the value whose
        occurrences were only partially removed."""
        import collections
        import heapq
        hp = [(freq, num) for num, freq in collections.Counter(arr).items()]
        heapq.heapify(hp)
        # `and hp` guards against popping an empty heap when k >= len(arr)
        # (the original raised IndexError in that case)
        while k > 0 and hp:
            k -= heapq.heappop(hp)[0]  # [0] for the freq
        return len(hp) + (k < 0)

    # hashmap / bucket count
    # O(n) for S and T
    def findLeastNumOfUniqueInts2(self, arr: List[int], k: int) -> int:
        """Count how many values share each frequency, then consume the
        removal budget frequency bucket by frequency bucket."""
        from collections import Counter
        c = Counter(arr)
        # cnt: key = frequency of a value, value = how many values have that frequency
        cnt, remaining = Counter(c.values()), len(c)
        # note: a Counter returns 0 for missing keys, so absent buckets are no-ops
        for key in range(1, len(arr) + 1):  # key is the frequency
            # key * cnt[key] is the total element count of this bucket
            if k >= key * cnt[key]:
                k -= key * cnt[key]
                remaining -= cnt[key]
            else:  # can't afford deleting all values of this frequency
                return remaining - k // key
        return remaining
|
from time import sleep
from multiprocessing.dummy import Pool as ThreadPool, Event
event = Event()  # shared flag coordinating click() (setter) and update() (waiter)
def click():
    """Simulate a user editing the web form, then raise the shared event flag."""
    # event.clear()  # would reset the flag to False (False is also the initial state)
    print("用户在修改网页表单")  # "the user is editing the web form"
    sleep(2)  # pretend the edit takes a while
    print("点击了修改案例")  # "clicked the edit-case button"
    event.set()  # flag becomes True; update() may proceed
def update():
    """Block until the shared event flag is set, then report success."""
    print(f"事件状态:{event.is_set()}")  # state before waiting (may still be False)
    event.wait()  # block until click() calls event.set()
    print("修改成功")  # "update succeeded"
    print(f"事件状态:{event.is_set()}")  # state after waiting (always True here)
def main():
    """Run the demo tasks on a thread pool and wait for them to finish."""
    workers = ThreadPool()
    # same submission order as the demo intends: click, update, click
    for task in (click, update, click):
        workers.apply_async(task)
    workers.close()
    workers.join()
if __name__ == '__main__':
    # Demo entry point: shows Event-based coordination across pool threads.
    main()
|
from __future__ import print_function
import numpy as np
import pandas as pd
class Utils:
    """Stateless helpers for loading data and converting label encodings."""

    @staticmethod
    def read_data(path, cols_names):
        """Load a header-less tab-separated file into a DataFrame with the
        given column names."""
        return pd.read_csv(path, sep='\t', header=None, names=cols_names)

    @staticmethod
    def to_one_hot(labels, n_classes):
        """Convert an integer label array into a one-hot matrix of shape
        (len(labels), n_classes)."""
        flat = labels.reshape(-1)
        return np.eye(n_classes)[flat]

    @staticmethod
    def to_label(data):
        """Collapse one-hot (or score) rows back to integer labels via
        per-row argmax."""
        return np.array([np.argmax(row) for row in data])
|
from robot_bases import MJCFBasedRobot, URDFBasedRobot
import numpy as np
import pybullet_data
import os
import gym
from robot_bases import BodyPart
class Hand(URDFBasedRobot):
    """24-joint robotic hand (arm base + five 4-joint fingers) loaded from URDF.

    Touch sensing is derived from contact points on the two distal links of
    each finger (5 fingers x 2 links = 10 sensors).
    """
    # Names of extra objects to spawn at reset; poses are looked up in object_poses.
    used_objects = []
    object_poses = {}
    num_joints = 24
    num_touch_sensors = 10
    class ObsSpaces:
        # Keys of the observation dict exposed through observation_space.
        JOINT_POSITIONS = "joint_positions"
        TOUCH_SENSORS = "touch_sensors"
    def __init__(self):
        # contact_threshold: max |contact distance| for a contact to count
        self.robot_position = [0, 0, 0]
        self.contact_threshold = 0.1
        self.action_dim = self.num_joints
        URDFBasedRobot.__init__(self,
                                'hand_description/urdf/hand.urdf',
                                'hand0', action_dim=self.action_dim, obs_dim=1)
        self.observation_space = gym.spaces.Dict({
            self.ObsSpaces.JOINT_POSITIONS: gym.spaces.Box(
                -np.inf, np.inf, [self.num_joints], dtype = float),
            self.ObsSpaces.TOUCH_SENSORS: gym.spaces.Box(
                0, np.inf, [self.num_touch_sensors], dtype = float)})
        # body-id -> name and name -> body maps, filled in robot_specific_reset
        self.object_names = dict()
        self.object_bodies = dict()
        self.robot_parts = {}
        self.touch_sensors = []
        self.joint_names = []
        # Joint order: 4 arm joints first, then 4 joints per finger;
        # the two distal joints of each finger double as touch sensors.
        for arm_part in ["arm_base", "arm0", "arm1", "palm"]:
            self.joint_names.append(arm_part)
        for finger in ["thumb", "index", "middle", "ring", "little"]:
            for nm in range(4):
                if nm >= 2:
                    self.touch_sensors.append(finger+"%d"%nm)
                self.joint_names.append(finger+"%d"%nm)
    def reset(self, bullet_client):
        """Wipe the simulation, reload the robot and return the first state."""
        bullet_client.resetSimulation()
        super(Hand, self).reset(bullet_client)
        return self.calc_state()
    def get_contacts(self, forces=False):
        """Map each touching body part name to the objects it contacts.

        With forces=False values are lists of object names; with forces=True
        they are (name, force) pairs.  Indices into `contact`: 2 looks like
        the other body's id and 8/9 like contact distance / normal force
        (pybullet getContactPoints layout) -- TODO confirm.
        """
        contact_dict = {}
        for part_name, part in self.parts.items():
            contacts = []
            for contact in part.contact_list():
                if abs(contact[8]) < self.contact_threshold:
                    name = self.object_names[contact[2]]
                    if not forces:
                        if part_name in contact_dict.keys():
                            contact_dict[part_name].append(name)
                        else:
                            contact_dict[part_name]= [name]
                    else:
                        force = contact[9]
                        # NOTE(review): first entry is a tuple but later
                        # appends are lists -- consumers only index [1],
                        # so it works, but the inconsistency is worth fixing.
                        if part_name in contact_dict.keys():
                            contact_dict[part_name].append([name, force])
                        else:
                            contact_dict[part_name] = [(name, force)]
        return contact_dict
    def get_touch_sensors(self):
        """Return a vector of the max contact force per touch-sensor link
        (0 where the link touches nothing)."""
        sensors = np.zeros(self.num_touch_sensors)
        contacts = self.get_contacts(forces=True)
        for i, skin in enumerate(self.touch_sensors):
            if skin in contacts.keys():
                cnts = contacts[skin]
                if len(cnts) > 0:
                    force = np.max([cnt[1] for cnt in cnts])
                    sensors[i] = force
        return sensors
    def robot_specific_reset(self, bullet_client):
        """Place the hand, spawn the configured objects and zero all joints."""
        self.robot_body.reset_position(self.robot_position)
        self.object_bodies["hand"] = self.robot_body
        self.object_names[0] = "hand"
        for obj_name in self.used_objects:
            pos = self.object_poses[obj_name]
            obj = get_object(bullet_client,
                             "hand_description/urdf/{}.urdf".format(obj_name),
                             *pos)
            self.object_bodies[obj_name] = obj
            self.object_names.update({obj.bodies[0]: obj_name})
        # zero position and zero velocity for every joint
        for _,joint in self.jdict.items():
            joint.reset_current_position(0, 0)
        for name, part in self.parts.items():
            self.robot_parts.update({part.bodyPartIndex: name})
    def apply_action(self, a):
        """Clamp the 24-dim action in place and send it as joint position targets.

        Thumb values are negated before clamping and negated back afterwards,
        so the thumb is clamped on a mirrored range.
        """
        assert (np.isfinite(a).all())
        assert(len(a) == self.num_joints)
        # constraints
        for i, joint in enumerate(self.joint_names):
            if "arm" in joint:
                a[i] = np.maximum(-np.pi, np.minimum(np.pi, a[i]))
        for i, joint in enumerate(self.joint_names):
            if "thumb" in joint:
                a[i] = -a[i]
        # NOTE(review): arm joints do not match the "0 and not arm" test and
        # therefore fall into the else branch, so the [-pi, pi] clamp above is
        # overridden by [0, 0.4*pi] -- verify this is intended.
        for i, joint in enumerate(self.joint_names):
            if "0" in joint and not "arm" in joint:
                a[i] = np.maximum(-np.pi/8, np.minimum(np.pi/8, a[i]))
            else:
                a[i] = np.maximum(0, np.minimum(np.pi*0.4, a[i]))
        for i, joint in enumerate(self.joint_names):
            if "thumb" in joint:
                a[i] = -a[i]
        # jdict.set_position
        for i, joint in enumerate(self.joint_names):
            self.jdict[joint].set_position(a[i])
    def calc_state(self):
        """Return current joint positions, ordered like self.joint_names."""
        joints = [ self.jdict[joint].get_position() for joint in self.joint_names]
        return joints
def get_object(bullet_client, object_file, x, y, z, roll=0, pitch=0, yaw=0):
    """Load a URDF object into the simulation and wrap it as a BodyPart.

    Fix vs. original: removed the dead local `fixed = True`, which contradicted
    the actual `useFixedBase=False` call below and could mislead maintainers.

    Args:
        bullet_client: pybullet client used to load and query the body.
        object_file: URDF path relative to pybullet_data's data directory.
        x, y, z: base position of the object.
        roll, pitch, yaw: base orientation in Euler angles (default: identity).

    Returns:
        BodyPart wrapping the newly loaded (non-fixed) body.
    """
    position = [x, y, z]
    orientation = bullet_client.getQuaternionFromEuler([roll, pitch, yaw])
    # The object is deliberately loaded free-floating (useFixedBase=False)
    # so the hand can move it.
    body = bullet_client.loadURDF(
        fileName=os.path.join(pybullet_data.getDataPath(), object_file),
        basePosition=position,
        baseOrientation=orientation,
        useFixedBase=False,
        flags=bullet_client.URDF_USE_INERTIA_FROM_FILE)
    part_name, _ = bullet_client.getBodyInfo(body)
    part_name = part_name.decode("utf8")
    bodies = [body]
    return BodyPart(bullet_client, part_name, bodies, 0, -1)
|
#
#
# Test misc utils
#
#
import unittest
from abstractgame import AbstractGame
from connectgame import ConnectGame
from utils import softmax, random_simulation, uct_factory
class TestUtils(unittest.TestCase):
    """Unit tests for the misc utility helpers."""

    def test_board_creation_new(self):
        # softmax over equal logits must yield a uniform distribution
        probs = softmax([1, 1])
        self.assertTrue(probs[0] == 0.5)
        self.assertTrue(probs[1] == 0.5)
        probs = softmax([2, 2, 2, 2])
        self.assertTrue(probs[0] == 0.25)
        self.assertTrue(probs[3] == 0.25)

    def test_random_simulation(self):
        # random playouts should stay roughly balanced between the players
        total_plays = 100
        net_wins = 0
        for _ in range(total_plays):
            game = AbstractGame(ConnectGame())
            winner = random_simulation(game)
            if winner == 1:
                net_wins = net_wins + 1
            if winner == 0:
                net_wins = net_wins - 1
        self.assertTrue(net_wins > -5)
        self.assertTrue(net_wins < 50)

    def test_uct_factory(self):
        # a UCT function built with c=1 evaluates to 1.0 for unit inputs
        vanilla = uct_factory(1)
        self.assertEqual(vanilla(1.0, 1.0, 1.0, 1.0), 1.0)
if __name__ == '__main__':
    # Run the test suite when this module is executed directly.
    unittest.main()
|
import json
from time import sleep
import random
from selenium.common.exceptions import NoSuchElementException
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.webdriver.firefox.options import Options as FirefoxOptions
def init_driver(headless=True, proxy=None, show_images=False):
    """Create and configure a Firefox WebDriver instance.

    Args:
        headless: run without a visible window when exactly True.
        proxy: optional "host:port" proxy server.
        show_images: currently unused (kept for interface compatibility).

    Returns:
        A configured selenium Firefox driver.
    """
    opts = FirefoxOptions()
    run_headless = headless is True
    if run_headless:
        print("Scraping on headless mode. KEKE")
        opts.add_argument('--disable-gpu')
        opts.add_argument('window-size=1920x1080')
    opts.headless = run_headless
    opts.add_argument('log-level=3')
    if proxy is not None:
        opts.add_argument('--proxy-server=%s' % proxy)
    # Firefox profile where the twitter account is logged in and cached
    # (see about:profiles in the firefox address bar).
    profile = webdriver.FirefoxProfile(
        './axyqdd0e.default-release')
    browser = webdriver.Firefox(firefox_profile=profile, options=opts)
    browser.set_window_position(0, 0)
    browser.set_window_size(900, 768)
    browser.set_page_load_timeout(100)
    return browser
def log_in(driver, timeout=10):
    """Log into twitter using the (hard-coded) credentials below."""
    username = ''  # your twitter login
    password = ''  # your twitter password
    driver.get('https://www.twitter.com/login')
    user_locator = '//input[@name="session[username_or_email]"]'
    pass_locator = '//input[@name="session[password]"]'
    # wait until both form fields exist before typing
    user_field = WebDriverWait(driver, timeout).until(
        EC.presence_of_element_located((By.XPATH, user_locator)))
    pass_field = WebDriverWait(driver, timeout).until(
        EC.presence_of_element_located((By.XPATH, pass_locator)))
    user_field.send_keys(username)
    pass_field.send_keys(password)
    pass_field.send_keys(Keys.RETURN)
def check_exists_by_link_text(text, driver):
    """Return True when an element with the given link text is present."""
    try:
        driver.find_element_by_link_text(text)
        return True
    except NoSuchElementException:
        return False
def get_users_follow(users, headless, follow=None, verbose=1, wait=2):
    """ get the following or followers of a list of users

    Scrolls each user's /following page, collecting the handles shown in the
    user cells, and returns {user: [handles]}.
    NOTE(review): `follow` defaults to None but is concatenated into strings
    and XPaths below -- callers must always pass e.g. "following".
    """
    # initiate the driver
    driver = init_driver(headless=headless)
    sleep(wait)
    # log in (the .env file should contain the username and password)
    #log_in(driver)
    sleep(wait)
    # followers and following dict of each user
    follows_users = {}
    for user in users:
        # log user page
        print("Crawling @" + user + " " + follow)
        driver.get('https://twitter.com/' + user + '/following')
        # randomized sleeps throughout to look less bot-like
        sleep(random.uniform(wait-0.5, wait+0.5))
        # find the following or followers button
        #driver.find_element_by_xpath('//a[contains(@href,"/' + follow + '")]/span[1]/span[1]').click()
        sleep(random.uniform(wait-0.5, wait+0.5))
        # if the log in fails, find the new log in button and log in again.
        if check_exists_by_link_text("Log in", driver):
            login = driver.find_element_by_link_text("Log in")
            sleep(random.uniform(wait-0.5, wait+0.5))
            # click via JS to avoid "element not interactable" issues
            driver.execute_script("arguments[0].click();", login)
            sleep(random.uniform(wait-0.5, wait+0.5))
            driver.get('https://twitter.com/' + user)
            sleep(random.uniform(wait-0.5, wait+0.5))
            driver.find_element_by_xpath(
                '//a[contains(@href,"/' + follow + '")]/span[1]/span[1]').click()
            sleep(random.uniform(wait-0.5, wait+0.5))
        # check if we must keep scrolling
        scrolling = True
        last_position = driver.execute_script("return window.pageYOffset;")
        follows_elem = []   # ordered handles collected so far
        follow_ids = set()  # full profile URLs, used for de-duplication
        while scrolling:
            # get the card of following or followers
            page_cards = driver.find_elements_by_xpath(
                '//div[contains(@data-testid,"UserCell")]')
            for card in page_cards:
                # get the following or followers element
                element = card.find_element_by_xpath(
                    './/div[1]/div[1]/div[1]//a[1]')
                follow_elem = element.get_attribute('href')
                # append to the list
                follow_id = str(follow_elem)
                # keep only the handle (last URL path segment)
                follow_elem = str(follow_elem).split('/')[-1]
                if follow_id not in follow_ids:
                    follow_ids.add(follow_id)
                    follows_elem.append(follow_elem)
                if verbose:
                    print(follow_elem)
            print("Found " + str(len(follows_elem)) + " " + follow)
            # scroll to the bottom; give up (end of list) after 3 attempts
            # in a row where the page position did not move
            scroll_attempt = 0
            while True:
                sleep(random.uniform(wait-0.5, wait+0.5))
                driver.execute_script(
                    'window.scrollTo(0, document.body.scrollHeight);')
                sleep(random.uniform(wait-0.5, wait+0.5))
                curr_position = driver.execute_script(
                    "return window.pageYOffset;")
                if last_position == curr_position:
                    scroll_attempt += 1
                    # end of scroll region
                    if scroll_attempt >= 3:
                        scrolling = False
                        break
                    #return follows_elem
                    else:
                        # attempt another scroll
                        sleep(random.uniform(wait-0.5, wait+0.5))
                else:
                    # the page moved: new cards loaded, parse them again
                    last_position = curr_position
                    break
        follows_users[user] = follows_elem
    driver.quit()
    return follows_users
def get_users_following(users, verbose=1, headless=True, wait=2):
    """Convenience wrapper: fetch the 'following' list of each user."""
    return get_users_follow(users, headless, "following", verbose, wait=wait)
def get_users():
    """Load the tracked user list from users.json in the working directory."""
    with open("users.json", "r") as fh:
        return json.load(fh)
def add_user(user):
    """Snapshot a new user's following list and register the user for tracking."""
    # initial snapshot becomes the "old" state for future diffs
    following = get_users_following(
        users=[user], verbose=0, headless=True, wait=1)
    with open("old_state/" + user + ".json", "w") as fh:
        json.dump(following[user], fh)
    tracked = get_users()
    tracked.append(user)
    with open("users.json", "w") as fh:
        json.dump(tracked, fh)
def update_following():
    """Refresh the current-state following snapshot of every tracked user."""
    tracked = get_users()
    following = get_users_following(
        users=tracked, verbose=0, headless=True, wait=1)
    for name in tracked:
        with open("current_state/" + name + ".json", "w") as fh:
            json.dump(following[name], fh)
def compare_user_following():
    """Diff old vs current snapshots per user, store the newly followed
    accounts, and roll the old state forward to the current one."""
    for name in get_users():
        with open("old_state/" + name + ".json", "r") as fh:
            previous = json.load(fh)
        with open("current_state/" + name + ".json", "r") as fh:
            latest = json.load(fh)
        # set difference: accounts present now but not before
        newly_followed = list(set(latest) - set(previous))
        with open("new_following/" + name + ".json", "w") as fh:
            json.dump(newly_followed, fh)
        with open("old_state/" + name + ".json", "w") as fh:
            json.dump(latest, fh)
def get_new_following():
    """Build one report message per tracked user that followed new accounts."""
    messages = []
    for name in get_users():
        with open("new_following/" + name + ".json", "r") as fh:
            fresh = json.load(fh)
        if len(fresh) > 0:
            body = ""
            for handle in fresh:
                body = body + "<https://twitter.com/" + handle + ">" + "\n"
            messages.append("<https://twitter.com/" + name + ">" + " is following new accounts:\n" + body)
    return messages
def delete_user(user):
    """Stop tracking a user; return whether the user was actually tracked."""
    tracked = get_users()
    if user not in tracked:
        return False
    tracked.remove(user)
    with open("users.json", "w") as fh:
        json.dump(tracked, fh)
    return True
from discord.ext import commands
TOKEN = ""  # your discord bot token (keep it out of version control)
bot = commands.Bot(command_prefix="/")  # all commands are invoked as /name
@bot.command(name="add")
async def add_user_message(ctx, arg):
    """Discord command /add <user>: start tracking a twitter user."""
    await ctx.channel.send(arg + " is adding...")
    # NOTE(review): add_user drives a blocking selenium scrape, which stalls
    # the discord event loop until it finishes -- confirm this is acceptable.
    add_user(arg)
    await ctx.channel.send(arg + " is added!")
@bot.command(name="update")
async def update_users_following_message(ctx):
    """Discord command /update: refresh all snapshots, diff them, and post
    one message per user who followed new accounts."""
    await ctx.channel.send("The updating is in process. The results will be available soon.")
    # both calls are blocking (selenium + file I/O); see note on /add
    update_following()
    compare_user_following()
    await ctx.channel.send("Updated! Reading results...")
    result = get_new_following()
    if len(result) > 0:
        for res in result:
            await ctx.channel.send(res)
    else:
        await ctx.channel.send("No one has subscribed to anyone recently :(")
@bot.command(name="delete")
async def delete_user_message(ctx, arg):
    """Discord command /delete <user>: stop tracking a twitter user."""
    if delete_user(arg):
        await ctx.channel.send(arg + " is successfully deleted!")
    else:
        await ctx.channel.send("There is no such user")
# start the bot; blocks until the process is stopped
bot.run(TOKEN)
|
from glob import glob
from xml.dom.minidom import parse
from os.path import basename
def getParseCommandPairMappingsForTrain():
    '''
    Returns a dictionary mapping from filename (without the .xml extension)
    to the list of (parsed-text, annotation) pairs of that recipe,
    for recipes in the training split.
    '''
    mappings = {}
    for recipe_idx, path in enumerate(glob("parsed_annotated_recipes/*.xml")):
        # every 10th recipe (indices 0, 10, 20, ...) is held out of training
        if recipe_idx % 10 == 0:
            continue
        document = parse(path)
        pairs = [
            (line.getElementsByTagName("parsed-text")[0].childNodes[0].data,
             line.getElementsByTagName("annotation")[0].childNodes[0].data)
            for line in document.getElementsByTagName("line")
        ]
        mappings[basename(path)[:-4]] = pairs
    return mappings
#!/usr/bin/env python
# coding=utf-8
"""
Site: http://www.beebeeto.com/
Framework: https://github.com/n0tr00t/Beebeeto-framework
"""
import requests
from baseframe import BaseFrame
class MyPoc(BaseFrame):
    """PoC for the WordPress Genericons example.html jQuery DOM XSS.

    Fixes vs. original: the Python-2-only `print` statement (a SyntaxError
    on Python 3) now uses the print() function, and the substring check uses
    `req.text` (str) instead of `req.content` (bytes on Python 3); both forms
    behave the same under Python 2.
    """
    poc_info = {
        # PoC metadata
        'poc': {
            'id': 'poc-2015-0097',
            'name': 'Wordpress /example.html jQuery DomXSS漏洞 POC',
            'author': '1024',
            'create_date': '2015-05-08',
        },
        # protocol information
        'protocol': {
            'name': 'http',
            'port': [80],
            'layer4_protocol': ['tcp'],
        },
        # vulnerability information
        'vul': {
            'app_name': 'WordPress',
            'vul_version': ['*'],
            'type': 'Cross Site Scripting',
            'tag': ['WordPress默认模板漏洞', '/example.html XSS 漏洞', 'jQuery 漏洞'],
            'desc': '''
        该漏洞存在于 WordPress 流行的 Genericons example.html 页面中,
        默认主题 Twenty Fifteen 及知名插件 Jetpack 都内置了该页面,
        由于 example.html 使用了老版本存在 DOM XSS 缺陷的 jQuery,且使用不当,
        导致出现 DOM XSS,这种攻击将无视浏览器的 XSS Filter 防御。
        ''',
            'references': ['http://linux.im/2015/05/07/jQuery-1113-DomXSS-Vulnerability.html'],
        },
    }
    @classmethod
    def verify(cls, args):
        """Probe the target for the vulnerable example.html page.

        Marks args['success'] and records args['poc_ret']['vul_url'] when the
        page exists and embeds the vulnerable jQuery 1.7.2.
        """
        url = args['options']['target']
        verify_url = '%s/wp-content/themes/twentyfifteen/genericons/example.html' % url
        if args['options']['verbose']:
            print('[*] Request URL: ' + verify_url)
        req = requests.get(verify_url)
        if req.status_code == 200:
            if 'jquery/1.7.2/jquery.min.js"></script>' in req.text:
                args['success'] = True
                args['poc_ret']['vul_url'] = verify_url
        return args
    # exploitation is identical to verification for this detection-only PoC
    exploit = verify
if __name__ == '__main__':
    from pprint import pprint
    # Smoke-run the PoC and dump whatever result dict run() produces.
    mp = MyPoc()
    pprint(mp.run())
import os
os.chdir('C:\\ENEA_CAS_WORK\\Catania_RAFAEL\\postprocessing')
os.getcwd()
import numpy as np
import pandas as pd
import geopandas as gpd
from geopandas import GeoDataFrame
from shapely.geometry import Point
import folium
import osmnx as ox
import networkx as nx
import math
import momepy
from shapely import geometry
from shapely.geometry import Point, Polygon
import psycopg2
import datetime
import seaborn as sns
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.colors as mcolors
from folium_stuff_FK_map_matching import plot_graph_folium_FK
from shapely.geometry import Point, LineString, MultiLineString
from shapely import geometry, ops
import glob
## reload data (to be used later on...)
# gdf_all_EDGES = gpd.read_file("all_EDGES.geojson")
# gdf_all_EDGES = gpd.read_file("all_EDGES_10032020.geojson") # LARGE file
# gdf_all_EDGES = gpd.read_file("all_EDGES_2019-04-15.geojson")
# gdf_all_EDGES = gpd.read_file("all_EDGES_2019-04-15_Apr-07-2020.geojson")
# gdf_all_EDGES = gpd.read_file("all_EDGES_2019-04-15_Apr-11-2020.geojson")
# match pattern of .GeoJson files
os.chdir('C:\\ENEA_CAS_WORK\\Catania_RAFAEL\\postprocessing\\new_geojsons')
extension = 'geojson'
all_filenames = [i for i in glob.glob('*.{}'.format(extension))]
#combine all files in the list
gdf_all_EDGES = pd.concat([gpd.read_file(f) for f in all_filenames])
# make a .csv file that assigns to each (u,v) pair, the type of road ("highway")
edges = []
import json
for file in all_filenames:
# with open("all_EDGES_2019-04-15_Apr-15-2020_0_130.geojson") as f:
with open(file) as f:
data = json.load(f)
for feature in data['features']:
print(feature['properties'])
edge = [feature['properties']['u'], feature['properties']['v'], feature['properties']['highway']]
edges.append((edge))
edges_highways = pd.DataFrame(edges, columns=['u', 'v', 'highway'])
os.chdir('C:\\ENEA_CAS_WORK\\Catania_RAFAEL\\postprocessing')
edges_highways.to_csv('edges_highways.csv')
os.chdir('C:\\ENEA_CAS_WORK\\Catania_RAFAEL\\postprocessing')
## select only columns 'u' and 'v'
gdf_all_EDGES_sel = gdf_all_EDGES[['u', 'v']]
# time --> secs
# distance --> km
# speed --> km/h
# gdf_all_EDGES_time = gdf_all_EDGES[['u', 'v', 'time', 'distance', 'speed', 'hour', 'timedate']]
gdf_all_EDGES_time = gdf_all_EDGES[['u', 'v', 'time', 'distance', 'speed']]
## fill nans by mean of before and after non-nan values (for 'time' and 'speed')
gdf_all_EDGES_time['time'] = (gdf_all_EDGES_time['time'].ffill()+gdf_all_EDGES_time['time'].bfill())/2
gdf_all_EDGES_time['speed'] = (gdf_all_EDGES_time['speed'].ffill()+gdf_all_EDGES_time['speed'].bfill())/2
# AAA = pd.DataFrame(gdf_all_EDGES_time)
# AAA.dropna(subset = ['hour'], inplace= True)
###################
#### GROUP BY #####
###################
#######################################################################
## count how many times an edge ('u', 'v') occur in the geodataframe ##
#######################################################################
df_all_EDGES_sel = gdf_all_EDGES.groupby(gdf_all_EDGES_sel.columns.tolist()).size().reset_index().rename(columns={0:'records'})
# make a copy
df_all_EDGES_records = df_all_EDGES_sel
threshold = np.average(df_all_EDGES_records.records)
### select only columns with records > N
# df_all_EDGES_sel = df_all_EDGES_sel[df_all_EDGES_sel.records >= 15]
# df_all_EDGES_sel = df_all_EDGES_sel[df_all_EDGES_sel.records >= round(threshold,0) + 1]
### add colors based on 'records'
vmin = min(df_all_EDGES_records.records)
vmax = max(df_all_EDGES_records.records)
# df_all_EDGES_records.iloc[-1] = np.nan
# Try to map values to colors in hex
norm = matplotlib.colors.Normalize(vmin=vmin, vmax=vmax, clip=True)
mapper = plt.cm.ScalarMappable(norm=norm, cmap=plt.cm.Reds) # scales of reds
df_all_EDGES_records['color'] = df_all_EDGES_records['records'].apply(lambda x: mcolors.to_hex(mapper.to_rgba(x)))
# records = df_all_EDGES_sel[['records']]
df_all_EDGES_sel = df_all_EDGES_sel[['u','v']]
# filter recover_all_EDGES (geo-dataframe) with df_recover_all_EDGES_sel (dataframe)
keys = list(df_all_EDGES_sel.columns.values)
index_recover_all_EDGES = gdf_all_EDGES.set_index(keys).index
index_df_all_EDGES_sel = df_all_EDGES_sel.set_index(keys).index
clean_edges_matched_route = gdf_all_EDGES[index_recover_all_EDGES.isin(index_df_all_EDGES_sel)]
# get same color name according to the same 'u' 'v' pair
clean_edges_matched_route[['u', 'v']].head()
# merge records and colors into the geodataframe
MERGED_clean_EDGES = pd.merge(clean_edges_matched_route, df_all_EDGES_records, on=['u', 'v'], how='inner')
# remove duplicates nodes
MERGED_clean_EDGES.drop_duplicates(['u', 'v'], inplace=True)
MERGED_clean_EDGES['records'] = round(MERGED_clean_EDGES['records'], 0)
MERGED_clean_EDGES['length(km)'] = MERGED_clean_EDGES['length']/1000
MERGED_clean_EDGES['length(km)'] = round(MERGED_clean_EDGES['length(km)'], 3)
# compute a relative frequeny (how much the edge was travelled compared to the total number of tracked vehicles...in %)
max_records = max(MERGED_clean_EDGES['records'])
MERGED_clean_EDGES['frequency(%)'] = (MERGED_clean_EDGES['records']/max_records)*100
MERGED_clean_EDGES['frequency(%)'] = round(MERGED_clean_EDGES['frequency(%)'], 0)
#############################################################################################
# create basemap
ave_LAT = 37.53988692816245
ave_LON = 15.044971594798902
my_map = folium.Map([ave_LAT, ave_LON], zoom_start=14, tiles='cartodbpositron')
###################################################
# add colors to map
my_map = plot_graph_folium_FK(MERGED_clean_EDGES, graph_map=None, popup_attribute=None,
zoom=15, fit_bounds=True, edge_width=2, edge_opacity=0.7)
style = {'fillColor': '#00000000', 'color': '#00000000'}
# add 'u' and 'v' as highligths for each edge (in blue)
folium.GeoJson(
# data to plot
MERGED_clean_EDGES[['u','v', 'frequency(%)', 'records', 'length(km)', 'geometry']].to_json(),
show=True,
style_function=lambda x:style,
highlight_function=lambda x: {'weight':3,
'color':'blue',
'fillOpacity':1
},
# fields to show
tooltip=folium.features.GeoJsonTooltip(
fields=['u', 'v', 'length(km)', 'frequency(%)', 'records']
),
).add_to(my_map)
folium.TileLayer('cartodbdark_matter').add_to(my_map)
folium.LayerControl().add_to(my_map)
##########################################
MERGED_clean_EDGES.to_file(filename='FREQUENCIES_and_RECORDS_by_EDGES.geojson', driver='GeoJSON')
# my_map.save("clean_matched_route_frequecy_all_EDGES_10032020.html")
my_map.save("clean_matched_route_frequecy_all_EDGES_2019-04-15_Apr-17-2020.html")
### compute the average number of trips between the SAME ORIGIN and DESTINATION
gdf_all_EDGES_ODs = gdf_all_EDGES[['ORIGIN', 'DESTINATION']]
df_all_EDGES_ODs = gdf_all_EDGES.groupby(gdf_all_EDGES_ODs.columns.tolist()).size().reset_index().rename(columns={0:'N_trips'})
edge_with_more_trips = df_all_EDGES_ODs[['ORIGIN','DESTINATION']][ df_all_EDGES_ODs.N_trips == max(df_all_EDGES_ODs.N_trips)]
#######################################################################
######### get the travelled TIME in each edge, when available #########
#######################################################################
# LENGHTS = pd.DataFrame(gdf_all_EDGES.length)
# SUMMARY_times = pd.DataFrame(gdf_all_EDGES_time.time) # time is in seconds
# SUMMARY_times = SUMMARY_times.dropna(subset=['time'])
# SUMMARY_times.reset_index(inplace=True)
# get maximum edge length
L = []
for i in range(len(gdf_all_EDGES)):
l = gdf_all_EDGES.iloc[i].length
L.append(l)
max_length = max(L) #in meters
# Out[89]: 11050.723 meters
# minimum speed: 30kh/h ---> 120 sec for each km of road
max_possible_time = (1/60)*3600*max_length/1000
# gdf_all_EDGES_time = gdf_all_EDGES_time[gdf_all_EDGES_time.time < max_possible_time]
# AAA = pd.DataFrame(gdf_all_EDGES_time)
### get AVERAGE of traveled "time" and travelled "speed" for each edge
df_all_EDGES_time = (gdf_all_EDGES_time.groupby(['u', 'v']).mean()).reset_index()
df_all_EDGES_time.columns = ["u", "v", "travel_time", "travel_distance", "travel_speed"]
df_all_EDGES_time = pd.merge(MERGED_clean_EDGES, df_all_EDGES_time, on=['u', 'v'], how='inner')
df_all_EDGES_time = pd.DataFrame(df_all_EDGES_time)
sorted_length = df_all_EDGES_time.sort_values('length')
df_all_EDGES_time = df_all_EDGES_time[["u", "v", "travel_time", "travel_distance", "length(km)", "travel_speed"]]
### merge with the above "df_all_EDGES_sel" referred to the counts counts
# df_all_EDGES_time = pd.merge(df_all_EDGES_time, df_all_EDGES_sel, on=['u', 'v'], how='inner')
### drop NaN values
df_all_EDGES_time = df_all_EDGES_time.dropna(subset=['travel_time'])
df_all_EDGES_time['travel_time'] = ((df_all_EDGES_time['length(km)']) / (df_all_EDGES_time['travel_speed'])) *3600 # seconds
# sort values by travelled time
# sorted_values = df_all_EDGES_time.sort_values('travel_time')
# df_all_EDGES_time = df_all_EDGES_time[df_all_EDGES_time.travel_time < 1500] #(1000 sec == 16 minutes)
# sorted_values = df_all_EDGES_time.sort_values('travel_time')
# make a copy
df_all_timeEDGES = df_all_EDGES_time
# add colors based on 'time' (seconds)
vmin = min(df_all_timeEDGES.travel_time[df_all_timeEDGES.travel_time > 0])
vmax = max(df_all_timeEDGES.travel_time)
AVG = np.average(df_all_timeEDGES.travel_time)
# Try to map values to colors in hex
norm = matplotlib.colors.Normalize(vmin=vmin, vmax=vmax, clip=True)
mapper = plt.cm.ScalarMappable(norm=norm, cmap=plt.cm.cool) # scales of reds (or "coolwarm" , "bwr", °cool°)
df_all_timeEDGES['color'] = df_all_timeEDGES['travel_time'].apply(lambda x: mcolors.to_hex(mapper.to_rgba(x)))
df_all_EDGES_time = df_all_EDGES_time[['u','v']]
# filter recover_all_EDGES (geo-dataframe) with df_recover_all_EDGES_sel (dataframe)
keys = list(df_all_EDGES_time.columns.values)
index_recover_all_EDGES = gdf_all_EDGES.set_index(keys).index
index_df_all_EDGES_time = df_all_EDGES_time.set_index(keys).index
times_edges_matched_route = gdf_all_EDGES[index_recover_all_EDGES.isin(index_df_all_EDGES_time)]
# get same color name according to the same 'u' 'v' pair
# merge records and colors into the geodataframe
TIME_EDGES = pd.merge(times_edges_matched_route, df_all_timeEDGES, on=['u', 'v'], how='inner')
# remove duplicates nodes
TIME_EDGES.drop_duplicates(['u', 'v'], inplace=True)
TIME_EDGES['travel_time'] = round(TIME_EDGES['travel_time'], 1)
# TIME_EDGES['travel_time'] = TIME_EDGES['travel_time']/60 # minutes
# TIME_EDGES['travel_time'] = round(TIME_EDGES['travel_time'], 3)
# TIME_EDGES['travel_distance'] = (TIME_EDGES['travel_speed']) * (TIME_EDGES['travel_time']/60) # (km/h)
# TIME_EDGES['travel_distance'] = round(abs(TIME_EDGES['travel_distance']), 2)
TIME_EDGES['length(km)'] = TIME_EDGES['length']/1000
TIME_EDGES['length(km)'] = round(TIME_EDGES['length(km)'], 3)
# TIME_EDGES['travel_distance'] = round(abs(TIME_EDGES["length(km)"]), 2)
# TIME_EDGES['travel_time'] = round((TIME_EDGES['length(km)'])/(TIME_EDGES['travel_speed']), 0)
TIME_EDGES['travel_speed'] = round(TIME_EDGES['travel_speed'], 0)
TIME_EDGES=TIME_EDGES.rename(columns = {'travel_time':'travel time (sec)'})
TIME_EDGES=TIME_EDGES.rename(columns = {'travel_distance':'travelled distance (km)'})
TIME_EDGES=TIME_EDGES.rename(columns = {'travel_speed':'travelled speed (km/h)'})
#############################################################################################
# create basemap
ave_LAT = 37.53988692816245
ave_LON = 15.044971594798902
my_map = folium.Map([ave_LAT, ave_LON], zoom_start=11, tiles='cartodbpositron')
#############################################################################################
# add colors to map
my_map = plot_graph_folium_FK(TIME_EDGES, graph_map=None, popup_attribute=None,
zoom=1, fit_bounds=True, edge_width=2, edge_opacity=1)
style = {'fillColor': '#00000000', 'color': '#00000000'}
# add 'u' and 'v' as highligths for each edge (in blue)
folium.GeoJson(
# data to plot
TIME_EDGES[['travel time (sec)', 'travelled speed (km/h)', 'length(km)', 'geometry']].to_json(),
show=True,
style_function=lambda x:style,
highlight_function=lambda x: {'weight':3,
'color':'blue',
'fillOpacity':1
},
# fields to show
tooltip=folium.features.GeoJsonTooltip(
fields=['travel time (sec)', 'travelled speed (km/h)', 'length(km)']
),
).add_to(my_map)
folium.TileLayer('cartodbdark_matter').add_to(my_map)
folium.LayerControl().add_to(my_map)
TIME_EDGES.to_file(filename='TIME_EDGES.geojson', driver='GeoJSON')
my_map.save("clean_matched_route_travel_time_all_EDGES_2019-04-15_Apr-17-2020.html")
#######################################################################
######### get the travelled SPEED in each edge, when available ########
#######################################################################
### get average of traveled "time" and travelled "speed" for each edge
df_all_EDGES_time = (gdf_all_EDGES_time.groupby(['u', 'v']).mean()).reset_index()
df_all_EDGES_time.columns = ["u", "v", "travel_time", "travel_distance", "travel_speed"]
# attach the per-edge averages to the cleaned edge attribute table
df_all_EDGES_time = pd.merge(MERGED_clean_EDGES, df_all_EDGES_time, on=['u', 'v'], how='inner')
df_all_EDGES_time = pd.DataFrame(df_all_EDGES_time)
df_all_EDGES_time = df_all_EDGES_time[["u", "v", "travel_time", "travel_distance", "length(km)", "travel_speed"]]
### merge with the above "df_all_EDGES_sel" referred to the counts counts
# df_all_EDGES_time = pd.merge(df_all_EDGES_time, df_all_EDGES_sel, on=['u', 'v'], how='inner')
### drop NaN values
df_all_EDGES_speed = df_all_EDGES_time.dropna(subset=['travel_speed'])
# recompute travel time (seconds) from edge length and the averaged speed
# NOTE(review): df_all_EDGES_speed is a slice of df_all_EDGES_time, so this
# assignment can trigger pandas' SettingWithCopyWarning — consider .copy().
df_all_EDGES_speed['travel_time'] = ((df_all_EDGES_speed['length(km)']) / (df_all_EDGES_speed['travel_speed'])) *3600 # seconds
# sort values by travelled time
sorted_values = df_all_EDGES_speed.sort_values('travel_speed')
# make a copy
# NOTE(review): this is an alias, not a copy — both names point to the same
# DataFrame object; use .copy() if an independent copy was intended.
df_all_speedEDGES = df_all_EDGES_speed
# add colors based on 'time' (seconds)
vmin = min(df_all_EDGES_speed.travel_speed[df_all_EDGES_speed.travel_speed > 0])
vmax = max(df_all_EDGES_speed.travel_speed)
AVG = np.average(df_all_EDGES_speed.travel_speed)
# Try to map values to colors in hex
norm = matplotlib.colors.Normalize(vmin=vmin, vmax=vmax, clip=True)
mapper = plt.cm.ScalarMappable(norm=norm, cmap=plt.cm.YlGn) # scales of reds (or "coolwarm" , "bwr")
df_all_EDGES_speed['color'] = df_all_EDGES_speed['travel_speed'].apply(lambda x: mcolors.to_hex(mapper.to_rgba(x)))
# keep only the (u, v) edge key; geometry is recovered from gdf_all_EDGES below
df_all_EDGES_speed = df_all_EDGES_speed[['u','v']]
# filter recover_all_EDGES (geo-dataframe) with df_recover_all_EDGES_sel (dataframe)
keys = list(df_all_EDGES_speed.columns.values)
index_recover_all_EDGES = gdf_all_EDGES.set_index(keys).index
index_df_all_EDGES_speed = df_all_EDGES_speed.set_index(keys).index
speeds_edges_matched_route = gdf_all_EDGES[index_recover_all_EDGES.isin(index_df_all_EDGES_speed)]
# get same color name according to the same 'u' 'v' pair
# merge records and colors into the geodataframe
SPEED_EDGES = pd.merge(speeds_edges_matched_route, df_all_speedEDGES, on=['u', 'v'], how='inner')
# remove duplicates nodes
SPEED_EDGES.drop_duplicates(['u', 'v'], inplace=True)
SPEED_EDGES['travel_time'] = round(SPEED_EDGES['travel_time'], 1)
# SPEED_EDGES['travel_time'] = round(SPEED_EDGES['travel_time'], 0)
# SPEED_EDGES['travel_time'] = SPEED_EDGES['travel_time']/60
# SPEED_EDGES['travel_time'] = round(SPEED_EDGES['travel_time'], 3)
# SPEED_EDGES['travel_distance'] = (SPEED_EDGES['travel_speed']) * (SPEED_EDGES['travel_time']/60) # (km/h)
# SPEED_EDGES['travel_distance'] = round(abs(SPEED_EDGES['travel_distance']), 2)
SPEED_EDGES['travel_speed'] = round(SPEED_EDGES['travel_speed'], 0)
SPEED_EDGES['length(km)'] = SPEED_EDGES['length']/1000
SPEED_EDGES['length(km)'] = round(SPEED_EDGES['length(km)'], 3)
# rename to the human-readable column names shown in the map tooltip below
SPEED_EDGES=SPEED_EDGES.rename(columns = {'travel_time':'travel time (sec)'})
SPEED_EDGES=SPEED_EDGES.rename(columns = {'travel_distance':'travelled distance (km)'})
SPEED_EDGES=SPEED_EDGES.rename(columns = {'travel_speed':'travelled speed (km/h)'})
#############################################################################################
# create basemap
ave_LAT = 37.53988692816245
ave_LON = 15.044971594798902
my_map = folium.Map([ave_LAT, ave_LON], zoom_start=11, tiles='cartodbpositron')
#############################################################################################
# add colors to map
my_map = plot_graph_folium_FK(SPEED_EDGES, graph_map=None, popup_attribute=None,
zoom=1, fit_bounds=True, edge_width=2, edge_opacity=1)
style = {'fillColor': '#00000000', 'color': '#00000000'}
# add 'u' and 'v' as highligths for each edge (in blue)
folium.GeoJson(
# data to plot
SPEED_EDGES[['travel time (sec)', 'travelled speed (km/h)', 'length(km)', 'geometry']].to_json(),
show=True,
style_function=lambda x:style,
highlight_function=lambda x: {'weight':3,
'color':'blue',
'fillOpacity':1
},
# fields to show
tooltip=folium.features.GeoJsonTooltip(
fields=['travel time (sec)', 'travelled speed (km/h)', 'length(km)']
),
).add_to(my_map)
folium.TileLayer('cartodbdark_matter').add_to(my_map)
folium.LayerControl().add_to(my_map)
# SPEED_EDGES.to_file(filename='SPEED_EDGES.geojson', driver='GeoJSON')
# my_map.save("clean_matched_route_travel_time.html")
my_map.save("clean_matched_route_travel_speed_all_EDGES_2019-04-15_Apr-17-2020.html")
'''
# add "time" to travel each edge (if found) as highligths for each edge (in blue)
folium.GeoJson(
# data to plot
MERGED_clean_EDGES[['time', 'geometry']].to_json(),
show=True,
style_function=lambda x:style,
highlight_function=lambda x: {'weight':3,
'color':'orange',
'fillOpacity':1
},
# fields to show
tooltip=folium.features.GeoJsonTooltip(
fields=['time']
),
).add_to(my_map)
'''
######################################################################
######################## COLORBAR ####################################
######################################################################
import matplotlib as mpl
# unique (frequency, color) pairs actually used on the frequency map
COLORS_by_records = pd.DataFrame( MERGED_clean_EDGES.drop_duplicates(['frequency(%)', 'color']))[['frequency(%)', 'color']]
# sort by ascending order of the column records
COLORS_by_records = COLORS_by_records.sort_values(by=['frequency(%)'])
len(COLORS_by_records)
# keep same order...
color_list = COLORS_by_records.color.drop_duplicates().tolist()
# display colorbar based on hex colors:
fig, ax = plt.subplots(figsize=(8, 1))
fig.subplots_adjust(bottom=0.5)
# cmap = matplotlib.colors.ListedColormap(color_list)
cmap = mpl.cm.Reds
MAX = max(COLORS_by_records['frequency(%)'])
MIN = min(COLORS_by_records['frequency(%)'])
# NOTE(review): the next two calls pass numeric strings where a color is
# expected and are immediately overridden by the 'k'/'white' calls below —
# they look like leftovers and are candidates for removal.
cmap.set_over(str(MAX + 5))
cmap.set_under(str(MIN -5))
cmap.set_over('k')
cmap.set_under('white')
# make a sequence list of records
bounds = np.arange(MIN, MAX, 10).tolist()
norm = mpl.colors.BoundaryNorm(bounds, cmap.N)
# horizontal standalone colorbar matching the map's frequency scale
cb2 = mpl.colorbar.ColorbarBase(ax, cmap=cmap,
                                norm=norm,
                                boundaries=[5] + bounds + [MAX+5],
                                extend='both',
                                ticks=bounds,
                                spacing='uniform',
                                orientation='horizontal')
cb2.set_label('travel frequency (%)')
# fig.show()
# save colorbar (map-matching frequency)
fig.savefig('colorbar_map_matched.png')
merc = os.path.join('colorbar_map_matched.png')
# overlay colorbar to my_map (bounds are [[lat, lon], [lat, lon]] corners)
folium.raster_layers.ImageOverlay(merc, bounds = [[37.822617, 15.734203], [37.768644,15.391770]], interactive=True, opacity=1).add_to(my_map)
# re-save map
# my_map.save("clean_matched_route_frequecy.html")
# my_map.save("clean_matched_route_frequecy_all_EDGES_10032020.html")
my_map.save("clean_matched_route_frequecy_all_EDGES_2019-04-15_Apr-03-2020.html")
################################################################
################################################################
'''
geoms = []
# get all the paths accross the same edge (u,v)
for i in range(len(MERGED_clean_EDGES)):
    U = MERGED_clean_EDGES.u.iloc[i]
    V = MERGED_clean_EDGES.v.iloc[i]
    print('u:', U, 'v:', V, '================================================')
    BBB = gdf_all_EDGES[(gdf_all_EDGES['u'] == U) & (gdf_all_EDGES['v'] == V)]
    # get all the "story of the track_ID vehicles
    ID_list = list(BBB.track_ID)
    # filter gdf_all_EDGES based on a list of index
    all_paths = gdf_all_EDGES[gdf_all_EDGES.track_ID.isin(ID_list)]
    # all_paths.plot()
    # make an unique linestring
    LINE = []
    # combine them into a multi-linestring
    for j in range(len(all_paths)):
        line = all_paths.geometry.iloc[j]
        LINE.append(line)
    multi_line = geometry.MultiLineString(LINE)
    # merge the lines
    merged_line = ops.linemerge(multi_line)
    geoms.append(merged_line)
# newdata = gpd.GeoDataFrame(MERGED_clean_EDGES, geometry=geoms) # this file is too BIG!!!
# newdata.geometry.to_file(filename='newdata.geojson', driver='GeoJSON')
'''
#########################################################################
##### ORIGINS and DESTINATIONS accross the same edge (u,v) ##############
#########################################################################
import os
# NOTE(review): hard-coded Windows working directory — not portable.
os.chdir('C:\\ENEA_CAS_WORK\\Catania_RAFAEL\\postprocessing')
os.getcwd()
# load grafo (the OSMnx road graph of the Catania region, with node coords)
file_graphml = 'Catania__Italy_cost.graphml'
grafo = ox.load_graphml(file_graphml)
# ox.plot_graph(grafo)
# make an empty dataframe to report all ORIGINS from which travels started and that crossed a given edge (u,v)
all_ORIGINS_df = pd.DataFrame([])
all_DESTINATIONS_df = pd.DataFrame([])
# For every (u, v) edge of the cleaned matched route, collect the coordinates
# of the trip origins and destinations of all vehicles that crossed that edge.
# get all the path accross the same edge (u,v)
for i in range(len(MERGED_clean_EDGES)):
    U = MERGED_clean_EDGES.u.iloc[i]
    V = MERGED_clean_EDGES.v.iloc[i]
    # print('u:', U, 'v:', V, '================================================')
    BBB = gdf_all_EDGES[(gdf_all_EDGES['u'] == U) & (gdf_all_EDGES['v'] == V)]
    # get all the "story of the track_ID vehicles
    ID_list = list(BBB.track_ID)
    # filter gdf_all_EDGES based on a list of index
    all_paths = gdf_all_EDGES[gdf_all_EDGES.track_ID.isin(ID_list)]
    # all_paths.plot()
    # make an unique list of ORIGIN and DESTINATION nodes
    ORIGINS = list(all_paths.ORIGIN.unique())
    DESTINATIONS = list(all_paths.DESTINATION.unique())
    df_ORIGINS_lon = []
    df_ORIGINS_lat = []
    df_DESTINATIONS_lon = []
    df_DESTINATIONS_lat = []
    df = pd.DataFrame([])
    # get the latitutde and longitute from the grafo of all the Catania region
    for NODE_O in ORIGINS:
        try:
            lon_o = grafo.nodes[NODE_O]['x']
            lat_o = grafo.nodes[NODE_O]['y']
        except KeyError:
            # NOTE(review): if the very first node is missing, lon_o/lat_o are
            # unbound and the appends below raise NameError; for later nodes
            # the previous node's coordinates are silently appended again.
            pass
        df_ORIGINS_lon.append(lon_o)
        df_ORIGINS_lat.append(lat_o)
    # df.append with a list builds one single-column frame per coordinate axis
    # NOTE(review): DataFrame.append is deprecated in modern pandas.
    df_lon_o = df.append(df_ORIGINS_lon, True)
    df_lat_o = df.append(df_ORIGINS_lat, True)
    df_lon_o.columns = ['LON_ORIGIN']
    df_lat_o.columns = ['LAT_ORIGIN']
    ORIGINS_coord = pd.concat([df_lon_o, df_lat_o], axis=1)
    ORIGINS_coord['u'] = U
    ORIGINS_coord['v'] = V
    all_ORIGINS_df = all_ORIGINS_df.append(ORIGINS_coord)
    for NODE_D in DESTINATIONS:
        try:
            lon_d = grafo.nodes[NODE_D]['x']
            lat_d = grafo.nodes[NODE_D]['y']
        except KeyError:
            # same unbound/stale-coordinate caveat as for the origins above
            pass
        df_DESTINATIONS_lon.append(lon_d)
        df_DESTINATIONS_lat.append((lat_d))
    df_lon_d = df.append(df_DESTINATIONS_lon, True)
    df_lat_d = df.append(df_DESTINATIONS_lat, True)
    df_lon_d.columns = ['LON_DESTINATION']
    df_lat_d.columns = ['LAT_DESTINATION']
    # bind the dataframes (keep track of U and V)
    DESTINATIONS_coord = pd.concat([df_lon_d, df_lat_d], axis=1)
    DESTINATIONS_coord['u'] = U
    DESTINATIONS_coord['v'] = V
    all_DESTINATIONS_df = all_DESTINATIONS_df.append(DESTINATIONS_coord)
# remove duplicates
all_ORIGINS_df.drop_duplicates(['LON_ORIGIN', 'LAT_ORIGIN'], inplace=True)
# make a geodataframe from lat, lon
geometry = [Point(xy) for xy in zip(all_ORIGINS_df.LON_ORIGIN, all_ORIGINS_df.LAT_ORIGIN)]
crs = {'init': 'epsg:4326'}
all_ORIGINS_gdf = GeoDataFrame(all_ORIGINS_df, crs=crs, geometry=geometry)
# save first as geojson file
# NOTE(review): the same file name is reused for the destinations file below,
# so this origins GeoJSON gets overwritten — likely a copy/paste slip.
all_ORIGINS_gdf.geometry.to_file(filename='all_PATHS_gdf.geojson', driver='GeoJSON')
# all_ORIGINS_gdf.plot()
all_DESTINATIONS_df.drop_duplicates(['LON_DESTINATION', 'LAT_DESTINATION'], inplace=True)
# make a geodataframe from lat, lon
geometry = [Point(xy) for xy in zip(all_DESTINATIONS_df.LON_DESTINATION, all_DESTINATIONS_df.LAT_DESTINATION)]
crs = {'init': 'epsg:4326'}
all_DESTINATIONS_gdf = GeoDataFrame(all_DESTINATIONS_df, crs=crs, geometry=geometry)
# save first as geojson file
all_DESTINATIONS_gdf.geometry.to_file(filename='all_PATHS_gdf.geojson', driver='GeoJSON')
# all_DESTINATIONS_gdf.plot()
# one small blue circle per unique trip origin...
for idx, row in all_ORIGINS_df.iterrows():
    folium.CircleMarker(location=[row["LAT_ORIGIN"], row["LON_ORIGIN"]],
                        # popup=row["deviceid"],
                        radius=0.5,
                        color="blue",
                        # fill=True,
                        # fill_color="black",
                        fill_opacity=0.1).add_to(my_map)
# ...and one small red circle per unique trip destination
for idx, row in all_DESTINATIONS_df.iterrows():
    folium.CircleMarker(location=[row["LAT_DESTINATION"], row["LON_DESTINATION"]],
                        # popup=row["deviceid"],
                        radius=0.5,
                        color="red",
                        # fill=True,
                        # fill_color="blue",
                        fill_opacity=0.1).add_to(my_map)
my_map.save("clean_matched_route_OD.html")
|
# -*- coding: utf-8 -*-
'''
Created on 2015年5月20日
@author: zqh
'''
import freetime.util.log as ftlog
from hall.servers.common.base_checker import BaseMsgPackChecker
from poker.protocol import runcmd
from poker.protocol.decorator import markCmdActionHandler, markCmdActionMethod
@markCmdActionHandler
class ComplainTcpHandler(BaseMsgPackChecker):
    """TCP message handler for the 'complain' command (currently a stub)."""

    def __init__(self):
        pass

    @markCmdActionMethod(cmd='complain', action="complain", clientIdVer=0)
    def doComplain(self):
        """Log the incoming complain message and acknowledge it with code 1."""
        message = runcmd.getMsgPack()
        ftlog.error('NotImplementedError, doComplain, msg=', message)
        return runcmd.newOkMsgPack(code=1)
|
import pytest
from fireo.fields import TextField, NumberField
from fireo.models import Model
from fireo.models.errors import AbstractNotInstantiate, NonAbstractModel
class User(Model):
    # Abstract base model: provides `name` but may not be instantiated itself.
    name = TextField()

    class Meta:
        abstract = True
def test_abstract_not_instantiate():
    """Instantiating an abstract model must raise AbstractNotInstantiate."""
    with pytest.raises(AbstractNotInstantiate):
        User()
# Concrete model extending the abstract User: inherits `name`, adds `age`.
class Student(User):
    age = NumberField()
def test_abstract_model():
    """A concrete subclass of an abstract model saves and reloads correctly."""
    student = Student()
    student.name = "Arfan"
    student.age = 27
    student.save()

    fetched = Student.collection.get(student.key)
    assert fetched.name == 'Arfan'
    assert fetched.age == 27
def test_extend_from_non_abstract():
    # Subclassing a concrete (non-abstract) model must raise NonAbstractModel
    # as soon as the subclass body is evaluated.
    with pytest.raises(NonAbstractModel):
        class User2(Model):
            name = TextField()

        class Student2(User2):
            age = NumberField()
|
import logging
from logging.handlers import SMTPHandler, RotatingFileHandler
import os
from flask import Flask, request, current_app
from config import Config
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_login import LoginManager
from flask_mail import Mail
from flask_bootstrap import Bootstrap
from flask_moment import Moment
from flask_babel import Babel, lazy_gettext as _l
from elasticsearch import Elasticsearch
############################################################
# Add plugins core objects
# Extension instances are created at module level *without* an app so that
# the same objects can be shared by every application instance produced by
# the factory below (each app calls init_app() on them).
db = SQLAlchemy()
migrate = Migrate()
login = LoginManager()
login.login_view = 'auth.login'  # endpoint used for @login_required redirects
login.login_message = _l('Please log in to access this page.') # The Flask-Login extension flashes a message in English any time it redirects the user to the login page. This message can be overriden, so we make sure that it gets translated using the _l() function for lazy processing.
mail = Mail()
bootstrap = Bootstrap()
moment = Moment()
babel = Babel()
############################################################
# Application Factory function:
def create_app(config_class=Config):
    """Application factory: build and configure a Flask application instance.

    Parameters:
        config_class: the configuration object to load settings from
            (defaults to ``Config``).

    Returns:
        The fully initialized :class:`Flask` application.
    """
    # Create the application object as an instance of class 'Flask'
    app = Flask(__name__) # This is the 'app' VARIABLE. It is a member of the 'app' PACKAGE.
    app.config.from_object(config_class)
    # Init the app on the already instantiated plugins.
    db.init_app(app)
    migrate.init_app(app, db)
    login.init_app(app)
    mail.init_app(app)
    bootstrap.init_app(app)
    moment.init_app(app)
    babel.init_app(app)
    # Add other capabilities, not wrapped in a plugin:
    app.elasticsearch = Elasticsearch([app.config['ELASTICSEARCH_URL']]) if app.config['ELASTICSEARCH_URL'] else None
    # Blueprints. Imported here to avoid circular dependencies.
    from app.errors import bp as errors_bp
    app.register_blueprint(errors_bp)
    from app.auth import bp as auth_bp
    app.register_blueprint(auth_bp, url_prefix='/auth')
    from app.main import bp as main_bp
    app.register_blueprint(main_bp)
    # Configuration to send email in case of error (only outside debug/testing)
    if not app.debug and not app.testing:
        # General config
        if app.config['MAIL_SERVER']:
            auth = None
            if app.config['MAIL_USERNAME'] or app.config['MAIL_PASSWORD']:
                auth = (app.config['MAIL_USERNAME'], app.config['MAIL_PASSWORD'])
            secure = None
            if app.config['MAIL_USE_TLS']:
                secure = ()
            # email unhandled exceptions to the site admins
            mail_handler = SMTPHandler(
                mailhost=(app.config['MAIL_SERVER'], app.config['MAIL_PORT']),
                fromaddr='no-reply@' + app.config['MAIL_SERVER'],
                toaddrs=app.config['ADMINS'], subject='MicroBlog Website Failure',
                credentials=auth, secure=secure)
            mail_handler.setLevel(logging.ERROR) # CRITICAL > ERROR > WARNING > INFO > DEBUG > NOTSET
            app.logger.addHandler(mail_handler)
        # log either to stdout (e.g. when running on a PaaS) or to a rotating file
        if app.config['LOG_TO_STDOUT']:
            stream_handler = logging.StreamHandler()
            stream_handler.setLevel(logging.INFO)
            app.logger.addHandler(stream_handler)
        else:
            if not os.path.exists('logs'):
                os.mkdir('logs')
            file_handler = RotatingFileHandler('logs/microblog.log',
                                               maxBytes=10240, backupCount=10)
            file_handler.setFormatter(logging.Formatter(
                '%(asctime)s %(levelname)s: %(message)s '
                '[in %(pathname)s:%(lineno)d]'))
            file_handler.setLevel(logging.INFO)
            app.logger.addHandler(file_handler)
        app.logger.setLevel(logging.INFO)
        app.logger.info('Microblog startup')
    return app
############################################################
# 'localselector' is a decorator included in the Babel class. The decorated function is invoked for each request to select a language translation to use for that request
@babel.localeselector
def get_locale():
    # Pick the best match between the client's Accept-Language header and the
    # languages this application supports.
    return request.accept_languages.best_match(current_app.config['LANGUAGES'])
############################################################
# Workaround to avoid circular imports. Import those modules which need to import the 'app' variable. Those modules (created by us) come from the 'app' PACKAGE.
from app import models
|
def is_leap(year):
    """Return True if `year` is a leap year in the Gregorian calendar.

    A year is a leap year when it is divisible by 4, except for century
    years, which are leap years only when divisible by 400.
    """
    return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
# print(is_leap(2000)) //checkpoint
# best solution
# def is_leap(year):
# return year % 4 == 0 and (year % 400 == 0 or year % 100 != 0) |
__author__ = 'anirudha'
import csv
import numpy as np
import matplotlib.animation as animation
import matplotlib.pyplot as plt
def init():
    # FuncAnimation init callback: return the artists that will be redrawn.
    # NOTE(review): relies on the module-level 'line' created further down;
    # safe because the animation is only constructed after 'line' exists.
    return line,
def update(num):
    # FuncAnimation frame callback: show the first `num` CSV samples.
    # x values are 0..num-1, y values are data[:num] (module-level globals).
    #newData = np.array([[1 + num, 2 + num / 2, 3, 4 - num / 4, 5 + num],[7, 4, 9 + num / 3, 2, 3]])
    newData = np.vstack((range(num),data[:num]))
    #print(newData)
    line.set_data(newData)
    # returning no artists is fine here because the animation uses blit=False
    # This is not working i 1.2.1
    # annotation.set_position((newData[0][0], newData[1][0]))
# Read the second CSV column into `data`, skipping the header row and any
# non-numeric values, then animate the series.
# BUG FIX: the file was opened in binary mode ('rb'), which makes csv.reader
# fail on Python 3 ("iterator should return strings, not bytes"). Open in
# text mode with newline='' as the csv module documentation requires.
with open('result.csv', 'r', newline='') as csvfile:
    spamreader = csv.reader(csvfile, delimiter=',', quotechar='|')
    i = 0
    data = []
    for row in spamreader:
        try:
            if i > 0:  # skip the header row
                data.append(int(row[1]))
            i = i + 1
        except ValueError:
            # non-numeric cell: drop the row (the row counter is deliberately
            # not advanced, matching the original behavior)
            pass
print(data)
fig = plt.figure()
ax = plt.axes(xlim=(0, len(data)), ylim=(0, 50000))
line, = ax.plot([], [], 'r-')
# animate one new sample per frame; blit=False so update() need not return artists
anim = animation.FuncAnimation(fig, update, frames=len(data), init_func=init, interval=20, blit=False)
#anim.save('im.mp4', writer=writer)
plt.show()
#plt.plot(data)
|
#!/usr/bin/env python3
from itertools import groupby

# The first input line (a count) is not needed.
input()
# Every maximal run of k consecutive "X" characters in the second line
# contributes k // 3 complete groups of three to the total.
print(sum(len(list(run)) // 3 for char, run in groupby(input()) if char == "X"))
import sys

# "Counting Sheep": for each value N, look at N, 2N, 3N, ... until every
# decimal digit 0-9 has appeared. N = 0 never terminates, so the search is
# capped and reported as INSOMNIA.
tokens = sys.stdin.read().split()[1:]  # first token is the number of cases
for index, token in enumerate(tokens, start=1):
    value = int(token)
    seen = [False] * 10
    multiplier = 1
    while True:
        current = value * multiplier
        multiplier = multiplier + 1
        for digit in str(current):
            seen[int(digit)] = True
        if sum(seen) == 10:
            print("Case #{}: {}".format(index, current))
            break
        if multiplier > 1000:
            print("Case #{}: INSOMNIA".format(index))
            break
#!/usr/bin/python3
def divisible_by_2(my_list=[]):
    """Return a list of booleans marking which elements are even.

    Parameters:
        my_list: a list of integers, or None. (The default `[]` is kept for
            interface compatibility; it is never mutated, so the shared
            mutable-default pitfall does not apply here.)

    Returns:
        A new list where index i is True if my_list[i] is even and False
        otherwise, or None when my_list is None.
    """
    if my_list is None:
        return None
    # comprehension replaces the original copy-then-overwrite index loop
    return [item % 2 == 0 for item in my_list]
|
# Generated by Django 2.2.5 on 2021-10-03 06:46
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: rename field `videos` to `video` on the
    `videos` model of the `guardians_of_children` app."""

    dependencies = [
        ('guardians_of_children', '0004_auto_20211003_0642'),
    ]

    operations = [
        migrations.RenameField(
            model_name='videos',
            old_name='videos',
            new_name='video',
        ),
    ]
|
import cv2
import numpy as np
import matplotlib.pyplot as plt
cmap = plt.cm.viridis
def rgb2gray(rgb):
    """Convert an RGB image array to luminance using ITU-R BT.601 weights.

    Accepts any array whose last axis has at least 3 channels; extra
    channels (e.g. alpha) are ignored.
    """
    weights = [0.299, 0.587, 0.114]
    return np.dot(rgb[..., :3], weights)
def convert_2d_to_3d(u, v, z, K):
    """Back-project pixel (u, v) with depth z into a 3-D camera-frame point.

    K is a 3x3 camera intrinsics matrix (row-major nested sequence):
    fx = K[0][0], fy = K[1][1], principal point (cx, cy) = (K[0][2], K[1][2]).
    Returns the tuple (x, y, z).
    """
    fx, fy = K[0][0], K[1][1]
    cx, cy = K[0][2], K[1][2]
    return ((u - cx) * z / fx, (v - cy) * z / fy, z)
# Feature-point matching between two images.
def feature_match(img1, img2, max_n_features):
    r'''Find SIFT features on both images and match them pairwise.

    Returns two arrays (pts1, pts2) of matching integer pixel coordinates,
    filtered with Lowe's ratio test, or ([], []) when either image yields
    no descriptors.
    '''
    use_flann = False  # better not use flann
    detector = cv2.xfeatures2d.SIFT_create(max_n_features)
    # find the keypoints and descriptors with SIFT
    kp1, des1 = detector.detectAndCompute(img1, None)
    kp2, des2 = detector.detectAndCompute(img2, None)
    if (des1 is None) or (des2 is None):
        return [], []
    des1 = des1.astype(np.float32)
    des2 = des2.astype(np.float32)
    if use_flann:
        # FLANN parameters
        FLANN_INDEX_KDTREE = 0
        index_params = dict(algorithm = FLANN_INDEX_KDTREE, trees = 5)
        search_params = dict(checks=50)
        flann = cv2.FlannBasedMatcher(index_params,search_params)
        matches = flann.knnMatch(des1,des2,k=2)
    else:
        # NOTE(review): cv2.DescriptorMatcher_create('BruteForce') is the
        # documented factory; instantiating then calling .create also works
        # in practice — confirm against the installed OpenCV version.
        matcher = cv2.DescriptorMatcher().create('BruteForce')
        matches = matcher.knnMatch(des1,des2,k=2)
    good = []
    pts1 = []
    pts2 = []
    # ratio test as per Lowe's paper
    for i,(m,n) in enumerate(matches):
        if m.distance < 0.8*n.distance:
            good.append(m)
            pts1.append(kp1[m.queryIdx].pt)
            pts2.append(kp2[m.trainIdx].pt)
    # truncate the sub-pixel keypoint coordinates to integer pixels
    pts1 = np.int32(pts1)
    pts2 = np.int32(pts2)
    return pts1, pts2
def depth_colorize(depth):
    """Min-max normalize a depth map, apply the module-level colormap, and
    return a uint8 RGB image (H, W, 3)."""
    lo, hi = np.min(depth), np.max(depth)
    normalized = (depth - lo) / (hi - lo)
    rgb = 255 * cmap(normalized)[:, :, :3]  # H, W, C
    return rgb.astype('uint8')
# Use the fundamental matrix to reject outlier matches.
def Fundamental(kp1, kp2):
    """Filter matched keypoint pairs with a RANSAC fundamental-matrix fit.

    Parameters:
        kp1, kp2: matching 2-D point sets (anything np.array accepts).

    Returns:
        (kp1, kp2) as numpy arrays with outliers removed, or ([], []) when
        the fundamental matrix could not be estimated. With fewer than 4
        pairs the points are returned unfiltered.
    """
    kp1 = np.array(kp1)
    kp2 = np.array(kp2)
    if len(kp1) >= 4:
        F, mask = cv2.findFundamentalMat(kp1, kp2, cv2.FM_RANSAC, 2, 0.99)
        if F is None:
            print('F matrix is None.')
            return [], []
        # keep only the RANSAC inliers
        kp1 = kp1[mask.ravel() == 1]
        kp2 = kp2[mask.ravel() == 1]
    # BUG FIX: the original fell off the end (implicitly returning None) when
    # fewer than 4 pairs were supplied, crashing callers that unpack two
    # values; now a pair of arrays is always returned.
    return kp1, kp2
## Solve the relative pose with PnP.
def get_pose_pnp(rgb_curr, rgb_near, depth_curr, depth_near, K):
    """Estimate the relative pose between two RGB-D frames via SIFT + PnP.

    Matches features between the two grayscale images, back-projects the
    points that have (dilated) depth to 3-D, and solves PnP-RANSAC in both
    directions (curr->near and near->curr). Also builds sparse depth maps
    holding depth only at the matched feature locations.

    Returns an 11-tuple: (success flag, rvec, tvec, rvec_inv, tvec_inv,
    sparse_input, depth_curr_dilated, sparse_input_near, depth_near_dilated,
    pts_for_loss, pts_for_loss_near); the pose entries are None and the flag
    is 0 when fewer than 4 usable correspondences exist.
    """
    gray_curr = rgb2gray(rgb_curr).astype(np.uint8)
    gray_near = rgb2gray(rgb_near).astype(np.uint8)
    height, width = gray_curr.shape
    max_n_fetures_pose = 1000
    pts2d_curr, pts2d_near = feature_match(gray_curr, gray_near, max_n_fetures_pose)  # feature matching
    # pts2d_curr, pts2d_near = Fundamental(pts2d_curr,pts2d_near)
    # Dilate the depth maps so features land on valid (non-zero) depth more often.
    kernel = cv2.getStructuringElement(cv2.MORPH_RECT,(4, 4))
    depth_curr_int = depth_curr.astype(np.int16)  # cv2.dilate needs int16 here
    depth_near_int = depth_near.astype(np.int16)
    depth_curr_dilated = cv2.dilate(depth_curr_int, kernel)
    depth_near_dilated = cv2.dilate(depth_near_int, kernel)
    # extract 3d pts
    pts3d_curr = []
    pts2d_near_filtered = []  # keep only feature points with depth in the current frame
    pts3d_near = []
    pts2d_curr_filtered = []
    sparse_input = np.zeros((height, width))
    sparse_input_near = np.zeros((height, width))
    # Pixel u is the column index and v the row index in the image arrays.
    for i in range(len(pts2d_curr)):  # one iteration per matched feature pair
        u, v = pts2d_curr[i][0], pts2d_curr[i][1]
        u_n, v_n = pts2d_near[i][0], pts2d_near[i][1]
        z = depth_curr_dilated[v, u]
        z_n = depth_near_dilated[v_n, u_n]
        sparse_input[v, u] = z  # sparse depth map of the current frame (depth only at features)
        sparse_input_near[v_n, u_n] = z_n  # sparse depth map of the neighbor frame
        if z > 0:
            xyz_curr = convert_2d_to_3d(u, v, z, K)
            pts3d_curr.append(xyz_curr)
            pts2d_near_filtered.append(pts2d_near[i])
        if z_n > 0:
            xyz_near = convert_2d_to_3d(u_n, v_n, z_n, K)
            pts3d_near.append(xyz_near)
            pts2d_curr_filtered.append(pts2d_curr[i])
    pts_for_loss = pts2d_curr
    pts_for_loss_near = pts2d_near
    # the minimal number of points accepted by solvePnP is 4:
    if len(pts3d_curr)>=4 and len(pts2d_near_filtered)>=4 and len(pts3d_near)>=4 and len(pts2d_curr_filtered)>=4:
        # Pose from curr to near.
        pts3d_curr = np.expand_dims(np.array(pts3d_curr).astype(np.float32), axis=1)
        pts2d_near_filtered = np.expand_dims(np.array(pts2d_near_filtered).astype(np.float32), axis=1)
        # ransac
        ret = cv2.solvePnPRansac(pts3d_curr, pts2d_near_filtered, K, distCoeffs=None)
        success = ret[0]
        rotation_vector = ret[1]
        translation_vector = ret[2]
        # Pose from near to curr.
        pts3d_near = np.expand_dims(np.array(pts3d_near).astype(np.float32), axis=1)
        pts2d_curr_filtered = np.expand_dims(np.array(pts2d_curr_filtered).astype(np.float32), axis=1)
        ret_inv = cv2.solvePnPRansac(pts3d_near, pts2d_curr_filtered, K, distCoeffs=None)
        success_inv = ret_inv[0]
        rotation_vector_inv = ret_inv[1]
        translation_vector_inv = ret_inv[2]
        return (success and success_inv, rotation_vector, translation_vector, rotation_vector_inv, translation_vector_inv,\
                sparse_input, depth_curr_dilated, sparse_input_near, depth_near_dilated, pts_for_loss, pts_for_loss_near)
    else:
        return (0, None, None, None, None, sparse_input, depth_curr_dilated, sparse_input_near, depth_near_dilated, pts_for_loss, pts_for_loss_near)
"""
A :class:`~QtWidgets.QWidget` for controlling a Thorlabs_ translation stage.
.. _Thorlabs: https://www.thorlabs.com/navigation.cfm?guide_id=2060
"""
import os
import time
from msl.qt import QtWidgets, QtCore, QtGui
from msl.qt import prompt
from msl.qt.io import get_icon
from msl.qt.equipment.thorlabs import show_hardware_info
try:
    from msl.equipment import Config
    from msl.equipment.resources.thorlabs import MotionControlCallback
    from msl.equipment.resources.thorlabs.kinesis.enums import UnitType
    import numpy as np  # a dependency of MSL Equipment

    class _Signaler(QtCore.QObject):
        """Used for sending a signal of the current position."""
        signal = QtCore.pyqtSignal()

    signaler = _Signaler()

    @MotionControlCallback
    def callback():
        # Bridge the Thorlabs motion-control callback into the Qt event loop
        # by emitting a signal; the widget connects it to _update_display.
        signaler.signal.emit()

except ImportError:
    # MSL-Equipment is optional at import time; TranslationStage.__init__
    # raises a descriptive ImportError when 'signaler' is None.
    signaler = None
class TranslationStage(QtWidgets.QWidget):
def __init__(self, connection, config=None, parent=None):
    """A :class:`~QtWidgets.QWidget` for controlling a Thorlabs translation stage.

    Parameters
    ----------
    connection : :class:`~msl.equipment.connection.Connection`
        The connection to the translational stage motor controller
        (e.g., LTS150, LTS300, KST101, KDC101, ...).
    config : :class:`~msl.equipment.config.Config`, optional
        A configuration file.

        The following elements can be defined in a :class:`~msl.equipment.config.Config` file to
        initialize a :class:`TranslationStage`:

        .. code-block:: xml

            <!--
            The following attributes can be defined for a "preset" and a "jog size" element.
            For a "preset" you must define a name attribute:
            units - can be either "mm" or "device". If omitted then the default unit value is "mm"
            name - the text that will displayed in the GUI as the name of the preset
            If multiple translation stages are being used then you can uniquely identify which stage will
            have its properties updated by including one of the additional attributes:
            serial - the serial number of the translation stage motor controller
            alias - the same alias that is used in the <equipment> XML tag
            If you do not include one of 'serial' or 'alias' then all stages will be updated to the XML element value.
            -->
            <thorlabs_translation_stage_preset name='Si-PD' serial="123456789">54.232</thorlabs_translation_stage_preset>
            <thorlabs_translation_stage_preset name='InGaAs-PD' units="mm" serial="123456789">75.2</thorlabs_translation_stage_preset>
            <thorlabs_translation_stage_preset name='Reference' units="device" serial="123456789">10503037</thorlabs_translation_stage_preset>

            <!-- Note: In the following you can also specify the calibration path to be a path relative to the configuration file -->
            <thorlabs_translation_stage_calibration_path serial="123456789">path/to/calibration/file.dat</thorlabs_translation_stage_calibration_path>

            <!-- Since the 'serial', 'alias' and 'unit' attributes are not defined then all stages will have the jog size set to 2.0 mm -->
            <thorlabs_translation_stage_jog_size>2.0</thorlabs_translation_stage_jog_size>

    parent : :class:`QtWidgets.QWidget`, optional
        The parent widget.
    """
    super(TranslationStage, self).__init__(parent=parent)

    # the module-level 'signaler' is None when MSL-Equipment failed to import
    if signaler is None:
        raise ImportError('This widget requires that the MSL-Equipment package is installed')

    if config is not None and not issubclass(config.__class__, Config):
        raise TypeError('Must pass in a MSL Equipment configuration object. Received {}'.format(config.__class__))

    self._connection = connection
    # not every Thorlabs controller supports calibration files
    self._supports_calibration = hasattr(self._connection, 'set_calibration_file')
    self._uncalibrated_mm = np.array([])
    self._calibrated_mm = np.array([])
    self._calibration_label = ''

    # set the calibration file (absolute path, or relative to the config file)
    if config is not None and self._supports_calibration:
        elements = self._find_xml_elements(config, 'thorlabs_translation_stage_calibration_path')
        if elements:
            cal_path = elements[0].text
            rel_path = os.path.join(os.path.dirname(config.path), cal_path)
            if os.path.isfile(cal_path):
                self.set_calibration_file(cal_path)
            elif os.path.isfile(rel_path):
                self.set_calibration_file(rel_path)
            else:
                prompt.critical('Cannot find calibration file\n' + cal_path)

    # set the presets
    self._preset_combobox = QtWidgets.QComboBox()
    self._preset_combobox.setToolTip('Preset positions')
    self._preset_combobox.addItems(['', 'Home'])
    self.preset_positions = {}
    if config is not None:
        for element in self._find_xml_elements(config, 'thorlabs_translation_stage_preset'):
            self.add_preset(element.attrib['name'], float(element.text), element.attrib.get('units', 'mm') == 'mm')

    self._min_pos_mm, self._max_pos_mm = self._connection.get_motor_travel_limits()

    # read-only position read-out; double-click opens a "move to" prompt
    self._position_display = QtWidgets.QLineEdit()
    self._position_display.setReadOnly(True)
    self._position_display.setFont(QtGui.QFont('Helvetica', 24))
    self._position_display.mouseDoubleClickEvent = self._ask_move_to
    fm = QtGui.QFontMetrics(self._position_display.font())
    # wide enough for the largest position value with 3 decimals
    self._position_display.setFixedWidth(fm.width(' {}.xxx'.format(int(self._max_pos_mm))))

    self._home_button = QtWidgets.QPushButton()
    self._home_button.setToolTip('Go to the Home position')
    self._home_button.clicked.connect(self.go_home)
    self._home_button.setIcon(get_icon('ieframe|0'))

    self._stop_button = QtWidgets.QPushButton('Stop')
    self._stop_button.setToolTip('Stop moving immediately')
    self._stop_button.clicked.connect(self._connection.stop_immediate)
    self._stop_button.setIcon(get_icon('wmploc|155'))

    # optional jog size from the config (either mm or device units)
    if config is not None:
        elements = self._find_xml_elements(config, 'thorlabs_translation_stage_jog_size')
        if elements:
            element = elements[0]
            if element.attrib.get('units', 'mm') == 'mm':
                jog_mm = float(element.text)
                jog = self._connection.get_device_unit_from_real_value(jog_mm, UnitType.DISTANCE)
                s = element.text + ' mm'
            else:
                jog = int(float(element.text))
                jog_mm = self._connection.get_real_value_from_device_unit(jog, UnitType.DISTANCE)
                s = element.text + ' device units'
            # reject jog sizes outside the motor's travel range
            if jog_mm > self._max_pos_mm or jog_mm < self._min_pos_mm:
                prompt.critical('Invalid jog size of ' + s)
            else:
                self._connection.set_jog_step_size(jog)

    self._jog_forward_button = QtWidgets.QPushButton()
    self._jog_forward_button.clicked.connect(lambda: self.jog_forward(False))
    self._jog_forward_button.setIcon(get_icon(QtWidgets.QStyle.SP_ArrowUp))

    self._jog_backward_button = QtWidgets.QPushButton()
    self._jog_backward_button.clicked.connect(lambda: self.jog_backward(False))
    self._jog_backward_button.setIcon(get_icon(QtWidgets.QStyle.SP_ArrowDown))

    settings_button = QtWidgets.QPushButton()
    settings_button.clicked.connect(self._show_settings)
    settings_button.setIcon(get_icon('shell32|71'))
    settings_button.setToolTip('Edit the jog and move settings')

    # lay out: presets/stop on the top row, position display flanked by
    # home/jog/settings buttons below
    grid = QtWidgets.QGridLayout()
    grid.addWidget(QtWidgets.QLabel('Presets:'), 0, 0, alignment=QtCore.Qt.AlignRight)
    grid.addWidget(self._preset_combobox, 0, 1)
    grid.addWidget(self._stop_button, 0, 2, 1, 2)
    grid.addWidget(self._position_display, 1, 0, 2, 2)
    grid.addWidget(self._home_button, 1, 2)
    grid.addWidget(self._jog_forward_button, 1, 3)
    grid.addWidget(settings_button, 2, 2)
    grid.addWidget(self._jog_backward_button, 2, 3)
    grid.setSpacing(0)
    grid.setRowStretch(3, 1)
    grid.setColumnStretch(4, 1)
    self.setLayout(grid)

    # poll the controller every 200 ms and refresh the display on each event
    self._connection.start_polling(200)
    self._polling_duration = self._connection.polling_duration() * 1e-3
    self._connection.register_message_callback(callback)
    signaler.signal.connect(self._update_display)

    # _requested_mm is None during the first display update, then tracks the
    # most recently requested target position
    self._requested_mm = None
    self._update_jog_tooltip()
    self._update_display()
    self._requested_mm = float(self._position_display.text())
    self._preset_combobox.setCurrentText(self._get_preset_name(self._requested_mm))
    # connect last so programmatic setup above does not trigger a move
    self._preset_combobox.currentIndexChanged[str].connect(self._go_to_preset)
def add_preset(self, name, position, millimeters=True):
    """Register a named preset position and list it in the preset combo box.

    Parameters
    ----------
    name : :obj:`str`
        The name of the preset.
    position : :obj:`float` or :obj:`int`
        The position value.
    millimeters : :obj:`bool`
        :obj:`True` if `position` is in millimeters, :obj:`False` if it is
        in device units (in which case it is converted to millimeters).
    """
    value = position
    if not millimeters:
        value = self._connection.get_real_value_from_device_unit(int(value), UnitType.DISTANCE)
    self.preset_positions[name] = float(value)
    self._preset_combobox.addItem(name)
def closeEvent(self, event):
    """Overrides `closeEvent <https://doc.qt.io/qt-5/qwidget.html#closeEvent>`_.

    Stops polling the motor controller before the widget is closed.
    """
    self._connection.stop_polling()
def get_jog(self, millimeters=True):
    """Return the jog step size.

    Parameters
    ----------
    millimeters : :obj:`bool`, optional
        Whether to return the jog step size in millimeters (the default)
        or in device units.

    Returns
    -------
    :obj:`int` or :obj:`float`
        The jog step size in device units (:obj:`int`) or in millimeters
        (:obj:`float`).
    """
    device_units = self._connection.get_jog_step_size()
    if millimeters:
        return self._connection.get_real_value_from_device_unit(device_units, UnitType.DISTANCE)
    return device_units
def get_position(self, millimeters=True):
"""Get the current position (calibrated).
If no calibration file has been set then this function returns
the same value as :meth:`get_position_raw`.
Parameters
----------
millimeters : :obj:`bool`, optional
Whether to return the current position in ``device units`` or in ``real-world units``
(i.e., in millimeters). The default is to return the value in millimeters.
Returns
-------
:obj:`int` or :obj:`float`
The current position in either device units (:obj:`int`) or in millimeters
(:obj:`float`).
"""
pos = float(self._position_display.text())
if not millimeters:
return self._connection.get_device_unit_from_real_value(pos, UnitType.DISTANCE)
return pos
def get_position_raw(self, millimeters=True):
"""Get the current position (raw and uncalibrated).
Parameters
----------
millimeters : :obj:`bool`, optional
Whether to return the current position in ``device units`` or in ``real-world units``
(i.e., in millimeters). The default is to return the value in millimeters.
Returns
-------
:obj:`int` or :obj:`float`
The current position (raw and uncalibrated) in either device units (:obj:`int`)
or in millimeters (:obj:`float`).
"""
pos = self._connection.get_position()
if not millimeters:
return pos
return self._connection.get_real_value_from_device_unit(pos, UnitType.DISTANCE)
    def go_home(self, wait=True):
        """Send the motor home.

        Parameters
        ----------
        wait : :obj:`bool`
            Wait until the move is finished before returning control to the calling program.
            If :obj:`True` then this is a blocking method.
        """
        # home is position 0.0 mm / device unit 0
        self._requested_mm = 0.0
        self._connection.home()
        if wait:
            self._wait(0)
            # show 'Home' in the preset combobox without re-triggering a move
            self._update_preset_text_block_signals(0.0)
def jog_backward(self, wait=True):
"""Jog backward.
Parameters
----------
wait : :obj:`bool`
Wait until the move is finished before returning control to the calling program.
If :obj:`True` then this is a blocking method.
"""
# prefer for the move request to go through the move_to method
# rather than using "self._connection.move_jog(MOT_TravelDirection.MOT_Reverse)"
pos = self.get_position() - self.get_jog()
self.move_to(pos, wait=wait, millimeters=True)
def jog_forward(self, wait=True):
"""Jog forward.
Parameters
----------
wait : :obj:`bool`
Wait until the move is finished before returning control to the calling program.
If :obj:`True` then this is a blocking method.
"""
# prefer for the move request to go through the move_to method
# rather than using "self._connection.move_jog(MOT_TravelDirection.MOT_Forwards)"
pos = self.get_position() + self.get_jog()
self.move_to(pos, wait=wait, millimeters=True)
def move_by(self, value, wait=True, millimeters=True):
"""Move by a relative value.
Parameters
----------
value : :obj:`int` or :obj:`float`
The relative value to move by.
wait : :obj:`bool`
Wait until the move is finished before returning control to the calling program.
If :obj:`True` then this is a blocking method.
millimeters : :obj:`bool`, optional
Whether the `value` is in ``device units`` or in ``real-world units`` (i.e., in millimeters).
"""
# prefer for the move request to go through the move_to method
# rather than using "self._connection.move_relative(displacement)"
pos = self.get_position(millimeters) + value
self.move_to(pos, wait=wait, millimeters=millimeters)
def move_to(self, value, wait=True, millimeters=True):
"""Move to an absolute position.
Parameters
----------
value : :obj:`int`, :obj:`float` or :obj:`str`
If :obj:`str` then the name of a preset. Otherwise an absolute position to move to.
wait : :obj:`bool`
Wait until the move is finished before returning control to the calling program.
If :obj:`True` then this is a blocking method.
millimeters : :obj:`bool`, optional
Whether the `value` is in ``device units`` or in ``real-world units`` (i.e., in millimeters).
"""
if isinstance(value, str):
if value not in self.preset_positions:
prompt.critical('{} is not a preset. Must be one of: ' + ','.join(self.preset_positions.keys()))
return
value = self.preset_positions[value]
millimeters = True # the preset values are in real-world units
if not self._connection.can_move_without_homing_first():
res = prompt.question('The motor should be homed before a move can be performed.\n\nHome the motor?')
if res:
self.go_home(False)
return
if not millimeters:
value_du = value
value_mm = self._connection.get_real_value_from_device_unit(value, UnitType.DISTANCE)
else:
value_du = self._connection.get_device_unit_from_real_value(value, UnitType.DISTANCE)
value_mm = value
if self._min_pos_mm <= value_mm <= self._max_pos_mm:
self._requested_mm = value_mm
self._connection.move_to_position(value_du)
if wait:
self._wait(value_du)
self._update_preset_text_block_signals(value)
else:
m = 'Invalid move request.\n\n{} is outside the allowed range [{}, {}]'
prompt.critical(m.format(value, self._min_pos_mm, self._max_pos_mm))
    def set_calibration_file(self, path, enabled=True):
        """Set the calibration file.

        Parameters
        ----------
        path : :obj:`str`
            The path to the calibration file.
        enabled : :obj:`bool`, optional
            Whether to enable or disable the calibration file.
        """
        if not self._supports_calibration:
            prompt.critical('The translation stage, {}, does not support a calibration file'.format(self._connection))
            return
        try:
            self._connection.set_calibration_file(path, enabled)
        except IOError:
            # NOTE(review): execution deliberately(?) continues after this
            # failure, so the label/arrays below are refreshed from the
            # device's previous calibration state — confirm an early return
            # was not intended here.
            prompt.critical('Cannot find calibration file\n' + path)
        if self._connection.is_calibration_active():
            device_cal_path = self._connection.get_calibration_file()
            # two-column text file: uncalibrated mm, calibrated mm
            self._uncalibrated_mm, self._calibrated_mm = np.loadtxt(device_cal_path, unpack=True)
            self._calibration_label = 'Calibration file: {}'.format(os.path.basename(device_cal_path))
        else:
            self._uncalibrated_mm, self._calibrated_mm = np.array([]), np.array([])
            self._calibration_label = ''
def set_jog(self, value, millimeters=True):
"""Set the jog step size.
Parameters
----------
value : :obj:`int` or :obj:`float`
The jog step size.
millimeters : :obj:`bool`, optional
Whether the `value` is in ``device units`` or in ``real-world units`` (i.e., in millimeters).
"""
if not millimeters:
jog = int(value)
jog_mm = self._connection.get_real_value_from_device_unit(jog, UnitType.DISTANCE)
msg = '{} device units'.format(jog)
else:
jog_mm = float(value)
jog = self._connection.get_device_unit_from_real_value(jog_mm, UnitType.DISTANCE)
msg = '{} mm'.format(jog)
if jog_mm > self._max_pos_mm or jog_mm < self._min_pos_mm:
prompt.critical('Invalid jog size of ' + msg)
else:
self._connection.set_jog_step_size(jog)
self._update_jog_tooltip()
def _ask_move_to(self, event):
msg = 'Move to position (min:{} max:{})'.format(self._min_pos_mm, self._max_pos_mm)
current = float(self._position_display.text())
value = prompt.double(msg, default=current, minimum=self._min_pos_mm, maximum=self._max_pos_mm, precision=3)
if value is not None and value != current:
self.move_to(value, wait=False, millimeters=True)
def _find_xml_elements(self, config, element_name):
elements = []
record = self._connection.equipment_record
for element in config.root.findall(element_name):
serial = element.attrib.get('serial')
alias = element.attrib.get('alias')
if (serial is None) and (alias is None):
elements.append(element)
if (serial == record.serial) or (alias == record.alias):
elements.append(element)
return elements
def _get_calibrated_mm(self, pos):
"""Perform a linear fit around the current position to determine the calibrated position"""
if pos == 0:
return 0.0
idx = np.abs(self._uncalibrated_mm - pos).argmin()
min_idx = int(max(0, idx-3))
max_idx = int(min(self._uncalibrated_mm.size, idx+3))
if max_idx - min_idx > 1:
fit = np.polyfit(self._uncalibrated_mm[min_idx:max_idx], self._calibrated_mm[min_idx:max_idx], 1)
return fit[0] * pos + fit[1]
else:
return pos
def _get_preset_name(self, position):
"""Returns the preset name or '' if the position does not correspond to a preset position"""
if position == 0:
return 'Home'
for name, value in self.preset_positions.items():
if abs(value - position) < 0.0015:
return name
return ''
    def _go_to_preset(self, name):
        """Slot for the preset combobox: start a non-blocking move to `name`."""
        if name == 'Home':
            self.go_home(False)
        elif len(name) > 0:
            # an empty name is the blank combobox entry -> do nothing
            self.move_to(self.preset_positions[name], wait=False, millimeters=True)
    def _show_settings(self):
        """Open the modal settings dialog for the move/jog parameters."""
        settings = _Settings(self)
        # refresh the jog tooltips if the dialog changes the jog step size
        settings.sig_update_jog_tooltip.connect(self._update_jog_tooltip)
        settings.exec_()
    def _update_display(self):
        """Refresh the position read-out and its tooltip from the device.

        Runs for every polling update, so it must stay lightweight.
        """
        raw_device_unit = self._connection.get_position()
        raw_real_value = self._connection.get_real_value_from_device_unit(raw_device_unit, UnitType.DISTANCE)
        if self._supports_calibration and self._connection.is_calibration_active():
            # When the move is finished we should get rid of rounding errors from the calculation of the
            # calibrated position so as to not confuse the user with the position value that is displayed.
            # NOTE(review): 2148533248 presumably encodes the controller's
            # "settled/homed" status bits -- confirm against the Kinesis docs.
            if self._requested_mm is not None and self._connection.get_status_bits() == 2148533248:
                value = self._requested_mm
            else:
                value = self._get_calibrated_mm(raw_real_value)
            # update the tooltip text
            device_unit = self._connection.get_device_unit_from_real_value(value, UnitType.DISTANCE)
            tt = 'Device Unit: {}\n\n'.format(device_unit)
            tt += 'Device Unit: {} (uncalibrated)\n'.format(raw_device_unit)
            tt += 'Position: {} mm (uncalibrated)\n\n'.format(raw_real_value)
        else:
            value = raw_real_value
            tt = 'Device Unit: {}\n\n'.format(raw_device_unit)
        self._position_display.setText('{:8.3f}'.format(value))
        self._position_display.setToolTip(tt + self._calibration_label)
def _update_jog_tooltip(self):
jog = self.get_jog()
self._jog_forward_button.setToolTip('Jog forward [{:.3f} mm]'.format(jog))
self._jog_backward_button.setToolTip('Jog backward [{:.3f} mm]'.format(jog))
    def _update_preset_text_block_signals(self, position):
        """Update the preset combobox without emitting the signal"""
        # blocking signals avoids re-triggering _go_to_preset for a move
        # that this widget initiated itself
        self._preset_combobox.blockSignals(True)
        self._preset_combobox.setCurrentText(self._get_preset_name(position))
        self._preset_combobox.blockSignals(False)
    def _wait(self, device_unit):
        """Block until the raw position equals `device_unit`.

        Busy-waits, sleeping one polling period between reads.
        """
        while self.get_position_raw(millimeters=False) != device_unit:
            time.sleep(self._polling_duration)
class _Settings(QtWidgets.QDialog):
    """Modal dialog to view/edit the stage's move and jog parameters."""

    # emitted after the jog step size may have changed
    sig_update_jog_tooltip = QtCore.pyqtSignal()

    def __init__(self, parent):
        """Display a QDialog to edit the settings.

        Parameters
        ----------
        parent
            The translation-stage widget; provides the device connection.
        """
        super(_Settings, self).__init__(flags=QtCore.Qt.WindowCloseButtonHint)
        self.conn = parent._connection
        info = self.conn.get_hardware_info()
        self.setWindowTitle(info.modelNumber.decode('utf-8') + ' || ' + info.notes.decode('utf-8'))
        # move info (device units -> real-world units for display)
        max_vel, max_acc = self.conn.get_motor_velocity_limits()
        vel, acc = self.conn.get_vel_params()
        vel = self.conn.get_real_value_from_device_unit(vel, UnitType.VELOCITY)
        acc = self.conn.get_real_value_from_device_unit(acc, UnitType.ACCELERATION)
        backlash = self.conn.get_real_value_from_device_unit(self.conn.get_backlash(), UnitType.DISTANCE)
        # move widgets
        self.acc_spinbox = QtWidgets.QDoubleSpinBox()
        self.acc_spinbox.setMinimum(0)
        self.acc_spinbox.setMaximum(max_acc)
        self.acc_spinbox.setValue(acc)
        self.acc_spinbox.setToolTip('<html><b>Range:</b><br>0 - {} mm/s<sup>2</sup></html>'.format(max_acc))
        self.vel_spinbox = QtWidgets.QDoubleSpinBox()
        self.vel_spinbox.setMinimum(0)
        self.vel_spinbox.setMaximum(max_vel)
        self.vel_spinbox.setValue(vel)
        self.vel_spinbox.setToolTip('<html><b>Range:</b><br>0 - {} mm/s</html>'.format(max_vel))
        self.backlash_spinbox = QtWidgets.QDoubleSpinBox()
        self.backlash_spinbox.setMinimum(0)
        self.backlash_spinbox.setMaximum(5)
        self.backlash_spinbox.setValue(backlash)
        self.backlash_spinbox.setToolTip('<html><b>Range:</b><br>0 - 5 mm</html>')
        move_group = QtWidgets.QGroupBox('Move Parameters')
        move_grid = QtWidgets.QGridLayout()
        move_grid.addWidget(QtWidgets.QLabel('Backlash'), 0, 0, alignment=QtCore.Qt.AlignRight)
        move_grid.addWidget(self.backlash_spinbox, 0, 1)
        move_grid.addWidget(QtWidgets.QLabel('mm'), 0, 2, alignment=QtCore.Qt.AlignLeft)
        move_grid.addWidget(QtWidgets.QLabel('Maximum Velocity'), 1, 0, alignment=QtCore.Qt.AlignRight)
        move_grid.addWidget(self.vel_spinbox, 1, 1)
        move_grid.addWidget(QtWidgets.QLabel('mm/s'), 1, 2, alignment=QtCore.Qt.AlignLeft)
        move_grid.addWidget(QtWidgets.QLabel('Acceleration'), 2, 0, alignment=QtCore.Qt.AlignRight)
        move_grid.addWidget(self.acc_spinbox, 2, 1)
        move_grid.addWidget(QtWidgets.QLabel('mm/s<sup>2</sup>'), 2, 2, alignment=QtCore.Qt.AlignLeft)
        move_group.setLayout(move_grid)
        # jog info
        jog_size = self.conn.get_real_value_from_device_unit(self.conn.get_jog_step_size(), UnitType.DISTANCE)
        vel, acc = self.conn.get_jog_vel_params()
        jog_vel = self.conn.get_real_value_from_device_unit(vel, UnitType.VELOCITY)
        jog_acc = self.conn.get_real_value_from_device_unit(acc, UnitType.ACCELERATION)
        # jog widgets
        min_jog, max_jog = 0.002, parent._max_pos_mm/2.0
        self.jog_size_spinbox = QtWidgets.QDoubleSpinBox()
        self.jog_size_spinbox.setMinimum(min_jog)
        self.jog_size_spinbox.setMaximum(max_jog)
        self.jog_size_spinbox.setDecimals(3)
        self.jog_size_spinbox.setValue(jog_size)
        self.jog_size_spinbox.setToolTip('<html><b>Range:</b><br>{} - {} mm</html>'.format(min_jog, max_jog))
        self.jog_acc_spinbox = QtWidgets.QDoubleSpinBox()
        self.jog_acc_spinbox.setMinimum(0)
        self.jog_acc_spinbox.setMaximum(max_acc)
        self.jog_acc_spinbox.setValue(jog_acc)
        self.jog_acc_spinbox.setToolTip('<html><b>Range:</b><br>0 - {} mm/s<sup>2</sup></html>'.format(max_acc))
        self.jog_vel_spinbox = QtWidgets.QDoubleSpinBox()
        self.jog_vel_spinbox.setMinimum(0)
        self.jog_vel_spinbox.setMaximum(max_vel)
        self.jog_vel_spinbox.setValue(jog_vel)
        self.jog_vel_spinbox.setToolTip('<html><b>Range:</b><br>0 - {} mm/s</html>'.format(max_vel))
        jog_group = QtWidgets.QGroupBox('Jog Parameters')
        jog_grid = QtWidgets.QGridLayout()
        jog_grid.addWidget(QtWidgets.QLabel('Step Size'), 0, 0, alignment=QtCore.Qt.AlignRight)
        jog_grid.addWidget(self.jog_size_spinbox, 0, 1)
        jog_grid.addWidget(QtWidgets.QLabel('mm'), 0, 2, alignment=QtCore.Qt.AlignLeft)
        jog_grid.addWidget(QtWidgets.QLabel('Maximum Velocity'), 1, 0, alignment=QtCore.Qt.AlignRight)
        jog_grid.addWidget(self.jog_vel_spinbox, 1, 1)
        jog_grid.addWidget(QtWidgets.QLabel('mm/s'), 1, 2, alignment=QtCore.Qt.AlignLeft)
        jog_grid.addWidget(QtWidgets.QLabel('Acceleration'), 2, 0, alignment=QtCore.Qt.AlignRight)
        jog_grid.addWidget(self.jog_acc_spinbox, 2, 1)
        jog_grid.addWidget(QtWidgets.QLabel('mm/s<sup>2</sup>'), 2, 2, alignment=QtCore.Qt.AlignLeft)
        jog_group.setLayout(jog_grid)
        hbox = QtWidgets.QHBoxLayout()
        hbox.addWidget(move_group)
        hbox.addWidget(jog_group)
        update_button = QtWidgets.QPushButton('Update')
        update_button.setToolTip('Update the device settings')
        update_button.clicked.connect(self.update_settings)
        cancel_button = QtWidgets.QPushButton('Cancel')
        # bug fix: the tooltip was copy-pasted from the Update button
        cancel_button.setToolTip('Close without updating the device settings')
        cancel_button.clicked.connect(self.close)
        info_button = QtWidgets.QPushButton()
        info_button.setIcon(get_icon('imageres|109'))
        info_button.clicked.connect(lambda: show_hardware_info(parent._connection))
        info_button.setToolTip('Display the hardware information')
        button_layout = QtWidgets.QGridLayout()
        button_layout.addWidget(cancel_button, 0, 0)
        button_layout.addItem(QtWidgets.QSpacerItem(1, 1, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding), 0, 1)
        button_layout.addWidget(update_button, 0, 2)
        button_layout.addWidget(info_button, 0, 3)
        vbox = QtWidgets.QVBoxLayout()
        vbox.addLayout(hbox)
        vbox.addLayout(button_layout)
        self.setLayout(vbox)

    def update_settings(self):
        """Write the dialog values back to the device (converting to device
        units), notify listeners that the jog size may have changed, and close."""
        vel = self.conn.get_device_unit_from_real_value(self.vel_spinbox.value(), UnitType.VELOCITY)
        acc = self.conn.get_device_unit_from_real_value(self.acc_spinbox.value(), UnitType.ACCELERATION)
        self.conn.set_vel_params(vel, acc)
        backlash = self.conn.get_device_unit_from_real_value(self.backlash_spinbox.value(), UnitType.DISTANCE)
        self.conn.set_backlash(backlash)
        jog_vel = self.conn.get_device_unit_from_real_value(self.jog_vel_spinbox.value(), UnitType.VELOCITY)
        jog_acc = self.conn.get_device_unit_from_real_value(self.jog_acc_spinbox.value(), UnitType.ACCELERATION)
        self.conn.set_jog_vel_params(jog_vel, jog_acc)
        jog_size = self.conn.get_device_unit_from_real_value(self.jog_size_spinbox.value(), UnitType.DISTANCE)
        self.conn.set_jog_step_size(jog_size)
        self.sig_update_jog_tooltip.emit()
        self.close()
|
# -*- coding:utf-8 -*-
import hashlib
from datetime import datetime
from flask import current_app
from flask_login import UserMixin, AnonymousUserMixin
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
from werkzeug.security import generate_password_hash, check_password_hash
from . import db
from . import login_manager
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login callback: load a user by its (string) primary key."""
    return User.query.get(int(user_id))
class User(UserMixin, db.Model):
    """Application user account."""
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True)
    email = db.Column(db.String(64), unique=True, index=True)
    username = db.Column(db.String(64), unique=True)
    # only the salted hash is stored; see the `password` property
    password_hash = db.Column(db.String(128))
    confirmed = db.Column(db.Boolean, default=None)
    name = db.Column(db.String(64))
    location = db.Column(db.TEXT)
    about_me = db.Column(db.TEXT)
    # bug fix: pass the callable `datetime.now`, not `datetime.now()`; the
    # call form froze one timestamp at import time for every row
    member_since = db.Column(db.DateTime(), default=datetime.now)
    last_seen = db.Column(db.DateTime(), default=datetime.now)
    # MD5 of the e-mail, cached for Gravatar-style avatar URLs
    avatar_hash = db.Column(db.String(32))
    column1 = db.Column(db.String(64))
    column2 = db.Column(db.String(64))
    column3 = db.Column(db.String(64))

    def __init__(self, **kwargs):
        """Create a user; pre-computes the avatar hash when an e-mail is given."""
        super(User, self).__init__(**kwargs)
        if self.email is not None and self.avatar_hash is None:
            self.avatar_hash = hashlib.md5(self.email.encode('utf-8')).hexdigest()
class AnonymousUser(AnonymousUserMixin):
    """A not-logged-in user: holds no permissions whatsoever."""

    def can(self, permissions):
        """Anonymous users never hold any permission."""
        return False

    def is_administrator(self):
        """Anonymous users are never administrators."""
        return False
    def ping(self):
        """Refresh `last_seen` to now; intended to run on each authenticated request."""
        self.last_seen = datetime.now()
        db.session.add(self)
    def generate_confirmation_token(self, expiration=3600):
        """Return a signed account-confirmation token valid for `expiration` seconds."""
        s = Serializer(current_app.config['SECRET_KEY'], expiration)
        return s.dumps({'confirm': self.id})
def confirm(self, token):
s = Serializer(current_app.config['SECRET_KEY'])
try:
data = s.loads(token)
except:
return False
if self.id == data.get('confirm'):
self.confirmed = True
db.session.add(self)
return True
return False
def generate_reset_token(self, expiration=3600):
s = Serializer(current_app.config['SECRET_KEY'], expiration)
return s.dumps({'reset': self.id})
def reset_password(self, token, new_password):
s = Serializer(current_app.config['SECRET_KEY'])
try:
data = s.loads(token)
except:
return False
if self.id == data.get('reset'):
self.password = new_password
db.session.add(self)
return True
return False
    def generate_email_change_token(self, new_email, expiration=3600):
        """Return a signed token authorising a change to `new_email`, valid for `expiration` seconds."""
        s = Serializer(current_app.config['SECRET_KEY'], expiration)
        return s.dumps({'change_email': self.id, 'new_email': new_email})
def change_email(self, token):
s = Serializer(current_app.config['SECRET_KEY'])
try:
data = s.loads(token)
except:
return False
if data.get('new_email') is None:
return False
if self.id == data.get('change_email'):
self.email = data.get('new_email')
self.avatar_hash = hashlib.md5(self.email.encode('utf-8').hexdigest())
db.session.add(self)
return True
return False
    @property
    def password(self):
        """Write-only attribute: reading raises; only the hash is stored."""
        raise AttributeError('password is not a readable attribute')

    @password.setter
    def password(self, password):
        # store a salted hash, never the plaintext
        self.password_hash = generate_password_hash(password)

    def verify_password(self, password):
        """Return True if `password` matches the stored hash."""
        return check_password_hash(self.password_hash, password)
    @staticmethod
    def generate_fake(count=100):
        """Seed the database with `count` random users (dev/test helper).

        A duplicate e-mail/username violates a unique constraint; the
        IntegrityError is caught, the session rolled back, and that user
        is simply skipped.
        """
        from sqlalchemy.exc import IntegrityError
        from random import seed
        import forgery_py
        seed()
        for i in range(count):
            u = User(email=forgery_py.internet.email_address(),
                     username=forgery_py.internet.user_name(True),
                     password=forgery_py.lorem_ipsum.word(),
                     confirmed=True,
                     name=forgery_py.name.full_name(),
                     location=forgery_py.address.city(),
                     about_me=forgery_py.lorem_ipsum.sentence(),
                     member_since=forgery_py.date.date(True)
                     )
            db.session.add(u)
            try:
                db.session.commit()
            except IntegrityError:
                db.session.rollback()
    def __repr__(self):
        """Debug representation; note it shows the display name, not the username."""
        return '<User %r>' % self.name
class Post(db.Model):
    """A crawled/aggregated article."""
    __tablename__ = 'posts'
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(255), unique=True)
    abstract = db.Column(db.Text)
    content = db.Column(db.Text)
    url = db.Column(db.String(255), unique=True)
    image = db.Column(db.Text)
    web_site = db.Column(db.String(64))
    category = db.Column(db.String(64))
    others = db.Column(db.Text)
    publish_time = db.Column(db.DateTime)
    # bug fix: pass the callable `datetime.now`, not `datetime.now()`; the
    # call form froze one timestamp at import time for every row
    create_time = db.Column(db.DateTime, index=True, default=datetime.now)

    @staticmethod
    def generate_fake(count=100):
        """Seed the database with `count` random posts (dev/test helper)."""
        from random import seed, randint
        import forgery_py
        seed()
        # removed the unused `user_count = User.query.count()` query
        for i in range(count):
            p = Post(title=forgery_py.lorem_ipsum.sentences(randint(1, 3)),
                     content=forgery_py.lorem_ipsum.sentences(randint(1, 3)),
                     create_time=forgery_py.date.date(True))
            db.session.add(p)
        db.session.commit()
class Category(db.Model):
    """A post category."""
    __tablename__ = 'categories'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64))
    # bug fix: pass the callable `datetime.now`, not `datetime.now()`
    create_time = db.Column(db.DateTime, index=True, default=datetime.now)

    @staticmethod
    def insert_categories():
        """Insert the default categories; idempotent (existing rows are reused)."""
        categories = ['Technology', 'Sports', 'Money', 'Hot', 'Entertainment']
        for name in categories:
            category = Category.query.filter_by(name=name).first()
            if category is None:
                # bug fix: construct with the name directly; the previous
                # Category(name=category) passed None and relied on a
                # follow-up assignment
                category = Category(name=name)
            db.session.add(category)
        db.session.commit()

    def __repr__(self):
        """Debug representation."""
        return '<Category %r>' % self.name
|
from flask import Flask, request
from flask_cors import CORS
from lib import config, log
from werkzeug.exceptions import HTTPException
from routes.roku import roku_template
from flask_swagger_ui import get_swaggerui_blueprint
from roku import Roku
try:
    c = config.Configuration('config.ini')
except Exception as e:
    # NOTE(review): a bad/missing config exits silently; consider printing or
    # logging `e` (and a non-zero status) before exiting.
    exit()
fileLocation = None  # no log file: log to the default handler only
log.setup_custom_logger(c.Logging.moduleName, c.Logging.level, fileLocation)
swaggerUiUrl = ''  # serve the Swagger UI at the application root
swaggerDocUrl = '/static/swagger.json'
swaggerui_blueprint = get_swaggerui_blueprint(swaggerUiUrl, swaggerDocUrl, config={'app_name': 'Roku API', 'validatorUrl': None, 'layout': 'BaseLayout'})
app = Flask(__name__, static_url_path="/static")
cors = CORS(app)
# Parse the comma-separated "id:host" pairs from the config into an
# ordered list and an id->host lookup dict.
rokus = []
rokuSearch = {}
for rokuObj in c.Roku.hosts.split(','):
    rokuAttribs = rokuObj.split(':')
    rokus.append({'id': rokuAttribs[0], 'host': rokuAttribs[1]})
    rokuSearch[rokuAttribs[0]] = rokuAttribs[1]
# Attach shared state to the app object so blueprints can reach it.
app.rokus = rokus
app.rokuSearch = rokuSearch
app.Roku = Roku
@app.errorhandler(HTTPException)
def handle_bad_request(e):
    """Log server errors (5xx) and return the exception itself as the response."""
    if not hasattr(e, 'code'):
        # non-HTTP exception leaked through: respond with a generic 500
        return str(e), 500
    elif e.code > 499:
        app.logger.error(e)
    return e
app.register_blueprint(roku_template, url_prefix='/systems')
app.register_blueprint(swaggerui_blueprint, url_prefix=swaggerUiUrl)

if __name__ == '__main__':
    # development server; use a production WSGI server for deployment
    app.run(debug=False, threaded=True, host='0.0.0.0', port=8080)
|
#!/usr/bin/env python
import numpy as np
import time
import rospy
from geometry_msgs.msg import Twist
from std_msgs.msg import String
import sys, select, os
if os.name == 'nt':
import msvcrt
else:
import tty, termios
e = """
Communications Failed
"""
def getKey():  # you can ignore this function. It's for stopping the robot when press 'Ctrl+C'
    """Read one key press, waiting at most 0.1 s; return '' on timeout.

    On Windows uses msvcrt; elsewhere switches the terminal to raw mode and
    restores it from the module-level ``settings`` captured in __main__.
    """
    if os.name == 'nt':
        return msvcrt.getch()
    tty.setraw(sys.stdin.fileno())
    rlist, _, _ = select.select([sys.stdin], [], [], 0.1)
    if rlist:
        key = sys.stdin.read(1)
    else:
        key = ''
    termios.tcsetattr(sys.stdin, termios.TCSADRAIN, settings)
    return key
class PIDcontrol():
    """PID line follower: subscribes to camera colour, line index and
    forward-speed topics and publishes Twist commands on ``cmd_vel``."""

    def __init__(self):
        self.cmd_pub = rospy.Publisher('cmd_vel', Twist, queue_size=10)
        self.color_sub = rospy.Subscriber('camera_rgb', String, self.colour_callback, queue_size=10)
        self.line_sub = rospy.Subscriber('line_idx', String, self.camera_callback, queue_size=10)
        self.fwd_sub = rospy.Subscriber('fwd_Movment', String, self.fwd_callback, queue_size=10)
        self.CurColour = None  # latest RGB sample; None until the first message
        self.xVel = 0.1  # forward speed, updated by fwd_callback
        self.line_pos = 0  # latest line pixel index
        self.goal = 320  # desired line position (image centre column)

    def colour_callback(self, msg):
        '''
        callback function that receives the most recent colour measurement from the camera.
        '''
        # message payload looks like "r:<v>, g:<v>, b:<v>"
        rgb = msg.data.replace('r:', '').replace('b:', '').replace('g:', '').replace(' ', '')
        r, g, b = rgb.split(',')
        self.CurColour = np.array([float(r), float(g), float(b)])

    def fwd_callback(self, data):
        """Update the forward velocity from the fwd_Movment topic."""
        self.xVel = float(data.data)
        print(self.xVel)

    def camera_callback(self, data):
        """Store the most recent line pixel index."""
        self.line_pos = int(data.data)

    def ninety_turn(self):
        """Open-loop manoeuvre: rotate one way for 6 s, pause, rotate back for 6 s.

        NOTE(review): the last published Twist leaves angular.z = -0.3; the
        caller is expected to publish a fresh command afterwards.
        """
        twist = Twist()
        twist.angular.z = 0.3
        self.cmd_pub.publish(twist)
        time.sleep(6)
        twist.angular.z = 0
        self.cmd_pub.publish(twist)
        time.sleep(1)
        twist.angular.z = -0.3
        self.cmd_pub.publish(twist)
        time.sleep(6)

    def follow_the_line(self):
        """Run the PID steering loop until ROS shuts down."""
        twist = Twist()
        prev_err = 0
        rate = rospy.Rate(25)
        # control gains
        k_p = 0.005
        k_i = 0.00015
        k_d = 0.02
        k_acc = 70  # dead-band removed from the error before integrating (anti-windup)
        integral = 0
        integral_limit = 1200  # clamp on the accumulated integral (anti-windup)
        correction_p = 0
        correction_i = 0
        while not rospy.is_shutdown():
            twist.linear.x = self.xVel
            error = self.line_pos - self.goal
            derivative = error - prev_err
            prev_err = error
            if self.xVel == 0:
                self.ninety_turn()
            # bug fix: guard against CurColour still being None before the
            # first camera_rgb message arrives; np.amin(None) raised TypeError
            if self.CurColour is None or np.amin(self.CurColour) < 210 or self.line_pos == 0:
                twist.angular.z = 0
            else:
                correction_p = k_p * error
                if error < 0:
                    integral += error + k_acc
                elif error > 0:
                    integral += error - k_acc
                # clamp the integral term
                integral = max(-integral_limit, min(integral_limit, integral))
                correction_i = k_i * integral
                correction_d = k_d * derivative
                twist.angular.z = -correction_p - correction_i - correction_d
            print(twist.linear.x)
            self.cmd_pub.publish(twist)
            rate.sleep()
if __name__=="__main__":
if os.name != 'nt':
settings = termios.tcgetattr(sys.stdin)
rospy.init_node('Lab3')
PID = PIDcontrol()
try:
while(1):
key = getKey()
PID.follow_the_line()
if (key == '\x03'): #stop the robot when exit the program
break
except rospy.ROSInterruptException:
print("comm failed") |
from rest_framework.request import Request
from django.contrib.auth.models import AnonymousUser
from django.utils.functional import SimpleLazyObject
from rest_framework_jwt.authentication import JSONWebTokenAuthentication
from django.contrib.auth.middleware import get_user
from django.utils.deprecation import MiddlewareMixin
def auth_middleware(request):
    """Resolve the user for *request* from the JWT in the Authorization header.

    Returns the authenticated user on success. On a missing/invalid token it
    now returns ``AnonymousUser()`` instead of ``None`` — the import at the
    top of this module was previously unused, and downstream code calling
    ``request.user.is_authenticated`` crashed on ``None``.
    """
    try:
        user_jwt = JSONWebTokenAuthentication().authenticate(Request(request))
    except Exception:
        # no/invalid credentials: fall back to Django's anonymous user
        return AnonymousUser()
    if user_jwt is None:
        # authenticate() returns None when no credentials were supplied
        return AnonymousUser()
    user = user_jwt[0]
    request.user = user
    return user
class JWTAuthenticationMiddleware(MiddlewareMixin):
    """ Middleware for authenticating JSON web token in Authorize Header """

    def process_request(self, request):
        # lazy: the JWT is only decoded when request.user is first accessed
        request.user = SimpleLazyObject(lambda: auth_middleware(request))
|
# -*- coding: utf-8 -*-
import cv2
import numpy as np
from config import VideoConfig
from utils import crop_by_roi
path = '/home/cys/Codes/DolphinDetection'  # project root (machine-specific)
blur_ksize = 5  # Gaussian blur kernel size (only referenced by commented-out code)
canny_lth = 75  # Canny lower hysteresis threshold
canny_hth = 125  # Canny upper hysteresis threshold
# NOTE(review): despite the name, this is the 3x3 dilation kernel itself,
# not a size tuple
kernel_size = np.ones((3, 3), np.uint8)
def adaptive_thresh(frame, cfg=None):
    """Edge mask via adaptive mean threshold, Canny and a 3x3 dilation.

    Parameters
    ----------
    frame : 3-channel image array (converted with COLOR_RGB2GRAY).
    cfg : accepted for API compatibility; unused.

    Returns
    -------
    Single-channel dilated edge map.
    """
    gray = cv2.cvtColor(frame, cv2.COLOR_RGB2GRAY)
    binary = cv2.adaptiveThreshold(gray, 255, cv2.ADAPTIVE_THRESH_MEAN_C, cv2.THRESH_BINARY, 21, 30)
    edges = cv2.Canny(binary, canny_lth, canny_hth)
    return cv2.dilate(edges, kernel_size)
def adaptive_thresh_size(frame, block_size=21, C=40):
    """Inverted adaptive-mean binary threshold of a BGR frame.

    block_size is the neighbourhood size and C the constant subtracted from
    the local mean (see cv2.adaptiveThreshold).
    """
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    return cv2.adaptiveThreshold(gray, 255, cv2.ADAPTIVE_THRESH_MEAN_C,
                                 cv2.THRESH_BINARY_INV, block_size, C)
# def main():
# init_path = osp.join(path, 'Demo/picts/test_2.jpg')
# save_path = osp.join(path, 'Demo/picts/2.jpg')
#
# img = cv2.imread(init_path)
# result = adaptive_thresh(img)
# # cv2.imwrite(save_path, dilation)
#
#
# if __name__ == '__main__':
# main()
# print("Done!")
|
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def widthOfBinaryTree(self, root: 'TreeNode') -> int:
        """Return the maximum width of the binary tree.

        The width of a level is the number of slots between its leftmost and
        rightmost nodes, counting the null slots in between.

        Uses index-labelled BFS (the node at index i has children at 2i and
        2i+1) instead of materialising None placeholders per level — the
        placeholder approach is exponential in depth for sparse trees.
        Indices are re-based at every level so they stay small.
        """
        if root is None:
            return 0
        widest = 1
        level = [(root, 0)]
        while level:
            nxt = []
            for node, idx in level:
                if node.left is not None:
                    nxt.append((node.left, 2 * idx))
                if node.right is not None:
                    nxt.append((node.right, 2 * idx + 1))
            if nxt:
                # re-base so the leftmost index is 0 (prevents index blow-up)
                base = nxt[0][1]
                nxt = [(n, i - base) for n, i in nxt]
                widest = max(widest, nxt[-1][1] + 1)
            level = nxt
        return widest
|
import sys

# Redirect stdin so input() reads from the local test file.
sys.stdin = open("input.txt")

T = int(input())
denominations = [50000, 10000, 5000, 1000, 500, 100, 50, 10]
for case_no in range(1, T + 1):
    amount = int(input())
    counts = []
    # greedy decomposition: denominations are sorted high to low
    for coin in denominations:
        q, amount = divmod(amount, coin)
        counts.append(q)
    print("#{}".format(case_no))
    print(*counts)
|
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.animation import FuncAnimation
from scipy import interpolate
from random import random
import math
def read_data_3d():
    """Load the DIRO skeleton dataset and build a shuffled, binary-labelled
    train/test split (0 = normal gait, 1 = asymmetric gait).

    Returns
    -------
    x_train, y_train, x_test, y_test
        Lists of variable-length clips (frames x 75 joint coordinates)
        and their integer labels, split 80/20.
    """
    loaded = np.load('DIRO_skeletons.npz')
    data = loaded['data']  # presumably (subjects, gaits, 1200 frames, 75 coords) — see notes below
    # gait index 0 is the normal walk; indices 1-8 are the asymmetric gaits
    normal_data = data[:, 0, :, :]
    normal_data = normal_data.reshape([-1, 1200, 75])
    asym_data = data[:, 1:9, :, :]
    asym_data = asym_data.reshape([-1, 1200, 75])
    # cut videos to 50 frame clips
    normal_data = normal_data.reshape([-1, 50, 75])
    asym_data = asym_data.reshape([-1, 50, 75])
    # scaling the speed of each clip and randomly cutting it; the result is a
    # python list of variable-length arrays, not an ndarray
    normal_data = speed_scaling_batch(normal_data)
    asym_data = speed_scaling_batch(asym_data)
    # ratio of abnormal samples to keep relative to the normal samples
    ratio = 1
    sample_asym = math.ceil(ratio * len(normal_data))
    asym_id = list(range(len(asym_data)))
    np.random.shuffle(asym_id)
    asym_id = asym_id[:sample_asym]
    asym_data = [asym_data[x] for x in asym_id]
    # list concatenation (clips are variable length after speed scaling)
    data = normal_data + asym_data
    label = [0 for x in range(len(normal_data))] + \
            [1 for x in range(len(asym_data))]
    # shuffle the data and labels with the same permutation
    index = list(range(len(data)))
    np.random.shuffle(index)
    data_new = []
    label_new = []
    for i in index:
        data_new.append(data[i])
        label_new.append(label[i])
    # training test split
    ratio = 0.8
    split = math.ceil(ratio * len(label_new))
    x_train = data_new[:split]
    y_train = label_new[:split]
    x_test = data_new[split:]
    y_test = label_new[split:]
    return x_train, y_train, x_test, y_test
'''
# split data and shuffle data
print(normal_data.shape)
print(asym_data.shape)
#plot_data( asym_data[56,:,:].squeeze())
plot_data( normal_data[21,:,:].squeeze())
'''
def speed_scaling_batch(data):
    """Speed-scale every clip in *data* and randomly truncate each result.

    Parameters
    ----------
    data : (num_sample, num_frame, num_point) array.

    Returns
    -------
    list of variable-length (frames, num_point) arrays.
    """
    clips = []
    for idx in range(data.shape[0]):
        scaled = speed_scaling(data[idx, :, :].squeeze())
        # keep a random 50-100% prefix of the scaled clip
        keep = math.ceil(scaled.shape[0] * (random() * 0.5 + 0.5))
        clips.append(scaled[:keep, :])
    return clips
def speed_scaling(data):
    """Resample a clip in time by a random speed factor.

    Parameters
    ----------
    data : (num_frame, num_point) array; one column per joint coordinate.

    Each column is linearly interpolated onto a new time grid whose spacing
    is drawn 50/50 from [0.5, 1.0) (slow down) or [1.0, 2.0) (speed up).

    Returns
    -------
    The resampled (new_num_frame, num_point) array.
    """
    num_frame, num_point = data.shape
    # two random() draws, matching the original call sequence:
    # the first picks the branch, the second the scaling factor
    if random() > 0.5:
        interval = random() * 0.5 + 0.5
    else:
        interval = random() + 1
    grid = np.arange(0, num_frame - 1, interval)
    columns = []
    for j in range(num_point):
        f_interp = interpolate.interp1d(np.arange(num_frame), data[:, j])
        columns.append(f_interp(grid))
    return np.array(columns).T
def plot_data(data):
    """Animate a (frames x 75) skeleton clip as a 2-D stick figure.

    NOTE(review): the plot shows z (horizontal) against y (vertical); the x
    coordinates are computed but never drawn — confirm this side-on
    projection is intended.
    """
    # (joint, joint) index pairs for the reduced skeleton
    bone_list = [(20, 4), (4, 5), (5, 6), (20, 8), (8, 9), (9, 10), (0, 12), (12, 13), \
                 (13, 14), (14, 15), (0, 16), (16, 17), (17, 18), (18, 19), (0, 20), (3, 20)]
    fig = plt.figure()
    ax = plt.axes(xlim=(1, 3), ylim=(-1, 1))
    # one Line2D per bone, updated in place each frame
    lines = []
    for bone in bone_list:
        line, = ax.plot([], [])
        lines.append(line)

    def init():
        # blank all segments at the start of the animation
        for line in lines:
            line.set_data([], [])
        return lines

    def animate(j):
        # frame j is stored flat as x0,y0,z0,x1,y1,z1,...
        skel = data[j, :]
        x = [skel[i] for i in range(0, len(skel), 3)]
        y = [skel[i] for i in range(1, len(skel), 3)]
        z = [skel[i] for i in range(2, len(skel), 3)]
        for k, line in enumerate(lines):
            bone = bone_list[k]
            joint1 = bone[0]
            joint2 = bone[1]
            xdata = (x[joint1], x[joint2])
            ydata = (y[joint1], y[joint2])
            zdata = (z[joint1], z[joint2])
            line.set_data(zdata, ydata)
        return lines

    length = data.shape[0]
    anim = FuncAnimation(fig, animate, init_func=init, frames=length, interval=50, blit=True)
    plt.show()
# NOTE(review): dead exploratory code preserved as a no-op module-level string
# literal; kept verbatim for reference.
'''
#get joint coordinates of a specific skeleton
skel = data[0,0,0,:]
x = [skel[i] for i in range(0, len(skel), 3)]
y = [skel[i] for i in range(1, len(skel), 3)]
z = [skel[i] for i in range(2, len(skel), 3)]
#get default separation
separation = loaded['split']
#print information
print(data.shape)
print(separation)
#expected results:
#(9, 9, 1200, 75)
# meaning:
# 9subj, 9gaits, 1200frames
#['train' 'test' 'train' 'test' 'train' 'train' 'test' 'test' 'train']
#plt.scatter(x,z)
#plt.show()
selected_joints = [0, 20, 3, 4, 5, 6, \
8,9,10, 12, 13, 14, 15, 16, 17, 18, 19]
bone_list = [(0,1), (1,20), (20,2), (2,3), \
(20,4), (4,5), (5,6), (6,7), (7,21), (6,22), \
(20,8), (8,9), (9,10), (10,11), (11,23), (10,24), \
(0,12), (12,13), (13,14), (14,15),\
(0,16), (16,17), (17,18), (18,19)]
selected_bone_list = []
for bone in bone_list:
    if bone[0] in selected_joints and bone[1] in selected_joints:
        selected_bone_list.append(bone)
selected_bone_list.append((0,20))
selected_bone_list.append((3,20))
'''
if __name__ == '__main__':
    # Script entry point: load and process the 3-D skeleton dataset.
    read_data_3d()
|
from sys import stdin
class Particle:
    """A point particle with 3-D integer position, velocity and acceleration."""

    def __init__(self, id, position, velocity, acceleration):
        self.id = id
        self.position = position
        self.velocity = velocity
        self.acceleration = acceleration

    def update(self):
        """Advance one tick: velocity += acceleration, then position += velocity."""
        self.velocity = [v + a for v, a in zip(self.velocity, self.acceleration)]
        self.position = [p + v for p, v in zip(self.position, self.velocity)]

    def distance(self):
        """Manhattan distance of the particle from the origin."""
        return sum(abs(coord) for coord in self.position)
def d(item):
    """Sort-key helper: the item's distance from the origin."""
    return item.distance()
def remove_collisions(particles, distances):
    """Remove (in place) every particle whose marker in *distances* occurs at
    more than one index, and return the same list.

    particles and distances are parallel lists: distances[i] is the collision
    marker (e.g. a position tuple) belonging to particles[i].

    Replaces the original O(n^2) scheme (a list-slice membership test per
    element, plus identity-based list.remove) with a Counter and del.
    """
    from collections import Counter
    tally = Counter(distances)  # how often each marker occurs overall
    # Walk backwards so deletions do not shift the indices still to visit.
    for i in range(len(particles) - 1, -1, -1):
        if tally[distances[i]] > 1:
            del particles[i]
    return particles
# Read puzzle input from stdin; each line looks like
# "p=<x,y,z>, v=<x,y,z>, a=<x,y,z>" (Advent of Code 2017 day 20 style).
lines = stdin.readlines()
particles = []
for i in range(len(lines)):
    # Outer split yields the three vectors; the "p=<" prefix and trailing ">"
    # are stripped below with [3:] and [:-1].
    a, b, c = lines[i].strip().split(", ")
    x1, y1, z1 = a.split(",")
    x2, y2, z2 = b.split(",")
    x3, y3, z3 = c.split(",")
    position = [int(x1[3:]), int(y1), int(z1[:-1])]
    velocity = [int(x2[3:]), int(y2), int(z2[:-1])]
    acceleration = [int(x3[3:]), int(y3), int(z3[:-1])]
    particles.append(Particle(i, position, velocity, acceleration))
# Simulate 1000 ticks, removing any particles that occupy the same position
# after a tick (a collision); 1000 iterations is assumed enough to settle.
for i in range(1000):
    for p in particles:
        p.update()
    positions = [tuple(p.position) for p in particles]
    if len(particles) > len(set(positions)):
        particles = remove_collisions(particles, positions)
print(len(particles))
"""
The very first version, kept as a keepsake. (Original note: 最初始的版本,留作纪念)
"""
import json
from dataclasses import dataclass, is_dataclass
import typing
def ex_dataclass(*args, **kwargs):
    """
    Enhanced ``dataclass`` decorator.

    Keeps the native ``dataclass`` behaviour and adds:
      1. recursive parsing of nested dataclass fields (dicts become instances);
      2. parsing of list fields;
      3. simple nested lists, e.g. ``{a: [[1, 2, 3]]}``;
      4. polymorphism for ``typing.Type`` annotations - the candidate class
         whose annotated fields match the most input keys is instantiated;
      5. redundant keys in the input are tolerated and silently dropped.

    Also attaches a classmethod ``json_loads(value: str)`` that builds an
    instance straight from a JSON string.
    """
    def wrapper(check_class):
        # passing class to investigate
        check_class = dataclass(check_class, **kwargs)
        if not hasattr(check_class, '__annotations__'):
            raise Exception(f"type obejct {check_class.__name__} missing required attribute.")
        # Keep the dataclass-generated __init__ so the replacement can delegate.
        o_init = check_class.__init__
        def __get_typing_type_subclasses(type_: typing.Type) -> typing.List[typing.Type]:
            # Subclasses of T for an annotation of the form typing.Type[T];
            # [] when the annotation is not a typing.Type.
            subclasses = []
            if hasattr(type_, '_name'):
                if type_._name == "Type":
                    subclasses = type_.__dict__['__args__'][0].__subclasses__()
            return subclasses
        def __get_class_from_typing_type(type_: object) -> typing.ClassVar:
            # The T inside typing.Type[T].
            return type_.__dict__['__args__'][0]
        def __get_cls_attr(cls: typing.Callable) -> typing.Dict:
            # Annotation dict {field_name: field_type} of a candidate class.
            return cls.__dict__['__annotations__']
        def __get_high_compatibility_cls(subclass: typing.List[typing.Callable], value: typing.Dict) -> typing.Callable:
            # Choose the candidate whose annotated fields match the most
            # (key, type) pairs of the input dict; None when nothing matches.
            ret_cls: typing.Callable = None
            max_cnt = 0
            for cls in subclass:
                tmp_cnt = 0
                attr_dict = __get_cls_attr(cls)
                for k, v in value.items():
                    v_type = attr_dict.get(k, None)
                    if v_type:
                        if isinstance(v, v_type):
                            tmp_cnt += 1
                if tmp_cnt > max_cnt:
                    max_cnt = tmp_cnt
                    ret_cls = cls
            return ret_cls
        def __get_all_cls_typing_type(typing_type_ft: typing.ClassVar) -> typing.List[typing.Type]:
            # All candidates for a typing.Type[T] annotation: T's subclasses
            # plus T itself; [] otherwise.
            if typing_type_ft:
                classes = __get_typing_type_subclasses(typing_type_ft)
                if classes:
                    classes.append(__get_class_from_typing_type(typing_type_ft))
                return classes
            return []
        def __handle_typing_list(field_type: typing.Callable, value: typing.List) -> typing.List:
            # Parse a list value against a (possibly nested) typing.List[...]
            # annotation, instantiating element classes where needed.
            tmp_list = []
            if field_type.__dict__.get('_name', None) == 'List':
                ft_tuple = field_type.__dict__.get('__args__', ())
                if ft_tuple:
                    v = value
                    if value:
                        # Unwrap one nesting level when the data itself nests.
                        v = value[0] if isinstance(value[0], list) else value
                    return __handle_typing_list(ft_tuple[0], v)
                return value
            ft_cls = field_type
            all_classes = __get_all_cls_typing_type(ft_cls)
            if all_classes:
                for v in value:
                    if ft_cls == v.__class__:
                        tmp_list.append(v)  # already an instance - keep as-is
                    else:
                        # Polymorphic element: pick the best-matching class.
                        ft_cls = __get_high_compatibility_cls(all_classes, v)
                        if ft_cls:
                            tmp_list.append(ft_cls(**v))
            elif is_dataclass(ft_cls):
                for v in value:
                    if ft_cls == v.__class__:
                        tmp_list.append(v)
                    else:
                        tmp_list.append(ft_cls(**v))
            else:
                # Plain element type: pass the list through unchanged.
                tmp_list = value
            return tmp_list
        def __calculate_recursive_layer(value: typing.List, deal_with_value: typing.List) -> typing.List:
            # Re-wrap the parsed elements to the same nesting depth as the
            # original input value.
            if isinstance(value, list):
                if value:
                    if not isinstance(value[0], list):
                        return deal_with_value
                    return [__calculate_recursive_layer(value[0], deal_with_value)]
            return []
        def json_loads(cls, json_data: str) -> typing.Callable:
            """Build an instance directly from a JSON string."""
            return cls(**json.loads(json_data))
        def __init__(self, *args, **kwargs):
            # Replacement __init__: normalise kwargs (drop unknown keys,
            # instantiate nested dataclasses/lists), then delegate to the
            # dataclass-generated __init__.
            tmp_kwargs = {}
            tmp_kwargs.update(kwargs)
            for name, value in kwargs.items():
                # getting field type
                field_type = check_class.__annotations__.get(name, None)
                if field_type is None:
                    # Fall back to annotations inherited along the MRO.
                    for cls_ in check_class.__mro__:
                        if hasattr(cls_, "__annotations__"):
                            field_type = cls_.__annotations__.get(name, None)
                            if field_type:
                                break
                    else:
                        # Unknown everywhere: drop the redundant key.
                        tmp_kwargs.pop(name)
                # Polymorphic typing.Type support: choose best-matching class.
                all_maybe_cls = __get_all_cls_typing_type(field_type)
                if all_maybe_cls:
                    field_type = __get_high_compatibility_cls(all_maybe_cls, value)
                # typing.List support, incl. nested typing.List[typing.List[str]].
                if field_type is not None and isinstance(value, list):
                    tmp_kwargs[name] = __calculate_recursive_layer(value, __handle_typing_list(field_type, value))
                if is_dataclass(field_type) and isinstance(value, dict):
                    obj = field_type(**value)
                    tmp_kwargs[name] = obj
            o_init(self, *args, **tmp_kwargs)
        check_class.__init__ = __init__
        # Attach json_loads as a classmethod.
        check_class.json_loads = classmethod(json_loads)
        return check_class
    return wrapper(args[0]) if args else wrapper
|
from bs4 import BeautifulSoup
import requests
url = "https://www.theguardian.com/crosswords/cryptic/27558"
r = requests.get(url)
soup = BeautifulSoup(r.content, "html.parser")
# NOTE(review): this initial fetch is overwritten by the loop below and its
# result is never used - it looks like leftover exploration.
grid = list(soup.find(class_ = "crossword__grid"))
def make_grid(grid, l=15):
    """Build an l x l matrix of '#' (block) / ' ' (letter) cells.

    grid: a flat sequence of length l*l read in row-major order.
    """
    # BUG FIX: the original compared with `is "#"`, an identity test that only
    # works by accident of CPython's interning of short strings; use `==`.
    return [['#' if grid[i + (l * j)] == "#" else " " for i in range(l)]
            for j in range(l)]
def grid_print(grid, l=15):
    """Print rows 0..l-1 of *grid*, each row joined into a single line."""
    for row_idx in range(l):
        print(''.join(grid[row_idx]))
unique_grids = []
# Scan Guardian cryptic puzzles 27000-27559 and print each grid layout the
# first time it is seen.
for i in range(27000,27560):
    if i == 26330:
        # NOTE(review): dead guard - 26330 is outside the scanned range.
        continue
    url = "https://www.theguardian.com/crosswords/cryptic/"+str(i)
    print(url)
    r = requests.get(url)
    if not r:
        # Skip missing puzzles (requests.Response is falsy on 4xx/5xx).
        continue
    soup = BeautifulSoup(r.content, "html.parser")
    grid = list(soup.find(class_ = "crossword__grid"))
    blank = [['#' for i in range(15)] for j in range(15)]
    # Each cell rect is 32px wide; map pixel coordinates to 15x15 cells.
    for sq in grid[3:-1]:
        try:
            x = int(float(sq.rect['x']))//32
            y = int(float(sq.rect['y']))//32
        except TypeError:
            # Some nodes are the <rect> itself rather than a wrapper tag.
            x = int(float(sq['x']))//32
            y = int(float(sq['y']))//32
        blank[y][x] = ' '
    grid = ''.join([c for row in blank for c in row])
    if grid not in unique_grids:
        unique_grids.append(grid)
        grid_print(make_grid(grid))
        print("\n\n")
def approachlefthand():
    """Robot gesture: move into a pose approaching with the left hand, then
    signal gesture completion.

    NOTE(review): relies on the global robot instance `i01` and `sleep`
    supplied by the MyRobotLab/InMoov Jython runtime - confirm there.
    """
    #i01.startedGesture()
    # Limit joint speeds so the motion is smooth.
    i01.setHandSpeed("right", 31.0, 31.0, 31.0, 31.0, 31.0, 22.0)
    i01.setArmSpeed("left", 100.0, 100.0, 100.0, 100.0)
    i01.setArmSpeed("right", 6.0, 6.0, 6.0, 6.0)
    i01.setHeadSpeed(22.0, 22.0)
    i01.setTorsoSpeed(31.0, 13.0, 100.0)
    # Target pose.
    i01.moveHead(20,84)
    i01.moveArm("left",67,52,62,23)
    i01.moveArm("right",55,61,45,16)
    i01.moveHand("left",130,0,40,10,10,0)
    i01.moveHand("right",180,145,145,3,0,11)
    i01.moveTorso(90,85,90)
    sleep(4)  # give the servos time to reach the pose
    i01.finishedGesture()
|
# 10 test cases; for each, sum the clear "view margin" of every building that
# is strictly taller than its two neighbours on each side.
for i in range(1,11):
    num_apart=int(input()) # number of buildings
    high_list=list(map(int,input().split())) # building heights
    my_sum = 0
    for j in range(num_apart):
        if high_list[j] > 0 :
            # Heights of the two neighbours on each side.
            # NOTE(review): assumes the first/last two entries are 0 (as the
            # problem guarantees); otherwise j+1/j+2 overrun and j-1/j-2 wrap
            # around - confirm against the input format.
            side = [high_list[j-2],high_list[j-1],high_list[j+1],high_list[j+2]]
            m = 255
            for k in side :
                temp = high_list[j] - k
                if temp < 0 :
                    # A neighbour is at least as tall: no view from here.
                    m = 0
                    break
                if temp < m :
                    m = temp
            my_sum+=m
    print(f'#{i} {my_sum}')
    # print("%d %d" %(i,my_sum))
# print("%d %d" %(i,my_sum)) |
# Fetch Bing's COVID dashboard data, cache it to results2.json, and print
# confirmed/death/recovered/active counts for the world, every area and every
# nested sub-area/region.
import requests
import json

url = "https://www.bing.com/covid/data"
response = requests.get(url)
resp = response.json()
# Cache the raw payload and reload it (round-trip kept from the original so
# results2.json is always (re)written as a side effect).
with open("results2.json",'w') as f:
    json.dump(resp,f)
with open("results2.json",'r') as f:
    ditc = json.load(f)
# World totals; active = confirmed - deaths - recovered.
total = int(ditc['totalConfirmed'])
deaths = int(ditc['totalDeaths'])
recov = int(ditc['totalRecovered'])
active = total - deaths - recov
print(str(total)+" "+str(deaths)+" "+str(recov)+" "+str(active)+"\n")
areas = list(ditc['areas'])
for area in areas:
    totald = area['totalConfirmed']
    death = area['totalDeaths']
    rec = area['totalRecovered']
    active = int(totald)-int(death)-int(rec)
    name = area['displayName']
    lat = area['lat']
    lng = area['long']
    print(name+" "+str(lat)+" "+str(lng)+" "+str(totald)+" "+str(death)+" "+str(rec)+" "+str(active)+" \n")
    aras = list(area['areas'])
    for ar in aras:
        totald = ar['totalConfirmed']
        death = ar['totalDeaths']
        rec = ar['totalRecovered']
        active = int(totald)-int(death)-int(rec)
        name = ar['displayName']
        lat = ar['lat']
        lng = ar['long']
        print(name+" "+str(lat)+" "+str(lng)+" "+str(totald)+" "+str(death)+" "+str(rec)+" "+str(active)+" \n")
        region = list(ar['areas'])
        for rg in region:
            rtotald = rg['totalConfirmed']
            rdeath = rg['totalDeaths']
            rrec = rg['totalRecovered']
            # BUG FIX: the original computed ractive from the *parent* area's
            # totald/death/rec instead of this region's own figures.
            ractive = int(rtotald)-int(rdeath)-int(rrec)
            rname = rg['displayName']
            rlat = rg['lat']
            rlng = rg['long']
            print(rname+" "+str(rlat)+" "+str(rlng)+" "+str(rtotald)+" "+str(rdeath)+" "+str(rrec)+" "+str(ractive)+" \n")
|
def iif(condition, true_part, false_part):
    """Inline-if: return true_part when condition is truthy, else false_part.

    Replaces the fragile pre-Python-2.5 ``and/or`` idiom with the conditional
    expression. As before, both operands are evaluated by the caller before
    the call (this is a function, not short-circuit syntax).
    """
    return true_part if condition else false_part
from django.db import models
import uuid
class RetrivedData(models.Model):
    """Raw customer/loan record retrieved from an external source.

    NOTE(review): the class name keeps the original "Retrived" misspelling -
    renaming would require a schema migration and touches all importers.
    """
    name = models.CharField(verbose_name='Customer Name',max_length=120,null=True,blank=True)
    due_amount = models.IntegerField(verbose_name='Due Amount',null=True,blank=True)
    template_id = models.IntegerField(verbose_name='Template ID',null=True,blank=True)
    batch_id = models.IntegerField(verbose_name='Batch ID',null=True,blank=True)
    mobile_no = models.CharField(verbose_name='Mobile Number',max_length=120,null=True,blank=True)
    loan_account_no = models.CharField(verbose_name='Loan Account Number',max_length=120,null=True,blank=True)
    # NOTE(review): presumably marks whether the record has been processed -
    # confirm against the writers of this table.
    status = models.BooleanField(default=False)
    due_date = models.DateTimeField(null=True,blank=True)
    scheduled_at = models.DateTimeField(null=True,blank=True)
    created_at = models.DateTimeField(null=True,blank=True)
    updated_at = models.DateTimeField(null=True,blank=True)
    # Raw JSON response payload associated with this record.
    response = models.JSONField(null=True,blank=True)
class ProcessData(models.Model):
    """One processed call/message attempt for a loan account, storing the
    request and response payloads exchanged with the downstream service."""
    loan_account_no = models.CharField(verbose_name='Loan Account Number',max_length=120,null=True,blank=True)
    mobile_no = models.CharField(verbose_name='Mobile Number',max_length=120,null=True,blank=True)
    template_id = models.IntegerField(verbose_name='Template ID',null=True,blank=True)
    request = models.JSONField()
    response = models.JSONField()
    audio_url = models.URLField(max_length=200,null=True,blank=True)
    created_at = models.DateTimeField()
    def __str__(self):
        # Human-readable identity: the loan account number.
        return self.loan_account_no
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-08-16 15:12
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated (Django 1.11): adds a composite index covering the most
    common job-lookup columns to the prodsys job table."""
    dependencies = [
        ('prodsys', '0034_auto_20170807_1406'),
    ]
    operations = [
        migrations.AddIndex(
            model_name='job',
            index=models.Index(fields=['task', 'file', 'run_number', 'chunk_number', 'panda_id', 'attempt', 'status'], name='prodsys_job_task_id_a3b4c0_idx'),
        ),
    ]
|
#-*- coding:utf-8 -*-
"""
ECC Utils

This submodule comprises ECC utilities and common exploit scripts.
It also features the ECC Analyser.
"""
|
INPUT_FILE = "B-large.in"
OUTPUT_FILE = "B-large.out"
res = []
def write_output():
    """Write every answer in the module-global *res* to OUTPUT_FILE, one
    "Case #i: answer" line per case (1-indexed).

    Uses a context manager so the file handle is closed even on error
    (the original leaked the handle if a write raised).
    """
    with open(OUTPUT_FILE, "w") as out_file:
        for i, line in enumerate(res):
            out_file.write("Case #%d: %s\n" % (i + 1, line))
def count_plusminus(stack):
    """Count the '+' -> '-' transitions scanning *stack* left to right."""
    prev = stack[0]
    if len(stack) == 1:
        return 0
    flips = 0
    for ch in stack[1:]:
        if prev == '+' and ch == '-':
            flips += 1
        prev = ch
    return flips
def solve(case):
    """Return the flip count for one pancake-stack string ('+' = happy up).

    Each '+'->'-' transition costs two flips; a stack starting blank side up
    needs one extra flip.
    """
    base = 2 * count_plusminus(case)
    return base if case[0] == '+' else base + 1
def get_next(data):
    """Yield each line of *data* with surrounding whitespace stripped."""
    for raw in data:
        yield raw.strip()
if __name__ == '__main__':
    # Python 2 entry point: read all cases (skipping the leading count line),
    # solve each, then write the answers to OUTPUT_FILE.
    print 'Starting...'
    f = file(INPUT_FILE)  # py2 builtin alias for open()
    for line in get_next(f.read().strip().split('\n')[1:]):
        res.append(solve(line))
    write_output()
    f.close()
    print 'done.'
import pandas as pd
import pickle, os
# Resolve the data directory from data_location.txt, defaulting to the
# current directory when the file is missing or unreadable.
try:
    with open("data_location.txt", "r") as loc_file:  # was leaked unclosed
        DATA_PATH = loc_file.read().strip()
except OSError:
    # Narrowed from a bare `except:` so real bugs (e.g. KeyboardInterrupt)
    # are not swallowed; only file-access errors trigger the fallback.
    DATA_PATH = "."
def get_images_code_for_react_skinless():
    """Emit a JS ``images: [...]`` snippet pairing each flickr URL with its AWS
    segmentation image and skin-removed segmentation image.

    Skips images never uploaded skinless and monochrome images, then writes
    the snippet to data/react-codes/react_for_skinless.txt.

    All file handles now use context managers (the original leaked five).
    """
    with open("%s/data/basics/fnums_list.p" % DATA_PATH, "rb") as fh:
        fnums_list = pickle.load(fh)
    with open("%s/data/basics/fnum_to_flickr_url_dict.p" % DATA_PATH, "rb") as fh:
        fnum_to_url_dict = pickle.load(fh)
    with open("%s/data/monochrome_list_%s.p" % (DATA_PATH, "hsv"), "rb") as fh:
        monochrome_list = set(pickle.load(fh))
    with open("../aws/people_segmented_images_uploaded_to_aws_fnums_without_skin.p", "rb") as fh:
        already_uploaded_skinless = pickle.load(fh)
    my_str = ""
    my_str += " images: [\n"
    for fnum in fnums_list:
        if not fnum in already_uploaded_skinless:
            continue
        if fnum in monochrome_list:
            continue
        url = fnum_to_url_dict[fnum]
        aws_seg_url = "https://design-trends-bucket.s3.us-east-2.amazonaws.com/people_seg_results_%d.png" % fnum
        aws_seg_without_skin_url = "https://design-trends-bucket.s3.us-east-2.amazonaws.com/people_seg_results_without_skin_%d.png" % fnum
        my_str += "['%s','%s','%s'],\n" % (url, aws_seg_url, aws_seg_without_skin_url)
    # Drop the trailing ",\n" of the last row, keep a newline, close the array.
    my_str = my_str[:-2] + "\n"
    my_str += "],"
    with open("%s/data/react-codes/react_for_skinless.txt" % DATA_PATH, "w") as text_file:
        text_file.write(my_str)

get_images_code_for_react_skinless()
|
# Generated by Django 3.2.1 on 2021-05-16 01:29
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated (Django 3.2.1): registers unmanaged models mirroring the
    existing auth_*/django_*/CovidVaccine_* tables (managed=False means no DDL
    is emitted for them), creates CovidvaccinePriorityDate, and deletes the
    superseded PriorityGroup/User/UserAddress/Userlogin models.

    NOTE(review): hand-editing generated migrations is discouraged; comments
    here are documentation only.
    """
    dependencies = [
        ('CovidVaccine', '0003_auto_20210515_1557'),
    ]
    operations = [
        migrations.CreateModel(
            name='AuthGroup',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=150, unique=True)),
            ],
            options={
                'db_table': 'auth_group',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='AuthGroupPermissions',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
            ],
            options={
                'db_table': 'auth_group_permissions',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='AuthPermission',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('codename', models.CharField(max_length=100)),
                ('name', models.CharField(max_length=255)),
            ],
            options={
                'db_table': 'auth_permission',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='AuthUser',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('password', models.CharField(max_length=128)),
                ('last_login', models.DateTimeField(blank=True, null=True)),
                ('is_superuser', models.BooleanField()),
                ('username', models.CharField(max_length=150, unique=True)),
                ('last_name', models.CharField(max_length=150)),
                ('email', models.CharField(max_length=254)),
                ('is_staff', models.BooleanField()),
                ('is_active', models.BooleanField()),
                ('date_joined', models.DateTimeField()),
                ('first_name', models.CharField(max_length=150)),
            ],
            options={
                'db_table': 'auth_user',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='AuthUserGroups',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
            ],
            options={
                'db_table': 'auth_user_groups',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='AuthUserUserPermissions',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
            ],
            options={
                'db_table': 'auth_user_user_permissions',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='CovidvaccineUser',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=20)),
                ('ssn', models.IntegerField()),
                ('age', models.CharField(max_length=20)),
                ('phone', models.IntegerField()),
                ('prioirtyid', models.IntegerField()),
            ],
            options={
                'db_table': 'CovidVaccine_user',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='CovidvaccineUseraddress',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('street', models.CharField(max_length=20)),
                ('city', models.CharField(max_length=20)),
                ('state', models.CharField(max_length=20)),
                ('zipcode', models.IntegerField()),
                ('locationx', models.IntegerField()),
                ('locationy', models.IntegerField()),
            ],
            options={
                'db_table': 'CovidVaccine_useraddress',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='CovidvaccineUserlogin',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('email', models.CharField(max_length=20)),
                ('password', models.CharField(max_length=20)),
            ],
            options={
                'db_table': 'CovidVaccine_userlogin',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='DjangoAdminLog',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('action_time', models.DateTimeField()),
                ('object_id', models.TextField(blank=True, null=True)),
                ('object_repr', models.CharField(max_length=200)),
                ('change_message', models.TextField()),
                ('action_flag', models.PositiveSmallIntegerField()),
            ],
            options={
                'db_table': 'django_admin_log',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='DjangoContentType',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('app_label', models.CharField(max_length=100)),
                ('model', models.CharField(max_length=100)),
            ],
            options={
                'db_table': 'django_content_type',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='DjangoMigrations',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('app', models.CharField(max_length=255)),
                ('name', models.CharField(max_length=255)),
                ('applied', models.DateTimeField()),
            ],
            options={
                'db_table': 'django_migrations',
                'managed': False,
            },
        ),
        migrations.CreateModel(
            name='DjangoSession',
            fields=[
                ('session_key', models.CharField(max_length=40, primary_key=True, serialize=False)),
                ('session_data', models.TextField()),
                ('expire_date', models.DateTimeField()),
            ],
            options={
                'db_table': 'django_session',
                'managed': False,
            },
        ),
        # The only managed table this migration actually creates.
        migrations.CreateModel(
            name='CovidvaccinePriorityDate',
            fields=[
                ('priorityid', models.IntegerField(primary_key=True, serialize=False)),
                ('slotid', models.IntegerField()),
            ],
        ),
        migrations.DeleteModel(
            name='PriorityGroup',
        ),
        migrations.DeleteModel(
            name='User',
        ),
        migrations.DeleteModel(
            name='UserAddress',
        ),
        migrations.DeleteModel(
            name='Userlogin',
        ),
    ]
|
#=========================================================#
# [+] Script : Encoder / Decoder Base64 #
# [+] Auteur : oOScuByOo #
# [+] Site : xxxxxxxxxxxxxxxxxxx #
# [+] Twitter : xxxxxxxx #
#=========================================================#
# Python 2 demo: str and bytes are interchangeable here; under Python 3,
# b64encode would require a bytes argument and print is a function.
import base64 # Import the base64 library
Str1 = "V1RKZ0NabDBFcVdNcko5QlZzN0RDWDVzUQ=="; # Define the first string
Str1 = base64.b64decode(Str1) # Decode the first string
Str2 = "WTJgCZl0EqWMrJ9BVs7DCX5sQ"; # Define the second string
Str2 = base64.b64encode(Str2) # Encode the second string
print "Decoded String: " + Str1 # Print the result of the first string
print "Encoded String: " + Str2 # Print the result of the second string
|
# -*- coding: utf-8 -*-
"""
Created on 13 January, 2018 @ 11:08 AM
@author: Bryant Chhun
email: bchhun@gmail.com
Project: Insight_AI_BayLabs
License:
"""
import vtk
from vtk import vtkPolyDataReader
from vtk.util import numpy_support as ns
def load_vtk(filename):
    """Read a legacy .vtk polydata file and return its points as an (N, 3)
    numpy array of X, Y, Z coordinates."""
    reader = vtkPolyDataReader()
    reader.SetFileName(filename)
    # Load every vector and scalar array present in the file.
    reader.ReadAllVectorsOn()
    reader.ReadAllScalarsOn()
    reader.Update()
    point_data = reader.GetOutput().GetPoints().GetData()
    return ns.vtk_to_numpy(point_data)
def write_numpy_to_vtk(input):
    """Convert a numpy array to a flat vtkFloatArray.

    deep=True makes VTK copy the data so the returned array owns its buffer
    (analogous to a deepcopy).
    """
    flattened = input.ravel()
    return ns.numpy_to_vtk(num_array=flattened, deep=True, array_type=vtk.VTK_FLOAT)
# Convert a 3-D numpy array to vtkImageData carrying its values as scalars.
def convert_to_vtk(input_array):
    """Wrap *input_array* (a 3-D fitting-result volume) in a vtkImageData
    whose point scalars are the array values (x-fastest ordering via the
    transpose(2, 1, 0).ravel())."""
    vtk_data_array = ns.numpy_to_vtk(
        num_array=input_array.transpose(2, 1, 0).ravel(),
        deep=True,
        array_type=vtk.VTK_FLOAT)
    # BUG FIX: vtkPolyData has no SetDimensions(); volumes belong in
    # vtkImageData (see the VTK class reference).
    img_vtk = vtk.vtkImageData()
    img_vtk.SetDimensions(input_array.shape)
    # img_vtk.SetSpacing(spacing[::-1])
    img_vtk.GetPointData().SetScalars(vtk_data_array)
    return img_vtk  # previously the result was built and then discarded
# Convert an existing VTK data array to vtkImageData.
def convert_to_poly(vtk_data_array):
    """Wrap a VTK data array in a vtkImageData of matching dimensions.

    BUG FIX: the commented-out vtkImageData line was the correct choice -
    vtkPolyData has no SetDimensions().
    NOTE(review): vtk_data_array.shape suggests the caller passes a numpy-like
    array here rather than a raw vtkFloatArray - confirm the call sites.
    """
    img_vtk = vtk.vtkImageData()
    img_vtk.SetDimensions(vtk_data_array.shape)
    # img_vtk.SetSpacing(spacing[::-1])
    img_vtk.GetPointData().SetScalars(vtk_data_array)
    return img_vtk  # previously built and discarded
|
import requests
import re
import os
import pyfiglet
from pyfiglet import fonts
from colorama import Fore, Back, Style
from colorama import init
import time
import urllib3
from datetime import datetime
init()  # enable ANSI colour handling on Windows (colorama)
# Console colors
W = '\033[0m'  # white (normal)
R = '\033[31m' # red
G = '\033[32m' # green
O = '\033[33m' # orange
B = '\033[34m' # blue
P = '\033[35m' # purple
C = '\033[36m' # cyan
GR = '\033[37m' # gray
Y = '\033[93m'
BOLD = '\033[1m'
END = '\033[0m'
#Cookie of GET request https://www.okcupid.com/match
Cookie1 = ""
#Cookie and Authorization Token of POST request https://www.okcupid.com/1/apitun/messages/send
Cookie2 = ""
#Message
message = ''
# Python 2 input() evaluates the typed text, so a number arrives as an int.
payload_count = input("Enter the payload run count: ")
# Main scraper loop: repeatedly fetch /match, extract profile details with a
# regex over the embedded JSON, log new users to okcupiddata.txt and send each
# one a message. Runs forever; the outer try/except restarts after any error.
while True:
    try:
        # Captures realname, age, location, match %, userid, username and the
        # thumbnail URL from the inline profile JSON on the /match page.
        regex = r"(\"userinfo\" : \{\"realname\" : \")([a-zA-Z\s\d\w]*)(\", \"gender_letter\" : \")([\w]*)(\", \"gender\" : \")([a-zA-Z]*)(\", \"age\" : )([\d]*)(, \"join_date\" : )(\d*, \"rel_status\" \: \")([a-zA-Z]*)(\", \"location\" :\ \")([a-zA-Z\d\\\s]*)(\", \"orientation\" : \")([a-zA-Z]*)(\", \"displayname\" : \")([a-zA-Z\d\s]*)(\", \"staff_badge\" : [a-zA-Z]*}, \"last_login\" : [\d]*, \"likes\" : {\"mutual_like_vote\" : [\d]*, \"recycled\" : [\d]*, \"passed_on\" : [\d]*, \"they_like\" : [a-zA-Z]*, \"you_like\" : [a-zA-Z]*, \"via_spotlight\" : [a-zA-Z]*, \"mutual_like\" : [\d]*, \"vote\" : \{\}\}, \"percentages\" : \{\"match\" : )([\d]*)(, \"enemy\" : [\d]*}, \"inactive\" : [a-zA-Z]*, \"userid\" : \")([\d]*)(\", \"username\" : \")([\da-zA-Z]*)(\", \"staff\" : [a-zA-Z]*, \"thumbs\" : \[\{\"[\d]*x[\d]*\" : \")([a-zA-Z\d\/\:\.\_]*)"
        okcupiddata = open("okcupiddata.txt","a")
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
        i = 0
        # One pass per payload run requested by the user.
        while i <= payload_count:
            headers = {
                "Host": "www.okcupid.com",
                "User-Agent": "MMozilla/5.0 (Windows NT 10.0; Win64; x64; rv:52.0) Gecko/20100101 Firefox/52.0 Cyberfox/52.9.1",
                "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
                "Accept-Language": "en-US,en;q=0.5",
                "Accept-Encoding": "gzip, deflate",
                "Cookie": Cookie1,
                "Connection": "close",
                "Upgrade-Insecure-Requests": "1"
            }
            url = "https://www.okcupid.com/match"
            #time.sleep(5)
            try:
                # Fetch the match page and stage the markup in temp.txt.
                request = requests.get(url, headers=headers, verify=False)
                response = (request.text.encode("utf-8"))
                temp = open("temp.txt","w")
                #print response
                temp.write(response)
                temp.close()
            except:
                # NOTE(review): bare except - also swallows KeyboardInterrupt.
                print "waiting for 20 sec.."
                time.sleep(20)
                continue
            temp_read = open ("temp.txt","r")
            contents = temp_read.readlines()
            #print contents
            for test_str in contents:
                matches = re.finditer(regex, test_str, re.MULTILINE)
                for matchNum, match in enumerate(matches, start=1):
                    # CSV row: name,age,location,match%,userid,username,thumb.
                    details = match.group(2)+","+match.group(8)+","+match.group(13)+","+match.group(19)+","+match.group(21)+","+match.group(23)+","+match.group(25)
                    userid = match.group(21)
                    # okcupiddata.txt doubles as the already-messaged log.
                    with open ('okcupiddata.txt','a+') as f:
                        if userid in f.read():
                            print R+"[+]"+END+GR+"Message already sent to "+END+match.group(2)
                        else:
                            print G+"[+]"+END+O+"Details Captured: "+END+match.group(2)+" "+match.group(8)+" "+match.group(13)+" "+match.group(19)+" "+match.group(21)+" "+match.group(23)+" "+match.group(25)
                            headers = {
                                "Host": "www.okcupid.com",
                                "User-Agent": "MMozilla/5.0 (Windows NT 10.0; Win64; x64; rv:52.0) Gecko/20100101 Firefox/52.0 Cyberfox/52.9.1",
                                "Accept": "*/*",
                                "Accept-Language": "en-US,en;q=0.5",
                                "Accept-Encoding": "gzip, deflate",
                                "x-okcupid-platform": "DESKTOP",
                                "Content-Type": "text/plain;charset=UTF-8",
                                "origin": "https://www.okcupid.com/",
                                "Referer": "https://www.okcupid.com/profile/"+userid+"?cf=regular,matchsearch",
                                "Cookie": Cookie2,
                                "Connection": "close"
                            }
                            now = datetime.now()
                            sendingtime = now.strftime("%d/%m/%Y %H:%M:%S")
                            print G+"[*]"+END+O+"["+sendingtime+"]"+END+O+"Sending Message To "+END+match.group(2)
                            #print message
                            # Message-send payload is built as a raw JSON string.
                            postreq = '{"receiverid":"'+userid+'","body":"'+message+'","source":"desktop_global","service":"profile"}'
                            print postreq
                            url = "https://www.okcupid.com/1/apitun/messages/send"
                            time.sleep(5)
                            messagesend = requests.post(url, headers=headers, data=postreq, verify=False)
                            sendresponse = (messagesend.text.encode("utf-8"))
                            #print sendresponse
                            msgack = "success"
                            if msgack in sendresponse:
                                print G+"[+]"+END+O+"Message Sent Successfully to "+END+match.group(2)
                            else:
                                print sendresponse
                                print R+"[*]"+END+O+"Message Sending Failed"+END
                            print '..............................................................................................................'
                            okcupiddata.write(details)
                            okcupiddata.write('\n')
            temp_read.close()
            os.remove("temp.txt")
            i = i+1
            time.sleep(5)
        okcupiddata.close()
    except:
        # Recovery path: wait, refresh the match page, restart the cycle.
        print "Waiting for 10 Sec"
        time.sleep(10)
        headers = {
            "Host": "www.okcupid.com",
            "User-Agent": "MMozilla/5.0 (Windows NT 10.0; Win64; x64; rv:52.0) Gecko/20100101 Firefox/52.0 Cyberfox/52.9.1",
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
            "Accept-Language": "en-US,en;q=0.5",
            "Accept-Encoding": "gzip, deflate",
            "Cookie": Cookie1,
            "Connection": "close",
            "Upgrade-Insecure-Requests": "1"
        }
        url = "https://www.okcupid.com/match"
        #time.sleep(5)
        request = requests.get(url, headers=headers, verify=False)
        response = (request.text.encode("utf-8"))
        continue
# NOTE(review): unreachable - the `while True` loop above never breaks.
print ('\n')
print O+"Whooo!! Data Captured For Dating..."+END
|
# -*- coding: utf-8 -*-
from django.db import models
from django.utils import timezone
import time,hashlib
# Create your models here.
from django.utils.timezone import now, timedelta
class Test(models.Model): # test data table
    """Sample/test table exercising common Django field types.

    NOTE(review): field identifiers are deliberately Chinese (valid Python 3
    identifiers) and map directly to DB columns - renaming needs a migration.
    """
    Char字段 = models.CharField(verbose_name="Char字段", max_length=32) # CharField sample
    选择 = (('xz1', '选择1'), ('xz2', '选择2'), ('Unkown', '未知'),) # choices: option 1 / option 2 / unknown
    选择字段 = models.CharField(choices=选择, default='Unkown', verbose_name="选择字段", null=True, max_length=6,blank=True) # choice field
    Decimal字段 = models.DecimalField(verbose_name="Decimal字段", max_digits=16, decimal_places=2, default=0) # DecimalField sample
    布尔字段 = models.BooleanField(verbose_name="布尔字段", default=False, blank=True) # boolean field
    时间字段 = models.DateTimeField(verbose_name='时间字段', blank=True, auto_now_add=True) # timestamp, set on creation
    整数字段 = models.IntegerField(verbose_name="整数字段", default=0, blank=True) # integer field
    #外键 = models.ForeignKey('外键', verbose_name='外键', blank=True, null=True,related_name="merchantUser") # foreign key (disabled)
    class Meta:
        verbose_name = "测试数据"
        verbose_name_plural = "测试数据"
        ordering = ['-时间字段'] # newest first
|
# http://www.cyberforum.ru/turbo-pascal/thread1294897.html
# Monte Carlo estimate of the integral of f(x) = -x^2 + 4 over [-2, 2]
# (exact value: 32/3 ~= 10.667).
import matplotlib.pyplot as plt
import random
g=0.0
Integral=0.0
fx=0.0
fx1=0.0
xi=[]
yi=[]
random.seed(0)  # fixed seed -> reproducible estimate
a=-2 # real interval is from -2 to 2
b=2  # (the source problem oddly says -3 to 3)
N=1000000  # number of random samples
k=b-a # k holds the length of the integration interval
for i in range (N):
    # run N trials
    g=random.random() # g is a uniform random value on [0, 1]
    x= a + g*(b-a) # x is a uniform random value on [a, b]
    # print(g,x)
    # BUG FIX: the original tested `x>=-2.0 or x<= 2.0`, which is always
    # true; clamping the integrand's support requires `and`.
    if x>=-2.0 and x<= 2.0: # evaluate the function
        fx1=-x**2+4
    else:
        fx1=0
    yi.append(fx1)
    xi.append(x)
    #print (fx1, x)
    fx=fx+fx1
# Mean of the samples times the interval length approximates the integral.
Integral=(1/N)*k*fx
print ("Количество случайных значений u=", N)
print('Интеграл=',Integral)
# Plot the sampled points (disabled):
#plt.scatter(xi, yi)
#plt.plot(xi, yi)
#plt.show()
|
# -*- coding:utf-8 -*-
from typing import List
class Solution:
    """Return the k smallest elements of an array ("least numbers" problem)."""

    # O(n log k): maintain a size-k max-heap (negated values) of the smallest
    # elements seen so far. heapq is a min-heap, so values are stored negated
    # and -topk[0] is the largest of the current k smallest.
    def getLeastNumbers(self, arr: List[int], k: int) -> List[int]:
        import heapq
        if k == 0:
            # BUG FIX: the original indexed topk[0] on an empty heap and
            # crashed whenever k == 0 and arr was non-empty.
            return []
        topk = [-x for x in arr[:k]]
        heapq.heapify(topk)
        # Debug prints removed: they ran once per element in the hot loop.
        for i in range(k, len(arr)):
            if -topk[0] > arr[i]:
                # arr[i] is smaller than the current k-th smallest: swap it in.
                heapq.heappop(topk)
                heapq.heappush(topk, -arr[i])
        return [-x for x in topk]
if __name__ == '__main__':
    # Quick smoke test of the heap-based solution.
    sample = [0, 1, 2, 1]
    smallest_two = Solution().getLeastNumbers(sample, 2)
    print(smallest_two)
|
import csv
import io
from dateutil.parser import parse
from django.contrib.auth.hashers import make_password
from django.contrib.auth.models import User
from django.http import JsonResponse
from django.shortcuts import render
from django.views import View
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import APIView
from .models import Company, Invoice, Vendor
# importing data to DATABASE
class DataUploadView(View):
    """Render the CSV upload form (GET) and bulk-import its rows (POST).

    Each CSV row carries vendor, company, user and invoice columns; related
    rows are bulk-created first (ignoring conflicts), then the invoices.
    POST returns {"status_code": 200} on success, {"status_code": 500} on any
    import error.
    """

    def get(self, request):
        template_name = 'import_data.html'
        return render(request, template_name)

    def post(self, request):
        param_file = io.TextIOWrapper(request.FILES['data'].file)
        list_of_dict = list(csv.DictReader(param_file))
        vendor_objs = [
            Vendor(
                id=row['vendor_id'],
                vendor_name=row['vendor_name'],
            )
            for row in list_of_dict
        ]
        company_objs = [
            Company(
                id=row['company_id'],
                company_name=row['company_name'],
                npwp=row['npwp'],
                email=row['email'],
                address=row['address'],
                city=row['city'],
                region=row['region'],
                post_code=row['post_code'],
            )
            for row in list_of_dict
        ]
        # BUG FIX: the original line ended with a trailing comma, so a 1-tuple
        # (not the hash string) was stored as every user's password, making
        # those accounts unusable. Hash once and reuse for speed.
        password = make_password("admin")
        user_objs = [
            User(
                username=row['user_id'],
                email='admin@email.com',
                password=password,
                is_active=True,
            )
            for row in list_of_dict
        ]
        try:
            Company.objects.bulk_create(objs=company_objs, ignore_conflicts=True)
            print("create_company")
            User.objects.bulk_create(objs=user_objs, ignore_conflicts=True)
            print("create_user")
            Vendor.objects.bulk_create(objs=vendor_objs, ignore_conflicts=True)
            print("create_vendor")
            invoice_objs = [
                Invoice(
                    invoice_id=row['invoice_id'],
                    user=User.objects.get(username=row['user_id']),
                    company=Company.objects.get(id=row['company_id']),
                    vendor=Vendor.objects.get(id=row['vendor_id']),
                    transaction_type=row['transaction_type'],
                    commercial_invoice_number=row['commercial_invoice_number'],
                    status_start=row['status_start'],
                    status_tax_summary=row['status_tax_summary'],
                    invoice_date=parse(row['invoice_date']),
                    due_date=parse(row['due_date']),
                    item_name=row['item_name'],
                    unitprice=row['unitprice'] if row['unitprice'] else None,
                    quantity=row['quantity'] if row['quantity'] else None,
                    discount=row['discount'] if row['discount'] else None,
                    gross_amount=row['gross_amount'] if row['gross_amount'] else None,
                    tax_amount=row['tax_amount'] if row['tax_amount'] else None,
                    total_amount=row['total_amount'] if row['total_amount'] else None,
                    tax_period=row['tax_period'],
                    revision=row['revision'] if row['revision'] else None,
                    reported_date=parse(row['reported_date']) if row['reported_date'] else None,
                    reported_status=row['reported_status'],
                    reported_status_desc=row['reported_status_desc'],
                    tax_type=row['tax_type'],
                    tax_document_number=row['tax_document_number'],
                    tax_document_date=parse(row['tax_document_date']) if row['tax_document_date'] else None,
                    approved_date=parse(row['approved_date']) if row['approved_date'] else None,
                ) for row in list_of_dict
            ]
            Invoice.objects.bulk_create(objs=invoice_objs)
            print("create_invoice")
            # BUG FIX: the original unconditionally reset returnmsg to 200
            # after an inner try/except, masking invoice-import failures.
            returnmsg = {"status_code": 200}
            print('imported successfully')
        except Exception as e:
            print('Error While Importing Data: ', e)
            returnmsg = {"status_code": 500}
        return JsonResponse(returnmsg)
# 1) verification of third-parties:
class TheVendorFilterView(APIView):
    """Check whether a third-party (vendor) company name exists in the dataset.

    Query params:
        vendor_name: exact (case-insensitive) vendor name to look up.

    Body-level status codes (HTTP transport status is always 200):
        200 + ``third_party_company_exist`` flag when the lookup succeeds,
        400 when ``vendor_name`` is missing,
        404 when no vendor matches,
        500 on unexpected errors.
    """
    queryset = Vendor.objects.all()

    # Bug fix: DRF's dispatch() passes the request as the first positional
    # argument to the handler, so the original `get(self, format=None)`
    # silently bound the Request object to `format`. Accept it explicitly.
    def get(self, request, format=None):
        queryset = self.queryset
        try:
            keyword = request.query_params.get('vendor_name', '')
            if keyword:
                # Main queryset filter: exact, case-insensitive name match.
                query_set = queryset.filter(vendor_name__iexact=keyword)
                if not query_set:
                    print('Vendor_name is not in dataset ')
                    return Response({
                        "status_code": status.HTTP_404_NOT_FOUND,
                        "error_msg": "third-party company name is not in dataset"
                    })
            else:
                print('Vendor_name not provided: ')
                return Response({
                    "status_code": status.HTTP_400_BAD_REQUEST,
                    "error_msg": "third-party company name is not provided!!"
                })
        except Exception as e:
            # Log the actual error instead of silently discarding it.
            print('Error While fetching Data from vendor filter: ', e)
            return Response({
                "status_code": status.HTTP_500_INTERNAL_SERVER_ERROR,
                "error_msg": "Error While fetching Data from vendor filter"
            })
        # Only reached when the filter matched at least one vendor.
        return Response({
            "status_code": status.HTTP_200_OK,
            "third_party_company_exist": True if query_set else False
        })
# 2) scoring of commercial relationship between companies:
class TheInvoiceFilterView(APIView):
    """Count invoices exchanged between a company and a third-party vendor.

    Query params:
        company_name: exact (case-insensitive) company name.
        vendor_name:  exact (case-insensitive) vendor name.

    Body-level status codes (HTTP transport status is always 200):
        200 + ``total_transactions`` when invoices exist between the pair,
        400 when either name is missing (was 404 with a misleading
            "is not in dataset" message — consistent with TheVendorFilterView),
        404 when no transactions link the two companies,
        500 on unexpected errors.
    """
    queryset = Invoice.objects.all()

    # Bug fix: accept the request explicitly — DRF passes it positionally,
    # so the original `get(self, format=None)` bound it to `format`.
    def get(self, request, format=None):
        queryset = self.queryset
        try:
            cname_1 = request.query_params.get('company_name', '')
            cname_2 = request.query_params.get('vendor_name', '')
            if cname_1 and cname_2:
                # Main queryset filter: invoices linking both parties.
                query_set = queryset.filter(company_id__company_name__iexact=cname_1).filter(
                    vendor_id__vendor_name__iexact=cname_2)
                if not query_set:
                    print('No transaction found between these companies')
                    return Response({
                        "status_code": status.HTTP_404_NOT_FOUND,
                        "error_msg": "No transaction found between these companies in dataset"
                    })
            else:
                # A missing query parameter is a client error, not a data miss.
                if not cname_1:
                    return Response({
                        "status_code": status.HTTP_400_BAD_REQUEST,
                        "error_msg": "first company name is not provided!!"
                    })
                return Response({
                    "status_code": status.HTTP_400_BAD_REQUEST,
                    "error_msg": "second company name is not provided!!"
                })
        except Exception as e:
            # Log the actual error; message previously said "vendor filter".
            print('Error While fetching Data from invoice filter: ', e)
            return Response({
                "status_code": status.HTTP_500_INTERNAL_SERVER_ERROR,
                "error_msg": "Error While fetching Data from invoice filter"
            })
        # Only reached when at least one matching invoice exists.
        return Response({
            "status_code": status.HTTP_200_OK,
            "total_transactions": query_set.count()
        })
|
from django.db import models
class CuadradoMedio(models.Model):
    """Stores one run of the middle-square ("cuadrados medios") PRNG."""
    semilla = models.FloatField()  # seed value
    iteraciones = models.IntegerField()  # number of iterations to run
    ri = models.FloatField()  # generated Ri value — presumably in [0, 1); confirm against the generator code

    # Plain accessors kept for existing callers; direct attribute access works too.
    def set_ri(self, ri):
        self.ri = ri

    def get_ri(self):
        return self.ri
class Aditivo(models.Model):
    """Stores one run of a (mixed/additive) linear congruential PRNG."""
    semilla = models.FloatField()  # seed value
    multiplicador = models.FloatField()  # multiplier (a)
    incremento = models.FloatField()  # increment (c)
    modulo = models.FloatField()  # modulus (m)
    ri = models.FloatField()  # generated Ri value

    # Plain accessors kept for existing callers; direct attribute access works too.
    def set_ri(self, ri):
        self.ri = ri

    def get_ri(self):
        return self.ri
class Multiplicativo(models.Model):
    """Stores one run of the multiplicative congruential PRNG.

    Bug fix: ``set_ri``/``get_ri`` operate on ``self.ri``, but the model
    declared no ``ri`` field, so the value was never persisted and
    ``get_ri()`` raised AttributeError when called before ``set_ri()``.
    A nullable FloatField is added (a schema migration is required).
    """
    moderna = models.FloatField()
    semilla = models.FloatField()  # seed value
    incremento = models.FloatField()  # increment
    ri = models.FloatField(null=True, blank=True)  # generated Ri value

    # Plain accessors kept for existing callers; direct attribute access works too.
    def set_ri(self, ri):
        self.ri = ri

    def get_ri(self):
        return self.ri
import pandas as pd
import os
import csv
import platform
import numpy as np
def read_csv_to_dataframe(csv_path, dtype=''):
    """Load a CSV file into a pandas DataFrame.

    Args:
        csv_path: path to the CSV file to read.
        dtype: optional dtype mapping forwarded to ``pd.read_csv``. When
            falsy (the default), the file is read with inferred dtypes.
            When given, the 'sell_date' column is additionally parsed as
            '%Y-%m-%d' dates.

    Returns:
        A pandas.DataFrame with the file contents.
    """
    # Fixes: the original compared `dtype is ''` (identity check that only
    # worked via string interning), and used `pd.datetime` plus read_csv's
    # `date_parser=` — both removed in modern pandas. Test truthiness and
    # parse the date column explicitly instead.
    if not dtype:
        return pd.read_csv(csv_path)
    df = pd.read_csv(csv_path, dtype=dtype)
    df['sell_date'] = pd.to_datetime(df['sell_date'], format='%Y-%m-%d')
    return df
def setup_directory():
    """Ensure a local "data" directory exists, creating it when absent."""
    if os.path.exists("data"):
        return
    os.makedirs("data")
# Read n and print two numbers summing to n, chosen by the parity of n
# and of n // 2 (presumably to make the pair coprime — verify against the
# original problem statement).
n = int(input())
half = n // 2
if n % 2 == 1:
    pair = (half, half + 1)
elif half % 2 == 1:
    pair = (half - 2, half + 2)
else:
    pair = (half - 1, half + 1)
print(*pair)
|
from torch.nn.modules.module import Module
from ..functions.add import MyAddFunction
class MyAddModule(Module):
    """nn.Module wrapper around MyAddFunction; presumably returns the
    element-wise sum of its two inputs (behavior is defined by
    ..functions.add.MyAddFunction — confirm there)."""

    def forward(self, input1, input2):
        # NOTE(review): instantiating the Function and calling it is the
        # legacy autograd style; new-style torch.autograd.Functions must be
        # invoked via MyAddFunction.apply(input1, input2). Confirm which
        # style ..functions.add uses before changing this.
        return MyAddFunction()(input1, input2)
|
class Egg:
    """An egg with a comparable size ``x`` (default 5).

    Bug fix: the class relied solely on the Python 2 ``__cmp__`` hook,
    which Python 3 ignores — ``==`` fell back to identity and ``<``/``>``
    raised TypeError. Rich comparison methods are provided so ordering
    works on both versions; ``__cmp__`` is kept for legacy Python 2 use.
    """
    def __init__(self, x=5):
        self.x = x

    def __cmp__(self, other):
        # Legacy Python 2 hook; ignored on Python 3.
        if self.x < other.x:
            return -1
        elif self.x > other.x:
            return 1
        else:
            return 0

    def __eq__(self, other):
        return self.x == other.x

    def __ne__(self, other):
        return self.x != other.x

    def __lt__(self, other):
        return self.x < other.x

    def __le__(self, other):
        return self.x <= other.x

    def __gt__(self, other):
        return self.x > other.x

    def __ge__(self, other):
        return self.x >= other.x

    # Defining __eq__ would otherwise set __hash__ to None on Python 3;
    # keep the original identity-based hash so instances remain usable in
    # sets/dicts (same contract as the __cmp__-only original).
    __hash__ = object.__hash__
# Demonstration: compare two Egg instances of different sizes.
# Bug fix: the original used Python 2 `print expr` statements, which are a
# SyntaxError on Python 3; print() calls behave identically on both versions.
e1 = Egg(4)
e2 = Egg(5)
print(e1 == e2)
print(e1 > e2)
print(e1 < e2)
|
#!/usr/bin/python3
# Homework 4 Unit Tests
import functools
import multiprocessing
import random
import sys
import time
import unittest

from hw4 import sequentialSearchRec
from hw4 import binarySearchRec
from HashTable import HashTable
# Make a decorator that skips tests when the required functionality hasn't been implemented yet.
def skip_when_not_implemented(func):
    """Decorator: convert NotImplementedError raised by a test into a skip.

    functools.wraps copies the docstring (and name/metadata) of the wrapped
    test, so unittest keeps showing readable descriptions — the original
    only copied ``__doc__`` by hand.
    """
    @functools.wraps(func)
    def test(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except NotImplementedError:
            raise unittest.SkipTest("Not implemented yet")
    return test
# Test to see if we're currently running in IDLE, where multiprocessing misbehaves.
# Test based on https://stackoverflow.com/questions/3431498/what-code-can-i-use-to-check-if-python-is-running-in-idle
def running_in_idle():
    """Return True when we appear to be running under IDLE.

    IDLE replaces sys.stdin with its own class, so the module name of the
    stdin object gives it away.
    """
    stdin_module = getattr(sys.stdin, "__module__", None)
    return stdin_module is not None and stdin_module.startswith("idle")
# An unmodified HashTable will get stuck in an infinite loop when too many items have been added
# Make a decorator to test for this case, and skip the related tests
def skip_on_infinite_loop(func):
    """Decorator: skip a test that would hang if HashTable cannot grow.

    Strategy: run hashTableLoopTest (which inserts enough items to force a
    rehash) in a separate process with a 5-second timeout; if that probe
    never finishes, assume the decorated test would also hang, and skip it.
    Under IDLE — where multiprocessing misbehaves — the user is instead
    warned and may press Ctrl+C to skip manually.
    """
    def test(*args, **kwargs):
        if running_in_idle():
            try:
                print("IDLE may interfere with the testing for this test. If you have not implemented part 2 of question 3, this test will get stuck in an infinite loop. If it gets stuck, press Ctrl+C to skip the test.")
                return func(*args, **kwargs)
            except KeyboardInterrupt:
                # Ctrl+C from the user is treated as an explicit skip.
                raise unittest.SkipTest("Test skipped by user")
        else:
            # Probe in a child process so a hang can be detected and killed.
            background_process = multiprocessing.Process(target=hashTableLoopTest)
            background_process.start()
            background_process.join(timeout=5.0) # Wait up to five seconds for this test to complete...
            # Brief grace period before checking liveness — presumably to let
            # the child finish shutting down; confirm if timing issues arise.
            time.sleep(0.1)
            if background_process.is_alive():
                background_process.terminate()
                raise unittest.SkipTest("If HashTable cannot grow, this test will get stuck in an infinite loop. It will be skipped until you're done with part 2 of question 3.")
            return func(*args, **kwargs)
    # Copy the docstring, so the unit tests give better descriptions.
    test.__doc__ = func.__doc__
    return test
def hashTableLoopTest():
    """Probe used by skip_on_infinite_loop: insert 50 items into a fresh
    HashTable, enough to force it to grow/rehash."""
    table = HashTable()
    for key in range(50):
        table[key] = key
class NotAllowed(Exception):
    """Raised when test code uses an operation forbidden by the assignment
    (e.g. slicing a StrictList)."""
    pass
class StrictList:
    """List wrapper for the search tests: counts indexed reads and forbids
    slicing.

    Any unknown attribute (append, extend, ...) is delegated to the wrapped
    list; each delegation is announced with a print so students can see it.
    """

    def __init__(self, iter=None, parent=None):
        # Parameter names kept for caller compatibility; `parent` is
        # accepted but unused here.
        self.list = [] if iter is None else list(iter)
        self.accesses = 0  # number of successful single-item reads

    def __getattr__(self, attr_name):
        print("Delegating method:", attr_name)
        return getattr(self.list, attr_name)

    def __getitem__(self, index):
        if isinstance(index, slice):
            raise NotAllowed("You should not be using slicing for this assignment!")
        self.accesses += 1
        return self.list[index]

    def __len__(self):
        return len(self.list)
class TestProblem1(unittest.TestCase):
    """Tests for hw4.sequentialSearchRec (recursive sequential search)."""

    def test_API(self):
        '''P1: Sanity Test: Is sequentialSearchRec callable?'''
        try:
            sequentialSearchRec([1,2,3], 2)
        except NotImplementedError:
            raise unittest.SkipTest("sequentialSearchRec has not been implemented yet")
        # Bug fix: the bare `except:` also swallowed KeyboardInterrupt and
        # SystemExit; only real errors should fail the sanity check.
        except Exception:
            self.fail("Error while calling sequentialSearchRec")

    @skip_when_not_implemented
    def test_findHead(self):
        '''P1: Find first element of list'''
        items = StrictList([1,2,3])
        self.assertTrue(sequentialSearchRec(items, 1), "Failed to find head of list")

    @skip_when_not_implemented
    def test_findMid(self):
        '''P1: Find middle element of list'''
        items = StrictList([2,1,3])
        self.assertTrue(sequentialSearchRec(items, 1), "Failed to find middle of list")

    @skip_when_not_implemented
    def test_findTail(self):
        '''P1: Find last element of list'''
        items = StrictList([3,2,1])
        self.assertTrue(sequentialSearchRec(items, 1), "Failed to find tail of list")

    @skip_when_not_implemented
    def test_cantFindSingle(self):
        '''P1: Failed search in a single-item list'''
        items = StrictList([2])
        self.assertFalse(sequentialSearchRec(items, 1), "Found a value that isn't in the list")

    @skip_when_not_implemented
    def test_cantFindMultiple(self):
        '''P1: Failed search in a multiple-item list'''
        items = StrictList([5,6,7])
        self.assertFalse(sequentialSearchRec(items, 1), "Found a value that isn't in the list")
class TestProblem2(unittest.TestCase):
    """Tests for hw4.binarySearchRec (recursive binary search on sorted lists)."""

    def test_API(self):
        '''P2: Sanity Test: Is binarySearchRec callable?'''
        try:
            binarySearchRec([1,2,3], 2)
        except NotImplementedError:
            raise unittest.SkipTest("binarySearchRec has not been implemented yet")
        # Bug fix: the bare `except:` also swallowed KeyboardInterrupt and
        # SystemExit; only real errors should fail the sanity check.
        except Exception:
            self.fail("Error while calling binarySearchRec")

    @skip_when_not_implemented
    def test_findHead(self):
        '''P2: Find first element of list'''
        items = StrictList([232,233,234])
        self.assertTrue(binarySearchRec(items, 232), "Failed to find head of list")

    @skip_when_not_implemented
    def test_findMid(self):
        '''P2: Find middle element of list'''
        items = StrictList([231,232,233])
        self.assertTrue(binarySearchRec(items, 232), "Failed to find middle of list")

    @skip_when_not_implemented
    def test_findTail(self):
        '''P2: Find last element of list'''
        items = StrictList([230,231,232])
        self.assertTrue(binarySearchRec(items, 232), "Failed to find tail of list")

    @skip_when_not_implemented
    def test_cantFindSingle(self):
        '''P2: Failed search in a single-item list'''
        items = StrictList([2])
        self.assertFalse(binarySearchRec(items, 232), "Found a value that isn't in the list")

    @skip_when_not_implemented
    def test_findLong(self):
        '''P2: Find element in a very long list'''
        items = StrictList(range(256))
        self.assertTrue(binarySearchRec(items, 232), "Failed to find an element in the list")

    @skip_when_not_implemented
    def test_cantFindLong(self):
        '''P2: Failed search in a very long list'''
        items = StrictList(range(212))
        self.assertFalse(binarySearchRec(items, 232), "Found a value that isn't in the list")

    @skip_when_not_implemented
    def test_ensureBinarySearch(self):
        '''P2: Is binarySearchRec actually performing a binary search?'''
        # StrictList counts element reads; a real binary search touches
        # only O(log n) of the 512 items.
        items = StrictList(range(512))
        self.assertTrue(binarySearchRec(items, 232), "Failed to find an element in the list")
        self.assertLess(items.accesses, 20, "Binary search should only be testing log_2(N) items in a list with N items")
class TestProblem3(unittest.TestCase):
    """Tests for the HashTable class (storage, empty_slots(), and growth)."""

    def test_API(self):
        '''P3: Sanity Test: Is HashTable constructable?'''
        try:
            HashTable()
        # Bug fix: the bare `except:` also swallowed KeyboardInterrupt and
        # SystemExit; only real errors should fail the sanity check.
        except Exception:
            self.fail("Error while constructing HashTable")

    def utility_save_and_restore(self, count):
        ''' Using chr() and ord() is safe for ASCII values [32,127]. Starting at 'A' (65) to keep it printable '''
        items = [(i, chr(ord('A')+i)) for i in range(count)] # (0,'A'), (1,'B'), (2,'C')...
        ht = HashTable()
        for key,value in items:
            ht[key] = value
        for key,value in items:
            self.assertEqual(value, ht[key], "Item not properly stored in HashTable")

    def utility_save_and_restore_random(self, count):
        ''' Assign each index a random integer, test to make sure it survives rehashes. '''
        items = [(i, random.randint(0,2**30)) for i in range(count)] # (0,'?'), (1,'x'), (2,'8')...
        ht = HashTable()
        for key,value in items:
            ht[key] = value
        for key,value in items:
            self.assertEqual(value, ht[key], "Item not properly stored in HashTable")

    @skip_when_not_implemented
    def test_newItemSizes(self):
        '''P3: Does the value returned by empty_slots() decrease as new items are added?'''
        items = [(i, chr(ord('A')+i)) for i in range(9)] # (0,'A'), (1,'B'), (2,'C')...
        ht = HashTable()
        last_empty = ht.empty_slots()
        for key,value in items:
            ht[key] = value
            empty = ht.empty_slots()
            self.assertEqual(last_empty-1, empty, "Adding a new item to the HashTable should reduce the value returned by empty_slots() by one.")
            last_empty = empty

    @skip_when_not_implemented
    def test_updatedItemSizes(self):
        '''P3: Does the value returned by empty_slots() stay the same when items in the HashTable are updated?'''
        items = [(i, chr(ord('A')+i)) for i in range(9)] # (0,'A'), (1,'B'), (2,'C')...
        ht = HashTable()
        for key,value in items:
            ht[key] = value
        final_emptiness = ht.empty_slots()
        for key,value in items:
            ht[key] = value.lower() # (0,'a'), (1,'b')....
            empty = ht.empty_slots()
            self.assertEqual(final_emptiness, empty, "Updating an elemenet of HashTable should not reduce the value returned by empty_slots()")

    # 11 items fit in the initial table, so no rehash is needed here and no
    # infinite-loop guard is applied.
    def test_withoutResize(self):
        '''P3: Can 11 elements be stored in and recovered from the HashTable?'''
        self.utility_save_and_restore(11)

    @skip_on_infinite_loop
    def test_oneResize(self):
        '''P3: Do elements persist after forcing the HashTable to rehash?'''
        self.utility_save_and_restore(23)

    @skip_on_infinite_loop
    def test_maxSize(self):
        '''P3: Do elements persist after forcing the HashTable to rehash many times?'''
        self.utility_save_and_restore_random(1009)

    @skip_on_infinite_loop
    def test_sizeProgression(self):
        '''P3: Does the table grow in proper size steps?'''
        # Suggested prime capacities, each roughly double the previous.
        expected_sizes = set([11,23,53,97,193,389,769,1543])
        ht = HashTable()
        for i in range(1000):
            ht[i] = i
        self.assertTrue(ht.size in expected_sizes, "Hash table size was not one of the suggested primes.")
if __name__ == "__main__":
    try:
        unittest.main(exit=False, verbosity=2)
    # Bug fix: the bare `except:` also swallowed KeyboardInterrupt and
    # SystemExit; catch Exception so Ctrl+C still interrupts while real
    # errors are reported.
    except Exception:
        import traceback
        traceback.print_exc()
    # Keep the console window open when launched by double-click; IDLE keeps
    # its own shell open, so skip the pause there.
    if not running_in_idle():
        input("Press ENTER to dismiss...")
|
#! ./python27.tar.gz/python27/bin/python27
#-*- coding:utf-8 -*-
# Python 2 script (cPickle, print statements): recycles integer codes in the
# recall-tag -> code mapping. Tags that no longer appear give up their codes
# to newly seen tags, least-used stale tags first.
import sys
import re
import cPickle as pickle
import random
# Load the previous per-tag counts, the tag->code mapping, and the new tags.
old_recalltag_count = pickle.load(open("recalltag_count.pkl","rb"));recalltag_count={}
recalltag2code = pickle.load(open("/home/hdp-reader-tag/shechanglue/sources/recalltag2code.pkl","rb"))
newtag = pickle.load(open("new_tag.pkl","rb"))
# Get each encoded tag's count (0 when the tag was never counted).
for recalltag in recalltag2code:
    recalltag_count[recalltag] = old_recalltag_count.get(recalltag,0)
# Shrink recalltag_count and newtag: drop "new" tags that already hold a code.
# NOTE(review): popping from newtag while iterating newtag.keys() is safe on
# Python 2 (keys() returns a list) but would raise RuntimeError on Python 3.
for recalltag in newtag.keys():
    if recalltag_count.get(recalltag,-1)!=-1:
        recalltag_count.pop(recalltag)
        newtag.pop(recalltag)
# Report how many tags still need a code.
print "the new tag need be update size is %s"%len(newtag)
# Sort remaining tags by ascending count so the least-used codes are recycled first.
recalltag_count= sorted(recalltag_count.items(), key=lambda d:d[1], reverse = False)
# Hand each new tag the code of the next least-used stale tag.
if len(newtag)>0:
    for idx,tag_to_change in enumerate(newtag.keys()):
        tag_be_change = recalltag_count[idx][0]
        recalltag2code[tag_to_change] = recalltag2code[tag_be_change]
# Persist the updated mapping.
pickle.dump(recalltag2code,open("new_recalltag2code.pkl","wb"))
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2019-02-28 02:35
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated Django migration: drops the legacy ``activity``,
    ``region``, ``tenure`` and ``title`` fields from the Application model.

    Note: applying this migration permanently deletes the columns and their
    data; do not edit the operations by hand.
    """

    dependencies = [
        ('wildlifecompliance', '0123_auto_20190227_1511'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='application',
            name='activity',
        ),
        migrations.RemoveField(
            model_name='application',
            name='region',
        ),
        migrations.RemoveField(
            model_name='application',
            name='tenure',
        ),
        migrations.RemoveField(
            model_name='application',
            name='title',
        ),
    ]
|
# Read N pairs "a b", sort them by b (ties broken by a), and print each
# pair back as "a b" in that order.
N = int(input())
records = []
for _ in range(N):
    values = list(map(int, input().split()))
    records.append([values[1], values[0]])
records.sort()
for second, first in records:
    print(first, second)
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'MTabla'
db.create_table(u'tramiteDoc_mtabla', (
('IdMTabla', self.gf('django.db.models.fields.CharField')(max_length=4, primary_key=True)),
('NomMTabla', self.gf('django.db.models.fields.CharField')(max_length=80, blank=True)),
('AbrMTabla', self.gf('django.db.models.fields.CharField')(max_length=5, blank=True)),
('PropMTabla', self.gf('django.db.models.fields.CharField')(max_length=2, blank=True)),
('EstMTabla', self.gf('django.db.models.fields.CharField')(max_length=2, blank=True)),
))
db.send_create_signal(u'tramiteDoc', ['MTabla'])
# Adding model 'DTabla'
db.create_table(u'tramiteDoc_dtabla', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('IdDTab', self.gf('django.db.models.fields.CharField')(max_length=5, blank=True)),
('IdMTab', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['tramiteDoc.MTabla'], db_column='idMTabla')),
('NomDTab', self.gf('django.db.models.fields.CharField')(max_length=80, blank=True)),
('AbrDTab', self.gf('django.db.models.fields.CharField')(max_length=5, null=True, blank=True)),
('AbrOpDTab', self.gf('django.db.models.fields.CharField')(max_length=5, null=True, blank=True)),
('FactDTab', self.gf('django.db.models.fields.IntegerField')(blank=True)),
('IndDTab', self.gf('django.db.models.fields.IntegerField')(blank=True)),
('IdRefDTab', self.gf('django.db.models.fields.CharField')(max_length=5, blank=True)),
('PropDTab', self.gf('django.db.models.fields.CharField')(max_length=2, blank=True)),
('EstDTab', self.gf('django.db.models.fields.CharField')(max_length=2, blank=True)),
))
db.send_create_signal(u'tramiteDoc', ['DTabla'])
# Adding model 'MPersonal'
db.create_table(u'tramiteDoc_mpersonal', (
('IdMPer', self.gf('django.db.models.fields.CharField')(max_length=30, primary_key=True)),
('ApePMPer', self.gf('django.db.models.fields.CharField')(max_length=80, blank=True)),
('ApeMMPer', self.gf('django.db.models.fields.CharField')(max_length=80, blank=True)),
('NomMPer', self.gf('django.db.models.fields.CharField')(max_length=80, blank=True)),
('FechNacMPer', self.gf('django.db.models.fields.DateField')(blank=True)),
('IdTipPerMPer', self.gf('django.db.models.fields.CharField')(max_length=5, blank=True)),
('IdTipDocMPer', self.gf('django.db.models.fields.CharField')(max_length=5, blank=True)),
('NDocMPer', self.gf('django.db.models.fields.CharField')(max_length=20, blank=True)),
('EstMPer', self.gf('django.db.models.fields.CharField')(max_length=2, blank=True)),
('FecIngMPer', self.gf('django.db.models.fields.DateField')(blank=True)),
('Tel1MPer', self.gf('django.db.models.fields.CharField')(max_length=20, blank=True)),
('Tel2MPer', self.gf('django.db.models.fields.CharField')(max_length=20, blank=True)),
('Email1MPer', self.gf('django.db.models.fields.EmailField')(max_length=254, blank=True)),
('Email2MPer', self.gf('django.db.models.fields.EmailField')(max_length=254, blank=True)),
('IdArea', self.gf('django.db.models.fields.CharField')(max_length=5, blank=True)),
))
db.send_create_signal(u'tramiteDoc', ['MPersonal'])
# Adding model 'MCliente'
db.create_table(u'tramiteDoc_mcliente', (
('IdMCli', self.gf('django.db.models.fields.CharField')(max_length=30, primary_key=True)),
('NomMCli', self.gf('django.db.models.fields.CharField')(max_length=300, null=True, blank=True)),
('IdTipMCli', self.gf('django.db.models.fields.CharField')(max_length=5, null=True, blank=True)),
('IdTipInstMCli', self.gf('django.db.models.fields.CharField')(max_length=5, null=True, blank=True)),
('DirMCli', self.gf('django.db.models.fields.CharField')(max_length=100, null=True, blank=True)),
('IdPaisMCli', self.gf('django.db.models.fields.CharField')(max_length=5, null=True, blank=True)),
('IdRegMCli', self.gf('django.db.models.fields.CharField')(max_length=5, null=True, blank=True)),
('IdDepMCli', self.gf('django.db.models.fields.CharField')(max_length=5, null=True, blank=True)),
('IdProvMCli', self.gf('django.db.models.fields.CharField')(max_length=5, null=True, blank=True)),
('IdDistMCli', self.gf('django.db.models.fields.CharField')(max_length=5, null=True, blank=True)),
('RefMCli', self.gf('django.db.models.fields.CharField')(max_length=200, null=True, blank=True)),
('Tel1MCli', self.gf('django.db.models.fields.CharField')(max_length=20, null=True, blank=True)),
('Tel2MCli', self.gf('django.db.models.fields.CharField')(max_length=20, null=True, blank=True)),
('Tel3MCli', self.gf('django.db.models.fields.CharField')(max_length=20, null=True, blank=True)),
('Email1MCli', self.gf('django.db.models.fields.EmailField')(max_length=300, null=True, blank=True)),
('FecIngMCli', self.gf('django.db.models.fields.DateField')(null=True, blank=True)),
('EstMCli', self.gf('django.db.models.fields.CharField')(max_length=2, blank=True)),
))
db.send_create_signal(u'tramiteDoc', ['MCliente'])
# Adding model 'MProyecto'
db.create_table(u'tramiteDoc_mproyecto', (
('IdMProy', self.gf('django.db.models.fields.CharField')(max_length=30, primary_key=True)),
('IdMCli', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['tramiteDoc.MCliente'], null=True, db_column='IdMCli', blank=True)),
('NomMProy', self.gf('django.db.models.fields.CharField')(max_length=700, null=True, blank=True)),
('FecEntMProy', self.gf('django.db.models.fields.DateField')(null=True, blank=True)),
('FecEntRealMProy', self.gf('django.db.models.fields.DateField')(null=True, blank=True)),
('FecIniMProy', self.gf('django.db.models.fields.DateField')(null=True, blank=True)),
('FecIniRealMProy', self.gf('django.db.models.fields.DateField')(null=True, blank=True)),
('FecFinMProy', self.gf('django.db.models.fields.DateField')(null=True, blank=True)),
('FecFinRealMProy', self.gf('django.db.models.fields.DateField')(null=True, blank=True)),
('MontInvCliMProy', self.gf('django.db.models.fields.DecimalField')(default=0, null=True, max_digits=15, decimal_places=2, blank=True)),
('MontInvFipMProy', self.gf('django.db.models.fields.DecimalField')(default=0, null=True, max_digits=15, decimal_places=2, blank=True)),
('MontTotMProy', self.gf('django.db.models.fields.DecimalField')(default=0, null=True, max_digits=15, decimal_places=2, blank=True)),
('MontTInvRealCliMProy', self.gf('django.db.models.fields.DecimalField')(default=0, null=True, max_digits=15, decimal_places=2, blank=True)),
('MontTInvRealFipMProy', self.gf('django.db.models.fields.DecimalField')(default=0, null=True, max_digits=15, decimal_places=2, blank=True)),
('MontTInvRealOtrMProy', self.gf('django.db.models.fields.DecimalField')(default=0, null=True, max_digits=15, decimal_places=2, blank=True)),
('MontTotRealMProy', self.gf('django.db.models.fields.DecimalField')(default=0, null=True, max_digits=15, decimal_places=2, blank=True)),
('IdTipProMProy', self.gf('django.db.models.fields.CharField')(default='0', max_length=5, null=True, blank=True)),
('EstMProy', self.gf('django.db.models.fields.CharField')(max_length=2, null=True, blank=True)),
('FecIngMProy', self.gf('django.db.models.fields.DateField')(null=True, blank=True)),
('IdEstMProy', self.gf('django.db.models.fields.CharField')(default='0', max_length=5, null=True, blank=True)),
('IdSector', self.gf('django.db.models.fields.CharField')(default='0', max_length=5, null=True, blank=True)),
('IdBanco', self.gf('django.db.models.fields.CharField')(default='0', max_length=5, null=True, blank=True)),
('NumCtaInterMProy', self.gf('django.db.models.fields.CharField')(max_length=50, null=True, blank=True)),
('MontTotAdeCliMProy', self.gf('django.db.models.fields.DecimalField')(default=0, null=True, max_digits=15, decimal_places=2, blank=True)),
('MontTotAdeFipMProy', self.gf('django.db.models.fields.DecimalField')(default=0, null=True, max_digits=15, decimal_places=2, blank=True)),
('MontTotAdeOtrMProy', self.gf('django.db.models.fields.DecimalField')(default=0, null=True, max_digits=15, decimal_places=2, blank=True)),
('MontTotAdeTotMProy', self.gf('django.db.models.fields.DecimalField')(default=0, null=True, max_digits=15, decimal_places=2, blank=True)),
('IdConv', self.gf('django.db.models.fields.CharField')(max_length=5, null=True, blank=True)),
('TiempoMProy', self.gf('django.db.models.fields.CharField')(max_length=50, null=True, blank=True)),
('PorcTotAdeMProy', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=5, decimal_places=2, blank=True)),
('RutaPDFMProy', self.gf('django.db.models.fields.files.FileField')(max_length=100, null=True, blank=True)),
('RutaPDFConvMProy', self.gf('django.db.models.fields.files.FileField')(max_length=100, null=True, blank=True)),
('RutaOCRConvMProy', self.gf('django.db.models.fields.files.FileField')(max_length=100, null=True, blank=True)),
('RutaOCRMProy', self.gf('django.db.models.fields.files.FileField')(max_length=100, null=True, blank=True)),
('RutaPDFACMProy', self.gf('django.db.models.fields.files.FileField')(max_length=100, null=True, blank=True)),
('RutaOCRACMProy', self.gf('django.db.models.fields.files.FileField')(max_length=100, null=True, blank=True)),
('EstCierreMProy', self.gf('django.db.models.fields.CharField')(max_length=2, blank=True)),
))
db.send_create_signal(u'tramiteDoc', ['MProyecto'])
# Adding model 'MProtocolo'
db.create_table(u'tramiteDoc_mprotocolo', (
('IdMProt', self.gf('django.db.models.fields.CharField')(max_length=30, primary_key=True)),
('IdMProy', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['tramiteDoc.MProyecto'], null=True, db_column='IdMProy', blank=True)),
('IdMCli', self.gf('django.db.models.fields.CharField')(max_length=30, blank=True)),
('NomMProt', self.gf('django.db.models.fields.CharField')(max_length=100, blank=True)),
('DescMProt', self.gf('django.db.models.fields.CharField')(max_length=300, blank=True)),
('FecEntMProt', self.gf('django.db.models.fields.DateField')(blank=True)),
('FecEntRealMProt', self.gf('django.db.models.fields.DateField')(blank=True)),
('IdTipFormEntMProt', self.gf('django.db.models.fields.CharField')(max_length=5, blank=True)),
('IdRefMProt', self.gf('django.db.models.fields.CharField')(max_length=30, blank=True)),
('IdTipDocGen', self.gf('django.db.models.fields.CharField')(max_length=5, blank=True)),
('EstMProt', self.gf('django.db.models.fields.CharField')(max_length=2, blank=True)),
('DocInter', self.gf('django.db.models.fields.CharField')(max_length=2, blank=True)),
))
db.send_create_signal(u'tramiteDoc', ['MProtocolo'])
# Adding model 'DProtocolo'
db.create_table(u'tramiteDoc_dprotocolo', (
('IdDProt', self.gf('django.db.models.fields.CharField')(max_length=35, primary_key=True)),
('IdMProt', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['tramiteDoc.MProtocolo'], db_column='IdMProt')),
('NomDProt', self.gf('django.db.models.fields.CharField')(max_length=100, blank=True)),
('IdTipDocDProt', self.gf('django.db.models.fields.CharField')(max_length=5, blank=True)),
('UbLogDProt', self.gf('django.db.models.fields.CharField')(max_length=5, blank=True)),
('UbiFisDProt', self.gf('django.db.models.fields.CharField')(max_length=5, blank=True)),
('IdPerDProt', self.gf('django.db.models.fields.CharField')(max_length=30, blank=True)),
('RutaPdfDProt', self.gf('django.db.models.fields.files.FileField')(max_length=100, null=True, blank=True)),
('RutaOcrDProt', self.gf('django.db.models.fields.files.FileField')(max_length=100, null=True, blank=True)),
('EstDProt', self.gf('django.db.models.fields.CharField')(max_length=2, blank=True)),
))
db.send_create_signal(u'tramiteDoc', ['DProtocolo'])
# Adding model 'DProyecto'
db.create_table(u'tramiteDoc_dproyecto', (
('IdDProy', self.gf('django.db.models.fields.CharField')(max_length=30, primary_key=True)),
('IdMProy', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['tramiteDoc.MProyecto'], db_column='IdMProy')),
('FecIniDProy', self.gf('django.db.models.fields.DateField')(blank=True)),
('FecFinDProy', self.gf('django.db.models.fields.DateField')(blank=True)),
('FecFirmaDProy', self.gf('django.db.models.fields.DateField')(blank=True)),
('MontAportFIPDProy', self.gf('django.db.models.fields.DecimalField')(max_digits=15, decimal_places=2, blank=True)),
('MontAportCliDProy', self.gf('django.db.models.fields.DecimalField')(max_digits=15, decimal_places=2, blank=True)),
('MontAportOtrDProy', self.gf('django.db.models.fields.DecimalField')(max_digits=15, decimal_places=2, blank=True)),
('IdTipDocDProy', self.gf('django.db.models.fields.CharField')(max_length=5, null=True, blank=True)),
('DescDProy', self.gf('django.db.models.fields.CharField')(max_length=200, null=True, blank=True)),
('RutaPDFDProy', self.gf('django.db.models.fields.files.FileField')(max_length=100, null=True, blank=True)),
('RutaOcrDProy', self.gf('django.db.models.fields.files.FileField')(max_length=100, null=True, blank=True)),
('EstDProy', self.gf('django.db.models.fields.CharField')(max_length=2, blank=True)),
))
db.send_create_signal(u'tramiteDoc', ['DProyecto'])
# Adding model 'MColaborador'
db.create_table(u'tramiteDoc_mcolaborador', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('IdMPer', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['tramiteDoc.MPersonal'], null=True, db_column='IdMPer', blank=True)),
('IdMProy', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['tramiteDoc.MProyecto'], null=True, db_column='IdMProy', blank=True)),
('IdTipoCargoProy', self.gf('django.db.models.fields.CharField')(max_length=5, null=True, blank=True)),
('FecIniMCol', self.gf('django.db.models.fields.DateField')(default=datetime.datetime(2014, 6, 3, 0, 0), null=True, blank=True)),
('FecFinMCol', self.gf('django.db.models.fields.DateField')(default=datetime.datetime(2014, 6, 3, 0, 0), null=True, blank=True)),
('FecFirmaMCol', self.gf('django.db.models.fields.DateField')(default=datetime.datetime(2014, 6, 3, 0, 0), null=True, blank=True)),
('TiempoMCol', self.gf('django.db.models.fields.CharField')(default=0, max_length=50, null=True, blank=True)),
('MontoMCol', self.gf('django.db.models.fields.DecimalField')(default=0, null=True, max_digits=15, decimal_places=2, blank=True)),
('MontoMenMCol', self.gf('django.db.models.fields.DecimalField')(default=0, null=True, max_digits=15, decimal_places=2, blank=True)),
('EstMCol', self.gf('django.db.models.fields.CharField')(max_length=2, null=True, blank=True)),
('EstActMCol', self.gf('django.db.models.fields.CharField')(max_length=2, null=True, blank=True)),
('RutaPDFMCol', self.gf('django.db.models.fields.files.FileField')(max_length=100, null=True, blank=True)),
('RutaOCRMCol', self.gf('django.db.models.fields.files.FileField')(max_length=100, null=True, blank=True)),
))
db.send_create_signal(u'tramiteDoc', ['MColaborador'])
# Adding model 'MUsuarioDer'
db.create_table(u'tramiteDoc_musuarioder', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('IdUsuOri', self.gf('django.db.models.fields.CharField')(max_length=5, blank=True)),
('IdUsuDer', self.gf('django.db.models.fields.CharField')(max_length=5, blank=True)),
('EstDer', self.gf('django.db.models.fields.CharField')(max_length=2, blank=True)),
))
db.send_create_signal(u'tramiteDoc', ['MUsuarioDer'])
# Adding model 'Bandeja'
db.create_table(u'tramiteDoc_bandeja', (
('IdBandeja', self.gf('django.db.models.fields.CharField')(max_length=50, primary_key=True)),
('IdMUsuEnv', self.gf('django.db.models.fields.CharField')(max_length=5)),
('IdMUsuRec', self.gf('django.db.models.fields.CharField')(max_length=5)),
('IdMProt', self.gf('django.db.models.fields.CharField')(max_length=30, blank=True)),
('NomMUsuEnv', self.gf('django.db.models.fields.CharField')(max_length=50, blank=True)),
('NomMUsuRec', self.gf('django.db.models.fields.CharField')(max_length=50, blank=True)),
('CliBProt', self.gf('django.db.models.fields.CharField')(max_length=500, blank=True)),
('ProyBProt', self.gf('django.db.models.fields.CharField')(max_length=500, blank=True)),
('FecEnv', self.gf('django.db.models.fields.DateField')(blank=True)),
('FecRec', self.gf('django.db.models.fields.DateField')(blank=True)),
('FecSist', self.gf('django.db.models.fields.DateField')(blank=True)),
('MenBProt', self.gf('django.db.models.fields.CharField')(max_length=400, blank=True)),
('EstBProt', self.gf('django.db.models.fields.CharField')(max_length=2, blank=True)),
('EstAccion', self.gf('django.db.models.fields.CharField')(max_length=6, blank=True)),
('EstObservado', self.gf('django.db.models.fields.CharField')(max_length=2, blank=True)),
('ObsProt', self.gf('django.db.models.fields.CharField')(max_length=200, blank=True)),
('IdProtRef', self.gf('django.db.models.fields.CharField')(max_length=30, blank=True)),
))
db.send_create_signal(u'tramiteDoc', ['Bandeja'])
# Adding model 'CClientes'
db.create_table(u'tramiteDoc_cclientes', (
('IdCCli', self.gf('django.db.models.fields.CharField')(max_length=30, primary_key=True)),
('IdMCli', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['tramiteDoc.MCliente'], db_column='IdMCli', blank=True)),
('ApePCCli', self.gf('django.db.models.fields.CharField')(max_length=80, blank=True)),
('ApeMCCli', self.gf('django.db.models.fields.CharField')(max_length=80, blank=True)),
('NomCCli', self.gf('django.db.models.fields.CharField')(max_length=80, blank=True)),
('IdTipDocCCli', self.gf('django.db.models.fields.CharField')(max_length=5, blank=True)),
('NumDocCCli', self.gf('django.db.models.fields.CharField')(max_length=20, blank=True)),
('EstCCli', self.gf('django.db.models.fields.CharField')(max_length=2, blank=True)),
('Tel1CCli', self.gf('django.db.models.fields.CharField')(max_length=20, blank=True)),
('Tel2CCli', self.gf('django.db.models.fields.CharField')(max_length=20, blank=True)),
('Email1CCli', self.gf('django.db.models.fields.EmailField')(max_length=254, blank=True)),
('Email2CCli', self.gf('django.db.models.fields.EmailField')(max_length=254, blank=True)),
))
db.send_create_signal(u'tramiteDoc', ['CClientes'])
# Adding model 'SClientes'
db.create_table(u'tramiteDoc_sclientes', (
('IdSCli', self.gf('django.db.models.fields.CharField')(max_length=30, primary_key=True)),
('IdMCli', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['tramiteDoc.MCliente'], max_length=30, db_column='IdMCli', blank=True)),
('NomSCli', self.gf('django.db.models.fields.CharField')(max_length=100, blank=True)),
('IdTipSCli', self.gf('django.db.models.fields.CharField')(max_length=5, blank=True)),
('DirSCli', self.gf('django.db.models.fields.CharField')(max_length=100, blank=True)),
('IdPaisSCli', self.gf('django.db.models.fields.CharField')(max_length=5, blank=True)),
('IdRegSCli', self.gf('django.db.models.fields.CharField')(max_length=5, blank=True)),
('IdDepSCli', self.gf('django.db.models.fields.CharField')(max_length=5, blank=True)),
('IdProvSCli', self.gf('django.db.models.fields.CharField')(max_length=5, blank=True)),
('IdDistSCli', self.gf('django.db.models.fields.CharField')(max_length=5, blank=True)),
('RefSCli', self.gf('django.db.models.fields.CharField')(max_length=200, blank=True)),
('Tel1SCli', self.gf('django.db.models.fields.CharField')(max_length=20, blank=True)),
('Tel2SCli', self.gf('django.db.models.fields.CharField')(max_length=20, blank=True)),
('Tel3SCli', self.gf('django.db.models.fields.CharField')(max_length=20, blank=True)),
('Email1SCli', self.gf('django.db.models.fields.EmailField')(max_length=254, blank=True)),
))
db.send_create_signal(u'tramiteDoc', ['SClientes'])
# Adding model 'PProyectos'
db.create_table(u'tramiteDoc_pproyectos', (
('IdPProy', self.gf('django.db.models.fields.CharField')(max_length=30, primary_key=True)),
('IdMProy', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['tramiteDoc.MProyecto'], null=True, blank=True)),
('NroPartPProy', self.gf('django.db.models.fields.CharField')(max_length=40, null=True, blank=True)),
('IdNivPartPProy', self.gf('django.db.models.fields.CharField')(max_length=4, null=True, blank=True)),
('DescPProy', self.gf('django.db.models.fields.CharField')(max_length=300, null=True, blank=True)),
('TitPProy', self.gf('django.db.models.fields.CharField')(max_length=300, null=True, blank=True)),
('NroPartPerPProy', self.gf('django.db.models.fields.CharField')(max_length=40, null=True, blank=True)),
('IdUnidMed', self.gf('django.db.models.fields.CharField')(max_length=4, null=True, blank=True)),
('CantPProy', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('CostUnitPProy', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=15, decimal_places=2, blank=True)),
('CostTotPProy', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=15, decimal_places=2, blank=True)),
('FFFipPProy', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=15, decimal_places=2, blank=True)),
('FFCliPProy', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=15, decimal_places=2, blank=True)),
('FFOtrosPProy', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=15, decimal_places=2, blank=True)),
('PFFFipPProy', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=15, decimal_places=2, blank=True)),
('PFFCliPProy', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=15, decimal_places=2, blank=True)),
('PFFOtrosPProy', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=15, decimal_places=2, blank=True)),
('FinPProy', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('VerPProy', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('EstPProy', self.gf('django.db.models.fields.CharField')(max_length=2, null=True, blank=True)),
('FecIngPProy', self.gf('django.db.models.fields.DateField')(null=True, blank=True)),
('IdUsuCreaPProy', self.gf('django.db.models.fields.CharField')(max_length=30, null=True, blank=True)),
('FecModPProy', self.gf('django.db.models.fields.DateField')(null=True, blank=True)),
('IdUsuModPProy', self.gf('django.db.models.fields.CharField')(max_length=30, null=True, blank=True)),
('CostEjePProy', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=15, decimal_places=2, blank=True)),
('CostSalPProy', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=15, decimal_places=2, blank=True)),
('IdMotCierrePProy', self.gf('django.db.models.fields.CharField')(max_length=4, null=True, blank=True)),
('PorcCostEjePProy', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=5, decimal_places=2, blank=True)),
('BorradorPProy', self.gf('django.db.models.fields.CharField')(max_length=2, null=True, blank=True)),
('EstPresPProy', self.gf('django.db.models.fields.CharField')(max_length=2, null=True, blank=True)),
('EstEvalPProy', self.gf('django.db.models.fields.CharField')(max_length=2, null=True, blank=True)),
('EstValidPProy', self.gf('django.db.models.fields.CharField')(max_length=2, null=True, blank=True)),
('EstAprobPProy', self.gf('django.db.models.fields.CharField')(max_length=2, null=True, blank=True)),
('EstModPProy', self.gf('django.db.models.fields.CharField')(max_length=2, null=True, blank=True)),
))
db.send_create_signal(u'tramiteDoc', ['PProyectos'])
# Adding model 'CargaPresupuesto'
db.create_table(u'tramiteDoc_cargapresupuesto', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('doc_temp', self.gf('django.db.models.fields.files.FileField')(max_length=100, null=True, blank=True)),
))
db.send_create_signal(u'tramiteDoc', ['CargaPresupuesto'])
# Adding model 'CPProyectos'
db.create_table(u'tramiteDoc_cpproyectos', (
('IdCPProy', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('IdPProy', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['tramiteDoc.PProyectos'], null=True, blank=True)),
('MesCPProy', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('MesCCPProy', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('AnioCCPProy', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('MontoEjeCPProy', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=15, decimal_places=2, blank=True)),
('PorcEjeCProy', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=15, decimal_places=2, blank=True)),
('VerCProy', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('SVerCProy', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True)),
('EstCProy', self.gf('django.db.models.fields.CharField')(max_length=2, null=True, blank=True)),
('EstModifCProy', self.gf('django.db.models.fields.CharField')(max_length=2, null=True, blank=True)),
('MontoEjeFipCPProy', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=15, decimal_places=2, blank=True)),
('PorcEjeFipCPProy', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=6, decimal_places=2, blank=True)),
('MontoEjeCliCPProy', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=15, decimal_places=2, blank=True)),
('PorcEjeCliCPProy', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=6, decimal_places=2, blank=True)),
('MontoRealCPProy', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=15, decimal_places=2, blank=True)),
('PorcRealCPProy', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=6, decimal_places=2, blank=True)),
('MontoRealFipCPProy', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=15, decimal_places=2, blank=True)),
('PorcRealFipCPProy', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=6, decimal_places=2, blank=True)),
('MontoRealCliCPProy', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=15, decimal_places=2, blank=True)),
('PorcRealCliCPProy', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=6, decimal_places=2, blank=True)),
('MontoDifEjecRealCPProy', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=15, decimal_places=2, blank=True)),
('PorcDifEjecRealCPProy', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=6, decimal_places=2, blank=True)),
('EstVisCPProy', self.gf('django.db.models.fields.CharField')(max_length=2, null=True, blank=True)),
))
db.send_create_signal(u'tramiteDoc', ['CPProyectos'])
def backwards(self, orm):
    """Reverse this migration by dropping every table created in forwards().

    Tables are deleted in the same order forwards() created them; each
    entry below names one model's db_table exactly as create_table made it.
    """
    # NOTE(review): order matters only if the backend enforces FK
    # constraints at drop time — South emits plain DROP TABLE here.
    dropped_tables = (
        u'tramiteDoc_mtabla',            # MTabla
        u'tramiteDoc_dtabla',            # DTabla
        u'tramiteDoc_mpersonal',         # MPersonal
        u'tramiteDoc_mcliente',          # MCliente
        u'tramiteDoc_mproyecto',         # MProyecto
        u'tramiteDoc_mprotocolo',        # MProtocolo
        u'tramiteDoc_dprotocolo',        # DProtocolo
        u'tramiteDoc_dproyecto',         # DProyecto
        u'tramiteDoc_mcolaborador',      # MColaborador
        u'tramiteDoc_musuarioder',       # MUsuarioDer
        u'tramiteDoc_bandeja',           # Bandeja
        u'tramiteDoc_cclientes',         # CClientes
        u'tramiteDoc_sclientes',         # SClientes
        u'tramiteDoc_pproyectos',        # PProyectos
        u'tramiteDoc_cargapresupuesto',  # CargaPresupuesto
        u'tramiteDoc_cpproyectos',       # CPProyectos
    )
    for table_name in dropped_tables:
        db.delete_table(table_name)
models = {
u'tramiteDoc.bandeja': {
'CliBProt': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'EstAccion': ('django.db.models.fields.CharField', [], {'max_length': '6', 'blank': 'True'}),
'EstBProt': ('django.db.models.fields.CharField', [], {'max_length': '2', 'blank': 'True'}),
'EstObservado': ('django.db.models.fields.CharField', [], {'max_length': '2', 'blank': 'True'}),
'FecEnv': ('django.db.models.fields.DateField', [], {'blank': 'True'}),
'FecRec': ('django.db.models.fields.DateField', [], {'blank': 'True'}),
'FecSist': ('django.db.models.fields.DateField', [], {'blank': 'True'}),
'IdBandeja': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'IdMProt': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'IdMUsuEnv': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'IdMUsuRec': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'IdProtRef': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'MenBProt': ('django.db.models.fields.CharField', [], {'max_length': '400', 'blank': 'True'}),
'Meta': {'object_name': 'Bandeja'},
'NomMUsuEnv': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'NomMUsuRec': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'ObsProt': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'ProyBProt': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'})
},
u'tramiteDoc.cargapresupuesto': {
'Meta': {'object_name': 'CargaPresupuesto'},
'doc_temp': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'tramiteDoc.cclientes': {
'ApeMCCli': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
'ApePCCli': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
'Email1CCli': ('django.db.models.fields.EmailField', [], {'max_length': '254', 'blank': 'True'}),
'Email2CCli': ('django.db.models.fields.EmailField', [], {'max_length': '254', 'blank': 'True'}),
'EstCCli': ('django.db.models.fields.CharField', [], {'max_length': '2', 'blank': 'True'}),
'IdCCli': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}),
'IdMCli': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['tramiteDoc.MCliente']", 'db_column': "'IdMCli'", 'blank': 'True'}),
'IdTipDocCCli': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'Meta': {'object_name': 'CClientes'},
'NomCCli': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
'NumDocCCli': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'}),
'Tel1CCli': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'}),
'Tel2CCli': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'})
},
u'tramiteDoc.cpproyectos': {
'AnioCCPProy': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'EstCProy': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'EstModifCProy': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'EstVisCPProy': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'IdCPProy': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'IdPProy': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['tramiteDoc.PProyectos']", 'null': 'True', 'blank': 'True'}),
'MesCCPProy': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'MesCPProy': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'Meta': {'object_name': 'CPProyectos'},
'MontoDifEjecRealCPProy': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'MontoEjeCPProy': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'MontoEjeCliCPProy': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'MontoEjeFipCPProy': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'MontoRealCPProy': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'MontoRealCliCPProy': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'MontoRealFipCPProy': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'PorcDifEjecRealCPProy': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2', 'blank': 'True'}),
'PorcEjeCProy': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'PorcEjeCliCPProy': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2', 'blank': 'True'}),
'PorcEjeFipCPProy': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2', 'blank': 'True'}),
'PorcRealCPProy': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2', 'blank': 'True'}),
'PorcRealCliCPProy': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2', 'blank': 'True'}),
'PorcRealFipCPProy': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2', 'blank': 'True'}),
'SVerCProy': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'VerCProy': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'tramiteDoc.dprotocolo': {
'EstDProt': ('django.db.models.fields.CharField', [], {'max_length': '2', 'blank': 'True'}),
'IdDProt': ('django.db.models.fields.CharField', [], {'max_length': '35', 'primary_key': 'True'}),
'IdMProt': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['tramiteDoc.MProtocolo']", 'db_column': "'IdMProt'"}),
'IdPerDProt': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'IdTipDocDProt': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'Meta': {'object_name': 'DProtocolo'},
'NomDProt': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'RutaOcrDProt': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'RutaPdfDProt': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'UbLogDProt': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'UbiFisDProt': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'})
},
u'tramiteDoc.dproyecto': {
'DescDProy': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'EstDProy': ('django.db.models.fields.CharField', [], {'max_length': '2', 'blank': 'True'}),
'FecFinDProy': ('django.db.models.fields.DateField', [], {'blank': 'True'}),
'FecFirmaDProy': ('django.db.models.fields.DateField', [], {'blank': 'True'}),
'FecIniDProy': ('django.db.models.fields.DateField', [], {'blank': 'True'}),
'IdDProy': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}),
'IdMProy': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['tramiteDoc.MProyecto']", 'db_column': "'IdMProy'"}),
'IdTipDocDProy': ('django.db.models.fields.CharField', [], {'max_length': '5', 'null': 'True', 'blank': 'True'}),
'Meta': {'object_name': 'DProyecto'},
'MontAportCliDProy': ('django.db.models.fields.DecimalField', [], {'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'MontAportFIPDProy': ('django.db.models.fields.DecimalField', [], {'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'MontAportOtrDProy': ('django.db.models.fields.DecimalField', [], {'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'RutaOcrDProy': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'RutaPDFDProy': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'})
},
u'tramiteDoc.dtabla': {
'AbrDTab': ('django.db.models.fields.CharField', [], {'max_length': '5', 'null': 'True', 'blank': 'True'}),
'AbrOpDTab': ('django.db.models.fields.CharField', [], {'max_length': '5', 'null': 'True', 'blank': 'True'}),
'EstDTab': ('django.db.models.fields.CharField', [], {'max_length': '2', 'blank': 'True'}),
'FactDTab': ('django.db.models.fields.IntegerField', [], {'blank': 'True'}),
'IdDTab': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'IdMTab': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['tramiteDoc.MTabla']", 'db_column': "'idMTabla'"}),
'IdRefDTab': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'IndDTab': ('django.db.models.fields.IntegerField', [], {'blank': 'True'}),
'Meta': {'object_name': 'DTabla'},
'NomDTab': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
'PropDTab': ('django.db.models.fields.CharField', [], {'max_length': '2', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'tramiteDoc.mcliente': {
'DirMCli': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'Email1MCli': ('django.db.models.fields.EmailField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'EstMCli': ('django.db.models.fields.CharField', [], {'max_length': '2', 'blank': 'True'}),
'FecIngMCli': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'IdDepMCli': ('django.db.models.fields.CharField', [], {'max_length': '5', 'null': 'True', 'blank': 'True'}),
'IdDistMCli': ('django.db.models.fields.CharField', [], {'max_length': '5', 'null': 'True', 'blank': 'True'}),
'IdMCli': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}),
'IdPaisMCli': ('django.db.models.fields.CharField', [], {'max_length': '5', 'null': 'True', 'blank': 'True'}),
'IdProvMCli': ('django.db.models.fields.CharField', [], {'max_length': '5', 'null': 'True', 'blank': 'True'}),
'IdRegMCli': ('django.db.models.fields.CharField', [], {'max_length': '5', 'null': 'True', 'blank': 'True'}),
'IdTipInstMCli': ('django.db.models.fields.CharField', [], {'max_length': '5', 'null': 'True', 'blank': 'True'}),
'IdTipMCli': ('django.db.models.fields.CharField', [], {'max_length': '5', 'null': 'True', 'blank': 'True'}),
'Meta': {'object_name': 'MCliente'},
'NomMCli': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'RefMCli': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'Tel1MCli': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'Tel2MCli': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
'Tel3MCli': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'})
},
u'tramiteDoc.mcolaborador': {
'EstActMCol': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'EstMCol': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'FecFinMCol': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2014, 6, 3, 0, 0)', 'null': 'True', 'blank': 'True'}),
'FecFirmaMCol': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2014, 6, 3, 0, 0)', 'null': 'True', 'blank': 'True'}),
'FecIniMCol': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2014, 6, 3, 0, 0)', 'null': 'True', 'blank': 'True'}),
'IdMPer': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['tramiteDoc.MPersonal']", 'null': 'True', 'db_column': "'IdMPer'", 'blank': 'True'}),
'IdMProy': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['tramiteDoc.MProyecto']", 'null': 'True', 'db_column': "'IdMProy'", 'blank': 'True'}),
'IdTipoCargoProy': ('django.db.models.fields.CharField', [], {'max_length': '5', 'null': 'True', 'blank': 'True'}),
'Meta': {'object_name': 'MColaborador'},
'MontoMCol': ('django.db.models.fields.DecimalField', [], {'default': '0', 'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'MontoMenMCol': ('django.db.models.fields.DecimalField', [], {'default': '0', 'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'RutaOCRMCol': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'RutaPDFMCol': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'TiempoMCol': ('django.db.models.fields.CharField', [], {'default': '0', 'max_length': '50', 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'tramiteDoc.mpersonal': {
'ApeMMPer': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
'ApePMPer': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
'Email1MPer': ('django.db.models.fields.EmailField', [], {'max_length': '254', 'blank': 'True'}),
'Email2MPer': ('django.db.models.fields.EmailField', [], {'max_length': '254', 'blank': 'True'}),
'EstMPer': ('django.db.models.fields.CharField', [], {'max_length': '2', 'blank': 'True'}),
'FecIngMPer': ('django.db.models.fields.DateField', [], {'blank': 'True'}),
'FechNacMPer': ('django.db.models.fields.DateField', [], {'blank': 'True'}),
'IdArea': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'IdMPer': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}),
'IdTipDocMPer': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'IdTipPerMPer': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'Meta': {'object_name': 'MPersonal'},
'NDocMPer': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'}),
'NomMPer': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
'Tel1MPer': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'}),
'Tel2MPer': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'})
},
u'tramiteDoc.mprotocolo': {
'DescMProt': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'DocInter': ('django.db.models.fields.CharField', [], {'max_length': '2', 'blank': 'True'}),
'EstMProt': ('django.db.models.fields.CharField', [], {'max_length': '2', 'blank': 'True'}),
'FecEntMProt': ('django.db.models.fields.DateField', [], {'blank': 'True'}),
'FecEntRealMProt': ('django.db.models.fields.DateField', [], {'blank': 'True'}),
'IdMCli': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'IdMProt': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}),
'IdMProy': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['tramiteDoc.MProyecto']", 'null': 'True', 'db_column': "'IdMProy'", 'blank': 'True'}),
'IdRefMProt': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'IdTipDocGen': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'IdTipFormEntMProt': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'Meta': {'object_name': 'MProtocolo'},
'NomMProt': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
},
u'tramiteDoc.mproyecto': {
'EstCierreMProy': ('django.db.models.fields.CharField', [], {'max_length': '2', 'blank': 'True'}),
'EstMProy': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'FecEntMProy': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'FecEntRealMProy': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'FecFinMProy': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'FecFinRealMProy': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'FecIngMProy': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'FecIniMProy': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'FecIniRealMProy': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'IdBanco': ('django.db.models.fields.CharField', [], {'default': "'0'", 'max_length': '5', 'null': 'True', 'blank': 'True'}),
'IdConv': ('django.db.models.fields.CharField', [], {'max_length': '5', 'null': 'True', 'blank': 'True'}),
'IdEstMProy': ('django.db.models.fields.CharField', [], {'default': "'0'", 'max_length': '5', 'null': 'True', 'blank': 'True'}),
'IdMCli': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['tramiteDoc.MCliente']", 'null': 'True', 'db_column': "'IdMCli'", 'blank': 'True'}),
'IdMProy': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}),
'IdSector': ('django.db.models.fields.CharField', [], {'default': "'0'", 'max_length': '5', 'null': 'True', 'blank': 'True'}),
'IdTipProMProy': ('django.db.models.fields.CharField', [], {'default': "'0'", 'max_length': '5', 'null': 'True', 'blank': 'True'}),
'Meta': {'object_name': 'MProyecto'},
'MontInvCliMProy': ('django.db.models.fields.DecimalField', [], {'default': '0', 'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'MontInvFipMProy': ('django.db.models.fields.DecimalField', [], {'default': '0', 'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'MontTInvRealCliMProy': ('django.db.models.fields.DecimalField', [], {'default': '0', 'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'MontTInvRealFipMProy': ('django.db.models.fields.DecimalField', [], {'default': '0', 'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'MontTInvRealOtrMProy': ('django.db.models.fields.DecimalField', [], {'default': '0', 'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'MontTotAdeCliMProy': ('django.db.models.fields.DecimalField', [], {'default': '0', 'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'MontTotAdeFipMProy': ('django.db.models.fields.DecimalField', [], {'default': '0', 'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'MontTotAdeOtrMProy': ('django.db.models.fields.DecimalField', [], {'default': '0', 'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'MontTotAdeTotMProy': ('django.db.models.fields.DecimalField', [], {'default': '0', 'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'MontTotMProy': ('django.db.models.fields.DecimalField', [], {'default': '0', 'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'MontTotRealMProy': ('django.db.models.fields.DecimalField', [], {'default': '0', 'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'NomMProy': ('django.db.models.fields.CharField', [], {'max_length': '700', 'null': 'True', 'blank': 'True'}),
'NumCtaInterMProy': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'PorcTotAdeMProy': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '2', 'blank': 'True'}),
'RutaOCRACMProy': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'RutaOCRConvMProy': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'RutaOCRMProy': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'RutaPDFACMProy': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'RutaPDFConvMProy': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'RutaPDFMProy': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'TiempoMProy': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'})
},
u'tramiteDoc.mtabla': {
'AbrMTabla': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'EstMTabla': ('django.db.models.fields.CharField', [], {'max_length': '2', 'blank': 'True'}),
'IdMTabla': ('django.db.models.fields.CharField', [], {'max_length': '4', 'primary_key': 'True'}),
'Meta': {'object_name': 'MTabla'},
'NomMTabla': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}),
'PropMTabla': ('django.db.models.fields.CharField', [], {'max_length': '2', 'blank': 'True'})
},
u'tramiteDoc.musuarioder': {
'EstDer': ('django.db.models.fields.CharField', [], {'max_length': '2', 'blank': 'True'}),
'IdUsuDer': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'IdUsuOri': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'Meta': {'object_name': 'MUsuarioDer'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'tramiteDoc.pproyectos': {
'BorradorPProy': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'CantPProy': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'CostEjePProy': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'CostSalPProy': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'CostTotPProy': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'CostUnitPProy': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'DescPProy': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'EstAprobPProy': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'EstEvalPProy': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'EstModPProy': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'EstPProy': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'EstPresPProy': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'EstValidPProy': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'FFCliPProy': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'FFFipPProy': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'FFOtrosPProy': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'FecIngPProy': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'FecModPProy': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'FinPProy': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'IdMProy': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['tramiteDoc.MProyecto']", 'null': 'True', 'blank': 'True'}),
'IdMotCierrePProy': ('django.db.models.fields.CharField', [], {'max_length': '4', 'null': 'True', 'blank': 'True'}),
'IdNivPartPProy': ('django.db.models.fields.CharField', [], {'max_length': '4', 'null': 'True', 'blank': 'True'}),
'IdPProy': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}),
'IdUnidMed': ('django.db.models.fields.CharField', [], {'max_length': '4', 'null': 'True', 'blank': 'True'}),
'IdUsuCreaPProy': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'IdUsuModPProy': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'Meta': {'object_name': 'PProyectos'},
'NroPartPProy': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
'NroPartPerPProy': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
'PFFCliPProy': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'PFFFipPProy': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'PFFOtrosPProy': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '15', 'decimal_places': '2', 'blank': 'True'}),
'PorcCostEjePProy': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '5', 'decimal_places': '2', 'blank': 'True'}),
'TitPProy': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True', 'blank': 'True'}),
'VerPProy': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
},
u'tramiteDoc.sclientes': {
'DirSCli': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'Email1SCli': ('django.db.models.fields.EmailField', [], {'max_length': '254', 'blank': 'True'}),
'IdDepSCli': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'IdDistSCli': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'IdMCli': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['tramiteDoc.MCliente']", 'max_length': '30', 'db_column': "'IdMCli'", 'blank': 'True'}),
'IdPaisSCli': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'IdProvSCli': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'IdRegSCli': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'IdSCli': ('django.db.models.fields.CharField', [], {'max_length': '30', 'primary_key': 'True'}),
'IdTipSCli': ('django.db.models.fields.CharField', [], {'max_length': '5', 'blank': 'True'}),
'Meta': {'object_name': 'SClientes'},
'NomSCli': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'RefSCli': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'Tel1SCli': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'}),
'Tel2SCli': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'}),
'Tel3SCli': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'})
}
}
complete_apps = ['tramiteDoc'] |
"""
Leap year
Write a program that reads a year from the user and tells whether a given year is a leap year or not.
A leap year (also known as an intercalary year or bissextile year) is a calendar year that contains an additional day
(or, in the case of a lunisolar calendar, a month) added to keep the calendar year synchronized with the astronomical
year or seasonal year. In the Gregorian calendar, each leap year has 366 days instead of 365, by extending February
to 29 days rather than the common 28.
In the Gregorian calendar, three criteria must be checked to identify leap years:
1. The given year must be evenly divisible by 4;
2. If the year can also be evenly divided by 100, it is NOT a leap year; unless:
3. The year is also evenly divisible by 400. Then it is a leap year.
Your code should use the above criteria to check for a leap year and then print either "That's a leap year!" or
"That's not a leap year."
"""
STANDARD_YEAR = 365  # days in a common (non-leap) year
LEAP_YEAR = 366  # days in a leap year (February has 29 days)
def main():
    """Read a year from the user and report whether it is a Gregorian leap year.

    Prints the verdict together with the number of days in that kind of year.
    """
    # Fix: a year is an integer.  The original used float(), which silently
    # accepted values like 2000.5 and then mis-evaluated the modulo tests.
    check_year = int(input("Leap year check. Please enter a year: "))
    # Gregorian rule: divisible by 4, except century years, except
    # multiples of 400 (equivalent to the original nested branches).
    if check_year % 4 == 0 and (check_year % 100 != 0 or check_year % 400 == 0):
        print("That's a leap year, with " + str(LEAP_YEAR) + " days.")
    else:
        print("That's not a leap year, with " + str(STANDARD_YEAR) + " days.")
if __name__ == "__main__":
main() |
import tensorflow as tf
from tensorflow.keras import layers, models
from adversarial.defences import AdvClassifier
class AdvGAN(models.Model):
    """Adversarial GAN wrapper pairing a perturbation generator with an
    adversarially trained classifier.

    ``call`` returns the generated perturbation and the classifier's output
    on the perturbed input.
    """

    def __init__(self):
        super(AdvGAN, self).__init__()
        # Fix: the attribute was misspelled ``genereator`` while ``call``
        # read ``self.generator``, raising AttributeError on first use.
        self.generator = PerturbationGenerator()
        self.adv_clf = AdvClassifier()

    def call(self, inputs, training=False):
        # One latent noise tensor per example; (10, 10, 64) matches the
        # generator's expected input shape -- TODO confirm against training code.
        n = tf.shape(inputs)[0]
        noise = tf.random.normal((n, 10, 10, 64))
        perturbation = self.generator(noise, training=training)
        clf = self.adv_clf(inputs + perturbation)
        return perturbation, clf
class PerturbationGenerator(models.Model):
    """Upsampling ConvNet mapping latent noise to an image-shaped perturbation.

    The tanh output is divided by ``temperature`` to keep the perturbation
    magnitude small (|x| <= 1/temperature).
    """

    def __init__(self):
        super(PerturbationGenerator, self).__init__()
        self.generator = models.Sequential([
            layers.Conv2DTranspose(64, (3, 3), strides=(2, 2), padding="SAME", output_padding=1),
            layers.BatchNormalization(),
            layers.Activation(tf.nn.leaky_relu),
            layers.Conv2DTranspose(32, (3, 3), strides=(2, 2), padding="SAME", output_padding=1),
            layers.BatchNormalization(),
            layers.Activation(tf.nn.leaky_relu),
            layers.Conv2DTranspose(32, (5, 5), strides=(2, 2), padding="SAME", output_padding=1),
            layers.BatchNormalization(),
            layers.Activation(tf.nn.leaky_relu),
            layers.Conv2DTranspose(3, (5, 5), strides=(2, 2), padding="SAME", output_padding=1),
            layers.BatchNormalization(),
            layers.Activation(tf.nn.tanh),
        ])
        # Scale factor applied to the tanh output; larger -> smaller perturbations.
        self.temperature = 100

    def call(self, noise, training=False):
        # Fix: ``training`` was dropped, so BatchNormalization always ran in
        # inference mode (frozen moving statistics) even during training.
        x = self.generator(noise, training=training)
        return x / self.temperature
class AdvDiscriminator(models.Model):
    """CNN discriminator: five strided conv blocks, then a dense softmax head
    with 10 outputs.

    Parameters:
        drop_rate (float): Dropout rate used in the dense head.
    """

    def __init__(self, drop_rate=0.5):
        super(AdvDiscriminator, self).__init__()
        # Feature extractor; spatial size halves at every block (the trailing
        # numbers are the resulting width for a 160-pixel input).
        self.features = models.Sequential([
            layers.Conv2D(32, (5, 5), strides=(2, 2), padding="SAME"),  # 80
            layers.BatchNormalization(),
            layers.Activation(tf.nn.leaky_relu),
            layers.Conv2D(32, (5, 5), strides=(2, 2), padding="SAME"),  # 40
            layers.BatchNormalization(),
            layers.Activation(tf.nn.leaky_relu),
            layers.Conv2D(64, (3, 3), strides=(2, 2), padding="SAME"),  # 20
            layers.BatchNormalization(),
            layers.Activation(tf.nn.leaky_relu),
            layers.Conv2D(64, (3, 3), strides=(2, 2), padding="SAME"),  # 10
            layers.BatchNormalization(),
            layers.Activation(tf.nn.leaky_relu),
            layers.Conv2D(128, (3, 3), strides=(2, 2), padding="SAME"),  # 5
            layers.BatchNormalization(),
            layers.Activation(tf.nn.leaky_relu),
        ])
        self.discriminator = models.Sequential([
            layers.Flatten(),
            layers.Dense(256, activation=tf.nn.leaky_relu),
            layers.Dropout(drop_rate),
            layers.Dense(32, activation=tf.nn.leaky_relu),
            layers.Dropout(drop_rate),
            layers.Dense(10, activation=tf.nn.softmax),
        ])

    def call(self, inputs, training=False):
        # Fix: propagate ``training`` so BatchNormalization and Dropout
        # switch between train and inference behaviour.
        x = self.features(inputs, training=training)
        x = self.discriminator(x, training=training)
        return x
|
import boto3
import sys
my_region = sys.argv[1]

ec2client = boto3.client('ec2', region_name=my_region)
cloudwatch = boto3.client('cloudwatch', region_name=my_region)
account = boto3.client('sts').get_caller_identity().get('Account')
sns_topic_arn = "arn:aws:sns:" + my_region + ":" + account + ":notification_for_instance_recovery"


def _put_status_alarm(instance_id, metric_name, recovery_action, description):
    """Create (or overwrite) a CloudWatch alarm on an EC2 status-check metric.

    The alarm fires after two consecutive failing 60-second periods and
    triggers the given EC2 automate action plus an SNS notification.
    """
    cloudwatch.put_metric_alarm(
        AlarmName=metric_name + '_' + instance_id,
        ComparisonOperator='GreaterThanThreshold',
        EvaluationPeriods=2,
        MetricName=metric_name,
        Namespace='AWS/EC2',
        Period=60,
        Statistic='Minimum',
        Threshold=0,
        ActionsEnabled=True,
        AlarmActions=[
            recovery_action,
            sns_topic_arn
        ],
        AlarmDescription=description,
        Dimensions=[
            {
                'Name': 'InstanceId',
                'Value': instance_id
            },
        ]
        # Fix: the original passed Unit='Seconds', but the StatusCheckFailed_*
        # metrics are emitted with unit Count; a mismatched Unit makes the
        # alarm never match data, so Unit is omitted entirely.
    )


# Create a reboot alarm (instance check) and a recover alarm (system check)
# for every running instance in the region.
response = ec2client.describe_instances()
for reservation in response["Reservations"]:
    for instance in reservation["Instances"]:
        if instance["State"]["Name"] == "running":
            instance_id = instance["InstanceId"]
            _put_status_alarm(instance_id, 'StatusCheckFailed_Instance',
                              'arn:aws:automate:' + my_region + ':ec2:reboot',
                              'This metric monitors EC2 Instance status check')
            _put_status_alarm(instance_id, 'StatusCheckFailed_System',
                              'arn:aws:automate:' + my_region + ':ec2:recover',
                              'This metric monitors EC2 System status check')
|
""" Summarize the column names in a collection of tabular files. """
from hed.tools.analysis.column_name_summary import ColumnNameSummary
from hed.tools.remodeling.operations.base_op import BaseOp
from hed.tools.remodeling.operations.base_summary import BaseSummary
class SummarizeColumnNamesOp(BaseOp):
    """ Summarize the column names in a collection of tabular files.

    Required remodeling parameters:
        - **summary_name** (*str*) The name of the summary.
        - **summary_filename** (*str*) Base filename of the summary.

    The purpose is to check that all of the tabular files have the same columns in same order.

    """

    PARAMS = {
        "operation": "summarize_column_names",
        "required_parameters": {
            "summary_name": str,
            "summary_filename": str
        },
        "optional_parameters": {
            "append_timecode": bool
        }
    }

    SUMMARY_TYPE = "column_names"

    def __init__(self, parameters):
        """ Constructor for summarize column names operation.

        Parameters:
            parameters (dict): Dictionary with the parameter values for required and optional parameters.

        :raises KeyError:
            - If a required parameter is missing.
            - If an unexpected parameter is provided.

        :raises TypeError:
            - If a parameter has the wrong type.

        """
        super().__init__(self.PARAMS, parameters)
        self.summary_name = parameters['summary_name']
        self.summary_filename = parameters['summary_filename']
        self.append_timecode = parameters.get('append_timecode', False)

    def do_op(self, dispatcher, df, name, sidecar=None):
        """ Create a column name summary for df.

        Parameters:
            dispatcher (Dispatcher): Manages the operation I/O.
            df (DataFrame): The DataFrame to be remodeled.
            name (str): Unique identifier for the dataframe -- often the original file path.
            sidecar (Sidecar or file-like): Not needed for this operation.

        Returns:
            DataFrame: A copy of df.

        Side-effect:
            Updates the relevant summary.

        """
        df_new = df.copy()
        summary = dispatcher.summary_dicts.get(self.summary_name, None)
        # Fix: test for absence explicitly -- `if not summary` would also
        # treat an existing-but-falsy summary object as missing and silently
        # replace it, discarding accumulated results.
        if summary is None:
            summary = ColumnNamesSummary(self)
            dispatcher.summary_dicts[self.summary_name] = summary
        summary.update_summary({"name": name, "column_names": list(df_new.columns)})
        return df_new
class ColumnNamesSummary(BaseSummary):
    """ Accumulates one ColumnNameSummary per input file for the column-name summary op. """

    def __init__(self, sum_op):
        super().__init__(sum_op)

    def update_summary(self, new_info):
        """ Update the summary for a given tabular input file.

        Parameters:
            new_info (dict): A dictionary with the parameters needed to update a summary.

        Notes:
            - The summary information is kept in separate ColumnNameSummary objects for each file.
            - The summary needs a "name" str and a "column_names" list.
            - The summary uses ColumnNameSummary as the summary object.

        """
        name = new_info['name']
        # Lazily create a per-file summary object on first sight of this name.
        if name not in self.summary_dict:
            self.summary_dict[name] = ColumnNameSummary(name=name)
        self.summary_dict[name].update(name, new_info["column_names"])

    def get_details_dict(self, column_summary):
        """ Return the summary dictionary extracted from a ColumnNameSummary.

        Parameters:
            column_summary (ColumnNameSummary): A column name summary for the data file.

        Returns:
            dict - a dictionary with the summary information for column names.

        """
        summary = column_summary.get_summary()
        # "Total events" is not meaningful for a column-name summary, hence "n/a".
        return {"Name": summary['Summary name'], "Total events": "n/a",
                "Total files": summary['Number files'],
                "Files": [name for name in column_summary.file_dict.keys()],
                "Specifics": {"Columns": summary['Columns']}}

    def merge_all_info(self):
        """ Create a ColumnNameSummary containing the overall dataset summary.

        Returns:
            ColumnNameSummary - the overall summary object for column names.

        """
        all_sum = ColumnNameSummary(name='Dataset')
        # Re-feed every file's header pattern into a single dataset-wide summary.
        # NOTE(review): assumes counts.file_dict maps file name -> index into
        # counts.unique_headers -- confirm against ColumnNameSummary.
        for key, counts in self.summary_dict.items():
            for name, pos in counts.file_dict.items():
                all_sum.update(name, counts.unique_headers[pos])
        return all_sum

    def _get_result_string(self, name, result, indent=BaseSummary.DISPLAY_INDENT):
        """ Return a formatted string with the summary for the indicated name.

        Parameters:
            name (str): Identifier (usually the filename) of the individual file.
            result (dict): The dictionary of the summary results indexed by name.
            indent (str): A string containing spaces used for indentation (usually 3 spaces).

        Returns:
            str - The results in a printable format ready to be saved to a text file.

        Notes:
            This calls _get_dataset_string to get the overall summary string.

        """
        if name == "Dataset":
            return self._get_dataset_string(result, indent)
        columns = result.get("Specifics", {}).get("Columns", [])
        # For an individual file only the first (and presumably only) column
        # pattern is shown -- TODO confirm a single file has exactly one entry.
        if columns:
            return f"{indent}{str(columns[0])}"
        else:
            return ""

    @staticmethod
    def _get_dataset_string(result, indent=BaseSummary.DISPLAY_INDENT):
        """ Return a string with the overall summary for all of the tabular files.

        Parameters:
            result (dict): Dictionary of merged summary information.
            indent (str): String of blanks used as the amount to indent for readability.

        Returns:
            str: Formatted string suitable for saving in a file or printing.

        """
        sum_list = [f"Dataset: Number of files={result.get('Total files', 0)}"]
        specifics = result.get("Specifics", {})
        columns = specifics.get("Columns", {})
        # One line per distinct column pattern, followed by the files using it.
        for element in columns:
            sum_list.append(f"{indent}Columns: {str(element['Column names'])}")
            for file in element.get("Files", []):
                sum_list.append(f"{indent}{indent}{file}")
        return "\n".join(sum_list)
|
#! /usr/bin/env python
from itty import *
import urllib2
import json
"""
Network Fingerprint
Author: Clint Mann
Illustrates the following concept:
- This is a simple Spark bot that monitors a Spark Room for user input.
It is one component in the Network Fingerprint App
"""
__author__ = "Clint Mann"
__license__ = "MIT"
bot_name = "<BOTNAME>"
bot_email = "<BOTEMAIL>"
token = "<BOTTOKEN>"
def sendSparkGET(url):
    """Issue an authenticated GET against the Spark API and return the raw body."""
    headers = {
        "Accept": "application/json",
        "Content-Type": "application/json",
    }
    request = urllib2.Request(url, headers=headers)
    request.add_header("Authorization", "Bearer " + token)
    return urllib2.urlopen(request).read()
def sendSparkPOST(url, data):
    """POST *data* (JSON-serialized) to the Spark API and return the raw body."""
    headers = {
        "Accept": "application/json",
        "Content-Type": "application/json",
    }
    request = urllib2.Request(url, json.dumps(data), headers=headers)
    request.add_header("Authorization", "Bearer " + token)
    return urllib2.urlopen(request).read()
@post('/')
def index(request):
    """Webhook endpoint: fetch the message that triggered the webhook and reply.

    Ignores the bot's own messages; replies with usage help for '?'/'help'
    and with a "Searching..." acknowledgement for anything else.
    """
    webhook = json.loads(request.body)
    print(webhook['data']['id'])
    result = sendSparkGET('https://api.ciscospark.com/v1/messages/{0}'.format(webhook['data']['id']))
    result = json.loads(result)
    print(result)
    msg = None
    if webhook['data']['personEmail'] != bot_email:
        text = result.get('text', '').lower()
        # Fix: the original tested '?', 'help' and 'Help' in three branches
        # with identical message bodies; the 'Help' branch was dead code
        # because the text is lower-cased before comparison.
        if '?' in text or 'help' in text:
            msg = "Hello" \
                + " <br/> " + "My job is to help you find the _'Network Fingerprint'_ for a User on your network." \
                + " <br/> " + "Please @ Mention me and enter a username to get started." \
                + " <br/> " + "**Example:** @Fingerprint testuser"
        elif text:
            msg = "**Searching...**"
    if msg is not None:
        sendSparkPOST("https://api.ciscospark.com/v1/messages",
                      {"roomId": webhook['data']['roomId'], "markdown": msg})
    return "true"
run_itty(server='wsgiref', host='0.0.0.0', port=10010)
|
# -*- coding: utf-8 -*-
import pymongo
import pandas as pd
from PIL import Image
import io
import numpy as np
import matplotlib.pyplot as plt
def get_coll(name_db):
    """Return the requested collection from the local DataDb_detection database.

    Returns None for an unknown collection name.
    """
    client = pymongo.MongoClient('127.0.0.1', 27017)
    db = client.DataDb_detection
    collections = {
        "Detection": db.Detection,
        "image": db.image,
    }
    return collections.get(name_db)
class User(object):
    """Thin read-only accessor over detection results stored in MongoDB."""

    def __init__(self, name, advice):
        self.name = name
        self.advice = advice

    @staticmethod
    def query_users(name_db):
        # Cursor over every document in the named collection.
        users = get_coll(name_db).find()
        return users

    @staticmethod
    def get_advice(name_id):
        advice = get_coll('Detection').find({'name':str(name_id)})
        advice = list(advice)
        t1 = advice[0] # read records one at a time (as a Series)
        t1 = pd.Series(t1)
        # Third field of the document holds the advice text -- TODO confirm
        # the Detection document schema (positional access is fragile).
        advice = t1[2]
        return advice

    @staticmethod
    def get_image(filename):
        img =get_coll('image').find_one({'filename':str(filename)})
        data = img['data']  # raw image bytes stored in the document
        image=Image.open(io.BytesIO(data))
        # NOTE(review): the numpy round-trip below looks redundant (it may
        # only normalize the PIL mode) -- confirm before simplifying.
        k = np.array(image)
        k = Image.fromarray(k)
        return k
if __name__ == '__main__':
    # Demo: fetch the stored advice text and image for detection 'pl40'
    # and display the image with the advice as its title.
    user1 = User.get_advice('pl40')
    user = User.get_image('pl40')
    fig = plt.figure(figsize=(12, 12))
    # fig.set_title("132")
    plt.figure(1)
    plt.subplot(1, 2, 2)
    plt.imshow(user)
    plt.axis('off')
    plt.title("driver advice:"+user1)
    plt.show()
    # print(user)
    # user['filename']
|
from django.contrib import admin
from .models import DevTool, Idea
# Expose DevTool entries in the Django admin site.
admin.site.register(DevTool)
admin.site.register(Idea) |
from flask import Flask, session, render_template, request, redirect, url_for, escape
import pymysql.cursors
import bcrypt
import re
app = Flask(__name__)
db = pymysql.connect(host="localhost", user="budget-sheets", passwd="sheets-budget", db="budget-sheets", cursorclass=pymysql.cursors.DictCursor)
cursor = db.cursor()
def cleanhtml(raw_html):
    """Strip HTML tags from *raw_html* with a non-greedy tag regex.

    Adapted from: https://stackoverflow.com/questions/9662346/python-code-to-remove-html-tags-from-a-string#12982689
    """
    return re.sub('<.*?>', '', raw_html)
class Expense:
    """One budget line item retrieved from the database."""

    def __init__(self, val_type, name, cost):
        # type of entry (e.g. expense category), display name, and amount
        self.val_type, self.name, self.cost = val_type, name, cost
#database interface abstraction functions
#database interfaces for creating and checking user logins
def check_login(username, password):
    """Validate a username/password pair against the stored bcrypt hash.

    Returns True on success, False for a wrong password or an unknown user.
    """
    # Fix: parameterized query -- the original interpolated the username
    # straight into the SQL string (SQL injection) and printed the query.
    cursor.execute('SELECT `password` FROM `users` WHERE `username`=%s', (username,))
    result = cursor.fetchone()
    if result is None:
        # Fix: unknown user used to crash on result.get(...) with None.
        return False
    return bcrypt.checkpw(password.encode("utf-8"), result.get("password").encode("utf-8"))
def create_user_account(username, email, password):
    """Insert a new user row; the password is stored as a bcrypt hash.

    The username is stripped of HTML tags before storage.
    """
    password_hash = bcrypt.hashpw(password.encode("utf-8"), bcrypt.gensalt())
    # Fix: parameterized query instead of string formatting (SQL injection).
    sql = 'INSERT INTO `users` (`username`, `email`, `password`) VALUES (%s, %s, %s)'
    cursor.execute(sql, (cleanhtml(username), email, password_hash.decode("utf-8")))
    db.commit()
    return True
#database functions for adding costs and net incomes to the correct tables
def add_cost(username, val_type, name, cost):
    """Store one budget line item for *username* in the budget-values table."""
    # Fix: parameterized query instead of string formatting (SQL injection).
    sql = 'INSERT INTO `budget-values` (`username`, `type`, `name`, `cost`) VALUES (%s, %s, %s, %s)'
    cursor.execute(sql, (username, val_type, name, cost))
    db.commit()
    return True
def add_net_income(username, income):
    """Store *username*'s net income in the net-incomes table."""
    # Fix: parameterized query instead of string formatting (SQL injection).
    sql = 'INSERT INTO `net-incomes` (`username`, `income`) VALUES (%s, %s)'
    cursor.execute(sql, (username, income))
    db.commit()
    return True
#retrieve all expenses for a username; outputs a list of Expense objects
def get_expenses(username):
    """Return every stored budget line item for *username* as Expense objects."""
    # Fix: parameterized query instead of string formatting (SQL injection).
    sql = 'SELECT * FROM `budget-values` WHERE `username`=%s'
    cursor.execute(sql, (username,))
    results = cursor.fetchall()
    return [Expense(row.get("type"), row.get("name"), row.get("cost")) for row in results]
#remove all expenses for a user; used to replace a user's expenses wholesale
#before re-inserting the newly submitted ones
def remove_expenses(username):
    """Delete every stored budget line item for *username*."""
    # Fix: parameterized query instead of string formatting (SQL injection).
    sql = 'DELETE FROM `budget-values` WHERE `username`=%s'
    cursor.execute(sql, (username,))
    db.commit()
#database functions for user viewing permissions
def add_allowed_user(username, allowed_username):
    """Grant *allowed_username* permission to view *username*'s budget pages."""
    # Fix: parameterized query instead of string formatting (SQL injection).
    sql = 'INSERT INTO `allowed-users` (`username`, `allowed-username`) VALUES (%s, %s)'
    cursor.execute(sql, (username, allowed_username))
    db.commit()
    return True
def check_user_allowed(username, target_username):
    """Return True if *username* has been granted access to *target_username*'s pages."""
    # Fixes: duplicated 'sql = sql =' typo, SQL injection via string
    # formatting, debug prints of user data, and an implicit None return
    # when no permission row matched (now an explicit False).
    sql = 'SELECT * FROM `allowed-users` WHERE `allowed-username`=%s'
    cursor.execute(sql, (target_username,))
    result = cursor.fetchone()
    if result is None:
        return False
    return result.get("username") == username
#flask application urls and functions
@app.route("/")
def index():
    """Home page; the header shows the login state."""
    if "username" in session:
        # Fix: escape the username before splicing it into the HTML snippet
        # (a username containing markup was previously rendered verbatim --
        # stored XSS).  str() keeps plain-string concatenation semantics.
        return render_template("home.html", login_status="<p>Logged in as user " + str(escape(session['username'])) + '</p><a href="/logout/">Logout</a>')
    else:
        return render_template("home.html", login_status='<a href="/login/">Log in</a>')
#user handling urls
@app.route("/login/", methods=["GET", "POST"])
def login():
    """Render the login form; on POST, validate credentials and open a session."""
    if request.method != "POST":
        return render_template("login.html")
    username = request.form["username"]
    password = request.form["password"]
    app.logger.info("User " + username + " requested login.")
    if not check_login(username, password):
        app.logger.info("User " + username + " failed login.")
        return render_template("login.html")
    session['username'] = request.form['username']
    app.logger.info("User " + username + " logged in.")
    return redirect(url_for('index'))
@app.route("/logout/")
def logout():
session.pop("username", None)
return redirect(url_for("index"))
@app.route("/signup/", methods=["GET", "POST"])
def signup():
if request.method == "POST":
username = request.form["username"]
email = request.form["email"]
password = request.form["password"]
if (create_user_account(username, email, password)):
return redirect(url_for("login"))
else:
return redirect(url_for("index"))
return redirect(url_for("index"))
#user planning pages urls.
@app.route("/budget/setup/", methods=["GET", "POST"])
def budget_setup():
    """Show the user's stored budget, or replace it with a newly submitted one on POST."""
    if 'username' not in session:
        return redirect(url_for("login"))
    if request.method == "POST":
        username = session['username']
        income = request.form["net_income"]
        types = request.form.getlist("type")
        names = request.form.getlist("name")
        costs = request.form.getlist("cost")
        # Replace all previously stored expenses with the submitted ones.
        remove_expenses(username)
        # Fix: add_net_income() was called inside the loop below, inserting
        # one income row per expense; the income is now recorded exactly once.
        add_net_income(username, income)
        for val_type, name, cost in zip(types, names, costs):
            add_cost(username, val_type, name, cost)
        return redirect(url_for("index"))
    user_expenses = get_expenses(session["username"])
    login_status = "<p>Logged in as user " + session['username'] + '</p><a href="/logout/">Logout</a>'
    if user_expenses:
        return render_template("statistics.html", expenses=user_expenses, login_status=login_status)
    return render_template("statistics.html", login_status=login_status)
@app.route("/budget/compare/setup/", methods=["GET", "POST"])
def budget_compare_setup():
if ("username" in session):
user_expenses = get_expenses(session["username"])
if request.method == "POST":
requested_username = request.form["username"]
print(requested_username)
add_allowed_user(session["username"], requested_username)
return render_template("comparisons-setup.html", expenses_1=user_expenses, login_status="<p>Logged in as user " + session['username'] + '</p><a href="/logout/">Logout</a>')
else:
return redirect(url_for("index"))
@app.route("/budget/compare/user/", methods=["GET", "POST"])
def budget_compare_user():
if ("username" in session):
if request.method == "POST":
username = session["username"]
requested_username = request.form["username"]
if (check_user_allowed(requested_username, username)):
user_expenses = get_expenses(username)
req_expenses = get_expenses(requested_username)
return render_template("comparisons.html", expenses_1=user_expenses, expenses_2=req_expenses, login_status="<p>Logged in as user " + session['username'] + '</p><a href="/logout/">Logout</a>')
else:
return redirect(url_for("budget_compare_setup"))
else:
return redirect(url_for("budget_compare_setup"))
else:
return redirect(url_for("index"))
app.secret_key = "b@*_dx$'\xbe\x91v\x1d\xd8M\xaeC\xee\xe4\x90J\x15\xc4%\x16(\x13'"
if (__name__ == "__main__"):
app.run(host="0.0.0.0")
|
from sandbox_api import sandbox_api_key, sandbox_secret_key, sandbox_passphrase
#from real_api import real_api_key, real_secret_key, real_passphrase
import sys
sys.path.append('/Users/Joseph/Documents/Ethereum/trade')
from authenticated_client import GDAXRequestAuth
# Sandbox api base
API_BASE = 'https://api-public.sandbox.gdax.com'
# # Real api base
# API_BASE = 'https://api.gdax.com'
API_KEY = sandbox_api_key
SECRET_KEY = sandbox_secret_key
API_PASS = sandbox_passphrase
# Authenticated client for the (sandbox) GDAX REST API.
client = GDAXRequestAuth(API_KEY, SECRET_KEY, API_PASS)
# Place an order
# Limit orders: presumably (product, price, size, time-in-force) --
# TODO confirm argument order against authenticated_client.
order1 = client.buy_limit('BTC-USD', 3000, 0.02, 'GTC')
print(order1)
order2 = client.sell_limit('BTC-USD', 5000, 0.01, 'GTC')
print(order2)
# Market orders: presumably (product, size) -- TODO confirm.
order3 = client.buy_market('BTC-USD', 0.5)
print(order3)
order4 = client.sell_market('BTC-USD', 0.3)
print(order4) |
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
def obj_over_configs(x_config_names, y_kub, y_nsga, obj_name, title):
    """Grouped bar chart comparing Kubernetes vs NSGA-II over configurations.

    Saves the figure to compare.png and closes it.
    """
    x_idx = np.arange(len(x_config_names))
    width = 0.3
    plt.bar(x_idx - width/2, y_kub, width=width, label="Kubernetes")
    plt.bar(x_idx + width/2, y_nsga, width=width, label="NSGA-II")
    # Fix: x_config_names was only used for its length; label the ticks so
    # each bar group is identified by its configuration name.
    plt.xticks(x_idx, x_config_names)
    plt.xlabel('Fitness Objectives')
    plt.ylabel(obj_name)
    plt.title(title)
    plt.legend()
    plt.savefig('compare.png')
    plt.close()
def obj_over_configs_rescheduling(x_config_names, y_kub, y_nsga, obj_name, title):
    """Grouped bar chart comparing NSGA-II rescheduling vs scheduling runs.

    Saves the figure to rescheduling.png and closes it.
    """
    x_idx = np.arange(len(x_config_names))
    width = 0.3
    plt.bar(x_idx - width/2, y_kub, width=width, label="NSGA-II(Rescheduling)")
    plt.bar(x_idx + width/2, y_nsga, width=width, label="NSGA-II(Scheduling)")
    # Fix: label each bar group with its configuration name (the list was
    # previously only used for its length).
    plt.xticks(x_idx, x_config_names)
    plt.xlabel('Fitness Objectives')
    plt.ylabel(obj_name)
    plt.title(title)
    plt.legend()
    plt.savefig('rescheduling.png')
    # Fix: the sibling obj_over_configs() closes its figure; without close()
    # the next plot would be drawn on top of this one.
    plt.close()
"""
#filler data
a=['1req-1app','1req-1.5app','1.5req-1app','1.5req-1.5app','2req-2app']
kub=[]
nsga=[]
for i in range(5):
kub.append(np.random.randint(10,50))
nsga.append(np.random.randint(10,50))
obj_over_configs(a,kub,nsga,"network distance","250 machines")
"""
def optimal_front_at_gen(x_obj, y_obj, x_obj_name, y_obj_name, title):
    """Scatter-plot one objective against another for a single generation's
    Pareto front, then display the figure interactively."""
    plt.scatter(x_obj, y_obj, s=8)
    axes = plt.gca()
    axes.set_xlabel(x_obj_name)
    axes.set_ylabel(y_obj_name)
    axes.set_title(title)
    plt.show()
def visualize_history(history, name):
    """Plot the per-objective range (min/max band) and population mean over
    generations, one subplot per objective, and save the figure to *name*.

    history: array of shape (n_gen, pop_size, n_obj) -- assumed numeric;
    TODO confirm it is a numpy array (``.shape`` is required).
    """
    n_gen, pop_size, n_obj = history.shape
    fig, axs = plt.subplots(n_obj, 1)
    axs[-1].set_xlabel("Generations")
    for i in range(n_obj):
        x = np.arange(1,n_gen+1)
        axs[i].set_xlim(0, n_gen+1)
        axs[i].set_ylabel('obj_' + str(i+1))
        #for j in range(n_gen):
        #    x = np.ones(pop_size) * (j + 1)
        #    axs[i].plot(x, history[j,:,i], 'r.')
        # Envelope of the population: per-generation max and min of objective i.
        max_obj = np.max(history[:,:,i], axis=1)
        min_obj = np.min(history[:,:,i], axis=1)
        axs[i].plot(x, max_obj, 'r-')
        axs[i].plot(x, min_obj, 'r-')
        axs[i].fill_between(x, max_obj, min_obj, where=max_obj>min_obj, facecolor='red', alpha=0.1)
        # Population mean per generation, drawn as a dashed blue line.
        average = np.sum(history[:,:,i], axis=1) / pop_size
        x = np.arange(1, n_gen+1)
        axs[i].plot(x, average, 'b--')
    fig.set_size_inches(10,10)
    plt.savefig(name)
    plt.close()
|
from pacbot.game.blinky import Blinky
from pacbot.game.clyde import Clyde
from pacbot.game.inky import Inky
from pacbot.game.maze import Maze
from pacbot.game.pinky import Pinky
from pacbot.game.player import Player
from pacbot.game.renderer import Renderer
import pygame
class Game:
    """Top-level Pac-Man game state: maze, player, ghosts, score and frame counter.

    ``run`` advances the simulation by one frame; ``render`` draws the current
    state onto the scene object supplied at construction time.
    """

    def __init__(self, scene):
        self.frame = 0
        self.scene = scene
        self.maze = Maze()
        self.maze.create_legacy_area()
        self.renderer = Renderer(scene, self.maze)
        self.player = Player(13, 26, self.maze)
        self.ghosts = self.create_ghosts()
        self.score = 0
        self.is_finished = False
        # NOTE(review): start() re-creates the maze area, player and ghosts,
        # so part of the setup above is repeated -- confirm
        # create_legacy_area() is idempotent before simplifying.
        self.start()

    def create_ghosts(self):
        # Only two of the four classic ghosts are enabled.
        return [
            # Clyde(self.player, self.maze),
            # Inky(self.player, self.maze),
            Blinky(self.player, self.maze),
            Pinky(self.player, self.maze)
        ]

    def start(self):
        """Reset score, frame counter, maze contents, player and ghosts."""
        self.is_finished = False
        self.score = 0
        self.frame = 0
        self.maze.create_legacy_area()
        self.player = Player(13, 26, self.maze)
        self.ghosts = self.create_ghosts()

    def run(self):
        """Advance one frame: check collisions, then move actors on their cadence."""
        self.control_collision()
        if self.is_finished:
            return
        # Player moves every 3rd frame, ghosts every 5th -- the player is
        # slightly faster than the ghosts.
        if self.frame % 3 == 0:
            self.player.move()
            self.refresh_maze()
        if self.frame % 5 == 0:
            for ghost in self.ghosts:
                ghost.move()
        self.frame += 1

    def render(self):
        """Redraw the maze and all actors onto the scene."""
        self.scene.clear()
        self.renderer.render_maze()
        self.renderer.render_actor(self.player)
        for ghost in self.ghosts:
            self.renderer.render_actor(ghost)
        self.scene.update()

    def refresh_maze(self):
        """Consume any bait at the player's cell and update score/finish state."""
        pos = self.player.position
        # Cell codes: >1 means bait; 2 is a normal pellet (1 point), other
        # values are power pellets (10 points); 1 marks an empty floor cell.
        if self.maze.mtr[pos[1]][pos[0]] > 1:
            if self.maze.mtr[pos[1]][pos[0]] == 2:
                self.score += 1
            else:
                self.score += 10
            self.maze.mtr[pos[1]][pos[0]] = 1
            self.maze.bait_count -= 1
            # Eating the last bait ends the game (win condition).
            if self.maze.bait_count <= 0:
                self.is_finished = True

    def control_collision(self):
        """End the game when any ghost occupies the player's cell."""
        for ghost in self.ghosts:
            if ghost.position[0] == self.player.position[0] and ghost.position[1] == self.player.position[1]:
                self.is_finished = True

    def move_player(self, key=-1):
        """Steer the player: with an explicit direction code, or via keyboard polling."""
        if key > -1:
            self.player.change_direction(key)
            return
        self._handle_keyboard()

    def is_player_moved(self):
        # True once the queued direction change has taken effect.
        return self.player.direction == self.player.next_direction

    def _handle_keyboard(self):
        # Direction codes: 0=left, 1=up, 2=right, 3=down; TAB restarts the game.
        if self.scene.key_pressed(pygame.K_LEFT):
            self.player.change_direction(0)
        if self.scene.key_pressed(pygame.K_UP):
            self.player.change_direction(1)
        if self.scene.key_pressed(pygame.K_RIGHT):
            self.player.change_direction(2)
        if self.scene.key_pressed(pygame.K_DOWN):
            self.player.change_direction(3)
        if self.scene.key_pressed(pygame.K_TAB):
            self.start()
|
import datetime
from django.contrib.auth.models import User
from django.db import models
# Create your models here.
from django.urls import reverse
from GestionLab.models import Laboratorio, Maquina
from Convocatorias.models import Convocatoria
# def maqdelLab(lab):
# return Maquina.local(lab)
class Reservacion(models.Model):
    """A reservation of a machine in a lab at a given date/time.

    Removed: a dangling triple-quoted string left at class level by a
    commented-out get_absolute_url (it was a no-op expression) and the dead
    commented-out code around it.
    """
    fecha = models.DateTimeField("Fecha de la reservación")
    local = models.ForeignKey(Laboratorio, on_delete=models.CASCADE)
    maquina = models.ForeignKey(Maquina, on_delete=models.CASCADE)
    usuario = models.ManyToManyField(User)

    class Meta:
        ordering = ["id"]

    def __str__(self):
        # The previous commented-out attempts used strptime (which *parses*
        # strings); strftime is the formatting counterpart for a datetime.
        return self.fecha.strftime('%d/%m/%Y')
|
# !/usr/bin/env python
# encoding: utf-8
"""
SEED Platform (TM), Copyright (c) Alliance for Sustainable Energy, LLC, and other contributors.
See also https://github.com/seed-platform/seed/main/LICENSE.md
"""
import base64
import json
from django.urls import reverse_lazy
from seed.landing.models import SEEDUser as User
from seed.models import Measure, PropertyMeasure, Scenario
from seed.test_helpers.fake import (
FakePropertyStateFactory,
FakePropertyViewFactory
)
from seed.tests.util import DeleteModelsTestCase
from seed.utils.organizations import create_organization
class TestPropertyMeasures(DeleteModelsTestCase):
    """API tests for the api:v3 property-measures endpoints: list/detail GET,
    PUT updates (including failure modes) and DELETE, all issued over
    HTTP Basic auth."""

    def setUp(self):
        # Superuser with a generated API key so requests authenticate via
        # a Basic 'username:api_key' header.
        user_details = {
            'username': 'test_user@demo.com',
            'password': 'test_pass',
            'email': 'test_user@demo.com'
        }
        self.user = User.objects.create_superuser(**user_details)
        self.user.generate_key()
        self.org, _, _ = create_organization(self.user)
        auth_string = base64.urlsafe_b64encode(bytes(
            '{}:{}'.format(self.user.username, self.user.api_key), 'utf-8'
        ))
        self.auth_string = 'Basic {}'.format(auth_string.decode('utf-8'))
        self.headers = {'Authorization': self.auth_string}
        self.property_state_factory = FakePropertyStateFactory(organization=self.org)
        self.property_view_factory = FakePropertyViewFactory(organization=self.org, user=self.user)

    def test_get_property_measure(self):
        """
        Test PropertyMeasure view can retrieve all or individual PropertyMeasure model instances
        """
        # Two scenarios, each linked to two of the four measures below.
        property_view = self.property_view_factory.get_property_view()
        property_state = property_view.state
        measures = Measure.objects.all()
        scenario0 = Scenario.objects.create(property_state=property_state, name='scenario 0')
        scenario1 = Scenario.objects.create(property_state=property_state, name='scenario 1')
        property_measure0 = PropertyMeasure.objects.create(
            measure=measures[0],
            property_state=property_state,
            description="Property Measure 0"
        )
        property_measure1 = PropertyMeasure.objects.create(
            measure=measures[1],
            property_state=property_state,
            description="Property Measure 1"
        )
        property_measure2 = PropertyMeasure.objects.create(
            measure=measures[2],
            property_state=property_state,
            description="Property Measure 2"
        )
        property_measure3 = PropertyMeasure.objects.create(
            measure=measures[3],
            property_state=property_state,
            description="Property Measure 3"
        )
        property_measure0.scenario_set.add(scenario0.id)
        property_measure1.scenario_set.add(scenario0.id)
        property_measure2.scenario_set.add(scenario1.id)
        property_measure3.scenario_set.add(scenario1.id)
        # Unknown scenario id -> 404 with an error payload.
        url = reverse_lazy(
            'api:v3:property-measures-list',
            args=[property_view.id, 1234567]
        )
        response = self.client.get(url, **self.headers)
        self.assertEqual(response.status_code, 404)
        self.assertEqual(response.json()['message'], "No Measures found for given pks")
        self.assertEqual(response.json()['status'], 'error')
        # Listing scenario0 returns exactly its two measures.
        url = reverse_lazy(
            'api:v3:property-measures-list',
            args=[property_view.id, scenario0.id]
        )
        response = self.client.get(url, **self.headers)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json()['status'], 'success')
        data = response.json()['data']
        self.assertEqual(len(data), 2)
        self.assertEqual(data[0]['description'], "Property Measure 0")
        self.assertEqual(data[1]['description'], "Property Measure 1")
        # Listing scenario1 returns its two measures.
        url = reverse_lazy(
            'api:v3:property-measures-list',
            args=[property_view.id, scenario1.id]
        )
        response = self.client.get(url, **self.headers)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json()['status'], 'success')
        data = response.json()['data']
        self.assertEqual(len(data), 2)
        self.assertEqual(data[0]['description'], "Property Measure 2")
        self.assertEqual(data[1]['description'], "Property Measure 3")
        # Detail endpoint retrieves a single measure by pk.
        url = reverse_lazy(
            'api:v3:property-measures-detail',
            args=[property_view.id, scenario1.id, property_measure2.id]
        )
        response = self.client.get(url, **self.headers)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json()['status'], 'success')
        self.assertEqual(response.json()['data']['description'], "Property Measure 2")
        # Detail with an unknown measure pk -> 404.
        url = reverse_lazy(
            'api:v3:property-measures-detail',
            args=[property_view.id, scenario1.id, 1234]
        )
        response = self.client.get(url, **self.headers)
        self.assertEqual(response.status_code, 404)
        self.assertEqual(response.json()['status'], 'error')
        self.assertEqual(response.json()['message'], "No Measure found for given pks")

    def test_update_property_measure(self):
        """
        Test PropertyMeasure view can update a PropertyMeasure model instances
        """
        property_view = self.property_view_factory.get_property_view()
        property_state = property_view.state
        measures = Measure.objects.all()
        scenario0 = Scenario.objects.create(property_state=property_state, name='scenario 0')
        property_measure0 = PropertyMeasure.objects.create(
            measure=measures[0],
            property_state=property_state,
            description="Property Measure 0",
            implementation_status=1
        )
        property_measure1 = PropertyMeasure.objects.create(
            measure=measures[1],
            property_state=property_state,
            description="Property Measure 1",
            implementation_status=1
        )
        property_measure0.scenario_set.add(scenario0.id)
        property_measure1.scenario_set.add(scenario0.id)
        property_measure_fields = {
            'description': 'updated desc',
            'implementation_status': 7
        }
        url = reverse_lazy(
            'api:v3:property-measures-detail',
            args=[property_view.id, scenario0.id, property_measure1.id]
        )
        response = self.client.put(
            url,
            data=json.dumps(property_measure_fields),
            content_type='application/json',
            **self.headers
        )
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json()['status'], 'success')
        data = response.json()['data']
        self.assertEqual(data['description'], 'updated desc')
        # The API serializes implementation_status 7 as its display string.
        self.assertEqual(data['implementation_status'], "Completed")
        # Only the targeted measure is modified; its sibling is untouched.
        property_measure0 = PropertyMeasure.objects.get(pk=property_measure0.id)
        property_measure1 = PropertyMeasure.objects.get(pk=property_measure1.id)
        self.assertEqual(property_measure0.implementation_status, 1)
        self.assertEqual(property_measure1.implementation_status, 7)

    def test_fail_to_update_property_measure_with_invalid_data(self):
        """
        Test Failure modes when property measure is updated with invalid data
        """
        property_view = self.property_view_factory.get_property_view()
        property_state = property_view.state
        measures = Measure.objects.all()
        scenario0 = Scenario.objects.create(property_state=property_state, name='scenario 0')
        property_measure0 = PropertyMeasure.objects.create(
            measure=measures[0],
            property_state=property_state,
            description="Property Measure 0",
            implementation_status=1
        )
        property_measure0.scenario_set.add(scenario0.id)
        # Invalid Field Name
        property_measure_fields = {
            'description': 'updated desc',
            'invalid_field': 123
        }
        url = reverse_lazy(
            'api:v3:property-measures-detail',
            args=[property_view.id, scenario0.id, property_measure0.id]
        )
        response = self.client.put(
            url,
            data=json.dumps(property_measure_fields),
            content_type='application/json',
            **self.headers
        )
        self.assertEqual(response.status_code, 400)
        self.assertEqual(response.json()['status'], 'error')
        self.assertEqual(response.json()['message'], '"invalid_field" is not a valid property measure field')
        property_measure_fields = {
            'description': 'updated desc',
        }
        # Invalid Property Measure ID
        url = reverse_lazy(
            'api:v3:property-measures-detail',
            args=[property_view.id, scenario0.id, 99999]
        )
        response = self.client.put(
            url,
            data=json.dumps(property_measure_fields),
            content_type='application/json',
            **self.headers
        )
        self.assertEqual(response.status_code, 404)
        self.assertEqual(response.json()['status'], 'error')
        self.assertEqual(response.json()['message'], 'No Property Measure found with given pks')
        # Invalid property view id -> same 404 error.
        url = reverse_lazy(
            'api:v3:property-measures-detail',
            args=[9999, scenario0.id, property_measure0.id]
        )
        response = self.client.put(
            url,
            data=json.dumps(property_measure_fields),
            content_type='application/json',
            **self.headers
        )
        self.assertEqual(response.status_code, 404)
        self.assertEqual(response.json()['status'], 'error')
        self.assertEqual(response.json()['message'], 'No Property Measure found with given pks')

    def test_delete_property_measure(self):
        """
        Test views ability to delete the model
        """
        property_view = self.property_view_factory.get_property_view()
        property_state = property_view.state
        measures = Measure.objects.all()
        scenario = Scenario.objects.create(property_state=property_state)
        property_measure0 = PropertyMeasure.objects.create(
            measure=measures[0],
            property_state=property_state,
            description="Property Measure 0"
        )
        property_measure1 = PropertyMeasure.objects.create(
            measure=measures[1],
            property_state=property_state,
            description="Property Measure 1"
        )
        property_measure0.scenario_set.add(scenario.id)
        property_measure1.scenario_set.add(scenario.id)
        self.assertEqual(PropertyMeasure.objects.count(), 2)
        response = self.client.delete(
            reverse_lazy('api:v3:property-measures-detail', args=[property_view.id, scenario.id, property_measure0.id]),
            **self.headers
        )
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.json()['status'], 'success')
        self.assertEqual(response.json()['message'], 'Successfully Deleted Property Measure')
        self.assertEqual(PropertyMeasure.objects.count(), 1)
        # Deleting a non-existent pk -> 404 and nothing else removed.
        response = self.client.delete(
            reverse_lazy('api:v3:property-measures-detail', args=[property_view.id, scenario.id, 9999]),
            **self.headers
        )
        self.assertEqual(response.status_code, 404)
        self.assertEqual(response.json()['status'], 'error')
        self.assertEqual(response.json()['message'], 'No Property Measure found with given pks')
|
from flask import Flask, request, jsonify, Response
from flask_pymongo import PyMongo
from bson import json_util
from bson.objectid import ObjectId
app = Flask(__name__)
# Connection string of the local MongoDB database used by this API.
app.config['MONGO_URI']='mongodb://localhost:27017/pythonmongodb'
# Hand the app's configuration to PyMongo.
mongo = PyMongo(app)
# POST
@app.route('/customer', methods=['POST'])
def create_user():
    """Create a customer document from the JSON request body.

    Returns the stored document (with its new id) on success, or the JSON
    404 payload from not_found() when any field is missing/empty.
    """
    name = request.json['name']
    lastName = request.json['lastName']
    identification = request.json['identification']
    birthDate = request.json['birthDate']
    city = request.json['city']
    neighborhood = request.json['neighborhood']
    phone = request.json['phone']
    # Simple validation that every field arrived non-empty.
    if name and lastName and identification and birthDate and city and neighborhood and phone:
        # insert_one replaces Collection.insert, which is deprecated and
        # removed in PyMongo 4.
        result = mongo.db.customers.insert_one(
            {
                'name' : name,
                'lastName' : lastName,
                'identification' : identification,
                'birthDate' : birthDate,
                'city' : city,
                'neighborhood' : neighborhood,
                'phone' : phone
            }
        )
        # Echo back the newly created document, including its generated id.
        return {
            'id': str(result.inserted_id),
            'name' : name,
            'lastName' : lastName,
            'identification' : identification,
            'birthDate' : birthDate,
            'city' : city,
            'neighborhood' : neighborhood,
            'phone' : phone
        }
    else:
        return not_found()
    # Removed: an unreachable `return {'message': 'received'}` that followed
    # the if/else (both branches already return).
# GET ALL USERS
@app.route('/customers', methods=['GET'])
def get_users():
    """Return every customer document wrapped in a JSON object."""
    customers = mongo.db.customers.find()
    # A pymongo Cursor is not JSON-serializable, even through bson's
    # json_util; materialize it into a list before dumping.
    response = {"quotesDB": list(customers)}
    # Response with an explicit mimetype so clients see JSON, not a string.
    return Response(json_util.dumps(response), mimetype='application/json')
# GET USER BY ID
@app.route('/customer/<id>', methods=['GET'])
def get_user(id):
    """Fetch a single customer by its ObjectId and return it as JSON."""
    # The path parameter arrives as a string; Mongo needs an ObjectId.
    document = mongo.db.customers.find_one({'_id': ObjectId(id)})
    # json_util knows how to serialize BSON types (ObjectId, dates).
    return Response(json_util.dumps(document), mimetype='application/json')
# DELETE
@app.route('/customer/<id>', methods=['DELETE'])
def delete_user(id):
    """Delete one customer by id and report success."""
    mongo.db.customers.delete_one({'_id': ObjectId(id)})
    return jsonify({
        'message': 'Customer ' + id + ' was deleted successfully'
    })
# PUT
@app.route('/customer/<id>', methods=['PUT'])
def update_user(id):
    """Overwrite a customer's fields from the JSON request body.

    Returns a success message, or the JSON 404 payload when any field is
    missing/empty (previously this path fell through returning None, which
    Flask turns into a 500).
    """
    name = request.json['name']
    lastName = request.json['lastName']
    identification = request.json['identification']
    birthDate = request.json['birthDate']
    city = request.json['city']
    neighborhood = request.json['neighborhood']
    phone = request.json['phone']
    if name and lastName and identification and birthDate and city and neighborhood and phone:
        mongo.db.customers.update_one({'_id': ObjectId(id)}, {'$set': {
            'name' : name,
            'lastName' : lastName,
            'identification' : identification,
            'birthDate' : birthDate,
            'city' : city,
            'neighborhood' : neighborhood,
            'phone' : phone
        }})
        return jsonify({
            'message': 'Customer ' + id + ' was updated successfully'
        })
    # Mirror create_user's validation failure behavior.
    return not_found()
# Error handler
@app.errorhandler(404)
def not_found(error=None):
    """Build the JSON 404 payload shared across the API."""
    # jsonify lets us attach extra properties alongside the message.
    body = jsonify({
        'message': 'Resource not found: ' + request.url,
        'status': 404
    })
    # Make the HTTP status match the payload.
    body.status_code = 404
    return body
if __name__ == '__main__':
    # Development server only; use a proper WSGI server in production.
    app.run(debug=True)
#!/usr/bin/python
import gtk
import myanmar.converter as converter
# Replace the clipboard's current text with its Zawgyi -> Unicode conversion,
# then store it so the converted text survives after this process exits.
# (PyGTK / Python 2 script.)
clipboard = gtk.clipboard_get()
clipboard.set_text(converter.convert(clipboard.wait_for_text(), "zawgyi", "unicode"))
clipboard.store()
|
import time
# Hangman-style word guessing game.
# Welcome the user
name = input("what is your name ")
print("Welcome to my word game " + name + "!")
time.sleep(1)
print("Start Guessing...")
time.sleep(0.5)
# Set the secret word
word = "onomatopoeia"
# Letters guessed so far
guesses = ''
# Number of wrong guesses allowed
turns = 12
# Game loop: one pass per guess
while turns > 0:
    # Count the letters of the word not yet guessed
    failed = 0
    for char in word:
        if char in guesses:
            print(char)
        else:
            print("_")
            failed += 1
    if failed == 0:
        print("You won")
        break
    # BUG FIX: a bare `print` is a no-op expression in Python 3; call it
    # to emit the intended blank line.
    print()
    # Ask the user to guess a letter
    guess = input("Guess a letter ")
    # Record the player's guess
    guesses += guess
    # If the guess is not found in the secret word
    if guess not in word:
        # the turns counter decreases by one
        turns -= 1
        print("Sorry, that letter isn't in the word")
        # print("You have " + str(turns) + " more guesses")  # fixed int concat
        if turns == 0:
            print("Sorry sucker you lost and have DIED!!!")
from sys import argv
from os.path import exists
# Python 2 script. Usage: python script.py <from_file> <to_file>
script,from_file,to_file=argv
print "Copying from %s to %s" %(from_file,to_file)
# Read the whole source file into memory.
in_file= open(from_file)
indata=in_file.read()
print "The input file is %d bytes long" %len(indata)
print "Does the output file exist? %r" %exists(to_file)
print "Ready , hit RETURN to continue, CTRL-C to abort"
raw_input()
# Write the data out (overwriting to_file) and close both handles.
out_file=open(to_file,'w')
out_file.write(indata)
print "Alright, all done."
out_file.close()
in_file.close()
|
# -*- coding: utf-8 -*-
# filename:pics
import csv
import numpy
import matplotlib.pyplot as plt
# Load the price and size columns ('|' separated) from house.csv.
price,size = numpy.loadtxt("house.csv",delimiter='|',usecols=(1,2),unpack=True)
plt.figure()
plt.subplot(211)
plt.title("price")
# NOTE(review): this second call overwrites the "price" title just set above —
# an axis label ("/10000RMB" units) was probably intended; confirm.
plt.title("/10000RMB")
plt.hist(price,bins=30)
plt.subplot(212)
plt.title("area")
# Second figure: price as a line plot.
plt.figure(2)
plt.title("price")
plt.plot(price)
plt.show()
|
import argparse
import json
import logging
import sys
import time
from subprocess import call
import requests
log = logging.getLogger(__name__)
def configure_logging():
    """Attach a DEBUG-level stdout handler with timestamps to the module logger."""
    stream_handler = logging.StreamHandler(sys.stdout)
    stream_handler.setLevel(logging.DEBUG)
    stream_handler.setFormatter(
        logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    )
    log.setLevel(logging.DEBUG)
    log.addHandler(stream_handler)
def stop_rpc_daemon():
    """This stops the local eth client. Customize for your needs"""
    # Fire-and-forget: the return code of systemctl is deliberately ignored.
    call(['systemctl', 'stop', 'erigon-rpc-daemon'])
def _get_result(location: str, response: requests.Response) -> int:
    """Extract the hex 'result' field of a JSON-RPC style response as an int.

    Raises ValueError when the body is not JSON or lacks a 'result' key.
    """
    try:
        payload = json.loads(response.text)
    except json.JSONDecodeError:
        raise ValueError(f'{location} returned invalid JSON response: {response.text}')
    hex_result = payload.get('result', None)
    if hex_result is None:
        raise ValueError(
            f'Unexpected format of {location} response for request {response.url}. '
            f'Missing a result in response. Response was: {response.text}',
        )
    # Block numbers come back as 0x-prefixed hex strings.
    return int(hex_result, 16)
def get_oracle_block_number() -> int:
    """Query etherscan (the oracle) for the latest block number."""
    query_str = 'https://api.etherscan.io/api?module=proxy&action=eth_blockNumber'
    try:
        response = requests.get(query_str)
    except requests.exceptions.RequestException as e:
        # Normalize transport failures into the ValueError the caller handles.
        raise ValueError(f'Etherscan query failed due to {str(e)}')
    if response.status_code != 200:
        raise ValueError(
            f'Etherscan API request {response.url} failed '
            f'with HTTP status code {response.status_code} and text '
            f'{response.text}',
        )
    return _get_result('etherscan', response)
def get_local_block_number(eth_rpc_endpoint: str) -> int:
    """Query the local node for its current block number via JSON-RPC."""
    rpc_payload = {'jsonrpc': '2.0', 'method': 'eth_blockNumber', 'params': [], 'id': 1}
    try:
        response = requests.post(eth_rpc_endpoint, json=rpc_payload)
    except requests.exceptions.RequestException as e:
        # Normalize transport failures into the ValueError the caller handles.
        raise ValueError(f'Local node query failed due to {str(e)}')
    if response.status_code != 200:
        raise ValueError(
            f'Local node API request {response.url} failed '
            f'with HTTP status code {response.status_code} and text '
            f'{response.text}',
        )
    return _get_result('local node', response)
def main(args):
    """Monitor loop: compare local vs oracle block height every sleep_time
    seconds and stop the rpc daemon when the local node is unhealthy or
    falls too far behind."""
    subsequent_oracle_failures = 0
    while True:
        try:
            local_block_number = get_local_block_number(args.eth1_rpc_endpoint)
        except ValueError as e:
            # A dead local node is fatal: stop the daemon so backups take over.
            log.critical(
                f'Failed to get block number from local rpc due to {str(e)}. '
                f'Stopping rpc daemon ...'
            )
            stop_rpc_daemon()
            break
        try:
            master_block_number = get_oracle_block_number()
        except ValueError as e:
            log.error(f'Failed to get block number from oracle due to {str(e)}')
            subsequent_oracle_failures += 1
            # >= (was ==) so the loop cannot skip past the threshold.
            if subsequent_oracle_failures >= args.max_oracle_failures:
                # BUG FIX: message hard-coded "10" regardless of the
                # configured --max-oracle-failures value.
                log.error(
                    f'Failed to get block number from oracle due to {str(e)} '
                    f'for {args.max_oracle_failures} subsequent times. '
                    f'Stopping rpc daemon just to be safe ...'
                )
                stop_rpc_daemon()
                break
        else:
            subsequent_oracle_failures = 0  # reset counter
            diff = master_block_number - local_block_number
            if diff > args.max_block_difference:
                log.error(
                    f'Local block number: {local_block_number}, Oracle block '
                    f'number: {master_block_number}. Diff: {diff} > '
                    f'{args.max_block_difference}. Stopping rpc daemon ...'
                )
                stop_rpc_daemon()
                break
            else:
                log.info(
                    f'Local block number: {local_block_number}, Oracle block '
                    f'number: {master_block_number}. Diff: {diff}. All good.'
                )
        time.sleep(args.sleep_time)
if __name__ == "__main__":
    configure_logging()
    # BUG FIX: the trailing commas inside description=(...) and the last
    # help=(...) made them *tuples*, not strings, which argparse cannot
    # format correctly.
    parser = argparse.ArgumentParser(
        prog='Eth1 monitor',
        description=(
            'Script to monitor eth1 node sync status and kill it if not in sync '
            'so that the backup (remote) nodes can take over'
        )
    )
    parser.add_argument(
        '--eth1-rpc-endpoint',
        required=True,
        help='The local eth rpc endpoint to monitor',
        type=str,
    )
    parser.add_argument(
        '--sleep-time',
        help='The time in seconds to sleep between each run',
        type=int,
        default=10,
    )
    parser.add_argument(
        '--max-oracle-failures',
        help='The maximum number of times the oracle call can fail before we bail',
        type=int,
        default=10,
    )
    parser.add_argument(
        '--max-block-difference',
        help=(
            'The maximum blocks difference the local node can have from the oracle before we bail'
        ),
        type=int,
        default=25,
    )
    args = parser.parse_args()
    main(args)
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sun Apr 19 11:54:19 2020
@author: zhengsc
"""
import numpy as np
import matplotlib.pyplot as plt
# Entropy for binary classification.
# p may be a scalar or a vector, hence np.log is used.
def entropy(p):
    """Binary-classification entropy (natural log); p may be scalar or array."""
    q = 1 - p
    return -(p * np.log(p) + q * np.log(q))
# linspace builds the vector x, sampled uniformly over (0, 1); plot the
# entropy value corresponding to each x.
x = np.linspace(0.01,0.99,100)
plt.plot(x,entropy(x))
plt.show()
import ROOT
from xAH_config import xAH_config
import sys, os
sys.path.insert(0, os.environ['ROOTCOREBIN']+"/user_scripts/XhhCommon/")
from XhhResolved_config_VHqqbb import *
from XhhBoosted_config_VHqqbb import *
from XhhLepTop_config import *
c = xAH_config()
#
# Basic Setup
#
# Trigger chains (regex patterns allowed) whose decisions will be recorded
# by BasicEventSelection and stored on the output ntuple.
triggersList = ['L1_4J20',
                'L1_J100',
                'L1_3J25.0ETA23',
                'L1_4J15.0ETA25',
                'L1_J75_3J20',
                'HLT_.*bmv2c20.*',
                'HLT_ht.*',
                'HLT_j65_bt.*',
                'HLT_j70_bt.*',
                'HLT_j75_bt.*',
                'HLT_j175_bt.*',
                'HLT_2j35_bt.*',
                'HLT_2j45_bt.*',
                'HLT_2j55_bt.*',
                'HLT_2j65_bt.*',
                'HLT_2j70_bt.*',
                'HLT_2j75_bt.*',
                'HLT_j65_bm.*',
                'HLT_j70_bm.*',
                'HLT_j75_bm.*',
                'HLT_j175_bm.*',
                'HLT_2j35_bm.*',
                'HLT_2j45_bm.*',
                'HLT_2j55_bm.*',
                'HLT_2j65_bm.*',
                'HLT_2j70_bm.*',
                'HLT_2j75_bm.*',
                'HLT_j225_bl.*',
                'HLT_j300_bl.*',
                'HLT_j420.*',
                'HLT_j440.*',
                'HLT_j400.*',
                'HLT_j360.*',
                'HLT_j380.*',
                'HLT_j100',
                'HLT_j110',
                'HLT_j150',
                'HLT_j175',
                'HLT_j200',
                'HLT_j260',
                'HLT_.*bperf.*',
                'HLT_.*boffperf.*',
                'HLT_3j.*',
                'HLT_4j.*',
                'HLT_j3.*a10.*',
                'HLT_j4.*a10.*',
                'HLT_j100_2j55_bmedium',
                'HLT_e24_lhtight_iloose',
                'HLT_mu26_imedium']
# Comma-separated form expected by the m_triggerSelection option below.
triggers = ",".join(triggersList)
# Event-level selection: primary vertex, event cleaning, trigger storage
# (trigger decisions stored but not cut on: m_applyTriggerCut is False).
c.setalg("BasicEventSelection", { "m_name" : "basicEventSel",
                                  "m_debug" : False,
                                  "m_applyGRLCut" : False,
                                  "m_doPUreweighting" : False,
                                  "m_vertexContainerName" : "PrimaryVertices",
                                  "m_PVNTrack" : 2,
                                  "m_truthLevelOnly" : False,
                                  "m_applyPrimaryVertexCut" : True,
                                  "m_derivationName" : "EXOT3Kernel",
                                  "m_applyEventCleaningCut" : True,
                                  "m_applyCoreFlagsCut" : True,
                                  "m_triggerSelection" : triggers,
                                  "m_storeTrigDecisions" : True,
                                  "m_useMetaData" : True,
                                  "m_applyTriggerCut" : False,
                                  "m_storePassL1" : True,
                                  "m_storePassHLT" : True,
                                  "m_storeTrigKeys" : True,
                                  } )
# muon for Higgs mass correction purpose
c.setalg("MuonCalibrator", { "m_name" : "Muons",
                             "m_inContainerName" : "Muons",
                             "m_outContainerName" : "Muons_Calib",
                             "m_outputAlgoSystNames" : "MuonCalibrator_Syst",
                             # "m_release" : "PreRecs2016_05_23",
                             "m_debug" : False,
                             "m_forceDataCalib" : True,
                             } )
c.setalg("MuonHistsAlgo", {"m_name":"Muons_Calib/", "m_inContainerName":"Muons_Calib",
                           "m_detailStr" : "kinematic energyLoss", } )
# Medium combined muons above 4 GeV, used downstream in the ntuple.
c.setalg("MuonSelector", { "m_name" : "MuonSelector",
                           "m_inContainerName" : "Muons_Calib",
                           "m_outContainerName" : "Muons_Selected",
                           "m_createSelectedContainer" : True,
                           "m_pT_min" : 4*1000,
                           "m_eta_max" : 2.5,
                           "m_muonType" : "Combined",
                           "m_muonQualityStr" : "Medium",
                           "m_debug" : False,
                           } )
c.setalg("MuonHistsAlgo", {"m_name":"Muons_Selected/", "m_inContainerName":"Muons_Selected",
                           "m_detailStr" : "kinematic quality energyLoss isolation", } )
# muons for veto purpose
c.setalg("MuonSelector", { "m_name" : "MuonSelector_Veto",
                           "m_inContainerName" : "Muons_Calib",
                           "m_outContainerName" : "Muons_Veto",
                           "m_createSelectedContainer" : True,
                           "m_pT_min" : 7*1000,
                           "m_eta_max" : 2.7,
                           "m_muonType" : "Combined",
                           "m_muonQualityStr" : "Loose",
                           "m_MinIsoWPCut" : "LooseTrackOnly",
                           "m_d0sig_max" : 3.,
                           "m_z0sintheta_max" : 0.5,
                           "m_debug" : False,
                           } )
# electrons for veto purpose
c.setalg("ElectronCalibrator", { "m_name" : "Electrons",
                                 "m_inContainerName" : "Electrons",
                                 "m_outContainerName" : "Electrons_Calib_Veto",
                                 "m_outputAlgoSystNames" : "ElectronCalibrator_Veto_Syst",
                                 "m_esModel" : "es2016PRE",
                                 "m_decorrelationModel" : "FULL_v1", # this one is randomly assigned ... should not matter ...
                                 "m_debug" : False,
                                 })
c.setalg("ElectronSelector", { "m_name" : "ElectronSelector_Veto",
                               "m_inContainerName" : "Electrons_Calib_Veto",
                               "m_outContainerName" : "Electrons_Veto",
                               "m_createSelectedContainer" : True,
                               "m_doLHPID" : True,
                               "m_doLHPIDcut" : True,
                               "m_LHOperatingPoint" : "Loose",
                               "m_MinIsoWPCut" : "LooseTrackOnly",
                               "m_d0sig_max" : 5.,
                               "m_z0sintheta_max" : 0.5,
                               "m_pT_min" : 7*1000.,
                               "m_eta_max" : 2.47,
                               "m_debug" : False,
                               })
#
# Set up resolved Analysis
#
#config_ResolvedAnalysis(c, args, doSystematics = args.is_MC)
config_ResolvedAnalysis(c, args, doSystematics = False)
#
# Set up Boosted Analysis
#
# config_BoostedAnalysis(c, args, doSystematics = args.is_MC)
config_BoostedAnalysis(c, args, doSystematics = False)
#
# Set up Leptonic Top Analysis
#
#config_LepTopAnalysis(c, args, doSystematics = args.is_MC)
config_LepTopAnalysis(c, args, doSystematics = False)
# MET for veto purpose
# Must be placed after AKT4 jets are reconstructed!
c.setalg("METConstructor", { "m_name" : "METConstructor_Veto",
                             "m_mapName" : "METAssoc_AntiKt4EMTopo",
                             "m_coreName" : "MET_Core_AntiKt4EMTopo",
                             "m_outputContainer" : "MET_Veto",
                             "m_inputJets" : "AntiKt4EMTopoJets_Calib",
                             "m_inputElectrons" : "Electrons_Veto",
                             "m_inputMuons" : "Muons_Veto",
                             # no photon/tau
                             # no additional selection
                             "m_useCaloJetTerm" : True,
                             "m_useTrackJetTerm" : False,
                             "m_debug" : False,
                             })
# OR
# Overlap removal between veto leptons and the calibrated AKT4 jets.
c.setalg("OverlapRemover", {"m_name" : "ORConstructor_qqbb",
                            "m_inContainerName_Muons" : "Muons_Veto",
                            "m_inContainerName_Electrons" : "Electrons_Veto",
                            "m_inContainerName_Jets" : "AntiKt4EMTopoJets_Calib",
                            "m_inputAlgoJets" : "AntiKt4EMTopoJets_Calib_Algo",
                            "m_createSelectedContainers" : True,
                            "m_decorateSelectedObjects" : True,
                            "m_outContainerName_Muons" : "Muons_Veto_OR",
                            "m_outContainerName_Electrons" : "Electrons_Veto_OR",
                            "m_outContainerName_Jets" : "AntiKt4EMTopoJets_Calib_OR",
                            "m_outputAlgo" : "OR_Alg_qqbb",
                            "m_outputLabel" : "passOR_qqbb",
                            "m_masterToolName" : "OverlapRemovalTool_qqbb",
                            "m_useCutFlow" : False,
                            "m_useSelected" : False,
                            "m_debug" : False,
                            })
#
# Ntuples
#
c.setalg("XhhMiniNtuple", { "m_name" : "MiniNTuple",
                            "m_boostedHcandName" : "finalBoostedJets",
                            "m_resolvedJetsName" : "AntiKt4EMTopoJets_Calib_preSel",
                            "m_lepTopCandName" : "LepTops",
                            "m_debug" : False,
                            "m_trigDetailStr" : "passTriggers",
                            "m_inTruthParticleName" : "TruthParticles",
                            "m_resolvedJetDetailStr" : "kinematic clean trackPV flavorTag sfFTagFix70 JVC",
                            "m_boostedJetDetailStr" : "kinematic substructure constituent",
                            "m_lepTopJetDetailStr" : "kinematic clean trackPV flavorTag sfFTagFix70",
                            "m_truthDetailStr" : "kinematic type bVtx parents children",
                            "m_evtDetailStr" : "pileup",
                            "m_resolvedSysName" : "AntiKt4EMTopoJets_Calib_Algo",
                            "m_boostedSysName" : "finalBoostedJets_Algo",
                            "m_doXhhTagging" : True, # specific for VHqqbb
                            "m_FatJetMassCut" : 50., # specific for VHqqbb
                            "m_TrackJetWP" : "77", # specific for VHqqbb
                            "m_muonContainerName" : "Muons_Selected",
                            "m_muonDetailStr" : "kinematic quality energyLoss isolation trackparams",
                            "m_elecContainerName" : "Electrons_Selected",
                            "m_elecDetailStr" : "kinematic quality PID isolation trackparams",
                            "m_metContainerName" : "RefFinalEM",
                            "m_metDetailStr" : "refEle refMuons refJet softClus softTrk",
                            "m_doResolutionStudy" : False,
                            "m_doResolved" : True,
                            "m_doBoosted" : True,
                            "m_doLeptop" : True,
                            "m_storeLeptonVeto" : True, # specific for VHqqbb analysis
                            "m_storeMETVeto" : True, # specific for VHqqbb analysis
                            "m_storeTruth" : True,
                            } )
|
#!/usr/bin/env python
# pip install virtualenv
# virtualenv exceltrans
# \exceltrans\Scripts\activate
# python -m pip install --upgrade pip
# pip install xlsxwriter
# pip install openpyxl
# pip install pandas
# Openpyxl <https://doitnow-man.tistory.com/159>
# Pandas <https://www.delftstack.com/ko/howto/python-pandas/how-to-add-new-column-to-existing-dataframe-in-python-pandas/>
# curl "https://openapi.naver.com/v1/papago/n2mt" -H "Content-Type: application/x-www-form-urlencoded; charset=UTF-8" -H "X-Naver-Client-Id: msl8RsSf0ro6hhXFT8cR" -H "X-Naver-Client-Secret: ESoXCEZlHI" -d "source=en&target=ko&text=Advanced Threat Detection Security Monitoring Compliance" -v
# curl -v --get 'https://dapi.kakao.com/v2/translation/translate' -d 'src_lang=en' -d 'target_lang=kr' --data-urlencode 'query=Advanced Threat Detection Security Monitoring Compliance' -H 'Authorization: KakaoAK 53436d8595e369d48bcf14aa59cf15f6'
from openpyxl import load_workbook
from openpyxl import Workbook
from itertools import islice
import numpy as np
import pandas as pd
import os
import sys
import json
import time
import urllib.request
def xlsx_read(filename):
    """Print every sheet name and every cell value of the workbook.

    Returns the list of sheet names.
    """
    wb = load_workbook(filename)
    sheet_names = wb.sheetnames
    print(sheet_names)
    for sheet_name in sheet_names:
        print(sheet_name)
        sheet_xlsx = wb[sheet_name]
        print(sheet_xlsx.max_row)
        print(sheet_xlsx.max_column)
        # openpyxl rows/columns are 1-based and max_row/max_column are
        # inclusive bounds; range(1, max) used to skip the last row/column.
        for row in range(1, sheet_xlsx.max_row + 1):
            for col in range(1, sheet_xlsx.max_column + 1):
                print(sheet_xlsx.cell(row, col).value)
    wb.close()
    # `return print(...)` always returned None; return the data instead.
    print(sheet_names)
    return sheet_names
def xlsx_sheet_read(filename, sheetname='Sheet1'):
    """Print the dimensions and every cell value of one sheet.

    Returns the sheet name.
    """
    wb = load_workbook(filename)
    sheet_xlsx = wb[sheetname]
    print(sheet_xlsx.max_row)
    print(sheet_xlsx.max_column)
    # max_row/max_column are inclusive; +1 so the last row/column is visited
    # (the original range(1, max) off-by-one skipped them).
    for row in range(1, sheet_xlsx.max_row + 1):
        for col in range(1, sheet_xlsx.max_column + 1):
            print(sheet_xlsx.cell(row, col).value)
    wb.close()
    # `return print(...)` always returned None; return the name instead.
    print(sheetname)
    return sheetname
def xlsx_sheet_trans(filename, sheetname='Sheet1', cellrange=()):
    """Translate the given cell ranges of a sheet and write them to a new
    sheet named '<sheetname>_한글' inside the same workbook.

    cellrange: iterable of A1-style range strings (e.g. ['B2:B493']); the
    first column of the stacked ranges is copied through untranslated.
    Returns a DataFrame of the untranslated contents (header row = fields).

    Fixes vs. the original: the translation cache stored the *untranslated*
    text (so cache hits wrote the original English), the target column
    alternated between j+1 and j+2 depending on the cache, and `dict`
    shadowed the builtin. The mutable default argument was also replaced.
    """
    wb = load_workbook(filename)
    sheet_xlsx = wb[sheetname]
    print('## SIZE ##' + '#'*18)
    print(sheet_xlsx.max_row)
    print(sheet_xlsx.max_column)
    print('#'*25)
    # Stack the requested ranges horizontally into one cell matrix.
    total_nf = np.empty((0))
    for field in cellrange:
        data = sheet_xlsx[field]  # tuple of cell rows
        if len(total_nf) == 0:
            total_nf = np.array(data)
        else:
            total_nf = np.hstack((total_nf, np.array(data)))
    fields = [[col.value for col in row] for row in total_nf[:1]]
    contents = [[col.value for col in row] for row in total_nf[1:]]
    df = pd.DataFrame(contents, columns=fields)
    ws = wb.create_sheet()
    ws.title = sheetname + '_한글'
    cache = {}  # source text -> translated text (avoids duplicate API calls)
    for i in range(len(total_nf)):
        for j in range(len(total_nf[0])):
            if j == 0:
                # First column is an identifier; copy it through as-is.
                ws.cell(row=i + 1, column=1).value = total_nf[i, 0].value
            else:
                source = total_nf[i, j].value
                if source in cache:
                    translated = cache[source]
                else:
                    translated = kakao_trans(source)
                    cache[source] = translated
                    print(translated)
                    time.sleep(5)  # stay under the API rate limit
                # Consistent target column (the original wrote new
                # translations one column further right than cached ones).
                ws.cell(row=i + 1, column=j + 1).value = translated
    wb.save(filename)
    wb.close()
    return df
def naver_trans(sentence):
    """Translate *sentence* via the Naver translate REST API.

    Returns the translated text (newline-joined rows), or an error string
    on a non-200 response.
    """
    # SECURITY NOTE: credentials are hard-coded; move them to environment
    # variables or a config file before sharing this script.
    client_id = "msl8RsSf0ro6hhXFT8cR"
    client_secret = "ESoXCEZlHI"
    encText = urllib.parse.quote(sentence)
    # NOTE(review): source=ko&target=en requests Korean -> English although
    # the sample curl in the header sends en -> ko — confirm the direction.
    data = "source=ko&target=en&text=" + encText
    url = "https://openapi.naver.com/v1/language/translate"
    request = urllib.request.Request(url)
    request.add_header("X-Naver-Client-Id", client_id)
    request.add_header("X-Naver-Client-Secret", client_secret)
    response = urllib.request.urlopen(request, data=data.encode("utf-8"))
    rescode = response.getcode()
    if rescode == 200:
        response_body = response.read().decode('utf-8')
        sentence = ''
        for row in json.loads(response_body)["translated_text"]:
            sentence = sentence + "\n" + ''.join(row)
    else:
        # BUG FIX: rescode is an int; concatenating it to a str raised
        # TypeError and masked the real HTTP error.
        sentence = "Error Code:" + str(rescode)
    return sentence
def kakao_trans(sentence):
    """Translate *sentence* (English -> Korean) via the Kakao translate API.

    Returns the translated rows, each prefixed with a newline, or an
    error string on a non-200 reply.
    """
    # NOTE(review): the REST API key is hard-coded; move it to env/config
    # and rotate it.
    rest_api_key = '53436d8595e369d48bcf14aa59cf15f6'
    payload = {'src_lang': 'en', 'target_lang': 'kr', 'query': sentence}
    url = "https://dapi.kakao.com/v2/translation/translate"
    request = urllib.request.Request(url)
    request.add_header("Authorization", 'KakaoAK ' + rest_api_key)
    # Supplying a data body makes urlopen issue a POST.
    response = urllib.request.urlopen(
        request, urllib.parse.urlencode(payload).encode('utf-8'))
    rescode = response.getcode()
    if rescode == 200:
        response_body = response.read().decode('utf-8')
        sentence = ''
        for row in json.loads(response_body)["translated_text"]:
            sentence = sentence + "\n" + ''.join(row)
    else:
        # urlopen raises HTTPError for most non-2xx codes, so this branch
        # is rarely reached; str() fixes the original str+int TypeError.
        sentence = "Error Code:" + str(rescode)
    return sentence
if __name__ == "__main__":
    # Translate the selected cell ranges of the ANALYTICS sheet and keep
    # the resulting DataFrame for inspection.
    main_df = xlsx_sheet_trans(
        '.\\data\\ES_시나리오_한글_20200910.xlsx',
        'ANALYTICS',
        ['B2:B493', 'F2:G493', 'P2:P493'],
    )
|
# coding:utf-8
import codecs
from collections import defaultdict, deque
class Vertex(object):
    """Hashable wrapper around a vertex id, so that equal ids collapse to
    a single logical vertex when used as dict/set keys."""

    def __init__(self, value):
        self.value = value

    def __eq__(self, other):
        # Defer to the other operand for foreign types instead of raising
        # AttributeError on `other.value`.
        if not isinstance(other, Vertex):
            return NotImplemented
        return self.value == other.value

    def __str__(self):
        return str(self.value)

    def __repr__(self):
        return 'Vertex(%r)' % self.value

    def __hash__(self):
        # Must agree with __eq__: equal values hash equally, which is what
        # makes identically-valued Vertex instances interchangeable keys.
        return hash(self.value)
class Graph(object):
    """Directed graph stored as adjacency lists, with BFS traversal,
    shortest-path and connected-component helpers."""

    def __init__(self):
        # vertex -> list of successors; defaultdict creates the list on
        # first addEdge for a new vertex.
        self.graph = defaultdict(list)

    def addEdge(self, u, v):
        """Add a directed edge u -> v (add both directions for an
        undirected graph)."""
        self.graph[u].append(v)

    def BFS(self, s, return_dict=False):
        """Breadth-first traversal starting at s.

        Returns the visit order as a list; when return_dict is True,
        returns (order, explored_flags) so that callers such as
        find_connect can merge the visited set.
        """
        explored_list = {ver: False for ver in self.graph}
        explored_list[s] = True
        queue = deque([s])  # deque: O(1) pops from the left vs list.pop(0)
        result = []
        while queue:
            v = queue.popleft()
            result.append(v)
            # .get(v, []) avoids inserting sink vertices into the
            # defaultdict while traversing it.
            for e in self.graph.get(v, []):
                # .get guards vertices that appear only as edge targets
                # (no outgoing edges, hence missing from explored_list);
                # the original raised KeyError on such sinks.
                if not explored_list.get(e, False):
                    explored_list[e] = True
                    queue.append(e)
        if return_dict:
            return result, explored_list
        return result

    def shortest_path(self, start, end):
        """BFS shortest path from start to end.

        Returns 0 when start == end, otherwise (distance, path) where
        path iterates the vertices from start up to — but not including —
        end (see _reverse_path). An unreachable end yields distance 0 and
        an empty path instead of the original KeyError.
        """
        if start == end:
            return 0
        explored_list = {ver: False for ver in self.graph}
        distance_list = {ver: 0 for ver in self.graph}
        explored_list[start] = True
        queue = deque([start])
        pre_vertex = {}  # child -> parent links for path reconstruction
        found = False
        while queue and not found:
            v = queue.popleft()
            for e in self.graph.get(v, []):
                if not explored_list.get(e, False):
                    explored_list[e] = True
                    distance_list[e] = distance_list[v] + 1
                    pre_vertex[e] = v
                    if e == end:
                        # First discovery in BFS is the shortest route;
                        # stop the whole search — the original `break`
                        # only left the inner loop and kept searching.
                        found = True
                        break
                    queue.append(e)
        path = self._reverse_path(pre_vertex, end)
        return distance_list.get(end, 0), path

    def _reverse_path(self, pre_vertex, end):
        """Follow predecessor links back from end; yields the vertices
        from start up to (excluding) end, in forward order."""
        path = []
        while end in pre_vertex:
            end = pre_vertex[end]
            path.append(end)
        return reversed(path)

    def find_connect(self):
        """Group vertices into connected clusters by running BFS from
        every not-yet-visited vertex."""
        explored_list = {ver: False for ver in self.graph}
        clusters = []
        # list() snapshots the keys in case BFS adds entries to the dict.
        for ver in list(self.graph):
            if not explored_list.get(ver, False):
                components, updated_explored_list = self.BFS(ver, True)
                explored_list.update(updated_explored_list)
                clusters.append(components)
        return clusters
def readGraph(path):
    """Parse an adjacency-list file into a Graph.

    Each line holds a vertex id followed by its neighbour ids; the karger
    assignment file is tab-separated, everything else space-separated.
    """
    separator = '\t' if path == 'kargerMinCut.txt' else ' '
    parsed = Graph()
    with codecs.open(path, 'r', 'utf-8') as handle:
        for raw_line in handle:
            fields = raw_line.strip().split(separator)
            source = Vertex(int(fields[0]))
            for neighbour in map(int, fields[1:]):
                parsed.addEdge(source, Vertex(neighbour))
    return parsed
if __name__ == '__main__':
    # Demo driver for the graph utilities above.
    # build the graph; use 'test_graph.txt' to exercise undirected connectivity
    graph = readGraph('kargerMinCut.txt')
    # Full BFS sweep from an arbitrary first vertex to count reachable vertices.
    start_v = list(graph.graph.keys())[0]
    result = graph.BFS(start_v)
    print('The Graph contains {} vertexs.'.format(len(result)))
    # Shortest path between two fixed vertices (assumes ids 3 and 7 exist
    # in the input file — TODO confirm for other inputs).
    start = Vertex(3)
    end = Vertex(7)
    shortest_distance, path = graph.shortest_path(start, end)
    print('The minimum distance from {} to {} is: {}'.format(start, end, shortest_distance))
    print('The path is: {}'.format([str(p) for p in path]))
    # Connected components, one BFS per unvisited vertex.
    clusters = graph.find_connect()
    for i, clu in enumerate(clusters):
        print('The {} cluster is: {}'.format(i, [str(c) for c in clu]))
# -*- coding: utf-8 -*-
import datetime
from ..models import User
from ..constants import Code
class AuthManager(object):
    """Credential checking for login."""

    @classmethod
    def authenticate(cls, username, password):
        """Return the matching, non-deleted User when the username and
        password are valid; otherwise return None.
        """
        try:
            user = User.objects.get(username=username)
        except Exception:
            # TODO: log the lookup failure (unknown username, etc.).
            # Narrowed from a bare `except:` that also swallowed
            # KeyboardInterrupt/SystemExit.
            return None
        if user.check_password(password) and not user.is_deleted:
            # Record the login time.
            # NOTE(review): naive local time — confirm whether UTC is expected.
            now = datetime.datetime.now()
            user.update(set__last_login=now)
            return user
        # TODO: log the failed authentication attempt.
        return None
class UserManager(object):
    """Read/update helpers for user profile data."""

    @staticmethod
    def _detail(u):
        """Serialize the public profile fields of *u* into a dict."""
        return {
            'nickname': u.nickname,
            'create_time': u.create_time,
            'last_login': u.last_login,
            'gender': u.gender,
            'remark': u.remark,
            'avatar': u.avatar,
        }

    @classmethod
    def get_info(cls, user_id=None, user=None):
        """Fetch a user's basic profile info.

        Accepts either an already-loaded *user* object or a *user_id* to
        look up; returns the detail dict, or Code.NO_SUCH_USER when the
        lookup fails.
        """
        if user:
            return cls._detail(user)
        try:
            user = User.objects.get(id=user_id)
        except Exception:
            # TODO: log the lookup failure.
            # Narrowed from a bare `except:`.
            return Code.NO_SUCH_USER
        return cls._detail(user)

    @classmethod
    def update_info(cls, nickname, remark, avatar, gender, user_id=None, user=None):
        """Update a user's basic profile info.

        Returns Code.SUCCESS on success or Code.NO_SUCH_USER when
        *user_id* does not resolve to a user.
        """
        if user is None:
            try:
                user = User.objects.get(id=user_id)
            except Exception:
                # TODO: log the lookup failure.
                return Code.NO_SUCH_USER
        user.update(
            set__nickname=nickname,
            set__remark=remark,
            set__avatar=avatar,
            set__gender=gender,
        )
        return Code.SUCCESS
|
import cv2

# Label drawn over detections and the requested capture geometry.
objectName = "Kalem Ucu"
frameWidth = 280
frameHeight = 360
color = (255, 0, 0)  # BGR: blue rectangles and labels

# Open the default camera and request the desired frame size.
cap = cv2.VideoCapture(0)
cap.set(3, frameWidth)   # 3 == cv2.CAP_PROP_FRAME_WIDTH
cap.set(4, frameHeight)  # 4 == cv2.CAP_PROP_FRAME_HEIGHT

def empty(a):
    """No-op trackbar callback (OpenCV requires a callable)."""
    pass

# Result window with live-tuning trackbars for the detector parameters.
cv2.namedWindow("Sonuc")
cv2.resizeWindow("Sonuc", frameWidth, frameHeight + 100)
cv2.createTrackbar("Scale", "Sonuc", 400, 1000, empty)
cv2.createTrackbar("Neighbor", "Sonuc", 4, 50, empty)

# Haar cascade trained for the target object.
cascade = cv2.CascadeClassifier("cascade.xml")
if cascade.empty():
    # Fail fast with a clear message instead of crashing later inside
    # detectMultiScale when the XML file is missing/unreadable.
    raise IOError("cascade.xml could not be loaded")

try:
    while True:
        # Read one frame; skip processing on a failed grab.
        success, img = cap.read()
        if success:
            gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
            # Map the trackbars onto detector parameters:
            # scale factor in (1.0, 2.0], neighbor threshold as-is.
            scaleVal = 1 + (cv2.getTrackbarPos("Scale", "Sonuc") / 1000)
            if scaleVal <= 1:
                scaleVal = 1.001  # detectMultiScale requires scaleFactor > 1
            neighbor = cv2.getTrackbarPos("Neighbor", "Sonuc")
            # Detection
            rects = cascade.detectMultiScale(gray, scaleVal, neighbor)
            for (x, y, w, h) in rects:
                cv2.rectangle(img, (x, y), (x + w, y + h), color, 3)
                cv2.putText(img, objectName, (x, y - 5),
                            cv2.FONT_HERSHEY_COMPLEX_SMALL, 1, color, 2)
            cv2.imshow("Sonuc", img)
        if cv2.waitKey(1) & 0xFF == ord("q"):
            break
finally:
    # Release the camera and close windows even on exceptions/Ctrl-C;
    # the original leaked the capture handle on exit.
    cap.release()
    cv2.destroyAllWindows()
|
# -*- coding: utf-8 -*-
from square.api_helper import APIHelper
from square.http.api_response import ApiResponse
from square.api.base_api import BaseApi
from square.http.auth.o_auth_2 import OAuth2
class RefundsApi(BaseApi):
    """A Controller to access Endpoints in the square API."""

    def __init__(self, config, call_back=None):
        super(RefundsApi, self).__init__(config, call_back)

    def _build_response(self, _response):
        """Deserialize an HTTP response body and wrap it in an ApiResponse.

        The top-level `errors` list is extracted when the decoded body is
        a JSON object; otherwise errors is None. Shared by every endpoint
        in this controller (the original duplicated this tail three times).
        """
        decoded = APIHelper.json_deserialize(_response.text)
        # isinstance (rather than `type(...) is dict`) also accepts dict
        # subclasses that the deserializer may return.
        _errors = decoded.get('errors') if isinstance(decoded, dict) else None
        return ApiResponse(_response, body=decoded, errors=_errors)

    def list_payment_refunds(self,
                             begin_time=None,
                             end_time=None,
                             sort_order=None,
                             cursor=None,
                             location_id=None,
                             status=None,
                             source_type=None):
        """Does a GET request to /v2/refunds.

        Retrieves a list of refunds for the account making the request.
        Max results per page: 100

        Args:
            begin_time (string, optional): Timestamp for the beginning of
                the requested reporting period, in RFC 3339 format.
                Default: The current time minus one year.
            end_time (string, optional): Timestamp for the end of the
                requested reporting period, in RFC 3339 format. Default:
                The current time.
            sort_order (string, optional): The order in which results are
                listed. - `ASC` - oldest to newest - `DESC` - newest to
                oldest (default).
            cursor (string, optional): A pagination cursor returned by a
                previous call to this endpoint. Provide this to retrieve
                the next set of results for the original query.
            location_id (string, optional): Limit results to the location
                supplied. By default, results are returned for all
                locations associated with the merchant.
            status (string, optional): If provided, only refunds with the
                given status are returned. Default: all statuses.
            source_type (string, optional): If provided, only refunds with
                the given source type are returned. - `CARD` - List
                refunds only for payments where card was specified as
                payment source. Default: all source types.

        Returns:
            ApiResponse: body is ListPaymentRefundsResponse on success.

        Raises:
            APIException: When an error occurs while fetching the data
                from the remote API.
        """
        # Prepare query URL
        _url_path = '/v2/refunds'
        _query_builder = self.config.get_base_uri()
        _query_builder += _url_path
        _query_parameters = {
            'begin_time': begin_time,
            'end_time': end_time,
            'sort_order': sort_order,
            'cursor': cursor,
            'location_id': location_id,
            'status': status,
            'source_type': source_type
        }
        _query_builder = APIHelper.append_url_with_query_parameters(
            _query_builder,
            _query_parameters
        )
        _query_url = APIHelper.clean_url(_query_builder)

        # Prepare headers
        _headers = {
            'accept': 'application/json'
        }

        # Prepare, authenticate and execute request
        _request = self.config.http_client.get(_query_url, headers=_headers)
        OAuth2.apply(self.config, _request)
        _response = self.execute_request(_request)
        return self._build_response(_response)

    def refund_payment(self,
                       body):
        """Does a POST request to /v2/refunds.

        Refunds a payment. You can refund the entire payment amount or a
        portion of it.

        Args:
            body (RefundPaymentRequest): An object containing the fields
                to POST for the request. See the corresponding object
                definition for field details.

        Returns:
            ApiResponse: body is RefundPaymentResponse on success.

        Raises:
            APIException: When an error occurs while fetching the data
                from the remote API.
        """
        # Prepare query URL
        _url_path = '/v2/refunds'
        _query_builder = self.config.get_base_uri()
        _query_builder += _url_path
        _query_url = APIHelper.clean_url(_query_builder)

        # Prepare headers
        _headers = {
            'accept': 'application/json',
            'content-type': 'application/json; charset=utf-8'
        }

        # Prepare, authenticate and execute request (JSON-encoded body)
        _request = self.config.http_client.post(
            _query_url, headers=_headers,
            parameters=APIHelper.json_serialize(body))
        OAuth2.apply(self.config, _request)
        _response = self.execute_request(_request)
        return self._build_response(_response)

    def get_payment_refund(self,
                           refund_id):
        """Does a GET request to /v2/refunds/{refund_id}.

        Retrieves a specific `Refund` using the `refund_id`.

        Args:
            refund_id (string): Unique ID for the desired `PaymentRefund`.

        Returns:
            ApiResponse: body is GetPaymentRefundResponse on success.

        Raises:
            APIException: When an error occurs while fetching the data
                from the remote API.
        """
        # Prepare query URL (substitute the path template parameter)
        _url_path = '/v2/refunds/{refund_id}'
        _url_path = APIHelper.append_url_with_template_parameters(_url_path, {
            'refund_id': refund_id
        })
        _query_builder = self.config.get_base_uri()
        _query_builder += _url_path
        _query_url = APIHelper.clean_url(_query_builder)

        # Prepare headers
        _headers = {
            'accept': 'application/json'
        }

        # Prepare, authenticate and execute request
        _request = self.config.http_client.get(_query_url, headers=_headers)
        OAuth2.apply(self.config, _request)
        _response = self.execute_request(_request)
        return self._build_response(_response)
|
from django.db import models
from eventex.subscriptions.validators import validate_cpf
from django.shortcuts import resolve_url as r
class Subscription(models.Model):
    """Event registration captured by the subscription form.

    Verbose names are Portuguese (pt-BR), matching the site's locale.
    """
    name = models.CharField(max_length=100, verbose_name='nome')
    # CPF: Brazilian individual taxpayer id (11 digits), checked by the
    # custom validate_cpf validator.
    cpf = models.CharField(max_length=11, verbose_name='CPF',
                           validators=[validate_cpf])
    # Contact fields are optional (blank=True allows empty form input).
    email = models.EmailField(verbose_name='e-mail', blank=True)
    phone = models.CharField(max_length=20, verbose_name='telefone',
                             blank=True)
    # Set once on insert, never updated (auto_now_add).
    created_at = models.DateTimeField(auto_now_add=True,
                                      verbose_name='criado em')
    # Payment flag; presumably flipped once the fee is settled — TODO confirm.
    paid = models.BooleanField(default=False, verbose_name='pago')

    class Meta:
        verbose_name = 'inscrição'
        verbose_name_plural = 'inscrições'
        # Newest subscriptions first.
        ordering = ('-created_at',)

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        # Resolve the detail page URL for this subscription by primary key.
        return r('subscriptions:detail', self.pk)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.