index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
6,800 | d7d23b04f6e73db6a0a8730192398941743f32ce | import sqlite3
def connect():
    """Create books.db (if needed) and ensure the bookstore table exists.

    Side effect: creates the SQLite file in the current working directory.
    """
    # Use a distinct local name: the old code shadowed this function with a
    # local variable also called `connect`.
    conn = sqlite3.connect("books.db")
    try:
        conn.execute(
            "CREATE TABLE IF NOT EXISTS bookstore ("
            "id INTEGER PRIMARY KEY, "
            "title TEXT, "
            "author TEXT, "
            "year INTEGER, "
            "isbn INTEGER)"
        )
        conn.commit()
    finally:
        # Close even if table creation raises (old code leaked the handle).
        conn.close()
def insert(title, author, year, isbn):
    """Insert one book row; the `id` column is auto-assigned by SQLite.

    :param title:  book title (TEXT)
    :param author: author name (TEXT)
    :param year:   publication year (INTEGER)
    :param isbn:   ISBN number (INTEGER)
    """
    conn = sqlite3.connect("books.db")
    try:
        # NULL for the primary key lets SQLite pick the next rowid.
        conn.execute(
            "INSERT INTO bookstore VALUES (NULL, ?, ?, ?, ?)",
            (title, author, year, isbn),
        )
        conn.commit()
    finally:
        # Old code leaked the connection if execute() raised.
        conn.close()
def view():
    """Return every row of the bookstore table as a list of tuples."""
    db = sqlite3.connect("books.db")
    rows = db.cursor().execute("SELECT * FROM bookstore").fetchall()
    db.close()
    return rows
def search(title="", author="", year="", isbn=""):
    """Return rows where any given field matches exactly (OR-combined).

    Empty-string defaults match nothing for the integer columns, so
    callers can pass just the field(s) they care about.
    """
    conn = sqlite3.connect("books.db")
    try:
        cursor = conn.cursor()
        # Fix: the old concatenated fragments produced "title=?OR author=?"
        # with no separating spaces, relying on a tokenizer quirk.
        cursor.execute(
            "SELECT * FROM bookstore "
            "WHERE title=? OR author=? OR year=? OR isbn=?",
            (title, author, year, isbn),
        )
        return cursor.fetchall()
    finally:
        conn.close()
def delete(id):
    """Remove the row whose primary key equals *id* (no-op if absent)."""
    db = sqlite3.connect("books.db")
    db.cursor().execute("DELETE FROM bookstore WHERE id=?", (id,))
    db.commit()
    db.close()
def update(id, title, author, year, isbn):
    """Overwrite every field of the row with primary key *id*."""
    conn = sqlite3.connect("books.db")
    try:
        # Fix: the old concatenation produced "isbn=?WHERE id=?" with no
        # space before WHERE, relying on a tokenizer quirk.
        conn.execute(
            "UPDATE bookstore SET title=?, author=?, year=?, isbn=? WHERE id=?",
            (title, author, year, isbn, id),
        )
        conn.commit()
    finally:
        # Close even on failure (old code leaked the handle).
        conn.close()
def close():
    """Always report success.

    NOTE(review): nothing is actually closed here — presumably a stub kept
    for a caller (e.g. a GUI button); confirm against the call site.
    """
    return True
# Ensure the database file and table exist as soon as the module runs.
connect()
# insert("Holy Bible", "Joseph Smith", 1823, 123456)
# print(view())
|
6,801 | 51f7faaad29379daa58875c7b35d9ccf569c8766 | from unittest import TestCase
from ch4.array_to_btree import to_btree
from ch4.is_subtree import is_subtree
class IsSubtreeTest(TestCase):
    """Unit tests for is_subtree() over trees built with to_btree()."""

    def test_should_be_subtree(self):
        outer = to_btree([1, 2, 3, 4, 5, 6])
        inner = to_btree([1, 3, 2])
        self.assertTrue(is_subtree(outer, inner))

    def test_should_not_be_subtree(self):
        outer = to_btree([1, 2, 3, 4, 5, 6])
        inner = to_btree([2, 3, 4])
        self.assertFalse(is_subtree(outer, inner))
|
6,802 | 39ac4e0d543048ea02123baa39b6c8ce7618d16b | # Generated by Django 3.1.6 on 2021-05-06 10:29
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Redefine user.city as a nullable FK to core.cities (CASCADE delete)."""

    dependencies = [
        ('core', '0028_auto_20210506_1020'),
    ]
    operations = [
        migrations.AlterField(
            model_name='user',
            name='city',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='core.cities'),
        ),
    ]
|
6,803 | bdcbb946dadf168149342c651ad03eaf4b748401 |
from mx.handlers import MainHandler
# handler for changing app language
class Locale(MainHandler):
    """Handle requests to change the LOCALE/language for internationalization.

    GET ?locale=xx-YY normalizes the code to "xx_YY", switches the session
    locale, and persists it on the logged-in user if it changed.

    NOTE(review): `LOCALE` and `LocalUser` are not imported in this module —
    they must be injected elsewhere (globals/builtins); confirm.
    """
    def get(self):
        locale = self.request.get('locale')
        if not locale :
            locale = LOCALE  # fall back to the app-wide default
        # normalize to "xx_YY", e.g. "en-us" -> "en_US"
        locale = locale[:2].lower()+'_'+locale[-2:].upper()
        if self.switch_locale(locale):
            # persist the preference on the user record only if it changed
            if self.local_user and self.local_user.locale != locale:
                u = LocalUser.by_id(self.local_user.key.id())
                u.locale = locale
                u.put()
            self.write_json({'done':True})
        else:
            self.write_json({'done':False})
# home page handler
class MainPage(MainHandler):
    """Home page: GET renders the template; POST reads a password field."""
    def get(self):
        self.render('home.html')
    def post(self):
        # NOTE(review): `pw` is read but never used here — the rest of this
        # handler appears to be elsewhere or missing; confirm.
        pw = self.request.get('pw')
|
6,804 | c3e2bd635a7ff558ed56e7fb35e8b10e1c660c88 | # -*- coding: utf-8 -*-
"""
Created on Wed Aug 19 05:29:19 2020
@author: Gaurav
"""
from tensorflow.keras.models import load_model
import cv2
import os
from tensorflow.keras.preprocessing.image import img_to_array
import numpy as np
# Load the trained CIFAR-10 classifier from disk.
model=load_model('E:/AI Application Implementation/trained_model/Classification/Cifar-10/cifar-2.h5')
# CIFAR-10 label names, indexed by the model's argmax output.
class_names = ['airplane', 'automobile', 'bird', 'cat', 'deer',
               'dog', 'frog', 'horse', 'ship', 'truck']
# (single-image sanity check, kept for reference)
# img = cv2.imread("00004_test.png")
# img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
# img = cv2.resize(img, (32, 32))
# img = img_to_array(img)
# img = np.expand_dims(img, axis=0)
# k = model.predict(img)[0]
# k=np.argmax(k)
# print(class_names[k])
# Classify every file in the current working directory.
# NOTE(review): os.listdir() returns *all* entries (including this script and
# any non-image files); cv2.imread yields None for those and cvtColor will
# raise — confirm the directory only holds images.
arr = os.listdir()
result=[]
for i in arr:
    img = cv2.imread(i)
    img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)  # model expects RGB order
    img = cv2.resize(img, (32, 32))             # CIFAR-10 input size
    img = img_to_array(img)
    img = np.expand_dims(img, axis=0)           # add batch dimension
    k = model.predict(img)[0]
    k=np.argmax(k)                              # predicted class id
    result.append(class_names[k])
    print(i)
# NOTE: `dict` shadows the builtin of the same name.
dict={"filename":arr,'label':result}
import pandas as pd
df=pd.DataFrame(dict)
# Write the filename/label pairs as a submission CSV.
df.to_csv(r"E:\AI Application Implementation\trained_model\Classification\Cifar-10\sub.csv",index=False)
# df=pd.read_csv("E:/AI Application Implementation/trained_model/Classification/Cifar-10/sub.csv")
# df.to_csv(r"E:\AI Application Implementation\trained_model\Classification\Cifar-10\sub.csv",index=False)
|
6,805 | 559c665e5544dd864d2f020c967ac8a8665af134 | # coding:utf-8
import requests
import io
from zipfile import ZipFile
if __name__ == '__main__':
    # Download the Tatoeba English-German sentence-pair archive.
    sentence_url = "http://www.manythings.org/anki/deu-eng.zip"
    r = requests.get(sentence_url)
    z = ZipFile(io.BytesIO(r.content))
    file = z.read('deu.txt')
    eng_ger_data = file.decode()
    # NOTE(review): this ASCII round-trip with errors='ignore' silently drops
    # every non-ASCII character (German umlauts included) — confirm intended.
    eng_ger_data = eng_ger_data.encode('ascii', errors='ignore')
    eng_ger_data = eng_ger_data.decode().split('\n')
    # each remaining line is "english<TAB>german"
    eng_ger_data = [x.split('\t') for x in eng_ger_data if len(x) >= 1]
    [english_sentence, german_sentence] = [list(x) for x in zip(*eng_ger_data)]
    print(len(english_sentence))
    print(len(german_sentence))
    print(eng_ger_data[9])
    print(eng_ger_data[10])
    print(german_sentence)
|
6,806 | d3585e7b761fa7b2eeaacf09f84bb6a4abc1cf02 | from django.contrib import admin
from .models import User
# Register your models here.
@admin.register(User)
class AuthorizationUserAdmin(admin.ModelAdmin):
    """Admin page for User; the open_id field is excluded from the form."""
    exclude = ['open_id']
    pass
|
6,807 | c4a13069b5add538589886b5e282d4fc9f2b72ad | from typing import List
import pytest
from raiden import waiting
from raiden.api.python import RaidenAPI
from raiden.raiden_service import RaidenService
from raiden.tests.utils.detect_failure import raise_on_failure
from raiden.tests.utils.network import CHAIN
from raiden.tests.utils.transfer import block_offset_timeout
from raiden.transfer import views
from raiden.utils.typing import BlockTimeout
@raise_on_failure
@pytest.mark.parametrize("channels_per_node", [CHAIN])
@pytest.mark.parametrize("number_of_nodes", [3])
def test_leave_token_network(raiden_network: List[RaidenService], token_addresses):
    """Leaving a token network settles all of the node's channels."""
    registry_address = raiden_network[0].default_registry.address
    token_address = token_addresses[0]
    _, app1, _ = raiden_network
    # every channel state app1 currently has in this token network
    channels = views.list_channelstate_for_tokennetwork(
        chain_state=views.state_from_raiden(app1),
        token_network_registry_address=registry_address,
        token_address=token_address,
    )
    # generous deadline: ten settle-timeouts worth of blocks
    timeout = block_offset_timeout(
        app1, "Channels not settled in time", BlockTimeout(channels[0].settle_timeout * 10)
    )
    with timeout:
        RaidenAPI(app1).token_network_leave(registry_address, token_address)
        # block until every channel identifier collected above has settled
        waiting.wait_for_settle(
            raiden=app1,
            token_network_registry_address=registry_address,
            token_address=token_address,
            channel_ids=[channel.identifier for channel in channels],
            retry_timeout=0.1,
        )
|
6,808 | f44ff7488ae8fc64bc1785fb6cbe80c4cc011fbe | from django.conf.urls.defaults import *
#from wiki.feeds import *
from django.conf import settings
from django.conf.urls.defaults import *
# feeds for wikiPages and wikiNews
"""
feeds = {
'latestpages': LatestPages,
}
sitemaps = {
'wiki': Wiki,
}
"""
# URL routing for the community site.
# NOTE(review): uses the legacy Django (<1.8) `patterns()` helper with
# string-path views — this module targets a very old Django release.
urlpatterns = patterns('',
    # Example:
    # (r'^goimcommunity/', include('goimcommunity.apps.foo.urls.foo')),
    # Uncomment this for admin:
    (r'^admin/', include('django.contrib.admin.urls')),
    (r'^polls/', include('goimcommunity.polls.urls')),
    (r'^league/', include('goimcommunity.leaguesystem.urls')),
    (r'^board/', include('sphene.sphboard.urls')),
    (r'^rewrite/(?P<groupName>\w+)/board/', include('sphene.sphboard.urls'), {'urlPrefix': '' }),
    (r'^rewrite/(?P<groupName>\w+)/wiki/', include('sphene.sphwiki.urls'), {'urlPrefix': '' }),
    (r'^rewrite/\w+/accounts/login/$', 'django.contrib.auth.views.login'),
    (r'^rewrite/\w+/accounts/logout/$', 'django.contrib.auth.views.logout' ),
    (r'^(?P<urlPrefix>test/(?P<groupName>\w+))/board/', include('sphene.sphboard.urls')),
    (r'^(?P<urlPrefix>test/(?P<groupName>\w+))/wiki/', include('sphene.sphwiki.urls')),
    (r'^wiki/', include('sphene.sphwiki.urls'), { 'urlPrefix': 'wiki', 'groupName': 'Sphene' }),
    (r'^static/sphene/(.*)$', 'django.views.static.serve', {'document_root': settings.ROOT_PATH + '/../../communitytools/static/sphene' }),
    (r'^static/(.*)$', 'django.views.static.serve', {'document_root': settings.ROOT_PATH + '/../static' }),
    (r'^site_media/(.*)$', 'django.views.static.serve', {'document_root': '/home/kahless/dev/python/diamanda/media'}), # change it or remove if not on dev server
    (r'^accounts/login/$', 'django.contrib.auth.views.login'),
    (r'^accounts/logout/$','django.contrib.auth.views.logout'),
    (r'^accounts/register/$', 'sphene.community.views.register' ),
    # (r'^forum/', include('myghtyboard.URLconf')), # forum
    # (r'^muh/', 'wiki.views.show_page'), # wiki main page under /
    # (r'^wiki/', include('wiki.URLconf')), # wiki
    # (r'^wiki/feeds/(?P<url>.*)/$', 'django.contrib.syndication.views.feed', {'feed_dict': feeds}), # wiki feeds
    # (r'^wiki/sitemap.xml$', 'django.contrib.sitemaps.views.sitemap', {'sitemaps': sitemaps}), # wikiPages sitemap
)
|
6,809 | 0eca1693caffcd9fe32a8a54ca3a33687763e5ce | __author__ = 'zhaobin022'
class Cmd(object):
    """Empty placeholder for a command abstraction; no behavior defined yet."""
    pass |
6,810 | f6c48731b2a4e0a6f1f93034ee9d11121c2d0427 | #coding=utf-8
import pandas as pd
# Student grades table
df_grade = pd.read_excel("学生成绩表.xlsx")
df_grade.head()
# Student info table
df_sinfo = pd.read_excel("学生信息表.xlsx")
df_sinfo.head()
# Keep only a few columns of the second table (student id, name, gender)
df_sinfo = df_sinfo[["学号", "姓名", "性别"]]
df_sinfo.head()
# Join the two tables on the student-id column ("学号")
df_merge = pd.merge(left=df_grade, right=df_sinfo, left_on="学号", right_on="学号")
df_merge.head()
# Turn the columns into a plain Python list
new_columns = df_merge.columns.to_list()
# Insert in reverse order so that name ("姓名") and gender ("性别") end up
# immediately after the student-id column
for name in ["姓名", "性别"][::-1]:
    new_columns.remove(name)
    new_columns.insert(new_columns.index("学号")+1, name)
df_merge = df_merge.reindex(columns=new_columns)
df_merge.head()
# Write the merged, re-ordered table out
df_merge.to_excel("合并后的数据表.xlsx", index=False)
|
6,811 | ea78f754ffff26bac1e53ed1e842fd79112b8ee7 | import hashlib
def createMD5(str):
    """Return the hexadecimal MD5 digest of *str* (encoded as UTF-8)."""
    digest = hashlib.md5(str.encode(encoding='utf-8'))
    return digest.hexdigest()
6,812 | 295d6a66335491b406f47212064da9fd5fca6eb6 | from sqlitedict import SqliteDict
import sys
import socket
import urllib
import argparse
import zlib, pickle, sqlite3
import random
from datetime import datetime
import time
from urllib.parse import urlparse
import hashlib
import subprocess
import requests
from multiprocessing import Pool
def gz_encode(obj):
    """Pickle *obj*, zlib-compress it, and wrap it for SQLite BLOB storage."""
    pickled = pickle.dumps(obj, pickle.HIGHEST_PROTOCOL)
    return sqlite3.Binary(zlib.compress(pickled))
def gz_decode(obj):
    """Reverse of gz_encode: decompress the blob and unpickle the object."""
    raw = zlib.decompress(bytes(obj))
    return pickle.loads(raw)
if __name__=="__main__":
    # Command-line tool: dump the contents of the crawler's SqliteDict caches.
    parser = argparse.ArgumentParser()
    parser.add_argument('--dnscache', default="dnscache.sqld", help='IP address cache default: %(default)s')
    parser.add_argument('--download', default="pages.sqld", help='Here is where the downloaded pages go: %(default)s')
    parser.add_argument('--r404', default="404.sqld", help='Here is where we remember pages that gave 404 etc: %(default)s')
    args = parser.parse_args()
    #2) Results setup
    # print every cached page: url -> first 30 bytes of its stored content
    result_store = SqliteDict(args.download, encode=gz_encode, decode=gz_decode, autocommit=True)
    for url,cont in result_store.items():
        print(url,cont[:30])
    #3) 404 setup
    # print every remembered failing URL and its recorded status
    r404 = SqliteDict(args.r404, autocommit=True)
    for url,status in r404.items():
        print(url,status)
|
6,813 | e8011e98da342e501070febf421e9f8d0b74d64e | # Generated by Django 3.1.4 on 2020-12-11 17:50
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the Question model (title, auto timestamp, `std` integer,
    optional description, and an `asker` FK to core.Person)."""

    dependencies = [
        ('core', '0016_auto_20201211_2158'),
    ]
    operations = [
        migrations.CreateModel(
            name='Question',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=256)),
                ('date', models.DateTimeField(auto_now_add=True)),
                ('std', models.IntegerField()),
                ('description', models.TextField(blank=True)),
                ('asker', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='questions', to='core.person')),
            ],
        ),
    ]
|
6,814 | a732e7141ffb403ca6c5d9c4204cb96c8e831aab | # Classic solution for merging two sorted arrays/list to a new one.
# (Based on Merge Sort)
class Solution:
    def merge(self, nums1: List[int], m: int, nums2: List[int], n: int) -> None:
        """Merge the first *m* elements of nums1 with the first *n* of nums2.

        Both prefixes must already be sorted ascending.  The merged result
        is written back into nums1 in place — honouring the declared
        ``-> None`` LeetCode contract, which the previous version violated
        by only returning a new list — and the merged list is also returned
        for backward compatibility with callers that used the old return
        value.
        """
        merged = []  # temporary buffer for the merged result
        i = 0        # cursor into nums1[:m]
        j = 0        # cursor into nums2[:n]
        # standard two-pointer merge of the two sorted prefixes
        while i < m and j < n:
            if nums1[i] <= nums2[j]:
                merged.append(nums1[i])
                i += 1
            else:
                merged.append(nums2[j])
                j += 1
        # drain whichever prefix still has elements left
        merged.extend(nums1[i:m])
        merged.extend(nums2[j:n])
        # fix: copy the result into nums1 so the in-place contract holds
        nums1[:m + n] = merged
        return merged
6,815 | f6b2e66379b483c6a573d34d73ae0d10de7315a3 | import numpy as np
from feature.features import Features
class RealWorldFeatures(Features):
    """Weather-based features: temperature and humidity for Hamburg,
    Frankfurt and Berlin."""
    def __init__(self):
        super().__init__('tsagkias/real_world_features')
    def _extract_features(self, df):
        # weather from http://www.dwd.de/DE/leistungen/klimadatendeutschland/klimadatendeutschland.html
        features = [
            df['temp_ham'],
            df['temp_fra'],
            df['temp_ber'],
            df['hum_ham'],
            df['hum_fra'],
            df['hum_ber'],
        ]
        # stack the six series into an (n_samples, 6) feature matrix
        return np.vstack(features).T
|
6,816 | 4245da12eb7f9dd08c863e368efbd0bcf0b8fa04 | from rest_framework.pagination import PageNumberPagination
class QuoteListPagination(PageNumberPagination):
    """DRF page-number pagination serving 30 quotes per page."""
    page_size = 30
|
6,817 | 2e4b47b8c3ac4f187b32f1013a34c3bea354b519 | c_horas=int(input("Ingrese la cantidad de horas trabajadas:"))
v_horas=int(input("Ingrese el valor de cada hora trabajada:"))
sueldo=c_horas*v_horas
print("Su sueldo mensual sera")
print(sueldo)
|
6,818 | 67b060349e986b06a0ee6d8a1afee82d49989c29 |
def sqrt(number):
    """Return the floor of the square root of a non-negative integer.

    Binary search over [1, number - 1].  Uses floor division so the
    midpoint stays an integer on both Python 2 and Python 3 (the old
    "/ 2" becomes float division under Python 3).
    """
    # 0 and 1 are their own floor square roots; the search range below
    # misses them (the old version wrongly returned 0 for sqrt(1)).
    if number < 2:
        return number
    low = 1
    high = number - 1
    while low <= high:
        mid = (low + high) // 2
        if mid * mid == number:
            return mid
        elif mid * mid > number:
            high = mid - 1
        else:
            low = mid + 1
    # the loop ends with `low` just above the answer
    return low - 1
# NOTE(review): Python 2 print statement — a SyntaxError under Python 3.
print sqrt(15)
|
6,819 | 3af91de0b25f575ec9d981d7711c710a7e9695e4 | import datetime
# Print the current local date and time, component by component.
now = datetime.datetime.now()
print(now.year,now.month,now.day,now.hour,now.minute,now.second)
|
6,820 | c1475209d9c9a98d72d7f703e0516aceaeb13163 | import basevcstest
class TestVCSBoxfill(basevcstest.VCSBaseTest):
    """Regression test for boxfill plots on a Robinson projection."""
    def testRobinsonBoxfill(self):
        # This tests if extending the longitude to more than 360 degrees is handled correctly by
        # proj4. See https://github.com/UV-CDAT/uvcdat/issues/1728 for more
        # information.
        clt3 = self.clt('clt', latitude=(-90.0, 90.0), squeeze=1,
                        longitude=(-180, 200.0), time=('1979-01', '1988-12'),)
        gmBoxfill = self.x.getboxfill('a_robinson_boxfill')
        kwargs = {}
        kwargs['cdmsfile'] = self.clt.id
        kwargs['bg'] = self.bg
        self.x.plot(clt3, gmBoxfill, **kwargs)
        # compare the rendered output against the stored baseline image
        self.checkImage("test_vcs_boxfill_robinson_wrap.png")
|
6,821 | fe3e104cf213b21c33a4b5c6e1a61315c4770eda | from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseNotFound
from django.shortcuts import render_to_response
from django.template import RequestContext
from whydjango.casestudies.forms import SubmitCaseStudyForm
def case_study_submission(request, template_name="casestudies/submit.html"):
    """Render and process the case-study submission form.

    On a valid POST the form is saved and the user is redirected to the
    "submit_message" view; otherwise the (bound or empty) form is re-rendered.
    """
    form = SubmitCaseStudyForm(request.POST or None)
    if form.is_valid():
        form.save()
        return HttpResponseRedirect(reverse("submit_message"))
    return render_to_response(template_name, {
        "form": form,
    }, context_instance=RequestContext(request))
|
6,822 | 59a8a4cf4b04a191bfb70fd07668141dbfeda790 | import xlsxwriter
# Convert the comma-separated dump 商品编码.txt into an xlsx workbook.
workbook = xlsxwriter.Workbook('商品编码.xlsx')
worksheet = workbook.add_worksheet()
with open('商品编码.txt', 'rt') as f:
    data = f.read()
data = data.splitlines(True)
count = 1
row = 0
for x in data:
    # skip the first two lines of the input file
    if count < 3:
        count+=1
        continue
    x = x.split(',')
    column = 0
    for e in x:
        # trim a 3-character prefix from the very first cell only
        # (presumably a BOM/marker — TODO confirm against the input file)
        if row==0 and column==0:
            e = e[3:]
        worksheet.write(row,column,e)
        column +=1
    row += 1
workbook.close()
|
6,823 | 44476a32b8ab68820d73955321e57b7d1b608beb | # -*- coding: utf-8 -*-
__author__ = 'jz'
from flask.ext import restful
from flask.ext.restful import reqparse
from scs_app.db_connect import *
parser = reqparse.RequestParser()
parser.add_argument('count', type=str)
class MulActionResource(restful.Resource):
    """REST endpoint for bulk maintenance actions on articles.

    POST /<type> with a form field ``count``.  Only the ``location`` action
    is implemented: tag up to ``count`` untagged Chinese-language articles
    with a location by keyword matching, falling back to the "other" bucket.
    """
    def __init__(self):
        # one DB handle per resource instance
        self.db = get_connection()

    def post(self, type):
        args = parser.parse_args()
        count = args.get('count')
        sids = []
        if type == 'extract':
            # todo multi extract
            pass
        elif type == 'location':
            # Security fix: the old code concatenated the request-supplied
            # `count` straight into the SQL string (SQL injection).  Validate
            # it as an integer and bind it as a parameter, matching the
            # parameterized style used by the other queries in this module.
            limit = int(count)
            articles = self.db.query(
                "select article.sid,title,content from article left join site on article.site_sid=site.sid"
                " where lang='cn' and location_sid IS NULL LIMIT 0,%s", (limit,))
            locations = self.db.query('select sid,name,data from location where name!=%s', (u'其它',))
            other_sid = self.db.get('select sid from location where name=%s', (u'其它',))['sid']
            for article in articles:
                sids.append(article['sid'])
                content = article['title'] + article['content']
                matched = False
                for location in locations:
                    sid = location['sid']
                    # candidate keywords: the location name plus any
                    # '|'-separated aliases stored in its `data` column
                    words = [location['name']]
                    if location['data']:
                        words += location['data'].split('|')
                    for word in words:
                        if word in content:
                            matched = True
                            self.db.update('update article set location_sid=%s where sid=%s', (sid, article['sid']))
                            break
                    if matched:
                        break
                if not matched:
                    # nothing matched: file the article under the "other" bucket
                    self.db.update('update article set location_sid=%s where sid=%s', (other_sid, article['sid']))
            return {
                'count': count,
                'sids': sids
            }
        else:
            return 'no such command', 404
6,824 | ee417c5fff858d26ca60a78dffe4cff503a6f2b5 | from django.shortcuts import render, redirect
from django.http import HttpResponse
from django.contrib.auth.decorators import login_required
from django.contrib.admin.views.decorators import staff_member_required
from lessons.models import Lesson, Question, Response
from usermanage.models import SchoolClass
import json
@login_required
def lessons_overview(request):
    """List the user's classes; staff may POST a new password for a class."""
    if request.method == 'POST':
        # only staff can change a class password
        if request.user.is_staff:
            school_class = SchoolClass.objects.get(id=request.POST['class_id'])
            school_class.password = request.POST['class_pwd']
            school_class.save()
    # staff see the classes they teach; students the ones they attend
    if request.user.is_staff:
        classes = request.user.teachers.select_related()
    else:
        classes = request.user.students.select_related()
    return render(request, 'lessons_overview.html', {
        'classes': classes,
    })
@login_required
def lesson(request, id):
    """Show one lesson; ?grade_class=<id> adds that class for grading mode."""
    lesson = Lesson.objects.get(id=id)
    if request.GET.get('grade_class'):
        school_class = SchoolClass.objects.get(id=request.GET['grade_class'])
    else:
        school_class = None
    return render(request, 'lesson.html', {
        'lesson': lesson,
        'school_class': school_class,
    })
@staff_member_required
def new_lesson(request, id):
    """Create a lesson (with its questions) for the class with pk *id*.

    GET renders the creation form; POST creates the lesson and one
    Question per non-empty entry of questions[].
    """
    school_class = SchoolClass.objects.get(id=id)
    if request.method == 'POST':
        lesson = Lesson(
            name=request.POST['lesson_name'],
            school_class=school_class,
        )
        # Fix: the lesson must be saved (i.e. have a primary key) before
        # rows can be added to its many-to-many `questions` relation —
        # the old code only saved after the .add() calls.
        lesson.save()
        for title in request.POST.getlist('questions[]'):
            question = Question(title=title)
            question.save()
            lesson.questions.add(question)
        return redirect('/')
    return render(request, 'new_lesson.html', {
        'school_class': school_class,
    })
@staff_member_required
def grade_question(request, class_id, id):
    """Show all responses to one question from a class, plus the students
    who have not answered it yet."""
    question = Question.objects.get(id=id)
    school_class = SchoolClass.objects.get(id=class_id)
    students = school_class.students.all()
    responses = Response.objects.filter(
        answerer__in=students,
        question=question
    )
    # collect the full names of students without a response to this question
    unanswered_students = []
    for student in students:
        try:
            Response.objects.get(answerer=student, question=question)
        except Response.DoesNotExist:
            unanswered_students.append(student.get_full_name())
    # render a comma-separated string, or None when everyone answered
    unanswered_students = ', '.join(unanswered_students) if unanswered_students else None
    return render(request, 'question.html', {
        'question': question,
        'responses': responses,
        'unanswered_students': unanswered_students,
    })
def update_questions(questions, lesson_id):
    """Overwrite the lesson's question titles, in order, with the supplied
    non-empty titles; any extra titles become new questions.

    NOTE(review): if fewer titles than existing questions are supplied,
    questions.pop(0) raises IndexError — confirm the form guarantees a
    title per existing question.
    """
    questions = [q for q in questions if len(q) > 0]
    lesson = Lesson.objects.get(id=lesson_id)
    # pair existing questions with the new titles, first-come first-served
    for question in lesson.questions.all():
        question.title = questions.pop(0)
        question.save()
    # remaining titles are brand-new questions for this lesson
    if len(questions) > 0:
        for title in questions:
            new_question = Question(title=title)
            new_question.save()
            lesson.questions.add(new_question)
    lesson.save()
@staff_member_required
def edit_lesson(request, id):
    """GET: render the edit form for a lesson.  POST: either update the
    question titles (action=update) or delete one question (action=delete)."""
    if request.method == 'POST':
        if request.POST['action'] == 'update':
            update_questions(request.POST.getlist('questions[]'), id)
            return HttpResponse(status=200)
        elif request.POST['action'] == 'delete':
            Question.objects.get(id=request.POST['id']).delete()
            return HttpResponse(status=200)
    elif request.method == 'GET':
        lesson = Lesson.objects.get(id=id)
        return render(request, 'edit_lesson.html', {
            'lesson': lesson,
        })
@staff_member_required
def mark_response_seen(request):
    """Flag the response given by POST['id'] as seen by a teacher."""
    response = Response.objects.get(id=request.POST['id'])
    response.seen = True
    response.save()
    return HttpResponse(status=200)
@staff_member_required
def save_comment(request):
    """Save a teacher comment on each response; POST maps response id -> comment."""
    for id in request.POST.keys():
        response = Response.objects.get(id=id)
        response.seen = True  # redundant
        response.comment = request.POST[id]
        response.save()
    return HttpResponse(status=200)
@login_required
def save_responses(request):
    """Create or update the current user's responses for a lesson.

    Every POST key except ``lesson`` is either an existing Response id
    (update its text) or a placeholder key whose suffix after the first
    four characters is a Question id (create a new Response).  Returns a
    JSON mapping of placeholder key -> newly-created response id.
    """
    # Fix: request.POST.items() is a view on Python 3 with no .index()/.pop();
    # materialize it as a list so the code runs on both Python 2 and 3.
    responses = list(request.POST.items())
    lesson = Lesson.objects.get(id=request.POST['lesson'])
    # drop the bookkeeping ('lesson', <id>) pair before iterating
    responses.pop(responses.index(('lesson', request.POST['lesson'])))
    new_response_ids = {}
    for id in responses:
        try:
            # existing response: keys that are valid ids update in place
            response = Response.objects.get(id=id[0], answerer=request.user)
            response.text = request.POST[id[0]]
            response.save()
        except ValueError:
            # non-numeric key: create a response for question id[0][4:],
            # but only when the submitted text is non-empty
            if len(request.POST[id[0]]) > 0:
                response = Response(
                    text=request.POST[id[0]],
                    answerer=request.user,
                    question=Question.objects.get(id=id[0][4:]),
                    lesson=lesson
                )
                response.save()
                new_response_ids[id[0]] = str(response.id)
    return HttpResponse(json.dumps(new_response_ids),
                        content_type='application/json')
|
6,825 | 76d0dd2d6b2d580900283f2623f05dd02a70fcd8 | #!/usr/bin/env python
import numpy as np
import rospy
import tf
from geometry_msgs.msg import PoseStamped, Twist, TwistStamped, Point
from nav_msgs.msg import Odometry
from visualization_msgs.msg import Marker
from bebop_nmpc_solver import BebopNmpcFormulationParam, bebop_nmpc_casadi_solver
# The frame by default is NWU
class BebopNmpcControl:
def __init__(self, mpc_form_param):
# MPC formulation settings
self.mpc_form_param_ = mpc_form_param
# bebop param
self.roll_max_ = self.mpc_form_param_.roll_max
self.pitch_max_ = self.mpc_form_param_.pitch_max
self.vz_max_ = self.mpc_form_param_.vz_max
self.yawrate_max_ = self.mpc_form_param_.yawrate_max
self.K_yaw_ = self.mpc_form_param_.K_yaw
self.bebop_size_ = self.mpc_form_param_.bebop_size
# state and goal pose, size
self.bebop_state_current_ = np.zeros(9)
self.bebop_pose_goal_ = np.array([0, 0, 1.0, 0])
# collision avoidance obs param
self.nobs_ = self.mpc_form_param_.nobs
self.obs_size_ = self.mpc_form_param_.obs_size
self.obs_state_current_ = np.array([0, 0, -1.0, 0, 0, 0])
self.obs_state_prediction_ = np.tile(np.array(self.obs_state_current_), (self.mpc_form_param_.N, 1)).T
# MPC settings
self.mpc_dt_ = self.mpc_form_param_.dt
self.mpc_N_ = self.mpc_form_param_.N
self.mpc_Tf_ = self.mpc_form_param_.Tf
self.mpc_nx_ = self.mpc_form_param_.nx
self.mpc_nu_ = self.mpc_form_param_.nu
self.mpc_ns_ = self.mpc_form_param_.ns
self.mpc_np_ = self.mpc_form_param_.nparam
self.mpc_weights_wp_ = self.mpc_form_param_.mpc_weights_wp
self.mpc_weights_input_ = self.mpc_form_param_.mpc_weights_input
self.mpc_weights_coll_ = self.mpc_form_param_.mpc_weights_coll
self.mpc_weights_slack_ = self.mpc_form_param_.mpc_weights_slack
# MPC variables
self.mpc_nlp_traj_ = np.zeros((self.mpc_nu_ + self.mpc_nx_, self.mpc_N_)).reshape(-1)
self.mpc_nlp_param_ = self.mpc_nx_ + self.mpc_np_ * self.mpc_N_
self.mpc_x_plan_ = np.zeros((self.mpc_nx_, self.mpc_N_))
self.mpc_u_plan_ = np.zeros((self.mpc_nu_, self.mpc_N_))
self.mpc_s_plan_ = np.zeros((self.mpc_ns_, self.mpc_N_))
self.mpc_u_now_ = np.zeros(self.mpc_nu_)
self.mpc_feasible_ = False
self.mpc_success_ = False
# MPC solver
recompile = False
[self.nlp_solver_complied_, self.nlp_lbx_, self.nlp_ubx_, self.nlp_lbg_, self.nlp_ubg_] = \
bebop_nmpc_casadi_solver(self.mpc_form_param_, recompile)
# ROS subscriber
self.odom_sub_ = rospy.Subscriber("/bebop/odom", Odometry, self.set_bebop_odom) # bebop_odom
self.received_first_odom_ = False
self.odom_received_time_ = rospy.Time.now()
self.odom_time_out_ = 0.2
self.pose_sub_ = rospy.Subscriber("/bebop/pose", PoseStamped, self.set_bebop_pose)
self.twist_sub_ = rospy.Subscriber("/bebop/twist", TwistStamped, self.set_bebop_twist)
self.pose_goal_sub_ = rospy.Subscriber("/bebop/pose_goal", PoseStamped, self.set_bebop_pose_goal)
self.received_first_goal_ = False
# ROS publisher
self.bebop_cmd_vel_ = np.array(4)
self.bebop_cmd_vel_pub_ = rospy.Publisher("/bebop/auto_cmd_vel", Twist, queue_size=1)
self.mpc_traj_plan_vis_pub_ = rospy.Publisher("/bebop/mpc/trajectory_plan_vis", Marker, queue_size=1)
def set_bebop_odom(self, odom_msg):
if self.received_first_odom_ is False:
self.received_first_odom_ = True
rospy.loginfo('First odometry received!')
# read data
self.odom_received_time_ = rospy.Time.now()
px = odom_msg.pose.pose.position.x
py = odom_msg.pose.pose.position.y
pz = odom_msg.pose.pose.position.z
vx = odom_msg.twist.twist.linear.x
vy = odom_msg.twist.twist.linear.y
vz = odom_msg.twist.twist.linear.z
rpy = tf.transformations.euler_from_quaternion([odom_msg.pose.pose.orientation.x,
odom_msg.pose.pose.orientation.y,
odom_msg.pose.pose.orientation.z,
odom_msg.pose.pose.orientation.w])
self.bebop_state_current_ = np.array([px, py, pz, vx, vy, vz, rpy[0], rpy[1], rpy[2]])
if self.received_first_goal_ is False: # if not received any goal pose
self.limo_pose_goal_ = np.array([px, py, pz, rpy[2]])
def set_bebop_pose(self, pose_msg):
if self.received_first_odom_ is False:
self.received_first_odom_ = True
rospy.loginfo('First pose received!')
self.odom_received_time_ = rospy.Time.now()
px = pose_msg.pose.position.x
py = pose_msg.pose.position.y
pz = pose_msg.pose.position.z
rpy = tf.transformations.euler_from_quaternion([pose_msg.pose.orientation.x,
pose_msg.pose.orientation.y,
pose_msg.pose.orientation.z,
pose_msg.pose.orientation.w])
self.bebop_state_current_[0:3] = np.array([px, py, pz])
self.bebop_state_current_[6:9] = np.array([rpy[0], rpy[1], rpy[2]])
if self.received_first_goal_ is False: # if not received any goal pose
self.limo_pose_goal_ = np.array([px, py, pz, rpy[2]])
def set_bebop_twist(self, twist_msg):
vx = twist_msg.twist.linear.x
vy = twist_msg.twist.linear.y
vz = twist_msg.twist.linear.z
self.bebop_state_current_[3:6] = np.array([vx, vy, vz])
def set_bebop_pose_goal(self, pose_goal_msg):
if self.received_first_goal_ is False:
self.received_first_goal_ = True
rospy.loginfo('First pose goal received!')
px_goal = pose_goal_msg.pose.position.x
py_goal = pose_goal_msg.pose.position.y
pz_goal = pose_goal_msg.pose.position.z
rpy_goal = tf.transformations.euler_from_quaternion([pose_goal_msg.pose.orientation.x,
pose_goal_msg.pose.orientation.y,
pose_goal_msg.pose.orientation.z,
pose_goal_msg.pose.orientation.w])
self.bebop_pose_goal_ = np.array([px_goal, py_goal, pz_goal, rpy_goal[2]])
def obs_motion_prediction(self):
for iStage in range(0, self.mpc_N_):
self.obs_state_prediction_[0:3] = self.obs_state_current_[0:3] \
+ self.obs_state_current_[3:6] * (iStage+1) * self.mpc_dt_
def reset_nlp_solver(self):
# initialize plan
u_reset = np.zeros(self.mpc_nu_)
x_reset = np.zeros(self.mpc_nx_)
s_reset = np.zeros(self.mpc_ns_)
# x_reset = self.bebop_state_current_[:self.mpc_nx_]
x_reset[0:3] = self.bebop_state_current_[0:3]
x_reset[6:8] = self.bebop_state_current_[6:8]
nlp_plan = np.concatenate((u_reset, x_reset, s_reset), axis=0).reshape(-1)
self.mpc_nlp_traj_ = np.tile(np.array(nlp_plan), self.mpc_N_).reshape(-1)
def initialize_nlp_solver(self):
u_traj_init = np.concatenate((self.mpc_u_plan_[:, 1:], self.mpc_u_plan_[:, -1:]), axis=1)
x_traj_init = np.concatenate((self.mpc_x_plan_[:, 1:], self.mpc_x_plan_[:, -1:]), axis=1)
s_traj_init = np.concatenate((self.mpc_s_plan_[:, 1:], self.mpc_s_plan_[:, -1:]), axis=1)
self.mpc_nlp_traj_ = np.vstack((u_traj_init, x_traj_init, s_traj_init)).reshape(-1)
def set_nlp_params(self):
parameters_all_stage = np.zeros((self.mpc_np_, self.mpc_N_)) # all parameters on each stage
for iStage in range(0, self.mpc_N_):
parameters_all_stage[self.mpc_form_param_.param_index_bebop_pose_start, iStage] = \
np.array([self.bebop_state_current_[0], self.bebop_state_current_[1], self.bebop_state_current_[2],
self.bebop_state_current_[8]])
parameters_all_stage[self.mpc_form_param_.param_index_bebop_pose_goal, iStage] = self.bebop_pose_goal_
parameters_all_stage[self.mpc_form_param_.param_index_bebop_size, iStage] = self.bebop_size_
parameters_all_stage[self.mpc_form_param_.param_index_obs_info, iStage] = np.concatenate((
self.obs_state_prediction_[0:3, iStage], self.obs_size_
))
if iStage == self.mpc_N_ - 1: # terminal weights
parameters_all_stage[self.mpc_form_param_.param_index_mpc_weights, iStage] = np.hstack(
(self.mpc_weights_wp_, 0.1 * self.mpc_weights_input_,
self.mpc_weights_coll_, self.mpc_weights_slack_)
)
else:
parameters_all_stage[self.mpc_form_param_.param_index_mpc_weights, iStage] = np.hstack(
(0.05 * self.mpc_weights_wp_, self.mpc_weights_input_,
self.mpc_weights_coll_, self.mpc_weights_slack_)
)
# set parameters
self.mpc_nlp_param_ = np.hstack((self.bebop_state_current_[:self.mpc_nx_],
np.transpose(parameters_all_stage).reshape(-1)))
def run_nlp_solver(self):
# initialize solver
if self.mpc_feasible_ is True:
self.initialize_nlp_solver()
else:
self.reset_nlp_solver()
# set solver params
self.set_nlp_params()
# call the solver
time_before_solver = rospy.get_rostime()
nlp_sol = self.nlp_solver_complied_(x0=self.mpc_nlp_traj_,
p=self.mpc_nlp_param_,
lbx=self.nlp_lbx_,
ubx=self.nlp_ubx_,
lbg=self.nlp_lbg_,
ubg=self.nlp_ubg_)
# deal with infeasibility
if self.nlp_solver_complied_.stats()['success'] is False: # if infeasible
self.mpc_feasible_ = False
self.mpc_success_ = False
rospy.logwarn("MPC infeasible!")
else:
self.mpc_feasible_ = True
self.mpc_success_ = True
solver_time = (rospy.get_rostime() - time_before_solver).to_sec() * 1000.0
solver_iter = self.nlp_solver_complied_.stats()['iter_count']
rospy.loginfo('MPC feasible, iter: %d, computation time: %.1f ms.', solver_iter, solver_time)
# obtain solution
traj_opt = nlp_sol['x'].reshape((self.mpc_nu_ + self.mpc_nx_ + self.mpc_ns_, self.mpc_N_))
self.mpc_u_plan_ = np.array(traj_opt[:self.mpc_nu_, :])
self.mpc_x_plan_ = np.array(traj_opt[self.mpc_nu_:self.mpc_nu_+self.mpc_nx_, :])
self.mpc_s_plan_ = np.array(traj_opt[self.mpc_nu_+self.mpc_nx_:, :])
self.mpc_u_now_ = self.mpc_u_plan_[:, 0]
def calculate_bebop_cmd_vel(self):
# if odom received
time_now = rospy.Time.now()
if (time_now - self.odom_received_time_).to_sec() > self.odom_time_out_:
rospy.logwarn('Odometry time out! Will try to make the MAV hover.')
self.bebop_pose_goal_ = np.concatenate((self.bebop_state_current_[0:3], self.bebop_state_current_[8:9]))
else:
# run the nlp solver
self.run_nlp_solver()
# control commands
if self.mpc_success_ is True:
roll_cmd = self.mpc_u_now_[0]
pitch_cmd = self.mpc_u_now_[1]
vz_cmd = self.mpc_u_now_[2]
else:
rospy.logwarn('MPC failure! Default commands sent.')
roll_cmd = 0.0
pitch_cmd = 0.0
vz_cmd = 0.0
# yaw control
yaw_now = self.bebop_state_current_[8]
yaw_ref = self.bebop_pose_goal_[3]
yaw_error = yaw_ref - yaw_now
while np.abs(yaw_error) > np.pi:
if yaw_error > 0.0:
yaw_error = yaw_error - 2.0 * np.pi
else:
yaw_error = yaw_error + 2.0 * np.pi
yawrate_cmd = self.K_yaw_ * yaw_error
yawrate_cmd = np.clip(yawrate_cmd, -self.yawrate_max_, self.yawrate_max_)
# obtained command
self.bebop_cmd_vel_ = np.array([roll_cmd, pitch_cmd, vz_cmd, yawrate_cmd])
def pub_bebop_cmd_vel(self):
try:
cmd_vel_msg = Twist()
cmd_vel_msg.linear.x = self.bebop_cmd_vel_[1] / self.pitch_max_ # pitch to move along x
cmd_vel_msg.linear.y = -self.bebop_cmd_vel_[0] / self.roll_max_ # roll to move along y
cmd_vel_msg.linear.z = self.bebop_cmd_vel_[2] / self.vz_max_
cmd_vel_msg.angular.z = self.bebop_cmd_vel_[3] / self.yawrate_max_
self.bebop_cmd_vel_pub_.publish(cmd_vel_msg)
except:
rospy.logwarn('Bebop cmd_vel command not published!')
def pub_mpc_traj_plan_vis(self):
    """Publish the planned MPC position trajectory as an RViz POINTS marker.

    Reads self.mpc_x_plan_ (state plan, one column per horizon stage) and
    publishes one red point per stage in the 'map' frame.
    """
    try:
        marker_msg = Marker()
        marker_msg.header.frame_id = "map"
        marker_msg.header.stamp = rospy.Time.now()
        marker_msg.type = 8    # Marker.POINTS
        marker_msg.action = 0  # Marker.ADD
        # point size (m)
        marker_msg.scale.x = 0.2
        marker_msg.scale.y = 0.2
        marker_msg.scale.z = 0.2
        # opaque red
        marker_msg.color.r = 1.0
        marker_msg.color.g = 0.0
        marker_msg.color.b = 0.0
        marker_msg.color.a = 1.0
        # identity pose: the points below are given directly in map frame
        marker_msg.pose.position.x = 0.0
        marker_msg.pose.position.y = 0.0
        marker_msg.pose.position.z = 0.0
        marker_msg.pose.orientation.x = 0
        marker_msg.pose.orientation.y = 0
        marker_msg.pose.orientation.z = 0
        marker_msg.pose.orientation.w = 1.0
        # one point per horizon stage (state rows 0..2 are x, y, z)
        marker_msg.points = [
            Point(self.mpc_x_plan_[0, i], self.mpc_x_plan_[1, i], self.mpc_x_plan_[2, i])
            for i in range(self.mpc_N_)
        ]
        self.mpc_traj_plan_vis_pub_.publish(marker_msg)
    except Exception as exc:
        # Catch Exception (not a bare except) so the cause is visible and
        # SystemExit/KeyboardInterrupt still propagate.
        rospy.logwarn("MPC trajectory plan not published! (%s)", exc)
def bebop_nmpc_control():
    """ROS node entry point: run the NMPC control loop at 50 Hz.

    Waits until the first odometry and goal messages have arrived, then
    on every cycle computes the command, publishes it, and publishes the
    planned-trajectory visualization.
    """
    # create a node
    rospy.loginfo("Starting Bebop NMPC Control...")
    rospy.init_node("bebop_nmpc_control_node", anonymous=False)
    hz = 50  # control loop frequency (Hz)
    rate = rospy.Rate(hz)
    rospy.sleep(1.0)  # give publishers/subscribers time to connect
    # MPC problem formulation parameters
    mpc_form_param = BebopNmpcFormulationParam()
    # controller object (sets up its own subscribers/publishers)
    bebop_nmpc = BebopNmpcControl(mpc_form_param)
    while not rospy.is_shutdown():
        if bebop_nmpc.received_first_odom_ is False:
            rospy.logwarn('Waiting for first Odometry!')
        elif bebop_nmpc.received_first_goal_ is False:
            rospy.logwarn('Waiting for first goal pose!')
        else:
            bebop_nmpc.calculate_bebop_cmd_vel()
            bebop_nmpc.pub_bebop_cmd_vel()
            bebop_nmpc.pub_mpc_traj_plan_vis()
        rate.sleep()
if __name__ == "__main__":
bebop_nmpc_control()
|
6,826 | 624212a1d73ff3a3b3092ffa27912a6ae25a2484 | from django.contrib import admin
from basic_app.models import UserProfileInfo
# Expose UserProfileInfo in the Django admin with the default ModelAdmin.
admin.site.register(UserProfileInfo)
# we do not need to register User() default form since it comes
# with the default admin site in Django itself.
|
6,827 | e3603d90bd5aa5de40baa27b62acf6f71eff9f6c | # -*- coding: utf-8 -*-
serviceType = "server"
serviceDesc = _({"en": "Icecream Daemon",
"tr": "Icecream Servisi"})
from comar.service import *
@synchronized
def start():
    """Start the icecream daemon via COMAR (detached, max 5 jobs)."""
    startService(command="/opt/icecream/sbin/iceccd",
                 args="-d -m 5 > /dev/null",
                 pidfile="/var/run/iceccd.pid",
                 donotify=True)
@synchronized
def stop():
    """Stop the icecream daemon identified by its pid file."""
    stopService(pidfile="/var/run/iceccd.pid",
                donotify=True)
def status():
    """Report whether the daemon is running (per COMAR's isServiceRunning)."""
    return isServiceRunning("/var/run/iceccd.pid")
|
6,828 | 20ccdd319bfbbb4f17e8518eb60d125112c05d8e | from django.contrib import admin
from xchanger.models import Currency, Rates, UpdateInfo
class CurrencyAdmin(admin.ModelAdmin):
    # Default admin presentation for Currency; kept as a named class so
    # options can be added later without changing the registration below.
    pass
class UpdAdmin(admin.ModelAdmin):
    # Default admin presentation for UpdateInfo.
    pass
class RatesAdmin(admin.ModelAdmin):
    # Allow filtering rate rows by currency code and by update batch.
    list_filter = ['c_code_id', 'upd_id']
# Register each model with its admin class.
admin.site.register(Currency, CurrencyAdmin)
admin.site.register(UpdateInfo, UpdAdmin)
admin.site.register(Rates, RatesAdmin)
|
6,829 | 35e66e5e154f5cd70f187a1cde33cef71102e1a6 | import random
import cv2
img = cv2.imread('assets/logo.jpg', -1)  # -1 = IMREAD_UNCHANGED
print(img.shape) #3 channels, bgr
#look at the 257. row and pixel 400 --> has bgr values: [41 98 243]
print(img[257][400])
'''
# manipulate the first 100 rows, all columns, and randomize the 3 pixel values
# (rows, colums, pixels) where pixels: b,g,r
for i in range(100): #first 100 rows
    for j in range(img.shape[1]): #all the colums
        img[i][j] = [random.randint(0,255),random.randint(0,255),random.randint(0,255)]
cv2.imshow('modifiedImage', img)
cv2.waitKey(0)
cv2.destroyAllWindows()
'''
#copy one part of the image and paste it somewhere else
#take the pixels from rows 500 to 700 and of those the columns 600:900
tag = img[500:700, 600:900] #part of the picture (a 200x300 view)
#paste this on another location in the image; needs same dimension/size
img[100:300, 650:950] = tag  # destination slice must match tag's 200x300 shape
cv2.imshow('Image', img)
cv2.waitKey(0)
cv2.destroyAllWindows()
6,830 | b4ce95d754dd0d7c1b91fa0348de0194a4397aca | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Simple image classification with Inception.
Run image classification with Inception trained on ImageNet 2012 Challenge data
set.
This program creates a graph from a saved GraphDef protocol buffer,
and runs inference on an input JPEG image. It outputs human readable
strings of the top 5 predictions along with their probabilities.
Change the --image_file argument to any jpg image to compute a
classification of that image.
Please see the tutorial and website for a detailed description of how
to use this script to perform image recognition.
https://tensorflow.org/tutorials/image_recognition/
"""
import os.path
import re
import sys
import tarfile
#import argparse
from collections import namedtuple
import cStringIO as StringIO
import logging
import cPickle as pickle
import os
import tempfile
from contextlib import contextmanager
import time
# pylint: disable=unused-import,g-bad-import-order
import tensorflow.python.platform
from six.moves import urllib
import numpy as np
import tensorflow as tf
import redis
import requests
from wand.image import Image
# pylint: enable=unused-import,g-bad-import-order
from ast import literal_eval as make_tuple
from tensorflow.python.platform import gfile
FLAGS = tf.app.flags.FLAGS
# classify_image_graph_def.pb:
# Binary representation of the GraphDef protocol buffer.
# imagenet_synset_to_human_label_map.txt:
# Map from synset ID to a human readable string.
# imagenet_2012_challenge_label_map_proto.pbtxt:
# Text representation of a protocol buffer mapping a label to synset ID.
# this is the same as namedtuple
tf.app.flags.DEFINE_string(
'model_dir', '/tmp/imagenet',
"""Path to classify_image_graph_def.pb, """
"""imagenet_synset_to_human_label_map.txt, and """
"""imagenet_2012_challenge_label_map_proto.pbtxt.""")
tf.app.flags.DEFINE_string('image_file', '',
"""Absolute path to image file.""")
tf.app.flags.DEFINE_integer('num_top_predictions', 5,
"""Display this many predictions.""")
tf.app.flags.DEFINE_string('redis_server', '',
"""Redis server address""")
tf.app.flags.DEFINE_integer('redis_port', 6379,
"""Redis server port""")
tf.app.flags.DEFINE_string('redis_queue', 'classify',
"""Redis queue to read images from""")
Task = namedtuple('Task', 'queue value')
Specs = namedtuple('Specs', 'group path ad_id')
Result = namedtuple('Result', 'OK predictions computation_time ad_id path')
# pylint: disable=line-too-long
DATA_URL = 'http://download.tensorflow.org/models/image/imagenet/inception-2015-12-05.tgz'
# pylint: enable=line-too-long
logging.getLogger().setLevel(logging.INFO)
logging.basicConfig(format='%(asctime)s %(message)s')
class NodeLookup(object):
  """Converts integer node ID's to human readable labels.

  Built from two files shipped with the Inception model:
  the label-map proto (node ID -> synset UID) and the synset-to-human
  map (synset UID -> readable name). Python 2 code (uses iteritems).
  """

  def __init__(self,
               label_lookup_path=None,
               uid_lookup_path=None):
    # Fall back to the files inside FLAGS.model_dir when no explicit
    # paths are given.
    if not label_lookup_path:
      label_lookup_path = os.path.join(
          FLAGS.model_dir, 'imagenet_2012_challenge_label_map_proto.pbtxt')
    if not uid_lookup_path:
      uid_lookup_path = os.path.join(
          FLAGS.model_dir, 'imagenet_synset_to_human_label_map.txt')
    self.node_lookup = self.load(label_lookup_path, uid_lookup_path)

  def load(self, label_lookup_path, uid_lookup_path):
    """Loads a human readable English name for each softmax node.

    Args:
      label_lookup_path: string UID to integer node ID.
      uid_lookup_path: string UID to human-readable string.
    Returns:
      dict from integer node ID to human-readable string.
    """
    if not gfile.Exists(uid_lookup_path):
      tf.logging.fatal('File does not exist %s', uid_lookup_path)
    if not gfile.Exists(label_lookup_path):
      tf.logging.fatal('File does not exist %s', label_lookup_path)
    # Loads mapping from string UID to human-readable string.
    # NOTE(review): the regex yields the UID as match 0 and the readable
    # name as match 2 for the expected file format — verify if the model
    # files ever change.
    proto_as_ascii_lines = gfile.GFile(uid_lookup_path).readlines()
    uid_to_human = {}
    p = re.compile(r'[n\d]*[ \S,]*')
    for line in proto_as_ascii_lines:
      parsed_items = p.findall(line)
      uid = parsed_items[0]
      human_string = parsed_items[2]
      uid_to_human[uid] = human_string
    # Loads mapping from string UID to integer node ID.
    node_id_to_uid = {}
    proto_as_ascii = gfile.GFile(label_lookup_path).readlines()
    for line in proto_as_ascii:
      if line.startswith(' target_class:'):
        target_class = int(line.split(': ')[1])
      if line.startswith(' target_class_string:'):
        # slice strips the surrounding quote and trailing quote+newline
        target_class_string = line.split(': ')[1]
        node_id_to_uid[target_class] = target_class_string[1:-2]
    # Loads the final mapping of integer node ID to human-readable string
    node_id_to_name = {}
    for key, val in node_id_to_uid.iteritems():  # Python 2 dict API
      if val not in uid_to_human:
        tf.logging.fatal('Failed to locate: %s', val)
      name = uid_to_human[val]
      node_id_to_name[key] = name
    return node_id_to_name

  def id_to_string(self, node_id):
    """Return the readable label for *node_id*, or '' if unknown."""
    if node_id not in self.node_lookup:
      return ''
    return self.node_lookup[node_id]
def create_graph():
  """Create the Inception graph in the default TF graph from the saved
  GraphDef protobuf in FLAGS.model_dir."""
  # Creates graph from saved graph_def.pb.
  # NOTE(review): the GraphDef is a binary protobuf, so mode 'rb' would be
  # more correct than 'r'; 'r' only works here because this is Python 2
  # on a POSIX platform — confirm before porting.
  with gfile.FastGFile(os.path.join(
      FLAGS.model_dir, 'classify_image_graph_def.pb'), 'r') as f:
    graph_def = tf.GraphDef()
    graph_def.ParseFromString(f.read())
    _ = tf.import_graph_def(graph_def, name='')
@contextmanager
def convert_to_jpg(data):
  """Yield the path of a temporary JPEG file holding *data*.

  If the input image is not already JPEG it is converted with Wand.
  The temporary file is always deleted when the context exits — the
  original version skipped os.remove() whenever the conversion or the
  caller's with-body raised, leaking one temp file per failure.
  """
  tmp = tempfile.NamedTemporaryFile(delete=False)
  try:
    with Image(file=StringIO.StringIO(data)) as img:
      if img.format != 'JPEG':
        logging.info('Converting {} to JPEG.'.format(img.format))
        img.format = 'JPEG'
      img.save(tmp)
    tmp.close()
    yield tmp.name
  finally:
    tmp.close()  # idempotent; ensures the handle is closed before unlink
    os.remove(tmp.name)
def classify_images():
  """Worker loop: pop image tasks from Redis, classify, publish results.

  Blocks forever on FLAGS.redis_queue; each task value is a pickled
  Specs (group, path, ad_id). Top-k predictions are written to Redis
  hashes and published on the 'latest' and 'classify' channels.
  """
  create_graph()
  node_lookup = NodeLookup()
  # 4 instances running in parallel on g2.2xlarge seems to be the magic number.
  # If running more instances, memcpy errors will be thrown after some time.
  gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=1./4)
  with tf.Session(config=tf.ConfigProto(gpu_options=gpu_options)) as sess:
    r_server = redis.StrictRedis(FLAGS.redis_server, FLAGS.redis_port)
    softmax_tensor = sess.graph.get_tensor_by_name('softmax:0')
    while True:
      # brpop blocks until a task arrives: (queue_name, pickled payload)
      task = Task(*r_server.brpop(FLAGS.redis_queue))
      specs = Specs(**pickle.loads(task.value))
      logging.info(specs)
      try:
        result_key = 'archive:{}:{}'.format(specs.group, specs.path)
        kaidee_result_key = ''
        # secondary key: URL path with scheme and host stripped
        full_url = specs.path.split('//')
        url_path = len(full_url)>1 and full_url[1] or full_url[0]
        kaidee_result_key = url_path.split('/', 1)[1]
        response = requests.get(specs.path, timeout=10)
        with convert_to_jpg(response.content) as jpg:
          image_data = gfile.FastGFile(jpg).read()
        starttime = time.time()
        predictions = sess.run(softmax_tensor,{'DecodeJpeg/contents:0': image_data})
        endtime = time.time()
        predictions = np.squeeze(predictions)
        # indices of the top-k scores, highest first
        top_k = predictions.argsort()[-FLAGS.num_top_predictions:][::-1]
        result = Result(True,
                        [ (node_lookup.id_to_string(node_id), predictions[node_id]) for node_id in top_k ],
                        endtime - starttime,
                        specs.ad_id, specs.path)
        r_server.hmset(result_key, result._asdict())
        r_server.hmset(kaidee_result_key, result._asdict())
        # sorted set per category, scored by the top prediction's confidence
        r_server.zadd('archive:{}:category:{}'.format(specs.group, result.predictions[0][0]),
                      result.predictions[0][1], specs.path)
        # The publishing was only added since AWS ElastiCache does not support subscribing to keyspace notifications.
        r_server.publish('latest', pickle.dumps({'path': specs.path, 'group': specs.group,
                                                 'category': result.predictions[0][0], 'value': float(result.predictions[0][1])}))
        # Kaidee channel
        predictions_dict = dict((x, y) for x, y in result.predictions)
        r_server.publish('classify', pickle.dumps({'path': specs.path, 'group': specs.group,
                                                   'predictions': predictions_dict, 'ad_id': specs.ad_id}))
        logging.info(result)
      except Exception as e:
        # best-effort: mark the task as failed and keep the worker alive
        logging.error('Something went wrong when classifying the image: {}'.format(e))
        r_server.hmset(result_key, {'OK': False})
def maybe_download_and_extract():
  """Download and extract the model tar file into FLAGS.model_dir.

  Skips the download if the tarball is already present; always
  re-extracts it.
  """
  dest_directory = FLAGS.model_dir
  if not os.path.exists(dest_directory):
    os.makedirs(dest_directory)
  filename = DATA_URL.split('/')[-1]
  filepath = os.path.join(dest_directory, filename)
  if not os.path.exists(filepath):
    def _progress(count, block_size, total_size):
      # urlretrieve reporthook: print an in-place percentage
      sys.stdout.write('\r>> Downloading %s %.1f%%' % (
          filename, float(count * block_size) / float(total_size) * 100.0))
      sys.stdout.flush()
    filepath, _ = urllib.request.urlretrieve(DATA_URL, filepath,
                                             reporthook=_progress)
    print()
    statinfo = os.stat(filepath)
    print('Succesfully downloaded', filename, statinfo.st_size, 'bytes.')
  # NOTE(review): extractall trusts archive member paths; acceptable for
  # this known Google-hosted tarball, unsafe for untrusted archives.
  tarfile.open(filepath, 'r:gz').extractall(dest_directory)
def main(_):
  """tf.app.run() entry point: fetch the model, then serve forever."""
  maybe_download_and_extract()
  classify_images()
if __name__ == '__main__':
tf.app.run()
|
6,831 | 5c001303962315afe2512eb307376f6f7a883cf9 | # inserting logical unit ids for splitting texts into logical chunks
import re
import os
splitter = "#META#Header#End#"
def logical_units(file):
    """Insert zero-padded logical-unit IDs into an OpenITI text file.

    The input must contain the metadata splitter. Every milestone marker
    of the form '\\n#...#' that is not followed by a Page token gets a
    running Arabic-token count appended before its final '#', and the
    annotated text is written to '<file>_logical'. Files that already
    contain '\\n#<digits>#' IDs are left untouched.
    """
    # Arabic-script tokens only (letters, digits, diacritics).
    ar_ra = re.compile("^[ذ١٢٣٤٥٦٧٨٩٠ّـضصثقفغعهخحجدًٌَُلإإشسيبلاتنمكطٍِلأأـئءؤرلاىةوزظْلآآ]+$")
    with open(file, "r", encoding="utf8") as f1:
        book = f1.read()
    # splitter test
    if splitter in book:
        # logical units already present?
        log_ids = re.findall("\n#\d+#", book)
        if len(log_ids) > 0:
            print("\tthe text already have %d logical units of this length" % len(log_ids))
            pass
        else:
            # insert logical unit ids
            new_data = []
            head = book.split(splitter)[0]
            text = book.split(splitter)[1]
            token_count = 0
            # alternating word / non-word chunks, so joining reproduces
            # the text exactly
            data = re.findall(r"\w+|\W+", text)
            word_len = len(str(len(data)))  # width used for zero-padding
            data_len = len(data)
            for i in range(0, data_len):
                if "\n#" in data[i]:
                    if "Page" in data[i + 1]:# or ar_token_cnt(ar_ra, data[i + 1]) <= 0:
                        # page markers keep their original form
                        new_data.append(data[i])
                    else:
                        # splice the padded running count before the
                        # marker's last '#'
                        last = data[i].rfind("#")
                        token_cnt_str = str(token_count + 1)
                        if len(token_cnt_str) < word_len:
                            tmp_cnt = token_cnt_str.zfill(word_len)
                        else:
                            tmp_cnt = token_cnt_str
                        tmp = data[i][:last] + "#" + tmp_cnt + data[i][last:]
                        new_data.append(tmp)
                elif ar_token_cnt(ar_ra, data[i]):
                    # Arabic token: advances the running count
                    token_count += 1
                    new_data.append(data[i])
                else:
                    new_data.append(data[i])
            log_text = "".join(new_data)
            log_text = head + splitter + log_text
            with open(file + "_logical", "w", encoding="utf8") as f:
                f.write(log_text)
    else:
        print("The file is missing the splitter!")
        print(file)
def ar_token_cnt(ar_ra, text):
    """Count the chunks of *text* that match the compiled pattern *ar_ra*.

    The text is split into alternating word / non-word chunks and each
    chunk is tested against the pattern.
    """
    chunks = re.findall(r"\w+|\W+", text)
    matching = [chunk for chunk in chunks if ar_ra.search(chunk) is not None]
    return len(matching)
# process all texts in OpenITI
def process_all(folder):
    """Walk *folder* and run logical_units() on every OpenITI text file.

    Directories in *exclude* are pruned from the walk; files are matched
    by the OpenITI URI pattern '<digits><author>.<book>.<version>-ara<n>'.
    """
    exclude = (["OpenITI.github.io", "Annotation", "_maintenance", "i.mech"])
    for root, dirs, files in os.walk(folder):
        # print("root: ",root)
        # prune excluded directories in place so os.walk skips them
        dirs[:] = [d for d in dirs if d not in exclude]
        # print("dir: ",dirs)
        for file in files:
            if re.search("^\d{4}\w+\.\w+\.\w+-ara\d$", file):
                logical_units(os.path.join(root, file))
                # return
                # input()
# /media/rostam/Seagate Backup Plus Drive
# process_all("/home/rostam/projs/KITAB/test")
# print("Done!")
|
6,832 | 3667651697ac1c093d48fe2c4baa4b4dbdf20f8a | """
Unpacks and preprocesses all of the data from the tarball of partial data,
which includes the flats and dark frames.
"""
import tools.unpack
import util.files
import util.dark
import util.flat
def main():
    """Run the preprocessing pipeline in order: unpack the tarball,
    organize the files, then build the dark and flat frames."""
    tools.unpack.main()
    util.files.main()
    util.dark.main()
    util.flat.main()
if __name__ == '__main__':
main()
|
6,833 | 3838df627318b25767738da912f44e494cef40f3 | #!/bin/python3
import sys
def fibonacciModified(t1, t2, n):
    """Return the n-th term (1-indexed) of the modified Fibonacci series
    t(i+2) = t(i) + t(i+1)**2, given the first two terms t1 and t2.

    Bug fix: the original raised UnboundLocalError for n <= 2 because
    the loop body never executed and ti_2 was never assigned. Now n == 1
    returns t1 and n == 2 returns t2.
    """
    if n <= 1:
        return t1
    if n == 2:
        return t2
    prev, curr = t1, t2
    for _ in range(2, n):
        prev, curr = curr, prev + curr ** 2
    return curr
if __name__ == "__main__":
t1, t2, n = input().strip().split(' ')
t1, t2, n = [int(t1), int(t2), int(n)]
result = fibonacciModified(t1, t2, n)
print(result)
|
6,834 | 472c8b0649e29c31b144607080938793e5f1293e | """Module to convert a lanelet UTM representation to OSM."""
__author__ = "Benjamin Orthen"
__copyright__ = "TUM Cyber-Physical Systems Group"
__credits__ = ["Priority Program SPP 1835 Cooperative Interacting Automobiles"]
__version__ = "1.1.2"
__maintainer__ = "Benjamin Orthen"
__email__ = "commonroad-i06@in.tum.de"
__status__ = "Released"
from typing import List, Tuple
import numpy as np
from pyproj import Proj
from commonroad.scenario.lanelet import Lanelet
from opendrive2lanelet.osm.osm import OSM, Node, Way, WayRelation, DEFAULT_PROJ_STRING
ways_are_equal_tolerance = 0.001
class L2OSMConverter:
    """Class to convert CommonRoad lanelet to the OSM representation."""

    def __init__(self, proj_string):
        # Projection mapping cartesian lanelet coordinates to lon/lat.
        if proj_string:
            self.proj = Proj(proj_string)
        else:
            self.proj = Proj(DEFAULT_PROJ_STRING)
        self.osm = None
        # OSM ids are handed out as negative numbers, the convention for
        # locally created, not-yet-uploaded elements.
        self._id_count = -1
        # Caches keyed by lanelet id so adjacent/successive lanelets can
        # share nodes and ways instead of duplicating geometry.
        self.first_nodes, self.last_nodes = None, None
        self.left_ways, self.right_ways = None, None
        self.lanelet_network = None

    @property
    def id_count(self) -> int:
        """Internal counter for giving IDs to the members of the OSM.

        Each call returns the count and increases it by one.
        Returns:
            Current id count.
        """
        tmp = self._id_count
        self._id_count -= 1
        return tmp

    def __call__(self, scenario):
        """Convert a scenario to an OSM xml document.

        Args:
            scenario: CommonRoad scenario whose lanelet network is converted.
        """
        self.osm = OSM()
        self.lanelet_network = scenario.lanelet_network
        self.first_nodes = dict()  # saves first left and right node
        self.last_nodes = dict()  # saves last left and right node
        self.left_ways = dict()
        self.right_ways = dict()
        for lanelet in scenario.lanelet_network.lanelets:
            self._convert_lanelet(lanelet)
        return self.osm.serialize_to_xml()

    def _convert_lanelet(self, lanelet: Lanelet):
        """Convert a lanelet to a way relation.

        Add the resulting relation and its ways and nodes to the OSM.
        Args:
            lanelet: Lanelet to be converted.
        """
        # check if there are shared ways (boundary already converted for
        # an adjacent lanelet)
        right_way_id = self._get_potential_right_way(lanelet)
        left_way_id = self._get_potential_left_way(lanelet)
        left_nodes, right_nodes = self._create_nodes(lanelet, left_way_id, right_way_id)
        self.first_nodes[lanelet.lanelet_id] = (left_nodes[0], right_nodes[0])
        self.last_nodes[lanelet.lanelet_id] = (left_nodes[-1], right_nodes[-1])
        if not left_way_id:
            left_way = Way(self.id_count, *left_nodes)
            self.osm.add_way(left_way)
            left_way_id = left_way.id_
        if not right_way_id:
            right_way = Way(self.id_count, *right_nodes)
            self.osm.add_way(right_way)
            right_way_id = right_way.id_
        self.left_ways[lanelet.lanelet_id] = left_way_id
        self.right_ways[lanelet.lanelet_id] = right_way_id
        self.osm.add_way_relation(WayRelation(self.id_count, left_way_id, right_way_id))

    def _create_nodes(
        self, lanelet: Lanelet, left_way_id: str, right_way_id: str
    ) -> Tuple[List[str], List[str]]:
        """Create new nodes for the ways of the lanelet.

        Add them to OSM and return a list of the node ids.
        In case a left or right way already exists, the returned list
        only contains the first and last node of the way.
        Args:
            lanelet: Lanelet of which the right and left vertices should be converted to ways.
            left_way_id: Id of a potential shared left way which was already converted.
                If this is not None, the left vertices of the lanelet do not have to be converted again.
            right_way_id: Id of a potential right way, similar to left_way_id.
        Returns:
            A tuple of lists of node ids for the left and the right way.
        """
        left_nodes, right_nodes = [], []
        start_index = 0
        end_index = len(lanelet.left_vertices)
        pot_first_left_node, pot_first_right_node = self._get_shared_first_nodes_from_other_lanelets(
            lanelet
        )
        pot_last_left_node, pot_last_right_node = self._get_shared_last_nodes_from_other_lanelets(
            lanelet
        )
        # Shared end nodes replace the first/last vertex, so the vertex
        # range to convert is trimmed accordingly.
        if pot_first_left_node:
            start_index = 1
        if pot_last_left_node:
            end_index = -1
        if left_way_id:
            first_left_node, last_left_node = self._get_first_and_last_nodes_from_way(
                left_way_id, lanelet.adj_left_same_direction
            )
        else:
            first_left_node = pot_first_left_node
            last_left_node = pot_last_left_node
            left_nodes = self._create_nodes_from_vertices(
                lanelet.left_vertices[start_index:end_index]
            )
        if right_way_id:
            first_right_node, last_right_node = self._get_first_and_last_nodes_from_way(
                right_way_id, lanelet.adj_right_same_direction
            )
        else:
            first_right_node = pot_first_right_node
            last_right_node = pot_last_right_node
            right_nodes = self._create_nodes_from_vertices(
                lanelet.right_vertices[start_index:end_index]
            )
        if first_left_node:
            left_nodes.insert(0, first_left_node)
        if first_right_node:
            right_nodes.insert(0, first_right_node)
        if last_left_node:
            left_nodes.append(last_left_node)
        if last_right_node:
            right_nodes.append(last_right_node)
        return left_nodes, right_nodes

    def _get_first_and_last_nodes_from_way(
        self, way_id: str, same_dir: bool
    ) -> Tuple[str, str]:
        """Get the first and the last node of a way.

        Reverse order of nodes if way is reversed.
        Args:
            way_id: Id of way.
            same_dir: True if way is in normal direction, False if it is reversed.
        Returns:
            Tuple with first and last node.
        """
        way = self.osm.find_way_by_id(way_id)
        first_idx, last_idx = (0, -1) if same_dir else (-1, 0)
        return (way.nodes[first_idx], way.nodes[last_idx])

    def _create_nodes_from_vertices(self, vertices: List[np.ndarray]) -> List[str]:
        """Create nodes and add them to the OSM.

        Args:
            vertices: List of vertices from a lanelet boundary.
        Returns:
            Ids of nodes which were created.
        """
        nodes = []
        for vertice in vertices:
            # project cartesian (x, y) back to geographic lon/lat
            lon, lat = self.proj(vertice[0], vertice[1], inverse=True)
            node = Node(self.id_count, lat, lon)
            nodes.append(node.id_)
            self.osm.add_node(node)
        return nodes

    def _get_potential_right_way(self, lanelet):
        """Check if a shared right boundary with another lanelet can be transformed
        to the same way.

        Args:
            lanelet: Lanelet of which right boundary should be converted to a way.
        Returns:
            Id of a way which can be shared, else None if it is not possible.
        """
        if lanelet.adj_right:
            # For same-direction neighbors our right boundary equals their
            # left boundary; for opposite direction, their right (reversed).
            if lanelet.adj_right_same_direction:
                potential_right_way = self.left_ways.get(lanelet.adj_right)
            else:
                potential_right_way = self.right_ways.get(lanelet.adj_right)
            if potential_right_way:
                adj_right = self.lanelet_network.find_lanelet_by_id(lanelet.adj_right)
                vertices = (
                    adj_right.left_vertices
                    if lanelet.adj_right_same_direction
                    else adj_right.right_vertices[::-1]
                )
                if _vertices_are_equal(lanelet.right_vertices, vertices):
                    return potential_right_way
        return None

    def _get_potential_left_way(self, lanelet):
        """Check if a shared left boundary with another lanelet can be transformed
        to the same way.

        Args:
            lanelet: Lanelet of which left boundary should be converted to a way.
        Returns:
            Id of a way which can be shared, else None if it is not possible.
        """
        if lanelet.adj_left:
            if lanelet.adj_left_same_direction:
                potential_left_way = self.right_ways.get(lanelet.adj_left)
            else:
                potential_left_way = self.left_ways.get(lanelet.adj_left)
            if potential_left_way:
                adj_left = self.lanelet_network.find_lanelet_by_id(lanelet.adj_left)
                vertices = (
                    adj_left.right_vertices
                    if lanelet.adj_left_same_direction
                    else adj_left.left_vertices[::-1]
                )
                if _vertices_are_equal(lanelet.left_vertices, vertices):
                    return potential_left_way
        return None

    def _get_shared_first_nodes_from_other_lanelets(
        self, lanelet: Lanelet
    ) -> Tuple[str, str]:
        """Get already created nodes from other lanelets which could also
        be used by this lanelet as first nodes.

        Args:
            lanelet: Lanelet for which shared nodes should be found.
        Returns:
            Id of first left and first right node if they exist.
        """
        if lanelet.predecessor:
            # Prefer the last nodes of a converted predecessor; otherwise
            # look at sibling successors of the same predecessor.
            for lanelet_id in lanelet.predecessor:
                first_left_node, first_right_node = self.last_nodes.get(
                    lanelet_id, (None, None)
                )
                if first_left_node:
                    return first_left_node, first_right_node
            for pred_id in lanelet.predecessor:
                pred = self.lanelet_network.find_lanelet_by_id(pred_id)
                for succ_id in pred.successor:
                    first_left_node, first_right_node = self.first_nodes.get(
                        succ_id, (None, None)
                    )
                    if first_left_node:
                        return first_left_node, first_right_node
        return None, None

    def _get_shared_last_nodes_from_other_lanelets(
        self, lanelet: Lanelet
    ) -> Tuple[str, str]:
        """Get already created nodes from other lanelets which could also
        be used by this lanelet as last nodes.

        Args:
            lanelet: Lanelet for which shared nodes should be found.
        Returns:
            Id of last left and last right node if they exist.
        """
        if lanelet.successor:
            # Mirror image of the predecessor case above.
            for lanelet_id in lanelet.successor:
                last_left_node, last_right_node = self.first_nodes.get(
                    lanelet_id, (None, None)
                )
                if last_left_node:
                    return last_left_node, last_right_node
            for succ_id in lanelet.successor:
                succ = self.lanelet_network.find_lanelet_by_id(succ_id)
                for pred_id in succ.predecessor:
                    last_left_node, last_right_node = self.last_nodes.get(
                        pred_id, (None, None)
                    )
                    if last_left_node:
                        return last_left_node, last_right_node
        return None, None
def _vertices_are_equal(
    vertices1: List[np.ndarray], vertices2: List[np.ndarray]
) -> bool:
    """Checks if two lists of vertices are equal up to a tolerance.

    Args:
        vertices1: First vertices to compare.
        vertices2: Second vertices to compare.
    Returns:
        True if every vertex in one list is nearly equal to the
        corresponding vertex at the same position in the other list.
    """
    if len(vertices1) != len(vertices2):
        return False
    diff = vertices1 - vertices2
    # Bug fix: the largest *absolute* element-wise difference must be below
    # the tolerance. The previous np.abs(np.max(diff)) took |max signed
    # entry|, which wrongly reported equality whenever the largest signed
    # difference happened to be near zero while large negative differences
    # existed elsewhere.
    return bool(np.max(np.abs(diff)) < ways_are_equal_tolerance)
|
6,835 | f29637cd670524baebac6549962a1c50fc1b91c6 | import math
# 1
long_phrase = 'Насколько проще было бы писать программы, если бы не заказчики'
short_phrase = '640Кб должно хватить для любых задач. Билл Гейтс (по легенде)'


def compare(long, short):
    """Print True when *long* has strictly more characters than *short*."""
    is_longer = len(long) > len(short)
    print(is_longer)


compare(long_phrase, short_phrase)
# 2.1
text = 'Если программист в 9-00 утра на работе, значит, он там и ночевал'
# Bug fix: the original ran 'if letter not in d: d[letter]=1' and then the
# *separate* 'if letter in d' branch also fired (the key now exists), so
# every character's first occurrence was counted twice and all counts were
# off by one. Count each occurrence exactly once instead.
d = dict()
for letter in text:
    d[letter] = d.get(letter, 0) + 1
result = 'В строке text {} букв "а" и {} букв "и"'.format(d['а'], d['и'])
print(result)
# 2.2
# Removing a letter shrinks the string by its occurrence count, so the
# shorter remainder identifies the more frequent letter.
if len(text.replace('и', '')) < len(text.replace('а', '')):
    print('В строке больше букв "и"')
else:
    print('В строке больше букв "а"')
# 3
byte=213680000
# decimal megabytes (10**6 bytes), not binary MiB
megabyte=byte/(10**6)
print('Объем файла равен {}Mb'.format(megabyte))
# 4
# sin(pi/6) is exactly 0.5; the printed value shows float rounding
sin=math.sin(math.pi/6)
print(sin)
''' 5 дробные числа не могут быть представлены в точности в бинарном виде,
поэтому значения округляются, и такие операции,
как 0.1+0.2, дают неточный результат '''
# 5
def exchange (a, b):
    """Swap a and b arithmetically (without a temp variable) and print them."""
    b=b-a
    a=a+b  # a now holds the original b
    b=a-b  # b now holds the original a
    print('a=',a,'b=',b)
exchange(120,1)
# 6
# split the number into its digits via its string form
num=10011
st=str(num)
st.split()  # NOTE(review): no-op — the returned list is discarded
l=len(st)-1  # highest bit position
print(l)
# collect the per-digit values here
new_num=list()
# each digit is multiplied by 2 to the power of its position
k=-1
for i in st:
    k=k+1
    i=int(i)*(2**(l-k))
    print(i)
    new_num.append(i)
result=sum(new_num)
print(result)
|
6,836 | 11a0c3307994a90d1d4de67d442ffa355e11e13b | from .compat import reverse, action
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
from rest_framework import pagination
from rest_framework import renderers
from . import registry
from .serializers import RunSerializer, RecordSerializer
from .models import Run
from .settings import import_setting
class PageNumberPagination(pagination.PageNumberPagination):
    # Fixed page size for all run listings served by RunViewSet.
    page_size = 50
class RunViewSet(ModelViewSet):
    """DRF viewset driving the data-wizard import workflow for Run objects.

    Each extra @action corresponds to one step of the wizard (choose
    serializer, map columns, map row ids, import data) and renders
    'data_wizard/run_<action>.html' or JSON depending on the renderer.
    """
    serializer_class = RunSerializer
    pagination_class = PageNumberPagination
    renderer_classes = [
        renderers.TemplateHTMLRenderer,
        renderers.JSONRenderer,
        renderers.BrowsableAPIRenderer,
    ]
    # Auth/permission classes are configurable via data-wizard settings.
    authentication_classes = [
        import_setting('AUTHENTICATION'),
    ]
    permission_classes = [
        import_setting('PERMISSION'),
    ]
    record_serializer_class = RecordSerializer
    queryset = Run.objects.all()

    @property
    def backend(self):
        # Imported lazily to avoid a circular import at module load time.
        from . import backend as data_wizard_backend
        return data_wizard_backend

    @property
    def template_name(self):
        # 'retrieve' is rendered with the detail template; every other
        # action uses a template named after itself.
        if self.action == 'retrieve':
            template = 'detail'
        else:
            template = self.action
        return 'data_wizard/run_{}.html'.format(template)

    def get_renderers(self):
        # The polling endpoint is JSON-only.
        if self.action == 'status':
            return [renderers.JSONRenderer()]
        else:
            return super(RunViewSet, self).get_renderers()

    @action(detail=True)
    def status(self, request, *args, **kwargs):
        """Poll the async backend for the state of ?task=<id>."""
        task_id = request.GET.get('task', None)
        result = self.backend.get_async_status(task_id)
        status = result.get('status', 'UNKNOWN')
        action = result.get('action', None)
        # A finished task without an explicit next step sends the client
        # to the records view.
        if not action and status == 'SUCCESS':
            action = 'records'
        if action:
            result['location'] = self.get_action_url(action)
        elif status == 'FAILURE' and not result.get('error'):
            result['error'] = "Unknown Error"
        result['status'] = status
        return Response(result)

    # URL namespace used when reversing wizard action URLs.
    _namespace = 'data_wizard'

    def get_action_url(self, action):
        """Reverse the URL of the named wizard action for this run."""
        name = self._namespace + ':run-' + action
        return reverse(name, kwargs={'pk': self.get_object().pk})

    def run_task(self, name, use_async=False, post=None):
        """Execute the named task on this Run via the configured backend."""
        run = self.get_object()
        return run.run_task(
            name,
            use_async=use_async,
            post=post,
            backend=self.backend,
            user=self.request.user
        )

    def retrieve_and_run(self, task_name, use_async=False, post=None):
        """Render the detail response, merged with the task's result dict."""
        response = self.retrieve(self.request, **self.kwargs)
        result = self.run_task(task_name, use_async, post)
        response.data.update(result)
        return response

    @action(detail=True)
    def serializers(self, request, *args, **kwargs):
        """List the registered target serializers the user can pick from."""
        response = self.retrieve(request, **self.kwargs)
        response.data['serializer_choices'] = [
            {
                'name': s['class_name'],
                'label': s['name'],
            } for s in registry.get_serializers()
            if s['options'].get('show_in_list', True)
        ]
        return response

    @action(detail=True, methods=['post'])
    def updateserializer(self, request, *args, **kwargs):
        """Save the chosen serializer, then re-render the chooser page."""
        run = self.get_object()
        # render with the 'serializers' template despite the POST action
        self.action = 'serializers'
        name = request.POST.get('serializer', None)
        if name and registry.get_serializer(name):
            run.serializer = name
            run.save()
            run.add_event('update_serializer')
        return self.serializers(request)

    @action(detail=True)
    def columns(self, request, *args, **kwargs):
        """Show the column-mapping step."""
        return self.retrieve_and_run('read_columns')

    @action(detail=True, methods=['post'])
    def updatecolumns(self, request, *args, **kwargs):
        """Persist the submitted column mappings and re-render the step."""
        response = self.retrieve_and_run('read_columns')
        self.action = 'columns'
        result = self.run_task('update_columns', post=request.POST)
        response.data.update(result)
        return response

    @action(detail=True)
    def ids(self, request, *args, **kwargs):
        """Show the row-identifier-mapping step."""
        return self.retrieve_and_run('read_row_identifiers')

    @action(detail=True, methods=['post'])
    def updateids(self, request, *args, **kwargs):
        """Persist the submitted row-identifier mappings."""
        response = self.retrieve_and_run('read_row_identifiers')
        self.action = 'ids'
        result = self.run_task('update_row_identifiers', post=request.POST)
        response.data.update(result)
        return response

    @action(detail=True, methods=['post'])
    def data(self, request, *args, **kwargs):
        """Kick off the (async) data import."""
        return self.retrieve_and_run('import_data', use_async=True)

    @action(detail=True, methods=['post', 'get'])
    def auto(self, request, *args, **kwargs):
        """One-click import: GET shows progress, POST starts auto_import."""
        if request.method == 'GET':
            response = self.retrieve(request, **kwargs)
            task_id = request.GET.get('task', None)
            if task_id:
                response.data['task_id'] = task_id
            else:
                # no running task: fall back to the plain detail template
                self.action = 'retrieve'
            return response
        return self.retrieve_and_run('auto_import', use_async=True)

    @action(detail=True)
    def records(self, request, *args, **kwargs):
        """Detail view augmented with all imported records of this run."""
        response = self.retrieve(self.request, **kwargs)
        response.data['records'] = self.record_serializer_class(
            self.get_object().record_set.all(),
            many=True
        ).data
        return response
|
6,837 | 131caf50cc8682cf180168a1b136b1dcdd70fa76 | #-*- coding: UTF-8 -*-
#Author Motuii
'''
* ┏┓ ┏┓
* ┏┛┻━━━┛┻┓
* ┃ ┃
* ┃ ━ ┃
* ┃ ┳┛ ┗┳ ┃
* ┃ ┃
* ┃ ┻ ┃
* ┃ ┃
* ┗━┓ ┏━┛
* ┃ ┃ 神兽保佑
* ┃ ┃ 代码无BUG!
* ┃ ┗━━━┓
* ┃ ┣┓
* ┃ ┏┛
* ┗┓┓┏━┳┓┏┛
* ┃┫┫ ┃┫┫
* ┗┻┛ ┗┻┛
*
'''
n = 10  # number of Pascal's-triangle rows to print (Python 2 script)
# arr = [[1]*i for i in range(1,n+1)]
# for i in range(len(arr)):
#     for j in range(len(arr[i])):
#         if (j!=0 and j!=len(arr[i-1])):
#             arr[i][j] = arr[i-1][j-1] + arr[i-1][j]
#     print ' '.join(map(lambda x:str(x),arr[i]))
an = [1]*n
# Build each row in place. Iterating right-to-left means an[j-1] still
# holds the previous row's value when an[j] is updated.
for i in range(n):
    for j in range(i-1,0,-1):
        an[j] = an[j]+an[j-1]
    print an[0:i+1]
    #print "\t".join(map(lambda x:str(x),an[0:i+1]))
|
6,838 | caac877bf6c42217ea41f51717f6a704a3a9774b | ''' 简述:这里有四个数字,分别是:1、2、3、4
提问:能组成多少个互不相同且无重复数字的三位数?各是多少? '''
for x in range(1,5):
for y in range(1,5):
for z in range(1,5):
if (x != y) & (x != z) & (y != z):
print(x,y,z)
|
6,839 | 002ef36bd132f1ac258b3f8baf8098accbd8a8f2 | ''' mock_proto.py '''
from heron.common.src.python import constants
import heron.proto.execution_state_pb2 as protoEState
import heron.proto.physical_plan_pb2 as protoPPlan
import heron.proto.tmaster_pb2 as protoTmaster
import heron.proto.topology_pb2 as protoTopology
# pylint: disable=no-self-use, missing-docstring
class MockProto(object):
    '''Factory for mock Heron protobuf objects (topologies, physical plans,
    execution state, tmaster location) used by tests.'''
    # Shared identifiers reused across all mock objects.
    topology_name = "mock_topology_name"
    topology_id = "mock_topology_id"
    cluster = "mock_topology_cluster"
    environ = "mock_topology_environ"

    def create_mock_spout(self,
                          spout_name,
                          output_streams,
                          spout_parallelism):
        """Build a Spout proto with the given name, output streams and a
        parallelism config entry."""
        spout = protoTopology.Spout()
        spout.comp.name = spout_name
        # Parallelism is carried as a string-valued config key/value pair.
        kv = spout.comp.config.kvs.add()
        kv.key = constants.TOPOLOGY_COMPONENT_PARALLELISM
        kv.type = protoTopology.ConfigValueType.Value('STRING_VALUE')
        kv.value = str(spout_parallelism)
        for stream in output_streams:
            spout.outputs.add().stream.CopyFrom(stream)
        return spout

    def create_mock_bolt(self,
                         bolt_name,
                         input_streams,
                         output_streams,
                         bolt_parallelism):
        """Build a Bolt proto with the given name, input/output streams and a
        parallelism config entry."""
        bolt = protoTopology.Bolt()
        bolt.comp.name = bolt_name
        kv = bolt.comp.config.kvs.add()
        kv.key = constants.TOPOLOGY_COMPONENT_PARALLELISM
        kv.type = protoTopology.ConfigValueType.Value('STRING_VALUE')
        kv.value = str(bolt_parallelism)
        for stream in input_streams:
            bolt.inputs.add().stream.CopyFrom(stream)
        for stream in output_streams:
            bolt.outputs.add().stream.CopyFrom(stream)
        return bolt

    def create_mock_simple_topology(
            self,
            spout_parallelism=1,
            bolt_parallelism=1):
        """
        Simple topology contains one spout and one bolt.
        """
        topology = protoTopology.Topology()
        topology.id = MockProto.topology_id
        topology.name = MockProto.topology_name
        # Stream1: emitted by the spout, consumed by the bolt.
        stream1 = protoTopology.StreamId()
        stream1.id = "mock_stream1"
        stream1.component_name = "mock_spout"
        # Spout1
        spout = self.create_mock_spout("mock_spout", [stream1], spout_parallelism)
        topology.spouts.extend([spout])
        # Bolt1
        bolt = self.create_mock_bolt("mock_bolt", [stream1], [], bolt_parallelism)
        topology.bolts.extend([bolt])
        return topology

    def create_mock_medium_topology(
            self,
            spout_parallelism=1,
            bolt1_parallelism=1,
            bolt2_parallelism=1,
            bolt3_parallelism=1):
        """
        Medium topology is a three stage topology
        with one spout, two mid stage bolts, and one
        last stage bolt.
        S -str1-> B1 -str3-> B3
        S -str2-> B2 -str4-> B3
        """
        topology = protoTopology.Topology()
        topology.id = "mock_topology_id"
        topology.name = "mock_topology_name"
        # Streams: two from the spout, one from each mid-stage bolt.
        stream1 = protoTopology.StreamId()
        stream1.id = "mock_stream1"
        stream1.component_name = "mock_spout1"
        stream2 = protoTopology.StreamId()
        stream2.id = "mock_stream2"
        stream2.component_name = "mock_spout1"
        stream3 = protoTopology.StreamId()
        stream3.id = "mock_stream3"
        stream3.component_name = "mock_bolt1"
        stream4 = protoTopology.StreamId()
        stream4.id = "mock_stream4"
        stream4.component_name = "mock_bolt2"
        # Spouts
        spout1 = self.create_mock_spout("mock_spout1",
                                        [stream1, stream2],
                                        spout_parallelism)
        topology.spouts.extend([spout1])
        # Bolts: bolt1/bolt2 fan out from the spout, bolt3 fans them back in.
        bolt1 = self.create_mock_bolt("mock_bolt1",
                                      [stream1],
                                      [stream3],
                                      bolt1_parallelism)
        bolt2 = self.create_mock_bolt("mock_bolt2",
                                      [stream2],
                                      [stream4],
                                      bolt2_parallelism)
        bolt3 = self.create_mock_bolt("mock_bolt3",
                                      [stream3, stream4],
                                      [],
                                      bolt3_parallelism)
        topology.bolts.extend([bolt1, bolt2, bolt3])
        return topology

    def create_mock_simple_physical_plan(
            self,
            spout_parallelism=1,
            bolt_parallelism=1):
        """Wrap the simple topology in a PhysicalPlan proto."""
        pplan = protoPPlan.PhysicalPlan()
        pplan.topology.CopyFrom(self.create_mock_simple_topology(
            spout_parallelism,
            bolt_parallelism))
        return pplan

    def create_mock_medium_physical_plan(
            self,
            spout_parallelism=1,
            bolt1_parallelism=1,
            bolt2_parallelism=1,
            bolt3_parallelism=1):
        """Wrap the medium topology in a PhysicalPlan proto."""
        pplan = protoPPlan.PhysicalPlan()
        pplan.topology.CopyFrom(self.create_mock_medium_topology(
            spout_parallelism,
            bolt1_parallelism,
            bolt2_parallelism,
            bolt3_parallelism))
        return pplan

    def create_mock_execution_state(self):
        """Build an ExecutionState proto populated with the class constants."""
        estate = protoEState.ExecutionState()
        estate.topology_name = MockProto.topology_name
        estate.topology_id = MockProto.topology_id
        estate.cluster = MockProto.cluster
        estate.environ = MockProto.environ
        return estate

    def create_mock_tmaster(self):
        """Build an empty TMasterLocation proto."""
        tmaster = protoTmaster.TMasterLocation()
        return tmaster

    def add_topology_config(self, topology, key, value):
        """Append a string-valued key/value pair to the topology's config."""
        kv = topology.topology_config.kvs.add()
        kv.key = key
        kv.type = protoTopology.ConfigValueType.Value('STRING_VALUE')
        kv.value = str(value)
|
6,840 | d250cc0aafdd48cb0eb56108d9c7148153cde002 | from ctypes import *
import os
import sys
import time
import datetime
import subprocess
import RPi.GPIO as GPIO
from PIL import Image
from PIL import ImageDraw
from PIL import ImageFont
#import Adafruit_GPIO as GPIO
import Adafruit_GPIO.SPI as SPI
import ST7735 as TFT
import pigpio
# use BCM pin define
pin_meas = 24 # 18 in BOARD
pin_black = 25 # 22 in BOARD
pin_led = 26 # 37 in BOARD
HOME_DIR = "/home/pi/QSS003_python/"
C12880_LIB = HOME_DIR + "Dual_C12880.so"
# use BCM pin define
GATE_LED_PIN1 = 4 # 7 in BOARD
GATE_LED_PIN2 = 22 # 15 in BOARD
PWM_LED_PIN1 = 18 # in pigpio
PWM_LED_PIN2 = 13 # in pigpio
PWM_FREQ = 500
DUTY_MIN = 0
DUTY_MAX = 900000 # original = 1000000
LED_CURR_MIN = 60 #mA
LED_CURR_MAX = 330 #mA
LED_DUTY_CONST = 10000/3
# use BCM pin define
AOPIN = 23 # 16 in BOARD
RSTPIN = 12 # 32 in BOARD
SPI_PORT = 1
SPI_CH = 0
SPI_SPEED = 4000000
COLOR_RED = (255,0,0)
COLOR_GREEN = (0,255,0)
COLOR_BLUE = (0,0,255)
COLOR_WHITE = (255,255,255)
COLOR_BLACK = (0,0,0)
COLOR_YELLOW = (255,255,0)
COLOR_PURPLE = (255,0, 255)
COLOR_CYAN = (0, 255,255)
TFT_SIZE = (128, 128)
LINE1Y = 15
LINE2Y = 30
LINE3Y = 45
LINE4Y = 65
LINE5Y = 80
LINE6Y = 100
SPACE1 = 15
SPACE2 = 20
time.sleep(1)
C12880 = cdll.LoadLibrary(C12880_LIB)
if len(sys.argv) < 6:
    # Not enough CLI arguments: print a usage string and exit the branch.
    error_str = str(sys.argv[0]) + " led1_current led2_current led_stable_time int_time1 int_time2"
    print(error_str)
else:
    # board initialization
    C12880.Setup() # init spectrometer
    GPIO.setmode(GPIO.BCM)
    GPIO.setwarnings(False)
    GPIO.setup(pin_meas, GPIO.IN)
    GPIO.setup(pin_black, GPIO.IN)
    GPIO.setup(pin_led, GPIO.OUT)
    GPIO.output(pin_led, GPIO.LOW)
    GPIO.setup(GATE_LED_PIN1, GPIO.OUT)
    GPIO.setup(GATE_LED_PIN2, GPIO.OUT)
    GPIO.output(GATE_LED_PIN1, GPIO.HIGH) #close
    GPIO.output(GATE_LED_PIN2, GPIO.HIGH) #close
    # 288-element C arrays receive one spectrum each from the shared library.
    data1 = (c_uint * 288)() # data to store spectrum data
    data2 = (c_uint * 288)()
    meas = 1
    black = 1
    fnameindex = 0
    # Display init
    spi = SPI.SpiDev(SPI_PORT, SPI_CH, max_speed_hz = SPI_SPEED)
    disp = TFT.ST7735(dc = AOPIN, rst = RSTPIN, spi = spi, width = 128, height = 128)
    disp.begin()
    disp.clear()
    img = Image.new('RGB', TFT_SIZE, COLOR_WHITE)
    draw = ImageDraw.Draw(img)
    font = "/usr/share/fonts/truetype/dejavu/DejaVuSansMono.ttf"
    fontout = ImageFont.truetype(font,11)
    draw.text((0,LINE1Y), " Mode: Measure", font = fontout, fill = COLOR_BLUE)
    draw.text((0,LINE2Y), " Bilirubin", font = fontout, fill = COLOR_BLUE)
    draw.text((0,LINE4Y), " SiO2", font = fontout, fill = COLOR_BLUE)
    disp.display(img)
    # Parse CLI arguments: LED currents (mA), settle time (s), integration times.
    led1_current = int(sys.argv[1])
    led2_current = int(sys.argv[2])
    led_stable_time = float(sys.argv[3])
    int_time1 = int(sys.argv[4])
    int_time2 = int(sys.argv[5])
    # Clamp LED currents to the supported hardware range.
    if (led1_current < LED_CURR_MIN):
        led1_current = LED_CURR_MIN
    elif (led1_current > LED_CURR_MAX):
        led1_current = LED_CURR_MAX
    if (led2_current < LED_CURR_MIN):
        led2_current = LED_CURR_MIN
    elif (led2_current > LED_CURR_MAX):
        led2_current = LED_CURR_MAX
    print("led1_current = "+ str(led1_current))
    print("led2_current = "+ str(led2_current))
    # Linear map from current (mA) above the minimum to a PWM duty value.
    led1_duty = (led1_current - LED_CURR_MIN)*LED_DUTY_CONST
    led2_duty = (led2_current - LED_CURR_MIN)*LED_DUTY_CONST
    print("led1_duty = "+ str(led1_duty))
    print("led2_duty = "+ str(led2_duty))
    pi = pigpio.pi()
    while (1):
        #wait until black or meas buttom is pressed
        while (meas and black):
            if GPIO.input(pin_meas) == GPIO.LOW:
                meas = 0
                print("meas low")
            if GPIO.input(pin_black) == GPIO.LOW:
                black = 0
                print("black low")
        # Turn the indicator LED on and drive the measurement LEDs via PWM.
        GPIO.output(pin_led, GPIO.HIGH)
        pi.hardware_PWM(PWM_LED_PIN1, PWM_FREQ, int(led1_duty))
        pi.hardware_PWM(PWM_LED_PIN2, PWM_FREQ, int(led2_duty))
        if (led1_duty > 0):
            GPIO.output(GATE_LED_PIN1, GPIO.LOW) # open
        if (led2_duty > 0):
            GPIO.output(GATE_LED_PIN2, GPIO.LOW) # open
        # Let the LED output stabilize before sampling.
        time.sleep(led_stable_time)
        # Black-reference readings go to a fixed file; measurements are numbered.
        if (black == 0):
            fname = "dual_black.txt"
        else:
            fname = "dual_desktop_" + str(fnameindex) + ".txt"
        fname = HOME_DIR + fname
        #C12880.ReadSpectrometer(int_time, data)
        C12880.Read2Spectrometer(int_time1, int_time2, data1, data2)
        # print the data on tft screen
        draw.rectangle((0, LINE3Y, 128, LINE3Y+SPACE2), COLOR_WHITE)
        draw.rectangle((0, LINE5Y, 128, LINE5Y+SPACE2), COLOR_WHITE)
        draw.rectangle((0, LINE6Y, 128, LINE6Y+SPACE1), COLOR_WHITE)
        fontout = ImageFont.truetype(font,16)
        # NOTE(review): these readouts are hard-coded placeholder values,
        # not derived from the captured spectra — confirm intent.
        draw.text((0,LINE3Y)," 12.1 mg/dL", font = fontout, fill = COLOR_RED)
        draw.text((0,LINE5Y)," 66%", font = fontout, fill = COLOR_RED)
        fontout = ImageFont.truetype(font,10)
        draw.text((0,LINE6Y),str(datetime.datetime.now()), font = fontout, fill = COLOR_BLUE)
        disp.display(img)
        #out = [str(line) + '\n' for line in data]
        fp = open(fname, "w+")
        #print(out)
        #fp.writelines(out)
        # Write both spectra as comma-separated pairs, one sample per line.
        for i in range(0,288):
            fp.write(str(data1[i]) + ", " + str(data2[i]) + ", \n")
        fp.close()
        if (meas == 0):
            fnameindex = fnameindex + 1
        # Shut the LEDs down and re-arm for the next button press.
        pi.hardware_PWM(PWM_LED_PIN1, PWM_FREQ, 0)
        pi.hardware_PWM(PWM_LED_PIN2, PWM_FREQ, 0)
        GPIO.output(GATE_LED_PIN1, GPIO.HIGH) # close
        GPIO.output(GATE_LED_PIN2, GPIO.HIGH) # close
        # time.sleep(led_stable_time) # for LED test
        meas = 1
        black = 1
        GPIO.output(pin_led, GPIO.LOW) #turn off measure LED
        print("done")
|
6,841 | 349581774cded59ece6a5e8178d116c166a4a6b3 | from typing import List
from uuid import uuid4
from fastapi import APIRouter, Depends, FastAPI, File, UploadFile
from sqlalchemy.orm import Session
from starlette.requests import Request
from Scripts.fastapp.common.consts import UPLOAD_DIRECTORY
from Scripts.fastapp.database.conn import db
# from Scripts.fastapp.database.schema import Users, ApiKeys, ApiWhiteLists
from Scripts.fastapp.database.schema import Train, Files
from Scripts.fastapp import models as m
from Scripts.fastapp.errors import exceptions as ex
import string
import secrets
from inspect import currentframe as frame
from Scripts.fastapp.models import MessageOk, Test, Label
from Scripts.fastapp.utils.file_module.test import t_load_data
from Scripts.fastapp.utils.file_module.load_file_manager import loadFileManager
from Scripts.fastapp.utils.preprocess_reg import preprocess_reg
import os
router = APIRouter(prefix='/pid')
@router.get('/getIsPID', response_model=List[m.GetIsPID])
# @router.get('')
async def show_data(request: Request, ispid):
    """
    no params\n
    :return\n
    [\n
        {\n
            id: int = None\n
            name: str = None\n
            ext: str = None\n
            is_pid: bool = False\n
        },{\n
        ...\n
        }\n
    ]\n
    """
    # `ispid` is a query parameter used to filter files by their is_pid flag
    # (presumably truthy/falsy — confirm expected type with callers).
    request.state.inspect = frame()
    print("### state.user : ", request.state.user)
    print("### state.inspect : ", request.state.inspect)
    print("###", request.url.hostname + request.url.path )
    print("###", request.state.ip)
    result = Files.filter(is_pid=ispid).all()
    print("##RESULT##", result)
    # return dict(id=result[0].id, reg_count=result[0].reg_count)
    return result
@router.get('/getTrain')
async def get_train_data(request: Request, id: int):
    """
    no params\n
    :return\n
    Train Model
    """
    # NOTE: parameter name `id` shadows the builtin, but it is part of the
    # public query-string interface and must keep its name.
    request.state.inspect = frame()
    # All Train rows belonging to the given file, in id order.
    result = Train.filter(file_id=id).order_by("id").all()
    print("##RESULT##", result)
    # return dict(id=result[0].id, reg_count=result[0].reg_count)
    return result
# @router.post("/register", status_code=201, response_model=Label)
# @router.post("/register", status_code=201, response_model=Label)
@router.post("/register/{file_path}", status_code=201)
async def input_data(file_path ,request: Request, session: Session = Depends(db.session)):
    """
    Register the file at file_path into the DB as Train rows.
    Currently unused.
    """
    print("start#########################################")
    request.state.inspect = frame()
    print(file_path)
    # Parse the file into a dataframe of per-page regex feature counts.
    df = t_load_data(file_path)
    for row in df.itertuples():
        print(row)
        print(row.page)
        # y=-1 marks the row as not yet labeled.
        Train.create(session, auto_commit=True,page=row.page ,reg_count=row.reg_count, column1=row.col1, column2=row.col2,column3=row.col3,column4=row.col4,column5=row.col5,column6=row.col6,column7=row.col7,column8=row.col8,column9=row.col9,column10=row.col10, y=-1)
    # d = Train.create(session, auto_commit=True, reg_count=3, column3=1, column7=1, y=1)
    # print(d.reg_count, d.id)
    print("#########################################")
    return MessageOk()
@router.put('/update_y')
async def update_label(request: Request, label_info: m.AddLabel):
    """
    File Label Update\n
    :param request:
    :param y:
    :param label:
    :return:
    """
    # user = request.state.user
    # Only rows that are still unlabeled (y == -1) are updated.
    n_data = Train.filter(y= -1)
    request.state.inspect = frame()
    reet = n_data.update(auto_commit=True, **label_info.dict())
    print("2##########################################")
    return reet
@router.post('/show_file')
async def show_file_data(request:Request, file_path):
    """
    Parse the file at file_path and return its extracted page data.
    Ex_> D:/Project/pid/Scripts/fastapp/data/samples/pdf_sample2.pdf
    """
    request.state.inspect = frame()
    # file type: Dictionary
    file = loadFileManager(file_path)
    return file.data
@router.post("/files/")
async def create_files(files: List[bytes] = File(...)):
    """Return the size in bytes of each uploaded file body."""
    sizes = []
    for payload in files:
        sizes.append(len(payload))
    return {"file_sizes": sizes}
@router.post("/uploadfiles")
async def create_upload_files(request: Request, files: List[UploadFile] = File(...), session: Session = Depends(db.session)):
    """
    params: Files \n
    return: Last File's \n
    [
        {
            page:1
            td: dfsdf
        },{
            page:2
            td: asdasdasda
        }
    ]
    """
    for file in files:
        # Persist the raw upload to the upload directory.
        contents = await file.read()
        print(os.path.join('./', file.filename))
        # with open(os.path.join('./', file.filename), "wb") as fp:
        with open(UPLOAD_DIRECTORY + file.filename, "wb") as fp:
            fp.write(contents)
        # Re-open the stored file through the project's file parser.
        f = loadFileManager(UPLOAD_DIRECTORY + file.filename)
        try:
            obj = Files.create(session, auto_commit=False, name=f.name, ext=f.ext, ip_add= request.state.ip )
            # print(obj.id, f.name, f.ext, f.data)
            # One Train row per parsed page; y=-1 marks it unlabeled.
            for p in f.data:
                df = preprocess_reg(p["td"])
                Train.create(session, auto_commit=True, file_id=obj.id ,y=-1, page=p["page"]+1, text_data=p["td"],
                             reg_count=int(df["reg_count"][0]), column1=int(df["col1"][0]), column2=int(df["col2"][0]),
                             column3=int(df["col3"][0]),column4=int(df["col4"][0]),column5=int(df["col5"][0]),column6=int(df["col6"][0]),
                             column7=int(df["col7"][0]),column8=int(df["col8"][0]),column9=int(df["col9"][0]),column10=int(df["col10"][0])
                             )
        except Exception as e:
            # NOTE(review): any failure (DB error, parse error, ...) is reported
            # as a bad file extension here, which can mask the real cause.
            raise ex.FileExtEx(file.filename)
    # f.data of the last processed file
    return f.data
|
6,842 | ce626afa7c0fd2e190afd92b57a0ebebf19f9e9b | from django.contrib import admin
from django.contrib.staticfiles.urls import static # 本Ch11.1
from django.urls import path, include
from . import settings_common, settings_dev # 本Ch11.1
import debug_toolbar
# URL routing: admin site, the test app, django-allauth auth URLs, and the
# debug toolbar endpoint.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('', include('login_test_app.urls')),
    path('accounts/', include('allauth.urls')),  # allauth default URLs (book p.218)
    path('__debug__/', include(debug_toolbar.urls)),
]
# On the development server, append the routing returned by
# django.contrib.staticfiles.urls.static() with MEDIA_URL/MEDIA_ROOT so that
# uploaded media files are served.
urlpatterns +=static(settings_common.MEDIA_URL, document_root=settings_dev.MEDIA_ROOT)
|
6,843 | c1c79e5adc620690e4e386f7f1cd9f781eeec0ce | import sys
# Report the platform's largest native container index (Py_ssize_t maximum).
# Renamed the variable from `max` so the builtin max() is no longer shadowed.
max_size = sys.maxsize
print(" sys.maxsize -> ", max_size)
|
6,844 | cfb49d78dc14e6f4b6d2357d292fd6275edec711 | import csv
import datetime
# Read the crime-history CSV header (Python 2: 'rU' universal-newline mode,
# reader.next() instead of next(reader)).
with open('/Users/wangshibao/SummerProjects/analytics-dashboard/myapp/CrimeHistory.csv','rU') as f:
    reader = csv.reader(f)
    header = reader.next()
# Parse a fixed reference timestamp and print it.
date_time = "20140501 00:00"
date_time = datetime.datetime.strptime(date_time, "%Y%m%d %H:%M")
print date_time
|
6,845 | be0afa5184f753ed5f9a483379a4d81cd7af4886 | #!/usr/bin/python2.7
import sys
import datetime
import psycopg2
import json
import collections
from pprint import pprint
from pyral import Rally, rallyWorkset
import copy
import os
import argparse
from ConfigParser import SafeConfigParser
import traceback
global rally
global server_name
"""
WARNING: This was hacked together and set up to do some quick and
dirty work. Please make no judgements on the quality of the
code.
"""
## TODO: Convert all the lookups to use the cached information
## DONE: Adjust the command line so the environment can be passed
## DONE: Import the data again, this time using an order field, then order the query
##
## Since we are creating all the objects, perhaps if we log them it will be faster. The information will be cached and we won't need to make additional queries
## This will save on server performance and make the script faster
story_project_ref = {}
story_ref = {}
testcase_ref = {}
defect_project_ref = {}
defect_ref = {}
portfolio_item_ref = {}
workspace_name = ""
user_names = {}
project_names = {}
debug = 1
# get the first instance of a user
## Get's a user ref for Rally
## First time, it will query the system and add it to the dictionary
## Subsequent calls will have cached user information, speeding up the system
def getUserRef(user_name):
    """Return the Rally ref for user_name, caching results in user_names.

    On the integrations/partners instances the email domain is rewritten
    before the lookup.
    """
    global rally
    global server_name
    global debug
    # If we need to work on another instance, say integration or partners, we need to change the email address of the users
    if server_name == "integrations" or server_name == "partners":
        user_name = user_name.replace("@acme.com", "@" + server_name + ".acme.com")
    if debug:
        print(user_names.items())
    if user_name in user_names:
        if debug:
            print("Found %s" % user_name)
        value = user_names[user_name]
    else:
        if debug:
            print("Adding name %s " %user_name)
        # First lookup: query Rally and cache the ref.
        value = rally.getUserInfo(username=user_name).pop(0).ref
        user_names[user_name] = value
    return value
## Get's a project ref for Rally
## First time, it will query the system and add it to the dictionary
## Subsequent calls will have cached information, speeding up the system
def getProjectRef(project_name):
    """Return the Rally ref for project_name, caching results in project_names.

    Exits the program on lookup failure.
    """
    global rally
    global project_names
    if debug:
        print("Items:\n")
        print(project_names.items())
    #let's build a list of project names and reference ids, so we don't have to query the system each time.
    if project_name in project_names:
        if debug:
            print("Found %s" % project_name)
        value = project_names[project_name]
    else:
        if debug:
            print("Adding name %s " %project_name)
        try:
            value = rally.getProject(project_name).ref
            project_names[project_name] = value
        except Exception, details:
            sys.stderr.write("ERROR: %s \n" % details)
            sys.exit(1)
    return value
def getIterationRef(piName):
    """Return the OID of the Iteration named piName; implicitly None if absent."""
    global rally
    if debug:
        print "Getting Iteration"
    collection = rally.get('Iteration')
    assert collection.__class__.__name__ == 'RallyRESTResponse'
    if not collection.errors:
        for pe in collection:
            name = '%s' % pe.Name
            if debug:
                print pe.Name
            if(name == piName):
                #print pe.oid, pe.Name
                return pe.oid
def getPortfolioItemInitiativeRef(piName):
    """Return the OID of the Initiative portfolio item named piName (None if absent)."""
    global rally
    if debug:
        print "Getting Initiative Ref"
    collection = rally.getCollection("https://us1.rallydev.com/slm/webservice/v2.0/portfolioitem/initiative?")
    assert collection.__class__.__name__ == 'RallyRESTResponse'
    if not collection.errors:
        for pe in collection:
            name = '%s' % pe.Name
            if(name == piName):
                #print pe.oid, pe.Name
                return pe.oid
def getPortfolioItemThemeRef(piName):
    """Return the OID of the Theme portfolio item named piName (None if absent)."""
    global rally
    if debug:
        print "Getting Theme Ref"
    collection = rally.getCollection("https://us1.rallydev.com/slm/webservice/v2.0/portfolioitem/theme?")
    assert collection.__class__.__name__ == 'RallyRESTResponse'
    if not collection.errors:
        for pe in collection:
            name = '%s' % pe.Name
            if(name == piName):
                #print pe.oid, pe.Name
                return pe.oid
def getPortfolioItemFeatureRef(piName):
    """Return the OID of the Feature portfolio item named piName (None if absent)."""
    global rally
    if debug:
        print "Getting Feature Ref"
    collection = rally.getCollection("https://us1.rallydev.com/slm/webservice/v2.0/portfolioitem/feature?")
    #pprint(collection)
    assert collection.__class__.__name__ == 'RallyRESTResponse'
    if not collection.errors:
        for pe in collection:
            name = '%s' % pe.Name
            if debug:
                print pe.Name
            if(name == piName):
                if debug:
                    print "Feature Found"
                #print pe.oid, pe.Name
                return pe.oid
def getPreliminaryEstimateRef(object_value):
    """Return the ref of the PreliminaryEstimate named object_value (None if absent)."""
    global rally
    if debug:
        print "Getting Prelim Estiamte"
    collection = rally.getCollection("https://us1.rallydev.com/slm/webservice/v2.0/preliminaryestimate?")
    #pprint(collection)
    assert collection.__class__.__name__ == 'RallyRESTResponse'
    if not collection.errors:
        for pe in collection:
            name = '%s' % pe.Name
            if(name == object_value):
                #print pe.ref, pe.Name, pe.oid
                return pe.ref
def getReleaseRef(object_value):
    """Return the ref of the Release named object_value (None if absent)."""
    global rally
    if debug:
        print "Getting Release Data"
    collection = rally.get('Release')
    #pprint(collection)
    assert collection.__class__.__name__ == 'RallyRESTResponse'
    if not collection.errors:
        for pe in collection:
            name = '%s' % pe.Name
            if(name == object_value):
                #print pe.ref, pe.Name, pe.Project
                return pe.ref
def getUserStoryRef(object_value):
    """Return the ref of the UserStory named object_value, searching project-scope-down."""
    global rally
    if debug:
        print "Getting User Story Data"
    #print "Scope is : " + rally.getProject().Name
    args = {"projectScopeDown" : "True", "Project" : "Online Store"}
    collection = rally.get('UserStory', kwargs=args)
    #pprint(collection)
    assert collection.__class__.__name__ == 'RallyRESTResponse'
    if not collection.errors:
        for pe in collection:
            name = '%s' % pe.Name
            if debug:
                print pe.Name, pe.Project
            if(name == object_value):
                #print pe.ref, pe.Name, pe.Project
                return pe.ref
def getTestFolderRef(object_value):
    """Return the ref of the TestFolder named object_value (None if absent)."""
    global rally
    debug = 0  # local override; shadows the module-level debug flag
    if debug:
        print "Getting TestFolder Data"
    collection = rally.getCollection('https://us1.rallydev.com/slm/webservice/v2.0/testfolder?')
    pprint(collection)
    assert collection.__class__.__name__ == 'RallyRESTResponse'
    if not collection.errors:
        for pe in collection:
            name = '%s' % pe.Name
            if debug:
                print "peName is %s" % pe.Name
            if(name == object_value):
                #print pe.ref, pe.Name, pe.Project
                return pe.ref
def getTestCaseRef(object_value):
    """Return the ref of the TestCase named object_value (None if absent)."""
    global rally
    debug = 0  # local override; shadows the module-level debug flag
    if debug:
        print "Getting TestCase Data"
    collection = rally.getCollection('https://us1.rallydev.com/slm/webservice/v2.0/testcase?')
    #pprint(collection)
    assert collection.__class__.__name__ == 'RallyRESTResponse'
    if not collection.errors:
        for pe in collection:
            name = '%s' % pe.Name
            if debug:
                print pe.Name
            if(name == object_value):
                #print pe.ref, pe.Name,
                return pe.ref
def myconverter(o):
    """json.dumps `default` hook: render datetimes in ISO-8601 form
    ('T' separator). Any other type falls through and returns None,
    exactly as before."""
    if isinstance(o, datetime.datetime):
        # isoformat() is equivalent to str(o) with the space replaced by 'T'.
        return o.isoformat()
def db(database_name='rally_data'):
    """Open a psycopg2 connection to the local rally_data database.

    Note: database_name is currently ignored; the DSN is hard-coded.
    """
    return psycopg2.connect("dbname=rally_data user=readonly password=readonly host=localhost")
#Get the database values and store them into dictionary.
def query_db(query, args=(), one=False):
    """Execute query and return rows as a list of column-name dicts.

    With one=True, return just the first row (or None when empty).
    """
    cur = db().cursor()
    cur.execute(query, args)
    # cur.description[i][0] is the i-th column name.
    r = [dict((cur.description[i][0], value) \
        for i, value in enumerate(row)) for row in cur.fetchall()]
    cur.connection.close()
    return (r[0] if r else None) if one else r
def getUserStoryRefByName(name):
    """Look up a cached user-story ref by name; exits the program if missing."""
    debug = 0
    if debug:
        print "debug mode userstoryrefbyname"
        print "searching for user story name %s" % name
    ref = story_ref[name] if story_ref.get(name) else False
    if ref == False:
        pprint(story_ref)
        sys.stderr.write("Error finding user story reference... something has gone wrong")
        sys.exit(1)
    if debug:
        print "found user story ref %s" % ref
    return ref
def getTestCaseRefByName(name):
    """Look up a cached test-case ref by name; exits the program if missing."""
    debug = 0
    if debug:
        print "debug mode test case refbyname"
        print "searching for user story name %s" % name
    ref = testcase_ref[name] if testcase_ref.get(name) else False
    if ref == False:
        pprint(testcase_ref)
        sys.stderr.write("Error finding test case reference... something has gone wrong")
        sys.exit(1)
    if debug:
        print "found test case ref"
    return ref
# When creating a task, the project needs to be filled in. We get that from the user story.
# When creating a task, the project needs to be filled in. We get that from the user story.
def getProjectRefFromUserStoryName(name):
    """Return the cached project ref of the story named `name`; exits if missing."""
    debug = 0
    if debug:
        print "debug mode projectreffromstoryname"
        print "searching for user story name %s" % name
    ref = story_project_ref[name] if story_project_ref.get(name) else False
    if ref == False: #Let's check the defect log before we error out
        pprint(story_project_ref)
        sys.stderr.write("Error finding project ref from user story reference... something has gone wrong")
        sys.exit(1)
    if debug:
        print "found test case ref"
    return ref
"""
This determines which fields need to be translated due to being a reference value.
If found, substitute the text with the real value
"""
def replace_values(val, object_type):
    """Rewrite, in place, the reference-valued fields of a DB row dict.

    Substitutes human-readable names (projects, users, stories, releases,
    portfolio items, states, ...) with the corresponding Rally refs/OIDs.
    Which substitutions apply depends on object_type.
    """
    debug = 0
    #print "replace values for "
    #print val
    #print " item type: " + object_type
    if debug:
        print val
    if object_type == "Task":
        if debug:
            print "we need to get a project in the task object from the story object"
        # Tasks inherit their project from the owning work product (story).
        val['project'] = getProjectRefFromUserStoryName(val['workproduct'])
        val['workproduct'] = getUserStoryRefByName(val['workproduct'])
        #print "Workproduct %s project %s" % (val['workproduct'], val['project'])
    if val.get("testcase"):
        if debug:
            print "checking for testcase"
        val["testcase"] = getUserStoryRefByName(val["testcase"])
    if val.get("testfolder"):
        debug = 1
        if debug:
            print "checking for testfolder is %s" % val["testfolder"]
        val["testfolder"] = getTestFolderRef(val["testfolder"])
        #print "TestFolder is %s" % (val['testfolder'])
    if val.get("requirement"):
        if debug:
            print "checking for requirement"
        val["requirement"] = getUserStoryRefByName(val["requirement"])
    if val.get("iteration"):
        if debug:
            print "checking for iteration"
        val["iteration"] = getIterationRef(val["iteration"])
    if val.get("portfolioitem"):
        if debug:
            print "checking for parent on user story"
        if object_type == "Story":
            val["portfolioitem"] = getPortfolioItemFeatureRef(val["portfolioitem"])
    if val.get("release"):
        if debug:
            print "checking for release"
        val["release"] = getReleaseRef(val["release"])
    if val.get("project") and (object_type != "Task"):
        if debug:
            print "checking for project"
        val["project"] = getProjectRef(val["project"])
    if val.get("workproduct") and (object_type != "Task"): #Work product on non tasks can be a user story, so let's look that up.
        if debug:
            print "checking for workproduct"
        val["workproduct"] = getUserStoryRefByName(val["workproduct"])
    if val.get("owner"):
        if debug:
            print "getting user"
        val["owner"] = getUserRef(val["owner"])
    if val.get("preliminaryestimate"):
        if debug:
            print "getting prelim estimate"
        val["preliminaryestimate"] = getPreliminaryEstimateRef(val["preliminaryestimate"])
    if val.get("parent"): # Parent can mean different objects, depending on where it is referenced. So we determine the type and replace it.
        if debug:
            print "getting parent for " + object_type + "\n"
        if object_type == "Initiative":
            val["parent"] = getPortfolioItemThemeRef(val["parent"])
        if object_type == "Feature":
            val["parent"] = getPortfolioItemInitiativeRef(val["parent"])
    if val.get("state") :
        if debug:
            print "getting state"
        # State refs are looked up per portfolio-item type.
        if object_type == "Initiative":
            val["state"] = rally.getState('Initiative', val["state"]).ref
        if object_type == "Feature":
            val["state"] = rally.getState('Feature', val["state"]).ref
        if object_type == "Theme":
            val["state"] = rally.getState('Theme', val["state"]).ref
"""
We need to clear out none/nulls from the json output.
"""
def scrub(x):
    """Return a deep copy of *x* with every None replaced by ''.

    Dicts, lists and tuples are rebuilt recursively; any other value is
    deep-copied unchanged (None itself becomes '').

    Bug fix: the previous implementation matched tuples with
    isinstance(x, (list, tuple)) but then assigned into the copy item by
    item, which raises TypeError for tuples (they are immutable). Tuples
    are now rebuilt instead of mutated.
    """
    if isinstance(x, dict):
        return dict((k, scrub(v)) for k, v in x.items())
    if isinstance(x, list):
        return [scrub(v) for v in x]
    if isinstance(x, tuple):
        # Rebuild rather than mutate: tuples do not support item assignment.
        return tuple(scrub(v) for v in x)
    if x is None:
        return ''
    # Scalars / other objects: keep the original deep-copy semantics so the
    # caller never shares mutable state with the input.
    return copy.deepcopy(x)
"""
Empty keys cause a problem with Rally, so let's clean them out.
"""
def remove_empty_keys(x):
    """Remove falsy-valued keys from dict *x* in place (Rally rejects empty
    values), plus the bookkeeping 'listing_order' and 'dataset' columns."""
    empty_keys = [k for k,v in x.iteritems() if not v]
    for k in empty_keys:
        del x[k]
    #this was added in to make user story numbers (and other objects) import in a specific order.
    if 'listing_order' in x:
        del x['listing_order']
    #this is to remove the dataset column from import.
    if 'dataset' in x:
        del x['dataset']
def main(args):
    """Load a named dataset from Postgres and create the objects in Rally.

    Parses CLI options (server, workspace, dataset), reads credentials from
    config.ini, connects via pyral, then for each supported object type
    queries the DB, resolves name references to Rally refs, and creates the
    records, caching refs for later lookups.
    """
    global rally
    global server_name
    global debug
    login_name = ""
    api_key = ""
    #Parse Command line options
    parser = argparse.ArgumentParser("create_data")
    parser.add_argument("--server", "-s", "--server", required=True, help="Server options = sales, integrations or partner", type=str)
    parser.add_argument("--workspace_name", "-n", "--name", required=True, help="Name of the workspace to update")
    parser.add_argument("--dataset_name", "-d", "--dataset", required=True, help="Name of the dataset to load")
    args = parser.parse_args()
    workspace_name = args.workspace_name
    server_name = args.server
    dataset = args.dataset_name
    # Credentials come from a per-server section in config.ini.
    config = SafeConfigParser()
    config.read('config.ini')
    if config.has_option(server_name,'server'):
        rally_server = config.get(server_name,'server')
    if config.has_option(server_name,'username'):
        login_name = config.get(server_name,'username')
    if config.has_option(server_name,'password'):
        password = config.get(server_name,'password')
    if config.has_option(server_name,'api_key'):
        api_key = config.get(server_name,'api_key')
    #print api_key + login_name + password + rally_server + server_name
    #login_name = "thomas.mcquitty@acme.com"
    if debug:
        print "server name is %s" % args.server
        print "workspace name is %s" % args.workspace_name
    valid_servers = ["integrations", "sales", "partners"]
    if server_name.lower() not in valid_servers:
        print "You have selected an invalid server. Please use a valid option."
        sys.exit(1)
    # Prefer an API key when configured; otherwise use login/password.
    try:
        if api_key == "":
            if debug:
                print "Login/password connection"
            rally = Rally(rally_server, login_name, password, workspace=workspace_name, project='Online Store')
        if api_key != "":
            if debug:
                print "Api connection"
            rally = Rally(rally_server, apikey=api_key, workspace=workspace_name, project='Online Store')
    except Exception, details:
        print traceback.print_exc()
        print "details %s " % details
        print ("Error logging in")
        sys.exit(1)
    if debug:
        rally.enableLogging('output.log')
    # Order matters: parents (releases, portfolio items, stories) must exist
    # before children (tasks, test case artifacts) reference them.
    objects = ["Release", "Iteration", "Theme", "Initiative", "Feature", "Story", "Defect", "TestFolder", "TestSet", "TestCase", "TestCaseStep", "TestCaseResult", "Task", "FundingIncrement"]
    for item_type in objects:
        item_text = "%s" % item_type
        print "Processing " + item_text + "..."
        query_text = "select * from " + item_text + " where dataset = '" + dataset + "';"
        my_query = query_db(query_text)
        #process each item. We will have to do substitution for values that are references in the data, like projects and user names
        for item in my_query:
            item = scrub(item)
            replace_values(item, item_text)
            remove_empty_keys(item) #remove empty keys, they cause an issue when loading and Rally expects a value.
            # Round-trip through JSON to normalize datetimes via myconverter.
            output = json.dumps(item, default = myconverter)
            output = json.loads(output)
            try:
                if debug:
                    print output
                    print "creating object " + item_text + "\n\n"
                record = rally.create(item_text, output)
                #pprint(record)
                debug = 0
                #build array of stories, defects, testsets and test cases for quick reference
                if (item_text == "Story") or (item_text == "Defect") or (item_text == "TestSet") or (item_text == "TestCase"):
                    if debug:
                        print "Debugging???"
                        print "Name %s story ref %s Project %s " % (record.Name, record.ref, record.Project.ref)
                    story_project_ref[record.Name] = record.Project.ref
                    story_ref[record.Name] = record.ref
                # Build array of feature, themes and initiatives added to the workspace
                if(item_text == "Theme") or (item_text == "Initiative") or (item_text == "Feature"):
                    if debug:
                        print "adding to theme list"
                        print "Name %s portfolio ref %s" % (record.Name, record.ref)
                    portfolio_item_ref[record.Name] = record.ref
            except Exception, details:
                sys.stderr.write("error creating %s\n\n" % item_text)
                sys.stderr.write("ERROR: %s \n" % details)
                sys.exit(1)
    debug = 0
debug = 0
if __name__ == '__main__':
main(sys.argv[1:])
sys.exit(0)
|
6,846 | afd72ce2d9598f92937f3038eb0ef49b740b9977 | from guet.commands.strategies.strategy import CommandStrategy
class TooManyArgsStrategy(CommandStrategy):
    """Strategy applied when a command receives more arguments than it accepts."""
    def apply(self):
        # Inform the user; no other action is taken.
        print('Too many arguments.')
|
6,847 | 37d817436ce977339594867ef917177e7371a212 | import pycmc
# open project, get Crag, CragVolumes, and intensity images
# Placeholders: obtain these from an opened project (Crag, its volumes,
# the intensity images and the output feature containers).
crag = ...
cragVolumes = ...
raw = ...
membrane = ...
nodeFeatures = ...
edgeFeatures = ...
# Intensity statistics over the raw image, plus geometric shape features.
statisticsFeatureProvider = pycmc.StatisticsFeatureProvider(cragVolumes, raw, "raw")
shapeFeatureProvider = pycmc.ShapeFeatureProvider(cragVolumes)
# Combine both providers so the extractor evaluates them in one pass.
featureProvider = pycmc.CompositeFeatureProvider()
featureProvider.add(shapeFeatureProvider)
featureProvider.add(statisticsFeatureProvider)
featureExtractor = pycmc.FeatureExtractor(crag)
featureExtractor.extractFeatures(nodeFeatures, edgeFeatures, featureProvider)
|
6,848 | fc4fafe4e29a7f116c38be265fce8e4fb6638330 | from .fieldmatrix import *
|
6,849 | 22dccf6bb76dab735f373089d0772f475b2d5a5d | #!/bin/env python
# coding: utf-8
"""
Dakara Online protocol generator, by Alejandro Santos
"""
from genpackets import *
from gendefs_js import *
BUILDERS = []
HANDLERS = []
DECODE_DISPATCH = []
ARGS_HANDLER = []
def write_packets_from(f, fph, base_name, namespace, P):
    """Emit the JS code for one packet family `P` into *f*, and handler
    stubs into *fph*.

    Side effects: appends per-packet snippets to the module-level BUILDERS,
    HANDLERS, DECODE_DISPATCH and ARGS_HANDLER lists; the ServerPacket pass
    drains DECODE_DISPATCH and ARGS_HANDLER again below.
    NOTE(review): the `namespace` parameter is never used in this body.
    """
    # Enum with IDs
    if base_name != "ServerPacket" :
        f.write("""var {base_name}ID = {{ \n""".format(base_name=base_name))
        for i, x in enumerate(P):
            if x:
                f.write("    {name} : {packet_id}".format(base_name=base_name, name=x.name, packet_id=i))
                f.write(",\n")
        f.write("""    {base_name}ID_PACKET_COUNT : {packet_id}\n}};\n""".format(base_name=base_name, packet_id=len(P)))

    # Factory (kept as a disabled triple-quoted block by the original author)
    '''
    f.write("""
function {base_name}Factory(buffer) {{
    if (buffer.length() < 1) return 0;
    var p;
    PacketID = buffer.PeekByte();
    switch (PacketID) {{
""".format(base_name=base_name))
    for i, x in enumerate(P):
        if not x: continue
        f.write("""
    case {i}:
        p = new {name}(buffer);
        break;
""".format(i=i, name=x.name))
    f.write("""
    }}
    return p;
}}
""".format())
    '''

    for i, x in enumerate(P):
        if not x: continue
        # Per-packet accumulators for the code fragments built below.
        header_fields = []
        header_fields_signature = []
        items_assign_e = []
        items_assign_build = []
        ctor_fields = ""
        min_byte_count = 0
        ctor_fields_bytequeue = ""
        parametros_fields = ""
        parametros_args = ""
        serialize_fields = ""
        if x.name == "MultiMessage":
            # MultiMessage is generated by hand; see escribir_multimessage().
            escribir_multimessage(f)
            continue
        for y in x.args:
            # y = (name, type flags[, array size]); low byte is the base type.
            arg_name = y[0]
            arg_type = y[1] & 0xff
            arg_type_str = TYPE_TO_STR[arg_type]
            arg_type_sig_str = TYPE_TO_SIGNATURE_STR[arg_type]
            arg_is_array = ((y[1] & TYPE_ARRAY) == TYPE_ARRAY)
            type_reader_name = TYPE_TO_READER_NAME[arg_type]
            type_writer_name = TYPE_TO_WRITER_NAME[arg_type]
            ctor_fields += ", " + arg_name + "()"
            items_assign_e.append("        {arg_name}: {arg_name},".format(arg_name=arg_name))
            items_assign_build.append("        e.{arg_name}= {arg_name};".format(arg_name=arg_name))
            if arg_is_array:
                array_size=y[2]
                min_byte_count += TYPE_SIZE[arg_type] * array_size
                header_fields.append("    {arg_name}; ".format(arg_type_str=arg_type_str, arg_name=arg_name, array_size=array_size))
                header_fields_signature.append("{arg_name} ".format(arg_type_str=arg_type_sig_str, arg_name=arg_name, array_size=array_size))
                ctor_fields_bytequeue += x.get_ctor_fields_bytequeue_fmt(arg_is_array).format(arg_name=arg_name, type_reader_name=type_reader_name, array_size=array_size)
                parametros_fields += x.get_parametros_fields_fmt(arg_is_array).format(arg_name=arg_name, type_reader_name=type_reader_name, array_size=array_size)
                parametros_args += x.get_parametros_args_fmt(arg_is_array).format(arg_name=arg_name, type_reader_name=type_reader_name, array_size=array_size)
                serialize_fields += x.get_serialize_fields_fmt(arg_is_array).format(arg_name=arg_name, type_writer_name=type_writer_name, array_size=array_size)
            else:
                min_byte_count += TYPE_SIZE[arg_type]
                header_fields.append("    {arg_type_str} {arg_name}; ".format(arg_type_str=arg_type_str, arg_name=arg_name))
                header_fields_signature.append("{arg_type_str} {arg_name}".format(arg_type_str=arg_type_sig_str, arg_name=arg_name))
                ctor_fields_bytequeue += x.get_ctor_fields_bytequeue_fmt(arg_is_array).format(arg_name=arg_name, type_reader_name=type_reader_name)
                parametros_fields += x.get_parametros_fields_fmt(arg_is_array).format(arg_name=arg_name, type_reader_name=type_reader_name)
                parametros_args += x.get_parametros_args_fmt(arg_is_array).format(arg_name=arg_name, type_reader_name=type_reader_name)
                serialize_fields += x.get_serialize_fields_fmt(arg_is_array).format(arg_name=arg_name, type_writer_name=type_writer_name)
        # Everything the per-packet templates may interpolate.
        format_args = {
            'base_name': base_name,
            'name': x.name,
            'header_fields': '\n'.join(header_fields),
            'header_fields_signature': ', '.join(header_fields_signature),
            'items_assign_e': '\n'.join(items_assign_e),
            'items_assign_build': '\n'.join(items_assign_build),
            'ctor_fields': ctor_fields,
            'packet_id': i,
            'min_byte_count': min_byte_count,
            'ctor_fields_bytequeue': ctor_fields_bytequeue,
            'serialize_fields': serialize_fields,
            'parametros_fields' : parametros_fields,
            'parametros_args' : parametros_args
        }
        # Individual packet header
        if base_name != "ServerPacket" :
            f.write(x.get_header_fmt().format(**format_args))
        BUILDERS.append(x.get_builder_fmt().format(**format_args))
        if base_name == "ServerPacket" :
            HANDLERS.append(x.get_handler_fmt().format(**format_args))
        # for ServerPacketDecodeAndDispatch (without having to create packet objects)
        if base_name == "ServerPacket" :
            dec_dispatch = x.get_parametros_fmt().format(**format_args);
            # strip the trailing comma, if any:
            pos = dec_dispatch.rfind(",")
            if pos > 0:
                dec_dispatch = dec_dispatch[:pos] + dec_dispatch[pos+1:]
            DECODE_DISPATCH.append(dec_dispatch)
        if base_name == "ServerPacket" :
            args_handler = x.get_argumentosHandler_fmt().format(**format_args);
            # strip the trailing comma, if any:
            pos = args_handler.rfind(",")
            if pos > 0:
                args_handler = args_handler[:pos] + args_handler[pos+1:]
            # strip the trailing newline
            pos = args_handler.rfind("\n")
            args_handler = args_handler[:pos] + args_handler[pos+1:]
            ARGS_HANDLER.append(args_handler)
    # Decode and Dispatch, keeping the Packet in the stack
    # Suggested by hmk
    if base_name == "ServerPacket" :
        f.write("""
function {base_name}DecodeAndDispatch(buffer, handler) {{
    if (buffer.length() < 1) return;
    var PacketID = buffer.ReadByte();
    switch (PacketID) {{
""".format(base_name=base_name))
        for i, x in enumerate(P):
            if not x: continue
            f.write("""
    case {i}:
        {{
{decode_dispatch}
        }}
        break;
""".format(i=i, decode_dispatch=DECODE_DISPATCH.pop(0)))
        f.write("""
    default:
        {{
        msg = "error decoding packet id: " + PacketID;
        throw new Error(msg);
        }}
    }}
}}
""".format())
        fph.write("""
/** ESTE ARCHIVO SOLO ESTA PARA FACILITAR ESCRIBIR LOS HANLDLES POR PRIMERA VEZ, NO TINENE NINGUN USO ***************************************************************************************************************************************************/
""".format(base_name=base_name))
        for i, x in enumerate(P):
            if not x: continue
            fph.write("""\n\thandle{name}: function ({arg_handler}){{ \n""".format(base_name=base_name, name=x.name, arg_handler = ARGS_HANDLER.pop(0)))
            #fph.write(HANDLERS.pop(0))
            fph.write("""\t\tlog.network("TODO: handle{name} ");\n\t}},\n""".format(base_name=base_name, name=x.name))
        for i, x in enumerate(P):
            if not x: continue
            #fph.write("""\n\thandle{name}: function (p){{ \n""".format(base_name=base_name, name=x.name))
            #fph.write(HANDLERS.pop(0))
            #fph.write("""\t\talert("TODO: handle{name} ");\n\t}},\n""".format(base_name=base_name, name=x.name))
        fph.write("""
/** ESTE ARCHIVO SOLO ESTA PARA FACILITAR ESCRIBIR LOS HANLDLES POR PRIMERA VEZ, NO TINENE NINGUN USO ***************************************************************************************************************************************************/
""")
def write_packets():
    """Generate protocol.js and protocolhandlerAux.js from the packet tables.

    Runs write_packets_from() once per packet family and then emits the
    `Protocolo` class from the accumulated BUILDERS snippets.

    Fix: the original opened both files with bare open()/close(), leaking
    the handles (and possibly losing buffered output) if any generation
    step raised; `with` guarantees they are closed on every path.
    """
    with open("protocol.js", "w") as f, open("protocolhandlerAux.js", "w") as fph:
        f.write("""
/* Automatically generated file */

define(['enums'], function (Enums) {
""")
        write_packets_from(f, fph, "ClientPacket", "client", CLIENT_PACKETS)
        write_packets_from(f, fph, "ClientGMPacket", "clientgm", CLIENT_GM_PACKETS)
        write_packets_from(f, fph, "ServerPacket", "server", SERVER_PACKETS)

        # MultiMessage is hard-coded (see escribir_multimessage). TODO: do this properly.
        f.write("""
class Protocolo{
""")
        for builder in BUILDERS:
            f.write(builder)

        f.write("""
    ServerPacketDecodeAndDispatch(buffer, handler){
        ServerPacketDecodeAndDispatch(buffer, handler);
    }
""")
        f.write("""
}

return Protocolo;
}); """)
def escribir_multimessage(f):
    """Hand-written generator for the special MultiMessage packet.

    Appends a ready-made JS `switch` (keyed on the sub-message index read
    from the buffer) to DECODE_DISPATCH, plus the matching handler-stub
    argument list.  NOTE(review): the `f` parameter is unused here.
    """
    DECODE_DISPATCH.append('''
        var msgIdx = buffer.ReadByte();
        switch (msgIdx) {
        case Enums.eMessage.NPCHitUser:
            handler.handleNPCHitUser(buffer.ReadByte(), buffer.ReadInteger());
            break;
        case Enums.eMessage.UserHitNPC:
            handler.handleUserHitNPC(buffer.ReadLong());
            break;
        case Enums.eMessage.UserAttackedSwing:
            handler.handleUserAttackedSwing(buffer.ReadInteger());
            break;
        case Enums.eMessage.UserHittedByUser:
            handler.handleUserHittedByUser(buffer.ReadInteger(), buffer.ReadByte(), buffer.ReadInteger());
            break;
        case Enums.eMessage.UserHittedUser:
            handler.handleUserHittedUser(buffer.ReadInteger(), buffer.ReadByte(), buffer.ReadInteger());
            break;
        case Enums.eMessage.WorkRequestTarget:
            handler.handleWorkRequestTarget(buffer.ReadByte());
            break;
        case Enums.eMessage.HaveKilledUser:
            handler.handleHaveKilledUser(buffer.ReadInteger(),buffer.ReadLong());
            break;
        case Enums.eMessage.UserKill:
            handler.handleUserKill(buffer.ReadInteger());
            break;
        case Enums.eMessage.Home:
            handler.handleHome(buffer.ReadByte(),buffer.ReadInteger(),buffer.ReadUnicodeString());
            break;
        case Enums.eMessage.DontSeeAnything:
            handler.handleDontSeeAnything();
            break;
        case Enums.eMessage.NPCSwing:
            handler.handleNPCSwing();
            break;
        case Enums.eMessage.NPCKillUser:
            handler.handleNPCKillUser();
            break;
        case Enums.eMessage.BlockedWithShieldUser:
            handler.handleBlockedWithShieldUser();
            break;
        case Enums.eMessage.BlockedWithShieldOther:
            handler.handleBlockedWithShieldOther();
            break;
        case Enums.eMessage.UserSwing:
            handler.handleUserSwing();
            break;
        case Enums.eMessage.SafeModeOn:
            handler.handleSafeModeOn();
            break;
        case Enums.eMessage.SafeModeOff:
            handler.handleSafeModeOff();
            break;
        case Enums.eMessage.ResuscitationSafeOff:
            handler.handleResuscitationSafeOff();
            break;
        case Enums.eMessage.ResuscitationSafeOn:
            handler.handleResuscitationSafeOn();
            break;
        case Enums.eMessage.NobilityLost:
            handler.handleNobilityLost();
            break;
        case Enums.eMessage.CantUseWhileMeditating:
            handler.handleCantUseWhileMeditating();
            break;
        case Enums.eMessage.EarnExp:
            handler.handleEarnExp();
            break;
        case Enums.eMessage.FinishHome:
            handler.handleFinishHome();
            break;
        case Enums.eMessage.CancelHome:
            handler.handleCancelHome();
            break;
        default:
            throw new Error("Multimessage: " + msgIdx + " no reconocido por el protocolo");
        }
''')
    ARGS_HANDLER.append("msgIdx,args")
def main():
    # Generate both protocol output files in a single pass.
    write_packets()

if __name__ == '__main__':
    main()
6,850 | 7180dc0d622fd449fcee32f2c50000d05ae2d8bb | from load_blender_data import pose_spherical
from misc import mse, mse2psnr, to8b
import os
import imageio
import json
import torch
import torch.nn as nn
import numpy as np
import cv2
from torch.utils.data.dataset import Dataset
from torch.utils.data.dataloader import DataLoader
device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')
class MLP(nn.Module):
    """Coordinate MLP: `in_ch` inputs -> 3 outputs squashed to [0, 1].

    Architecture: input layer + (num_layers - 2) hidden Linear/ReLU pairs,
    followed by a 3-channel Sigmoid head.
    """

    def __init__(self, in_ch=2, num_layers=4, num_neurons=256):
        super(MLP, self).__init__()
        stack = [nn.Linear(in_ch, num_neurons), nn.ReLU()]
        for _ in range(num_layers - 2):
            stack += [nn.Linear(num_neurons, num_neurons), nn.ReLU()]
        stack += [nn.Linear(num_neurons, 3), nn.Sigmoid()]
        self.linears = nn.ModuleList(stack)

    def forward(self, x):
        # Apply the stack sequentially; ModuleList has no built-in forward.
        for module in self.linears:
            x = module(x)
        return x
class BlenderDataset(Dataset):
    """Dataset of rendered images listed in `<datadir>/<split>.txt`.

    Each listed object directory must contain rendering/transforms.json;
    one sample per camera frame is produced.  Despite the pose machinery,
    __getitem__ returns only (image, normalized 2-D coordinate grid).
    """

    def __init__(self, datadir, split='train', testskip=8):
        # NOTE(review): `testskip` is accepted but never used — confirm intent.
        super(BlenderDataset, self).__init__()
        imgs = []
        with open(os.path.join(datadir, split+".txt")) as f:
            lines = f.readlines()
        for i, line in enumerate(lines):
            name = line.strip()
            pose_path = os.path.join(datadir, name, 'rendering/transforms.json')
            with open(pose_path, 'r') as f:
                cam_params = json.load(f)['frames']
            for cam_param in cam_params:
                img_name = cam_param['file_path']
                imgs.append(os.path.join(datadir, name, f'rendering/{img_name}.png'))
        self.images = imgs
        print(f'{split} dataset: {len(self.images)}')

    def get_rays_np(self, H, W, focal, c2w):
        """Return (rays_o, rays_d) for an H x W pinhole camera with pose c2w."""
        i, j = np.meshgrid(np.arange(W, dtype=np.float32), np.arange(H, dtype=np.float32), indexing='xy')
        dirs = np.stack([(i - W * .5) / focal, -(j - H * .5) / focal, -np.ones_like(i)], -1)
        # Rotate ray directions from camera frame to the world frame
        rays_d = np.sum(dirs[..., np.newaxis, :] * c2w[:3, :3],
                        -1)  # dot product, equals to: [c2w.dot(dir) for dir in dirs]
        # Translate camera frame's origin to the world frame. It is the origin of all rays.
        rays_o = np.broadcast_to(c2w[:3, -1], np.shape(rays_d))
        return rays_o, rays_d

    # def __getitem__(self, idx):
    #     img = self.images[idx]
    #     pose = self.poses[idx]
    #     H, W = img.shape[:2]
    #     rays_o, rays_d = self.get_rays_np(H, W, self.focal, pose)
    #     # ret = {'img':img.transpose((2, 0, 1)),
    #     #        'rays_o': rays_o.transpose((2, 0, 1)),
    #     #        'rays_d': rays_d.transpose((2, 0, 1))}
    #     ret = {'img': img,
    #            'rays_o': rays_o,
    #            'rays_d': rays_d}
    #     return ret

    def get_coords2d(self, H, W):
        """Return an (H, H, 2) grid of [0, 1) coordinates.
        NOTE(review): only H is used, so square images are assumed (W == H);
        __getitem__ resizes everything to 512x512, which satisfies this.
        """
        coord = np.linspace(0, 1, H, endpoint=False)
        coords = np.stack(np.meshgrid(coord, coord), -1)
        return coords

    def __getitem__(self, idx):
        # Load with cv2 (BGR channel order), resize to 512x512, scale to [0, 1].
        img_path = self.images[idx]
        img = cv2.imread(img_path, cv2.IMREAD_COLOR)
        img = cv2.resize(img, (512, 512), interpolation=cv2.INTER_LINEAR) / 255.
        H, W = img.shape[:2]
        rays_o = self.get_coords2d(H, W)
        ret = {'img': img.astype(np.float32), 'rays_o': rays_o.astype(np.float32)}
        return ret

    def __len__(self):
        return len(self.images)
class MLPRunner(object):
    """Trains an MLP on Fourier-feature-embedded pixel coordinates to
    regress image colors (Fourier-features / coordinate-network setup)."""

    def __init__(self, args):
        self.basedir = args.basedir
        self.expname = args.expname
        self.num_layers = 4
        self.num_neurons = 256
        self.mapping_size = 256       # number of random Fourier frequencies
        self.num_epoch = 1000         # on average, each image is seen by network num_epoch times
        self.val_epoch = 100          # run validation every val_epoch epochs
        self.lr = 1e-4
        self.batch_size = args.batch_size
        self.num_workers = args.num_workers
        self.train_set = BlenderDataset(args.datadir, split='train')
        self.train_loader = DataLoader(self.train_set,
                                       batch_size=self.batch_size,
                                       num_workers=self.num_workers,
                                       shuffle=True)
        self.val_set = BlenderDataset(args.datadir, split='val')
        self.val_idxs = [i for i in range(len(self.val_set))]
        self.i_print = 1000           # log/snapshot every i_print steps
        self.scale = 10               # NOTE(review): defined but unused below
        # sin+cos of each projection doubles the embedding width.
        self.in_ch = self.mapping_size * 2
        # Fixed random projection matrix for the Fourier embedding.
        self.B_gauss = torch.randn((self.mapping_size, 2)).to(device)
        self.model = MLP(in_ch=self.in_ch)
        self.optimizer = torch.optim.Adam(self.model.parameters(), lr=self.lr)

    def embed(self, x, B):
        """Random Fourier feature embedding; identity when B is None."""
        if B is None:
            return x
        else:
            x_proj = (2. * np.pi * x).matmul(B.transpose(1, 0))
            return torch.cat([torch.sin(x_proj), torch.cos(x_proj)], -1)

    def train(self):
        """Main optimization loop with periodic logging and validation snapshots."""
        self.model.to(device)
        global_step = 0
        for epoch in range(self.num_epoch):
            for i, data in enumerate(self.train_loader):
                img = data['img'].to(device)
                rays_o = data['rays_o'].to(device)
                embedding = self.embed(rays_o, self.B_gauss)
                # Flatten all pixels into a single batch for the MLP.
                embedding = embedding.reshape((-1, embedding.shape[-1]))
                img_pred = self.model.forward(embedding)
                img_pred = img_pred.reshape(img.shape)
                loss = mse(img_pred, img)
                psnr = mse2psnr(loss)
                self.optimizer.zero_grad()
                loss.backward()
                self.optimizer.step()
                if global_step % self.i_print == 0:
                    print(f'[{epoch} | {global_step}] loss:{loss.item()} psnr:{psnr.item()}')
                    # cv2.imwrite(os.path.join(self.basedir, self.expname, f'train_gt_{epoch}_{global_step}.png'),
                    #             to8b(img[0].detach().cpu().numpy()))
                    cv2.imwrite(os.path.join(self.basedir, self.expname, f'train_{epoch}_{global_step}.png'),
                                to8b(img_pred[0].detach().cpu().numpy()))
                global_step += 1
            if epoch % self.val_epoch == 0:
                # Validate on one randomly chosen held-out image.
                idx = np.random.choice(self.val_idxs, 1)[0]
                data = self.val_set.__getitem__(idx)
                img = torch.tensor(data['img']).to(device)
                rays_o = torch.tensor(data['rays_o']).to(device)
                with torch.no_grad():
                    embedding = self.embed(rays_o, self.B_gauss)
                    embedding = embedding.reshape((-1, embedding.shape[-1]))
                    img_pred = self.model.forward(embedding)
                    img_pred = img_pred.reshape(img.shape)
                    loss = mse(img_pred, img)
                    psnr = mse2psnr(loss)
                    print(f'[{epoch} | val] loss:{loss.item()} psnr:{psnr.item()}')
                    # cv2.imwrite(os.path.join(self.basedir, self.expname, f'val_gt_{epoch}_{global_step}.png'),
                    #             to8b(img.detach().cpu().numpy()))
                    cv2.imwrite(os.path.join(self.basedir, self.expname, f'val_{epoch}_{global_step}.png'),
                                to8b(img_pred.detach().cpu().numpy()))
|
6,851 | a012055d11202c68d9eddf5cf2a17043f9bbaf0a | #!/usr/bin/env python
'''
Script for analysis of wavefunctions on GaSb/InAs/GaSb simmetric quantum wells.
This piece code is part of the project "phd_gasb_inas", which comprises the work
related to the Phd. Dissertation named: "Quantum transport of charge and spin in
topological insulators 2D".
Author: Marcos Medeiros
email: mhlmedeiros@gmail.com
'''
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
# Kwant related stuff
import kwant
import tinyarray
# local application imports
from hamiltonians import gasb_hamiltonian as gasb
from system_geometry import shapes
from transport_tools import bands_and_currents as tools
def map_density(ax, syst, psi_sqrd, colormap = "Reds"):
    """Draw a kwant density map of `psi_sqrd` on axis `ax` and restyle it.

    The color scale is clipped at 99% of the maximum density.
    Returns 0 (kept for compatibility with existing callers).
    """
    # Plot the results:
    # print(max(psi_sqrd))
    kwant.plotter.map(syst, psi_sqrd, ax = ax, fig_size = (7,3), cmap = colormap, vmax = 0.99*max(psi_sqrd))
    tools.edit_axis(ax,'dens')
    return 0
def density_in_line(syst, states, Op = np.eye(6)):
    """Evaluate the density operator `Op` on the sites near the x = 0 line.

    Returns a tuple (dos_line, y_values): one density row per state in
    `states`, plus the y coordinate of every site selected by the line
    predicate (collected as a side effect of kwant evaluating `line`).
    """
    y_stack = []
    def line(site):
        # Select sites whose x coordinate lies within one lattice constant
        # of x = 0; record their y position as we go.
        (x, y) = site.pos
        half = 0
        delta = shapes.A_STD
        ans = abs(x - half) < delta
        if ans == True : y_stack.append(y)
        return ans
    rho_line = kwant.operator.Density(syst, Op, where = line, sum = False)
    dos_line = np.array([rho_line(p) for p in states])
    return dos_line, np.array(y_stack)
def plot_dos_in_line(dos_line):
    """Show the first two density rows side by side (red, then blue)."""
    fig, axes = plt.subplots(1, 2, figsize = (10,5))
    for curve, axis, shade in zip(dos_line, axes, ('red', 'blue')):
        axis.plot(curve, color = shade)
    plt.tight_layout()
    plt.show()
def normalize(dos_in_line):
    """Sum the per-state density rows and rescale so the peak equals 1."""
    total = sum(dos_in_line)
    return total / max(total)
def print_info_dos_line(y_values, dos_in_line):
    """Debug dump: shapes and contents of the line-density arrays."""
    print("=" * 80)
    print("Size of dos_both: ", dos_in_line.shape)
    print("Size of y_both: ", y_values.shape)
    print("y_both:\n", y_values)
def main():
    """Build the GaSb/InAs quantum-well system, compute scattering states at
    a fixed energy, and plot spin-resolved density maps together with the
    density profile along the x = 0 line.

    Fixes over the original: it referenced an undefined name `psi`, never
    unpacked the (dos, y_values) tuple returned by density_in_line(), and
    used `y_values_left/right` and `dos_in_line_from_right` without ever
    defining them (NameError at runtime).
    """
    # Define the system:
    hamiltonian = gasb.hamiltonian_97_k_plus()
    # hamiltonian = gasb.hamiltonian_97_down()
    lead_ham = gasb.free_ham(6)
    centralShape = shapes.Rect()
    syst = gasb.system_builder(hamiltonian, lead_ham, centralShape)

    # Calculate the wave_function:
    energia = 448
    parametros = gasb.params_97
    parametros['Eta3'] = 0
    parametros['Eta2'] = 0
    parametros['eF'] = 60
    parametros = dict(GammaLead = parametros["GammaC"], V = 100, **parametros )
    wf = kwant.wave_function(syst, energy = energia, params = parametros)
    modes_left = wf(0)
    modes_right = wf(1)

    # Calculate the density with the spin projection sigma_z (x) 1_3:
    sigma_z = tinyarray.array([[1,0],[0,-1]])
    spin_proj = np.kron(sigma_z, np.eye(3))
    rho = kwant.operator.Density(syst, spin_proj)
    psi_left = sum(rho(p) for p in modes_left)
    psi_right = sum(rho(p) for p in modes_right)

    # Density along the x = 0 line; density_in_line returns (dos, y_values).
    dos_in_line_from_left, y_values_left = density_in_line(syst, modes_left)
    dos_in_line_from_right, y_values_right = density_in_line(syst, modes_right)
    dos_in_line_from_both, _ = density_in_line(syst, np.vstack((modes_left, modes_right)))

    plt.plot(sum(dos_in_line_from_both))
    plt.show()
    plot_dos_in_line(dos_in_line_from_left)
    print(sum(dos_in_line_from_both).shape)

    # Plot the results:
    colorRight = "seismic"
    colorLeft = "seismic"
    fig, ax = plt.subplots(2,2,figsize=(14,6))
    y_values_left = y_values_left * (shapes.A0 / 10)    # conversion to nm^{-1}
    y_values_right = y_values_right * (shapes.A0 / 10)  # conversion to nm^{-1}
    min_line, max_line = -0.7 * shapes.L_STD, 0.7 * shapes.L_STD
    map_density(ax[0][0], syst, psi_left, colormap = colorRight)
    ax[0][0].vlines(0, min_line, max_line, linestyle = "--")
    ax[0][0].set_title("left lead")
    map_density(ax[1][0], syst, psi_right, colormap = colorLeft)
    ax[1][0].vlines(0, min_line, max_line, linestyle = "--")
    ax[1][0].set_title("right lead")
    ax[0][1].plot(y_values_left, normalize(dos_in_line_from_left),
                  marker = ".", markersize = 2.5, linestyle = "-" )
    ax[1][1].plot(y_values_right, normalize(dos_in_line_from_right),
                  marker = ".", markersize = 2.5, linestyle = "-" )
    plt.tight_layout()
    plt.show()
if __name__ == '__main__':
main()
|
6,852 | d5c7b8966e73c607d1d1c5da9814ef507dc53b59 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-03-09 14:28
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: orders Setting rows by (group, name)
    # and adds a free-text description field to the Setting model.

    dependencies = [
        ('proposal', '0016_project_callobjectives'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='setting',
            options={'ordering': ['group', 'name']},
        ),
        migrations.AddField(
            model_name='setting',
            name='description',
            field=models.TextField(blank=True, help_text='Explain what this setting does, where it is used.', verbose_name='Description of this setting'),
        ),
    ]
|
6,853 | e754a24fc9c965c50f7fa12036c884a1a54cc29d | # Copyright 2016-2021 Swiss National Supercomputing Centre (CSCS/ETH Zurich)
# ReFrame Project Developers. See the top-level LICENSE file for details.
#
# SPDX-License-Identifier: BSD-3-Clause
#
# Meta-class for creating regression tests.
#
import reframe.core.namespaces as namespaces
import reframe.core.parameters as parameters
import reframe.core.variables as variables
from reframe.core.exceptions import ReframeSyntaxError
from reframe.core.hooks import HookRegistry
class RegressionTestMeta(type):
    """Metaclass that gives regression-test classes their variable and
    parameter namespaces, pipeline hooks, and final-method protection."""

    class MetaNamespace(namespaces.LocalNamespace):
        '''Custom namespace to control the cls attribute assignment.

        Regular Python class attributes can be overriden by either
        parameters or variables respecting the order of execution.
        A variable or a parameter may not be declared more than once in the
        same class body. Overriding a variable with a parameter or the other
        way around has an undefined behaviour. A variable's value may be
        updated multiple times within the same class body. A parameter's
        value may not be updated more than once within the same class body.
        '''

        def __setitem__(self, key, value):
            if isinstance(value, variables.TestVar):
                # Insert the attribute in the variable namespace
                self['_rfm_local_var_space'][key] = value
                value.__set_name__(self, key)

                # Override the regular class attribute (if present)
                self._namespace.pop(key, None)
            elif isinstance(value, parameters.TestParam):
                # Insert the attribute in the parameter namespace
                self['_rfm_local_param_space'][key] = value

                # Override the regular class attribute (if present)
                self._namespace.pop(key, None)
            elif key in self['_rfm_local_param_space']:
                raise ValueError(
                    f'cannot override parameter {key!r}'
                )
            else:
                # Insert the items manually to overide the namespace clash
                # check from the base namespace.
                self._namespace[key] = value

        def __getitem__(self, key):
            '''Expose and control access to the local namespaces.

            Variables may only be retrieved if their value has been previously
            set. Accessing a parameter in the class body is disallowed (the
            actual test parameter is set during the class instantiation).
            '''
            try:
                return super().__getitem__(key)
            except KeyError as err:
                try:
                    # Handle variable access
                    return self['_rfm_local_var_space'][key]
                except KeyError:
                    # Handle parameter access
                    if key in self['_rfm_local_param_space']:
                        raise ValueError(
                            'accessing a test parameter from the class '
                            'body is disallowed'
                        ) from None
                    else:
                        # As the last resource, look if key is a variable in
                        # any of the base classes. If so, make its value
                        # available in the current class' namespace.
                        for b in self['_rfm_bases']:
                            if key in b._rfm_var_space:
                                # Store a deep-copy of the variable's
                                # value and return.
                                v = b._rfm_var_space[key].default_value
                                self._namespace[key] = v
                                return self._namespace[key]

                        # If 'key' is neither a variable nor a parameter,
                        # raise the exception from the base __getitem__.
                        raise err from None

    @classmethod
    def __prepare__(metacls, name, bases, **kwargs):
        namespace = super().__prepare__(name, bases, **kwargs)

        # Keep reference to the bases inside the namespace
        namespace['_rfm_bases'] = [
            b for b in bases if hasattr(b, '_rfm_var_space')
        ]

        # Regression test parameter space defined at the class level
        local_param_space = namespaces.LocalNamespace()
        namespace['_rfm_local_param_space'] = local_param_space

        # Directive to insert a regression test parameter directly in the
        # class body as: `P0 = parameter([0,1,2,3])`.
        namespace['parameter'] = parameters.TestParam

        # Regression test var space defined at the class level
        local_var_space = namespaces.LocalNamespace()
        namespace['_rfm_local_var_space'] = local_var_space

        # Directives to add/modify a regression test variable
        namespace['variable'] = variables.TestVar
        namespace['required'] = variables.Undefined
        return metacls.MetaNamespace(namespace)

    def __new__(metacls, name, bases, namespace, **kwargs):
        # Flatten the MetaNamespace into a plain dict for type creation.
        return super().__new__(metacls, name, bases, dict(namespace), **kwargs)

    def __init__(cls, name, bases, namespace, **kwargs):
        super().__init__(name, bases, namespace, **kwargs)

        # Create a set with the attribute names already in use.
        cls._rfm_dir = set()
        for base in bases:
            if hasattr(base, '_rfm_dir'):
                cls._rfm_dir.update(base._rfm_dir)

        used_attribute_names = set(cls._rfm_dir)

        # Build the var space and extend the target namespace
        variables.VarSpace(cls, used_attribute_names)
        used_attribute_names.update(cls._rfm_var_space.vars)

        # Build the parameter space
        parameters.ParamSpace(cls, used_attribute_names)

        # Update used names set with the local __dict__
        cls._rfm_dir.update(cls.__dict__)

        # Set up the hooks for the pipeline stages based on the _rfm_attach
        # attribute; all dependencies will be resolved first in the post-setup
        # phase if not assigned elsewhere
        hooks = HookRegistry.create(namespace)
        for b in bases:
            if hasattr(b, '_rfm_pipeline_hooks'):
                hooks.update(getattr(b, '_rfm_pipeline_hooks'))

        cls._rfm_pipeline_hooks = hooks  # HookRegistry(local_hooks)
        cls._final_methods = {v.__name__ for v in namespace.values()
                              if hasattr(v, '_rfm_final')}

        # Add the final functions from its parents
        cls._final_methods.update(*(b._final_methods for b in bases
                                    if hasattr(b, '_final_methods')))

        # Special (internal) tests are exempt from final-method enforcement.
        if hasattr(cls, '_rfm_special_test') and cls._rfm_special_test:
            return

        for v in namespace.values():
            for b in bases:
                if not hasattr(b, '_final_methods'):
                    continue

                if callable(v) and v.__name__ in b._final_methods:
                    msg = (f"'{cls.__qualname__}.{v.__name__}' attempts to "
                           f"override final method "
                           f"'{b.__qualname__}.{v.__name__}'; "
                           f"you should use the pipeline hooks instead")
                    raise ReframeSyntaxError(msg)

    def __call__(cls, *args, **kwargs):
        '''Intercept reframe-specific constructor arguments.

        When registering a regression test using any supported decorator,
        this decorator may pass additional arguments to the class constructor
        to perform specific reframe-internal actions. This gives extra control
        over the class instantiation process, allowing reframe to instantiate
        the regression test class differently if this class was registered or
        not (e.g. when deep-copying a regression test object). These interal
        arguments must be intercepted before the object initialization, since
        these would otherwise affect the __init__ method's signature, and these
        internal mechanisms must be fully transparent to the user.
        '''
        obj = cls.__new__(cls, *args, **kwargs)

        # Intercept constructor arguments
        kwargs.pop('_rfm_use_params', None)

        obj.__init__(*args, **kwargs)
        return obj

    def __getattr__(cls, name):
        ''' Attribute lookup method for the MetaNamespace.

        This metaclass implements a custom namespace, where built-in `variable`
        and `parameter` types are stored in their own sub-namespaces (see
        :class:`reframe.core.meta.RegressionTestMeta.MetaNamespace`).
        This method will perform an attribute lookup on these sub-namespaces if
        a call to the default `__getattribute__` method fails to retrieve the
        requested class attribute.
        '''
        try:
            return cls._rfm_var_space.vars[name]
        except KeyError:
            try:
                return cls._rfm_param_space.params[name]
            except KeyError:
                raise AttributeError(
                    f'class {cls.__qualname__!r} has no attribute {name!r}'
                ) from None

    @property
    def param_space(cls):
        # Make the parameter space available as read-only
        return cls._rfm_param_space

    def is_abstract(cls):
        '''Check if the class is an abstract test.

        This is the case when some parameters are undefined, which results in
        the length of the parameter space being 0.

        :return: bool indicating wheteher the test has undefined parameters.

        :meta private:
        '''
        return len(cls.param_space) == 0
|
6,854 | 7106a8ddbec60ce4b7d9e8e5ce8d7df02e5f7222 | from ScientificColorschemez import Colorschemez
import matplotlib.pyplot as plt
# Fetch the latest colorscheme, list its colors, and save an example plot.
cs = Colorschemez.latest()
for name, hexcode in zip(cs.colornames, cs.colors):
    # NOTE(review): the format places `hexcode` first, so the output is
    # "<hexcode>: <name>" despite the variable order in the zip.
    print('%s: %s' % (hexcode, name))
fig, ax = plt.subplots()
cs.example_plot(ax)
fig.savefig('latest.png', dpi=200, bbox_inches='tight')
|
6,855 | bde3975f5b614a4b00ad392d9f0b4c1bd8c55dc0 | # Neural network model(s) for the pygym 'CartPoleEnv'
#
# author: John Welsh
import torch.nn as nn
import torch.nn.functional as F
class CartPoleModel(nn.Module):
    """Q-value approximator for CartPole: 4 observation features -> 2 action values."""

    def __init__(self):
        super(CartPoleModel, self).__init__()
        self.fc1 = nn.Linear(4, 60)
        self.fc2 = nn.Linear(60, 120)
        self.fc3 = nn.Linear(120, 2)

    def forward(self, x):
        # Fix: torch.nn.functional.tanh is deprecated; use the Tensor method
        # instead (identical computation, no deprecation warning).
        x = self.fc1(x).tanh()
        x = self.fc2(x).tanh()
        # No activation on the output head: raw Q-values.
        x = self.fc3(x)
        return x
|
6,856 | a159f9f9cc06bb9d22f84781fb2fc664ea204b64 | import time
if __name__ == '__main__':
    # Emit one line per second for ten seconds, then a success marker
    # (looks like a smoke test for process-output streaming).
    for i in range(10):
        print('here %s' % i)
        time.sleep(1)
    # NOTE(review): 'SUCEEDED' is misspelled, but consumers may match the
    # exact string, so it is deliberately left unchanged here.
    print('TEST SUCEEDED')
|
6,857 | 981cfecdb50b5f3ae326bf3103163f6e814ccc95 | import numpy as np
import torch
import torch.nn as nn
from utils import *
from collections import OrderedDict
from torchsummary import summary
class Model(nn.Module):
    """Q-value network over a player view image plus auxiliary features.

    Example usage:
        model = Model()
        outputs = model(pov_tensor, feat_tensor)
    """

    def __init__(self):
        super(Model, self).__init__()
        # Convolutional network architecture: 3x64x64 POV -> 50-d embedding.
        # 64 -> conv5/s2 -> 30 -> pool -> 15 -> conv3 -> 13 -> pool -> 6
        #    -> conv3 -> 4 -> pool -> 2; flatten 24*2*2 = 96.
        self.image_embed = nn.Sequential(
            nn.BatchNorm2d(3),
            nn.Conv2d(3, 16, 5, stride=2),
            nn.MaxPool2d(2, 2),
            nn.LeakyReLU(True),
            nn.BatchNorm2d(16),
            nn.Conv2d(16, 24, 3),
            nn.MaxPool2d(2, 2),
            nn.LeakyReLU(True),
            nn.BatchNorm2d(24),
            nn.Conv2d(24, 24, 3),
            nn.MaxPool2d(2, 2),
            nn.LeakyReLU(True),
            nn.BatchNorm2d(24),
            nn.Flatten(),
            nn.Linear(96, 50),
        )
        # Regularization layer: mixes the image embedding with the 2 extra features.
        self.l1 = nn.Linear(50 + 2, 50)
        self.r1 = nn.LeakyReLU()
        self.out = nn.Linear(50, 11)

    def forward(self, pov, feats):
        """Approximate Q values for the given observation.

        Input
        -----
        pov: (batch_size, 3, 64, 64) tensor of player view
        feats: (batch_size, 2) auxiliary feature tensor

        Returns
        -------
        (batch_size, 11) tensor of Q values.
        Fix: the original contract (a stray bare string in the class body)
        claimed 9 outputs with an attack/camera/... index mapping, but the
        output head is Linear(50, 11); the mapping of the 11 indices is not
        established by this file — TODO confirm against the action encoder.
        """
        pov = self.image_embed(pov)
        full_embed = self.l1(torch.cat((pov, feats), dim=1))
        full_embed = self.r1(full_embed)
        out = self.out(full_embed)
        return out
|
6,858 | e4767d8a4991a1180cc185c4c2d77104d63f9c7a | import json
import argparse
import sys
import os
if __name__ == '__main__':
    # Build the trial-scheduling config: start date, round number, the two
    # schedule names, and three fish assigned to each schedule.
    ap = argparse.ArgumentParser()
    ap.add_argument("-sd","--startdate", help="Date to start scheduling trials, format is MM/DD.", required=True)
    ap.add_argument("-r", "--round",help="A number.", required=True)
    ap.add_argument("-hs", "--hsched", help="Which high schedule to use (e.g. H1, H2, H3)", required=True)
    ap.add_argument("-ls", "--lsched", help="Which low schedule to use (e.g. H1, H2, H3)", required=True)
    ap.add_argument("-h1", "--hfish1", help="1st Fish that will be assigned H schedule", required=True)
    ap.add_argument("-h2", "--hfish2", help="2nd Fish that will be assigned H schedule", required=True)
    ap.add_argument("-h3", "--hfish3", help="3rd Fish that will be assigned H schedule", required=True)
    ap.add_argument("-l1", "--lfish1", help="1st Fish that will be assigned L schedule", required=True)
    ap.add_argument("-l2", "--lfish2", help="2nd Fish that will be assigned L schedule", required=True)
    ap.add_argument("-l3", "--lfish3", help="3rd Fish that will be assigned L schedule", required=True)
    args = vars(ap.parse_args())

    a_dict = {"startDate": args["startdate"], "round": args["round"], "h_schedule": args["hsched"], "l_schedule": args["lsched"], "mapping": {"H": { "fish1" : args["hfish1"], "fish2": args["hfish2"], "fish3": args["hfish3"]}, "L": { "fish1" : args["lfish1"], "fish2": args["lfish2"], "fish3": args["lfish3"]}}}
    #print a_dict

    # Fix: the original called os.remove('top.json') unconditionally, which
    # raised FileNotFoundError when the file did not yet exist.  Opening
    # with mode 'w' already truncates an existing file, so no removal is
    # needed at all.
    with open('top.json', 'w') as f:
        json.dump(a_dict, f, sort_keys=True, indent=4, separators=(',', ': '))
    sys.exit(0)
|
6,859 | 1fe7d5db1b47ba082301d07d010c6796fbd7edb7 | import random
def Fun_hiraganas():
    """Print a prompt asking the user to write one randomly chosen hiragana."""
    hiraganas = ['a', 'i', 'u', 'e', 'o', 'ka', 'ki', 'ku', 'ke', 'ko', 'sa', 'shi', 'su', 'se',
                 'so', 'ta', 'chi', 'tsu', 'te', 'to', 'na', 'ni', 'nu', 'ne', 'no', 'ha', 'hi', 'fu', 'he', 'ho']
    # random.choice is the idiomatic (and off-by-one-proof) replacement for
    # indexing with randint(0, len - 1).
    print("escriba el hiragana", random.choice(hiraganas))
print("Hello, type exit if you want to leave")
answer = ""
# Keep quizzing until the user types 'exit' (case-insensitive).
while answer.lower() != 'exit':
    Fun_hiraganas()
    answer = input("Type exit if you want to leave")
print("bye")
|
6,860 | 1a5c189b9a2bed35fbbb7df40ec80a1d02402d7f | import fnmatch
import tempfile
from contextlib import contextmanager
from os import (
makedirs,
unlink,
)
from os.path import (
abspath,
basename,
dirname,
exists,
join,
sep,
)
from re import (
compile,
escape,
)
from typing import (
Any,
Dict,
List,
Type,
)
from urllib.parse import urlencode
from galaxy.util.bunch import Bunch
from .config_util import read_file
from .transport import (
get_file,
post_file,
rsync_get_file,
rsync_post_file,
scp_get_file,
scp_post_file,
)
from .util import (
copy_to_path,
directory_files,
unique_path_prefix,
)
DEFAULT_MAPPED_ACTION = 'transfer' # Not really clear to me what this should be, exception?
DEFAULT_PATH_MAPPER_TYPE = 'prefix'
STAGING_ACTION_REMOTE = "remote"
STAGING_ACTION_LOCAL = "local"
STAGING_ACTION_NONE = None
STAGING_ACTION_DEFAULT = "default"
# Poor man's enum.
path_type = Bunch(
# Galaxy input datasets and extra files.
INPUT="input",
# Galaxy config and param files.
CONFIG="config",
# Files from tool's tool_dir (for now just wrapper if available).
TOOL="tool",
# Input tool work dir files - e.g. task-split input file
WORKDIR="workdir",
# Job directory files (e.g. tool standard input/output and containerized command).
JOBDIR="jobdir",
# Input metadata dir files - e.g. metadata files, etc..
METADATA="metadata",
# Galaxy output datasets in their final home.
OUTPUT="output",
# Galaxy from_work_dir output paths and other files (e.g. galaxy.json)
OUTPUT_WORKDIR="output_workdir",
# Meta job and data files (e.g. Galaxy metadata generation files and
# metric instrumentation files)
OUTPUT_METADATA="output_metadata",
# Job directory files output.
OUTPUT_JOBDIR="output_jobdir",
# Other fixed tool parameter paths (likely coming from tool data, but not
# necessarily).
UNSTRUCTURED="unstructured",
)
ACTION_DEFAULT_PATH_TYPES = [
path_type.INPUT,
path_type.CONFIG,
path_type.TOOL,
path_type.WORKDIR,
path_type.JOBDIR,
path_type.METADATA,
path_type.OUTPUT,
path_type.OUTPUT_WORKDIR,
path_type.OUTPUT_METADATA,
path_type.OUTPUT_JOBDIR,
]
ALL_PATH_TYPES = ACTION_DEFAULT_PATH_TYPES + [path_type.UNSTRUCTURED]
MISSING_FILES_ENDPOINT_ERROR = "Attempted to use remote_transfer action without defining a files_endpoint."
MISSING_SSH_KEY_ERROR = "Attempt to use file transfer action requiring an SSH key without specifying a ssh_key."
class FileActionMapper:
    """
    Objects of this class define how paths are mapped to actions.

    >>> json_string = r'''{"paths": [ \
    {"path": "/opt/galaxy", "action": "none"}, \
    {"path": "/galaxy/data", "action": "transfer"}, \
    {"path": "/cool/bamfiles/**/*.bam", "action": "copy", "match_type": "glob"}, \
    {"path": ".*/dataset_\\\\d+.dat", "action": "copy", "match_type": "regex"} \
    ]}'''
    >>> from tempfile import NamedTemporaryFile
    >>> from os import unlink
    >>> def mapper_for(default_action, config_contents):
    ...     f = NamedTemporaryFile(delete=False)
    ...     f.write(config_contents.encode('UTF-8'))
    ...     f.close()
    ...     mock_client = Bunch(default_file_action=default_action, action_config_path=f.name, files_endpoint=None)
    ...     mapper = FileActionMapper(mock_client)
    ...     as_dict = config=mapper.to_dict()
    ...     mapper = FileActionMapper(config=as_dict)  # Serialize and deserialize it to make sure still works
    ...     unlink(f.name)
    ...     return mapper
    >>> mapper = mapper_for(default_action='none', config_contents=json_string)
    >>> # Test first config line above, implicit path prefix mapper
    >>> action = mapper.action({'path': '/opt/galaxy/tools/filters/catWrapper.py'}, 'input')
    >>> action.action_type == u'none'
    True
    >>> action.staging_needed
    False
    >>> # Test another (2nd) mapper, this one with a different action
    >>> action = mapper.action({'path': '/galaxy/data/files/000/dataset_1.dat'}, 'input')
    >>> action.action_type == u'transfer'
    True
    >>> action.staging_needed
    True
    >>> # Always at least copy work_dir outputs.
    >>> action = mapper.action({'path': '/opt/galaxy/database/working_directory/45.sh'}, 'workdir')
    >>> action.action_type == u'copy'
    True
    >>> action.staging_needed
    True
    >>> # Test glob mapper (matching test)
    >>> mapper.action({'path': '/cool/bamfiles/projectABC/study1/patient3.bam'}, 'input').action_type == u'copy'
    True
    >>> # Test glob mapper (non-matching test)
    >>> mapper.action({'path': '/cool/bamfiles/projectABC/study1/patient3.bam.bai'}, 'input').action_type == u'none'
    True
    >>> # Regex mapper test.
    >>> mapper.action({'path': '/old/galaxy/data/dataset_10245.dat'}, 'input').action_type == u'copy'
    True
    >>> # Doesn't map unstructured paths by default
    >>> mapper.action({'path': '/old/galaxy/data/dataset_10245.dat'}, 'unstructured').action_type == u'none'
    True
    >>> input_only_mapper = mapper_for(default_action="none", config_contents=r'''{"paths": [ \
    {"path": "/", "action": "transfer", "path_types": "input"} \
    ] }''')
    >>> input_only_mapper.action({'path': '/dataset_1.dat'}, 'input').action_type == u'transfer'
    True
    >>> input_only_mapper.action({'path': '/dataset_1.dat'}, 'output').action_type == u'none'
    True
    >>> unstructured_mapper = mapper_for(default_action="none", config_contents=r'''{"paths": [ \
    {"path": "/", "action": "transfer", "path_types": "*any*"} \
    ] }''')
    >>> unstructured_mapper.action({'path': '/old/galaxy/data/dataset_10245.dat'}, 'unstructured').action_type == u'transfer'
    True
    >>> match_type_only_mapper = mapper_for(default_action="none", config_contents=r'''{"paths": [ \
    {"action": "transfer", "path_types": "input"}, \
    {"action": "remote_copy", "path_types": "output"} \
    ] }''')
    >>> input_action = match_type_only_mapper.action({}, 'input')
    >>> input_action.action_type
    'transfer'
    >>> output_action = match_type_only_mapper.action({}, 'output')
    >>> output_action.action_type
    'remote_copy'
    """

    def __init__(self, client=None, config=None):
        # Accept either a client object (whose settings are converted to a
        # config dict) or an already-built config dict — exactly one needed.
        if config is None and client is None:
            message = "FileActionMapper must be constructed from either a client or a config dictionary."
            raise Exception(message)
        if config is None:
            config = self.__client_to_config(client)
        self.default_action = config.get("default_action", "transfer")
        self.ssh_key = config.get("ssh_key", None)
        self.ssh_user = config.get("ssh_user", None)
        self.ssh_host = config.get("ssh_host", None)
        self.ssh_port = config.get("ssh_port", None)
        self.mappers = mappers_from_dicts(config.get("paths", []))
        self.files_endpoint = config.get("files_endpoint", None)

    def action(self, source, type, mapper=None):
        """Resolve *source* (dict with optional 'path') and path *type* to a
        concrete action instance, consulting the configured mappers."""
        path = source.get("path", None)
        mapper = self.__find_mapper(path, type, mapper)
        action_class = self.__action_class(path, type, mapper)
        file_lister = DEFAULT_FILE_LISTER
        action_kwds = {}
        if mapper:
            file_lister = mapper.file_lister
            action_kwds = mapper.action_kwds
        action = action_class(source, file_lister=file_lister, **action_kwds)
        self.__process_action(action, type)
        return action

    def unstructured_mappers(self):
        """ Return mappers that will map 'unstructured' files (i.e. go beyond
        mapping inputs, outputs, and config files).
        """
        return filter(lambda m: path_type.UNSTRUCTURED in m.path_types, self.mappers)

    def to_dict(self):
        # Round-trippable serialization — see the doctest above.
        return dict(
            default_action=self.default_action,
            files_endpoint=self.files_endpoint,
            ssh_key=self.ssh_key,
            ssh_user=self.ssh_user,
            ssh_port=self.ssh_port,
            ssh_host=self.ssh_host,
            paths=list(map(lambda m: m.to_dict(), self.mappers))
        )

    def __client_to_config(self, client):
        # Prefer an on-disk action config file when the client names one;
        # otherwise fall back to the client's in-memory file_actions dict.
        action_config_path = client.action_config_path
        if action_config_path:
            config = read_file(action_config_path)
        else:
            config = getattr(client, "file_actions", {})
        config["default_action"] = client.default_file_action
        config["files_endpoint"] = client.files_endpoint
        for attr in ['ssh_key', 'ssh_user', 'ssh_port', 'ssh_host']:
            if hasattr(client, attr):
                config[attr] = getattr(client, attr)
        return config

    def __find_mapper(self, path, type, mapper=None):
        # First configured mapper that matches wins; an explicit mapper
        # argument short-circuits the search.
        if not mapper:
            if path is not None:
                normalized_path = abspath(path)
            else:
                normalized_path = None
            for query_mapper in self.mappers:
                if query_mapper.matches(normalized_path, type):
                    mapper = query_mapper
                    break
        return mapper

    def __action_class(self, path, type, mapper):
        action_type = self.default_action if type in ACTION_DEFAULT_PATH_TYPES else "none"
        if mapper:
            action_type = mapper.action_type
        if type in ["workdir", "jobdir", "output_workdir", "output_metadata", "output_jobdir"] and action_type == "none":
            # We are changing the working_directory/job_directory relative to what
            # Galaxy would use, these need to be copied over.
            action_type = "copy"
        action_class = actions.get(action_type, None)
        if action_class is None:
            message_template = "Unknown action_type encountered %s while trying to map path %s"
            message_args = (action_type, path)
            raise Exception(message_template % message_args)
        return action_class

    def __process_action(self, action, file_type):
        """ Extension point to populate extra action information after an
        action has been created.
        """
        if getattr(action, "inject_url", False):
            self.__inject_url(action, file_type)
        if getattr(action, "inject_ssh_properties", False):
            self.__inject_ssh_properties(action)

    def __inject_url(self, action, file_type):
        # Build the staging URL from the configured files_endpoint, appending
        # path/file_type as query parameters (handles pre-existing '?').
        url_base = self.files_endpoint
        if not url_base:
            raise Exception(MISSING_FILES_ENDPOINT_ERROR)
        if "?" not in url_base:
            url_base = "%s?" % url_base
        else:
            url_base = "%s&" % url_base
        url_params = urlencode({"path": action.path, "file_type": file_type})
        action.url = f"{url_base}{url_params}"

    def __inject_ssh_properties(self, action):
        # Fill any UNSET ssh kwds on the action from the mapper's defaults;
        # a usable key must be present by the end.
        for attr in ["ssh_key", "ssh_host", "ssh_port", "ssh_user"]:
            action_attr = getattr(action, attr)
            if action_attr == UNSET_ACTION_KWD:
                client_default_attr = getattr(self, attr, None)
                setattr(action, attr, client_default_attr)
        if action.ssh_key is None:
            raise Exception(MISSING_SSH_KEY_ERROR)
REQUIRED_ACTION_KWD = object()
UNSET_ACTION_KWD = "__UNSET__"
class BaseAction:
    """Common base for all file staging actions.

    Subclasses define ``action_type`` and a ``staging`` disposition; the
    ``source`` dict (optionally containing a 'path' key) describes the file
    the action applies to.
    """
    whole_directory_transfer_supported = False
    action_spec: Dict[str, Any] = {}
    action_type: str

    def __init__(self, source, file_lister=None):
        self.source = source
        self.file_lister = file_lister if file_lister else DEFAULT_FILE_LISTER

    @property
    def path(self):
        return self.source.get("path")

    def unstructured_map(self, path_helper):
        """Map unstructured paths either to unique staged names (when staging
        is needed) or to rewritten remote paths (when it is not)."""
        mapping = self.file_lister.unstructured_map(self.path)
        if self.staging_needed:
            # To ensure uniqueness, prepend unique prefix to each name
            prefix = unique_path_prefix(self.path)
            return {raw_path: join(prefix, name) for raw_path, name in mapping.items()}
        rewrites = {}
        for raw_path in mapping:
            new_path = self.path_rewrite(path_helper, raw_path)
            if new_path:
                rewrites[raw_path] = new_path
        return rewrites

    @property
    def staging_needed(self):
        return self.staging != STAGING_ACTION_NONE

    @property
    def staging_action_local(self):
        return self.staging == STAGING_ACTION_LOCAL

    def _extend_base_dict(self, **kwds):
        base_dict = dict(
            path=self.path,  # For older Pulsar servers (pre-0.13.0?)
            source=self.source,
            action_type=self.action_type,
        )
        base_dict.update(**kwds)
        return base_dict

    def to_dict(self):
        return self._extend_base_dict()

    def __str__(self):
        parts = [
            f"{key}={value}"
            for key, value in self.to_dict().items()
            if key != "source"
        ]
        return "FileAction[%s]" % ",".join(parts)
class NoneAction(BaseAction):
    """ This action indicates the corresponding path does not require any
    additional action. This should indicate paths that are available both on
    the Pulsar client (i.e. Galaxy server) and remote Pulsar server with the same
    paths. """
    action_type = "none"
    staging = STAGING_ACTION_NONE

    def to_dict(self):
        return self._extend_base_dict()

    @classmethod
    def from_dict(cls, action_dict):
        return cls(source=action_dict["source"])

    def path_rewrite(self, path_helper, path=None):
        # Nothing to rewrite — the path is valid as-is on both ends.
        return None
class RewriteAction(BaseAction):
    """ This action indicates the Pulsar server should simply rewrite the path
    to the specified file.
    """
    action_spec = dict(
        source_directory=REQUIRED_ACTION_KWD,
        destination_directory=REQUIRED_ACTION_KWD
    )
    action_type = "rewrite"
    staging = STAGING_ACTION_NONE

    def __init__(self, source, file_lister=None, source_directory=None, destination_directory=None):
        super().__init__(source, file_lister=file_lister)
        self.source_directory = source_directory
        self.destination_directory = destination_directory

    def to_dict(self):
        return self._extend_base_dict(
            source_directory=self.source_directory,
            destination_directory=self.destination_directory,
        )

    @classmethod
    def from_dict(cls, action_dict):
        return RewriteAction(
            source=action_dict["source"],
            source_directory=action_dict["source_directory"],
            destination_directory=action_dict["destination_directory"],
        )

    def path_rewrite(self, path_helper, path=None):
        # Translate from source_directory to destination_directory; returns
        # None when the rewrite would be a no-op.
        # NOTE(review): the local `path` default is computed but self.path is
        # what gets rewritten below — confirm whether `path` should be used.
        if not path:
            path = self.path
        new_path = path_helper.from_posix_with_new_base(self.path, self.source_directory, self.destination_directory)
        return None if new_path == self.path else new_path
class TransferAction(BaseAction):
    """ This actions indicates that the Pulsar client should initiate an HTTP
    transfer of the corresponding path to the remote Pulsar server before
    launching the job. """
    action_type = "transfer"
    # Staged by the client (Galaxy side) before job launch.
    staging = STAGING_ACTION_LOCAL
class CopyAction(BaseAction):
    """ This action indicates that the Pulsar client should execute a file system
    copy of the corresponding path to the Pulsar staging directory prior to
    launching the corresponding job. """
    action_type = "copy"
    # Staged by the client (Galaxy side) before job launch.
    staging = STAGING_ACTION_LOCAL
class RemoteCopyAction(BaseAction):
    """ This action indicates the Pulsar server should copy the file before
    execution via direct file system copy. This is like a CopyAction, but
    it indicates the action should occur on the Pulsar server instead of on
    the client.
    """
    action_type = "remote_copy"
    staging = STAGING_ACTION_REMOTE

    @classmethod
    def from_dict(cls, action_dict):
        return RemoteCopyAction(source=action_dict["source"])

    def write_to_path(self, path):
        """Copy self.path to *path* on the Pulsar side."""
        # Context manager closes the source handle even if the copy fails
        # (the original leaked the handle returned by open()).
        with open(self.path, "rb") as f:
            copy_to_path(f, path)

    def write_from_path(self, pulsar_path):
        """Copy *pulsar_path* back to self.path, creating parent dirs."""
        destination = self.path
        parent_directory = dirname(destination)
        # exist_ok avoids the check-then-create race of the original
        # exists()/makedirs() pair.
        makedirs(parent_directory, exist_ok=True)
        with open(pulsar_path, "rb") as f:
            copy_to_path(f, destination)
class RemoteTransferAction(BaseAction):
    """ This action indicates the Pulsar server should transfer the file before
    execution via one of the remote transfer implementations. This is like a TransferAction, but
    it indicates the action requires network access to the staging server, and
    should be executed via ssh/rsync/etc
    """
    # FileActionMapper.__process_action fills in self.url from files_endpoint.
    inject_url = True
    action_type = "remote_transfer"
    staging = STAGING_ACTION_REMOTE

    def __init__(self, source, file_lister=None, url=None):
        super().__init__(source, file_lister=file_lister)
        self.url = url

    def to_dict(self):
        return self._extend_base_dict(url=self.url)

    @classmethod
    def from_dict(cls, action_dict):
        return RemoteTransferAction(source=action_dict["source"], url=action_dict["url"])

    def write_to_path(self, path):
        # Download the file at self.url into the local path.
        get_file(self.url, path)

    def write_from_path(self, pulsar_path):
        # Upload the local file to self.url.
        post_file(self.url, pulsar_path)
class RemoteObjectStoreCopyAction(BaseAction):
    """Copy the file on the Pulsar server directly out of a Galaxy object
    store (resolved through an externally injected ``object_store``) instead
    of transferring it over the network.
    """
    action_type = "remote_object_store_copy"
    staging = STAGING_ACTION_REMOTE
    inject_object_store = True

    @classmethod
    def from_dict(cls, action_dict):
        return RemoteObjectStoreCopyAction(source=action_dict["source"])

    def write_to_path(self, path):
        """Resolve the dataset in the object store and copy it to *path*."""
        assert self.object_store  # Make sure object_store attribute injected
        assert "object_store_ref" in self.source
        object_store_ref = self.source["object_store_ref"]
        dataset_object = Bunch(
            id=object_store_ref["dataset_id"],
            uuid=object_store_ref["dataset_uuid"],
            object_store_id=object_store_ref["object_store_id"],
        )
        filename = self.object_store.get_filename(dataset_object)
        # Context manager closes the source handle deterministically (the
        # original leaked the handle returned by open()).
        with open(filename, 'rb') as f:
            copy_to_path(f, path)

    def write_from_path(self, pulsar_path):
        raise NotImplementedError("Writing raw files to object store not supported at this time.")
class PubkeyAuthenticatedTransferAction(BaseAction):
    """Base class for file transfers requiring an SSH public/private key
    """
    inject_ssh_properties = True
    action_spec = dict(
        ssh_key=UNSET_ACTION_KWD,
        ssh_user=UNSET_ACTION_KWD,
        ssh_host=UNSET_ACTION_KWD,
        ssh_port=UNSET_ACTION_KWD,
    )
    staging = STAGING_ACTION_REMOTE

    def __init__(self, source, file_lister=None, ssh_user=UNSET_ACTION_KWD,
                 ssh_host=UNSET_ACTION_KWD, ssh_port=UNSET_ACTION_KWD, ssh_key=UNSET_ACTION_KWD):
        super().__init__(source, file_lister=file_lister)
        self.ssh_user = ssh_user
        self.ssh_host = ssh_host
        self.ssh_port = ssh_port
        self.ssh_key = ssh_key

    def to_dict(self):
        # NOTE(review): ssh_key is deliberately not serialized here —
        # presumably to keep private key material out of the dict; confirm
        # before adding it.
        return self._extend_base_dict(
            ssh_user=self.ssh_user,
            ssh_host=self.ssh_host,
            ssh_port=self.ssh_port
        )

    @contextmanager
    def _serialized_key(self):
        """Yield the path of a temp file holding the key; always clean up."""
        key_file = self.__serialize_ssh_key()
        try:
            yield key_file
        finally:
            # Remove the key file even if the transfer raises (the original
            # skipped cleanup on exceptions).
            self.__cleanup_ssh_key(key_file)

    def __serialize_ssh_key(self):
        # Validate before creating the temp file so no empty orphan file is
        # left behind when no key material is configured.
        if self.ssh_key is None:
            raise Exception("SSH_KEY not available")
        f = tempfile.NamedTemporaryFile(delete=False)
        try:
            f.write(self.ssh_key.encode("utf-8"))
        finally:
            # Close so the contents are flushed and external ssh/rsync/scp
            # processes can open the file (the original never closed it).
            f.close()
        return f.name

    def __cleanup_ssh_key(self, keyfile):
        if exists(keyfile):
            unlink(keyfile)
class RsyncTransferAction(PubkeyAuthenticatedTransferAction):
    """Key-authenticated transfer implemented with rsync."""
    action_type = "remote_rsync_transfer"

    @classmethod
    def from_dict(cls, action_dict):
        return RsyncTransferAction(source=action_dict["source"],
                                   ssh_user=action_dict["ssh_user"],
                                   ssh_host=action_dict["ssh_host"],
                                   ssh_port=action_dict["ssh_port"],
                                   ssh_key=action_dict["ssh_key"])

    def write_to_path(self, path):
        # Pull self.path from the remote host into the local path.
        with self._serialized_key() as key_file:
            rsync_get_file(self.path, path, self.ssh_user, self.ssh_host,
                           self.ssh_port, key_file)

    def write_from_path(self, pulsar_path):
        # Push the local pulsar_path to self.path on the remote host.
        with self._serialized_key() as key_file:
            rsync_post_file(pulsar_path, self.path, self.ssh_user,
                            self.ssh_host, self.ssh_port, key_file)
class ScpTransferAction(PubkeyAuthenticatedTransferAction):
    """Key-authenticated transfer implemented with scp."""
    action_type = "remote_scp_transfer"

    @classmethod
    def from_dict(cls, action_dict):
        return ScpTransferAction(source=action_dict["source"],
                                 ssh_user=action_dict["ssh_user"],
                                 ssh_host=action_dict["ssh_host"],
                                 ssh_port=action_dict["ssh_port"],
                                 ssh_key=action_dict["ssh_key"])

    def write_to_path(self, path):
        # Pull self.path from the remote host into the local path.
        with self._serialized_key() as key_file:
            scp_get_file(self.path, path, self.ssh_user, self.ssh_host,
                         self.ssh_port, key_file)

    def write_from_path(self, pulsar_path):
        # Push the local pulsar_path to self.path on the remote host.
        with self._serialized_key() as key_file:
            scp_post_file(pulsar_path, self.path, self.ssh_user, self.ssh_host,
                          self.ssh_port, key_file)
class MessageAction:
    """ Sort of pseudo action describing "files" stored in memory and
    transferred via message (HTTP, Python-call, MQ, etc...)
    """
    action_type = "message"
    staging = STAGING_ACTION_DEFAULT

    def __init__(self, contents, client=None):
        self.contents = contents
        self.client = client

    @property
    def staging_needed(self):
        return True

    @property
    def staging_action_local(self):
        # Ekkk, cannot be called if created through from_dict.
        # Shouldn't be a problem the way it is used - but is an
        # object design problem.
        return self.client.prefer_local_staging

    def to_dict(self):
        return dict(contents=self.contents, action_type=MessageAction.action_type)

    @classmethod
    def from_dict(cls, action_dict):
        return MessageAction(contents=action_dict["contents"])

    def write_to_path(self, path):
        """Materialize the in-memory contents as a file at *path*."""
        # Context manager closes (and flushes) the handle — the original used
        # a bare open(...).write(...) and leaked the file object.
        with open(path, "w") as f:
            f.write(self.contents)
DICTIFIABLE_ACTION_CLASSES = [
RemoteCopyAction,
RemoteTransferAction,
MessageAction,
RsyncTransferAction,
ScpTransferAction,
RemoteObjectStoreCopyAction
]
def from_dict(action_dict):
    """Recover an action instance from its ``to_dict()`` representation.

    :param action_dict: dict containing at least ``action_type`` plus either
        a ``source`` dict (modern clients) or a legacy top-level ``path``.
    :raises Exception: if ``action_type`` names no dictifiable action class.
    """
    action_type = action_dict.get("action_type", None)
    target_class = None
    for action_class in DICTIFIABLE_ACTION_CLASSES:
        if action_type == action_class.action_type:
            target_class = action_class
            break  # action_type values are unique; first match is the match
    if not target_class:
        message = "Failed to recover action from dictionary - invalid action type specified %s." % action_type
        raise Exception(message)
    if "source" in action_dict:
        # Newer payloads carry both "source" and a redundant "path" (kept for
        # backward compatibility); drop the latter, tolerating its absence —
        # the original unconditional pop("path") raised KeyError for payloads
        # without it (e.g. MessageAction dicts have neither key handled here).
        action_dict.pop("path", None)
    elif "path" in action_dict:
        # legacy message received from older Pulsar client, pop the path from the dict
        # and convert it to a source.
        source = {"path": action_dict.pop("path")}
        action_dict["source"] = source
    return target_class.from_dict(action_dict)
class BasePathMapper:
    """Base for config-driven path matchers; subclasses set ``match_type``
    and implement ``_path_matches``/``to_pattern``/``to_dict``."""
    match_type: str

    def __init__(self, config):
        action_type = config.get('action', DEFAULT_MAPPED_ACTION)
        action_class = actions.get(action_type, None)
        # Start from the action's declared keyword spec, overlay values from
        # the config, and require every REQUIRED_ACTION_KWD to be supplied.
        action_kwds = action_class.action_spec.copy()
        for key, value in action_kwds.items():
            if key in config:
                action_kwds[key] = config[key]
            elif value is REQUIRED_ACTION_KWD:
                message_template = "action_type %s requires key word argument %s"
                message = message_template % (action_type, key)
                raise Exception(message)
            else:
                action_kwds[key] = value
        self.action_type = action_type
        self.action_kwds = action_kwds
        # path_types is a comma-separated list supporting the *defaults* and
        # *any* macros (expanded from the module-level path-type lists).
        path_types_str = config.get('path_types', "*defaults*")
        path_types_str = path_types_str.replace("*defaults*", ",".join(ACTION_DEFAULT_PATH_TYPES))
        path_types_str = path_types_str.replace("*any*", ",".join(ALL_PATH_TYPES))
        self.path_types = path_types_str.split(",")
        self.file_lister = FileLister(config)

    def matches(self, path, path_type):
        # A mapper applies only when both the path type and the path match.
        path_type_matches = path_type in self.path_types
        rval = path_type_matches and self._path_matches(path)
        return rval

    def _extend_base_dict(self, **kwds):
        # Shared serialization helper for subclasses' to_dict().
        base_dict = dict(
            action=self.action_type,
            path_types=",".join(self.path_types),
            match_type=self.match_type
        )
        base_dict.update(self.file_lister.to_dict())
        base_dict.update(self.action_kwds)
        base_dict.update(**kwds)
        return base_dict

    def to_pattern(self):
        raise NotImplementedError()
class PathTypeOnlyMapper(BasePathMapper):
    """Mapper that matches purely on path type — every path qualifies."""
    match_type = 'path_type_only'

    def __init__(self, config):
        super().__init__(config)

    def _path_matches(self, path):
        # Unconditional match; the path_type filter in matches() decides.
        return True

    def to_dict(self):
        return self._extend_base_dict()
class PrefixPathMapper(BasePathMapper):
    """Mapper matching any path that starts with a configured prefix."""
    match_type = 'prefix'

    def __init__(self, config):
        super().__init__(config)
        self.prefix_path = abspath(config['path'])

    def _path_matches(self, path):
        if path is None:
            return False
        return path.startswith(self.prefix_path)

    def to_pattern(self):
        # Capture prefix + separator + a run of non-space/quote/comma chars.
        pattern_str = r"({}{}[^\s,\"\']+)".format(escape(self.prefix_path), escape(sep))
        return compile(pattern_str)

    def to_dict(self):
        return self._extend_base_dict(path=self.prefix_path)
class GlobPathMapper(BasePathMapper):
    """Mapper matching paths against a shell-style glob pattern."""
    match_type = 'glob'

    def __init__(self, config):
        super().__init__(config)
        self.glob_path = config['path']

    def _path_matches(self, path):
        if path is None:
            return False
        return fnmatch.fnmatch(path, self.glob_path)

    def to_pattern(self):
        # fnmatch.translate converts the glob to an equivalent regex.
        return compile(fnmatch.translate(self.glob_path))

    def to_dict(self):
        return self._extend_base_dict(path=self.glob_path)
class RegexPathMapper(BasePathMapper):
    """Mapper matching paths against a configured regular expression."""
    match_type = 'regex'

    def __init__(self, config):
        super().__init__(config)
        # Keep the raw pattern for serialization; compile once for matching.
        self.pattern_raw = config['path']
        self.pattern = compile(self.pattern_raw)

    def _path_matches(self, path):
        if path is None:
            return False
        return self.pattern.match(path) is not None

    def to_pattern(self):
        return self.pattern

    def to_dict(self):
        return self._extend_base_dict(path=self.pattern_raw)
MAPPER_CLASSES = [PathTypeOnlyMapper, PrefixPathMapper, GlobPathMapper, RegexPathMapper]
MAPPER_CLASS_DICT = dict(map(lambda c: (c.match_type, c), MAPPER_CLASSES))
def mappers_from_dicts(mapper_def_list):
    """Build one path-mapper instance per config dict in *mapper_def_list*."""
    return [_mappper_from_dict(mapper_def) for mapper_def in mapper_def_list]
def _mappper_from_dict(mapper_dict):
    """Instantiate the mapper class implied by *mapper_dict*.

    A dict without a 'path' key can only match on path type; otherwise the
    explicit match_type (defaulting to prefix matching) selects the class.
    """
    if "path" not in mapper_dict:
        map_type = 'path_type_only'
    else:
        map_type = mapper_dict.get('match_type', DEFAULT_PATH_MAPPER_TYPE)
    return MAPPER_CLASS_DICT[map_type](mapper_dict)
class FileLister:
    """Expands a path into the files it stands for.

    With depth 0 the path maps to itself; with depth N the path's N-th
    ancestor directory is listed instead.
    """

    def __init__(self, config):
        self.depth = int(config.get("depth", "0"))

    def to_dict(self):
        return {"depth": self.depth}

    def unstructured_map(self, path):
        """Return a {full_path: short_name} mapping for *path*."""
        if self.depth == 0:
            return {path: basename(path)}
        directory = path
        for _ in range(self.depth):
            directory = dirname(directory)
        return {join(directory, name): name for name in directory_files(directory)}
DEFAULT_FILE_LISTER = FileLister(dict(depth=0))
ACTION_CLASSES: List[Type[BaseAction]] = [
NoneAction,
RewriteAction,
TransferAction,
CopyAction,
RemoteCopyAction,
RemoteTransferAction,
RemoteObjectStoreCopyAction,
RsyncTransferAction,
ScpTransferAction,
]
actions = {clazz.action_type: clazz for clazz in ACTION_CLASSES}
__all__ = (
'FileActionMapper',
'path_type',
'from_dict',
'MessageAction',
'RemoteTransferAction', # For testing
)
|
6,861 | 86d3e90493ed04bbe23792716f46a68948911dc3 | import cv2
import numpy as np
import time
import itertools
from unionfind import UnionFind
R = 512
C = 512
# Setup window
cv2.namedWindow('main')
#img_i = np.zeros((R, C), np.uint8)
img_i = cv2.imread("window1.png", cv2.IMREAD_GRAYSCALE)
#img_i = cv2.threshold(img_i, 127, 255, cv2.THRESH_BINARY)[1]
down = False
last_pos = (0,0)
last_time = time.time()
def wtf(img):
    """
    Morphological skeletonization by repeated erode / open / subtract.

    Source: http://opencvpython.blogspot.com.au/2012/05/skeletonization-using-opencv-python.html
    :param img: grayscale image; binarized internally at threshold 127.
    :return: thinned image
    """
    thinned = np.zeros(img.shape, np.uint8)
    ret, img = cv2.threshold(img, 127, 255, cv2.THRESH_BINARY)
    element = cv2.getStructuringElement(cv2.MORPH_CROSS, (3, 3))
    iteration = 0
    file_prefix = "./images/" + time.strftime("wtf_%Y-%m-%d_%H-%M-%S_")
    # Debug output: input and running skeleton written side by side each pass.
    joined = np.zeros((img.shape[0], img.shape[1]*2), np.uint8)
    joined[:img.shape[0], 0:img.shape[1]] = img
    joined[:img.shape[0], img.shape[1]:img.shape[1]*2] = thinned
    cv2.imwrite(file_prefix + str(iteration) + ".png", joined)
    while True:
        # The difference between the image and its morphological opening is
        # the layer peeled off this iteration; OR it into the skeleton.
        eroded = cv2.erode(img, element)
        temp = cv2.dilate(eroded, element)
        temp = cv2.subtract(img, temp)
        thinned = cv2.bitwise_or(thinned, temp)
        img = eroded.copy()
        iteration += 1
        joined[:img.shape[0], 0:img.shape[1]] = img
        joined[:img.shape[0], img.shape[1]:img.shape[1] * 2] = thinned
        cv2.imwrite(file_prefix + str(iteration) + ".png", joined)
        # Stop once the image has been eroded to nothing.
        if cv2.countNonZero(img) == 0:
            break
    return thinned
def neighbours8(bounds, pos, repeat_first_last=False):
    """Yield the 8-connected neighbours of *pos*, clockwise from north.

    Order follows the offsets
    [(-1, 0), (-1, 1), (0, 1), (1, 1), (1, 0), (1, -1), (0, -1), (-1, -1)];
    neighbours falling outside *bounds* are skipped.

    :param bounds: (rows, cols) grid size used for clipping.
    :param pos: (r, c) centre cell.
    :param repeat_first_last: when True, the north neighbour is yielded again
        at the end (handy for circular transition counting).
    """
    rows, cols = bounds
    r, c = pos
    cup = r > 0
    crh = c < cols - 1
    cdn = r < rows - 1
    clf = c > 0
    if cup:
        yield (r - 1, c)
    # BUG FIX: each diagonal requires BOTH of its bound checks — the original
    # guarded diagonals with a single check, yielding out-of-range (possibly
    # negative, i.e. numpy-wrapping) indices at grid edges and corners.
    if cup and crh:
        yield (r - 1, c + 1)
    if crh:
        yield (r, c + 1)
    if cdn and crh:
        yield (r + 1, c + 1)
    if cdn:
        yield (r + 1, c)
    if cdn and clf:
        yield (r + 1, c - 1)
    if clf:
        yield (r, c - 1)
    if cup and clf:
        yield (r - 1, c - 1)
    if repeat_first_last and cup:
        yield (r - 1, c)
def neighbour_transitions_to_white(img, pos):
    """Count 0 -> non-zero transitions while walking the 8-neighbourhood of
    *pos* clockwise (the "A(P1)" quantity used by the thinning passes)."""
    previous = None
    transitions = 0
    shape = (img.shape[0], img.shape[1])
    for r, c in neighbours8(shape, pos, True):
        value = img[r][c]
        if previous is not None and previous == 0 and value != 0:
            transitions += 1
        previous = value
    return transitions
def black_neighbours(img, pos):
    """Return how many of the 8 neighbours of *pos* are black (value 0)."""
    shape = (img.shape[0], img.shape[1])
    return sum(1 for r, c in neighbours8(shape, pos) if img[r][c] == 0)
def hilditch(img):
    """
    Hilditch-style iterative thinning (skeletonization).

    Source: http://cgm.cs.mcgill.ca/~godfried/teaching/projects97/azar/skeleton.html
    :param img: grayscale image; foreground becomes black (0) after the
        inverse threshold applied below.
    :return: thinned image (each iteration is also written to ./images/).
    """
    rows, cols = (img.shape[0], img.shape[1])
    ret, img = cv2.threshold(img, 127, 255, cv2.THRESH_BINARY_INV)
    temp = np.copy(img)
    # Repeat these two steps till no changes
    changed = True
    iteration = 0
    file_prefix = "./images/" + time.strftime("hilditch_%Y-%m-%d_%H-%M-%S_")
    cv2.imwrite(file_prefix + str(iteration) + ".png", img)
    while changed:
        changed = False
        # Step 1
        # for each pixel that has 8 neighbours
        for r in range(1, rows - 1):
            for c in range(1, cols - 1):
                # and is black
                if img[r][c] != 0:
                    continue
                # and 2 <= B(Pixel) <= 6
                B = black_neighbours(img, (r, c))
                if B < 2 or B > 6:
                    continue
                # and A(Pixel) = 1
                A = neighbour_transitions_to_white(img, (r, c))
                if A != 1:
                    continue
                # and P2||P4||P8||A(P2)!=1
                # NOTE(review): these two compound conditions differ from the
                # canonical Hilditch tests — confirm against the reference.
                if img[r-1][c] == 0 and img[r][c+1] == 0 and img[r][c-1] == 0 and neighbour_transitions_to_white(img, (r - 1, c)) == 1:
                    continue
                # and P2||P4||P6||A(P4)!=1
                if img[r-1][c] == 0 and img[r][c+1] == 0 and img[r+1][c-1] == 0 and neighbour_transitions_to_white(img, (r, c+1)) == 1:
                    continue
                changed = True
                # Mark for deletion in the shadow copy so tests in this pass
                # still see the pre-pass image.
                temp[r][c] = 255
        img = np.copy(temp)
        iteration += 1
        cv2.imwrite(file_prefix + str(iteration) + ".png", img)
    return img
def zhangsuen(img):
    """
    Zhang-Suen two-pass iterative thinning.

    Source: http://rosettacode.org/wiki/Zhang-Suen_thinning_algorithm
    :param img: grayscale image; foreground becomes black (0) after the
        inverse threshold applied below.
    :return: thinned image (each iteration is also written to ./images/).
    """
    rows, cols = (img.shape[0], img.shape[1])
    ret, img = cv2.threshold(img, 127, 255, cv2.THRESH_BINARY_INV)
    temp = np.copy(img)
    # Repeat these two steps till no changes
    changed = True
    iteration = 0
    file_prefix = "./images/" + time.strftime("zhangsuen_%Y-%m-%d_%H-%M-%S_")
    cv2.imwrite(file_prefix + str(iteration) + ".png", img)
    while changed:
        changed = False
        # Step 1
        # for each pixel that has 8 neighbours
        for r in range(1, rows - 1):
            for c in range(1, cols - 1):
                # and is black
                if img[r][c] != 0:
                    continue
                # and 2 <= B(Pixel) <= 6
                B = black_neighbours(img, (r, c))
                if B < 2 or B > 6:
                    continue
                # and A(Pixel) = 1
                A = neighbour_transitions_to_white(img, (r, c))
                if A != 1:
                    continue
                # and P2||P4||P6
                # (skip deletion when all three are foreground/black here —
                # polarity is inverted relative to the white-on-black
                # formulation on Rosetta Code)
                if img[r-1][c] == 0 and img[r][c+1] == 0 and img[r+1][c] == 0:
                    continue
                # and P4||P6||P8
                if img[r][c+1] == 0 and img[r+1][c] == 0 and img[r][c-1] == 0:
                    continue
                changed = True
                # Flag in the shadow copy; the pass reads the unmodified img.
                temp[r][c] = 255
        img = np.copy(temp)
        # Step 2
        # for each pixel that has 8 neighbours
        for r in range(1, rows - 1):
            for c in range(1, cols - 1):
                # and is black
                if img[r][c] != 0:
                    continue
                # and 2 <= B(Pixel) <= 6
                B = black_neighbours(img, (r, c))
                if B < 2 or B > 6:
                    continue
                # and A(Pixel) = 1
                A = neighbour_transitions_to_white(img, (r, c))
                if A != 1:
                    continue
                # and P2||P4||P8 <===
                if img[r-1][c] == 0 and img[r][c+1] == 0 and img[r][c-1] == 0:
                    continue
                # and ===>P2||P6||P8
                if img[r-1][c] == 0 and img[r+1][c] == 0 and img[r][c-1] == 0:
                    continue
                changed = True
                temp[r][c] = 255
        img = np.copy(temp)
        iteration += 1
        cv2.imwrite(file_prefix + str(iteration) + ".png", img)
    return img
class BFCell:
    """Brushfire grid cell: position (r, c), a unique id, and an occupancy flag."""

    def __init__(self, r, c, id, occupied=False):
        """BFCell(row, col, id[, occupied])

        `occupied` defaults to False because brushfire() constructs interior
        cells with only three positional arguments (the original signature
        made that call raise TypeError). Note: parameter name `id` shadows
        the builtin but is kept for interface compatibility.
        """
        self.r = r
        self.c = c
        self.id = id
        self.occupied = occupied

    def __repr__(self):
        return str(self)

    def __str__(self):
        # Compact id-only rendering; positional form kept for reference:
        # return "(%d, %d)" % (self.r, self.c)
        return "(%d)" % (self.id)
class BFCounter:
    """Monotonic id dispenser with post-increment semantics."""

    def __init__(self):
        self.count = 0

    def i(self):
        """Return the current value, then advance the counter."""
        value = self.count
        self.count = value + 1
        return value
def brushfire(img):
    """
    Brushfire / Voronoi region growing — UNFINISHED: the border-seeding loop
    at the bottom is a stub (`pass`), so the function currently just returns
    the inverse-thresholded image.

    :param img: grayscale input image.
    :return: Output Image
    """
    WALL = 255
    SPACE = 255 - WALL
    # Sentinel colour ids for the Voronoi boundary and the four image borders.
    colours = BFCounter()
    VORONOI = colours.i()
    LEFT = colours.i()
    RIGHT = colours.i()
    UP = colours.i()
    DOWN = colours.i()
    CV = BFCell(-1, -1, -1, False)  # Voronoi
    CL = BFCell(-1, -1, -2, True)  # Left
    CR = BFCell(-1, -1, -3, True)  # Right
    CU = BFCell(-1, -1, -4, True)  # Up
    CD = BFCell(-1, -1, -5, True)  # Down
    rows, cols = (img.shape[0], img.shape[1])
    ret, img = cv2.threshold(img, 127, 255, cv2.THRESH_BINARY_INV)
    regions = UnionFind()
    # NOTE(review): grid cells are created without the `occupied` flag here
    # while BFCell.__init__ declares it as its fourth parameter — verify the
    # intended occupancy value for interior cells.
    cells = [[BFCell(r, c, r * cols + c) for c in range(cols)] for r in range(rows)]
    cellsf = [cell for row in cells for cell in row]
    regions.insert_objects(itertools.chain(cellsf, (CV, CL, CR, CU, CD)))
    visited = set()
    # Add the border cells to a set
    for r in range(rows):
        pass
    return img
process = False
def mouse_callback(event, x, y, flags, param):
    """OpenCV mouse handler: left-drag paints white strokes into img_i;
    right-click sets the `process` flag picked up by the main loop."""
    global img_i, down, last_pos, last_time, process
    if event == cv2.EVENT_RBUTTONDOWN:
        #img_i = np.zeros((R, C), np.uint8)
        process = True
    elif event == cv2.EVENT_LBUTTONDOWN:
        down = True
        last_pos = (x, y)
    elif event == cv2.EVENT_LBUTTONUP:
        down = False
        last_pos = (x, y)
    elif event == cv2.EVENT_MOUSEMOVE:
        if down:
            # Thick (5 px) white stroke from previous to current position.
            cv2.line(img_i, last_pos, (x, y), 255, 5)
            last_pos = (x, y)
            # NOTE(review): source indentation was mangled; last_time is
            # assumed to update only while drawing — confirm intent.
            last_time = time.time()
cv2.setMouseCallback("main", mouse_callback)
edges = []
img_o = np.copy(img_i)
# iterr = None
# Event loop: show input and output side by side; a right-click (which sets
# `process` via the mouse callback) runs a thinning pass; 'q' quits.
while True:
    key = cv2.waitKey(1) & 0xFF
    if key == ord('q'):
        break
    # if (time.time() - last_time) > 1:
    #     last_time = time.time()
    #     del edges[:]
    if process:
        process = False
        #img_o = hilditch(img_i)
        img_o = zhangsuen(img_i)
        #img_o = brushfire(img_i)
        # iterr = zhangsuen(img_i)
    # for edge in edges:
    #     cv2.line(img_o, edge[0], edge[1], 127, 1)
    # if iterr is not None:
    #     try:
    #         img_o = iterr.next()
    #     except:
    #         iterr = None
    # Compose the input|output side-by-side view for display.
    combined = np.zeros((img_i.shape[0], img_i.shape[1]*2), np.uint8)
    combined[:img_i.shape[0], :img_i.shape[1]] = img_i
    combined[:img_i.shape[0], img_i.shape[1]:img_i.shape[1]*2] = img_o
    cv2.imshow("main", combined)
|
6,862 | cb40141eddce9ce11fbd8475fc7c3d37438208a6 | """!
@brief Example 04
@details pyAudioAnalysis spectrogram calculation and visualization example
@author Theodoros Giannakopoulos {tyiannak@gmail.com}
"""
import numpy as np
import scipy.io.wavfile as wavfile
import plotly
import plotly.graph_objs as go
from pyAudioAnalysis import ShortTermFeatures as aF
layout = go.Layout(title='Spectrogram Extraction Example using pyAudioAnalysis',
xaxis=dict(title='time (sec)',),
yaxis=dict(title='Freqs (Hz)',))
def normalize_signal(signal):
    """Scale a 16-bit PCM signal into roughly [-1, 1].

    Converts to double precision, divides by 2**15, removes the DC
    offset (mean), then divides by the peak absolute value (with a tiny
    epsilon to avoid division by zero on an all-zero signal).
    """
    samples = np.double(signal) / (2.0 ** 15)
    centered = samples - samples.mean()
    peak = (np.abs(centered)).max()
    return centered / (peak + 0.0000000001)
if __name__ == '__main__':
    # Load the sample wav and normalize samples to roughly [-1, 1].
    [Fs, s] = wavfile.read("../data/sample_music.wav")
    s = normalize_signal(s)
    # 20 ms window with a 20 ms step -> non-overlapping short-term frames.
    [S, t, f] = aF.spectrogram(s, Fs, int(Fs * 0.020), int(Fs * 0.020))
    # S is (frames x freq-bins); transpose so frequency runs along y.
    heatmap = go.Heatmap(z=S.T, y=f, x=t)
    plotly.offline.plot(go.Figure(data=[heatmap], layout=layout),
                        filename="temp.html", auto_open=True)
6,863 | eb4271aa5abe3ddc05048858205e6ef807a4f8ac | import logging
from typing import Sequence
from django.core.exceptions import ValidationError
from django.db import IntegrityError
from django.db.models import F, Q
from django.utils import timezone
from sentry_sdk import capture_exception
from sentry.models import (
Environment,
Project,
Release,
ReleaseEnvironment,
ReleaseProjectEnvironment,
ReleaseStatus,
)
from sentry.release_health import release_monitor
from sentry.release_health.release_monitor.base import Totals
from sentry.tasks.base import instrumented_task
from sentry.utils import metrics
CHUNK_SIZE = 1000
MAX_SECONDS = 60
logger = logging.getLogger("sentry.tasks.releasemonitor")
@instrumented_task(
    name="sentry.release_health.tasks.monitor_release_adoption",
    queue="releasemonitor",
    default_retry_delay=5,
    max_retries=5,
) # type: ignore
def monitor_release_adoption(**kwargs) -> None:
    """Entry-point task: fan out one child task per organization that has
    projects with recent sessions."""
    metrics.incr("sentry.tasks.monitor_release_adoption.start", sample_rate=1.0)
    with metrics.timer(
        "sentry.tasks.monitor_release_adoption.process_projects_with_sessions", sample_rate=1.0
    ):
        for org_id, project_ids in release_monitor.fetch_projects_with_recent_sessions().items():
            process_projects_with_sessions.delay(org_id, project_ids)
@instrumented_task(
    name="sentry.tasks.process_projects_with_sessions",
    queue="releasemonitor",
    default_retry_delay=5,
    max_retries=5,
) # type: ignore
def process_projects_with_sessions(org_id, project_ids) -> None:
    """For one org: flag projects as having sessions, mark releases
    adopted/unadopted based on recent session totals."""
    # Takes a single org id and a list of project ids
    with metrics.timer("sentry.tasks.monitor_release_adoption.process_projects_with_sessions.core"):
        # Set the `has_sessions` flag for these projects
        # (the bitand filter restricts the update to rows where the flag is not yet set).
        Project.objects.filter(
            organization_id=org_id,
            id__in=project_ids,
            flags=F("flags").bitand(~Project.flags.has_sessions),
        ).update(flags=F("flags").bitor(Project.flags.has_sessions))
        totals = release_monitor.fetch_project_release_health_totals(org_id, project_ids)
        adopted_ids = adopt_releases(org_id, totals)
        cleanup_adopted_releases(project_ids, adopted_ids)
def adopt_releases(org_id: int, totals: Totals) -> Sequence[int]:
    """Mark releases as adopted when their share of an environment's
    sessions reaches a threshold.

    A release is adopted when its sessions / total_sessions >= 0.1 / N,
    where N is the number of releases seen in that environment.  Missing
    ReleaseProjectEnvironment rows are created on demand.

    Returns the list of ReleaseProjectEnvironment ids that were adopted
    (used by cleanup_adopted_releases to unadopt everything else).
    """
    adopted_ids = []
    with metrics.timer(
        "sentry.tasks.monitor_release_adoption.process_projects_with_sessions.updates"
    ):
        for project_id, project_totals in totals.items():
            for environment, environment_totals in project_totals.items():
                total_releases = len(environment_totals["releases"])
                if not total_releases:
                    # No releases seen in this environment; also guards the
                    # division below against ZeroDivisionError.
                    continue
                # Loop-invariant: hoisted out of the per-release loop.
                threshold = 0.1 / total_releases
                for release_version in environment_totals["releases"]:
                    if (
                        environment
                        and environment_totals["total_sessions"] != 0
                        and environment_totals["releases"][release_version]
                        / environment_totals["total_sessions"]
                        >= threshold
                    ):
                        rpe = None
                        try:
                            rpe = ReleaseProjectEnvironment.objects.get(
                                project_id=project_id,
                                release_id=Release.objects.get(
                                    organization=org_id, version=release_version
                                ).id,
                                environment__name=environment,
                                environment__organization_id=org_id,
                            )
                            # Row exists: (re)adopt it and clear any unadoption.
                            updates = {}
                            if rpe.adopted is None:
                                updates["adopted"] = timezone.now()
                            if rpe.unadopted is not None:
                                updates["unadopted"] = None
                            if updates:
                                rpe.update(**updates)
                        except (Release.DoesNotExist, ReleaseProjectEnvironment.DoesNotExist):
                            metrics.incr("sentry.tasks.process_projects_with_sessions.creating_rpe")
                            try:
                                env = Environment.objects.get_or_create(
                                    name=environment, organization_id=org_id
                                )[0]
                                try:
                                    release = Release.objects.get_or_create(
                                        organization_id=org_id,
                                        version=release_version,
                                        defaults={
                                            "status": ReleaseStatus.OPEN,
                                        },
                                    )[0]
                                except IntegrityError:
                                    # Lost a creation race; fetch the winner.
                                    release = Release.objects.get(
                                        organization_id=org_id, version=release_version
                                    )
                                except ValidationError:
                                    # Invalid version string: skip adoption but keep processing.
                                    release = None
                                    logger.exception(
                                        "sentry.tasks.process_projects_with_sessions.creating_rpe.ValidationError",
                                        extra={
                                            "org_id": org_id,
                                            "release_version": release_version,
                                        },
                                    )
                                if release:
                                    release.add_project(Project.objects.get(id=project_id))
                                    ReleaseEnvironment.objects.get_or_create(
                                        environment=env, organization_id=org_id, release=release
                                    )
                                    rpe = ReleaseProjectEnvironment.objects.create(
                                        project_id=project_id,
                                        release_id=release.id,
                                        environment=env,
                                        adopted=timezone.now(),
                                    )
                            except (
                                Project.DoesNotExist,
                                Environment.DoesNotExist,
                                Release.DoesNotExist,
                                ReleaseEnvironment.DoesNotExist,
                            ) as exc:
                                metrics.incr(
                                    "sentry.tasks.process_projects_with_sessions.skipped_update"
                                )
                                capture_exception(exc)
                        if rpe:
                            adopted_ids.append(rpe.id)
    return adopted_ids
def cleanup_adopted_releases(project_ids: Sequence[int], adopted_ids: Sequence[int]) -> None:
    """Mark currently-adopted rows as unadopted unless they appear in
    adopted_ids (i.e. they no longer meet the adoption threshold)."""
    # Cleanup; adopted releases need to be marked as unadopted if they are not in `adopted_ids`
    with metrics.timer(
        "sentry.tasks.monitor_release_adoption.process_projects_with_sessions.cleanup"
    ):
        ReleaseProjectEnvironment.objects.filter(
            project_id__in=project_ids, unadopted__isnull=True
        ).exclude(Q(adopted=None) | Q(id__in=adopted_ids)).update(unadopted=timezone.now())
|
6,864 | cbfccffce2884e1cbebe21daf7792eebc1f88571 | #
# Copyright (c) 2011-2014 The developers of Aqualid project - http://aqualid.googlecode.com
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
# associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit persons to whom
# the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE
# AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# Public API of this module.
__all__ = (
  'BuildManager',
  'ErrorNodeDependencyCyclic', 'ErrorNodeDependencyUnknown',
)
import os.path
from aql.util_types import toSequence, AqlException
from aql.utils import eventStatus, eventWarning, eventError, logInfo, logError, logWarning, TaskManager
from aql.values import ValuesFile
#//===========================================================================//
@eventStatus
def eventNodeActual( settings, node, progress ):
  """Report that a node is already up to date."""
  build_str = node.getBuildStr( settings.brief )
  logInfo( "(%s) ACTUAL: %s" % (progress, build_str) )
#//===========================================================================//
@eventStatus
def eventNodeOutdated( settings, node, progress ):
  """Report that a node needs rebuilding."""
  build_str = node.getBuildStr( settings.brief )
  logInfo( "(%s) OUTDATED: %s" % (progress, build_str) )
#//===========================================================================//
@eventWarning
def eventBuildTargetTwice( settings, value, node1 ):
  """Warn that two different nodes produced the same target value."""
  build_str = node1.getBuildStr( settings.brief )
  logWarning("Target '%s' is built twice. The last time built by: '%s' " %
             ( value.name, build_str ) )
#//===========================================================================//
@eventError
def eventFailedNode( settings, node, error ):
  """Report a node build failure together with its error."""
  logError( node.getBuildStr( settings.brief ) + '\n\n%s\n' % (error,) )
#//===========================================================================//
@eventStatus
def eventNodeBuilding( settings, node ):
    """Build-start notification; intentionally a no-op (hook point only)."""
    pass
#//===========================================================================//
@eventStatus
def eventNodeBuildingFinished( settings, node, builder_output, progress ):
  """Log a successfully built node, optionally appending builder output.

  The original re-tested ``builder_output`` inside a branch already
  guarded by ``settings.with_output and builder_output``; the redundant
  inner check is removed (behavior unchanged).
  """
  msg = node.getBuildStr( settings.brief )
  if settings.with_output and builder_output:
    # builder_output is known truthy here.
    msg += '\n' + builder_output + '\n'
  msg = "(%s) %s" % (progress, msg)
  logInfo( msg )
#//===========================================================================//
@eventStatus
def eventNodeBuildingFailed( settings, node, error ):
    """Build-failure notification; intentionally a no-op (hook point only)."""
    pass
#//===========================================================================//
@eventStatus
def eventNodeRemoved( settings, node, progress ):
  """Report that a node's targets were removed (clear operation)."""
  build_str = node.getBuildStr( settings.brief )
  if build_str:
    logInfo( "(%s) Removed: %s" % (progress, build_str) )
#//===========================================================================//
class ErrorNodeDependencyCyclic( AqlException ):
  """Raised when adding a dependency would create a cycle in the node graph."""
  def __init__( self, node, deps ):
    super(ErrorNodeDependencyCyclic, self).__init__(
      "Node '%s' (%s) has a cyclic dependency: %s" % (node, node.getBuildStr(True), deps ) )
#//===========================================================================//
class ErrorNodeUnknown(AqlException):
  """Raised when a node is referenced but was never added to the graph."""
  def __init__( self, node ):
    super(ErrorNodeUnknown, self).__init__( "Unknown node '%s'" % (node, ) )
#//===========================================================================//
class ErrorNodeSignatureDifferent(AqlException):
  """Raised when two nodes share a target name but have different signatures."""
  def __init__( self, node ):
    super(ErrorNodeSignatureDifferent, self).__init__(
      "Two similar nodes have different signatures (sources, builder parameters or dependencies): %s" % (node.getBuildStr( brief = False ), ) )
#//===========================================================================//
class ErrorNodeDependencyUnknown(AqlException):
  """Raised when a dependency refers to a node not present in the graph."""
  def __init__( self, node, dep_node ):
    super(ErrorNodeDependencyUnknown, self).__init__(
      "Unable to add dependency to node '%s' from node '%s'" % (node, dep_node) )
#//===========================================================================//
class InternalErrorRemoveNonTailNode( AqlException ):
  """Internal invariant violation: tried to remove a node that still has deps."""
  def __init__( self, node ):
    super(InternalErrorRemoveNonTailNode, self).__init__(
      "Removing non-tail node: %s" % (node,) )
#//===========================================================================//
class InternalErrorRemoveUnknownTailNode(AqlException):
  """Internal invariant violation: tried to remove a tail node not in the graph."""
  def __init__( self, node ):
    super(InternalErrorRemoveUnknownTailNode, self).__init__(
      "Remove unknown tail node: : %s" % (node,) )
#//===========================================================================//
class BuildStat (object):
  """Simple counters used to render build progress as 'done/total'."""

  __slots__ = ('total', 'completed', 'failed')

  def __init__(self, total):
    # total may grow later via addTotal() as new nodes are discovered.
    self.total = total
    self.completed = 0
    self.failed = 0

  def addTotal(self, count ):
    """Increase the expected total by count."""
    self.total = self.total + count

  def incCompleted(self):
    """Record one successfully completed node."""
    self.completed = self.completed + 1

  def incFailed(self):
    """Record one failed node."""
    self.failed = self.failed + 1

  def getProgressStr(self):
    """Return progress as '<processed>/<total>' (failed nodes count as processed)."""
    return "%s/%s" % (self.completed + self.failed, self.total)
#//===========================================================================//
class _NodesTree (object):
  """Dependency graph of build nodes.

  node2deps maps each node to the set of nodes it still depends on;
  dep2nodes is the reverse mapping; tail_nodes holds nodes with no
  remaining dependencies, i.e. the ones ready to be built next.
  """

  __slots__ = \
  (
    'node2deps',
    'dep2nodes',
    'tail_nodes',
  )
  #//-------------------------------------------------------//
  def __init__( self ):
    self.node2deps = {}
    self.dep2nodes = {}
    self.tail_nodes = set()
  #//-------------------------------------------------------//
  def __len__(self):
    # Number of nodes in the graph.
    return len(self.node2deps)
  #//-------------------------------------------------------//
  def __hasCycle( self, node, new_deps ):
    """Return True if making node depend on new_deps would create a cycle
    (i.e. node is reachable from any of new_deps)."""
    if node in new_deps:
      return True
    deps = set(new_deps)
    node2deps = self.node2deps
    while deps:
      dep = deps.pop()
      dep_deps = node2deps[dep]
      if node in dep_deps:
        return True
      deps |= dep_deps
    return False
  #//-------------------------------------------------------//
  def __depends( self, node, deps ):
    """Add edges node -> deps; already-built deps are skipped.
    Raises ErrorNodeDependencyCyclic / ErrorNodeDependencyUnknown."""
    node2deps = self.node2deps
    dep2nodes = self.dep2nodes
    try:
      current_node_deps = node2deps[ node ]
      # Nodes that are already built impose no ordering constraint.
      deps = { dep for dep in deps if not dep.isBuilt() }
      new_deps = deps - current_node_deps
      if not new_deps:
        return
      if self.__hasCycle( node, new_deps ):
        raise ErrorNodeDependencyCyclic( node, new_deps )
      # Node now has pending deps, so it can no longer be a tail.
      self.tail_nodes.discard( node )
      #//-------------------------------------------------------//
      current_node_deps.update( new_deps )
      #//-------------------------------------------------------//
      for dep in new_deps:
        dep2nodes[ dep ].add( node )
    except KeyError as dep_node:
      # A dep (or the node itself) was never added to the graph.
      raise ErrorNodeDependencyUnknown( node, dep_node.args[0] )
  #//-------------------------------------------------------//
  def __add( self, nodes ):
    # Register nodes (as tails) and recursively pull in their sources/deps.
    for node in nodes:
      if node not in self.node2deps:
        self.node2deps[ node ] = set()
        self.dep2nodes[ node ] = set()
        self.tail_nodes.add( node )
        node_srcnodes = node.getSourceNodes()
        node_depnodes = node.getDepNodes()
        self.__add( node_srcnodes ) # TODO: recursively add sources and depends
        self.__add( node_depnodes ) # It would be better to rewrite this code to avoid the recursion
        self.__depends( node, node_srcnodes )
        self.__depends( node, node_depnodes )
  #//-------------------------------------------------------//
  def add( self, nodes ):
    """Add one node or a sequence of nodes (plus their sources/deps)."""
    self.__add( toSequence( nodes ) )
  #//-------------------------------------------------------//
  def depends( self, node, deps ):
    """Add deps to the graph and make node depend on them."""
    deps = toSequence( deps )
    self.__add( deps )
    self.__depends( node, deps )
  #//-------------------------------------------------------//
  def removeTail( self, node ):
    """Remove a finished node; promote dependents with no remaining deps
    to tails.  The node must have no pending deps itself."""
    node2deps = self.node2deps
    try:
      deps = node2deps.pop(node)
      if deps:
        raise InternalErrorRemoveNonTailNode( node )
    except KeyError as node:
      raise InternalErrorRemoveUnknownTailNode( node.args[0] )
    tail_nodes = self.tail_nodes
    # tail_nodes.remove( node )
    for dep in self.dep2nodes.pop( node ):
      d = node2deps[ dep ]
      d.remove( node )
      if not d:
        tail_nodes.add( dep )
  #//-------------------------------------------------------//
  def popTails( self ):
    """Return the current ready-to-build nodes and reset the tail set."""
    tails = self.tail_nodes
    self.tail_nodes = set()
    return tails
  #//-------------------------------------------------------//
  def __getAllNodes(self, nodes ):
    # Transitive closure of nodes over the dependency relation.
    nodes = set(toSequence(nodes))
    all_nodes = set( nodes )
    node2deps = self.node2deps
    while nodes:
      node = nodes.pop()
      try:
        deps = node2deps[ node ] - all_nodes
      except KeyError as node:
        raise ErrorNodeUnknown( node.args[0] )
      all_nodes.update( deps )
      nodes.update( deps )
    return all_nodes
  #//-------------------------------------------------------//
  def shrinkTo(self, nodes ):
    """Drop every node not needed (directly or transitively) by nodes."""
    node2deps = self.node2deps
    dep2nodes = self.dep2nodes
    ignore_nodes = set(node2deps) - self.__getAllNodes( nodes )
    self.tail_nodes -= ignore_nodes
    for node in ignore_nodes:
      del node2deps[ node ]
      del dep2nodes[ node ]
    for dep_nodes in dep2nodes.values():
      dep_nodes.difference_update( ignore_nodes )
  #//-------------------------------------------------------//
  def selfTest( self ):
    """Verify graph invariants; raises AssertionError on inconsistency."""
    if set(self.node2deps) != set(self.dep2nodes):
      raise AssertionError("Not all deps are added")
    all_dep_nodes = set()
    for node in self.dep2nodes:
      if node not in self.node2deps:
        raise AssertionError("Missed node: %s" % (node,) )
      node_deps = self.node2deps[node]
      if not node_deps:
        if node not in self.tail_nodes:
          raise AssertionError("Missed tail node: %s, tail_nodes: %s" % (node, self.tail_nodes) )
      else:
        if node in self.tail_nodes:
          raise AssertionError("Invalid tail node: %s" % (node,) )
      all_dep_nodes |= node_deps
      for dep in node_deps:
        if node not in self.dep2nodes[dep]:
          raise AssertionError("node not in self.dep2nodes[dep]: dep: %s, node: %s" % (dep, node) )
    if all_dep_nodes - set(self.dep2nodes):
      raise AssertionError("Not all deps are added")
#//===========================================================================//
class _VFiles( object ):
  """Cache of opened ValuesFile handles.

  Maps builder name -> db file path, and db file path -> open handle,
  so each build directory's '.aql.db' is opened at most once.
  """

  __slots__ = ('names', 'handles')

  def __init__( self ):
    self.handles = {}
    self.names = {}

  def __iter__(self):
    # Iteration over the cache is not a supported operation.
    raise TypeError()

  def __getitem__( self, builder ):
    """Return the (possibly cached) ValuesFile for the builder's build dir."""
    builder_name = builder.name
    vfilename = self.names.get( builder_name )
    if vfilename is None:
      vfilename = os.path.join( builder.getBuildDir(), '.aql.db' )
      self.names[ builder_name ] = vfilename
    vfile = self.handles.get( vfilename )
    if vfile is None:
      vfile = ValuesFile( vfilename )
      self.handles[ vfilename ] = vfile
    return vfile

  def close(self):
    """Close every open handle and drop all cached state."""
    for vfile in self.handles.values():
      vfile.close()
    self.handles.clear()
    self.names.clear()

  def __enter__(self):
    return self

  def __exit__(self, exc_type, exc_value, backtrace):
    self.close()
#//===========================================================================//
def _buildNode( node ):
  """Run the node's builder and return its textual output, stripped when possible."""
  eventNodeBuilding( node )
  output = node.build()
  if not output:
    return output
  try:
    return output.strip()
  except Exception:
    # Output is not string-like; return it unchanged.
    return output
#//===========================================================================//
class _NodeState( object ):
__slots__ = \
(
'initialized',
'check_depends',
'check_replace',
'check_split',
'check_actual',
'split_nodes',
)
def __init__(self ):
self.initialized = False
self.check_depends = True
self.check_replace = True
self.check_split = True
self.check_actual = True
self.split_nodes = None
def __str__(self):
return "initialized: %s, check_depends: %s, check_replace: %s, check_split: %s, check_actual: %s, split_nodes: %s" %\
(self.initialized, self.check_depends, self.check_replace, self.check_split, self.check_actual, self.split_nodes )
#//===========================================================================//
# noinspection PyAttributeOutsideInit
class _NodesBuilder (object):
  """Drives the actual building of nodes through a TaskManager.

  Tracks per-node pre-build state (_NodeState), detects conflicting
  target names between concurrently building nodes, and saves results
  into per-builder ValuesFile databases.
  """

  __slots__ = \
  (
    'vfiles',
    'build_manager',
    'task_manager',
    'node_states',
    'building_nodes',
  )
  #//-------------------------------------------------------//
  def __init__( self, build_manager, jobs = 0, keep_going = False, with_backtrace = True ):
    self.vfiles = _VFiles()
    self.node_states = {}
    self.building_nodes = {}
    self.build_manager = build_manager
    self.task_manager = TaskManager( num_threads = jobs, stop_on_fail = not keep_going, with_backtrace = with_backtrace )
  #//-------------------------------------------------------//
  def __enter__(self):
    return self
  #//-------------------------------------------------------//
  def __exit__(self, exc_type, exc_value, backtrace):
    self.close()
  #//-------------------------------------------------------//
  def _getNodeState( self, node ):
    """Return (creating if needed) the _NodeState for node."""
    try:
      state = self.node_states[ node ]
    except KeyError:
      state = _NodeState()
      self.node_states[ node ] = state
    return state
  #//-------------------------------------------------------//
  def _removeNodeState( self, node ):
    try:
      del self.node_states[ node ]
    except KeyError:
      pass
  #//-------------------------------------------------------//
  def _addBuildingNode( self, node, state ):
    """Register the node's target names as in-progress.

    Returns True (and defers the node behind the conflicting builders)
    when another in-progress node produces the same target name.
    Raises ErrorNodeSignatureDifferent if the same name is produced
    with a different signature.
    """
    conflicting_nodes = []
    building_nodes = self.building_nodes
    for name, signature in node.getNamesAndSignatures():
      node_signature = (node, signature)
      other_node, other_signature = building_nodes.setdefault( name, node_signature )
      if other_node is not node:
        if other_signature != signature:
          raise ErrorNodeSignatureDifferent( node )
        conflicting_nodes.append( other_node )
    if conflicting_nodes:
      # Re-check actuality after the conflicting node finishes.
      state.check_actual = True
      self.build_manager.depends( node, conflicting_nodes )
      return True
    return False
  #//-------------------------------------------------------//
  def _removeBuildingNode( self, node ):
    building_nodes = self.building_nodes
    for name in node.getNames():
      del building_nodes[ name ]
  #//-------------------------------------------------------//
  def isBuilding(self):
    # True while any node's targets are still registered as in-progress.
    return bool(self.building_nodes)
  #//-------------------------------------------------------//
  def _checkPrebuildDepends( self, node ):
    """Add dynamically discovered deps; True if the node must wait."""
    dep_nodes = node.buildDepends()
    if dep_nodes:
      self.build_manager.depends( node, dep_nodes )
      return True
    return False
  #//-------------------------------------------------------//
  def _checkPrebuildReplace( self, node ):
    """Let the node replace its sources; True if new sources were added."""
    if node.buildReplace():
      new_node_sources = node.getSourceNodes()
      if new_node_sources:
        self.build_manager.depends( node, new_node_sources )
        return True
    return False
  #//-------------------------------------------------------//
  def _checkPrebuildSplit( self, node, state ):
    """Handle node splitting (one node -> several sub-nodes).

    First call: split the node and depend on the sub-nodes.
    Second call (after sub-nodes built): collect their targets and
    finish the parent.  Returns True when the node was handled here.
    """
    build_manager = self.build_manager
    if state.check_split:
      state.check_split = False
      check_actual = True
      if node.isBatch() and state.check_actual:
        # Check for changed sources of BatchNode
        vfile = self.vfiles[ node.builder ]
        actual = build_manager.isActualNode( node, vfile )
        if actual:
          self._removeNodeState( node )
          build_manager.actualNode( node )
          return True
        check_actual = False
      split_nodes = node.buildSplit()
      if split_nodes:
        state.split_nodes = split_nodes
        for split_node in split_nodes:
          split_state = self._getNodeState( split_node )
          split_state.check_split = False
          split_state.check_depends = False
          split_state.check_replace = False
          split_state.check_actual = check_actual
          # Sub-nodes sharing the parent's builder are already initiated.
          split_state.initialized = split_node.builder is node.builder
        self.build_manager.depends( node, split_nodes )
        return True
    elif state.split_nodes is not None:
      if node.isBatch():
        node._populateTargets()
      else:
        targets = []
        for split_node in state.split_nodes:
          targets += split_node.getTargetValues()
        node.target_values = targets
      self._removeNodeState( node )
      self.build_manager.completedSplitNode( node )
      return True
    return False
  #//-------------------------------------------------------//
  def _prebuild( self, node, state ):
    """Run pending pre-build checks; True if the node was deferred/handled."""
    # print( "node: %s, state: %s" % (node, state))
    if not state.initialized:
      node.initiate()
      state.initialized = True
    if state.check_depends:
      state.check_depends = False
      if self._checkPrebuildDepends( node ):
        return True
    if state.check_replace:
      state.check_replace = False
      if self._checkPrebuildReplace( node ):
        return True
    if self._checkPrebuildSplit( node, state ):
      return True
    return False
  #//-------------------------------------------------------//
  def build( self, nodes ):
    """Schedule build tasks for ready nodes, skipping actual ones;
    periodically harvests finished tasks."""
    build_manager = self.build_manager
    vfiles = self.vfiles
    addTask = self.task_manager.addTask
    tasks_check_period = 10
    added_tasks = 0
    changed = False
    for node in nodes:
      node_state = self._getNodeState( node )
      if self._prebuild( node, node_state ):
        changed = True
        continue
      if self._addBuildingNode( node, node_state ):
        continue
      if node_state.check_actual:
        vfile = vfiles[ node.builder ]
        actual = build_manager.isActualNode( node, vfile )
        if actual:
          self._removeNodeState( node )
          self._removeBuildingNode( node )
          build_manager.actualNode( node )
          changed = True
          continue
      addTask( node, _buildNode, node )
      added_tasks += 1
      if added_tasks == tasks_check_period:
        changed = self._getFinishedNodes( block = False ) or changed
        added_tasks = 0
    # Block only when nothing changed this round (otherwise the caller
    # has new tails to process immediately).
    self._getFinishedNodes( block = not changed )
  #//-------------------------------------------------------//
  def _getFinishedNodes( self, block = True ):
    """Harvest finished tasks; report success/failure to the manager.
    Returns True if any task finished."""
    # print("tasks: %s, finished_tasks: %s" % (self.task_manager.unfinished_tasks, self.task_manager.finished_tasks.qsize()))
    finished_tasks = self.task_manager.finishedTasks( block = block )
    vfiles = self.vfiles
    build_manager = self.build_manager
    for task in finished_tasks:
      node = task.task_id
      error = task.error
      self._removeNodeState( node )
      self._removeBuildingNode( node )
      vfile = vfiles[ node.builder ]
      if error is None:
        node.save( vfile )
        build_manager.completedNode( node, task.result )
      else:
        if node.isBatch():
          # Batch nodes may have partially succeeded; persist what we have.
          node.save( vfile )
        build_manager.failedNode( node, error )
    return bool(finished_tasks)
  #//-------------------------------------------------------//
  def clear( self, nodes ):
    """Remove the targets of the given nodes (skipping actuality checks)."""
    vfiles = self.vfiles
    build_manager = self.build_manager
    for node in nodes:
      node_state = self._getNodeState( node )
      node_state.check_actual = False
      if self._prebuild( node, node_state ):
        continue
      vfile = vfiles[ node.builder ]
      node.clear( vfile )
      build_manager.removedNode( node )
  #//-------------------------------------------------------//
  def status( self, nodes ):
    """Report actual/outdated status for the given nodes without building."""
    vfiles = self.vfiles
    build_manager = self.build_manager
    for node in nodes:
      node_state = self._getNodeState( node )
      node_state.check_actual = False
      if self._prebuild( node, node_state ):
        continue
      vfile = vfiles[ node.builder ]
      if build_manager.isActualNode( node, vfile ):
        build_manager.actualNodeStatus( node )
      else:
        build_manager.outdatedNodeStatus( node )
  #//-------------------------------------------------------//
  def close( self ):
    """Stop the task manager, drain finished tasks, close value files."""
    try:
      self.task_manager.stop()
      self._getFinishedNodes( block = False )
    finally:
      self.vfiles.close()
#//===========================================================================//
class BuildManager (object):
  """Top-level coordinator: owns the dependency tree and runs
  build / clear / status passes over it via _NodesBuilder."""

  __slots__ = \
  (
    '_nodes',
    '_built_targets',
    '_failed_nodes',
    '_built_node_names',
    'completed',
    'actual',
    'explain',
  )
  #//-------------------------------------------------------//
  def __init__(self):
    self._nodes = _NodesTree()
    self.__reset()
  #//-------------------------------------------------------//
  def __reset(self, build_always = False, explain = False ):
    """Reset per-run counters and bookkeeping.

    build_always enables the built-names set used to force rebuilds;
    explain makes actuality checks report why a node is outdated.
    """
    self._built_targets = {}
    self._failed_nodes = {}
    self._built_node_names = set() if build_always else None
    self.completed = 0
    self.actual = 0
    self.explain = explain
  #//-------------------------------------------------------//
  def add( self, nodes ):
    """Add nodes (and their sources/deps) to the dependency tree."""
    self._nodes.add( nodes )
  #//-------------------------------------------------------//
  def depends( self, node, deps ):
    """Make node depend on deps, adding deps to the tree if needed."""
    self._nodes.depends( node, deps )
  #//-------------------------------------------------------//
  def __len__(self):
    return len(self._nodes)
  #//-------------------------------------------------------//
  def selfTest( self ):
    self._nodes.selfTest()
  #//-------------------------------------------------------//
  def getTailNodes(self):
    """Pop the nodes that are currently ready to be processed."""
    return self._nodes.popTails()
  #//-------------------------------------------------------//
  def actualNodeStatus( self, node ):
    # Status pass: report up-to-date and retire the node.
    eventNodeActual( node, self.getProgressStr() )
    self.actualNode( node )
  #//-------------------------------------------------------//
  def outdatedNodeStatus( self, node ):
    # Status pass: an outdated node is recorded like a failure (no error).
    self._failed_nodes[ node ] = None
    eventNodeOutdated( node, self.getProgressStr() )
    node.shrink()
  #//-------------------------------------------------------//
  def isActualNode( self, node, vfile ):
    """True if node's saved state in vfile is still up to date."""
    return node.checkActual( vfile, self._built_node_names, self.explain )
  #//-------------------------------------------------------//
  def _addToBuiltNodeNames(self, node ):
    # Only tracked when build_always was requested in __reset().
    built_names = self._built_node_names
    if built_names is not None:
      built_names.update( node.getNames() )
  #//-------------------------------------------------------//
  def completedSplitNode(self, node ):
    self._nodes.removeTail( node )
    node.shrink()
  #//-------------------------------------------------------//
  def actualNode( self, node ):
    """Retire an up-to-date node without building it."""
    self._nodes.removeTail( node )
    self.actual += 1
    node.shrink()
  #//-------------------------------------------------------//
  def completedNode( self, node, builder_output ):
    """Retire a successfully built node and log the result."""
    self._checkAlreadyBuilt( node )
    self._nodes.removeTail( node )
    self._addToBuiltNodeNames( node )
    self.completed += 1
    eventNodeBuildingFinished( node, builder_output, self.getProgressStr() )
    node.shrink()
  #//-------------------------------------------------------//
  def failedNode( self, node, error ):
    """Record a build failure; the node stays in the tree."""
    self._failed_nodes[ node ] = error
    eventNodeBuildingFailed( node, error )
  #//-------------------------------------------------------//
  def removedNode( self, node ):
    """Retire a node whose targets were removed (clear pass)."""
    self._nodes.removeTail( node )
    self.completed += 1
    eventNodeRemoved( node, self.getProgressStr() )
    node.shrink()
  #//-------------------------------------------------------//
  def getProgressStr(self):
    """Return '<processed>/<total>' progress for log messages."""
    done = self.completed + self.actual
    total = len(self._nodes) + done
    processed = done + len(self._failed_nodes)
    progress = "%s/%s" % (processed, total)
    return progress
  #//-------------------------------------------------------//
  def close( self ):
    self._nodes = _NodesTree()
  #//-------------------------------------------------------//
  def _checkAlreadyBuilt( self, node ):
    # Warn when two nodes produce the same target with different signatures.
    values = node.getTargetValues()
    built_targets = self._built_targets
    for value in values:
      value_sign = value.signature
      other_value_sign = built_targets.setdefault( value.valueId(), value_sign )
      if other_value_sign != value_sign:
        eventBuildTargetTwice( value, node )
  #//-------------------------------------------------------//
  def build( self, jobs, keep_going, nodes = None, build_always = False, explain = False, with_backtrace = True ):
    """Build the tree (optionally shrunk to nodes); return True on success."""
    self.__reset( build_always = build_always, explain = explain )
    nodes_tree = self._nodes
    if nodes is not None:
      nodes_tree.shrinkTo( nodes )
    with _NodesBuilder( self, jobs, keep_going, with_backtrace ) as nodes_builder:
      while True:
        tails = self.getTailNodes()
        # Keep looping while tasks are still in flight even with no tails.
        if not tails and not nodes_builder.isBuilding():
          break
        nodes_builder.build( tails )
      return self.isOk()
  #//-------------------------------------------------------//
  def isOk(self):
    return not bool( self._failed_nodes )
  #//-------------------------------------------------------//
  def failsCount(self):
    return len( self._failed_nodes )
  #//-------------------------------------------------------//
  def printFails(self ):
    for node, error in self._failed_nodes.items():
      eventFailedNode( node, error )
  #//-------------------------------------------------------//
  def printBuildState(self):
    logInfo("Failed nodes: %s" % len(self._failed_nodes) )
    logInfo("Completed nodes: %s" % self.completed )
    logInfo("Actual nodes: %s" % self.actual )
  #//-------------------------------------------------------//
  def printStatusState(self):
    # After a status pass, _failed_nodes holds the outdated nodes.
    logInfo("Outdated nodes: %s" % len(self._failed_nodes) )
    logInfo("Actual nodes: %s" % self.actual )
  #//-------------------------------------------------------//
  def clear( self, nodes = None ):
    """Remove targets of the tree (optionally shrunk to nodes)."""
    self.__reset()
    nodes_tree = self._nodes
    if nodes is not None:
      nodes_tree.shrinkTo( nodes )
    with _NodesBuilder( self ) as nodes_builder:
      while True:
        tails = self.getTailNodes()
        if not tails:
          break
        nodes_builder.clear( tails )
  #//-------------------------------------------------------//
  def status( self, nodes = None, explain = False ):
    """Report actual/outdated status; True if everything is up to date."""
    self.__reset( explain = explain )
    nodes_tree = self._nodes
    if nodes is not None:
      nodes_tree.shrinkTo( nodes )
    with _NodesBuilder( self ) as nodes_builder:
      while True:
        tails = self.getTailNodes()
        if not tails:
          break
        nodes_builder.status( tails )
    return self.isOk()
|
6,865 | 8ed14bb9af23055f4689e06df872a1d36185cd09 | # model class for a sale record
from app.models.product import Product
class Sale(Product):
    """A recorded sale: the sold product plus who sold it and when."""

    def __init__(self, product_name, quantity, unit_price, attendant, date):
        # Initialise the inherited product fields first.
        super(Sale, self).__init__(product_name, quantity, unit_price)
        self.date = date
        self.attendant = attendant
6,866 | 5195dcf262c0be08f83cf66e79d48e51811a67a0 | from speaker_verification import *
import numpy as np
# Azure Speaker Recognition demo: enroll a voice profile from a wav file,
# run identification against it, then delete the profile.
region = 'westus'
# API key is kept out of source control in a local secrets file.
api_key = load_json('./real_secrets.json')['api_key']
wav_path = './enrollment.wav'
temp_path = './temp.wav'
# If you want to list users by profile_id
print('All users are: ', list_users(api_key, region))
# This is handled by the development / production code, but if you want to test the identification...
profile_id = create_profile(api_key, region)
enroll_user(api_key, region, wav_path, profile_id)
print(f'Likelihood that {wav_path} came from this subject')
identify_user(api_key, region, wav_path, profile_id)
print(f'Likelihood that {wav_path} came from this subject or another (randomly chosen)')
identify_user(api_key, region, wav_path, profile_ids=[profile_id, np.random.choice(list_users(api_key, region))])
print('Removing this profile id...')
remove_user(api_key, region, profile_id)
|
6,867 | 766098753ec579e2d63893fcbd94e8819b46bc0b | import pytest
from dymopy.client import Dymo
from dymopy.client import make_xml, make_params
def test_url():
    """The client should default to the local DYMO web-service endpoint."""
    client = Dymo()
    assert client.uri == "https://127.0.0.1:41951/DYMO/DLS/Printing"
def test_status():
    """get_status should return a dict-shaped response with HTTP 200."""
    client = Dymo()
    response = client.get_status()
    assert isinstance(response, dict)
    assert response['status_code'] == 200
def test_printer_name():
    """get_printer should return a dict-shaped response with HTTP 200."""
    client = Dymo()
    response = client.get_printer()
    assert isinstance(response, dict)
    assert response['status_code'] == 200
def test_xml():
    """Smoke test: building params and label XML must not raise.

    NOTE(review): this test has no assertions — it only verifies that the
    constructors do not throw; consider asserting on the returned values.
    """
    params = make_params()
    xml = make_xml("This is working?")
def test_printer_job():
    """Build a two-line label; the actual print call is left disabled."""
    client = Dymo()
    params = make_params()
    xml = make_xml('Hello', 'World!')
    # print_resp = client.print(label_xml=xml, label_params=params)
    # assert print_resp.status_code == 200
|
6,868 | e870900249b121f2416d7be543752ebf6392b6be | import scraperwiki, lxml.html, urllib2, re
from datetime import datetime
# Python 2 scraperwiki job: scrapes the WA Health "publication of names of
# offenders" table and stores one record per conviction, keyed on the PDF link.
#html = scraperwiki.scrape("http://www.public.health.wa.gov.au/2/1035/2/publication_of_names_of_offenders_list.pm")
doc = lxml.html.parse(urllib2.urlopen("http://www.public.health.wa.gov.au/2/1035/2/publication_of_names_of_offenders_list.pm"))
root = doc.getroot()
#select the table that contains the offenders, ignoring the first one that contains the header row
for tr in root.xpath("//div[@id='verdiSection10']/div/div/table/tbody/tr")[1:]:
    data = {
        'conviction_date': datetime.strptime(
            re.match("(\d+/\d+/\d+)", tr[0].text_content().strip()).group(1),
            "%d/%m/%Y"), #sometimes they include two dates in the entry, so we'll have to grab the first (damnit)
        'business_name': tr[1].text_content().strip(),
        'business_address': tr[2].text_content().strip(),
        'convicted_name': tr[3].text_content().strip(),
        'agency': tr[4].text_content().strip(),
        'pdf': tr[5].xpath(".//a")[0].get("href")
    }
    # The PDF URL is unique per conviction, so reruns upsert rather than duplicate.
    scraperwiki.sqlite.save(unique_keys=['pdf'], data=data)
# NOTE(review): this is an exact duplicate of the script above — the whole
# scraper is pasted twice, so every run does the work (and the HTTP fetch)
# twice. Harmless because of the unique key, but worth removing.
import scraperwiki, lxml.html, urllib2, re
from datetime import datetime
#html = scraperwiki.scrape("http://www.public.health.wa.gov.au/2/1035/2/publication_of_names_of_offenders_list.pm")
doc = lxml.html.parse(urllib2.urlopen("http://www.public.health.wa.gov.au/2/1035/2/publication_of_names_of_offenders_list.pm"))
root = doc.getroot()
#select the table that contains the offenders, ignoring the first one that contains the header row
for tr in root.xpath("//div[@id='verdiSection10']/div/div/table/tbody/tr")[1:]:
    data = {
        'conviction_date': datetime.strptime(
            re.match("(\d+/\d+/\d+)", tr[0].text_content().strip()).group(1),
            "%d/%m/%Y"), #sometimes they include two dates in the entry, so we'll have to grab the first (damnit)
        'business_name': tr[1].text_content().strip(),
        'business_address': tr[2].text_content().strip(),
        'convicted_name': tr[3].text_content().strip(),
        'agency': tr[4].text_content().strip(),
        'pdf': tr[5].xpath(".//a")[0].get("href")
    }
    scraperwiki.sqlite.save(unique_keys=['pdf'], data=data)
|
6,869 | aa51c8f736461f147704c1ec0669c265348fcb80 | from lib.appData import driver_queue
from lib.pyapp import Pyapp
import threading
from appium.webdriver.common.touch_action import TouchAction
from lib.logger import logger
import time
local = threading.local()
class BasePage(object):
    """Base Appium page object: owns a thread-local driver/Pyapp pair and
    shared gesture helpers for drawing on a 3x3 unlock grid."""
    def __init__(self, driver=None):
        # Each worker thread keeps its own driver in `local`; when none is
        # passed, one is borrowed from the shared driver_queue.
        if driver is None:
            local.driver = driver_queue.get()
            local.pyapp = Pyapp(local.driver)
        else:
            local.driver = driver
            local.pyapp = Pyapp(driver)
    def quit(self):
        """Shut down this thread's driver session."""
        local.pyapp.quit()
    def reset_package(self):
        """Restart the app under test (clears its state)."""
        local.pyapp.reset()
    def move(self, a=1, b=2):
        # Relative (dx, dy) offset from grid cell `a` to cell `b`; the
        # leading None keeps the tuple splat-compatible with move_to(el, x, y).
        befor = self.source[a]
        after = self.source[b]
        r = (None, after[1] - befor[1], after[2] - befor[2])
        return r
    def relieve_device_lock_qq(self, num):
        """Draw the unlock pattern on the 3x3 gesture grid rendered by the
        `num`-th android.view.View on screen."""
        element = local.pyapp.get_elements('class=>android.view.View')[num]
        location = element.location
        logger.debug('location: %s' % location)
        size = element.size
        logger.debug('size: %s' % size)
        # Centers of the nine grid cells, keyed 1..9 row-major; each value is
        # (None, x, y): cell centers sit at odd sixths of the view's extent.
        self.source = {1: (None, location["x"] + size["width"] / 6, location["y"] + size["height"] / 6),
                       2: (None, location["x"] + size["width"] / 6 * 3, location["y"] + size["height"] / 6),
                       3: (None, location["x"] + size["width"] / 6 * 5, location["y"] + size["height"] / 6),
                       4: (None, location["x"] + size["width"] / 6, location["y"] + size["height"] / 6 * 3),
                       5: (None, location["x"] + size["width"] / 6 * 3, location["y"] + size["height"] / 6 * 3),
                       6: (None, location["x"] + size["width"] / 6 * 5, location["y"] + size["height"] / 6 * 3),
                       7: (None, location["x"] + size["width"] / 6, location["y"] + size["height"] / 6 * 5),
                       8: (None, location["x"] + size["width"] / 6 * 3, location["y"] + size["height"] / 6 * 5),
                       9: (None, location["x"] + size["width"] / 6 * 5, location["y"] + size["height"] / 6 * 5)}
        logger.debug('拆分后的9个图:%s' % self.source)
        # Pattern 1 -> 2 -> 3 -> 5 -> 7 -> 8 -> 9 (a Z shape), pausing 300 ms
        # between points; move_to offsets are relative to the previous point.
        TouchAction(local.driver).press(*self.source[1]).wait(300).move_to(*self.move(1, 2)).wait(300).move_to(
            *self.move(2, 3)).wait(300).move_to(*self.move(3, 5)).wait(300).move_to(*self.move(5, 7)).wait(
            300).move_to(
            *self.move(7, 8)).wait(300).move_to(*self.move(8, 9)).wait(300).release().perform()
class QQ_Login_Page(BasePage):
    """Page object for the QQ login screen (locator text is Chinese UI copy)."""
    def login(self):
        # Tap the "登 录" (Log in) button.
        local.pyapp.click('android=>new UiSelector().text("登 录")')
    def username(self):
        # Fill the "QQ number / phone / email" field with the test account.
        local.pyapp.type('content=>请输入QQ号码或手机或邮箱', 3408467505)
    def passwd(self):
        # Fill the password field with the test credential.
        local.pyapp.type('content=>密码 安全', 'besttest123')
    def left_close(self):
        # Tap the "关闭" (Close) button.
        css = 'android=>new UiSelector().text("关闭")'
        local.pyapp.click(css)
    def login_check(self, name):
        # Presumably waits for the login button to disappear and saves a
        # screenshot under `name` on failure — confirm against Pyapp.
        return local.pyapp.wait_and_save_exception('android=>new UiSelector().text("登 录")', name)
class SetLock(QQ_Login_Page):
    """Drives QQ's settings UI to create a 3x3 gesture (pattern) password."""
    def photo(self):
        # Open the account/settings drawer.
        local.pyapp.click('content=>帐户及设置')
    def set_up(self):
        # Open "设置" (Settings).
        local.pyapp.click('content=>设置')
    def set_up_of_account(self):
        # Open "帐号、设备安全" (Account & device security).
        local.pyapp.click('android=>new UiSelector().text("帐号、设备安全")')
    def set_gesture_passwd(self):
        # Open "手势密码锁定" (Gesture password lock).
        local.pyapp.click('content=>手势密码锁定')
    def create_gesture(self):
        # Tap "创建手势密码" (Create gesture password).
        local.pyapp.click('android=>new UiSelector().text("创建手势密码")')
    def set_gesture(self):
        """Draw the pattern top-left -> top-mid -> top-right -> center.

        Bugfix: the y offsets of the third and fourth points were computed
        from sample_width instead of sample_height, skewing the pattern on
        any device whose gesture grid is not square.
        """
        element = local.pyapp.get_elements('class=>android.view.View')[12]
        location = element.location
        x = location['x']
        y = location['y']
        size = element.size
        width = size['width']
        height = size['height']
        # Half a cell in each direction: cell centers sit at odd multiples.
        sample_width = width / 3 / 2
        sample_height = height / 3 / 2
        onex = x + sample_width
        oney = y + sample_height
        twox = x + sample_width * 3
        twoy = y + sample_height
        threex = x + sample_width * 5
        threey = y + sample_height          # was: y + sample_width
        fourx = x + sample_width * 3
        foury = y + sample_height * 3       # was: y + sample_width * 3
        # move_to offsets are relative to the previous touch point.
        TouchAction(local.driver).press(x=onex, y=oney).wait(300) \
            .move_to(x=twox - onex, y=twoy - oney).wait(300) \
            .move_to(x=threex - twox, y=threey - twoy).wait(300) \
            .move_to(x=fourx - threex, y=foury - threey).perform()
    def set_lock_check(self, name):
        """Verify the '修改手势密码' (Change gesture password) entry appears,
        i.e. a gesture password now exists; saves evidence under `name`."""
        return local.pyapp.wait_and_save_exception('android=>new UiSelector().text("修改手势密码")', name)
class Page(SetLock):
    """Aggregate page object: inherits the full login + gesture-lock flow."""
    pass
|
6,870 | c500ecaa66672ac960dc548c3f3882e4bc196745 | from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Rec1(object):
    """Qt Designer-style UI class for the recognition-mode window.

    Builds a fixed 450x200 main window with three full-width buttons
    (photo / video / webcam recognition) and a back/exit row; all
    user-visible captions are Russian and set in retranslateUi.
    """
    def setupUi(self, Rec1):
        """Create and lay out every widget on the given QMainWindow."""
        Rec1.setObjectName("Rec1")
        Rec1.setFixedSize(450, 200)
        ico = QtGui.QIcon("mylogo.png")
        Rec1.setWindowIcon(ico)
        font = QtGui.QFont()
        font.setFamily("Times New Roman")
        font.setPointSize(14)
        self.centralwidget = QtWidgets.QWidget(Rec1)
        self.centralwidget.setObjectName("centralwidget")
        # One full-width button per recognition source, stacked vertically.
        self.pushButton_photo = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton_photo.setGeometry(QtCore.QRect(50, 20, 350, 30))
        self.pushButton_photo.setFont(font)
        self.pushButton_photo.setObjectName("pushButton_photo")
        self.pushButton_video = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton_video.setGeometry(QtCore.QRect(50, 60, 350, 30))
        self.pushButton_video.setFont(font)
        self.pushButton_video.setObjectName("pushButton_video")
        self.pushButton_camera = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton_camera.setGeometry(QtCore.QRect(50, 100, 350, 30))
        self.pushButton_camera.setFont(font)
        self.pushButton_camera.setObjectName("pushButton_camera")
        # Bottom row: back / exit side by side.
        self.pushButton_back = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton_back.setGeometry(QtCore.QRect(50, 140, 170, 30))
        self.pushButton_back.setFont(font)
        self.pushButton_back.setObjectName("pushButton_back")
        self.pushButton_exit = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton_exit.setGeometry(QtCore.QRect(230, 140, 170, 30))
        self.pushButton_exit.setFont(font)
        self.pushButton_exit.setObjectName("pushButton_exit")
        Rec1.setCentralWidget(self.centralwidget)
        self.menubar = QtWidgets.QMenuBar(Rec1)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 800, 21))
        self.menubar.setObjectName("menubar")
        Rec1.setMenuBar(self.menubar)
        self.statusbar = QtWidgets.QStatusBar(Rec1)
        self.statusbar.setObjectName("statusbar")
        Rec1.setStatusBar(self.statusbar)
        self.retranslateUi(Rec1)
        QtCore.QMetaObject.connectSlotsByName(Rec1)
    def retranslateUi(self, Rec1):
        """Assign all user-visible strings (kept separate for retranslation)."""
        _translate = QtCore.QCoreApplication.translate
        Rec1.setWindowTitle(_translate("Rec1", "Recognition"))
        self.pushButton_photo.setText(_translate("Rec1", "Распознавание по фото"))
        self.pushButton_video.setText(_translate("Rec1", "Распознавие по видео"))
        self.pushButton_camera.setText(_translate("Rec1", "Распознавание с помощью веб-камеры"))
        self.pushButton_back.setText(_translate("Rec1", "Назад"))
        self.pushButton_exit.setText(_translate("Rec1", "Выход"))
|
6,871 | b0cc2efda4d6586b66e04b41dfe1bbce8d009e2e | def increment(number: int) -> int:
"""Increment a number.
Args:
number (int): The number to increment.
Returns:
int: The incremented number.
"""
return number + 1
|
6,872 | cb03fcf9c9cb61b3546865fe40cc411745e1fc94 | '''
Created on Jul 10, 2018
@author: daniel
'''
#from multiprocessing import Process, Manager
#from keras.utils import np_utils
import sys
import os
from keras.utils import np_utils
from _codecs import decode
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from DataHandlers.SegNetDataHandler import SegNetDataHandler
import numpy as np
import matplotlib.pyplot as plt
from keras.models import load_model
from Mylayers import MaxPoolingWithArgmax2D, MaxUnpooling2D
import math
from CustomLosses import dice_coef, dice_coef_multilabel, dice_coef_loss, combinedDiceAndChamfer, dice_coef_multilabel_loss, combinedHausdorffAndDice
from dipy.segment.mask import clean_cc_mask
DATA_DIR = os.path.abspath("../")
sys.path.append(DATA_DIR)
def computeDice(im1, im2):
    """Return the Dice coefficient 2|A∩B| / (|A| + |B|) of two binary masks.

    Args:
        im1, im2: array-likes of identical shape; values are coerced to
            booleans (non-zero == foreground).

    Returns:
        float in [0, 1]; 0 when both masks are empty (the 0/0 case).

    Raises:
        ValueError: if the two shapes differ.
    """
    # Bugfix: the np.bool alias was removed in NumPy 1.24; use builtin bool.
    im1 = np.asarray(im1).astype(bool)
    im2 = np.asarray(im2).astype(bool)
    if im1.shape != im2.shape:
        raise ValueError("Shape mismatch: im1 and im2 must have the same shape.")
    intersection = np.logical_and(im1, im2)
    dice = 2. * intersection.sum() / (im1.sum() + im2.sum())
    if math.isnan(dice):
        # Both masks empty: define Dice as 0 rather than propagating NaN.
        return 0
    return dice
def main():
    """Evaluate a saved SegNet on BRATS 2018 FLAIR test slices.

    Loads the test volumes, runs the stored model, binarizes predictions
    at 0.5, prints the mean Dice score over all slices, then displays
    input / ground truth / prediction side by side for each slice.
    """
    num_testing_patients = 4
    n_labels = 1           # single-label (binary) segmentation path below
    normalize = True
    modes = ["flair"]
    dataHandler = SegNetDataHandler("Data/BRATS_2018/HGG_Testing", 
                                    num_patients = num_testing_patients, 
                                    modes = modes)
    dataHandler.loadData()
    dataHandler.preprocessForNetwork()
    x_test = np.array(dataHandler.X)
    x_seg_test = dataHandler.labels
    dataHandler.clear()
    # Hard-coded snapshot path; every custom layer/loss must be registered
    # in custom_objects for Keras to deserialize the model.
    segnet = load_model("Models/segnet_2018-10-28-14:37/model.h5", custom_objects={'MaxPoolingWithArgmax2D': MaxPoolingWithArgmax2D, 
                                                                                  'MaxUnpooling2D':MaxUnpooling2D, 
                                                                                  'combinedDiceAndChamfer':combinedDiceAndChamfer,
                                                                                  'combinedHausdorffAndDice': combinedHausdorffAndDice,
                                                                                  'dice_coef':dice_coef,
                                                                                  'dice_coef_loss':dice_coef_loss, 
                                                                                  'dice_coef_multilabel': dice_coef_multilabel,
                                                                                  'dice_coef_multilabel_loss' : dice_coef_multilabel_loss})
    if normalize:
        # NOTE(review): mean/std come from the test set itself — confirm the
        # training-time statistics shouldn't be reused here instead.
        mu = np.mean(x_test)
        sigma = np.std(x_test)
        x_test -= mu
        x_test /= sigma
    decoded_imgs = segnet.predict(x_test)
    if n_labels > 1:
        #x_seg_test = np_utils.to_categorical(x_seg_test)
        #x_seg_test = np.argmax(x_seg_test, axis=3)
        decoded_imgs = [np.argmax(x, axis = 1) for x in decoded_imgs]
    else:
        # Binarize both ground truth and predictions at the 0.5 threshold.
        for x in x_seg_test:
            x[x > 0.5] = 1
            x[x < 0.5] = 0
        for x in decoded_imgs:
            x[x > 0.5] = 1
            x[x < 0.5] = 0
        decoded_imgs = [x.reshape(dataHandler.W, dataHandler.W) for x in decoded_imgs]
    N = len(decoded_imgs)
    avg_dice = 0
    for i in range(N):
        foo = decoded_imgs[i].reshape(dataHandler.W, dataHandler.W)
        dice = computeDice(x_seg_test[i], foo)
        avg_dice = avg_dice + dice
    print(str(avg_dice/N))
    # Show each slice: original input, ground-truth mask, predicted mask.
    for i in range(N):
        fig = plt.figure()
        plt.gray();
        fig.add_subplot(1,3,1)
        plt.imshow(x_test[i,:,:,0])
        plt.axis('off')
        plt.title('Original')
        fig.add_subplot(1,3,2)
        plt.imshow(x_seg_test[i])
        plt.axis('off')
        plt.title('GT Segment')
        fig.add_subplot(1,3,3)
        plt.imshow(decoded_imgs[i])
        plt.axis('off')
        plt.title('Predicted Segment')
        plt.show()
|
6,873 | 45a85ff765833fd62fc1670404d8994818788707 | def cubarea(l2,b2,h2):
print("Area of cuboid =",2*(l2+b2+h2))
def cubperimeter(l2,b2,h2):
    """Print the total edge length of a cuboid (four edges per dimension)."""
    edge_sum = l2 + b2 + h2
    print("Perimeter of cuboid =", 4 * edge_sum)
|
6,874 | d4e3751b2d4796c72be497007fe4c7d8ca67e18e | from compas.geometry import Frame
|
6,875 | 99eeb039e1a369e450247d10ba22a1aa0b35dae9 | from world.enums import *
from world.content.species import SPECIES
from world.content.chargen import *
from evennia.utils.evmenu import get_input
from evennia.utils.utils import list_to_string
import re
def start(caller):
    """EvMenu entry node: reset all chargen state and show the intro text.

    Initializes point pools, the character dict (every attribute starts at
    20), and the empty term list on the menu's ndb storage.

    NOTE(review): the returned options value is a single dict — the
    parentheses do not make a tuple. Confirm EvMenu accepts this shape; the
    other nodes all return tuples/lists of dicts.
    """
    if not caller:
        return
    caller.ndb._menutree.points = {
        "attributes": 20,
        "skills": 20
    }
    caller.ndb._menutree.character = {
        "home_planet": None,
        "full_name": None,
        "origin": None,
        "stats": {},
        "age": 16,
        "is_psionic": False,
        "current_term": 0,
        "species": "human"
    }
    caller.ndb._menutree.terms = []
    for attribute in AttributeEnum:
        caller.ndb._menutree.character["stats"][attribute.name] = 20
    text = """
    Welcome to Singularity's Character Generator!
    Have a paragraph about WTF is going on and some info about our game. Also here are some warnings
    that you *definitely* shouldn't make multiple characters. And also here's some commands to
    help get you more info! TBD!!!
    |yPlease do not make multiple characters to game chargen.|n
    When you're ready, go ahead and like.. type |ybegin|n to start CharGen.
    """
    return text, ({"key": "begin", "goto": "node_menu"})
def node_menu(caller):
    """Chargen hub node: summarize the character and offer the step commands.

    Bugfix: `options` was a tuple, so the conditional `options.append(...)`
    calls below raised AttributeError as soon as the basics/attributes/
    skills steps were completed. It is now a list (EvMenu accepts any
    iterable of option dicts).
    """
    name = caller.ndb._menutree.character["full_name"]
    if not name:
        name = "Not Set"
    species = caller.ndb._menutree.character["species"]
    origin = caller.ndb._menutree.character["origin"]
    if not origin:
        origin = "Not Set"
    # Per-step completion markers for the summary display.
    d_b = "|gOk|n" if _is_basics_done(caller)[0] else "|rNo|n"
    d_a = "|gOk|n" if _is_attributes_done(caller)[0] else "|rNo|n"
    d_s = "|gOk|n" if _is_skills_done(caller)[0] else "|rNo|n"
    d_l = "|gOk|n" if _is_life_done(caller)[0] else "|rNo|n"
    text = """
    Below are the general details of your character. Use the below commands
    to navigate through chargen steps. Some steps may appear after others are completed.
    |wFull Name:|n %s
    |wSpecies:|n %s
    |wOrigin:|n %s
    Completed:
    |wBasics:|n %s
    |wAttributes:|n %s
    |wStarting Skills:|n %s
    |wLife path:|n %s
    """ % (name, species, origin, d_b, d_a, d_s, d_l)
    options = [
        {"key": "basics", "goto": "node_basics"},
        {"key": "attributes", "goto": "node_attributes"},
        {"key": "skills", "goto": "node_skills"},
    ]
    # Later steps only unlock once the earlier ones are complete.
    if _is_basics_done(caller)[0] and _is_attributes_done(caller)[0] and _is_skills_done(caller)[0]:
        options.append({"key": "life path", "goto": "node_terms"})
    if _is_life_done(caller)[0]:
        options.append({"key": "finish", "goto": "node_finish"})
    return text, options
def node_basics(caller):
    """Chargen node: display name/age/species/origin and the edit commands.

    Each option routes to a prompt callback that mutates the character dict.
    """
    character = caller.ndb._menutree.character
    name = character["full_name"]
    if not name:
        name = "Not Set"
    species = character["species"]
    origin = character["origin"]
    if not origin:
        origin = "Not Set"
    age = character["age"]
    text = """
    |wFull Name:|n %s
    |wAdolescent Age:|n %s
    |wSpecies:|n %s
    |wOrigin:|n %s
    Type |yhelp <command>|n to get info on available choices.
    """ % (name, age, species, origin)
    options = (
        {"key": "return", "goto": "node_menu"},
        {"key": "full_name", "goto": _node_basics_full_name},
        {"key": "age", "goto": _node_basics_age},
        {"key": "species", "goto": _node_basics_species},
        {"key": "origin", "goto": _node_basics_origin}
    )
    return text, options
def _node_basics_full_name(caller):
    """Prompt for and store the character's full name.

    No validation happens here; minimum length is enforced later by
    _is_basics_done.
    """
    def callback(caller, prompt, user_input):
        caller.msg("You set your character's full name to: %s." % user_input)
        caller.ndb._menutree.character["full_name"] = user_input
    get_input(caller, ">> Enter your character's full name.", callback)
def _node_basics_age(caller):
    """Prompt for the starting age and validate it against the species range.

    Bugfix: the prompt input is a `str`, which has no `is_integer()` method
    (that is a `float` method), so every entry raised AttributeError. The
    input is now parsed with int() and range-checked.
    """
    def callback(caller, prompt, user_input):
        species = next(s for s in CHARGEN["species"] if s["key"] == caller.ndb._menutree.character["species"])
        try:
            age = int(user_input)
        except (TypeError, ValueError):
            age = None
        if age is None \
                or age < species["min_start_age"] \
                or age > species["max_start_age"]:
            caller.msg("Age must be a valid number between %s and %s."
                       % (species["min_start_age"], species["max_start_age"]))
            return
        caller.msg("You set your character's age to: %s." % user_input)
        caller.ndb._menutree.character["age"] = age
    get_input(caller, ">> Enter your character's age.", callback)
def _node_basics_species(caller):
    """Prompt for the species; reset age and origin to match the new choice.

    Bugfix: the reset age was read from species_chargen["min_age"], but the
    chargen data uses the key "min_start_age" everywhere else in this module
    (see _node_basics_age and _is_basics_done), so this raised KeyError.
    """
    def callback(caller, prompt, user_input):
        character = caller.ndb._menutree.character
        species = next((s for s in SPECIES if s["title"].lower().startswith(user_input.lower())), None)
        if not species:
            caller.msg("'%s' is not a valid species. Valid species: |wHuman|n, and |wAndroid.|n" % user_input)
            return
        species_chargen = next(s for s in CHARGEN["species"] if s["key"] == species["key"])
        caller.msg("You set your character's species to: %s." % species["title"])
        # Changing species invalidates species-dependent fields.
        character["age"] = species_chargen["min_start_age"]
        character["origin"] = None
        character["species"] = species["key"]
    get_input(caller, ">> Enter your character's species.", callback)
def _node_basics_origin(caller):
    """Prompt for an origin that is valid for the chosen species.

    Bugfix: `filter()` returns a one-shot iterator in Python 3; the `next()`
    lookup consumed it, so the "valid choices" error message always listed
    an empty set. The candidates are now materialized once as a list.
    """
    def callback(caller, prompt, user_input):
        character = caller.ndb._menutree.character
        origins = [o for o in CHARGEN["origins"]
                   if character["species"] in o["species_restrictions"]]
        origin = next((o for o in origins if o["title"].lower().startswith(user_input.lower())), None)
        if not origin:
            caller.msg("'%s' is not a valid origin choice. Valid choices: %s"
                       % (user_input, list_to_string([o["title"] for o in origins])))
            return
        caller.msg("You set your character's origin to: %s." % user_input)
        character["origin"] = origin["key"]
    get_input(caller, ">> Enter your character's origin.", callback)
def _is_attributes_done(caller):
if caller.ndb._menutree.points["attributes"] != 0:
return False, "All attribute points must be allocated."
return True, ""
def _is_basics_done(caller):
    """Gate for the basics step: name length, origin chosen, age in the
    species' allowed start range.

    Returns:
        (bool, str): completion flag plus a reason when incomplete.
    """
    character = caller.ndb._menutree.character
    name = character["full_name"]
    if not name or len(name) < 3:
        return False, "Full name must have a value and be longer than 3 characters."
    origin = character["origin"]
    if not origin:
        return False, "Must select an origin."
    species_stats = next(s for s in CHARGEN["species"] if s["key"] == character["species"])
    age = character["age"]
    if age < species_stats["min_start_age"]:
        return False, "Age must be equal to or more than %s." % species_stats["min_start_age"]
    if age > species_stats["max_start_age"]:
        return False, "Age must be equal to or less than %s." % species_stats["max_start_age"]
    return True, ""
def _is_skills_done(caller):
return False, ""
def _is_life_done(caller):
return False, ""
def node_skills(caller):
    """Chargen node: render every skill and its value in two columns.

    NOTE(review): the "set" option has an empty goto and _is_skills_done
    always returns incomplete — this screen looks unfinished.
    """
    text = """
    """
    index = 0
    stats = caller.ndb._menutree.character["stats"]
    for skill in SkillEnum:
        # Start a new display row on every even index; odd entries share it.
        if index % 2 == 0:
            text += "\n"
        text += ("%s:" % skill.name).ljust(28)
        value = stats.get(skill.name, 0)
        text += str(value).rjust(9)
        if index % 2 == 0:
            text += " "
        index += 1
    options = (
        {"key": "return", "goto": "node_menu"},
        {"key": "set", "goto": ""}
    )
    return text, options
def node_attributes(caller):
    """Chargen node: list attribute values and the add/sub adjustment syntax.

    The Psi attribute is hidden unless the character is psionic; free-form
    input is routed to _node_attributes for parsing.
    """
    text = ""
    for attribute in AttributeEnum:
        if attribute == AttributeEnum.Psi and not caller.ndb._menutree.character["is_psionic"]:
            continue
        text += "%s: " % attribute.name
        text += "%s\r\n" % caller.ndb._menutree.character["stats"][attribute.name]
    text += "\r\n%s points remaining.\r\n" % caller.ndb._menutree.points["attributes"]
    text += "\r\nType \"|yadd <number> to <attribute>|n\" to adjust an attribute positively."
    text += "\r\nType \"|ysub <number> from <attribute>|n\" to adjust an attribute negatively."
    # options = {"key": "_default", "goto": _node_attributes}
    # if caller.ndb._menutree.points["attributes"] == 0:
    options = ({"key": "_default", "goto": _node_attributes},
               {"key": "return", "goto": "node_menu"})
    return text, options
def _node_attributes(caller, raw_string):
    """Parse "add N to X" / "sub N from X" and delegate to adjust_attribute.

    Falls through to redisplay the attributes node when the input matches
    neither pattern. (The trailing `if not match` is always true when
    reached, since both match branches return early.)
    """
    match = re.match(r"add (\d+) to (\w+)", raw_string)
    if match:
        return adjust_attribute(caller, match, True)
    match = re.match(r"sub (\d+) from (\w+)", raw_string)
    if match:
        return adjust_attribute(caller, match, False)
    if not match:
        return "node_attributes"
def node_terms(caller):
    """Chargen node: list completed career terms and the current age.

    Bugfix: entries in `caller.ndb._menutree.terms` are dicts of the form
    {"term": <title>} (see _node_terms and node_term), so reading
    `term.title` raised AttributeError as soon as a term had been chosen;
    the title is now read with the "term" key.
    """
    text = ""
    term_count = 1
    for term in caller.ndb._menutree.terms:
        text += "\r\n* Term %s:" % term_count + " %s" % term["term"]
        term_count += 1
    # Each completed term advances the character four years.
    age = caller.ndb._menutree.character["age"] + (4 * caller.ndb._menutree.character["current_term"])
    text += "\r\nCurrent Character Age: %s" % age
    text += "\r\n\r\nType \"|ychoose <term>|n\" to begin a term."
    options = ({"key": "_default", "goto": _node_terms},
               {"key": "list choices", "goto": _list_term_choices},
               {"key": "finish", "goto": "node_finish"})
    return text, options
def _node_terms(caller, raw_string):
    """Parse "choose <term>" and start a new career term.

    Bugfix: the invalid-term error message contained a bare "%s" that was
    never interpolated; it now names the rejected input.

    NOTE(review): TERMS is presumably provided by the star import from
    world.content.chargen — confirm.
    """
    match = re.match(r"choose (\w+)", raw_string)
    if not match:
        error(caller, "I didn't understand that.")
        return "node_terms"
    term_token = match.group(1).lower()
    term = next((x for x in TERMS if x["title"].lower().startswith(term_token)), None)
    if not term:
        error(caller, "%s is not a valid term. Type \"|ylist choices|n\" to get a list of all available careers."
              % match.group(1))
        return "node_terms"
    caller.ndb._menutree.terms.append({
        "term": term["title"]
    })
    return "node_term"
def _list_term_choices(caller):
    """Message the caller a list of all careers and their assignments, then
    return to the terms node.

    NOTE(review): assignment descriptions are placeholder text.
    """
    text = ""
    for term in TERMS:
        text += "\r\n* %s" % term["title"]
        for assignment in term["assignments"]:
            text += "\r\n\t- %s: " % assignment["title"]
            text += "sample description text"
    caller.msg(text)
    return "node_terms"
def node_term(caller):
    """Chargen node for the most recently chosen career term.

    Shows the career title, placeholder assignment/advancement slots, the
    four-year span the term covers, and the term-scoped commands.
    """
    # The term being configured is always the last one appended by _node_terms.
    term_title = caller.ndb._menutree.terms[len(caller.ndb._menutree.terms) - 1]["term"]
    # term = next((x for x in TERMS if x["title"] == term_title), None)
    text = "Career: %s" % term_title
    text += "\r\nAssignment: Not Set"
    text += "\r\nPersonal Advancement: Not Set"
    text += "\r\nYears: %s" % caller.ndb._menutree.character["age"]
    text += "-%s" % (caller.ndb._menutree.character["age"] + 4)
    text += "\r\n\r\nLife Event: |y1 Available|n"
    text += "\r\n\r\nType \"|yset Assignment to <assignment>|n\" to choose an assignment."
    text += "\r\nType \"|yset Advancement to <option>|n\" to choose a personal advancement."
    text += "\r\n\r\nRolling for a life event is optional and may yield positive or negative results. "
    text += "Once you've chosen to roll a life event, the result cannot be rerolled or changed except through mulligan."
    options = ({"key": "show assignments", "goto": _list_term_assignments},
               {"key": "show advancements", "goto": _list_term_advancements},
               {"key": "roll life event", "goto": _do_life_event})
    return text, options
def _list_term_advancements(caller):
return "node_term"
def _list_term_assignments(caller):
return "node_term"
def _do_life_event(caller):
return "node_term"
def adjust_attribute(caller, match, is_add):
    """Apply an "add/sub <n> to/from <attr>" command with tiered point costs.

    Cost per point of change depends on where the resulting value lands:
    <=12 costs 4, 13-16 costs 2, 17-23 costs 1, 24-26 costs 2, 27-30
    costs 4. Subtracting refunds points (the cost is negated). Attributes
    cannot drop below 10. Always returns "node_attributes" so the menu
    redisplays with feedback from success()/error().
    """
    attribute_token = match.group(2).lower()
    attribute = next((x for x in AttributeEnum if x.name.lower().startswith(attribute_token)), None)
    if not attribute:
        error(caller, "%s is not a valid attribute." % match.group(2))
        return "node_attributes"
    value = int(match.group(1))
    if not value or value < 0:
        # The regex only matches \d+, so this effectively rejects 0.
        error(caller, "Value to adjust must be a positive number.")
        return "node_attributes"
    attribute_value = caller.ndb._menutree.character["stats"][attribute.name]
    if not is_add and attribute_value - value < 10:
        error(caller, attribute.name + " cannot be reduced below 10.")
        return "node_attributes"
    # calculate cost..
    # Walk each intermediate step of the change and sum the tier cost of
    # the value reached at that step.
    i_value = value
    cost = 0
    while i_value > 0:
        if is_add:
            new_value = i_value + attribute_value
        else:
            new_value = attribute_value - i_value
        if new_value <= 12:
            cost += 4
        elif new_value <= 16:
            cost += 2
        elif new_value <= 23:
            cost += 1
        elif new_value <= 26:
            cost += 2
        elif new_value <= 30:
            cost += 4
        i_value -= 1
    if not is_add:
        # Subtraction refunds points rather than spending them.
        cost *= -1
    if cost > caller.ndb._menutree.points["attributes"]:
        deficit = (caller.ndb._menutree.points["attributes"] - cost) * -1
        error(caller, "Raising %s" % attribute.name + " costs %s total points," % cost + " %s more points than you have available." % deficit)
        return "node_attributes"
    # Succeeded the gauntlet. Change their stat.
    if is_add:
        caller.ndb._menutree.character["stats"][attribute.name] += value
    else:
        caller.ndb._menutree.character["stats"][attribute.name] -= value
    caller.ndb._menutree.points["attributes"] -= cost
    msg = "Successfully set %s " % attribute.name + "to %s" % caller.ndb._menutree.character["stats"][attribute.name]
    msg += " for %s points." % cost
    success(caller, msg)
    return "node_attributes"
def node_finish(caller):
    """Terminal chargen node — currently an empty placeholder screen."""
    return "", ()
def success(caller, msg):
    """Send `msg` to the caller wrapped in the system-feedback prefix."""
    caller.msg(f"|b<|cSystem|b>|n {msg}")
def error(caller, msg):
    """Send `msg` to the caller wrapped in the error prefix."""
    caller.msg(f"|y<|rError|y>|n {msg}")
|
6,876 | 4d1157b307d753abea721b93779ccc989c77d8e3 | import erequests
from pyarc.base import RestException
class ResultWrapper(object):
    """Lazy handle for one queued async REST call.

    `get()` blocks (by draining the owning client's batch) until the
    response is available, then decodes it — JSON when possible, raw text
    otherwise.
    """
    def __init__(self, client, method, url):
        self.client = client    # owning ERequestsClient
        self.method = method    # HTTP verb, kept for error reporting
        self.url = url          # prepared request URL
        self.response = None    # set by ERequestsClient.wait_all_requests_completed
    def get(self):
        """Return the decoded body; raise RestException on HTTP >= 400."""
        if self.response is None:
            self.client.wait_all_requests_completed()
        if self.response.status_code >= 400:
            raise RestException(self.method,
                                self.url,
                                self.response.status_code,
                                self.response.text)
        try:
            return self.response.json()
        except ValueError:
            # Body was not valid JSON; hand back the raw text instead.
            return self.response.text
# HTTP verb -> erequests async request-constructor dispatch table.
# NOTE(review): `async` became a reserved keyword in Python 3.7, so
# `erequests.async.get` is a SyntaxError there — this module only runs on
# Python 2 / older interpreters with a matching erequests release.
_METHODS = {
    'get' : erequests.async.get,
    'put' : erequests.async.put,
    'post' : erequests.async.post,
    'delete' : erequests.async.delete
}
class ERequestsClient(object):
    """Batches async HTTP requests (via erequests) and resolves them together."""
    def __init__(self, verify = None):
        self.requests_to_send = []      # pending erequests futures
        self.results = []               # ResultWrapper per future, same order
        self.verify = verify or False   # TLS verification flag passed through
    def start_req(self, method, prepared_url, headers, body = ''):
        """Queue one request and return a ResultWrapper that resolves lazily."""
        method = method.lower()
        assert method in _METHODS, "Unknown method %s" % method
        future = _METHODS[method](prepared_url,
                                  headers = headers,
                                  data = body,
                                  verify = self.verify)
        res = ResultWrapper(self, method, prepared_url)
        self.requests_to_send.append(future)
        self.results.append(res)
        return res
    def wait_all_requests_completed(self):
        """Send every queued request and attach responses to their wrappers."""
        if len(self.requests_to_send) == 0:
            return
        try:
            for resp, result in zip(erequests.map(self.requests_to_send), self.results):
                result.response = resp
        finally:
            # Reset the batch even if mapping failed part-way.
            self.requests_to_send = []
            self.results = []
|
6,877 | b112ca3dc603035f340444fa74a7941b1b95f5e5 | import time
import serial
# Python 2 serial monitor: opens /dev/ttyUSB0 at 9600 8N1 and echoes
# whatever arrives, polling every half second (timeout=None blocks reads).
ser = serial.Serial(
    port='/dev/ttyUSB0',
    baudrate=9600,
    parity=serial.PARITY_NONE,
    stopbits=serial.STOPBITS_ONE,
    bytesize=serial.EIGHTBITS,
    timeout=None
)
ser.close()  # close/reopen to start from a clean port state
ser.open()
if ser.isOpen():
    print "Serial is open"
    ser.flushInput()
    ser.flushOutput()
    while True:
        mimic = ''
        bytesToRead = ser.inWaiting()
        mimic = ser.read( bytesToRead )
        if mimic != '':
            print mimic
        time.sleep(0.5)
        # ser.write( "Got it" )
|
6,878 | 28a3763715f5405f8abe2de17ed5f9df1019278b |
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
dataset = pd.read_csv('mgdata.dat.csv')
training_set = dataset.iloc[:1100, 1:2].values
X_train=[]
y_train=[]
for i in range(20,1090):
X_train.append(training_set[i-20:i,0])
y_train.append(training_set[i,0])
X_train=np.asarray(X_train)
y_train=np.asarray(y_train)
import keras
from keras.models import Sequential
from keras.layers import Dense
# Initialising the ANN
classifier = Sequential()
# Adding the input layer and the first hidden layer
classifier.add(Dense(output_dim = 35, init = 'uniform', activation = 'relu', input_dim = 20))
# Adding the second hidden layer
classifier.add(Dense(output_dim = 35, init = 'uniform', activation = 'relu'))
# Adding the third hidden layer
classifier.add(Dense(output_dim = 35, init = 'uniform', activation = 'relu'))
# Adding the output layer
classifier.add(Dense(output_dim = 1, init = 'uniform', activation = 'linear'))
# Compiling the ANN
classifier.compile(optimizer = 'adam', loss = 'mean_squared_error', metrics = [])
# Fitting the ANN to the Training set
history =classifier.fit(X_train, y_train, batch_size =8, nb_epoch = 60,validation_split=0.03)
dataset_test=dataset.iloc[1100:1110, 1:2].values
y_test=dataset.iloc[1100:1110, 1:2].values
dataset_test=pd.DataFrame(dataset_test)
dataset_train=pd.DataFrame(training_set)
dataset_total = pd.concat((dataset_train, dataset_test), axis = 0)
inputs = dataset_total[len(dataset_total) - len(dataset_test) - 20:].values
inputs = inputs.reshape(-1,1)
X_test = []
for i in range(20,30):
X_test.append(inputs[i-20:i, 0])
X_test = np.array(X_test)
predicted = classifier.predict(X_test)
# Visualising the results
plt.plot(y_test, color = 'red', label="real" )
plt.plot(predicted, color = 'blue', label="predicted")
plt.legend()
plt.show()
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'validation'], loc='upper left')
plt.show()
|
6,879 | cd6e15daa2360ead47f0bac95843b1c030164996 | from .start_node import StartNode
from .character_appearance import CharacterAppearance
from .character_disappearance import CharacterDisappearance
from .replica import Replica
from .end_node import EndNode
from .choice import Choice
from .set_landscape import SetLandscape
from .add_item import AddItem
from .switch_by_item import SwitchByItem
|
6,880 | d18bfdb606e4ba8a67acbb07cd9a3a6d2a0855e3 | """ Class template
Ipea's Python for agent-based modeling course
"""
import random
# class name typically Capital letter
class Pessoa:
    """Toy agent for the ABM course: carries a distance, a random 'luck'
    score, some empty containers, and an optional partner agent.

    (Comments translated to English from the original Portuguese.)
    """
    # Usually has an __init__ method called at the moment of instance creation
    def __init__(self, name, distancia):
        # Store the construction parameters on this instance; agents
        # conventionally carry a unique id or name.
        self.id = name
        self.distancia = distancia
        # Containers / data structures the agent may use later.
        self.members = dict()
        self.ranking = list()
        # Or even a random value (in [1, 60)).
        self.luck = random.randrange(1, 60)
        self.partner = None
    def adiciona_distancia(self, quantia):
        """Increase this agent's stored distance by `quantia`."""
        self.distancia += quantia
    def compara(self, outro_agente):
        """Return True when this agent is strictly farther than the other."""
        # Idiomatic: return the comparison directly instead of if/else.
        return self.distancia > outro_agente.distancia
    def adiciona_sorte(self):
        """Add this agent's random luck to its distance (one method calling
        another)."""
        self.adiciona_distancia(self.luck)
    def match(self, outro_agente):
        """Pair the two agents symmetrically: each stores the other as its
        partner."""
        self.partner = outro_agente
        outro_agente.partner = self
if __name__ == '__main__':
tita = Pessoa('Tita', 10)
max = Pessoa('Max', 20)
fred = Pessoa('Fred', 0)
aveia = Pessoa('Aveia', 11)
print(aveia.compara(tita))
max.match(aveia)
|
6,881 | c1a83c9551e83e395a365210a99330fee7877dff | from django.urls import path,include
from . import views
# Courier-facing routes: registration, two private dashboards, order
# select/cancel by id, per-day settlement view, and delivery history.
urlpatterns = [
    path('register_curier/',views.curier_register,name="register_curier"),
    path('private_сurier/',views.private_сurier,name="private_сurier"),
    path('private_сurier2/',views.private_сurier2,name="private_сurier2"),
    path('private_curier/select/<int:id>',views.curier_select,name="curier_select"),
    path('private_curier/cancel/<int:id>',views.curier_cancel,name="curier_cancel"),
    path("private_curier_raschet/<str:day>",views.rashet_view,name="curier_rashet"),
    path("private_curier_history/",views.curier_history,name="curier_history"),
]
|
6,882 | 8834548f6180fc864d73a71194125b22d230a393 | #!/usr/bin/python
# encoding=utf-8
"""
@Author : Don
@Date : 9/16/2020 1:40 PM
@Desc :
"""
import os
import yaml
# Load the sibling config.yaml once at import time; `conf` is the
# module-level settings dict used by the rest of the package.
config_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), "config.yaml")
with open(config_path, "r", encoding="utf-8") as f:
    # FullLoader parses the full YAML language while avoiding arbitrary
    # object construction.
    conf = yaml.load(f.read(), Loader=yaml.FullLoader)
|
6,883 | d3f52d4713ba4b7b4cd736b26809968e259be63c | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import logging
from django.db import transaction
from ralph_scrooge.models import ProfitCenter
from ralph_scrooge.plugins import plugin_runner
from ralph_scrooge.plugins.collect.utils import get_from_ralph
logger = logging.getLogger(__name__)
@transaction.atomic
def update_profit_center(pc):
    """Upsert one Ralph profit center into Scrooge.

    Args:
        pc: dict from the Ralph API carrying 'id', 'name', 'description'.

    Returns:
        bool: True when a new ProfitCenter row was created.
    """
    profit_center, created = ProfitCenter.objects.get_or_create(
        ralph3_id=pc['id'],
        defaults=dict(
            name=pc['name'],
        )
    )
    # Refresh the mutable fields on every sync, not only at creation time.
    profit_center.name = pc['name']
    profit_center.description = pc['description']
    profit_center.save()
    return created
@plugin_runner.register(chain='scrooge')
def ralph3_profit_center(**kwargs):
    """Collect plugin: sync all profit centers from Ralph into Scrooge.

    Returns:
        (bool, str): success flag and a human-readable created/updated/total
        summary for the plugin runner's report.
    """
    new_pc = total = 0
    for pc in get_from_ralph("profit-centers", logger):
        created = update_profit_center(pc)
        if created:
            new_pc += 1
        total += 1
    return True, '{} new profit center(s), {} updated, {} total'.format(
        new_pc,
        total - new_pc,
        total,
    )
|
6,884 | 8fb5ef7244a8ca057f11cbcdf42d383665dade5e | # Packages
import PySimpleGUI as sg
import mysql.connector
import secrets
# TODO Add a view all button
# TODO Catch errors (specifically for TimeDate mismatches)
# TODO Add a downtime graph
# TODO Add a system feedback window instead of putting this in the out id textbox
error_sel_flag = False # Flag to check whether an error has been selected before performing logic requiring it
guest_user_flag = False # Flag to check whether the user is a guest, and limit which functions of the applciation (and database) they can use
unresolved_errors = [] # MEEP, could probably do without this in the refactor
current_error = { # Dictionary to hold all information about the current/selected error. This removes the need to hit the database for every bit of logic that requires an error
'fault_id': 'Null',
'fault_status': 'Null',
'fault_description': 'Null',
'voyage': 'Null',
'time_of_fault': 'Null',
'time_of_solution': 'Null',
'fault_type': 'Null',
'location': 'Null',
'sensor_id': 'Null',
'sensor_type': 'Null',
'fault_message': 'Null',
'log_date': 'Null'
}
# Dictionary for search parameters. NOTE: deviation from script naming convention is due to the naming convention used in the database
search_dict = {
'Voyage': '',
'FaultStatus': '',
'FaultType': '',
'Location': '',
'SensorID': '',
'SensorType': '',
'TimeOfFault': '',
'TimeOfSolution': ''
}
class DatabaseConnection():
    ''' This class instantiates and maintains the database connection, and encapsulates all functions that work directly with that connection.

    All write/search queries now use parameterized placeholders (%s) instead of
    string formatting.  This closes the SQL-injection hole (every value came
    straight from GUI text fields) and lets the driver handle NULLs and quoting,
    so the hand-rolled "NULL" string interpolation is gone.'''

    def __init__(self, host, user, password, database):
        ''' Create the MySQL connection and the cursor shared by all methods.
        Exits the process (status 69) if the connection cannot be made.'''
        try:
            self.connection = mysql.connector.connect(
                host=host,
                user=user,
                passwd=password,
                database=database,
                auth_plugin='mysql_native_password'
            )
            self.cursor = self.connection.cursor()
        except mysql.connector.Error as e:
            print("Error %d: %s" % (e.args[0], e.args[1]))
            exit(69)

    def save_to_errors(self, fault_status, fault_desciption, voyage, time_of_fault, time_of_solution, fault_type, location, sensor_id, sensor_type, fault_message):
        ''' INSERT one row into `errors`.

        Empty time strings are stored as SQL NULL (Python None) and an empty
        status defaults to "Unresolved", preserving the previous behavior.'''
        if time_of_fault == '':
            time_of_fault = None
        if time_of_solution == '':
            time_of_solution = None
        if fault_status == '':
            fault_status = "Unresolved"
        insert_query = ("INSERT INTO errors (FaultDescription, FaultMessage, FaultStatus, FaultType, Location, "
                        "SensorID, SensorType, TimeOfFault, TimeOfSolution, Voyage) "
                        "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)")
        params = (fault_desciption, fault_message, fault_status, fault_type, location,
                  sensor_id, sensor_type, time_of_fault, time_of_solution, voyage)
        print(insert_query)
        self.cursor.execute(insert_query, params)
        self.connection.commit()

    def save_to_downtime(self, voyage, stop_time, start_time, reason, assosciated_error):
        ''' INSERT one row into `downtime` using a parameterized query.'''
        insert_query = ("INSERT INTO downtime (Voyage, StopTime, StartTime, Reason, AssosciatedError) "
                        "VALUES (%s, %s, %s, %s, %s)")
        print(insert_query)
        self.cursor.execute(insert_query, (voyage, stop_time, start_time, reason, assosciated_error))
        self.connection.commit()

    def fetch(self, fetch_query):
        ''' Run a caller-supplied SELECT and return all rows.

        NOTE(review): callers still assemble this query by string
        concatenation; they should migrate to parameterized calls too.'''
        print("Fetch " + str(fetch_query))
        self.cursor.execute(fetch_query)
        result = self.cursor.fetchall()
        return result

    def update(self, fault_status, fault_desciption, voyage, time_of_fault, time_of_solution, fault_type, location, sensor_id, sensor_type, fault_message, fault_id):
        ''' UPDATE every editable column of the error identified by fault_id.
        The literal string 'None' (what the GUI shows for blank times) is
        mapped to SQL NULL.'''
        if time_of_fault == 'None':
            time_of_fault = None
        if time_of_solution == 'None':
            time_of_solution = None
        update_query = ("UPDATE errors SET FaultStatus = %s, FaultDescription = %s, Voyage = %s, "
                        "TimeOfFault = %s, TimeOfSolution = %s, FaultType = %s, Location = %s, "
                        "SensorID = %s, SensorType = %s, FaultMessage = %s WHERE FaultID = %s")
        print(update_query)
        self.cursor.execute(update_query, (fault_status, fault_desciption, voyage, time_of_fault,
                                           time_of_solution, fault_type, location, sensor_id,
                                           sensor_type, fault_message, fault_id))
        self.connection.commit()
        print("Updated")

    def search(self, voyage, status, fault_type, location, sensor_id, sensor_type, start_time, end_time):
        ''' SELECT errors matching every non-empty search field (ANDed).

        Column names come from the fixed `search_dict` keys — never from user
        input — so only the values need parameterizing.'''
        search_dict['Voyage'] = voyage
        search_dict['FaultStatus'] = status
        search_dict['FaultType'] = fault_type
        search_dict['Location'] = location
        search_dict['SensorID'] = sensor_id
        search_dict['SensorType'] = sensor_type
        search_dict['TimeOfFault'] = start_time
        search_dict['TimeOfSolution'] = end_time
        # Drop empty values so only the supplied criteria constrain the query.
        reduced_search_dict = dict((k, v) for k, v in search_dict.items() if v)
        if(len(reduced_search_dict) < 2):
            print("Please enter at least two search criteria (sorry, Nic rushed this section!)")
            return 0
        # Equivalent to the previous row-constructor equality: AND of per-column tests.
        where_clause = " AND ".join("{} = %s".format(column) for column in reduced_search_dict)
        search_query = "SELECT * FROM errors WHERE " + where_clause
        print(search_query)
        self.cursor.execute(search_query, tuple(reduced_search_dict.values()))
        result = self.cursor.fetchall()
        return result

    def shutdown(self):
        ''' Close the cursor and the underlying connection (was a TODO).'''
        self.cursor.close()
        self.connection.close()
# Create window functions
def create_login_window():
    ''' This function contains the layout for, invokes, and monitors the login window. When a user logs in, it creates an instance of
    the 'DatabaseConnection' class, establishing a connection to the database for use by the main application. This function returns the
    created instance of 'DatabaseConnection' for use by other functions in the script.
    '''
    # Window setup
    login_layout = [[sg.Text('Hostname: '), sg.In(size = (25, 0), key = '-HOST-')],
                [sg.Text('Username: '), sg.In(size = (25, 0), key = '-USER-')],
                [sg.Text('Password: '), sg.In(size = (25, 0), pad = (3, 0), password_char = '*', key='-PASS-')],
                [sg.Button('Login', size = (14, 0), pad = ((0, 10), (5, 0)), enable_events = True, bind_return_key = True, key = '-LOGIN-'), sg.Button('Guest Login.', size = (14, 0), pad = ((10, 0), (5, 0)), enable_events = True, key = '-LOGIN GUEST-')]
                ]
    login_window = sg.Window("LLMSDID - Login",
                           layout=login_layout,
                           margins=(20, 10),
                           grab_anywhere=True,
                           default_button_element_size=(12, 1)
                           )
    # Logic
    while True:
        login_event, login_values = login_window.read()
        # Full login: connect with the credentials typed into the form.
        if login_event == '-LOGIN-':
            current_db = DatabaseConnection(login_values['-HOST-'], login_values['-USER-'], login_values['-PASS-'], "LLMSDID") # Instantiate instance of 'DatabaseConnection'
            login_window.close()
            return current_db
        # Guest login: read-only credentials taken from the local `secrets` module.
        # NOTE(review): that module shadows the stdlib `secrets` — confirm before renaming.
        if login_event == '-LOGIN GUEST-':
            current_db = DatabaseConnection('localhost', secrets.guestUsername, secrets.guestPassword, "LLMSDID") # Instantiate instance of 'DatabaseConnection'
            global guest_user_flag
            guest_user_flag = True
            login_window.close()
            return current_db
        # If the user closes the window, exit this loop so that the program can close
        if login_event == sg.WIN_CLOSED:
            login_window.close()
            exit(69)
def create_update_window(selected_error, database):
    ''' Show the selected error side-by-side with editable copies of its
    fields; on "Update" write the edited values back through
    database.update(), on "Cancel" or window close discard them.

    selected_error -- the `current_error` dict filled by the main loop
    database       -- the live DatabaseConnection instance
    '''
    # Left column: read-only snapshot of the error as currently stored.
    update_col_1 = sg.Column([[sg.Frame('Current values', [[sg.Column([[sg.Text("Voyage: ", size=(12,1)), sg.Text(selected_error['voyage'])],
                                       [sg.Text("Status: ", size=(12,1)), sg.Text(selected_error['fault_status'])],
                                       [sg.Text("Description: ", size=(12,4)), sg.Multiline(selected_error['fault_description'], size=(40, 4))],
                                       [sg.Text("Fault message: ", size=(12,2)), sg.Multiline(selected_error['fault_message'], size=(40,2))],
                                       [sg.Text("Fault type: ", size=(12,1)), sg.Text(selected_error['fault_type'])],
                                       [sg.Text("Fault location: ", size=(12,1)), sg.Text(selected_error['location'])],
                                       [sg.Text("Sensor ID: ", size=(12,1)), sg.Text(selected_error['sensor_id'])],
                                       [sg.Text("Sensor type: ", size=(12,1)), sg.Text(selected_error['sensor_type'])],
                                       [sg.Text("From: ", size=(12,1)), sg.Text(selected_error['time_of_fault'])],
                                       [sg.Text("To: ", size=(12,1)), sg.Text(selected_error['time_of_solution'])]],
                                      )]])]])
    # Right column: same fields as inputs, pre-filled with the current values.
    update_col_2 = sg.Column([[sg.Frame('Updated values', [[sg.Column([[sg.Text("Voyage: ", size=(12,1)), sg.In(selected_error['voyage'], size=(40,1), key='-NEW VOYAGE-')],
                                       [sg.Text("Status: ", size=(12,1)), sg.InputCombo(["Unresolved", "Resolved"], default_value=selected_error['fault_status'], key='-NEW STATUS-')],
                                       [sg.Text("Description: ", size=(12,4)), sg.Multiline(selected_error['fault_description'], size=(40,4), key='-NEW DESC-')],
                                       [sg.Text("Fault message: ", size=(12,2)), sg.Multiline(selected_error['fault_message'], size=(40,2), key='-NEW MESSAGE-')],
                                       [sg.Text("Fault type: ", size=(12,1)), sg.In(selected_error['fault_type'], size=(40,1), key='-NEW FTYPE-')],
                                       [sg.Text("Fault location: ", size=(12,1)), sg.In(selected_error['location'], size=(40,1), key='-NEW LOC-')],
                                       [sg.Text("Sensor ID: ", size=(12,1)), sg.In(selected_error['sensor_id'], size=(40,1), key='-NEW ID-')],
                                       [sg.Text("Sensor type: ", size=(12,1)), sg.In(selected_error['sensor_type'], size=(40,1), key='-NEW STYPE-')],
                                       [sg.Text("From: ", size=(12,1)), sg.In(selected_error['time_of_fault'], size=(40,1), key='-NEW FROM-')],
                                       [sg.Text("To: ", size=(12,1)), sg.In(selected_error['time_of_solution'], size=(40,1), key='-NEW TO-')]],
                                      )]])]])
    update_col_3 = sg.Column([[sg.Frame('Actions', [[sg.Column([[sg.Button("Update", enable_events=True,
                                                                           tooltip="Press me if you'd like to update this fault.",
                                                                           key='-SAVE UPDATE-'),
                                                                 sg.Button("Cancel", enable_events=True,
                                                                           tooltip="Press me if you'd like to cancel this update.",
                                                                           key='-CANCEL UPDATE-')]])]])]])
    updateLayout = [[update_col_1, update_col_2], [update_col_3]]
    update_window = sg.Window("LLMSDID - Update",
                              layout=updateLayout,
                              margins=(200, 100),
                              grab_anywhere=True,
                              default_button_element_size=(12, 1)
                              )
    print("Updating " + str(selected_error['fault_id']))
    while True:
        update_event, update_value = update_window.read()
        # Persist the edited fields; the FaultID itself is never editable.
        if update_event == '-SAVE UPDATE-':
            database.update(update_value['-NEW STATUS-'], update_value['-NEW DESC-'], update_value['-NEW VOYAGE-'], update_value['-NEW FROM-'], update_value['-NEW TO-'], update_value['-NEW FTYPE-'], update_value['-NEW LOC-'], update_value['-NEW ID-'], update_value['-NEW STYPE-'], update_value['-NEW MESSAGE-'], selected_error['fault_id'])
            update_window.close()
            break
        # If the user closes the window, exit this loop so that the program can close
        if update_event == sg.WIN_CLOSED or update_event == '-CANCEL UPDATE-':
            update_window.close()
            break
def create_log_window(database):
    ''' Modal form for logging a brand-new error; "Save" inserts the row via
    database.save_to_errors(), "Cancel" or window close discards it.'''
    log_layout = [
        [sg.Text("Fault description", size=(12,1)), sg.In(size=(40, 40), key='-DESCRIPTION-')],
        [sg.Text("Fault message", size=(12,1)), sg.In(size=(40, 40), key='-MESSAGE-')],
        [sg.Text("Status", size=(12,1)), sg.InputCombo(["Unresolved", "Resolved"], key='-STATUS-')],
        [sg.Text("Fault type", size=(12,1)), sg.In(size = (25, 1), key='-TYPE-')],
        [sg.Text("Location", size=(12,1)), sg.In(size=(25, 1), key='-LOCATION-')],
        [sg.Text("Sensor ID", size=(12,1)), sg.In(size=(25, 1), key='-SENSOR ID-')],
        [sg.Text("Sensor type", size=(12,1)), sg.In(size=(25, 1), key='-SENSOR TYPE-')],
        [sg.Text("Time of fault", tooltip = "dd-mm-yy hh:mm:ss", size=(12,1)), sg.In(size=(25, 1), key='-START-')],
        [sg.Text("Time of solution", tooltip = "dd-mm-yy hh:mm:ss", size=(12,1)), sg.In(size=(25, 1), key='-END-')],
        [sg.Text("Voyage", size=(12,1)), sg.In(size=(25, 1), key='-VOYAGE-')],
        [sg.Button("Save", enable_events=True, key='-LOG SAVE-'), sg.Button("Cancel", enable_events=True, key='-LOG CANCEL-')]
    ]
    log_window = sg.Window("LLMSDID - Log an error",
                           layout=log_layout,
                           margins=(200, 100),
                           grab_anywhere=True,
                           default_button_element_size=(12, 1)
                           )
    while True:
        log_event, log_values = log_window.read()
        # Insert the new error; save_to_errors supplies defaults for blank
        # status/time fields.
        if log_event == '-LOG SAVE-':
            database.save_to_errors(log_values['-STATUS-'], log_values['-DESCRIPTION-'], log_values['-VOYAGE-'], log_values['-START-'], log_values['-END-'], log_values['-TYPE-'], log_values['-LOCATION-'], log_values['-SENSOR ID-'], log_values['-SENSOR TYPE-'], log_values['-MESSAGE-'])
            log_window.close()
            break
        # If the user closes the window, exit this loop so that the program can close
        if log_event == sg.WIN_CLOSED or log_event == '-LOG CANCEL-':
            log_window.close()
            break
def create_more_window(selected_error, database):
    ''' Read-only detail view showing every stored field of the selected
    error. `database` is currently unused here but kept for signature
    symmetry with the other window helpers.'''
    # NOTE(review): more_col_1 is built but never placed in moreLayout below —
    # confirm whether the standalone 'Parameter' label column was meant to show.
    more_col_1 = sg.Column([[sg.Frame('Parameter', [[sg.Column([[sg.Text("Fault ID: ")],
                                       [sg.Text("Voyage: ")],
                                       [sg.Text("Status: ")],
                                       [sg.Text("Description: ")],
                                       [sg.Text("Fault message: ")],
                                       [sg.Text("Fault type: ")],
                                       [sg.Text("Fault location: ")],
                                       [sg.Text("Sensor ID: ")],
                                       [sg.Text("Sensor type: ")],
                                       [sg.Text("From: ")],
                                       [sg.Text("To: ")],
                                       [sg.Text("Log date: ")]],
                                      )]])]])
    # Label + value rows for every column of the `errors` table.
    more_col_2 = sg.Column([[sg.Frame('Value', [[sg.Column([[sg.Text("Fault ID: ", size=(12,1)), sg.Text(selected_error['fault_id'], size=(40,1))],
                                       [sg.Text("Voyage: ", size=(12,1)), sg.Text(selected_error['voyage'], size=(40,1))],
                                       [sg.Text("Status: ", size=(12,1)), sg.Text(selected_error['fault_status'], size=(40,1))],
                                       [sg.Text("Description: ", size=(12,4)), sg.Multiline(selected_error['fault_description'], size=(40,4))],
                                       [sg.Text("Fault message: ", size=(12,2)), sg.Multiline(selected_error['fault_message'], size=(40,2))],
                                       [sg.Text("Fault type: ", size=(12,1)), sg.Text(selected_error['fault_type'], size=(40,1))],
                                       [sg.Text("Fault location: ", size=(12,1)), sg.Text(selected_error['location'], size=(40,1))],
                                       [sg.Text("Sensor ID: ", size=(12,1)), sg.Text(selected_error['sensor_id'], size=(40,1))],
                                       [sg.Text("Sensor type: ", size=(12,1)), sg.Text(selected_error['sensor_type'], size=(40,1))],
                                       [sg.Text("From: ", size=(12,1)), sg.Text(selected_error['time_of_fault'], size=(40,1))],
                                       [sg.Text("To: ", size=(12,1)), sg.Text(selected_error['time_of_solution'], size=(40,1))],
                                       [sg.Text("Log date: ", size=(12,1)), sg.Text(selected_error['log_date'], size=(40,1))]],
                                      )]])]])
    more_col_3 = sg.Column([[sg.Frame('Actions', [[sg.Column([[sg.Button("Thanks", enable_events=True,
                                                                         tooltip="Press me if you're done having a look.",
                                                                         key='-THANKS-')
                                                               ]])]])]])
    moreLayout = [[more_col_2], [more_col_3]]
    more_window = sg.Window("LLMSDID - More",
                            layout=moreLayout,
                            margins=(200, 100),
                            grab_anywhere=True,
                            default_button_element_size=(12, 1)
                            )
    while True:
        more_event, more_value = more_window.read()
        # If the user closes the window, exit this loop so that the program can close
        if more_event == sg.WIN_CLOSED or more_event == '-THANKS-':
            more_window.close()
            break
def create_downtime_window(database):
    ''' Modal form for recording system downtime linked to a logged error;
    "Save" inserts the row via database.save_to_downtime().'''
    # NOTE(review): the "dd-mm-yy hh:mm:ss" tooltips sit on the restart-time
    # and reason rows — they look shifted down by one row; confirm intent.
    downtime_layout = [
        [sg.Text("Voyage"), sg.In(size=(40, 40), key='-VOYAGE-')],
        [sg.Text("System Stop Time"), sg.In(size=(40, 40), key='-STOP-')],
        [sg.Text("System Restart Time", tooltip = "dd-mm-yy hh:mm:ss"), sg.In(size=(40, 40), key='-START-')],
        [sg.Text("Reason for Downtime", tooltip = "dd-mm-yy hh:mm:ss"), sg.In(size=(25, 1), key='-REASON-')],
        [sg.Text("Assosciated Error"), sg.In(size=(25, 1), key='-ASSOSCIATED ERROR-')],
        [sg.Button("Save", enable_events=True, key='-LOG SAVE-'),
         sg.Button("Cancel", enable_events=True, key='-LOG CANCEL-')]
    ]
    downtime_window = sg.Window("LLMSDID - Log some downtime",
                                layout=downtime_layout,
                                margins=(200, 100),
                                grab_anywhere=True,
                                default_button_element_size=(12, 1)
                                )
    while True:
        downtime_event, downtime_values = downtime_window.read()
        if downtime_event == '-LOG SAVE-':
            database.save_to_downtime(downtime_values['-VOYAGE-'], downtime_values['-STOP-'], downtime_values['-START-'], downtime_values['-REASON-'], downtime_values['-ASSOSCIATED ERROR-'])
            downtime_window.close()
            break
        # If the user closes the window, exit this loop so that the program can close
        if downtime_event == sg.WIN_CLOSED or downtime_event == '-LOG CANCEL-':
            downtime_window.close()
            break
# Main window layout
main_column_1 = sg.Column([[sg.Frame('Advanced search', [[sg.Column([[sg.Text("Voyage: ", tooltip = "Let me know which voyage you'd like to see the errors for."), sg.In(size = (15, 1), pad = ((34, 0), (0, 0)), key = '-VOYAGE SEARCH-')],
[sg.Text("Status: ", tooltip = "Would you like to look at errors we've already solved? Let me know here!"), sg.In(size = (15, 1), pad = ((40, 0), (0, 0)), key = '-STATUS SEARCH-')],
[sg.Text("Fault type: ", tooltip = "Here you can let me know what type of fault you'd like to search for."), sg.In(size = (15, 1), pad = ((20, 0), (0, 0)), right_click_menu = ("Cable", "Hardware", "Sensor", "Connector"), key = '-TYPE SEARCH-')],
[sg.Text("Fault location: ", tooltip = "If you suspect that your fault might be location-specific, say so here to see previous errors that have occurred in that location."), sg.In(size = (15, 1), pad = ((0, 0), (0, 0)), key = '-LOCATION SEARCH-')],
[sg.Text("Sensor ID: ", tooltip = "Think that your error could be sensor-specific? Find previous issues with your exact sensor by entering it's asset number here."), sg.In(size = (15, 1), pad = ((21, 0), (0, 0)), key = '-SENSOR ID SEARCH-')],
[sg.Text("Sensor type: ", tooltip = "Search for previous errors that have been encountered with your specific type of sensor."), sg.In(size = (15, 1), pad = ((8, 0), (0, 0)), key = '-SENSOR TYPE SEARCH-')],
[sg.Text("From: ", tooltip = "Enter the start date for your search."), sg.In(size = (15, 1), tooltip = "dd-mm-yy hh:mm:ss", pad = ((48, 0), (0, 0)), key = '-FROM SEARCH-')],
[sg.Text("To: ", tooltip = "Enter the end date for your search."), sg.In(size = (15, 1), tooltip = "dd-mm-yy hh:mm:ss", pad = ((64, 0), (0, 0)), key = '-TO SEARCH-')],
[sg.Button("Search errors", size = (12, 1), pad = ((93, 0), (7, 0)), enable_events=True, tooltip = "Press me if you'd like to search for specific error characteristics.",key = '-SEARCH ERROR-')]], pad = (3, 3))]])]])
main_column_2 = sg.Column([[sg.Frame('Faults:', [[sg.Column([[sg.Listbox(unresolved_errors, enable_events = True, size=(20, len(unresolved_errors)), key = '-ERROR LIST-')]]),
sg.Column([[sg.Text("Error ID: ", size=(14,1)), sg.Text("", size=(20,1), key='-OUT ID-')],
[sg.Text("Error Description: ", size=(14,15)), sg.Multiline("", size=(20,15), key='-OUT DESC-')],
]) ],
[sg.Button("Update", enable_events = True, tooltip = "Press me if you'd like to update some of the information about the selected error.", key = '-UPDATE ERROR-'),
sg.Button("Give me more!", enable_events = True, tooltip = "Press me if you'd like to view all the information about this specific error.", key = '-SHOW ME MORE-'),
sg.Button("Show me unresolved errors", enable_events = True, tooltip="Press me if you'd like to see all the unresolved errors", key = '-UNRESOLVED-')]], pad=(0, 0))]])
main_column_3 = sg.Column([[sg.Frame('Actions', [[sg.Column([[sg.Button("Log a new error", enable_events=True, tooltip = "Press me if you'd like to log a new error.", key = '-LOG ERROR-'),
sg.Button("Log some downtime", enable_events=True, tooltip="Press me if you'd like to log system downtime as a result of a logged error.", key='-LOG DOWNTIME-')]])]])]])
main_layout = [[main_column_1, main_column_2], [main_column_3]]
main_window = sg.Window("LLMSDID - Home",
layout = main_layout,
margins = (200, 100),
grab_anywhere=True,
default_button_element_size=(12, 1))
if __name__ == "__main__":
db_object = create_login_window()
while True:
event, values = main_window.read()
if event == '-UNRESOLVED-':
update_query = "SELECT FaultID, FaultDescription FROM errors WHERE FaultStatus = 'Unresolved'"
unresolved_errors = db_object.fetch(update_query)
main_window['-ERROR LIST-'].update(unresolved_errors)
main_window.refresh()
if values['-ERROR LIST-']:
selected_error = values['-ERROR LIST-'][0]
error_sel_flag = True
fetch_query = "SELECT * FROM errors WHERE FaultId = " + str(selected_error[0])
current_error_list = db_object.fetch(fetch_query)
current_error['fault_id'] = current_error_list[0][0]
current_error['fault_status'] = current_error_list[0][1]
current_error['fault_description'] = current_error_list[0][2]
current_error['voyage'] = current_error_list[0][3]
current_error['time_of_fault'] = current_error_list[0][4]
current_error['time_of_solution'] = current_error_list[0][5]
current_error['fault_type'] = current_error_list[0][6]
current_error['location'] = current_error_list[0][7]
current_error['sensor_id'] = current_error_list[0][8]
current_error['sensor_type'] = current_error_list[0][9]
current_error['fault_message'] = current_error_list[0][10]
current_error['log_date'] = current_error_list[0][11]
main_window['-OUT ID-'].update(current_error['fault_id'])
main_window['-OUT DESC-'].update(current_error['fault_description'])
if event == '-UPDATE ERROR-':
if guest_user_flag:
print("User does not have privileges to update issues")
else:
if error_sel_flag:
create_update_window(current_error, db_object) # MEEP: point to db_object?
else:
main_window['-OUT ID-'].update("Please select a fault for us to update.")
print("No fault selected")
if event == '-LOG ERROR-':
if guest_user_flag:
print("User does not have privileges to log an error")
else:
create_log_window(db_object)
# TODO Set current issue as logged issue if it is unresolved
if event == '-SEARCH ERROR-':
unresolved_errors = db_object.search(values['-VOYAGE SEARCH-'], values['-STATUS SEARCH-'], values['-TYPE SEARCH-'], values['-LOCATION SEARCH-'], values['-SENSOR ID SEARCH-'], values['-SENSOR TYPE SEARCH-'], values['-FROM SEARCH-'], values['-TO SEARCH-'])
main_window['-ERROR LIST-'].update(unresolved_errors)
main_window.refresh()
if event == '-SHOW ME MORE-':
if error_sel_flag:
create_more_window(current_error, db_object)
else:
main_window['-OUT ID-'].update("Please select a fault for us to have a look at.")
print("No fault selected")
if event == '-LOG DOWNTIME-':
if(guest_user_flag):
print("User does not have privileges to log downtime")
else:
create_downtime_window(db_object)
if event == sg.WIN_CLOSED:
break |
6,885 | 6f877dccab8d62e34b105bbd06027cbff936e3aa | mlt = 1
def sum_square_difference(n=100):
    """Return (1 + 2 + ... + n)**2 - (1**2 + 2**2 + ... + n**2).

    Project Euler problem 6, generalized: the original hard-coded n=100.
    """
    numbers = range(1, n + 1)
    return sum(numbers) ** 2 - sum(i * i for i in numbers)


# Preserve the original script behavior: print the answer for n=100.
print(sum_square_difference())
|
6,886 | a5646a5d42dbf6e70e9d18f28513ee2df68a28b1 | # (1) Obtain your values here (https://core.telegram.org/api/obtaining_api_id)
api_id = 0                              # integer API ID from my.telegram.org (placeholder)
api_hash = '00000000000000000000000'    # API hash issued together with the ID (placeholder)
phone = '+000000000000'                 # account phone number in international format
username = 'theone'                     # session / account username
project_id = 0                          # project identifier (placeholder)
|
6,887 | 991260c268d53fbe73e9bff9990ac536ed802d7a | '''
Author: ulysses
Date: 1970-01-01 08:00:00
LastEditTime: 2020-08-03 15:44:57
LastEditors: Please set LastEditors
Description:
'''
from pyspark.sql import SparkSession
from pyspark.sql.functions import split, explode
if __name__ == "__main__":
    # Structured Streaming word count over a raw TCP socket
    # (e.g. fed by `nc -lk 9999`).
    spark = SparkSession\
        .builder\
        .appName('StructedSocketWordCount')\
        .master('local[4]')\
        .getOrCreate()
    sc =spark.sparkContext
    sc.setLogLevel('WARN')
    # Read the stream from the socket source; each input line becomes one row
    # with a single string column named `value`.
    lines = spark\
        .readStream\
        .format('socket')\
        .option('host', 'localhost')\
        .option('port', 9999)\
        .load()
    words = lines.select(
        explode(
            split(lines.value, ' ')  # split each line on spaces
        ).alias('word')  # explode the per-line array into one row per word
    )
    # Running count per distinct word.
    wordcounts = words.groupBy('word').count()
    # Sink: write the complete counts table to the console every 8 seconds.
    query = wordcounts\
        .writeStream\
        .outputMode('complete')\
        .format('console')\
        .trigger(processingTime="8 seconds")\
        .start()
    query.awaitTermination()
|
6,888 | ab36b3d418be67080e2efaba15edc1354386e191 | import requests
# Fetch the basket service version through the any-api.com HTTPS proxy.
# A timeout is essential: requests.get() without one can hang forever if the
# remote host stops responding.
response = requests.get(
    'https://any-api.com:8443/https://rbaskets.in/api/version',
    timeout=10,
)
print(response.text)
6,889 | 38906a31ab96e05a9e55a51260632538872ed463 | #!/usr/bin/env python3
# coding: utf-8
"""
Blaise de Vigenère (1523–1596) mathematician, developed encryption scheme,
VigenereCipher algorithm is implemented based on his work, with a utility
of relative strength index for encryption and decryption.
VERSION : 1.0
LICENSE : GNU GPLv3
STYLE : PEP 8
AUTHOR : AKULA.S.S.S.R.Krishna
Date : 05/11/2020
PURPOSE : To encrypt and decrypt text based files
INPUT : python3 VingenerCipher -i sample_file.txt -e "sample password"
OUTPUT : sample_file.txt will be replaced with encrypted data.
"""
import os
import argparse
class VigenereCipher(object):
    """Vigenere cipher over the printable ASCII alphabet '!'..'~' (94 chars).

    Plaintext characters outside that range pass through unchanged, but they
    still consume a position of the repeating key.

    Fix: the per-position key shift is now reduced modulo the alphabet size.
    Previously a key character outside '!'..'~' (for example a space) could
    shift a printable character out of the printable range, producing
    ciphertext that decode() could not invert.  For keys made of printable
    characters the output is identical to the original implementation.
    """

    _LOW = ord('!')             # first character of the rotating alphabet (33)
    _HIGH = ord('~')            # last character of the rotating alphabet (126)
    _SPAN = _HIGH - _LOW + 1    # alphabet size (94)

    def __init__(self, key):
        print('Vigenere Cipher Encription')
        self.key = key

    def _shift(self, pos):
        """Return the key-derived shift (0..93) for character position *pos*."""
        key = self.key
        return (ord(key[pos % len(key)]) - self._LOW) % self._SPAN

    def encode(self, text):
        """Encrypt *text*: rotate each printable character forward by the key."""
        out = []
        for pos, ch in enumerate(text):
            code = ord(ch)
            if self._LOW <= code <= self._HIGH:
                code = self._LOW + (code - self._LOW + self._shift(pos)) % self._SPAN
                out.append(chr(code))
            else:
                out.append(ch)  # non-printable: copied through unchanged
        return ''.join(out)

    def decode(self, text):
        """Decrypt *text*: exact inverse of encode() under the same key."""
        out = []
        for pos, ch in enumerate(text):
            code = ord(ch)
            if self._LOW <= code <= self._HIGH:
                code = self._LOW + (code - self._LOW - self._shift(pos)) % self._SPAN
                out.append(chr(code))
            else:
                out.append(ch)
        return ''.join(out)
def read_from_file(file_name):
    """Return the entire contents of *file_name* as a single string."""
    # Context manager guarantees the handle is closed even if read() raises
    # (the original leaked the handle on error).
    with open(file_name, 'r') as f:
        return f.read()
def write_to_file(file_name, data):
    """Overwrite *file_name* with *data*."""
    # Context manager closes the handle on all paths; also drops the original
    # pointless reassignment `data = f.write(data)`.
    with open(file_name, 'w') as f:
        f.write(data)
def encode_from_file(file_name, obj):
    """Encrypt one file in place, applying the cipher `args.strength` times."""
    text = read_from_file(file_name)
    rounds = args.strength
    while rounds > 0:
        text = obj.encode(text)
        rounds -= 1
    write_to_file(file_name, text)  # replaces the file with encrypted data
    print('encode file -> ' + file_name)
def decode_from_file(file_name, obj):
    """Decrypt one file in place, applying the cipher `args.strength` times."""
    text = read_from_file(file_name)
    rounds = args.strength
    while rounds > 0:
        text = obj.decode(text)
        rounds -= 1
    write_to_file(file_name, text)  # replaces the file with decrypted data
    print('decode file -> ' + file_name)
def encription_form_path(PATH, obj):  # Recursive function (MT-safe)
    """Walk PATH recursively and encrypt or decrypt every regular file found.

    Uses EAFP control flow: os.listdir() raises OSError (NotADirectoryError)
    when PATH is a plain file, and the except handler is where the per-file
    encode/decode actually happens.
    """
    try:
        for path in os.listdir(PATH):
            encription_form_path(PATH + '/' + path, obj)
    except(OSError):
        # PATH is a file (or unreadable): transform it according to the
        # command-line mode held in the module-level argparse namespace `args`.
        if(args.encode):
            encode_from_file(PATH, obj)
        elif(args.decode):
            decode_from_file(PATH, obj)
"""
input can be either -i file / -f folder,
encode -e, decode -d for encryption and decryption respectively,
strength -s indicates number of times to be encrypted / decrypted.
"""
# Command-line interface: -i file / -f folder select the target, -e / -d
# supply the password and select encrypt vs decrypt, -s repeats the cipher.
parser = argparse.ArgumentParser('Description of your program')
parser.add_argument('-i', '--input_file',
                    help='input file name', required=False)
parser.add_argument('-e', '--encode',
                    help='encode password', required=False)
parser.add_argument('-d', '--decode',
                    help='decode password', required=False)
parser.add_argument('-f', '--folder',
                    help='folder name', required=False)
parser.add_argument('-s', '--strength',
                    help='encription strength', type=int,
                    default=1, required=False)
args = (parser.parse_args())
# Resolve the root path: a single file (-i) takes precedence over a tree (-f).
if(args.input_file):
    PATH = args.input_file
elif(args.folder):
    PATH = args.folder
else:
    exit('Need --input_file or --folder\nUse -h for help')
# The password doubles as the mode switch: -e encrypts, -d decrypts.
if(args.encode):
    pswd = args.encode
elif(args.decode):
    pswd = args.decode
else:
    exit('Need --encode or --decode\nUse -h for help')
obj = VigenereCipher(pswd)
encription_form_path(PATH, obj)
|
6,890 | 7c19b9521dc874a1ff4bed87dae0452cc329224a | import Environment.spot_environment_model
"""This is basically the control center. All actions here are being condensed and brought in from
spot_market_model...... the BRAIN of the simulator"""
class SpotEnvironmentController():
    """Facade over SpotEnvironmentModel: the control center of the simulator.

    Every public call is delegated to the model instance (`self.sem`); when
    `debug` is True the delegating methods print a trace line first.
    """

    def __init__(self, debug=False):  # debug builds an error trap
        """Create the controller and its underlying model.

        debug -- when True, traced methods print which call is being made.
        """
        self.debug = debug
        if self.debug:
            print("... In Controller -> __init__")
        self.sem = Environment.spot_environment_model.SpotEnvironmentModel(self.debug)

    # --- persistence --------------------------------------------------
    def load_file(self, path):
        """Load a project from `path` (native format)."""
        self.sem.load_file(path)  # loads file by pulling from file path

    def load_file_json(self, path):
        """Load a project from `path` (JSON format)."""
        self.sem.load_file_json(path)  # loads file by pulling from file path

    def save_project(self, path):
        """Save the current project to `path` (native format)."""
        self.sem.save_file(path)

    def save_project_json(self, path):
        """Save the current project to `path` (JSON format)."""
        self.sem.save_file_json(path)

    # --- market construction ------------------------------------------
    def reset_market(self):
        """Reset the model's market state."""
        # Fixed: a stray `pass` statement preceded this call in the original.
        self.sem.reset_market()

    def make_market(self, make_d):
        """Build a market from the description dict `make_d`."""
        if self.debug:
            print("... In Controller -> make_market")
        self.sem.make_market(make_d)

    def set_market_parms(self, parms):
        """Forward market parameters to the model."""
        if self.debug:
            # Trace label corrected to match this method's actual name.
            print("... In Controller -> set_market_parms")
        self.sem.set_market_parms(parms)

    def add_buyer(self, bn, values):
        """Register buyer `bn` with reservation `values`."""
        if self.debug:
            print("... In Controller -> add_buyer")
            print("... Buyer {}, values {}".format(bn, values))
        self.sem.add_buyer(bn, values)

    def add_seller(self, sn, costs):
        """Register seller `sn` with unit `costs`."""
        if self.debug:
            print("... In Controller -> add_seller")
            print("... Seller {}, costs {}".format(sn, costs))
        self.sem.add_seller(sn, costs)

    # --- simple queries (pure delegation) ------------------------------
    def get_num_buyers(self):
        return self.sem.get_num_buyers()

    def get_num_sellers(self):
        return self.sem.get_num_sellers()

    def get_num_units(self):
        return self.sem.get_num_units()

    def get_seller_costs(self, seller):
        return self.sem.get_seller_costs(seller)

    def get_buyer_values(self, buyer):
        return self.sem.get_buyer_values(buyer)

    # --- curve construction and display --------------------------------
    def make_demand(self):
        self.sem.make_demand()

    def make_supply(self):
        self.sem.make_supply()

    def show_env_buyers(self):
        self.sem.show_env_buyers()

    def show_environment(self):
        self.sem.show_environment()

    def get_supply_demand_plot_info(self):
        return self.sem.get_supply_demand_plot_info()

    def get_supply_demand_list(self):
        return self.sem.get_supply_demand_list()

    def get_equilibrium(self):
        return self.sem.get_equilibrium()

    def show(self):
        self.sem.show()

    def plot(self):
        """Rebuild both curves, then plot supply and demand."""
        self.sem.make_demand()
        self.sem.make_supply()
        self.sem.plot_supply_demand()

    def plot_gui(self, name):
        """Rebuild both curves, then plot into the GUI element `name`."""
        self.sem.make_demand()
        self.sem.make_supply()
        self.sem.plot_supply_demand_gui(name)
6,891 | 59d543ed443c156ac65f9c806ba5bada6bcd0c21 | import unittest
def is_multiple(value, base):
    """Return True when *value* divides evenly by *base*."""
    return (value % base) == 0
def fizz_buzz(value):
    """Classic FizzBuzz: 'Fizz' for multiples of 3, 'Buzz' for 5, both for 15,
    otherwise the decimal string of *value*."""
    word = ""
    if value % 3 == 0:
        word += "Fizz"
    if value % 5 == 0:
        word += "Buzz"
    return word or str(value)
class FizzBuzzTest(unittest.TestCase):
    """Table of spot checks for fizz_buzz(): plain numbers, Fizz, Buzz, FizzBuzz."""
    def check_fizz_buzz(self, value, expected):
        # Shared helper: assert fizz_buzz(value) == expected.
        result = fizz_buzz(value)
        self.assertEqual(expected, result)
    def test_fizz_buzz__fizz_buzz_1_1(self):
        self.check_fizz_buzz(1, "1")
    def test_fizz_buzz__fizz_buzz_2_2(self):
        self.check_fizz_buzz(2, "2")
    def test_fizz_buzz__fizz_buzz_3_Fizz(self):
        self.check_fizz_buzz(3, "Fizz")
    def test_fizz_buzz__fizz_buzz_5_Buzz(self):
        self.check_fizz_buzz(5, "Buzz")
    def test_fizz_buzz__fizz_buzz_6_Fizz(self):
        self.check_fizz_buzz(6, "Fizz")
    def test_fizz_buzz__fizz_buzz_10_Buzz(self):
        self.check_fizz_buzz(10, "Buzz")
    def test_fizz_buzz__fizz_buzz_15_FizzBuzz(self):
        self.check_fizz_buzz(15, "FizzBuzz")
if __name__ == "__main__":
    # Allow running this file directly as its own test suite.
    print("Running all unit tests...")
    unittest.main()
|
6,892 | 502e92d3e5d059d73016702ce0b2591a123810d3 | # -*- coding: utf-8 -*-
#
# This file is part of REANA.
# Copyright (C) 2017, 2018 CERN.
#
# REANA is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Pytest configuration for REANA-Workflow-Controller."""
from __future__ import absolute_import, print_function
import os
import shutil
import pytest
from reana_db.models import Base, User
from sqlalchemy_utils import create_database, database_exists, drop_database
from reana_workflow_controller.factory import create_app
@pytest.fixture(scope="module")
def base_app(tmp_shared_volume_path):
    """Flask application fixture."""
    # Minimal test configuration: SQLite database, a single default
    # organization, and the shared volume redirected to a pytest temp path
    # (supplied by the `tmp_shared_volume_path` fixture defined elsewhere).
    config_mapping = {
        "SERVER_NAME": "localhost:5000",
        "SECRET_KEY": "SECRET_KEY",
        "TESTING": True,
        "SHARED_VOLUME_PATH": tmp_shared_volume_path,
        "SQLALCHEMY_DATABASE_URI": "sqlite:///testdb.db",
        "SQLALCHEMY_TRACK_MODIFICATIONS": False,
        "ORGANIZATIONS": ["default"],
    }
    app_ = create_app(config_mapping)
    return app_
|
6,893 | 6194079dd506553b4e5b66f1fb92bb8642704b59 | # -*- coding: utf-8 -*-
from copy import copy
from openprocurement.api.utils import (
json_view,
context_unpack,
APIResource,
get_now,
)
from openprocurement.tender.core.utils import save_tender, apply_patch
from openprocurement.tender.core.validation import (
validate_requirement_data,
validate_patch_requirement_data,
validate_operation_ecriteria_objects,
validate_patch_exclusion_ecriteria_objects,
validate_change_requirement_objects,
validate_put_requirement_objects,
)
class BaseTenderCriteriaRGRequirementResource(APIResource):
    """CRUD resource for requirements inside a criterion's requirement group."""

    @json_view(
        content_type="application/json",
        validators=(
            validate_operation_ecriteria_objects,
            validate_patch_exclusion_ecriteria_objects,
            validate_requirement_data,
        ),
        permission="edit_tender"
    )
    def collection_post(self):
        """Append a validated requirement to the group and save the tender.

        Returns the created requirement with HTTP 201 and a Location header.
        """
        requirement = self.request.validated["requirement"]
        self.request.context.requirements.append(requirement)
        tender = self.request.validated["tender"]
        # Changing criteria during active tendering invalidates already
        # submitted bids, when the tender model supports that operation.
        if (
            self.request.authenticated_role == "tender_owner"
            and tender.status == "active.tendering"
            and hasattr(tender, "invalidate_bids_data")
        ):
            tender.invalidate_bids_data()
        if save_tender(self.request):
            self.LOGGER.info(
                "Created requirement group requirement {}".format(requirement.id),
                extra=context_unpack(
                    self.request,
                    {"MESSAGE_ID": "requirement_group_requirement_create"},
                    {"requirement_id": requirement.id},
                ),
            )
            self.request.response.status = 201
            self.request.response.headers["Location"] = self.request.route_url(
                "{}:Requirement Group Requirement".format(self.request.validated["tender"].procurementMethodType),
                tender_id=self.request.validated["tender_id"],
                criterion_id=self.request.validated["criterion"].id,
                requirement_group_id=self.request.validated["requirement_group"].id,
                requirement_id=requirement.id
            )
        return {"data": requirement.serialize("view")}

    @json_view(permission="view_tender")
    def collection_get(self):
        """List all requirements of the requirement group."""
        return {"data": [i.serialize("view") for i in self.request.context.requirements]}

    @json_view(permission="view_tender")
    def get(self):
        """Return a single requirement."""
        return {"data": self.request.validated["requirement"].serialize("view")}

    @json_view(
        content_type="application/json",
        validators=(
            validate_change_requirement_objects,
            validate_patch_requirement_data,
        ),
        permission="edit_tender"
    )
    def patch(self):
        """Apply a partial update to the requirement in place and save."""
        requirement = self.request.context
        apply_patch(self.request, save=False, src=requirement.serialize())
        tender = self.request.validated["tender"]
        if self.request.authenticated_role == "tender_owner" and hasattr(tender, "invalidate_bids_data"):
            tender.invalidate_bids_data()
        if save_tender(self.request):
            self.LOGGER.info(
                "Updated {}".format(requirement.id),
                extra=context_unpack(self.request, {"MESSAGE_ID": "requirement_group_requirement_patch"}),
            )
        return {"data": requirement.serialize("view")}

    @json_view(
        content_type="application/json",
        validators=(
            validate_put_requirement_objects,
            validate_patch_requirement_data,
        ),
        permission="edit_tender"
    )
    def put(self):
        """Replace the requirement by creating a new version.

        Unless the request sets status "cancelled", a new requirement object
        is built from the payload merged with the old object's values, the
        old one is cancelled, and both versions are returned.
        """
        old_requirement = self.request.context
        requirement = old_requirement
        if self.request.validated["data"].get("status") != "cancelled":
            model = type(old_requirement)
            data = copy(self.request.validated["data"])
            # Fields missing from the payload are carried over from the old
            # version so the replacement is a full object.
            for attr_name in type(old_requirement)._fields:
                if data.get(attr_name) is None:
                    data[attr_name] = getattr(old_requirement, attr_name)
            # To avoid new version creation if no changes and only id's were regenerated
            if "eligibleEvidences" not in self.request.json.get("data", {}):
                data["eligibleEvidences"] = [
                    evidence.to_primitive(role="create") for evidence in getattr(old_requirement, "eligibleEvidences")
                ]
            requirement = model(data)
            # No-op PUTs short-circuit without creating a new version.
            if old_requirement.to_primitive() == requirement.to_primitive():
                return {"data": (old_requirement.serialize("view"),)}
            requirement.datePublished = get_now()
            requirement.dateModified = None
            self.request.validated["requirement_group"].requirements.append(requirement)
        if old_requirement.status == "active":
            old_requirement.status = "cancelled"
            old_requirement.dateModified = get_now()
        tender = self.request.validated["tender"]
        # Same bid-invalidation rule as in collection_post.
        if (
            self.request.authenticated_role == "tender_owner"
            and tender.status == "active.tendering"
            and hasattr(tender, "invalidate_bids_data")
        ):
            tender.invalidate_bids_data()
        if save_tender(self.request):
            self.LOGGER.info(
                "New version of requirement {}".format(requirement.id),
                extra=context_unpack(self.request, {"MESSAGE_ID": "requirement_group_requirement_put"}),
            )
        return {"data": (requirement.serialize("view"), old_requirement.serialize("view_old"))}
|
6,894 | 25641b3a9919db1f172fca22acf413062505de1b | #Simple Pig Latin
def pig_it(text):
    """Translate *text* into Pig Latin.

    Each whitespace-separated word has its first letter moved to the end,
    followed by "ay".  Standalone "!" and "?" tokens pass through unchanged.
    Empty tokens (e.g. from an empty input string) are also passed through,
    where the original crashed with IndexError on ``word[0]``.
    """
    def _translate(word):
        # Punctuation tokens and empty tokens are left untouched.
        if word in ("!", "?") or not word:
            return word
        return word[1:] + word[0] + "ay"

    return " ".join(_translate(word) for word in text.split(" "))
|
6,895 | 31a0c9a143a06ac86c8e8616fb273a0af844a352 |
__author__ = "Yong Peng"
__version__ = "1.0"
import time
import re
import getpass
from netmiko import (
ConnectHandler,
NetmikoTimeoutException,
NetmikoAuthenticationException,
)
# Read the target hosts, one per line, skipping blank lines.
with open('./device_list.txt','r') as f:
    device_list = [i.strip() for i in f.readlines() if len(i.strip()) != 0] # read the device list.

print("Data will be collected on below switches:")
for device in device_list:
    print(device)

# Require explicit confirmation before touching any device.
go = input("\nPress y to continue: ")
if go != "y" and go != "Y":
    exit(2)

u_id = input("Please input login ID:")
# First authentication factor; the per-device DUO code is appended later.
factor_1 = getpass.getpass("ID Password for login:")

# Earlier command sets, kept for reference:
# cmd_4_IOS = ['show version | in from','show stack','show flash',\
# 'show license', 'show boot-preference',\
# 'show ip bgp summ', 'show interface brief',\
# 'show ip inter', 'show vlan',\
# 'show vlan brief', 'show lag', 'show lag brief',\
# 'show lldp neighbor', 'show 802-1w', 'show ip route',\
# 'show run']
# cmd_4_IOS = ['show version | in from', 'show flash | in Pri Code|Sec Code']
# cmd_4_IOS = ['show vlan brief', 'show ip interface', 'show version | in from', 'show ip osp inter brief',
# 'show run']
# cmd_4_IOS = ['show vlan id 464']

# The commands actually executed are read from temp.txt, one per line.
with open("temp.txt",'r') as f:
    cmd_4_IOS = [i.strip() for i in f.readlines()]
def send_show_command(device, commands):
    """Run *commands* on *device* over SSH and save the output per host.

    device   -- netmiko connection dict (must contain 'host').
    commands -- iterable of CLI show commands to execute in order.

    Output of every command is appended to c:/script/output/<host>.txt,
    separated by a line of '+' characters.  Errors are printed, never raised.
    """
    # Bug fix: the status messages previously referenced the caller's global
    # loop variable `i`; use the device's own host so the function is
    # self-contained.
    host = str(device['host'])
    output_path = 'c:/script/output/' + host + '.txt'
    succeeded = True
    # `with` guarantees the output file is closed even when the SSH
    # connection fails mid-collection.
    with open(output_path, 'w') as result:
        try:
            with ConnectHandler(**device) as ssh:
                ssh.enable()
                for command in commands:
                    output = ssh.send_command(command, strip_command=False, strip_prompt=False)
                    result.write(output + "\n" + 30 * '+' + "\n" + "\n")
        except Exception as error:
            print(error)
            succeeded = False
    if succeeded:
        print("Data collection on %s is done. \n \n" % (host,))
    else:
        print("Data collection for %s is NOT done. \n \n" % (host,))
# Build a netmiko connection dict per device and collect output sequentially.
# A fresh DUO code is requested for every device because one-time passcodes
# expire between logins.
switch = {}
for i in device_list:
    switch["device_type"] = "ruckus_fastiron"
    switch["host"] = i
    switch["username"] = u_id
    factor_2 = input("Trying to login to %s, enter DUO Code:"%(i))
    switch["password"] = str(factor_1) + str(factor_2)
    # Bug fix: a trailing comma previously made this a 1-tuple ('',), which
    # is not a valid enable secret string for netmiko.
    switch['secret'] = ''
    switch['port'] = 22
    send_show_command(switch, cmd_4_IOS)
print("All collection is done.")
6,896 | ac99c19294661657d383b036c9ab83e7b610cb7d | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011-Today Serpent Consulting Services Pvt.Ltd. (<http://www.serpentcs.com>).
# Copyright (C) 2004 OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
from odoo import api, models
import time
class location_accommodation(models.AbstractModel):
    """QWeb report model for the accommodation location report."""
    _name = 'report.sg_accommodation.view_location_report'

    @api.model
    def get_companies(self):
        """Return the sorted company codes of all tenant companies.

        Side effect: populates self.td_list with one index per company
        (used by the report template to render table columns).
        """
        company_list=[]
        self.td_list = []
        comp_ids=self.env['res.company'].search([('tenant', '=', True)])
        for comp in comp_ids:
            company_list.append(comp.company_code)
        if company_list:
            company_list.sort()
        no_of_td=company_list
        for td in range(0,len(no_of_td)):
            self.td_list.append(td)
        return company_list

    @api.multi
    def render_html(self, docids, data=None):
        """Render the location report for the selected accommodation records."""
        report = self.env['report']._get_report_from_name('sg_accommodation.view_location_report')
        records = self.env['accommodation.accommodation'].browse(self.ids)
        docargs = {'doc_ids' : self.ids,
                   'doc_model' : report.model,
                   'data' : data,
                   'docs' : records,
                   'time' : time,
                   'get_companies' : self.get_companies}
        return self.env['report'].render('sg_accommodation.view_location_report', docargs)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: |
6,897 | 6a8cab1fceffa0d70441cc600137417a8b81d7b1 | string=input();
string=string.replace("(","");
string=string.replace(")","");
string=list(map(int,string.split(",")));
if(1 in string):
string.remove(1);
mid=[string[0]];
string.remove(string[0]);
result=0;
tar=0;
while(string!=[]):
tar=0;
length=len(string);
i=0
while(i<len(string)):
cout=0;
count=0
for j in mid:
for k in range(2,min(string[i],j)+1):
if(string[i]%k==0)&(j%k==0):
mid.append(string[i]);
string.remove(string[i]);
count=1;
break;
if(count==0):
cout+=1;
else:
break;
if(count==0):
i+=1;
if(cout==len(mid)):
tar+=1;
if (tar == length)|(string==[]):
if (len(mid) > result):
result = len(mid);
if(string!=[]):
mid = [string[0]];
string.remove((string[0]));
if(len(mid)>result):
reuslt=len(mid);
print(result)
|
6,898 | cc7a44754dc1371733420fd3a1e51ab6b5e7c4d8 | __author__ = 'xcbtrader'
# -*- coding: utf-8 -*-
from bitcoin import *
def crear_addr_word(word):
    """Derive a brain-wallet Bitcoin key set from *word*.

    Returns a tuple (address, private_key, private_key_wif) where the
    private key is the SHA-256 hash of the word.
    """
    private_key = sha256(word)
    address = pubtoaddr(privtopub(private_key))
    return address, private_key, encode_privkey(private_key, 'wif')
# Interactive driver: ask for a word (prompt is in Spanish) and print the
# derived brain-wallet address, raw private key and WIF-encoded key.
word = input('Entra la palabra para crear direccion bitcoin:? ')
addr, priv, wif = crear_addr_word(word)
print('####################################################')
print('WORD: ' + word)
print('ADDR: ' + addr)
print('PRIV: ' + priv)
print('WIF: ' + wif)
print('####################################################')
|
6,899 | 682b3e1d6d40f4b279052ac27df19268d227fef8 | '''引入数据,并对数据进行预处理'''
# step 1 引入数据
import pandas as pd
with open('D:\\Desktop\西瓜数据集3.0.csv', 'r', encoding='utf-8') as data_obj:
df = pd.read_csv(data_obj)
# Step 2 对数据进行预处理
# 对离散属性进行独热编码,定性转为定量,使每一个特征的取值作为一个新的特征
# 增加特征量 Catagorical Variable -> Dummy Variable
# 两种方法:Dummy Encoding VS One Hot Encoding
# 相同点:将Catagorical Variable转换为定量特征
# 不同点:Dummy Variable将Catagorical Variable转为n-1个特征变量
# One Hot Encoding 将其转换为n个特征变量,但会存在哑变量陷阱问题
# pandas自带的get_dummies()函数,可以将数据集中的所有标称变量转为哑变量
# sklearn 中的OneHotEncoder 也可以实现标称变量转为哑变量(注意要将非数字型提前通过LabelEncoder编码为数字类型,再进行转换,且只能处理单列属性)
# pybrain中的_convertToOneOfMany()可以Converts the target classes to a 1-of-k representation, retaining the old targets as a field class.
# 对target class独热编码,并且保留原target为字段类
'''
dataset = pd.get_dummies(df, columns=df.columns[:6]) # 将离散属性变为哑变量
dataset = pd.get_dummies(dataset, columns=[df.columns[8]]) # 将标签转为哑变量
# columns接受序列形式的对象,单个字符串不行
'''
dataset = pd.get_dummies(df)
pd.set_option('display.max_columns', 1000) # 把所有的列全部显示出来
X = dataset[dataset.columns[:-2]]
Y = dataset[dataset.columns[-2:]]
labels = dataset.columns._data[-2:]
# Step 3:将数据转换为SupervisedDataSet/ClassificationDtaSet对象
from pybrain.datasets import ClassificationDataSet
ds = ClassificationDataSet(19, 1, nb_classes=2, class_labels=labels)
for i in range(len(Y)):
y = 0
if Y['好瓜_是'][i] == 1:
y = 1
ds.appendLinked(X.ix[i], y)
ds.calculateStatistics() # 返回一个类直方图?搞不懂在做什么
# Step 4: 分开测试集和训练集
testdata = ClassificationDataSet(19, 1, nb_classes=2, class_labels=labels)
testdata_temp, traindata_temp = ds.splitWithProportion(0.25)
for n in range(testdata_temp.getLength()):
testdata.appendLinked(testdata_temp.getSample(n)[0],testdata_temp.getSample(n)[1])
print(testdata)
testdata._convertToOneOfMany()
print(testdata)
traindata = ClassificationDataSet(19, 1, nb_classes=2, class_labels=labels)
for n in range(traindata_temp.getLength()):
traindata.appendLinked(traindata_temp.getSample(n)[0], traindata_temp.getSample(n)[1])
traindata._convertToOneOfMany()
'''
# 使用sklean的OneHotEncoder
# 缺点是只能单列进行操作,最后再复合,麻烦
from sklearn.preprocessing import OneHotEncoder
from sklearn.preprocessing import LabelEncoder
a = LabelEncoder().fit_transform(df[df.columns[0]])
# dataset_One = OneHotEncoder.fit(df.values[])
# print(df['色泽']) # 单独的Series?
print(a)
aaa = OneHotEncoder(sparse=False).fit_transform(a.reshape(-1, 1))
print(aaa)
# 怎么复合暂时没写
'''
'''开始整神经网络'''
# Step 1 :创建神经网络框架
from pybrain.tools.shortcuts import buildNetwork
from pybrain.structure import SoftmaxLayer
# 输入数据是 19维,输出是两维,隐层设置为5层
# 输出层使用Softmax激活,其他:学习率(learningrate=0.01),学习率衰减(lrdecay=1.0,每次训练一步学习率乘以),
# 详细(verbose=False)动量因子(momentum=0最后时步的梯度?),权值衰减?(weightdecay=0.0)
n_h = 5
net = buildNetwork(19, n_h, 2, outclass=SoftmaxLayer)
# Step 2 : 构建前馈网络标准BP算法
from pybrain.supervised import BackpropTrainer
trainer_sd = BackpropTrainer(net, traindata)
# # 或者使用累积BP算法,训练次数50次
# trainer_ac = BackpropTrainer(net, traindata, batchlearning=True)
# trainer_ac.trainEpochs(50)
# err_train, err_valid = trainer_ac.trainUntilConvergence(maxEpochs=50)
for i in range(50): # 训练50次,每及测试结果次打印训练结果
trainer_sd.trainEpochs(1) # 训练网络一次,
# 引入训练误差和测试误差
from pybrain.utilities import percentError
trainresult = percentError(trainer_sd.testOnClassData(), traindata['class'])
testresult = percentError(trainer_sd.testOnClassData(dataset=testdata), testdata['class'])
# 打印错误率
print('Epoch: %d', trainer_sd.totalepochs, 'train error: ', trainresult, 'test error: ', testresult)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.