text stringlengths 38 1.54M |
|---|
#!/usr/bin/env python3
from vosk import Model, KaldiRecognizer
import os
import pyaudio
import pyttsx3
import json
import core
from nlu.classifier import classify
# Síntese de fala
# --- Speech synthesis (pyttsx3) ---
engine = pyttsx3.init()
voices = engine.getProperty('voices')
# Picks the second-to-last installed voice; which voice that is depends on
# the host OS voice list — TODO confirm this selects the intended language.
engine.setProperty('voice', voices[-2].id)


def speak(text):
    """Speak *text* aloud, blocking until playback finishes."""
    engine.say(text)
    engine.runAndWait()


def evaluate(text):
    """Classify *text* into an intent and run the matching action."""
    # Recognize the entity/intent of the text.
    entity = classify(text)
    if entity == 'time|getTime':
        speak(core.SystemInfo.get_time())
    elif entity == 'time|getDate':
        speak(core.SystemInfo.get_date())
    # Open programs (Windows-only paths).
    elif entity == 'open|notepad':
        speak('Abrindo o bloco de notas')
        os.system('notepad.exe')
    elif entity == 'open|chrome':
        speak('Abrindo o google chrome')
        os.system('"C:/Program Files/Google/Chrome/Application/chrome.exe"')
    print('Text: {} Entity: {}'.format(text, entity))


# --- Speech recognition (Vosk) ---
model = Model('model')  # expects a Vosk model directory named "model"
rec = KaldiRecognizer(model, 16000)  # 16 kHz matches the stream rate below
p = pyaudio.PyAudio()
stream = p.open(format=pyaudio.paInt16, channels=1, rate=16000, input=True, frames_per_buffer=2048)
stream.start_stream()
# Speech-recognition loop: feed audio chunks, act on each final result.
while True:
    data = stream.read(2048)
    if len(data) == 0:
        break
    if rec.AcceptWaveform(data):
        result = rec.Result()
        result = json.loads(result)
        if result is not None:
            text = result['text']
            evaluate(text)
|
class Solution(object):
    def characterReplacement(self, s, k):
        """Length of the longest substring that becomes one repeated letter
        after replacing at most k characters (sliding window).

        :type s: str
        :type k: int
        :rtype: int
        """
        best = 0
        freq = [0] * 128          # ASCII letter counts inside the window
        lo = 0                    # left edge of the window
        max_run = 0               # highest single-letter count seen so far
        for hi, ch in enumerate(s):
            code = ord(ch)
            freq[code] += 1
            if freq[code] > max_run:
                max_run = freq[code]
            # Shrink from the left while more than k replacements are needed.
            while (hi - lo + 1) - max_run > k:
                freq[ord(s[lo])] -= 1
                lo += 1
            window = hi - lo + 1
            if window > best:
                best = window
        return best
|
import random
class BaseUI:
    """Base wrapper for a DOM element (browser/Brython-style environment).

    Assigns a pseudo-unique id when none is given, optionally makes the
    element draggable, and exposes append/show/hide helpers.

    NOTE(review): relies on browser globals ``doc`` and ``win`` being in
    scope — presumably provided by the hosting framework; confirm.
    """

    def __init__(self, element, type, id=None, draggable=False):
        self._element = element
        if id is None:
            # No id supplied: derive one from the type plus a random suffix.
            self._element.setAttribute('id', '%s_%s' % (type, int(100000 * random.random())))
        else:
            self._element.setAttribute('id', id)
        if draggable:
            self.draggable()
        self.attach = self.append  # alias kept for callers using attach()

    def get_id(self):
        return self._element.id

    def append(self, element_id):
        """Append this DOM component to DOM element *element_id*."""
        doc.get(id=element_id)[0].appendChild(self._element)

    def draggable(self):
        """Wire mousedown/mousemove/mouseup handlers so the element drags."""
        def drag(e):
            self._element.style.top = '%spx' % (e.clientY - self._deltaY)
            self._element.style.left = '%spx' % (e.clientX - self._deltaX)

        def mouseDown(e):
            self._element.style.position = 'absolute'
            # Remember the grab offset so the element doesn't jump to the cursor.
            self._deltaX = e.clientX - self._element.offsetLeft
            self._deltaY = e.clientY - self._element.offsetTop
            # Bug fix: 'true' is not defined in Python — must be True.
            win.addEventListener('mousemove', drag, True)

        def mouseUp(e):
            win.removeEventListener('mousemove', drag, True)

        self._element.addEventListener('mousedown', mouseDown, False)
        self._element.addEventListener('mouseup', mouseUp, False)

    def show(self):
        self._element.setAttribute('display', 'block')

    def hide(self):
        self._element.setAttribute('display', 'none')
|
# encoding: utf-8
"""
@author: forencen
@time: 2020/11/26 5:57 下午
@desc:
"""
# Kafka consumer/producer settings plus the Redis queue used to stage
# messages waiting to be published. Placeholder values are marked below.
CONFIG = {
    "KAFKA": {
        "KAFKA_REDIS_URL": "redis://:test@127.0.0.1/6",  # your server
        "PRODUCER_COUNT": 1,
        "WAITING_PUBLISH_MESSAGE_QUEUE": "kafka_waiting_publish_message",
        "bootstrap.servers": ".....",  # your server
        "session.timeout.ms": 100000,
        "heartbeat.interval.ms": 30000,
        "enable.auto.commit": False,  # auto offset-commit disabled
        "max.poll.interval.ms": 500000,
        # "default.topic.config": {"auto.offset.reset": "latest"},
        "default.topic.config": {"auto.offset.reset": "earliest"},
        "queued.max.messages.kbytes": 2048576,
        "fetch.message.max.bytes": 1048576
    },
}
|
# -*- coding=UTF-8 -*-
import datetime
import os
import re

import dateutil
# A bare "import dateutil" does not load the relativedelta submodule that
# get_period() below accesses as dateutil.relativedelta — import it explicitly.
import dateutil.relativedelta
import pyExcelerator

from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.db import connection
from django.http import HttpResponse
from django.shortcuts import get_object_or_404

from lib.decorators import render_to
from lib.paginator import SimplePaginator
from lib.qs_filter import QSFilter
from lib.sort import SortHeaders
from data.models import (
    OrderedItem, BrandGroup, Area, Basket, Shipment, Package, BalanceItem,
    calc_parts_client, get_search_func, BALANCEITEM_TYPE_PREINVOICE
)
from data.forms import (
    OrderedItemsFilterForm, ShipmentsFilterForm, BalanceClientFilterForm,
    CLIENT_FIELD_LIST, CLIENT_SHIPMENTS_FIELD_LIST,
    BALANCE_CLIENT_FIELD_LIST_CLIENT
)
from client.forms import SearchForm
def get_period(request, prefix, field, default_period=None):
    """Resolve the reporting period ('w'/'m'/'y'/'a') for a list view.

    The period comes from the ``period`` GET parameter, falling back to the
    sticky session value stored under ``<prefix>_period``, then to
    *default_period* (or 'a' = all).  Returns ``(period, filter_dict)``
    where *filter_dict* is ``{'<field>__gte': cutoff}`` or ``{}`` for 'a'.

    NOTE(review): requires ``dateutil.relativedelta`` to be importable as an
    attribute of ``dateutil`` — confirm the module-level imports load it.
    """
    PERIOD_PARAM = 'period'
    PERIOD_PARAM_WEEK = 'w'
    PERIOD_PARAM_MONTH = 'm'
    PERIOD_PARAM_YEAR = 'y'
    PERIOD_PARAM_ALL = 'a'
    PERIOD_PARAMS = (
        PERIOD_PARAM_WEEK,
        PERIOD_PARAM_MONTH,
        PERIOD_PARAM_YEAR,
        PERIOD_PARAM_ALL
    )
    PERIOD_PARAM_DEFAULT = default_period or PERIOD_PARAM_ALL
    period = request.GET.get(PERIOD_PARAM)
    session_key = "%s_%s" % (prefix, PERIOD_PARAM)
    if not period:
        # No explicit choice in the query string: reuse the sticky value.
        period = request.session.get(session_key, None)
    else:
        request.session[session_key] = period
        request.session.modified = True
    if not period or period not in PERIOD_PARAMS:
        period = PERIOD_PARAM_DEFAULT
    rated_period = None
    if period:
        now = datetime.datetime.now()
        if period == 'w':
            # One week back, snapped to a Saturday boundary at midnight.
            rated_period = now + dateutil.relativedelta.relativedelta(
                weeks=-1, weekday=dateutil.relativedelta.calendar.SATURDAY,
                hour=0, minute=0, second=0, microsecond=0
            )
        elif period == 'm':
            # Start of the current month.
            rated_period = now + dateutil.relativedelta.relativedelta(
                day=1, hour=0, minute=0, second=0, microsecond=0
            )
        elif period == 'y':
            # Start of the current year.
            rated_period = now + dateutil.relativedelta.relativedelta(
                month=1, day=1, hour=0, minute=0, second=0, microsecond=0
            )
        elif period == 'a':
            pass  # 'all': no date cutoff
    if rated_period:
        return period, {'%s__gte' % field: rated_period}
    return period, {}
def get_items_per_page(request, prefix):
    """Return how many items to show per page.

    Priority: the ``items_per_page`` GET parameter (also persisted to the
    session under ``<prefix>_items_per_page``), then the session value,
    then the default of 50.  The literal ``'all'`` maps to 10000; any
    non-numeric value falls back to the default.
    """
    PARAM = 'items_per_page'
    ALL_VALUE = 'all'
    DEFAULT = 50
    session_key = "%s_%s" % (prefix, PARAM)
    value = request.GET.get(PARAM)
    if value:
        # Explicit choice: make it sticky for subsequent requests.
        request.session[session_key] = value
        request.session.modified = True
    else:
        value = request.session.get(session_key)
    if value == ALL_VALUE:
        return 10000
    if not value:
        return DEFAULT
    try:
        return int(value)
    except (ValueError, TypeError):
        return DEFAULT
def get_show_preinvoices(request, prefix):
    """Return the sticky 'show pre-invoices' flag (int, default 0).

    An integer ``preinvoices`` GET parameter wins; otherwise the value
    stored in the session under ``<prefix>_preinvoices`` (default 0).
    The resolved value is written back to the session either way.
    """
    PARAM = 'preinvoices'
    DEFAULT = 0
    session_key = "%s_%s" % (prefix, PARAM)
    try:
        flag = int(request.GET.get(PARAM))
    except (ValueError, TypeError):
        # Missing or non-numeric parameter: fall back to the session value.
        flag = request.session.get(session_key, DEFAULT)
    request.session[session_key] = flag
    request.session.modified = True
    return flag
@login_required
@render_to('client/search.html')
def search(request):
    """Part-number search view.

    On POST: normalizes the part number (strips non-word characters,
    uppercases), runs the configured search function, and renders found
    parts/analogs.  When several makers match, the user is first asked to
    pick one.  Always adds the user's current basket and its price total.
    """
    parts = []
    analogs = []
    msg = ''
    show_maker_field = False
    if request.method == 'POST':
        _post = request.POST.copy()
        # Normalize user input: drop non-word characters, uppercase.
        _post['part_number'] = re.sub(r'[\W]+', '', _post['part_number']).strip().upper()
        form = SearchForm(_post)
        if form.is_valid():
            maker = form.cleaned_data['maker']
            part_number = form.cleaned_data['part_number']
            search_type = form.cleaned_data['search_type']
            search_in_analogs = form.cleaned_data['search_in_analogs']
            search_func = get_search_func(search_type)
            part_founds, analog_founds = search_func(
                maker, part_number, search_in_analogs=search_in_analogs)
            if part_founds:
                makers = set(x['maker'] for x in part_founds)
                if len(makers) > 1:
                    # Ambiguous maker: ask the user to disambiguate first.
                    show_maker_field = True
                    form.fields['maker'].widget.choices = [
                        ('', '----')] + [(x, x) for x in sorted(makers)]
                else:
                    parts = calc_parts_client(part_founds, request.user)
                    analogs = calc_parts_client(analog_founds, request.user)
            elif not part_founds and analog_founds:
                analogs = calc_parts_client(analog_founds, request.user)
            else:
                msg = u"Ничего не найдено"
    else:
        form = SearchForm()
    context = {
        'form': form,
        'data': parts,
        'analogs': analogs,
        'msg': msg,
        'show_maker_field': show_maker_field,
    }
    context['basket_items'] = Basket.objects.filter(
        creator=request.user,
        order_item_id__isnull=True
    ).order_by('-id')
    # sum() replaces the old reduce(lambda x, y: x + y, ..., 0): identical
    # result, and it also works on Python 3 where reduce is not a builtin.
    context['basket_price_sum'] = sum(
        x.get_price_total() for x in context['basket_items'])
    return context
class ClientOrderItemDisplay(object):
    """Formats a single field of an order item for template output.

    A *format* containing ``date`` is treated as ``date::<strftime-spec>``;
    anything else is a plain %-format string.
    """

    def __init__(self, obj, field, format):
        raw = getattr(obj, field, u"")
        # None renders as an empty cell.
        self.value = u"" if raw is None else raw
        self.format = format

    def __unicode__(self):
        if 'date' in self.format:
            # "date::<strftime pattern>" style specifier.
            pattern = self.format.split("::")[1]
            try:
                return self.value.strftime(pattern)
            except AttributeError:
                # Value is not a date/datetime: show it as plain text.
                return u"%s" % self.value
        return self.format % self.value
class ClientOrderItemRow(object):
    """Iterable table row: yields one display cell per (field, format) pair."""

    def __init__(self, fields, obj):
        self.fields = fields  # sequence of (field_name, format) tuples
        self.obj = obj

    def __iter__(self):
        for name, fmt in self.fields:
            yield ClientOrderItemDisplay(self.obj, name, fmt)
class ClientOrderItemList(object):
    """Builds the client's ordered-items table: rows, sort headers, filter
    fields, pagination, and a totals row computed by a raw aggregate query.
    """

    def __init__(self, request, filter_form, session_store_prefix="client_index"):
        self.user = request.user
        self.set_fields()
        self.request = request
        self.filter = QSFilter(request, filter_form)
        # NOTE(review): this rebinds the local to itself (no-op); confirm
        # whether it was meant to be stored on self.
        session_store_prefix = session_store_prefix
        self.items_per_page = get_items_per_page(request, session_store_prefix)
        self.period, self.period_filter = get_period(request, session_store_prefix, "created")
        self.results = self.result_list()
        self.headers = self.list_headers()
        self.filters = self.list_filters()

    def set_fields(self):
        # Restrict visible columns to the user's profile preference, if any.
        try:
            user_fields = self.user.get_profile().get_order_fields()
        except Exception:
            user_fields = None
        if user_fields:
            self.CLIENT_FIELDS = [x for x in CLIENT_FIELD_LIST if x[2] in user_fields]
        else:
            self.CLIENT_FIELDS = CLIENT_FIELD_LIST
        self.LIST_HEADERS = [(x[0], x[1]) for x in self.CLIENT_FIELDS]

    def result_list(self):
        # One display row per order item on the current page.
        return [
            ClientOrderItemRow([(x[2], x[3]) for x in self.CLIENT_FIELDS], obj)
            for obj in self.get_query_set()
        ]

    def list_headers(self):
        sort_headers = SortHeaders(self.request, self.LIST_HEADERS)
        return list(sort_headers.headers())

    def list_filters(self):
        # One filter-form field (or "" when none exists) per visible column.
        def _inner():
            for x in self.CLIENT_FIELDS:
                try:
                    form_field = self.filter.form.__getitem__(x[4])
                    yield form_field
                except Exception:
                    yield ""
        return list(_inner())

    def get_query_set(self):
        """Build the filtered/sorted queryset, compute ``self.total_row``
        via a raw SQL aggregate, and return the current paginator page."""
        order_field = self.request.GET.get('o', None)
        order_direction = self.request.GET.get('ot', None)
        order_by = '-created'  # default: newest first
        if order_field:
            if order_direction == 'desc':
                order_direction = '-'
            else:
                order_direction = ''
            order_by = order_direction + self.LIST_HEADERS[int(order_field)][1]
        filters = self.filter.get_filters()
        qs = OrderedItem.objects.select_related() \
            .filter(client=self.request.user) \
            .filter(**filters)
        if self.period_filter:
            qs = qs.filter(**self.period_filter)
        # calculate totals by filter
        self.total_row = {}
        # "U0" is assumed to be the table alias Django emits for this query
        # shape — verify after any ORM upgrade.
        td = "U0"
        EXCLUDED_FILTER = {
            'status__in': ('failure',)
        }
        # NOTE(review): _as_sql() is a private QuerySet API — fragile across
        # Django versions.
        q, params = qs.exclude(**EXCLUDED_FILTER)._as_sql(connection)
        from_clause = q.split("FROM")[1]
        sql = """
        SELECT
            SUM(%(p)s.total_cost),
            SUM(%(p)s.weight*%(p)s.quantity),
            SUM(%(p)s.delivery),
            SUM(%(p)s.quantity*COALESCE(%(p)s.price_discount, %(p)s.price_sale, 0))
        FROM %(from)s
        """ % {'p': td, 'from': from_clause}
        cursor = connection.cursor()
        cursor.execute(sql, params)
        res = cursor.fetchall()
        if len(res) > 0:
            self.total_row = dict(zip(
                ('total_cost', 'weight', 'delivery', 'price_sale'),
                res[0]
            ))
        if order_by:
            qs = qs.order_by(order_by)
        self.paginator = SimplePaginator(self.request, qs, self.items_per_page, 'page')
        return self.paginator.get_page_items()

    def get_total_row(self):
        # "po*" columns carry the "Итого" (total) label; others show values.
        for f in self.CLIENT_FIELDS:
            field_name = f[2].split("__")[0]
            if field_name[:2] == "po":
                yield u"Итого"
            else:
                yield self.total_row.get(field_name, u"")
@login_required
@render_to('client/index.html')
def index(request):
    """Client dashboard: the ordered-items list with its paging state."""
    cl = ClientOrderItemList(request, OrderedItemsFilterForm)
    return {
        'cl': cl,
        'paginator': cl.paginator,
        'items_per_page': cl.items_per_page,
        'period': cl.period,
        'qs_filter_param': cl.filter.get_filters(),
    }
class ClientShipmentList(object):
    """Client-facing shipments table: sort headers, filters, pagination."""

    def __init__(self, request, filter_form):
        self.request = request
        self.filter = QSFilter(request, filter_form)
        session_store_prefix = "client_shipments"
        self.items_per_page = get_items_per_page(request, session_store_prefix)
        self.period, self.period_filter = get_period(request, session_store_prefix, "created_at")
        self.results = self.result_list()
        self.headers = self.list_headers()
        self.filters = self.list_filters()

    def result_list(self):
        # Shipments need no per-cell wrapping: page items are used directly.
        return self.get_query_set()

    def list_headers(self):
        sort_headers = SortHeaders(self.request, [(x[0], x[1]) for x in CLIENT_SHIPMENTS_FIELD_LIST])
        return list(sort_headers.headers())

    def list_filters(self):
        # One filter-form field (or "") per column.
        def _inner():
            for x in CLIENT_SHIPMENTS_FIELD_LIST:
                try:
                    form_field = self.filter.form.__getitem__(x[3])
                    yield form_field
                except Exception:
                    yield ""
        return list(_inner())

    def get_query_set(self):
        """Filtered/sorted shipment queryset; returns the current page."""
        order_field = self.request.GET.get('o', None)
        order_direction = self.request.GET.get('ot', None)
        order_by = '-created_at'  # default: newest first
        if order_field:
            if order_direction == 'desc':
                order_direction = '-'
            else:
                order_direction = ''
            order_by = order_direction + CLIENT_SHIPMENTS_FIELD_LIST[int(order_field)][1]
        qs = (
            Shipment.objects.select_related()
            .filter(client=self.request.user)
            .filter(**self.filter.get_filters())
        )
        if self.period_filter:
            qs = qs.filter(**self.period_filter)
        if order_by:
            qs = qs.order_by(order_by)
        self.paginator = SimplePaginator(self.request, qs, self.items_per_page, 'page')
        return self.paginator.get_page_items()
@login_required
@render_to('client/shipments.html')
def shipments(request):
    """Client shipments list view."""
    response = {}
    cl = ClientShipmentList(request, ShipmentsFilterForm)
    response['cl'] = cl
    response['paginator'] = cl.paginator
    response['items_per_page'] = cl.items_per_page
    response['period'] = cl.period
    # NOTE(review): this stores the view *function object* itself, not a
    # list of shipments — looks like a copy-paste leftover; confirm whether
    # the template actually uses 'shipments'.
    response['shipments'] = shipments
    response['qs_filter_param'] = cl.filter.get_filters()
    return response
class ClientPackageOrderItemDisplay(object):
    """Formats one field of a package row for display.

    Identical to ClientOrderItemDisplay except the ``part_number`` column
    is substituted with the object's ``description``.
    """

    def __init__(self, obj, field, format):
        raw = getattr(obj, field, u"")
        self.value = raw if raw is not None else u""
        # Packages show their description in the part-number column.
        if field == 'part_number':
            self.value = obj.description
        self.format = format

    def __unicode__(self):
        if 'date' in self.format:
            # Format spec of the form "date::<strftime pattern>".
            pattern = self.format.split("::")[1]
            try:
                return self.value.strftime(pattern)
            except AttributeError:
                return u"%s" % self.value
        return self.format % self.value
class ClientShipmentOrderItemRow(object):
    """Iterable package row: yields one package display cell per column."""

    def __init__(self, fields, obj):
        self.fields = fields  # sequence of (field_name, format) tuples
        self.obj = obj

    def __iter__(self):
        for name, fmt in self.fields:
            yield ClientPackageOrderItemDisplay(self.obj, name, fmt)
class ClientShipmentOrderItemList(ClientOrderItemList):
    """Order items belonging to one shipment, plus its packages and a
    combined totals row (items + packages)."""

    def __init__(self, request, filter_form, shipment):
        self.shipment = shipment
        super(ClientShipmentOrderItemList, self).__init__(
            request,
            filter_form,
            session_store_prefix="client_shipment"
        )

    def get_query_set(self):
        """Filtered items for this shipment; also builds ``self.packages``,
        ``self.packages_list`` and ``self.total_row`` as side effects."""
        order_field = self.request.GET.get('o', None)
        order_direction = self.request.GET.get('ot', None)
        order_by = '-created'
        if order_field:
            if order_direction == 'desc':
                order_direction = '-'
            else:
                order_direction = ''
            order_by = order_direction + self.LIST_HEADERS[int(order_field)][1]
        qs = OrderedItem.objects.select_related() \
            .filter(client=self.request.user, shipment=self.shipment) \
            .filter(**self.filter.get_filters())
        if self.period_filter:
            qs = qs.filter(**self.period_filter)
        self.packages = Package.objects.filter(shipment=self.shipment).order_by('-created_at')
        self.packages_list = [
            ClientShipmentOrderItemRow([(x[2], x[3]) for x in self.CLIENT_FIELDS], obj)
            for obj in self.packages
        ]
        # calculate totals by filter
        total = {}
        td = "U0"  # assumed table alias in Django's generated SQL — verify on upgrades
        EXCLUDED_FILTER = {
            'status__in': ('failure',)
        }
        # NOTE(review): _as_sql() is a private QuerySet API.
        q, params = qs.exclude(**EXCLUDED_FILTER)._as_sql(connection)
        from_clause = q.split("FROM")[1]
        sql = """
        SELECT
            SUM(%(p)s.total_cost),
            SUM(%(p)s.weight*%(p)s.quantity),
            SUM(%(p)s.delivery),
            SUM(%(p)s.quantity*COALESCE(%(p)s.price_discount, %(p)s.price_sale, 0))
        FROM %(from)s
        """ % {'p': td, 'from': from_clause}
        cursor = connection.cursor()
        cursor.execute(sql, params)
        res = cursor.fetchall()
        if len(res) > 0:
            total = dict(zip(
                ('total_cost', 'weight', 'delivery', 'price_sale'),
                res[0]
            ))
        total_packages = {}
        # shipment.id comes from the ORM so %i interpolates an int; a
        # parameterized query would still be safer.
        sql = """
        SELECT
            SUM(total_cost),
            SUM(weight * quantity),
            SUM(delivery)
        FROM
            data_package
        WHERE
            shipment_id = %(shipment_id)i
        """ % {'shipment_id': self.shipment.id}
        cursor = connection.cursor()
        cursor.execute(sql)
        res = cursor.fetchall()
        if len(res) > 0:
            total_packages = dict(zip(
                ('total_cost', 'weight', 'delivery'), res[0])
            )

        def _sum(d1, d2):
            # Element-wise sum of the two totals dicts; None+None stays None.
            d = {}
            for k, v1 in d1.items():
                v2 = d2.get(k)
                if v1 is None and v2 is None:
                    d[k] = None
                else:
                    d[k] = (v1 or 0) + (v2 or 0)
            return d

        self.total_row = []
        total = _sum(total, total_packages)
        for f in self.CLIENT_FIELDS:
            field_name = f[2].split("__")[0]
            if field_name[:2] == "po":
                self.total_row.append(u"Итого:")
            else:
                self.total_row.append(total.get(field_name, u""))
        if order_by:
            qs = qs.order_by(order_by)
        self.paginator = SimplePaginator(self.request, qs, self.items_per_page, 'page')
        return self.paginator.get_page_items()

    def get_total_row(self):
        # Pre-computed as a list in get_query_set(), unlike the parent class.
        for f in self.total_row:
            yield f
@login_required
@render_to('client/shipment.html')
def shipment(request, shipment_id):
    """Detail view for one shipment: its order items plus packages."""
    obj = get_object_or_404(Shipment, id=shipment_id)
    cl = ClientShipmentOrderItemList(request, OrderedItemsFilterForm, obj)
    return {
        'cl': cl,
        'paginator': cl.paginator,
        'items_per_page': cl.items_per_page,
        'period': cl.period,
        'shipment': obj,
        'qs_filter_param': cl.filter.get_filters(),
    }
class ClientBalanceList(object):
    """Client balance-items table with period filter, pre-invoice toggle,
    pagination and a running total amount."""

    def __init__(self, request, filter_form):
        self.request = request
        self.filter = QSFilter(request, filter_form)
        self.session_store_prefix = "client_balance"
        self.items_per_page = get_items_per_page(request, self.session_store_prefix)
        # Default period here is 'a' (all time), unlike the other lists.
        self.period, self.period_filter = get_period(request, self.session_store_prefix, "created_at", "a")
        self.show_preinvoices = get_show_preinvoices(request, self.session_store_prefix)
        self.results = self.result_list()
        self.headers = self.list_headers()
        self.filters = self.list_filters()

    def result_list(self):
        return self.get_query_set()

    def list_headers(self):
        sort_headers = SortHeaders(self.request, [(x[0], x[1]) for x in BALANCE_CLIENT_FIELD_LIST_CLIENT])
        return list(sort_headers.headers())

    def list_filters(self):
        # One filter-form field (or "") per column.
        def _inner():
            for x in BALANCE_CLIENT_FIELD_LIST_CLIENT:
                try:
                    form_field = self.filter.form.__getitem__(x[3])
                    yield form_field
                except Exception:
                    yield ""
        return list(_inner())

    def get_query_set(self):
        """Filtered balance items; also sets ``self.total_amount``."""
        order_field = self.request.GET.get('o', None)
        order_direction = self.request.GET.get('ot', None)
        order_by = '-created_at'
        if order_field:
            if order_direction == 'desc':
                order_direction = '-'
            else:
                order_direction = ''
            order_by = order_direction + BALANCE_CLIENT_FIELD_LIST_CLIENT[int(order_field)][1]
        qs = (
            BalanceItem.objects.select_related()
            .filter(user=self.request.user)
            .filter(**self.filter.get_filters())
        )
        if self.period_filter:
            qs = qs.filter(**self.period_filter)
        if not self.show_preinvoices:
            qs = qs.exclude(item_type=BALANCEITEM_TYPE_PREINVOICE)
        if order_by:
            qs = qs.order_by(order_by)
        # Total over the full filtered set, not just the current page.
        self.total_amount = sum(x.amount for x in qs)
        self.paginator = SimplePaginator(self.request, qs, self.items_per_page, 'page')
        return self.paginator.get_page_items()
@login_required
@render_to('client/balance.html')
def balance(request):
    """Client balance view: balance items plus the pre-invoice toggle."""
    response = {}
    cl = ClientBalanceList(request, BalanceClientFilterForm)
    response['cl'] = cl
    response['paginator'] = cl.paginator
    response['items_per_page'] = cl.items_per_page
    response['period'] = cl.period
    # NOTE(review): stores the shipments *view function*, not data — looks
    # like a copy-paste leftover from shipments(); confirm template usage.
    response['shipments'] = shipments
    response['qs_filter_param'] = cl.filter.get_filters()
    response['show_preinvoices'] = get_show_preinvoices(
        request, cl.session_store_prefix)
    return response
@render_to('client/help/list.html')
def help_area_list(request, brandgroup_id):
    """List areas of a brand group (empty list when the group is missing)."""
    try:
        items = BrandGroup.objects.get(id=brandgroup_id).area.all()
    except BrandGroup.DoesNotExist:
        items = []
    return {'list': items}
@render_to('client/help/list.html')
def help_brands_list(request, area_id):
    """List an area's brands, title-sorted (empty when the area is missing)."""
    try:
        items = Area.objects.get(id=area_id).brands.all().order_by('title')
    except Area.DoesNotExist:
        items = []
    return {'list': items}
@login_required
def export_order(request):
    """Export the client's (filtered) order items to an Excel file.

    Builds a temporary .xls with pyExcelerator, streams it back as the HTTP
    response, then deletes the temp file.  Python 2 only: uses ``unicode``
    and the ``0777`` octal literal.
    """
    LIST_HEADERS = (
        (u'PO', 'po_verbose'),
        (u'Направление', 'brandgroup'),
        (u'Поставщик', 'brand'),
        (u'BRAND', 'area'),
        (u'PART #', 'part_number'),
        (u'ЗАМЕНА', 'part_number_superseded'),
        (u'Q', 'quantity'),
        (u'RUS', 'description_ru'),
        (u'ENG', 'description_en'),
        (u'Комментарий', 'comment_customer'),
        (u'WEIGHT', 'weight'),
        (u'SHIPPING', 'delivery'),
        (u'PRICE', 'price_sale'),
        (u'NEW PRICE', 'price_discount'),
        (u'COST', 'cost'),
        (u'TOTAL', 'total_cost'),
        (u'Инвойс', 'invoice_code'),
        (u'Статус', 'status'),
    )

    def get_list_headers():
        # Respect the user's configured column subset, if any.
        try:
            fields = request.user.get_profile().get_order_fields()
        except Exception:
            fields = None
        if fields:
            return [(x[0], x[1]) for x in LIST_HEADERS if x[1] in fields]
        return LIST_HEADERS

    _filter = QSFilter(request, OrderedItemsFilterForm, clear_old=False)
    orders = OrderedItem.objects.select_related() \
        .filter(client=request.user) \
        .filter(**_filter.get_filters()) \
        .order_by('brandgroup__direction__po', 'ponumber')
    # NOTE(review): a fixed temp filename means concurrent exports clobber
    # each other — consider a per-request tempfile.
    filename = os.path.join(settings.MEDIA_ROOT, 'temp.xls')
    book = pyExcelerator.Workbook()
    sheet = book.add_sheet('ORDERS')
    i = 0
    curr_line = 0
    # Header row.
    for key, value in get_list_headers():
        sheet.write(curr_line, i, key)
        i += 1
    # One worksheet row per order item.
    for order in orders:
        i = 0
        curr_line += 1
        for key, value in get_list_headers():
            value = getattr(order, value) or ''
            try:
                sheet.write(curr_line, i, value)
                i += 1
            except AssertionError:
                # pyExcelerator rejects some value types; retry as text.
                value = unicode(value)
                sheet.write(curr_line, i, value)
                i += 1
    book.save(filename)
    os.chmod(filename, 0777)
    content = open(filename, 'rb').read()
    response = HttpResponse(content, mimetype='application/vnd.ms-excel')
    name = '%s-%s.xls' % ('orders', datetime.datetime.now().strftime('%m-%d-%Y-%H-%M'))
    response['Content-Disposition'] = 'inline; filename=%s' % name
    os.remove(filename)
    return response
|
from django.db import models
# Create your models here.
class Img(models.Model):
    """Image record: a short title plus the image path/URL as plain text."""
    title = models.CharField(max_length=20)
    img=models.CharField(max_length=255)  # path or URL string, not a FileField
    objects=models.Manager()
#!/usr/bin/env/python
# File name : server.py
# Production : GWR
# Website : www.gewbot.com
# E-mail : gewubot@163.com
# Author : William
# Date : 2019/07/24
import socket
import time
import threading
import info
def info_send_client():
    """Push CPU temperature/usage, RAM info and servo direction to the
    client once per second over a dedicated TCP socket (port 2256).

    NOTE(review): relies on module-level globals ``addr`` (set in the
    __main__ accept loop) and ``servo`` (not defined anywhere in this
    file) — confirm ``servo`` is injected at runtime.
    """
    SERVER_IP = addr[0]
    SERVER_PORT = 2256  # telemetry port on the client side
    SERVER_ADDR = (SERVER_IP, SERVER_PORT)
    Info_Socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)  # Set connection value for socket
    Info_Socket.connect(SERVER_ADDR)
    print(SERVER_ADDR)
    while 1:
        try:
            Info_Socket.send((info.get_cpu_tempfunc()+' '+info.get_cpu_use()+' '+info.get_ram_info()+' '+str(servo.get_direction())).encode())
            time.sleep(1)
        except:
            # Bare except keeps the telemetry loop alive on any send error;
            # back off before retrying.
            time.sleep(10)
            pass
def ap_thread():
    """Start a Wi-Fi access point via create_ap; blocks while the AP runs.

    Intended to run in a daemon thread (see wifi_check()).
    """
    # Bug fix: 'os' is never imported at module level in this file, so the
    # original raised NameError when the AP fallback triggered.
    import os
    os.system("sudo create_ap wlan0 eth0 MyRobot 12345678")
def run():
    """Main command loop: start the telemetry thread, then echo commands
    received from the connected TCP client.

    NOTE(review): uses module-level globals ``tcpCliSock`` and ``BUFSIZ``
    set in the __main__ accept loop.
    """
    speed_set = 100  # unused here — presumably consumed by motion code elsewhere
    posBuffer = [0,50,0,0]  # unused here as well
    info_threading=threading.Thread(target=info_send_client)  # Define a thread for FPV and OpenCV
    info_threading.setDaemon(True)  # daemon thread: exits with the main loop
    info_threading.start()  # Thread starts
    while True:
        data = ''
        data = str(tcpCliSock.recv(BUFSIZ).decode())
        if not data:
            continue
        elif 'forward' == data:
            pass  # placeholder: motion command handling not implemented here
        print(data)
def wifi_check():
    """Check connectivity by opening a UDP socket towards 1.1.1.1; if that
    fails, spawn a daemon thread that starts a Wi-Fi access point."""
    try:
        s =socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
        s.connect(("1.1.1.1",80))
        ipaddr_check=s.getsockname()[0]  # local IP used for the route
        s.close()
        print(ipaddr_check)
        # screen.screen_show(2, 'IP:'+ipaddr_check)
        # screen.screen_show(3, 'AP MODE OFF')
    except:
        # No network: fall back to access-point mode.
        ap_threading=threading.Thread(target=ap_thread)  # Define a thread for data receiving
        ap_threading.setDaemon(True)  # daemon thread: exits with the main loop
        ap_threading.start()  # Thread starts
if __name__ == '__main__':
    # Outer loop: rebuild the server socket whenever run() raises.
    while 1:
        HOST = ''
        PORT = 10223  # command-channel listen port
        BUFSIZ = 1024  # receive buffer size
        ADDR = (HOST, PORT)
        # Inner loop: retry bind/accept until a client connects.
        while 1:
            wifi_check()
            try:
                tcpSerSock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                tcpSerSock.setsockopt(socket.SOL_SOCKET,socket.SO_REUSEADDR,1)
                tcpSerSock.bind(ADDR)
                tcpSerSock.listen(5)  # Start server, waiting for client
                print('waiting for connection...')
                tcpCliSock, addr = tcpSerSock.accept()
                print('...connected from :', addr)
                break
            except:
                # Bare except: bind/accept failed — retry silently.
                pass
        try:
            run()
        except Exception as e:
            print(e)
            time.sleep(1)
|
from turtle import *
from random import randint
def Create_Path():
    """Draw 15 numbered dashed track columns with the global turtle ``t``,
    returning to the left edge after each column."""
    for row in range(15):
        # t.speed(2)
        t.write(row,align='center')
        t.right(90)
        # Dashed vertical line: alternate pen-up / pen-down 10px segments.
        for coln in range(10):
            t.penup()
            t.forward(10)
            t.pendown()
            t.forward(10)
        t.speed(0)
        t.penup()
        t.backward(200)  # back to the top of the column (10 * 20px)
        t.left(90)
        t.forward(20)    # shift right for the next column
def Create_Turtle():
    """Create the four racer turtles, spin each in a full circle for show,
    and store them in the global ``listTurtle`` (replacing placeholders)."""
    listColor=['Red','Blue','Green','Yellow']
    m=0        # racer index
    p=100      # starting y position; each racer one lane lower
    for k in listTurtle:
        k = Turtle()
        k.shape('turtle')
        k.color(listColor[m])
        k.penup()
        k.goto(-160,p)
        p-=30
        k.pendown()
        # Spin 360 degrees in 36-degree steps, alternating direction.
        if m%2==0:
            for turn in range(10):
                k.right(36)
        else:
            for turn in range(10):
                k.left(36)
        listTurtle[m]=k
        m+=1
def Lets_Race():
    """Advance each of the four racers a random 1-5 steps, 100 rounds."""
    for turn in range(100):
        listTurtle[0].forward(randint(1,5))
        listTurtle[1].forward(randint(1,5))
        listTurtle[2].forward(randint(1,5))
        listTurtle[3].forward(randint(1,5))
# Module-level setup: a scribe turtle for the track, placeholder racer
# list (replaced inside Create_Turtle), then run the race.
t=Turtle()
t.penup()
t.setpos(-150,150)
listTurtle=[1,2,3,4]  # placeholders — Create_Turtle() swaps in Turtle objects
Create_Path()
Create_Turtle()
Lets_Race()
|
# Generated by Django 2.2 on 2020-05-27 14:54
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: creates the Statement model (a monthly
    statement keyed by scheme and grain type with four quantity columns)."""

    dependencies = [
        ('secondapp', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Statement',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('month', models.CharField(max_length=20)),
                ('year', models.CharField(max_length=10)),
                ('type_scheme', models.CharField(choices=[('NFSA', 'NFSA'), ('APL', 'APL'), ('MDM', 'MDM'), ('PMGKY', 'PMGKY')], default='NFSA', max_length=20)),
                ('type_grain', models.CharField(choices=[('RICE', 'RICE'), ('WHEAT', 'WHEAT'), ('DAL', 'DAL')], default='RICE', max_length=20)),
                ('os', models.IntegerField(default=0)),
                ('uthan', models.IntegerField(default=0)),
                ('vitran', models.IntegerField(default=0)),
                ('agami', models.IntegerField(default=0)),
            ],
        ),
    ]
|
from dynamodb_encryption_sdk.encrypted import CryptoConfig
from dynamodb_encryption_sdk.encrypted.item import (
decrypt_dynamodb_item as aws_decrypt_dynamodb_item,
encrypt_dynamodb_item as aws_encrypt_dynamodb_item,
)
from dynamodb_encryption_sdk.transform import ddb_to_dict, dict_to_ddb
from dynamodb_encryption_sdk.structures import AttributeActions, EncryptionContext
from ..key_store import KeyStore
from .materials_provider import KeyStoreMaterialsProvider
def encrypt_dynamodb_item(
    item,
    key_id: str,
    key_store: KeyStore,
    encryption_context: EncryptionContext,
    attribute_actions: AttributeActions,
):
    """Encrypt a DynamoDB-format item using a key from the key store.

    Builds a CryptoConfig backed by a KeyStoreMaterialsProvider for *key_id*
    and delegates to the AWS SDK's item encryption.
    """
    config = CryptoConfig(
        materials_provider=KeyStoreMaterialsProvider(
            key_store=key_store,
            material_description={"key_id": key_id},
        ),
        encryption_context=encryption_context,
        attribute_actions=attribute_actions,
    )
    return aws_encrypt_dynamodb_item(item, config)
def encrypt_python_item(
    item,
    key_id: str,
    key_store: KeyStore,
    encryption_context: EncryptionContext,
    attribute_actions: AttributeActions,
):
    """Encrypt a plain Python dict item.

    Converts it to DynamoDB wire format, encrypts via
    encrypt_dynamodb_item(), and converts the result back to a plain dict.
    """
    encrypted = encrypt_dynamodb_item(
        item=dict_to_ddb(item),
        key_id=key_id,
        key_store=key_store,
        encryption_context=encryption_context,
        attribute_actions=attribute_actions,
    )
    return ddb_to_dict(encrypted)
def decrypt_dynamodb_item(
    item,
    key_store: KeyStore,
    encryption_context: EncryptionContext,
    attribute_actions: AttributeActions,
):
    """Decrypt a DynamoDB-format item using materials from the key store.

    No material description is needed: the provider resolves the key from
    the encrypted item's own material description.
    """
    config = CryptoConfig(
        materials_provider=KeyStoreMaterialsProvider(key_store=key_store),
        encryption_context=encryption_context,
        attribute_actions=attribute_actions,
    )
    return aws_decrypt_dynamodb_item(item, config)
def decrypt_python_item(
    item,
    key_store: KeyStore,
    encryption_context: EncryptionContext,
    attribute_actions: AttributeActions,
):
    """Decrypt a plain Python dict item.

    Converts it to DynamoDB wire format, decrypts via
    decrypt_dynamodb_item(), and converts the result back to a plain dict.
    """
    decrypted = decrypt_dynamodb_item(
        item=dict_to_ddb(item),
        key_store=key_store,
        encryption_context=encryption_context,
        attribute_actions=attribute_actions,
    )
    return ddb_to_dict(decrypted)
|
#! python3
import sys
from vininfo import Vin
from pprint import pprint
# Decode the VIN passed as the first CLI argument and pretty-print its
# annotated breakdown; any failure (bad VIN, missing argument) exits 1.
try:
    vin = Vin(sys.argv[1])
    pprint(vin.annotate())
except Exception as e:
    print('Error: %s' % e)
    sys.exit(1)
|
class Article:
    """A feed entry plus per-article scoring state."""

    def __init__(self, id_num, title, link, pubDate, description):
        # Raw feed fields, stored as given.
        self.id_num = id_num
        self.title = title
        self.link = link
        self.pubDate = pubDate
        self.description = description
        # Scoring state, filled in later by the ranking code.
        self.vector = []
        self.score = 0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 11 17:50:04 2019
@author: guisier
"""
import numpy as np
import scipy.stats as st
from statistics import stdev
# use ARMA(s,m)-GARCH(1,1) to validate (figure 4.3)
# NOTE(review): this script assumes names defined elsewhere (notebook
# context?): excess_ret, train, ARMA, arch, sqrt, plt — confirm imports.
validate=[]
validate[0:728]=excess_ret[728:1456]  # second half of the series = validation set
s=0  # AR order
m=4  # MA order
# model the conditional mean
model = ARMA(train, order=(s,m))
model_fit = model.fit(disp=-1)
print(model_fit.summary())
error = train - model_fit.fittedvalues  # in-sample residuals
coefficients=model_fit.params
# coefficients layout: [const, AR(1..s), MA(1..m)]
coef = coefficients[1:s+1]
coef1= coef[::-1]       # AR coefficients reversed for windowed dot products
miu1 = coefficients[0]  # intercept
coef3 = coefficients[s+1:s+m+1]
coef2 = coef3[::-1]     # MA coefficients reversed
# GARCH(1,1) on the zero-mean residuals.
am = arch.arch_model(error,mean='zero',lags=0,vol='GARCH')
res = am.fit()
print(res.summary())
coefficients2=res.params
# coefficients layout: [omega, alpha, beta]
omega1 = coefficients2[0]
alpha= coefficients2[1]
beta=coefficients2[2]
# obtain all the error terms: roll the mean equation over the validation set
y= excess_ret[728-s:1456]
err= error[728-m:1456]
p1=s
q1=m
for i in range(0,728):
    mean = np.sum(y[i:(i+p1)]*coef1)+np.sum(err[i:(i+q1)]*coef2)+miu1
    y = np.append(y,mean)
    err=np.append(err,validate[i]- mean)
err=err[m:]
error4 = np.append(error,err)
# all the sigma square terms via the GARCH(1,1) recursion
sigma0=np.array([omega1])
errorsq=error4**2
p2=1
q2=1
for i in range(0,1455):
    sigmasq=omega1+errorsq[i:(i+p2)]*alpha+sigma0[i:(i+q2)]*beta
    sigma0=np.append(sigma0,sigmasq)
# validate: Monte-Carlo 7-day-ahead simulation, M paths per week
p=s
q1=m
p2=1
q2=1
M=5000
l=7   # horizon in days
a = np.zeros((M,l))
UB = np.zeros(728)
LB = np.zeros(728)
for k in range(0,104):  # 104 weeks cover the 728-day validation window
    for j in range(0,M):
        y = excess_ret[728-s+7*k:728+7*k]
        err=error4[728-m+7*k:728+7*k]
        errorsq=error4**2
        epsilonsq=errorsq[728-m+7*k:728+7*k]
        sigmasq=sigma0[728-m+7*k:728+7*k]
        for i in range(0,l):
            # One-step-ahead mean and variance, then a Gaussian draw.
            mean = np.sum(y[i:(i+p)]*coef1)+np.sum(err[i:(i+q1)]*coef2)+miu1
            sigmasq_t = np.sum(epsilonsq[i:(i+q2)]*alpha)+np.sum(sigmasq[i:(i+q2)]*beta)+omega1
            epsilon_t = np.random.normal(loc=0,scale = sqrt(sigmasq_t))
            y = np.append(y,mean+epsilon_t)
            epsilonsq = np.append(epsilonsq, epsilon_t*epsilon_t)
            err=np.append(err, epsilon_t)
            sigmasq=np.append(sigmasq,sigmasq_t)
        a[j,:] = y[s:]
    # Per-day t-interval across the M simulated paths.
    UBk = np.zeros(l)
    LBk = np.zeros(l)
    for j in range(0,l):
        S = a[:,j]
        (LBk[j],UBk[j])=st.t.interval(0.99, len(S)-1, loc=np.mean(S), scale=stdev(S))
    UB[7*k:(7*(k+1))]=UBk
    LB[7*k:(7*(k+1))]=LBk
# NOTE(review): the labels say 93% but the interval above uses 0.99 — reconcile.
plt.plot(UB,label='93% upper bound')
plt.plot(LB,label='93% lower bound')
plt.plot(validate,color='red',alpha=0.7,linewidth = '0.5',label='R-B')
plt.legend()
plt.title('ARMA(0,4)-GARCH(1,1)')
|
import socket
import sys
from escape_room import EscapeRoom
import _thread
# Listen port: argv[1] when given, else 9999.
if len(sys.argv) > 1:
    port = int(sys.argv[1])
    # print("port: ", port)
else:
    port = 9999
host = ""
s = socket.socket()
# binding socket
try:
    # print("Binding the port " + str(port))
    s.bind((host, port))
    s.listen(1)
except socket.error as msg:
    # NOTE(review): bind errors are silently swallowed; the accept() loop
    # below will then fail — consider retrying or exiting instead.
    # print("Socket binding error: ", str(msg) + '\n' + "Retry....")
    pass
# establish connection with client (socket must be listening)
def new_client(conn, addr):
while True:
room = EscapeRoom()
room.start()
while room.status() == 'locked':
cmd = str(conn.recv(1024), "utf-8")
# print("command received:", cmd, " from: ", addr)
out = room.command(cmd)
if room.status() == 'dead':
out = out + '\n' + 'You died!'
elif room.status() == 'escaped':
out = out + '\n' + 'You escaped!'
conn.send(str.encode(out))
if cmd == 'quit':
# print("closing connection from: ", addr)
conn.close() # close connection
# s.close() # close socket
sys.exit() # close cmd
conn.close()
sys.exit()
# send commands to client
while True: # for keeping connection open
conn, address = s.accept()
# print("Connection has been established ! " + "IP" + address[0] + " Port" + str(address[1]))
_thread.start_new_thread(new_client, (conn, address))
s.close()
# sys.exit()
|
# hash table
class MyHashMap(object):
    """Separate-chaining hash map for integer keys and values.

    Each of the `size` buckets is a list of [key, value] pairs; collisions
    are resolved by scanning the bucket linearly.
    """

    def __init__(self, size=1024):
        # Number of buckets; kept as an attribute so _bucket can reduce
        # arbitrary keys into range.
        self.size = size
        self.hash = [[] for _ in range(self.size)]

    def _bucket(self, key):
        # Map a key to its bucket. The original indexed self.hash[key]
        # directly, which raises IndexError for key >= size and silently
        # aliases negative keys; reducing modulo size fixes both.
        return self.hash[key % self.size]

    def put(self, key: int, value: int) -> None:
        """Insert key -> value, overwriting any existing entry for key."""
        bucket = self._bucket(key)
        for item in bucket:
            if item[0] == key:
                item[1] = value
                return
        bucket.append([key, value])

    def get(self, key: int) -> int:
        """Return the value stored for key, or -1 if key is absent."""
        for item in self._bucket(key):
            if item[0] == key:
                return item[1]
        return -1

    def remove(self, key: int) -> None:
        """Delete key's entry if present; no-op otherwise."""
        bucket = self._bucket(key)
        for item in bucket:
            if item[0] == key:
                bucket.remove(item)
                return
|
# Demo: different pages use different login methods, implemented with a
# parameterized decorator.
import time
# Hard-coded credentials checked by the 'local' auth path.
user, passwd = 'jfsu', 'abc'
def auth(auth_type):
    """Decorator factory: guard a view function with the given auth type.

    'local' prompts for username/password on every call and only runs the
    wrapped function on a match (exiting the process on a mismatch);
    'ldap' just prints a message — the wrapped function is never called
    and the wrapper returns None.
    """
    print('the func is %s' % auth_type)
    def outer_wrapper(func):
        def wrapper(*args, **kwargs):
            # NOTE(review): forwarding **kwargs into print() raises
            # TypeError for any keyword other than print's own options.
            print('the wrapper is:', *args, **kwargs)
            if auth_type == 'local':
                username = input('Username:').strip()  # strip surrounding whitespace from input
                password = input('Password:').strip()
                if user == username and passwd == password:
                    print('\033[31;1mPass\033[0m')
                    res = func(*args, **kwargs)  # from home
                    print(res)
                    return res
                else:
                    exit('\033[42;1m Invild username or password\033[0m')
            elif auth_type == 'ldap':
                print("auth is ldap")
        return wrapper
    return outer_wrapper
# @auth
# def index():
#     print('welcome to my web \033[42;1m Index \033[0m')
@auth(auth_type="local")
def bbs():
    """Page guarded by 'local' auth: prompts for credentials before printing."""
    print('welcome to my \033[31;1mBBS\033[0m')
@auth(auth_type="ldap")  # home = wrapper()
def home():
    """Page guarded by 'ldap' auth.

    The 'ldap' wrapper never invokes the wrapped function, so this body is
    dead code and home() returns None.
    """
    print('Welcome to my \033[35;1mHomepage\033[0m')
    return 'from home'
# index()
bbs()
home()
#!/usr/bin/env python3
import requests
# Dota 2 Web API demo: fetch the full hero list and dump the raw response.
matchID = "5280316503"  # NOTE(review): unused here — presumably intended for a later match-details call
key = "KEY GOES HERE"  # Steam Web API key
baseURL = "http://api.steampowered.com/IEconDOTA2_570/GetHeroes/v1/"
response = requests.get(baseURL + "?key=" + key)
print(response.status_code)
print(response.json())
|
from django.db import models
# Create your models here
class HotPoint(models.Model):
    # A trending-topic record: display title, a numeric rank/count, and
    # optional chart payload stored as JSON (JSONField needs Django >= 3.1).
    title = models.CharField(max_length=50)
    number = models.IntegerField()
    charts = models.JSONField(null=True)
class User(models.Model):
    # Minimal account record.
    # NOTE(review): password is a plain CharField — presumably hashed by the
    # caller; verify before relying on this model for real credentials.
    name = models.CharField(max_length=10)
    password = models.CharField(max_length=20)
    history = models.CharField(max_length=50, null=True)  # presumably recent activity — TODO confirm
import re
__author__ = 'Переверза Дмитрий Витальевич'
# Task 1: a straight line y = kx + b is given as a string.
# Compute the y coordinate for a given x coordinate.
equation = input("Введите уравнение вида y = kx + b:\n")
x = input('X = ')
# isnumeric() accepts only non-negative integer strings, so negative or
# fractional x is rejected here.
if not x.isnumeric():
    raise AttributeError('Вывведи не число!')
x = int(x)
# Compute and print y.
# k = everything between '=' and 'x'.
# NOTE(review): assumes the equation is literally of the form 'y = kx + b'
# with integer k, a '+' sign and integer b — other spellings (e.g. '- b',
# missing k) break these regexes.
result = re.search(r"(?<=\=)[^x]+", equation)
x_koef = result.group(0)
# b = everything after '+'.
result = re.search(r'(?<=\+).+$', equation)
free_koef = result.group(0)
print('y = {}'.format(int(x_koef) * x + int(free_koef)))
# Task 2: a date is given as a string in 'dd.mm.yyyy' format.
# Validate it:
#   1. Day must parse to an integer in 1..30/31 depending on the month
#      (February is deliberately not special-cased).
#   2. Month must parse to an integer in 1..12.
#   3. Year must parse to a positive integer in 1..9999.
#   4. Each part must have the exact width of the format
#      (2 chars for day, 2 for month, 4 for year).

# Example of a correct date
date = '01.11.1985'
# Examples of incorrect dates
date = '01.22.1001'
date = '1.12.1001'
date = '-2.10.3001'

date = input('Введите дату в формате dd.mm.yyyy:')
# Width check alone lets 'xx' separators through; also require the dots.
if len(date) != 10 or date[2] != '.' or date[5] != '.':
    raise AttributeError('Неверный формат даты')
day = date[0:2]
month = date[3:5]
year = date[6:10]
if not month.isnumeric() or not 0 < int(month) <= 12:
    raise AttributeError('Неверно введен мясяц')
month = int(month)
if not year.isnumeric() or not 0 < int(year) <= 9999:
    raise AttributeError('Неверно введен год')
year = int(year)
# Months with 31 days are 1, 3, 5, 7, 8, 10, 12. The original used
# `month % 2`, which is wrong from August on (August = 8 has 31 days
# although 8 is even).
days_in_month = 31 if month in (1, 3, 5, 7, 8, 10, 12) else 30
if not day.isnumeric() or not 0 < int(day) <= days_in_month:
    raise ArithmeticError('Неверно введен день')
day = int(day)
print('Дата корректна')
# Задание-3: "Перевёрнутая башня" (Задача олимпиадного уровня)
#
# Вавилонцы решили построить удивительную башню —
# расширяющуюся к верху и содержащую бесконечное число этажей и комнат.
# Она устроена следующим образом — на первом этаже одна комната,
# затем идет два этажа, на каждом из которых по две комнаты,
# затем идёт три этажа, на каждом из которых по три комнаты и так далее:
# ...
# 12 13 14
# 9 10 11
# 6 7 8
# 4 5
# 2 3
# 1
#
# Эту башню решили оборудовать лифтом --- и вот задача:
# нужно научиться по номеру комнаты определять,
# на каком этаже она находится и какая она по счету слева на этом этаже.
#
# Входные данные: В первой строчке задан номер комнаты N, 1 ≤ N ≤ 2 000 000 000.
#
# Выходные данные: Два целых числа — номер этажа и порядковый номер слева на этаже.
#
# Пример:
# Вход: 13
# Выход: 6 2
#
# Вход: 11
# Выход: 5 3
# Block h of the tower has h floors of h rooms each, so block h contains
# h^2 rooms in total:
# h - block number / floor width
# N = h^2 rooms per block
# n = input("Введите номер комнаты: \n")
n = 5  # room number to locate (interactive input stubbed out)
preCountRoom = 0  # total rooms in all blocks before the current one
h = 1  # current block number (= rooms per floor within it)
level = 0  # total floors in all blocks before the current one
while True:
    # Total rooms up to and including block h.
    numRoom = h ** 2 + preCountRoom
    print('h = {} level = {} numRoom = {}'.format(h, level, numRoom))
    if numRoom >= n:
        # Room n lies inside block h.
        diffRoom = n - (numRoom - h ** 2)  # 1-based index of room n within block h
        # NOTE(review): this looks wrong — it adds the room count before the
        # block to a floor offset, mixing units. The floor should presumably
        # be level + (diffRoom - 1) // h + 1 and the position
        # (diffRoom - 1) % h + 1; check against the examples (13 -> "6 2").
        curLvl = (diffRoom // h) + (numRoom - h ** 2)
        offset = diffRoom % h
        print(curLvl)
        print(offset)
        break
    level += h
    preCountRoom = numRoom
    h += 1
|
#import celerite
import numpy as np
import h5py #Maybe separate this, as a lot of utils can work without ever using h5py or local files
import matplotlib
import matplotlib.pyplot as plt
from tqdm import tqdm
#from celerite import terms
from scipy.optimize import minimize
from scipy.signal import medfilt
from astropy.io import fits
from astropy.wcs import WCS
from astropy.coordinates import SkyCoord
from astropy.nddata.utils import Cutout2D
from lightkurve.targetpixelfile import TargetPixelFileFactory
def dilution_factor(m_primary, m_comp, sep, pixscale=21):
    """Flux-dilution factor caused by nearby companion stars.

    Each companion contributes its flux ratio to the primary,
    10**((m_primary - m_comp) / 2.5), attenuated by an empirical
    exp(-1.68 * sep / pixscale) falloff with separation (`sep` in the same
    units as `pixscale`). `m_comp`/`sep` may be scalars or arrays of equal
    shape. Returns 1 / (1 + total contamination), i.e. 1.0 when there is
    no contaminating flux at all.
    """
    flux_ratio = 10.0 ** ((m_primary - m_comp) / 2.5)
    attenuation = np.exp(-1.68 * sep / pixscale)
    contamination = np.sum(flux_ratio * attenuation)
    return 1 / (1 + contamination)
def mask_planet(t, t0, period, dur=0.25):
    """Boolean mask that is False inside transit windows.

    Folds the times `t` on `period` about the epoch `t0` and flags every
    point whose folded phase (in time units) lies within +/- `dur` of
    mid-transit. Returns True for points safely out of transit, so the
    result can be used directly to index out-of-transit data.
    """
    folded = np.mod(t - t0 + 0.5 * period, period) - 0.5 * period
    in_transit = np.abs(folded) < dur
    return np.logical_not(in_transit)
'''
def BLSer(t, y, yerr, mw=351, maximum_period=30.):
#Input needs to be normalized
yr = (y / np.nanmedian(y) -1)*1e6
yt = medfilt(yr, mw)
y = yr - yt
durations = np.linspace(0.05, 0.2, 10)
model = BLS(t, y)
results = model.autopower(durations, maximum_period=maximum_period, frequency_factor=5.0)
#TIC Period t0 Duration Depth SNR Depth_even Depth_odd Depth_half
idx = np.argmax(results.power)
period = results.period[idx]
t0 = results.transit_time[idx]
depth = results.depth[idx]
dur = results.duration[idx]
SNR = results.depth_snr[idx]
return period, t0, dur, depth, SNR
def detrender(t, y, yerr):
kernel = terms.Matern32Term(log_sigma=np.log(np.nanvar(y)), log_rho=-np.log(10.0)) + terms.JitterTerm(log_sigma=np.log(np.nanvar(y)))
gp = celerite.GP(kernel)#, mean=mean_model, fit_mean=True)
gp.compute(t, yerr)
def neg_log_like(params, y, gp):
gp.set_parameter_vector(params)
return -gp.log_likelihood(y)
def grad_neg_log_like(params, y, gp):
gp.set_parameter_vector(params)
return -gp.grad_log_likelihood(y)[1]
initial_params = gp.get_parameter_vector()
bounds = gp.get_parameter_bounds()
soln = minimize(neg_log_like, initial_params, jac=grad_neg_log_like, method='L-BFGS-B', bounds=bounds, args=(y, gp))
gp.set_parameter_vector(soln.x)
mu, var = gp.predict(y, t, return_var=True)
std = np.sqrt(var)
return mu, std
'''
def cmap_map(function, cmap):
    """Applies function (which should operate on vectors of shape 3: [r, g, b]), on colormap cmap.
    This routine will break any discontinuous points in a colormap.
    """
    seg_data = cmap._segmentdata
    # Anchor positions (first tuple element) of each channel's segments.
    anchors = {}
    for channel in ('red', 'green', 'blue'):
        anchors[channel] = [entry[0] for entry in seg_data[channel]]
    # Union of all anchor positions across the three channels.
    step_list = sum(anchors.values(), [])
    step_list = np.array(list(set(step_list)))
    # Sample the colormap at every anchor, then transform each RGB triple.
    old_LUT = np.array([np.array(cmap(step)[0:3]) for step in step_list])
    new_LUT = np.array([function(rgb) for rgb in old_LUT])
    # Rebuild a minimal segment dictionary from the transformed LUT: keep a
    # step if it was an anchor of that channel, or if the transform actually
    # changed its value there.
    out_dict = {}
    for channel_idx, channel in enumerate(['red', 'green', 'blue']):
        per_step = {}
        for row, step in enumerate(step_list):
            if step in anchors[channel]:
                per_step[step] = new_LUT[row, channel_idx]
            elif new_LUT[row, channel_idx] != old_LUT[row, channel_idx]:
                per_step[step] = new_LUT[row, channel_idx]
        # (step, value) -> (step, value, value) and sort by step position.
        out_dict[channel] = sorted(pair + (pair[1],) for pair in per_step.items())
    return matplotlib.colors.LinearSegmentedColormap('colormap', out_dict, 1024)
def FFICut(ffis, x, y, size):
    """Cut a (size x size) target-pixel stack centered on (x, y) from an FFI HDF5 file.

    Parameters
    ----------
    ffis : str
        Path to an HDF5 file holding a 'FFIs' flux cube, an 'errs'
        uncertainty cube, and a 'data' table whose rows 0-3 appear to be
        per-cadence start time, stop time, background and quality —
        TODO confirm row meanings against the file writer.
    x, y : cutout center in array-index coordinates; truncated to int.
    size : side length of the square cutout, in pixels.

    Returns a lightkurve TargetPixelFile assembled cadence by cadence.
    """
    with h5py.File(ffis, 'r', libver='latest') as ffis:
        ncads = len(ffis['FFIs'])
        x = int(x)
        y = int(y)
        #aflux = np.transpose(ffis['FFIs'], axes=[2,0,1])[:, x-size//2:x+size//2+1, y-size//2:y+size//2+1]
        #aerrs = np.transpose(ffis['errs'], axes=[2,0,1])[:, x-size//2:x+size//2+1, y-size//2:y+size//2+1]
        # Slice the cutout for every cadence in one go (reads into memory).
        aflux = ffis['FFIs'][:, x-size//2:x+size//2+1, y-size//2:y+size//2+1]
        aerrs = ffis['errs'][:, x-size//2:x+size//2+1, y-size//2:y+size//2+1]
        boxing = TargetPixelFileFactory(n_cadences=ncads, n_rows=size, n_cols=size)
        for i, f in enumerate(tqdm(aflux)):
            ti = ffis['data'][0, i]
            tf = ffis['data'][1, i]
            b = ffis['data'][2, i]  # NOTE(review): read but unused — background? confirm
            q = ffis['data'][3, i]
            header = {'TSTART': ti, 'TSTOP': tf,
                      'QUALITY': q}
            boxing.add_cadence(frameno=i, flux=f, flux_err=aerrs[i], header=header)
        TPF = boxing.get_tpf()
    #TPF.hdu[1].data['QUALITY'] = ffis['data'][2]
    #TPF.hdu[1].data['TIME'] = ffis['data'][0]
    #TPF.hdu[1].header['BJDREFI'] = hdr['BJDREFI']
    #TPF.hdu[1].data.columns['TIME'].unit = 'BJD - %d' % hdr['BJDREFI']
    return TPF
def pixel_border(mask):
    """Return line segments outlining the True pixels of a 2-D boolean mask.

    For every True pixel, one unit-length segment is emitted per side that
    faces a False pixel (or the outside of the array), in pixel-corner
    coordinates. Returns two parallel lists (x, y) of 2-element arrays, one
    pair per segment, suitable for plotting an aperture outline.

    The original iterated range(1, dim-1) only, so pixels touching the
    array edge silently lost their border segments; out-of-bounds neighbors
    are now treated as outside the mask instead.
    """
    ydim, xdim = mask.shape
    x = []
    y = []

    def _outside(i, j):
        # A neighbor counts as "outside" when it is off the array or False.
        return i < 0 or i >= ydim or j < 0 or j >= xdim or not mask[i, j]

    for i in range(ydim):
        for j in range(xdim):
            if not mask[i, j]:
                continue
            if _outside(i - 1, j):  # top side
                x.append(np.array([j, j + 1]))
                y.append(np.array([i, i]))
            if _outside(i + 1, j):  # bottom side
                x.append(np.array([j, j + 1]))
                y.append(np.array([i + 1, i + 1]))
            if _outside(i, j - 1):  # left side
                x.append(np.array([j, j]))
                y.append(np.array([i, i + 1]))
            if _outside(i, j + 1):  # right side
                x.append(np.array([j + 1, j + 1]))
                y.append(np.array([i, i + 1]))
    return x, y
|
import scrapy
from land_register import db_handler
def generate_scraping_objects(ids):
    """Yield rows of 'stavebni_objekt_ref' whose id is in `ids`.

    Lazily queries the dataset once and yields one DB row (mapping with at
    least 'id' and 'url' keys, per the callers below) at a time.
    """
    db = db_handler.get_dataset()
    table = db['stavebni_objekt_ref'].table
    statement = table.select(table.c.id.in_(tuple(ids)))
    results = db.query(statement)
    for obj in results:
        yield obj
def update_scraping_object(scraping_object, status):
    """Persist the scraping status for one 'stavebni_objekt_ref' row.

    `status` is a one-letter state code ('F' appears to mean finished —
    TODO confirm the full code set); the row is matched by its 'id'.
    """
    db = db_handler.get_dataset()
    db['stavebni_objekt_ref'].update(dict(
        id=scraping_object['id'],
        stav_scrapingu=status
    ), ['id'])
class BuildingObjectsSpider(scrapy.Spider):
    """Scraping of building chosen building objects.

    Works through the DB rows from generate_scraping_objects one at a time:
    each response is parsed into an item for BuildingObjectsPipeline, the
    source row is marked 'F', and the next request is chained from parse().
    """
    name = "BuildingObjectsSpider"
    custom_settings = {
        'ITEM_PIPELINES': {
            'land_register.pipelines.building_objects_pipeline.BuildingObjectsPipeline': 101
        }
    }

    def __init__(self, ids, **kwargs):
        # Lazy generator over the DB rows selected by `ids`.
        self.scraping_objects = generate_scraping_objects(ids)
        super().__init__(**kwargs)

    def start_requests(self):
        # NOTE(review): next() raises StopIteration when `ids` selects no
        # rows; since this is a generator, PEP 479 surfaces that as
        # RuntimeError — confirm intended behavior for empty input.
        scraping_object = next(self.scraping_objects)
        yield scrapy.Request(
            scraping_object['url'],
            meta={'scraping_object': scraping_object}
        )

    def parse(self, response):
        """Building object (stavebni objekt) parsing."""
        scraping_object = response.meta['scraping_object']
        # Item envelope consumed by BuildingObjectsPipeline.
        building_object_item = {
            'id_lv': scraping_object['id_lv'],
            'item_type': 'STAVEBNI_OBJEKT',
            'data': {}
        }
        building_object_data = {
            'ext_id_parcely': scraping_object.get('ext_id_parcely'),
            'ext_id_stavebniho_objektu': scraping_object.get('ext_id_stavebniho_objektu')
        }
        # atributy — first details table maps Czech labels to field names.
        detail_table1 = response.xpath(
            '(//table[@class="detail detail2columns"])[2]/tr')
        for index, row in enumerate(detail_table1):
            name = row.xpath('td[1]/text()').extract_first()
            name = {
                'Čísla popisná nebo evidenční:': 'cisla_popis_evid',
                'Typ:': 'typ',
                'Způsob využití:': 'zpusob_vyuziti'
            }.get(name)
            if name is None:
                continue  # label we do not record
            value = row.xpath('td[2]/text()').extract_first()
            building_object_data[name] = value
        # Second table is positional: fields are picked by row index.
        detail_table2 = response.xpath('//table[@class="detail"]/tr')
        for index, row in enumerate(detail_table2):
            name = {
                0: 'datum_dokonceni',
                1: 'pocet_bytu',
                2: 'zastavena_plocha',
                4: 'podlahova_plocha',
                5: 'pocet_podlazi'
            }.get(index)
            if name is None:
                continue
            value = row.xpath('td[2]/text()').extract_first()
            building_object_data[name] = value
        building_object_item['data'] = building_object_data
        yield building_object_item
        # Mark this row finished, then chain the request for the next one.
        update_scraping_object(scraping_object, status='F')
        try:
            yield from self.start_requests()
            # next_scraping_object = next(self.scraping_objects)
            # yield scrapy.Request(
            #     scraping_object['url'],
            #     meta={'scraping_object': scraping_object}
            # )
        except StopIteration:
            # NOTE(review): under PEP 479, exhaustion of the generator
            # surfaces as RuntimeError rather than StopIteration, so this
            # handler likely never fires — confirm on the target Python.
            pass

    def errback(self, failure):
        """Log request failures, distinguishing HTTP/DNS/timeout errors."""
        from scrapy.spidermiddlewares.httperror import HttpError
        from twisted.internet.error import DNSLookupError
        from twisted.internet.error import TimeoutError, TCPTimedOutError
        # log all failures
        self.logger.error(repr(failure))
        # in case you want to do something special for some errors,
        # you may need the failure's type:
        if failure.check(HttpError):
            # these exceptions come from HttpError spider middleware
            # you can get the non-200 response
            response = failure.value.response
            self.logger.error('HttpError on %s', response.url)
        elif failure.check(DNSLookupError):
            # this is the original request
            request = failure.request
            self.logger.error('DNSLookupError on %s', request.url)
        elif failure.check(TimeoutError, TCPTimedOutError):
            request = failure.request
            self.logger.error('TimeoutError on %s', request.url)

    def response_is_ban(self, request, response):
        """Defines, which status codes mean ban."""
        return response.status == 403 or response.status == 500

    def closed(self, reason):
        """Called when spider is closed due to any error or success."""
        pass
from train import emotion_analysis, reshape_dataset
import matplotlib.pyplot as plt
import numpy as np
from keras.preprocessing import image
from model import build_model
if __name__ == '__main__':
    # Load a trained facial-expression model and run it on one image.
    num_classes = 7  # presumably the 7 basic emotion classes — TODO confirm against train.py
    # x_train, y_train, x_test, y_test = reshape_dataset(path, num_classes)
    model = build_model(num_classes)
    # Pre-trained weights (absolute Windows path).
    model.load_weights('C:/Users/54532/Desktop/facial_expression_detection/weights.h5')
    # Single 48x48 grayscale test image, matching the model's input size.
    img = image.load_img("C:/Users/54532/Desktop/facial_expression_detection/dataset/9.jpg", grayscale=True, target_size=(48, 48))
    x = image.img_to_array(img)
    x = np.expand_dims(x, axis=0)  # add the batch dimension
    x /= 255  # scale pixel values into [0, 1]
    custom = model.predict(x)
    # Visualize/report the per-class scores of the first (only) prediction.
    t1 = emotion_analysis(custom[0])
    # Show the input image itself for visual comparison.
    x = np.array(x, 'float32')
    x = x.reshape([48, 48])
    plt.gray()
    plt.imshow(x)
    plt.show()
|
from django.db import models
class Course(models.Model):
    # A course identified by a URL-safe slug with a short description.
    name = models.SlugField()
    descr = models.CharField(max_length=200)

    def __str__(self):
        # Display the slug wherever the object is rendered (admin, shell).
        return self.name

    class Meta:
        # Default queryset ordering: alphabetical by slug.
        ordering = ('name',)
|
#coding=utf-8
'''
unittes使用
'''
import sys
sys.path.append("E:\\AppiumProjectAndroid")
import unittest
import HTMLTestReportCN
import threading
import multiprocessing
from util.server import Server
import time
from util.write_user_command import WriteUserCommand
# from appium import webdriver
from business.login_business import Login_Business
from business.quitLogin_business import QuitLogin_Business
from business.queryTransfer_business import QueryTransfer_Business
from business.myAccount_business import MyAccount_Business
from base.base_driver import BaseDriver
from util.date_number import DataNumber3
from util.write_excel import Write_Excel
from util.read_excel import Read_Excel
class ParameTest_Case(unittest.TestCase):
    """TestCase base that threads a device parameter into test subclasses.

    The extra constructor argument and the Excel helpers are stashed in
    module-level globals so setUpClass and every test method of subclasses
    (which unittest constructs per test) can read them.
    """
    def __init__(self, methodName='runTest', parame=None):
        '''
        Constructor.
        :param methodName: unittest method name to run
        :param parame: device/capability parameter for this test run
        :return:
        '''
        super(ParameTest_Case, self).__init__(methodName)
        global parames  # declare module-level global shared with subclasses
        parames = parame
        global write_excel
        global read_excel
        # Workbook used to record per-case results (hard-coded absolute path).
        write_excel = Write_Excel('E:\\AppiumProjectAndroid\\config\\AndroidAutomationTestCase.xlsx')
        read_excel = Read_Excel('E:\\AppiumProjectAndroid\\config\\AndroidAutomationTestCase.xlsx', 'Sheet1')
class CaseTest(ParameTest_Case):
#加上注解,在python叫容器,当有2个case的时候,是全局的,就运行一次
#当前实例本身的一个函数
driver = None
@classmethod
def setUpClass(cls):
global driver
print('setUpClass--->',parames)
# cls.login_business = LoginBusiness(parames)
# # cls.quitLogin_business = QuitLoginBusiness(parames)
# cls.driver = cls.login_business.drivers()
# # return driver
base_driver = BaseDriver()
driver = base_driver.android_driver(parames)
cls.login_business = Login_Business(driver)
cls.queryTransfer_business = QueryTransfer_Business(driver)
cls.quitLogin_business = QuitLogin_Business(driver)
cls.myAccount_business = MyAccount_Business(driver)
cls.login_business.login_before()
def setUp(self):
print('this is setup\n')
# print('write_excel方法setUp:', write_excel)
# 运行test_01前要运行setUP()
# @unittest.skip('暂停CaseTest')
def test_1(self):
# self.assertEqual(3,3)
print('test case_01 里面的参数',parames)
# self.login_business.login_before()
# self.login_business.login_user_error()
user_flag = self.login_business.login_user_error()
print('user_flag = ', user_flag)
if user_flag == True:
write_excel.write(2, 10, '通过')
else:
write_excel.write(2, 10, '失败')
# @unittest.skip('暂停CaseTest')
def test_2(self):
# self.assertEqual(2, 1, 'error')
print('this is case02\n')
print('test case_02 里面的参数', parames)
# self.login_business.login_before()
self.login_business.login_pass()
# @unittest.skip('暂停CaseTest')
def test_3(self):
self.queryTransfer_business.query_transferF_input()
transfer_submitted_mark = self.queryTransfer_business.query_transfer_page(4, 11, 4, 7, 4, 12)
verification_result = self.assertEqual(transfer_submitted_mark, '转账已提交', '验证失败')
print('verification_result:',verification_result)
if verification_result == None:
write_excel.write(4, 10, '通过')
# print('get_cell(4, 10):', read_excel.get_cell(4, 10))
# @unittest.skip('暂停CaseTest')
def test_4(self):
# print('方法test_4:',driver)
self.queryTransfer_business.query_transferF_select()
transfer_submitted_mark = self.queryTransfer_business.query_transfer_page(5, 11, 5, 7, 5, 12)
verification_result = self.assertEqual(transfer_submitted_mark, '转账已提交', '验证失败')
print('verification_result:',verification_result)
if verification_result == None:
write_excel.write(5, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_5(self):
error_prompt = self.queryTransfer_business.query_transferF_payee_space()
print('温馨提示:', error_prompt)
verification_result = self.assertEqual(error_prompt, '请输入账户姓名', '验证失败')
print('verification_result:',verification_result)
if verification_result == None:
write_excel.write(6, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_6(self):
# print('用例编号:111CZSX024')
error_prompt = self.queryTransfer_business.query_transferF_amount_zero()
print('温馨提示:', error_prompt)
verification_result = self.assertEqual(error_prompt, '转账金额输入有误', '验证失败')
print('verification_result:',verification_result)
if verification_result == None:
write_excel.write(7, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_7(self):
error_prompt = self.queryTransfer_business.query_transferF_beyond_balance()
print('温馨提示:', error_prompt)
if error_prompt == 'MBS030212':
# 设置单笔转账金额,交易金额不能大于单笔限额
verification_result = self.assertEqual(error_prompt, 'MBS030212', '验证失败')
else:
verification_result = self.assertEqual(error_prompt, '余额不足', '验证失败')
print('verification_result:', verification_result)
if verification_result == None:
write_excel.write(8, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_8(self):
error_prompt = self.queryTransfer_business.query_transferF_amount_space()
print('温馨提示:', error_prompt)
verification_result = self.assertEqual(error_prompt, '请输入转账金额', '验证失败')
print('verification_result:',verification_result)
if verification_result == None:
write_excel.write(9, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_9(self):
error_prompt = self.queryTransfer_business.query_transferF_amount_capital()
print('温馨提示:', error_prompt)
verification_result = self.assertEqual(error_prompt, '请输入转账金额', '验证失败')
print('verification_result:',verification_result)
if verification_result == None:
write_excel.write(10, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_10(self):
error_prompt = self.queryTransfer_business.query_transferF_payee_wrong()
print('温馨提示:', error_prompt)
verification_result = self.assertEqual(error_prompt, '账户和户名不匹配', '验证失败')
print('verification_result:',verification_result)
if verification_result == None:
write_excel.write(11, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_11(self):
transfer_submitted_mark = self.queryTransfer_business.query_transferF_same_payer()
print('转账成功提示:', transfer_submitted_mark)
verification_result = self.assertEqual(transfer_submitted_mark, '转入转出账户不能相同', '验证失败')
print('verification_result:',verification_result)
if verification_result == None:
write_excel.write(12, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_12(self):
error_prompt = self.queryTransfer_business.query_transferF_drawpassword_wrong()
print('温馨提示:', error_prompt)
verification_result = self.assertEqual(error_prompt, '您已输错密码[1]次,再输错[2]次,将锁卡', '验证失败')
print('verification_result:',verification_result)
if verification_result == None:
write_excel.write(13, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_13(self):
transfer_submitted_mark = self.queryTransfer_business.query_transferF_drawpassword_again()
print('转账成功提示:', transfer_submitted_mark)
verification_result = self.assertEqual(transfer_submitted_mark, '转账已提交', '验证失败')
print('verification_result:',verification_result)
if verification_result == None:
write_excel.write(14, 10, '通过')
# @unittest.skip('暂停CaseTest') #由于涉及到锁卡,暂时不执行
def test_14(self):
error_prompt = self.queryTransfer_business.query_transferF_drawpassword_lock()
print('温馨提示:', error_prompt)
verification_result = self.assertEqual(error_prompt, '您已输错密码[1]次,再输错[2]次,将锁卡', '验证失败')
print('verification_result:',verification_result)
if verification_result == None:
write_excel.write(15, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_15(self):
error_prompt = self.queryTransfer_business.query_transferF_drawpassword_lack()
print('温馨提示:', error_prompt)
verification_result = self.assertEqual(error_prompt, '取款密码输入不正确,请重新输入', '验证失败')
print('verification_result:',verification_result)
if verification_result ==None:
write_excel.write(16, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_16(self):
element_exist = self.queryTransfer_business.query_transferF_drawpassword_excess()
# print('获取的元素:', element_exist)
verification_result = self.assertIsNotNone(element_exist, '输入取款密码超过6位失败')
print('verification_result:',verification_result)
if verification_result == None:
write_excel.write(17, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_17(self):
element_exist = self.queryTransfer_business.query_transferF_bank_list()
print('获取的元素:', element_exist)
verification_result = self.assertIsNotNone(element_exist, '进入银行列表失败')
print('verification_result:',verification_result)
if verification_result == None:
write_excel.write(18, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_18(self):
element_exist = self.queryTransfer_business.query_transferF_bank_search()
# print('获取的元素:', element_exist)
verification_result = self.assertIsNotNone(element_exist, '搜索银行名称失败')
print('verification_result:',verification_result)
if verification_result == None:
write_excel.write(19, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_19(self):
element_exist = self.queryTransfer_business.query_transferF_payee_list()
# print('获取的元素:', element_exist)
print(self.assertIsNotNone(element_exist, '进入收款人列表失败'))
write_excel.write(20, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_20(self):
element_compare = self.queryTransfer_business.query_transferF_payee_choice()
print('获取元素长度对比结果:', element_compare)
print(self.assertTrue(element_compare, '选择收款人信息失败'))
write_excel.write(21, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_21(self):
payeelist_compare = self.queryTransfer_business.query_transferF_payee_add()
print('获取元素长度对比结果:', payeelist_compare)
print(self.assertTrue(payeelist_compare, '添加收款人信息失败'))
write_excel.write(22, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_22(self):
payeelist_compare = self.queryTransfer_business.query_transferF_payee_delete()
print('获取元素长度对比结果:', payeelist_compare)
print(self.assertTrue(payeelist_compare, '删除收款人信息失败'))
write_excel.write(23, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_23(self):
compare_branch_amount = self.queryTransfer_business.query_transferF_payee_branch()
print('获取支行数量对比结果:', compare_branch_amount)
print(self.assertTrue(compare_branch_amount, '进入支行页面失败'))
write_excel.write(24, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_24(self):
city_list = self.queryTransfer_business.query_transferF_payee_city()
print('获取城市列表页面标题:', city_list)
print(self.assertIsNotNone(city_list, '弹出城市列表页面失败'))
write_excel.write(25, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_25(self):
branch_bank = self.queryTransfer_business.query_transferF_branch_keyword()
print('获取分行查询结果:', branch_bank)
print(self.assertIsNotNone(branch_bank, '关键字分行查询失败'))
write_excel.write(26, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_26(self):
account_amount = self.queryTransfer_business.query_transferF_account_detail()
print('获取账户种类有:' ,account_amount)
print(self.assertIsNotNone(account_amount, '获取账户种类失败'))
write_excel.write(27, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_27(self):
self.queryTransfer_business.query_transferG_select()
transfer_submitted_mark = self.queryTransfer_business.query_transfer_page(28, 11, 28, 7, 28, 12)
# self.myAccount_business.hide_balance()
verification_result = self.assertEqual(transfer_submitted_mark, '转账已提交', '验证失败')
print('verification_result:',verification_result)
if verification_result == None:
write_excel.write(28, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_28(self):
error_prompt = self.queryTransfer_business.query_transferG_amount_zero()
verification_result = self.assertEqual(error_prompt, '转账金额输入有误', '验证失败')
print('verification_result:',verification_result)
if verification_result == None:
write_excel.write(29, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_29(self):
error_prompt = self.queryTransfer_business.query_transferG_beyond_balance()
verification_result = self.assertEqual(error_prompt, '余额不足', '验证失败')
print('verification_result:',verification_result)
if verification_result == None:
write_excel.write(30, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_30(self):
error_prompt = self.queryTransfer_business.query_transferG_amount_space()
verification_result = self.assertEqual(error_prompt, '请输入转账金额', '验证失败')
print('verification_result:',verification_result)
if verification_result == None:
write_excel.write(31, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_31(self):
error_prompt = self.queryTransfer_business.query_transferG_amount_capital()
verification_result = self.assertEqual(error_prompt, '请输入转账金额', '验证失败')
print('verification_result:',verification_result)
if verification_result == None:
write_excel.write(32, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_32(self):
error_prompt = self.queryTransfer_business.query_transferG_payee_wrong()
verification_result = self.assertEqual(error_prompt, '账户和户名不匹配', '验证失败')
print('verification_result:', verification_result)
if verification_result == None:
write_excel.write(33, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_33(self):
error_prompt = self.queryTransfer_business.query_transferG_payee_space()
verification_result = self.assertEqual(error_prompt, '请输入账户姓名', '验证失败')
print('verification_result:', verification_result)
if verification_result == None:
write_excel.write(34, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_34(self):
transfer_submitted_mark = self.queryTransfer_business.query_transferG_same_payer()
verification_result = self.assertEqual(transfer_submitted_mark, '转入转出账户不能相同', '验证失败')
print('verification_result:', verification_result)
if verification_result == None:
write_excel.write(35, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_35(self):
error_prompt = self.queryTransfer_business.query_transferG_drawpassword_wrong()
verification_result = self.assertEqual(error_prompt, '您已输错密码[1]次,再输错[2]次,将锁卡', '验证失败')
print('verification_result:', verification_result)
if verification_result == None:
write_excel.write(36, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_36(self):
transfer_submitted_mark = self.queryTransfer_business.query_transferG_drawpassword_again()
verification_result = self.assertEqual(transfer_submitted_mark, '转账已提交', '验证失败')
print('verification_result:', verification_result)
if verification_result == None:
write_excel.write(37, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_37(self):
error_prompt = self.queryTransfer_business.query_transferG_drawpassword_lack()
verification_result = self.assertEqual(error_prompt, '取款密码输入不正确,请重新输入', '验证失败')
print('verification_result:', verification_result)
if verification_result == None:
write_excel.write(38, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_38(self):
element_exist = self.queryTransfer_business.query_transferG_drawpassword_excess()
verification_result = self.assertIsNotNone(element_exist, '输入取款密码超过6位失败')
print('verification_result:', verification_result)
if verification_result == None:
write_excel.write(39, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_39(self):
element_exist = self.queryTransfer_business.query_transferG_bank_list()
verification_result = self.assertIsNotNone(element_exist, '进入银行列表失败')
print('verification_result:', verification_result)
if verification_result == None:
write_excel.write(40, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_40(self):
element_exist = self.queryTransfer_business.query_transferG_bank_search()
verification_result = self.assertIsNotNone(element_exist, '搜索银行名称失败')
print('verification_result:', verification_result)
if verification_result == None:
write_excel.write(41, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_41(self):
element_exist = self.queryTransfer_business.query_transferG_payee_list()
verification_result = self.assertIsNotNone(element_exist, '进入收款人列表失败')
print('verification_result:', verification_result)
if verification_result == None:
write_excel.write(42, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_42(self):
element_compare = self.queryTransfer_business.query_transferG_payee_choice()
verification_result = self.assertTrue(element_compare, '选择收款人信息失败')
print('verification_result:', verification_result)
if verification_result == None:
write_excel.write(43, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_43(self):
payeelist_compare = self.queryTransfer_business.query_transferG_payee_add()
verification_result = self.assertTrue(payeelist_compare, '添加收款人信息失败')
print('verification_result:', verification_result)
if verification_result == None:
write_excel.write(44, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_44(self):
payeelist_compare = self.queryTransfer_business.query_transferG_payee_delete()
verification_result = self.assertTrue(payeelist_compare, '删除收款人信息失败')
print('verification_result:',verification_result)
if verification_result == None:
write_excel.write(45, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_45(self):
compare_branch_amount = self.queryTransfer_business.query_transferG_payee_branch()
verification_result = self.assertTrue(compare_branch_amount, '进入支行页面失败')
print('verification_result:', verification_result)
if verification_result == None:
write_excel.write(46, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_46(self):
city_list = self.queryTransfer_business.query_transferG_payee_city()
verification_result = self.assertIsNotNone(city_list, '弹出城市列表页面失败')
print('verification_result:', verification_result)
if verification_result == None:
write_excel.write(47, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_47(self):
branch_bank = self.queryTransfer_business.query_transferG_branch_keyword()
verification_result = self.assertIsNotNone(branch_bank, '关键字分行查询失败')
print('verification_result:', verification_result)
if verification_result == None:
write_excel.write(48, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_48(self):
account_amount = self.queryTransfer_business.query_transferG_account_detail()
verification_result = self.assertIsNotNone(account_amount, '获取账户种类失败')
print('verification_result:', verification_result)
if verification_result == None:
write_excel.write(49, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_49(self):
    """Submit a transfer (T flow) and verify the '转账已提交' confirmation."""
    self.queryTransfer_business.query_transferT_select()
    transfer_submitted_mark = self.queryTransfer_business.query_transfer_page(50, 11, 50, 7, 50, 12)
    verification_result = self.assertEqual(transfer_submitted_mark, '转账已提交', '验证失败')
    print('verification_result:', verification_result)
    # assert* returns None on success; use identity comparison per PEP 8.
    if verification_result is None:
        write_excel.write(50, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_50(self):
    """Empty payee name must be rejected with the expected error prompt."""
    error_prompt = self.queryTransfer_business.query_transferT_payee_space()
    verification_result = self.assertEqual(error_prompt, '请输入账户姓名', '验证失败')
    print('verification_result:', verification_result)
    # assert* returns None on success; use identity comparison per PEP 8.
    if verification_result is None:
        write_excel.write(51, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_51(self):
    """A zero transfer amount must be rejected with the expected prompt."""
    error_prompt = self.queryTransfer_business.query_transferT_amount_zero()
    verification_result = self.assertEqual(error_prompt, '转账金额输入有误', '验证失败')
    print('verification_result:', verification_result)
    # assert* returns None on success; use identity comparison per PEP 8.
    if verification_result is None:
        write_excel.write(52, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_52(self):
    """An amount above the balance must be rejected with '余额不足'."""
    error_prompt = self.queryTransfer_business.query_transferT_beyond_balance()
    verification_result = self.assertEqual(error_prompt, '余额不足', '验证失败')
    print('verification_result:', verification_result)
    # assert* returns None on success; use identity comparison per PEP 8.
    if verification_result is None:
        write_excel.write(53, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_53(self):
    """An empty amount must be rejected with the expected prompt."""
    error_prompt = self.queryTransfer_business.query_transferT_amount_space()
    verification_result = self.assertEqual(error_prompt, '请输入转账金额', '验证失败')
    print('verification_result:', verification_result)
    # assert* returns None on success; use identity comparison per PEP 8.
    if verification_result is None:
        write_excel.write(54, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_54(self):
    """A non-numeric (capital-letter) amount must be rejected."""
    error_prompt = self.queryTransfer_business.query_transferT_amount_capital()
    verification_result = self.assertEqual(error_prompt, '请输入转账金额', '验证失败')
    print('verification_result:', verification_result)
    # assert* returns None on success; use identity comparison per PEP 8.
    if verification_result is None:
        write_excel.write(55, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_55(self):
    """A payee name that does not match the account must be rejected."""
    error_prompt = self.queryTransfer_business.query_transferT_payee_wrong()
    verification_result = self.assertEqual(error_prompt, '账户和户名不匹配', '验证失败')
    print('verification_result:', verification_result)
    # assert* returns None on success; use identity comparison per PEP 8.
    if verification_result is None:
        write_excel.write(56, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_56(self):
    """Transferring to the same account must be rejected."""
    transfer_submitted_mark = self.queryTransfer_business.query_transferT_same_payer()
    verification_result = self.assertEqual(transfer_submitted_mark, '转入转出账户不能相同', '验证失败')
    print('verification_result:', verification_result)
    # assert* returns None on success; use identity comparison per PEP 8.
    if verification_result is None:
        write_excel.write(57, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_57(self):
    """A wrong withdrawal password must trigger the lock-card warning."""
    error_prompt = self.queryTransfer_business.query_transferT_drawpassword_wrong()
    verification_result = self.assertEqual(error_prompt, '您已输错密码[1]次,再输错[2]次,将锁卡', '验证失败')
    print('verification_result:', verification_result)
    # assert* returns None on success; use identity comparison per PEP 8.
    if verification_result is None:
        write_excel.write(58, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_58(self):
    """Retrying with the correct withdrawal password must submit the transfer."""
    transfer_submitted_mark = self.queryTransfer_business.query_transferT_drawpassword_again()
    verification_result = self.assertEqual(transfer_submitted_mark, '转账已提交', '验证失败')
    print('verification_result:', verification_result)
    # assert* returns None on success; use identity comparison per PEP 8.
    if verification_result is None:
        write_excel.write(59, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_59(self):
    """A too-short withdrawal password must be rejected."""
    error_prompt = self.queryTransfer_business.query_transferT_drawpassword_lack()
    verification_result = self.assertEqual(error_prompt, '取款密码输入不正确,请重新输入', '验证失败')
    print('verification_result:', verification_result)
    # assert* returns None on success; use identity comparison per PEP 8.
    if verification_result is None:
        write_excel.write(60, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_60(self):
    """Entering more than 6 password digits must be handled (element found)."""
    element_exist = self.queryTransfer_business.query_transferT_drawpassword_excess()
    verification_result = self.assertIsNotNone(element_exist, '输入取款密码超过6位失败')
    print('verification_result:', verification_result)
    # assert* returns None on success; use identity comparison per PEP 8.
    if verification_result is None:
        write_excel.write(61, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_61(self):
    """Open the bank list (T flow); record a pass on success."""
    element_exist = self.queryTransfer_business.query_transferT_bank_list()
    verification_result = self.assertIsNotNone(element_exist, '进入银行列表失败')
    print('verification_result:', verification_result)
    # assert* returns None on success; use identity comparison per PEP 8.
    if verification_result is None:
        write_excel.write(62, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_62(self):
    """Search a bank by name (T flow); record a pass on success."""
    element_exist = self.queryTransfer_business.query_transferT_bank_search()
    verification_result = self.assertIsNotNone(element_exist, '搜索银行名称失败')
    print('verification_result:', verification_result)
    # assert* returns None on success; use identity comparison per PEP 8.
    if verification_result is None:
        write_excel.write(63, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_63(self):
    """Open the payee list (T flow); record a pass on success."""
    element_exist = self.queryTransfer_business.query_transferT_payee_list()
    verification_result = self.assertIsNotNone(element_exist, '进入收款人列表失败')
    print('verification_result:', verification_result)
    # assert* returns None on success; use identity comparison per PEP 8.
    if verification_result is None:
        write_excel.write(64, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_64(self):
    """Select a payee from the list (T flow); record a pass on success."""
    element_compare = self.queryTransfer_business.query_transferT_payee_choice()
    verification_result = self.assertTrue(element_compare, '选择收款人信息失败')
    print('verification_result:', verification_result)
    # assert* returns None on success; use identity comparison per PEP 8.
    if verification_result is None:
        write_excel.write(65, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_65(self):
    """Add a payee (T flow); record a pass on success."""
    payeelist_compare = self.queryTransfer_business.query_transferT_payee_add()
    verification_result = self.assertTrue(payeelist_compare, '添加收款人信息失败')
    print('verification_result:', verification_result)
    # assert* returns None on success; use identity comparison per PEP 8.
    if verification_result is None:
        write_excel.write(66, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_66(self):
    """Delete a payee (T flow); record a pass on success."""
    payeelist_compare = self.queryTransfer_business.query_transferT_payee_delete()
    verification_result = self.assertTrue(payeelist_compare, '删除收款人信息失败')
    print('verification_result:', verification_result)
    # assert* returns None on success; use identity comparison per PEP 8.
    if verification_result is None:
        write_excel.write(67, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_67(self):
    """Enter the payee branch page (T flow); record a pass on success."""
    compare_branch_amount = self.queryTransfer_business.query_transferT_payee_branch()
    verification_result = self.assertTrue(compare_branch_amount, '进入支行页面失败')
    print('verification_result:', verification_result)
    # assert* returns None on success; use identity comparison per PEP 8.
    if verification_result is None:
        write_excel.write(68, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_68(self):
    """Open the city list for a payee (T flow); record a pass on success."""
    city_list = self.queryTransfer_business.query_transferT_payee_city()
    verification_result = self.assertIsNotNone(city_list, '弹出城市列表页面失败')
    print('verification_result:', verification_result)
    # assert* returns None on success; use identity comparison per PEP 8.
    if verification_result is None:
        write_excel.write(69, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_69(self):
    """Search branch banks by keyword (T flow); record a pass on success."""
    branch_bank = self.queryTransfer_business.query_transferT_branch_keyword()
    verification_result = self.assertIsNotNone(branch_bank, '关键字分行查询失败')
    print('verification_result:', verification_result)
    # assert* returns None on success; use identity comparison per PEP 8.
    if verification_result is None:
        write_excel.write(70, 10, '通过')
# @unittest.skip('暂停CaseTest')
def test_70(self):
    """Fetch the account type details (T flow); record a pass on success."""
    account_amount = self.queryTransfer_business.query_transferT_account_detail()
    verification_result = self.assertIsNotNone(account_amount, '获取账户种类失败')
    print('verification_result:', verification_result)
    # assert* returns None on success; use identity comparison per PEP 8.
    if verification_result is None:
        write_excel.write(71, 10, '通过')
@unittest.skip('暂停CaseTest')
def test_100(self):
    """Log out of the app; currently skipped via the decorator above."""
    self.quitLogin_business.quitLogin_pass()
def tearDown(self):
    """Per-test cleanup: short pause, plus a screenshot when an error occurred."""
    time.sleep(1)
    print('this is teardown!')
    # Screenshot-on-error: sys.exc_info() returns the exception currently
    # being handled (Python records it automatically), so a non-None first
    # element means the test raised.
    if sys.exc_info()[0]:
        self.login_business.login_handle.login_page.driver.save_screenshot('../jpg/test01.png')
@classmethod
def tearDownClass(cls):
    """One-time cleanup after every test in this class has run."""
    time.sleep(2)
    print('this is class teardown.\n')
def appium_init():
    """Start the appium server through the Server helper."""
    print('从这里开始启动appium')
    server = Server()
    server.main()
def get_suite(i):
    '''
    Build and run the test suite for device *i* and write an HTML report.

    Cases are read from the Excel sheet: a row is included when its run
    flag (column 1) is "Y" and its result cell (column 9) is empty.
    :param i: device index; passed to each CaseTest and appended to the
        report file name.
    :return: None
    '''
    print('get_suite里面的', i)
    suite = unittest.TestSuite()
    read_excel = Read_Excel('E:\\AppiumProjectAndroid\\config\\AndroidAutomationTestCase.xlsx', 'Sheet1')
    print('read_excel方法get_suite:', read_excel)
    rows = read_excel.get_rows()
    print('rows方法get_suite:', rows)
    for j in range(1, 71):
        # `row` (renamed from `list`, which shadowed the builtin) is the
        # Excel row describing case test_<j>.
        row = read_excel.get_excel_value("test_" + str(j))
        if row[1] == "Y" and row[9] is None:
            suite.addTest(CaseTest(row[0], parame=i))
    strnow_number3 = DataNumber3()
    html_file = "E:\\AppiumProjectAndroid\\report\\report" + strnow_number3 + str(i) + ".html"
    # Context manager guarantees the report file is closed even if the
    # runner raises (the original leaked the handle on error).
    with open(html_file, 'wb') as fp:
        runner = HTMLTestReportCN.HTMLTestRunner(
            stream=fp,
            title='自动化测试报告',
            tester='Susun.Zeng'
        )
        runner.run(suite)
def get_count():
    '''
    Return the number of configured devices (one line per device in the
    user file).
    :return: count of lines in the user command file
    '''
    user_file = WriteUserCommand()
    device_count = user_file.get_file_lines()
    print('count:', device_count)
    return device_count
# Entry point: start appium, then launch one worker process per configured
# device, staggering the starts by two seconds each.
if __name__ == '__main__':
    print('启动appium')
    appium_init()
    workers = []
    for device_index in range(get_count()):
        print("i:", device_index)
        worker = multiprocessing.Process(target=get_suite, args=(device_index,))
        workers.append(worker)
    for worker in workers:
        worker.start()
        time.sleep(2)
|
# Print each name with " | " inserted at every character boundary —
# str.replace("") matches before, between, and after every character.
names = ['Ana', 'Ana Maria', 'Pedro', 'Elena', 'Helena', 'Elen']
for name in names:
    print(name.replace("", " | "))
|
"""
module: __init__.py
------------------------------------------------------------------------
Author: David J. Sanders
Student No: H00035340
Last Update: 15 December 2015
Update: Revise documentation
------------------------------------------------------------------------
Overview: The module called when runserver.py is executed using the
uWSGI service.
Purpose: Launches the application
Called By: n/a
Patterns: Larger Applications pattern from Flask website (Ronacher,
2013)
References
----------
Ronacher, A., 2013, 'Larger Applications' [ONLINE]. Available at:
http://flask.pocoo.org/docs/0.10/patterns/packages/ (Accessed: 04
November 2015)
"""
# Import the module Flask from the flask package
from flask import Flask
# Import the module Api from the flask_restful package
from flask_restful import Api
# The application context: the single Flask app object for this package.
app = Flask(__name__)
# Create an Api object using app (flask_restful attaches REST resources to it)
api = Api(app)
# Import the main.py module and run the program. Imported at the bottom,
# per the Flask "Larger Applications" pattern, so notifications.main can
# import `app`/`api` from this package without a circular-import error.
import notifications.main
|
from src.constant import market_constants
class Company:
    """A firm in the toy market simulation.

    Reads and mutates the shared module-level state in ``market_constants``
    (``market_margin`` and ``bank_balance``), so instances are coupled
    through that module.
    """
    def __init__(self, index):
        # Position of this company in the shared market arrays.
        self.index = index
        # self.value = np.random.randint(low=10, high=100)
        self.value = 100
        # Per-company constant; the margin scales inversely with value.
        self.margin_coefficient = market_constants.market_margin_coefficient[index]
        # self.p_margin = 0.01
        # self.p_labor = np.random.randint(low=0, high=10) / 10
        self.p_labor = 0.5
        self.p_product = 1 - self.p_labor
        # Fraction of each period's margin reinvested into company value.
        self.p_re_investment = 0.5
        self.profit = 0
        self.p()
    def get_margin(self):
        # Margin is inversely proportional to the current company value.
        return self.margin_coefficient / self.value
    def operate(self):
        """Run one step: earn margin, interact with the bank, then split
        the margin between re-investment (value) and retained profit."""
        margin = self.value * self.get_margin()
        # Publish this company's margin so competitors can compare.
        market_constants.market_margin[self.index] = self.get_margin()
        if self.get_margin() < max(market_constants.market_margin):
            # Below the market-leading margin: earnings penalised, and a
            # quarter of the (reduced) margin is paid into the shared bank.
            margin *= 0.8
            market_constants.bank_balance += margin / 4
        else:
            # Market leader: receives a 10-unit subsidy while the bank can
            # afford it.
            if market_constants.bank_balance > 10:
                market_constants.bank_balance -= 10
                self.value += 10
        self.value += margin * self.p_re_investment
        self.profit += margin * (1 - self.p_re_investment)
    def p(self):
        # Debug print of the company's current state.
        print(str(self.index) + " -> Margin:\t" + "{:.3f}".format(self.get_margin()))
        print(str(self.index) + " -> Value:\t\t" + "{:.3f}".format(self.value))
        print(str(self.index) + " -> Profit:\t" + "{:.3f}".format(self.profit))
        # print(str(self.index) + " -> Re-Investment Rate:\t" + "{:.3f}".format(self.p_re_investment))
        print()
    def get_value(self):
        return self.value
    def get_profit(self):
        return self.profit
|
from string import join
import os
from settings import MEDIA_ROOT
from photos.models import models
from django.contrib import admin
# Esto nos permite manipular el listado a mostrar para los admins
class ImageAdmin(admin.ModelAdmin):
    """Admin customisation for the image model."""
    # Columns shown in the admin changelist.
    list_display = ["titulo", "usuario", "descripcion", "imagen"]
    # Sidebar filter by owning user.
    list_filter = ["usuario"]
    def save_model(self, request, obj, form, change):
        # Stamp the saving admin user on the object before persisting.
        # NOTE(review): this overwrites `user` on every save, including
        # edits of existing objects — confirm that is intended (typically
        # guarded with `if not change:`).
        obj.user = request.user
        obj.save()
import torch
from torch.utils.data import DataLoader
import numpy as np
import cv2
import argparse
import os
import random
import time
import datetime
import pickle as pkl
from utils.util import *
from utils.datasets import *
from models import *
import matplotlib.pyplot as plt
def arg_parse():
    """Build and parse the command-line options for the YOLOv3 detector.

    :return: the parsed ``argparse.Namespace``.
    """
    parser = argparse.ArgumentParser(description='YOLOv3 Detection Module')
    parser.add_argument('--image_folder', type=str, default='test_examples', help='path to images')
    parser.add_argument('--config_path', type=str, default='cfg/yolov3.cfg', help='path to config path')
    # Fixed help-string typo: 'path tp weights' -> 'path to weights'.
    parser.add_argument('--weights_path', type=str, default='weights/yolov3.weights', help='path to weights')
    parser.add_argument('--class_path', type=str, default='data/coco.names', help='path to class label file')
    parser.add_argument('--conf_th', type=float, default=0.8, help='object confidence threshold')
    parser.add_argument('--nms_th', type=float, default=0.4, help='iou threshold for non-maximum suppression')
    parser.add_argument('--batch_size', type=int, default=1, help='batch size')
    # Fixed help-string typo: 'generatioin' -> 'generation'.
    parser.add_argument('--n_cpu', type=int, default=8, help='numbers of cpu threads to use during batch generation')
    parser.add_argument('--img_size', type=int, default=416, help='size of each image')
    parser.add_argument('--det_folder', type=str, default='det_result', help='path to store detection results')
    return parser.parse_args()
def main():
    """Run YOLOv3 detection over a folder of images and save annotated copies.

    Loads the Darknet config/weights named in the CLI options, runs batched
    inference with NMS, then rescales the surviving boxes back to each
    original image, draws them with OpenCV and writes the result to
    ``opt.det_folder``.
    """
    opt = arg_parse()
    print(opt)
    # CUDA = torch.cuda.is_available()
    device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    # load network
    print('Loading networks......')
    model = DarkNet(opt.config_path)
    model.load_weights(opt.weights_path)
    print('Network loaded successfully')
    # load class
    classes = load_classes(opt.class_path)
    model.to(device)
    model.eval()
    if not os.path.exists(opt.det_folder):
        os.makedirs(opt.det_folder)
    dataloader = DataLoader(ImageFolder(opt.image_folder, opt.img_size),
                            batch_size=opt.batch_size, shuffle=False, num_workers=opt.n_cpu)
    imgs = []            # image paths, in dataloader order
    img_detections = []  # one detections tensor (or None) per image
    print('\n Detecting......')
    prev_time = time.time()
    for batch_i, (img_paths, input_imgs) in enumerate(dataloader):
        input_imgs = input_imgs.to(device)
        with torch.no_grad():
            detections = model(input_imgs)
            detections = non_max_suppression(detections, 80, opt.conf_th, opt.nms_th)
        # detections.shape:(bs,7), x1,y1,x2,y2,conf,max_class_conf,max_class_ind
        current_time = time.time()
        inference_time = datetime.timedelta(seconds=current_time-prev_time)
        prev_time = current_time
        print('\t+ Batch %d, Inference Time: %s' % (batch_i, inference_time))
        imgs.extend(img_paths)
        img_detections.extend(detections)
    print('image_paths:', imgs)
    # # Bounding-box colors
    # cmap = plt.get_cmap('tab20b')
    # colors = [cmap(i) for i in np.linspace(0, 1, 20)]
    print('\n Saving images......')
    for img_i, (path, detections) in enumerate(zip(imgs, img_detections)):
        print("(%d) Image: '%s'" % (img_i, path))
        img = cv2.imread(path)
        # img = np.array(Image.open(path))
        # plt.figure()
        # fig, ax = plt.subplots()
        # ax.imshow(img)
        # h > w , x pading
        # Letterbox padding the loader added inside the img_size x img_size frame.
        pad_x = max(img.shape[0] - img.shape[1], 0) * (opt.img_size / max(img.shape))
        pad_y = max(img.shape[1] - img.shape[0], 0) * (opt.img_size / max(img.shape))
        # think: resize then pad
        unpad_h = opt.img_size - pad_y
        unpad_w = opt.img_size - pad_x
        if detections is not None:
            unique_labels = detections[:, -1].cpu().unique()
            n_cls_pred = len(unique_labels)
            # Fixed palette file; one randomly sampled colour per predicted class.
            colors = pkl.load(open("utils/colors", "rb"))
            bbox_colors = random.sample(colors, n_cls_pred)
            for x1, y1, x2, y2, conf, cls_conf, cls_ind in detections:
                print('\t+ Label: %s, Conf: %.5f' %(classes[int(cls_ind)], cls_conf))
                # Map box size/position from the padded network frame back to
                # the original image resolution.
                box_h = ((y2 - y1) / unpad_h) * img.shape[0]
                box_w = ((x2 - x1) / unpad_w) * img.shape[1]
                y1 = ((y1 - pad_y // 2) / unpad_h) * img.shape[0]
                x1 = ((x1 - pad_x // 2) / unpad_w) * img.shape[1]
                y2 = y1 + box_h
                x2 = x1 + box_w
                color = bbox_colors[int(np.where(unique_labels == int(cls_ind))[0])]
                # NOTE(review): x1/y1/x2/y2 are floats here; recent OpenCV
                # versions require integer pixel coordinates for rectangle/
                # putText — confirm the cv2 version in use accepts floats.
                cv2.rectangle(img, (x1, y1), (x2, y2), color, 2)
                label = classes[int(cls_ind)]
                t_size = cv2.getTextSize(label, cv2.FONT_HERSHEY_PLAIN, 1, 1)[0]
                c2 = x1 + t_size[0] + 3, y1 + t_size[1] + 4
                cv2.rectangle(img, (x1, y1), c2, color, -1)
                cv2.putText(img, label, (x1, y1+t_size[1]+4), cv2.FONT_HERSHEY_PLAIN, 1, [225,255,255], 1)
        cv2.imwrite(opt.det_folder + '//%d.png' % img_i, img)
        '''
        bbox = patches.Rectangle((x1, y1), box_w, box_h, linewidth=2,
        edgecolor=color,facecolor='none')
        # add box
        ax.add_patch(bbox)
        # add label
        plt.text(x1, y1, s=classes[int(cls_ind)], color='white', verticalalignment='top',
        bbox={'color':color, 'pad':0})
        plt.axis('off')
        plt.gca().xaxis.set_major_locator(NullLocator())
        plt.gca().yaxis.set_major_locator(NullLocator())
        plt.savefig(opt.det_folder + '//%d.png' % img_i, bbox_inches='tight', pad_inches=0.0)
        plt.close()
        '''
# Run detection only when executed as a script, not when imported.
if __name__ == '__main__':
    main()
import matplotlib.pyplot as plt
import fact.plotting
import numpy as np
# Interactive mode so the figure keeps updating while being resized.
plt.ion()
# Simulated per-pixel values for the 1440-pixel FACT camera.
data = np.random.normal(loc=5, scale=1, size=1440)
bad_pixels = [863, 868, 297, 927, 80, 873, 1093, 1094, 527, 528, 721, 722]
f, axes = plt.subplots(2,2)
axes[0,0].plot(data, ".")
axes[0,0].set_title("Data vs pixel Id")
axes[0,1].hist(data, bins=np.linspace(0,10,100))
axes[0,1].set_title("distribution")
# factcamera is added to Axes by `import fact.plotting` above; pixelset
# entries are drawn in the given highlight colour.
axes[1,0].factcamera(data, pixelset=bad_pixels, pixelsetcolour="r")
axes[1,0].set_title("bad_pixels highlighted")
axes[1,1].factcamera(data, pixelset=data>6)
axes[1,1].set_title("data > 6 highlighted")
plt.suptitle("Maximize me and see me adjust the pixel size")
|
#!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# -*- coding: utf-8 -*-
"""Runtime functions to use in docker / testing"""
__author__ = 'Pedro Larroy'
__version__ = '0.1'
import os
import sys
import subprocess
import argparse
import logging
from subprocess import call, check_call, Popen, DEVNULL, PIPE
import time
import sys
import types
import glob
import vmcontrol
from vmcontrol import qemu_ssh, qemu_provision, qemu_rsync_to_host, VM
def activate_this(base):
    """Make *base* look like the active virtualenv for this interpreter.

    Mirrors virtualenv's ``activate_this.py``: records the original prefix
    in ``sys.real_prefix``, points ``sys.prefix`` at *base*, and moves any
    path entries added since the snapshot to the front of ``sys.path``.

    :param base: root directory of the virtualenv to activate.
    """
    # Removed the unused `import site` the original carried.
    import os
    import sys
    # Per-platform location of the env's site-packages directory.
    if sys.platform == 'win32':
        site_packages = os.path.join(base, 'Lib', 'site-packages')
    else:
        site_packages = os.path.join(base, 'lib', 'python%s' % sys.version[:3], 'site-packages')
    # NOTE(review): `site_packages` is computed but never registered via
    # site.addsitedir(), unlike upstream activate_this.py — confirm whether
    # that call was dropped intentionally.
    prev_sys_path = list(sys.path)
    sys.real_prefix = sys.prefix
    sys.prefix = base
    # Move the added items to the front of the path:
    new_sys_path = []
    for item in list(sys.path):
        if item not in prev_sys_path:
            new_sys_path.append(item)
            sys.path.remove(item)
    sys.path[:0] = new_sys_path
def run_ut_py3_qemu():
    """Run unit tests in the emulator and copy the results back to the host through the mounted
    volume in /mxnet"""
    from vmcontrol import VM
    with VM() as vm:
        # Provision the VM over SSH, run the tests inside it, then rsync
        # the junit XML results back out.
        qemu_provision(vm.ssh_port)
        logging.info("execute tests")
        qemu_ssh(vm.ssh_port, "./runtime_functions.py", "run_ut_python3_qemu_internal")
        qemu_rsync_to_host(vm.ssh_port, "*.xml", "mxnet")
        logging.info("copied to host")
        logging.info("tests finished, vm shutdown.")
        # NOTE(review): explicit shutdown inside the `with` block — confirm
        # this does not duplicate cleanup done by VM.__exit__.
        vm.shutdown()
def run_ut_python3_qemu_internal():
    """this runs inside the vm"""
    # Install the wheel that was rsynced into the VM, then the test deps,
    # and finally run the (currently engine-only) unit tests with junit XML
    # output so the host can collect it.
    pkg = glob.glob('mxnet_dist/*.whl')[0]
    logging.info("=== NOW Running inside QEMU ===")
    logging.info("PIP Installing %s", pkg)
    check_call(['sudo', 'pip3', 'install', pkg])
    logging.info("PIP Installing mxnet/test_requirements.txt")
    check_call(['sudo', 'pip3', 'install', '-r', 'mxnet/test_requirements.txt'])
    logging.info("Running tests in mxnet/tests/python/unittest/")
    check_call(['nosetests', '--with-timer', '--with-xunit', '--xunit-file', 'nosetests_unittest.xml', '--verbose', 'mxnet/tests/python/unittest/test_engine.py'])
# Example to run a single unit test:
# check_call(['nosetests', '--with-timer', '--with-xunit', '--xunit-file', 'nosetests_unittest.xml', '--verbose', 'mxnet/tests/python/unittest/test_ndarray.py:test_ndarray_fluent'])
def run_qemu_interactive():
    """Boot the QEMU VM in interactive mode and block until it exits."""
    vm = VM(interactive=True)
    vm.detach()
    vm.start()
    vm.wait()
    logging.info("QEMU finished")
################################
def parsed_args():
    """Parse the CLI: the function name to run followed by its arguments.

    Returns a ``(namespace, parser)`` pair so callers can also print help.
    """
    parser = argparse.ArgumentParser(
        description="""python runtime functions""", epilog="")
    parser.add_argument(
        'command', nargs='*',
        help="Name of the function to run with arguments")
    return parser.parse_args(), parser
def script_name() -> str:
    """Return the basename of the currently running script."""
    return os.path.basename(sys.argv[0])
def chdir_to_script_directory():
    """Change the CWD to this script's own directory.

    The commands in the dockerfiles assume they run from the script's
    directory, but the script can be invoked from any path.
    """
    base = os.path.split(os.path.realpath(__file__))[0]
    os.chdir(base)
def main():
    """Dispatch: call the function named on the command line, or print help.

    :return: 0 when a command ran, 1 when no command was given.
    """
    logging.getLogger().setLevel(logging.INFO)
    logging.basicConfig(format='{}: %(asctime)-15s %(message)s'.format(script_name()))
    chdir_to_script_directory()
    # Run function with name passed as argument
    (args, parser) = parsed_args()
    logging.info("%s", args.command)
    if args.command:
        fargs = args.command[1:]
        # Look the function up in this module's globals and call it with the
        # remaining CLI tokens as positional (string) arguments.
        globals()[args.command[0]](*fargs)
        return 0
    else:
        parser.print_help()
        # List every module-level function as an available command.
        fnames = [x for x in globals() if type(globals()[x]) is types.FunctionType]
        print('\nAvailable functions: {}'.format(' '.join(fnames)))
        return 1
# Propagate main()'s return code as the process exit status.
if __name__ == '__main__':
    sys.exit(main())
|
# Print the numbers 1 through 100, one per line.
for number in range(1, 101):
    print(number)
# For 1..100: print "fizz" on multiples of 3, otherwise the number itself.
for number in range(1, 101):
    print("fizz" if number % 3 == 0 else number)
# FizzBuzz over 1..101. The original tested %5 before the combined
# %3-and-%5 case, so the "fizzbuzz" branch was unreachable and multiples
# of 3 printed both "fizz" and the number; check divisibility by 15 first.
for i in range(101):
    count = i + 1
    if count % 15 == 0:
        print("fizzbuzz")
    elif count % 3 == 0:
        print("fizz")
    elif count % 5 == 0:
        print("buzz")
    else:
        print(count)
|
#!/usr/bin/python3
from sys import argv
def main():
    """Print the sum of all integer command-line arguments."""
    print(sum(int(arg) for arg in argv[1:]))
if __name__ == "__main__":
main()
|
"""
python2to3.py
By Paul Malmsten, 2011
Helper functions for handling Python 2 and Python 3 datatype shenanigans.
"""
import sys
def byteToInt(byte):
    """
    byte -> int
    Return the integer value of *byte*, which may already be an int,
    a length-1 text string, or a length-1 bytes object.
    """
    if hasattr(byte, 'bit_length'):
        # Already an int (Python 3 bytes indexing yields ints).
        return byte
    if hasattr(byte, 'encode'):
        # A text string: take the code point of its single character.
        return ord(byte)
    # A bytes-like slice: index to get the int value.
    return byte[0]
def intToByte(i):
    """
    int -> byte
    Convert *i* to a single byte: ``chr`` on Python 2 (where ``bytes``
    is ``str`` and has ``encode``), ``bytes([i])`` on Python 3.
    """
    if hasattr(bytes(), 'encode'):
        # Python 2: bytes is an alias of str.
        return chr(i)
    return bytes([i])
def stringToBytes(s):
    """
    string -> bytes
    Encode *s* as ASCII bytes on Python 3; return it unchanged on
    Python 2, where str is already a byte string.
    """
    if sys.version_info >= (3, 0):
        return s.encode('ascii')
    return s
import rospy
import numpy
import random
from gym import spaces
from openai_ros.robot_envs import turtlebot2_joy_env
from gym.envs.registration import register
from geometry_msgs.msg import Point
from openai_ros.task_envs.task_commons import LoadYamlFileParamsTest
from openai_ros.openai_ros_common import ROSLauncher
from scipy.spatial.transform import Rotation as R
import os
from gazebo_msgs.srv import SpawnModel, DeleteModel
from geometry_msgs.msg import Pose
import tensorflow as tf
from gazebo_msgs.msg import ModelState
from gazebo_msgs.srv import SetModelState
from gazebo_msgs.srv import GetModelState
import time
'''
with Manually preprocessed U and lidar readings
'''
# NOTE(review): `global` at module level is a no-op — this statement has
# no effect and never defines `data_collect`.
global data_collect
class TurtleBot2HumanModelEnv(turtlebot2_joy_env.TurtleBot2Env):
def __init__(self):
    """
    This Task Env is designed for having the TurtleBot2 in some kind of maze.
    It will learn how to move around the maze without crashing.
    """
    # This is the path where the simulation files, the Task and the Robot gits will be downloaded if not there
    ros_ws_abspath = rospy.get_param("/turtlebot2/ros_ws_abspath", None)
    assert ros_ws_abspath is not None, "You forgot to set ros_ws_abspath in your yaml file of your main RL script. Set ros_ws_abspath: \'YOUR/SIM_WS/PATH\'"
    assert os.path.exists(ros_ws_abspath), "The Simulation ROS Workspace path " + ros_ws_abspath + \
                                           " DOESNT exist, execute: mkdir -p " + ros_ws_abspath + \
                                           "/src;cd " + ros_ws_abspath + ";catkin_make"
    # Launch the Gazebo world used by this task.
    ROSLauncher(rospackage_name="turtlebot_gazebo",
                launch_file_name="start_goal_world.launch",
                ros_ws_abspath=ros_ws_abspath)
    # Load Params from the desired Yaml file
    LoadYamlFileParamsTest(rospackage_name="openai_ros",
                           rel_path_from_package_to_file="src/openai_ros/task_envs/turtlebot2/config",
                           yaml_file_name="turtlebot2_goal_continuous_humanmodel.yaml")
    # Here we will add any init functions prior to starting the MyRobotEnv
    super(TurtleBot2HumanModelEnv, self).__init__(ros_ws_abspath)
    # Only variable needed to be set here
    high = numpy.array([1,1,1,1])
    low = numpy.array([-1,-1,-1,-1])
    self.action_space = spaces.Box(low, high)
    # We set the reward range, which is not compulsory but here we do it.
    self.reward_range = (-numpy.inf, numpy.inf)
    self.success = True
    #number_observations = rospy.get_param('/turtlebot2/n_observations')
    """
    We set the Observation space for the 6 observations
    cube_observations = [
        round(current_disk_roll_vel, 0),
        round(y_distance, 1),
        round(roll, 1),
        round(pitch, 1),
        round(y_linear_speed,1),
        round(yaw, 1),
    ]
    """
    # Actions and Observations
    self.init_linear_forward_speed = rospy.get_param('/turtlebot2/init_linear_forward_speed')
    self.init_linear_turn_speed = rospy.get_param('/turtlebot2/init_linear_turn_speed')
    # Number of (downsampled) lidar rays used in the observation vector.
    self.new_ranges = 180
    self.min_range = rospy.get_param('/turtlebot2/min_range')
    self.max_laser_value = rospy.get_param('/turtlebot2/max_laser_value')
    self.min_laser_value = rospy.get_param('/turtlebot2/min_laser_value')
    # Get Desired Point to Get
    self.desired_point = Point()
    self.desired_point.x = rospy.get_param("/turtlebot2/desired_pose/x")
    self.desired_point.y = rospy.get_param("/turtlebot2/desired_pose/y")
    self.desired_point.z = rospy.get_param("/turtlebot2/desired_pose/z")
    # Template ModelState used to reposition the robot model in Gazebo.
    self.state_msg = ModelState()
    self.state_msg.model_name = 'mobile_base'
    self.state_msg.pose.position.x = 0
    self.state_msg.pose.position.y = 0
    self.state_msg.pose.position.z = 0
    self.state_msg.pose.orientation.x = 0
    self.state_msg.pose.orientation.y = 0
    self.state_msg.pose.orientation.z = 0
    self.state_msg.pose.orientation.w = 0
    # We create two arrays based on the binary values that will be assigned
    # In the discretization method.
    laser_scan = self.get_laser_scan()
    rospy.logdebug("laser_scan len===>" + str(len(laser_scan.ranges)))
    #high = numpy.array([0.5,1,1,1,1,1,6,3.14])#,numpy.array([12,6,3.14,1,3.14,0.5,1]),6*numpy.ones([self.new_ranges]),numpy.array([12,6,3.14,1,3.14,0.5,1]),6*numpy.ones([self.new_ranges])))
    # Observation = 4 dense state values followed by new_ranges lidar rays.
    high = numpy.hstack((numpy.array([0.5,1,0.5,1]),6*numpy.ones([self.new_ranges])))
    #high = numpy.hstack((numpy.array([1,1]),numpy.ones([self.new_ranges]),numpy.array([1,1]),numpy.ones([self.new_ranges]),numpy.array([1,1]),numpy.ones([self.new_ranges])))
    #low = numpy.array([-0.5,-1,-1,-1,-1,-1, 0,-3.14])#,numpy.array([-1,-1*6,-1*3.14,-1,-3.14,-0.5,-1]),numpy.zeros([self.new_ranges]),numpy.array([-1,-1*6,-1*3.14,-1,-3.14,-0.5,-1]),numpy.zeros([self.new_ranges])))
    low = numpy.hstack((numpy.array([-0.5,-1,-0.5,-1]),numpy.zeros([self.new_ranges])))
    #low = numpy.hstack((numpy.array([-1,-1]),numpy.zeros([self.new_ranges]),numpy.array([1,1]),numpy.ones([self.new_ranges]),numpy.array([1,1]),numpy.ones([self.new_ranges])))
    # We only use two integers
    self.observation_space = spaces.Box(low, high)
    rospy.logdebug("ACTION SPACES TYPE===>"+str(self.action_space))
    rospy.logdebug("OBSERVATION SPACES TYPE===>"+str(self.observation_space))
    # Rewards
    self.forwards_reward = rospy.get_param("/turtlebot2/forwards_reward")
    self.turn_reward = rospy.get_param("/turtlebot2/turn_reward")
    self.end_episode_points = rospy.get_param("/turtlebot2/end_episode_points")
    self.cumulated_steps = 0.0
    ############################## goal ##############################################
    self.goal_position = Pose()
    # NOTE(review): this file handle is never closed — consider reading the
    # SDF with a context manager.
    self.f = open('/home/i2rlab/shahil_files/shahil_RL_ws_new/src/turtlebot/turtlebot_gazebo/worlds/goal/model.sdf','r')
    self.sdff = self.f.read()
    self.n_d = 0
    self.goal_space()
    #self.xy = numpy.array([[8.1,-1],[8.2,-5],[9,4.5],[2,1],[0.5,-1],[6.5,5],[8.2,-5],[0,1],[7.3,-2.5],[0.5,-1.5],[-8.2,5],[6,-8.2],[-7,-7]])
    #self.xy = numpy.array([-8.1,-7.0])
    ############################## Obstacle ##########################################
    # Precomputed unit-circle angles used to project lidar rays.
    self.angle=numpy.linspace(-179,179,180)/180*numpy.pi
    self.cos = numpy.cos(self.angle)
    self.sin = numpy.sin(self.angle)
    ############################## Human Model ######################################
    # TF1-style session + placeholders for the pre-trained human model.
    config = tf.ConfigProto(gpu_options=tf.GPUOptions(allow_growth=True))
    self.sess = tf.Session(config=config)
    self.S1 = tf.placeholder(tf.float32, [None, 5], 'S1')
    self.S2 = tf.placeholder(tf.float32, [None, 198,1], 'S2')
    self.keep_prob = tf.placeholder(tf.float32)
    self.a_predict = self.build_c(self.S1,self.S2,self.keep_prob)
    self.loader()
    # NOTE(review): goal_space() was already called above — confirm this
    # second call is intentional.
    self.goal_space()
    #self.joy = self.get_joy()
def loader(self):
    """Restore the pre-trained human-model weights from the latest
    checkpoint in the 'Human_model_3' directory into self.sess."""
    loader= tf.train.Saver()
    loader.restore(self.sess,tf.train.latest_checkpoint('Human_model_3'))
def build_c(self,S1,S2,keep_prob):
    """Build the human-model network graph.

    S1: (None, 5) dense state input; S2: (None, 198, 1) lidar-style trace
    fed through a stack of 1-D convolutions; keep_prob: dropout keep rate.
    Returns a tanh action tensor elementwise-scaled by [0.5, 1], i.e.
    bounded to [-0.5, 0.5] x [-1, 1].
    """
    orth_init = tf.initializers.orthogonal(gain=numpy.sqrt(2))
    # Convolutional trunk over the lidar trace.
    net1 = tf.compat.v1.layers.conv1d(S2, filters=32, kernel_size=19, strides=1, padding='valid', activation=tf.nn.relu, trainable=True,kernel_initializer=orth_init,name='net1')
    net2 = tf.compat.v1.layers.conv1d(net1, filters=32, kernel_size=8, strides=4, padding='valid', activation=tf.nn.relu, trainable=True,kernel_initializer=orth_init,name='net2')
    net3 = tf.compat.v1.layers.conv1d(net2, filters=64, kernel_size=4, strides=4, padding='valid', activation=tf.nn.relu, trainable=True,kernel_initializer=orth_init,name='net3')
    net4 = tf.compat.v1.layers.conv1d(net3, filters=64, kernel_size=3, strides=2, padding='valid', activation=tf.nn.relu, trainable=True,kernel_initializer=orth_init,name='net4')
    # Flatten the (5, 64) conv output before the dense layers.
    net4_flat = tf.reshape(net4,[-1,5*64])
    net5 = tf.layers.dense(net4_flat, 512, activation=tf.nn.relu, trainable=True,kernel_initializer=orth_init,name='net5')
    net6 = tf.layers.dense(net5, 64, trainable=True, name='net6')
    net7 = tf.contrib.layers.layer_norm(net6, center=True, scale=True)
    net8 = tf.nn.relu(net7)
    # Fuse the dense state with the conv features, then an MLP head with
    # dropout and L2 regularisation.
    net9_input = tf.concat([S1, net8], 1)
    net9 = tf.layers.dense(net9_input, 256, activation=tf.nn.relu, name='l1', trainable=True, kernel_regularizer=tf.contrib.layers.l2_regularizer(0.003))
    drop_out9 = tf.nn.dropout(net9, keep_prob)
    net10 = tf.layers.dense(drop_out9,256, activation=tf.nn.relu,name='l2',trainable=True, kernel_regularizer=tf.contrib.layers.l2_regularizer(0.003))
    drop_out10 = tf.nn.dropout(net10, keep_prob)
    net11 = tf.layers.dense(drop_out10,128, activation=tf.nn.relu,name='l3',trainable=True, kernel_regularizer=tf.contrib.layers.l2_regularizer(0.003))
    drop_out11 = tf.nn.dropout(net11, keep_prob)
    a = tf.layers.dense(drop_out11,2,trainable=True,activation=tf.tanh)
    return tf.multiply(a, [0.5,1], name='scaled_a')
def choose_action(self,s):
    """Run the human model on the state batch *s* and return the first action.

    The first 5 columns of *s* feed the dense branch (S1); the remaining
    198 columns are reshaped to (batch, 198, 1) for the conv branch (S2).
    Dropout is disabled at inference time (keep_prob=1).
    """
    bs1 = s[:, :5]
    #bs2 = numpy.hstack((s[:, 5:],s[:, -18:])).reshape([-1,198,1])
    bs2 = s[:,5:].reshape([-1,198,1])
    a = self.sess.run(self.a_predict, {self.S1: bs1,self.S2: bs2,self.keep_prob:1})
    print("original a:",a)
    #return a
    return a[0]
def _set_init_pose(self):
    """Sets the Robot in its init pose
    """
    # Command the configured initial linear/angular speeds to the base.
    self.move_base( self.init_linear_forward_speed,
                    self.init_linear_turn_speed,
                    epsilon=0.05,
                    update_rate=10)
    return True
def _init_env_variables(self):
    """
    Inits variables needed to be initialised each time we reset at the start
    of an episode.
    :return:
    """
    # For Info Purposes
    self.cumulated_reward = 0.0
    # Set to false Done, because its calculated asyncronously
    self._episode_done = False
    self._outofrange = False
    # Best-effort delete of the previous goal model: a bare `except:` also
    # swallows SystemExit/KeyboardInterrupt, so catch Exception instead.
    try:
        self.deleteModel()
    except Exception:
        pass
    self.respawnModel()
    self.moveto()
    odometry = self.get_odom()
    self.previous_distance_from_des_point = self.get_distance_from_desired_point(odometry.pose.pose.position)
    # Count episodes started so far.
    self.n_d+=1
def _set_action(self, ratio):
    """Apply one control step to the base.

    Currently the joystick command cached by the observation callback
    (self.joy_linear / self.joy_angular) is forwarded directly; the
    blending of *ratio* with avoidance actions is commented out below.
    :param ratio: blending weights from the policy (presently unused —
        TODO confirm whether blending should be re-enabled).
    """
    ratio=list(ratio)
    rospy.logdebug("Start Set Action ==>"+str(ratio))
    joy = self.get_joy()
    #print("ratio:",ratio)
    #if joy.linear.x==0 and joy.angular.z==0: #Condition to get input from human agent
    linear_speed=self.joy_linear
    angular_speed=self.joy_angular
    #self.angular_old = angular_speed
    #else:
    #linear_speed = joy.linear.x
    #angular_speed = joy.angular.z
    '''elif abs(self.joy_linear)<0.2 or abs(self.theta_bt_uh_ob)> 1.3 or self.e_norm>1.5:
    linear_speed = joy.linear.x # human input
    angular_speed = joy.angular.z
    #angular_speed = 0.6*self.theta_dot+0.4*angular_speed
    #self.angular_old = angular_speed
    else:
    #linear_speed = ratio[0]*self.joy_linear+ratio[1]*self.a_ao[0,0]+ratio[2]*self.a_c[0,0]+ratio[3]*self.a_cc[0,0]
    #angular_speed = ratio[0]*self.joy_angular+ratio[1]*self.a_ao[1,0]+ratio[2]*self.a_c[1,0]+ratio[3]*self.a_cc[1,0]
    linear_speed=self.joy_linear
    angular_speed=self.joy_angular'''
    #angular_speed = 0.6*self.theta_dot+0.4*angular_speed
    #self.angular_old = angular_speed
    # We tell TurtleBot2 the linear and angular speed to set to execute
    self.move_base(linear_speed, angular_speed, epsilon=0.05, update_rate=10)
    # Remember the command actually sent for later reward/logging use.
    self.u_b = numpy.array([linear_speed,angular_speed])
    rospy.logdebug("END Set Action ==>"+str(ratio))
    def _get_obs(self):
        """
        Here we define what sensor data defines our robots observations:
        pose, velocities, the current human/agent command, the discretized
        laser scan, and the obstacle-avoidance summary (e_norm, angle).
        Also records human demonstrations to a .dat file when the joystick
        is active.
        :return: list observation
        """
        #self.obstaclemoveto()
        rospy.logdebug("Start Get Observation ==>")
        # We get the laser scan data
        laser_scan = self.get_laser_scan()
        discretized_laser_scan = self.discretize_observation( laser_scan,
                                                                self.new_ranges
                                                                )
        # We get the odometry so that SumitXL knows where it is.
        odometry = self.get_odom()
        x_position = odometry.pose.pose.position.x
        #print('x_position',x_position)
        y_position = odometry.pose.pose.position.y
        base_orientation_quat = odometry.pose.pose.orientation
        base_roll, base_pitch, base_yaw = self.get_orientation_euler(base_orientation_quat)
        v = odometry.twist.twist.linear.x
        self.theta_dot = odometry.twist.twist.angular.z
        ###################human input ##############################
        # When the joystick is being pushed, the human command wins...
        joy = self.get_joy()
        if joy.linear.x > 0 or joy.angular.z > 0:
            self.joy_linear = joy.linear.x
            self.joy_angular = joy.angular.z
        ###################human agent ##################################
        # ...otherwise the learned "human agent" network produces the command
        # from the current state (pose deltas + laser features).
        else:
            print('I am here')
            xdiff = self.desired_point.x - x_position
            ydiff = self.desired_point.y - y_position
            observations = [round(base_yaw, 2),round(v, 2),round(self.theta_dot, 2),round(xdiff, 2),round(ydiff, 2)]#+ discretized_laser_scan
            observations = numpy.append(observations,numpy.array(discretized_laser_scan))
            # First 18 laser bins are appended again (network input layout).
            observations = numpy.append(observations,numpy.array(discretized_laser_scan[:18]))
            s = numpy.array([observations])
            a = self.choose_action(s)
            print('a[0]: ',a[0])
            self.joy_linear = a[0]
            self.joy_angular = a[1]
        ###################obstacle avoidance###########################################
        # World-frame command vector u_gtg from the unicycle command, used by
        # obstacle_avoidance() to measure the command/obstacle angle.
        tran = numpy.array([[numpy.cos(base_yaw),-numpy.sin(base_yaw)],[numpy.sin(base_yaw),numpy.cos(base_yaw)]]).dot(numpy.array([[1,0],[0,0.1]]))
        self.u_gtg = tran.dot(numpy.array([[self.joy_linear],[self.joy_angular]]))
        self.obstacle_avoidance()
        # We round to only two decimals to avoid very big Observation space
        ################################################ Data Collection for Human agent Training ########################################################################
        #is_data_needed = numpy.array([base_yaw,v,self.theta_dot,self.joy_linear,self.joy_angular,self.desired_point.x-(x_position),self.desired_point.y-(y_position)])
        # Append one demonstration row (state + laser + joystick command) when
        # the human is driving.
        if joy.linear.x > 0 or joy.angular.z > 0:
            del_x = (self.desired_point.x-(x_position))
            del_y = (self.desired_point.y-(y_position))
            laser_scan_recorded = numpy.append(discretized_laser_scan,discretized_laser_scan[0])
            #data_needed = numpy.array(["%10.3e"%(base_yaw),"%10.3e"%(v),"%10.3e"%(self.theta_dot),"%10.3e"%(del_x),"%10.3e"%(del_y)])#.encode() # save the needed data in form of string
            data_needed = numpy.array([(base_yaw),(v),(self.theta_dot),(del_x),(del_y)])
            data_needed = numpy.concatenate((data_needed,laser_scan_recorded),axis=0)
            #data_needed = numpy.append(data_needed,"%10.3e"%(self.joy_linear),"%10.3e"%(self.joy_angular))
            data_needed = numpy.append(data_needed,(joy.linear.x))
            data_needed = numpy.append(data_needed,(joy.angular.z))
            #print( data_needed)
            with open("scaled_house_data_test.dat", "a", newline='') as f:
                #f.write(data_needed+b"\n") # write the data to the file
                f.write(str(data_needed).replace('\n','').replace('[','').replace(']','')+'\n')
        ################################################ Change Observations ########################################################################################
        observations = [round(x_position, 2),round(y_position, 2),round(base_yaw, 2),round(v, 2),round(self.theta_dot, 2),round(self.joy_linear, 2),round(self.joy_angular, 2)]+discretized_laser_scan+[self.e_norm, round(self.theta_bt_uh_ob,2)]
        #print("Observations==>"+observations)
        rospy.logdebug("Observations==>"+str(observations))
        rospy.logdebug("END Get Observation ==>")
        return observations
    def obstacle_avoidance(self):
        """Compute the obstacle-avoidance fields from the closest laser hit.

        Sets: self.e_norm (distance to closest obstacle), self.delta_angle
        (bearing of that hit relative to the robot), self.a_ao / self.a_c /
        self.a_cc (avoid, clockwise-follow, counter-clockwise-follow actions
        in unicycle coordinates), and self.theta_bt_uh_ob (angle between the
        current command u_gtg and the obstacle direction). Also publishes a
        shared-control flag when an obstacle is within 1.5 m.
        """
        lidar = self.get_laser_scan().ranges
        #lidar = self.discretize_observation( lidar,self.new_ranges)
        odometry = self.get_odom()
        base_orientation_quat = odometry.pose.pose.orientation
        base_roll, base_pitch, yaw = self.get_orientation_euler(base_orientation_quat)
        #yaw = round(yaw,2)
        # Index of the closest beam; the two branches map the beam index to a
        # bearing in radians (2 degrees per beam, split around the front).
        n = numpy.argmin(lidar)
        if n<=89:
            self.delta_angle = ((n-89)*2-1)/180*numpy.pi
        else:
            self.delta_angle = ((n-90)*2+1)/180*numpy.pi
        # World-frame direction of the closest obstacle.
        orientation = yaw + self.delta_angle
        self.e_norm = lidar[n]
        if self.e_norm<1.5:
            share = 1
        else:
            share = 0
        self.share_talker.publish(share)
        # Repulsive field: points away from the obstacle, magnitude grows as
        # the obstacle gets closer.
        e = numpy.array([[-self.e_norm*numpy.cos(orientation)],[-self.e_norm*numpy.sin(orientation)]])
        u_ao = 0.5/self.e_norm*(1/(self.e_norm**2+0.1))*e
        # 90-degree rotations of u_ao: wall-follow clockwise / counter-clockwise.
        u_c = 0.5*numpy.array([[0,1],[-1,0]]).dot(u_ao)
        u_cc = 0.5*numpy.array([[0,-1],[1,0]]).dot(u_ao)
        # Map world-frame velocities back to unicycle (v, omega); 0.115 is
        # presumably the look-ahead/offset length -- TODO confirm.
        self.a_ao = numpy.array([[1,0],[0,1/0.115]]).dot(numpy.array([[numpy.cos(-yaw),-numpy.sin(-yaw)],[numpy.sin(-yaw),numpy.cos(-yaw)]])).dot(u_ao)
        self.a_c = numpy.array([[1,0],[0,1/0.115]]).dot(numpy.array([[numpy.cos(-yaw),-numpy.sin(-yaw)],[numpy.sin(-yaw),numpy.cos(-yaw)]])).dot(u_c)
        self.a_cc = numpy.array([[1,0],[0,1/0.115]]).dot(numpy.array([[numpy.cos(-yaw),-numpy.sin(-yaw)],[numpy.sin(-yaw),numpy.cos(-yaw)]])).dot(u_cc)
        #print("u_ao:",u_ao)
        self.limitu()
        # Wrap the command/obstacle angle into [0, pi].
        self.theta_bt_uh_ob = abs(numpy.arctan2(self.u_gtg[1,0],self.u_gtg[0,0])-orientation)
        if self.theta_bt_uh_ob > numpy.pi:
            self.theta_bt_uh_ob = 2 * numpy.pi - self.theta_bt_uh_ob
def limitu(self,):
if abs(self.a_ao[0,0])>0.5 or abs(self.a_ao[1,0])>0.3:
self.a_ao =self.a_ao/max(abs(self.a_ao/[[0.5],[0.3]]))
if abs(self.a_c[0,0]>0.5) or abs(self.a_c[1,0])>0.4:
self.a_c =self.a_c/max(abs(self.a_c/[[0.5],[0.4]]))
if abs(self.a_cc[0,0])>0.5 or abs(self.a_cc[1,0])>0.4:
self.a_cc =self.a_cc/max(abs(self.a_cc/[[0.5],[0.4]]))
#print(self.a_ao,self.a_c,self.a_cc)
    def _is_done(self, observations):
        """Decide whether the episode is over.

        Done when a crash was flagged asynchronously, when the robot leaves
        the +/-10 m learning area (also sets self._outofrange), or when it
        reaches the goal. observations[0:2] are the x/y position.
        """
        if self._episode_done:
            #rospy.logerr("TurtleBot2 is Too Close to wall==>")
            pass
        else:
            #rospy.logerr("TurtleBot2 didnt crash at least ==>")
            current_position = Point()
            current_position.x = observations[0]
            current_position.y = observations[1]
            #current_v = observations[3]
            #current_thetadot = observations[4]
            current_position.z = 0.0
            # Bounds of the learning space.
            MAX_X = 10
            MIN_X = -10
            MAX_Y = 10
            MIN_Y = -10
            # We see if we are outside the Learning Space
            if current_position.x <= MAX_X and current_position.x > MIN_X:
                if current_position.y <= MAX_Y and current_position.y > MIN_Y:
                    rospy.logdebug("TurtleBot Position is OK ==>["+str(current_position.x)+","+str(current_position.y)+"]")
                    # We see if it got to the desired point
                    if self.is_in_desired_position(current_position):
                        #if current_v == 0 and current_thetadot == 0:
                        self._episode_done = True
                else:
                    rospy.logerr("TurtleBot to Far in Y Pos ==>"+str(current_position.x))
                    self._episode_done = True
                    self._outofrange = True
            else:
                rospy.logerr("TurtleBot to Far in X Pos ==>"+str(current_position.x))
                self._episode_done = True
                self._outofrange = True
        return self._episode_done
    def _compute_reward(self, observations, done):
        """Compute the step reward.

        While not done: progress toward the goal is rewarded (scaled by
        heading alignment), otherwise -5; an extra obstacle penalty applies
        within 0.8 m. Terminal: +1000 at the goal, -1000 out of range,
        -500 otherwise (crash). Also updates self.success, the progress
        baseline, and the cumulated reward/step counters.
        """
        current_position = Point()
        current_position.x = observations[0]
        current_position.y = observations[1]
        current_position.z = 0.0
        # Last two observation entries: closest-obstacle distance and the
        # command/obstacle angle (only the distance is used here).
        min_distance = observations[-2]
        current_yaw = observations[2]
        ################                  ##################################
        xdiff = self.desired_point.x - observations[0]
        ydiff = self.desired_point.y - observations[1]
        distance_from_des_point = self.get_distance_from_desired_point(current_position)
        #print("distance to des", + distance_from_des_point)
        # Negative difference == the robot got closer to the goal this step.
        distance_difference = distance_from_des_point - self.previous_distance_from_des_point
        # d_theta: absolute heading error toward the goal, wrapped to [0, pi].
        theta = numpy.arctan2(ydiff,xdiff)
        d_theta = abs(theta - current_yaw)
        if d_theta > numpy.pi:
            d_theta = 2 * numpy.pi - d_theta
        ######################################################
        if not done:
            '''
            if min_distance < 2:
                reward = -5/min_distance
            else:
                reward = 0
            '''
            if distance_difference < 0.0:
                #rospy.logwarn("DECREASE IN DISTANCE GOOD")
                reward = -20 * abs(numpy.pi - d_theta) * distance_difference#-30*(1-numpy.tanh(4.5*(min_distance-0.5)))
            else:
                reward = -5
            if min_distance < 0.8:
                reward -= 5/min_distance
            self.success = True
        else:
            if self.is_in_desired_position(current_position):
                #reward = 0
                reward = 1000
                self.success = True
            elif self._outofrange:
                reward = -1000
                self.success = False
            else:
                # NOTE(review): a crash ends with self.success = True, which
                # makes respawnModel/get_statemsg re-randomize -- confirm
                # this is intended.
                reward = -500
                self.success = True
        print("reward:",+ reward)
        self.previous_distance_from_des_point = distance_from_des_point
        rospy.logdebug("reward=" + str(reward))
        self.cumulated_reward += reward
        rospy.logdebug("Cumulated_reward=" + str(self.cumulated_reward))
        self.cumulated_steps += 1
        rospy.logdebug("Cumulated_steps=" + str(self.cumulated_steps))
        #return round(reward,2)
        return reward
# Internal TaskEnv Methods
    def discretize_observation(self,data,new_ranges):
        """
        Discards all the laser readings that are not multiple in index of new_ranges
        value. Inf/NaN readings are clamped to max/min laser values; any kept
        reading closer than self.min_range flags the episode as done (crash).

        NOTE(review): mod = len/new_ranges is a float under Python 3, so
        i % mod == 0 only selects indices that are exact float multiples --
        confirm new_ranges divides len(data.ranges) evenly.
        """
        self._episode_done = False
        discretized_ranges = []
        mod = len(data.ranges)/new_ranges
        rospy.logdebug("data=" + str(data))
        rospy.logwarn("new_ranges=" + str(new_ranges))
        rospy.logwarn("mod=" + str(mod))
        for i, item in enumerate(data.ranges):
            if (i%mod==0):
                if item == float ('Inf') or numpy.isinf(item):
                    discretized_ranges.append(self.max_laser_value)
                elif numpy.isnan(item):
                    discretized_ranges.append(self.min_laser_value)
                else:
                    discretized_ranges.append(round(item,3))
                # Crash detection: a valid reading inside the safety radius.
                if (self.min_range > item > 0):
                    #rospy.logerr("done Validation >>> item=" + str(item)+"< "+str(self.min_range))
                    self._episode_done = True
                #else:
                    #rospy.logwarn("NOT done Validation >>> item=" + str(item)+"> "+str(self.min_range))
        return discretized_ranges
def is_in_desired_position(self,current_position, epsilon=0.5):
"""
It return True if the current position is similar to the desired poistion
"""
is_in_desired_pos = False
#print(self.get_distance_from_desired_point(current_position))
is_in_desired_pos = self.get_distance_from_desired_point(current_position) <= epsilon
'''
x_pos_plus = self.desired_point.x + epsilon
x_pos_minus = self.desired_point.x - epsilon
y_pos_plus = self.desired_point.y + epsilon
y_pos_minus = self.desired_point.y - epsilon
x_current = current_position.x
y_current = current_position.y
x_pos_are_close = (x_current <= x_pos_plus) and (x_current > x_pos_minus)
y_pos_are_close = (y_current <= y_pos_plus) and (y_current > y_pos_minus)
is_in_desired_pos = x_pos_are_close and y_pos_are_close
'''
return is_in_desired_pos
def get_distance_from_desired_point(self, current_position):
"""
Calculates the distance from the current position to the desired point
:param start_point:
:return:
"""
distance = self.get_distance_from_point(current_position,
self.desired_point)
return distance
def get_distance_from_point(self, pstart, p_end):
"""
Given a Vector3 Object, get distance from current position
:param p_end:
:return:
"""
a = numpy.array((pstart.x, pstart.y, pstart.z))
b = numpy.array((p_end.x, p_end.y, p_end.z))
distance = numpy.linalg.norm(a - b)
return distance
def get_orientation_euler(self, quaternion_vector):
# We convert from quaternions to euler
orientation_list = [quaternion_vector.x,
quaternion_vector.y,
quaternion_vector.z,
quaternion_vector.w]
r = R.from_quat(orientation_list)
roll, pitch, yaw = r.as_rotvec()
return roll, pitch, yaw
    def respawnModel(self):
        """Spawn the "goal" marker model in Gazebo.

        When the previous episode succeeded, a new goal is sampled uniformly
        from the precomputed reachable positions in self.xy (built by
        goal_space); otherwise the previous goal is respawned unchanged.
        """
        if self.success:
            #self.obstacle2_msg.pose.position.x= random.uniform(4.5,7.5)
            #lw = 1.5-abs(self.obstacle2_msg.pose.position.x-6)
            #self.obstacle2_msg.pose.position.y= random.uniform(-lw,lw)
            # Sample a new goal position from the valid-goal table.
            n = random.randint(0,self.xy.shape[0]-1)
            self.desired_point.x = self.xy[n,0]
            self.desired_point.y = self.xy[n,1]
            #self.desired_point.x = float(numpy.random.uniform(low=-1.5, high=9, size=1))
            #self.desired_point.y = float(numpy.random.uniform(low=-5.5, high=6, size=1))
        rospy.wait_for_service('gazebo/spawn_sdf_model')
        self.goal_position.position.x = self.desired_point.x
        self.goal_position.position.y = self.desired_point.y
        spawn_model_prox = rospy.ServiceProxy('gazebo/spawn_sdf_model', SpawnModel)
        spawn_model_prox("goal", self.sdff, "", self.goal_position, "world")
        #spawn_model_prox("obstacle1", self.obstacle, "", self.obstacle_position_1, "world")
        #spawn_model_prox("obstacle2", self.obstacle2, "", self.obstacle_position_2, "world")
        # i=5
        # while i<10:
        #     if self.success:
        #         #self.obstacle2_msg.pose.position.x= random.uniform(4.5,7.5)
        #         #lw = 1.5-abs(self.obstacle2_msg.pose.position.x-6)
        #         #self.obstacle2_msg.pose.position.y= random.uniform(-lw,lw)
        #
        #         self.desired_point.x = self.xy[i,0]
        #         self.desired_point.y = self.xy[i,1]
        #         rospy.wait_for_service('gazebo/spawn_sdf_model')
        #         self.goal_position.position.x = self.desired_point.x
        #         self.goal_position.position.y = self.desired_point.y
        #         spawn_model_prox = rospy.ServiceProxy('gazebo/spawn_sdf_model', SpawnModel)
        #         name="goal"+str(i)
        #         spawn_model_prox(name, self.sdff, "", self.goal_position, "world")
        #     i+=1
    def deleteModel(self):
        """Remove the "goal" marker model from Gazebo (raises if absent)."""
        rospy.wait_for_service('gazebo/delete_model')
        del_model_prox = rospy.ServiceProxy('gazebo/delete_model', DeleteModel)
        del_model_prox("goal")
    def moveto(self):
        """Teleport the robot in Gazebo to the pose held in self.state_msg
        (refreshed by get_statemsg)."""
        rospy.wait_for_service('/gazebo/set_model_state')
        set_state = rospy.ServiceProxy('/gazebo/set_model_state', SetModelState)
        self.get_statemsg()
        set_state(self.state_msg )
    def get_statemsg(self):
        """Refresh self.state_msg with the robot's next spawn pose.

        After a successful episode the position is re-sampled from the
        valid-position table self.xy; the heading is always randomized.
        """
        if self.success:
            n = random.randint(0,self.xy.shape[0]-1)
            self.state_msg.pose.position.x = self.xy[n,0]
            self.state_msg.pose.position.y = self.xy[n,1]
            #self.state_msg.pose.position.x = numpy.random.uniform(low=-1.5, high=9, size=1)
            #self.state_msg.pose.position.y = numpy.random.uniform(low=-5.5, high=6, size=1)
            '''goal_position=numpy.array([self.desired_point.x,self.desired_point.y])
            while numpy.linalg.norm(goal_position-numpy.array([self.state_msg.pose.position.x,self.state_msg.pose.position.y]))<5:
                n = random.randint(0,self.xy.shape[0]-1)
                self.state_msg.pose.position.x = self.xy[n,0]
                self.state_msg.pose.position.y = self.xy[n,1]
                print("try new robot position")'''
        # Random yaw (degrees) converted to a quaternion.
        D = random.uniform(0,360)
        r = R.from_euler('z', D, degrees=True)
        self.state_msg.pose.orientation.x, self.state_msg.pose.orientation.y, self.state_msg.pose.orientation.z, self.state_msg.pose.orientation.w = r.as_quat()
def goal_space(self):
X = []
Y = []
X,Y = numpy.mgrid[8:9:0.1, -6:-0.2:0.1] # mesh for bottom left room
self.xy = numpy.vstack((X.flatten(), Y.flatten())).T
X = []
Y = []
X,Y = numpy.mgrid[4.1:7.3:0.1, -6:-4.7:0.1] # mesh for bottom left room
self.xy = numpy.append(self.xy,numpy.vstack((X.flatten(), Y.flatten())).T, axis=0)
X = []
Y = []
X,Y = numpy.mgrid[4.1:7.3:0.1, -2.5:0.4:0.1] # mesh for bottom left room
self.xy = numpy.append(self.xy,numpy.vstack((X.flatten(), Y.flatten())).T, axis=0)
X = []
Y = []
X,Y = numpy.mgrid[4.1:5.8:0.1, -4.2:-2.9:0.1] # mesh for bottom left room
self.xy = numpy.append(self.xy,numpy.vstack((X.flatten(), Y.flatten())).T, axis=0)
X = []
Y = []
X,Y = numpy.mgrid[3.7:6.1:0.1, 1.0:6.3:0.1] # mesh for middle room
self.xy = numpy.append(self.xy,numpy.vstack((X.flatten(), Y.flatten())).T, axis=0)
X = []
Y = []
X,Y = numpy.mgrid[8.1:9:0.1, 1.6:6.3:0.1] # mesh for bottom right room
self.xy = numpy.append(self.xy,numpy.vstack((X.flatten(), Y.flatten())).T, axis=0)
X = []
Y = []
X,Y = numpy.mgrid[-0.9:2.5:0.1, 4.2:6.3:0.1] # mesh for top right room
self.xy = numpy.append(self.xy,numpy.vstack((X.flatten(), Y.flatten())).T, axis=0)
X = []
Y = []
X,Y = numpy.mgrid[-0.9:2.8:0.1, -2.9:0.2:0.1] # mesh for top left room
self.xy = numpy.append(self.xy,numpy.vstack((X.flatten(), Y.flatten())).T, axis=0)
X = []
Y = []
X,Y = numpy.mgrid[-0.3:1.4:0.1, 0.6:2.4:0.1] # mesh for top left room
self.xy = numpy.append(self.xy,numpy.vstack((X.flatten(), Y.flatten())).T, axis=0)
X = []
Y = []
X,Y = numpy.mgrid[0:2.8:0.1, -5.9:-4.4:0.1] # mesh for top left room
self.xy = numpy.append(self.xy,numpy.vstack((X.flatten(), Y.flatten())).T, axis=0)
'''def goal_space(self):
X =[]
Y =[]
for i in numpy.arange(0,1.8,0.1):
X11,Y11 = numpy.mgrid[i:i+0.1:0.1, -(4.13+2/3.5*i):-1.5:0.1]
X12,Y12 = numpy.mgrid[i:i+0.1:0.1, 1.5:(4.13+2/3.5*i):0.1]
X = numpy.hstack((X, X11.flatten(), X12.flatten()))
Y = numpy.hstack((Y, Y11.flatten(), Y12.flatten()))
X21,Y21 = numpy.mgrid[7.5:8.5:0.1, -5.5:-4.9:0.1]
X22,Y22 = numpy.mgrid[7.5:8.5:0.1, -2:1.6:0.1]
X23,Y23 = numpy.mgrid[7.5:8.5:0.1, 4.5:5.6:0.1]
xy1 = numpy.vstack((X, Y)).T
xy21 = numpy.vstack((X21.flatten(), Y21.flatten())).T
xy22 = numpy.vstack((X22.flatten(), Y22.flatten())).T
xy23 = numpy.vstack((X23.flatten(), Y23.flatten())).T
self.xy=numpy.vstack((xy1,xy21,xy22,xy23))'''
|
#!/usr/bin/python -Wall
# ================================================================
# Please see LICENSE.txt in the same directory as this file.
# John Kerl
# kerl.john.r@gmail.com
# 2007-05-31
# ================================================================
import re
import copy
class coset:
    """A coset {g*h : h in H} represented as a sorted list of group elements.

    Elements must be mutually comparable (for sorting) and support ``*``
    and an ``.inv()`` method. Two cosets are equal iff their sorted element
    lists are equal.
    """
    # Legacy class-level default kept for backward compatibility; every
    # instance always sets its own self.slots in __init__.
    slots = []

    def __init__(self, slots):
        # Shallow copy so the caller's list is not mutated, then keep the
        # canonical sorted order (this is what makes __eq__ work).
        self.slots = copy.copy(slots)
        self.slots.sort()
        # xxx need deep sort

    def __eq__(self, other):
        # BUG FIX: the original assumed equal lengths and indexed blindly
        # (IndexError on shorter `other`); unequal sizes are simply unequal.
        if len(self.slots) != len(other.slots):
            return False
        for mine, theirs in zip(self.slots, other.slots):
            if mine != theirs:
                return False
        return True

    def __ne__(self, other):
        return not (self == other)

    def __mul__(self, other):
        """Coset product: multiply a representative (self.slots[0]) into
        every element of `other`. Well-definedness is not checked here."""
        product = coset(other.slots)
        for i in range(len(other.slots)):
            product.slots[i] = self.slots[0] * other.slots[i]
        product.slots.sort()
        return product

    def inv(self):
        """Element-wise inverse of the coset, re-sorted."""
        inverse = coset(self.slots)
        for i in range(len(self.slots)):
            inverse.slots[i] = self.slots[i].inv()
        inverse.slots.sort()
        return inverse

    def __str__(self):
        # BUG FIX: the original indexed slots[0] unconditionally and crashed
        # on an empty coset; join handles that case naturally.
        return "[" + ",".join(str(element) for element in self.slots) + "]"

    def __repr__(self):
        return self.__str__()
|
##! /usr/bin/python
import os
# recursive dir structuresa
import glob
#glob.glob(pattern, exclude); # wildcard processsing like "*.txt"
#pickle modules: dump and load
#a = ['wre',2431,'test',23.324]
#import pickle
#f = open('c:/files/pick.txt','wb')
#pickle.dump(a,f)
#f.close()
#f2 = open('c:/files/pic.txt','r')
#a = pickle.load(f2)#
#f2.close()
# Demo: print the working directory, walk the directory tree, run the
# project's wiper module, then list the *.py files here.
# NOTE(review): this file is Python 2 (print statements below); it will not
# run under Python 3 without converting them to print() calls.
print os.getcwd()
for root, dirs, files in os.walk(os.curdir):
    print('{0} has {1} files and {2} sub-folders'.format(root, len(files), len(dirs)))
import wiper
wiper.w()
# NOTE(review): the loop variable `files` shadows the `files` from os.walk.
for files in glob.glob(os.curdir + "/*.py"):
    print files
|
#7
def string_to_int(list: "list[str]") -> "list[int]":
    """Return the length of each string in the input list.

    NOTE(review): despite its name, this returns string *lengths*, not
    parsed integers. The parameter name shadows the builtin ``list`` but is
    kept for backward compatibility with existing keyword callers.
    """
    return [len(item) for item in list]


print(string_to_int(["aaa", "ccc"]))
from collections import deque
# Shared module state for the beam search below.
unks = deque() # queue of unknown words, left in & right out
cands = [] # candidate replacements for the unk currently being processed
docs = [] # list of doc. doc is a dictionary of word vector
results = {} # dict key:unk , value:cands list
trainvectors = {} # word -> embedding vector for the training vocabulary
trainwords = set(trainvectors.keys())
def update_doc(doc, unk, cand):
    """Return a copy of `doc` where `unk` is replaced by `cand`.

    When `cand` is the empty string the original `doc` object is returned
    unchanged; otherwise the copy gains `cand` (with its training vector)
    and loses `unk`.
    """
    if cand == '':
        return doc
    replaced = doc.copy()
    replaced[cand] = trainvectors[cand]
    del replaced[unk]
    return replaced
def find_cands(match_word_test, testvectors, test_vector_number):
    """Score training-only words as replacement candidates for a test word.

    Returns a heap of (match_score, train_word, test_vector_number) tuples.
    NOTE(review): Python 2 code (`print >>sys.stderr`); relies on calcdist,
    calcMatchScore, heappush and nsmallest being defined/imported elsewhere.
    """
    matchs = []
    testwords = set(testvectors.keys())
    # Words known to both vocabularies anchor the comparison.
    knownwords = trainwords & testwords
    matchwords_train = list(trainwords - knownwords)
    # Get knn
    # calculate clostest known_words in test_vectors which count is topnum
    # The test_vectors could be change in bean search
    knn_size = 50
    heap = []
    knn = []
    for known_word in knownwords:
        dist = calcdist(testvectors[match_word_test],testvectors[known_word])
        heappush(heap, (dist,known_word))
    for item in nsmallest(knn_size, heap):
        knn.append(item[1])
    # End of Get knn
    # Get match words
    # get match word & match_score in train_words to test_word
    # which size based on topnum
    # "knn", "test_vectors" could be change in bean_search
    heap = []
    for match_word_train in matchwords_train:
        print >>sys.stderr, 'Generating match scores for: ', match_word_train, ' and ', match_word_test
        match_score = calcMatchScore(match_word_test, match_word_train, knn, trainvectors, testvectors)
        heappush(heap, (match_score,match_word_train, test_vector_number))
    # for item in nsmallest(topnum, heap):
    #	matchs.append(item)
    # end for Get match words
    return heap
def beam_search(beam_width):
    """Beam search over candidate replacements for the queued unknown words.

    NOTE(review): this draft has several defects to resolve before use:
    - collections.deque has no .empty(); use `while unks:` instead.
    - `cand` in the update_doc call and `result(...)` are undefined here
      (presumably `cands` entries and the `results` dict were intended).
    - `cands`/`docs` are assigned locally, shadowing the module globals.
    - `merge` is undefined and `xrange` is Python 2 only.
    """
    while unks.empty() != True:
        # if len(cands) != 0:
        result(unks[-1], cands) # add2result(unks[-1],cands) # top unks, all cands
        cur_unk = unks.pop()
        # generate docs
        temp_docs = []
        for i in xrange(len(docs)):
            temp_docs.append(update_doc(docs[i], cur_unk, cand))
        # get cands
        temp_cands_tuple = [] # generate tuple ( score, cand, doc number )
        for i in xrange(len(temp_docs)):
            merge(temp_cands_tuple, find_cands(cur_unk, temp_docs[i], i))
        # generate res
        res_docs = set()
        cands = []
        for item in nsmallest(beam_width, temp_cands_tuple):
            res_docs.add(item[2])
            cands.append(item[1])
        docs = list(res_docs)
|
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
import numpy as np
import pandas as pd
import pylab
import seaborn as sns
import matplotlib.pyplot as plt
import scipy.stats as sci
def main():
    """Run Django administrative tasks from the command line.

    NOTE(review): never called below -- the __main__ guard runs an unrelated
    pandas analysis instead; confirm which behavior this file should have.
    """
    # The settings module name contains non-ASCII characters by design
    # (project name 'bakeMass预测'); kept as-is, it is runtime behavior.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'bakeMass预测.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)
if __name__ == '__main__':
    # NOTE(review): this is an exploratory pandas/seaborn analysis pasted
    # into Django's manage.py; main() above is never invoked, so management
    # commands do not run. It should presumably live in its own script.
    # (Comments below translated from the original Chinese.)
    # region training data
    # Choose the data source.
    data = pd.read_csv('bakeMass.csv')
    # Split the data set into a training set and a test set.
    # NOTE(review): these slices are views of `data`; the assignments below
    # will raise SettingWithCopyWarning -- consider .copy().
    train = data[0:5000]
    test = data[5000:]
    # Convert the date column to datetime and index by it.
    data['DTIME'] = pd.to_datetime(data['DTIME'], format='%d/%m/%Y %H:%M:%S')
    data.index = data['DTIME']
    # Aggregate the data by day.
    data = data.resample('D').mean()
    train['DTIME'] = pd.to_datetime(train['DTIME'], format='%d/%m/%Y %H:%M:%S')
    train.index = train['DTIME']
    train = train.resample('D').mean()
    test['DTIME'] = pd.to_datetime(test['DTIME'], format='%d/%m/%Y %H:%M:%S')
    test.index = test['DTIME']
    test = test.resample('D').mean()
    # Set the plot size and title.
    # NOTE(review): fontsize=140 looks like a typo for 14 -- confirm.
    train.HF.plot(figsize=(12, 8), title='HF', fontsize=140)
    test.HF.plot(figsize=(12, 8), title='HF', fontsize=14)
    plt.show()
    # endregion
    # Inspect the data.
    data.head()
    # Inspect the data set's shape.
    data.shape
    # Inspect the column dtypes.
    data.dtypes
    # 2. Analyze the target variable: before modelling, check its type,
    #    distribution and outliers, and choose a model accordingly.
    data['HF'].describe()
    # "cannot convert float NaN to integer" -- there are missing values.
    # Drop the rows where the target column [HF] is NaN.
    indexs = list(data[np.isnan(data['HF'])].index)
    data = data.drop(indexs)
    # Distribution of the target variable.
    sns.distplot(data['HF'])
    plt.show()
    # 3. Select the best features.
    # a. For continuous variables, the Pearson correlation coefficient finds
    #    the features most correlated with the target.
    # 1) Bulk density.
    sns.jointplot(x='TJMD', y='HF', data=data, stat_func=sci.pearsonr)
    # b. For categorical variables Pearson correlation does not apply; look
    #    at how much the target varies across each category value instead --
    #    a larger spread usually means a stronger association.
    # Box-and-whisker plot.
    sns.boxplot(x='TJMD', y='HF', data=data)
    # Bar chart.
    grouped = data.groupby('TJMD')
    g1 = grouped['HF'].mean().reset_index('TJMD')
    sns.barplot(x='TJMD', y='HF', data=g1)
    # c. The two analyses above go feature-by-feature, which is tedious; a
    #    heat map over all numeric features finds the best ones at once.
    # Heat map: sns.heatmap()
    # Set the figure size.
    pylab.rcParams['figure.figsize'] = (15, 10)
    # Compute the correlation matrix.
    corrmatrix = data.corr()
    # Draw the heat map: axes are data's index/columns; vmax/vmin bound the
    # color scale, center positions its midpoint, cmap selects the palette.
    sns.heatmap(corrmatrix, square=True, vmax=1, vmin=-1, center=0.0, cmap='coolwarm')
    # Weakly correlated features can be ignored; keep the top-k features.
    # Take the 10 features most correlated with the target.
    k = 10
    # data.nlargest(k, 'target') takes the k rows with the largest 'target';
    # cols is their index -- here the 10 features most correlated with 'HF'.
    cols = corrmatrix.nlargest(k, 'HF')['HF'].index
    cm = np.corrcoef(data[cols].values.T)
    # data[cols].values.T
    # Set the axis font size.
    sns.set(font_scale=1.25)
    # sns.heatmap(): cbar toggles the color bar (default on); cmap sets the
    # colors; annot writes each cell value (default off); square forces
    # square cells (default False); fmt/annot_kws format the annotations;
    # yticklabels/xticklabels set the axis tick labels.
    hm = sns.heatmap(cm, cmap='RdPu', annot=True, square=True, fmt='.2f', annot_kws={'size': 10},
                     yticklabels=cols.values, xticklabels=cols.values)
    # The above shows another way to get correlations; data.corr() is more
    # convenient.
    cm1 = data[cols].corr()
    hm2 = sns.heatmap(cm1, square=True, annot=True, cmap='RdPu', fmt='.2f', annot_kws={'size': 10})
    plt.show()
from kodijson import Kodi
import time
current_milli_time = lambda: int(round(time.time() * 1000))
class ibusKodi(Kodi):
    """Maps an iBus CD-changer model (CD number -> playlist directory,
    track number) onto a Kodi audio player via its JSON-RPC API.

    NOTE(review): the attributes below are *class-level* and therefore
    shared by all instances; fine for a singleton controller, but mutating
    them on one instance affects every instance.
    """
    kodi = Kodi("http://192.168.10.1:8080/jsonrpc", "kodi", "kodi")
    cdNumber = 1
    trackNumber = 1
    kodiTrNumbers = 60 #dummy value, replaced by setPlaylist()
    preDefPlaylist=["/media/pi/Adus/DiscoPolo", "/media/pi/Adus/Dance","/media/pi/Adus/Nowe"]
    percentage = 0
    numberOfPlaylist = 0 #[0:xx]
    def __init__(self, debug):
        # debug: when truthy, dbgPrint() echoes diagnostic messages.
        self.debug = debug
        self.pingKodi(40)
        self.initPlaylists()
        self.setPlaylist()
        self.playSong()
    def pingKodi(self,timeout):
        """Poll Kodi's JSONRPC.Ping until it answers or attempts run out.

        NOTE(review): sleeps 2 s per attempt, so `timeout` attempts take up
        to 2*timeout seconds (the log message reports sec*2 accordingly).
        """
        sec = 0;
        while sec < timeout:
            try:
                self.kodi.JSONRPC.Ping()
            except:
                # Bare except on purpose: any transport error just means
                # Kodi is not up yet; keep retrying.
                self.dbgPrint("Kodi in not pingable")
            else:
                self.dbgPrint("Received Pong after " + str(sec*2))
                return
            time.sleep(2)
            sec += 1
    def playSong(self):
        # GoTo is 0-based, trackNumber is 1-based.
        self.kodi.Player.GoTo({"playerid":0, "to":self.trackNumber-1})
    def stopPlay(self):
        # NOTE(review): PlayPause *toggles* playback; despite the name this
        # resumes when already paused -- confirm intended.
        self.kodi.Player.PlayPause({"playerid":0})
    def initPlaylists(self):
        """Cache how many predefined playlist directories exist."""
        self.numberOfPlaylist = len(self.preDefPlaylist)
        print("Number of playlists: " + str(self.numberOfPlaylist))
    def setPlaylist(self):
        """Load the directory for the current CD number into Kodi's audio
        playlist, record its track count, and start playback (repeat all)."""
        if self.cdNumber <= self.numberOfPlaylist and self.cdNumber > 0:
            now = current_milli_time()
            result = self.kodi.Playlist.Clear({"playlistid":0 }) #playlist 0 is audio playlist
            self.dbgPrint("Clear result: " + str(result))
            result = self.kodi.Playlist.Add({"item":{"directory":self.preDefPlaylist[self.cdNumber-1]},"playlistid":0})
            self.dbgPrint("Add result: " + str(result))
            # Query only one item; the 'limits' field still reports the total.
            out = self.kodi.Playlist.GetItems({"playlistid":0, "limits":{"end":1},"sort":{"order":"ascending","method":"dateadded"}})
            self.dbgPrint("PLaylist track limits: " + str(out['result']['limits']))
            self.kodiTrNumbers = out['result']['limits']['total'] #total tracks in current Playlist
            result = self.kodi.Player.Open({"item":{"playlistid":0},"options":{"repeat":"all"}})
            self.dbgPrint("Open result: " + str(result)+ " number of tracks " + str(self.kodiTrNumbers))
            then = current_milli_time()
            then = then - now
            self.dbgPrint("Reading kodi playlist info took: " + str(then))
        else:
            print("ERROR cd number out of range! CD: " + str(self.cdNumber) + " playlists: " + str(self.numberOfPlaylist))
    def dbgPrint(self, string):
        # Debug-gated print helper.
        if self.debug:
            print(string)
|
# -*- coding: utf-8 -*-
"""
Created on Fri May 18 15:36:34 2018
@author: Sumudu Tennakoon
"""
import re

# Pre-compiled patterns shared by the extractor helpers below.
EMAIL_FORMAT = re.compile(r'([\w\.-]+@[\w\.-]+\.\w+)')
DOMAIN_FORMAT = re.compile(r'@([\w\.-]+)')
PHONENUM_FORMAT = re.compile(r'(\d{3})\D*(\d{3})\D*(\d{4})\D*(\d*)$')
MESSAGID_FORMAT = re.compile(r'<(.*?)>')


def ExtractEmailAddresses(text):
    """Return every e-mail-address-like substring found in `text`."""
    return EMAIL_FORMAT.findall(text)


def ExtractDomains(text):
    """Return every domain that directly follows an '@' in `text`."""
    return DOMAIN_FORMAT.findall(text)


def ExtractMessageIDs(EmailColumn):
    """Return the bodies of all '<...>' message-id tokens in str(EmailColumn)."""
    return MESSAGID_FORMAT.findall(str(EmailColumn))


def ExtractPhoneNumber(text):
    """Return US-style phone-number group tuples anchored at the end of `text`."""
    return PHONENUM_FORMAT.findall(text)
def RemoveQuotedPrintableEncoding(Text):
    """Decode a hand-picked subset of quoted-printable escapes in `Text`.

    Replacements are applied sequentially in the dict's insertion order,
    so earlier entries can affect later matches (e.g. '=0D' and '=0A' are
    decoded before '=0D=0A' could ever match as a pair -- the net result
    is the same '\\r\\n'). NOTE(review): '= ' maps to ' ' although the
    comment calls it a soft break to ignore -- confirm which is intended.
    """
    #Quoted-Printable Content-Transfer-Encoding
    #Source: http://www.freesoft.org/CIE/RFC/1521/6.htm
    #https://www.w3.org/Protocols/rfc1341/5_Content-Transfer-Encoding.html
    #https://stackoverflow.com/questions/25710599/content-transfer-encoding-7bit-or-8-bit
    QPCTE = {
            '= '	:' ', #Soft line break (ignore).
            '==09'	:'', #Soft line break (ignore).
            '==20'	:' ', #Soft line break (ignore).
            '=\t'	:'\t', #Soft line break (ignore).
            '=\n'	:'', #Soft line break (ignore).
            '=09'	:'\t',
            '=0A'	:'\n',
            '=0C'	:'\f',
            '=0D'	:'\r',
            '=0D=0A':'\r\n',
            '=20'	:' ', #Space
            '=21'	:'!',
            '=22'	:'"',
            '=23'	:'#',
            '=24'	:'$',
            '=25'	:'%',
            '=26'	:'&',
            '=27'	:"'",
            '=28'	:'(',
            '=29'	:')',
            '=2A'	:'*',
            '=2B'	:'+',
            '=2C'	:',',
            '=2D'	:'-',
            '=2E'	:'.',
            '=2F'	:'/',
            '=3A'	:':',
            '=3B'	:';',
            '=3C'	:'<',
            '=3D'	:'=',
            '=3E'	:'>',
            '=3F'	:'?',
            '=85'	:'...',
            '=91'	:"'",
            '=92'	:"'",
            '=93'	:'"',
            '=94'	:'"',
            '=95'	:u'\x95',
            '=96'	:'-',
            '=97'	:'--',
            '=98'	:'~',
            '=99'	:u'\x99', #TM
            '=A9'	:u'\xA9', #Copyright
            '=AE'	:u'\xAE', #Registered
            '=E0'	:u'\xE0',
            '=E1'	:u'\xE1',
            '=E2'	:u'\xE2',
            '=E3'	:u'\xE3',
            '=E4'	:u'\xE4',
            '=E8'	:u'\xE8',
            '=E9'	:u'\xE9',
            '=EA'	:u'\xEA',
            '=EB'	:u'\xEB',
            '=EC'	:u'\xEC',
            '=ED'	:u'\xED',
            '=EE'	:u'\xEE',
            '=F1'	:u'\xF1',
            '=F2'	:u'\xF2',
            '=F3'	:u'\xF3',
            '=F4'	:u'\xF4',
            '=F5'	:u'\xF5',
            '=F6'	:u'\xF6',
            '=F7'	:u'\xF7',
            '=F8'	:u'\xF8',
            '=F9'	:u'\xF9',
            '=FA'	:u'\xFA',
            '=FB'	:u'\xFB',
            '=FC'	:u'\xFC',
            '=FD'	:u'\xFD',
            }
    for (pattern,replacement) in QPCTE.items():
        Text = Text.replace (pattern, replacement)
    return Text
def RemoveHTMLTagsEntities(Text):
    """Strip HTML tags from `Text` and decode common HTML entities.

    The regex rules are applied IN ORDER: whitespace (including newlines)
    is collapsed first, which is what lets the head/script/style rules
    work without re.DOTALL; structural closers become newlines; then any
    remaining tags are dropped. Entity replacement happens afterwards,
    also in insertion order, with '&amp;' deliberately last.
    """
    # apply rules in given order!
    rules = [
        { r'\s+' : u' '},                            # Collapse all runs of whitespace to one space
        { r'\s*<br\s*/?>\s*' : u'\n'},               # Convert <br> to newline
        { r'</(p|h\d)\s*>\s*' : u'\n\n'},            # Double newline after </p> and </h1>..</h9>
        { r'<head>.*<\s*(/head|body)[^>]*>' : u'' }, # Remove everything from <head> to </head>/<body>
        { r'<script>.*<\s*/script[^>]*>' : u'' },    # Remove everything from <script> to </script> (javascript)
        { r'<style>.*<\s*/style[^>]*>' : u'' },      # Remove everything from <style> to </style> (stylesheet)
        { r'<[^<]*?/?>' : u'' },                     # Remove remaining tags
        #{ r'<[^>]+>' : u''}
    ]
    for rule in rules:
        for (pattern,replacement) in rule.items():
            Text = re.sub (pattern, replacement, Text)
    #https://www.w3schools.com/charsets/ref_html_entities_4.asp
    #https://docs.python.org/3/library/html.entities.html#html.entities.html5
    HTML5Entity={
            '&lt;'      :'<',
            '&gt;'      :'>',
            '&nbsp;'    :' ',
            '& nbsp;'   :' ',  # common broken-nbsp variants
            '&n bsp;'   :' ',
            '&nb sp;'   :' ',
            '&nbs p;'   :' ',
            '&quot;'    :'"',
            # BUG FIX: this key was 'cent;' (missing '&'), which corrupted
            # ordinary text such as 'percent;'.
            '&cent;'    :u'\xA2',
            '&pound;'   :u'\xA3',
            '&copy;'    :u'\xA9',
            '&reg;'     :u'\xAE',
            '&plusmn;'  :u'\xB1',
            '&frac14;'  :u'\xBC',
            '&frac12;'  :u'\xBD',
            '&frac34;'  :u'\xBE',
            '&times;'   :u'\xD7',
            # BUG FIX: the originals used u'\x2032'-style escapes; \x takes
            # exactly two hex digits, so u'\x2032' is ' 32' (space + "32"),
            # not the intended U+2032 character. Use \u escapes.
            '&prime;'   :u'\u2032',
            '&Prime'    :u'\u2033',
            '&lowast;'  :u'\u2217',
            '&ne;'      :u'\u2260',
            '&trade;'   :u'\u2122',
            '&amp;'     :'&'  # must stay last so it cannot break other entities
            }
    for (pattern,replacement) in HTML5Entity.items():
        Text = Text.replace (pattern, replacement)
    return Text
def RemoveConsecutiveWhiteSpace(Text):
    """Collapse each run of a repeated whitespace character (space, newline,
    tab, carriage return) down to a single occurrence of that character."""
    for pattern, replacement in (
        ('[\x20][\x20]+', ' '),
        ('[\n][\n]+', '\n'),
        ('[\t][\t]+', '\t'),
        ('[\r][\r]+', '\r'),
    ):
        Text = re.sub(pattern, replacement, Text)
    return Text
import boto3, time, os
from botocore.exceptions import ClientError
s3_resource = boto3.resource('s3')
s3_client = boto3.client('s3')
buckets = {
'cis3110-ccorneli': ['3110Assignment1.pdf', '3110Lecture1.pdf', '3110Lecture2.pdf', '3110Lecture3.pdf'],
'cis1300-ccorneli': ['1300Assignment1.pdf', '1300Assignment2.pdf', '1300Assignment3.pdf', '1300Assignment4.pdf'],
'cis4010-ccorneli': ['4010Lecture1.pdf', '4010Lecture2.pdf', '4010Assignment1.pdf']
}
# TODO Ensure using os path functions
def create_buckets():
    """Create every bucket listed in `buckets` and upload its files.

    Buckets that already exist (head_bucket succeeds) are skipped entirely;
    their objects are NOT re-uploaded -- the `continue` skips the upload
    loop too. Prints progress and total elapsed time.
    """
    start = time.perf_counter()
    print('Creating S3 Buckets')
    try:
        for bucket in buckets.keys():
            # https://stackoverflow.com/a/26871885
            try:
                # head_bucket raises ClientError when the bucket is missing.
                if s3_client.head_bucket(Bucket=bucket):
                    print(bucket + ' already exists')
                    continue
            except ClientError as e:
                print(bucket + ' does not exist. Creating.')
                s3_client.create_bucket(Bucket=bucket)
                for obj in buckets[bucket]:
                    # https://boto3.amazonaws.com/v1/documentation/api/latest/guide/s3-uploading-files.html
                    s3_client.upload_file(os.path.join('data', obj), bucket, obj)
                print(bucket + ' created successfully.')
    except ClientError as e:
        print(e)
    end = time.perf_counter()
    print('\nBucket creation completed in ' + str(end - start) + 's')
def list_buckets_and_contents():
    """Print every bucket's name and object keys, then the elapsed time.

    BUG FIX: the original swallowed a ClientError from list_buckets() and
    then fell through to use `response`, raising NameError; we now print
    the error and return early instead.
    """
    start = time.perf_counter()
    try:
        response = s3_client.list_buckets()
    except ClientError as e:
        print(e)
        return
    for buck in response['Buckets']:
        list_objects(buck['Name'])
    end = time.perf_counter()
    print_benchmark(start, end)
def list_objects(bucket_name, print_stats=False):
    """Print the key of every object in `bucket_name`.

    print_stats: when True, also print how long the listing took.
    A missing/inaccessible bucket is reported, not raised.
    """
    start = time.perf_counter()
    try:
        bucket = s3_resource.Bucket(bucket_name)
        print(bucket_name + ':')
        for obj in bucket.objects.all():
            print('\t- ' + obj.key)
    except ClientError:
        print('ERROR That bucket does not exist!')
    end = time.perf_counter()
    if print_stats:
        print_benchmark(start, end)
# Searches all buckets for objects with names that match an input string
# NOTE that this matches to lowercase
def search_objects(obj_name):
    """Case-insensitively search every bucket for object keys containing `obj_name`."""
    t0 = time.perf_counter()
    try:
        matched = False
        needle = obj_name.lower()
        for buck in s3_client.list_buckets()['Buckets']:
            # Walk every object in this bucket looking for a substring match.
            for obj in s3_resource.Bucket(buck['Name']).objects.all():
                if needle in obj.key.lower():
                    print('\t- ' + obj.key + ' found in ' + buck['Name'])
                    matched = True
        if not matched:
            print('No objects have a name containing \'' + obj_name + '\'')
    except ClientError as e:
        print(e)
    t1 = time.perf_counter()
    print_benchmark(t0, t1)
# https://stackoverflow.com/a/34562141
def download_object(obj_name):
    """Find an object whose key exactly equals `obj_name` in any bucket and download it.

    BUG FIX: the original required the prefix filter to return exactly one
    object (`len(...) == 1`), so the download was skipped whenever another
    key shared the same prefix. We now check for an exact key match.
    """
    start = time.perf_counter()
    try:
        found = False
        for buck in s3_client.list_buckets()['Buckets']:
            bucket = s3_resource.Bucket(buck['Name'])
            # Prefix filter narrows the listing; equality guarantees exactness.
            if any(obj.key == obj_name for obj in bucket.objects.filter(Prefix=obj_name)):
                # https://stackoverflow.com/a/34562141
                s3_client.download_file(buck['Name'], obj_name, obj_name)
                print(obj_name + ' downloaded successfully.')
                found = True
                break
        if not found:
            print('The object ' + obj_name + ' does not exist in any buckets')
    except ClientError as e:
        print(e)
    end = time.perf_counter()
    print_benchmark(start, end)
def get_bucket_name():
    """Prompt for a bucket name and list its contents (with timing stats)."""
    list_objects(input('Enter the name of the bucket you wish to see the contents of: '), True)
def get_object_name_list_objects():
    """Prompt for a full/partial object name and search all buckets for it."""
    search_objects(input('Enter the full or partial name of the object you wish to search for: '))
def get_object_and_bucket_names():
    """Prompt for an exact object name and download it from whichever bucket holds it."""
    download_object(input('Enter the exact name of the object you wish to download: '))
def prompt():
    """Return the interactive command-menu text shown before each input prompt."""
    return "\nChoose one of the following commands:\n\
    - list objects in (a)ll containers\n\
    - list objects in a (s)pecific container\n\
    - list objects (w)ith a specific name\n\
    - (d)ownload a specific object\n\
    - (q)uit\n>"
def print_benchmark(start, end):
    """Print elapsed seconds between two time.perf_counter() readings."""
    elapsed = end - start
    print('\nTask completed in ' + str(elapsed) + 's')
# options for command inputs
# https://stackoverflow.com/a/11479840
options = {
    'a': list_buckets_and_contents,
    's': get_bucket_name,
    'w': get_object_name_list_objects,
    'd': get_object_and_bucket_names,
    'q': exit,
    'quit': exit
}
create_buckets()
print("\nWelcome to the S3 client wrapper!\n")
cmd = input(prompt())
# BUG FIX: the original condition `cmd != 'q' or cmd != 'quit'` is always
# true (no string equals both), so the loop could only terminate through the
# exit() entries in the dispatch table. Use a proper membership test.
while cmd not in ('q', 'quit'):
    if cmd in options:
        options[cmd]()
    else:
        print('Please enter a valid command')
    cmd = input(prompt())
import argparse
import gzip
import pandas as pd
import math
import subprocess
def parse_specific_peaks(file_object):
    """Read BED-like lines and map 'chr.start.stop' keys to peak dicts.

    Only the first three tab-separated fields (chr, start, stop) are kept;
    any extra fields on a line are ignored.
    """
    field_names = ['chr', 'start', 'stop']
    peaks = [
        dict(zip(field_names, raw_line.strip().split('\t')))
        for raw_line in file_object
    ]
    return {
        '%s.%s.%s' % (peak['chr'], peak['start'], peak['stop']): peak
        for peak in peaks
    }
def parse_template_file(file_object):
    """Parse an LDSC template file (tab-separated, one header row) into dicts.

    The first line (header) is discarded; each remaining line is mapped onto
    the fixed column names below. Missing trailing fields are simply absent
    from the dict (zip truncates).
    """
    # Hoisted out of the loop: the column list is loop-invariant, so the
    # original rebuilt it once per line for no benefit.
    columns = ['CHR', 'BP', 'SNP', 'CM', 'LIFTOVER', 'INTERSECTS_PEAK', 'PEAK_CHR', 'PEAK_START', 'PEAK_STOP']
    file_object.readline()  # skip header row
    return [dict(zip(columns, line.strip().split('\t'))) for line in file_object]
def add_categories(template_snps, specific_peaks):
    """Return SNPs whose intersected peak appears in `specific_peaks`.

    Each returned SNP dict is mutated in place with CATEGORY = 0.
    """
    selected = []
    for snp in template_snps:
        if snp['INTERSECTS_PEAK'] != "1":
            continue
        peak_key = '%s.%s.%s' % (snp['PEAK_CHR'], snp['PEAK_START'], snp['PEAK_STOP'])
        if peak_key in specific_peaks:
            snp['CATEGORY'] = 0
            selected.append(snp)
    return selected
def ldsc(ldsc_path, bfile, annotation_file, output_file_prefix, hapmap_snps):
    """Run the external LDSC script to compute LD scores for the annotation file.

    WARNING(review): the command is built by string interpolation and run with
    shell=True, so paths containing spaces or shell metacharacters will break
    or be interpreted by the shell. Consider subprocess.call([...]) with a
    list of arguments if inputs are not fully trusted.
    """
    command = "python %s --l2 --bfile %s --ld-wind-cm 1 --annot %s --out %s --print-snps %s" % (ldsc_path, bfile, annotation_file, output_file_prefix, hapmap_snps)
    subprocess.call(command, shell=True)
if __name__ == '__main__':
    parser = argparse.ArgumentParser('Script to take a set of cluster specific peaks and a LDSC template and generate final annotation file and LDSC model.')
    parser.add_argument('template_file', help='LDSC template file generated by generate_ld_score_annotation_template.py')
    parser.add_argument('specific_peaks_file', help='BED file with chrom, start, and end (no header row). Any other fields are ignored.')
    parser.add_argument('output_file_prefix', help='Prefix to use for LDSC output files.')
    parser.add_argument('--ldsc_path', required=True, help='Path to LDSC python script.')
    parser.add_argument('--bfile', required=True, help='Prefix to PLINK files from LDSC')
    parser.add_argument('--hapmap_snps', required=True, help='hapmap SNPs file to pass to LDSC')
    args = parser.parse_args()
    # Load peaks
    specific_peaks = parse_specific_peaks(open(args.specific_peaks_file))
    # Load template SNPs and annotate with categories
    template_snps = parse_template_file(gzip.open(args.template_file, 'rt'))
    template_snps_with_categories = add_categories(template_snps, specific_peaks)
    # Output file: gzipped annotation table with one indicator column.
    # NOTE(review): only SNPs returned by add_categories (i.e. those that
    # intersect a specific peak) are written; SNPs outside peaks are dropped
    # entirely rather than written with a 0 indicator -- confirm this matches
    # what the downstream LDSC run expects.
    output_file = '%s.annot.gz' % args.output_file_prefix
    output_columns = ['CHR', 'BP', 'SNP', 'CM']
    category_names = ['SPECIFIC_PEAK_INTERSECT']
    with gzip.open(output_file, 'wt') as out:
        out.write('\t'.join(output_columns + category_names) + '\n')
        for snp in template_snps_with_categories:
            output_entries = [snp[column] for column in output_columns]
            # Get category indicators (CATEGORY is the index into this list).
            category_indicator = [0]
            if 'CATEGORY' in snp:
                category_indicator[snp['CATEGORY']] = 1
            final_entries = [str(x) for x in output_entries + category_indicator]
            out.write('\t'.join(final_entries) + '\n')
    # Run LDSC on prepared file to generate model
    ldsc(args.ldsc_path, args.bfile, output_file, args.output_file_prefix, args.hapmap_snps)
|
from redis import Redis
from tc2.env.EnvType import EnvType
from tc2.log.LogFeed import LogFeed
from tc2.log.Loggable import Loggable
class AbstractRedisWorker(Loggable):
    """A class equipped with a redis client; used to perform a group of specific tasks."""

    # Redis connection shared by the worker's tasks.
    client: Redis
    # Which data environment (e.g. live/test) this worker operates in.
    env_type: EnvType

    def __init__(self, logfeed_program: LogFeed, client: Redis, env_type: EnvType):
        # Route all logs to the main LogFeed (program feed doubles as the
        # process feed for this worker).
        super().__init__(logfeed_program=logfeed_program, logfeed_process=logfeed_program)
        self.client = client
        self.env_type = env_type

    def get_prefix(self) -> str:
        """
        Returns the string which identifies the info's data environment.
        All data stored in redis must be prefixed in order to distinguish it from other environment's data.
        """
        return self.env_type.value + '_'
|
import sys
sys.path.append('../production')
import logging
log = logging.getLogger(__name__)
from pprint import pprint
from gcc_utils import cons_to_list
import dis
def to_int32(x):
    """Interpret the low 32 bits of `x` as a signed two's-complement integer."""
    unsigned = x & 0xFFFFFFFF
    sign_bit = unsigned & 0x80000000
    # Subtracting twice the sign bit maps [2^31, 2^32) onto [-2^31, 0).
    return unsigned - (sign_bit << 1)
def do_stuff():
    """Decode the hard-coded cons-encoded field and pretty-print it twice."""
    code, field = (999888777, (((0, 0), ((0, 0), ((0, 0), ((0, 0), ((0, 0), 0))))), (((0, 0), ((0, 0), ((0, 0), ((0, 0), ((0, 0), 0))))), (((0, 0), ((0, 0), ((0, 0), ((0, 0), ((0, 0), 0))))), (((0, 0), ((0, 0), ((0, 0), ((0, 0), ((0, 0), 0))))), (((0, 0), ((0, 0), ((0, 0), ((0, 0), ((0, 0), 0))))), 0))))))
    pprint([cons_to_list(line) for line in cons_to_list(field)])
    # BUG FIX: in Python 3 `map` is lazy, so pprint(map(...)) printed
    # "<map object at ...>" instead of the rows; materialize it first.
    pprint(list(map(cons_to_list, cons_to_list(field))))
def main():
    """Configure logging verbosity per module, then run the demo."""
    # log from this script at debug,
    # from `some_module` at info,
    # and from everywhere else at warning
    logging.basicConfig(level=logging.WARNING)
    log.setLevel(logging.DEBUG)
    logging.getLogger('some_module').setLevel(logging.INFO)
    log.debug('start')
    do_stuff()
    log.debug('end')
if __name__ == '__main__':
    main()
|
__all__ = ['send_command']
import logging
import my_fastnetmon.config as config
logger = logging.getLogger("log")
def send_command(rule):
    """Write `rule` as a single line to the ExaBGP control pipe.

    Returns 0 on success, 1 on any file error.
    """
    try:
        # Context manager guarantees the pipe is closed even if write() fails.
        with open(config.get('EXABGP_PIPE'), "w") as f:
            f.write(rule + "\n")
        return 0
    except (OSError, IOError) as err:
        # BUG FIX: the original called logging.error (root logger), bypassing
        # the module's named "log" logger defined above.
        logger.error('File error: %s', err)
        return 1
|
# Read two whitespace-separated integer lists from stdin and report whether
# they share any element.
l = list(map(int, input().split()))
m = list(map(int, input().split()))
# Set intersection is O(len(l) + len(m)); the original nested membership
# scan (`for i in l: if i in m`) was O(len(l) * len(m)).
if set(l) & set(m):
    print("not unique")
else:
    print("unique")
|
# this program implements a bubblesort algorithm
# in bubblesort, each pair of adjacent elements is compared and the elements are swapped if they are not in the correct order. this continues until everything is properly ordered
def bubble_sort(list):  # parameter name kept for backward compatibility, though it shadows the builtin
    """Sort the given mutable sequence in place, ascending, via bubble sort (O(n^2)).

    Each outer pass bubbles the largest remaining element to the end of the
    unsorted region; `pair` is the shrinking upper bound of that region.
    """
    for pair in range(len(list) - 1, 0, -1):
        for i in range(pair):
            if list[i] > list[i + 1]:
                # Pythonic tuple swap replaces the original temp-variable dance.
                list[i], list[i + 1] = list[i + 1], list[i]
# Example list used to exercise bubble_sort; renamed so the module no longer
# shadows the builtin `list`.
numbers = [19, 2, 31, 45, 6, 11, 121, 27]
bubble_sort(numbers)
print(numbers)
# -*- coding: utf-8 -*-
"""
Created on 2020/1/31 9:43
@author: dct
"""
from scrapy.crawler import CrawlerProcess
from scrapy.utils.project import get_project_settings
if __name__ == '__main__':
    # Build a crawler process from the project's settings.py configuration.
    process = CrawlerProcess(get_project_settings())
    process.crawl('DoubanSpider')  # Replace 'DoubanSpider' with your own spider name.
    process.start()  # Blocks until the crawl finishes.
class Header:
    """Zero-based column indices for a transaction-register export row.

    Each attribute names the position of that column in an exported row,
    so code can index rows as `row[Header.Payee]` instead of using magic
    numbers.
    """
    Account = 0
    Flag = 1
    CheckNumber = 2
    Date = 3
    Payee = 4
    Category = 5
    MasterCategory = 6
    SubCategory = 7
    Memo = 8
    Outflow = 9
    Inflow = 10
    Cleared = 11
    RunningBalance = 12
import RPi.GPIO as GPIO
class Config():
    """Static configuration for a media + DMX lighting sync installation.

    Holds the media file to play, the DMX serial device, the GPIO trigger
    setup, and a timed lighting sequence keyed to the media timeline.
    """
    MEDIA_NAME = "./video/sawmill.mov" # ./ is relative to PiMediaSync repo
    DMX_DEVICE = "/dev/ttyUSB0"
    # GPIO trigger input: BCM/board pin number and pull resistor mode.
    GPIO_VALUES = {
        'pin': 10,
        'pull_up_down': GPIO.PUD_OFF,
    }
    AUTOREPEAT=False # causes automatic start and repeat of media sequence
    # Default DMX level (full on) and fade time in seconds.
    DEFAULT_VALUE = 255
    DEFAULT_TRANSITION_TIME = 1
    CHANNELS = [25, 26, 27, 22, 23, 24, 19, 20, 21, 16, 17, 18] # order of DMX channels
    # Each step: per-channel DMX levels (parallel to CHANNELS), fade duration,
    # and the media timestamp (seconds) at which the step ends. The pattern
    # highlights one channel at 255 while the rest idle at 22.
    LIGHTING_SEQUENCE = [
        {
            'dmx_levels': [22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 0, 0],
            'dmx_transition': 1,
            'end_time': 65
        },
        {
            'dmx_levels': [255, 22, 22, 22, 22, 22, 22, 22, 22, 22, 0, 0],
            'dmx_transition': 1,
            'end_time': 98
        },
        {
            'dmx_levels': [22, 255, 22, 22, 22, 22, 22, 22, 22, 22, 0, 0],
            'dmx_transition': 1,
            'end_time': 117
        },
        {
            'dmx_levels': [22, 22, 255, 22, 22, 22, 22, 22, 22, 22, 0, 0],
            'dmx_transition': 1,
            'end_time': 137
        },
        {
            'dmx_levels': [22, 22, 22, 255, 22, 22, 22, 22, 22, 22, 0, 0],
            'dmx_transition': 1,
            'end_time': 159
        },
        {
            'dmx_levels': [22, 22, 22, 22, 255, 22, 22, 22, 22, 22, 0, 0],
            'dmx_transition': 1,
            'end_time': 189
        },
        {
            'dmx_levels': [22, 22, 22, 22, 22, 255, 22, 22, 22, 22, 0, 0],
            'dmx_transition': 1,
            'end_time': 253
        },
        {
            'dmx_levels': [22, 22, 22, 22, 22, 22, 255, 22, 22, 22, 0, 0],
            'dmx_transition': 1,
            'end_time': 287
        },
        {
            'dmx_levels': [22, 22, 22, 22, 22, 22, 22, 255, 22, 22, 0, 0],
            'dmx_transition': 1,
            'end_time': 315
        },
        {
            'dmx_levels': [22, 22, 22, 22, 22, 22, 22, 22, 255, 22, 0, 0],
            'dmx_transition': 1,
            'end_time': 327
        },
        {
            'dmx_levels': [22, 22, 22, 22, 22, 22, 22, 22, 22, 255, 0, 0],
            'dmx_transition': 1,
            'end_time': 357
        },
        {
            # end: everything off
            'dmx_levels': [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
            'dmx_transition': 1,
            'end_time': 372
        }
    ]
|
# Generated by Django 2.0.1 on 2018-03-31 17:31
from django.db import migrations
class Migration(migrations.Migration):
    """Removes the `img_url` field from the ActivityLog model."""

    dependencies = [
        ('ActivityLog', '0005_auto_20180401_0149'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='activitylog',
            name='img_url',
        ),
    ]
|
# -*- coding: utf-8 -*-
###################################################################################
# scrapy configurations
BOT_NAME = 'oldHouse'
SPIDER_MODULES = ['oldHouse.spiders']
NEWSPIDER_MODULE = 'oldHouse.spiders'
ROBOTSTXT_OBEY = False
RETRY_TIMES = 8
# scrapy-redis: persist the request queue across runs instead of flushing it.
SCHEDULER_PERSIST = True
SCHEDULER_FLUSH_ON_START = False
SCHEDULER = 'scrapy_redis.scheduler.Scheduler'
DUPEFILTER_CLASS = 'scrapy_redis.dupefilter.RFPDupeFilter'
###################################################################################
# to improve the performance settings
# DOWNLOAD_DELAY = 0.5
CONCURRENT_REQUESTS = 32
DOWNLOAD_TIMEOUT = 25
###################################################################################
# db configurations
MONGO_URL = 'localhost'
MONGO_DATABASE = 'old58Houser'
# NOTE: placeholder credentials -- replace USER/PASSWORD/IP/PORT before deploying.
REDIS_URL = 'redis://USER:PASSWORD@IP:PORT'
###################################################################################
# widget whether test each proxy before crawling.
# if True, it cost much time when starting project, but more fluently when working,
# if False, it cost less time when starting project, but may less efficient when working.
# set True recommended
TEST_PROXY = True
TEST_URL = 'https://bj.58.com/ershoufang/'
PROXY_JSON_FILE = 'oldHouse/service/proxy.json'
###################################################################################
# log configurations
# LOG_FILE = "mySpider.log"
# LOG_LEVEL = "DEBUG"
###################################################################################
# pipeline
ITEM_PIPELINES = {
    'oldHouse.pipelines.MongoPipeline': 300,
}
###################################################################################
# middleware (lower number = closer to the engine; None disables a middleware)
DOWNLOADER_MIDDLEWARES = {
    'oldHouse.middlewares.OldhouseDownloaderMiddleware': 543,
    'oldHouse.middlewares.MyProxyMiddleWare': 542,
    'scrapy.downloadermiddlewares.useragent.UserAgentMiddleware': None,
    'oldHouse.middlewares.MyUserAgentMiddleWare': 541,
    # 'scrapy.downloadermiddlewares.retry.RetryMiddleware': None,
    # 'oldHouse.middlewares.MyRetryMiddleware': 551,
    'scrapy.downloadermiddlewares.redirect.RedirectMiddleware': None,
    'oldHouse.middlewares.MyRedirectMiddleware': 601,
}
|
#
# Copyright (c) 2010-2014, MIT Probabilistic Computing Project
#
# Lead Developers: Jay Baxter and Dan Lovell
# Authors: Jay Baxter, Dan Lovell, Baxter Eaves, Vikash Mansinghka
# Research Leads: Vikash Mansinghka, Patrick Shafto
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import sys
import pytest
from bayesdb.client import Client
def teardown_function(function):
    """pytest hook: delete any .png plot files the test left in the CWD.

    BUG FIX: the original tested `fname[-4] == '.png'`, comparing a single
    character against a 4-character string -- always False, so no file was
    ever removed.
    """
    for fname in os.listdir('.'):
        if fname.endswith('.png'):
            os.remove(fname)
def run_example(name):
    """Run the named example's .bql analysis script and fail on any error result.

    Raises Exception with the offending result if any returned row carries an
    'Error'/'error' marker.
    """
    # Default upgrade_key_column is None, to let the user choose, but need to avoid
    # user input during testing, so default will be to create a new key column.
    client = Client(testing=True)
    file_path = os.path.join('../../examples/%s/%s_analysis.bql' % (name, name))
    # NOTE(review): the file handle passed to client() is never closed here.
    results = client(open(file_path, 'r'), yes=True, pretty=False, plots=False, key_column=0)
    for r in results:
        if 'Error' in r or ('error' in r and r['error']):
            raise Exception(str(r))
# One smoke test per bundled example dataset; each simply runs the example's
# analysis script end-to-end via run_example.
def test_dha_example():
    run_example('dha')
def test_gss_example():
    run_example('gss')
def test_chicago_small_example():
    run_example('chicago_small')
def test_flights_example():
    run_example('flights')
def test_kiva_example():
    run_example('kiva')
def test_employees_example():
    run_example('employees')
|
from django.conf.urls import url
from django.urls import include, path
from .views import PostAPIView, PostRudView
urlpatterns = [
    # Create / list endpoint for posts.
    path('post/', PostAPIView.as_view(), name='post-article-create'),
    # Retrieve / update / delete a single post by primary key.
    path('post/<int:pk>/', PostRudView.as_view(), name='post-article-rud'),
]
|
import urllib.request, json
from .models import Article, Quote
def configure_request(app):
    """Cache API endpoints/keys from the Flask app config in module globals."""
    global quotes_url, pixelbay_api_key, pixelbay_api_url
    quotes_url = app.config['QUOTES_URL']
    pixelbay_api_key = app.config['PIXELBAY_API_KEY']
    pixelbay_api_url = app.config['PIXELBAY_API_URL']
def get_quote_of_the_day():
    """Fetch quotes from the quotes API and return one as a dict with an image URL.

    NOTE(review): the loop overwrites quoteOBJ on every iteration and fetches
    an image per quote, so only the LAST quote in the response survives --
    presumably the API returns a single quote; confirm, otherwise this wastes
    one image-API call per extra quote.
    """
    with urllib.request.urlopen(quotes_url) as url:
        api_response = url.read()
        quote_json = json.loads(api_response)
        quote_data = quote_json['quotes']
        quoteOBJ = dict()
        for item in quote_data:
            quoteOBJ['text'] = item.get('text')
            quoteOBJ['author'] = item.get('author')
            quoteOBJ['category'] = item.get('tag')
            # Look up a matching image for the quote's tag/category.
            image_res = get_pexels_image(item.get('tag'))
            quoteOBJ['image_url'] = image_res
            print(image_res)
        return quoteOBJ
def get_pexels_image(category):
    """Return the first image URL from the Pixabay API for the given category.

    NOTE(review): despite the name, this queries the pixelbay (Pixabay) URL
    configured in configure_request; raises IndexError if no hits are found.
    """
    request_url = pixelbay_api_url.format(pixelbay_api_key, category)
    with urllib.request.urlopen(request_url) as url:
        api_response = url.read()
        photo_obj = json.loads(api_response)
        return photo_obj['hits'][0]['webformatURL']
#!/usr/bin/env python
# coding=utf-8
import re
from pymongo import MongoClient
from basic import BaseHandler
class HomeHandler(BaseHandler):
    """Handler for the site root: renders the home page template."""
    def get(self):
        # Render home.html with a static page title.
        self.render('home.html',title='HomePage')
|
from fht.reader.ht_reader import *
from fht.helpers.fht import *
from fht.helpers.compare import *
from fht.helpers.average import *
class Signature:
    """Computes and compares header/trailer file-type signatures (FHT)."""

    def __init__(self, file, offset, extension):
        self.file = file                  # path of the file to fingerprint
        self.offset = offset              # number of bytes read from each end
        self.file_extension = extension   # extension label for this sample
        self.signature = None             # FHTAnalyzer, set by generate_signature()
        self.last_compare = {}            # results of the most recent compare_to()

    def generate_signature(self):
        """Read the file's header/trailer bytes and compute its FHT signature; returns self."""
        reader = HTFileReader(self.file, self.offset)
        self.signature = FHTAnalyzer(self.offset)
        reader.read(self.signature.compute)
        return self

    def get_signature(self):
        """Return the computed header/trailer signatures as a dict (requires generate_signature first)."""
        return {
            "header_signature": self.signature.signature()[0],
            "trailer_signature": self.signature.signature()[1]
        }

    # Correlates this object's signature with the signature passed as a parameter,
    # producing the correlation matrix and the correlation's precision percentage.
    # The two signatures are then accumulated to produce the average signature of
    # the correlation, used to compare against input files.
    def compare_to(self, signature_to_compare, extension_to_compare):
        """Populate self.last_compare with assurance %, averaged signature and correlation matrix."""
        signature_to_backend = (self.get_signature()["header_signature"], self.get_signature()["trailer_signature"])
        compare = CompareFHT(signature_to_backend, signature_to_compare)
        average = FHTAverage(signature_to_backend, 1)
        self.last_compare['assurance'] = compare.assuranceLevel() * 100
        self.last_compare['compared_extension'] = extension_to_compare
        self.last_compare['final_signature'] = {
            "header_signature": average.accumulate(signature_to_compare).fingerprint()[0],
            "trailer_signature": average.accumulate(signature_to_compare).fingerprint()[1]
        }
        self.last_compare['correlation_matrix'] = {
            'header_correlation': compare.correlate()[0],
            'trailer_correlation': compare.correlate()[1]
        }
|
from typing import List
class Solution:
    """LeetCode #1 Two Sum: return indices of the two numbers summing to target."""

    # BUG FIX: both variants were named `twoSum`, so this O(n^2) version was
    # silently shadowed dead code. It is kept under a distinct name.
    def twoSum_brute_force(self, nums: List[int], target: int) -> List[int]:
        """Quadratic scan: for each i, search the remainder for the complement.

        Returns [earlier_index, later_index].
        """
        j = 0
        for i in range(len(nums)):
            num = target - nums[i]
            if num in nums[i+1:]:
                j = nums[i+1:].index(num) + i + 1
                break
        return [i, j]

    def twoSum(self, nums: List[int], target: int) -> List[int]:
        """One-pass hash map, O(n): returns [later_index, earlier_index]."""
        hashmap = {}
        for i, num in enumerate(nums):
            # `is not None` (rather than truthiness) keeps index 0 valid.
            if hashmap.get(target - num) is not None:
                return [i, hashmap.get(target - num)]
            hashmap[num] = i
|
from __future__ import print_function, division
import numpy as np
N = 2001          # number of tabulated points in the final table
eps = 0.1         # margin kept away from r=0 and r=2*rc (singular endpoints)
rc = 2.**(1./6.)  # location of the LJ minimum; pivot of the mirrored potential
r = np.linspace(0, 2*rc, N+2)[1:-1]  # interior grid, endpoints dropped
support = (r>=rc)  # mask selecting the mirrored half of the domain
def LJ(r):
    """Truncated-and-shifted Lennard-Jones potential: 4*s*(s-1)+1 with s = r^-6."""
    inv_r6 = r**(-6)
    return 4*inv_r6*(inv_r6-1)+1
def F_LJ(r):
    """Lennard-Jones force, -dLJ/dr = 24*(2 r^-13 - r^-7)."""
    return 24*(2*r**(-13)-r**(-7))
def FPRIME_LJ(r):
    """Derivative of F_LJ: -24*(26 r^-14 - 7 r^-8)."""
    return -24*(26*r**(-14)-7*r**(-8))
def mirror_LJ(r):
    """LJ potential reflected about r = rc, i.e. evaluated at 2*rc - r."""
    inv_r6 = (2*rc - r)**(-6)
    return 4*inv_r6*(inv_r6-1)+1
def mirror_F_LJ(r):
    """Force of the mirrored potential (sign flipped by the reflection)."""
    reflected = 2*rc - r
    return -24*(2*reflected**(-13)-reflected**(-7))
def mirror_FPRIME_LJ(r):
    """Derivative of mirror_F_LJ (sign flips twice under the reflection)."""
    reflected = 2*rc - r
    return 24*(26*reflected**(-14)-7*reflected**(-8))
# Mirrored potential is only defined on r >= rc; elsewhere V stays 0.
V = np.zeros_like(r)
V[support] = mirror_LJ(r[support])
do_plot = False
if do_plot:
    import matplotlib.pyplot as plt
    plt.plot(r, LJ(r))
    plt.plot(r, F_LJ(r), ls='--')
    plt.plot(r, V)
    plt.plot(r[support], mirror_F_LJ(r[support]), ls='--')
    plt.ylim(-2,5)
    plt.show()
# Re-grid to the table's actual range [eps, 2*rc-eps] and emit a LAMMPS-style
# tabulated potential: index, r, energy, force per line.
r = np.linspace(eps, 2*rc-eps, N)
print("# rlo={rlo}, rhi={rhi}".format(rlo=rc, rhi=2*rc-eps))
# NOTE(review): rlo/rhi are passed to format() below but the template does
# not reference them -- harmless, but likely a leftover.
print("""# Mirror Lennard-Jones potential
MIRROR_LJ
N {N} FP {fplo} {fphi}
""".format(N=N, rlo=eps, rhi=2*rc-eps, fplo=FPRIME_LJ(eps), fphi=mirror_FPRIME_LJ(2*rc-eps)))
for idx, x in enumerate(r):
    if x<rc:
        print("%i %f %f %f" % (idx+1, x, LJ(x), F_LJ(x)))
    else:
        print("%i %f %f %f" % (idx+1, x, mirror_LJ(x), mirror_F_LJ(x)))
|
from django.db import models
from django.urls import reverse
# Create your models here.
class faculty(models.Model):
    # Faculty description (department the courses belong to).
    name = models.CharField(max_length=200, help_text="Введите наименование факультета")
    def __str__(self):
        return self.name
class course(models.Model):
    # Description of an academic course.
    title = models.CharField(max_length=200)
    # NOTE(review): lazy reference 'Teacher' resolves to the lowercase
    # `teacher` model below (Django model lookups are case-insensitive),
    # but matching the case would be clearer.
    teacher = models.ForeignKey('Teacher', on_delete=models.SET_NULL, null=True)
    summary = models.TextField(max_length=1000, help_text="Введите описание учебного курса")
    faculty = models.ManyToManyField(faculty, help_text="Выберете факультет")
    def __str__(self):
        return self.title
    def get_absolute_url(self):
        # URL name is declared in Russian in the project's urlconf.
        return reverse('данные о курсе', args=[str(self.id)])
    def display_faculty(self):
        # Admin helper: comma-joined names of the first three faculties.
        return ','.join([ faculty.name for faculty in self.faculty.all() [:3] ])
    display_faculty.short_description = 'Faculty'
class teacher(models.Model):
    # Teacher's personal data.
    first_name = models.CharField(max_length=100)
    last_name = models.CharField(max_length=100)
    date_of_birth = models.DateField(null=True, blank=True)
    def get_absolute_url(self):
        # URL name is declared in Russian in the project's urlconf.
        return reverse('Данные о преподавателе', args=[str(self.id)])
    def __str__(self):
        return '%s, %s' % (self.last_name, self.first_name)
|
import enum
import typing
from pathlib import Path
import rivals_workshop_assistant.info_files as info_files
from rivals_workshop_assistant.paths import ASSISTANT_FOLDER
if typing.TYPE_CHECKING:
from rivals_workshop_assistant.aseprite_handling import TagColor
FILENAME = "assistant_config.yaml"
PATH = ASSISTANT_FOLDER / FILENAME
ASEPRITE_PATH_FIELD = "aseprite_path"
def read_project_config(root_dir: Path) -> dict:
    """Controller: read the assistant config file from the project root."""
    return info_files.read(root_dir / PATH)
def make_default_override(exe_dir: Path, content: str):
    """Write a config file next to the exe that overrides the default config."""
    info_files.create_file(path=(exe_dir / FILENAME), content=content)
def read_default_override(exe_dir: Path) -> dict:
    """Read the config from the exe directory, used to overwrite the default config"""
    return info_files.read(exe_dir / FILENAME)
def overwrite_default_config(
    initial_default_config: dict, user_default_config_override: dict
) -> dict:
    """Return a copy of the default config with the user's overrides applied on top."""
    merged = dict(initial_default_config)
    merged.update(user_default_config_override)
    return merged
class UpdateLevel(enum.Enum):
    """How aggressive automatic library updates may be (semver-style)."""
    MAJOR = "major"
    MINOR = "minor"
    PATCH = "patch"
    NONE = "none"
LIBRARY_UPDATE_LEVEL_FIELD = "library_update_level"
LIBRARY_UPDATE_LEVEL_DEFAULT = UpdateLevel.PATCH
def get_library_update_level(config: dict) -> UpdateLevel:
    """Return the configured update level, defaulting to PATCH.

    NOTE(review): the default is an UpdateLevel member while a configured
    value is a string -- UpdateLevel() accepts both (value lookup vs identity).
    """
    return UpdateLevel(
        config.get(LIBRARY_UPDATE_LEVEL_FIELD, LIBRARY_UPDATE_LEVEL_DEFAULT)
    )
ASSISTANT_SELF_UPDATE_FIELD = "assistant_should_self_update"
ASSISTANT_SELF_UPDATE_DEFAULT = True
def get_assistant_self_update(config: dict) -> bool:
    """Return whether the assistant may auto-update itself (default True)."""
    return config.get(ASSISTANT_SELF_UPDATE_FIELD, ASSISTANT_SELF_UPDATE_DEFAULT)
ANIM_TAG_COLOR_FIELD = "anim_tag_color"
ANIM_TAG_COLOR_DEFAULT = "blue"
WINDOW_TAG_COLOR_FIELD = "window_tag_color"
WINDOW_TAG_COLOR_DEFAULT = "red"
def get_anim_tag_color(config: dict) -> "TagColor":
    """Return the Aseprite tag color that marks animations (default 'blue')."""
    return config.get(ANIM_TAG_COLOR_FIELD, ANIM_TAG_COLOR_DEFAULT)
def get_window_tag_color(config: dict) -> "TagColor":
    """Return the Aseprite tag color that marks attack windows (default 'red')."""
    return config.get(WINDOW_TAG_COLOR_FIELD, WINDOW_TAG_COLOR_DEFAULT)
WARNINGS_FIELD = "warnings"
WARNING_DESYNC_OBJECT_VAR_SET_IN_DRAW_SCRIPT_VALUE = (
"desync_object_var_set_in_draw_script"
)
WARNING_DESYNC_UNSAFE_CAMERA_READ_VALUE = "desync_unsafe_camera_read"
WARNING_CHECK_WINDOW_TIMER_WITHOUT_CHECK_HITPAUSE = (
"check_window_timer_without_check_hitpause"
)
WARNING_RECURSIVE_SET_ATTACK = "recursive_set_attack"
def get_initial_default_config() -> dict:
    """Parse the module-level DEFAULT_CONFIG YAML template into a dict.

    DEFAULT_CONFIG is defined later in this module; that is fine because it
    is resolved at call time, not at definition time.
    """
    return info_files.YAML_HANDLER.load(DEFAULT_CONFIG)
def override_default_config(default_config, user_default_config_override):
    # Placeholder: merging is currently done by overwrite_default_config above.
    raise NotImplementedError
GENERATE_HURTBOXES_FIELD = "generate_hurtboxes"
GENERATE_HURTBOXES_DEFAULT = True
def get_hurtboxes_enabled(config: dict) -> bool:
    """Return whether hurtbox generation is enabled (default True)."""
    return config.get(GENERATE_HURTBOXES_FIELD, GENERATE_HURTBOXES_DEFAULT)
DEFAULT_CONFIG = f"""\
# Format is <key name>: <value> (with a space after the : )
# E.g.
# update_level: patch
{ASEPRITE_PATH_FIELD}: # FILL THIS IN TO USE ASEPRITE
# Point this to your Aseprite.exe absolute path, for example:
# aseprite_path: C:/Program Files/Aseprite/aseprite.exe
# This is needed for the assistant to automatically export your animations to spritesheets.
# If you use Steam for Aseprite, you can find the path with:
# The aseprite page of your library, The gear icon at the top right,
# Manage, Browse Local Files, Copy the path of Aseprite.exe to the config.
#
# Aseprite Tag Color Configs
# Legal values are:
# black, red, orange, yellow, green, blue, purple, gray
{ANIM_TAG_COLOR_FIELD}: {ANIM_TAG_COLOR_DEFAULT}
# The color of Aseprite tag representing an animation.
# If you keep multiple aseprite animations in a file, put each in a tag with this
# color, and the assistant will export them under that tag's name.
{WINDOW_TAG_COLOR_FIELD}: {WINDOW_TAG_COLOR_DEFAULT}
# The color of Aseprite tag representing an attack window.
# If a tag of this color is found, it will be used to add animation meta-data to the
# bottom of the attack's script.
{GENERATE_HURTBOXES_FIELD}: {GENERATE_HURTBOXES_DEFAULT}
# If the assistant should automatically generate hurtboxes from your anim files.
# See TODO PUT A LINK TO THE ASSISTANT. TELL QAZZQUIMBY ON DISCORD IF HE FORGETS TO REPLACE THIS!
{LIBRARY_UPDATE_LEVEL_FIELD}: {LIBRARY_UPDATE_LEVEL_DEFAULT.value}
# What kind of library updates to allow.
# This only affects the functions available to inject, not assistant behavior.
# {UpdateLevel.MAJOR.value} = All updates are allowed, even if they may
# break existing code.
# {UpdateLevel.MINOR.value} = Don't allow breaking changes to existing
# functions, but do allow new functions. Could cause name collisions.
# {UpdateLevel.PATCH.value} = Only allow changes to existing functions
# that fix bugs or can't break current functionality.
# {UpdateLevel.NONE.value} = No updates.
{ASSISTANT_SELF_UPDATE_FIELD}: {ASSISTANT_SELF_UPDATE_DEFAULT}
# If the assistant should automatically receive behavior updates.
#
{WARNINGS_FIELD}:
- {WARNING_DESYNC_OBJECT_VAR_SET_IN_DRAW_SCRIPT_VALUE}
- {WARNING_DESYNC_UNSAFE_CAMERA_READ_VALUE}
- {WARNING_CHECK_WINDOW_TIMER_WITHOUT_CHECK_HITPAUSE}
- {WARNING_RECURSIVE_SET_ATTACK}
# Comment out any warnings you want to disable with `#`.
# Learn more about warnings at https://rivalslib.com/assistant/warnings/
"""
def get_aseprite_path(assistant_config: dict) -> typing.Optional[Path]:
    """Return the configured Aseprite executable path, or None if unset/empty."""
    raw_value = assistant_config.get(ASEPRITE_PATH_FIELD, None)
    return Path(raw_value) if raw_value else None
|
import sys
# Counts, for each position modulo 13, how many newline characters (ord 10)
# occur at that position in the input file, then prints the 13 counts.
# NOTE(review): maxFreq, keyLengthFreq, sortedkeyLengthFreq, distance, start,
# end and keyLength are initialized but never used in this excerpt --
# presumably leftovers from a Kasiski/index-of-coincidence analysis.
f = open(sys.argv[1],"r") #Read in the file given as the first command line argument
contents = f.read() #Read the file contents into a variable
length = len(contents) #Find the length of the cipher text block
maxFreq = 0 #What is the greatest number of occurances
keyLengthFreq = [0]*length #Array to hold the number of coincidences for different key lengths, indexed
sortedkeyLengthFreq = [0]*5 #Array to hold the number of coincidences for different key lengths, sorted
distance = 0 #Distance counter between characters
start = 0 #Boolean for whether the count for distance measurement has started yet
end = 0 #Boolean for whether the current distance measurement has been concluded
keyLength = 0 #Most probable key length
count = [0]*13      # newline tally per position mod 13
charCounter = 0     # current position mod 13
for i in range(0,length):
    if (charCounter == 13):
        charCounter = 0
    if(ord(contents[i]) == 10):
        count[charCounter]+=1
    charCounter+=1
print(count)
|
import sys
import importlib
class _ObjInfoParents:
    """Mixin: logic for discovering and attaching an ObjInfo's parent chain."""

    def spawn_parents(self):
        """ Attempt to generate parents after creation if missing.
            :param generallibrary.ObjInfo self: """
        if self.get_parent(spawn=False) is None:
            module_name = getattr(self.origin, "__module__", None)
            module = sys.modules.get(module_name)
            qualname = getattr(self.origin, "__qualname__", "")
            split_qualname = qualname.split(".")
            objInfo = None
            if self.is_module():
                # A package chain: import each ancestor package and link them.
                split_name = self.origin.__name__.split(".")
                for i in range(len(split_name) - 1):
                    parent_module = importlib.import_module(name=".".join(split_name[0:i + 1]))
                    objInfo = self.ObjInfo(obj=parent_module, parent=objInfo)
            else:
                # To make the parent of a bound method an instance instead of class
                if dunder_self := getattr(self.obj, "__self__", None):
                    objInfo = self.ObjInfo(obj=dunder_self)
                # Start with module and iterate downwards excluding last name in qualname, which we connect manually to self
                elif module and qualname and "<locals>" not in split_qualname:
                    objInfo = self.ObjInfo(obj=module)
                    for name in split_qualname[:-1:]:
                        objInfo = self.ObjInfo(obj=getattr(objInfo.obj, name), parent=objInfo, name=name)
                # Find module which contains self.obj
                else:
                    first_module, name = next(self._find_modules(), (None, None))
                    if first_module:
                        # self.name = name  # No idea what this is, but it was strangely breaking test_origins
                        objInfo = self.ObjInfo(obj=first_module)
            if objInfo:
                self.set_parent(objInfo)

    def _find_modules(self):
        """ Get a list of modules that has self.obj as a direct attribute.
            :param generallibrary.ObjInfo self: """
        for module in sys.modules.values():
            for key, value in module.__dict__.items():
                if self.identifier() == self.identifier(obj=value):
                    if self.obj != value:
                        # Identifier collision without value equality: log and
                        # keep going rather than hard-failing the scan.
                        print("Values don't match even though their identifier does.", self.obj, value, self.identifier(), self.identifier(obj=value))
                        # raise AttributeError("Values don't match even though their identifier does.")
                    yield module, key

    @classmethod
    def check_if_parent_eligible(cls, parent_obj, child_obj, name):
        """ Check relationship eligibility of parent to child.
            :param generallibrary.ObjInfo cls:
            :param parent_obj:
            :param child_obj:
            :param name: """
        # Compare origins so wrappers (e.g. decorated functions) still match.
        parent_attr_obj = getattr(parent_obj, name, None)
        parent_attr_obj = cls.get_origin(parent_attr_obj)
        child_obj = cls.get_origin(child_obj)
        if parent_attr_obj is child_obj:
            return True
        return False
|
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
# Minimal pandas smoke test: build a Series containing a NaN and print it.
msg = "glhf"
s = pd.Series([1,3,5,np.nan,6,8])
print(msg)
print(s)
from django.contrib import admin
from .models import accident
from .models import acmodels
# decorator
# decorator registration of the accident model with a customized admin.
@admin.register(accident)
class accident_Admin(admin.ModelAdmin):
    """Admin configuration for accident records: list columns, filters, search and date drill-down."""
    list_display = ('title','date','content','acmodels')
    list_filter = ('date','acmodels')
    search_fields = ('title','content')
    date_hierarchy = 'date'
    ordering = ('date','acmodels')
# acmodels uses the default ModelAdmin.
admin.site.register(acmodels)
# Register your models here.
|
# coding: utf-8
import json
from decimal import Decimal
import requests
from django.http import JsonResponse
from django.contrib.auth.decorators import login_required, permission_required
from django.shortcuts import render_to_response
from math import ceil
from common.views import add_common_var
from common.views import report_render
from phone_fee.models import CpPhoneFeeProduct, PtDaojiaOrderGuarantee
from pt_card.click_action import click_action_url, filter_gids
from pt_card.models import PtCard, PtCardScope, PtCardGoods, PtEntityCard
from pt_card.views.pt_card_pub import PtConst, Paginator, HttpResponse, get_ptcard_info, update_goods_id
from wallet.views import vip_pub
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.contrib.auth.hashers import make_password
from django.template import RequestContext
from django.db import connection, transaction
import time
def get_putao_card_list(request):
    """
    Paginated list of Putao ("grape") cards.
    :param request: GET params per_page (default 30) and cur_page (default 1)
    :return: dict with 'code' ('0' ok / '-1' error), 'page' count and 'data' rows
    """
    try:
        data = {}
        per_page = int(request.GET.get("per_page", 30))
        cur_page = int(request.GET.get("cur_page", 1))
        start_site = (cur_page - 1) * per_page
        end_site = start_site + per_page
        r_data = PtCard.objects.all()[start_site:end_site]
        all_page = int(ceil(PtCard.objects.count() / float(per_page)))
        if cur_page > all_page or cur_page <= 0:
            return {'code': '-1', 'msg': '页数太大或太小'}
        r_list = []
        for obj in r_data:
            # service_length is stored in minutes; 0 means unlimited.
            if obj.service_length == 0:
                service_len = u'不限'
            else:
                service_len = str(obj.service_length) + '分钟' if obj.service_length < 60 else str(
                    round(obj.service_length / 60.0, 2)) + '小时'
            r_list.append(dict(
                id=obj.id,
                icon=obj.icon,
                remark=obj.remark if obj.remark else '',
                name=obj.name,
                # retail_price is stored in cents; convert to yuan.
                retail_price=round(obj.retail_price / 100.0, 2),
                usable_times=obj.usable_times,
                service_length=service_len,
                instruction=obj.instruction,
                expire_dates=obj.expire_dates if obj.expire_dates else '不限',
            ))
        data['data'] = r_list
        data['page'] = all_page
        data['code'] = '0'
        return data
    except Exception as e:
        # NOTE(review): e.message is Python 2 only (this module imports
        # django.core.urlresolvers, i.e. a Py2-era Django) -- would need
        # str(e) under Python 3.
        return {'code': '-1', 'msg': e.message}
def create_putao_card(request):
    """
    葡萄卡商品创建 — create a putao-card product from a JSON request body.

    Creates the PtCard row, attaches a click-action URL, then writes one
    PtCardScope row per entry in ``all_scope``. Returns a dict with code
    '0' on success or '-1' with an error message.
    """
    data = {}
    try:
        request_data = json.loads(request.body) if request.body else ''
        if request_data:
            icon = request_data.get('putaoDitu', '')
            icon_inactive = request_data.get('icon_inactive', '')
            name = request_data.get('puName', '')
            remark = request_data.get('remark', '')
            retail_price = request_data.get('retail_price', '')
            usable_times = request_data.get('serviceCount', '')
            service_length = request_data.get('serviceTime', '')
            timeType = request_data.get('timeType', '')
            cancel_minutes = float(str(request_data.get('timeLimit', '')))
            instruction = request_data.get('description', '')
            expire_dates = request_data.get('validity', '')
            all_scope = request_data.get('all_scope')
            # Normalize service length to whole minutes; 0 means unlimited.
            service_length = None if len(service_length) == 0 else float(str(service_length))
            if service_length is not None:
                service_length = int(round(service_length * 60)) if timeType == 'hour' else int(round(service_length))
            else:
                service_length = 0
            if all_scope is None:
                return {"msg": u"无服务范围", "code": '0'}
            pt = PtCard.objects.create(
                name=name,
                icon=icon,
                icon_inactive=icon_inactive,
                remark=remark,
                retail_price=int(round(float(str(retail_price)) * 100)),  # 单位分 (cents)
                usable_times=int(usable_times),
                service_length=service_length,
                cancel_minutes=int(round(cancel_minutes * 60)),  # hours -> minutes
                instruction=instruction,
                expire_dates=expire_dates if expire_dates else 365,
                is_app_sale=0,
            )
            try:
                click_url, gids = click_action_url(all_scope, pt.id)
                pt.click_action = click_url
                pt.save()
            except Exception as err:
                # str(err): Exception.message does not exist on Python 3.
                return {'msg': 'action:' + str(err), 'code': '-1'}
            # One scope row per requested positive/negative filter set.
            for i in all_scope:
                str_gid, str_sku = filter_gids(i['gids'], 1)
                str_gid_x, str_sku_x = filter_gids(i['gids_x'], 1)
                PtCardScope.objects.create(
                    card_id=pt.id,
                    positive_second_category_id=i['goods_cat'],
                    reverse_second_category_id=i['goods_cat_x'],
                    positive_category_id=i['sanji'],
                    reverse_category_id=i['sanji_x'],
                    positive_cpid=i['cps'],
                    reverse_cpid=i['cps_x'],
                    positive_gid=str_gid,
                    reverse_gid=str_gid_x,
                    positive_skuid=str_sku,
                    reverse_skuid=str_sku_x,
                )
            return {"msg": u"添加成功", "code": '0'}
        data = {"msg": u"缺少参数", "code": '-1'}
    except Exception as err:
        data = {"msg": str(err), "code": '-1'}
    return data
def format_sku(str_id, type):
    """
    Prefix every comma-separated id in *str_id*.

    type=0 prefixes each id with 's' (sku ids); any other value
    prefixes with 'g' (goods ids). Empty/None input yields ''.
    """
    if not str_id:
        return ''
    prefix = 's' if type == 0 else 'g'
    return ','.join(prefix + part for part in str_id.split(','))
def get_one_putao(obj):
    """
    Build the detail dict for a single putao card *obj* (a PtCard row),
    including its PtCardScope rows formatted for the edit form.
    """
    data = {}
    ptcardscope = PtCardScope.objects.filter(card_id=obj.id)
    all_scope = []
    for i in ptcardscope:
        # Re-prefix stored raw ids: 'g' for goods ids, 's' for sku ids.
        gids = format_sku(i.positive_gid, 1) if i.positive_gid is not None else ''
        gids_x = format_sku(i.reverse_gid, 1) if i.reverse_gid is not None else ''
        sku = format_sku(i.positive_skuid, 0) if i.positive_skuid is not None else ''
        sku_x = format_sku(i.reverse_skuid, 0) if i.reverse_skuid is not None else ''
        # NOTE(review): when exactly one of the two parts is empty this
        # still joins with ',', leaving a dangling comma — presumably
        # tolerated by the form consumer; confirm before changing.
        gid_format = gids + ',' + sku if gids or sku else gids + sku
        gid_x_format = gids_x + ',' + sku_x if gids_x or sku_x else gids_x + sku_x
        all_scope.append(
            dict(
                goods_cat=i.positive_second_category_id if i.positive_second_category_id is not None else '',
                goods_cat_x=i.reverse_second_category_id if i.reverse_second_category_id is not None else '',
                sanji=i.positive_category_id if i.positive_category_id is not None else '',
                sanji_x=i.reverse_category_id if i.reverse_category_id is not None else '',
                cps=i.positive_cpid if i.positive_cpid is not None else '',
                cps_x=i.reverse_cpid if i.reverse_cpid is not None else '',
                gids=gid_format,
                gids_x=gid_x_format,
            )
        )
    # service_length is stored in minutes; present hours past 60.
    if obj.service_length < 60:
        timeType = 'minute'
        service_length = obj.service_length
    else:
        timeType = 'hour'
        service_length = round(obj.service_length / 60.0, 2)
    data['data'] = dict(
        putaoDitu=obj.icon,
        icon_inactive=obj.icon_inactive,
        puName=obj.name,
        remark=obj.remark,
        retail_price=round(obj.retail_price / 100.0, 2),  # stored in cents
        serviceCount=obj.usable_times,
        serviceTime=service_length if service_length != 0 else '',  # 0 = unlimited
        timeType=timeType,
        timeLimit=round(obj.cancel_minutes / 60.0, 2),  # minutes -> hours
        description=obj.instruction,
        validity=obj.expire_dates if obj.expire_dates else 365,
        # is_sale=True if obj.is_app_sale == 1 else False,
        all_scope=all_scope,
    )
    data['code'] = '0'
    return data
def update_putao(pk, request):
    """
    Update the putao card with primary key *pk* from a JSON request body,
    rebuild its scope rows, then push the change to the goods service and
    to any linked entity cards.

    Note: retail_price and usable_times are intentionally NOT updatable
    here (see the commented-out reads below).
    """
    data = {}
    try:
        ptcard = PtCard.objects.filter(id=pk)
        request_data = json.loads(request.body) if request.body else ''
        if request_data:
            icon = request_data.get('putaoDitu', '')
            icon_inactive = request_data.get('icon_inactive', '')
            name = request_data.get('puName', '')
            remark = request_data.get('remark', '')
            # retail_price = request_data.get('retail_price', '')
            # usable_times = request_data.get('serviceCount', '')
            service_length = request_data.get('serviceTime', '')
            timeType = request_data.get('timeType', '')
            cancel_minutes = float(str(request_data.get('timeLimit', '')))
            instruction = request_data.get('description', '')
            expire_dates = request_data.get('validity', '')
            # is_app_sale = 1 if request_data.get('is_sale') else 0
            all_scope = request_data.get('all_scope')
            # Normalize service length to whole minutes; 0 means unlimited.
            service_length = None if len(service_length) == 0 else float(str(service_length))
            if service_length is not None:
                service_length = int(round(service_length * 60)) if timeType == 'hour' else int(round(service_length))
            else:
                service_length = 0
            if all_scope is None:
                return {"msg": u"无服务范围", "code": '0'}
            ptcard.update(
                name=name,
                icon=icon,
                icon_inactive=icon_inactive,
                remark=remark,
                service_length=service_length,
                cancel_minutes=cancel_minutes * 60,  # hours -> minutes
                instruction=instruction,
                expire_dates=expire_dates if expire_dates else 365,
                # is_app_sale=is_app_sale,
            )
            try:
                # Refresh the click-action URL for the new scope set.
                ptcard_get = PtCard.objects.get(id=pk)
                click_url, gids = click_action_url(all_scope, pk)
                ptcard_get.click_action = click_url
                ptcard_get.save()
            except Exception as err:
                return {'msg': 'action:' + err.message, 'code': '-1'}
            # Replace all scope rows wholesale.
            PtCardScope.objects.filter(card_id=pk).delete()
            for i in all_scope:
                str_gid, str_sku = filter_gids(i['gids'], 1)
                str_gid_x, str_sku_x = filter_gids(i['gids_x'], 1)
                # NOTE(review): the .filter(card_id=pk) before .create() is
                # redundant — create() ignores the queryset filter.
                PtCardScope.objects.filter(card_id=pk).create(
                    card_id=pk,
                    positive_second_category_id=i['goods_cat'],
                    reverse_second_category_id=i['goods_cat_x'],
                    positive_category_id=i['sanji'],
                    reverse_category_id=i['sanji_x'],
                    positive_cpid=i['cps'],
                    reverse_cpid=i['cps_x'],
                    positive_gid=str_gid,
                    reverse_gid=str_gid_x,
                    positive_skuid=str_sku,
                    reverse_skuid=str_sku_x,
                )
            # 同步商品 — push the updated card to the goods service.
            pt_goods = update_goods_id(pk)
            for i in pt_goods:
                if not i:
                    continue
                param = get_ptcard_info([pk], i[0], i[1], 0)
                ptgoodsurl = PtConst.UPDATEPTGOODS
                r = requests.post(ptgoodsurl, data=param)
                re = r.json()
                if re['code'] != 0:
                    return {"msg": '同步商品失败:' + re["msg"], "code": '-1'}
            # 同步实体卡 — push the update to linked entity cards.
            pt_ent = PtEntityCard.objects.filter(card_id=pk)
            for e in pt_ent:
                param = get_ptcard_info([pk], e.id, '', 1)
                ptgoodsurl = PtConst.UPDATEPTGOODS
                r = requests.post(ptgoodsurl, data=param)
                re = r.json()
                if re['code'] != 0:
                    return {"msg": '同步实体卡失败' + re["msg"], "code": '-1'}
            return {"msg": u"添加成功", "code": '0'}
        data = {"msg": u"缺少参数", "code": '-1'}
    except Exception as err:
        data = {"msg": err.message, "code": '-1'}
    return data
@login_required
@permission_required(u'man.%s' % PtConst.PT_CARD, raise_exception=True)
@add_common_var
def putao_card_info(request, template_name):
    """Render the putao-card info page (no extra template context)."""
    context = {}
    return report_render(
        request, template_name, context,
        context_instance=RequestContext(request),
    )
@login_required
@permission_required(u'man.%s' % PtConst.PT_CARD, raise_exception=True)
@add_common_var
def putao_card_edit(request, template_name):
    """Render the putao-card edit page (no extra template context)."""
    context = {}
    return report_render(
        request, template_name, context,
        context_instance=RequestContext(request),
    )
@login_required
@permission_required(u'man.%s' % PtConst.PT_CARD, raise_exception=True)
def putao_card_goods(request):
    """
    葡萄卡的列表显示和卡的增加 — list putao cards (GET) or create one (POST).

    Returns a JsonResponse; unsupported methods get a '-1' error dict.
    """
    handlers = {
        'GET': get_putao_card_list,
        'POST': create_putao_card,
    }
    handler = handlers.get(request.method)
    if handler is None:
        data = {'code': '-1', 'msg': 'GET or POST 方法'}
    else:
        data = handler(request)
    return JsonResponse(data)
@login_required
@permission_required(u'man.%s' % PtConst.PT_CARD, raise_exception=True)
def putao_card_goods_detail(request):
    """
    Detail endpoint for one putao card.

    GET  - return the single record,
    PUT  - update it,
    DELETE - not implemented (intentional no-op).
    """
    data = {'code': '-1', 'msg': 'GET or PUT 方法'}
    try:
        pk = request.GET.get('pk', request.POST.get('pk'))
        did = PtCard.objects.get(id=pk)
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
        # are no longer swallowed; covers DoesNotExist and bad pk values.
        data = {'msg': u'没有这个值', 'code': '1'}
    else:
        if request.method == 'GET':
            data = get_one_putao(did)
        elif request.method == 'PUT':
            data = update_putao(pk, request)
        elif request.method == 'DELETE':
            pass
    return JsonResponse(data)
|
# -*- encoding=UTF-8 -*-
from skimage import util
import cv2
import numpy as np
import random
def skimage_function():
    """Show the original lenna image and a skimage gaussian-noised copy."""
    source = cv2.imread("images/lenna.png")
    cv2.imshow("img_original1", source)
    noisy = util.random_noise(source, mode='gaussian')
    cv2.imshow("img_gauss1", noisy)
# Run the skimage-based demo at import time (opens two image windows).
skimage_function()
def handwrite_function():
    """Add hand-rolled gaussian noise (sigma = 29.9) to lenna and display it."""
    img_original = cv2.imread("images/lenna.png")
    cv2.imshow("img_original2", img_original)
    # Vectorized rewrite: draw the whole noise field at once and clip to
    # [0, 255], replacing the original three nested Python loops over
    # every channel/row/column (same distribution, orders of magnitude
    # faster on a full image).
    noise = np.random.normal(0, 29.9, img_original.shape)
    img_gauss = np.clip(img_original.astype(np.float64) + noise, 0, 255)
    cv2.imshow("img_gauss2", img_gauss.astype("uint8"))
# Run the hand-written demo, then block until a key press closes the windows.
handwrite_function()
cv2.waitKey(0)
cv2.destroyAllWindows()
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @File : udp_receive.py
# @Author: ly
# @Date : 2018/12/2
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @File : udp.py
# @Author: ly
# @Date : 2018/12/2
import socket

'''
create socket
'''
# Receive a single UDP broadcast datagram on port 6666 and print it.
udp_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
udp_socket.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, True)
udp_socket.bind(('', 6666))
# 10 << 10 == 10 KiB receive buffer.
receive_data, remote_address = udp_socket.recvfrom(10 << 10)
print('receive data from %s data is %s' % (str(remote_address), receive_data.decode()))
udp_socket.close()
|
from pyspark.sql import SparkSession
from pyspark.sql import SQLContext
#Build and initiate the spark session for leveraging spark
if __name__ == '__main__':
    # Build and initiate the spark session for leveraging spark.
    spark = SparkSession \
        .builder \
        .appName("csv_processing") \
        .getOrCreate()
    # Building the spark context
    sc = spark.sparkContext
    # Providing the location for the file to be processed
    data_file = r"C:\Users\shubh\Documents\Truata_Tasks\groceries.csv"
    # Using the RDD API (sc.textFile) as required; spark.read.text() would
    # give a DataFrame instead.
    rdd_read = sc.textFile(data_file)

    def funsplit(lines):
        """Split one CSV line into its comma-separated items."""
        return lines.split(',')

    # flatMap so we get one flat list of items, not a list per line.
    flat_rdd = rdd_read.flatMap(funsplit)
    # (Removed a stray flat_rdd.collect() that materialized the whole RDD
    # on the driver without using the result.)
    cnt_dist = flat_rdd.distinct().collect()

    # Task 1: one "Product <name>" line per distinct item.
    with open('/Users/shubh/Documents/Truata_Output/Task_1/out_1_1.txt', 'w') as f:
        for item in cnt_dist:
            f.write("Product ")
            f.write(str(item))
            f.write("\n")

    # Task 2: total item count.
    with open('/Users/shubh/Documents/Truata_Output/Task_1/out_1_2.txt', 'w') as f:
        f.write("The total count is ")
        f.write("\n")
        f.write(str(flat_rdd.count()))

    # Task 3: per-product frequency, sorted descending by count. mapValues
    # keeps the product key untouched while summing its occurrence list.
    dict_rdd = flat_rdd.map(lambda x: (x, 1))
    dict_grp = dict_rdd.groupByKey()
    grp_val = dict_grp.mapValues(sum).map(lambda x: (x[1], x[0])).sortByKey(False)
    with open('/Users/shubh/Documents/Truata_Output/Task_1/out_1_3.txt', 'w') as f:
        for count, product in grp_val.collect():
            f.write(str(product) + ' ')
            f.write(str(count))
            f.write("\n")
|
# Demonstrate that * is arithmetic for ints but repetition for strings.
numX = 20
stringX = str(numX)
result1 = numX * 10      # integer multiplication -> 200
result2 = stringX * 10   # string repetition -> "20" ten times
print("Result 1 = ", result1)
print("Result 2 = ", result2)
|
from vc_wrap import SvetObject
from combine_runs import ConstraintObject
# Shared configuration for every StorageVET run below.
SVET_PATH = "/Applications/storagevet2v101/StorageVET-master-git/"
DEFAULT_PARAMS_FILE = "Model_Parameters_2v1-0-2_default_03-2021.csv"

iso_name = "PJM"
Scenario_time_series_filename = "/Users/zhenhua/Desktop/price_data/hourly_timeseries_pjm_2019_200x.csv"
# Scenario_time_series_filename = "/Users/zhenhua/Desktop/price_data/hourly_timeseries_2019_200x.csv"
Finance_customer_tariff_filename = "/Users/zhenhua/Desktop/price_data/tariff_data_fake/e5d4.csv"
# Finance_customer_tariff_filename = "/Users/zhenhua/Desktop/price_data/tariff_data/original_documents/caiso_pge_b20_2020.csv"


def _svet_run(shortname, timeseries, user_price=None, **service_flags):
    """Build a SvetObject with the shared defaults, run it, and return it.

    *service_flags* supply the per-service switches (DCM_active,
    SR_active, NSR_active, ...); *user_price* turns on the User service
    with the given price series. Everything else is identical across runs.
    """
    kwargs = dict(
        SVet_absolute_path=SVET_PATH,
        default_params_file=DEFAULT_PARAMS_FILE,
        shortname=shortname,
        description=iso_name,
        Scenario_n="36",
        Scenario_time_series_filename=timeseries,
        Finance_customer_tariff_filename=Finance_customer_tariff_filename,
        retailTimeShift_active='yes',
        DA_active='no',
        FR_active="no",
        FR_CombinedMarket="1",
    )
    if user_price is not None:
        kwargs['User_active'] = "yes"
        kwargs['User_price'] = user_price
    kwargs.update(service_flags)
    run = SvetObject(**kwargs)
    run.run_storagevet()
    return run


# value stacking, should be the best scenario
caiso_all = _svet_run("{} DCM+SR+NSR on".format(iso_name),
                      Scenario_time_series_filename,
                      DCM_active='yes', SR_active='yes', NSR_active='yes')

# use ths to determine monthly peak net load
caiso_baseline = _svet_run("{} DCM on".format(iso_name),
                           Scenario_time_series_filename,
                           DCM_active='yes', SR_active='no', NSR_active='no')

# DCM constraints derived from the baseline run
DCMconstraint = ConstraintObject(SVet_absolute_path=SVET_PATH,
                                 shortname=caiso_baseline.shortname,
                                 baseline_runID=caiso_baseline.runID,
                                 app_hours=[0, 23],
                                 regulation_scenario=1,
                                 constraint_init=True)
DCMconstraint.set_DCM_user_constraints()

# determine sr constraints
caiso_all_w_dcm = _svet_run(DCMconstraint.new_shortname,
                            DCMconstraint.new_hourly_timeseries_path,
                            user_price=DCMconstraint.values,
                            DCM_active='no', SR_active='yes', NSR_active='yes')


def _sr_priority_run(app_hours, regulation_scenario, dcm_active):
    """Give SR priority over *app_hours*, then re-run with NSR only.

    Returns the (ConstraintObject, SvetObject) pair for the scenario.
    """
    constraint = ConstraintObject(SVet_absolute_path=SVET_PATH,
                                  shortname=caiso_all_w_dcm.shortname,
                                  baseline_runID=caiso_all_w_dcm.runID,
                                  app_hours=app_hours,
                                  regulation_scenario=regulation_scenario,
                                  constraint_init=False)
    constraint.set_SR_user_constraints()
    run = _svet_run(constraint.new_shortname,
                    constraint.new_hourly_timeseries_path,
                    user_price=constraint.values,
                    DCM_active=dcm_active, SR_active='no', NSR_active='yes')
    return constraint, run


# SR has priority from 2-8pm with RS=3
SRconstraint, caiso_sr_priority = _sr_priority_run([14, 19], 3, 'no')
# SR has priority from 2-8pm with RS=1
SRconstraint, caiso_sr_priority = _sr_priority_run([14, 19], 1, 'yes')
# SR has priority from 8-2pm with RS=3
SRconstraint, caiso_sr_priority = _sr_priority_run([8, 13], 3, 'no')
# SR has priority from 8-2pm with RS=1
SRconstraint, caiso_sr_priority = _sr_priority_run([8, 13], 1, 'yes')
|
# -*- coding: utf-8 -*-
# QT IVVI DAC controller
# Version 1.1 (2020-02-07)
# Daan Wielens (ICE/QTM)
# PUT YOUR COM PORT HERE:
COMport = 1
import sys
from datetime import datetime
try:
# These modules will import succesfully for Python 2.x
import Tkinter
import ttk
import tkMessageBox as messagebox
import tkFont as font
except ImportError:
# For Python 3.x, we need these (renamed) modules
import tkinter as Tkinter
from tkinter import ttk
from tkinter import messagebox
from tkinter import font
# novisa == 1 means "simulate the GUI without any serial hardware".
novisa = 0
# Testing GUI on a pc without VISA / pySerial:
if len(sys.argv) == 2:
    if sys.argv[1] == '--novisa':
        novisa = 1
    else:
        print('Warning: your input argument (' + sys.argv[1] + ') is not recognized by this program and will be ignored. Please use --novisa if you want to simulate the GUI.')

# Initialise connection
if novisa == 0:
    import serial
    # Serial link to the IVVI rack DAC controller; the port stays closed
    # between transactions (each helper opens and closes it).
    ser = serial.Serial()
    ser.baudrate = 115200
    ser.port = 'COM' + str(COMport)
    ser.parity = serial.PARITY_ODD
    ser.stopbits = 1
    ser.bytesize = 8
    ser.timeout = 1  # seconds; reads return early after this
def write_dac(dac, val):
    """Set DAC number *dac* (1-16) to *val* volts, clamped to [-2, 2].

    The 4 V span is mapped onto an unsigned 16-bit setpoint and sent
    over the (module-level) serial link ``ser``.
    """
    val = float(val)
    dac = int(dac)
    if val > 2:
        print('DAC setpoint > 2. The setpoint will be set to 2.')
        val = 2
    elif val < -2:
        # Fixed message: the lower clamp is -2, not 2 as it used to claim.
        print('DAC setpoint < -2. The setpoint will be set to -2.')
        val = -2
    # Map [-2, 2] V onto 0..65535, big-endian two-byte payload.
    bytevalue = int(((val + 2) / 4) * 65535).to_bytes(length=2, byteorder='big')
    set_msg = bytes([7, 0, 2, 1, dac]) + bytevalue
    ser.open()
    ser.write(set_msg)
    ser.read(2)  # consume the 2-byte acknowledgement
    ser.close()
def UpdateValues():
    """Poll the IVVI rack for all 16 DAC values and refresh the GUI.

    Doubles as a connection test: the status label turns green with a
    'last seen' timestamp on success, red on failure. In --novisa mode
    the entry boxes are filled with 'No COM' instead.
    """
    if novisa == 0:
        # Read values, but also test connection and update the label
        try:
            ser.open()
            # Open port, request values of all DACs, close port, process data, output to text boxes
            read_msg = bytes([4, 0, 34, 2])
            ser.write(read_msg)
            resp = ser.read(34)
            ser.close()
            if len(resp) == 0:
                # Nothing arrived before the timeout: treat as disconnected.
                raise Exception
            now = datetime.now()
            dt_string = now.strftime('%Y-%m-%d %H:%M:%S')
            lblStatus.config(fg="Green")
            statText.set('Last seen: ' + dt_string)
        except Exception:
            lblStatus.config(fg="Red")
            statText.set('No COM connection')
            return
        values_int = list(range(16))
        values_Volts = list(range(16))
        for i in range(16):
            # Two data bytes per DAC after a 2-byte header (big-endian);
            # map the 16-bit integer back onto the [-2, 2] V range.
            values_int[i] = int.from_bytes(resp[2*(i+1):4+2*i], byteorder='big')
            values_Volts[i] = round(((values_int[i]) / 65535 * 4 - 2), 8)
            try:
                # exec hack to address the entry widgets d1val..d16val by name.
                exec("d"+str(i+1)+"val.delete(0, Tkinter.END)")
                exec("d"+str(i+1)+"val.insert(0, "+str(values_Volts[i])+")")
            except Exception:
                print('Not able to update text')
    else:
        try:
            for i in range(16):
                # Hack to dynamically change variable names - useful!
                exec("d"+str(i+1)+"val.insert(0, 'No COM')")
        except Exception:
            print('Not able to update text')
def WriteDacsZero():
    """Set every DAC (1-16) to 0 V, then refresh the displayed values."""
    for dac_number in range(1, 17):
        write_dac(dac_number, 0)
    UpdateValues()
# Write functions for all 16 DACs in a short (but not very Pythonic) way.
# Each exec defines WriteDacN() reading the matching dNval entry widget.
for i in range(16):
    exec("def WriteDac"+str(i+1)+"():\n    val = float(d"+str(i+1)+"val.get()); write_dac("+str(i+1)+", val); UpdateValues()")

# Create GUI
top = Tkinter.Tk()
top.lift()
top.title('IVVI DAC controller - v1.1 (2020-02-07)')

# Title bar
titleFont = font.Font(family="Helvetica", size=16, weight="bold")
titleText = Tkinter.Label(top, text="IVVI DAC controller", fg="white", bg="#00adef", font=titleFont).grid(row=0, columnspan=1000, sticky=Tkinter.W+Tkinter.E)
vLine1 = Tkinter.Frame(top, bg="black", width=580, height=1).grid(row=1, columnspan=1000)

# Main area
lblFont = font.Font(family="Helvetica", size=12)
headFont = font.Font(family="Helvetica", size=14, weight="bold")

# DACs 1-8 frame: each row is label / entry / unit / Set button, with the
# widgets dNlbl, dNval, dNunt, dNbtn created dynamically via exec.
fDAC1 = Tkinter.Frame(top, width=400, height=400, relief=Tkinter.RIDGE, borderwidth=3)
fDAC1.grid(row=2, padx=3, pady=3, sticky=Tkinter.W+Tkinter.N)
lblHead1 = Tkinter.Label(fDAC1, text="DACs 1-8", font=headFont).grid(row=3, sticky=Tkinter.W+Tkinter.N)
vLine2 = Tkinter.Frame(fDAC1, bg="black", width=275, height=1).grid(row=4, columnspan=1000)
for i in range(8):
    exec("d"+str(i+1)+"lbl = Tkinter.Label(fDAC1, text='DAC " + str(i+1) + "', font=lblFont).grid(row=" + str(i+5) + ", sticky=Tkinter.W+Tkinter.N)")
    exec("d"+str(i+1)+"val = Tkinter.Entry(fDAC1, exportselection=0)")
    exec("d"+str(i+1)+"val.grid(row=" + str(i+5) + ", column=2, sticky=Tkinter.W)")
    exec("d"+str(i+1)+"unt = Tkinter.Label(fDAC1, text=' V ', font=lblFont).grid(row=" + str(i+5) + ", column=3, sticky=Tkinter.W)")
    exec("d"+str(i+1)+"btn = Tkinter.Button(fDAC1, text='Set', command=WriteDac"+str(i+1)+").grid(row=" + str(i+5) + ", column=4, columnspan=1000, sticky=Tkinter.W)")

# DACs 9-16 frame (same layout as DACs 1-8, shifted names).
fDAC2 = Tkinter.Frame(top, width=500, height=400, relief=Tkinter.RIDGE, borderwidth=3)
fDAC2.grid(row=2, column=3, padx=(0, 3), pady=3, sticky=Tkinter.W+Tkinter.N)
lblHead2 = Tkinter.Label(fDAC2, text="DACs 9-16", font=headFont).grid(row=3, sticky=Tkinter.W+Tkinter.N)
vLine3 = Tkinter.Frame(fDAC2, bg="black", width=275, height=1).grid(row=4, columnspan=1000)
for i in range(8):
    exec("d"+str(i+9)+"lbl = Tkinter.Label(fDAC2, text='DAC " + str(i+9) + "', font=lblFont).grid(row=" + str(i+5) + ", sticky=Tkinter.W+Tkinter.N)")
    exec("d"+str(i+9)+"val = Tkinter.Entry(fDAC2, exportselection=0)")
    exec("d"+str(i+9)+"val.grid(row=" + str(i+5) + ", column=2, sticky=Tkinter.W)")
    exec("d"+str(i+9)+"unt = Tkinter.Label(fDAC2, text=' V ', font=lblFont).grid(row=" + str(i+5) + ", column=3, sticky=Tkinter.W)")
    exec("d"+str(i+9)+"btn = Tkinter.Button(fDAC2, text='Set', command=WriteDac"+str(i+9)+").grid(row=" + str(i+5) + ", column=4, columnspan=1000, sticky=Tkinter.W)")

# Buttons
btnReadDACs = Tkinter.Button(top, text="Read DACs", command=UpdateValues).grid(row=10, column=0, sticky=Tkinter.W)
btnDACszero = Tkinter.Button(top, text="DACs to zero", command=WriteDacsZero).grid(row=10, column=0, padx=75, sticky=Tkinter.W)

# Status label (updated by UpdateValues with connection state).
statFont = font.Font(family="Helvetica", size=9)
statText = Tkinter.StringVar()
statText.set('Connection info here')
lblStatus = Tkinter.Label(top, textvariable=statText, font=statFont)
lblStatus.grid(row=10, column=3, sticky=Tkinter.W)

# Main loop(s): one initial read, then hand control to Tk.
UpdateValues()
top.mainloop()
|
import asyncio
import logging
from typing import Any, Dict, List, Optional, TYPE_CHECKING, Tuple, Union
import aiohttp
from .canvas import Canvas
from .color import Color
from .exceptions import Cooldown, HttpException, Ratelimit
from .ratelimits import Ratelimits
if TYPE_CHECKING:
from .source import Source
logger = logging.getLogger("dpixels")
class Client:
    """Async client for the pixels.pythondiscord.com canvas API."""

    # Endpoint names, appended to e_base_url.
    e_base_url = "https://pixels.pythondiscord.com/"
    e_get_size = "get_size"
    e_get_canvas = "get_pixels"
    e_get_pixel = "get_pixel"
    e_swap_pixel = "swap_pixel"
    e_set_pixel = "set_pixel"

    def __init__(
        self,
        token: str,
        save_file: str = "ratelimits.json",
        *,
        user_agent: str = "Ciruit dpixels (Python/aiohttp)",
    ):
        """Create a client authenticating with *token*.

        :param save_file: path where ratelimit state is persisted.
        :param user_agent: User-Agent header sent with every request.
        """
        self.headers = {
            "Authorization": "Bearer " + token.strip(),
            "User-Agent": user_agent,
        }
        self.session: Optional[aiohttp.ClientSession] = None
        self.ratelimits = Ratelimits(save_file)
        self.canvas: Optional[Canvas] = None

    async def draw_sources(
        self, sources: List["Source"], forever: bool = True
    ):
        """Keep drawing *sources* onto the canvas, one pixel per pass.

        When *forever* is False, stops once no source needs an update.
        """

        async def do_draw(s: "Source"):
            # Draw at most one pixel; skip if it is already correct.
            val = s.get_next_pixel()
            if not val:
                return
            x, y, p = val
            if self.canvas[x, y] == p:
                return
            try:
                await self.set_pixel(x, y, p)
                return
            except (Cooldown, Ratelimit) as e:
                await e.ratelimit.pause()

        async def any_needs_update() -> bool:
            for s in sources:
                await s.update_fix_queue(self.canvas)
                if s.needs_update:
                    return True
            return False

        going = True
        while going:
            await self.get_canvas()
            # BUG FIX: any_needs_update() is a coroutine and must be
            # awaited. The old `forever or any_needs_update()` made the
            # unawaited coroutine object always truthy, so the loop never
            # terminated when forever=False.
            going = forever or await any_needs_update()
            for s in sources:
                if not s.needs_update:
                    continue
                await do_draw(s)
                break

    async def get_canvas_size(self):
        """Return the canvas (width, height) as ints."""
        data = await self.request("GET", self.e_get_size)
        return int(data["width"]), int(data["height"])

    async def get_canvas(self):
        """Fetch the whole canvas, cache it on self.canvas, and return it."""
        # Fetch the size concurrently with the pixel payload.
        size = asyncio.create_task(self.get_canvas_size())
        data = await self.request("GET", self.e_get_canvas, parse_json=False)
        size = await size
        self.canvas = Canvas(size[0], size[1], data)
        return self.canvas

    async def set_pixel(
        self, x: int, y: int, color: "Color", *, retry: bool = False
    ):
        """Set pixel (x, y), alpha-blending *color* over the cached canvas."""
        if self.canvas:
            current = self.canvas[x, y]
            rgb = current.add_color_with_alpha(color)
            current.r, current.g, current.b = rgb
            ashex = current.hex
        else:
            # No cached canvas: send the color as-is (no blending possible).
            ashex = color.hex
        data = await self.request(
            "POST",
            self.e_set_pixel,
            data={
                "x": x,
                "y": y,
                "rgb": ashex,
            },
            retry_on_ratelimit=retry,
        )
        logger.debug(data["message"])
        return data["message"]

    async def get_pixel(
        self, x: int, y: int, *, retry: bool = True
    ) -> "Color":
        """Fetch a single pixel and refresh the cached canvas entry."""
        data = await self.request(
            "GET",
            self.e_get_pixel,
            params={
                "x": x,
                "y": y,
            },
            retry_on_ratelimit=retry,
        )
        c = Color.from_hex(data["rgb"])
        if self.canvas:
            self.canvas.grid[y][x] = c
        return c

    async def swap_pixels(
        self,
        xy0: Tuple[int, int],
        xy1: Tuple[int, int],
        *,
        retry: bool = False,
    ):
        """Swap the pixels at *xy0* and *xy1*; returns the API message."""
        data = await self.request(
            "POST",
            self.e_swap_pixel,
            data={
                "origin": {
                    "x": xy0[0],
                    "y": xy0[1],
                },
                "dest": {
                    "x": xy1[0],
                    "y": xy1[1],
                },
            },
            retry_on_ratelimit=retry,
        )
        return data["message"]

    async def get_session(self):
        """Return the shared ClientSession, (re)creating it if needed."""
        if (not self.session) or self.session.closed:
            self.session = aiohttp.ClientSession(headers=self.headers)
        return self.session

    async def request(
        self,
        method: str,
        endpoint: str,
        *,
        data: Optional[Dict[Any, Any]] = None,
        params: Optional[Dict[Any, Any]] = None,
        parse_json: bool = True,
        retry_on_ratelimit: bool = True,
    ) -> Union[Dict[Any, Any], str]:
        """Perform one ratelimit-aware API request.

        Raises Ratelimit/Cooldown when a limit is hit and
        *retry_on_ratelimit* is False; otherwise waits and retries once.
        Raises HttpException on 4xx responses.
        """
        session = await self.get_session()
        ratelimit = self.ratelimits.ratelimits[endpoint]
        await ratelimit.lock.acquire()
        if not ratelimit.valid:
            logger.debug("Ratelimit is invalid.")
            retry_after = None
        else:
            retry_after = ratelimit.retry_after
        if retry_after:
            if not retry_on_ratelimit:
                ratelimit.lock.release()
                raise Ratelimit(endpoint, retry_after, ratelimit)
            ratelimit.lock.release()
            await ratelimit.pause()
            # Retry exactly once: the recursive call disables retrying.
            return await self.request(
                method,
                endpoint,
                data=data,
                params=params,
                parse_json=parse_json,
                retry_on_ratelimit=False,
            )
        async with session.request(
            method,
            self.e_base_url + endpoint,
            json=data,
            params=params,
        ) as resp:
            ratelimit.update(resp.headers)
            ratelimit.lock.release()
            if resp.status == 429:
                raise Cooldown(endpoint, ratelimit.retry_after, ratelimit)
            # BUG FIX: the old `500 > resp.status > 400` excluded 400
            # itself, so Bad Request responses were parsed as success.
            if 400 <= resp.status < 500:
                detail = await resp.json()
                raise HttpException(resp.status, detail["detail"])
            if parse_json:
                return await resp.json()
            else:
                return await resp.read()

    async def close(self):
        """Close the HTTP session (if any) and persist ratelimit state."""
        # Guard: close() on a client that never made a request must not
        # crash on self.session being None.
        if self.session:
            await self.session.close()
        self.ratelimits.save()
|
# Importing modules
import json
import dash_cytoscape as cyto
from dash import dcc
from dash import html
from dash import dash_table
from dash.dependencies import Input, Output
import dash_bootstrap_components as dbc
import numpy as np
import pandas as pd
import plotly.express as px
import plotly.graph_objs as go
import datetime
import math
from app import app
#Read in the processed data with post_dates set prior to the earliest comment_date
data = pd.read_csv('outputs/post-centric_graph/time_elapsed.csv',encoding="utf-8")
data['post_time'] = pd.to_datetime(data['post_time'])
data['post_id'] = data['hashed_post_id'].astype(str)
data['comment_id'] = data['hashed_comment_id'].astype(str)
data['comment_time'] = pd.to_datetime(data['comment_time'])
data['time_elapsed'] = pd.to_timedelta(data['time_elapsed'])
all_labels = ['All']
all_labels = data['post_text_pred'].unique()
#Sort in descending order
all_labels[::-1].sort()
all_labels = np.append(['All'], all_labels)
#Prepare dropdown for group filter
all_groups = ['All']
all_groups = np.append(all_groups, data['group'].unique())
# Prepare stylesheet to design node and edge colors.
# Base rules: default node look, triangle shape for posts, bezier edges.
stylesheets = [{"selector": "node",
                "style": {
                    "fontSize": "100px",
                    "textValign": "center",
                    "textHalign": "center",
                    'width': 50,
                    'height': 50,
                    'backgroundColor': '#551f02'  # dark brown
                },
                },
               {
                   'selector': '.post',
                   'style': {
                       "width": "data(size)",
                       "height": "data(size)",
                       "shape": "triangle"
                   }
               },
               {
                   'selector': 'edge',
                   'style': {
                       'curve-style': 'bezier',
                       "opacity": 1,
                       'width': "1.5",
                   }
               }
               ]
# Get sorted list of topic labels
labels = np.sort(data['post_text_pred'].unique())
# Each color is matched to the corresponding label (feel free to modify the colors)
colors = ['#316102', '#0459eb', '#f0063e', '#f0d137', '#8105c0', '#7cf605', '#cacdc7', '#551f02', '#eb8bbe', '#cfa502', '#B6D0E2']
# One node rule (match on label prefix) and one edge rule (match on class)
# per topic; min() guards against having more labels than colors.
for index in range(min(len(labels), len(colors))):
    node_color = {
        'selector': f'[label ^= "{labels[index]}"]',
        'style': {
            'backgroundColor': f'{colors[index]}'
        }
    }
    edge_color = {
        'selector': f".{labels[index]}",
        'style': {
            'line-color': f'{colors[index]}',
            'backgroundColor': f'{colors[index]}'
        }
    }
    stylesheets.append(node_color)
    stylesheets.append(edge_color)
# Style for transparent node with topic label overlay
stylesheets.append({
    'selector': '.comment',
    'style': {
        'content': 'data(label)',
        "backgroundColor": "white",
        "width": "250",
        "height": "100",
        "fontSize": "100px",
        "textValign": "center",
        "textHalign": "center"
    }
})
# Page layout: filter controls, the cytoscape network graph, a time slider,
# and two detail panes (clicked post / hovered node).
layout = html.Div([
    html.Div([
        html.Div([dcc.Markdown("**Post-centric Network Graph**", style={'color': 'black', 'fontSize': 25, 'textAlign': 'center'})]),
        # Filter controls row: class, group, top-N posts, free-text search.
        html.Div([
            dcc.Markdown('Filter by Class'),
            dcc.Dropdown(
                id='filter_label',
                options=[{'label': i, 'value': i} for i in all_labels],
                value=all_labels[1]
            )], style={'display': 'inline-block', 'width': '15%'}),
        html.Div([
            dcc.Markdown('Filter by Group'),
            dcc.Dropdown(
                id='filter_group',
                options=[{'label': i, 'value': i} for i in all_groups],
                value=all_groups[0]
            )], style={'display': 'inline-block', 'width': '15%', 'paddingLeft': 30}),
        html.Div([
            dcc.Markdown('Top **N** Posts by Reaction Count'),
            dcc.Dropdown(
                id='top_n_clusters',
                options=[{'label': i, 'value': i} for i in range(1, 10)],
                value=1
            )], style={'display': 'inline-block', 'width': '20%', 'paddingLeft': 30}),
        html.Div([
            dcc.Markdown('Search for a Post'),
            dcc.Input(
                id="search_box",
                type="text",
                placeholder="Input search term",
                debounce=True,  # fire the callback on Enter/blur, not per keystroke
                value=' ',
            )], style={'display': 'inline-block', 'width': '20%', 'float': 'right'})]),
    # The network graph itself, wrapped in a loading spinner.
    dcc.Loading(
        cyto.Cytoscape(
            id='cytoscape-graph',
            style={'width': '100%', 'height': 750},
            layout={'name': 'cose', 'animate': True, 'fit': True
                    , 'numIter': 100, 'gravity': 100, 'nodeRepulsion': 500000000,
                    'boundingBox': {'x1': 0, 'x2': 7000, 'y1': 0, 'y2': 5000}},
            responsive=True,
            maxZoom=0.2,
            minZoom=0.08,
            stylesheet=stylesheets
        )
    ),
    html.Div([
        dcc.Markdown("**Time Elapsed (Click on triangle node to filter)**",
                     style={'color': 'black', 'fontSize': 15,
                            'textAlign': 'center', 'float': 'middle'}),
        # Slider marks/values are populated by a callback elsewhere.
        dcc.Slider(
            id='time_slider'
        ),
        # Two side-by-side detail panes filled by click/hover callbacks.
        html.Div([
            html.Div([
                html.H5("Details on Source Post",
                        style={'color': 'black', 'fontSize': 15,
                               'textAlign': 'center', 'float': 'middle'}),
                html.Pre(id='cytoscape-postNodeData-json', style={'whiteSpace': 'break-spaces', 'height': 'auto'})
            ], style={'border': 'thin lightgrey solid', 'overflowY': 'auto', 'height': '300px',
                      'width': '50%', 'display': 'inline-block'
                      }),
            html.Div([
                html.H5("Details on Hovered Node",
                        style={'color': 'black', 'fontSize': 15,
                               'textAlign': 'center', 'float': 'middle'}),
                html.Pre(id='cytoscape-tapNodeData-json', style={'whiteSpace': 'break-spaces', 'height': 'auto'})
            ], style={'border': 'thin lightgrey solid', 'overflowY': 'auto', 'height': '300px',
                      'width': '50%', 'display': 'inline-block'
                      })
        ])
    ], style={'width': '100%', 'float': 'middle', 'paddingLeft': 100, 'paddingRight': 100}),
    html.Br()
])
#Function to create a node
def make_node(input, node_type, cluster_size = 1):
    """Build a dash-cytoscape element dict from one dataframe row.

    Args:
        input: mapping (pandas Series or dict) holding the post_* or
            comment_* fields. The name shadows the ``input`` builtin but is
            kept for backward compatibility with existing callers.
        node_type: 'post' or 'comment' — selects which fields are read.
        cluster_size: related-comment count stored on the node.

    Returns:
        The Cytoscape element dict.

    Raises:
        ValueError: for an unknown node_type (previously this fell through
            and raised NameError at ``return element``).
    """
    if node_type == 'post':
        hashed_id = input['hashed_post_id']
        group = input['group']
        label = input['post_text_pred']
        time = input['post_time'].strftime('%d-%m-%Y %X')
        username = input['hashed_username']
        # NaN check: NaN != NaN, so the comparison is True only for real text.
        if input['post_text'] == input['post_text']:
            # Remove newline characters
            text = input['post_text'].replace('\n','')
            text = text.replace('\r','')
        else:
            text = ' '
        likes = input['likes']
        reactions = input['reactions']
        sentiment = round(input['sentiment'], 2)
        # Node size scales with likes, clamped to the [200, 400] range.
        element = {'data': {'label': label, 'text': text, 'cluster_size':cluster_size,'id': hashed_id, 'post_id': hashed_id, 'time': time, 'username': username,
                            'sentiment': sentiment, 'group': group,'likes': likes, 'size': min(400, max(200,likes/5)), 'reactions':reactions},'classes':'post'}
    elif node_type == 'comment':
        #comment
        hashed_id = input['hashed_comment_id']
        post_id = input['hashed_post_id']
        group = input['group']
        label = input['comment_text_pred']
        time = input['comment_time'].strftime('%d-%m-%Y %X')
        username = input['hashed_commenter_name']
        if input['comment_text'] == input['comment_text']:
            text = input['comment_text'].replace('\n','')
            text = text.replace('\r','')
        else:
            text = ' '
        likes = input['likes']
        sentiment = round(input['sentiment'], 2)
        #Convert time elapsed (a timedelta) to minutes
        elapsed = input['time_elapsed'].days*1440 + round(input['time_elapsed'].seconds/60,2)
        element = {'data': {'label': label, 'text': text, 'num_comments':cluster_size,'id': hashed_id, 'post_id':post_id,
                            'group': group, 'time': time, 'username': username,'sentiment': sentiment,'likes':likes,
                            'time_elapsed':f'{elapsed:.2f} minutes'}}
    else:
        raise ValueError(f"unknown node_type: {node_type!r}")
    return element
#Post-centric network graph
@app.callback(
    Output('cytoscape-graph', 'elements'),
    Input('search_box', 'value'),
    Input('filter_label', 'value'),
    Input('filter_group', 'value'),
    Input('top_n_clusters', 'value'),
    Input('time_slider', 'value'),
    Input("time_slider", "max"))
def update_graph(search, label_name, group_name, num_clusters, time_elapsed, time_limit):
    """Rebuild the Cytoscape element list from the current filter controls.

    search: substring matched against post text (case-insensitive).
    label_name / group_name: 'All' disables the respective filter.
    num_clusters: keep only the top-N posts by reaction_count.
    time_elapsed / time_limit: slider value and its max; comments later than
        time_elapsed minutes are dropped unless the slider sits at the max.

    Returns a combined list of node and edge dicts for dash-cytoscape.
    NOTE(review): relies on a module-level `data` DataFrame — confirm schema.
    """
    #Prepare post data: keep the row with the earliest post_time per post id
    post_df = data.loc[data.groupby('hashed_post_id')['post_time'].idxmin()].reset_index(drop = True).copy()
    #Prepare comments data ('0' marks rows without a comment)
    comments = data[data['hashed_comment_id'] != '0'].copy()
    graph_edges = []
    nodes = []
    #Filter by label dropdown
    if label_name != 'All':
        post_df = post_df[post_df['post_text_pred'] == label_name]
    #Filter by groups
    if group_name != 'All':
        post_df= post_df[post_df['group'] == group_name]
    #Filter by search entry
    if search:
        post_df = post_df[post_df['post_text'].str.contains(search, case = False, na=False)]
    #Get top n posts in terms of number of reactions
    posts=post_df[['group','likes','comments','shares','reactions', 'sentiment',
                   'reaction_count','hashed_post_id',
                   'hashed_username','post_text','post_time','post_text_pred']].reset_index(drop = True)
    posts = posts.sort_values(by = 'reaction_count', ascending = False).iloc[0:num_clusters].reset_index(drop = True)
    #Filter comments based on time elapsed (slider at max == show all)
    if time_elapsed < time_limit:
        comments = comments[comments['time_elapsed'] < datetime.timedelta(minutes = time_elapsed)].copy()
    else:
        comments = comments.copy()
    #Iterate through unique post_ids
    for count, post_id in enumerate(posts['hashed_post_id'].unique()):
        comment_df = comments[comments['hashed_post_id'] == post_id]
        comment_df = comment_df.dropna(subset = ['comment_text', 'comment_time'])
        comment_df=comment_df[['group','likes','comments','shares','reactions','reaction_count','hashed_post_id', 'sentiment',
                               'hashed_comment_id','hashed_commenter_name','comment_text','comment_time','comment_text_pred',
                               'comment_text_pred_prob', 'time_elapsed']]
        #Add comment edges and nodes
        for row in comment_df.index:
            #Add edge from the per-topic hub node (label + post index) to the comment
            target = comment_df.loc[row, 'hashed_comment_id']
            edge_id = post_id + ',' + target
            element = {'data': {'id': edge_id, 'source':comment_df.loc[row, 'comment_text_pred'] + str(count), 'target':target},
                       'classes':comment_df.loc[row, 'comment_text_pred']}
            graph_edges.append(element)
            #Add node (cluster_size = number of comments sharing this label)
            comment_row = comment_df.loc[row]
            label = comment_df.loc[row, 'comment_text_pred']
            comment_cluster = len(comment_df[comment_df['comment_text_pred'] == label])
            nodes.append(make_node(comment_row, 'comment', cluster_size=comment_cluster))
        #Add post node (cluster_size = its total comment count)
        cluster_size = len(comment_df)
        post_row = posts.loc[count]
        nodes.append(make_node(post_row, 'post', cluster_size = cluster_size))
        #Group together all comments with the same label:
        #one hub node per topic, linked back to the post
        for label in comment_df['comment_text_pred'].unique():
            filtered_comments = comment_df[comment_df['comment_text_pred'] == label]
            nodes.append({'data':{'id':label + str(count), 'label':label, 'num_comments':len(filtered_comments),
                                  'post_id':post_id},'classes':'comment'})
            target = label + str(count)
            edge_id = post_id + ',' + target
            element = {'data': {'id': edge_id, 'source':post_id, 'target':target}}
            graph_edges.append(element)
    all_elements = nodes + graph_edges
    return all_elements
#Display node data when hovering
@app.callback(
    Output('cytoscape-tapNodeData-json', 'children'),
    Output('cytoscape-postNodeData-json', 'children'),
    Input('cytoscape-graph', 'mouseoverNodeData'),
    Input('time_slider', 'value'),
    Input("time_slider", "max"))
def displayTapNodeData(node, time_elapsed, time_limit):
    """Render JSON details for the hovered node and for its source post.

    Returns [hovered-node JSON, source-post JSON]; two empty strings when
    nothing is hovered.
    """
    data_copy = {}
    source_post = {}
    output = []
    if node:
        post_id = node['post_id']
        #Check if it is a post (only post nodes carry 'cluster_size')
        if 'cluster_size' in node:
            data_copy['Label'] = node['label']
            data_copy['Post Text'] = node['text']
            data_copy['No. of Comments for This Post'] = node['cluster_size']
            data_copy['Post Sentiment'] = node['sentiment']
            data_copy['Date Posted'] = node['time']
            data_copy['Group'] = node['group']
            data_copy['Reactions'] = node['reactions']
            output.append(json.dumps(data_copy, indent=2, ensure_ascii=False,sort_keys=False))
        #Check if it is a comment (comment nodes carry 'time_elapsed')
        elif 'time_elapsed' in node:
            data_copy['Label'] = node['label']
            data_copy['Comment Text'] = node['text']
            data_copy['Comment Sentiment'] = node['sentiment']
            data_copy['Time Elapsed Before Comment Was Made'] = node['time_elapsed']
            data_copy['Comment Time'] = node['time']
            data_copy['No. of Comments with the Same Label'] = node['num_comments']
            output.append(json.dumps(data_copy, indent=2, ensure_ascii=False,sort_keys=False))
        else:
            #Topic hub nodes have neither key: show nothing in the hover pane
            output.append('')
        #Generate post details from post_id (earliest row per post)
        post_df = data.loc[data.groupby('hashed_post_id')['post_time'].idxmin()]
        post_df = post_df.loc[post_df['hashed_post_id'] == post_id].reset_index(drop = True).iloc[0]
        #Get cluster size under the current slider filter
        comments = data[data['hashed_comment_id'] != '0'].copy()
        if time_elapsed < time_limit:
            comments = comments[comments['time_elapsed'] < datetime.timedelta(minutes = time_elapsed)].copy()
        comment_df = comments[comments['hashed_post_id'] == post_id]
        comment_df = comment_df.dropna(subset = ['comment_text', 'comment_time'])
        source_post['Label'] = post_df['post_text_pred']
        #NaN check: NaN != NaN, so this branch only runs for real text
        if post_df['post_text'] == post_df['post_text']:
            #Remove next line characters
            text = post_df['post_text'].replace('\n','')
            text = text.replace('\r','')
        else:
            text = ' '
        source_post['Post Text'] = text
        source_post['No. of Comments for This Post'] = len(comment_df)
        source_post['Post Sentiment'] = round(post_df['sentiment'], 2)
        source_post['Date Posted'] = post_df['post_time'].strftime('%d-%m-%Y %X')
        source_post['Group'] = post_df['group']
        source_post['Reactions'] = post_df['reactions']
        output.append(json.dumps(source_post, indent=2, ensure_ascii=False,sort_keys=False))
    else:
        output = ['', '']
    return output
#Filter time slider by clicked post
@app.callback(
    [Output("time_slider", "min"),
     Output("time_slider", "max"),
     Output("time_slider", "marks"),
     Output("time_slider", "value")],
    Input('cytoscape-graph', 'tapNodeData'))
def display_click_data(clickData):
    """Rescale the elapsed-time slider to the comment span of a clicked post.

    Only post nodes (the ones carrying a 'size' data field) trigger the
    rescale; anything else returns the 0-100 minute placeholder.
    Returns [min, max, marks, value] for the slider.
    """
    # Placeholder marks: 0..100 minutes in steps of 10.
    marks_dict = {}
    for i in range(0, 101, 10):
        marks_dict[i] = {'label': str(i) + ' min'}
    if clickData and 'size' in clickData:
        #Get the clicked post id
        post = clickData['id']
        #Filter the data to retrieve comments corresponding to this post
        filtered_by_time = data[data['hashed_post_id'] == post]
        # No rows for this post: keep the placeholder slider.
        # (Previously an int fallback of 1 still reached .min()/.max() below
        #  and raised AttributeError.)
        if len(filtered_by_time) == 0:
            return [1, 100, marks_dict, 100]
        elapsed_time = filtered_by_time['time_elapsed']
        #Min/max elapsed time converted to whole minutes
        lower = elapsed_time.min().days*1440 + math.floor(elapsed_time.min().seconds/60)
        upper = elapsed_time.max().days*1440 + math.ceil(elapsed_time.max().seconds/60)
        #Set the minimum to be 1
        if lower == 0:
            lower += 1
            upper += 1
        #Set the range to be 99 minutes, or up until the upper limit
        upper_limit = min(upper, lower + 99)
        #Slider markings (at most ~5 marks, spacing at least 1 minute)
        markings = max(1, math.ceil((upper_limit - lower)/5))
        marks_dict = {}
        for i in range(lower, upper_limit + 1, markings):
            unit = ' min'
            # Flag a truncated range with a '+' on the last mark.
            if i == upper_limit and upper_limit < upper:
                unit = '+' + unit
            marks_dict[i] = {'label': str(i) + unit}
        return [lower, upper_limit, marks_dict, upper_limit]
    #Placeholder
    return [1, 100, marks_dict, 100]
|
import random
def do_weighted_draw(weights):
    """Pick a bucket index at random, with probability proportional to its weight.

    Args:
        weights: sequence of non-negative numeric weights.

    Returns:
        The chosen 0-based index; 0 for an empty or all-zero sequence.
    """
    # Draw a point uniformly in [0, total); the first bucket whose cumulative
    # weight reaches the point wins.  Single pass instead of the original
    # sum-then-rescan double loop.
    rand = random.random() * sum(weights)
    cumulative = 0
    for bucket, weight in enumerate(weights):
        cumulative += weight
        if rand <= cumulative:
            return bucket
    # Only reachable for an empty weights sequence.
    return len(weights)
|
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
from django.contrib import admin
from settings import DEBUG
admin.autodiscover()
# URL routes (old-style Django 1.x patterns(), matching the rest of this file).
urlpatterns = patterns(
    '',
    #url(r'^$', index, name='index'),
    url(r'^admin_tools/', include('admin_tools.urls')),
    url(r'^admin/', include(admin.site.urls))
)
if DEBUG:
    from settings import MEDIA_ROOT
    # Serve static media in development only.
    # Fixed regex: the original "[css|js|...]" was a character class matching
    # single characters; use a literal dot plus an alternation of extensions.
    urlpatterns.extend([
        url(r'^media/(?P<path>.*\.(?:css|js|png|ico|jpg|gif|svg|htm|html))$', 'django.views.static.serve', {'document_root': MEDIA_ROOT})
    ])
|
def sumOfTwo(l, n):
    """Return the number of index pairs (i, j), i < j, with l[i] + l[j] == n.

    Counts pairs of positions, so duplicate values contribute separately.
    O(len(l)**2) comparisons.
    """
    return sum(
        1
        for i in range(len(l))
        for j in range(i + 1, len(l))
        if l[i] + l[j] == n
    )

# print() call form works under both Python 2 and 3 for a single argument.
print(sumOfTwo([1,2,3,4,5,6,7,8,9],10))
|
#!/usr/bin/env python3.7.0
# -*- coding: utf-8 -*-
# @Time    : 2020/4/8 14:01
# @Author  : XiaShengSheng
# @FileName: make_wordcloud.py
# @Software: PyCharm
# Build word clouds from a Chinese text corpus: tokenize with jieba,
# drop stop words, count frequencies, then render two cloud images.
from collections import Counter
import jieba
record = open("data/neg.txt", 'r', encoding='utf-8')
#print(record)
#%%
# Read the document, tokenize each line, and join the tokens with spaces
# into one long string.
cut_words = ' '
for line in record:
    line.strip('')  # NOTE(review): result discarded — this strip has no effect
    #seg_list = jieba.cut_for_search(line)  # search-engine mode
    #seg_list = jieba.cut(line,cut_all=True)  # full mode
    seg_list = jieba.cut(line)  # accurate mode
    cut_words += (' '.join(seg_list))
#print(cut_words)
#%%
# Read the stop-word file into a list (one word per line).
stop = open("data/stoplist.txt", 'r',encoding='UTF-8')
stop_words=[]
for line in stop:
    line.strip( )  # NOTE(review): no-op; the strip that matters is on the next line
    stop_words.append(line.strip())
#print(stop_words)
#%%
# Remove stop words from the tokenized text.
cut_list = cut_words.split()
words=[]
for s in cut_list:
    s.strip()  # NOTE(review): result discarded
    if s in stop_words:
        continue
    else:
        words.append(s)
print(words)
#%%
# Count word frequencies (skip single characters and stray CRLF tokens).
c = Counter()
for x in words:
    if len(x) > 1 and x != '\r\n':
        c[x] += 1
print('\n词频统计结果:')
# Print the 200 most frequent words.
for (k, v) in c.most_common(200):
    print("%s:%d" % (k, v))
type(c)
#%%
# Custom font colors: a black/white/grey palette.
import matplotlib.colors as colors
color = ['#d8dcd6','#929591','#59656d','#000000']
colormap = colors.ListedColormap(color)
#%%
# Word cloud rendered directly from the frequency counter.
from wordcloud import WordCloud
wordshow = WordCloud(font_path="C:/Windows/Fonts/STSONG.ttf",
                     background_color="white"
                     ,width=400
                     ,height=300
                     ,max_words=100
                     ,max_font_size=60
                     ,collocations=False
                     ,colormap=colormap
                     ,stopwords=stop_words)
wc = wordshow.fit_words(c)
# wc.to_file('data/pf9.png')
#%%
# After stop-word removal, join the tokens with spaces for generate().
cloudword = ' '.join(words)
print(cloudword)
#%%
# Word cloud built by WordCloud's own tokenization of the joined text.
wordshow = WordCloud(font_path="C:/Windows/Fonts/STSONG.ttf",
                     background_color="white"
                     ,width=800
                     ,height=600
                     ,max_words=150
                     ,max_font_size=60
                     ,collocations=False
                     ,stopwords=stop_words)
wc = wordshow.generate(cloudword)
wc.to_file('data/neg.jpg') |
#!/usr/bin/python
# Micro-benchmark: time how long it takes to overwrite every cell of a
# 1024x1024 list-of-lists matrix with 1.
import time

# Matrix dimensions.
w, h = 1024, 1024
# Build the h x w matrix of zeros (names kept for module-level compatibility).
Matrix = [[0 for x in range(w)] for y in range(h)]

startTime = time.time()
for i in range(0, 1024):
    for j in range(0, 1024):
        Matrix[i][j] = 1
endTime = time.time()
total = endTime - startTime
# print() call form works under both Python 2 and 3 for a single argument.
print(total)
|
import os
import json
import random
from src import Google_API, Google_datastore
class Question:
    """Quiz-question repository backed by Google Datastore and Google APIs."""
    def __init__(self, path):
        # Path to a local questions JSON file (legacy; Datastore is used now).
        self.path = path
    def get_questions(self):
        """Fetch every question entity from Datastore as a plain dict."""
        # with open(self.path) as f:
        #     questions_json = json.load(f)
        #     return questions_json['questions']
        ds = Google_datastore.Datastore()
        qns = ds.get_data()
        questions = [{"id": qn.id, "image": qn['image'], "label": qn['label'], "question": qn['question'],
                      "audio": qn['audio']} for qn in qns]
        return questions
    def random_questions(self, count):
        """Build up to `count` four-option quiz items from random questions.

        Each item offers four candidate images; the chosen question's own
        image is the answer. Stops early when fewer than four questions
        remain to draw options from.
        """
        all_qns = [qn for qn in self.get_questions()]
        res_qns = []
        qns = 0
        # Need at least 4 remaining questions to form a full option set.
        while qns < count and len(all_qns) > 3:
            random.shuffle(all_qns)
            candidates = all_qns[:4]
            qn = random.choice(candidates)
            qn_id = qn['id']
            qn_str = qn['question']
            qn_options = [c['image'] for c in candidates]
            # 1-based position of the correct image among the options.
            qn_answer = candidates.index(qn)+1
            candidates.remove(qn)
            # Remove the used question so it is not asked again.
            all_qns.remove(qn)
            res_qns.append({
                "id": qn_id,
                "question": qn_str,
                "images": qn_options,
                "answer": qn_answer
            })
            qns += 1
        return res_qns
    @staticmethod
    def speech_answer(speech, answer):
        """Return True if the transcription of `speech` contains `answer`."""
        results = Google_API.speech_text(speech.read())
        return len(set.intersection(set(results), set({answer}))) > 0
    @staticmethod
    def create_questions(ds):
        """Regenerate all questions from resources/images and store them in `ds`.

        Deletes the existing entities, labels each .jpg via the Vision API,
        builds a "Which of the following images has ..." question per image,
        then writes the entities and generates their audio prompts.
        """
        q_id = 1
        qns = []
        src_qns = ds.get_data()
        src_qns = [{"id": qn.id} for qn in src_qns]
        ds.delete_data(src_qns)
        for file in os.listdir("resources/images"):
            if file.endswith(".jpg"):
                label = Google_API.annotate_image(os.path.join("resources/images", file))['labels'][0].description
                qn_str = "Which of the following images has {}"
                # Pick "an" before a vowel-initial label, "a" otherwise.
                qn_str = qn_str.format(("an " if label.lower()[0] in ["a", "e", "i", "o", "u"] else "a ") + label)
                qn = {
                    "id": q_id,
                    "image": file,
                    "label": label,
                    "question": qn_str,
                    "audio": str(q_id)+".mp3"
                }
                qns.append(qn)
                q_id += 1
        Question.write_questions(qns, ds)
        Question.generate_questions_audio(qns)
    @staticmethod
    def write_questions(questions, ds):
        """Persist question dicts to the datastore."""
        # with open(os.path.join('resources', 'questions.json'), 'w') as outfile:
        #     json.dump({"questions": questions}, outfile)
        [ds.add_data(q) for q in questions]
    @staticmethod
    def generate_questions_audio(questions):
        """Synthesize an audio prompt (text-to-speech) for each question."""
        questions_dict = {}
        for q in questions:
            questions_dict[q['id']] = q['question']
        if questions_dict:
            Google_API.text_speech(questions_dict)
|
from _Lib_.Lib import *
def SetUpDriver(HeadLess=True):
    """Create and return a configured Chrome webdriver.

    HeadLess: run Chrome without a visible window when True.
    """
    options = webdriver.ChromeOptions()
    if HeadLess:
        options.add_argument('headless')
    # Hardening/compat flags commonly needed in containers and CI.
    options.add_argument('--no-sandbox')
    options.add_argument("start-maximized")
    options.add_argument("disable-infobars")
    options.add_argument("--disable-extensions")
    options.add_argument('--disable-dev-shm-usage')
    #options.add_argument('--user-agent=Mozilla/5.0 (Linux; Android 10; SM-A205U) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/84.0.4147.89 Mobile Safari/537.36')
    # NOTE(review): chrome_options= is deprecated in newer Selenium releases
    # (use options=); confirm the installed Selenium version.
    Drive = webdriver.Chrome(chrome_options=options)
    return Drive
# Scrape the feedspot listing and collect each channel's URL slug
# (the second-to-last path segment of every ".ext" link).
Drive = SetUpDriver(False)
Drive.get("https://blog.feedspot.com/gaming_youtube_channels/")
Cases = Drive.find_elements_by_class_name("ext")
Videos = [str(case.get_attribute('href')).split("/")[-2] for case in Cases]
print (Videos)
|
import seaborn as sns
import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score,recall_score,precision_score,roc_auc_score,f1_score
from sklearn.metrics import roc_curve
# Exhaustive search over feature-index combinations of the SECOM dataset:
# for each combination, balance pass/fail classes by down-sampling, fit a
# logistic regression, and append the scores to a result file.
pbl_data = pd.read_csv("uci-secom.csv",sep=',')
file = open("result_data_2.txt","w")
msg = ""
states = 40      # fixed random_state for reproducible splits/fits
features = 589   # number of candidate feature columns
maxlen = 3       # combination length is [minlen, maxlen)
minlen = 2
# `case` holds the current combination of column indices, initialised to 0..k-1.
case = []
for i in range(1,minlen) :
    case.append(0)
for i in range(minlen,maxlen) :
    case.append(0)
for j in range(0,len(case)) :
    case[j] = j
while True :
    X = pd.DataFrame()
    X_Pass = pd.DataFrame()
    X_Fail = pd.DataFrame()
    X['Pass/Fail'] = pbl_data['Pass/Fail']
    # Select the columns of the current combination (columns are named by index).
    for v in range(0,len(case)) :
        X[str(case[v])] = pbl_data[str(case[v])]
    X = X.dropna(axis =0)
    X = X.reset_index()
    # Split rows into pass (-1) and fail classes.
    for v in range(0,len(X.index)) :
        #print (X.iloc[[v]])
        if X.iloc[[v]]['Pass/Fail'][v] == -1 :
            X_Pass = X_Pass.append(X.iloc[[v]])
        else :
            X_Fail = X_Fail.append(X.iloc[[v]])
    X_Pass = X_Pass.reset_index()
    X_Fail = X_Fail.reset_index()
    # Down-sample the pass class to the size of the fail class.
    random = np.random.choice(len(X_Pass.index),len(X_Fail.index),replace=False)
    X_choice = pd.DataFrame()
    for v in random :
        X_choice = X_choice.append(X_Pass.iloc[[v]])
    X = X_choice.append(X_Fail)
    y = X['Pass/Fail']
    X =X.drop('Pass/Fail',axis=1)
    print("Fail len : ",len(X.index),"\t Total len : ",len(X_Fail.index))
    print (case)
    # NOTE(review): `i` here is the leftover index from the init loops above,
    # so every result line starts with the same number — confirm intent.
    msg = str(i)
    msg += ' : [ '
    for v in range(0,len(case)-1) :
        msg += str(case[v])
        msg += ' '
    msg += str(case[len(case)-1])
    msg += ' ] '
    # Only score combinations with enough balanced data and both classes present.
    if len((X.index)) > 50 and len(pd.value_counts(y.values, sort=False)) == 2 :
        X_train , X_test , y_train , y_test = train_test_split(X,y,test_size=0.2,random_state = states)
        if len(pd.value_counts(y_train.values, sort=False)) == 2 and len(pd.value_counts(y_test.values, sort=False)) == 2 and pd.value_counts(y_test.values, sort=False)[1] > 10 :
            log_reg = LogisticRegression(random_state=states,solver='liblinear',C=10.)
            log_reg.fit(X_train,y_train)
            pred = log_reg.predict(X_test)
            # Skip suspiciously perfect fits.
            if accuracy_score(y_test,pred) != 1 :
                msg += " accuracy : "
                msg += str(accuracy_score(y_test,pred))
                msg += ", recall : "
                msg += str(recall_score(y_test,pred, average="weighted",zero_division=1))
                msg += ", precision : "
                msg += str(precision_score(y_test,pred, average="weighted",zero_division=1))
                msg += ", f1 : "
                msg += str(f1_score(y_test,pred, average="weighted",zero_division=1))
                msg += ", roc_auc : "
                msg += str(roc_auc_score(y_test,log_reg.predict_proba(X_test)[:,1], average='weighted'))
                msg += '\n'
                file.write(msg)
    # Terminate once every index sits at its maximal position.
    end = 0;
    for k in range(0,len(case)) :
        if case[k] == features - len(case) + k :
            end = end +1
    if end == len(case) :
        break
    # Advance `case` to the next combination (odometer-style carry).
    case[len(case)-1] = case[len(case)-1] +1
    if case[len(case)-1] > features -1 :
        for t in range(len(case)-1,0,-1):
            case[t] = case[t] +1
            if case[t] < features-1 :
                for u in range(t+1,len(case)) :
                    case[u] = case[u-1] +1
                break
            else :
                if t==1 :
                    case[0] = case[0] +1
                    for u in range(1,len(case)) :
                        case[u] = case[u-1] +1
|
import logging
import os
from argparse import ArgumentParser
import numpy as np
import pandas as pd
import torch
from sklearn.metrics import (
accuracy_score, precision_recall_fscore_support
)
from transformers import (
T5Tokenizer, T5ForConditionalGeneration,
Trainer, TrainingArguments,
)
from dataset import read_dataframe, SecReqDataset
from constants import (
MAX_LENGTH, MODEL_TYPE, DEFAULT_EPOCHS,
SEC_LABEL, NONSEC_LABEL,
TRAINING_APPLICATION_NAME,
TMP_FOLDER_NAME, MODEL_FOLDER, MODEL_FILENAME,
TRAIN_DATASET_PATH, VALID_DATASET_PATH,
)
logger = logging.getLogger(TRAINING_APPLICATION_NAME)
def setup_parser(parser):
    """Register the training CLI arguments on *parser*.

    Adds dataset paths, output model name, max sequence length, T5 model
    variant, and epoch count (defaults come from the constants module).
    """
    parser.add_argument(
        "-d", "--train_path",
        help="path to train dataset",
    )
    parser.add_argument(
        "-v", "--valid_path",
        help="path to valid dataset",
    )
    parser.add_argument(
        "-o", "--output_model_name",
        help="model output name",
        default=MODEL_FILENAME,
    )
    parser.add_argument(
        "-l", "--max_len",
        # Fixed: a comma was missing after the help string, which made this
        # call a syntax error (two keyword arguments juxtaposed).
        help="maximum input sequence length",
        default=MAX_LENGTH,
    )
    parser.add_argument(
        "-m", "--model_type",
        help="T5 model version (e.g. t5-small)",
        default=MODEL_TYPE,
    )
    parser.add_argument(
        "-e", "--epochs",
        help="number of epochs to train model",
        default=DEFAULT_EPOCHS,
    )
def prepare_labels_mappings(tokenizer):
    """Populate the global token-ids -> class-id mapping used by compute_metrics.

    Tokenizes the SEC/NONSEC label strings and maps their id tuples to
    1 (security) and 0 (non-security) respectively.
    """
    global idxs_to_label
    sec_idxs, non_sec_idxs = tokenizer.prepare_seq2seq_batch(
        [SEC_LABEL, NONSEC_LABEL]
    )['input_ids']
    idxs_to_label = {tuple(sec_idxs): 1, tuple(non_sec_idxs): 0}
def compute_metrics(pred):
    """Compute accuracy/precision/recall/F1 for T5 seq2seq classification.

    pred: a HuggingFace EvalPrediction-like object with .label_ids and
        .predictions. Generated token sequences are mapped back to class
        ids via the global idxs_to_label; unmappable outputs become -1 and
        are excluded from the binary P/R/F1 (but counted separately).
    Returns a dict of metric name -> value.
    """
    labels = pred.label_ids
    preds = pred.predictions[0].argmax(-1)
    def _convert_to_labels(idxs):
        # Map a token-id sequence to its class (1=sec, 0=nonsec, -1=unknown).
        label = idxs_to_label.get(tuple(idxs), -1)
        return label
    # np.int was removed in NumPy 1.24; the builtin int is the supported spelling.
    targets = np.fromiter(map(_convert_to_labels, labels), dtype=int)
    predictions = np.fromiter(map(_convert_to_labels, preds), dtype=int)
    wrong_predictions = np.where(predictions == -1)[0]
    wrong_predictions_number = wrong_predictions.shape[0]
    acc = accuracy_score(targets, predictions)
    # Drop unparsable predictions before computing binary P/R/F1.
    targets = np.delete(targets, wrong_predictions)
    predictions = np.delete(predictions, wrong_predictions)
    precision, recall, f1, _ = precision_recall_fscore_support(targets, predictions, average='binary')
    return {
        'accuracy': acc,
        'f1': f1,
        'precision': precision,
        'recall': recall,
        'wrong_predictions': wrong_predictions_number,
    }
def load_model(model_path, device="cuda"):
    """Load a pretrained T5 conditional-generation model onto *device*."""
    logger.info("===Started model loading===")
    loaded_model = T5ForConditionalGeneration.from_pretrained(model_path).to(device)
    logger.info("===Finished model loading===")
    return loaded_model
def train(epochs, model_type=MODEL_TYPE):
    """Fine-tune a T5 model on the cached train/valid datasets.

    Args:
        epochs: number of training epochs.
        model_type: pretrained checkpoint name to load. Added (with a
            backward-compatible default) because the original body read
            ``arguments.model_type`` — a local of main() — which raised
            NameError at call time.

    Returns:
        The trained model.
    """
    model = load_model(model_type)
    training_args = TrainingArguments(
        num_train_epochs=epochs,
        warmup_steps=300,
        weight_decay=0.01,
        evaluation_strategy="epoch",
    )
    # Datasets were serialized to disk by prepare_data().
    train_dataset = torch.load(TRAIN_DATASET_PATH)
    valid_dataset = torch.load(VALID_DATASET_PATH)
    logger.info("===Started model training===")
    trainer = Trainer(
        model=model,
        args=training_args,
        train_dataset=train_dataset,
        eval_dataset=valid_dataset,
        compute_metrics=compute_metrics,
    )
    trainer.train()
    logger.info("===Finished model training===")
    return model
def prepare_data(train_path, valid_path, model_type, max_len):
    """Tokenize the train/valid dataframes and cache the datasets to disk.

    Side effects: creates TMP_FOLDER_NAME if missing, serializes the two
    datasets to TRAIN_DATASET_PATH / VALID_DATASET_PATH, and initialises the
    global label mapping consumed by compute_metrics.
    """
    logger.info("===Started tokenizer loading===")
    tokenizer = T5Tokenizer.from_pretrained(model_type)
    logger.info("===Finished tokenizer loading===")
    logger.info("===Started data preparation===")
    train_dataframe = read_dataframe(train_path)
    valid_dataframe = read_dataframe(valid_path)
    train_dataset = SecReqDataset(train_dataframe, tokenizer, True, max_len)
    valid_dataset = SecReqDataset(valid_dataframe, tokenizer, True, max_len)
    if not os.path.isdir(TMP_FOLDER_NAME):
        os.mkdir(TMP_FOLDER_NAME)
    torch.save(train_dataset, TRAIN_DATASET_PATH)
    torch.save(valid_dataset, VALID_DATASET_PATH)
    prepare_labels_mappings(tokenizer)
    logger.info("===Finished data preparation===")
def main():
    """CLI entry point: parse args, prepare datasets, train, save the model."""
    parser = ArgumentParser(prog=TRAINING_APPLICATION_NAME)
    setup_parser(parser)
    arguments = parser.parse_args()
    prepare_data(arguments.train_path, arguments.valid_path,
                 arguments.model_type, arguments.max_len)
    # NOTE(review): train() internally references ``arguments.model_type``,
    # but ``arguments`` is local to main() — confirm the chosen model type
    # actually reaches train() (likely a latent NameError).
    model = train(arguments.epochs)
    if not os.path.isdir(MODEL_FOLDER):
        os.mkdir(MODEL_FOLDER)
    model.save_pretrained(os.path.join(MODEL_FOLDER, arguments.output_model_name))
if __name__=="__main__":
main() |
# Make Server
# Functionality wish list:
#   - allow users to send and receive messages
#   - connect to the server from anywhere; maybe end up federating users
#   - serverless? prebaked AMI?
#   - end-to-end encryption
#   - option for messages to self-delete on read
# Bonus objective: integrate media project
|
# pylint: disable=too-many-instance-attributes, too-few-public-methods
""" Duckdown configuration """
import os
import time
import logging
from pkg_resources import resource_filename
LOGGER = logging.getLogger(__name__)
class Config:
    """ holding all the variables """
    # constants: site-relative resource locations
    PAGE_PATH = "pages/"
    TEMPLATE_PATH = "templates/"
    IMAGES_PATH = "static/images/"
    SCRIPT_PATH = "scripts/"
    STATIC_PATH = "static/"
    USERS_PATH = "users.json"
    IMG_PATH = "/static/images/"
    ASSETS_PREFIX = "/edit/assets/"
    # variables (class-level defaults, overridable per instance)
    debug = False
    # PORT environment variable wins over the 8080 default
    port = int(os.getenv("PORT", "8080"))
    bucket_name = ""
    credentials = {}
    app_path = ""
    app_name = "duckdown_app"
    static_prefix = STATIC_PATH
    image_path = IMAGES_PATH
    image_bucket = None
    users_path = USERS_PATH
    image_credentials = {}
    img_path = IMG_PATH
    convert_image_paths = True
    duck_assets_prefix = ASSETS_PREFIX
    # editor assets/templates shipped inside the duckdown package
    duck_assets = resource_filename("duckdown", "assets")
    duck_templates = resource_filename("duckdown", "templates")
    cookie_secret = "it was a dark and stormy duckdown"
    login_url = "/login"
    login_handler = None
    # vue client resources
    vue_page = "vue.html"
    vue_src = "./client/src/"
    vue_assets = resource_filename("duckdown", "assets/vue/")
    vue_manifest = resource_filename("duckdown", "assets/vue/manifest.json")
    def __init__(self, app_path=None, bucket=None, debug=None, port=None):
        """ init derived data members """
        # allow for command line override of the class-level defaults
        if app_path is not None:
            self.app_path = app_path
        if bucket is not None:
            self.bucket_name = bucket
        if debug is not None:
            self.debug = debug
        if port is not None:
            self.port = port
        LOGGER.debug("init app_path: %s", self.app_path)
        # derive absolute site paths from app_path
        self.static_path = os.path.join(self.app_path, self.STATIC_PATH)
        self.template_path = os.path.join(self.app_path, self.TEMPLATE_PATH)
        self.script_path = os.path.join(self.app_path, self.SCRIPT_PATH)
        self.page_path = os.path.join(self.app_path, self.PAGE_PATH)
        self.cookie_name = f"{self.app_name}-user"
    def tornado_settings(self, settings):
        """ setup tornado settings dict """
        # setdefault throughout: values already present in `settings` win
        settings.setdefault("debug", self.debug)
        settings.setdefault("port", self.port)
        settings.setdefault("app_name", self.app_name)
        settings.setdefault("app_path", self.app_path)
        # site paths
        settings.setdefault("users_path", self.users_path)
        settings.setdefault("static_path", self.static_path)
        settings.setdefault("static_prefix", self.static_prefix)
        settings.setdefault("template_path", self.template_path)
        settings.setdefault("script_path", self.script_path)
        settings.setdefault("page_path", self.page_path)
        settings.setdefault("img_path", self.img_path)
        settings.setdefault("convert_image_paths", self.convert_image_paths)
        # editor setup
        settings.setdefault("duck_assets_prefix", self.duck_assets_prefix)
        settings.setdefault("duck_assets", self.duck_assets)
        settings.setdefault("duck_templates", self.duck_templates)
        # access control
        settings.setdefault("cookie_name", f"{self.app_name}-user")
        if settings.get("debug") is True:
            # debug mode: fresh secret each start so cookies do not persist
            settings[
                "cookie_secret"
            ] = f"it was a dark and stormy duckdown {time.time()}"
        else:
            settings.setdefault("cookie_secret", self.cookie_secret)
        settings.setdefault("login_url", self.login_url)
        return settings
|
# Use this formula for the distance that a car travels down the interstate:
# Distance = Speed * Time
# The car is traveling 82 miles per hour. Write a program that displays the following:
# 1. The distance the car will travel in 6 hours
# 2. The distance the car will travel in 10 hours
# 3. The distance the car will travel in 15 hours.
# Speed of the car in miles per hour (constant).
CAR_SPEED = 82

# Distance = Speed * Time for each requested duration.
# (The redundant 0.0 pre-initialisations were removed; each variable is
# assigned exactly once, as float so the ',.2f' formatting below applies.)
six_hour_distance = float(CAR_SPEED) * 6
ten_hour_distance = float(CAR_SPEED) * 10
fifteen_hour_distance = float(CAR_SPEED) * 15

# Displaying the distance the car will travel in 6 hours
print('The distance the car will travel in 6 hours is:', format(six_hour_distance, ',.2f'), 'miles')
# Displaying the distance the car will travel in 10 hours
print('The distance the car will travel in 10 hours is:', format(ten_hour_distance, ',.2f'), 'miles')
# Displaying the distance the car will travel in 15 hours
print('The distance the car will travel in 15 hours is:', format(fifteen_hour_distance, ',.2f'), 'miles')
|
# Copyright (c) 2015
#
# All rights reserved.
#
# This file is distributed under the Clear BSD license.
# The full text can be found in LICENSE in the root directory.
import rootfs_boot
import time
from devices import board, wan, lan, wlan, prompt
class Connection_Stress(rootfs_boot.RootFSBootTest):
    '''Measured CPU use while creating thousands of connections.'''
    def runTest(self):
        """Hammer the router with ApacheBench while sampling CPU with mpstat.

        Sets self.result_message with the measured requests/second and the
        average (non-idle) CPU use.
        """
        num_conn = 5000
        # Wan device: Create small file in web dir
        fname = 'small.txt'
        cmd = '\nhead -c 10000 /dev/urandom > /var/www/%s' % fname
        wan.sendline(cmd)
        wan.expect(prompt)
        # Lan Device: download small file a lot
        concurrency = 25
        url = 'http://192.168.0.1/%s' % fname
        # Start CPU monitor on the router (long interval; stopped via Ctrl-C below)
        board.sendline('\nmpstat -P ALL 10000 1')
        # Lan Device: download small file a lot
        lan.sendline('\nab -dn %s -c %s %s' % (num_conn, concurrency, url))
        lan.expect('Benchmarking', timeout=5)
        lan.expect('Requests per second:\s+(\d+)')
        reqs_per_sec = int(lan.match.group(1))
        lan.expect(prompt)
        # Stop CPU monitor and parse its "Average: all ..." summary line
        board.sendcontrol('c')
        board.expect('Average:\s+all(\s+[0-9]+.[0-9]+){10}\r\n')
        # NOTE(review): group(1) of a repeated group is the LAST of the 10
        # columns — assumed to be mpstat's %idle; confirm for this busybox/mpstat.
        idle_cpu = float(board.match.group(1))
        avg_cpu = 100 - float(idle_cpu)
        board.expect(prompt)
        msg = "ApacheBench measured %s connections/second, CPU use = %s%%." % (reqs_per_sec, avg_cpu)
        self.result_message = msg
        time.sleep(5) # Give router a few seconds to recover
    def recover(self):
        """Interrupt any still-running mpstat/ab after a failed run."""
        board.sendcontrol('c')
        board.expect(prompt)
        lan.sendcontrol('c')
        time.sleep(2) # Give router a few seconds to recover
|
## Leira Salene 1785752
# Print the service menu with prices.
print("Davy's auto shop services")
print("Oil change -- $35")
print("Tire rotation -- $19")
print("Car wash -- $7")
print("Car wax -- $12\n")
def getCost(s):
    """Return the price in dollars for a named service (None if unknown)."""
    prices = {
        'Oil change': 35,
        'Tire rotation': 19,
        'Car wash': 7,
        'Car wax': 12,
    }
    # dict.get returns None for an unrecognised service, matching the
    # original if-chain's implicit fall-through.
    return prices.get(s)
# Second copy of the menu (the menu is already printed above) —
# NOTE(review): likely leftover duplication; confirm which menu to keep.
print ("Davy's auto shop services")
print ("\nOil change--$35")
print ("\nTire rotation--$19")
print ("\nCar wash--$7")
print ("\nCar wax--$12\n")
# Prompt for the two services; '-' means no second service.
first = input ('Select first service:\n')
second = input ('Select second service:\n')
print ("\nDavy's auto shop invoice\n")
# NOTE(review): getCost returns None for an unrecognised service, which
# makes the %d formats below raise TypeError — confirm input validation.
one = getCost(first)
two = 0
print ('Service 1: %s, $%d' %(first, one))
if second == '-':
    print ("Service 2: No service")
else:
    two = getCost(second)
    print ("Service 2: %s, $%d" %(second, two))
print("\nTotal: $%d" %(one+two)) |
# -*- coding: utf-8 -*-
import tushare as ts
import pandas as pd
from dataget.helper import *
import dataget.info as info
import os
import time
from datetime import datetime
from datetime import timedelta
#def write_db_all_stock_1day(start_symbol = '', end = ''):
# symbols = ts.get_stock_basics()
# ok = 0
# if end == '':
# end = datetime.today().strftime('%Y-%m-%d')
# for code in symbols.index:
# if start_symbol == '':
# ok = 1
# elif start_symbol == code:
# ok = 1
# if ok:
# start = str(symbols.loc[code]['timeToMarket'])
# if start == '0':
# continue
# if len(start) == 8:
# start = start[0:4] + '-' + start[4:6] + '-' + start[6:]
# print('do %s, start = %s' % (code, start))
# df = ts.get_h_data(code, start=start, end=end, index=False)
# df.to_csv('%s/stock.1d/%s.csv' % (bar_path, code), encoding='utf-8')
#
#def write_db_all_index_1day(index_path = '', start_symbol = '', end = ''):
# df = pd.read_csv(index_path)
# ok = 0
# if end == '':
# end = datetime.today().strftime('%Y-%m-%d')
# for symbol in df.symbol:
# code = symbol.split('.')[1]
# if start_symbol == '':
# ok = 1
# elif start_symbol == code:
# ok = 1
# if ok:
# start = '1999-01-01'
# print('do %s, start = %s' % (code, start))
# df = ts.get_h_data(code, start=start, end=end, index=True)
# df.to_csv('%s/index.1d/%s.csv' % (bar_path, code), encoding='utf-8')
def write_db_all_1d(start_symbol='', end='', index=False):
    """Download full daily history for every symbol and write one CSV each.

    start_symbol: resume point — skip symbols until this code is reached
        (empty string means start from the beginning).
    end: last date to fetch; today when empty.
    index: fetch index data (and write under the index path) when True.
    """
    path = index_1d_path if index else stock_1d_path
    symbols = info.get_symbol_list(index)
    start = '1999-01-01'
    if end == '':
        end = datetime.today().strftime('%Y-%m-%d')
    started = start_symbol == ''
    for code in symbols:
        if code == start_symbol:
            started = True
        if not started:
            continue
        frame = ts.get_h_data(code, start=start, end=end, index=index)
        frame.to_csv('%s/%s.csv' % (path, code), encoding='utf-8')
def update_db_all_1d(start_symbol='', end='', index=False, sleep=2):
    """Incrementally update the daily CSV for every symbol.

    Iterates over all symbols (stocks or indexes) and appends any new bars
    via update_db_1d, sleeping between requests as simple rate limiting.

    Parameters
    ----------
    start_symbol : str
        Skip symbols until this code is reached; '' processes everything.
    end : str
        Last date to fetch; passed through to update_db_1d.
    index : bool
        True for index symbols, False for stocks.
    sleep : int or float
        Seconds to pause between symbols.
    """
    # NOTE: the original computed a `path` local here but never used it --
    # update_db_1d derives the output path itself -- so it was removed.
    symbols = info.get_symbol_list(index)
    ok = 0
    for code in symbols:
        if start_symbol == '':
            ok = 1
        elif start_symbol == code:
            ok = 1
        if ok:
            update_db_1d(code, end=end, index=index)
            time.sleep(sleep)
def update_db_1d_auto(start_symbol='', end='', index=False, sleep_step=5):
    """Update daily CSVs for all symbols with retry and backoff.

    Like update_db_all_1d, but each symbol is retried with an exponentially
    growing delay (capped at 60 seconds) until it succeeds or the cap is
    reached, and progress is reported as a percentage.

    Parameters
    ----------
    start_symbol : str
        Skip symbols until this code is reached; '' processes everything.
    end : str
        Last date to fetch; passed through to update_db_1d.
    index : bool
        True for index symbols, False for stocks.
    sleep_step : int
        Base delay in seconds; doubled after every failure.
    """
    # NOTE: the original computed an unused `path` local here; update_db_1d
    # derives the output path itself, so it was removed.
    ok = 0
    symbols = info.get_symbol_list(index)
    total = len(symbols)
    count = 0
    for code in symbols:
        count += 1
        if start_symbol == '':
            ok = 1
        elif start_symbol == code:
            ok = 1
        if ok:
            fail_num = 1
            while 1:
                delay = sleep_step*fail_num
                try:
                    if delay >= 60:
                        delay = 60  # cap the backoff at one minute
                    print('delay %d' % delay)
                    time.sleep(delay)
                    update_db_1d(code, end=end, index=index)
                # BUG FIX: was a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit and made the loop impossible
                # to interrupt; catch only genuine errors.
                except Exception:
                    fail_num *= 2
                    if delay >= 60:
                        print('update %s failed' % code)
                        break
                else:
                    print('update %s finish, process %3.0f%%' % (code, count/total*100))
                    break
def update_db_1d(code, end='', index=False):
    """Append new daily bars for one symbol to its CSV (or create it).

    Reads the last dated row of the existing CSV to find where to resume,
    fetches bars from the day after up to `end`, and appends them.  When no
    CSV exists yet, a fresh one is written starting from 2017-08-15.

    Parameters
    ----------
    code : str
        Symbol code understood by ts.get_h_data.
    end : str
        Last date to fetch ('YYYY-MM-DD'); defaults to today when ''.
    index : bool
        True for index symbols, False for stocks.
    """
    path = index_1d_path if index else stock_1d_path
    csv = '%s/%s.csv' % (path, code)
    if end == '':
        end = datetime.today().strftime('%Y-%m-%d')
    if os.path.exists(csv):
        with open(csv) as f:
            lines = f.readlines()
            # Scan backwards for the last non-empty row; its first column is
            # the most recent date already stored.  (Assumes the file always
            # contains at least one dated data row -- TODO confirm.)
            for i in range(len(lines)-1, -1, -1):
                if lines[i] != '\n':
                    start = datetime.strptime(lines[i].split(',')[0], '%Y-%m-%d')
                    break
            start += timedelta(1)  # resume the day after the stored one
            start = start.strftime('%Y-%m-%d')
            # BUG FIX: the original also called f.close() here; the
            # with-statement already closes the file, so the redundant
            # call was removed.
        print('update %s from %s to %s' % (code, start, end))
        df = ts.get_h_data(code, start=start, end=end, index=index)
        if len(df):
            # append without re-writing the header
            df.sort_index().to_csv(csv, header=False, mode='a', encoding='utf-8')
            print('finish stock: %s' % code)
        else:
            print('s: %s get 0 length' % code)
    else:
        print('do %s' % code)
        df = ts.get_h_data(code, start='2017-08-15', end=end, index=index)
        if len(df):
            df.sort_index().to_csv(csv, encoding='utf-8')
        else:
            print('s: %s get 0 length' % code)
def get_1d(symbol, index=False):
    """Load a symbol's daily-bar CSV into a DataFrame indexed by date.

    Prints an error and returns None when the CSV does not exist yet.
    """
    base = index_1d_path if index else stock_1d_path
    csv_file = '%s/%s.csv' % (base, symbol)
    if not os.path.exists(csv_file):
        print('ERROR: can not find %s\n use update_all_1d to update db' % csv_file)
        return None
    return pd.read_csv(csv_file, index_col='date',
                       date_parser=lambda x: pd.Timestamp(x))
def update_db_day_all(date=''):
    """Fetch the full-market daily snapshot and cache it as <date>.csv.

    Parameters
    ----------
    date : str
        Trading date 'YYYY-MM-DD'; defaults to today when ''.

    BUG FIX: previously an empty `date` was used verbatim in the file name,
    producing a file literally named '.csv' that get_day_all (which defaults
    '' to today's date) could never find; the date is now normalized the
    same way in both functions.
    """
    if date == '':
        date = datetime.today().strftime('%Y-%m-%d')
    df = ts.get_day_all(date)
    df.to_csv('%s/%s.csv' % (day_all_path, date), encoding='utf-8')
def get_day_all(date=''):
    """Return the cached full-market snapshot for `date`, or None if absent.

    An empty `date` is interpreted as today.
    """
    if date == '':
        date = datetime.today().strftime('%Y-%m-%d')
    cache = '%s/%s.csv' % (day_all_path, date)
    if not os.path.exists(cache):
        return None
    return pd.read_csv(cache, index_col=0, dtype={'code': 'O'})
|
## 135. Candy
#
# There are N children standing in a line. Each child is assigned a rating value.
#
# You are giving candies to these children subjected to the following requirements:
#
# Each child must have at least one candy.
# Children with a higher rating get more candies than their neighbors.
# What is the minimum candies you must give?
#
# Example 1:
#
# Input: [1,0,2]
# Output: 5
# Explanation: You can allocate to the first, second and third child with 2, 1, 2 candies respectively.
#
# Example 2:
#
# Input: [1,2,2]
# Output: 4
# Explanation: You can allocate to the first, second and third child with 1, 2, 1 candies respectively.
# The third child gets 1 candy because it satisfies the above two conditions.
##
class Solution(object):
    def candy(self, ratings):
        """Minimum candies so that every child gets at least one and any
        child rated higher than an immediate neighbor gets more candies
        than that neighbor.

        :type ratings: List[int]
        :rtype: int

        Two sweeps: left-to-right enforces the left-neighbor rule,
        right-to-left enforces the right-neighbor rule.
        """
        n = len(ratings)
        candies = [1] * n
        # left-to-right: bump anyone rated above their left neighbor
        for idx in range(1, n):
            if ratings[idx] > ratings[idx - 1]:
                candies[idx] = candies[idx - 1] + 1
        # right-to-left: bump anyone rated above their right neighbor,
        # keeping whatever the first pass already assigned
        for idx in reversed(range(n - 1)):
            if ratings[idx] > ratings[idx + 1]:
                candies[idx] = max(candies[idx], candies[idx + 1] + 1)
        return sum(candies)
if __name__ == "__main__":
arr1 = [1, 0, 2]
print(Solution().candy(arr1))
arr1 = [1, 2, 2]
print(Solution().candy(arr1))
|
from __future__ import print_function
import time
import os
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
import scipy.ndimage
# from Net import Generator, WeightNet
from scipy.misc import imread, imsave
from skimage import transform, data
from glob import glob
from model import Model
import matplotlib.image as mpimg
# Checkpoint of the trained fusion network restored in main().
MODEL_SAVE_PATH = './model/model.ckpt'

# Dataset selection: exactly one of the blocks below is active at a time.
# `path1` and `path2` are the directories of the two source modalities to
# be fused; fused images are written under `output_path`.
output_path='./results/vis-ir/TNO/'
path = './test_imgs/vis-ir/TNO/'
path1 = path + 'vis/'
path2 = path + 'ir/'

# output_path='./results/vis-ir/RoadScene/'
# path = './test_imgs/vis-ir/RoadScene/'
# path1 = path + 'vis/'
# path2 = path + 'ir/'

# output_path ='./results/medical/'
# path = './test_imgs/medical/'
# path1 = path + 'pet/'
# path2 = path + 'mri/'

# output_path='./results/multi-exposure/dataset1/'
# path = './test_imgs/multi-exposure/dataset1/'
# path1 = path + 'oe/'
# path2 = path + 'ue/'

# output_path='./results/multi-exposure/dataset2/'
# path = './test_imgs/multi-exposure/dataset2/'
# path1 = path + 'oe/'
# path2 = path + 'ue/'

# output_path='./results/multi-focus/'
# path = './test_imgs/multi-focus/'
# path1 = path + 'far/'
# path2 = path + 'near/'
def main():
    """Fuse each test image pair with the trained model and save the result.

    For every numbered pair in path1/path2: load both grayscale images,
    run the fusion network, preview the sources and the output, then write
    the fused image to output_path.
    """
    print('\nBegin to generate pictures ...\n')
    Format = '.png'
    for i in range(10):
        file_name1 = path1 + str(i + 1) + Format
        file_name2 = path2 + str(i + 1) + Format
        # normalize pixel values to [0, 1]
        img1 = imread(file_name1) / 255.0
        img2 = imread(file_name2) / 255.0
        print('file1:', file_name1)
        print('file2:', file_name2)
        Shape1 = img1.shape
        h1 = Shape1[0]
        w1 = Shape1[1]
        Shape2 = img2.shape
        h2 = Shape2[0]
        w2 = Shape2[1]
        assert (h1 == h2 and w1 == w2), 'Two images must have the same shape!'
        print('input shape:', img1.shape)
        # the network consumes NHWC batches of one single-channel image
        img1 = img1.reshape([1, h1, w1, 1])
        img2 = img2.reshape([1, h1, w1, 1])
        # a fresh graph/session per pair because the model is built for a
        # fixed H x W
        with tf.Graph().as_default(), tf.Session() as sess:
            M = Model(BATCH_SIZE=1, INPUT_H=h1, INPUT_W=w1, is_training=False)
            # restore the trained weights and run the fusion
            t_list = tf.trainable_variables()
            saver = tf.train.Saver(var_list=t_list)
            model_save_path = MODEL_SAVE_PATH
            print(model_save_path)
            sess.run(tf.global_variables_initializer())
            saver.restore(sess, model_save_path)
            outputs = sess.run(M.generated_img,
                               feed_dict={M.SOURCE1: img1, M.SOURCE2: img2})
            output = outputs[0, :, :, 0]  # values in 0-1
            fig = plt.figure()
            f1 = fig.add_subplot(311)
            f2 = fig.add_subplot(312)
            f3 = fig.add_subplot(313)
            # BUG FIX: img1/img2 were reshaped to rank-4 [1, h, w, 1] above,
            # and matplotlib's imshow rejects 4-D arrays; display the 2-D
            # slices instead.
            f1.imshow(img1[0, :, :, 0], cmap='gray')
            f2.imshow(img2[0, :, :, 0], cmap='gray')
            f3.imshow(output, cmap='gray')
            plt.show()
            if not os.path.exists(output_path):
                os.makedirs(output_path)
            imsave(output_path + str(i + 1) + Format, output)


if __name__ == '__main__':
    main()
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2020-03-27 13:07
from __future__ import unicode_literals
from django.db import migrations, models
# Auto-generated schema migration: adds an ``image`` field to the
# ``exhibition.Painting`` model.
class Migration(migrations.Migration):

    # must be applied after the app's initial migration
    dependencies = [
        ('exhibition', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='painting',
            name='image',
            # NOTE(review): the default is a URL string used as an
            # ImageField value, and upload_to begins with '/' (treated as a
            # path segment by most storages) -- both look unintentional;
            # confirm against the model definition.
            field=models.ImageField(default='https://avatars.mds.yandex.net/get-pdb/2979168/d8e27e17-e239-4175-9efb-3235a21eafea/s1200?webp=false', upload_to='/images'),
        ),
    ]
|
import time
import numpy as np
from dolo.algos.dtcscc.perturbations import approximate_controls
from dolo.numeric.optimize.ncpsolve import ncpsolve
from dolo.numeric.optimize.newton import (SerialDifferentiableFunction,
serial_newton)
from dolo.numeric.interpolation import create_interpolator
def parameterized_expectations(model, verbose=False, initial_dr=None,
                               pert_order=1, with_complementarities=True,
                               grid={}, distribution={},
                               maxit=100, tol=1e-8, inner_maxit=10,
                               direct=False):
    '''
    Find global solution for ``model`` via parameterized expectations.
    Controls must be expressed as a direct function of equilibrium objects.
    Algorithm iterates over the expectations function in the arbitrage equation.

    Parameters:
    ----------
    model : NumericModel
        ``dtcscc`` model to be solved
    verbose : boolean
        if True, display iterations ('full' also shows the inner solver)
    initial_dr : decision rule
        initial guess for the decision rule
    pert_order : {1}
        if no initial guess is supplied, the perturbation solution at order
        ``pert_order`` is used as initial guess
    grid : grid options
    distribution : distribution options
    maxit : maximum number of iterations
    tol : tolerance criterium for successive approximations
    inner_maxit : maximum number of iteration for inner solver
    direct : if True, solve with direct method. If false, solve indirectly

    Returns
    -------
    decision rule :
        approximated solution
    '''
    t1 = time.time()
    # model equations: transition g, expectation h, direct response d, and
    # the arbitrage residual written on the expectation function
    g = model.functions['transition']
    h = model.functions['expectation']
    d = model.functions['direct_response']
    f = model.functions['arbitrage_exp']  # f(s, x, z, p, out)
    parms = model.calibration['parameters']

    if initial_dr is None:
        if pert_order == 1:
            initial_dr = approximate_controls(model)
        if pert_order > 1:
            raise Exception("Perturbation order > 1 not supported (yet).")

    # state-space grid and two interpolators: one for the decision rule,
    # one for the expectations function being iterated on
    approx = model.get_grid(**grid)
    grid = approx.grid
    interp_type = approx.interpolation
    dr = create_interpolator(approx, interp_type)
    expect = create_interpolator(approx, interp_type)

    # quadrature nodes/weights used to integrate over the shocks
    distrib = model.get_distribution(**distribution)
    nodes, weights = distrib.discretize()

    N = grid.shape[0]
    z = np.zeros((N, len(model.symbols['expectations'])))

    x_0 = initial_dr(grid)
    x_0 = x_0.real  # just in case ...
    h_0 = h(grid, x_0, parms)

    it = 0
    err = 10
    err_0 = 10
    verbit = True if verbose == 'full' else False

    if with_complementarities is True:
        # lower/upper bounds on the controls, evaluated on the grid
        lbfun = model.functions['controls_lb']
        ubfun = model.functions['controls_ub']
        lb = lbfun(grid, parms)
        ub = ubfun(grid, parms)
    else:
        lb = None
        ub = None

    if verbose:
        headline = '|{0:^4} | {1:10} | {2:8} | {3:8} |'
        headline = headline.format('N', ' Error', 'Gain', 'Time')
        stars = '-'*len(headline)
        print(stars)
        print(headline)
        print(stars)

        # format string for within loop
        fmt_str = '|{0:4} | {1:10.3e} | {2:8.3f} | {3:8.3f} |'

    while err > tol and it <= maxit:
        it += 1
        t_start = time.time()

        # dr.set_values(x_0)
        expect.set_values(h_0)

        # evaluate expectation over the future state
        z[...] = 0
        for i in range(weights.shape[0]):
            e = nodes[i, :]
            S = g(grid, x_0, e, parms)
            z += weights[i]*expect(S)

        if direct is True:
            # Use control as direct function of arbitrage equation
            new_x = d(grid, z, parms)
            if with_complementarities is True:
                # clip the controls to their admissible bounds
                new_x = np.minimum(new_x, ub)
                new_x = np.maximum(new_x, lb)
        else:
            # Find control by solving arbitrage equation
            def fun(x): return f(grid, x, z, parms)
            sdfun = SerialDifferentiableFunction(fun)

            if with_complementarities is True:
                [new_x, nit] = ncpsolve(sdfun, lb, ub, x_0, verbose=verbit,
                                        maxit=inner_maxit)
            else:
                [new_x, nit] = serial_newton(sdfun, x_0, verbose=verbit)

        new_h = h(grid, new_x, parms)

        # update error (sup norm of the change in the expectations function)
        err = (abs(new_h - h_0).max())

        # Update guess for decision rule and expectations function
        x_0 = new_x
        h_0 = new_h

        # print error information if `verbose`
        err_SA = err/err_0
        err_0 = err
        t_finish = time.time()
        elapsed = t_finish - t_start
        if verbose:
            print(fmt_str.format(it, err, err_SA, elapsed))

        if it == maxit:
            import warnings
            warnings.warn(UserWarning("Maximum number of iterations reached"))

    # compute final time and do final printout if `verbose`
    t2 = time.time()
    if verbose:
        print(stars)
        print('Elapsed: {} seconds.'.format(t2 - t1))
        print(stars)

    # Interpolation for the decision rule
    dr.set_values(x_0)

    return dr
import time
import numpy as np
from dolo.algos.dtcscc.perturbations import approximate_controls
from dolo.numeric.interpolation import create_interpolator
def parameterized_expectations_direct(model, verbose=False, initial_dr=None,
                                      pert_order=1, grid={}, distribution={},
                                      maxit=100, tol=1e-8):
    '''
    Finds a global solution for ``model`` using parameterized expectations
    function. Requires the model to be written with controls as a direct
    function of the model objects.

    The algorithm iterates on the expectations function in the arbitrage
    equation. It follows the discussion in section 9.9 of Miranda and
    Fackler (2002).

    Parameters
    ----------
    model : NumericModel
        "dtcscc" model to be solved
    verbose : boolean
        if True, display iterations
    initial_dr : decision rule
        initial guess for the decision rule
    pert_order : {1}
        if no initial guess is supplied, the perturbation solution at order
        ``pert_order`` is used as initial guess
    grid: grid options
    distribution: distribution options
    maxit: maximum number of iterations
    tol: tolerance criterium for successive approximations

    Returns
    -------
    decision rule :
        approximated solution
    '''
    t1 = time.time()
    # model equations: transition g, direct response d, expectation h
    g = model.functions['transition']
    d = model.functions['direct_response']
    h = model.functions['expectation']
    parms = model.calibration['parameters']

    if initial_dr is None:
        if pert_order == 1:
            initial_dr = approximate_controls(model)
        if pert_order > 1:
            raise Exception("Perturbation order > 1 not supported (yet).")

    # state-space grid and two interpolators: one for the decision rule,
    # one for the expectations function being iterated on
    approx = model.get_grid(**grid)
    grid = approx.grid
    interp_type = approx.interpolation
    dr = create_interpolator(approx, interp_type)
    expect = create_interpolator(approx, interp_type)

    # quadrature nodes/weights used to integrate over the shocks
    distrib = model.get_distribution(**distribution)
    nodes, weights = distrib.discretize()

    N = grid.shape[0]
    z = np.zeros((N, len(model.symbols['expectations'])))

    x_0 = initial_dr(grid)
    x_0 = x_0.real  # just in case ...
    h_0 = h(grid, x_0, parms)

    it = 0
    err = 10
    err_0 = 10

    if verbose:
        headline = '|{0:^4} | {1:10} | {2:8} | {3:8} |'
        headline = headline.format('N', ' Error', 'Gain', 'Time')
        stars = '-'*len(headline)
        print(stars)
        print(headline)
        print(stars)

        # format string for within loop
        fmt_str = '|{0:4} | {1:10.3e} | {2:8.3f} | {3:8.3f} |'

    while err > tol and it <= maxit:
        it += 1
        t_start = time.time()

        # dr.set_values(x_0)
        expect.set_values(h_0)

        z[...] = 0
        for i in range(weights.shape[0]):
            e = nodes[i, :]
            S = g(grid, x_0, e, parms)
            # evaluate expectation over the future state
            z += weights[i]*expect(S)

        # TODO: check that control is admissible
        new_x = d(grid, z, parms)
        new_h = h(grid, new_x, parms)

        # update error (sup norm of the change in the expectations function)
        err = (abs(new_h - h_0).max())

        # Update guess for decision rule and expectations function
        x_0 = new_x
        h_0 = new_h

        # print error information if `verbose`
        err_SA = err/err_0
        err_0 = err
        t_finish = time.time()
        elapsed = t_finish - t_start
        if verbose:
            print(fmt_str.format(it, err, err_SA, elapsed))

        if it == maxit:
            import warnings
            warnings.warn(UserWarning("Maximum number of iterations reached"))

    # compute final time and do final printout if `verbose`
    t2 = time.time()
    if verbose:
        print(stars)
        print('Elapsed: {} seconds.'.format(t2 - t1))
        print(stars)

    # Interpolation for the decision rule
    dr.set_values(x_0)

    return dr
|
from __future__ import print_function, division, absolute_import
import os
from jinja2 import Template
# Directory containing this module; template files are resolved relative to it.
cur_dir = os.path.abspath(os.path.dirname(__file__))
def create_docker_compose_template(IMAGE_NAME="k2d_example", IMAGE_VERSION="v1",
                                   SERVICE_NAME="k2d_example", CONTAINER_NAME="k2d_example",
                                   URL_PREFIX='', PORT_TUPLES=None, ENV_TUPLES=None):
    """Render the bundled docker-compose.yml Jinja2 template.

    PORT_TUPLES / ENV_TUPLES default to empty lists; None is the sentinel
    used to avoid sharing a mutable default between calls.
    """
    port_tuples = PORT_TUPLES or []
    env_tuples = ENV_TUPLES or []
    template_path = os.path.join(cur_dir, 'templates', 'docker-compose.yml')
    with open(template_path) as fh:
        template = Template(fh.read())
    return template.render(IMAGE_NAME=IMAGE_NAME, IMAGE_VERSION=IMAGE_VERSION,
                           SERVICE_NAME=SERVICE_NAME, CONTAINER_NAME=CONTAINER_NAME,
                           URL_PREFIX=URL_PREFIX, PORT_TUPLES=port_tuples,
                           ENV_TUPLES=env_tuples)
def create_dockerfile_template(DIR_PATH='', APP_NAME=''):
    """Render the bundled Dockerfile Jinja2 template."""
    template_path = os.path.join(cur_dir, 'templates', 'Dockerfile')
    with open(template_path) as fh:
        template = Template(fh.read())
    return template.render(DIR_PATH=DIR_PATH, APP_NAME=APP_NAME)
|
from Aircraft_winglets import Aircraft, Wing, Fuselage, ACSolidWing, ACRibWing
from Aerothon.DefaultMaterialsLibrary import Monokote, PinkFoam, Basswood, Steel, Balsa, Aluminum, Ultracote
from scalar.units import ARCDEG, FT, SEC, LBF, IN
from scalar.units import AsUnit
import pylab as pyl
import numpy as npy
from scalar.units import IN
# Draw the aircraft and prepare AVL input file names for a sweep of
# ground-effect symmetry-plane heights (Zsym), one file per height.
Aircraft.Draw()

# Heights (in inches) of the AVL symmetry plane; large negative values
# approximate free flight (no ground effect).
Z_array = [0.5,0.4,0.3,0.2,0.1,0.0,-0.1,-0.2,-0.3,-0.4,-0.5,-0.75,-1.0,-1.5,-2.0,-3.0,-4.0,-5.0,-10.0,-15.0,-20.0,-30.0,-40.0,-50.0,-100.0,-200.0]

#fname_base = ('AVL\AVLAircraft')
fname_base = ('AVL\AVLAircraft_winglets')

#Aircraft.WriteAVLAircraft(fname_base + '_noGE')

for i in Z_array:
    Aircraft.iZsym = 1        # enable the AVL ground-effect symmetry plane
    Aircraft.Zsym = i * IN
    # encode the height in the file name: 'n' for negative, 'p' for positive
    fname = fname_base + '_GE_Zsym_'
    if i < 0:
        fname = fname + 'n' + str(abs(i))
    else:
        fname = fname + 'p' + str(abs(i))
    #Aircraft.WriteAVLAircraft(fname + '.avl')
    # BUG FIX: was the Python 2 statement `print fname`, a syntax error
    # under Python 3; the function form behaves identically on both.
    print(fname)

#Controls.RunDir = 'AVL/'
#Controls.AddRun('Stab', 'AVLAircraft.avl', WriteAVLInput = Execute)
#Controls.Stab.AddCommand('a a ' + str(Aircraft.Alpha_Zero_CM/ARCDEG) )
#Controls.Stab.DumpStability('AVLDeriv.txt')
#Controls.Stab.Exit()
#if Execute:
#    Controls.ExecuteAVL()
#Controls.ReadAVLFiles()
|
def itsMovieTime(d):
    """Print every (duration, duration, index, index) combination from the
    fixed movie list whose combined running time is at least ``d`` minutes.

    Combinations are emitted in both orders and a movie may be paired with
    itself, matching an exhaustive double loop.  Returns None.
    """
    movie_duration = [90, 85, 75, 60, 120, 150, 125]
    pairs = ((a, b, ia, ib)
             for ia, a in enumerate(movie_duration)
             for ib, b in enumerate(movie_duration))
    for a, b, ia, ib in pairs:
        if a + b >= d:
            print(a, b, ia, ib)


print(itsMovieTime(250))
|
"""
Copy Target functions
"""
from ..utils import Dispatch, exceptions
Copy = Dispatch("copy")
@Copy.register
def default(*extra_args, **extra_kwargs):
return exceptions.target_not_implemented()
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-07-09 21:21
from __future__ import unicode_literals
from django.db import migrations, models
# Auto-generated schema migration: alters Event.sponsors so that events may
# have no sponsors (blank=True) and names the reverse accessor 'events'.
class Migration(migrations.Migration):

    # must follow the previous auto migration in the 'events' app
    dependencies = [
        ('events', '0021_auto_20170709_1951'),
    ]

    operations = [
        migrations.AlterField(
            model_name='event',
            name='sponsors',
            field=models.ManyToManyField(blank=True, related_name='events', to='events.Sponsor'),
        ),
    ]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.