text stringlengths 38 1.54M |
|---|
import unittest
from steps_to_zero import steps_to_zero
class TestStepsToZero(unittest.TestCase):
    """Unit tests for the steps_to_zero function."""

    def test_steps_to_zero(self):
        # Table-driven variant: each (input, expected) pair checked in its own subTest.
        cases = [(14, 6), (8, 4), (123, 12)]
        for value, expected in cases:
            with self.subTest(value=value):
                self.assertEqual(steps_to_zero(value), expected)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
|
from django.contrib import admin
from .models import Metric, Record
@admin.register(Metric)
class MetricAdmin(admin.ModelAdmin):
    """Admin configuration for Metric: list view shows only the metric name."""
    list_display = ('name',)
@admin.register(Record)
class RecordAdmin(admin.ModelAdmin):
    """Admin configuration for Record: filterable by metric, browsable by date."""
    list_display = ('metric', 'timestamp', 'value')
    list_filter = ('metric',)
    # Enables drill-down navigation by year/month/day of the timestamp.
    date_hierarchy = 'timestamp'
|
# Program that compares two numbers entered by the user.
a = float(input("ป้อนตัวเลขที่ 1 :"))
b = float(input("ป้อนตัวเลขที่ 2 :"))
if a > b:
    print("1 > 2")
elif a < b:
    print("2 > 1")
else:
    # BUGFIX: the original printed "2 > 1" when the numbers were equal.
    print("1 = 2")
from lab import decorators
from lab.nodes.virtual_server import VipServer, LibVirtServer
class Vtc(VipServer):
    """Cisco VTS controller (VTC) node: curl-based REST helpers plus pod-level
    deployment / disruption / day-0 configuration operations.

    Example ncs_cli lines for wiring a bare-metal server into the TOR switches:

    set cisco-vts devices device TORSWITCHB ports port Ethernet1/39 connection-type server
    set cisco-vts devices device TORSWITCHB ports port Ethernet1/39 servers server nfvbench_tg type baremetal
    set cisco-vts devices device TORSWITCHB ports port Ethernet1/39 servers server nfvbench_tg interface-name eth0
    set cisco-vts devices device TORSWITCHB ports port Ethernet1/39 servers server nfvbench_tg ip 1.1.1.1
    set cisco-vts devices device TORSWITCHA ports port Ethernet1/39 connection-type server
    set cisco-vts devices device TORSWITCHA ports port Ethernet1/39 servers server nfvbench_tg type baremetal
    set cisco-vts devices device TORSWITCHA ports port Ethernet1/39 servers server nfvbench_tg interface-name eth1
    set cisco-vts devices device TORSWITCHA ports port Ethernet1/39 servers server nfvbench_tg ip 1.1.1.1
    """
    # curl argument templates; {ip}/{uuid}/{data} placeholders are substituted by callers.
    API_CALLS = {
        'post_sync_from': {'rest': '-XPOST https://{ip}:8888//api/running/devices/device/{uuid}/_operations/sync-from', 'cli': ''},
        'put_server': {'rest': "-XPUT -H 'Content-Type: application/vnd.yang.data+json' https://{ip}:8888/api/running/cisco-vts/uuid-servers/uuid-server/{uuid} -d '{data}'"},
        'patch_device_port': {'rest': "-XPATCH -H 'Content-Type: application/vnd.yang.data+json' https://{ip}:8888/api/running/cisco-vts/devices/device/{uuid}/ports -d '{data}'",
                              'json': '{"cisco-vts:ports": {"port": [{"name": "PORT", "connection-type": "cisco-vts-identities:server", "servers": {"server": [{"name": "NAME", "type": "cisco-vts-identities:baremetal", "interface-name": "eth0", "ip": "1.1.1.1"}]}}]}}'
                              }
    }
    # https://www.cisco.com/c/dam/en/us/td/docs/net_mgmt/virtual_topology_system/2_5_2/api/Cisco_VTS_2_5_2_API_Doc.pdf

    def api_vtc_ha(self):
        """Return the vtc-ha cluster configuration (equivalent of `show configuration vtc-ha`)."""
        cmd = '-XGET -H "Accept: application/vnd.yang.data+json" https://{ip}:8888/api/running/vtc-ha?deep'
        return self.cmd(cmd)

    def api_openstack(self):
        """Return the OpenStack VMM configuration (equivalent of `show configuration openstack vmm`)."""
        cmd = '-XGET -H "Accept: application/vnd.yang.data+json" https://{ip}:8888/api/running/openstack?deep'
        return self.cmd(cmd)

    def api_pool_lst(self):
        """Return the resource pools (equivalent of `show configuration resource-pools`)."""
        cmd = '-XGET -H "Accept: application/vnd.yang.data+json" https://{ip}:8888/api/running/resource-pools?deep'
        return self.cmd(cmd)

    def api_port_put(self, vlan, netid, hostid, connid):
        """Create a port with a fresh random UUID under the admin tenant/topology.

        :param vlan: VLAN tagging value for the port
        :param netid: network UUID the port belongs to
        :param hostid: binding host id (e.g. 'vmtp')
        :param connid: connection id of the server the port attaches to
        """
        import uuid
        import json
        portid = str(uuid.uuid4())
        dic = {'port': {'id': portid,
                        'status': 'cisco-vts-identities:active',
                        'tagging': vlan,
                        'network-id': netid,
                        'binding-host-id': hostid,
                        'connid': [{'id': connid}],
                        'admin-state-up': True,
                        'type': 'cisco-vts-identities:baremetal',
                        'mac-address': 'unknown-' + str(uuid.uuid4())  # placeholder MAC; a unique suffix avoids collisions
                        }
               }
        cmd = "-XPUT -H 'Content-Type: application/vnd.yang.data+json' https://{ip}:8888/api/running/cisco-vts/tenants/tenant/admin/topologies/topology/admin/ports/port/" + portid + " -d '" + json.dumps(dic) + "'"  # page29
        return self.cmd(cmd)

    def api_dev_lst(self):
        """Return all devices (equivalent of `show configuration cisco-vts devices device`)."""
        cmd = '-XGET -H "Accept: application/vnd.yang.data+json" https://{ip}:8888/api/running/cisco-vts/devices?deep'
        return self.cmd(cmd)

    def api_port_get(self, uuid):
        # NOTE(review): returns the curl argument string, not the API result — unlike
        # the other api_* helpers it never calls self.cmd(); confirm this is intended.
        return '-XGET -H "Accept: application/vnd.yang.data+json" https://{ip}:8888/api/running/cisco-vts/tenants/tenant/admin/topologies/topology/admin/ports/port/' + uuid  # page 30

    def api_port_lst(self):
        """Return all ports of the admin tenant/topology."""
        cmd = '-XGET -H "Accept: application/vnd.yang.data+json" https://{ip}:8888/api/running/cisco-vts/tenants/tenant/admin/topologies/topology/admin/ports/port/'  # page 30
        return self.cmd(cmd)

    def api_port_del(self, uuid):
        # NOTE(review): like api_port_get, this builds the curl arguments but does not
        # execute them via self.cmd() — verify callers run the returned string.
        return '-XDELETE https://{ip}:8888/api/running/cisco-vts/tenants/tenant/admin/topologies/topology/admin/ports/port/' + uuid  # page 31

    def api_srv_lst(self):
        """Return the list of uuid-server dicts (equivalent of `show configuration cisco-vts uuid-servers`)."""
        cmd = '-XGET -H "Accept: application/vnd.yang.data+json" https://{ip}:8888/api/running/cisco-vts/uuid-servers?deep'
        return self.cmd(cmd)['cisco-vts:uuid-servers']['uuid-server']

    def r_vtc_get_openstack(self):
        """Return (networks, servers) as seen by VTC's OpenStack VMM view.

        Networks are wrapped into CloudNetwork objects, one per subnet.
        """
        from lab.cloud.cloud_network import CloudNetwork
        a = self.api_openstack()['cisco-vts-openstack:openstack']['vmm'][0]
        net_dics = a.get('network', [])
        servers = a.get('servers', [])
        nets = []
        for subnet_dic in a.get('subnet', []):
            # Pair each subnet with its parent network by network-id.
            net_dic = [x for x in net_dics if x['id'] == subnet_dic['network-id']][0]
            nets.append(CloudNetwork(cloud=None, dic=net_dic, subnet_dic=subnet_dic))
        return nets, servers

    def __init__(self, pod, dic):
        """:param pod: the lab pod this VTC belongs to
        :param dic: node config dict; must contain 'vtc-username' and 'vtc-password'
        """
        super(Vtc, self).__init__(pod=pod, dic=dic)
        self.vtc_username = dic['vtc-username']
        self.vtc_password = dic['vtc-password']

    def cmd(self, cmd):
        """Execute a curl REST call against this VTC and return the parsed JSON.

        Retries up to 10 times on truncated/invalid JSON; raises RuntimeError on
        ssh failure or after exhausting the retries.
        """
        import json
        cmd = 'curl -s -k -u {u}:{p} '.format(u=self.vtc_username, p=self.vtc_password) + cmd.replace('{ip}', self.ssh_ip)
        for i in range(10):
            ans = self.exe(cmd, is_warn_only=True)
            if ans.failed:
                raise RuntimeError(ans)
            else:
                try:  # sometimes the answer is not fully formed (usually happens with server list), simply repeat
                    return json.loads(ans) if ans else {}  # it might be empty
                except ValueError:  # something like ValueError: Unterminated string starting at: line 65 column 11 (char 4086)
                    continue
        else:
            # for-else: reached only when all 10 attempts produced invalid JSON
            raise RuntimeError('Failed after 10 attempts: ' + cmd)

    def show_vxlan_tunnel(self):
        # NOTE(review): under Python 3 this returns a lazy map object; the VTF calls
        # only run when the caller iterates it — confirm callers do.
        return map(lambda vtf: vtf.show_vxlan_tunnel(), self.pod.get_vft())

    def disrupt(self, node_to_disrupt, method_to_disrupt, downtime):
        """Disrupt one cluster node for `downtime` seconds.

        :param node_to_disrupt: e.g. 'master-vtc' / 'slave-...' — prefix selects Master/Slave
        :param method_to_disrupt: one of vm-shutdown, isolate-from-mx, isolate-from-api, vm-reboot
        :param downtime: seconds to keep the node disrupted
        """
        import time
        is_master = node_to_disrupt.startswith('master')
        node_class = node_to_disrupt.split('-')[-1]
        cluster = self.api_vtc_ha()
        # Pick the hostname whose original role matches the requested Master/Slave.
        node_id = [x['hostname'] for x in cluster['vtc-ha:vtc-ha']['nodes']['node'] if x['original-state'] == ('Master' if is_master else 'Slave')][0]
        node_id = node_id.replace('vtc', node_class)  # node_id might be vtcXX or vtsrXX
        node_disrupt = self.individuals[node_id]
        vts_host = node_disrupt.hard
        if method_to_disrupt == 'vm-shutdown':
            # Freeze and later resume the libvirt domain on the hosting server.
            vts_host.exe(command='virsh suspend {}'.format(self.id))
            time.sleep(downtime)
            vts_host.exe(command='virsh resume {}'.format(self.id))
        elif method_to_disrupt == 'isolate-from-mx':
            # Bring the mgmt bridge interface of this VM down on the host.
            ans = vts_host.exe('ip l | grep mgmt | grep {0}'.format(self.id))
            if_name = ans.split()[1][:-1]  # strip trailing ':' from the interface name
            vts_host.exe('ip l s dev {} down'.format(if_name))
            time.sleep(downtime)
            vts_host.exe('ip l s dev {} up'.format(if_name))
        elif method_to_disrupt == 'isolate-from-api':
            ans = vts_host.exe('ip l | grep api | grep {0}'.format(self.id))
            if_name = ans.split()[1][:-1]
            vts_host.exe('ip l s dev {} down'.format(if_name))
            time.sleep(downtime)
            vts_host.exe('ip l s dev {} up'.format(if_name))
        elif method_to_disrupt == 'vm-reboot':
            # 'set -m' because of http://stackoverflow.com/questions/8775598/start-a-background-process-with-nohup-using-fabric
            self.exe('set -m; sudo bash -c "ip link set dev eth0 down && ip link set dev eth1 down && sleep {0} && shutdown -r now" 2>/dev/null >/dev/null &'.format(downtime), is_warn_only=True)
            time.sleep(downtime)

    def get_config_and_net_part_bodies(self):
        """Render the VTC VM cloud-init config and the libvirt network XML snippet.

        Returns (cfg_body, net_part); the config is also written as an artifact.
        """
        from lab import with_config
        cfg_tmpl = with_config.read_config_from_file(cfg_path='vtc-vm-config.txt', folder='vts', is_as_string=True)
        net_part_tmpl = with_config.read_config_from_file(cfg_path='vtc-net-part-of-libvirt-domain.template', folder='vts', is_as_string=True)
        dns_ip, ntp_ip = self.pod.get_dns()[0], self.pod.get_ntp()[0]
        hostname = '{id}-{lab}'.format(lab=self.pod, id=self.id)
        a_nic = self.get_nic('a')  # Vtc sits on out-of-tor network marked is_ssh
        a_ip, a_net_mask = a_nic.get_ip_and_mask()
        a_gw = a_nic.get_net().get_gw()
        mx_nic = self.get_nic('mx')  # also sits on mx network
        mx_ip, mx_net_mask = mx_nic.get_ip_and_mask()
        mx_vlan = mx_nic.get_net().get_vlan_id()
        cfg_body = cfg_tmpl.format(vtc_a_ip=a_ip, a_net_mask=a_net_mask, a_gw=a_gw, vtc_mx_ip=mx_ip, mx_net_mask=mx_net_mask, dns_ip=dns_ip, ntp_ip=ntp_ip, username=self.ssh_username, password=self.ssh_password, hostname=hostname)
        net_part = net_part_tmpl.format(a_nic_name='a', mx_nic_name='mx', mx_vlan=mx_vlan)
        with with_config.WithConfig.open_artifact(hostname, 'w') as f:
            f.write(cfg_body)
        return cfg_body, net_part

    def get_cluster_conf_body(self):
        """Render cluster.conf for the VTC HA pair and save it as an artifact."""
        from lab import with_config
        vip_a, vip_mx = self.ssh_ip  # NOTE(review): unpacks ssh_ip into two VIPs — confirm ssh_ip is a pair here
        a_ip = []
        mx_ip = []
        mx_gw = None
        # Collect 'a' and 'mx' addresses for the build node and both VTC individuals.
        for node_id in ['bld', 'vtc1', 'vtc2']:
            a_ip.append(self.pod.get_node_by_id(node_id=node_id).get_nic('a').get_ip_and_mask()[0])
            mx_nic = self.pod.get_node_by_id(node_id=node_id).get_nic('mx')
            mx_gw = mx_nic.get_gw()
            mx_ip.append(mx_nic.get_ip_and_mask()[0])
        cfg_tmpl = with_config.read_config_from_file(cfg_path='cluster.conf.template', folder='vts', is_as_string=True)
        cfg_body = cfg_tmpl.format(lab_name=self.pod, vip_a=vip_a, vip_mx=vip_mx, vtc1_a_ip=a_ip[1], vtc2_a_ip=a_ip[2], vtc1_mx_ip=mx_ip[1], vtc2_mx_ip=mx_ip[2], special_ip=a_ip[0], mx_gw=mx_gw)
        with with_config.WithConfig.open_artifact('cluster.conf', 'w') as f:
            f.write(cfg_body)
        return cfg_body

    def vtc_change_default_password(self):
        """Change the factory admin/admin password through the VTS web UI API.

        Waits for Tomcat to come up, logs in with the defaults, fetches the OWASP
        CSRF token, then PUTs the new password. Raises on login/update failure.
        """
        import json
        import re
        import requests
        from time import sleep

        default_username, default_password = 'admin', 'admin'
        if default_username != self.oob_username:
            raise ValueError
        api_security_check = 'https://{}:8443/VTS/j_spring_security_check'.format(self.ssh_ip)
        api_java_servlet = 'https://{}:8443/VTS/JavaScriptServlet'.format(self.ssh_ip)
        api_update_password = 'https://{}:8443/VTS/rs/ncs/user?updatePassword=true&isEnforcePassword=true'.format(self.ssh_ip)
        while True:
            # noinspection PyBroadException
            try:
                self.log(message='Waiting for VTC service up...')
                requests.get('https://{}:8443/VTS/'.format(self.ssh_ip), verify=False, timeout=300)  # First try to open to check that Tomcat is indeed started
                break
            except:
                sleep(100)
        session = requests.Session()
        auth = session.post(api_security_check, data={'j_username': default_username, 'j_password': default_password, 'Submit': 'Login'}, verify=False)
        if 'Invalid username or passphrase' in auth.text:
            raise ValueError(auth.text)
        java_script_servlet = session.get(api_java_servlet, verify=False)
        # The CSRF token is embedded in the servlet's javascript response.
        owasp_csrftoken = ''.join(re.findall(r'OWASP_CSRFTOKEN", "(.*?)", requestPageTokens', java_script_servlet.text))
        response = session.put(api_update_password,
                               data=json.dumps({'resource': {'user': {'user_name': self.oob_username, 'password': self.oob_password, 'currentPassword': default_password}}}),
                               headers={'OWASP_CSRFTOKEN': owasp_csrftoken,
                                        'X-Requested-With': 'OWASP CSRFGuard Project',
                                        'Accept': 'application/json, text/plain, */*',
                                        'Accept-Encoding': 'gzip, deflate, sdch, br',
                                        'Content-Type': 'application/json;charset=UTF-8'})
        if response.status_code == 200 and 'Error report' not in response.text:
            self.log(message='password changed')
            return response.text
        else:
            raise RuntimeError(response.text)

    def r_vtc_wait_cluster_formed(self, n_retries=1):
        """Return True once every Vtc node's ssh IP is reported by the HA cluster.

        :param n_retries: how many connection timeouts to tolerate before giving up
        """
        import requests.exceptions
        nodes = self.pod.get_nodes_by_class(Vtc)
        while True:
            try:
                cluster = self.cmd('get_vtc_ha')  # NOTE(review): 'get_vtc_ha' is not an API_CALLS key nor curl args — verify this call
                break
            except requests.exceptions.ConnectTimeout:
                n_retries -= 1
                if n_retries == 0:
                    return False
                else:
                    continue
        reported_ips = [x['address'] for x in cluster['collection']['tcm:members']]
        for node in nodes:
            if node.get_ssh()[0] not in reported_ips:
                return False
        return True

    def r_collect_info(self, regex):
        """Grep the NSO logs for `regex` (plus HTTP 40x accesses) and return the combined output."""
        body = ''
        for cmd in [self.log_grep_cmd(log_files='/opt/vts/log/nso/', regex=regex), self.log_grep_cmd(log_files='/opt/vts/log/nso/localhost\:8888.access', regex='HTTP/1.1" 40')]:
            ans = self.exe(cmd, is_warn_only=True)
            body += self.single_cmd_output(cmd=cmd, ans=ans)
        return body

    def r_vtc_day0_config(self):  # https://cisco.jiveon.com/docs/DOC-1469629
        """Apply day-0 configuration: admin domain, XRVR devices, TOR switches, then sync."""
        import jinja2
        # Admin-domain / gateway-group policy template.
        domain = jinja2.Template('''
set resource-pools vni-pool vnipool range 4096 65535
set cisco-vts infra-policy admin-domains admin-domain {{ domain_group }} l2-gateway-groups l2-gateway-group L2GW-0 policy-parameters distribution-mode decentralized-l2
set cisco-vts infra-policy admin-domains admin-domain {{ domain_group }} l2-gateway-groups l2-gateway-group L2GW-0 policy-parameters control-plane-protocol bgp-evpn
set cisco-vts infra-policy admin-domains admin-domain {{ domain_group }} l2-gateway-groups l2-gateway-group L2GW-0 policy-parameters arp-suppression
set cisco-vts infra-policy admin-domains admin-domain {{ domain_group }} l2-gateway-groups l2-gateway-group L2GW-0 policy-parameters packet-replication ingress-replication
set cisco-vts infra-policy admin-domains admin-domain {{ domain_group }} l3-gateway-groups l3-gateway-group L3GW-0 policy-parameters distribution-mode decentralized-l3
set cisco-vts infra-policy admin-domains admin-domain {{ domain_group }} l3-gateway-groups l3-gateway-group L3GW-0 policy-parameters control-plane-protocol bgp-evpn
set cisco-vts infra-policy admin-domains admin-domain {{ domain_group }} l3-gateway-groups l3-gateway-group L3GW-0 policy-parameters arp-suppression
set cisco-vts infra-policy admin-domains admin-domain {{ domain_group }} l3-gateway-groups l3-gateway-group L3GW-0 policy-parameters packet-replication ingress-replication
set cisco-vts infra-policy admin-domains admin-domain {{ domain_group }} l2-gateway-groups l2-gateway-group L2GW-0 ad-l3-gw-parent L3GW-0''')
        # Per-XRVR device registration template.
        xrvr = jinja2.Template('''{% for xrvr_name in xrvr_names %}
request devices device {{ xrvr_name }} sync-from
set devices device {{ xrvr_name }} asr9k-extension:device-info device-use leaf
set devices device {{ xrvr_name }} asr9k-extension:device-info bgp-peering-info bgp-asn {{ bgp_asn }}
set devices device {{ xrvr_name }} asr9k-extension:device-info bgp-peering-info loopback-if-num 0
set cisco-vts infra-policy admin-domains admin-domain {{ domain_group }} l2-gateway-groups l2-gateway-group L2GW-0 devices device {{ xrvr_name }}
{% endfor %}''')
        # Per-TOR-switch registration template (auth group, vlan pool, NX-OS NED).
        tmpl_switches = jinja2.Template('''
{% for switch in switches %}
set devices authgroups group {{ switch['id'] }} umap admin remote-name {{ switch['username'] }}
set devices authgroups group {{ switch['id'] }} umap admin remote-password {{ switch['password'] }}
set resource-pools vlan-pool {{ switch['id'] }} range 3000 3999
set devices device {{ switch['id'] }} address {{ switch['ip'] }}
set devices device {{ switch['id'] }} authgroup {{ switch['id'] }}
set devices device {{ switch['id'] }} device-type cli ned-id cisco-nx
set devices device {{ switch['id'] }} device-type cli protocol telnet
set devices device {{ switch['id'] }} n9k-extension:device-info platform N9K
set devices device {{ switch['id'] }} n9k-extension:device-info device-use leaf
set devices device {{ switch['id'] }} state admin-state unlocked
commit
request devices device {{ switch['id'] }} sync-from
set devices device {{ switch['id'] }} n9k-extension:device-info bgp-peering-info bgp-asn {{ bgp_asn }}
set devices device {{ switch['id'] }} n9k-extension:device-info bgp-peering-info loopback-if-num 0
set cisco-vts infra-policy admin-domains admin-domain {{ domain_group }} l2-gateway-groups l2-gateway-group L2GW-0 devices device {{ switch['id'] }}
{% endfor %}''')
        sync = jinja2.Template('''
{% for name in names %}
request devices device {{ name }} sync-from
{% endfor %}''')
        # NOTE(review): under Python 3 map() is lazy and its result is discarded here,
        # so r_xrvr_day0_config() would never actually run — verify intent.
        map(lambda y: y.r_xrvr_day0_config(), self.pod.get_xrvr())
        self.r_vtc_ncs_cli(command=domain.render(domain_group='D1'))
        self.r_vtc_ncs_cli(command=xrvr.render(xrvr_names=['xrvr1', 'xrvr2'], domain_group='D1', bgp_asn=23))
        switches = [{'id': x.get_id(), 'ip': x.get_oob()[0], 'username': x.get_oob()[1], 'password': x.get_oob()[2]} for x in self.pod.tors]
        self.r_vtc_ncs_cli(command=tmpl_switches.render(switches=switches, domain_group='D1', bgp_asn=23))
        self.r_vtc_ncs_cli(command=sync.render(names=['xrvr1', 'xrvr2'] + ['n91', 'n92']))

    def r_vtc_ncs_cli(self, command):
        """Run a block of ncs_cli config commands on the node via a heredoc."""
        self.exe('ncs_cli << EOF\nconfigure\n{}\ncommit\nexit\nexit\nEOF'.format(command))

    @decorators.section('Get VTS version')
    def r_vtc_get_version(self):
        """Return the output of the version_info utility."""
        return self.exe('version_info')

    def r_vtc_show_tech_support(self):
        """Generate a tech-support bundle, download it as an artifact, then clean up remotely."""
        wild_card = 'VTS*tar.bz2'
        self.exe('show_tech_support')
        ans = self.exe('ls ' + wild_card)
        self.r_get_file_from_dir(rem_rel_path=ans, loc_abs_path='artifacts/vst_tech_support.bz2')
        self.exe('rm -r ' + wild_card)

    def r_vtc_get_all(self):
        """Collect cluster status plus all 'get_'-prefixed API calls."""
        self.r_vtc_cluster_status()
        # NOTE(review): no current API_CALLS key starts with 'get_' — this loop may be a no-op.
        [self.cmd(x) for x in sorted(self.API_CALLS.keys()) if x.startswith('get_')]

    def r_vtc_cluster_status(self):
        """Return pacemaker cluster status (`crm status`)."""
        return self.exe('sudo crm status', is_warn_only=True)

    @decorators.section('Add baremetal to VTC host inventory')
    def r_vtc_add_host_to_inventory(self, server_name, tor_name, tor_port):
        """
        :param server_name: name of server as you need to have it in bare metal inventory, e.g. nfvbench_tg
        :param tor_name: name of TOR as it's seen in VTC api_dev_lst API CALL, e.g. TORSWITCHA
        :param tor_port: TOR switch port to which this server is connected e.g. Ethernet1/19
        :return: json with result of operation
        """
        return self.cmd(cmd='patch_device_port', uuid=tor_name, dic=self.API_CALLS['patch_device_port']['json'].replace('NAME', server_name).replace('PORT', tor_port))

    def r_vtc_create_border_leaf_port(self, os_networks):
        """Create border-leaf ports for each given network on every vmtp server connection.

        Uses the midpoint of the system vlan pool as the tag.
        NOTE(review): collects results in `r` but falls through without returning them.
        """
        pools = self.api_pool_lst()
        vlan_pool = [x for x in pools['resource-allocator:resource-pools']['vlan-pool'] if x['name'].startswith('system')][0]['ranges']['range'][0]
        vlan_start, vlan_end = vlan_pool['start'], vlan_pool['end']
        vlan = (vlan_start + vlan_end) / 2
        srvs = self.api_srv_lst()
        connids = [srv['connid'] for srv in srvs if srv['server-id'] == 'vmtp']
        r = []
        for network in os_networks:
            for connid in connids:
                r.append(self.api_port_put(vlan=vlan, netid=network.net_id, hostid='vmtp', connid=connid))
        pass

    def r_vtc_setup(self):
        """Ensure each TOR br_mgmt port from setup_data is registered as a 'vmtp' server."""
        servers = self.api_srv_lst()
        for dic in self.pod.setup_data_dic['TORSWITCHINFO']['SWITCHDETAILS']:
            tor_port = dic['br_mgmt_port_info']
            tor_name = dic['hostname']
            # Only add when no existing server entry already covers this TOR/port pair.
            if not [s for s in servers if tor_name in s['torname'] and tor_port in s['portname']]:
                self.pod.vtc.r_vtc_add_host_to_inventory(server_name='vmtp', tor_name=tor_name, tor_port=tor_port)

    @decorators.section(message='Detach border leaf')
    def r_vtc_del_border_leaf_ports(self):
        """Detach border-leaf ports.
        NOTE(review): currently only fetches the OpenStack view and does nothing with it — unfinished?
        """
        os = self.r_vtc_get_openstack()
        pass
class VtcIndividual(LibVirtServer):
    """A single VTC VM behind the Vtc VIP."""
    def cmd(self, cmd):
        # Intentionally a no-op: individual nodes do not serve REST calls; use Vtc.cmd().
        pass
|
'''
Created on 04/11/2015
@author: Bruna
'''
class Node():
    """A red-black tree node holding a key, a payload and links to its neighbours.

    New nodes start out red, per the classic red-black insertion scheme.
    """

    def __init__(self, chave, dado):
        self._key = chave
        self._value = dado
        self._color = "red"
        self._left = self._right = self._father = None

    # --- payload -----------------------------------------------------------
    def getValue(self):
        return self._value

    def setValue(self, newValue):
        self._value = newValue

    # --- key ---------------------------------------------------------------
    def getKey(self):
        return self._key

    def setKey(self, newKey):
        self._key = newKey

    # --- structure links ---------------------------------------------------
    def getLeft(self):
        return self._left

    def setLeft(self, newLeft):
        self._left = newLeft

    def getRight(self):
        return self._right

    def setRight(self, newRight):
        self._right = newRight

    def getFather(self):
        return self._father

    def setFather(self, newFather):
        self._father = newFather

    # --- color -------------------------------------------------------------
    def getColor(self):
        return self._color

    def setColor(self, newColor):
        self._color = newColor

    def __str__(self):
        return f"No: {self._key}\tdado: {self._value}"
class arvoreRB():
    """Red-black tree (CLRS style) using a single self-referential sentinel node.

    `self.none` plays the role of CLRS's T.nil: it is black and its links point
    back to itself so fix-up code can follow parents/children unconditionally.
    """

    def __init__(self):
        self.none = Node(None, None)
        self.none.setFather(self.none)
        self.none.setLeft(self.none)
        self.none.setRight(self.none)
        self.none.setColor("black")
        self._root = self.none

    def getRoot(self):
        return self._root

    def setRoot(self, R):
        self._root = R

    def rotateLeft(self, x):  # x is the pivot node
        """Left-rotate around x, lifting x's right child above it."""
        y = x.getRight()
        x.setRight(y.getLeft())
        if y.getLeft() != self.none:
            y.getLeft().setFather(x)
        y.setFather(x.getFather())
        if x.getFather() == self.none:
            self.setRoot(y)
        elif x == x.getFather().getLeft():
            x.getFather().setLeft(y)
        else:
            x.getFather().setRight(y)
        y.setLeft(x)
        x.setFather(y)

    def rotateRight(self, x):
        """Right-rotate around x, lifting x's left child above it (mirror of rotateLeft)."""
        y = x.getLeft()
        x.setLeft(y.getRight())
        if y.getRight() != self.none:
            y.getRight().setFather(x)
        y.setFather(x.getFather())
        if x.getFather() == self.none:
            self.setRoot(y)
        elif x == x.getFather().getRight():
            x.getFather().setRight(y)
        else:
            x.getFather().setLeft(y)
        y.setRight(x)
        x.setFather(y)

    def doubleRotateRight(self, x):  # x is a node
        """Left-rotate x's left child, then right-rotate x."""
        filhoesq = x.getLeft()
        self.rotateLeft(filhoesq)
        self.rotateRight(x)

    def doubleRotateleft(self, x):
        """Right-rotate x's right child, then left-rotate x."""
        filhodir = x.getRight()
        self.rotateRight(filhodir)
        self.rotateLeft(x)

    def rbInsert(self, z):
        """Insert node z by key, color it red, then restore the red-black invariants."""
        y = self.none
        x = self.getRoot()
        # Standard BST descent to find z's parent.
        while x != self.none:
            y = x
            if z.getKey() < x.getKey():
                x = x.getLeft()
            else:
                x = x.getRight()
        z.setFather(y)
        if y == self.none:
            self.setRoot(z)
        elif z.getKey() < y.getKey():
            y.setLeft(z)
        else:
            y.setRight(z)
        z.setLeft(self.none)
        z.setRight(self.none)
        z.setColor("red")
        self.insertFixUp(z)

    def insertFixUp(self, z):
        """Re-establish red-black properties after inserting the red node z (CLRS RB-INSERT-FIXUP)."""
        while z.getFather().getColor() == "red":
            if z.getFather() == z.getFather().getFather().getLeft():
                y = z.getFather().getFather().getRight()  # z's uncle
                if y.getColor() == "red":
                    # Case 1: red uncle — recolor and move the violation up.
                    z.getFather().setColor("black")
                    y.setColor("black")
                    z.getFather().getFather().setColor("red")
                    z = z.getFather().getFather()
                else:
                    if z == z.getFather().getRight():
                        # Case 2: inner child — rotate into case 3.
                        z = z.getFather()
                        self.rotateLeft(z)
                    # Case 3: outer child — recolor and rotate the grandparent.
                    z.getFather().setColor("black")
                    z.getFather().getFather().setColor("red")
                    self.rotateRight(z.getFather().getFather())
            else:  # mirror image: swap left and right
                y = z.getFather().getFather().getLeft()
                if y.getColor() == "red":
                    z.getFather().setColor("black")
                    y.setColor("black")
                    z.getFather().getFather().setColor("red")
                    z = z.getFather().getFather()
                else:
                    if z == z.getFather().getLeft():
                        z = z.getFather()
                        self.rotateRight(z)
                    z.getFather().setColor("black")
                    z.getFather().getFather().setColor("red")
                    self.rotateLeft(z.getFather().getFather())
        self.getRoot().setColor("black")

    def percorrerEmOrdem(self, r):
        """In-order traversal from r, printing each node (keys come out sorted)."""
        if r != self.none:
            self.percorrerEmOrdem(r.getLeft())
            print(r)
            self.percorrerEmOrdem(r.getRight())

    def rbTransplant(self, u, v):
        """Replace subtree rooted at u with subtree rooted at v (CLRS RB-TRANSPLANT)."""
        if u.getFather() == self.none:
            self.setRoot(v)
        elif u == u.getFather().getLeft():
            u.getFather().setLeft(v)
        else:
            u.getFather().setRight(v)
        v.setFather(u.getFather())

    def TreeMinimum(self, x):
        """Return the minimum-key node of the subtree rooted at x."""
        while x.getLeft() != self.none:
            x = x.getLeft()
        return x

    def TreeMaximum(self, x):
        """Return the maximum-key node of the subtree rooted at x.

        BUGFIX: the loop previously ran `while x != self.none`, walking off the
        tree and returning the sentinel instead of the rightmost real node.
        """
        while x.getRight() != self.none:
            x = x.getRight()
        return x

    def treeSucessor(self, x):
        """Return the in-order successor of x, or the sentinel if x is the maximum."""
        if x.getRight() != self.none:
            return self.TreeMinimum(x.getRight())
        y = x.getFather()
        while y != self.none and x == y.getRight():
            x = y
            y = y.getFather()
        return y

    def rbDeleteNumDois(self, z):
        """Alternative delete (key-copy variant): removes z's key by splicing out its successor."""
        if (z.getLeft() == self.none) or (z.getRight() == self.none):
            y = z
        else:
            y = self.treeSucessor(z)
        if y.getLeft() != self.none:
            x = y.getLeft()
        else:
            x = y.getRight()
        x.setFather(y.getFather())
        if y.getFather() == self.none:
            self.setRoot(x)
        else:
            if y == y.getFather().getLeft():
                y.getFather().setLeft(x)
            else:
                y.getFather().setRight(x)
        if y != z:
            # Copy the successor's key into z instead of moving nodes.
            z.setKey(y.getKey())
        if y.getColor() == "black":
            self.rbDeleteFixUp(x)
        return y

    def rbDelete(self, z):
        """Delete node z (CLRS RB-DELETE), fixing up if a black node was removed."""
        y = z
        yOriginalColor = y.getColor()
        if z.getLeft() == self.none:
            x = z.getRight()
            self.rbTransplant(z, z.getRight())
        elif z.getRight() == self.none:
            x = z.getLeft()
            self.rbTransplant(z, z.getLeft())
        else:
            y = self.TreeMinimum(z.getRight())
            yOriginalColor = y.getColor()
            x = y.getRight()
            if y.getFather() == z:
                x.setFather(y)
            else:
                self.rbTransplant(y, y.getRight())
                y.setRight(z.getRight())
                # BUGFIX: was `y.getRight.setFather(y)` — missing call parentheses,
                # which raised AttributeError on the bound-method object whenever the
                # successor was not z's direct child.
                y.getRight().setFather(y)
            self.rbTransplant(z, y)
            y.setLeft(z.getLeft())
            y.getLeft().setFather(y)
            y.setColor(z.getColor())
        if yOriginalColor == "black":
            self.rbDeleteFixUp(x)

    def rbDeleteFixUp(self, x):
        """Restore red-black properties after deleting a black node (CLRS RB-DELETE-FIXUP)."""
        while (x != self.getRoot()) and (x.getColor() == "black"):
            if x == x.getFather().getLeft():
                w = x.getFather().getRight()  # x's sibling
                if w.getColor() == "red":
                    # Case 1: red sibling — rotate to get a black sibling.
                    w.setColor("black")
                    x.getFather().setColor("red")
                    self.rotateLeft(x.getFather())
                    w = x.getFather().getRight()
                if (w.getLeft().getColor() == "black") and (w.getRight().getColor() == "black"):
                    # Case 2: both nephews black — recolor and move up.
                    w.setColor("red")
                    x = x.getFather()
                else:
                    if w.getRight().getColor() == "black":
                        # Case 3: near nephew red — rotate into case 4.
                        w.getLeft().setColor("black")
                        w.setColor("red")
                        self.rotateRight(w)
                        w = x.getFather().getRight()
                    # Case 4: far nephew red — final recolor and rotation.
                    w.setColor(x.getFather().getColor())
                    x.getFather().setColor("black")
                    w.getRight().setColor("black")
                    self.rotateLeft(x.getFather())
                    x = self.getRoot()
            else:  # mirror image: swap left and right
                w = x.getFather().getLeft()
                if w.getColor() == "red":
                    w.setColor("black")
                    x.getFather().setColor("red")
                    self.rotateRight(x.getFather())
                    w = x.getFather().getLeft()
                if (w.getRight().getColor() == "black") and (w.getLeft().getColor() == "black"):
                    w.setColor("red")
                    x = x.getFather()
                else:
                    if w.getLeft().getColor() == "black":
                        w.getRight().setColor("black")
                        w.setColor("red")
                        self.rotateLeft(w)
                        w = x.getFather().getLeft()
                    w.setColor(x.getFather().getColor())
                    x.getFather().setColor("black")
                    w.getLeft().setColor("black")
                    self.rotateRight(x.getFather())
                    x = self.getRoot()
        x.setColor("black")

    def buscaArvore(self, x, k):
        """Search for key k in the subtree rooted at x; returns the node or the sentinel."""
        if (x == self.none) or (x.getKey() == k):
            return x
        if k < x.getKey():
            return self.buscaArvore(x.getLeft(), k)
        else:
            return self.buscaArvore(x.getRight(), k)
|
#!/Users/fritjof/anaconda3/bin/python3
# problem: taisformula, rating: 1.5
# could convert floats to int before calculating to use less memory
# Integrate sampled speeds over time (trapezoidal rule); result converted m -> km.
n = int(input())
a = 0
# -1 is the "no previous sample yet" sentinel; real timestamps are non-negative.
t_i = v_i = -1
for _ in range(n):
    d = input().split()
    # Shift current sample into "previous" before reading the next one.
    t_0 = t_i
    v_0 = v_i
    t_i = int(d[0])
    v_i = float(d[-1])
    if t_0 != -1:
        # Trapezoid area between the two consecutive samples.
        a += (t_i - t_0) * (v_i + v_0)/2
print(a/1000)
|
# You've just started to study impartial games, and came across an interesting theory. The theory is quite complicated, but it can be narrowed down to the following statements: solutions to all such games can be found with the mex function. Mex is an abbreviation of minimum excludant: for the given set s it finds the minimum non-negative integer that is not present in s.
# You don't yet know how to implement such a function efficiently, so would like to create a simplified version. For the given set s and given an upperBound, implement a function that will find its mex if it's smaller than upperBound or return upperBound instead.
# Hint: for loops also have an else clause which executes when the loop completes normally, i.e. without encountering any breaks
def mexFunction(s, upperBound):
    """Return the minimum excludant (mex) of set s, capped at upperBound.

    The mex is the smallest non-negative integer not present in s; if every
    integer in [0, upperBound) is in s, upperBound itself is returned.
    """
    for i in range(upperBound):
        # `i not in s` is the idiomatic form of `not i in s`; early return
        # replaces the flag variable + for/else construction.
        if i not in s:
            return i
    return upperBound
|
# Generated by Django 2.2.3 on 2019-07-23 06:42
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds Document.status (integer, default 0) and moves document uploads under documents/."""

    dependencies = [
        ('fkrdjan', '0003_auto_20190722_1223'),
    ]

    operations = [
        migrations.AddField(
            model_name='document',
            name='status',
            field=models.IntegerField(default=0),
        ),
        migrations.AlterField(
            model_name='document',
            name='document',
            field=models.FileField(upload_to='documents/'),
        ),
    ]
|
#This is a python program to add numbers
# Check even or Odd
def is_even(number):
    """Return True if number is evenly divisible by 2, else False."""
    # Direct boolean expression replaces the mutable-flag pattern.
    return number % 2 == 0
def is_odd(number):
    """Return True if number is not divisible by 2, else False."""
    # Direct boolean expression replaces the mutable-flag pattern.
    return number % 2 != 0
def sum_of_even_numbers(n):
    """Print each even number in 1..n (inclusive) and return their sum."""
    # `total` instead of `sum`: the original shadowed the builtin sum().
    total = 0
    for i in range(1, n + 1):
        if is_even(i):
            print("sum_of_even_numbers {}".format(i))
            total += i
    return total
def sum_of_odd_numbers(n):
    """Print each odd number in 1..n (inclusive) and return their sum."""
    # `total` instead of `sum`: the original shadowed the builtin sum().
    total = 0
    for i in range(1, n + 1):
        if is_odd(i):
            print("sum_of_odd_numbers {}".format(i))
            total += i
    return total
# Demo output: even/odd sums for the first 10 and 50 natural numbers.
print ("Sum of even numbers: {}".format(sum_of_even_numbers(10)))
print ("Sum of odd numbers: {}".format(sum_of_odd_numbers(10)))
print ("Sum of odd numbers: {}".format(sum_of_odd_numbers(50)))
|
from CovidTracker.app import app
from CovidTracker.crud.covid_data import get_current_covid_data
from flask import (render_template)
import datetime
@app.route('/')
def index():
    """Display Landing Page"""
    # Human-readable date strings for the template header.
    now = datetime.datetime.now().strftime('%B %d, %Y')
    year = datetime.datetime.now().strftime('%Y')
    month = datetime.datetime.now().strftime('%B, %d')  # NOTE(review): named "month" but includes the day — confirm intended
    covid_cases = get_current_covid_data()
    # Format counts with thousands separators (e.g. 1,234,567) for display.
    death = "{:,.0f}".format(covid_cases.death)
    positive = "{:,.0f}".format(covid_cases.positive)
    hospitalized = "{:,.0f}".format(covid_cases.hospitalizedCurrently)
    totaltestresults = "{:,.0f}".format(covid_cases.totalTestResults)
    return render_template('index_copy.html', now=now, month=month, year=year, death=death, positive=positive, hospitalizedCurrently=hospitalized, totalTestResults=totaltestresults)
#!/usr/bin/env python3
import os
import re
import sys
from urllib.request import urlopen

from bs4 import BeautifulSoup
scrapingDirectory = "WEB"

# Fetch the archive index page and locate the article listing table.
html = urlopen("https://morningstar.in/library/archives.aspx").read()
soup = BeautifulSoup(html)  # NOTE(review): no explicit parser — consider BeautifulSoup(html, 'html.parser') for reproducible parsing
tableContainer = soup.find('div', {"summary": "An archive of articles"})
table = tableContainer.findAll('div', {"class": "clearfix"})
tableRows = table[0].findAll('div', {"class": "row"})

# One cell per row holds the article anchor.
linkToArticles = [articleRow.find('div', {"class": "col-xs-12"}) for articleRow in tableRows]

# Absolute article URLs (hrefs on the page are site-relative).
articleLinks = ["https://morningstar.in" + cell.find('a')['href'] for cell in linkToArticles]

# Filesystem-safe article names: strip everything but alphanumerics.
# (`import re` was previously re-executed inside this loop; it is now hoisted
# to the top-of-file import block.)
articleNames = [re.sub('[^0-9a-zA-Z]+', '', cell.find('a').text) for cell in linkToArticles]
print(articleNames[0])

for index, link in enumerate(articleLinks):
    morningStarFoler = os.path.join(scrapingDirectory, 'MSTAR', articleNames[index])
    os.makedirs(morningStarFoler, exist_ok=True)  # makedirs returns None; no need to keep it
    html1 = urlopen(link).read()
    soup1 = BeautifulSoup(html1)
    content = soup1.find('div', {"class": "contentpagewrap"})
    text = content.findAll('div', {"class": "col-xs-12"})[0]
    body = text.find('div', {"id": "div_content"})
    # `with` guarantees the handle is closed even when the write fails
    # (the original leaked the handle on exception); `out_file` avoids
    # shadowing the `file` builtin.
    with open(os.path.join(morningStarFoler, 'content.txt'), 'w+') as out_file:
        try:
            out_file.write(body.text)
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit propagate.
            print("An exception occurred for article: ", articleNames[index])
    print("=====================================================================")
|
import boto3
# Output directory for the generated MP3 files.
path = "/var/www/html/assets/audio/"
# Polly voices to render the message with.
voices = ["Brian", "Amy"]
# SECURITY: hard-coded AWS credentials committed to source code. These keys must
# be treated as compromised — revoke them and load credentials from environment
# variables, an AWS profile, or an instance role instead.
session = boto3.session.Session(aws_access_key_id="AKIAJ37H4XBNDTXP6GJQ",
                                aws_secret_access_key="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",
                                region_name="us-east-2")
polly = session.client('polly')
text= "I just wanted to thank you all for supporting and listening on Wallstreet Bets Synth this past year. Merry Christmas to all you rainbow bears and bulls. Cheers."
for voice in voices:
    # Synthesize the message with the standard (non-neural) engine as MP3.
    r = polly.synthesize_speech(
        Engine = "standard",
        OutputFormat = "mp3",
        Text = text,
        VoiceId = voice
    )
    # One file per voice, e.g. merrychristmas_Brian.mp3.
    fname = "merrychristmas_"+ voice +".mp3"
    with open(path + fname, 'wb') as f:
        f.write(r['AudioStream'].read())
# -*- coding: utf-8 -*-
"""
/***************************************************************************
DEMto3D
A QGIS plugin
Description
-------------------
copyright : (C) 2022 by Javier
email : demto3d@gmail.com
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from __future__ import absolute_import
from qgis.PyQt.QtCore import Qt
from qgis.PyQt.QtWidgets import QDialog
from .SelectLayer_dialog_base import Ui_SelectLayer_dialog_base
class Dialog(QDialog, Ui_SelectLayer_dialog_base):
    """Always-on-top dialog for picking a map layer from a combo box."""

    def __init__(self):
        """Constructor for the dialog."""
        QDialog.__init__(self, None, Qt.WindowStaysOnTopHint)
        # NOTE(review): a separate Ui instance is created even though the class
        # already mixes in Ui_SelectLayer_dialog_base — works, but looks accidental.
        self.ui = Ui_SelectLayer_dialog_base()
        self.ui.setupUi(self)
        self.ui.buttonBox.accepted.connect(self.accept)
        self.ui.buttonBox.rejected.connect(self.reject)

    def get_layer(self):
        # Currently selected layer from the embedded QgsMapLayerComboBox.
        return self.ui.mMapLayerComboBox.currentLayer()
|
def sol(score):
    """Print and return the letter grade for a numeric score.

    Bands: >=90 "A", >=80 "B", >=70 "C", >=60 "D", otherwise "F".
    The old upper bound (``score <= 100``) made any score above 100 fall
    through to "B"; ``>= 90`` now always yields "A".
    """
    if score >= 90:
        result = "A"
    elif score >= 80:
        result = "B"
    elif score >= 70:
        result = "C"
    elif score >= 60:
        result = "D"
    else:
        result = "F"
    print(result)
    # Returning the grade keeps the print behavior while making the
    # function usable (and testable) programmatically.
    return result
# Only prompt for a score when executed as a script, so importing this
# module does not block on stdin.
if __name__ == "__main__":
    score = int(input())
    sol(score)
from django.db import models
#from django.contrib.auth.models import AbstractUser
# Create your models here.
# Defines the user account information (currently disabled).
#class User(AbstractUser):
# Username (policy: at least 5 and at most 20 characters)
#username = models.CharField(max_length=20)
# Password (policy: at least 5 and at most 20 characters)
#password = models.CharField(max_length=20)
# Profile icon image
#image = models.ImageField(upload_to='')
# Notes (up to 1000 characters)
#otherinfo = models.TextField(max_length=1000)
|
import json
import discord
from discord.ext import commands, tasks
from constants import AOC_JOIN, AOC_ID, AOC_SESSION
class AdventOfCode(commands.Cog, name="Advent of Code"):
    """Advent of Code integration for the guild.

    Members claim their AoC account; their nickname is then suffixed with
    their current star count, refreshed from the AoC private-leaderboard
    JSON API every 20 minutes.
    """

    # The one guild whose member nicknames this cog manages.
    GUILD_ID = 502466330432110592

    def __init__(self, bot):
        self.bot = bot
        # Cached "members" mapping from the AoC leaderboard JSON.
        self.lb = {}
        self.lb_url = "https://adventofcode.com/2020/leaderboard/private/view/{aoc_id}.json"
        # str(discord user id) -> {"aoc_id": ..., "og_name": ...}
        with open("data/aoc_users.json", "r") as f:
            self.users = json.load(f)
        self.update_lb.start()
        self.save_users.start()

    def cog_unload(self):
        # Stop the background loops so they don't outlive the cog.
        self.save_users.cancel()
        self.update_lb.cancel()

    @tasks.loop(seconds=10)
    async def save_users(self):
        """Persist the claim table to disk."""
        with open("data/aoc_users.json", "w") as f:
            json.dump(self.users, f, indent=4)

    @tasks.loop(minutes=20)
    async def update_lb(self):
        """Fetch the leaderboard JSON and refresh member nicknames."""
        cookies = {"session": AOC_SESSION}
        url = self.lb_url.format(aoc_id=AOC_ID)
        async with self.bot.aiohttp_session.get(url, cookies=cookies) as r:
            if r.status != 200:
                # We will try again in 20 minutes
                return
            text = await r.json()
        self.lb = text["members"]
        await self.update_users()

    @update_lb.before_loop
    async def before_update_lb(self):
        await self.bot.wait_until_ready()

    async def update_users(self):
        """Rewrite every claimed member's nickname as "<name> <stars>"."""
        guild = self.bot.get_guild(self.GUILD_ID)
        if guild is None:
            return
        for mem_id, user in self.users.items():
            aoc_id = user["aoc_id"]
            og_name = user["og_name"]
            # The claimed account may not be on the leaderboard yet;
            # previously this raised a KeyError and killed the loop.
            if aoc_id not in self.lb:
                continue
            stars = self.lb[aoc_id]["stars"]
            member = guild.get_member(int(mem_id))
            # The member may have left the guild since claiming.
            if member is None:
                continue
            await member.edit(nick=f"{og_name} ⭐{stars}")

    @commands.group(name="adventofcode", aliases=["aoc"], brief="Advent of Code related commands")
    async def adventofcode(self, ctx: commands.Context):
        """adventofcode"""
        if not ctx.invoked_subcommand:
            await ctx.send("Available subcommands:\n"
                           "`about`\n"
                           "`join`\n"
                           "`leaderboard`\n"
                           "`claim`\n"
                           "`verify`\n"
                           "`unclaim`\n")

    @adventofcode.command(brief="Get info about the Advent of Code")
    async def about(self, ctx: commands.Context):
        """about"""
        about_url = "https://adventofcode.com/2020/about"
        about_txt = "The Advent of Code is a yearly event that takes place in December with daily christmas themed " \
                    "programming challenges of varying difficulty that you can solve in any language you want.\n" \
                    f"For more see: {about_url}"
        await ctx.send(about_txt)

    @adventofcode.command(brief="Get the code for our private leaderboard")
    async def join(self, ctx: commands.Context):
        """join"""
        await ctx.author.send(f"Go to https://adventofcode.com/2020/leaderboard/private "
                              f"and join with our code: `{AOC_JOIN}`")

    @adventofcode.command(aliases=["lb", "board"], brief="Get a link for out leaderboard")
    async def leaderboard(self, ctx: commands.Context):
        """leaderboard"""
        await ctx.send("See our leaderboard here: "
                       "https://adventofcode.com/2020/leaderboard/private/view/498817")

    @adventofcode.command(
        brief="Link your Advent of Code account to your Discord account.\n"
              "You can find your Advent of Code user ID on the settings page of the Advent of Code website; "
              "it's your anonymous user number: (anonymous user #YOUR_AOC_USER_ID)"
    )
    async def claim(self, ctx: commands.Context, aoc_id):
        """claim [aoc id]"""
        # self.users is keyed by str(id); the old check compared the raw int
        # against string keys and therefore never matched, letting a user
        # claim multiple accounts.
        if str(ctx.author.id) in self.users:
            await ctx.send("You have already claimed an AoC account.")
            return
        if aoc_id in [user["aoc_id"] for user in self.users.values()]:
            await ctx.send("This id has already been claimed")
            return
        og_name = ctx.author.display_name
        if aoc_id not in self.lb:
            stars = 0
        else:
            stars = self.lb[aoc_id]["stars"]
        self.users[str(ctx.author.id)] = {"aoc_id": aoc_id, "og_name": og_name}
        await ctx.author.edit(nick=f"{og_name} ⭐{stars}")
        await ctx.message.add_reaction("👍")

    @adventofcode.command(brief="Verify that the stars in the name of a user are correct")
    async def verify(self, ctx: commands.Context, member: discord.Member = None):
        """verify (member)"""
        member = member or ctx.author
        if str(member.id) not in self.users:
            await ctx.send(f"{member.display_name} has not claimed an AoC account")
            return
        aoc_id = self.users[str(member.id)]["aoc_id"]
        # Guard against accounts missing from the cached leaderboard
        # (previously a KeyError crashed the command).
        if aoc_id not in self.lb:
            await ctx.send("No leaderboard data for this account yet, try again later")
            return
        needs_fixing = False
        should_have = self.lb[aoc_id]["stars"]
        try:
            has = member.display_name.split("⭐")[1]
        except IndexError:
            needs_fixing = True
        else:
            needs_fixing = has.strip() != str(should_have)
        if needs_fixing:
            og_name = self.users[str(member.id)]["og_name"]
            await member.edit(nick=f"{og_name} ⭐{should_have}")
            await ctx.send("The error has been fixed and the stars are correct now")
        else:
            await ctx.send("The stars are good")

    @adventofcode.command(brief="Unclaim an id and remove the stars from your name")
    async def unclaim(self, ctx: commands.Context):
        """unclaim"""
        if str(ctx.author.id) not in self.users:
            await ctx.send("You haven't claimed an id")
            return
        og_name = self.users[str(ctx.author.id)]["og_name"]
        self.users.pop(str(ctx.author.id))
        await ctx.author.edit(nick=og_name)
        await ctx.send("Your name won't get updated anymore")
def setup(bot):
    """Standard discord.py extension entry point: register the cog."""
    cog = AdventOfCode(bot)
    bot.add_cog(cog)
|
# -*- coding: utf-8 -*-
# Web scraper - Dazhong Dianping restaurant reviews
import urllib.request
from bs4 import BeautifulSoup
from urllib.request import urlopen
import pandas as pd
# Fixed: `import DataFrame` is not a module; DataFrame lives in pandas.
from pandas import DataFrame
import re

# Review-listing page for one shop (page 1).
html = 'https://www.dianping.com/shop/20832376/review_more?pageno=1'
html_doc = urllib.request.urlopen (html).read ()
# Parsed document shared by all the extraction helpers below.
soup = BeautifulSoup (html_doc, 'html.parser', from_encoding='utf-8')
def pls(html):
    """Collect the stripped text of every review body on the page.

    NOTE(review): the ``html`` argument is unused — the function reads the
    module-level ``soup`` instead.
    """
    review_divs = soup.find_all('div', class_="J_brief-cont")
    return [div.text.strip() for div in review_divs]
def names(html):
    """Collect reviewer name-block texts (prints each display name).

    NOTE(review): the ``html`` argument is unused — reads module-level ``soup``.
    """
    collected = []
    for block in soup.find_all('div', class_="pic"):
        print(block.find("p", {"class": "name"}).text)
        collected.append(block.text.strip())
    return collected
def stars(html):
    """Collect the star-rating codes from the rating spans (prints each).

    NOTE(review): the ``html`` argument is unused — reads module-level ``soup``.
    """
    rating_spans = soup.find_all('span', {"class": re.compile("^item-rank-rst irr-star([0-9])")})
    ratings = []
    for span in rating_spans:
        if "itemprop" not in span.attrs:
            # Last two characters of the class name encode the star level.
            rating = span.attrs['class'][1][-2:]
            print(rating)
            ratings.append(rating)
    return ratings
def kouweis(html):
    """Collect the taste (口味) rating snippets from each review (prints each).

    NOTE(review): the ``html`` argument is unused — reads module-level ``soup``.
    """
    collected = []
    for span in soup.find_all('span', class_="rst"):
        if "口味" in span.text:
            print(span.text)
            collected.append(span.text)
    return collected
def fuwus(html):
    """Collect the service (服务) rating snippets from each review (prints each).

    NOTE(review): the ``html`` argument is unused — reads module-level ``soup``.
    """
    collected = []
    for span in soup.find_all('span', class_="rst"):
        if "服务" in span.text:
            print(span.text)
            collected.append(span.text)
    return collected
def huanjings(html):
    """Collect the ambience (环境) rating snippets from each review (prints each).

    NOTE(review): the ``html`` argument is unused — reads module-level ``soup``.
    """
    collected = []
    for span in soup.find_all('span', class_="rst"):
        if "环境" in span.text:
            print(span.text)
            collected.append(span.text)
    return collected
print(len(huanjings(html)))
# Build the review table. Use pd.DataFrame explicitly — the bare `DataFrame`
# name came from a broken `import DataFrame` statement.
# NOTE(review): pandas requires all columns to be the same length; if any
# review lacks one of the ratings the lists will differ and this raises —
# confirm the page structure guarantees one rating span per review.
df = pd.DataFrame({'ID名字': names(html),
                   '星级': stars(html),
                   '口味': kouweis(html),
                   '环境': huanjings(html),
                   '服务': fuwus(html),
                   '点评内容': pls(html)})
df.to_csv("E:/python_code/self_learn/df.csv", index=False, encoding='utf_8_sig')
# Write the same table to an Excel workbook; the context manager saves and
# closes the file (replaces the deprecated writer.save() pattern).
with pd.ExcelWriter('output.xlsx') as writer:
    df.to_excel(writer, 'Sheet1')
|
# coding: utf-8
# Converted Jupyter notebook: the bare expressions below are notebook cells
# whose values only display in a notebook; as a plain script they are no-ops.
# In[1]:
import nltk
# Opens the interactive NLTK downloader (GUI or text menu); execution
# blocks here until the downloader is closed.
nltk.download()
# 1. brown corpus
# 2. Inaugural speech
# 3. book corpus - frequency distribution can be done(most common words in textbook)
# In[2]:
from nltk.corpus import brown
# In[4]:
# List the genres available in the Brown corpus.
brown.categories()
# In[5]:
print(type(brown))
# In[10]:
# First 100 word tokens of the "adventure" genre.
brown.words(categories="adventure")[:100]
# In[11]:
len(brown.words(categories="adventure"))
# In[13]:
from nltk.corpus import inaugural
# In[15]:
# One file id per inaugural address.
inaugural.fileids()
# In[16]:
len(inaugural.fileids())
# In[19]:
inaugural.words(fileids='1861-Lincoln.txt')[:20]
# In[20]:
print(len(inaugural.words(fileids='1861-Lincoln.txt')))
# In[24]:
inaugural.words(fileids='2009-Obama.txt')[:5]
# In[22]:
print(len(inaugural.words(fileids='2009-Obama.txt')))
# In[26]:
# NOTE(review): star import brings in text1..text9, texts(), sents(),
# FreqDist, etc., and prints a banner while loading the book corpus.
from nltk.book import *
# In[27]:
texts()
# In[28]:
sents()
# In[31]:
# Word-frequency distribution over text7.
f = FreqDist(text7)
print(f)
# In[32]:
f = FreqDist(text1)
print(f)
# In[36]:
f.most_common(10)
# In[40]:
# Most common words in Carter's 1977 inaugural address.
c = FreqDist(inaugural.words(fileids='1977-Carter.txt'))
c.most_common(10)
# In[ ]:
|
# Copyright 2020-2023 OpenDR European Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import find_packages, setup
def from_file(file_name: str = "requirements.txt", comment_char: str = "#"):
    """Parse a pip requirements file into a list of requirement strings.

    Inline comments are stripped and dependencies given as direct URLs
    (lines starting with ``http``) are skipped, since those cannot be
    expressed as ``install_requires`` entries.
    """
    reqs = []
    with open(file_name, "r") as fh:
        for raw_line in fh:
            # Cut off an inline comment, if any, then normalize whitespace.
            line = raw_line.strip().split(comment_char, 1)[0].strip()
            # Skip directly installed (URL) dependencies and blank lines.
            if line.startswith("http") or not line:
                continue
            reqs.append(line)
    return reqs
def long_description():
    """Return the README contents with SVG image links swapped for PNG.

    Uses a context manager so the file handle is closed deterministically
    (the old version leaked the handle from a bare ``open().read()``).
    """
    with open("README.md", encoding="utf-8") as fh:
        text = fh.read()
    # SVG images are not readable on PyPI, so replace them with PNG
    return text.replace(".svg", ".png")
# Package metadata; note that requirements and the README are read from
# disk at import time via the helpers above.
setup(
    name="object_tracking_2d_demo",
    version="0.1.0",
    description="Example of in-browser video streaming and processing using the OpenDR toolkit.",
    long_description=long_description(),
    long_description_content_type="text/markdown",
    author="Illia Oleksiienko, Lukas Hedegaard",
    author_email="{io, lhm}@ece.au.dk",
    install_requires=from_file("requirements.txt"),
    packages=find_packages(exclude=["test"]),
    keywords=["deep learning", "pytorch", "AI", "OpenDR", "video", "webcam"],
)
|
from flask import Flask, render_template, request

app = Flask(__name__)


@app.route('/')
def index():
    """Serve the landing page."""
    return render_template("index.html")


@app.route('/submit', methods=["POST"])
def submit():
    """Handle the form post; re-render with an error if a field is missing."""
    name = request.form.get("name")
    establishment = request.form.get("establishment")
    if not name or not establishment:
        error = "Please fill the required field."
        return render_template("fail.html",
                               error=error,
                               name=name,
                               establishment=establishment)
    return render_template("submit.html")


# Start the dev server only when executed directly — and only AFTER all
# routes are registered. Previously app.run() was invoked before the route
# definitions, so when run as a script every request 404'd.
if __name__ == '__main__':
    app.run(debug=True)
|
from flask import Flask
from .extensions import neo4j_driver
from neo4j_app.modules.api import register_api
# GET DATABASE SESSION
def get_db():
    """Open and return a new session from the shared neo4j driver."""
    session = neo4j_driver.session()
    return session
# CREATE FLASK APP
def create_app(config_object='neo4j_app.settings'):
    """ Instantiate a Flask app
    Parameters
    -----
    config_object : file dedicated to configuration in Flask env
    Returns
    -----
    A Flask app object
    """
    # Build the application, load its configuration, then attach the API
    # blueprint before handing it back to the caller.
    flask_app = Flask(__name__)
    flask_app.config.from_object(config_object)
    register_api(flask_app)
    return flask_app
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtGui import QPixmap
from front import FrontMainClass
class ProfileMainClass(object):
    """UI builder for the "My Profile" dialog.

    The original auto-generated widget-by-widget boilerplate is collapsed
    into small data-driven helpers. Every widget keeps its original
    attribute name, geometry, font settings, object name and caption, so
    callers (and connectSlotsByName) see an identical object tree.
    """

    # y-offsets of the ten "recent chat" labels (irregular spacing kept
    # exactly as in the original layout).
    _PERSON_YS = (46, 74, 102, 131, 160, 188, 216, 243, 271, 296)

    @staticmethod
    def _font(family=None, size=None, bold=False, weight=None):
        """Build a QFont, setting only the requested properties."""
        font = QtGui.QFont()
        if family is not None:
            font.setFamily(family)
        if size is not None:
            font.setPointSize(size)
        if bold:
            font.setBold(True)
        if weight is not None:
            font.setWeight(weight)
        return font

    def _label(self, parent, attr, geometry, font=None):
        """Create a QLabel, register it as ``self.<attr>`` and return it."""
        label = QtWidgets.QLabel(parent)
        label.setGeometry(QtCore.QRect(*geometry))
        if font is not None:
            label.setFont(font)
        label.setObjectName(attr)
        setattr(self, attr, label)
        return label

    def setupUi(self, profDialog):
        profDialog.setObjectName("profDialog")
        profDialog.resize(549, 589)

        # --- left column: profile picture + recent chats -----------------
        self.presentProfileFrame = QtWidgets.QFrame(profDialog)
        self.presentProfileFrame.setGeometry(QtCore.QRect(-1, -1, 181, 591))
        self.presentProfileFrame.setStyleSheet("background-color: rgb(255, 255, 255);")
        self.presentProfileFrame.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.presentProfileFrame.setFrameShadow(QtWidgets.QFrame.Raised)
        self.presentProfileFrame.setObjectName("presentProfileFrame")

        self._label(self.presentProfileFrame, "profilepic", (6, 25, 171, 161))
        self.profpic = QPixmap("./img/prof.jpg")
        self.profilepic.setPixmap(self.profpic)

        self.recentFrame = QtWidgets.QFrame(self.presentProfileFrame)
        self.recentFrame.setGeometry(QtCore.QRect(3, 200, 176, 387))
        self.recentFrame.setStyleSheet("background-color: rgb(255, 250, 246);")
        self.recentFrame.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.recentFrame.setFrameShadow(QtWidgets.QFrame.Raised)
        self.recentFrame.setObjectName("recentFrame")

        self._label(self.recentFrame, "recentEvents", (10, 11, 111, 20),
                    self._font(family="MS Shell Dlg 2", size=11, weight=60))
        # person1..person10, one label per recent chat entry.
        for idx, y in enumerate(self._PERSON_YS, start=1):
            self._label(self.recentFrame, "person{}".format(idx),
                        (23, y, 70, 18), self._font(size=9))

        # --- top-right: account details ----------------------------------
        self.detailsFrame = QtWidgets.QFrame(profDialog)
        self.detailsFrame.setGeometry(QtCore.QRect(179, -1, 371, 201))
        self.detailsFrame.setFont(self._font(family="Calibri", size=14))
        self.detailsFrame.setStyleSheet("background-color: rgb(255, 255, 246);")
        self.detailsFrame.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.detailsFrame.setFrameShadow(QtWidgets.QFrame.Raised)
        self.detailsFrame.setObjectName("detailsFrame")

        bold14 = dict(family="Calibri", size=14, bold=True, weight=75)
        self._label(self.detailsFrame, "userIdShow", (30, 26, 150, 25), self._font(**bold14))
        self._label(self.detailsFrame, "nameShow", (30, 72, 200, 25), self._font(**bold14))
        self._label(self.detailsFrame, "desiShow", (30, 118, 200, 25), self._font(**bold14))
        self._label(self.detailsFrame, "emailShow", (30, 165, 250, 25),
                    self._font(family="Calibri", size=13, weight=62))
        # Small caption labels above each detail value.
        for attr, y in (("userId", 11), ("name", 57), ("designation", 103), ("email", 152)):
            self._label(self.detailsFrame, attr, (30, y, 75, 18),
                        self._font(family="Calibri", size=10))

        self.logoutButton = QtWidgets.QPushButton(self.detailsFrame)
        self.logoutButton.setGeometry(QtCore.QRect(316, 4, 50, 22))
        self.logoutButton.setFont(self._font(size=10))
        self.logoutButton.setStyleSheet("background-color: rgb(240, 240, 240);")
        self.logoutButton.setObjectName("logoutButton")

        # --- bottom-right: designation list ------------------------------
        self.friendsFrame = QtWidgets.QFrame(profDialog)
        self.friendsFrame.setGeometry(QtCore.QRect(182, 202, 364, 384))
        self.friendsFrame.setStyleSheet("background-color: rgb(235, 241, 250);")
        # designationList1..designationList8, spaced 40px apart.
        for idx in range(1, 9):
            self._label(self.friendsFrame, "designationList{}".format(idx),
                        (30, 25 + 40 * (idx - 1), 200, 22),
                        self._font(family="Calibri", size=14, bold=True, weight=68))

        self.retranslateUi(profDialog)
        QtCore.QMetaObject.connectSlotsByName(profDialog)

    def retranslateUi(self, profDialog):
        _translate = QtCore.QCoreApplication.translate
        profDialog.setWindowTitle(_translate("profDialog", "My Profile"))
        self.recentEvents.setText(_translate("profDialog", "Recent Chats"))
        for idx in range(1, 11):
            getattr(self, "person{}".format(idx)).setText(
                _translate("profDialog", "Person{}".format(idx)))
        self.userIdShow.setText(_translate("profDialog", "3001"))
        self.nameShow.setText(_translate("profDialog", "My Name"))
        self.desiShow.setText(_translate("profDialog", "Web Developer"))
        self.emailShow.setText(_translate("profDialog", "myemail@test.com"))
        self.userId.setText(_translate("profDialog", "UserId :"))
        self.name.setText(_translate("profDialog", "Name :"))
        self.designation.setText(_translate("profDialog", "Designation :"))
        self.email.setText(_translate("profDialog", "Email :"))
        designations = ("> HR", "> Technical Sales", "> Web Developer",
                        "> Business Analyst", "> Software Tester",
                        "> Technical Support", "> Network Engineer",
                        "> Software Developer")
        for idx, caption in enumerate(designations, start=1):
            getattr(self, "designationList{}".format(idx)).setText(
                _translate("profDialog", caption))
        self.logoutButton.setText(_translate("profDialog", "Logout"))
if __name__ == "__main__":
    import sys

    # Stand-alone preview: build the dialog and run the Qt event loop.
    qt_app = QtWidgets.QApplication(sys.argv)
    dialog = QtWidgets.QDialog()
    profile_ui = ProfileMainClass()
    profile_ui.setupUi(dialog)
    dialog.show()
    sys.exit(qt_app.exec_())
|
import sys
# CLI inputs: argv[1] feeds the `newValue` default, argv[2] the `original`
# default of percentage() below.
# NOTE(review): these raise IndexError when the module is imported or run
# without two arguments — consider guarding with len(sys.argv).
val1 = sys.argv[1]
val2 = sys.argv[2]
def percentage(newValue=None, original=None):
    """Print the percent change from ``newValue`` to ``original``.

    Arguments default lazily to the command-line values ``val1``/``val2``
    (instead of binding them at import time), which keeps the old CLI
    behaviour while letting callers pass explicit values.
    The body was also ported from the Python-2 ``print`` statement, which
    is a SyntaxError under Python 3, and the "Origianl" typo in the output
    was corrected.
    """
    if newValue is None:
        newValue = val1
    if original is None:
        original = val2
    price1 = float(original)
    price2 = float(newValue)
    change = ((price1 - price2) / price2) * 100
    print("Change: %.2f %% \nOriginal: %.6f \nNew: %.6f" % (change, price1, price2))
if __name__ == "__main__":
percentage()
|
"""
Test pygmt.blockmedian.
"""
from pathlib import Path
import numpy.testing as npt
import pandas as pd
import pytest
from pygmt import blockmedian
from pygmt.datasets import load_sample_data
from pygmt.exceptions import GMTInvalidInput
from pygmt.helpers import GMTTempFile, data_kind
@pytest.fixture(scope="module", name="dataframe")
def fixture_dataframe():
    """
    Provide the sample bathymetry table shared by the tests in this module.
    """
    bathymetry = load_sample_data(name="bathymetry")
    return bathymetry
def test_blockmedian_input_dataframe(dataframe):
    """
    Check blockmedian when the data input is a pandas.DataFrame.
    """
    result = blockmedian(data=dataframe, spacing="5m", region=[245, 255, 20, 30])
    assert isinstance(result, pd.DataFrame)
    assert all(dataframe.columns == result.columns)
    assert result.shape == (5849, 3)
    npt.assert_allclose(result.iloc[0], [245.88819, 29.97895, -385.0])
def test_blockmedian_input_table_matrix(dataframe):
    """
    Check blockmedian on a bare matrix input (not a pandas.DataFrame).
    """
    matrix = dataframe.values
    result = blockmedian(data=matrix, spacing="5m", region=[245, 255, 20, 30])
    assert isinstance(result, pd.DataFrame)
    assert result.shape == (5849, 3)
    npt.assert_allclose(result.iloc[0], [245.88819, 29.97895, -385.0])
def test_blockmedian_input_xyz(dataframe):
    """
    Check blockmedian when x/y/z are passed as separate columns.
    """
    result = blockmedian(
        x=dataframe.longitude,
        y=dataframe.latitude,
        z=dataframe.bathymetry,
        spacing="5m",
        region=[245, 255, 20, 30],
    )
    assert isinstance(result, pd.DataFrame)
    assert result.shape == (5849, 3)
    npt.assert_allclose(result.iloc[0], [245.88819, 29.97895, -385.0])
def test_blockmedian_wrong_kind_of_input_table_grid(dataframe):
    """
    Passing a grid (xarray object) as the table input must raise.
    """
    bad_input = dataframe.bathymetry.to_xarray()
    assert data_kind(bad_input) == "grid"
    with pytest.raises(GMTInvalidInput):
        blockmedian(data=bad_input, spacing="5m", region=[245, 255, 20, 30])
def test_blockmedian_input_filename():
    """
    Check blockmedian on an ASCII text file input with an outfile set.
    """
    with GMTTempFile() as tmpfile:
        result = blockmedian(
            data="@tut_ship.xyz",
            spacing="5m",
            region=[245, 255, 20, 30],
            outfile=tmpfile.name,
        )
        # With outfile set, nothing is returned and the file holds the data.
        assert result is None
        assert Path(tmpfile.name).stat().st_size > 0
        result = pd.read_csv(tmpfile.name, sep="\t", header=None)
    assert result.shape == (5849, 3)
    npt.assert_allclose(result.iloc[0], [245.88819, 29.97895, -385.0])
def test_blockmedian_without_outfile_setting():
    """
    Check that omitting the outfile parameter yields a DataFrame result.
    """
    result = blockmedian(data="@tut_ship.xyz", spacing="5m", region=[245, 255, 20, 30])
    assert isinstance(result, pd.DataFrame)
    assert result.shape == (5849, 3)
    npt.assert_allclose(result.iloc[0], [245.88819, 29.97895, -385.0])
|
# Authors: Jessica Su, Wanzi Zhou, Pratyaksh Sharma, Dylan Liu, Ansh Shukla
import numpy as np
import random
import time
import pdb
import unittest
from PIL import Image
import matplotlib.pyplot as plt
# Finds the L1 distance between two vectors
# u and v are 1-dimensional np.array objects
def l1(u, v):
    """Return the Manhattan (L1) distance between vectors u and v."""
    diff = u - v
    return np.linalg.norm(diff, ord=1)
# Loads the data into a np array, where each row corresponds to
# an image patch -- this step is sort of slow.
# Each row in the data is an image, and there are 400 columns.
def load_data(filename):
    """Read a CSV of image patches into a 2-D numpy array (one row each)."""
    patches = np.genfromtxt(filename, delimiter=',')
    return patches
# Creates a hash function from a list of dimensions and thresholds.
def create_function(dimensions, thresholds):
    """Return a k-bit hash function: bit i is 1 iff v[dimensions[i]] >= thresholds[i]."""
    def f(v):
        bits = ["1" if v[dim] >= thr else "0"
                for dim, thr in zip(dimensions, thresholds)]
        return "".join(bits)
    return f

# Creates the LSH functions (functions that compute L K-bit hash keys).
# Each function selects k dimensions (i.e. column indices of the image matrix)
# at random, and then chooses a random threshold for each dimension, between 0 and
# 255. For any image, if its value on a given dimension is greater than or equal to
# the randomly chosen threshold, we set that bit to 1. Each hash function returns
# a length-k bit string of the form "0101010001101001...", and the L hash functions
# will produce L such bit strings for each image.
def create_functions(k, L, num_dimensions=400, min_threshold=0, max_threshold=255):
    """Build L independent random k-bit hash functions."""
    hash_functions = []
    for _ in range(L):
        dims = np.random.randint(low=0, high=num_dimensions, size=k)
        ths = np.random.randint(low=min_threshold, high=max_threshold + 1, size=k)
        hash_functions.append(create_function(dims, ths))
    return hash_functions
# Hashes an individual vector (i.e. image). This produces an array with L
# entries, where each entry is a string of k bits.
def hash_vector(functions, v):
    """Apply each hash function to v and collect the results in an array."""
    return np.array([fn(v) for fn in functions])

# Hashes the data in A, where each row is a datapoint, using the L
# functions in "functions."
def hash_data(functions, A):
    """Hash every row of A with every function; result is len(A) x L."""
    return np.array([hash_vector(functions, row) for row in A])
# Retrieve all of the points that hash to one of the same buckets
# as the query point. Do not do any random sampling (unlike what the first
# part of this problem prescribes).
# Don't retrieve a point if it is the same point as the query point.
def get_candidates(hashed_A, hashed_point, query_index):
    """Lazily yield row indices sharing at least one hash bucket with the query."""
    return (i for i in range(len(hashed_A))
            if i != query_index and any(hashed_point == hashed_A[i]))
# Sets up the LSH. You should try to call this function as few times as
# possible, since it is expensive.
# A: The dataset.
# Return the LSH functions and hashed data structure.
def lsh_setup(A, k = 24, L = 10):
    """Create L random k-bit hash functions and pre-hash every row of A."""
    functions = create_functions(k=k, L=L)
    return functions, hash_data(functions, A)
# Run the entire LSH algorithm
def lsh_search(A, hashed_A, functions, query_index, num_neighbors = 10):
    """Return up to num_neighbors candidate rows closest (L1) to the query row."""
    query_hash = hash_vector(functions, A[query_index, :])
    candidates = get_candidates(hashed_A, query_hash, query_index)
    scored = [(row, l1(A[row], A[query_index])) for row in candidates]
    scored.sort(key=lambda pair: pair[1])
    return [row for row, _ in scored[:num_neighbors]]
# Plots images at the specified rows and saves them each to files.
def plot(A, row_nums, base_filename):
    """Save each selected row as a 20x20 PNG named after its row number."""
    for row_num in row_nums:
        pixels = np.reshape(A[row_num, :], [20, 20])
        image = Image.fromarray(pixels)
        if image.mode != 'RGB':
            image = image.convert('RGB')
        image.save("{}-{}.png".format(base_filename, row_num))
# Finds the nearest neighbors to a given vector, using linear search.
def linear_search(A, query_index, num_neighbors):
    """Exact L1 nearest-neighbor search over all rows of A."""
    query = A[query_index, :]
    dists = np.apply_along_axis(lambda row: l1(row, query), 1, A)
    # Partition so the (num_neighbors + 1) smallest distances come first;
    # the extra slot accounts for the query point itself.
    closest = np.argpartition(dists, num_neighbors + 1)[: num_neighbors + 1]
    # Drop the query point, then trim to the requested count.
    closest = [idx for idx in closest if idx != query_index][:num_neighbors]
    # argpartition gives no ordering; sort by (distance, index).
    return sorted(closest, key=lambda idx: (dists[idx], idx))
def error(approx_distances, true_distances):
    """Ratio of summed approximate distances to summed true distances."""
    total_approx = np.sum(approx_distances)
    total_true = np.sum(true_distances)
    return total_approx / total_true
def problem4():
    """Driver for question 4: compare LSH vs linear search, then plot."""
    patches = load_data('./remote/hw1/q4/data/patches.csv')
    compare(patches, k=24, L=10, verbose=True)
    plot_errors(patches)
    plot_neighbors(patches)
def plot_neighbors(data):
    """Plot the 10 nearest neighbors of row 100 for both search methods."""
    print("Plotting neighbors")
    functions, hashed_data = lsh_setup(data)
    target_index = 100
    nearest_linear = linear_search(data, target_index, 10)
    nearest_lsh = lsh_search(data, hashed_data, functions, target_index, 10)
    plot(data, nearest_linear, './remote/hw1/q4/data//neighbors/linear/neighbor-')
    plot(data, nearest_lsh, './remote/hw1/q4/data//neighbors/lsh/neighbor-')
    plot(data, [target_index], './remote/hw1/q4/data//neighbors/target-')
def plot_errors(data):
    """Plot average LSH error as a function of L (fixed k=24) and of k (fixed L=10)."""
    ls = [10, 12, 14, 16, 18, 20]
    print("Calculating error of L")
    error_of_l = [compare(data, 24, l) for l in ls]
    print("Plotting error of L")
    plt.plot(ls, error_of_l)
    plt.ylabel('Error')
    plt.xlabel('L')
    plt.show()
    ks = [16, 18, 20, 22, 24]
    # Fixed: this progress message previously said "error of L" again.
    print("Calculating error of k")
    error_of_k = [compare(data, k, 10) for k in ks]
    print("Plotting error of K")
    plt.plot(ks, error_of_k)
    plt.ylabel('Error')
    plt.xlabel('k')
    plt.show()
def compare(data, k, L, verbose = False):
    """Benchmark LSH search against exact linear search.

    Runs 10 top-3 neighbor queries (rows 100, 200, ..., 1000), timing both
    methods, and accumulates the LSH/linear distance-ratio error per query.

    :param data: matrix of flattened patches, one per row
    :param k: number of hash functions per LSH table
    :param L: number of LSH tables
    :param verbose: print timing/error summaries and abort on impossible errors
    :return: average error ratio over the queries (>= 1 in expectation)
    """
    print(f"Evaluating data for k={k}, L={L}")
    functions, hashed_data = lsh_setup(data, k=k, L=L)
    num_searches = 10
    total_linear_time = 0.0
    total_lsh_time = 0.0
    total_error = 0.0
    for i in range(1, num_searches + 1):
        target = data[i * 100, :]
        start = time.time()
        linear_res = linear_search(data, i * 100, 3)
        total_linear_time += time.time() - start
        start = time.time()
        lsh_res = lsh_search(data, hashed_data, functions, i * 100, 3)
        # LSH can miss neighbors when too few candidates share a bucket;
        # restart the whole comparison with fresh random hash functions.
        if len(lsh_res) < 3:
            print("Returned not enough results, retrying...")
            return compare(data, k, L, verbose)
        total_lsh_time += time.time() - start
        distf = np.vectorize(lambda j: l1(data[j, :], target))
        linear_distances = sorted(distf(linear_res))
        lsh_distances = sorted(distf(lsh_res))
        # if not np.array_equal(linear_distances, lsh_distances) and verbose:
        #     print(f"Linear search and LSH search result mismatch!")
        #     print("Linear:")
        #     print(linear_distances)
        #     print("LSH:")
        #     print(lsh_distances)
        row_error = error(lsh_distances, linear_distances)
        # Sanity check: approximate distances can never beat the exact ones,
        # so a ratio < 1 indicates a bug upstream.
        if row_error < 1 and verbose:
            print(f"Error less than 1: {row_error}")
            exit(1)
        total_error += row_error
    avg_error = total_error / num_searches
    if verbose:
        print(f"Average linear search time: {(total_linear_time / num_searches):.2f}")
        print(f"Average LSH search time: {(total_lsh_time / num_searches):.2f}")
        print(f"Average error: {avg_error}")
    return avg_error
#### TESTS #####
class TestLSH(unittest.TestCase):
    """Unit tests for the L1 distance and the hashing helpers."""

    def test_l1(self):
        a = np.array([1, 2, 3, 4])
        b = np.array([2, 3, 2, 3])
        self.assertEqual(l1(a, b), 4)

    def test_hash_data(self):
        sum_fn = lambda vec: sum(vec)
        sq_sum_fn = lambda vec: sum([x * x for x in vec])
        A = np.array([[1, 2, 3], [4, 5, 6]])
        self.assertEqual(sum_fn(A[0, :]), 6)
        self.assertEqual(sq_sum_fn(A[0, :]), 14)
        funcs = [sum_fn, sq_sum_fn]
        self.assertTrue(np.array_equal(hash_vector(funcs, A[0, :]), np.array([6, 14])))
        self.assertTrue(np.array_equal(hash_data(funcs, A), np.array([[6, 14], [15, 77]])))

    ### TODO: Write your tests here (they won't be graded,
    ### but you may find them helpful)
if __name__ == '__main__':
    # Runs the assignment driver by default; swap the comments to run the
    # unit tests instead.
    #unittest.main() ### TODO: Uncomment this to run tests
    problem4()
|
import re
from datetime import datetime
from csv import DictWriter

# Column order for the output CSV; every key that can appear in a record.
FIELDNAMES = ('CB', 'dateTime', 'AHeader', 'EB', 'VARR', 'PFR', 'VAR', 'PT', 'EB_OnTime', 'FREQ', 'DG_OnTime', 'SS', 'WR', 'CT', 'DG', 'EB_OffTime', 'WH', 'VB', 'VARY', 'VAB', 'VARB', 'ASmartDG_id', 'LAT', 'LH', 'PFY', 'VLNA', 'VY', 'LON', 'Event_Type', 'VBR', 'VLLA', 'WY', 'BB_V', 'PFA', 'WB', 'VART', 'CY', 'DLAT', 'WT', 'DLON', 'PFB', 'DG_V', 'volume', 'CR', 'VR', 'AT_V', 'FF_DateTime', 'VAY', 'DG_OffTime', 'VYB', 'VRY', 'VAT')

with open('b827eb6a07fb.txt') as fd:
    data = fd.read().split()

# Keep only the tokens that carry an 'AHeader' record.
records = [word for word in data if word[0:7] == 'AHeader']

new_list = []
for record in records:
    # BUG FIX: the original rebound `list1` (the list being iterated) inside
    # this loop; use distinct names for the record list and the split parts.
    # Tokens are key=value pairs joined by '&', so splitting on both
    # separators yields an alternating key/value sequence.
    parts = re.split('[=&]', record)
    it = iter(parts)
    d = dict(zip(it, it))
    # Parse the timestamp field into a real datetime object.
    if 'dateTime' in d:
        d['dateTime'] = datetime.strptime(d['dateTime'], '%Y/%m/%d_%H:%M:%S')
    print(d)
    new_list.append(d)

# BUG FIX: the csv module requires newline='' to avoid blank lines on Windows.
with open("output_excel.csv", "w", newline='') as outfile:
    writer = DictWriter(outfile, FIELDNAMES)
    writer.writeheader()
    writer.writerows(new_list)
|
import numpy as np
import objectives
# Take delta input from user
# Step size applied per dimension when hybridize_ perturbs archive members.
delta = 0.6
def hybridize_(archive_in, min_, max_, delta=0.6):
    """
    Augment our archive to explore locally and improve search capabilities.

    For each dimension we form x_i - delta and x_i + delta copies (each
    clipped to the bounds) and stack them under the original archive.
    For large input dimensions (>= 6) each dimension is hybridized only
    with probability 0.5.

    BUG FIX: the original aliased `archive_in` instead of copying it, so the
    caller's archive was silently mutated in place; we now work on copies.

    :param archive_in: current archive set, shape (n_members, n_dims)
    :param min_: per-dimension lower bounds for the particles
    :param max_: per-dimension upper bounds for the particles
    :param delta: perturbation step (generalized from the module constant;
        default matches the original value of 0.6)
    :return: augmented population array
    """
    temparch = np.array(archive_in, copy=True)
    combined_pop = np.array(archive_in, copy=True)
    rand_hyb = (archive_in.shape[1] >= 6)
    for i in range(archive_in.shape[1]):
        if rand_hyb and np.random.rand() > 0.5:
            continue
        # Step down by delta, clipping at the lower bound.
        temparch[:, i] = temparch[:, i] - delta
        temparch[(temparch[:, i] < min_[i]), i] = min_[i]
        combined_pop = np.vstack((combined_pop, temparch))
        # Step up 2*delta above the lowered value (net x_i + delta), clipping
        # at the upper bound.
        temparch[:, i] = temparch[:, i] + 2 * delta
        temparch[(temparch[:, i] > max_[i]), i] = max_[i]
        combined_pop = np.vstack((combined_pop, temparch))
    return combined_pop
|
# Builds a building ("predio") as a list of floors, each floor a list of
# apartment labels, then prints the floors from the top floor down.
predio = []
andares = int(input("Qual a quantidade de andares? "))
aptosPorAndar = int(input("Qual a quantidade de apartamentos por andar? "))
for i in range(andares):
    andar = []
    for j in range(aptosPorAndar):
        # Label pattern: floor number, a literal 0, apartment number.
        andar.append(f"Apto {i+1}0{j+1}")
    predio.append(andar)
# BUG FIX: the original printed the reversed building twice via two identical
# loops; a single top-down pass is kept.
for i in range(andares, 0, -1):
    print(predio[i-1])
|
#encoding=utf-8
import sys, codecs, jieba
reload(sys)
sys.setdefaultencoding('utf-8')
jeiba_output_file = codecs.open('jeiba_title_output1.txt', 'w','utf-8')
with codecs.open('embBaiduIndexName.txt', 'r','utf-8') as content:
for line in content:
#print line
words = jieba.cut(line, cut_all=False)
str = ''
for word in words:
str = str + word + ' '
#print str
jeiba_output_file.write(str.strip()+'\n')
content.close()
jeiba_output_file.close()
print "done" |
import sklearn
import scipy
import meta_utils
from scipy.stats import multivariate_normal
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets
from sklearn.gaussian_process import GaussianProcessClassifier
from sklearn.gaussian_process.kernels import RBF
from matplotlib.patches import Ellipse
import matplotlib.patches
import matplotlib.path as mpltPath
# Default grid resolution for the meshes built by GP_Plotter.
h_default = .1 # step size in the mesh
# Plotting feature for a GP and its input data.
# TODO: breakdown into a class that then can plot entropy, feature_stats, etc
class GP_Plotter():
    """Plotting helper for a fitted Gaussian-process classifier over 2-D inputs.

    Builds a mesh over the (padded) bounding box of the training data and
    offers three views: class-probability map, predictive-entropy map, and
    an expected-science-gain map.
    """

    def __init__(self, GP, feature_name_colors, X_train, y_train, h=h_default):
        """
        :param GP: fitted classifier exposing predict_proba over 2-D points
        :param feature_name_colors: iterable of (color, label) pairs for the legend
        :param X_train: (n, 2) training inputs
        :param y_train: integer class labels (0/1/2), used to color the scatter
        :param h: mesh step size
        """
        self.GP = GP
        self.feature_name_colors = feature_name_colors
        self.X_train = X_train
        self.y_train = y_train
        self.h = h
        # Pad the data bounding box by one unit on every side.
        x_lim = [X_train[:, 0].min(), X_train[:, 0].max()]
        y_lim = [X_train[:, 1].min(), X_train[:, 1].max()]
        x_min, x_max = x_lim[0] - 1, x_lim[1] + 1
        y_min, y_max = y_lim[0] - 1, y_lim[1] + 1
        xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
                             np.arange(y_min, y_max, h))
        # One legend patch per (color, label) pair.
        self.legendHandlesList = []
        for pair in feature_name_colors:
            self.legendHandlesList.append(matplotlib.patches.Patch(color=pair[0], label=pair[1]))
        self.all_limits = [x_min, x_max, y_min, y_max]
        self.xx = xx
        self.yy = yy

    def plot_GP_and_data(self, figNumber=1, title="A GP with Training Data", plotData=True, linePlot=np.array([])):
        """Plot the class-probability map, optionally with data points and a path.

        For two classes the probabilities fill the red/green channels (blue is
        zeroed); for three or more they are shown directly as channels.
        Returns (fig, ax).
        """
        fig = plt.figure(figNumber, figsize=(6, 5))
        ax = fig.add_subplot(1, 1, 1)
        Z = self.GP.predict_proba(np.c_[self.xx.ravel(), self.yy.ravel()])
        num_features = Z.shape[1]
        if num_features == 2:
            # Append a blue channel of zeros so probabilities render as RGB.
            Z = np.append(Z, np.zeros((Z.shape[0], 1)), axis=1)
            # Put the result into a color plot
            Z = Z.reshape((self.xx.shape[0], self.xx.shape[1], num_features + 1))
            ax.imshow(Z, extent=(self.all_limits), origin="lower")
            # Shrink current axis by 20% to make room for the legend.
            box = ax.get_position()
            ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
            # Put a legend to the right of the current axis
            legendHandlesList = [self.legendHandlesList[0], self.legendHandlesList[1]]
            ax.legend(handles=legendHandlesList, loc='center left', bbox_to_anchor=(1, 0.5))
        else:
            # Put the result into a color plot
            Z = Z.reshape((self.xx.shape[0], self.xx.shape[1], num_features))
            ax.imshow(Z, extent=(self.all_limits), origin="lower")
            # Shrink current axis by 20% to make room for the legend.
            box = ax.get_position()
            ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
            ax.legend(handles=self.legendHandlesList, loc='center left', bbox_to_anchor=(1, 0.5))
        # Plot also the training points
        if plotData:
            ax.scatter(self.X_train[:, 0], self.X_train[:, 1], c=np.array(["r", "g", "b"])[self.y_train],
                       edgecolors=(0, 0, 0))
        # plot paths on the GP
        if linePlot.any():
            # BUG FIX: matplotlib's Line2D property is `linewidth`; the
            # original `lineWidth=5` raises "Unknown property" at draw time.
            ax.plot(linePlot[:, 0], linePlot[:, 1], c="k", linewidth=5)
        plt.xlabel('X')
        plt.ylabel('Y')
        plt.xlim(self.xx.min(), self.xx.max())
        plt.ylim(self.yy.min(), self.yy.max())
        plt.title("%s" % (title))
        #plt.title("%s, LML: %.3f" %(title, self.GP.log_marginal_likelihood(self.GP.kernel_.theta)))
        return fig, ax

    def plot_GP_entropy(self, figNumber=2, title="A GP's entropy", points_to_plot=np.array([]), star_point=np.array([])):
        """Contour-plot the predictive entropy H(p) = -sum_k p_k log2 p_k over the mesh.

        Optionally overlays candidate points (black) and one starred point.
        Returns (fig, ax).
        """
        fig = plt.figure(figNumber, figsize=(6, 5))
        ax = fig.add_subplot(1, 1, 1)
        Z = self.GP.predict_proba(np.c_[self.xx.ravel(), self.yy.ravel()])
        num_features = Z.shape[1]
        Z_entropy = np.zeros(Z.shape[0])
        for probs_ind in range(Z.shape[0]):
            for el in Z[probs_ind, :]:
                if np.isclose(el, 0.0):
                    # 0 * log(0) is taken as 0 by the usual entropy convention.
                    Z_entropy[probs_ind] += 0
                else:
                    Z_entropy[probs_ind] += -el * np.log2(el)
        # Put the result into a color plot
        Z = Z_entropy.reshape((self.xx.shape[0], self.xx.shape[1]))
        contourPlot = ax.contourf(self.xx, self.yy, Z, extent=(self.all_limits), origin="lower")
        # plot points (if inputed)
        if points_to_plot.any():
            ax.scatter(points_to_plot[:, 0], points_to_plot[:, 1], c='k')
        if star_point.any():
            ax.scatter(star_point[0], star_point[1], c='red', marker='*', s=500)
        plt.xlabel('X')
        plt.ylabel('Y')
        plt.xlim(self.xx.min(), self.xx.max())
        plt.ylim(self.yy.min(), self.yy.max())
        plt.title("%s" % (title))
        #plt.title("%s, LML: %.3f" %(title, self.GP.log_marginal_likelihood(self.GP.kernel_.theta)))
        cbar = fig.colorbar(contourPlot)
        cbar.ax.set_ylabel('Entropy')
        return fig, ax

    def plot_GP_expected_science(self, feature_stats, figNumber=3, title="A GP's Expected Science Gain", points_to_plot=np.array([]), star_point=np.array([])):
        """Contour-plot sum_k mean_k * p_k(x): expected science value per mesh point.

        :param feature_stats: per-class stats; only element [0] (the mean) of
            each entry is used. Would need reformulating for a UCB method.
        Returns (fig, ax).
        """
        fig = plt.figure(figNumber, figsize=(6, 5))
        ax = fig.add_subplot(1, 1, 1)
        Z = self.GP.predict_proba(np.c_[self.xx.ravel(), self.yy.ravel()])
        num_features = Z.shape[1]
        Z_science = np.zeros(Z.shape[0])
        # Expected value under the predictive distribution, means only.
        for probs_ind in range(Z.shape[0]):
            for k in range(len(feature_stats)):
                Z_science[probs_ind] += feature_stats[k][0] * Z[probs_ind, k]
        # Put the result into a color plot
        Z = Z_science.reshape((self.xx.shape[0], self.xx.shape[1]))
        contourPlot = ax.contourf(self.xx, self.yy, Z, extent=(self.all_limits), origin="lower")
        # plot points (if inputed)
        if points_to_plot.any():
            ax.scatter(points_to_plot[:, 0], points_to_plot[:, 1], c='k')
        if star_point.any():
            ax.scatter(star_point[0], star_point[1], c='red', marker='*', s=500)
        plt.xlabel('X')
        plt.ylabel('Y')
        plt.xlim(self.xx.min(), self.xx.max())
        plt.ylim(self.yy.min(), self.yy.max())
        plt.title("%s, LML: %.3f" %
                  (title, self.GP.log_marginal_likelihood(self.GP.kernel_.theta)))
        cbar = fig.colorbar(contourPlot)
        cbar.ax.set_ylabel('Expected Science Return (mean only)')
        return fig, ax
|
from django.contrib.auth import get_user_model
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.contrib.contenttypes.fields import GenericForeignKey
from django.urls import reverse
# Resolve the configured user model once (supports a custom AUTH_USER_MODEL).
User = get_user_model()
def get_product_url(obj, viewname, model_name=None):
    """Build the detail URL for a product instance.

    :param obj: model instance; its model name and slug feed the URL kwargs
    :param viewname: name of the URL pattern to reverse
    :param model_name: unused; made optional for backward compatibility —
        BUG FIX: several get_absolute_url methods in this module call with
        only two arguments, which raised TypeError while it was required.
    """
    ct_model = obj.__class__._meta.model_name
    return reverse(viewname, kwargs={'ct_model': ct_model, 'slug': obj.slug})
# 1.Category
# 2. Product
# 3. CartProduct
# 4. Cart
# 5. Order
# 6. Customer - покупатель
# 7. Description
class LatestProductsManager:
    """Collects the most recent products across several concrete product models."""

    @staticmethod
    def get_products_for_main_page(*args, **kwargs):
        """Return up to 5 newest products from each model named in *args.

        :param args: lowercase model names, resolved via ContentType
        :param with_respect_to: optional model name whose products are sorted
            to the front of the combined list
        """
        with_respect_to = kwargs.get('with_respect_to')
        products = []
        ct_models = ContentType.objects.filter(model__in=args)
        for ct_model in ct_models:
            # _base_manager sidesteps any custom default-manager filtering.
            model_products = ct_model.model_class()._base_manager.all().order_by('-id')[:5]
            products.extend(model_products)
        if with_respect_to:
            ct_model = ContentType.objects.filter(model=with_respect_to)
            if ct_model.exists():
                if with_respect_to in args:
                    # startswith yields a boolean key; reverse=True floats the
                    # favoured model's products to the front.
                    return sorted(products, key=lambda x: x.__class__._meta.model_name.startswith(with_respect_to),
                                  reverse=True)
        return products
class LatestProducts:
    # Facade exposing the manager under the conventional `.objects` name.
    objects = LatestProductsManager()
class Category(models.Model):
    """Product category (flat for now; the self-referential parent link is disabled)."""

    name = models.CharField(max_length=255, verbose_name='Имя категории')
    slug = models.SlugField(unique=True)
    # parent_category = models.ForeignKey('self', on_delete=models.CASCADE, related_name='parent_category', null=True)

    def __str__(self):
        return self.name

    def get_absolute_url(self):
        return reverse('category_detail', kwargs={'slug': self.slug})
class Product(models.Model):
    """Abstract base for every concrete product type in the shop."""

    MIN_RESOLUTION = (400, 400)
    MAX_RESOLUTION = (800, 800)

    class Meta:
        abstract = True

    category = models.ForeignKey(Category, verbose_name='Категория', on_delete=models.CASCADE)
    title = models.CharField(max_length=255, verbose_name='Именование')
    # NOTE(review): unique=True combined with a shared default of 1 will
    # collide as soon as two products are saved without explicit slugs.
    slug = models.SlugField(unique=True, default=1)
    image = models.ImageField(verbose_name='Изоражение')
    description = models.TextField(verbose_name='Описание', null=True)
    # BUG FIX: max_length is not a valid option for integer fields (Django's
    # system check flags it); removed.
    price = models.PositiveIntegerField(verbose_name='Цена')
    # BUG FIX: `name=` renames the model field itself (here to a Cyrillic
    # identifier), breaking `obj.brand_name`; the human-readable label
    # belongs in `verbose_name=`.
    brand_name = models.CharField(max_length=50, verbose_name='Бренд')
    country = models.CharField(max_length=55, verbose_name='Страна бренда')

    def __str__(self):
        return self.title
class Foundation(Product):
    """Foundation make-up, characterized by target skin type and shade."""

    # BUG FIX: label typo 'Ддя' -> 'Для' (display-only second tuple element).
    STATUS_CHOICES = (('for dry', 'Для сухой кожи'), ('for mixed', 'Для комбинированного типа кожи'),
                      ('for oily', 'Для жирной кожи'), ('for normal', 'Для нормальной кожи'))
    skin_type = models.CharField(choices=STATUS_CHOICES, max_length=20)
    # BUG FIX: `name=` renames the field itself; use verbose_name for the label.
    shades = models.CharField(max_length=2, verbose_name='Оттенок')

    def __str__(self):
        return f'{self.category.name} : {self.title}'

    def get_absolute_url(self):
        # BUG FIX: get_product_url declares a third model_name parameter; the
        # original two-argument call raised TypeError.
        return get_product_url(self, 'product_detail', self._meta.model_name)
class ForEyes(Product):
    """Eye make-up products (shadow, eyeliner, glitter, mascara)."""

    STATUS_CHOICES = (('shades', 'Тени для век'), ('eyeliner', 'Подводка'),
                      ('glitter', 'Глиттер'), ('Mascara', 'тушь'))
    products_for = models.CharField(choices=STATUS_CHOICES, max_length=20)

    def __str__(self):
        return f'{self.category.name} : {self.title}'

    def get_absolute_url(self):
        # BUG FIX: get_product_url declares a third model_name parameter; the
        # original two-argument call raised TypeError.
        return get_product_url(self, 'product_detail', self._meta.model_name)
class ForBrows(Product):
    """Brow products (shadow, marker, gel, pencil)."""

    STATUS_CHOICES = (('shades', 'Тени для бровей'), ('marker', 'Фломастер'),
                      ('gel', 'Гель'), ('pencil', 'Карандаш'))
    products_for = models.CharField(choices=STATUS_CHOICES, max_length=20)

    def __str__(self):
        return f'{self.category.name} : {self.title}'

    def get_absolute_url(self):
        # BUG FIX: get_product_url declares a third model_name parameter; the
        # original two-argument call raised TypeError.
        return get_product_url(self, 'product_detail', self._meta.model_name)
class ForLips(Product):
    """Lip products (cream lipstick, gloss, matte lipstick, lip pencil)."""

    STATUS_CHOICES = (('creamy', 'Кремовая помада'), ('gloss', 'Блеск'),
                      ('mate', 'Матовая помада'), ('pencil for lips', 'Карандаш для губ'))
    products_for = models.CharField(choices=STATUS_CHOICES, max_length=20)

    def __str__(self):
        return f'{self.category.name} : {self.title}'

    def get_absolute_url(self):
        # BUG FIX: get_product_url declares a third model_name parameter; the
        # original two-argument call raised TypeError.
        return get_product_url(self, 'product_detail', self._meta.model_name)
class CartProduct(models.Model):
    """One line item in a cart: a generic link to any concrete product type."""

    user = models.ForeignKey('Customer', verbose_name='Покупатель', on_delete=models.CASCADE)
    cart = models.ForeignKey('Cart', verbose_name='Корзина', on_delete=models.CASCADE, related_name='related_products')
    # Generic relation: a cart line may point at any Product subclass.
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey('content_type', 'object_id')
    qty = models.PositiveIntegerField(default=1)
    # BUG FIX: max_length is not a valid option for IntegerField (Django's
    # system check flags and ignores it); removed.
    final_price = models.IntegerField(verbose_name='Общая Цена')

    def __str__(self):
        return f'Продукты для корзины {self.content_object.title}'
class Cart(models.Model):
    """A customer's cart: a bag of CartProduct lines with cached totals."""

    owner = models.ForeignKey('Customer', verbose_name='Владелец', on_delete=models.CASCADE)
    products = models.ManyToManyField(CartProduct, blank=True, related_name='related_cart')
    total_products = models.PositiveIntegerField(default=0)
    # BUG FIX: max_length is not a valid option for IntegerField (Django's
    # system check flags and ignores it); removed.
    final_price = models.IntegerField(verbose_name='Общая Цена')
    in_order = models.BooleanField(default=False)
    for_anonymous_user = models.BooleanField(default=False)

    def __str__(self):
        return str(self.id)
class Customer(models.Model):
    """Shop customer profile attached to an auth user."""

    # BUG FIX: verbose_name typo 'Полльзователь' -> 'Пользователь' (display-only).
    user = models.ForeignKey(User, verbose_name='Пользователь', on_delete=models.CASCADE)
    phone = models.CharField(max_length=19, verbose_name='Номер телефона')
    address = models.CharField(max_length=255, verbose_name='Адрес')

    def __str__(self):
        return f'Покупатель {self.user.first_name} {self.user.last_name}'
#
# class Specification(models.Model):
# content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
# object_id = models.PositiveIntegerField()
# name = models.CharField(max_length=255, verbose_name='Имя товара для характеристик')
#
# def __str__(self):
# return f'Характеристики для товаров {self.name}'
|
# -*- coding: utf-8 -*-
# Odoo addon manifest for the "cinema" module.
{
    'name': "cinema",
    'summary': "Cinema Erp System Management",
    'description': """
An Erp System to Manage Cinema
Manage Films and info
Manage Reservation and tickets
""",
    'author': "Huzaifa",
    # NOTE(review): 'website' is expected to be a URL but holds an e-mail
    # address — confirm intent.
    'website': "huz.dark1@gmail.com",
    'category': 'Marketing',
    'version': '0.1',
    # any module necessary for this one to work correctly
    'depends': ['base', 'hr'],
    # always loaded
    'data': [
        'security/security.xml',
        'security/ir.model.access.csv',
        # 'data/data.xml',
        'data/sequence.xml',
        'views/cinema_view.xml',
        'views/film_view.xml',
        'views/cinema_reservation.xml',
        'wizard/create_show_wizard.xml',
        'reports/report.xml',
        'reports/templates.xml',
    ],
    # only loaded in demonstration mode
    'demo': [
        'demo/demo.xml',
    ],
    'application': True,
    'installable': True,
    'auto_install': False,
}
|
import numpy as np
import datetime
import os
from network import NN
class NEAT:
    """Simple neuro-evolution driver.

    Maintains a population of NN genomes ({id: [generation, network, score]}),
    lets the caller score them via set_score, and breeds new generations by
    mating, mutating, and carrying over the fittest parents.
    """

    def __init__(self, population_size, n_parents_new_gen, n_mutated_parents_new_gen, n_mates_new_gen, prob_weight_change, gamma):
        """
        :param population_size: number of networks per generation
        :param n_parents_new_gen: top parents copied unchanged into the next generation
        :param n_mutated_parents_new_gen: top parents copied with mutation
        :param n_mates_new_gen: children produced by mating adjacent top parents
        :param prob_weight_change: per-weight mutation probability, in percent
        :param gamma: scale of the Gaussian weight perturbation
        """
        self.population_size = population_size
        self.n_parents_new_gen = n_parents_new_gen
        self.n_mates_new_gen = n_mates_new_gen
        self.n_mutated_parents_new_gen = n_mutated_parents_new_gen
        self.prob_weight_change = prob_weight_change
        self.gamma = gamma
        self.n_gen = 0
        self.nn_id = 1
        self.score_file = open('scores.txt', 'w+')
        # Per-run output directory, named after the current timestamp.
        now = datetime.datetime.now()
        self.current_directory = '{}-{}-{}-{}'.format(now.day, now.hour, now.minute, now.second)
        os.mkdir('trained_nn/{}'.format(self.current_directory))
        self.population = self.init_population()

    def init_population(self):
        """Create the initial population of fresh networks with zero scores."""
        initial_population = {}
        for i in range(self.population_size):
            nn = NN()
            initial_population[self.nn_id] = [self.n_gen, nn, 0]  # 0 is the score
            self.nn_id += 1
        return initial_population

    def init_new_gen(self, rated_population, is_first_new_gen=False):
        """Replace self.population with the next generation.

        Built, in order, from: children mated from adjacent top scorers,
        mutated copies of top scorers, and top scorers carried over unchanged.

        NOTE(review): `rated_population` is unused — scoring happens in place
        via set_score; confirm whether the parameter can be dropped.
        """
        new_population = {}
        if is_first_new_gen:
            print(type(self.population))
            # BUG FIX: dict.iteritems() is Python 2 only and raises
            # AttributeError on Python 3; .items() is the equivalent here.
            for k, v in self.population.items():
                nn = v[1]
                if nn.is_average_prediction_good(0.5):
                    new_population[k] = v
            print('init pop size is {}'.format(len(new_population)))
            self.population = new_population
            return
        # Rank by score (element 2 of the [gen, nn, score] triplet), best first.
        sorted_population_by_score = sorted(self.population.items(), key=lambda x: x[1][2], reverse=True)
        self.save_nn(sorted_population_by_score[0][1][1], sorted_population_by_score[0][1][2])  # save the best nn
        print('###############################')
        print('GEN # ', self.n_gen)
        print('NN with non zero score: ', str(self.get_n_nn_with_non_zero_score(sorted_population_by_score)))
        print('###############################')
        self.score_file.write('GEN {}: highest score: {}'.format(self.n_gen, sorted_population_by_score[0][1][2]))
        self.n_gen += 1
        # Children from crossing adjacent top scorers (i with i+1).
        for i in range(self.n_mates_new_gen):
            parent_nn_1 = sorted_population_by_score[i][1][1]
            parent_nn_2 = sorted_population_by_score[i + 1][1][1]
            child_nn = self.mate(parent_nn_1, parent_nn_2)
            self.nn_id += 1
            new_population[self.nn_id] = [self.n_gen, child_nn, 0]
        # Mutated copies of the top scorers (fresh ids).
        for i in range(self.n_mutated_parents_new_gen):
            self.nn_id += 1
            old_good_nn_gen = sorted_population_by_score[i][1][0]
            old_good_nn = sorted_population_by_score[i][1][1]
            new_population[self.nn_id] = [old_good_nn_gen, self.mutate(old_good_nn), 0]
        # Top scorers carried over unchanged (original ids kept).
        for i in range(self.n_parents_new_gen):
            old_good_nn_id = sorted_population_by_score[i][0]
            old_good_nn_gen = sorted_population_by_score[i][1][0]
            old_good_nn = sorted_population_by_score[i][1][1]
            new_population[old_good_nn_id] = [old_good_nn_gen, old_good_nn, 0]
        self.population = new_population

    def save_nn(self, nn, score):
        """Serialize a network's Keras model under the run directory.

        NOTE(review): `score` is currently unused in the file name — confirm.
        """
        now = datetime.datetime.now()
        name = 'NN_gen_{}_from_{}-{}-{}-{}'.format(self.n_gen, now.day, now.hour, now.minute, now.second)
        nn.model.save('{}/{}/{}.h5'.format('trained_nn', self.current_directory, name))

    def set_score(self, nn_id, score):
        """Record the externally-evaluated fitness for one network."""
        self.population[nn_id][2] = score

    def mate(self, nn_1, nn_2):
        """Cross two parents: the child takes layer-0 weights from nn_2 and
        layer-1 weights from nn_1."""
        nn_1_weight_1 = nn_1.model.layers[1].get_weights()
        nn_2_weight_0 = nn_2.model.layers[0].get_weights()
        nn = NN()
        nn.model.layers[0].set_weights(nn_2_weight_0)
        nn.model.layers[1].set_weights(nn_1_weight_1)
        return nn

    def mutate(self, nn):
        """Gaussian-perturb each weight with probability prob_weight_change%.

        Biases are carried over unchanged; the network's weights are updated
        in place and the same nn is returned.
        """
        layers = nn.model.layers[:]
        for l, layer in enumerate(layers):
            new_weights = []
            weights = layer.get_weights()[0]
            for i in range(weights.shape[0]):
                for weight in weights[i]:
                    rand = np.random.random()  # uniform in [0, 1)
                    if self.prob_weight_change / 100.0 > rand:
                        update = self.gamma * np.random.normal()
                        weight += update
                        weight = np.round(weight, 5)
                    new_weights.append(weight)
            layer_weights = []
            new_weights = np.array(new_weights).reshape(weights.shape[0], weights.shape[1])
            layer_weights.append(new_weights)
            layer_weights.append(layer.get_weights()[1])  # keep the bias vector
            layers[l].set_weights(np.array(layer_weights))
        return nn

    def get_n_nn_with_non_zero_score(self, sorted_population):
        """Count population entries whose score is strictly positive."""
        i = 0
        for p in sorted_population:
            if p[1][2] > 0:
                i += 1
        return i

    def remove_nn_from_population(self, id):
        """Drop one network from the current population by id."""
        self.population.pop(id)
|
# _*_ coding:utf-8 _*_
from django.views.generic import View
from django.shortcuts import render
from django.conf import settings
from django.shortcuts import get_object_or_404
from django.http import HttpResponse
from questionnaire.models import *
from users.models import UserProfile
import random
import json
from hashlib import md5
class QuestionnaireView(View):
    """Look up the requested questionnaire and render it with its questions."""

    def get(self, request, questionnaire_id=None):
        # 404 when the questionnaire id is unknown.
        questionnaire = get_object_or_404(Questionnaire, id=int(questionnaire_id))
        if questionnaire:
            questions = questionnaire.questions()
            for question in questions:
                # NOTE(review): `chices()` looks like a typo of `choices()` —
                # confirm against the Question model before renaming.
                choices = question.chices()
                question.choices = choices
                # Each question type renders via its own sub-template.
                question.template = "question_type/%s.html" % question.type
            # Login-state check (currently disabled):
            # res = dict()
            # if not request.user.is_authenticated():
            #     res['status'] = 'fail'
            #     res['msg'] = u'用户未登录'
            #     return HttpResponse(json.dumps(res), content_type='application/json')
            # if qu:
            #     # Generate a unique key
            #     str_to_hash = "".join(map(lambda i: chr(random.randint(0, 255)), range(16)))
            #     str_to_hash += settings.SECRET_KEY
            #     key = md5(str_to_hash).hexdigest()
            #
            #     run = RunInfo()
            #     # run.subject = request.user
            #     run.random = key
            #     run.runid = key
            #     run.questionnaire = qu
            #     run.save()
            # Render the questionnaire page.
            return render(request, 'questionnaire.html', {
                'questionnaire': questionnaire,
                'questions': questions
            })
class AddQuestionnaire(View):
    """Accept a submitted questionnaire and persist one answer row per question."""

    def save_runinfo(self, questionnaire, user):
        """Create and save a RunInfo row linking the respondent to the questionnaire."""
        runinfo = RunInfo()
        runinfo.subject = user
        runinfo.questionnaire = questionnaire
        runinfo.save()
        return runinfo

    def post(self, request):
        # Resolve the respondent; anonymous submissions fall back to the
        # shared "Anonymous" profile.
        if not request.user.is_authenticated():
            # BUG FIX: the original used `filter(...)[0:1]`, which yields a
            # QuerySet slice rather than a UserProfile instance, so
            # runinfo.subject was assigned a queryset. .first() returns the
            # instance itself (or None when missing).
            user = UserProfile.objects.filter(username='Anonymous').first()
        else:
            user = request.user
        questionnaire_id = int(request.POST.get('questionnaire_id', 0))
        questionnaire = Questionnaire.objects.get(id=questionnaire_id)
        if questionnaire:
            runinfo = self.save_runinfo(questionnaire, user)
            # Answers arrive as a JSON list of {"question_id": ..., "answer": ...}.
            answer_list = json.loads(request.POST.get('answerStr'))
            for answer_obj in answer_list:
                answer = Answer()
                answer.text = answer_obj["answer"]
                question = Question.objects.get(id=answer_obj["question_id"])
                answer.question = question
                answer.runinfo = runinfo
                answer.save()
        res = dict()
        res['status'] = 'success'
        res['msg'] = '完成'
        return HttpResponse(json.dumps(res), content_type='application/json')
|
import psycopg2
from reportlab.lib.pagesizes import letter
from reportlab.pdfgen import canvas
import os
def get_student_data():
    """Fetch Term 1 marks for student #1, joined with classroom information.

    :return: list of (name, room_number, term, subject, marks, year) tuples
    """
    connection = psycopg2.connect(
        host='localhost',
        database='postgres',
        user='postgres',
        password=''
    )
    # BUG FIX (resource leak): the cursor and connection were not released if
    # the query raised; try/finally guarantees cleanup on every path.
    try:
        cursor = connection.cursor()
        try:
            cursor.execute('''
                SELECT s.name, c.room_number, t.term, t.subject, t.marks, t.year
                FROM TermMark t
                INNER JOIN Student s ON t.student_id = s.student_id
                INNER JOIN ClassRoom c ON t.classroom_id = c.classroom_id
                WHERE t.term = 'Term 1' AND s.student_id = 1
            ''')
            return cursor.fetchall()
        finally:
            cursor.close()
    finally:
        connection.close()
def calculate_average_marks(data):
    """Average of the marks column (index 4) across all rows.

    :param data: rows of (name, room, term, subject, marks, year)
    :return: mean mark, or 0 for an empty input — BUG FIX: the original
        raised ZeroDivisionError when no rows were returned.
    """
    if not data:
        return 0
    # Index 4 corresponds to the marks in each student_info tuple.
    return sum(row[4] for row in data) / len(data)
def generate_pdf_report(data):
    """Render the student rows to 'student_report.pdf' with an average line.

    :param data: rows of (name, classroom, term, subject, marks, year)
    """
    pdf_filename = 'student_report.pdf'
    c = canvas.Canvas(pdf_filename, pagesize=letter)
    # Header.
    c.setFont("Helvetica", 12)
    c.drawString(100, 800, "Student Report")
    c.drawString(100, 770, "Name | Classroom | Term | Subject | Marks | Year")
    c.drawString(100, 750, "-" * 80)
    # NOTE(review): y steps down 20pt per row with no page break; very long
    # result sets will run off the bottom of the page.
    y = 730
    for student_info in data:
        student_info_str = " | ".join(str(info) for info in student_info)
        c.drawString(100, y, student_info_str)
        y -= 20
    # CONSISTENCY/BUG FIX: reuse calculate_average_marks instead of a second
    # inline averaging loop, and avoid ZeroDivisionError on an empty set.
    average_marks = calculate_average_marks(data) if data else 0
    c.drawString(100, y - 40, f"Average Marks for all Subjects: {average_marks:.2f}")
    c.save()
    print(f"PDF report generated: {pdf_filename}")
if __name__ == "__main__":
    # Fetch the rows and render the PDF report when run as a script.
    student_data = get_student_data()
    generate_pdf_report(student_data)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
import logging.handlers
import logging
class Log:
    """Process-wide singleton wrapper around a rotating-file logger.

    Call Log.init(...) once at startup; afterwards use Log.logger directly.
    """

    logger = None

    # Map config strings to logging levels. TRACE has no stdlib equivalent
    # and maps to DEBUG; ERR is an alias for ERROR; anything unknown falls
    # back to WARNING (same behavior as the original elif ladder).
    _LEVELS = {
        "DEBUG": logging.DEBUG,
        "TRACE": logging.DEBUG,
        "INFO": logging.INFO,
        "WARNING": logging.WARNING,
        "ERR": logging.ERROR,
        "ERROR": logging.ERROR,
    }

    @staticmethod
    def init(filePath, logLevel, logName):  # constructor
        """Initialise the shared logger (idempotent: later calls are no-ops).

        :param filePath: log file path; rotated at ~1 MB with 5 backups kept
        :param logLevel: one of DEBUG/TRACE/INFO/WARNING/ERR/ERROR
        :param logName: logger name passed to logging.getLogger
        """
        # `is None` (identity), not `== None`.
        if Log.logger is None:
            Log.logger = logging.getLogger(logName)
            handler = logging.handlers.RotatingFileHandler(filePath, maxBytes=1024*1000, backupCount=5)
            handler.setFormatter(logging.Formatter('%(asctime)s %(name)s %(levelname)s:%(message)s'))
            Log.logger.addHandler(handler)
            Log.logger.setLevel(Log._LEVELS.get(logLevel, logging.WARNING))
|
# -*- coding: utf-8 -*-
"""
Created on Wed Apr 14 16:20:31 2021
@author: seoun
"""
import os
import sys
import time
import torch
import torch.nn as nn
import torch.optim as optim
import pytorch_lightning as pl
from tqdm import tqdm
from torch.utils.data import TensorDataset, DataLoader
from pytorch_lightning.callbacks import EarlyStopping, ModelCheckpoint
from pytorch_lightning.loggers import TensorBoardLogger
class DeepLog(pl.LightningModule):
    """Log Anomaly Detection Model (DeepLog).

    :param input_size: input data size
    :param hidden_size: lstm hidden size
    :param window_size: past information to help predict the next log key
    :param num_layers: number of lstm layer
    :param num_classes: number of log keys
    :param lr: learning rate
    """

    def __init__(
        self,
        input_size: int,
        hidden_size: int,
        window_size: int,
        num_layers: int,
        num_classes: int,
        lr: float,
    ):
        super(DeepLog, self).__init__()
        self.save_hyperparameters()
        self.lstm = nn.LSTM(input_size, hidden_size, num_layers, batch_first=True)
        self.fc = nn.Linear(hidden_size, num_classes)
        self.criterion = nn.CrossEntropyLoss()

    def forward(self, x):
        # Zero-initialised hidden and cell states for each forward pass.
        state_shape = (self.hparams.num_layers, x.size(0), self.hparams.hidden_size)
        h0 = torch.zeros(state_shape).to(self.device)
        c0 = torch.zeros(state_shape).to(self.device)
        lstm_out, _ = self.lstm(x, (h0, c0))
        # Classify from the hidden state at the final time step only.
        return self.fc(lstm_out[:, -1, :])

    def configure_optimizers(self):
        return optim.Adam(self.parameters(), lr=self.hparams.lr)

    def _step_loss(self, batch):
        # Shared by training and validation: reshape the flat sequence into
        # (batch, window, features) and compute the cross-entropy loss.
        seq, label = batch
        seq = (
            seq.clone()
            .detach()
            .view(-1, self.hparams.window_size, self.hparams.input_size)
            .to(self.device)
        )
        return self.criterion(self(seq), label)

    def training_step(self, batch, batch_idx):
        return {"loss": self._step_loss(batch)}

    def training_epoch_end(self, outputs):
        mean_loss = torch.stack([o["loss"] for o in outputs]).mean()
        self.log("trn_loss", mean_loss)

    # create valid function to get val_loss
    def validation_step(self, batch, batch_idx):
        return {"vloss": self._step_loss(batch)}

    def validation_epoch_end(self, outputs):
        mean_loss = torch.stack([o["vloss"] for o in outputs]).mean()
        self.log("val_loss", mean_loss)
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020 DDN. All rights reserved.
# Use of this source code is governed by a MIT-style
# license that can be found in the LICENSE file.
import logging
from django.db import models
from django.db.models import CASCADE
from chroma_core.models import AlertStateBase
from chroma_core.models import AlertEvent
from chroma_core.models import DeletableStatefulObject
from chroma_core.models import StateChangeJob
from chroma_core.models import Job
from chroma_core.models import SchedulingError
from chroma_core.models import StateLock
from chroma_core.lib.job import DependOn, DependAll, Step
from chroma_help.help import help_text
class PacemakerConfiguration(DeletableStatefulObject):
    """Tracks the pacemaker service configuration state for one managed host.

    State machine: unconfigured -> stopped -> started.
    """

    states = ["unconfigured", "stopped", "started"]
    initial_state = "unconfigured"

    host = models.OneToOneField("ManagedHost", related_name="_pacemaker_configuration", on_delete=CASCADE)

    def __str__(self):
        return "%s Pacemaker configuration" % self.host

    class Meta:
        app_label = "chroma_core"
        ordering = ["id"]

    def get_label(self):
        return "pacemaker configuration"

    def set_state(self, state, intentional=False):
        """
        :param intentional: set to true to silence any alerts generated by this transition
        """
        super(PacemakerConfiguration, self).set_state(state, intentional)
        # Raise/clear the stopped alert whenever we leave/enter "started";
        # intentional transitions only produce a warning-severity notify.
        if intentional:
            PacemakerStoppedAlert.notify_warning(self, self.state != "started")
        else:
            PacemakerStoppedAlert.notify(self, self.state != "started")

    # Deleting a ManagedHost cascades to its pacemaker configuration.
    reverse_deps = {"ManagedHost": lambda mh: PacemakerConfiguration.objects.filter(host_id=mh.id)}

    # Below the handler should be in a completion hook, but I can't see how to get the instance of the completion
    # hook to add it and time is running out. I will return to this.
    @property
    def reconfigure_fencing(self):
        # We return False because we are overloading the attribute setter below to make it a event handler rather than
        # a real property. If some sets reconfigure_fencing = False then the event will not be called because the current
        # value is always False. If someone sets reconfigure_fencing = True then the setter will be called because the
        # current value is always False!
        return False

    @reconfigure_fencing.setter
    def reconfigure_fencing(self, ignored_value):
        # We don't store this because we are overloading the attribute setter below to make it a event handler rather than
        # a real property.
        pass
class StonithNotEnabledAlert(AlertStateBase):
    """Error-severity alert raised while STONITH fencing is not enabled."""

    default_severity = logging.ERROR

    class Meta:
        app_label = "chroma_core"
        proxy = True

    def alert_message(self):
        return help_text["stonith_not_enabled"] % self.alert_item

    def end_event(self):
        # Informational event emitted when the alert clears.
        return AlertEvent(
            message_str=help_text["stonith_enabled"] % self.alert_item,
            alert_item=self.alert_item,
            alert=self,
            severity=logging.INFO,
        )

    @property
    def affected_objects(self):
        """
        :return: A list of objects that are affected by this alert
        """
        return [self.alert_item.host]
class PacemakerStoppedAlert(AlertStateBase):
    """Informational alert raised while pacemaker is not running on a host."""

    # Pacemaker being down is never solely responsible for a filesystem
    # being unavailable: if a target is offline we will get a separate
    # ERROR alert for that. Pacemaker being offline may indicate a configuration
    # fault, but equally could just indicate that the host hasn't booted up that far yet.
    default_severity = logging.INFO

    def alert_message(self):
        return "Pacemaker stopped on server %s" % self.alert_item.host

    class Meta:
        app_label = "chroma_core"
        proxy = True

    def end_event(self):
        # Emitted when pacemaker starts again and the alert clears.
        return AlertEvent(
            message_str="Pacemaker started on server '%s'" % self.alert_item.host,
            alert_item=self.alert_item.host,
            alert=self,
            severity=logging.WARNING,
        )

    @property
    def affected_objects(self):
        """
        :return: A list of objects that are affected by this alert
        """
        return [self.alert_item.host]
class ConfigurePacemakerStep(Step):
    """Ask the agent on a host to configure pacemaker."""

    idempotent = True

    def run(self, kwargs):
        host = kwargs["host"]
        self.invoke_agent_expect_result(host, "configure_pacemaker")
class ConfigurePacemakerJob(StateChangeJob):
    """State transition unconfigured -> stopped: configure pacemaker on a host."""

    state_transition = StateChangeJob.StateTransition(PacemakerConfiguration, "unconfigured", "stopped")
    stateful_object = "pacemaker_configuration"
    pacemaker_configuration = models.ForeignKey(PacemakerConfiguration, on_delete=CASCADE)
    state_verb = "Configure Pacemaker"
    display_group = Job.JOB_GROUPS.COMMON
    display_order = 30

    class Meta:
        app_label = "chroma_core"
        ordering = ["id"]

    @classmethod
    def long_description(cls, stateful_object):
        return help_text["configure_pacemaker"]

    def description(self):
        return help_text["configure_pacemaker_on"] % self.pacemaker_configuration.host

    def get_steps(self):
        # Pacemaker must be running while it is configured, hence the
        # start / configure / stop sandwich (end state is "stopped").
        return [
            (StartPacemakerStep, {"host": self.pacemaker_configuration.host}),
            (ConfigurePacemakerStep, {"host": self.pacemaker_configuration.host}),
            (StopPacemakerStep, {"host": self.pacemaker_configuration.host}),
        ]

    def get_deps(self):
        """
        Before Pacemaker operations are possible the host must have had its packages installed.
        Maybe we need a packages object, but this routine at least keeps the detail in one place.
        Also corosync needs to be up and running. This is because configuring pacemaker requires starting pacemaker.
        Or maybe we need an unacceptable_states lists.
        :return:
        """
        if self.pacemaker_configuration.host.state in ["unconfigured", "undeployed"]:
            deps = [DependOn(self.pacemaker_configuration.host, "packages_installed")]
        else:
            deps = []
        deps.append(DependOn(self.pacemaker_configuration.host.corosync_configuration, "started"))
        return DependAll(deps)
class UnconfigurePacemakerStep(Step):
    """Ask the agent on a host to unconfigure pacemaker."""

    idempotent = True

    def run(self, kwargs):
        host = kwargs["host"]
        self.invoke_agent_expect_result(host, "unconfigure_pacemaker")
class UnconfigurePacemakerJob(StateChangeJob):
    """State transition stopped -> unconfigured: remove pacemaker configuration."""

    state_transition = StateChangeJob.StateTransition(PacemakerConfiguration, "stopped", "unconfigured")
    stateful_object = "pacemaker_configuration"
    pacemaker_configuration = models.ForeignKey(PacemakerConfiguration, on_delete=CASCADE)
    state_verb = "Unconfigure Pacemaker"
    display_group = Job.JOB_GROUPS.COMMON
    display_order = 30

    class Meta:
        app_label = "chroma_core"
        ordering = ["id"]

    @classmethod
    def long_description(cls, stateful_object):
        return help_text["unconfigure_pacemaker"]

    def description(self):
        return help_text["unconfigure_pacemaker_on"] % self.pacemaker_configuration.host

    def get_steps(self):
        # Sadly we need to restart and then stop (it will be stopped) pacemaker to configure it.
        # It will be stopped because this transition is stopped->unconfigured.
        return [
            (StartPacemakerStep, {"host": self.pacemaker_configuration.host}),
            (UnconfigurePacemakerStep, {"host": self.pacemaker_configuration.host}),
            (StopPacemakerStep, {"host": self.pacemaker_configuration.host}),
        ]

    def get_deps(self):
        """
        Before Pacemaker operations are possible the host must have had its packages installed.
        Maybe we need a packages object, but this routine at least keeps the detail in one place.
        Also corosync needs to be up and running. This is because configuring pacemaker requires starting pacemaker.
        Or maybe we need an unacceptable_states lists.
        :return:
        """
        if self.pacemaker_configuration.host.state in ["unconfigured", "undeployed"]:
            deps = [DependOn(self.pacemaker_configuration.host, "packages_installed")]
        else:
            deps = []
        deps.append(DependOn(self.pacemaker_configuration.host.corosync_configuration, "started"))
        # Any targets will have to be removed.
        from chroma_core.models import ManagedTargetMount

        for managed_target_mount in ManagedTargetMount.objects.filter(host=self.pacemaker_configuration.host):
            deps.append(DependOn(managed_target_mount.target, "removed"))
        return DependAll(deps)

    @classmethod
    def can_run(cls, instance):
        """We don't want people to unconfigure pacemaker on a node that has a ManagedTargetMount so make the command
        available only when that is not the case.
        :param instance: PacemakerConfiguration instance being queried
        :return: True if no ManagedTargetMounts exist on the host in question.
        """
        from chroma_core.models import ManagedTargetMount

        # .exists() issues an EXISTS query instead of fetching every row
        # just to take len() of the result set.
        return not ManagedTargetMount.objects.filter(host=instance.host).exists()
class StartPacemakerStep(Step):
    """Ask the agent on a host to start pacemaker."""

    idempotent = True

    def run(self, kwargs):
        self.invoke_agent_expect_result(kwargs["host"], "start_pacemaker")

    @classmethod
    def describe(cls, kwargs):
        return help_text["start_pacemaker_on"] % kwargs["host"].fqdn
class StartPacemakerJob(StateChangeJob):
    """State transition stopped -> started: start pacemaker on a host."""

    state_transition = StateChangeJob.StateTransition(PacemakerConfiguration, "stopped", "started")
    stateful_object = "pacemaker_configuration"
    pacemaker_configuration = models.ForeignKey(PacemakerConfiguration, on_delete=CASCADE)
    state_verb = "Start Pacemaker"
    display_group = Job.JOB_GROUPS.COMMON
    display_order = 30

    class Meta:
        app_label = "chroma_core"
        ordering = ["id"]

    @classmethod
    def long_description(cls, stateful_object):
        return help_text["start_pacemaker"]

    def description(self):
        return "Start Pacemaker on %s" % self.pacemaker_configuration.host

    def get_steps(self):
        return [(StartPacemakerStep, {"host": self.pacemaker_configuration.host})]

    def get_deps(self):
        # Pacemaker runs on top of corosync, which must already be started.
        return DependOn(self.pacemaker_configuration.host.corosync_configuration, "started")
class StopPacemakerStep(Step):
    """Ask the agent on a host to stop pacemaker."""

    idempotent = True

    def run(self, kwargs):
        self.invoke_agent_expect_result(kwargs["host"], "stop_pacemaker")

    @classmethod
    def describe(cls, kwargs):
        return help_text["stop_pacemaker_on"] % kwargs["host"].fqdn
class StopPacemakerJob(StateChangeJob):
    """State transition started -> stopped: stop pacemaker on a host."""

    state_transition = StateChangeJob.StateTransition(PacemakerConfiguration, "started", "stopped")
    stateful_object = "pacemaker_configuration"
    pacemaker_configuration = models.ForeignKey(PacemakerConfiguration, on_delete=CASCADE)
    state_verb = "Stop Pacemaker"
    # Stopping pacemaker is unusual; surface it in the "rare" group, last.
    display_group = Job.JOB_GROUPS.RARE
    display_order = 100

    class Meta:
        app_label = "chroma_core"
        ordering = ["id"]

    @classmethod
    def long_description(cls, stateful_object):
        return help_text["stop_pacemaker"]

    def description(self):
        return "Stop Pacemaker on %s" % self.pacemaker_configuration.host

    def get_steps(self):
        return [(StopPacemakerStep, {"host": self.pacemaker_configuration.host})]
class GetPacemakerStateStep(Step):
    """Poll the agent for state and update the host record.

    NOTE(review): despite the class name, this reads the "linux_network"/"lnet"
    plugin data and sets the host state from it — looks copy-pasted from an
    LNet state job; confirm this is intended.
    """

    idempotent = True
    # FIXME: using database=True to do the alerting update inside .set_state but
    # should do it in a completion
    database = True

    def run(self, kwargs):
        from chroma_core.services.job_scheduler.agent_rpc import AgentException

        host = kwargs["host"]
        try:
            lnet_data = self.invoke_agent(host, "device_plugin", {"plugin": "linux_network"})["linux_network"]["lnet"]
            host.set_state(lnet_data["state"])
            host.save(update_fields=["state", "state_modified_at"])
        except TypeError:
            # Older agents return data in a shape that fails the lookup above.
            self.log("Data received from old client. Host %s state cannot be updated until agent is updated" % host)
        except AgentException as e:
            self.log("No data for plugin linux_network from host %s due to exception %s" % (host, e))
class GetPacemakerStateJob(Job):
    """Advisory job that refreshes the recorded pacemaker state for a host."""

    pacemaker_configuration = models.ForeignKey(PacemakerConfiguration, on_delete=CASCADE)
    requires_confirmation = False
    verb = "Get Pacemaker state"

    class Meta:
        app_label = "chroma_core"
        ordering = ["id"]

    def create_locks(self):
        # Write-lock the configuration while its state is being refreshed.
        return [StateLock(job=self, locked_item=self.pacemaker_configuration, write=True)]

    @classmethod
    def get_args(cls, pacemaker_configuration):
        return {"host": pacemaker_configuration.host}

    @classmethod
    def long_description(cls, stateful_object):
        return help_text["pacemaker_state"]

    def description(self):
        return "Get Pacemaker state for %s" % self.pacemaker_configuration.host

    def get_steps(self):
        return [(GetPacemakerStateStep, {"host": self.pacemaker_configuration.host})]
class ConfigureHostFencingJob(Job):
    """Push the host's power-control outlet configuration to its fence agents."""

    host = models.ForeignKey("ManagedHost", on_delete=CASCADE)
    requires_confirmation = False
    verb = "Configure Host Fencing"

    class Meta:
        app_label = "chroma_core"
        ordering = ["id"]

    @classmethod
    def get_args(cls, host):
        return {"host_id": host.id}

    @classmethod
    def long_description(cls, stateful_object):
        return help_text["configure_host_fencing"]

    def description(self):
        return "Configure fencing agent on %s" % self.host

    def create_locks(self):
        # Fencing lives in the pacemaker configuration, so lock that, not the host.
        return [StateLock(job=self, locked_item=self.host.pacemaker_configuration, write=True)]

    def get_steps(self):
        return [(ConfigureHostFencingStep, {"host": self.host})]
class ConfigureHostFencingStep(Step):
    """Build fence-agent kwargs from the host's power-control outlets and
    send them to the agent via "configure_fencing"."""

    idempotent = True
    # Needs database in order to query host outlets
    database = True

    def run(self, kwargs):
        host = kwargs["host"]
        if host.state != "managed":
            raise SchedulingError(
                "Attempted to configure a fencing device while the host %s was in state %s. Expected host to be in state 'managed'. Please ensure your host has completed set up and configure power control again."
                % (host.fqdn, host.state)
            )
        if not host.pacemaker_configuration:
            # Shouldn't normally happen, but makes debugging our own bugs easier.
            # (fixed typo: "Attemped" -> "Attempted")
            raise RuntimeError(
                "Attempted to configure fencing on a host that does not yet have a pacemaker configuration."
            )
        agent_kwargs = []
        for outlet in host.outlets.select_related().all():
            fence_kwargs = {
                "agent": outlet.device.device_type.agent,
                "login": outlet.device.username,
                "password": outlet.device.password,
            }
            # IPMI fencing config doesn't need most of these attributes.
            if outlet.device.is_ipmi and outlet.device.device_type.agent not in ["fence_virsh", "fence_vbox"]:
                fence_kwargs["ipaddr"] = outlet.identifier
                # lanplus is enabled when the device model advertises IPMI 2.0
                fence_kwargs["lanplus"] = "2.0" in outlet.device.device_type.model
            else:
                fence_kwargs["plug"] = outlet.identifier
                fence_kwargs["ipaddr"] = outlet.device.address
                fence_kwargs["ipport"] = outlet.device.port
            agent_kwargs.append(fence_kwargs)
        self.invoke_agent(host, "configure_fencing", {"agents": agent_kwargs})
|
# CSE572
# Assignment 5
# Aditi Baraskar, James Smith, Moumita Laraskar, Tejas Ruikar
# Spring 2019
import pandas as pd
import enum
from sklearn.svm import SVC
from sklearn.metrics import accuracy_score
from sklearn.naive_bayes import GaussianNB
import warnings
warnings.filterwarnings("ignore")
KERNEL = ['linear','poly','rbf']
# for determining parameters for SVM
class TASK(enum.Enum):
    # Selects which per-task SVM hyper-parameters to use
    # (see support_vector_machine_classifier).
    TASK_2 = 1
    TASK_4 = 2
# load the training and test data
# @param trainData file name for training data
# @param testData file name for testing data
def process_and_load_data(trainData, testData):
    """Load train/test CSVs and split each into attributes and gender labels.

    @param trainData file name for training data
    @param testData file name for testing data
    @return (train_attr, train_label, test_attr, test_label)
    """
    columns = ['height', 'age', 'weight', 'gender']
    attr_names = columns[:3]  # everything except the label column
    train_frame = pd.read_csv(trainData, names=columns)
    test_frame = pd.read_csv(testData, names=columns)
    return (train_frame[attr_names], train_frame.gender,
            test_frame[attr_names], test_frame.gender)
# train a naive bayes classifier given the training data and evaluate on the test data
# @param trainData file name for training data
# @param testData file name for testing data
def naive_bayes_classifier(trainData, testData):
    """Fit GaussianNB on the training set and print test accuracy + predictions."""
    train_attr, train_label, test_attr, test_label = process_and_load_data(trainData, testData)
    # (removed dead `features = zip(...)` — it was never consumed; the model
    # is fitted directly on the attribute DataFrame)
    model = GaussianNB()
    model.fit(train_attr, train_label)
    predicted = model.predict(test_attr)
    acc = accuracy_score(test_label, predicted) * 100
    print("accuracy: ", acc)
    print(predicted)
# train a support vector machine given the training data and evaluate on the test data
# @param trainData file name for training data
# @param testData file name for testing data
# @param task the specific task to configure the SVM, [TASK.TASK_2, TASK.TASK_4]
def support_vector_machine_classifier(trainData, testData, task):
    """Train one SVC per kernel in KERNEL and print per-kernel accuracy."""
    train_attr, train_label, test_attr, test_label = process_and_load_data(trainData, testData)
    # (the enumerate index was unused; iterate the kernels directly)
    for ker in KERNEL:
        if ker == 'linear':
            svm_classifier = SVC(kernel=ker)
        elif ker == 'poly':
            if task == TASK.TASK_2:
                # with gamma = 'scale' and C is default, accuracy = 35%
                # with gamma='auto',max_iter=50, accuracy = 70%, but there is a warning
                # "ConvergenceWarning: Solver terminated early (max_iter=50)", Currently warnings are disabled.
                svm_classifier = SVC(kernel=ker, degree=5, gamma='auto', max_iter=50)
            elif task == TASK.TASK_4:
                # with gamma = 'scale' and C is default, accuracy = 36.66%
                # with gamma='auto',max_iter=50, accuracy = 60%, but there is a warning
                # "ConvergenceWarning: Solver terminated early (max_iter=50)", Currently warnings are disabled.
                svm_classifier = SVC(kernel=ker, degree=7, gamma='auto', max_iter=50)
        else:
            # with gamma = 'scale', Task2 accuracy =43.33% and Task4 accuracy =36.66%
            # with gamma = 'auto', Task2 accuracy =100% and Task4 accuracy =36.66%
            # (use `ker` instead of the hard-coded 'rbf'; equivalent here, but
            # stays correct if KERNEL ever grows)
            svm_classifier = SVC(kernel=ker, gamma='auto')
        svm_classifier.fit(train_attr, train_label)
        predict_val = svm_classifier.predict(test_attr)
        acc = accuracy_score(test_label, predict_val) * 100
        print("Kernel: ", ker)
        print("predicted value: ", predict_val)
        print("accuracy: ", acc, "%")
        print("\n")
if __name__ == "__main__":
    # Tasks 1/3: Naive Bayes on datasets PB1/PB2.
    # Tasks 2/4: SVMs with task-specific kernel parameters.
    print("Task 1")
    naive_bayes_classifier('PB1_train.csv', 'PB1_test.csv')
    print("\n")
    print("TASK 2")
    support_vector_machine_classifier('PB1_train.csv', 'PB1_test.csv', TASK.TASK_2)
    print("\n")
    print("Task 3")
    naive_bayes_classifier('PB2_train.csv', 'PB2_test.csv')
    print("\n")
    print("TASK 4")
    support_vector_machine_classifier('PB2_train.csv', 'PB2_test.csv', TASK.TASK_4)
|
from flask import Flask, request, render_template, redirect, flash, session
from flask_debugtoolbar import DebugToolbarExtension
from models import db, connect_db, Photo, User
from secrets import API_SECRET_KEY
from forms import AddPhotoForm, UserForm
from sqlalchemy.exc import IntegrityError
import requests
app = Flask(__name__)
key = API_SECRET_KEY
app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql:///mars_db'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['SQLALCHEMY_ECHO'] = True
app.config['SECRET_KEY'] = "chickenzarecool21837"
app.config['DEBUG_TB_INTERCEPT_REDIRECTS'] = False
debug = DebugToolbarExtension(app)
connect_db(app)
# db.drop_all()
# db.create_all()
API_BASE_URL_PHOTO = "https://api.nasa.gov/mars-photos/api/v1/rovers"
id_arr = []
#VIEWS BELOW:
@app.route('/')
def landing_page():
    # Static landing page.
    return render_template("landing.html")
# Show list (album) of photos
# Show list (album) of photos
@app.route('/mars/images')
def get_images():
    # Current user (if logged in) is passed so the template can show
    # user-specific controls; anonymous visitors still see the album.
    if "user_id" in session:
        user = User.query.get_or_404(session['user_id'])
    else:
        user = None
    photos = Photo.query.all()
    return render_template('mars_images.html', photos=photos, user=user)
@app.route('/mars/images/<int:id>')
def show_image(id):
    # Detail page for a single saved photo; 404 on unknown id.
    photo = Photo.query.get_or_404(id)
    return render_template('mars_image.html', photo=photo)
@app.route('/mars/images/new', methods=['GET', 'POST'])
def see_image():
    # Search form: on valid submit, query the NASA rover-photos API for the
    # requested rover/sol and re-render the form with the results.
    form = AddPhotoForm()
    if form.validate_on_submit():
        rover = form.rover.data
        sol = form.sol.data
        res = requests.get(f"{API_BASE_URL_PHOTO}/{rover}/photos?sol={sol}&api_key={key}")
        data = res.json()
        return render_template('mars_form.html', form=form, data=data)
    else:
        return render_template('mars_form.html', form=form)
@app.route('/mars/images/adds', methods=['POST'])
def add_image():
    """Persist the photos the user checked on the search-results form.

    Requires a logged-in user (``user_id`` in session); each checked URL
    becomes a Photo row owned by that user.
    """
    form = AddPhotoForm()
    if "user_id" not in session:
        # fixed typo in the flash message: was "Pleased Login First!"
        flash('Please Login First!', 'danger')
    else:
        rover = request.form["rover"]
        earth_date = request.form["earth_date"]
        sol = request.form["sol"]
        checklist = request.form.getlist('mycheckbox')
        for url in checklist:
            new_photo = Photo(rover_name=rover, earth_date=earth_date, sol=sol, urls=url, user_id=session['user_id'])
            db.session.add(new_photo)
        # Commit once for the whole batch instead of once per photo.
        db.session.commit()
        flash('Images saved!', 'success')
    return render_template('mars_form.html', form=form)
@app.route('/delete/<int:id>', methods=['GET'])
def delete_image(id):
    # Made this a GET rather than DELETE because DELETE would not work, not great
    # NOTE(review): raises KeyError if no user is logged in (session['user_id'])
    # and does not check that the photo belongs to the current user — confirm
    # whether a session guard / ownership check is wanted here.
    user = User.query.get_or_404(session['user_id'])
    photo = Photo.query.get_or_404(id)
    db.session.delete(photo)
    db.session.commit()
    # Re-renders the album directly instead of redirecting (no PRG pattern).
    photos = Photo.query.all()
    return render_template('mars_images.html', photos=photos, user=user)
@app.route('/register', methods=['GET', 'POST'])
def register_user():
    """Create a new account; log the user in and redirect on success."""
    form = UserForm()
    if form.validate_on_submit():
        username = form.username.data
        password = form.password.data
        new_user = User.register(username, password)
        db.session.add(new_user)
        try:
            db.session.commit()
        except IntegrityError:
            # Duplicate username: roll the session back so it is usable for
            # subsequent requests (previously the failed transaction was left open).
            db.session.rollback()
            form.username.errors.append("Username taken. Please pick another.")
            return render_template('register.html', form=form)
        session['user_id'] = new_user.id
        flash('Welcome! Account Successfully Created!', 'success')
        return redirect('/mars/images')
    return render_template('register.html', form=form)
@app.route('/login', methods=['GET', 'POST'])
def login_user():
    # Authenticate and store the user id in the session on success;
    # on failure, surface a generic error on the username field.
    form = UserForm()
    if form.validate_on_submit():
        username = form.username.data
        password = form.password.data
        user = User.authenticate(username, password)
        if user:
            flash(f"Welcome Back, {user.username}!", "primary")
            session['user_id'] = user.id
            return redirect('/mars/images')
        else:
            form.username.errors = ['Invalid username/password.']
    return render_template('login.html', form=form)
@app.route('/logout')
def logout_user():
    """Log the current user out and return to the landing page."""
    # Default of None prevents a KeyError when hit without an active session.
    session.pop('user_id', None)
    flash("Goodbye!", "info")
    return redirect('/')
|
import os
import sys
sys.path.append(os.path.abspath(os.path.join(__file__, "../../../..")))
import argparse
import json
import logging
import traceback
import v2.lib.resource_op as s3lib
import yaml
from v2.lib.exceptions import RGWBaseException
from v2.lib.resource_op import Config
from v2.lib.s3.write_io_info import BasicIOInfoStructure, IOInfoInitialize
from v2.utils.log import configure_logging
from v2.utils.test_desc import AddTestInfo
lib_dir = os.path.abspath(os.path.join(__file__, "../"))
log = logging.getLogger()
TEST_DATA_PATH = None
def test_exec(config):
    """Create RGW users per *config* (tenanted or non-tenanted) and dump
    their details to user_details.json next to this module.

    Exits the process: 0 on success, 1 on any failure.
    """
    test_info = AddTestInfo("create users")
    io_info_initialize = IOInfoInitialize()
    basic_io_structure = BasicIOInfoStructure()
    io_info_initialize.initialize(basic_io_structure.initial())
    user_detail_file = os.path.join(lib_dir, "user_details.json")
    try:
        test_info.started_info()
        # create a non-tenanted user
        if config.user_type == "non-tenanted":
            all_users_info = s3lib.create_users(config.user_count)
            with open(user_detail_file, "w") as fout:
                json.dump(all_users_info, fout)
            test_info.success_status("non-tenanted users creation completed")
        else:
            log.info("create tenanted users")
            for i in range(config.user_count):
                tenant_name = "tenant" + str(i)
                # NOTE(review): the detail file is rewritten on every iteration,
                # so only the last tenant's users are persisted — confirm intended.
                all_users_info = s3lib.create_tenant_users(
                    config.user_count, tenant_name
                )
                with open(user_detail_file, "w") as fout:
                    json.dump(all_users_info, fout)
            test_info.success_status("tenanted users creation completed")
        test_info.success_status("test passed")
        sys.exit(0)
    except (RGWBaseException, Exception) as e:
        # The original second handler for (RGWBaseException, Exception) was
        # unreachable after `except Exception`; the two identical handlers
        # are merged into one.
        log.info(e)
        log.info(traceback.format_exc())
        test_info.failed_status("user creation failed")
        sys.exit(1)
if __name__ == "__main__":
    test_info = AddTestInfo("user create test")
    test_info.started_info()
    # Test data directory lives at the project root; create it if missing.
    project_dir = os.path.abspath(os.path.join(__file__, "../../.."))
    test_data_dir = "test_data"
    TEST_DATA_PATH = os.path.join(project_dir, test_data_dir)
    log.info("TEST_DATA_PATH: %s" % TEST_DATA_PATH)
    if not os.path.exists(TEST_DATA_PATH):
        log.info("test data dir not exists, creating.. ")
        os.makedirs(TEST_DATA_PATH)
    parser = argparse.ArgumentParser(description="RGW S3 Automation")
    parser.add_argument("-c", dest="config", help="RGW Test yaml configuration")
    parser.add_argument(
        "-log_level",
        dest="log_level",
        help="Set Log Level [DEBUG, INFO, WARNING, ERROR, CRITICAL]",
        default="info",
    )
    args = parser.parse_args()
    yaml_file = args.config
    # Log file is named after the yaml config file.
    log_f_name = os.path.basename(os.path.splitext(yaml_file)[0])
    configure_logging(f_name=log_f_name, set_level=args.log_level.upper())
    config = Config(yaml_file)
    config.read()
    # if config.mapped_sizes is None:
    #     config.mapped_sizes = utils.make_mapped_sizes(config)
    # user_count is read directly from the yaml on top of Config's own parsing.
    with open(yaml_file, "r") as f:
        doc = yaml.safe_load(f)
    config.user_count = doc["config"]["user_count"]
    log.info("user_count:%s\n" % (config.user_count))
    test_exec(config)
|
# tutorial 61
# generate payslip from csv file reportlab
#
# Reads salary_slip_csv.csv and emits one "<employee_id>_payslip.pdf" per row.
import io
from reportlab.pdfgen import canvas
from reportlab.platypus import SimpleDocTemplate,Table,TableStyle,Frame,Spacer,Paragraph
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.lib.units import cm
from reportlab.lib import colors
from nltk.metrics.association import TOTAL  # NOTE(review): unused import — candidate for removal
from num2words import num2words

# Static header shown at the top of every payslip.
company="Total Technology"
address="4a/cc London 5RWQWE UNITED KINGDOM"
month="Salary Slip For Month Of June 2020"

with io.open("salary_slip_csv.csv","r",encoding="utf-8")as f1:
    csv_data=f1.read()
f1.close()  # NOTE(review): redundant — the `with` block already closed f1
file_data=csv_data.split("\n")[1:]  # skip the CSV header row
# NOTE(review): flow_obj is shared across employees; reportlab's addFromList
# consumes drawn flowables, so leftovers may leak between payslips — confirm.
flow_obj=[]
styles=getSampleStyleSheet()
#top 3 row
data=[[company],[address],[month]]
#remaining rows from csv
for rows in file_data:
    # Company header table (3 rows) for this payslip.
    tstyle=TableStyle([("GRID",(0,0),(-1,-1),1,colors.black),
    ("ALIGN",(0,0),(-1,-1),"CENTER")])
    t=Table(data,colWidths=[500],rowHeights=[20,20,20])
    t.setStyle(tstyle)
    flow_obj.append(t)
    row=rows.split(",")
    # One PDF per employee, named "<employee_id>_payslip.pdf".
    pdf=canvas.Canvas(row[0]+"_payslip.pdf")
    pdf.translate(cm, cm)
    # Gross earnings and total deductions summed from the numeric CSV columns.
    total=int(row[6])+int(row[7])+int(row[15])+int(row[17])+int(row[18])+int(row[19])+int(row[20])
    deductions=int(row[12])+int(row[13])+int(row[14])+int(row[16])
    # Payslip body: employee details, then earnings vs deductions.
    row1=["EMPLOYEE_ID",row[0],"EMPLOYEE_NAME",row[8]]
    row2=["PAN",row[1],"DESIGNATION",row[9]]
    row3=["PAY_DAYS",row[3],"TOTAL_DAYS",row[11]]
    row4=["DOJ",row[4],"PAID_LEAVE",row[-2]]
    row5=["DOB",row[5],"UNPAID_LEAVE",row[-1]]
    row6=["EARNINGS","AMOUNT","DEDUCTIONS","AMOUNT"]
    row7=["BASIC",row[6],"PROFESSIONAL_TAX",row[12]]
    row8=["HRA",row[7],"PF",row[13]]
    row9=["MEDICAL_ALLOWANCE",row[15],"RETENTION_DEDUCTION",row[16]]
    row10=["TRANSPORT",row[17],"TDS",row[14]]
    row11=["EDUCATION",row[18],"",""]
    row12=["UNIFORM_ALLOWANCE",row[19],"",""]
    row13=["SPECIAL_ALLOWANCE",row[20],"",""]
    row14=["TOTAL",total,"DEDUCTIONS",deductions]
    data1=[row1,row2,row3,row4,row5,row6,row7,row8,row9,row10,row11,row12,row13,row14]
    t1=Table(data1,colWidths=[100,100,175,125],rowHeights=[20,20,20,20,20,20,20,20,20,20,20,20,20,20])
    tstyle=TableStyle([("GRID",(0,0),(-1,-1),1,colors.black),
    ("ALIGN",(0,0),(-1,-1),"CENTER"),
    ("FONTSIZE",(0,0),(-1,-1),8),
    ("FONTSIZE",(0,5),(-1,-9),12),
    ("BACKGROUND",(0,5),(-1,-9),colors.yellow)])  # highlight the EARNINGS header row
    t1.setStyle(tstyle)
    net_pay=total-deductions
    # NOTE(review): ordinal="currency" makes num2words treat the truthy value
    # as the *ordinal* flag; "to='currency'" was probably intended — confirm.
    words=num2words(net_pay, ordinal="currency", lang="en")
    text1=Paragraph("TOTAL_NETPAY:"+str(net_pay)+" INR",style=styles["Normal"])
    text2=Paragraph("IN_WORDS:"+words+" INR",style=styles["Normal"])
    flow_obj.append(t1)
    flow_obj.append(text1)
    flow_obj.append(text2)
    # Main payslip frame, then signature box, then footer note.
    frame1=Frame(40,100,500,450,showBoundary=1)
    frame1.addFromList(flow_obj, pdf)
    text3=Paragraph("SIGNATURE:",style=styles["Normal"])
    flow_obj1=[]
    flow_obj1.append(text3)
    frame2=Frame(440,100,100,60,showBoundary=1)
    frame2.addFromList(flow_obj1, pdf)
    # NOTE(review): "DOCUENT" typo below is in a rendered string — left as-is
    # in this documentation-only pass; fix would change PDF output.
    text4=Paragraph("""NOTE:THIS IS COMPUTER GENERATED DOCUENT ,HENCE NO PHYSICAL SIGNATURE REQUIRED FOR VERIFICATION
    PLEASE REACH OUT TO THE HR FOR MORE INFORMATION""",style=styles["Normal"])
    frame3=Frame(40,30,500,60,showBoundary=1)
    flow_obj2=[]
    flow_obj2.append(text4)
    frame3.addFromList(flow_obj2, pdf)
    pdf.save()
|
# Example: Get Number information
# NOTE(review): assumes an `api` client object was instantiated earlier
# (not shown in this snippet) — confirm against the library's setup docs.
data = api.get_number_info('+1234567890')
print(data)
## { 'created': '2017-02-10T09:11:50Z',
##   'name' : 'RALEIGH, NC',
##   'number' : '+1234567890',
##   'updated' : '2017-02-10T09:11:50Z'}
|
# Demo of the different ways to import from a module.
import my_module

# Call via the module namespace; greeting() apparently accepts an optional name.
my_module.greeting()
my_module.greeting('dre')

# Import a single name directly into this namespace.
from my_module import greeting

greeting('soul')

# Import with a local alias; the original name remains reachable via the module.
from my_module import pi as p

print(p)
print(my_module.pi)
|
# -*- coding: utf-8 -*-
"""
Created on Sat Jun 22 00:36:26 2019
@author: hweem
"""
import numpy as np
class Sparse_Regression:
    """L1-regularised kernel regression trained with ADMM-style updates.

    NOTE(review): the algorithm name is inferred from the theta/z/u update
    equations in optimize_sparse — confirm against the driving script.
    """

    def __init__(self, l, h, p, mx):
        self.l = l  # L1 regularisation weight
        self.h = h  # Gaussian kernel bandwidth
        self.p = p  # shuffle
        self.X = None  # .shape = (nx, mx)
        self.Y = None  # .shape = (1, mx)
        self.K = None
        self.mx = mx  # number of training samples / parameter dimension
        self.parameters = {}
        self.pred_y = []  # per-fold predictions recorded by cross_val
        self.loss = []  # per-fold L2 losses recorded by cross_val
        self.epochs = []  # epochs-to-converge per optimize_sparse call
        self.train_x_list, self.train_y_list, self.test_x_list, self.test_y_list \
            = None, None, None, None

    def initialize_parameters(self, mx):
        # Random-normal initialisation of the primal (theta), auxiliary
        # sparse copy (z) and scaled dual (u) variables.
        self.parameters['theta'] = np.random.randn(1, mx)
        self.parameters['z'] = np.random.randn(1, mx)
        self.parameters['u'] = np.random.randn(1, mx)

    def optimize_sparse(self, PI, parameters, l, Y, threshold=.3):
        """Iterate the updates until the change in theta drops below *threshold*."""
        num_epoch = 0
        # Loop-invariant factors of the theta update, hoisted out of the loop.
        temp1 = np.linalg.inv(PI.T.dot(PI) + np.identity(self.mx))
        temp2 = PI.T.dot(Y.T)
        while True:
            num_epoch += 1
            print('epoch: ' + str(num_epoch))
            theta = self.parameters['theta']
            z = self.parameters['z']  # .shape = (1, mx)
            u = self.parameters['u']  # .shape = (1, mx)
            # .shape = (1, mx)
            new_theta = temp1.dot(temp2 + z.T - u.T).T  # .shape = (1, mx)
            # Soft-thresholding (shrinkage) of theta + u with threshold l.
            new_z = np.max(np.concatenate([np.zeros((1, self.mx)), new_theta + u - l * np.ones((1, self.mx))]), axis=0, keepdims=True) \
                - np.max(np.concatenate([np.zeros((1, self.mx)), - new_theta - u - l * np.ones((1, self.mx))]), axis=0, keepdims=True)
            # .shape = (1, mx)
            new_u = u + new_theta - new_z
            self.parameters['theta'] = new_theta
            self.parameters['z'] = new_z
            self.parameters['u'] = new_u
            # Convergence diagnostics: L2 change of each variable.
            l2_t = np.linalg.norm(new_theta - theta)
            l2_z = np.linalg.norm(new_z - z)
            l2_u = np.linalg.norm(new_u - u)
            print('l2_t = ' + str(round(l2_t, 5)))
            print('l2_z = ' + str(round(l2_z, 5)))
            print('l2_u = ' + str(round(l2_u, 5)))
            if l2_t < threshold:
                break
        self.epochs.append(num_epoch)

    def gen_mat_kernel(self, X, C, h):
        """Gaussian kernel matrix between columns of X and columns of C."""
        # X.shape = (nx, mx)
        # C.shape = (nx, mc)
        mat_kernel = X[:, :, None] - C[:, None]  # .shape = (nx, mx, mc)
        # .shape = (mx, mc)
        return np.exp(- np.linalg.norm(mat_kernel, axis=0, keepdims=False) / 2 / h**2)

    def cross_val(self, train_x, test_x, test_y, h, parameters):
        """Predict on a held-out fold; record the prediction and its L2 loss."""
        K_test = self.gen_mat_kernel(train_x, test_x, h)  # .shape = (mtrain, mtest)
        theta = self.parameters['theta']
        pred_y = K_test.T.dot(theta.T).T  # .shape = (1, mtest)
        self.pred_y.append(pred_y)
        self.loss.append(np.linalg.norm(pred_y - test_y))
# https://leetcode.com/problems/path-sum/discuss/36486/Python-solutions-(DFS-recursively-DFS%2Bstack-BFS%2Bqueue)
# https://leetcode.com/problems/path-sum/submissions/
# Definition for a binary tree node.
class TreeNode:
    """A binary tree node holding a value and optional left/right children."""

    def __init__(self, val=0, left=None, right=None):
        self.val, self.left, self.right = val, left, right
class Solution:
    # this is recursive, DFS
    def isLeaf(self, node):
        # True when the node has no children (node itself is assumed non-None).
        # if not node:
        #     return True
        return not node.left and not node.right

    def DFS(self, node, sumsofar, target):
        # Depth-first search for a root-to-leaf path whose values sum to target.
        if not node:
            return False
        sumsofar += node.val
        if sumsofar == target and self.isLeaf(node):
            return True
        found = False
        if node.left:
            found = self.DFS(node.left, sumsofar, target)
        # Short-circuit: only search the right subtree if the left one failed.
        if node.right and not found:
            found = self.DFS(node.right, sumsofar, target)
        return found

    def hasPathSum(self, root: TreeNode, targetSum: int) -> bool:
        """Return True iff some root-to-leaf path sums to targetSum."""
        return self.DFS(root, 0, targetSum)
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# coding=utf-8
"""
@author: Li Tian
@contact: 694317828@qq.com
@software: pycharm
@file: sample_data_deal.py
@time: 2019/2/8 20:30
@desc: Input-data processing framework (TFRecord -> preprocessed batches).
"""
from figuredata_deal.figure_deal_test2 import preprocess_for_train
import tensorflow as tf

# Build the file list and create an input-file queue from it. Before invoking
# this pipeline, all raw data must be converted to a uniform format and stored
# as TFRecord files; the pattern below should match every training TFRecord.
files = tf.train.match_filenames_once('file_pattern-*')
filename_queue = tf.train.string_input_producer(files, shuffle=False)

# Parse the TFRecord data: `image` holds the raw image bytes, `label` is the
# example's class label, and height/width/channels give the image dimensions.
reader = tf.TFRecordReader()
_, serialized_example = reader.read(filename_queue)
features = tf.parse_single_example(
    serialized_example,
    features={
        'image': tf.FixedLenFeature([], tf.string),
        'label': tf.FixedLenFeature([], tf.int64),
        'height': tf.FixedLenFeature([], tf.int64),
        'width': tf.FixedLenFeature([], tf.int64),
        'channels': tf.FixedLenFeature([], tf.int64),
    }
)
image, label = features['image'], features['label']
height, width = features['height'], features['width']
channels = features['channels']

# Decode the raw bytes into a pixel matrix and restore the image shape.
decoded_image = tf.decode_raw(image, tf.uint8)
decoded_image.set_shape([height, width, channels])
# Input size expected by the network's first layer.
image_size = 299
# preprocess_for_train is the image preprocessing routine imported above.
distorted_image = preprocess_for_train(decoded_image, image_size, image_size, None)

# Assemble preprocessed images and labels into shuffled training batches.
min_after_dequeue = 10000
batch_size = 100
capacity = min_after_dequeue + 3 * batch_size
image_batch, label_batch = tf.train.shuffle_batch([distorted_image, label], batch_size=batch_size, capacity=capacity, min_after_dequeue=min_after_dequeue)

# Define the network and its optimisation: image_batch feeds the input layer,
# label_batch supplies the ground-truth answers for the batch.
# Learning rate:
learning_rate = 0.01
# NOTE(review): `inference` and `cal_loss` are not defined or imported in this
# file — running it as-is raises NameError; confirm where they come from.
logit = inference(image_batch)
loss = cal_loss(logit, label_batch)
train_step = tf.train.GradientDescentOptimizer(learning_rate).minimize(loss)

# Create a session and run the training loop.
with tf.Session() as sess:
    # Training preparation: variable initialisation and queue-runner threads.
    sess.run((tf.global_variables_initializer(), tf.local_variables_initializer()))
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(sess=sess, coord=coord)
    # Training loop.
    TRAINING_ROUNDS = 5000
    for i in range(TRAINING_ROUNDS):
        sess.run(train_step)
    # Stop all threads.
    coord.request_stop()
    coord.join(threads)
import logging
import os
import chainer
import numpy as np
class GraphDataset(chainer.dataset.DatasetMixin):
    """Particle-trajectory dataset (locations + velocities) with per-episode
    interaction-graph edges, loaded from .npy files."""

    def __init__(self, loc_npy, vel_npy, edges_npy):
        self.features, self.edges = self.prepare_datasets(loc_npy, vel_npy, edges_npy)
        # features shape: (num_episodes, num_nodes, timesteps, num_features)
        self.num_episodes = self.features.shape[0]
        self.num_nodes = self.features.shape[1]
        self.timesteps = self.features.shape[2]
        self.num_features = self.features.shape[3]
        logger = logging.getLogger(__name__)
        logger.info('num_episodes: {}'.format(self.num_episodes))
        logger.info('num_nodes: {}'.format(self.num_nodes))
        logger.info('timesteps: {}'.format(self.timesteps))
        logger.info('num_features: {}'.format(self.num_features))

    def __len__(self):
        return len(self.features)

    def prepare_datasets(self, loc_npy, vel_npy, edges_npy):
        """ Load data from .npy files.
        The shape of loc: (num_samples, num_timesteps, 2, num_nodes)
        "2" means that a location is represented in a 2D coordinate.
        The shape of vel: (num_samples, num_timesteps, 2, num_nodes)
        "2" means that a velocity is represented in a 2D vector.
        The shape of edge: (num_samples, num_nodes, num_nodes)
        It has a connectivity matrix of the nodes.
        """
        loc = np.load(loc_npy)
        vel = np.load(vel_npy)
        edges = np.load(edges_npy)
        # [num_samples, num_timesteps, num_dims, num_atoms]
        num_nodes = loc.shape[3]
        loc_max = loc.max()
        loc_min = loc.min()
        vel_max = vel.max()
        vel_min = vel.min()
        # Normalize to [-1, 1]
        loc = (loc - loc_min) * 2 / (loc_max - loc_min) - 1
        vel = (vel - vel_min) * 2 / (vel_max - vel_min) - 1
        # Reshape to: [num_sims, num_atoms, num_timesteps, num_dims]
        loc = np.transpose(loc, [0, 3, 1, 2])
        vel = np.transpose(vel, [0, 3, 1, 2])
        features = np.concatenate([loc, vel], axis=3)
        # Flatten each episode's adjacency matrix into a row vector.
        edges = np.reshape(edges, [-1, num_nodes ** 2])
        # Map edge values from {-1, +1} to {0, 1}; assumes that input range —
        # TODO confirm against the simulation that writes edges_npy.
        edges = np.array((edges + 1) / 2)
        # Exclude self edges
        off_diag_idx = np.ravel_multi_index(
            np.where(np.ones((num_nodes, num_nodes)) - np.eye(num_nodes)),
            [num_nodes, num_nodes])
        edges = edges[:, off_diag_idx]
        return features.astype(np.float32), edges.astype(np.int32)

    def get_example(self, i):
        # Returns (features, edges) for episode i, per DatasetMixin's protocol.
        return self.features[i], self.edges[i]
|
import numpy as np
name = ['Alice', 'Bob', 'Cathy', 'Doug']
age = [25, 45, 37, 19]
weight = [55.0, 85.5, 68.0, 61.5]
data = np.zeros(4, dtype={'names': ('name', 'age', 'weight'), 'formats': ('U10', 'i4', 'f8')})
# print(data.dtype)
data['name'] = name
data['age'] = age
data['weight'] = weight
print(data)
|
from django.contrib import admin
from photos.models import Photo
class PhotoAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'side_of_street', 'x_offset', 'width', 'height')
admin.site.register(Photo, PhotoAdmin)
|
from options.test_options import TestOptions
from data import DataLoader
from models import create_model
from util.writer import Writer
from models.layers import mesh_prepare
import os
import numpy as np
def add_adverserial_examples(dataset):
#dataset.dataset.paths = []
return
def run_test(epoch=-1, vertices = None, faces= None, label= None, attack = False):
print('Running Test')
opt = TestOptions().parse()
opt.serial_batches = True # no shuffle
dataset = DataLoader(opt)
model = create_model(opt)
writer = Writer(opt)
# test
writer.reset_counter()
for i, data in enumerate(dataset):
if i==4 and attack == True:
for i in range(4):
attacked_data = mesh_prepare.rebuild_mesh(vertices, faces)
#data['label'][0] = label
data['edge_features'][-i-1] = attacked_data.features# data['mesh'][0].features
model.set_input(data)
ncorrect, nexamples = model.test()
writer.update_counter(ncorrect, nexamples)
writer.print_acc(epoch, writer.acc)
return writer.acc
def extract_data_of_attacked_meshes(path_to_walker_meshes):
paths = os.listdir(path_to_walker_meshes)
paths_to_meshes = [path for path in paths if path.__contains__('_attacked')]
for mesh_path in paths_to_meshes:
orig_mesh_data = np.load(path_to_walker_meshes + mesh_path, encoding='latin1', allow_pickle=True)
attacked_mesh_data = {k: v for k, v in orig_mesh_data.items()}
vertices, faces, label = attacked_mesh_data['vertices'], attacked_mesh_data['faces'], attacked_mesh_data['label']
run_test(vertices=vertices, faces=faces, label=label, attack=True)
if __name__ == '__main__':
extract_data_of_attacked_meshes(path_to_walker_meshes = 'datasets_processed/shrec11/')
|
# Determines if a year is a leap year or not
#
# Creates a function which calculate if year is leap or not
def lpyear(year):
if year>=1582:
if (year%4)!=0:
return False
elif (year%100)!=0:
return True
elif (year%400)!=0:
return False
else:
return True
else:
return False
yearData=[1900,2000,2016,1987]
print("\nExercise for Using Functions with Parameters\n")
for i in range(len(yearData)):
yr=yearData[i]
print(yr,"->",end="")
res=lpyear(yr)
if res==True:
print("OK")
else:
print("FAILED")
|
a = input()
alist = list(a)
m = []
ind = 0
while a:
alist = alist[::-1]
for el in alist:
if ind % 3 == 2:
alist[ind] = ''
ind += 1
ares = ''.join(alist)
m.append(ares)
a = input()
alist = list(a)
ind = 0
for word in m:
print(word)
|
#morphologicsl transformation are some simple operations based on the image shape ,normally performed on binary images
#kernal is generally a shape which we are going to apply on image
import cv2
from matplotlib import pyplot as pyp
import numpy as np
#img=cv2.imread('smarties.png',cv2.IMREAD_GRAYSCALE)
img2=cv2.imread('j5.png')
#_,mask= cv2.threshold(img,200,255,cv2.THRESH_BINARY_INV)
kernel=np.ones((2,2),np.uint8)
dillation=cv2.dilate(img2,kernel,iterations=3)
errosion=cv2.erode(img2,kernel,iterations=6)
opening=cv2.morphologyEx(img2,cv2.MORPH_OPEN,kernel)
closing=cv2.morphologyEx(img2,cv2.MORPH_CLOSE,kernel)
mg=cv2.morphologyEx(img2,cv2.MORPH_GRADIENT,kernel)
tophat=cv2.morphologyEx(img2,cv2.MORPH_TOPHAT,kernel)
#tittles=['original img','mask','dillation','errosion','opening','closing','tophat','mg']
tittles=['original img','dillation','errosion','opening','closing','tophat','mg']
#images=[img,mask,dillation,errosion,opening,closing,mg,tophat]
images=[img2,dillation,errosion,opening,closing,mg,tophat]
for i in range(7):
pyp.subplot(3,4,1+i) ,pyp.imshow(images[i],'gray')
pyp.xticks([]),pyp.yticks([])
pyp.title(tittles[i])
pyp.show() |
# -*- coding: utf-8 -*-
# @Time : 2019-06-28 11:28
# @Email : aaronlzxian@163.com
# @File : module_parser_test.py
import os
import unittest
from abyss.module_parser import ModuleParser
directory = os.path.join(os.path.dirname(os.path.abspath(__file__)) + os.path.sep + "..", 'build/build')
if not os.path.exists(directory):
os.makedirs(directory)
class ModuleParserTest(unittest.TestCase):
def setUp(self):
self.module_parser = ModuleParser(directory)
def test_modify_modules(self):
# self.modules = self.module_parser.modify_modules(["y/build.gradle", 'x/build.gradle', 'z/src/main'])
self.modules = self.module_parser.modify_modules(["src/views/pay/payOnline.vue", 'src/assets/css/index.css'])
print(self.modules)
|
# Generated by Django 3.2.7 on 2021-10-09 22:40
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('servicios', '0002_auto_20210311_1855'),
]
operations = [
migrations.AddField(
model_name='servicio',
name='direccion',
field=models.CharField(max_length=500, null=True),
),
]
|
# user_input=input('Write a word')
# # print(user_input[-2:])
# if user_input[-2:]in(('th','sh','ch')):
# print(user_input +'es')
# if user_input[-1] in ('s','x'):
# print(user_input+'es')
# # else print(user_input+'s'):
# #bin-max
# highest_num=0
# places=20
# for n in range(256):
# highest_num=0
# #compute highest number for n- places
# for p in range(0,n):
# highest_num+=2**p #add up binary value
# print(n, 'bits lets you go up to',high
# nums=(1,2,4,6)
# print(nums[1:3])
# names= ('hi','will')
# print(names.count('will'))
# word='hat'
# print(word=word+'s')
# phrase='hi there. whats up'
# tuple(phrase.split('hi', 'there'))
|
import nose
from nose.tools import ok_
from nose.plugins.attrib import attr
from birdfeeder.utils import humanize_filesize
def test_humanize_filesize():
print humanize_filesize(20000000)
assert humanize_filesize(256) == '256.0B'
assert humanize_filesize(1024) == '1.0K'
assert humanize_filesize(1048576) == '1.0M'
assert humanize_filesize(2000000) == '1.9M'
assert humanize_filesize(157286400) == '150.0M'
assert humanize_filesize(20000000) == '19.1M'
assert humanize_filesize(1073741824) == '1.0G'
assert humanize_filesize(5000000000) == '4.7G'
|
# -*- coding: utf-8 -*-
from time import clock
import zlib
from math import acos, radians, pi
from numpy import ones, cos, array, sin
from typing import Tuple, NamedTuple
'General tools'
'This is arbitrary, and will change in the tests'
SEED = 0x23587643
def dhash(*data):
"""Generates a random-looking deterministic hash"""
return abs(zlib.adler32(bytes(str(data), 'UTF-8')) * 100) * SEED % 0xffffffff
## The move from python 2 to 3 caused some problems.
class DMS(NamedTuple):
""" DMS := Degrees, Minutes, Seconds """
degrees: int
minutes: int
seconds: int
@staticmethod
def float2dms(decimal_degrees: float) -> 'DMS':
degrees = int(decimal_degrees)
minutes = int(60 * (decimal_degrees - degrees))
seconds = int(3600 * (decimal_degrees - degrees - minutes / 60))
return DMS(degrees, minutes, seconds)
def to_float(self):
return self.degrees + self.minutes / 60 + self.seconds / 3600
def compute_distance(pnt1: Tuple[float, float], pnt2: Tuple[float, float]) -> float:
'''computes distance in Meters'''
'''
This code was borrowed from
http://www.johndcook.com/python_longitude_latitude.html
'''
lat1, lon1 = pnt1
lat2, lon2 = pnt2
if (lat1, lon1) == (lat2, lon2):
return 0.0
if max(abs(lat1 - lat2), abs(lon1 - lon2)) < 0.00001:
return 0.001
phi1 = radians(90 - lat1)
phi2 = radians(90 - lat2)
meter_units_factor = 40000 / (2 * pi)
arc = acos(sin(phi1) * sin(phi2) * cos(radians(lon1) - radians(lon2))
+ cos(phi1) * cos(phi2))
return max(0.0, arc * meter_units_factor * 1000)
def base_traffic_pattern():
''' Creates a base traffic pattern:
we can go at max speed (divide by 1)
traffic gets worse at 6 AM and 3 PM, with peak at 8 AM and 5 PM,
and then it subsides again within 2 hours'''
base_pattern = ones(60 * 24)
base_pattern[(60 * 6):(10 * 60)] += cos(((array(range(4 * 60)) / (4 * 60)) - 0.5) * pi)
base_pattern[(15 * 60):(19 * 60)] += base_pattern[(60 * 6):(10 * 60)]
return list(base_pattern)
def generate_traffic_noise_params(seed1, seed2) -> Tuple[float, float]:
''' generates some parameters for the traffic noise
It should look random, and it is symmetrical
(Can't think why it has to be symmetrical, but it would be easy enough to
modify it not to be if need be) '''
## FIXME: Should It only be positive addition to the multiplier? A* needs an optimistic hueristic
wavelength_cos = 60 + 20 * (dhash(seed1 + seed2) / 0xffffffff) - 10
wavelength_sin = 60 + 20 * (dhash(seed1 * seed2) / 0xffffffff) - 10
return wavelength_cos, wavelength_sin
def timed(f):
'''decorator for printing the timing of functions
usage:
@timed
def some_funcion(args...):'''
def wrap(*x, **d):
start = clock()
res = f(*x, **d)
print("{}: {:.2f}sec".format(f.__name__, clock() - start))
return res
return wrap
if __name__ == '__main__':
for i in range(100):
print(dhash(i))
|
# -*- encoding: utf-8
from os.path import dirname
import enum
from database.utils import submodule_initialization, set_logger
from database.const import DataFormatCategory, NaS, FilledStatus, ENCODING, REL_PATH_SEP
SUBMODULE_NAME = 'jsondb'
# 设置日志选项
DB_CONFIG = submodule_initialization(SUBMODULE_NAME, dirname(__file__))
LOGGER_NAME = set_logger(DB_CONFIG['log'])
# 文件后缀
SUFFIX = '.json'
# 元数据文件名称
METADATA_FILENAME = 'metadata.json'
|
# -*- coding: utf-8 -*-
"""
Created on Sun Oct 18 00:58:22 2020
@author: jalil garcia jeronimo
"""
print ("introdusca un numero")#se solicita un dato desde el teclado
num=int(input())#se almacena el dato introducido desde el teclado
def factorial(num):#Funcion recursiva
if num == 0 or num == 1:#condicion o caso base
return 1
else:
return num * factorial(num - 1)#se llama la funcion asi mismo,creando un buclea hasta llegar a igualar la condicion o funcion
print (factorial(num)) #se imprime el valor obtenido del factorial |
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
# useful for handling different item types with a single interface
from itemadapter import ItemAdapter
import os
class ScrappagePipeline:
def process_item(self, item, spider):
try:
with open('../routes/exception_routes.conf') as f:
for ruta in f.readlines():
if item['url_route'].find(ruta) != -1:
f.close()
item['url_route']="/"
f.close()
except FileNotFoundError as e:
print("Error de archivo - " + e.strerror)
except IOError as e:
print("Error de existencia de archivo - " + e.strerror)
finally:
return item
|
# Não se esqueça de incluir o módulo numpy
# Use o navegador Chrome, para copiar/colar a entrada de exemplo
from numpy import*
v = array(eval(input("Insira um vetor: ")))
#Quant elemento
print(size(v))
#Primeiro elmento
print(v[0])
#Ultimo elemento
print(v[-1])
#Maior elemento
print(max(v))
#Menor elemento
print(min(v))
#Soma dos elementos
print(sum(v))
#Media dos elementos
print(round(sum(v) / size(v), 2)) |
N = int(input())
arr = list(map(int, input().split()))
if N == 2:
for i in range(1, min(arr) + 1):
if arr[0] % i == 0 and arr[1] % i == 0:
print(i)
else:
for i in range(1, min(arr) + 1):
if arr[0] % i == 0 and arr[1] % i == 0 and arr[2] % i == 0:
print(i) |
import configparser
import os
import instaloader
from instaloader.lateststamps import LatestStamps
from configparser import ConfigParser
from datetime import datetime
import pytz
import telebot
from telethon.sync import TelegramClient
from telethon.tl.types import InputPeerUser, InputPeerChannel
from telethon import TelegramClient, sync, events
DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"
config = ConfigParser()
# make config if not created
configFile = "config.ini"
configPath = os.path.join(os.getcwd(), configFile)
if not os.path.isfile(configPath):
file = open(configFile, "w")
file.close()
config.read(configFile)
if not "main" in config.sections():
initDate = str(datetime.datetime(1970, 1, 1))
config.add_section("main")
config.set("main", "latestUpload", initDate)
with open("config.ini", "w") as f:
config.write(f)
# Instaloader Stuff
L = instaloader.Instaloader(
filename_pattern="{profile}/{date_utc:%Y-%m-%d}/{profile}---{date_utc}---UTC_{typename}",
save_metadata=False,
)
latestStamp = LatestStamps("./latest-stamps.ini")
USER = config.get("config", "igUSER")
PASSWORD = config.get("config", "igPASS")
userIds = [19208572604] # sdr.face
userNames = ["sdr.face"]
profiles = {"kornilova.sdr": 26791128260, "sdr.face": 19208572604}
# Optionally, login or load session
# L.login(USER, PASSWORD) # (login)
# L.interactive_login(USER) # (ask password on terminal)
logged = False
try:
L.load_session_from_file(USER) # (load session created `instaloader -l USERNAME`)
logged = True
# except:
# L.login(USER, PASSWORD)
# logged = True
except:
print("login failed")
logged = False
if logged == True:
# profile = L.check_profile_id("sdr.face")
# get story latest date
# for story in L.get_stories(userids=userIds):
# print("story latest date: " + str(story.latest_media_utc))
# download single story items one by one
# for item in story.get_items():
# # print(item)
# # L.format_filename(item)
# L.download_storyitem(item, "stories_py") # only checks if file exists
# download stories
L.download_stories(
userids=list(profiles.values()),
fast_update=False,
filename_target="stories",
storyitem_filter=None,
latest_stamps=latestStamp,
)
# print latest timestamp
# for name in userNames:
# print("latest stamp: " + str(latestStamp.get_last_story_timestamp(name)))
path = os.path.join(os.getcwd(), "stories")
filetypes = [".jpg", ".png", ".webp", ".gif", ".mp4"]
profileFilesToSend = {}
# scan folders and files in 'stories' folder. process each folder individually. profile by profile
# print(path)
if os.path.isdir(path):
# print(os.listdir(path))
for profileDir in os.listdir(path):
profilePath = os.path.join(path, profileDir)
# print(profilePath)
filesToSend = {}
# in profile folder process dates folders. each date - separate message.
for dateDir in os.listdir(profilePath):
datePath = os.path.join(profilePath, dateDir)
# print(os.listdir(datePath))
fileList = []
for file in os.listdir(datePath):
filePath = os.path.join(datePath, file)
if os.path.isfile(filePath):
# print(file)
# get only media files from date folder and send it to telegram
ext = os.path.splitext(file)[1]
if ext.lower() in filetypes:
# print("sending file")
# add file to filesToSend list
fileList.append(file)
else:
print("skipping file")
filesToSend[dateDir] = fileList
profileFilesToSend[profileDir] = filesToSend
print(profileFilesToSend)
# make 'latest_uploaded' config file. Initial - no date. Then get latestStamp and process found files with date in filename until the latestStamp. Stop at latestStamp. Set 'latest_uploaded' to latestStamp
# if no 'latest uploaded' file - create one. with no date
# set 'latest uploaded' to latestStamp after successful upload
latestUpload = config.get("main", "latestUpload")
print(latestUpload)
utc = pytz.UTC
latestUploadDate = datetime.strptime(latestUpload, DATETIME_FORMAT)
latestUploadDate = utc.localize(latestUploadDate) # convert to utc for camparison
# check filenames in filesToSend list from 'latest uploaded' date up to latestStamp for this profile
for name in profiles:
# todo add user profiles level in filesToSend dict
latestStampDate = latestStamp.get_last_story_timestamp(profiles[name])
if latestUploadDate < latestStampDate:
for date in filesToSend: # upload, change latest Uploaded
# print(date)
for file in filesToSend[date][:]:
# print(file)
fileDate = file.split("---")[1]
fileDate = datetime.strptime(fileDate, "%Y-%m-%d_%H-%M-%S")
fileDate = utc.localize(fileDate)
# print(fileDate)
if (
latestUploadDate > fileDate
): # remove file from list if older than last upload
print("removing " + file)
filesToSend[date].remove(file)
# todo get date from filename. if date > latestUploadDate -> upload
# else -> delete from list
else: # do nothing
filesToSend = {}
print("skipping upload. everything's up to date")
# TELEGRAM PART
# get your api_id, api_hash, token
# from telegram as described above
api_id = config.get("config", "tgApiId")
api_hash = config.get("config", "tgApiHash")
token = config.get("config", "tgApiToken")
message = "Working..."
# your phone number
phone = config.get("config", "tgPhone")
# creating a telegram session and assigning
# it to a variable client
client = TelegramClient("session", api_id, api_hash)
# connecting and building the session
client.connect()
# in case of script ran first time it will
# ask either to input token or otp sent to
# number or sent or your telegram id
if not client.is_user_authorized():
client.send_code_request(phone)
# signing in the client
client.sign_in(phone, input("Enter the code: "))
try:
# destination_user_username or channel
destination_channel_id = config.get("config", "tgChannel")
entity = client.get_entity(destination_channel_id)
# sending message using telegram client
for date in filesToSend:
fileAlbum = []
albumCaption = userNames[0] + ": " + date
for file in filesToSend[date]:
fileAlbum.append(
os.path.join(os.getcwd(), "stories", userNames[0], date, file)
)
# print(fileAlbum)
# client.send_message(
# entity, str(filesToSend[date]), parse_mode="html"
# ) # "me" works
client.send_file(entity, fileAlbum, caption=userNames[0] + ": " + date)
uploaded = True
except Exception as e:
uploaded = False
# there may be many error coming in while like peer
# error, wwrong access_hash, flood_error, etc
print(e)
# disconnecting the telegram session
client.disconnect()
# UPDATE LAST TIMESTAMP
# todo add multiprofile
if uploaded == True:
latestUploadDate = datetime.strftime(
latestStamp.get_last_story_timestamp(userNames[0]), DATETIME_FORMAT
) # update latest upload
config.set("main", "latestUpload", str(latestUploadDate))
with open("config.ini", "w") as f:
config.write(f)
# print(filesToSend)
|
from django.contrib import admin
from django.utils.html import format_html
from .models import Deliver_Executive,ongoing_delivery
# Register your models here.
@admin.register(Deliver_Executive)
class DeliveryAdmin(admin.ModelAdmin):
list_display = (
'id',
'name',
'contract_name',
'phone_number',
'address',
'account_actions',
)
def account_actions(self, obj):
# print(dir(obj))
# print(obj.pk)
text_html = "<a>Error</a>"
try:
qrcd = obj.qr_code_data
qrcd = qrcd.tobytes()
text_html = '<a class="button" download="{}-Qr-Code.png" href="{}">Download Qr Code</a>'.format(obj.name,qrcd.decode('utf-8'))
except Exception as exp:
print("Error at admin : ",exp)
return format_html(
text_html
)
account_actions.short_description = 'Account Actions'
account_actions.allow_tags = True
@admin.register(ongoing_delivery)
class OnGoingDelivery(admin.ModelAdmin):
list_display = (
'name',
'on_going',
'date_started',
)
|
def concha(arr):
sub = len(arr)//2
while sub:
for st in range(sub):
conchaEntra(arr,st,sub)
print("After increments of size ", sub, " the list is ", arr)
sub=sub//2
return arr
def conchaEntra(arr,st,gap):
for i in range(st+gap, len(arr),gap):
cur = arr[i]
pos=i
while pos>=gap and arr[pos-gap]>cur:
arr[pos]=arr[pos-gap]
pos=pos-gap
arr[pos]=cur |
import sys
def load_mappings(fname):
fp=open(fname)
CLAWS_TO_POS={}
for line in fp:
words=line.split()
CLAWS_TO_POS[words[1].lower()]=words[0].lower()
return CLAWS_TO_POS
def strip_trailing_integers(s):
leading_string=""
lastplusone=len(s)
for i in range(len(s)):
if(s[i:i+1].isdigit()):
lastplusone=i
break
return s[:lastplusone]
#print strip_trailing_integers("krall123");
CLAWS_TO_POS=load_mappings("claws_pos_mappings.txt")
input_file=open(sys.argv[1])
output_file=open(sys.argv[2],"w")
frequency_counts={}
for line in input_file:
words=line.split()
posngramcount=int(words[0])
ngram_length=(len(words)-1)/2
ngram=""
#print line
#print words
for i in range(1,ngram_length+1):
current_word=words[i]
current_pos_tag=words[i+ngram_length]
#print current_pos_tag
transformed_pos_tag=CLAWS_TO_POS[strip_trailing_integers(current_pos_tag.lower())].upper()
transformed_word=current_word.lower()
ngram+=transformed_word+"_"+transformed_pos_tag+"\t"
if ngram in frequency_counts:
frequency_counts[ngram]+=posngramcount
else:
frequency_counts[ngram]=posngramcount
for key in frequency_counts:
output_file.write(str(frequency_counts[key])+"\t"+str(key)+"\n")
|
#!/usr/bin/python
"""
ZetCode wxPython tutorial
In this example, we create a wx.html.HtmlWindow widget.
author: Jan Bodnar
website: www.zetcode.com
last modified: May 2018
"""
import wx
import wx.html
class Example(wx.Frame):
def __init__(self, *args, **kw):
super(Example, self).__init__(*args, **kw)
self.InitUI()
def InitUI(self):
panel = wx.Panel(self)
vbox = wx.BoxSizer(wx.VERTICAL)
hbox = wx.BoxSizer(wx.HORIZONTAL)
htmlwin = wx.html.HtmlWindow(panel, wx.ID_ANY, style=wx.NO_BORDER)
htmlwin.SetStandardFonts()
htmlwin.LoadPage("page.html")
vbox.Add((-1, 10), 0)
vbox.Add(htmlwin, 1, wx.EXPAND | wx.ALL, 9)
bitmap = wx.StaticBitmap(panel, wx.ID_ANY, wx.Bitmap('newt.png'))
hbox.Add(bitmap, 0, wx.LEFT | wx.BOTTOM | wx.TOP, 10)
btnOk = wx.Button(panel, wx.ID_ANY, 'Ok')
self.Bind(wx.EVT_BUTTON, self.OnClose, id=btnOk.GetId())
hbox.Add((100, -1), 1, wx.EXPAND | wx.ALIGN_RIGHT)
hbox.Add(btnOk, flag=wx.TOP | wx.BOTTOM | wx.RIGHT, border=10)
vbox.Add(hbox, 0, wx.EXPAND)
panel.SetSizer(vbox)
self.SetTitle('Basic statistics')
self.Centre()
def OnClose(self, event):
self.Close()
def main():
app = wx.App()
ex = Example(None)
ex.Show()
app.MainLoop()
if __name__ == '__main__':
main()
|
import cv2
coffeePic = cv2.imread('coffee.jpg')
#changing picture to grayscale
gray = cv2.cvtColor(coffeePic, cv2.COLOR_BGR2GRAY)
cv2.imshow("Grayed Image",gray)
#writing the gray Image as newfile.jpg
cv2.imwrite("newfile.jpg",gray)
cv2.waitKey(0) |
from bs4 import BeautifulSoup
import urllib.request
import re # dk what this is && scared of removing it
import time
import tkinter as tk
from PIL import ImageTk, Image
import os
import requests
from io import BytesIO
# test
url_list = [] # contains the urls in string form of all the short screener webpages
links_with_text = [] # contains the names of anything containing href in the webpage
base_url = "https://finviz.com/screener.ashx?v=111&f=sh_short_o20" # links to the finviz screener with all the short float above 20% stocks
for i in range(1, 261, 20):
if i == 1:
url_list.append(base_url)
else:
url_list.append(base_url + "&r=" + str(i))
# moving to the next page in the short screener means adding &r=21 then &r=41 then &r=61 to the end of the url (still in string form though right now)
for url_string in url_list:
page = urllib.request.urlopen(url_string)
try:
page = urllib.request.urlopen(url_string)
except:
print("An error occured.")
soup = BeautifulSoup(page, 'html.parser')
for a in soup.find_all('a', href=True):
if a.text:
links_with_text.append(a['href'])
def getTheStockName(quote_url_string):
stock_name =""
for y in quote_url_string:
if y.isupper():
stock_name += y
return stock_name
def actualSetConstructor():
stock_set = {'AAPL'}
for x in links_with_text:
if x[0] == 'q' and x[10] == '?':
stock_name = getTheStockName(x)
stock_set.add(stock_name)
return stock_set
def chartPicQuoteConstructor(stock_set): # CREATES A LIST OF THE CHARTPIC LINKS FOR EACH STOCK
stock_link_list = []
for i in stock_set:
chart_pic_link = 'https://finviz.com/chart.ashx?t=' + i + '&ty=c&ta=1&p=d&s=l'
stock_link_list.append(chart_pic_link)
return stock_link_list
stock_link_list = chartPicQuoteConstructor(actualSetConstructor())
def getImageForDisplay(link): # this is an image that can be displayed in a tkinter Label
img_url = link
response = requests.get(img_url)
img_data = response.content
prepro1_img = Image.open(BytesIO(img_data))
prepro2_img = prepro1_img.resize((100, 100), Image.ANTIALIAS)
img = ImageTk.PhotoImage(prepro2_img)
return img
root = tk.Tk()
counter =0
packing_x = 0
packing_y = 0
print(stock_link_list)
# use this in testin.py. This is a list all the links to the chart pics for different stocks.
img_list = []
'''
|
from tkinter import *
import solve as solved
#how to use a class to with Tkinter
def main():
class Application(Frame):
""" Gui application with 3 methods"""
def __init__(self,master):
#inilitilazes the frame
Frame.__init__(self,master)
self.grid()
self.equation = ""
self.keyPad()
#Shows the keypad
def keyPad(self):
Label(self, text="Calculator Program",width = "19", height = "1", bg = "red" , justify = CENTER ).grid(row=0, columnspan=4, sticky=W+E)
self.EQ_entry = Text(self,width = 10,height = 1, wrap =WORD)
self.EQ_entry.grid(row=1, columnspan=4, sticky=W+E)
Button(self, text=" 1 ",command = lambda: self.add_Item(1), width = "4", height = "2").grid(row = 3, column = 0, sticky = 'W')
Button(self, text=" 2 ",command = lambda: self.add_Item(2), width = "4", height = "2").grid(row = 3, column = 1, sticky = W )
Button(self, text=" 3 ",command = lambda: self.add_Item(3), width = "4", height = "2").grid(row = 3, column = 2, sticky = 'W')
Button(self, text=" * ",command = lambda: self.add_Item("*"), width = "4", height = "2").grid(row = 3, column = 3, sticky = 'W')
Button(self, text=" 4 ",command = lambda: self.add_Item(4), width = "4", height = "2").grid(row = 4, column = 0, sticky = 'W')
Button(self, text=" 5 ",command = lambda: self.add_Item(5), width = "4", height = "2").grid(row = 4, column = 1, sticky = 'W')
Button(self, text=" 6 ",command = lambda: self.add_Item(6), width = "4", height = "2").grid(row = 4, column = 2,sticky = 'W')
Button(self, text=" / ",command = lambda: self.add_Item("/"), width = "4", height = "2").grid(row=4, column=3, sticky='W')
Button(self, text=" 7 ",command = lambda: self.add_Item(7), width = "4", height = "2").grid(row = 5, column = 0, sticky = 'W')
Button(self, text=" 8 ",command = lambda: self.add_Item(8), width = "4", height = "2").grid(row = 5, column = 1, sticky = 'W')
Button(self, text=" 9 ",command = lambda: self.add_Item(9), width = "4", height = "2").grid(row = 5, column = 2, sticky = 'W')
Button(self, text=" + ",command = lambda: self.add_Item("+"), width = "4", height = "2").grid(row=5, column = 3, sticky = 'W')
Button(self, text=" 0 ",command = lambda: self.add_Item(0), width = "4", height = "2").grid(row=6, column = 1, sticky = 'W')
Button(self, text=" = ",command = lambda: self.solve(), width = "4", height = "2").grid(row=6,column = 2, sticky='W')
Button(self, text=" - ", command=lambda: self.add_Item("-"), width = "4", height = "2").grid(row=6,column = 3, sticky='W')
Button(self,text ="C", width = "4", height = "2", command= lambda:self.clear()).grid(row=6, column = 0, sticky = 'W')
def add_Item(self,nextNum):
self.equation = self.equation +str( nextNum)
self.EQ_entry.delete(0.0,END)
self.EQ_entry.insert(0.0,self.equation)
def solve(self):
equation = solved.solve(self.equation)
self.EQ_entry.delete(0.0, END)
self.EQ_entry.insert(0.0, equation )
self.equation=equation
def clear(self):
self.EQ_entry.delete(0.0, END)
self.EQ_entry.insert(0.0,"")
self.equation = self.equation*0
root = Tk()
root.title("Python buttons")
root.geometry("150x200")
app = Application(root)
root.mainloop()
if __name__ == '__main__':
main()
|
class Solution:
def lexicalOrder(self, n):
stack, res = [], []
start = 1
while len(res) < n:
if start <= n:
res.append(start)
stack.append(start)
start *= 10
else:
start = stack.pop()
while start % 10 == 9:
start = stack.pop()
start += 1
return res
def lexicalOrder1(self, n):
self.n = n
self.res = []
for i in range(1, 10):
self.f(i)
return self.res
def f(self, base):
if base > self.n:
return
self.res.append(base)
for i in range(0, 10):
self.f(base * 10 + i)
|
#!/usr/bin/env python
import os
from Gallimaufry.USB import USB
here = os.path.dirname(os.path.realpath(__file__))
def test_device_string_descriptor():
pcap = USB(os.path.join(here,"examples","webcam","logitech_C310_enum.pcapng"))
device = pcap.devices[0]
assert device.string_descriptors == {2: '7DC902A0'}
assert len(device.configurations[0].interfaces[0].uvc) == 8
assert len(device.configurations[0].interfaces[1].uvc) == 42
|
import base64
import binascii
#Initial value and expected output
h = '49276d206b696c6c696e6720796f757220627261696e206c696b65206120706f69736f6e6f7573206d757368726f6f6d'
solution = 'SSdtIGtpbGxpbmcgeW91ciBicmFpbiBsaWtlIGEgcG9pc29ub3VzIG11c2hyb29t'
#Convert from HEX to BINARY and then BINARY to BASE64
def hexToBase64(string):
decoded = binascii.unhexlify(string)
return base64.b64encode(decoded)
proposal = hexToBase64(h)
#Check if the method used works
if (proposal == solution):
print ("Success!")
else:
print("Nope")
|
from django.urls import path
from . import views
app_name = "authenticate"
urlpatterns = [
path("", views.home, name="home"),
path("login/", views.login_user, name="login"),
path("logout/", views.logout_user, name="logout"),
path("register/", views.register_user, name="register"),
path("edit_profile/", views.edit_profile, name="edit_profile"),
path("change_password/", views.change_password, name="change_password"),
path("loan_application", views.loan_application, name="loan_application"),
path(
"loan_application/status/<str:loan_id>",
views.LoanStatus.as_view(),
name="loan_status",
),
]
|
from django import forms
from.models import Venta
class VentaForm(forms.ModelForm):
class Meta: #para indicar de que modelo se va a crear el formulario
model = Venta
fields = [
'codigo_venta',
'id_producto',
'fecha_venta',
'cantidad',
'valor_venta',
]
labels = {
'codig_venta' : 'Codigo',
'id_producto': 'Producto',
'fecha_venta' : 'Fecha venta',
'cantidad' : 'Cantidad',
'valor_venta' : 'Valor',
}
widgets = {
'codigo_venta': forms.TextInput(attrs={'class':'form-control'}),
'id_producto': forms.Select(attrs={'class':'form-control'}),
'fecha_venta': forms.TextInput(attrs={'class':'form-control'}),
'cantidad': forms.TextInput(attrs={'class':'form-control'}),
'valor_venta': forms.TextInput(attrs={'class':'form-control'}),
} |
#!/usr/bin/python
import urllib.request
import os, sys
from gmap_utils import *
import time
import random
import json
def download_tiles(zoom, lat_start, lat_stop, lon_start, lon_stop, satellite=True):
start_x, start_y = latlon2xy(zoom, lat_start, lon_start)
stop_x, stop_y = latlon2xy(zoom, lat_stop, lon_stop)
print(start_x, start_y, " ", stop_x, stop_y)
print ("x range", start_x, stop_x)
print ("y range", start_y, stop_y)
total = abs(start_x - stop_x) * abs(stop_y - start_y)
print("total", total)
user_agent = 'Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; de-at) AppleWebKit/533.21.1 (KHTML, like Gecko) Version/5.0.5 Safari/533.21.1'
headers = { 'User-Agent' : user_agent }
now = 0
for x in range(start_x, stop_x):
for y in range(start_y, stop_y):
url = None
filename = None
#https://www.google.cn/maps/vt?lyrs=s@817&gl=cn&x=1&y=2&z=2
#https://www.google.cn/maps/vt?lyrs=s@817&gl=cn&x=3417&y=1607&z=12
urlf1 = "http://mt0.google.cn/vt/lyrs=s@817&gl=cn&x=%d&y=%d&z=%d"
urlf2 = "https://www.google.cn/maps/vt?lyrs=s@817&gl=cn&x=%d&y=%d&z=%d"
if satellite:
url = urlf2 % (x,y,zoom) #kh?v=87&hl=en&x=%d&y=%d&z=%d" % (x, y, zoom
filename = "maps/%d/%d_%d_%d_s.jpg" % (zoom, zoom, x, y)
if not os.path.isdir("maps/%d" % zoom) :
os.mkdir("maps/%d" % zoom)
if not os.path.exists(filename):
bytes = None
try:
print("start request ", url)
req = urllib.request.Request(url, data=None, headers=headers)
response = urllib.request.urlopen(req)
bytes = response.read()
except Exception as e:
print ("--", filename, "->", e)
sys.exit(1)
if bytes.startswith("<html>".encode('utf-8')):
print ("-- forbidden", filename)
sys.exit(1)
print ("-- saving", filename)
now = now + 1
print("progress -- %d/%d"%(now,total))
f = open(filename,'wb')
f.write(bytes)
f.close()
time.sleep(1 + random.random())
else:
now = now + 1
print(filename, "already exist")
if __name__ == "__main__":
    # The lat/lon box and zoom are read from a JSON config file passed as
    # the first command-line argument, e.g.:
    #   {"startx": 6.6, "stopx": 6.7, "starty": 46.53, "stopy": 46.49, "zoom": 15}
    # (startx/stopx are longitudes, starty/stopy are latitudes)
    for i, arg in enumerate(sys.argv):
        print("arg", i, arg)
    if len(sys.argv) < 2:
        # Previously sys.argv[1] was indexed unconditionally and crashed
        # with IndexError when no config file was given.
        print("usage: %s <config.json>" % sys.argv[0])
        sys.exit(1)
    with open(sys.argv[1], mode="r", encoding='utf-8') as f:
        cfg = json.load(f)
    lon_start = cfg["startx"]
    lon_stop = cfg["stopx"]
    lat_start = cfg["starty"]
    lat_stop = cfg["stopy"]
    zoom = cfg["zoom"]
    download_tiles(zoom, lat_start, lat_stop, lon_start, lon_stop, satellite=True)
import asyncio
import aiohttp
import logging
import os
import sys
import getopt
async def check_cert(q_main, q_good, q_bad):
    """Worker coroutine: pull hostnames from *q_main* forever and sort them.

    A hostname whose HTTPS GET succeeds goes to *q_good*; any failure
    (certificate error, other SSL error, or anything else such as DNS
    failure) sends it to *q_bad*.  Runs until cancelled by the caller.
    """
    while True:
        hostname = await q_main.get()
        # Fresh connector + session per hostname so no TLS state is reused
        # between checks.
        conn = aiohttp.TCPConnector()
        session = aiohttp.ClientSession(connector=conn)
        success = False
        try:
            await session.get(hostname)
            success = True
        except aiohttp.ClientConnectorCertificateError as e:
            # Invalid / untrusted certificate.
            pass
        except aiohttp.ClientConnectorSSLError as e:
            # Other SSL-level failure.
            pass
        except Exception as e:
            # DNS failure, connection refused, malformed hostname, ...
            # print("({}) bad hostname ?".format(hostname))
            pass
        await session.close()
        if success:
            await q_good.put(hostname)
        else:
            await q_bad.put(hostname)
        q_main.task_done()
async def read_file(filename, q):
    """Read hostnames from *filename*, normalize each to an https:// URL
    (replacing any existing scheme), and feed them into queue *q*."""
    with open(filename, "r") as source:
        for raw_line in source:
            parts = raw_line.strip().split("://")
            url = "https://" + parts[-1]
            await q.put(url)
async def write_output_file(file, queue):
while True:
line = await queue.get()
file.write(line + os.linesep); file.flush()
queue.task_done()
async def main(argv):
    """Check every hostname in an input file over HTTPS and split the
    results into the 'Valid' / 'Unvalid' output files.

    Command line: -i/--input <file with one hostname per line>,
    -j/--jobs <number of concurrent checker workers> (default 4).
    """
    logging.getLogger('asyncio').setLevel(logging.CRITICAL)
    filename = "urls"
    filename_good_urls = "Valid"
    filename_bad_urls = "Unvalid"
    jobs = 4
    if len(argv) > 1:
        try:
            opts, args = getopt.getopt(argv[1::], "hi:j:", ["input=", "jobs="])
        except getopt.GetoptError:
            print("./" + argv[0] + " -i <inputfile> -j <jobs>")
            sys.exit(2)
        for opt, arg in opts:
            if opt == '-h':
                print("./" + argv[0] + " -i <inputfile> -j <jobs>")
                sys.exit(0)
            if opt in ("-i", "--input"):
                filename = arg
            if opt in ("-j", "--jobs"):
                jobs = int(arg)
    # Bounded queues so the file reader cannot run far ahead of the workers.
    queue_from_file = asyncio.Queue(jobs + 1)
    queue_cert_good = asyncio.Queue(jobs)
    queue_cert_bad = asyncio.Queue(jobs)
    workers = []
    for i in range(jobs):
        workers.append(asyncio.create_task(
            check_cert(queue_from_file, queue_cert_good, queue_cert_bad)))
    try:
        f_good = open(filename_good_urls, "w")
        f_bad = open(filename_bad_urls, "w")
    except OSError as e:  # was a bare `except:` that hid the actual error
        print("Cannot open output files.", e)
        sys.exit(-1)
    t_wr_good = asyncio.create_task(write_output_file(f_good, queue_cert_good))
    t_wr_bad = asyncio.create_task(write_output_file(f_bad, queue_cert_bad))
    await asyncio.gather(asyncio.create_task(
        read_file(filename, queue_from_file)))
    # Wait until every hostname has been checked and every result written.
    await queue_from_file.join()
    await queue_cert_good.join()
    await queue_cert_bad.join()
    for worker in workers:
        worker.cancel()
    t_wr_bad.cancel()
    t_wr_good.cancel()
    # Close the output files (they were previously leaked).
    f_good.close()
    f_bad.close()
    print("Done")
if __name__ == '__main__':
    # asyncio.run creates the event loop and blocks until main() finishes;
    # debug mode is off to avoid slow-callback warnings during normal runs.
    asyncio.run(main(sys.argv), debug=False)
|
import os
from bokeh.models import CustomJS, CustomAction, PolyEditTool
from holoviews.streams import Stream, PolyEdit, PolyDraw
from holoviews.plotting.bokeh.callbacks import CDSCallback
from geoviews.plotting.bokeh.callbacks import GeoPolyEditCallback, GeoPolyDrawCallback
from .models.custom_tools import PolyVertexEditTool, PolyVertexDrawTool
class PolyVertexEdit(PolyEdit):
    """
    Attaches a PolyVertexEditTool and syncs the datasource.

    shared: boolean
        Whether PolyEditTools should be shared between multiple elements

    node_style: dict
        A dictionary specifying the style options for the intermediate nodes.

    feature_style: dict
        A dictionary specifying the style options for the feature (end) nodes.
    """

    def __init__(self, node_style=None, feature_style=None, **params):
        # Avoid mutable default arguments: the previous `{}` defaults were
        # shared across every instance created without an explicit style.
        self.node_style = {} if node_style is None else node_style
        self.feature_style = {} if feature_style is None else feature_style
        super(PolyVertexEdit, self).__init__(**params)
class PolyVertexDraw(PolyDraw):
    """
    Attaches a PolyVertexDrawTool and syncs the datasource.

    shared: boolean
        Whether PolyEditTools should be shared between multiple elements

    node_style: dict
        A dictionary specifying the style options for the intermediate nodes.

    feature_style: dict
        A dictionary specifying the style options for the feature (end) nodes.
    """

    def __init__(self, node_style=None, feature_style=None, **params):
        # Avoid mutable default arguments: the previous `{}` defaults were
        # shared across every instance created without an explicit style.
        self.node_style = {} if node_style is None else node_style
        self.feature_style = {} if feature_style is None else feature_style
        super(PolyVertexDraw, self).__init__(**params)
class PolyVertexEditCallback(GeoPolyEditCallback):
    """Bokeh callback wiring a PolyVertexEditTool to a PolyVertexEdit
    stream, plus a custom toolbar action that splits a path at the
    currently selected vertex."""

    # JS run by the "Split path" toolbar action: locate the polyline that
    # the vertex datasource belongs to, split its coordinate columns at the
    # selected vertex index, then clear the vertex datasource/selection.
    split_code = """
        var vcds = vertex.data_source
        var vertices = vcds.selected.indices;
        var pcds = poly.data_source;
        var index = null;
        for (i = 0; i < pcds.data.xs.length; i++) {
            if (pcds.data.xs[i] === vcds.data.x) {
                index = i;
            }
        }
        if ((index == null) || !vertices.length) {return}
        var vertex = vertices[0];
        for (col of poly.data_source.columns()) {
            var data = pcds.data[col][index];
            var first = data.slice(0, vertex+1)
            var second = data.slice(vertex)
            pcds.data[col][index] = first
            pcds.data[col].splice(index+1, 0, second)
        }
        for (c of vcds.columns()) {
            vcds.data[c] = [];
        }
        pcds.change.emit()
        pcds.properties.data.change.emit()
        pcds.selection_manager.clear();
        vcds.change.emit()
        vcds.properties.data.change.emit()
        vcds.selection_manager.clear();
    """

    # Icon shown on the custom "Split path" toolbar button.
    icon = os.path.join(os.path.dirname(__file__), 'icons', 'PolyBreak.png')

    def _create_vertex_split_link(self, action, poly_renderer,
                                  vertex_renderer, vertex_tool):
        # Attach the split JS above to the custom toolbar action.
        cb = CustomJS(code=self.split_code, args={
            'poly': poly_renderer, 'vertex': vertex_renderer, 'tool': vertex_tool})
        action.callback = cb

    def initialize(self, plot_id=None):
        """Create (or re-use) the PolyVertexEditTool on the plot and hook
        up the split action, then fall back to the generic CDS sync."""
        plot = self.plot
        stream = self.streams[0]
        element = self.plot.current_frame
        vertex_tool = None
        if all(s.shared for s in self.streams):
            # Re-use an existing PolyEditTool when every stream opts into
            # sharing.
            tools = [tool for tool in plot.state.tools if isinstance(tool, PolyEditTool)]
            vertex_tool = tools[0] if tools else None
        renderer = plot.handles['glyph_renderer']
        if vertex_tool is None:
            # Scatter renderer used to display the editable vertices.
            vertex_style = dict({'size': 10, 'alpha': 0.8}, **stream.vertex_style)
            r1 = plot.state.scatter([], [], **vertex_style)
            tooltip = '%s Draw Tool' % type(element).__name__
            vertex_tool = PolyVertexEditTool(
                vertex_renderer=r1, custom_tooltip=tooltip, node_style=stream.node_style,
                end_style=stream.feature_style)
            action = CustomAction(action_tooltip='Split path', icon=self.icon)
            plot.state.add_tools(vertex_tool, action)
            self._create_vertex_split_link(action, renderer, r1, vertex_tool)
        vertex_tool.renderers.append(renderer)
        self._update_cds_vdims()
        # Deliberately bypass GeoPolyEditCallback.initialize: the tool was
        # already created/configured above, only the CDS sync is needed.
        CDSCallback.initialize(self, plot_id)
class PolyVertexDrawCallback(GeoPolyDrawCallback):
    """Bokeh callback that attaches a PolyVertexDrawTool (with styled
    intermediate/end nodes) for a PolyVertexDraw stream."""

    def initialize(self, plot_id=None):
        """Build the PolyVertexDrawTool from the stream's options and add
        it to the plot, then run the inherited CDS initialization."""
        plot = self.plot
        stream = self.streams[0]
        element = self.plot.current_frame
        kwargs = {}
        if stream.num_objects:
            kwargs['num_objects'] = stream.num_objects
        if stream.show_vertices:
            # Extra scatter renderer used to display the drawn vertices.
            vertex_style = dict({'size': 10}, **stream.vertex_style)
            r1 = plot.state.scatter([], [], **vertex_style)
            kwargs['vertex_renderer'] = r1
        tooltip = '%s Draw Tool' % type(element).__name__
        poly_tool = PolyVertexDrawTool(
            drag=all(s.drag for s in self.streams),
            empty_value=stream.empty_value,
            renderers=[plot.handles['glyph_renderer']],
            node_style=stream.node_style,
            end_style=stream.feature_style,
            custom_tooltip=tooltip,
            **kwargs)
        plot.state.tools.append(poly_tool)
        self._update_cds_vdims()
        # Skip GeoPolyDrawCallback.initialize (the tool was created above);
        # call the grandparent initialization instead.
        super(GeoPolyDrawCallback, self).initialize(plot_id)
# Register the streams with their bokeh callback implementations so
# HoloViews instantiates them when a plot using these streams is rendered.
callbacks = Stream._callbacks['bokeh']
callbacks[PolyVertexEdit] = PolyVertexEditCallback
callbacks[PolyVertexDraw] = PolyVertexDrawCallback
|
import matplotlib.pyplot as plt
import numpy as np
import smoothcurve
import statistics
import csv
# Activity samples CSV; every 288 rows are treated as one day (see the
# `% 288` chunking in the functions below).
file = open("csv/activity.csv", "r")
csvReader = csv.reader(file)
def getStepsPerDay(file, reader):
    """Return the total step count for each full day.

    Reads rows from *reader* (skipping the header row), treating every 288
    rows as one day and "NA" samples as 0 steps.  A trailing partial day is
    discarded.  Rewinds *file* when done so the reader can be reused.

    Parameters:
        file: the underlying file object (rewound via seek(0) at the end).
        reader: a csv.reader over *file*; each row's first column is a
            step count or "NA".

    Returns:
        list[int]: total steps per day.
    """
    next(reader)  # skip the CSV header row
    steps = []
    stepsToday = 0
    # Iterate the reader directly instead of the old manual
    # while/next/StopIteration loop.
    for count, row in enumerate(reader, start=1):
        sample = row[0]
        stepsToday += int(sample) if sample != "NA" else 0
        if count % 288 == 0:
            steps.append(stepsToday)
            stepsToday = 0
    file.seek(0)
    return steps
def getMedianMeanPerDay(file, reader):
    """Return the median and mean of the step samples for each full day.

    Reads rows from *reader* (skipping the header row), treating every 288
    rows as one day and "NA" samples as 0 steps.  A trailing partial day is
    discarded.  Rewinds *file* when done so the reader can be reused.

    Parameters:
        file: the underlying file object (rewound via seek(0) at the end).
        reader: a csv.reader over *file*; each row's first column is a
            step count or "NA".

    Returns:
        tuple[list, list]: (median per day, mean per day).
    """
    next(reader)  # skip the CSV header row
    medianPerDay = []
    meanPerDay = []
    samples = []
    for count, row in enumerate(reader, start=1):
        sample = row[0]
        samples.append(int(sample) if sample != "NA" else 0)
        if count % 288 == 0:
            # statistics.median sorts internally, so the old explicit
            # samples.sort() call was redundant.
            medianPerDay.append(statistics.median(samples))
            meanPerDay.append(statistics.mean(samples))
            samples = []
    file.seek(0)
    return medianPerDay, meanPerDay
def makePlot(steps, median, mean):
    """Show a 2x2 figure with smoothed curves of daily steps, median, and
    mean (one point per day).  The fourth subplot (ax4) is left unused.
    Blocks until the window is closed (plt.show).
    """
    # Day numbers, 1-based, one per entry in `steps`.
    x = [x+1 for x in range(len(steps))]
    fig, ((ax1, ax2), (ax3, ax4)) = plt.subplots(2, 2)
    fig.suptitle('Activity')
    ax1.set_title("Steps")
    # smoothcurve.make returns plottable (x, y) args -- project helper.
    ax1.plot(*smoothcurve.make(x, steps))
    ax2.set_title("Median")
    ax2.plot(*smoothcurve.make(x, median))
    ax3.set_title("Mean")
    ax3.plot(*smoothcurve.make(x, mean))
    plt.tight_layout()
    plt.show()
# Script body: compute the daily statistics from the CSV and show the plots.
steps = getStepsPerDay(file, csvReader)
median, mean = getMedianMeanPerDay(file, csvReader)
# print(mean)
makePlot(steps, median, mean)
|
# Greedily delete m digits from num so the remaining digits form the
# largest possible number (monotonic-stack technique).
num, m = map(int, input().split())
# Turn the number into a list of its digits.
digits = list(map(int, str(num)))
kept = []
for digit in digits:
    # While deletions remain, pop any smaller digit off the top of the
    # stack -- placing the larger digit earlier always increases the result.
    while kept and m > 0 and kept[-1] < digit:
        kept.pop()
        m -= 1
    kept.append(digit)
# If the digits were non-increasing we may not have used all m deletions:
# drop the remaining ones from the tail.
if m != 0:
    kept = kept[:-m]
print(''.join(map(str, kept)))
# -*- coding: utf-8 -*-
import scrapy
import time
import unicodedata
from bs4 import BeautifulSoup
from shfine.items import ShfineItem
# Base URL of the Shanghai urban-management fine site; relative case links
# scraped from search pages are joined onto this.
head_url = 'http://183.194.249.79/web/'
# Search entry point per district; the `organ` GUID selects the district's
# enforcement bureau ('zongdui' is the city-level corps).
url_dict = {'jingan': 'http://183.194.249.79/web/search.aspx?keyword=&type=1&organ=3df201d9-c63e-4c54-b9a5-e252737cf31f',
            'pudong': 'http://183.194.249.79/web/search.aspx?keyword=&type=1&organ=b2327449-70d8-4478-9fc5-d6aa497b3b88',
            'huangpu': 'http://183.194.249.79/web/search.aspx?keyword=&type=1&organ=70d9f0e8-a1c4-416b-a07a-e2f0907f147f',
            'xuhui': 'http://183.194.249.79/web/search.aspx?keyword=&type=1&organ=19c7bc7d-4b4a-4da0-bdf5-7dd0f173ae3e',
            'changning': 'http://183.194.249.79/web/search.aspx?keyword=&type=1&organ=30529bf1-ed50-4b88-95b4-f85ebcf6d8fe',
            'putuo': 'http://183.194.249.79/web/search.aspx?keyword=&type=1&organ=3837ddba-8f4c-4874-9d97-f6c017b103de',
            'hongkou': 'http://183.194.249.79/web/search.aspx?keyword=&type=1&organ=259de04e-88d9-4d35-8685-0c9c3d240e55',
            'yangpu': 'http://183.194.249.79/web/search.aspx?keyword=&type=1&organ=21bb4894-bd36-4a8f-92ee-c55015f18568',
            'baoshan': 'http://183.194.249.79/web/search.aspx?keyword=&type=1&organ=2b03c0b7-14c7-4169-8746-b3dda9e3d872',
            'minhang': 'http://183.194.249.79/web/search.aspx?keyword=&type=1&organ=730e0c38-1c09-435c-8bad-f8ae09bfc358',
            'jiading': 'http://183.194.249.79/web/search.aspx?keyword=&type=1&organ=09cdbfd9-7c3a-4671-9d23-752ea78c2406',
            'jinshan': 'http://183.194.249.79/web/search.aspx?keyword=&type=1&organ=2e6a515f-25dd-4a79-a4cb-32d50b4b7ad6',
            'songjiang': 'http://183.194.249.79/web/search.aspx?keyword=&type=1&organ=c476b5b3-289c-4f9b-81cd-c42bd7330e2a',
            'qingpu': 'http://183.194.249.79/web/search.aspx?keyword=&type=1&organ=16cae63a-df85-4ae0-89b4-e8a68b6649c3',
            'fengxian': 'http://183.194.249.79/web/search.aspx?keyword=&type=1&organ=5ae1dedc-b4a1-4c1e-858e-95a5d1c21a3e',
            'chongming': 'http://183.194.249.79/web/search.aspx?keyword=&type=1&organ=baf07198-efbd-44af-976e-af794986ef01',
            'zongdui': 'http://183.194.249.79/web/search.aspx?keyword=&type=1&organ=1a7adcbf-bf99-4f31-91e3-660717ba13a1'}
class ShfineSpider(scrapy.Spider):
    """
    Spider for the urban management fine in Shanghai.

    The meta of `scrapy.Request` means different catagories of the webpage.
    `Page = 0` means the main page which contains all the entries of cases.
    `Page = 1` means the detailed sub-page of each case.
    `Page = 2` means this `scrapy.Request` is for the next page (by clicking the "next" button using Selenium).

    References:
        https://blog.csdn.net/qq_43004728/article/details/84636468 (Very useful!)
        https://github.com/clemfromspace/scrapy-selenium
        https://www.pluralsight.com/guides/advanced-web-scraping-tactics-python-playbook
        https://www.cnblogs.com/miners/p/9049498.html
    """
    name = 'shfine'  # ShangHai fine
    allowed_domains = ['www.cgzf.sh.gov.cn']

    def __init__(self, district, max_page=None):
        '''
        Parameters:
            district (str): name of district in Pinyin (such as 'pudong').
            max_page (int): maximum number of pages to crawl.
                NOTE(review): stored but never read in this file --
                presumably consumed by a middleware; confirm before removing.
        '''
        self.max_page = max_page
        if district in url_dict.keys():
            self.url = url_dict[district]
            self.district = district
        else:
            raise ValueError(
                f"Your input district `{district}` is not supported! Use `district='zongdui'` for 城管总队. Please check carefully!")
        super().__init__()

    def start_requests(self):
        # Kick off crawling at the district's search page (meta page '0').
        print(f'### Crawling data for "{self.district} district"')
        print('Current page: 1')
        yield scrapy.Request(self.url, callback=self.parse, dont_filter=True, meta={'page': '0'})

    def parse(self, response):
        """Parse one search-result page: yield a request for each case's
        detail sub-page, then re-request the same URL with meta page '2'
        (a downstream middleware presumably advances to the next page via
        Selenium -- confirm)."""
        print(response.url)
        soup = BeautifulSoup(response.body, "lxml")
        item = ShfineItem()
        for ii, case in enumerate(soup.select("ul li")):
            item['date'] = case.select_one("span.id").text
            item['title'] = case.select_one("span.title").text
            item['bureau'] = case.select_one("span.opt").text
            # Case links are relative; prepend the site base URL.
            item['url'] = head_url + case.select_one("a").attrs['href']
            print(item['title'], item['date'])
            yield scrapy.Request(url=item['url'], callback=self.parse_case, dont_filter=True, meta={'page': "1"})
        yield scrapy.Request(url=response.url, callback=self.parse, meta={'page': "2"}, dont_filter=True)

    def parse_case(self, response):
        """Parse one case detail sub-page into a flat dict of fields."""
        # Go to the subpage of each case
        small_soup = BeautifulSoup(response.body, "lxml")
        if '企业或组织名称' in small_soup.select_one('table').text:
            # Company / organisation case: the table has an extra
            # company-name cell.
            page_info = dict(zip(['case_name', 'fine_id', 'company_name', 'person_name', 'person_id',
                                  'fine_reason', 'fine_law', 'fine_sum', 'institute',
                                  'fine_date', 'memo'],
                                 [item.text.strip() for item in small_soup.select('table td')])
                             )
        else:
            # Individual case: no company-name cell, so pad it with ''.
            page_info = dict(zip(['case_name', 'fine_id', 'person_name', 'person_id',
                                  'fine_reason', 'fine_law', 'fine_sum', 'institute',
                                  'fine_date', 'memo'],
                                 [item.text.strip() for item in small_soup.select('table td')])
                             )
            page_info['company_name'] = ''
        for key in page_info.keys():
            # Normalize full-width/compatibility characters to canonical form.
            page_info[key] = unicodedata.normalize("NFKD", page_info[key])
        yield page_info
|
from airflow.hooks.postgres_hook import PostgresHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
import os
import pandas as pd
import numpy as np
from datetime import datetime
import gzip
from elasticsearch import Elasticsearch
import elasticsearch.helpers
import json
from collections import deque
from airflow.hooks.base_hook import BaseHook
from airflow import AirflowException
class StageEmployeesOperator(BaseOperator):
    """Airflow operator that stages employee data from a CSV file into a
    Postgres table.

    If the target table already holds at least ``min_table_size`` rows the
    existing data is considered valid and the load is skipped; otherwise
    the table is dropped, re-created, and loaded via COPY.
    """

    ui_color = '#89DA59'

    @apply_defaults
    def __init__(self,
                 connection_id='',
                 table_name='employees',
                 min_table_size=0,
                 table_create_sql='',
                 table_drop_sql='',
                 csv_header_order='',
                 csv_file_path='',
                 *args, **kwargs):
        '''Constructor

        Parameters:
            connection_id: name of the Airflow Postgres connection to use.
            table_name: target table (default 'employees').
            min_table_size: row count below which the table is re-staged.
            table_create_sql / table_drop_sql: DDL used to rebuild the table.
            csv_header_order: comma-separated column list matching the CSV.
            csv_file_path: path of the CSV file to COPY from.
        '''
        super(StageEmployeesOperator, self).__init__(*args, **kwargs)
        self.connection_id = connection_id
        self.table_name = table_name
        self.min_table_size = min_table_size
        # BUGFIX: a stray trailing comma previously turned this attribute
        # into a 1-tuple, which would break postgres.run() later.
        self.table_create_sql = table_create_sql
        self.table_drop_sql = table_drop_sql
        self.csv_header_order = csv_header_order
        self.csv_file_path = csv_file_path

    def validate_employee_data(self, postgres):
        '''Return True when the employees table already holds at least
        self.min_table_size rows (i.e. the data there is considered good).

        postgres : the connected postgres hook which allows a query to be run'''
        sql = f'SELECT count(*) FROM {self.table_name}'
        employees_count = postgres.get_records(sql)[0][0]
        self.log.info(f'count of employees {employees_count}')
        return employees_count >= self.min_table_size

    def stage_employee_data(self, postgres):
        '''Recreates the employees table and loads it from a csv file

        postgres : the connected postgres hook which allows a query to be run'''
        postgres.run(self.table_drop_sql)
        postgres.run(self.table_create_sql)
        # BUGFIX: was `posgres.run(...)` -- a NameError at runtime.
        postgres.run(f"""COPY {self.table_name}({self.csv_header_order})
                                FROM '{self.csv_file_path}'
                                DELIMITER ',' CSV HEADER;""")
        sql = f'SELECT count(*) FROM {self.table_name}'
        employees_count = postgres.get_records(sql)[0][0]
        # BUGFIX: the previous message said 'email_logs' (copy-paste error).
        self.log.info(f'count of employees {employees_count}')

    def execute(self, context):
        '''Finds the data file for the employees, connects to postgres, then COPYs the data into postgres.

        context : the Airflow task context (unused beyond logging).

        Raises:
            AirflowException: when the CSV file does not exist.'''
        self.log.info('StageEmployeesOperator starting')
        if not os.path.exists(self.csv_file_path):
            self.log.error(f'{self.csv_file_path} NOT found')
            raise AirflowException(f'{self.csv_file_path} NOT found')
        self.log.info(f'found {self.csv_file_path}')
        # Postgres insert
        postgres = PostgresHook(postgres_conn_id=self.connection_id)
        self.log.info('Created connection to postgres')
        self.log.info('Copying data from CSV to postgres')
        if not self.validate_employee_data(postgres):
            self.stage_employee_data(postgres)
num= 4
k=0
for i in range(2,int(num/2)):
if (num%i==0):
k = k+1
if (k==0):
print("IS a prime number")
else:
print("IS not a prime number")
|
#!/usr/bin/env python
import sys
import logging
import argparse
from time import sleep
from securecscc import ApplicationFactory
def _logger():
logger = logging.getLogger('securecscc')
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler = logging.StreamHandler()
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.INFO)
return logger
def parse_args():
    """Parse command-line options.

    Returns an argparse.Namespace with a single ``duration`` attribute:
    the number of seconds to wait between Sysdig Secure queries.
    """
    arg_parser = argparse.ArgumentParser(
        description='Poll Sysdig Secure for events and store as findings in Google Cloud Security Command Center')
    arg_parser.add_argument('--duration', '-d',
                            type=int,
                            default=60,
                            help='Time between queries (default: 60)')
    return arg_parser.parse_args()
def main():
    """Poll Sysdig Secure every --duration seconds and forward each event
    to Google Cloud Security Command Center as a finding.

    Runs forever; a failure while publishing one event is logged and
    skipped so it does not stop the polling loop.
    """
    args = parse_args()
    application_factory = ApplicationFactory()
    sysdig_secure_client = application_factory.sysdig_secure_client()
    action = application_factory.create_finding_from_sysdig_secure_event_action()
    logger = _logger()
    while True:
        logger.info('Querying events from Sysdig Secure')
        for event in sysdig_secure_client.events_happened_on_last(args.duration):
            logger.info('Publishing to Google Security Command')
            try:
                result = action.run(event)
                logger.info(result.to_dict())
            except Exception as ex:
                # Keep polling even if publishing a single event fails.
                logger.error(ex)
        sleep(args.duration)
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        # Ctrl-C exits cleanly instead of dumping a traceback.
        sys.exit(0)
|
from typing import TYPE_CHECKING, List
from .helpers import is_pow2, roundup_pow2, roundup_to
from .. import walker
from .. import rdltypes
from ..node import Node, AddressableNode, SignalNode
from ..node import AddrmapNode, RegfileNode, MemNode, RegNode, FieldNode
if TYPE_CHECKING:
from ..compiler import RDLEnvironment
#===============================================================================
# Validation Listeners
#===============================================================================
class ValidateListener(walker.RDLListener):
def __init__(self, env: 'RDLEnvironment'):
self.env = env
self.msg = env.msg
# Used in field overlap checks
# This is a rolling buffer of previous fields that still have a chance
# to possibly collide with a future field
self.field_check_buffer = [] # type: List[FieldNode]
# Used in addrmap, regfile, and reg overlap checks
# Same concept as the field check buffer, but is also a stack
self.addr_check_buffer_stack = [[]] # type: List[List[AddressableNode]]
# Keep track of whether a given hierarchy has a reset signal.
# Signals can exist in Root, so pre-load with one stack entry
self.has_cpuif_reset_stack = [False] # type: List[bool]
self.has_field_reset_stack = [False] # type: List[bool]
def enter_Component(self, node: Node) -> None:
# Validate all properties that were applied to the component
for prop_name in node.inst.properties.keys():
prop_value = node.get_property(prop_name)
if isinstance(prop_value, rdltypes.PropertyReference):
prop_value._validate()
prop_rule = self.env.property_rules.lookup_property(prop_name)
prop_rule.validate(node, prop_value)
if not isinstance(node, SignalNode):
self.has_cpuif_reset_stack.append(False)
self.has_field_reset_stack.append(False)
def enter_Signal(self, node: SignalNode) -> None:
if node.get_property('cpuif_reset'):
# 8.2.1-f: cpuif_reset property can only be set true for one
# instantiated signal within a lexical scope.
# (spec authors repeately misuse the word 'lexical', they mean hierarchical)
if self.has_cpuif_reset_stack[-1]:
self.msg.error(
"Only one 'cpuif_reset' signal is allowed per hierarchy. Signal '%s' is redundant."
% (node.inst_name),
node.inst.inst_src_ref
)
self.has_cpuif_reset_stack[-1] = True
if node.get_property('field_reset'):
# 8.2.1-g: field_reset property can only be set true for one
# instantiated signal within a lexical scope.
# (spec authors repeately misuse the word 'lexical', they mean hierarchical)
if self.has_field_reset_stack[-1]:
self.msg.error(
"Only one 'field_reset' signal is allowed per hierarchy. Signal '%s' is redundant."
% (node.inst_name),
node.inst.inst_src_ref
)
self.has_field_reset_stack[-1] = True
def enter_AddressableComponent(self, node: AddressableNode) -> None:
addr_check_buffer = self.addr_check_buffer_stack[-1]
self.addr_check_buffer_stack.append([])
# Check for collision with previous addressable sibling
new_addr_check_buffer = []
for prev_addressable in addr_check_buffer:
if (prev_addressable.raw_address_offset + prev_addressable.total_size) > node.raw_address_offset:
# Overlaps!
# Only allowable overlaps are as follows:
# 10.1-h: Registers shall not overlap, unless one contains only
# read-only fields and the other contains only write-only or
# write-once-only fields.
overlap_allowed = False
if isinstance(prev_addressable, RegNode) and isinstance(node, RegNode):
if (((not prev_addressable.has_sw_writable) and (not node.has_sw_readable))
or ((not prev_addressable.has_sw_readable) and (not node.has_sw_writable))
):
overlap_allowed = True
# Bridge addrmaps allow overlapping children
if isinstance(node.parent, AddrmapNode) and node.parent.get_property('bridge'):
overlap_allowed = True
if not overlap_allowed:
self.msg.error(
"Instance '%s' at offset +0x%X:0x%X overlaps with '%s' at offset +0x%X:0x%X"
% (
node.inst_name, node.raw_address_offset, node.raw_address_offset + node.total_size - 1,
prev_addressable.inst_name, prev_addressable.raw_address_offset, prev_addressable.raw_address_offset + prev_addressable.total_size - 1,
),
node.inst.inst_src_ref
)
# Keep it in the list since it could collide again
new_addr_check_buffer.append(prev_addressable)
self.addr_check_buffer_stack[-2] = new_addr_check_buffer
if node.is_array:
assert node.array_stride is not None
# Check if array interferes with itself
if node.array_stride < node.size:
self.msg.error(
"Instance array '%s' has address stride 0x%X, but the element size is 0x%X"
% (
node.inst_name, node.array_stride, node.size
),
node.inst.inst_src_ref
)
if self.env.chk_stride_not_pow2:
if not is_pow2(node.array_stride):
self.msg.message(
self.env.chk_stride_not_pow2,
"Address stride of instance array '%s' is not a power of 2"
% node.inst_name,
node.inst.inst_src_ref
)
if self.env.chk_strict_self_align:
req_align = roundup_pow2(node.size)
if (node.raw_address_offset % req_align) != 0:
self.msg.message(
self.env.chk_strict_self_align,
"Address offset +0x%x of instance '%s' is not a power of 2 multiple of its size 0x%x"
% (node.raw_address_offset, node.inst_name, node.size),
node.inst.inst_src_ref
)
def enter_Addrmap(self, node: AddrmapNode) -> None:
if node.get_property('bridge'):
# This is a 'bridge addrmap'
# Verify that:
# - Child components are only other addrmaps (signals are ok too)
# - has at least 2 child addrmaps
n_child_addrmaps = 0
for child in node.children():
if isinstance(child, AddrmapNode):
n_child_addrmaps += 1
elif isinstance(child, SignalNode):
pass
else:
self.msg.error(
"Addrmap '%s' is a bridge which can only contain other addrmaps. Contains a child instance '%s' which is a %s"
% (node.inst_name, child.inst_name, type(child.inst).__name__.lower()),
child.inst.inst_src_ref
)
if n_child_addrmaps < 2:
self.msg.error(
"Addrmap '%s' is a bridge and shall contain 2 or more sub-addrmaps"
% node.inst_name,
node.inst.inst_src_ref
)
def enter_Reg(self, node: RegNode) -> None:
self.field_check_buffer = []
if node.is_array and self.env.chk_sparse_reg_stride:
assert node.array_stride is not None
if node.array_stride > (node.get_property('regwidth') // 8):
self.msg.message(
self.env.chk_sparse_reg_stride,
"Address stride (+= %d) of register array '%s' is not equal to its width (regwidth/8 = %d)"
% (node.array_stride, node.inst_name, (node.get_property('regwidth') // 8)),
node.inst.inst_src_ref
)
# 11.2-e: Virtual register width is limited to the minimum power of two
# bytes, which can contain the memory width ...
if node.is_virtual:
assert node.parent is not None # Reg always has a parent
memwidth = node.parent.get_property('memwidth')
memwidth_bytes = roundup_to(memwidth, 8) // 8
max_regwidth = roundup_pow2(memwidth_bytes) * 8
if node.get_property('regwidth') > max_regwidth:
self.msg.error(
"regwidth (%d) of virtual register '%s' is too wide for this memory."
% (node.get_property('regwidth'), node.inst_name)
+ " Virtual register width is limited to the minimum power of two bytes which can contain the memory width.",
node.inst.inst_src_ref
)
# Validate alias register
if node.is_alias:
primary_reg = node.alias_primary
# 5.3.1-j: If an alias is present, then the primary must also be present
if node.get_property('ispresent') and not primary_reg.get_property('ispresent'):
self.msg.error(
"Register '%s' is an alias of register '%s' which is not present (ispresent=false)"
% (node.inst_name, primary_reg.inst_name),
node.inst.inst_src_ref
)
# 10.5.1-f: The alias register shall have the same width as the primary register.
if node.get_property('regwidth') != primary_reg.get_property('regwidth'):
self.msg.error(
"Primary register shall have the same regwidth as the alias register.",
node.inst.inst_src_ref
)
if primary_reg.is_alias:
self.msg.error(
"Primary register of an alias cannot also be an alias",
node.inst.inst_src_ref
)
# 10.5.1-f: Instance type shall be the same (internal/external)
if primary_reg.external != node.external:
self.msg.error(
"Instance types of alias register and its primary mismatch. "
"Both shall be either internal or external.",
node.inst.inst_src_ref
)
if primary_reg.is_array and not node.is_array :
self.msg.error(
"Single alias register references a primary register array. "
"It is ambiguous which array element is actually the primary register.",
node.inst.inst_src_ref
)
if primary_reg.is_array and node.is_array:
if primary_reg.array_dimensions != node.array_dimensions:
self.msg.error(
"Array of alias registers references an array of registers as its primary, "
"but the array dimensions do not match.",
node.inst.inst_src_ref
)
for field in node.fields():
# 10.5.1-b: Make sure the primary also has this field
primary_field = primary_reg.get_child_by_name(field.inst_name)
if not isinstance(primary_field, FieldNode):
self.msg.error(
"Alias register '%s' contains field '%s' that does not exist in the primary register."
% (node.inst_name, field.inst_name),
field.inst.inst_src_ref
)
# Cannot validate this field any further
continue
# 5.3.1-j: If an alias is present, then the primary must also be present
if field.get_property('ispresent') and not primary_field.get_property('ispresent'):
self.msg.error(
"Field '%s' is an alias of register '%s' which is not present (ispresent=false)"
% (field.inst_name, primary_field.inst_name),
field.inst.inst_src_ref
)
# 10.5.1-b: Validate field is the same width and bit position
if (primary_field.lsb != field.lsb) or (primary_field.width != field.width):
self.msg.error(
"Alias field and its primary shall have the same position and size",
field.inst.inst_src_ref
)
# 10.5.1-e: Only the following SystemRDL properties may be
# different in an alias: desc, name, onread, onwrite, rclr, rset,
# sw, woclr, woset, and any user-defined properties.
ignore_props = {
'desc', 'name', 'onread', 'onwrite', 'rclr', 'rset', 'sw', 'woclr', 'woset'
}
primary_props = set(primary_field.list_properties(include_udp=False)) - ignore_props
alias_props = set(field.list_properties(include_udp=False)) - ignore_props
xor_props = primary_props ^ alias_props
if xor_props:
# differing set of props were assigned!
self.msg.error(
"Alias field '%s' is incompatible with its primary '%s'. The following properties differ: %s"
% (field.inst_name, primary_field.inst_name, ", ".join(xor_props)),
field.inst.inst_src_ref
)
continue
# same set of properties assigned. Now compare their values
for prop_name in alias_props:
if field.get_property(prop_name) != primary_field.get_property(prop_name):
self.msg.error(
"Alias field '%s' is incompatible with its primary '%s'. Values of property '%s' differ"
% (field.inst_name, primary_field.inst_name, prop_name),
field.inst.inst_src_ref
)
# no sense in going further
break
def exit_Reg(self, node: RegNode) -> None:
# 10.1-c: At least one field shall be instantiated within a register
#
# At the end of field overlap checking, at least one entry is guaranteed to
# be left over in the field_check_buffer
if not self.field_check_buffer:
self.msg.error(
"Register '%s' does not contain any fields" % node.inst_name,
node.inst.inst_src_ref
)
def enter_Field(self, node: FieldNode) -> None:
assert node.parent is not None # fields are always enclosed by a reg
this_f_hw = node.get_property('hw')
this_f_sw = node.get_property('sw')
parent_regwidth = node.parent.get_property('regwidth')
# hw property values of w1 or rw1 don't make sense
if this_f_hw in (rdltypes.AccessType.w1, rdltypes.AccessType.rw1):
self.msg.error(
"Field '%s' hw access property value of %s is meaningless"
% (node.inst_name, this_f_hw.name),
node.inst.property_src_ref.get('hw', node.inst.inst_src_ref)
)
# 9.4.1-Table 12: Check for bad sw/hw combinations
if (this_f_sw == rdltypes.AccessType.w) and (this_f_hw == rdltypes.AccessType.w):
self.msg.error(
"Field '%s' access property combination is meaningless: sw=w; hw=w;"
% (node.inst_name),
node.inst.inst_src_ref
)
elif this_f_sw == rdltypes.AccessType.na:
self.msg.error(
"Field '%s' sw access property is 'na' ... a field defined in a SOFTWARE "
"register map ... is not accessable by software ... whats the point? "
"What does it mean? What does anything mean? Am I just a machine "
"in a Python interpreter? Or can I dream dreams? So many questions..."
% (node.inst_name),
node.inst.property_src_ref.get('sw', node.inst.inst_src_ref)
)
# 10.1-d: Two field instances shall not occupy overlapping bit positions
# within a register unless one field is read-only and the other field
# is write-only.
#
# Scan through a copied list of the field_check_buffer for collisions
# If an entry no longer collides with the current node, it can be removed
# from the list since fields are sorted.
new_field_check_buffer = []
for prev_field in self.field_check_buffer:
if prev_field.high >= node.low:
# Found overlap!
# Check if the overlap is allowed
prev_f_sw = prev_field.get_property('sw')
if((prev_f_sw == rdltypes.AccessType.r)
and (this_f_sw in (rdltypes.AccessType.w, rdltypes.AccessType.w1))
):
pass
elif((this_f_sw == rdltypes.AccessType.r)
and (prev_f_sw in (rdltypes.AccessType.w, rdltypes.AccessType.w1))
):
pass
else:
self.msg.error(
"Field '%s[%d:%d]' overlaps with field '%s[%d:%d]'"
% (node.inst_name, node.msb, node.lsb,
prev_field.inst_name, prev_field.msb, prev_field.lsb),
node.inst.inst_src_ref
)
# Keep it in the list since it could collide again
new_field_check_buffer.append(prev_field)
self.field_check_buffer = new_field_check_buffer
# 10.1-e: Field instances shall not occupy a bit position exceeding the
# MSB of the register
if node.high >= parent_regwidth:
self.msg.error(
"High bit (%d) of field '%s' exceeds MSb of parent register"
% (node.high, node.inst_name),
node.inst.inst_src_ref
)
# Optional warning if a field is missing a reset assignment
if node.env.chk_missing_reset:
# Implements storage but was never assigned a reset
if (not node.is_virtual) and node.implements_storage and (node.get_property('reset') is None):
node.env.msg.message(
node.env.chk_missing_reset,
"Field '%s' implements storage but is missing a reset value. Initial state is undefined"
% node.inst_name,
node.inst.inst_src_ref
)
# Field is a static tie-off (no storage element, no hardware update path),
# but the user never specified its value, so its readback value is
# ambiguous.
if (
not node.is_alias
and not node.implements_storage and node.is_sw_readable and (node.get_property('reset') is None)
and (node.get_property('hw') in {rdltypes.AccessType.na, rdltypes.AccessType.r})
):
node.env.msg.message(
node.env.chk_missing_reset,
"Field '%s' is a constant at runtime but does not have a known value. Recommend assigning it a reset value."
% node.inst_name,
node.inst.inst_src_ref
)
# 11.2-e: ... and all the virtual fields shall fit within the memory width.
if node.is_virtual:
assert node.parent.parent is not None # fields are always enclosed by something.reg
memwidth = node.parent.parent.get_property('memwidth')
if node.high >= memwidth:
self.msg.error(
"Virtual field '%s' does not fit within the parent memory's width"
% node.inst_name,
node.inst.inst_src_ref
)
    def exit_Field(self, node: FieldNode) -> None:
        # Remember this field so the enclosing register can check subsequent
        # sibling fields for bit-range overlaps against it (see enter_Field).
        self.field_check_buffer.append(node)
def exit_Regfile(self, node: RegfileNode) -> None:
# 12.2-c: At least one reg or regfile shall be instantiated within a regfile.
if not self.addr_check_buffer_stack[-1]:
self.msg.error(
"Register file '%s' must contain at least one reg or regfile."
% node.inst_name,
node.inst.inst_src_ref
)
def exit_Addrmap(self, node: AddrmapNode) -> None:
# 13.3-b: At least one register, register file, memory, or address map
# shall be instantiated within an address map
if not self.addr_check_buffer_stack[-1]:
self.msg.error(
"Address map '%s' must contain at least one reg, regfile, mem, or addrmap."
% node.inst_name,
node.inst.inst_src_ref
)
def exit_Mem(self, node: MemNode) -> None:
# 11.2-i: The address space occupied by virtual registers shall be less
# than or equal to the address space provided by the memory.
if node.inst.children:
last_child = Node._factory(node.inst.children[-1], node.env, node)
if isinstance(last_child, RegNode):
end_addr = last_child.raw_address_offset + last_child.total_size
if end_addr > node.size:
self.msg.error(
"Address space occupied by registers (0x%X) exceeds size of mem '%s' (0x%X)"
% (end_addr, node.inst_name, node.size),
node.inst.inst_src_ref
)
    def exit_AddressableComponent(self, node: AddressableNode) -> None:
        # Discard this component's (already validated) child list, then
        # record the component itself as a child of its parent scope so the
        # parent's emptiness checks (12.2-c, 13.3-b) see it.
        self.addr_check_buffer_stack.pop()
        self.addr_check_buffer_stack[-1].append(node)
def exit_Component(self, node: Node) -> None:
if not isinstance(node, SignalNode):
self.has_cpuif_reset_stack.pop()
self.has_field_reset_stack.pop()
|
#!/usr/bin/env python
import os
from decimal import Decimal, getcontext
import numpy as np
from matplotlib import pyplot as plt
from astropy.io import ascii
from scipy.integrate import quad
import plot_mc
NEED_PARAMS = ['Omega_m', 'Omega_lambda', 'H0']
NEED_NUISANCE = ['M_nuisance']
class LK:
    """Pantheon supernova likelihood.

    Loads the binned Pantheon dataset and evaluates the chi-squared
    log-likelihood of a (possibly curved) Lambda-CDM cosmology against it.

    Parameters
    ----------
    dat_dir : str
        Location where the Pantheon data files are found.

    Attributes
    ----------
    data : dict
        Dictionary containing all the data read from the dataset.
    stat_err : np.matrix
        Diagonal matrix of squared statistical errors from the dataset.
    sys_err : np.matrix
        Systematic covariance matrix read in from the dataset.
    m_B : array-like
        Apparent magnitudes read in from the dataset.
    z : array-like
        Redshifts read in from the dataset.
    tot_err : np.matrix
        Total covariance (sum of stat_err and sys_err).

    Usage example
    -------------
    import likelihood
    like = likelihood.LK()
    pars = {'Omega_m': 0.30, 'Omega_lambda': 0.7, 'H0': 72.0,
            'M_nuisance': -19.0, 'Omega_k': 0.0}
    log_likelihood, pars = like.likelihood_cal(pars)
    """

    def __init__(self, dat_dir=os.getcwd() + '/Binned_data/'):
        self.data = self.loading_data(dat_dir)
        self.stat_err = self.data['stat_err']
        self.sys_err = self.data['sys_err']
        self.m_B = self.data['m_B']
        self.z = self.data['z']
        # Total covariance = statistical + systematic (the "with sys" contour)
        self.tot_err = self.sys_err + self.stat_err

    def loading_data(self, dat_dir, show=False):
        """
        Read Pantheon data from the Binned_data directory.

        Usage example:
            stat_err, sys_err = loading_data(dat_dir=os.getcwd() + '/Binned_data/')

        Parameters:
        -----------
        dat_dir : string;
            Points to the directory where the data files are stored.
        show : bool;
            Plot the covariance matrix if needed.

        Returns:
        --------
        data : dictionary;
            Dictionary containing stat_err, sys_err, z and m_B.
        """
        Pantheon_data = ascii.read(dat_dir+'lcparam_DS17f.txt', names=['name', 'zcmb', 'zhel', 'dz',
                                                                      'mb', 'dmb', 'x1', 'dx1',
                                                                      'color', 'dcolor', '3rdvar', 'd3rdvar',
                                                                      'cov_m_s', 'cov_m_c', 'cov_s_c', 'set',
                                                                      'ra', 'dec'])
        # Redshift, apparent magnitude and statistical error per bin
        z = Pantheon_data['zcmb']
        m_B = Pantheon_data['mb']
        # Statistical errors are uncorrelated: put the squared errors on the
        # diagonal of a covariance matrix.
        stat_err = np.diag(Pantheon_data['dmb'])**2
        stat_err = np.matrix(stat_err)
        # Read the systematic covariance matrix from sys_DS17f.txt.
        error_file = ascii.read(dat_dir+'sys_DS17f.txt')
        # '40' is the first line of the file and indicates the matrix size.
        error_file = error_file['40']
        sys_err = []
        cnt = 0
        line = []
        # The file is a flat list of values; regroup it into 40-element rows.
        for i in np.arange(np.size(error_file)):
            cnt += 1
            line.append(error_file[i])
            if cnt % 40 == 0:
                cnt = 0
                if len(line) > 0:
                    sys_err.append(line)
                    line = []
        sys_err = np.matrix(sys_err)
        if show:  # plot the covariance matrix if requested
            fig, ax1 = plt.subplots(nrows=1, ncols=1, figsize=(10, 6.18))
            imgplot = plt.imshow(sys_err, cmap='bone', vmin=-0.001, vmax=0.001)
            ax1.set_xticklabels(['', 0.01, '', 0.1, '', .50, '', 1.0, '', 2.0])
            ax1.set_yticklabels(['', 0.01, '', 0.1, '', .50, '', 1.0, '', 2.0])
            ax1.set_xlabel('z')
            ax1.set_ylabel('z')
            fig.colorbar(imgplot)
            plt.show()
        return {'stat_err': stat_err, 'sys_err': sys_err, 'z': z, 'm_B': m_B}

    def likelihood_cal(self, pars=None, ifsys=True):
        """
        Calculate the log-likelihood for the parameters from the sampler.

        Parameters:
        -----------
        pars : dictionary {string: float};
            Dictionary of parameters and their values. Defaults to an empty
            dict (which will fail the required-parameter checks).
        ifsys : bool;
            Include the systematic covariance in the error budget.

        Returns:
        --------
        likelihood, pars : float, dict;
            The log-likelihood, plus the (possibly augmented) parameters.
        """
        # BUG FIX: a mutable default argument ({}) is shared across calls;
        # use None as the sentinel instead.
        if pars is None:
            pars = {}
        _pars = pars.copy()
        for param in NEED_PARAMS:  # check for all needed params
            assert param in _pars, 'Error: likelihood calculation'\
                ' requires a value for parameter {}'.format(param)
        for param in NEED_NUISANCE:  # check for all needed nuisance parameters
            assert param in _pars, 'Error: Likelihood requires nuisance'\
                ' parameter {}'.format(param)
        for k, v in _pars.items():  # check that the values are valid
            assert isinstance(v, float), 'Error: value of parameter {} is not a float'.format(k)
        # Model distance moduli, shifted by the absolute-magnitude nuisance.
        model_mus = self.compute_model(_pars) + _pars.get('M_nuisance')
        delta_mu = self.m_B - model_mus
        delta_mu = np.matrix(delta_mu)
        if ifsys:  # choose the error budget
            error = self.tot_err
        else:
            error = self.stat_err
        # Calculate chi^2 according to Equation 8.
        # BUG FIX: np.float was deprecated and removed from NumPy (>=1.24);
        # the builtin float does the same conversion.
        Chi2 = float(delta_mu * np.linalg.inv(error) * np.transpose(delta_mu))
        if np.isnan(Chi2):
            Chi2 = np.inf  # effectively zero likelihood
        return -Chi2/2, _pars  # returns the log-likelihood

    def compute_model(self, pars):
        '''
        Computes the model distance moduli for a set of parameters.
        The validity of the input parameters is checked by the calling
        function (likelihood_cal).

        Parameters:
        -----------
        pars : dictionary {string: float};
            Dictionary of parameters and their values. Mutated in place to
            add 'Omega_k' when it is missing.

        Returns:
        ------------
        mus: [float]
            Values of the distance moduli for the given model.
        '''
        if 'Omega_k' not in pars:  # derive the curvature density if needed
            omega_k = 1 - pars.get('Omega_lambda') - pars.get('Omega_m')
            if np.abs(omega_k) < (10**-7):
                omega_k = 0  # compensating for floating point arithmetic errors
            pars.update({'Omega_k': omega_k})
        lds = self.luminosity_distances(pars)
        # Luminosity distances are in units of megaparsecs.
        mus = 25 + 5*np.log10(lds)
        return mus

    def integrand(self, z, pars):
        '''
        Returns the value of the integrand 1/E(z) used in all luminosity
        distance calculations.

        Parameters:
        -----------
        z: float
            Redshift value; must be positive.
        pars: dict {string: float}
            Values of the cosmological parameters.

        Returns:
        --------
        The value of the integrand for the given redshift and cosmological
        parameters.
        '''
        assert z > 0, 'Error: Invalid value for redshift passed. z must be > 0'
        # E(z)^2; renamed from 'sum' to avoid shadowing the builtin.
        e_sq = pars['Omega_m']*((1+z)**3) + pars['Omega_lambda'] + pars['Omega_k']*((1+z)**2)
        return 1/np.sqrt(e_sq)

    def luminosity_distances(self, pars):
        '''
        Calculates the luminosity distances for a given model, in Mpc.

        Parameters:
        -----------
        pars: dict {string: float}
            Values of the cosmological parameters.

        Returns:
        --------
        lds: [floats]
            Calculated luminosity distances in units of megaparsecs.
        '''
        num_points = len(self.z)
        lds = np.zeros(num_points)
        # Integrate over successive redshift intervals and accumulate, so
        # each bin only adds the new piece instead of re-integrating from 0.
        integral_val = quad(self.integrand, 0, self.z[0], args=(pars,))[0]
        lds[0] = self.luminosity_delegate(self.z[0], integral_val, pars)
        for i in range(1, num_points):
            integral_val += quad(self.integrand, self.z[i-1], self.z[i], args=(pars,))[0]
            lds[i] = self.luminosity_delegate(self.z[i], integral_val, pars)
        return lds

    def luminosity_delegate(self, z, integral_val, pars):
        '''
        Helper for converting a comoving-distance integral into a
        luminosity distance, accounting for spatial curvature.

        Parameters:
        -----------
        z: float
            Redshift value.
        integral_val: float
            Value of the integral of 1/E(z) up to the given redshift.
        pars: dict
            Dictionary of cosmological parameters and their values.

        Returns:
        --------
        luminosity distance: float
            Calculated luminosity distance in units of megaparsecs.
        '''
        d_hubble = self.hubble_distance(pars.get('H0'))
        Omega_k = pars.get('Omega_k')
        if Omega_k > 0:    # open universe
            return (1+z)*d_hubble*np.sinh(np.sqrt(Omega_k)*integral_val)/np.sqrt(Omega_k)
        elif Omega_k < 0:  # closed universe
            return (1+z)*d_hubble*np.sin(np.sqrt(np.abs(Omega_k))*integral_val)/np.sqrt(np.abs(Omega_k))
        else:              # flat universe
            return (1+z)*d_hubble*integral_val

    def hubble_distance(self, H0):
        '''
        Calculates the Hubble distance in units of Mpc for a given H0.

        Parameters:
        -----------
        H0: float
            Value of the Hubble constant in km/s/Mpc.

        Returns:
        -----------
        hubble distance: float
            Value of the Hubble distance, c/H0.
        '''
        c = 3*10**5  # speed of light in km/s
        return c/H0
### TESTS ###
def test_hubble_distance():
    """Check the Hubble-distance helper against a hand-computed value."""
    like = LK()
    got = like.hubble_distance(100)
    assert got == 3000, 'Error: Hubble distance calculation'\
        ' returned unexpected value'
    print("Hubble distance calculation passed test")
def test_integrand():
    """Validate the integrand 1/E(z) at z=1 for a fiducial flat model."""
    like = LK()
    cosmology = {'Omega_m': 0.30, 'Omega_lambda': 0.70, 'H0': 72.0, 'Omega_k': 0.0}
    assert round(like.integrand(1, cosmology), 6) == 0.567962, \
        'Error: E(z) calculation producing bad results'
    print("E(z) calculation test passed")
def test_luminosity_distances():
    """Spot-check the first and last luminosity distances for flat,
    negatively curved and positively curved cosmological models."""
    like = LK()
    # (parameters, expected first ld, expected last ld, failure msg, pass msg)
    cases = [
        ({'Omega_m': 0.30, 'Omega_lambda': 0.70, 'H0': 72.0, 'Omega_k': 0.0},
         58.963, 11602.331,
         'Error: luminosity distance calculation failed for flat model',
         "Luminosity distance test passed for flat cosmological model"),
        ({'Omega_m': 0.35, 'Omega_lambda': 0.70, 'H0': 72.0, 'Omega_k': -0.05},
         58.953, 11261.881,
         'Error: luminosity distance calculation failed for negative Omega_k model',
         "Luminosity distance test passed for negative omegaK cosmological model"),
        ({'Omega_m': 0.30, 'Omega_lambda': 0.65, 'H0': 72.0, 'Omega_k': 0.05},
         58.943, 11508.527,
         'Error: luminosity distance calculation failed for positive Omega_k model',
         "Luminosity distance test passed for positive omegaK cosmological model"),
    ]
    for pars, first_ld, last_ld, fail_msg, pass_msg in cases:
        lds = like.luminosity_distances(pars)
        assert round(lds[0], 3) == first_ld and round(lds[-1], 3) == last_ld, fail_msg
        print(pass_msg)
def test_loading_data():
    """Exercise the data loader, including the covariance-matrix plot."""
    like = LK()
    # Re-run the loader with show=True so the covariance plot is exercised.
    like.loading_data(dat_dir=os.getcwd() + '/Binned_data/', show=True)
    print('data is properly loaded!')
def test_likelihood():
    """Grid-scan the (Omega_m, Omega_lambda) plane and plot the resulting
    probability contours with and without systematic errors.

    BUG FIX: the original body referenced a module-global ``lk`` that only
    exists when the file is run as a script, so calling this function after
    importing the module raised NameError. Instantiate the likelihood
    locally instead. The manual i/j counters (and the IndexError guard they
    required) are replaced by enumerate.
    """
    lk = LK()
    pars = {'Omega_m': 0.30, 'Omega_lambda': 0.7, 'H0': 74., 'M_nuisance': -19.23}
    nx = 50
    ny = 50
    loglk_sys = np.zeros((nx, ny))
    loglk_nosys = np.zeros((nx, ny))
    omega_m = np.linspace(0, 1.6, nx)
    omega_lambda = np.linspace(0, 2.5, ny)
    for i, _omega_m in enumerate(omega_m):
        pars.update({'Omega_m': _omega_m})
        for j, _omega_lambda in enumerate(omega_lambda):
            pars.update({'Omega_lambda': _omega_lambda})
            loglk_sys[i, j], _ = lk.likelihood_cal(pars=pars, ifsys=True)
            loglk_nosys[i, j], _ = lk.likelihood_cal(pars=pars, ifsys=False)
    # Convert log-likelihoods to normalized probabilities on the grid.
    prob_sys = np.exp(loglk_sys)
    prob_sys = prob_sys / np.sum(prob_sys)
    prob_nosys = np.exp(loglk_nosys)
    prob_nosys = prob_nosys / np.sum(prob_nosys)
    plot_mc.fig18(omega_m, omega_lambda,
                  prob_sys=prob_sys, prob_nosys=prob_nosys,
                  quantile_sys=[[0.319, 0.249, 0.389], [0.733, 0.733-0.113, 0.733+0.113]],
                  quantile_nosys=[[0.348, 0.348-0.04, 0.348+0.04], [0.827, 0.827-0.068, 0.827+0.068]])
def test_luminosity_delegate():
    """Check the comoving-to-luminosity-distance conversion at z=1 with a
    fixed integral value, for all three curvature signs."""
    like = LK()
    redshift = 1
    integral = 0.1
    checks = [
        ({'Omega_m': 0.30, 'Omega_lambda': 0.70, 'H0': 72.0, 'Omega_k': 0.0},
         833.33333,
         'Error: luminosity delegate test failed for flat cosmological model'),
        ({'Omega_m': 0.30, 'Omega_lambda': 0.75, 'H0': 72.0, 'Omega_k': -0.05},
         833.26389,
         'Error: luminosity delegate test failed for negative curvature model'),
        ({'Omega_m': 0.30, 'Omega_lambda': 0.65, 'H0': 72.0, 'Omega_k': 0.05},
         833.40278,
         'Error: luminosity delegate test failed for positive curvature model'),
    ]
    for pars, expected, message in checks:
        assert round(like.luminosity_delegate(redshift, integral, pars), 5) == expected, message
    print("Function luminosity_delegate past all tests")
def test_compute_model():
    """Verify distance moduli at the first and 40th redshift bins for
    three curvature choices."""
    like = LK()
    # (parameters, rounding digits for bin 0, expected bin 0, expected bin 39, msg)
    checks = [
        ({'Omega_m': 0.30, 'Omega_lambda': 0.70, 'H0': 72.0, 'Omega_k': 0.0},
         3, 33.853, 45.323,
         'Error: compute_model function failed test for flat model'),
        ({'Omega_m': 0.30, 'Omega_lambda': 0.75, 'H0': 72.0, 'Omega_k': -0.05},
         4, 33.8537, 45.341,
         'Error: compute_model function failed test for negatively curved model'),
        ({'Omega_m': 0.30, 'Omega_lambda': 0.65, 'H0': 72.0, 'Omega_k': 0.05},
         3, 33.852, 45.305,
         'Error: compute_model function failed test for positively curved model'),
    ]
    for pars, digits, first_mu, last_mu, message in checks:
        model = like.compute_model(pars)
        assert round(model[0], digits) == first_mu and round(model[39], 3) == last_mu, message
    print("Function compute_model passed all tests!")
if __name__ == '__main__':
    # NOTE(review): pyplot is already imported at the top of the file; this
    # re-import is redundant but harmless.
    import matplotlib.pyplot as plt
    # Smoke-check a single likelihood evaluation before running the suite.
    lk = LK()
    params = {'Omega_m': 0.29, 'Omega_lambda': 0.71, 'H0': 72.0, 'M_nuisance': -19.0, 'Omega_k': 0.0}
    chi2, pars = lk.likelihood_cal(params)
    # Run the test suite (requires the Binned_data files on disk).
    test_loading_data()
    test_hubble_distance()
    test_integrand()
    test_luminosity_distances()
    test_luminosity_delegate()
    test_compute_model()
    test_likelihood()  # final test, This may take one minute to run
|
def binary_search(arr, l, r, item):
    """Recursively search the sorted list ``arr`` for ``item`` within the
    inclusive index range [l, r].

    Returns the index of ``item`` if present, otherwise -1.
    """
    # BUG FIX: the bound must be inclusive (l <= r); with l < r a
    # one-element range (e.g. the last remaining candidate) was never
    # inspected, so items such as the final element were reported missing.
    if l <= r:
        mid = l + (r - l) // 2  # integer midpoint, avoids float from '/'
        print("The mid is:", mid)
        if arr[mid] == item:
            return mid
        elif arr[mid] < item:
            # BUG FIX: recurse with the INDEX mid+1 — the original passed
            # the VALUE arr[mid+1] as the new lower bound, breaking the
            # search for any element not at the midpoint.
            return binary_search(arr, mid + 1, r, item)
        else:
            # BUG FIX: likewise, mid-1 (index), not arr[mid-1] (value).
            return binary_search(arr, l, mid - 1, item)
    else:
        return -1
# Driver: search a fixed sorted list for a user-supplied value.
arr = [2,4,8,11,29,35,67]
item=int(input("Enter the element to be searched: "))
result=binary_search(arr,0,len(arr)-1,item)
if(result==-1):
    print("Element Not found")
else:
    print("Element found at index :",result)
|
#! /usr/bin/env python
"""
Module with 2d/3d plotting functions.
"""
from __future__ import division, print_function
__author__ = 'Carlos Alberto Gomez Gonzalez, O. Wertz'
__all__ = ['pp_subplots']
import numpy as np
from matplotlib.pyplot import (figure, subplot, show, colorbar, Circle, savefig,
close)
import matplotlib.colors as colors
import matplotlib.cm as mplcm
from mpl_toolkits.axes_grid1 import make_axes_locatable
from matplotlib.cm import register_cmap
from .shapes import frame_center
# Registering heat and cool colormaps from DS9
# taken from: https://gist.github.com/adonath/c9a97d2f2d964ae7b9eb
# Each channel is a callable mapping normalized intensity v in [0, 1] to a
# color component (values outside [0, 1] are clipped by matplotlib).
ds9cool = {'red': lambda v: 2 * v - 1,
           'green': lambda v: 2 * v - 0.5,
           'blue': lambda v: 2 * v}
ds9heat = {'red': lambda v: np.interp(v, [0, 0.34, 1], [0, 1, 1]),
           'green': lambda v: np.interp(v, [0, 1], [0, 1]),
           'blue': lambda v: np.interp(v, [0, 0.65, 0.98, 1], [0, 0, 1, 1])}
register_cmap('ds9cool', data=ds9cool)
register_cmap('ds9heat', data=ds9heat)
# Default colormap used by pp_subplots when none is given.
vip_default_cmap = 'viridis'
def pp_subplots(*data, **kwargs):
    """ Wrapper for easy creation of pyplot subplots. It is convenient for
    displaying VIP images in jupyter notebooks.

    Parameters
    ----------
    data : list
        List of 2d arrays or a single 3d array to be plotted.
    angscale : bool
        If True, the axes are displayed in angular scale (arcsecs).
    angticksep : int
        Separation for the ticks when using axis in angular scale.
    arrow : bool
        To show an arrow pointing to input px coordinates.
    arrowalpha : float
        Alpha transparency for the arrow.
    arrowlength : int
        Length of the arrow, 20 px by default.
    arrowshiftx : int
        Shift in x of the arrow pointing position, 5 px by default.
    axis : bool
        Show the axis, on by default.
    circle : tuple or list of tuples
        To show a circle at given px coordinates. The circles are shown on all
        subplots.
    circlealpha : float or list of floats
        Alpha transparency for each circle.
    circlecolor : str
        Color of circle(s). White by default.
    circlelabel : bool
        Whether to show the coordinates of each circle.
    circlerad : int
        Radius of the circle, 6 px by default.
    cmap : str
        Colormap to be used, 'viridis' by default.
    colorb : bool
        To attach a colorbar, on by default.
    cross : tuple of float
        If provided, a crosshair is displayed at given px coordinates.
    crossalpha : float
        Alpha transparency of the crosshair.
    dpi : int
        Dots per inch, for plot quality.
    getfig : bool
        Returns the matplotlib figure.
    grid : bool
        If True, a grid is displayed over the image, off by default.
    gridalpha : float
        Alpha transparency of the grid.
    gridcolor : str
        Color of the grid lines.
    gridspacing : int
        Separation of the grid lines in pixels.
    horsp : float
        Horizontal gap between subplots.
    label : str or list of str
        Text for annotating on subplots.
    labelpad : int
        Padding of the label from the left bottom corner. 5 by default.
    labelsize : int
        Size of the labels.
    log : bool
        Log colorscale.
    maxplots : int
        When the input (``*args``) is a 3d array, maxplots sets the number of
        cube slices to be displayed.
    pxscale : float
        Pixel scale in arcseconds/px. Default 0.01 for Keck/NIRC2.
    rows : int
        How many rows (subplots in a grid).
    save : str
        If a string is provided the plot is saved using this as the path.
    showcent : bool
        To show a big crosshair at the center of the frame.
    spsize : int
        Determines the size of the plot. Figsize=(spsize*ncols, spsize*nrows).
    title : str
        Title of the plot(s), None by default.
    vmax : int
        For stretching the displayed pixels values.
    vmin : int
        For stretching the displayed pixels values.
    versp : float
        Vertical gap between subplots.
    """
    parlist = ['angscale',
               'angticksep',
               'arrow',
               'arrowalpha',
               'arrowlength',
               'arrowshiftx',
               'axis',
               'circle',
               'circlealpha',
               'circlecolor',
               'circlerad',
               'circlelabel',
               'cmap',
               'colorb',
               'cross',
               'crossalpha',
               'dpi',
               'getfig',
               'grid',
               'gridalpha',
               'gridcolor',
               'gridspacing',
               'horsp',
               'label',
               'labelpad',
               'labelsize',
               'log',
               'maxplots',
               'pxscale',
               'rows',
               'save',
               'showcent',
               'spsize',
               'title',
               'vmax',
               'vmin',
               'versp']

    # Warn (but do not fail) on unknown keyword arguments.
    for key in kwargs.keys():
        if key not in parlist:
            print("Parameter '{}' not recognized".format(key))
            print("Available parameters are: {}".format(parlist))

    # GEOM ---------------------------------------------------------------------
    num_plots = len(data)
    if num_plots == 1:
        # A single 3d array (and not an RGB image) is treated as a cube of
        # slices, capped at `maxplots` frames.
        if data[0].ndim == 3 and data[0].shape[2] != 3:
            data = data[0]
            maxplots = kwargs.get("maxplots", 10)
            num_plots = min(data.shape[0], maxplots)
    elif num_plots > 1:
        for i in range(num_plots):
            if data[i].ndim != 2 and data[i].shape[2] != 3:
                msg = "Wrong input. Must be either several 2d arrays (images) "
                msg += "or a single 3d array"
                raise TypeError(msg)

    rows = kwargs.get("rows", 1)
    # BUG FIX: validate `rows` BEFORE dividing by it (the original raised
    # ZeroDivisionError below before ever reaching its rows==0 check).
    if rows <= 0:
        raise ValueError('Rows must be a positive integer')
    # BUG FIX: use integer division — `subplot` requires integer grid sizes
    # and `num_plots / rows` is a float on Python 3.
    if num_plots % rows == 0:
        cols = num_plots // rows
    else:
        cols = num_plots // rows + 1

    # CIRCLE -------------------------------------------------------------------
    if 'circle' in kwargs:
        coor_circle = kwargs['circle']
        if isinstance(coor_circle, (list, tuple)):
            show_circle = True
            if isinstance(coor_circle[0], tuple):
                n_circ = len(coor_circle)
            else:
                # A single (X, Y) tuple: replicate so the drawing loop below
                # can treat both cases uniformly.
                n_circ = 1
                coor_circle = [coor_circle] * n_circ
        else:
            print("Circle must be a tuple (X,Y) or tuple/list of tuples (X,Y)")
            show_circle = False
    else:
        show_circle = False

    if 'circlerad' in kwargs and show_circle:
        # single value is provided, used for all circles
        if isinstance(kwargs['circlerad'], (float, int)):
            circle_rad = [kwargs['circlerad']] * n_circ
        # a different value for each circle
        elif isinstance(kwargs['circlerad'], tuple):
            circle_rad = kwargs['circlerad']
        else:
            print("Circlerad must be a float or tuple of floats")
    else:
        if show_circle:
            circle_rad = [6] * n_circ

    if 'circlecolor' in kwargs:
        circle_col = kwargs['circlecolor']
    else:
        circle_col = 'white'

    if 'circlealpha' in kwargs:
        circle_alpha = kwargs['circlealpha']
        # single value is provided, used for all the circles
        if isinstance(circle_alpha, (float, int)) and show_circle:
            circle_alpha = [circle_alpha] * n_circ
    else:
        if show_circle:
            # no alpha is provided, 0.8 is used for all of them
            circle_alpha = [0.8] * n_circ

    circle_label = kwargs.get('circlelabel', False)

    # ARROW --------------------------------------------------------------------
    if 'arrow' in kwargs:
        if not isinstance(kwargs['arrow'], tuple):
            print("Arrow must be a tuple (X,Y)")
            show_arrow = False
        else:
            coor_arrow = kwargs['arrow']
            show_arrow = True
    else:
        show_arrow = False
    arrow_shiftx = kwargs.get('arrowshiftx', 5)
    arrow_length = kwargs.get('arrowlength', 20)
    arrow_alpha = kwargs.get('arrowalpha', 0.8)

    # LABEL --------------------------------------------------------------------
    if 'label' in kwargs:
        label = kwargs['label']
        if len(label) != num_plots:
            print("Label list does not have enough items")
            label = None
    else:
        label = None
    labelsize = kwargs.get('labelsize', 12)
    labelpad = kwargs.get('labelpad', 5)

    # GRID ---------------------------------------------------------------------
    grid = kwargs.get('grid', False)
    grid_color = kwargs.get('gridcolor', '#f7f7f7')
    grid_spacing = kwargs.get('gridspacing', None)
    grid_alpha = kwargs.get('gridalpha', 0.4)

    # VMAX-VMIN ----------------------------------------------------------------
    if 'vmax' in kwargs:
        if isinstance(kwargs['vmax'], (tuple, list)):
            if len(kwargs['vmax']) != num_plots:
                print("Vmax does not list enough items, setting all to None")
                vmax = [None] * num_plots
            else:
                vmax = kwargs['vmax']
        else:
            vmax = [kwargs['vmax']] * num_plots
    else:
        vmax = [None] * num_plots

    if 'vmin' in kwargs:
        if isinstance(kwargs['vmin'], (tuple, list)):
            if len(kwargs['vmin']) != num_plots:
                print("Vmin does not list enough items, setting all to None")
                vmin = [None] * num_plots
            else:
                vmin = kwargs['vmin']
        else:
            vmin = [kwargs['vmin']] * num_plots
    else:
        vmin = [None] * num_plots

    # CROSS --------------------------------------------------------------------
    if 'cross' in kwargs:
        if not isinstance(kwargs['cross'], tuple):
            print("Crosshair must be a tuple (X,Y)")
            show_cross = False
        else:
            coor_cross = kwargs['cross']
            show_cross = True
    else:
        show_cross = False
    cross_alpha = kwargs.get('crossalpha', 0.4)

    # AXIS - ANGSCALE ----------------------------------------------------------
    angticksep = kwargs.get('angticksep', 50)
    pxscale = kwargs.get('pxscale', 0.01)  # default for Keck/NIRC2
    angscale = kwargs.get('angscale', False)
    if angscale:
        print("`Pixel scale set to {}`".format(pxscale))
    show_axis = kwargs.get('axis', True)

    # --------------------------------------------------------------------------
    show_center = kwargs.get("showcent", False)
    getfig = kwargs.get('getfig', False)
    save = kwargs.get("save", False)

    if 'cmap' in kwargs:
        custom_cmap = kwargs['cmap']
        if not isinstance(custom_cmap, (list, tuple)):
            custom_cmap = [kwargs['cmap']] * num_plots
        else:
            if not len(custom_cmap) == num_plots:
                raise RuntimeError('Cmap does not contain enough items')
    else:
        custom_cmap = [vip_default_cmap] * num_plots

    if 'log' in kwargs:
        # Showing bad/nan pixels with the darkest color in current colormap
        current_cmap = mplcm.get_cmap()
        current_cmap.set_bad(current_cmap.colors[0])
        logscale = kwargs['log']
        if not isinstance(logscale, (list, tuple)):
            logscale = [kwargs['log']] * num_plots
        else:
            if not len(logscale) == num_plots:
                raise RuntimeError('Logscale does not contain enough items')
    else:
        logscale = [False] * num_plots

    colorb = kwargs.get('colorb', True)
    dpi = kwargs.get('dpi', 90)
    title = kwargs.get('title', None)
    hor_spacing = kwargs.get('horsp', 0.4)
    ver_spacing = kwargs.get('versp', 0.2)

    # --------------------------------------------------------------------------
    if 'spsize' in kwargs:
        spsize = kwargs['spsize']
    else:
        spsize = 4

    fig = figure(figsize=(cols * spsize, rows * spsize), dpi=dpi)

    if title is not None:
        fig.suptitle(title, fontsize=14)

    for i in range(num_plots):
        image = data[i].copy()
        frame_size = image.shape[0]  # assuming square frames
        cy, cx = frame_center(image)

        if grid_spacing is None:
            # NOTE: once computed for the first frame, this value is reused
            # for all subsequent frames (grid_spacing is no longer None).
            if cy < 10:
                grid_spacing = 1
            elif cy >= 10:
                if cy % 2 == 0:
                    grid_spacing = 4
                else:
                    grid_spacing = 5

        ax = subplot(rows, cols, i + 1)
        ax.set_aspect('equal')

        if logscale[i]:
            # Shift to non-negative values so the symlog norm behaves.
            image += np.abs(image.min())
            if vmin[i] is None:
                linthresh = 1e-2
            else:
                linthresh = vmin[i]
            norm = colors.SymLogNorm(linthresh)
        else:
            norm = None

        if image.dtype == bool:
            image = image.astype(int)

        im = ax.imshow(image, cmap=custom_cmap[i], interpolation='nearest',
                       origin='lower', vmin=vmin[i], vmax=vmax[i], norm=norm)

        if show_circle:
            for j in range(n_circ):
                circle = Circle(coor_circle[j], radius=circle_rad[j],
                                color=circle_col, fill=False, lw=2,
                                alpha=circle_alpha[j])
                ax.add_artist(circle)
                if circle_label:
                    x = coor_circle[j][0]
                    y = coor_circle[j][1]
                    cirlabel = str(int(x))+','+str(int(y))
                    ax.text(x, y+1.8*circle_rad[j], cirlabel, fontsize=8,
                            color='white', family='monospace', ha='center',
                            va='top', weight='bold', alpha=circle_alpha[j])

        if show_cross:
            ax.scatter([coor_cross[0]], [coor_cross[1]], marker='+',
                       color='white', alpha=cross_alpha)

        if show_center:
            # NOTE(review): cx/cy look swapped here (axhline takes a y
            # coordinate) — harmless for the usual square frames where
            # cx == cy, but confirm before relying on it for non-square data.
            ax.axhline(cx, xmin=0, xmax=frame_size, alpha=0.3,
                       linestyle='dashed', color='white', lw=0.6)
            ax.axvline(cy, ymin=0, ymax=frame_size, alpha=0.3,
                       linestyle='dashed', color='white', lw=0.6)

        if show_arrow:
            ax.arrow(coor_arrow[0]+arrow_length+arrow_shiftx, coor_arrow[1],
                     -arrow_length, 0, color='white', head_width=10,
                     head_length=8, width=3, length_includes_head=True,
                     alpha=arrow_alpha)

        if label is not None:
            ax.annotate(label[i], xy=(labelpad, labelpad), color='white',
                        xycoords='axes pixels', weight='bold', size=labelsize)

        if colorb:
            # create an axes on the right side of ax. The width of cax is 5%
            # of ax and the padding between cax and ax is fixed at 0.05 inch
            divider = make_axes_locatable(ax)
            cax = divider.append_axes("right", size="5%", pad=0.05)
            cb = colorbar(im, ax=ax, cax=cax, drawedges=False)
            cb.outline.set_linewidth(0.1)
            cb.ax.tick_params(labelsize=8)

        if grid:
            ax.tick_params(axis='both', which='minor')
            minor_ticks = np.arange(0, data[i].shape[0], grid_spacing)
            ax.set_xticks(minor_ticks, minor=True)
            ax.set_yticks(minor_ticks, minor=True)
            ax.grid(True, which='minor', color=grid_color, linewidth=0.5,
                    alpha=grid_alpha, linestyle='dashed')
        else:
            ax.grid(False)

        if angscale:
            # Converting axes from pixels to arcseconds
            half_num_ticks = int(np.round(cy // angticksep))
            # Calculate the pixel locations at which to put ticks
            ticks = []
            for t in range(half_num_ticks, -half_num_ticks-1, -1):
                # Avoid ticks not showing on the last pixel
                if not cy - t * angticksep == frame_size:
                    ticks.append(cy - t * angticksep)
                else:
                    ticks.append((cy - t * angticksep) - 1)
            ax.set_xticks(ticks)
            ax.set_yticks(ticks)
            # Corresponding distance in arcseconds, measured from the center
            labels = []
            for t in range(half_num_ticks, -half_num_ticks-1, -1):
                labels.append(-t * (angticksep * pxscale))
            ax.set_xticklabels(labels)
            ax.set_yticklabels(labels)
            ax.set_xlabel("arcseconds", fontsize=12)
            ax.set_ylabel("arcseconds", fontsize=12)
            ax.tick_params(axis='both', which='major', labelsize=10)

        if not show_axis:
            ax.set_axis_off()

    fig.subplots_adjust(wspace=hor_spacing, hspace=ver_spacing)
    if save:
        savefig(save, dpi=dpi, bbox_inches='tight', pad_inches=0,
                transparent=True)
        close()
    if getfig:
        return fig
    else:
        show()
    if getfig:
        return fig
|
"""
Django settings for lexoom project.
Generated by 'django-admin startproject' using Django 1.8.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
TEMPLATE_PATH = os.path.join(BASE_DIR, 'templates')
STATIC_PATH = os.path.join(BASE_DIR, 'media')
#HEY_PATH = os.path.join(BASE_DIR, '')
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '=&xloy1o_6*!atx$zu5tf4wofe_z=-=w)r263^xnh-bh%8j7ug'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'law',
'haystack',
)
# search Engine details
HAYSTACK_SITECONF = 'lexoom.search_sites'
#If you choose whoosh as search backend uncomment following 2 lines and comment last 2 lines
HAYSTACK_SEARCH_ENGINE = 'whoosh'
HAYSTACK_WHOOSH_PATH = '/lexoom/index'
#HAYSTACK_SEARCH_ENGINE = 'xapian'
#HAYSTACK_XAPIAN_PATH = '/home/xyz/django_projects/xapian-index'
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'lexoom.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
TEMPLATES_DIRS = ( TEMPLATE_PATH, )
WSGI_APPLICATION = 'lexoom.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'lexom',
'USER': 'root',
'PASSWORD': '',
'HOST': 'localhost', # Or an IP Address that your DB is hosted on
'PORT': '3306',
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
STATIC_PATH,
)
# LOGIN REQUIRED
LOGIN_URL = '/law/login/'
|
from django import forms
from .models import Tracks
class TrackForm(forms.ModelForm):
    """Model form for creating/editing a Tracks entry (name + audio file)."""

    class Meta:
        model = Tracks
        fields = ['track_name', 'audio_file']
        # Friendlier display label for the upload field.
        labels = {'audio_file': 'Audio File'}
        # Inline hint rendered next to the name input.
        help_texts = {'track_name': "Add a name to your file"}
|
#!/usr/bin/env python3
import sys
import re
import string
# When True, all general-purpose register names are replaced with '<reg>'
# so the diff ignores register-allocation differences.
ign_regs = False
num_re = re.compile(r'[0-9]+')   # decimal integer literals
comments = re.compile(r'<.*?>')  # objdump "<symbol+offset>" annotations
# MIPS o32 caller/callee temp and argument register names.
regs = re.compile(r'\b(a[0-3]|t[0-9]|s[0-7]|at|v[01])\b')
# Positive stack/frame-pointer-relative offsets, e.g. ",16(sp)" or ",0x10(s8)".
sprel = re.compile(r',([1-9][0-9]*|0x[1-9a-f][0-9a-f]*)\((sp|s8)\)')
# Characters that extend an identifier; a number adjacent to one of these is
# part of a symbol name, not a literal.
forbidden = set(string.ascii_letters + '_')
skip_lines = 1  # objdump header lines to discard
def fn(pat):
    """re.sub callback: rewrite a bare multi-digit decimal literal as hex.

    Single digits are left alone, as are numbers that touch an identifier
    character on either side (they are part of a symbol name). Relies on the
    module-global ``row`` — the line currently being processed — to inspect
    the characters surrounding the match.
    """
    token = pat.group(0)
    if len(token) <= 1:
        return token
    begin, stop = pat.span()
    preceded = begin and row[begin - 1] in forbidden
    followed = stop < len(row) and row[stop] in forbidden
    if preceded or followed:
        return token
    return hex(int(token))
def parse_relocated_line(line):
    """Split a disassembled line into (prefix, immediate, suffix).

    The immediate is the final comma-separated operand (tab-separated when
    the line has no comma), stopping at an optional "(reg)" suffix. A
    literal '0x0' immediate is normalized to '0'.
    """
    try:
        cut = line.rindex(',')
    except ValueError:
        cut = line.rindex('\t')
    cut += 1
    head, tail = line[:cut], line[cut:]
    paren = tail.find('(')
    if paren >= 0:
        imm, tail = tail[:paren], tail[paren:]
    else:
        imm, tail = tail, ''
    return head, ('0' if imm == '0x0' else imm), tail
# Normalized instruction lines accumulated for the final dump.
output = []
# Count of pending 'nop' lines; only flushed when a real instruction
# follows, so trailing padding nops are dropped from the output.
nops = 0
for index, row in enumerate(sys.stdin):
    # Skip objdump's header line(s).
    if index < skip_lines:
        continue
    row = row.rstrip()
    # Drop symbol headers ("<name>:") and blank lines.
    if '>:' in row or not row:
        continue
    if 'R_MIPS_' in row:
        # Relocation record: patch the previous instruction's immediate
        # with the relocation target (keeping any nonzero addend).
        prev = output[-1]
        before, imm, after = parse_relocated_line(prev)
        repl = row.split()[-1]
        if imm != '0':
            repl += '+' + imm if int(imm,0) > 0 else imm
        if 'R_MIPS_LO16' in row:
            repl = f'%lo({repl})'
        elif 'R_MIPS_HI16' in row:
            # Ideally we'd pair up R_MIPS_LO16 and R_MIPS_HI16 to generate a
            # correct addend for each, but objdump doesn't give us the order of
            # the relocations, so we can't find the right LO16. :(
            repl = f'%hi({repl})'
        else:
            assert 'R_MIPS_26' in row, f"unknown relocation type '{row}'"
        output[-1] = before + repl + after
        continue
    # Strip "<symbol+offset>" annotations, then the address/byte columns.
    row = re.sub(comments, '', row)
    row = row.rstrip()
    row = '\t'.join(row.split('\t')[2:]) # [20:]
    if ign_regs:
        row = re.sub(regs, '<reg>', row)
    # Stack-frame adjustments vary with layout, so blur their immediates.
    if 'addiu' in row and ('$sp' in row or '$s8' in row):
        row = re.sub(num_re, 'imm', row)
    row = re.sub(num_re, fn, row)
    row = re.sub(sprel, ',addr(sp)', row)
    # row = row.replace(',', ', ')
    if row == 'nop':
        # strip trailing nops; padding is irrelevant to us
        nops += 1
    else:
        for _ in range(nops):
            output.append('nop')
        nops = 0
        output.append(row)
for row in output:
    print(row)
|
# Scatter plot: color each point by its y-value with the Blues colormap.
plt.scatter(x_values, y_values, c=y_values, cmap=plt.cm.Blues, edgecolors='none', s=40)
# Title and axis labels (Portuguese: "Squares" / "Values" / "Square of the Values").
plt.title('Quadrados', fontsize=26)
plt.xlabel('Valores', fontsize=14)
plt.ylabel('Quadrado dos Valores', fontsize=14)
# Fix the axis ranges: [xmin, xmax, ymin, ymax].
plt.axis([0, 1100, 0, 1100000])
# Enlarge the major tick labels on both axes.
plt.tick_params(axis='both', which='major', labelsize=14)
plt.savefig('squares_plot.png', bbox_inches='tight')
# bbox_inches='tight' trims the extra white space around the figure.
from element import Element
class Create(Element):
    """Source element: generates arrivals and routes each one to the
    least-loaded successor element."""

    def __init__(self, delay):
        self.nextElements = []
        super().__init__(delay)

    def setNextElements(self, elems):
        self.nextElements = elems

    def outAct(self):
        super().outAct(1)
        self.setTnext(self.getTcurr() + self.getDelay())
        self.getNextElement().inAct(1)

    def getNextElement(self):
        """Pick the successor with the shortest queue; when every queue is
        empty or all queue lengths are equal, fall back to the first one."""
        lengths = [element.queue for element in self.nextElements]
        all_equal = lengths[1:] == lengths[:-1]
        if sum(lengths) == 0 or all_equal:
            return self.nextElements[0]
        return self.nextElements[lengths.index(min(lengths))]
|
def horners(input, coefficents):
    """Evaluate a polynomial at ``input`` using Horner's rule.

    ``coefficents`` lists the coefficients in ascending order of power:
    [c0, c1, c2] represents c0 + c1*x + c2*x**2.

    Iterative rather than recursive, so long coefficient lists cannot hit
    Python's recursion limit; an empty list evaluates to 0 instead of
    raising IndexError. (Parameter names kept for backward compatibility,
    although ``input`` shadows the builtin.)
    """
    result = 0
    # Fold from the highest-order coefficient down: r = r*x + c.
    for c in reversed(coefficents):
        result = result * input + c
    return result
# Smoke check: print 1 + 3*i + 2*i**2 for i = 0..4.
for i in range(5):
    print(horners(i,[1,3,2]))
from random import seed
seed(1)
import numpy
import pandas as pd
import matplotlib
import matplotlib.pyplot as plt
import simpy
class Sender(object):
    """Feeds 1000 timestamped packets into ``nextcable``, one every
    ``sendrate`` time units."""

    def __init__(self, env, nextcable, sendrate):
        self.env = env
        self.sendrate = sendrate
        self.nextcable = nextcable
        env.process(self.start())

    def start(self):
        packet_count = 1000
        for _ in range(packet_count):
            # Each message carries its send time so the receiver can
            # compute the end-to-end delay.
            self.nextcable.put(self.env.now)
            yield self.env.timeout(self.sendrate)
class Receiver(object):
    """Records the arrival time of the first packet, then drains the rest.

    The first packet's arrival time (divided by 1000) is appended to the
    module-level ``record`` list; all later packets are consumed and
    discarded.
    """

    def __init__(self, env, precable):
        self.env = env
        self.precable = precable
        env.process(self.start())

    def start(self):
        msg = yield self.precable.get()
        # Fixed: read self.env rather than the module-level ``env`` so the
        # receiver measures time on the environment it was constructed with
        # (the two only coincide for the last environment built globally).
        record.append(self.env.now / 1000)
        while True:
            # Consume and discard every subsequent packet.
            yield self.precable.get()
class Cable(object):
    """One-way link that delivers each message after a fixed delay."""

    def __init__(self, env, delay):
        self.env = env
        self.delay = delay
        self.store = simpy.Store(env)

    def latency(self, value):
        # Hold the message for the propagation delay, then expose it.
        yield self.env.timeout(self.delay)
        self.store.put(value)

    def put(self, value):
        # Spawn one delay process per message, so many messages can be
        # in flight on the cable at once.
        self.env.process(self.latency(value))

    def get(self):
        return self.store.get()
class Node(object):
    """Store-and-forward node: a receive task and a send task share one
    priority-scheduled RTOS core.

    Packet flow: ``precable`` -> rcvmbx -> ip_rcv (cost ``t_rcvtsk`` each)
    -> sndmbx -> ip_snd (cost ``t_sndtsk`` each plus ``t_switching`` per
    batch) -> ``nextcable``. ``ethmpfsize`` caps the send-mailbox backlog
    and ``t_hello`` delays the very first receive processing.
    """
    def __init__(self, env, precable, nextcable, ethmpfsize, t_rcvtsk, t_sndtsk, t_switching, t_hello):
        self.env = env
        # capacity=1 models a single CPU contended by both tasks.
        self.rtos = simpy.PriorityResource(env, capacity=1)
        self.rcvmbx = simpy.Store(env)
        self.numrcv = 0
        self.sndmbx = simpy.Store(env)
        self.numsnd = 0
        self.precable = precable
        self.nextcable = nextcable
        self.ethmpfsize = ethmpfsize
        self.t_rcvtsk = t_rcvtsk
        self.t_sndtsk = t_sndtsk
        self.t_switching = t_switching
        self.t_hello = t_hello
        # Handshake tokens between the receive and send tasks.
        self.rcv_ok = simpy.Container(env, init=0)
        self.snd_ok = simpy.Container(env, init=0)
        env.process(self.lanintr())
        env.process(self.ip_snd())
    def lanintr(self):
        # LAN interrupt handler: the first arrival also starts ip_rcv.
        # NOTE(review): uses the module-level ``env`` here, not self.env --
        # works only because the driver script rebinds the global; confirm
        # and consider self.env.process(...).
        msg = yield self.precable.get()
        env.process(self.ip_rcv())
        self.rcvmbx.put(msg)
        self.numrcv += 1
        while True:
            msg = yield self.precable.get()
            self.rcvmbx.put(msg)
            self.numrcv += 1
    def ip_rcv(self):
        # Receive task: move packets rcvmbx -> sndmbx while backlog allows.
        if self.t_hello != 0:
            yield self.env.timeout(self.t_hello)
        while True:
            with self.rtos.request(priority=2) as req:
                yield req
                while self.numrcv != 0 and self.numsnd < self.ethmpfsize:
                    msg = yield self.rcvmbx.get()
                    yield self.env.timeout(self.t_rcvtsk)
                    self.sndmbx.put(msg)
                    self.numrcv -= 1
                    self.numsnd += 1
            # Wake the send task, then block until it has flushed.
            self.snd_ok.put(1)
            yield self.rcv_ok.get(1)
    def ip_snd(self):
        # Send task: on each snd_ok token, flush sndmbx to the next cable.
        while True:
            yield self.snd_ok.get(1)
            with self.rtos.request(priority=2) as req:
                yield req
                yield self.env.timeout(self.t_switching)
                while self.numsnd != 0:
                    msg = yield self.sndmbx.get()
                    yield self.env.timeout(self.t_sndtsk)
                    self.nextcable.put(msg)
                    self.numsnd -= 1
            # Hand control back to the receive task.
            self.rcv_ok.put(1)
# End-to-end delay (env.now / 1000) of the first packet, one entry per run.
record = []
total = 17
load = 5  # presumably the offered load in Mbps (see the '5mbps' labels) -- TODO confirm
for num in range(total):
    # Longer chains need more simulated time for the packet to arrive.
    simtime = num*300 + 1
    PKT_SIZE = 64 # byte
    # Inter-send gap: (payload + 20-byte header) * 8 bits / load.
    SEND_RATE = (PKT_SIZE + 20) * 8 /load
    cables = []
    nodes = []
    hello = 0
    env = simpy.Environment()
    cables.append(Cable(env, 1)) # 1us
    sender = Sender(env, cables[0], SEND_RATE)
    # Build a chain of ``num`` nodes joined by 28us cables; only the first
    # node gets a 60-unit hello delay.
    for i in range(num):
        cables.append(Cable(env, 28)) # 28us
        hello = 0
        if i == 0:
            hello = 60
        nodes.append(Node(env, cables[i], cables[i+1], 54, 12.7, 7.13, 1.5, hello))
    receiver = Receiver(env, cables[num])
    env.run(until=simtime)
record5 = record
ax = plt.subplot(111)
# NOTE(review): ``df`` and ``customize`` are not defined in this file --
# presumably supplied by an enclosing notebook; confirm before running
# this standalone.
ax.plot(df['load'], df['b5mbps'], 'r.-', label='actual result')
ax.plot(numpy.arange(1,total), record, 'g.-', label='simulation')
customize(ax, 16, 5, 'numNodes', 'totalDelay', 'maxDelay@5mbps')
|
import numpy as np
import pandas as pd
from web_scrapper import scroll
# Now that the page is fully scrolled, grab the source code.
# browser = scroll(max_interations=5,url="https://twitter.com/search?l=&q=esquerda%20OR%20direita%20since%3A2013-01-01%20until%3A2013-12-31&src=typd&lang=pt", delay=1)
browser = scroll(max_interations=5, url="https://twitter.com/t3knus", delay=1)
source_data = browser.page_source
# Tweet bodies currently rendered on the loaded page.
tweets = browser.find_elements_by_class_name('tweet-text')
# NOTE(review): range(0, len(tweets) - 1) has one fewer element than
# ``tweets``, so zip drops the final tweet -- confirm that is intentional;
# enumerate(tweets) would cover them all.
for index, tweet in zip(list(range(0, len(tweets) - 1)), tweets):
    print("[0{}] {}:{}".format(index, tweet.text, tweet.get_attribute('lang')))
# Profile name and bio, located by absolute XPath (fragile: breaks whenever
# Twitter changes its page structure).
elements = browser.find_elements_by_xpath(
    '//*[@id="page-container"]/div[2]/div/div/div[1]/div/div/div/div[1]/h1/a')
bio = browser.find_elements_by_xpath(
    '//*[@id="page-container"]/div[2]/div/div/div[1]/div/div/div/div[1]/p')
tts = browser.find_elements_by_class_name('tweet')
for element in elements:
    print("Name: {}".format(element.text))
for b in bio:
    print("Bio: {}".format(b.text))
# Compare the name in the permalink path with the user to detect a retweet.
for t in tts:
    print("type: {}".format(t.get_attribute('data-permalink-path')))
    print("\n")
# If a tweet quotes another, compare with the previous list and flag it as
# a retweet as well.
tts = browser.find_elements_by_class_name('QuoteTweet-link')
retweet_bin = list()
for t in tts:
    print("type: {}".format(t.get_attribute('data-conversation-id')))
# tweetys = [tweet.text for tweet in tweets]
# tweetys2 = [tweet.text.encode('utf-8') for tweet in tweets]
browser.close()
# To disk:
# pd.DataFrame(tweetys2).to_csv('/home/teknus/Documents/tcc/src/save.csv', index = False)
# np.savetxt('base7.csv', tweetys2, delimiter=',', fmt='%10s')
|
# Keep prompting until the user types the sentinel "xxx".
played_before = ""
while True:
    if played_before.lower() == "xxx":
        break
    # Ask the user if they have played this game before (case-insensitive).
    played_before = input("Have you played this game before?").lower()
    if played_before in ("yes", "y"):
        # Returning player: carry on with the program.
        played_before = "yes"
        print("program continues")
    elif played_before in ("no", "n"):
        # New player: show the game information first.
        played_before = "no"
        print("Game information")
    else:
        # Any other answer (including the sentinel itself) gets the error
        # message; "xxx" then ends the loop at the top of the next pass.
        print("<error> please answer yes/no")
|
'''
The GitHub API lets us request various kinds of information through its
endpoints. This call returns how many Python projects GitHub currently
hosts, along with details about the most popular Python repositories.
'''
# Search endpoint: Python repositories, sorted by star count.
API = "https://api.github.com/search/repositories?q=language:python&sort=stars"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.