text stringlengths 38 1.54M |
|---|
from pyramid.testing import DummyRequest
from ptmscout.config import strings
from tests.PTMScoutTestCase import IntegrationTestCase, UnitTestCase
from ptmscout.views.files import compendia
from mock import Mock
class TestCompendiaDownloadIntegration(IntegrationTestCase):
    """End-to-end tests for the /compendia listing and download views."""

    def test_compendia_list_should_fail_for_non_users(self):
        # Anonymous visitors must receive 403 Forbidden on the listing page.
        self.bot.logout()
        result = self.ptmscoutapp.get('/compendia', status=403)
        result.mustcontain('Forbidden')

    def test_compendia_download_should_fail_for_non_users(self):
        # Downloads are likewise restricted to logged-in users.
        self.bot.logout()
        result = self.ptmscoutapp.get('/compendia/vertebrata.tsv', status=403)
        result.mustcontain('Forbidden')

    def test_compendia_list_should_show_available_compendia(self):
        # Logged-in users see the available compendium files and descriptions.
        result = self.ptmscoutapp.get('/compendia', status=200)
        result.mustcontain('everything.tsv')
        result.mustcontain('All proteins and modifications')
        result.mustcontain('ubiquitination.tsv')
        result.mustcontain('glycosylation.tsv')

    def test_compendia_download_should_raise_404(self):
        # Unknown compendium names must not be served.
        self.ptmscoutapp.get('/compendia/not_a_real_file.tsv', status=404)

    def test_compendia_download_should_get_file(self):
        result = self.ptmscoutapp.get('/compendia/ptmscout_vertebrata.tsv', status=200)
        # The downloaded TSV must carry the expected header columns.
        result.mustcontain('accessions acc_gene locus protein_name species sequence modifications domains mutations scansite_predictions GO_terms')
class TestCompendiaDownload(UnitTestCase):
    """Unit tests for the compendia view callables."""

    def test_compendia_list(self):
        # compendia_listing only needs route_url from the request object.
        request = DummyRequest()
        request.route_url = Mock()
        request.route_url.return_value = 'link'
        result = compendia.compendia_listing(request)
        self.assertEqual(strings.compendia_download_page_title, result['pageTitle'])
        self.assertEqual(strings.compendia_download_page_desc, result['desc'])
        # The listing is expected to expose exactly 8 downloadable files.
        self.assertEqual(8, len(result['files']))
|
import numpy as np
from unittest import TestCase
from ezyrb import POD, GPR, RBF, Database
from ezyrb import KNeighborsRegressor, RadiusNeighborsRegressor, Linear
from ezyrb import ReducedOrderModel as ROM
snapshots = np.load('tests/test_datasets/p_snapshots.npy').T
pred_sol_tst = np.load('tests/test_datasets/p_predsol.npy').T
pred_sol_gpr = np.load('tests/test_datasets/p_predsol_gpr.npy').T
param = np.array([[-.5, -.5], [.5, -.5], [.5, .5], [-.5, .5]])
class TestReducedOrderModel(TestCase):
    """Tests for ezyrb's ReducedOrderModel: construction, (de)serialization,
    prediction, scalers, and error estimators (k-fold / leave-one-out)."""

    def test_constructor(self):
        # Smoke test: a ROM can be assembled from database + reduction + approximation.
        pod = POD()
        rbf = RBF()
        db = Database(param, snapshots.T)
        rom = ROM(db, pod, rbf)

    def test_save(self):
        fname = 'ezyrb.tmp'
        pod = POD()
        rbf = RBF()
        db = Database(param, snapshots.T)
        rom = ROM(db, pod, rbf)
        rom.fit()
        rom.save(fname)

    def test_load(self):
        # A reloaded ROM must predict identically to the one that was saved.
        fname = 'ezyrb.tmp'
        pod = POD()
        rbf = RBF()
        db = Database(param, snapshots.T)
        rom = ROM(db, pod, rbf)
        rom.fit()
        rom.save(fname)
        new_rom = ROM.load(fname)
        new_param = [-0.293344, -0.23120537]
        np.testing.assert_array_almost_equal(
            rom.predict(new_param),
            new_rom.predict(new_param)
        )

    def test_load2(self):
        # Same round-trip, but without persisting the database.
        fname = 'ezyrb2.tmp'
        pod = POD()
        rbf = RBF()
        db = Database(param, snapshots.T)
        rom = ROM(db, pod, rbf)
        rom.fit()
        rom.save(fname, save_db=False)
        new_rom = ROM.load(fname)
        new_param = [-0.293344, -0.23120537]
        np.testing.assert_array_almost_equal(
            rom.predict(new_param),
            new_rom.predict(new_param)
        )

    def test_predict_01(self):
        pod = POD()
        rbf = RBF()
        db = Database(param, snapshots.T)
        rom = ROM(db, pod, rbf).fit()
        pred_sol = rom.predict([-0.293344, -0.23120537])
        # Bug fix: the original np.save(...) here overwrote the golden
        # reference file this assertion compares against, making the test
        # self-fulfilling and mutating repository data on every run.
        np.testing.assert_allclose(pred_sol, pred_sol_tst, rtol=1e-4, atol=1e-5)

    def test_predict_02(self):
        # GPR is stochastic in its optimizer start; seed for reproducibility.
        np.random.seed(117)
        pod = POD(method='svd', rank=4)
        gpr = GPR()
        db = Database(param, snapshots.T)
        rom = ROM(db, pod, gpr).fit()
        pred_sol = rom.predict([-.45, -.45])
        np.testing.assert_allclose(pred_sol, pred_sol_gpr, rtol=1e-4, atol=1e-5)

    def test_predict_03(self):
        # A single-parameter query returns a single snapshot-shaped array.
        pod = POD(method='svd', rank=3)
        gpr = GPR()
        db = Database(param, snapshots.T)
        rom = ROM(db, pod, gpr).fit()
        pred_sol = rom.predict(db.parameters[2])
        assert pred_sol.shape == db.snapshots[0].shape

    def test_predict_04(self):
        # A batch of parameters returns a batch of snapshots.
        pod = POD(method='svd', rank=3)
        gpr = GPR()
        db = Database(param, snapshots.T)
        rom = ROM(db, pod, gpr).fit()
        pred_sol = rom.predict(db.parameters)
        assert pred_sol.shape == db.snapshots.shape

    def test_predict_scaler_01(self):
        # Predictions must come back in the unscaled (original) snapshot space.
        from sklearn.preprocessing import StandardScaler
        scaler = StandardScaler()
        pod = POD()
        rbf = RBF()
        db = Database(param, snapshots.T, scaler_snapshots=scaler)
        rom = ROM(db, pod, rbf).fit()
        pred_sol = rom.predict(db.parameters[0])
        np.testing.assert_allclose(pred_sol, db._snapshots[0], rtol=1e-4, atol=1e-5)
        pred_sol = rom.predict(db.parameters[0:2])
        np.testing.assert_allclose(pred_sol, db._snapshots[0:2], rtol=1e-4, atol=1e-5)

    def test_predict_scaler_02(self):
        # Both parameter and snapshot scalers applied together.
        from sklearn.preprocessing import StandardScaler
        scaler_p = StandardScaler()
        scaler_s = StandardScaler()
        pod = POD()
        rbf = RBF()
        db = Database(param, snapshots.T, scaler_parameters=scaler_p, scaler_snapshots=scaler_s)
        rom = ROM(db, pod, rbf).fit()
        pred_sol = rom.predict(db._parameters[0])
        np.testing.assert_allclose(pred_sol, db._snapshots[0], rtol=1e-4, atol=1e-5)
        pred_sol = rom.predict(db._parameters[0:2])
        np.testing.assert_allclose(pred_sol, db._snapshots[0:2], rtol=1e-4, atol=1e-5)

    def test_predict_scaling_coeffs(self):
        # Scaler applied to the reduced coefficients (fourth ROM argument).
        from sklearn.preprocessing import StandardScaler
        scaler = StandardScaler()
        pod = POD()
        rbf = RBF()
        db = Database(param, snapshots.T)
        rom = ROM(db, pod, rbf, scaler).fit()
        pred_sol = rom.predict(db._parameters[0])
        np.testing.assert_allclose(pred_sol, db._snapshots[0], rtol=1e-4, atol=1e-5)
        pred_sol = rom.predict(db._parameters[0:2])
        np.testing.assert_allclose(pred_sol, db._snapshots[0:2], rtol=1e-4, atol=1e-5)

    def test_test_error(self):
        # Full-rank POD reproduces the training database exactly.
        pod = POD(method='svd', rank=-1)
        rbf = RBF()
        db = Database(param, snapshots.T)
        rom = ROM(db, pod, rbf).fit()
        error = rom.test_error(db)
        np.testing.assert_almost_equal(error, 0, decimal=6)

    def test_kfold_cv_error_01(self):
        # k-fold with k == len(db) degenerates to leave-one-out.
        pod = POD()
        rbf = RBF()
        db = Database(param, snapshots.T)
        n_splits = len(db)
        rom = ROM(db, pod, rbf)
        err_kfold = rom.kfold_cv_error(n_splits=n_splits)
        err_loo = rom.loo_error()
        np.testing.assert_allclose(err_kfold, err_loo)

    def test_loo_error_01(self):
        # LOO returns one error per database entry for every approximation.
        pod = POD()
        rbf = RBF()
        gpr = GPR()
        rnr = RadiusNeighborsRegressor()
        knr = KNeighborsRegressor(n_neighbors=1)
        lin = Linear()
        db = Database(param, snapshots.T)
        exact_len = len(db)
        approximations = [rbf, gpr, knr, rnr, lin]
        roms = [ROM(db, pod, app) for app in approximations]
        len_errors = [len(rom.loo_error()) for rom in roms]
        np.testing.assert_allclose(len_errors, exact_len)

    def test_loo_error_02(self):
        # Regression value for the unnormalized LOO error of the first entry.
        pod = POD()
        gpr = GPR()
        db = Database(param, snapshots.T)
        rom = ROM(db, pod, gpr)
        err = rom.loo_error(normalizer=False)
        np.testing.assert_allclose(
            err[0],
            np.array(0.639247),
            rtol=1e-3)

    def test_loo_error_singular_values(self):
        # loo_error must not clobber the fitted reduction's singular values.
        pod = POD()
        rbf = RBF()
        db = Database(param, snapshots.T)
        rom = ROM(db, pod, rbf).fit()
        valid_svalues = rom.reduction.singular_values
        rom.loo_error()
        np.testing.assert_allclose(valid_svalues, rom.reduction.singular_values)

    def test_optimal_mu(self):
        # optimal_mu returns k suggested parameter points of full dimension.
        pod = POD()
        rbf = RBF()
        gpr = GPR()
        rnr = RadiusNeighborsRegressor()
        knr = KNeighborsRegressor(n_neighbors=1)
        lin = Linear()
        db = Database(param, snapshots.T)
        exact_len = param.shape[1]
        approximations = [rbf, gpr, knr, rnr, lin]
        for k in [1, 2]:
            roms = [ROM(db, pod, app).fit() for app in approximations]
            len_opt_mu = [rom.optimal_mu(k=k).shape[1] for rom in roms]
            np.testing.assert_allclose(len_opt_mu, exact_len)
            len_k = [rom.optimal_mu(k=k).shape[0] for rom in roms]
            np.testing.assert_allclose(len_k, k)
|
def qsort(l):
    """Return a sorted copy of *l* using a recursive quicksort.

    The first element is used as the pivot; duplicates of the pivot land in
    the right partition (``>=``), so duplicate values are preserved. The
    input list is not modified.
    """
    if not l:  # idiomatic emptiness test instead of `l == []`
        return []
    pivot, rest = l[0], l[1:]
    smaller = [x for x in rest if x < pivot]
    larger_or_equal = [x for x in rest if x >= pivot]
    return qsort(smaller) + [pivot] + qsort(larger_or_equal)
# Demo: sort a small sample list and print the result.
sample_values = [23, 12, 45, 99, 2, 6, 1, 0]
print(qsort(sample_values))
|
# -*- coding: utf-8 -*-
"""Tests for `adguardhome.stats`."""
import aiohttp
import pytest
from adguardhome import AdGuardHome
from adguardhome.exceptions import AdGuardHomeError
@pytest.mark.asyncio
async def test_dns_queries(event_loop, aresponses):
    """Test requesting AdGuard Home DNS query stats."""
    # Queue a canned JSON response for the stats endpoint.
    aresponses.add(
        "example.com:3000",
        "/control/stats",
        "GET",
        aresponses.Response(
            status=200,
            headers={"Content-Type": "application/json"},
            text='{"dns_queries": 666}',
        ),
    )
    async with aiohttp.ClientSession(loop=event_loop) as session:
        adguard = AdGuardHome("example.com", session=session, loop=event_loop)
        result = await adguard.stats.dns_queries()
        assert result == 666
@pytest.mark.asyncio
async def test_blocked_filtering(event_loop, aresponses):
    """Test requesting AdGuard Home filtering stats."""
    # blocked_filtering() should surface the raw counter from the payload.
    aresponses.add(
        "example.com:3000",
        "/control/stats",
        "GET",
        aresponses.Response(
            status=200,
            headers={"Content-Type": "application/json"},
            text='{"blocked_filtering": 1337}',
        ),
    )
    async with aiohttp.ClientSession(loop=event_loop) as session:
        adguard = AdGuardHome("example.com", session=session, loop=event_loop)
        result = await adguard.stats.blocked_filtering()
        assert result == 1337
@pytest.mark.asyncio
async def test_blocked_percentage(event_loop, aresponses):
    """Test requesting AdGuard Home filtering stats."""
    # 25 blocked out of 100 queries -> 25.0 percent.
    aresponses.add(
        "example.com:3000",
        "/control/stats",
        "GET",
        aresponses.Response(
            status=200,
            headers={"Content-Type": "application/json"},
            text='{"dns_queries": 100, "blocked_filtering": 25}',
        ),
    )
    async with aiohttp.ClientSession(loop=event_loop) as session:
        adguard = AdGuardHome("example.com", session=session, loop=event_loop)
        result = await adguard.stats.blocked_percentage()
        assert result == 25.0
@pytest.mark.asyncio
async def test_replaced_safebrowsing(event_loop, aresponses):
    """Test requesting AdGuard Home safebrowsing stats."""
    # Canned response exposing only the safebrowsing counter.
    aresponses.add(
        "example.com:3000",
        "/control/stats",
        "GET",
        aresponses.Response(
            status=200,
            headers={"Content-Type": "application/json"},
            text='{"replaced_safebrowsing": 42}',
        ),
    )
    async with aiohttp.ClientSession(loop=event_loop) as session:
        adguard = AdGuardHome("example.com", session=session, loop=event_loop)
        result = await adguard.stats.replaced_safebrowsing()
        assert result == 42
@pytest.mark.asyncio
async def test_replaced_parental(event_loop, aresponses):
    """Test requesting AdGuard Home parental control stats."""
    # Canned response exposing only the parental-control counter.
    aresponses.add(
        "example.com:3000",
        "/control/stats",
        "GET",
        aresponses.Response(
            status=200,
            headers={"Content-Type": "application/json"},
            text='{"replaced_parental": 13}',
        ),
    )
    async with aiohttp.ClientSession(loop=event_loop) as session:
        adguard = AdGuardHome("example.com", session=session, loop=event_loop)
        result = await adguard.stats.replaced_parental()
        assert result == 13
@pytest.mark.asyncio
async def test_replaced_safesearch(event_loop, aresponses):
    """Test requesting AdGuard Home safe search enforcement stats."""
    # Canned response exposing only the safe-search counter.
    aresponses.add(
        "example.com:3000",
        "/control/stats",
        "GET",
        aresponses.Response(
            status=200,
            headers={"Content-Type": "application/json"},
            text='{"replaced_safesearch": 18}',
        ),
    )
    async with aiohttp.ClientSession(loop=event_loop) as session:
        adguard = AdGuardHome("example.com", session=session, loop=event_loop)
        result = await adguard.stats.replaced_safesearch()
        assert result == 18
@pytest.mark.asyncio
async def test_avg_processing_time(event_loop, aresponses):
    """Test requesting AdGuard Home DNS average processing time stats."""
    # Float value is passed straight through from the JSON payload.
    aresponses.add(
        "example.com:3000",
        "/control/stats",
        "GET",
        aresponses.Response(
            status=200,
            headers={"Content-Type": "application/json"},
            text='{"avg_processing_time": 3.14}',
        ),
    )
    async with aiohttp.ClientSession(loop=event_loop) as session:
        adguard = AdGuardHome("example.com", session=session, loop=event_loop)
        result = await adguard.stats.avg_processing_time()
        assert result == 3.14
@pytest.mark.asyncio
async def test_period(event_loop, aresponses):
    """Test requesting AdGuard Home stats period."""
    # The period is reported as a human-readable string.
    aresponses.add(
        "example.com:3000",
        "/control/stats",
        "GET",
        aresponses.Response(
            status=200,
            headers={"Content-Type": "application/json"},
            text='{"stats_period": "24 hours"}',
        ),
    )
    async with aiohttp.ClientSession(loop=event_loop) as session:
        adguard = AdGuardHome("example.com", session=session, loop=event_loop)
        result = await adguard.stats.period()
        assert result == "24 hours"
@pytest.mark.asyncio
async def test_reset(event_loop, aresponses):
    """Test resetting all AdGuard Home stats."""
    # Two queued responses: the first ("OK") succeeds, the second
    # ("Not OK") must make reset() raise AdGuardHomeError.
    aresponses.add(
        "example.com:3000",
        "/control/stats_reset",
        "POST",
        aresponses.Response(status=200, text="OK"),
    )
    aresponses.add(
        "example.com:3000",
        "/control/stats_reset",
        "POST",
        aresponses.Response(status=200, text="Not OK"),
    )
    async with aiohttp.ClientSession(loop=event_loop) as session:
        adguard = AdGuardHome("example.com", session=session, loop=event_loop)
        result = await adguard.stats.reset()
        assert result
        with pytest.raises(AdGuardHomeError):
            await adguard.stats.reset()
|
import pywt

# Single-level Discrete Wavelet Transform of a short signal with the Haar
# ('db1') wavelet; cA holds approximation, cD detail coefficients.
cA, cD = pywt.dwt([1, 2, 3, 4], wavelet='db1')
print('cA:', cA)
print('cD:', cD)  # label fixed: was 'cD' (missing colon), inconsistent with 'cA:'
print('done')
filename = "test.txt"

# Count whitespace-separated words in *filename*.
# Fixes: the original iterated `lines.split` (a bound method of the list,
# which raises TypeError) instead of `line.split()`, and never closed the
# file handle; a `with` block handles both.
wordcount = 0
with open(filename, 'r') as infile:
    for line in infile:
        wordcount += len(line.split())
print(wordcount)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Create the PressContact model and its django-cms plugin model."""

    # Requires the cms app's CMSPlugin table to exist first.
    dependencies = [
        ('cms', '0003_auto_20140926_2347'),
    ]

    operations = [
        migrations.CreateModel(
            name='PressContact',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('descriptive_text', models.CharField(max_length=255)),
                ('name', models.CharField(max_length=255)),
                ('address', models.CharField(max_length=255)),
                ('email', models.CharField(max_length=255)),
                ('phone', models.CharField(max_length=64)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        # Plugin model linking a PressContact into CMS placeholders via the
        # shared cmsplugin table (parent_link OneToOne to cms.CMSPlugin).
        migrations.CreateModel(
            name='PressContactPluginModel',
            fields=[
                ('cmsplugin_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='cms.CMSPlugin')),
                ('press_contact', models.ForeignKey(related_name='plugins', to='press_contacts.PressContact')),
            ],
            options={
                'abstract': False,
            },
            bases=('cms.cmsplugin',),
        ),
    ]
|
import os
import subprocess
from glob import glob
cur_dir = os.path.split(os.path.realpath(__file__))[0]
class testTask(object):
    """Run an NCL time-series extraction script for one location and year.

    The matching script (readin<dataType>s.ncl) is resolved next to this
    file and executed via subprocess; output is streamed to stdout.
    """

    def __init__(self, lon, lat, year, targetloc):
        self.lon = lon
        self.lat = lat
        self.year = year
        self.targetloc = targetloc
        self.cancelflag = None  # set to True by stop() to abort run()

    def run(self, dataType: str):
        # readinERA5s.ncl readinSolarChinas.ncl
        nclFile = os.path.join(cur_dir, f"readin{dataType}s.ncl")
        # NCL command-line variable assignments must be single var="value"
        # tokens. Fixes: the original passed 'lon = "..."' with stray spaces
        # around '=' (inconsistent with lat/year/targetloc) and left the
        # datatype string value unquoted.
        shell_cmd = [
            'ncl', '-Q', '-n',
            f'lat="{self.lat}"',
            f'lon="{self.lon}"',
            f'year="{self.year}"',
            f'datatype="{dataType}"',
            f'targetloc="{self.targetloc}"',
            nclFile
        ]
        print(shell_cmd)
        p = subprocess.Popen(shell_cmd,
                             shell=False,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT,
                             # cwd=self.postdir,
                             preexec_fn=os.setpgrp)
        # Stream child output line by line until it exits, honouring the
        # cancel flag between lines.
        while p.poll() is None:
            line = p.stdout.readline()
            line = line.strip().decode('utf-8')
            print(line)
            if self.cancelflag:
                print("User cancel the process.")
                p.communicate()
                return

    def stop(self):
        """Request cancellation of a running task (checked in run's loop)."""
        self.cancelflag = True
        print("Use cancel process time series.")
if __name__ == '__main__':
    import json
    # location.txt maps target-location names to {'lat','lon','year'} dicts.
    with open(os.path.join("text", "location.txt"), "r") as f:
        s = json.load(f)
    # Run both extractions (ERA5 and SolarChina) for every location.
    for k in s:
        print(s[k])
        # print(s[k]['lat'])
        t = testTask(lat=s[k]['lat'], lon=s[k]['lon'], year=s[k]['year'],
                     targetloc=k)
        t.run(dataType='ERA5')
        t.run(dataType='SolarChina')
|
#!/usr/bin/env python3
from ROOT import TCanvas, TFile, TH1F,TH2F,TH3F, TF1, TGraph, gROOT
import os
import re
import glob
import sys
gROOT.SetBatch(True)  # no GUI windows in batch processing

# Directory holding the per-bunch-crossing digital-current histogram files.
dirName = '/sphenix/user/shulga/Work/TpcPadPlane_phi_coresoftware/coresoftware/calibrations/tpc/fillDigitalCurrentMaps/Files/'
#bXs = [1508071, 3016509, 4524020, 6032112, 7540028, 9048092, 10556072, 12064371, 13572143, 15080178, 16588072, 18096105]
#bXs = [18096105]

# Names of the 30 space-charge IBF histograms to merge.
h_names = []  # '_h_hits','_h_R','_h_DC_E'
for i in range(30):
    h_names.append('_h_SC_ibf_{}'.format(i))

ib = sys.argv[1]  # job/event index, used only in the output file name
bX = sys.argv[2]  # bunch-crossing id selecting the input files
print(bX)

name = 'hist_G4Hits_sHijing_0-12fm_bX{}*'.format(bX)
outputName = './Files/Summary_hist_mdc2_UseFieldMaps_AA_event_{}_bX{}_new.root'.format(ib, bX)
filePattern = dirName + name
files = sorted(glob.glob(filePattern))

# Accumulate each named histogram across all matched input files: the clone
# from the first file seeds the sum, later files are Add()-ed into it.
histos = []
for n, file in enumerate(files):
    f = TFile.Open(file)
    print(file)
    for h, h_name in enumerate(h_names):
        newName = h_name + '_{}'.format(n)
        if n == 0:
            newName = h_name
        hist = f.Get(h_name).Clone(newName)
        if n == 0:
            histos.append(hist)
        if n > 0:
            histos[h].Add(hist)
        hist.SetDirectory(0)  # detach from the input file so it survives closing

outfile = TFile(outputName, "RECREATE")
for hist in histos:
    hist.Sumw2(False)
    hist.Write()
outfile.Write()
outfile.Close()

# Remove all the used files
for file in files:
    if os.path.exists(file):
        os.remove(file)
    else:
        # Bug fix: the message used a '{}' placeholder but was never
        # formatted (print("...{}", file) printed the braces literally).
        print("Can not delete the file as it doesn't exist: {}".format(file))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*
#
# File: test_lpcli.py
#
# Copyright (C) 2012 Hsin-Yi Chen (hychen)
# Author(s): Hsin-Yi Chen (hychen) <ossug.hychen@gmail.com>
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
import unittest
import mock
from boliau.plugins.lp_cli import formater
class FormaterTestCase(unittest.TestCase):
    """Tests for boliau.plugins.lp_cli.formater."""

    def _mk_obj(self, **kwargs):
        """Build a MagicMock carrying the given attribute values."""
        obj = mock.MagicMock()
        for k, v in kwargs.items():
            setattr(obj, k, v)
        return obj

    def test_today_bugtaskstatus(self):
        """today_bugtask_status tallies bugtasks into per-status buckets."""
        data = [self._mk_obj(status='Fix Committed'),
                self._mk_obj(status='Fix Committed'),
                self._mk_obj(status='Fix Released'),
                self._mk_obj(status='Fix Released'),
                self._mk_obj(status='Fix Released')]
        res = formater.today_bugtask_status(data)
        # Fix: assertEquals is a deprecated alias removed in Python 3.12;
        # use assertEqual.
        self.assertEqual(dict, type(res))
        # 'date' varies per run, so drop it before the exact comparison.
        res.pop('date')
        self.assertEqual({'todo': 0,
                          'in-progress': 0,
                          'wont-fix': 0,
                          'fix-committed': 2,
                          'fix-released': 3},
                         res)
|
import unittest
import os
import uuid
import names
import random
from services.topic_service import TopicService
from services.user_service import UserService
from dotenv import load_dotenv
load_dotenv()
class TestGetTopics(unittest.TestCase):
    """Integration tests for TopicService.get_topics against a local SQLite store."""

    # NOTE(review): the doubled slash in '/data//datastores' is kept verbatim —
    # confirm it matches the path the connection string resolves to.
    LOCAL_DB_FILE = '/data//datastores/local_test.sqlite3'

    def setUp(self):
        # Point the services at a throwaway SQLite database and create one user.
        os.environ['USER_TOPICS_DATASTORE_CONNECTION_STRING'] = 'sqlite://' + self.LOCAL_DB_FILE + '?check_same_thread=False'
        self.user_service = UserService()
        self.topic_service = TopicService()
        self.user_name = names.get_full_name()
        self.user_id = self.user_service.create_user(self.user_name)

    def tearDown(self):
        self.user_service = None
        self.topic_service = None
        # The DB file is created relative to the CWD ('.' + absolute-looking path).
        if os.path.exists('.' + self.LOCAL_DB_FILE):
            os.remove('.' + self.LOCAL_DB_FILE)

    def test_get_topics_sunny_day(self):
        """Two created topics are both returned with all expected fields."""
        question_1 = self.make_random_sentence()
        answer_1 = self.make_random_sentence()
        topic_id_1 = self.topic_service.create_topic(self.user_id, question_1, answer_1)
        self.assertIsNotNone(topic_id_1)
        question_2 = self.make_random_sentence()
        answer_2 = self.make_random_sentence()
        topic_id_2 = self.topic_service.create_topic(self.user_id, question_2, answer_2)
        self.assertIsNotNone(topic_id_2)
        # get new topic and check if all fields are the same as expected
        topics = self.topic_service.get_topics(self.user_id)
        self.assertIsNotNone(topics)
        self.assertTrue(len(topics) == 2)
        for topic in topics:
            self.assertTrue(topic['user_id'] == self.user_id)
            self.assertTrue(topic['id'] in [topic_id_1, topic_id_2])
            self.assertTrue('question' in topic)
            self.assertTrue('answer' in topic)
            self.assertTrue('created' in topic)

    def test_get_topics_empty_list(self):
        # A valid user with no topics yields an empty list, not None.
        topics = self.topic_service.get_topics(self.user_id)
        self.assertIsNotNone(topics)
        self.assertTrue(len(topics) == 0)

    def test_get_topics_nonexisting_user(self):
        # An unknown user id yields None.
        nonexisting_user_id = str(uuid.uuid4())
        topics = self.topic_service.get_topics(nonexisting_user_id)
        self.assertIsNone(topics)

    def test_get_topics_empty_user_id(self):
        empty_user_id = ''
        topics = self.topic_service.get_topics(empty_user_id)
        self.assertIsNone(topics)

    def make_random_sentence(self):
        """Build a nonsense four-word sentence for use as fixture text."""
        nouns = ["puppy", "car", "rabbit", "girl", "monkey"]
        verbs = ["runs", "hits", "jumps", "drives", "barfs"]
        adv = ["crazily.", "dutifully.", "foolishly.", "merrily.", "occasionally."]
        adj = ["adorable", "clueless", "dirty", "odd", "stupid"]
        random_entry = lambda x: x[random.randrange(len(x))]
        return " ".join([random_entry(nouns), random_entry(verbs), random_entry(adv), random_entry(adj)])
|
import pandas as pd
import pygal
from pygal.style import LightStyle
render = pd.read_csv("op.csv")

# Scatter Plot
# NOTE(review): the title says "Depth versus Magnitude" but the x value
# plotted is the "location" column — confirm the intended columns.
xy_chart = pygal.XY(stroke=False)
xy_chart.title = 'Depth versus Magnitude where Earthquake magnitude was between 3.0 & 6.0'
for index, row in render.iterrows():
    # NOTE(review): "magnitude " (trailing space) presumably matches the CSV
    # header exactly — verify against op.csv.
    xy_chart.add('', [(row["location"], row["magnitude "])])
xy_chart.render_to_file('render_scatter.svg')
|
#!/usr/bin/env python
# encoding: utf-8
"""
@author: Tmomy
@time: 2018/1/10 17:26
"""
from flask import request
from flask_app import app
from service.base.opration_record import opr_list
from const import msg
from util.common import build_ret
@app.route("/opr/list", methods=["GET"])
def opr_search():
    """Paginated search of operation records.

    'page' and 'limit' are consumed from the query string; every remaining
    non-blank query parameter becomes an equality filter in *cond*.
    """
    parameter = request.args.to_dict()
    page = int(parameter.pop('page'))
    limit = int(parameter.pop('limit'))
    cond = {}
    for key, value in parameter.items():
        if value.strip():
            cond[key] = value.rstrip()  # only strip trailing whitespace from the string
    success, resp = opr_list(cond=cond, page=page, limit=limit)
    if not success:
        # On failure resp carries the error code.
        return build_ret(code=resp)
    return build_ret(code=msg.SYS_SUCCESS, data=resp['data'], total=resp['total'])
|
import npyscreen
import curses
class BoxTitleColor(npyscreen.BoxTitle):
    """BoxTitle variant that draws its frame, title, and footer using the
    widget's own colour pair (HILIGHT while the widget is being edited)."""

    def update(self, clear=True):
        # Let the base class render the contained widget first.
        super(BoxTitleColor, self).update(clear=clear)
        HEIGHT = self.height - 1
        WIDTH = self.width - 1
        box_attributes = curses.A_NORMAL
        # Pick the frame colour: widget colour normally, HILIGHT when editing.
        if self.do_colors() and not self.editing:
            box_attributes = box_attributes | self.parent.theme_manager.findPair(self, self.color) #| curses.A_BOLD
        elif self.editing:
            box_attributes = box_attributes | self.parent.theme_manager.findPair(self, 'HILIGHT')
        else:
            box_attributes = box_attributes #| curses.A_BOLD
        self.parent.curses_pad.attron(box_attributes)
        # draw box.
        self.parent.curses_pad.hline(self.rely, self.relx, curses.ACS_HLINE, WIDTH)
        self.parent.curses_pad.hline(self.rely + HEIGHT, self.relx, curses.ACS_HLINE, WIDTH)
        # NOTE(review): the left edge is drawn with length self.height while
        # the right edge uses HEIGHT (= height - 1) — confirm the asymmetry
        # is intended.
        self.parent.curses_pad.vline(self.rely, self.relx, curses.ACS_VLINE, self.height)
        self.parent.curses_pad.vline(self.rely, self.relx+WIDTH, curses.ACS_VLINE, HEIGHT)
        # draw corners
        self.parent.curses_pad.addch(self.rely, self.relx, curses.ACS_ULCORNER, )
        self.parent.curses_pad.addch(self.rely, self.relx+WIDTH, curses.ACS_URCORNER, )
        self.parent.curses_pad.addch(self.rely+HEIGHT, self.relx, curses.ACS_LLCORNER, )
        self.parent.curses_pad.addch(self.rely+HEIGHT, self.relx+WIDTH, curses.ACS_LRCORNER, )
        self.parent.curses_pad.attroff(box_attributes)
        # draw title
        if self.name:
            # Normalise the title to a safe, space-padded unicode string.
            if isinstance(self.name, bytes):
                name = self.name.decode(self.encoding, 'replace')
            else:
                name = self.name
            name = self.safe_string(name)
            name = " " + name + " "
            if isinstance(name, bytes):
                name = name.decode(self.encoding, 'replace')
            # Title colour mirrors the frame colour logic above, plus bold
            # while editing.
            name_attributes = curses.A_NORMAL
            if self.do_colors() and not self.editing:
                name_attributes = name_attributes | self.parent.theme_manager.findPair(self, self.color) #| curses.A_BOLD
            elif self.editing:
                name_attributes = name_attributes | self.parent.theme_manager.findPair(self, 'HILIGHT')
            else:
                name_attributes = name_attributes #| curses.A_BOLD
            if self.editing:
                name_attributes = name_attributes | curses.A_BOLD
            self.add_line(self.rely, self.relx+4, name,
                          self.make_attributes_list(name, name_attributes),
                          self.width-8)
        # end draw title
        # draw footer
        if hasattr(self, 'footer') and self.footer:
            footer_text = self.footer
            if isinstance(footer_text, bytes):
                footer_text = footer_text.decode(self.encoding, 'replace')
            footer_text = self.safe_string(footer_text)
            footer_text = " " + footer_text + " "
            if isinstance(footer_text, bytes):
                footer_text = footer_text.decode(self.encoding, 'replace')
            footer_attributes = self.get_footer_attributes(footer_text)
            # Right-align the footer when it fits, otherwise pin it at column 4.
            if len(footer_text) <= self.width - 4:
                placing = self.width - 4 - len(footer_text)
            else:
                placing = 4
            self.add_line(self.rely+HEIGHT, self.relx+placing, footer_text,
                          footer_attributes,
                          self.width-placing-2)
|
"""
Split and join huge files
"""
import os
import sys
import math
def split_list(data_list, max_size, file_size):
    """Partition *data_list* into ceil(max_size / file_size) contiguous chunks.

    Chunk length is ceil(len(data_list) / number_of_chunks); trailing chunks
    may be shorter (or empty) when the division is uneven.
    """
    n_chunks = math.ceil(max_size / file_size)
    chunk_len = math.ceil(len(data_list) / n_chunks)
    return [data_list[i * chunk_len:(i + 1) * chunk_len] for i in range(n_chunks)]
def write_splitted_files(data_splits, file_name, path_file):
    """Write each chunk in *data_splits* to '<base>_part_<i>.txt' under *path_file*."""
    file_prefix = file_name.split('.')[0]  # assuming that is a .txt, .csv
    for index, split in enumerate(data_splits):
        with open(os.path.join(path_file, file_prefix + '_part_' + str(index) + '.txt'), 'w') as output_file:
            # One line per list element, with a trailing newline.
            output_file.write('\n'.join(split) + '\n')
def define_path(path_file):
    """Return *path_file*, falling back to the current working directory when falsy."""
    return path_file if path_file else os.getcwd()
def split_files(path_file=None, file_size=1e+6, file_name=None):
    """Split files of at least *file_size* bytes under *path_file* into parts.

    If *file_name* is given, only that file is considered; otherwise every
    file in the directory whose on-disk size is >= file_size is split.
    Parts are written next to the originals by write_splitted_files().
    """
    path_file = define_path(path_file)
    files_path = os.listdir(path_file)
    measure_size = os.path.getsize
    selected_files = []
    if file_name:
        if measure_size(os.path.join(path_file, file_name)) >= file_size:
            selected_files.append(file_name)
    else:
        selected_files = list(filter(lambda x: measure_size(os.path.join(path_file, x)) >= file_size, files_path))
    for file in selected_files:
        with open(os.path.join(path_file, file), 'r') as split_file:
            # Split on newlines; chunk count is derived from the byte size.
            data_list = split_file.read().split('\n')
        data_splits = split_list(data_list, file_size, measure_size(os.path.join(path_file, file)))
        write_splitted_files(data_splits, file, path_file)
def join_files(string_seek, path_file=None):
    """Concatenate every file under *path_file* whose name contains
    *string_seek* into '<string_seek>_join.txt' in that directory.

    Files are concatenated in lexicographic name order (note: '_part_10'
    sorts before '_part_2' — numeric ordering is not applied).
    """
    path_file = define_path(path_file)
    files_path = os.listdir(path_file)
    join_files = list(filter(lambda x: string_seek in x, files_path))
    join_files = sorted(join_files)
    with open(os.path.join(path_file, string_seek + '_join.txt'), 'w') as output_file:
        for file in join_files:
            # Bug fix: parts were opened relative to the CWD instead of
            # path_file, so joining failed whenever path_file != os.getcwd().
            with open(os.path.join(path_file, file), 'r') as join_file:
                output_file.write(join_file.read() + '\n')
|
# .. Find the maximum total from top to bottom of the triangle ..
import networkx
class TriangleRoutes:
    """Project Euler 18/67: maximum top-to-bottom path sum in a number triangle.

    NOTE: Python 2 code (print statements; map() returning lists). The
    approach brute-forces every root-to-leaf path through a networkx digraph,
    which is exponential in the number of rows.
    """

    def __init__(self, filename):
        self.triangle = self.read_triangle(filename)

    def max_path(self):
        """Return the maximum path sum over all apex-to-bottom paths."""
        print "generating graph.."
        graph = self.build_graph()
        print "generating all simple paths.."
        path_list = self.build_paths(graph)
        sums = []
        print "calculating sums.."
        for path in path_list:
            current_sum = 0
            for node in path:
                # Node ids are "x:y" strings; look the value up in the triangle.
                (x, y) = map(int, node.split(':'))
                current_sum += self.triangle[y][x]
            sums.append(current_sum)
        print "determining max sum.."
        return max(sums)

    def build_paths(self, g):
        """Collect every simple path from the apex to each bottom-row node."""
        last_line = self.triangle[len(self.triangle)-1]
        path_list = []
        for i in range(0,len(last_line)):
            path_list.extend(list(networkx.all_simple_paths(g, self.hashify(0,0), self.hashify(i, len(self.triangle)-1))))
        return path_list

    def build_graph(self):
        """Build a DiGraph where each cell links to the two cells below it."""
        g = networkx.DiGraph()
        triangle = self.triangle
        # build nodes
        for y in range(0,len(triangle)):
            line = triangle[y]
            for x in range(0,len(line)):
                g.add_node(self.hashify(x,y), value=triangle[y][x])
        # connect nodes (skip the last row, which has no children)
        for y in range(0,len(triangle)):
            line = triangle[y]
            if not y == len(triangle)-1:
                for x in range(0,len(line)):
                    g.add_edge(self.hashify(x,y), self.hashify(x,y+1))
                    g.add_edge(self.hashify(x,y), self.hashify(x+1,y+1))
        return g

    def hashify(self, x, y):
        # Stable string id "x:y" for a triangle cell.
        return str(x) + ':' + str(y)

    def read_triangle(self, filename):
        """Read a space-separated triangle of ints, one row per line."""
        f = open(filename, 'r')
        triangle = []
        for line in f:
            triangle.append(map(int, line.split(' ')))
        return triangle
if __name__ == '__main__':
    # Expects problem18_large.txt in the working directory (Python 2 entry point).
    triangleroutes = TriangleRoutes("problem18_large.txt")
    print triangleroutes.max_path()
|
# SERVICE CODES
# NOTE(review): magnitudes suggest ids are composed as
# service_code + genre_code + per-genre index — confirm against the consumer.
services = { 'netflix': 10000000 }
# GENRE CODES
genre_codes = { 'action': 100000, 'anime': 200000, 'comedy': 300000, 'drama': 400000, 'horror': 500000,
'musical': 600000, 'romance': 700000, 'scifi': 800000, 'thriller': 900000 }
# Number of titles available per service/genre combination.
service_genre_sizes = { 'netflix_action_size': 168, 'netflix_anime_size':1319, 'netflix_comedy_size': 1079, 'netflix_drama_size': 1111, 'netflix_horror_size': 488,
'netflix_musical_size': 559, 'netflix_romance_size': 951, 'netflix_scifi_size': 718, 'netflix_thriller_size': 720}
#!/usr/bin/env python
__author__ = "rifatul.islam"  # fixed: was `_author_`, not the dunder convention

# Read a count n and a line of space-separated integers, then print the
# first n of them in reverse order, space-separated with a trailing space
# (matching the original output format exactly).
n = int(input().strip())
arr = [int(arr_temp) for arr_temp in input().strip().split(' ')]
for x in reversed(arr[:n]):
    print(x, end=" ")
# Import Dependencies
import pytesseract
import argparse
import cv2
import os
import numpy as np
import pandas as pd
import time
from pdf2image import convert_from_path
from PIL import Image
# Set pytesseract path on local machine
pytesseract.pytesseract.tesseract_cmd = r'C:\Program Files\Tesseract-OCR\tesseract.exe'
# Define Individual Functions Per File Type
def df_to_txt(df):
    """Flatten a DataFrame into one text blob, one line per column.

    Expects all cells to already be strings (callers apply applymap(str)).
    """
    # String to append to, specify source type
    text = "Pandas DataFrame: \n"
    # Get list of columns
    df_cols = df.columns.tolist()
    # Extract data from each column
    for item in df_cols:
        text = text + ' '.join(df[item].tolist())
        text = text + '\n'
    # NOTE(review): this sleep looks like a pacing/debug artifact with no
    # functional role here — confirm it is still needed.
    time.sleep(2)
    return text
def csv_to_txt(path):
    """Read the CSV at *path* and return its contents as one text blob."""
    # Initialize string
    doc_text = ''
    # Read in csv
    df_csv = pd.read_csv(path)
    # Stringify every cell before flattening via df_to_txt.
    df_csv = df_csv.applymap(str).copy()
    doc_text = df_to_txt(df_csv)
    return doc_text
def excel_to_txt(path):
    """Read the Excel workbook at *path* and return its contents as one text blob."""
    # Initialize string
    doc_text = ''
    # Read in the first sheet
    df_excel = pd.read_excel(path)
    # Stringify every cell before flattening via df_to_txt.
    df_excel = df_excel.applymap(str).copy()
    doc_text = df_to_txt(df_excel)
    return doc_text
def ocr_pdf(path):
    """OCR every page of the PDF at *path* and return the concatenated text.

    Each page is rendered to a temporary JPEG in the CWD, OCR'd with
    pytesseract, then deleted. Pages after the first are prefixed with a
    "Page Number: N" label.
    """
    doc_text = ''
    pages = convert_from_path(path)
    # Bug fix: the original incremented the counter before building the
    # label, so page N was labelled "Page Number: N+1". enumerate keeps the
    # label and the page in sync.
    for page_number, page in enumerate(pages, start=1):
        # Create image with unique page name
        filename = "page_" + str(page_number) + ".jpg"
        page.save(filename, "JPEG")
        # Read text from image and discard image
        text = pytesseract.image_to_string(Image.open(filename))
        os.remove(filename)
        # Concatenate text to one string
        if doc_text == '':
            doc_text = text
        else:
            doc_text = doc_text + "Page Number: " + str(page_number) + '\n' + text
    # Return text result
    return doc_text
def docs_to_txt(path):
    """Convert every supported document in directory *path* to a .txt sibling.

    Returns the list of extracted text strings, one per document. (Bug fix:
    the original built this list but never returned it, so the caller in
    ocr_directories always received None.)
    """
    # Go into grant folder
    os.chdir(path)
    # Get list of documents
    doc_files = os.listdir()
    grant_docs = []
    # Go through documents
    for doc in doc_files:
        doc_text = ""
        # splitext handles names containing extra dots, which the original
        # doc.split('.') indexing did not.
        file_name, extension = os.path.splitext(doc)
        extension = extension.lower()
        # Handle different file type cases
        if extension == '.pdf':
            doc_text = ocr_pdf(doc)
        elif extension == '.csv':
            doc_text = csv_to_txt(doc)
        elif extension in ('.xlsx', '.xls'):
            doc_text = excel_to_txt(doc)
        else:
            doc_text = 'File Format ' + extension[1:] + " not currently supported."
        print('\n***** DOC PROCESSED: ')
        print(doc + ": " + doc_text[:100])
        # Write companion text file with the same base name
        f = open(f'{file_name}.txt', "w+", encoding="utf-16", errors='ignore')
        f.write(doc_text)
        f.close()
        # Append to list of strings/docs
        grant_docs.append(doc_text)
        time.sleep(2)
    # Leave grant folder
    os.chdir('..')
    return grant_docs
def ocr_directories(path):
    """Walk each grant sub-directory under *path* and convert its docs to .txt."""
    # Go into parent directory
    os.chdir(path)
    # Get list of grants/directories
    grant_folders = os.listdir()
    content = []
    # Go through each grant directory
    for g_path in grant_folders:
        # Get unique grant id (the directory name)
        grant_id = g_path
        # Create .txt versions of each doc in dir
        grant_docs = docs_to_txt(g_path)
        content.append(grant_docs)
        print('************** GRANT PROCESSED')
    # grant_dict = {'Grant_ID':grant_folders, 'Content':content}
    # df = pd.DataFrame(grant_dict)
    os.chdir('..')
    # return(df)
def clear_jpgs(path):
    """Delete leftover .jpg page images from every grant folder under *path*.

    The current working directory is restored before returning (enter *path*,
    visit each sub-directory, then step back out).
    """
    os.chdir(path)
    for entry in os.listdir():
        # Skip stray regular files at the top level -- the original
        # os.chdir(entry) crashed on anything that was not a directory.
        if not os.path.isdir(entry):
            continue
        os.chdir(entry)
        for fn in os.listdir():
            if fn.endswith(".jpg"):
                os.remove(fn)
        os.chdir('..')
    os.chdir('..')
def clear_txt(path):
    """Delete leftover .txt files from every grant folder under *path*."""
    os.chdir(path)
    for grant_dir in os.listdir():
        os.chdir(grant_dir)
        # Collect first, then delete, so we never mutate while listing.
        stale = [fn for fn in os.listdir() if fn.endswith(".txt")]
        for fn in stale:
            os.remove(fn)
        os.chdir('..')
    os.chdir('..')
# Clear residual files for new run
# (remove .jpg page images and .txt outputs left by a previous pass so the
# fresh OCR run starts from a clean tree)
clear_jpgs('proposal_docs_for_ocr')
clear_txt('proposal_docs_for_ocr')
# Start new run
ocr_directories('proposal_docs_for_ocr')
# ***** Dataframe export functionality commented out,
# ***** Uncommented in functions above to reactivate
# ***** Add count of documents per grant id
# NOTE(review): if reactivated, `df[Doc_count]` below is missing quotes and
# would raise NameError -- should be df['Doc_count'].
# doc_counts = []
# for e,row in df.iterrows():
#     doc_counts.append(len(row.Content))
# df[Doc_count] = doc_counts
# df.head()
# ***** Export df
# df.to_csv('grant_content.csv')
# Generated by Django 3.0.8 on 2020-09-27 21:08
import datetime
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema: creates the Ad model for classified listings.

    NOTE: applied migrations are part of the database history -- do not edit
    this file retroactively; add a follow-up migration instead.
    """
    initial = True
    dependencies = [
        # Depends on whatever model AUTH_USER_MODEL points at (Ad.seller FK).
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Ad',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(max_length=100)),
                ('location', models.CharField(max_length=100)),
                # NOTE(review): 'Northen' looks like a typo for 'Northern';
                # choice values are stored data, so fixing it needs a
                # follow-up data+schema migration, not an edit here.
                ('region', models.CharField(choices=[('Upper West', 'Upper West'), ('Upper East', 'Upper East'), ('North East', 'North East'), ('Northen', 'Northen'), ('Savannah', 'Savannah'), ('Bono East', 'Bono East'), ('Brong Ahafo', 'Brong Ahafo'), ('Oti', 'Oti'), ('Ahafo', 'Ahafo'), ('Ashanti', 'Ashanti'), ('Volta', 'Volta'), ('Greater Accra', 'Greater Accra'), ('Western North', 'Western North'), ('Western', 'Western'), ('Eastern', 'Eastern'), ('Central', 'Central')], max_length=100)),
                ('category', models.CharField(choices=[('Mobile Phones', 'Mobile Phones'), ('Mobile Accessories', 'Mobile Phone Accessories'), ('Computer Accessories', 'Computer Accessories'), ('TVs', 'TVs'), ('Cameras & Camcorders', 'Cameras & Camcorders'), ('Audio & MP3', 'Audio & MP3'), ('Other Electronics', 'Other Electronics')], max_length=100)),
                ('price', models.IntegerField()),
                ('brand', models.CharField(max_length=200)),
                ('negotiable', models.BooleanField(default=False)),
                # Uploaded images are bucketed by date: photos/YYYY/MM/DD.
                ('main_photo', models.ImageField(upload_to='photos/%Y/%m/%d')),
                ('photo_1', models.ImageField(upload_to='photos/%Y/%m/%d')),
                ('photo_2', models.ImageField(upload_to='photos/%Y/%m/%d')),
                ('photo_3', models.ImageField(upload_to='photos/%Y/%m/%d')),
                ('description', models.TextField()),
                # NOTE(review): datetime.datetime.now (timezone-naive) as the
                # default of a DateField -- Django coerces to a date; confirm
                # this matches the project's USE_TZ setting.
                ('date_posted', models.DateField(default=datetime.datetime.now)),
                ('seller', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL)),
            ],
            options={
                # Newest ads first by default.
                'ordering': ['-date_posted'],
            },
        ),
    ]
|
import pytest
from thefuck.rules.omnienv_no_such_command import get_new_command, match
from thefuck.types import Command
@pytest.fixture
def output(pyenv_cmd):
    """Error text pyenv emits for an unknown subcommand."""
    return "pyenv: no such command `%s'" % pyenv_cmd
@pytest.fixture(autouse=True)
def Popen(mocker):
    """Patch Popen so `pyenv commands` yields a fixed command list."""
    known_commands = (
        b'--version\nactivate\ncommands\ncompletions\ndeactivate\nexec_\n'
        b'global\nhelp\nhooks\ninit\ninstall\nlocal\nprefix_\n'
        b'realpath.dylib\nrehash\nroot\nshell\nshims\nuninstall\nversion_\n'
        b'version-file\nversion-file-read\nversion-file-write\nversion-name_\n'
        b'version-origin\nversions\nvirtualenv\nvirtualenv-delete_\n'
        b'virtualenv-init\nvirtualenv-prefix\nvirtualenvs_\n'
        b'virtualenvwrapper\nvirtualenvwrapper_lazy\nwhence\nwhich_\n'
    ).split()
    popen_mock = mocker.patch('thefuck.rules.omnienv_no_such_command.Popen')
    popen_mock.return_value.stdout.readlines.return_value = known_commands
    return popen_mock
@pytest.mark.parametrize('script, pyenv_cmd', [
    ('pyenv globe', 'globe'),
    ('pyenv intall 3.8.0', 'intall'),
    ('pyenv list', 'list'),
])
def test_match(script, pyenv_cmd, output):
    """Misspelled/unknown pyenv subcommands are matched by the rule."""
    command = Command(script, output=output)
    assert match(command)
def test_match_goenv_output_quote():
    """goenv quotes the unknown command with ' instead of ` -- still matched."""
    goenv_command = Command('goenv list', output="goenv: no such command 'list'")
    assert match(goenv_command)
@pytest.mark.parametrize('script, output', [
    ('pyenv global', 'system'),
    ('pyenv versions', ' 3.7.0\n 3.7.1\n* 3.7.2\n'),
    ('pyenv install --list', ' 3.7.0\n 3.7.1\n 3.7.2\n'),
])
def test_not_match(script, output):
    """Successful pyenv invocations must not trigger the rule."""
    command = Command(script, output=output)
    assert not match(command)
@pytest.mark.parametrize('script, pyenv_cmd, result', [
    ('pyenv globe', 'globe', 'pyenv global'),
    ('pyenv intall 3.8.0', 'intall', 'pyenv install 3.8.0'),
    ('pyenv list', 'list', 'pyenv install --list'),
    ('pyenv remove 3.8.0', 'remove', 'pyenv uninstall 3.8.0'),
])
def test_get_new_command(script, pyenv_cmd, output, result):
    """The expected correction appears among the rule's suggestions."""
    suggestions = get_new_command(Command(script, output))
    assert result in suggestions
|
#!/usr/bin/env python3
import unittest
from simple_match_test import SimpleMatchTest
from simple_search_test import SimpleSearchTest
from simple_findall_test import SimpleFindallTest
from simple_sub_test import SimpleSubTest
from advanced_sub_test import AdvancedSubTest
if __name__ == '__main__':
    # unittest.main() discovers the TestCase classes imported above (they
    # live in this module's namespace) and runs them all.
    unittest.main()
|
# -*- coding: utf-8 -*-
class ResponseBase(object):
    """Response object that only stores whitelisted attributes.

    Assigning any key not listed in ``_allow_keys`` is silently ignored:
    the attribute is simply never written to the instance dict.
    """
    _allow_keys = ['result']
    def __setattr__(self, key, value):
        if key not in self._allow_keys:
            # Silently drop non-whitelisted attributes (return value of
            # __setattr__ is ignored by normal attribute assignment).
            return False
        self.__dict__[key] = value
# Concrete response types, one per service operation. They all inherit the
# whitelisted-attribute behaviour from ResponseBase and add nothing else;
# the distinct classes exist only so callers can distinguish response kinds.
class ResponseAddTopic(ResponseBase):
    pass
class ResponseGetTopicList(ResponseBase):
    pass
class ResponseGetTopic(ResponseBase):
    pass
class ResponseAddPictures(ResponseBase):
    pass
class ResponseAddPictureContent(ResponseBase):
    pass
class ResponseGetPictureList(ResponseBase):
    pass
class ResponseGetPictureTotal(ResponseBase):
    pass
class ResponseUpdateStat(ResponseBase):
    pass
class ResponseAddPictureDetect(ResponseBase):
    pass
if __name__ == '__main__':
    # Smoke test: 'result' is whitelisted, so the assignment sticks.
    df = ResponseGetPictureTotal()
    df.result = 12
    # print(...) with a single argument behaves identically on Python 2 and
    # Python 3; the original `print type(...)` statement was Python-2 only.
    print(type(df.result))
|
import json
import http
from backend.models import AnnotationProject, EvaluationProject,\
ProjectCategory, ProjectType
from tests.fixture import init_db, test_client
def create_proj_resp(test_client, project_type, name, project_category=''):
    """POST a project-creation request and return the raw response.

    :param test_client: Flask test client fixture.
    :param project_type: ProjectType value ('annotation' / 'evaluation'),
        matched case-insensitively.
    :param name: project name to create.
    :param project_category: required for evaluation projects; one of the
        ProjectCategory values, matched case-insensitively.
    :returns: the test-client response, or None when the type/category
        combination is not recognised (same fall-through as the original).
    """
    # Fields shared by every project-creation payload.
    payload = dict(
        name=name,
        dataset_name='BBC_Sample',
        total_exp_results=3,
    )
    if project_type.lower() == ProjectType.ANNOTATION.value.lower():
        payload['category'] = ProjectCategory.HIGHLIGHT.value
        return test_client.post(
            '/project/%s' % ProjectType.ANNOTATION.value,
            data=json.dumps(payload),
            content_type='application/json'
        )
    if project_type.lower() == ProjectType.EVALUATION.value.lower():
        # All evaluation categories share the same payload shape; the
        # original had three copy-pasted branches differing only in category.
        eval_categories = (
            ProjectCategory.INFORMATIVENESS_DOC,
            ProjectCategory.INFORMATIVENESS_REF,
            ProjectCategory.FLUENCY,
        )
        for category in eval_categories:
            if project_category.lower() == category.value.lower():
                payload['category'] = category.value
                payload['summ_group_name'] = 'BBC_Sample_ref_gold'
                return test_client.post(
                    '/project/%s' % ProjectType.EVALUATION.value,
                    data=json.dumps(payload),
                    content_type='application/json'
                )
    return None  # unknown type/category: make the implicit None explicit
def test_project_annotation_result(test_client, init_db):
    """End-to-end: create an annotation project, fetch a doc, save a result."""
    # Create project
    response = create_proj_resp(
        test_client,
        ProjectType.ANNOTATION.value,
        name='Test_Create_Result_Annotation'
    )
    assert response.status_code == http.HTTPStatus.CREATED
    # Get project
    response = test_client.get(
        '/project/get/annotation/%s' % 'Test_Create_Result_Annotation')
    assert response.status_code == http.HTTPStatus.OK
    # The original `len(...) > 0 is not None` was an accidental chained
    # comparison; the intent is simply a non-empty response body.
    assert len(response.get_json()) > 0
    project_id = list(response.get_json().keys())[0]
    # Get document
    response = test_client.get(
        '/project/%s/%s/%s/single_doc' %
        (ProjectType.ANNOTATION.value, ProjectCategory.HIGHLIGHT.value, project_id))
    doc_status_id = response.get_json()['doc_status_id']
    # Post result
    annotation_result_json = {
        'project_id': project_id,
        'status_id': doc_status_id,
        'result_json': {
            'highlights': {},
            'components': [],
            'words': [],
        },
        'category': 'highlight',
        'validity': True,
        'email': 'test@test.com',
        'mturk_code': 'test123',
    }
    # NOTE(review): URL has no leading '/' (unlike the GETs above); the test
    # client appears to accept it -- confirm intended.
    response = test_client.post('project/save_result/annotation',
                                data=json.dumps(annotation_result_json),
                                content_type='application/json')
    assert response.status_code == http.HTTPStatus.CREATED
def test_project_eval_inf_doc_result(test_client, init_db):
    """End-to-end: create a doc-informativeness project and save a result."""
    # Create project
    response = create_proj_resp(
        test_client,
        ProjectType.EVALUATION.value,
        project_category=ProjectCategory.INFORMATIVENESS_DOC.value,
        name='Test_Create_Result_Evaluation_InfDoc'
    )
    assert response.status_code == http.HTTPStatus.CREATED
    # Get project
    response = test_client.get(
        '/project/get/evaluation/%s' % 'Test_Create_Result_Evaluation_InfDoc')
    assert response.status_code == http.HTTPStatus.OK
    # Fixed accidental chained comparison (`> 0 is not None`).
    assert len(response.get_json()) > 0
    project_id = list(response.get_json().keys())[0]
    # Get document
    response = test_client.get(
        '/project/%s/%s/%s/single_doc' %
        (ProjectType.EVALUATION.value, ProjectCategory.INFORMATIVENESS_DOC.value, project_id))
    assert response.status_code == http.HTTPStatus.OK
    summ_status_id = response.get_json()['summ_status_id']
    # Post result
    evaluation_result_json = {
        'project_id': project_id,
        'status_id': summ_status_id,
        'precision': 1.0,
        'recall': 1.0,
        'category': ProjectCategory.INFORMATIVENESS_DOC.value,
        'validity': True,
        'email': 'test@test.com',
        'mturk_code': 'test123',
    }
    response = test_client.post('project/save_result/evaluation',
                                data=json.dumps(evaluation_result_json),
                                content_type='application/json')
    assert response.status_code == http.HTTPStatus.CREATED
def test_project_eval_inf_ref_result(test_client, init_db):
    """End-to-end: create a ref-informativeness project and save a result."""
    # Create project
    response = create_proj_resp(
        test_client,
        ProjectType.EVALUATION.value,
        project_category=ProjectCategory.INFORMATIVENESS_REF.value,
        name='Test_Create_Result_Evaluation_InfRef'
    )
    assert response.status_code == http.HTTPStatus.CREATED
    # Get project
    response = test_client.get(
        '/project/get/evaluation/%s' % 'Test_Create_Result_Evaluation_InfRef')
    assert response.status_code == http.HTTPStatus.OK
    # Fixed accidental chained comparison (`> 0 is not None`).
    assert len(response.get_json()) > 0
    project_id = list(response.get_json().keys())[0]
    # Get document
    response = test_client.get(
        '/project/%s/%s/%s/single_doc' %
        (ProjectType.EVALUATION.value, ProjectCategory.INFORMATIVENESS_REF.value, project_id))
    assert response.status_code == http.HTTPStatus.OK
    summ_status_id = response.get_json()['summ_status_id']
    # Post result
    evaluation_result_json = {
        'project_id': project_id,
        'status_id': summ_status_id,
        'precision': 1.0,
        'recall': 1.0,
        'category': ProjectCategory.INFORMATIVENESS_REF.value,
        'validity': True,
        'email': 'test@test.com',
        'mturk_code': 'test123',
    }
    response = test_client.post('project/save_result/evaluation',
                                data=json.dumps(evaluation_result_json),
                                content_type='application/json')
    assert response.status_code == http.HTTPStatus.CREATED
# def test_project_eval_fluency_result(test_client, init_db):
# # Create project
# response = create_proj_resp(
# test_client,
# ProjectType.EVALUATION.value,
# project_category=ProjectCategory.FLUENCY.value,
# name='Test_Create_Result_Evaluation_Fluency'
# )
# assert response.status_code == http.HTTPStatus.CREATED
# # Get project
# response = test_client.get(
# '/project/get/evaluation/%s' % 'Test_Create_Result_Evaluation_Fluency')
# assert response.status_code == http.HTTPStatus.OK
# assert len(response.get_json()) > 0 is not None
# project_id = list(response.get_json().keys())[0]
# # Get document
# response = test_client.get(
# '/project/%s/%s/%s/single_doc' %
# (ProjectType.EVALUATION.value, ProjectCategory.FLUENCY.value, project_id))
# assert response.status_code == http.HTTPStatus.OK
# summ_status_id = response.get_json()['summ_status_id']
# # Post result
# evaluation_result_json = {
# 'project_id': project_id,
# 'status_id': summ_status_id,
# 'fluency': 1.0,
# 'clarity': 1.0,
# 'category': ProjectCategory.FLUENCY.value,
# }
# response = test_client.post('project/save_result/evaluation',
# data=json.dumps(evaluation_result_json),
# content_type='application/json')
# assert response.status_code == http.HTTPStatus.CREATED
def test_project_create_annotation(test_client, init_db):
    """Creating an annotation project persists it under its name."""
    response = create_proj_resp(
        test_client,
        ProjectType.ANNOTATION.value,
        name='Test_Create_Annotation'
    )
    assert response.status_code == http.HTTPStatus.CREATED
    created = AnnotationProject.query.filter_by(name='Test_Create_Annotation').first()
    assert created is not None
def test_project_create_evaluation(test_client, init_db):
    """Creating evaluation projects (doc and ref variants) persists them."""
    cases = [
        (ProjectCategory.INFORMATIVENESS_DOC.value, 'Test_Create_Evaluation_Doc'),
        (ProjectCategory.INFORMATIVENESS_REF.value, 'Test_Create_Evaluation_Ref'),
    ]
    for category, proj_name in cases:
        response = create_proj_resp(
            test_client,
            ProjectType.EVALUATION.value,
            project_category=category,
            name=proj_name
        )
        assert response.status_code == http.HTTPStatus.CREATED
        created = EvaluationProject.query.filter_by(name=proj_name).first()
        assert created is not None
def test_project_get_all_progress_annotation(test_client, init_db):
    """The all-progress endpoint lists annotation projects with progress."""
    create_proj_resp(test_client, ProjectType.ANNOTATION.value, 'Test_Progress_All_Annotation')
    response = test_client.get('/project/all_progress/annotation')
    assert response.status_code == http.HTTPStatus.OK
    projects = response.get_json()['projects']
    assert len(projects) > 0
    assert projects[0]['dataset_name'] == 'BBC_Sample'
    assert 'progress' in projects[0]
def test_project_get_all_progress_evaluation(test_client, init_db):
    """The all-progress endpoint lists evaluation projects with progress."""
    create_proj_resp(test_client, ProjectType.EVALUATION.value,
                     'Test_Progress_All_Evaluation', ProjectCategory.INFORMATIVENESS_REF.value)
    response = test_client.get('/project/all_progress/evaluation')
    assert response.status_code == http.HTTPStatus.OK
    projects = response.get_json()['projects']
    assert len(projects) > 0
    assert projects[0]['dataset_name'] == 'BBC_Sample'
    assert 'progress' in projects[0]
def test_project_get_single_unfinished_doc_annotation(test_client, init_db):
    """Fetching a single unfinished doc for an annotation project succeeds."""
    create_proj_resp(test_client, ProjectType.ANNOTATION.value, 'Test_Single_Annotation')
    project = AnnotationProject.query.filter_by(name='Test_Single_Annotation').first()
    # Use the category's .value: the bare enum member was interpolated before,
    # producing 'ProjectCategory.HIGHLIGHT' in the URL -- inconsistent with
    # every sibling test in this module.
    response = test_client.get(
        '/project/%s/%s/%s/single_doc' %
        (ProjectType.ANNOTATION.value, ProjectCategory.HIGHLIGHT.value, project.id))
    assert response.status_code == http.HTTPStatus.OK
def test_project_get_single_unfinished_summ_evaluation(test_client, init_db):
    """Fetching a single unfinished summary works for doc and ref variants."""
    cases = [
        ('Test_Single_Evaluation_Doc', ProjectCategory.INFORMATIVENESS_DOC.value),
        ('Test_Single_Evaluation_Ref', ProjectCategory.INFORMATIVENESS_REF.value),
    ]
    for proj_name, category in cases:
        create_proj_resp(test_client, ProjectType.EVALUATION.value,
                         name=proj_name,
                         project_category=category)
        project = EvaluationProject.query.filter_by(name=proj_name).first()
        response = test_client.get(
            '/project/%s/%s/%s/single_doc' %
            (ProjectType.EVALUATION.value, category, project.id)
        )
        assert response.status_code == http.HTTPStatus.OK
def test_doc_status(test_client, init_db):
    """A freshly created annotation project starts at 0.00 progress."""
    response = create_proj_resp(
        test_client,
        ProjectType.ANNOTATION.value,
        project_category=ProjectCategory.HIGHLIGHT.value,
        name='Test_Doc_Status'
    )
    assert response.status_code == http.HTTPStatus.CREATED
    response = test_client.get(
        '/project/get/annotation/%s' % 'Test_Doc_Status')
    projects = response.get_json()
    assert len(projects) > 0
    project_id = next(iter(projects))
    response = test_client.get('/doc_status/progress/%s' % project_id)
    assert response.get_json()['progress'] == '0.00'
def test_project_get_progress_annotation(test_client, init_db):
    """Per-project progress for an annotation project lists its documents."""
    created = create_proj_resp(test_client, ProjectType.ANNOTATION.value, 'Test_Progress_Annotation')
    assert created.status_code == http.HTTPStatus.CREATED
    project = AnnotationProject.query.filter_by(name='Test_Progress_Annotation').first()
    response = test_client.get('/project/progress/annotation/%s' % project.id)
    assert response.status_code == http.HTTPStatus.OK
    body = response.get_json()
    assert 'documents' in body
    assert len(body['documents']) > 0
def test_project_get_progress_evaluation(test_client, init_db):
    """Per-project progress for an evaluation project lists its systems."""
    created = create_proj_resp(test_client, ProjectType.EVALUATION.value,
                               project_category=ProjectCategory.INFORMATIVENESS_DOC.value,
                               name='Test_Progress_Evaluation')
    assert created.status_code == http.HTTPStatus.CREATED
    project = EvaluationProject.query.filter_by(name='Test_Progress_Evaluation').first()
    response = test_client.get('/project/progress/evaluation/%s' % project.id)
    assert response.status_code == http.HTTPStatus.OK
    body = response.get_json()
    assert 'systems' in body
    assert len(body['systems']) > 0
|
import numpy as np
from numpy import random
class GeneticAlgorithm(object):
    """Abstract genetic-algorithm skeleton.

    Subclasses must implement individual(), fitness() and mutate() (and
    typically target()). Selection keeps the individuals with the LOWEST
    fitness score, so fitness is treated as a distance-to-target to minimise.
    """
    def __init__(self, population_size, pop_turn_over=0.2,
                 diversification=0.05, mutation_prob=0.01,
                 seed=1):
        """
        This is how we define a genetic algorithm, independently of its methods
        :param population_size: defines the number of individuals available for test at
        each generation
        :param pop_turn_over: the turnover in the population at each generation (death rate)
        :param diversification: the probability that a non-fit individual is saved by chance
        :param mutation_prob: the probability for one individual to mutate
        :param seed: seed for numpy's global RNG, for reproducible runs
        """
        self.population_size = population_size
        self.pop_turn_over = pop_turn_over
        self.mutation_prob = mutation_prob
        self.diversification = diversification
        self.population = None
        # Filled in by select(): the surviving (fit) individuals and the rest.
        self.fit = None
        self.non_fit = None
        random.seed(seed)
    def individual(self):
        """
        Depending on your implementation:
        How do you create a random initial individual?
        """
        raise NotImplementedError
    def fitness(self, individual, target):
        """
        How do you measure the fitness of your individual given your target
        (lower is better -- see select()).
        """
        raise NotImplementedError
    def target(self):
        """
        Create a target
        """
        raise NotImplementedError
    def select(self):
        """
        Keep the fittest pop_turn_over fraction of the population in self.fit
        and the remainder in self.non_fit.

        NOTE(review): fitness() is handed the `target` method object itself
        (not the result of calling it) -- presumably subclasses override
        target as an attribute or property; confirm before relying on it.
        """
        evaluated = [(self.fitness(x, self.target), x) for x in self.population]
        # Sort on the score alone. The original sorted() compared whole
        # (score, individual) tuples, which breaks ties by comparing the
        # individuals themselves and raises TypeError for non-orderable types.
        evaluated.sort(key=lambda pair: pair[0])
        ranked = [pair[1] for pair in evaluated]
        index = int(self.pop_turn_over * len(ranked))
        # Lowest scores survive: fitness is a distance to be minimised.
        self.fit = ranked[:index]
        self.non_fit = ranked[index:]
    def diversify(self):
        """
        Add some lucky unfit individuals (each survives with probability
        self.diversification) to preserve genetic diversity.
        """
        lucky_ones = [individual for individual in self.non_fit if self.diversification > random.random()]
        self.fit.extend(lucky_ones)
    def mutate(self):
        """
        How do you mutate an individual?
        """
        raise NotImplementedError
    def mate(self):
        """
        Refill the population by crossing random pairs of fit parents
        (first half of one parent + second half of the other). Parents are
        kept alive by the end of the process.

        Requires at least two fit individuals (replace=False sampling).
        """
        parents_length = len(self.fit)
        full_replacement = self.population_size - parents_length
        children = []
        for _ in range(full_replacement):
            male, female = random.choice(len(self.fit), 2, replace=False)
            male = self.fit[male]
            female = self.fit[female]
            half = len(male) // 2
            children.append(male[:half] + female[half:])
        self.fit.extend(children)
        self.population = self.fit[:]
    def evolve(self):
        """
        One evolution round: select survivors, rescue a few lucky unfit
        individuals, mutate, then refill the population by mating.
        """
        self.select()
        self.diversify()
        self.mutate()
        self.mate()
    def first_breed(self):
        """Create the initial random population."""
        self.population = [self.individual() for _ in range(self.population_size)]
    def print_convergence(self):
        """
        Method for verbose: prints the population's mean fitness.
        """
        intermediate = [(self.fitness(x, self.target), x) for x in self.population]
        print(np.mean([x[0] for x in intermediate]))
    def run_algorithm(self, until=30, verbose=True):
        """Run `until` evolution rounds; return (score, individual) pairs
        sorted by ascending score (best first)."""
        self.first_breed()
        for _ in range(until):
            self.evolve()
            if verbose:
                self.print_convergence()
        result = [(self.fitness(x, self.target), x) for x in self.population]
        # Key on the score only, for the same reason as in select().
        return sorted(result, key=lambda pair: pair[0])
|
#
# @lc app=leetcode.cn id=94 lang=python
#
# [94] 二叉树的中序遍历
#
# @lc code=start
# Definition for a binary tree node.
class TreeNode(object):
    """Binary-tree node: a value plus optional left/right children."""
    def __init__(self, x):
        self.val = x       # node payload
        self.left = None   # left child (TreeNode or None)
        self.right = None  # right child (TreeNode or None)
import collections
from typing import List
class Solution(object):
    """Recursive in-order traversal of a binary tree."""
    def inorderTraversal(self, root: TreeNode) -> List[int]:
        """
        :type root: TreeNode
        :rtype: List[int]
        """
        values = []
        def visit(node):
            # left subtree, then current node, then right subtree
            if not node:
                return
            visit(node.left)
            values.append(node.val)
            visit(node.right)
        visit(root)
        return values
class Solution2(object):
    """Iterative in-order traversal using an explicit stack."""
    def inorderTraversal(self, root: TreeNode) -> List[int]:
        """Return node values in in-order sequence without recursion."""
        result = []
        stack = collections.deque()
        node = root
        while node or stack:
            # Slide as far left as possible, stacking ancestors on the way.
            while node:
                stack.append(node)
                node = node.left
            # Visit the deepest unvisited node, then move to its right subtree.
            node = stack.pop()
            result.append(node.val)
            node = node.right
        return result
def arr_to_tree(arr):
    """Build a binary tree from a LeetCode-style level-order list.

    The string 'null' marks a missing child. Returns the root TreeNode,
    or None for an empty list.
    """
    if not arr:
        return None
    root = TreeNode(arr[0])
    pending = collections.deque([root])
    i = 1
    while i < len(arr):
        cur = pending.popleft()
        if not cur:
            continue
        # Attach the left child, if any values remain.
        if i < len(arr):
            left = None if arr[i] == 'null' else TreeNode(arr[i])
            cur.left = left
            i += 1
            if left:
                pending.append(left)
        # Attach the right child, if any values remain.
        if i < len(arr):
            right = None if arr[i] == 'null' else TreeNode(arr[i])
            cur.right = right
            i += 1
            if right:
                pending.append(right)
    return root
# Quick manual check: build the tree [1, null, 2, 3] and traverse it.
tree = arr_to_tree([1,'null',2,3])
print(tree.left)             # None: node 1 has no left child
print(tree.right.val)        # 2
print(tree.right.left.val)   # 3
print(tree.right.left.left)  # None: leaf
print(Solution2().inorderTraversal(tree))  # expected in-order: [1, 3, 2]
# @lc code=end
|
import subprocess
import datetime
import sys
import os
from utils import get_files,move_files,clean_dir
from make_html import make_html
# Python wrapper to the check_anaTrees script when trying to run by-hand (instead of via lobster)
# Alias function
pjoin = os.path.join
# Common locations and per-run identifiers.
USER_DIR = os.path.expanduser('~')
CMSSW_DIR = os.environ['CMSSW_BASE']
TIMESTAMP = datetime.datetime.now().strftime('%Y-%m-%d_%H%M')
TIMESTAMP2 = datetime.datetime.now().strftime('%Y_%m_%d')
# Hadoop store and the directory holding the small anatest ROOT files.
HPATH = '/hadoop/store/user'
SFILES = os.path.expandvars('/store/smallfiles/$USER/anatest_files')
# Absolute paths to the individual anatest ROOT files, one constant per
# production version (a15..a31) plus a handful of special private samples.
fpath_a15 = os.path.join(SFILES,"TOP-19-001_unblinded_v1.root")
fpath_a16 = os.path.join(SFILES,"anatest16.root")
fpath_a17 = os.path.join(SFILES,"anatest17.root")
fpath_a18 = os.path.join(SFILES,"anatest18.root")
fpath_a19 = os.path.join(SFILES,"anatest19.root")
fpath_a20 = os.path.join(SFILES,"anatest20.root")
fpath_a21 = os.path.join(SFILES,"anatest21.root")
fpath_a22 = os.path.join(SFILES,"anatest22.root")
fpath_a23 = os.path.join(SFILES,"anatest23_v3.root")
fpath_a24 = os.path.join(SFILES,"anatest24.root")
fpath_a25 = os.path.join(SFILES,"anatest25.root")
fpath_a26 = os.path.join(SFILES,"anatest26.root")
fpath_a27 = os.path.join(SFILES,"anatest27.root")
fpath_a28 = os.path.join(SFILES,"anatest28.root")
fpath_a29 = os.path.join(SFILES,"anatest29.root")
fpath_a30 = os.path.join(SFILES,"anatest30.root")
fpath_a31 = os.path.join(SFILES,"anatest31.root")
# Private signal samples and reprocessed variants ("NSH" presumably means
# NoSkipHiggs, matching the sample names below -- TODO confirm).
fpath_R6B1 = os.path.join(SFILES,"private_ttH-ttZ-ttW_HanV4_R6B1Only.root")
fpath_R6B1_NSH = os.path.join(SFILES,"private_ttH-ttZ-ttW_HanV4_R6B1Only_NSH.root")
fpath_R6e2_NSH = os.path.join(SFILES,"private_tZq_HanV4_R6-extra2Only_NSH.root")
fpath_tllq_R6B1 = os.path.join(SFILES,"private_sgnl_tllq_R6B1.root")
fpath_tllq_R6B2 = os.path.join(SFILES,"private_sgnl_tllq_R6B2.root")
fpath_tllq_R6B3 = os.path.join(SFILES,"private_sgnl_tllq_R6B3.root")
fpath_a28_redoFull = os.path.join(SFILES,"anatest28_redoFullWF-NoStreaming.root")
fpath_a29_redoFull = os.path.join(SFILES,"private_sgnl_redoTrees-a29_NoStreaming.root")
fpath_a29_redoFull_v2 = os.path.join(SFILES,"private_sgnl_redoTrees-a29_NoStreaming_v2.root")
fpath_a29_noDupes = os.path.join(SFILES,"anatest29_NoDuplicates.root")
fpath_a29_noDupes_v2 = os.path.join(SFILES,"anatest29_NoDuplicatesV2.root")
fpath_HanV4SMChecks = os.path.join(SFILES,"private_sgnl_HanV4_SMCheck.root")
fpath_HanOrigSMChecks = os.path.join(SFILES,"private_sgnl_HanOriginal_SMCheck.root")
class Sample(object):
    """A named MC sample: its cross section and the directories holding its files."""
    def __init__(self,name,xsec,dirs=None):
        """
        :param name: sample name
        :param xsec: cross section  # Should come from the rateinfo.h file in MakeGoodPlot2
        :param dirs: optional iterable of initial directories. The original
            mutable default ([]) is replaced by None to avoid the classic
            shared-default-argument bug.
        """
        self.__name = name
        self.__xsec = xsec
        self.__dirs = []
        for d in (dirs or []): self.addDirectory(d)
    def name(self):
        """Return the sample name."""
        return self.__name
    def xsec(self):
        """Return the cross section."""
        return self.__xsec
    def list(self):
        """Return the registered directories (name intentionally mirrors the
        original API even though it shadows the builtin)."""
        return self.__dirs
    def addDirectory(self,*args):
        """Register a directory; multiple args are joined with os.path.join.
        No-ops on zero args and on duplicates."""
        if len(args) == 0:
            return
        elif len(args) == 1:
            d = args[0]
        else:
            d = os.path.join(*args)
        if d in self.list():
            return
        self.__dirs.append(d)
# Pipes subprocess messages to STDOUT
def run_process(inputs,verbose=True,indent=0):
    """Run *inputs* via subprocess, streaming its stdout; return the
    captured (rstripped) lines.

    Note: This will hold the main thread and wait for the subprocess to complete.
    """
    indent_str = "\t"*indent
    p = subprocess.Popen(inputs,stdout=subprocess.PIPE)
    stdout = []
    while True:
        l = p.stdout.readline()
        # At EOF readline() returns '' on Python 2 but b'' on Python 3
        # (the pipe yields bytes); the original `l == ''` test never matched
        # bytes, so the loop hung under Python 3. `not l` covers both.
        if not l and p.poll() is not None:
            break
        if l:
            stdout.append(l.rstrip())
            # str() keeps this printable for both str (py2) and bytes (py3).
            if verbose: print(indent_str + str(l.rstrip()))
    return stdout
def getFiles(tdir,ext=''):
    """Return sorted full paths of regular files in *tdir*.

    :param tdir: directory to scan; a missing directory yields [].
    :param ext: optional extension filter (without the dot); '' keeps all files.
    """
    if not os.path.exists(tdir): return []
    lst = []
    for fn in sorted(os.listdir(tdir)):
        fpath = os.path.join(tdir,fn)
        if not os.path.isfile(fpath):
            continue
        # os.path.splitext handles names without any '.' -- the original
        # `fn.rsplit('.',1)` unpacking raised ValueError on such files.
        tail = os.path.splitext(fn)[1].lstrip('.')
        if ext and tail != ext:
            continue
        lst.append(fpath)
    return lst
# Wrapper to run the 'check_anaTrees.C' macro interactively
def check_anaTrees():
    """Build file lists for the configured samples and run the ROOT macro
    check_anaTrees.C over each one via `root -b -l -q`.

    Python 2 syntax (print statements). Side effects per sample: writes
    infiles_<label>.txt in the CWD and produces output_<label>.root via ROOT.
    """
    print "Running check_anaTrees()..."
    # Run configuration: cap on input files, events per run, and flags that
    # are forwarded into the ROOT macro arguments below.
    max_files = 10
    events = 500#75000
    skim = False
    testing = False          # when True, the macro gets an empty output name
    # label_suffix = "v14-v1_Full"
    label_suffix = ""
    spath = 'awightma/analysisTrees'
    # --- Special tllq / tZq / ttH samples (xsec in pb, see rateinfo.h) ---
    tllq4f_JetMax1 = Sample('tllq4f_JetMax1',xsec=0.0942)
    tllq4f_JetMax2 = Sample('tllq4f_JetMax2',xsec=0.0942)
    tllq4f_JetMax2_NSH = Sample('tllq4f_JetMax2_NoSkipHiggs',xsec=0.0942)
    tllq4f_0p_NoMerge = Sample('tllq4f_0p_NoMerge',xsec=0.0942)
    tllq4f_0p_JetMax1 = Sample('tllq4f_0p_JetMax1',xsec=0.0942)
    tllq4f_0p_JetMax2 = Sample('tllq4f_0p_JetMax2',xsec=0.0942)
    central_tZq_NSH = Sample('central_tZq_NoSkipHiggs',xsec=0.0942)
    central_tZq = Sample('central_tZq',xsec=0.0942)
    central_ttH = Sample('central_ttH',xsec=0.2151)
    tllq4f_JetMax1.addDirectory(HPATH,spath,'special/tllq4f_EFT_t-channelJets_2019_08_07/v1/tllq_multidim_b1')
    tllq4f_JetMax1.addDirectory(HPATH,spath,'special/tllq4f_EFT_t-channelJets_2019_08_07/v1/tllq_multidim_b2')
    # tllq4f_JetMax2.addDirectory(HPATH,spath,'special/tllq4f_EFT_t-channel_01j_2019_09_04_JetMax2/v1/tllq_multidim')
    tllq4f_JetMax2.addDirectory(HPATH,spath,'special/tllq4f_EFT_t-channel_01j_2019_09_20_JetMax2/v1/tllq_multidim')
    tllq4f_JetMax2_NSH.addDirectory(HPATH,spath,'special/tllq4f_EFT_t-channel_01j_2019_09_20_JetMax2_NoSkipHiggs/v1/tllq_multidim')
    tllq4f_0p_NoMerge.addDirectory(HPATH,spath,'special/tllq4f_EFT_t-channelNoJets_2019_10_01_HadFilterCheck/v1/tllq_multidim_NoMerge')
    tllq4f_0p_JetMax1.addDirectory(HPATH,spath,'special/tllq4f_EFT_t-channelNoJets_2019_10_01_HadFilterCheck/v1/tllq_multidim_JetMax1')
    tllq4f_0p_JetMax2.addDirectory(HPATH,spath,'special/tllq4f_EFT_t-channelNoJets_2019_10_01_HadFilterCheck/v1/tllq_multidim_JetMax2')
    # central_tZq_NSH.addDirectory(HPATH,spath,'special/central_tZq_2019_09_23_v14-v2_NoSkipHiggs/v1/tZq')
    central_tZq_NSH.addDirectory(HPATH,spath,'special/central_tZq_2019_09_23_v14-v1_NoSkipHiggs/v1/tZq')
    # central_tZq.addDirectory(HPATH,spath,'special/central_tZq_2019_09_20_v1/v1/tZq')
    central_tZq.addDirectory(HPATH,spath,'central_sgnl_2019_07_31/v1/tZq')
    central_ttH.addDirectory(HPATH,spath,'central_sgnl_2019_07_31/v1/ttH')
    ###################
    # anatest24 samples
    ###################
    a24_private_ttH = Sample('private_ttH',xsec=0.2151)
    a24_private_tHq = Sample('private_tHq',xsec=7.7129e-2)
    a24_private_tllq = Sample('private_tllq',xsec=0.0942)
    a24_private_ttll = Sample('private_ttll',xsec=0.2529)
    a24_private_ttlnu = Sample('private_ttlnu',xsec=0.2043)
    a24_private_ttH.addDirectory(HPATH,spath,'private_sgnl_2019_10_09/v1/ttH_multidim_b1')
    a24_private_tHq.addDirectory(HPATH,spath,'private_sgnl_2019_10_09/v1/tHq_multidim_b1')
    a24_private_tllq.addDirectory(HPATH,spath,'private_sgnl_2019_10_09/v1/tllq_multidim_b1')
    a24_private_tllq.addDirectory(HPATH,spath,'private_sgnl_2019_10_09/v1/tllq_multidim_b2')
    a24_private_ttll.addDirectory(HPATH,spath,'private_sgnl_2019_10_09/v1/ttll_multidim_b1')
    a24_private_ttll.addDirectory(HPATH,spath,'private_sgnl_2019_10_09/v1/ttll_multidim_b2')
    a24_private_ttlnu.addDirectory(HPATH,spath,'private_sgnl_2019_10_09/v1/ttlnu_multidim_b1')
    ###################
    # anatest25 samples
    ###################
    a25_private_ttH = Sample('private_ttH',xsec=0.2151)
    a25_private_tHq = Sample('private_tHq',xsec=7.7129e-2)
    a25_private_tllq = Sample('private_tllq',xsec=0.0942)
    a25_private_ttll = Sample('private_ttll',xsec=0.2529)
    a25_private_ttlnu = Sample('private_ttlnu',xsec=0.2043)
    a25_private_ttH.addDirectory(HPATH,spath,'private_sgnl_2019_10_11/v1/ttH_multidim_b1')
    a25_private_tHq.addDirectory(HPATH,spath,'private_sgnl_2019_10_11/v1/tHq_multidim_b1')
    a25_private_tllq.addDirectory(HPATH,spath,'private_sgnl_2019_10_11/v1/tllq_multidim_b1')
    a25_private_tllq.addDirectory(HPATH,spath,'private_sgnl_2019_10_11/v1/tllq_multidim_b2')
    a25_private_ttll.addDirectory(HPATH,spath,'private_sgnl_2019_10_11/v1/ttll_multidim_b1')
    a25_private_ttll.addDirectory(HPATH,spath,'private_sgnl_2019_10_11/v1/ttll_multidim_b2')
    a25_private_ttlnu.addDirectory(HPATH,spath,'private_sgnl_2019_10_11/v1/ttlnu_multidim_b1')
    a25_central_ttH = Sample('central_ttH',xsec=0.2151)
    a25_central_tHq = Sample('central_tHq',xsec=7.7129e-2)
    a25_central_tZq = Sample('central_tZq',xsec=0.0942)
    a25_central_ttZ = Sample('central_ttZ',xsec=0.2529)
    a25_central_ttW = Sample('central_ttW',xsec=0.2043)
    a25_central_tZq.addDirectory(HPATH,spath,'central_sgnl_2019_10_11/v1/tZq')
    a25_central_ttZ.addDirectory(HPATH,spath,'central_sgnl_2019_10_11/v1/ttZ')
    a25_central_ttW.addDirectory(HPATH,spath,'central_sgnl_2019_10_11/v1/ttW')
    a25_central_tHq.addDirectory(HPATH,spath,'central_sgnl_2019_10_11/v1/tHq')
    a25_central_ttH.addDirectory(HPATH,spath,'central_sgnl_2019_10_11/v1/ttH')
    # Only the samples listed here are actually processed; toggle by
    # (un)commenting entries.
    # samples = [tllq4f_JetMax2]
    samples = [
        a25_private_ttH,
        # a25_central_ttH,
        # a25_central_tHq,
        # a25_central_tZq,
        # a25_central_ttZ,
        # a25_central_ttW,
    ]
    for samp in samples:
        label = samp.name()
        xsec = samp.xsec()
        # Build a unique run label: name [+ suffix] + timestamp.
        label_name = "{label}".format(label=label)
        if len(label_suffix):
            label_name = "{label}_{suffix}".format(label=label,suffix=label_suffix)
        label_name += "_{tstamp}".format(tstamp=TIMESTAMP)
        print "Processing: {l}".format(l=label_name)
        file_list = []
        print "Building file list:"
        for idx,fdir in enumerate(samp.list()):
            print "\t[{0:0>{w}}/{1}] {dirpath}".format(idx+1,len(samp.list()),w=1,dirpath=fdir)
            files = getFiles(fdir,'root')
            file_list.extend(files)
        print "Found {} file(s)".format(len(file_list))
        if len(file_list) == 0:
            print "Skipping sample {l} with no files".format(l=label_name)
            continue
        # Write the (possibly truncated) input-file list consumed by the macro.
        inf = 'infiles_{l}.txt'.format(l=label_name)
        with open(inf,'w') as fd:
            for idx,fn in enumerate(file_list):
                if max_files > 0 and idx >= max_files:
                    print "Only processing {0} of {1} total files".format(max_files,len(file_list))
                    break
                fd.write('{fpath}\n'.format(fpath=fn))
        outf = "output_{l}.root".format(l=label_name)
        if testing:
            outf = ""
        # Invoke ROOT in batch mode with the macro and its argument string.
        cmd = ["root","-b","-l","-q"]
        cmd_args = "\"{outf}\",\"{inf}\",{evts},{xsec},{skim}".format(outf=outf,inf=inf,evts=events,xsec=xsec,skim=int(skim))
        # cmd.extend(["check_anaTrees.C(\"{outf}\", \"{inf}\", {evts}, {skim})".format(args=cmd_args)])
        cmd.extend(["check_anaTrees.C({args})".format(args=cmd_args)])
        subprocess.check_call(cmd)
#TODO: Switch the inputs to the 'all_samples' run
# Wrapper to run the 'read_anaTreeChecks.C' macro
def read_anaTreeChecks():
    """Merge per-sample anaTree check histograms with hadd, then run the
    'read_anaTreeChecks.C' ROOT macro over the merged files.

    When move_output is True, also copies the produced .png plots to the
    user's public web area and regenerates the index page.
    """
    print "Running read_anaTreeChecks()..."
    # NOTE: I re-named some of the pt/eta histograms, so most of the samples in 'checkAnaTrees' don't work (before 2019-11-12)
    # Cross sections, in pb.
    # NOTE(review): tZq_xsec is identical to ttZ_xsec here (0.2529), while the
    #   central tZq sample elsewhere in this file uses 0.0942 -- confirm value.
    tZq_xsec = 0.2529
    ttH_xsec = 0.2151
    tHq_xsec = 7.7129e-2
    ttZ_xsec = 0.2529
    ttW_xsec = 0.2043
    ##############################
    # ana28/ana29 samples
    ##############################
    # spath = "awightma/checkAnaTrees/ana28-priv_ana29-priv_NoStreaming_2019_11_11/v1"
    spath = "awightma/checkAnaTrees/ana25-priv_ana25-cent_ana28-priv_ana29-priv_NoStreaming_2019_11_13/v1"
    # Private and central samples for each anatest version (a25/a28/a29).
    a25_private_ttH = Sample('a25priv_ttH',xsec=ttH_xsec)
    a25_private_tHq = Sample('a25priv_tHq',xsec=tHq_xsec)
    a25_private_tZq = Sample('a25priv_tllq',xsec=tZq_xsec)
    a25_private_ttZ = Sample('a25priv_ttll',xsec=ttZ_xsec)
    a25_private_ttW = Sample('a25priv_ttlnu',xsec=ttW_xsec)
    a25_central_ttH = Sample('a25cent_ttH',xsec=ttH_xsec)
    a25_central_tHq = Sample('a25cent_tHq',xsec=tHq_xsec)
    a25_central_tZq = Sample('a25cent_tllq',xsec=tZq_xsec)
    a25_central_ttZ = Sample('a25cent_ttll',xsec=ttZ_xsec)
    a25_central_ttW = Sample('a25cent_ttlnu',xsec=ttW_xsec)
    a28_private_ttH = Sample('a28priv_ttH',xsec=ttH_xsec)
    a28_private_ttW = Sample('a28priv_ttlnu',xsec=ttW_xsec)
    a28_private_ttZ = Sample('a28priv_ttll',xsec=ttZ_xsec)
    a28_private_tZq = Sample('a28priv_tllq',xsec=tZq_xsec)
    a28_private_tHq = Sample('a28priv_tHq',xsec=tHq_xsec)
    a29_private_ttH = Sample('a29priv_ttH',xsec=ttH_xsec)
    a29_private_ttW = Sample('a29priv_ttlnu',xsec=ttW_xsec)
    a29_private_ttZ = Sample('a29priv_ttll',xsec=ttZ_xsec)
    a29_private_tZq = Sample('a29priv_tllq',xsec=tZq_xsec)
    a29_private_tHq = Sample('a29priv_tHq',xsec=tHq_xsec)
    a25_central_ttH.addDirectory(spath,'a25_central_ttH')
    a25_central_ttW.addDirectory(spath,'a25_central_ttW')
    a25_central_ttZ.addDirectory(spath,'a25_central_ttZ')
    a25_central_tZq.addDirectory(spath,'a25_central_tZq')
    a25_central_tHq.addDirectory(spath,'a25_central_tHq')
    a25_private_ttH.addDirectory(spath,'a25_private_ttH')
    a25_private_ttW.addDirectory(spath,'a25_private_ttW')
    a25_private_ttZ.addDirectory(spath,'a25_private_ttZ')
    a25_private_tZq.addDirectory(spath,'a25_private_tZq')
    a25_private_tHq.addDirectory(spath,'a25_private_tHq')
    a28_private_ttH.addDirectory(spath,'a28_private_ttH')
    a28_private_ttW.addDirectory(spath,'a28_private_ttW')
    a28_private_ttZ.addDirectory(spath,'a28_private_ttZ')
    a28_private_tZq.addDirectory(spath,'a28_private_tZq')
    a28_private_tHq.addDirectory(spath,'a28_private_tHq')
    a29_private_ttH.addDirectory(spath,'a29_private_ttH')
    a29_private_ttW.addDirectory(spath,'a29_private_ttW')
    a29_private_ttZ.addDirectory(spath,'a29_private_ttZ')
    a29_private_tZq.addDirectory(spath,'a29_private_tZq')
    a29_private_tHq.addDirectory(spath,'a29_private_tHq')
    ##############################
    # ana29 (no duplicates) and SMCheck samples
    ##############################
    spath = 'awightma/checkAnaTrees/ana29-priv-NoDuplicates_HanOrig-HanV4-SMCheck_2019_11_20/v1'
    a29noDupes_private_ttH = Sample('a29privNoDupe_ttH',xsec=ttH_xsec)
    a29noDupes_private_ttW = Sample('a29privNoDupe_ttW',xsec=ttW_xsec)
    a29noDupes_private_ttZ = Sample('a29privNoDupe_ttZ',xsec=ttZ_xsec)
    a29noDupes_private_tZq = Sample('a29privNoDupe_tZq',xsec=tZq_xsec)
    a29noDupes_private_tHq = Sample('a29privNoDupe_tHq',xsec=tHq_xsec)
    ttH_HanOrigSMCheck = Sample('ttH_HanOrigSMCheck',xsec=ttH_xsec)
    ttZ_HanOrigSMCheck = Sample('ttZ_HanOrigSMCheck',xsec=ttZ_xsec)
    ttW_HanOrigSMCheck = Sample('ttW_HanOrigSMCheck',xsec=ttW_xsec)
    ttH_HanV4SMCheck = Sample('ttH_HanV4SMCheck',xsec=ttH_xsec)
    ttZ_HanV4SMCheck = Sample('ttZ_HanV4SMCheck',xsec=ttZ_xsec)
    ttW_HanV4SMCheck = Sample('ttW_HanV4SMCheck',xsec=ttW_xsec)
    tZq_HanV4SMCheck = Sample('tZq_HanV4SMCheck',xsec=tZq_xsec)
    tHq_HanV4SMCheck = Sample('tHq_HanV4SMCheck',xsec=tHq_xsec)
    a29noDupes_private_ttH.addDirectory(spath,'a29_private_ttH')
    a29noDupes_private_ttW.addDirectory(spath,'a29_private_ttW')
    a29noDupes_private_ttZ.addDirectory(spath,'a29_private_ttZ')
    a29noDupes_private_tZq.addDirectory(spath,'a29_private_tZq')
    a29noDupes_private_tHq.addDirectory(spath,'a29_private_tHq')
    ttH_HanOrigSMCheck.addDirectory(spath,'ttH_HanOrigSMCheck')
    ttZ_HanOrigSMCheck.addDirectory(spath,'ttZ_HanOrigSMCheck')
    ttW_HanOrigSMCheck.addDirectory(spath,'ttW_HanOrigSMCheck')
    ttH_HanV4SMCheck.addDirectory(spath,'ttH_HanV4SMCheck')
    ttZ_HanV4SMCheck.addDirectory(spath,'ttZ_HanV4SMCheck')
    ttW_HanV4SMCheck.addDirectory(spath,'ttW_HanV4SMCheck')
    tZq_HanV4SMCheck.addDirectory(spath,'tZq_HanV4SMCheck')
    tHq_HanV4SMCheck.addDirectory(spath,'tHq_HanV4SMCheck')
    # The samples actually processed this run (comment in/out as needed).
    inputs = [
        # a25_private_ttH,
        # a25_private_ttW,
        # a25_private_ttZ,
        # a25_private_tZq,
        # a25_private_tHq,
        # a25_central_ttH,
        # a25_central_ttW,
        # a25_central_ttZ,
        # a25_central_tZq,
        # a25_central_tHq,
        a28_private_ttH,
        a29_private_ttH,
        a29noDupes_private_ttH,
        ttH_HanOrigSMCheck,
        ttH_HanV4SMCheck,
        a28_private_ttW,
        a29_private_ttW,
        a29noDupes_private_ttW,
        ttW_HanOrigSMCheck,
        ttW_HanV4SMCheck,
        a28_private_ttZ,
        a29_private_ttZ,
        a29noDupes_private_ttZ,
        ttZ_HanOrigSMCheck,
        ttZ_HanV4SMCheck,
        a28_private_tZq,
        a29_private_tZq,
        a29noDupes_private_tZq,
        tZq_HanV4SMCheck,
        # a28_private_tHq,
        # a29_private_tHq,
        # a29noDupes_private_tHq,
        # tHq_HanV4SMCheck,
    ]
    # dir_name = 'testing_{tstamp}'.format(tstamp=TIMESTAMP)
    # sub_dir = 'test'
    sub_dir = 'a28-a29_checks'
    dir_name = 'tZq_{tstamp}'.format(tstamp=TIMESTAMP2)
    move_output = False
    remake_merged_files = False # Force the recreation of the merged root files
    merge_lst = []
    for idx,samp in enumerate(inputs):
        name = samp.name()
        # Only the first registered directory of each sample is merged.
        sub_path = samp.list()[0]
        xsec = samp.xsec()
        dir_path = pjoin(HPATH,sub_path)
        to_merge = getFiles(dir_path,ext='root')
        if len(to_merge) == 0:
            print "No files to merge!"
            continue
        merged_fname = "{fname}.root".format(fname=name)
        hadd_cmd = ['hadd','-f']
        hadd_cmd.extend([merged_fname])
        hadd_ops_split = len(hadd_cmd) # This is the number of options before we start including the files to merge
        hadd_cmd.extend(to_merge)
        s1 = ' '.join(hadd_cmd[:hadd_ops_split])
        s2 = '\n\t'.join(hadd_cmd[hadd_ops_split:])
        # print "Merge command: {0}\n\t{1}".format(s1,s2)
        # Skip the (slow) hadd step when the merged file already exists.
        if remake_merged_files or not os.path.exists(merged_fname):
            run_process(hadd_cmd)
        if os.path.exists(merged_fname):
            merge_lst.append(merged_fname)
        else:
            print "Skipping missing input file: {fname}".format(fname=merged_fname)
    if len(merge_lst) == 0:
        print "No samples to run over!"
        return
    # Build a ROOT initializer-list literal: {"f1.root","f2.root",...}
    infs = "{{{infs}}}".format(infs=",".join('"{fn}"'.format(fn=fn) for fn in merge_lst))
    cmd = ["root","-b","-l","-q"]
    # NOTE(review): 'xsec' here is whatever value the LAST loop iteration left
    #   behind, i.e. the macro gets a single xsec for all inputs -- confirm.
    cmd.extend(["read_anaTreeChecks.C({infs},{xsec})".format(infs=infs,xsec=xsec)])
    subprocess.check_call(cmd)
    if move_output:
        output_dir = pjoin(USER_DIR,'www/eft_stuff/misc/anacheck_plots',sub_dir,dir_name)
        print "Output Dir: {dir}".format(dir=output_dir)
        if not os.path.exists(output_dir):
            # os.mkdir(output_dir)
            os.makedirs(output_dir)
        clean_dir(output_dir,["^.*\.png$","^index.html$"])
        imgs = get_files('.',targets=["^.*\.png$"])
        move_files(files=imgs,target=output_dir)
        make_html(output_dir)
def plot_systematic_variations():
    """Run the 'quick_plots.C' ROOT macro for each selected sample and the
    chosen systematic, optionally sorting the resulting plots into
    per-sample web directories.
    """
    local_dir = os.path.join(CMSSW_BASE,'src/EFTMultilepton/TemplateMakers/test/MakeGoodPlot2')
    fpath_testing = os.path.join(local_dir,"testing.root")
    # fpath = fpath_a22
    # fpath = fpath_a29
    fpath = fpath_testing
    # As a reminder these should be unique over all lists, since they should be coming from the same file
    private_signal = ["tllq_16D","ttH_16D","ttll_16D","ttlnu_16D","tHq_16D"]
    central_signal = ["tZq","ttH","ttZ","ttW"]
    central_bkgd = ["ttGJets","WZ","WWW"]
    samples = []
    samples.extend(private_signal)
    samples.extend(central_signal)
    samples.extend(central_bkgd)
    # samples = ['ttlnu_16D']
    samples = ['ttW','ttlnu_16D']  # overrides the full list above for this run
    syst = "Q2RF"
    # syst = "PDF"
    move_output = False
    web_dir = "/afs/crc.nd.edu/user/a/awightma/www"
    # sub_dir = "eft_stuff/misc/anatest_plots/njet_plots/{syst}_variations/{tstamp}_from-anatest25".format(syst=syst,tstamp=TIMESTAMP2)
    # For testing
    sub_dir = "eft_stuff/misc/anatest_plots/njet_plots/testing/{tstamp}_{syst}".format(tstamp=TIMESTAMP,syst=syst)
    for sample in samples:
        name = sample
        cmd = ["root","-b","-l","-q"]
        cmd_args = "\"{fpath}\",\"{sample}\",\"{syst}\"".format(fpath=fpath,sample=name,syst=syst)
        cmd.extend(['quick_plots.C({args})'.format(args=cmd_args)])
        subprocess.check_call(cmd)
        if move_output:
            # Map the sample name to its destination sub-directory.
            output_dir = "{name}".format(name=name)
            if name in central_bkgd:
                if name == "WZ":
                    output_dir = "bkgd_Diboson"
                elif name == "WWW":
                    output_dir = "bkgd_Triboson"
                else:
                    output_dir = "bkgd_{name}".format(name=name)
            elif name in central_signal:
                output_dir = "central_{name}".format(name=name)
            elif name in private_signal:
                tmp = name.split('_16D')[0] # Chop off the 16D part of the name
                output_dir = "private_{name}".format(name=tmp)
            else:
                raise RuntimeError("Unknown sample name: {name}".format(name=name))
            output_path = os.path.join(web_dir,sub_dir,output_dir)
            print "Output: {path}".format(path=output_path)
            # NOTE(review): this tests/creates the RELATIVE 'output_dir', not the
            #   'output_path' printed above -- looks like a bug; confirm intent.
            if not os.path.exists(output_dir):
                os.mkdir(output_dir)
            # NOTE(review): 'imgs' is collected but never moved/used -- the
            #   move/copy step appears to be missing here.
            imgs = get_files('.',targets=["^.*\.png$"])
def compare_anatest_files():
    """Run the 'compare_anatest_files.C' ROOT macro on each configured pair
    of merged histogram files.

    NOTE(review): several fpath_* names used below (fpath_a26 on the lone
    'fpath = ...' line and the fpath_tllq_R6B* entries in 'compare') are not
    defined in this function; they are presumably module-level globals
    defined elsewhere -- verify, otherwise this raises NameError.
    """
    # Common directory roots for the various histogram files.
    geoff_dir = "/afs/crc.nd.edu/user/g/gsmith15/Public/for_Tony/"
    tony_dir = "/afs/crc.nd.edu/user/a/awightma/Public/for_tony/"
    hist_dir = "/afs/crc.nd.edu/user/a/awightma/CMSSW_Releases/CMSSW_8_1_0/src/CombineHarvester/TopEFT/hist_files/"
    local_dir = "/afs/crc.nd.edu/user/a/awightma/CMSSW_Releases/from_govner/CMSSW_9_4_6/src/EFTMultilepton/TemplateMakers/test/MakeGoodPlot2/"
    merged_dir = "/tmpscratch/users/awightma/analysisWorkflow/mergedHists/"
    fname_data = "temp_data.root"
    fname_MuonEG = "temp_MuonEG.root"
    fname_DoubleMuon = "temp_DoubleMuon.root"
    fname_SingleMuon = "temp_SingleMuon.root"
    fname_SingleElectron = "temp_SingleElectron.root"
    fname_DoubleEG = "temp_DoubleEG.root"
    fname_tZq_incl_higgs = "tZq_incl_higgs.root"
    fname_tZq_base = "tZq_base.root"
    fname_tZq_v14v1_NSH = "tZq_v14v1_NSH.root" # This file is fucked up for some reason (empty histograms)
    fname_tllq = 'temp_tllq_multidim.root'
    fname_testing = 'testing.root'
    # Full paths for the anatest26 and anatest27 data files.
    fpath_data_a26 = os.path.join(merged_dir,"2019_07_08_from-standardhists_SRs_with_Round5_EFTsamps",fname_data)
    fpath_MuonEG_a26 = os.path.join(merged_dir,"2019_07_08_from-standardhists_SRs_with_Round5_EFTsamps",fname_MuonEG)
    fpath_DoubleMuon_a26 = os.path.join(merged_dir,"2019_07_08_from-standardhists_SRs_with_Round5_EFTsamps",fname_DoubleMuon)
    fpath_SingleMuon_a26 = os.path.join(merged_dir,"2019_07_08_from-standardhists_SRs_with_Round5_EFTsamps",fname_SingleMuon)
    fpath_SingleElectron_a26 = os.path.join(merged_dir,"2019_07_08_from-standardhists_SRs_with_Round5_EFTsamps",fname_SingleElectron)
    fpath_DoubleEG_a26 = os.path.join(merged_dir,"2019_07_08_from-standardhists_SRs_with_Round5_EFTsamps",fname_DoubleEG)
    fpath_data_a27 = os.path.join(merged_dir,"2019_10_22_data-nominal_newGT-94X_dataRun2_v11",fname_data)
    fpath_MuonEG_a27 = os.path.join(merged_dir,"2019_10_22_data-nominal_newGT-94X_dataRun2_v11",fname_MuonEG)
    fpath_DoubleMuon_a27 = os.path.join(merged_dir,"2019_10_22_data-nominal_newGT-94X_dataRun2_v11",fname_DoubleMuon)
    fpath_SingleMuon_a27 = os.path.join(merged_dir,"2019_10_22_data-nominal_newGT-94X_dataRun2_v11",fname_SingleMuon)
    fpath_SingleElectron_a27 = os.path.join(merged_dir,"2019_10_22_data-nominal_newGT-94X_dataRun2_v11",fname_SingleElectron)
    fpath_DoubleEG_a27 = os.path.join(merged_dir,"2019_10_22_data-nominal_newGT-94X_dataRun2_v11",fname_DoubleEG)
    fpath_tZq_incl_higgs = os.path.join(local_dir,fname_tZq_incl_higgs)
    fpath_tZq_base = os.path.join(local_dir,fname_tZq_base)
    fpath_tZq_v14v1_NSH = os.path.join(local_dir,fname_tZq_v14v1_NSH)
    fpath_testing = os.path.join(local_dir,fname_testing)
    fpath_tllq_a25 = os.path.join(merged_dir,"2019_10_13_full_MC",fname_tllq)
    fpath_tllq_a28 = os.path.join(merged_dir,"2019_10_19_full_MC",fname_tllq)
    # NOTE(review): fpath_a26 is not defined anywhere visible, and 'fpath' is
    #   never used afterwards -- likely dead (and broken) leftover code.
    fpath = fpath_a26
    # Pairs of files to diff; only the uncommented tuples are processed.
    compare = [
        # (fpath_a15,fpath_a16),
        # (fpath_a15,fpath_a22),
        # (fpath_a16,fpath_a22),
        # (fpath_a22,fpath_a23),
        # (fpath_a23,fpath_a24),
        # (fpath_a26,fpath_a27),
        # (fpath_data_a26,fpath_data_a27),
        # (fpath_MuonEG_a26,fpath_MuonEG_a27),
        # (fpath_DoubleMuon_a26,fpath_DoubleMuon_a27),
        # (fpath_SingleMuon_a26,fpath_SingleMuon_a27),
        # (fpath_SingleElectron_a26,fpath_SingleElectron_a27),
        # (fpath_DoubleEG_a26,fpath_DoubleEG_a27),
        # (fpath_a22,fpath_a28),
        # (fpath_a16,fpath_a28),
        # (fpath_a16,fpath_tZq_incl_higgs),
        # (fpath_a28,fpath_tZq_incl_higgs),
        # (fpath_tZq_incl_higgs,fpath_tZq_base)
        # (fpath_tZq_base,fpath_a28), # These are identical for tZq
        # (fpath_a16,fpath_tZq_v14v1_NSH),
        # (fpath_a23,fpath_a24),
        # (fpath_a24,fpath_a25),
        # (fpath_a25,fpath_a26),
        # (fpath_a26,fpath_a28),
        # (fpath_a24,fpath_a28),
        # (fpath_a25,fpath_a28),
        # (fpath_a26,fpath_a28),
        # (fpath_a28,fpath_a29),
        # (fpath_a29,fpath_a30),
        # (fpatha_28,fpath_a31),
        # (fpath_a28,fpath_a28_redoFull)
        # (fpath_a30,fpath_testing)
        # (fpath_a28_redoFull,fpath_a29_noDupes_v2),
        # (fpath_a29_noDupes,fpath_a29_noDupes_v2),
        # (fpath_a31,fpath_a29_noDupes_v2),
        # (fpath_R6B1,fpath_a29_noDupes_v2),
        # (fpath_R6B1_NSH,fpath_a29_noDupes_v2),
        # (fpath_R6e2_NSH,fpath_a29_noDupes_v2),
        (fpath_tllq_R6B1,fpath_tllq_R6B2),
        (fpath_tllq_R6B1,fpath_tllq_R6B3),
        (fpath_tllq_R6B2,fpath_tllq_R6B3),
        # (fpath_HanV4SMChecks,fpath_HanOrigSMChecks),
    ]
    for idx,tup in enumerate(compare):
        fpath1,fpath2 = tup
        # Report only the basenames; the full paths are long AFS paths.
        h,fname1 = os.path.split(fpath1)
        h,fname2 = os.path.split(fpath2)
        print "Comparing: {fn1} --> {fn2}".format(fn1=fname1,fn2=fname2)
        cmd = ["root","-b","-l","-q"]
        cmd_args = "\"{fp1}\",\"{fp2}\"".format(fp1=fpath1,fp2=fpath2)
        cmd.extend(['compare_anatest_files.C({args})'.format(args=cmd_args)])
        subprocess.check_call(cmd)
        # run_process(cmd,verbose=True,indent=0)
        print "#"*100
def make_yield_table():
    """Run the 'make_yield_table.C' ROOT macro for each configured merged
    file, using fname_map to shorten the table's display name (the (0,0)
    cell) when the file's basename has a known mapping.

    NOTE(review): the fpath_tllq_R6B* names used below are not defined in
    this function; presumably module-level globals -- verify.
    """
    # fpath = fpath_a29_noDupes
    # (These three sample lists are currently unused in this function.)
    private_signal = ["tllq_16D","ttH_16D","ttll_16D","ttlnu_16D","tHq_16D"]
    central_signal = ["tZq","ttH","ttZ","ttW"]
    central_bkgd = ["ttGJets","WZ","WWW"]
    # Dictionary map to set the yield table name -- the (0,0) cell of the table
    fname_map = {
        'private_ttH-ttZ-ttW_HanV4_R6B1Only': 'R6B1Only',
        'private_ttH-ttZ-ttW_HanV4_R6B1Only_NSH': 'R6B1Only_NSH',
        'private_tZq_HanV4_R6-extra2Only_NSH': 'tllq_R6e2NSH',
        'private_sgnl_redoHistMaking-a29_NoStreaming': 'a29_redoHist',
        'private_sgnl_redoTrees-a29_NoStreaming': 'a29_redoFull',
        'private_sgnl_redoTrees-a29_NoStreaming_v2': 'a29_redoFull',
        'anatest28_redoFullWF-NoStreaming': 'a28_redoFull',
        'anatest29_NoDuplicates': 'a29_noDupes',
        'anatest29_NoDuplicatesV2': 'a29_noDupesV2',
        'private_sgnl_HanV4_SMCheck': 'HanV4SMCheck',
        'private_sgnl_HanOriginal_SMCheck': 'HanOrigSMChk',
        'private_sgnl_tllq_R6B1': 'tllq_R6B1',
        'private_sgnl_tllq_R6B2': 'tllq_R6B2',
        'private_sgnl_tllq_R6B3': 'tllq_R6B3',
    }
    lst = []
    # lst.extend([fpath_a28,fpath_a28_redoFull,fpath_a29,fpath_a29_noDupes,fpath_a31])
    # lst.extend([
    #     fpath_a28_redoFull,
    #     # fpath_a31,
    #     # fpath_a29_noDupes,
    #     fpath_a29_noDupes_v2,
    #     fpath_HanV4SMChecks,
    #     fpath_HanOrigSMChecks
    # ])
    lst.extend([
        fpath_tllq_R6B1,
        fpath_tllq_R6B2,
        fpath_tllq_R6B3,
    ])
    # lst.extend([fpath_R6B1,fpath_R6B1_NSH,fpath_R6e2_NSH])
    # lst.extend([fpath_a29_redoHist,fpath_a29_redoFull_v2])
    for fp in lst:
        # Strip directory and extension to get the lookup key.
        h,t = os.path.split(fp)
        name = t.rsplit('.')[0]
        # NOTE: dict.has_key() is Python 2 only ('name in fname_map' in Py3).
        if fname_map.has_key(name):
            name = fname_map[name]
        print "Making yield table for {fp}".format(fp=fp)
        cmd = ["root","-b","-l","-q"]
        cmd_args = "\"{fp}\",\"{name}\"".format(fp=fp,name=name)
        cmd.extend(['make_yield_table.C({args})'.format(args=cmd_args)])
        subprocess.check_call(cmd)
def main():
    """Script entry point: run the selected analysis step.

    Exactly one of the steps below should be enabled at a time; the
    others are kept commented out for quick switching.
    """
    # check_anaTrees()
    read_anaTreeChecks()
    # plot_systematic_variations()
    # compare_anatest_files()
    # make_yield_table()

if __name__ == "__main__":
    main()
import numpy as np
import cvxopt
from src.svm.utils import calculate_util
# Threshold below which a Lagrange multiplier is treated as zero, i.e. the
# corresponding sample is NOT kept as a support vector (see prepare_predictor).
MULTIPLIER = 1e-5
# Soft-margin penalty C: upper bound on each Lagrange multiplier in the dual QP.
COEF = 0.1
class Predictor:
    """Kernel-SVM decision function built from a set of support vectors."""

    def __init__(self, bazis, weights, vectors, labels):
        # bazis   : bias term b of the decision function
        # weights : Lagrange multipliers of the support vectors
        # vectors : the support vectors themselves
        # labels  : their class labels
        self.bazis = bazis
        self.weights = weights
        self.vectors = vectors
        self.labels = labels

    def predict(self, x):
        """Return the sign of the decision function evaluated at ``x``."""
        score = self.bazis
        for alpha, sv, sv_label in zip(self.weights, self.vectors, self.labels):
            score = score + alpha * sv_label * calculate_util(sv, x)
        signed = np.sign(score)
        try:
            # Scalar result: return it as a plain Python number.
            return signed.item()
        except ValueError:
            # Array result: fall back to its first element.
            return signed[0]
def gram_matrix(x):
    """Return the kernel Gram matrix K with K[i, j] = k(x_i, x_j)."""
    n_samples = x.shape[0]
    K = np.zeros((n_samples, n_samples))
    for i in range(n_samples):
        row_i = x[i]
        for j in range(n_samples):
            K[i, j] = calculate_util(row_i, x[j])
    return K
def lagrange_magic(X, y):
    """Solve the soft-margin SVM dual QP and return the Lagrange multipliers.

    Minimizes (1/2) a^T P a + q^T a subject to 0 <= a_i <= COEF and
    y^T a = 0, where P = (y y^T) * K and q = -1.
    """
    n_samples = X.shape[0]
    K = gram_matrix(X)
    P = cvxopt.matrix(np.outer(y, y) * K)
    q = cvxopt.matrix(-1 * np.ones(n_samples))
    # Box constraints expressed as G a <= h:
    #   -a_i <= 0       (lower bound)
    #    a_i <= COEF    (upper bound / slack penalty)
    lower = np.diag(np.ones(n_samples) * -1)
    upper = np.diag(np.ones(n_samples))
    G = cvxopt.matrix(np.vstack((lower, upper)))
    h = cvxopt.matrix(np.concatenate((np.zeros(n_samples), np.ones(n_samples) * COEF)))
    # Equality constraint y^T a = 0.
    A = cvxopt.matrix(y, (1, n_samples))
    b = cvxopt.matrix(0.0)
    solution = cvxopt.solvers.qp(P, q, G, h, A, b)
    return np.ravel(solution['x'])
def prepare_predictor(X, y, lagrange):
    """Build the final Predictor from the dual solution ``lagrange``.

    Samples whose multiplier exceeds MULTIPLIER are kept as support
    vectors; the bias is estimated as the mean residual between each
    support label and a zero-bias prediction on its vector.
    """
    support = lagrange > MULTIPLIER
    alphas = lagrange[support]
    support_vectors = X[support]
    support_labels = y[support]
    # Temporary zero-bias predictor used only to estimate the bias term.
    unbiased = Predictor(
        bazis=0.0,
        weights=alphas,
        vectors=support_vectors,
        labels=support_labels
    )
    residuals = [
        label - unbiased.predict(vector)
        for (label, vector) in zip(support_labels, support_vectors)
    ]
    bazis = np.mean(residuals)
    return Predictor(bazis, alphas, support_vectors, support_labels)
def train(X, y):
    """Fit a soft-margin kernel SVM on (X, y) and return its Predictor."""
    multipliers = lagrange_magic(X, y)
    return prepare_predictor(X, y, multipliers)
|
import socket
import threading
import sqlite3
from time import time_ns
'''
Connection Codes:
    00 <- Client Requesting Connection
    01 -> Connection Successful
    02 <- Request Clients
    03 <-> Ready
    04 <- Complete
    05 <- Client
    06 <- Message from Sender
    07 -> Message to Recipient
    08 -> Confirmation of Receipt by Server
    09 -> Confirmation of Receipt by Recipient
    10 <- Request Messages
    11 -> Client Already Logged in

Packet Structure:
    Message from Sender:
        Code|Recipient Key|Sender Key~Encrypted Recipient Username|Encrypted Time|Encrypted Message
        ~Encrypted Recipient Username|Encrypted Time|Encrypted Message
    Message to Recipient:
        Code|Sender Key|Encrypted Username|Encrypted Time|Encrypted Message
'''
class ClientThread(threading.Thread):
    """Handles one connected client: login challenge, client listing and
    message relay/persistence, speaking the pipe-delimited protocol
    described in the module header.
    """

    def __init__(self, server, conn, host):
        super().__init__()  # overrides parent class 'threading.Thread' __init__
        self.conn = conn            # live socket; set to False on disconnect
        self.host = host            # (ip, port) tuple from accept()
        self.id = host[0]           # replaced by the client's public key after login
        self.username = ''
        self.server = server
        self.verified = False       # True once the challenge/response succeeded
        self.readyToSend = True     # flow control for queued message replay

    def run(self):
        while self.conn:
            try:
                rawData = self.conn.recv(4096)
                if rawData == b'':
                    # recv() returning b'' means the peer closed the socket.
                    raise Exception('dc')
                data = rawData.decode().split('|')
                if data[0] == '00':
                    # Login: challenge the client with the current time; the
                    # client signs it and we verify with pow(sig, e, n).
                    time = time_ns()
                    self.conn.send(str(time).encode())
                    code = self.conn.recv(2056)
                    encoded = pow(int(code), 65537, int(data[2], 16))
                    if encoded == time:
                        if data[2] not in self.server.clients.keys():
                            self.verified = True
                            # Re-key this client's registry entry from IP to key.
                            self.server.clients[data[2]] = self.server.clients.pop(self.host[0])
                            self.id = data[2]
                            self.username = self.binDec(data[1])
                            self.conn.send(b'01')
                            # Replay stored messages involving this client one at
                            # a time, waiting for an '04' ack between messages.
                            for row in self.server.sql.execute('SELECT * FROM messages WHERE recipient = ? OR sender = ?', (self.id, self.id)):
                                while 1:
                                    if self.readyToSend == True:
                                        self.readyToSend = False
                                        sender = 0
                                        if row[1] == self.id:
                                            sender = 1
                                        users = row[0:2]
                                        row = row[sender+2].split('|')
                                        if users[sender] in self.server.clients:
                                            data = ('07|%s|%s|%s|%s' % (users[(sender+1) % 2], row[0], row[1], row[2]))
                                            self.conn.send(data.encode())
                                        break
                                if self.conn.recv(2056) == b'04':
                                    self.readyToSend = True
                        else:
                            # Key already registered: reject the second login.
                            self.conn.send(b'11')
                    else:
                        print('Verification Failure. Possible Attack?')
                elif data[0] == '02' and self.verified:
                    # Client list: send one '05' record per connected client,
                    # pacing on the client's '03' ready signals.
                    self.conn.send(b'03')
                    for client in self.server.clients:
                        data = self.conn.recv(2056)
                        if data == b'03':
                            self.conn.send(('05|%s|%s' % (self.server.clients[client].username, client)).encode())
                    data = self.conn.recv(2056)
                    if data == b'03':
                        self.conn.send(b'04')
                elif data[0] == '06' and self.verified:
                    # Incoming message: persist both encrypted copies, then
                    # relay to the recipient if currently connected.
                    rawData = rawData.decode().split('~')
                    meta = rawData[0].split('|')
                    # SECURITY FIX: use a parameterized INSERT -- the original
                    # interpolated network-supplied text straight into the SQL
                    # string (injection + quoting breakage).
                    self.server.sql.execute('INSERT INTO messages VALUES (?, ?, ?, ?)', (meta[1], meta[2], rawData[1], rawData[2]))
                    if meta[1] in self.server.clients:
                        msg = rawData[1].split('|')
                        data = ('07|%s|%s|%s|%s' % (meta[2], msg[0], msg[1], msg[2]))
                        self.server.clients[meta[1]].conn.send(data.encode())
                elif data[0] == '04':
                    self.readyToSend = True
            except Exception as e:
                # BUGFIX: the original compared `e == Exception('dc')`, which is
                # always False (exception instances compare by identity), so the
                # 'Disconnected' branch was unreachable. Compare the message.
                if str(e) == 'dc':
                    print(self.host[0] + ' Disconnected')
                else:
                    print('Error: ' + str(e))
                self.conn = False
                self.server.sql.commit()
                # pop(..., None): the entry may already be gone if cleanup runs
                # twice or the login re-keying failed part-way.
                self.server.clients.pop(self.id, None)

    def binDec(self, binary):
        """Decode a '.'-separated binary string (one char per group) to text."""
        string = ''
        for i in binary.split('.'):
            string += chr(int(i, 2))
        return string

    def binEnc(self, string):
        """Encode text as '.'-separated binary groups (inverse of binDec)."""
        return '.'.join(format(ord(i), 'b') for i in string)

    def send(self, opcode, oprands):
        """Send `opcode` and its operands joined with '|' over the socket."""
        data = '%s' % (opcode)
        for oprand in oprands:
            data += '|%s' % (oprand)
        self.conn.send(data.encode())
class Server(threading.Thread):
    """Accept-loop server thread; owns the client registry and message DB."""

    def __init__(self, port):
        super().__init__()  # overrides parent class 'threading.Thread' __init__
        self.port = port
        self.soc = socket.socket()
        self.host = socket.getfqdn()
        self.soc.bind((self.host, port))
        self.soc.listen()
        self.clients = {}
        # check_same_thread=False: the connection is shared with ClientThreads.
        self.sql = sqlite3.connect('messages.db', check_same_thread=False)
        self.sql.execute('''CREATE TABLE IF NOT EXISTS messages (recipient TEXT NOT NULL, sender TEXT NOT NULL, message_r TEXT NOT NULL, message_s TEXT NOT NULL);''')

    def run(self):
        # BUGFIX: the original __init__ called listen() directly, which blocks
        # forever -- the constructor never returned, start() was unreachable,
        # and the Thread had no run(). Running the accept loop here makes
        # `Server(port).start()` behave as intended.
        self.listen()

    def listen(self):
        """Accept clients forever, spawning a ClientThread per connection."""
        print('Server Running')
        while 1:
            # Locals, not self.conn/self.host: each accept must not clobber
            # the server's own attributes.
            conn, addr = self.soc.accept()
            print(addr[0] + ' Connected')
            self.clients[addr[0]] = ClientThread(self, conn, addr)
            self.clients[addr[0]].start()
# Script entry point: create the chat server on port 6969 and start its thread.
server = Server(6969)
server.start()
# -*- coding: utf-8 -*-
import xlrd
import base64
from odoo import models, fields, api, _, exceptions
from odoo.exceptions import UserError
import datetime
class GetAllDataImport(models.TransientModel):
    # Wizard that imports partners, taxes, products, invoices and payments
    # from an uploaded Excel workbook (one sheet per record type).
    _name = 'get.all.data.import'

    # The uploaded .xls/.xlsx workbook, base64-encoded by the web client.
    xls_file = fields.Binary('File')
@api.multi
def import_data_partner(self):
if not self.xls_file:
raise exceptions.UserError(_('Please Select Excel file'))
wb = xlrd.open_workbook(file_contents=base64.decodestring(self.xls_file))
final_data_customer = []
for sheet in wb.sheets():
if sheet.name == 'Partner':
final_data_customer = []
# set data in list
for row in range(sheet.nrows):
if row != 0:
state_name = sheet.cell(row, 4).value
state_id = False
if state_name:
self._cr.execute("""SELECT id FROM res_country_state WHERE name = '%s'""" % (state_name))
state_name = self._cr.fetchone()
state_id = state_name and state_name[0] or False
partner_name = sheet.cell(row, 0).value
nit = sheet.cell(row, 8).value
if not partner_name and not nit:
msg = 'Partner not Avaiable ' \
'Partner name %s and NIT %s !\n ' % (partner_name, nit)
raise UserError(_('Data Not Available !\n' + msg))
self._cr.execute("""SELECT id FROM res_partner where name like '%s' AND vat = '%s'""" % (partner_name, nit))
partner_name = self._cr.fetchone()
partner = partner_name and partner_name[0] or False
customer = False
if sheet.cell(row, 6).value and sheet.cell(row, 6).value == 1:
customer = True
supplier = False
if sheet.cell(row, 7).value and sheet.cell(row, 7).value == 1:
supplier = True
if not partner:
data = {
'name': sheet.cell(row, 0).value,
'street': sheet.cell(row, 1).value,
'street2': sheet.cell(row, 2).value,
'city': sheet.cell(row, 3).value,
'state_id': state_id or False,
'zip': sheet.cell(row, 5).value,
'customer': customer,
'supplier': supplier,
'vat': sheet.cell(row, 8).value,
'phone': sheet.cell(row, 9).value,
'mobile': sheet.cell(row, 10).value,
'email': sheet.cell(row, 11).value,
'company_type': sheet.cell(row, 12).value
}
final_data_customer.append(data)
# create final data for parner
for partner in final_data_customer:
self.env['res.partner'].create(partner)
# create all partner
@api.multi
def import_data_tax(self):
if not self.xls_file:
raise exceptions.UserError(_('Please Select Excel file'))
wb = xlrd.open_workbook(file_contents=base64.decodestring(self.xls_file))
final_data_tax = []
for sheet in wb.sheets():
if sheet.name == 'Tax':
final_data_tax = []
# set data in list
for row in range(sheet.nrows):
if row != 0:
account_name = sheet.cell(row, 4).value
self._cr.execute("""SELECT id from account_account WHERE name = '%s'""" % (account_name))
d_tax_id = self._cr.fetchone()
d_tax = d_tax_id and d_tax_id[0] or False
account_name = sheet.cell(row, 5).value
self._cr.execute("""SELECT id from account_account WHERE name = '%s'""" % (account_name))
c_tax_id = self._cr.fetchone()
c_tax = c_tax_id and c_tax_id[0] or False
# tax find
tax_name = sheet.cell(row, 0).value
scope = sheet.cell(row, 1).value
if not tax_name and not scope:
msg = 'Tax not Avaiable ' \
'Tax name %s !\n ' % (tax_name)
raise UserError(_('Please Check Tax !\n' + msg))
self._cr.execute("""SELECT id from account_tax WHERE name = '%s'""" % (sheet.cell(row, 0).value))
tax_id = self._cr.fetchone()
tax = tax_id and tax_id[0] or False
if not tax:
tax_dict = {
'name': sheet.cell(row, 0).value,
'type_tax_use': sheet.cell(row, 1).value,
'amount_type': sheet.cell(row, 2).value,
'amount': sheet.cell(row, 3).value,
'account_id': d_tax or False,
'refund_account_id': c_tax or False,
'description': sheet.cell(row, 6).value,
'price_include': sheet.cell(row, 7).value,
}
final_data_tax.append(tax_dict)
# create tax from list
for tax in final_data_tax:
self.env['account.tax'].create(tax)
@api.multi
def import_data_product(self):
if not self.xls_file:
raise exceptions.UserError(_('Please Select Excel file'))
wb = xlrd.open_workbook(file_contents=base64.decodestring(self.xls_file))
final_data_product = []
for sheet in wb.sheets():
if sheet.name == 'Product':
final_data_product = []
# set data in list
for row in range(sheet.nrows):
if row != 0:
self._cr.execute("""SELECT id from product_category WHERE name = '%s'""" % (sheet.cell(row, 3).value))
product_categ_id = self._cr.fetchone()
product_categ = product_categ_id and product_categ_id[0] or False
s_tax_name = sheet.cell(row, 6).value,
self._cr.execute("""SELECT id from account_tax WHERE name = '%s'""" % (s_tax_name))
s_tax_id = self._cr.fetchone()
sale_tax = s_tax_id and s_tax_id[0] or False
if s_tax_name and not sale_tax:
msg = 'Sale TAX not Avaiable ' \
'Tax name %s !\n ' % (s_tax_name)
raise UserError(_('Data Not Available !\n' + msg))
v_tax_name = sheet.cell(row, 8).value,
self._cr.execute("""SELECT id from account_tax WHERE name = '%s'""" % (v_tax_name))
p_tax_id = self._cr.fetchone()
purchase_tax = p_tax_id and p_tax_id[0] or False
if v_tax_name and not purchase_tax:
msg = 'Vendor TAX not Avaiable ' \
'Tax name %s !\n ' % (s_tax_name)
raise UserError(_('Data Not Available !\n' + msg))
sale_ok = False
if sheet.cell(row, 10).value and sheet.cell(row, 10).value == 1:
sale_ok = True
purchase_ok = False
if sheet.cell(row, 11).value and sheet.cell(row, 11).value == 1:
purchase_ok = True
name = sheet.cell(row, 0).value
default_code = str(sheet.cell(row, 2).value)
product_domain = []
if name:
product_domain.append(('name', 'ilike', name))
if default_code:
product_domain.append(('default_code', 'ilike', default_code))
product_id = False
if product_domain:
product_id = self.env['product.template'].search(product_domain, limit=1)
# product_data = self._cr.fetchone()
# product_id = product_data and product_data[0] or False
if not product_id:
data = {
'name': name,
'type': sheet.cell(row, 1).value,
'default_code': default_code,
'lst_price': sheet.cell(row, 4).value,
'standard_price': sheet.cell(row, 5).value,
'sale_ok': sale_ok,
'purchase_ok': purchase_ok,
'categ_id': product_categ or 1,
'taxes_id': [(6, 0, [sale_tax])],
'supplier_taxes_id': [(6, 0, [purchase_tax])],
}
final_data_product.append(data)
# self.env['product.template'].create(data)
# create product from list
# cnt = 0
for product in final_data_product:
# if cnt == 50:
# break
self.env['product.template'].create(product)
# cnt += 1
    @api.multi
    def import_data_invoice(self):
        """Import customer/vendor invoices from the 'Invoice' sheet.

        Each spreadsheet row is one invoice LINE; rows sharing the same
        invoice number are grouped into a single invoice. Invoices are
        created in draft, their lines rebuilt through the onchange
        methods, validated, then the original number is written back
        (presumably because validation assigns a sequence number --
        TODO confirm).

        :raises UserError: when no file is selected or a referenced
            partner/product/tax cannot be found.
        """
        if not self.xls_file:
            raise exceptions.UserError(_('Please Select Excel file'))
        wb = xlrd.open_workbook(file_contents=base64.decodestring(self.xls_file))
        for sheet in wb.sheets():
            if sheet.name == 'Invoice':
                invoice_list = []
                invoice_val = []
                # Pass 1: one dict per spreadsheet row (i.e. per invoice line).
                for row in range(sheet.nrows):
                    if row != 0:  # skip the header row
                        obj_product = self.env['product.template']
                        in_type = sheet.cell(row, 1).value
                        partner_name = sheet.cell(row, 2).value.strip()
                        nit = sheet.cell(row, 3).value
                        domain = []
                        if partner_name:
                            domain.append(('name', 'ilike', partner_name))
                        if nit:
                            domain.append(('vat', '=', nit))
                        err_msg = ''
                        # Restrict the partner search by invoice direction.
                        if in_type == "out_invoice":
                            domain.append(('customer', '=', True))
                            err_msg = "Customer"
                        elif in_type == "in_invoice":
                            domain.append(('supplier', '=', True))
                            err_msg = "Supplier"
                        partner_id = self.env['res.partner'].search(domain, limit=1)
                        if not partner_id:
                            msg = '%s not Avaiable ' \
                                  'Partner name %s and NIT %s !\n ' % (err_msg, partner_name, nit)
                            raise UserError(_('Data Not Available !\n' + msg))
                        product_name = sheet.cell(row, 6).value
                        product_id = obj_product.search([('name', 'ilike', product_name)], limit=1)
                        if not product_id:
                            msg = 'Product not Avaiable ' \
                                  'Product name _(%s) !\n ' % (product_name)
                            raise UserError(_('Data Not Available !\n' + msg))
                        # tax_name = sheet.cell(row, 10).value
                        # self._cr.execute("""SELECT id from account_tax where name like '%s'""" % (tax_name))
                        # tax_id = [data[0] for data in self._cr.fetchall()]
                        # Taxes come as a comma-separated list of names.
                        tax_data = sheet.cell(row, 10).value
                        tax_ids = []
                        if tax_data:
                            tax_list = tax_data.split(",")
                            tax_ids = self.env['account.tax'].search([('name', 'in', tax_list)])
                            if tax_list and not tax_ids:
                                msg = 'Tax not Avaiable '
                                raise UserError(_('Data Not Available !\n' + msg))
                            tax_ids = tax_ids.ids
                        extra_field = sheet.cell(row, 11).value
                        extra_id = False
                        if extra_field:
                            # extra_id = self.env['x_centro_de_costo'].search([('x_name', 'ilike', extra_field)], limit=1)
                            extra_id = self.env['account.analytic.account'].search([('name', 'ilike', extra_field)], limit=1).id
                        # Excel stores dates as serial floats; convert via xlrd.
                        int_date = datetime.datetime(*xlrd.xldate_as_tuple(sheet.cell(row, 4).value, wb.datemode))
                        due_date = datetime.datetime(*xlrd.xldate_as_tuple(sheet.cell(row, 12).value, wb.datemode))
                        invoice_data = {
                            'number': sheet.cell(row, 0).value,
                            'type': in_type,
                            'partner_id': partner_id.id,
                            'date_invoice': int_date,
                            'date_due': due_date,
                            'origin': sheet.cell(row, 5).value,
                            'product_id': product_id.id,
                            'quantity': sheet.cell(row, 7).value,
                            'price_unit': sheet.cell(row, 8).value,
                            'tax': tax_ids,
                            'account_analytic_id': extra_id
                        }
                        # if in_type == "out_invoice":
                        #     invoice_data.update({'x_studio_field_ngXF8': extra_id.id})
                        # elif in_type == "in_invoice":
                        #     invoice_data.update({'x_studio_field_45LWE': extra_id.id})
                        invoice_list.append(invoice_data)
                # Pass 2: group the row dicts by invoice number.
                if invoice_list:
                    invoice_dict = {}
                    # NOTE: 'key' here is a row dict, not a dict key.
                    for key in invoice_list:
                        in_type = key['type']
                        if key['number'] not in invoice_dict.keys():
                            # Header fields taken from the first row seen
                            # for this invoice number.
                            data = {
                                'number': key['number'],
                                'type': key['type'],
                                'partner_id': key['partner_id'],
                                'date_invoice': key['date_invoice'],
                                'origin': key['origin'],
                                'date_due': key['date_due'],
                            }
                            invoice_dict.update({key['number']: data})
                    # Attach the line data belonging to each invoice number.
                    for key in invoice_dict:
                        lst = []
                        for final_data in invoice_list:
                            if key == final_data['number']:
                                invoice_line_ids = {
                                    'product_id': final_data.get('product_id'),
                                    'quantity': final_data.get('quantity'),
                                    'price_unit': final_data.get('price_unit'),
                                    'tax': final_data.get('tax'),
                                    'account_analytic_id': final_data.get('account_analytic_id'),
                                }
                                # if in_type == "out_invoice":
                                #     invoice_line_ids.update({'x_studio_field_ngXF8': final_data.get('x_studio_field_ngXF8')})
                                # elif in_type == "in_invoice":
                                #     invoice_line_ids.update({'x_studio_field_45LWE': final_data.get('x_studio_field_45LWE')})
                                lst.append(invoice_line_ids)
                        if lst and invoice_dict.get(key):
                            invoice_dict.get(key).update({'lines': lst})
                    for d in invoice_dict.values():
                        invoice_val.append(d)
                # Pass 3: create, populate and validate each invoice.
                for inv_data in invoice_val:
                    order_data = {
                        'partner_id': inv_data.get('partner_id'),
                        'date_invoice': inv_data.get('date_invoice'),
                        'type': inv_data.get('type'),
                        'number': inv_data.get('number'),
                        'date_due': inv_data.get('date_due'),
                    }
                    obj_account_inv = self.env['account.invoice']
                    invoice_id = obj_account_inv.create(order_data)
                    if invoice_id:
                        # invoice_id.create_from_import(inv_data.get('lines'), inv_data.get('number'))
                        invoice_lines = invoice_id.invoice_line_ids
                        for line in inv_data.get('lines'):
                            # Build each line via new() so the product
                            # onchange fills account/price defaults first.
                            invoice_line = invoice_lines.new()
                            invoice_line.invoice_id = invoice_id.id
                            invoice_line.product_id = line.get('product_id')
                            invoice_line._onchange_product_id()
                            invoice_line.quantity = line.get('quantity')
                            invoice_line.price_unit = line.get('price_unit')
                            invoice_line.account_analytic_id = line.get('account_analytic_id')
                            # Reset then set taxes so onchange defaults don't linger.
                            invoice_line.invoice_line_tax_ids = []
                            invoice_line.invoice_line_tax_ids = [[6, 0, line.get('tax')]]
                            # if line.get('x_studio_field_ngXF8', False):
                            #     invoice_line.x_studio_field_ngXF8 = line.get('x_studio_field_ngXF8')
                            # if line.get('x_studio_field_45LWE', False):
                            #     invoice_line.x_studio_field_45LWE = line.get('x_studio_field_45LWE')
                            invoice_id.invoice_line_ids = invoice_id.invoice_line_ids | invoice_line
                        invoice_id._onchange_invoice_line_ids()
                        invoice_id.action_invoice_open()
                        invoice_id.write({'number': inv_data.get('number')})
    @api.multi
    def import_data_payment(self):
        """Import payments from the 'payment' sheet of the uploaded Excel file.

        For every data row the referenced invoice is looked up by number, a
        draft ``account.payment`` is prepared through onchange-style
        assignment, and the payment is validated against that invoice.

        :raises exceptions.UserError: when no Excel file has been selected.
        """
        if not self.xls_file:
            raise exceptions.UserError(_('Please Select Excel file'))
        # The uploaded file is stored base64-encoded on the wizard record.
        wb = xlrd.open_workbook(file_contents=base64.decodestring(self.xls_file))
        for sheet in wb.sheets():
            if sheet.name == 'payment':
                obj_account_payment = self.env['account.payment']
                # NOTE(review): despite its name this is the invoice model.
                obj_account_account = self.env['account.invoice']
                obj_account_journal = self.env['account.journal']
                # final_data_payment = []
                for row in range(sheet.nrows):
                    if row != 0:  # row 0 is the header row
                        # Expected columns: amount, journal, date, memo, invoice number.
                        amount = sheet.cell(row, 0).value
                        journal = sheet.cell(row, 1).value
                        # Excel stores dates as floats; convert with the workbook's datemode.
                        payment_date = datetime.datetime(*xlrd.xldate_as_tuple(sheet.cell(row, 2).value, wb.datemode))
                        memo = sheet.cell(row, 3).value
                        invoice_number = sheet.cell(row, 4).value
                        invoice_id = obj_account_account.search([('number', 'like', invoice_number)], limit=1)
                        if invoice_id:
                            # Match the journal by its first word only (e.g. "Bank (USD)" -> "Bank").
                            new_journal = journal.split(" ")[0]
                            journal_id = obj_account_journal.search([('name', 'like', new_journal)])
                            payment = obj_account_payment
                            # new() builds an in-memory record so the onchange
                            # methods can run before anything is persisted.
                            payment_id = payment.new()
                            payment_id.state = "draft"
                            payment_id.partner_id = invoice_id.partner_id.id
                            payment_id.amount = amount
                            payment_id._onchange_amount()
                            payment_id.payment_date = payment_date
                            payment_id.communication = memo
                            payment_id.invoice_ids = [(6, 0, [invoice_id.id])]
                            payment_id.journal_id = journal_id.id
                            # Payment direction follows the invoice type.
                            if invoice_id.type == "out_invoice":
                                method = self.env['account.payment.method'].search([('payment_type', '=', 'inbound')], limit=1)
                                payment_id.partner_type = "customer"
                                payment_id.payment_type = "inbound"
                                payment_id.payment_method_id = method.id
                            elif invoice_id.type == "in_invoice":
                                method = self.env['account.payment.method'].search([('payment_type', '=', 'outbound')], limit=1)
                                payment_id.partner_type = "supplier"
                                payment_id.payment_type = "outbound"
                                payment_id.payment_method_id = method.id
                            payment_id._onchange_payment_type()
                            payment = payment | payment_id
                            payment.action_validate_invoice_payment()
# @api.multi
# def import_data(self):
# if not self.xls_file:
# raise exceptions.UserError(_('Please Select Excel file'))
# wb = xlrd.open_workbook(file_contents=base64.decodestring(self.xls_file))
# final_data_customer = []
# final_data_tax = []
# final_data_product = []
# for sheet in wb.sheets():
# if sheet.name == 'Partner':
# final_data_customer = []
# # set data in list
# for row in range(sheet.nrows):
# if row != 0:
# state_name = sheet.cell(row, 4).value
# state_id = False
# if state_name:
# self._cr.execute("""SELECT id FROM res_country_state WHERE name = '%s'""" % (state_name))
# state_name = self._cr.fetchone()
# state_id = state_name and state_name[0] or False
# partner_name = sheet.cell(row, 0).value
# nit = sheet.cell(row, 8).value
# if not partner_name and not nit:
# msg = 'Partner not Avaiable ' \
# 'Partner name %s and NIT %s !\n ' % (partner_name, nit)
# raise UserError(_('Data Not Available !\n' + msg))
# self._cr.execute("""SELECT id FROM res_partner where name like '%s' AND vat = '%s'""" % (partner_name, nit))
# partner_name = self._cr.fetchone()
# partner = partner_name and partner_name[0] or False
# customer = False
# if sheet.cell(row, 6).value and sheet.cell(row, 6).value == 1:
# customer = True
# supplier = False
# if sheet.cell(row, 7).value and sheet.cell(row, 7).value == 1:
# supplier = True
# if not partner:
# partner_data = {
# 'name': sheet.cell(row, 0).value,
# 'street': sheet.cell(row, 1).value,
# 'street2': sheet.cell(row, 2).value,
# 'city': sheet.cell(row, 3).value,
# 'state_id': state_id or False,
# 'zip': sheet.cell(row, 5).value,
# 'customer': customer,
# 'supplier': supplier,
# 'vat': sheet.cell(row, 8).value,
# 'phone': sheet.cell(row, 9).value,
# 'mobile': sheet.cell(row, 10).value,
# 'email': sheet.cell(row, 11).value,
# 'company_type': sheet.cell(row, 12).value
# }
# final_data_customer.append(partner_data)
# # create final data for parner
# for partner in final_data_customer:
# self.env['res.partner'].create(partner)
# # create all partner
# if sheet.name == 'Tax':
# final_data_tax = []
# # set data in list
# for row in range(sheet.nrows):
# if row != 0:
# account_name = sheet.cell(row, 4).value
# self._cr.execute("""SELECT id from account_account WHERE name = '%s'""" % (account_name))
# d_tax_id = self._cr.fetchone()
# d_tax = d_tax_id and d_tax_id[0] or False
# account_name = sheet.cell(row, 5).value
# self._cr.execute("""SELECT id from account_account WHERE name = '%s'""" % (account_name))
# c_tax_id = self._cr.fetchone()
# c_tax = c_tax_id and c_tax_id[0] or False
# # tax find
# tax_name = sheet.cell(row, 0).value
# scope = sheet.cell(row, 1).value
# if not tax_name and not scope:
# msg = 'Tax not Avaiable ' \
# 'Tax name %s !\n ' % (tax_name)
# raise UserError(_('Please Check Tax !\n' + msg))
# self._cr.execute("""SELECT id from account_tax WHERE name = '%s'""" % (sheet.cell(row, 0).value))
# tax_id = self._cr.fetchone()
# tax = tax_id and tax_id[0] or False
# if not tax:
# tax_dict = {
# 'name': sheet.cell(row, 0).value,
# 'type_tax_use': sheet.cell(row, 1).value,
# 'amount_type': sheet.cell(row, 2).value,
# 'amount': sheet.cell(row, 3).value,
# 'account_id': d_tax or False,
# 'refund_account_id': c_tax or False,
# 'description': sheet.cell(row, 6).value,
# 'price_include': sheet.cell(row, 7).value,
# }
# final_data_tax.append(tax_dict)
# # create tax from list
# for tax in final_data_tax:
# self.env['account.tax'].create(tax)
# if sheet.name == 'Product':
# final_data_product = []
# # set data in list
# for row in range(sheet.nrows):
# if row != 0:
# self._cr.execute("""SELECT id from product_category WHERE name = '%s'""" % (sheet.cell(row, 3).value))
# product_categ_id = self._cr.fetchone()
# product_categ = product_categ_id and product_categ_id[0] or False
# s_tax_name = sheet.cell(row, 6).value,
# self._cr.execute("""SELECT id from account_tax WHERE name = '%s'""" % (s_tax_name))
# s_tax_id = self._cr.fetchone()
# sale_tax = s_tax_id and s_tax_id[0] or False
# if s_tax_name and not sale_tax:
# msg = 'Sale TAX not Avaiable ' \
# 'Tax name %s !\n ' % (s_tax_name)
# raise UserError(_('Data Not Available !\n' + msg))
# v_tax_name = sheet.cell(row, 8).value,
# self._cr.execute("""SELECT id from account_tax WHERE name = '%s'""" % (v_tax_name))
# p_tax_id = self._cr.fetchone()
# purchase_tax = p_tax_id and p_tax_id[0] or False
# if v_tax_name and not purchase_tax:
# msg = 'Vendor TAX not Avaiable ' \
# 'Tax name %s !\n ' % (s_tax_name)
# raise UserError(_('Data Not Available !\n' + msg))
# sale_ok = False
# if sheet.cell(row, 10).value and sheet.cell(row, 10).value == 1:
# sale_ok = True
# purchase_ok = False
# if sheet.cell(row, 11).value and sheet.cell(row, 11).value == 1:
# purchase_ok = True
# name = sheet.cell(row, 0).value
# default_code = str(sheet.cell(row, 2).value)
# product_domain = []
# if name:
# product_domain.append(('name', 'ilike', name))
# if default_code:
# product_domain.append(('default_code', 'ilike', default_code))
# product_id = False
# if product_domain:
# product_id = self.env['product.template'].search(product_domain, limit=1)
# # product_data = self._cr.fetchone()
# # product_id = product_data and product_data[0] or False
# if not product_id:
# product_data = {
# 'name': name,
# 'type': sheet.cell(row, 1).value,
# 'default_code': default_code,
# 'lst_price': sheet.cell(row, 4).value,
# 'standard_price': sheet.cell(row, 5).value,
# 'sale_ok': sale_ok,
# 'purchase_ok': purchase_ok,
# 'categ_id': product_categ or 1,
# 'taxes_id': [(6, 0, [sale_tax])],
# 'supplier_taxes_id': [(6, 0, [purchase_tax])],
# }
# final_data_product.append(product_data)
# # self.env['product.template'].create(data)
# # create product from list
# cnt = 0
# for product in final_data_product:
# if cnt == 50:
# break
# self.env['product.template'].create(product)
# cnt += 1
# if sheet.name == 'Invoice':
# invoice_list = []
# invoice_val = []
# for row in range(sheet.nrows):
# if row != 0:
# obj_product = self.env['product.template']
# in_type = sheet.cell(row, 1).value
# partner_name = sheet.cell(row, 2).value.strip()
# nit = sheet.cell(row, 3).value
# domain = []
# if partner_name:
# domain.append(('name', 'ilike', partner_name))
# if nit:
# domain.append(('vat', '=', nit))
# err_msg = ''
# if in_type == "out_invoice":
# domain.append(('customer', '=', True))
# err_msg = "Customer"
# elif in_type == "in_invoice":
# domain.append(('supplier', '=', True))
# err_msg = "Supplier"
# partner_id = self.env['res.partner'].search(domain, limit=1)
# if not partner_id:
# msg = '%s not Avaiable ' \
# 'Partner name %s and NIT %s !\n ' % (err_msg, partner_name, nit)
# raise UserError(_('Data Not Available !\n' + msg))
# product_name = sheet.cell(row, 6).value
# product_id = obj_product.search([('name', 'ilike', product_name)], limit=1)
# if not product_id:
# msg = 'Product not Avaiable ' \
# 'Product name _(%s) !\n ' % (product_name)
# raise UserError(_('Data Not Available !\n' + msg))
# tax_name = sheet.cell(row, 10).value
# self._cr.execute("""SELECT id from account_tax where name like '%s'""" % (tax_name))
# tax_id = [data[0] for data in self._cr.fetchall()]
# if tax_name and not tax_id:
# msg = 'Tax not Avaiable ' \
# 'Tax name %s and !\n ' % (tax_name)
# raise UserError(_('Data Not Available !\n' + msg))
# extra_field = sheet.cell(row, 11).value
# extra_id = False
# if extra_field:
# extra_id = self.env['x_centro_de_costo'].search([('x_name', 'ilike', extra_field)], limit=1)
# int_date = datetime.datetime(*xlrd.xldate_as_tuple(sheet.cell(row, 4).value, wb.datemode))
# invoice_data = {
# 'number': sheet.cell(row, 0).value,
# 'type': in_type,
# 'partner_id': partner_id.id,
# 'date_invoice': int_date,
# 'origin': sheet.cell(row, 5).value,
# 'product_id': product_id.id,
# 'quantity': sheet.cell(row, 7).value,
# 'price_unit': sheet.cell(row, 8).value,
# 'tax': tax_id,
# }
# if in_type == "out_invoice":
# invoice_data.update({'x_studio_field_ngXF8': extra_id.id})
# elif in_type == "in_invoice":
# invoice_data.update({'x_studio_field_45LWE': extra_id.id})
# invoice_list.append(invoice_data)
# if invoice_list:
# invoice_dict = {}
# for key in invoice_list:
# in_type = key['type']
# if key['number'] not in invoice_dict.keys():
# data = {
# 'number': key['number'],
# 'type': key['type'],
# 'partner_id': key['partner_id'],
# 'date_invoice': key['date_invoice'],
# 'origin': key['origin'],
# }
# invoice_dict.update({key['number']: data})
# for key in invoice_dict:
# lst = []
# for final_data in invoice_list:
# if key == final_data['number']:
# invoice_line_ids = {
# 'product_id': final_data.get('product_id'),
# 'quantity': final_data.get('quantity'),
# 'price_unit': final_data.get('price_unit'),
# 'tax': final_data.get('tax'),
# }
# if in_type == "out_invoice":
# invoice_line_ids.update({'x_studio_field_ngXF8': final_data.get('x_studio_field_ngXF8')})
# elif in_type == "in_invoice":
# invoice_line_ids.update({'x_studio_field_45LWE': final_data.get('x_studio_field_45LWE')})
# lst.append(invoice_line_ids)
# if lst and invoice_dict.get(key):
# invoice_dict.get(key).update({'lines': lst})
# for d in invoice_dict.values():
# invoice_val.append(d)
# for inv_data in invoice_val:
# order_data = {
# 'partner_id': inv_data.get('partner_id'),
# 'date_invoice': inv_data.get('date_invoice'),
# 'type': inv_data.get('type'),
# 'number': inv_data.get('number'),
# }
# obj_account_inv = self.env['account.invoice']
# invoice_id = obj_account_inv.create(order_data)
# if invoice_id:
# # invoice_id.create_from_import(inv_data.get('lines'), inv_data.get('number'))
# invoice_lines = invoice_id.invoice_line_ids
# for line in inv_data.get('lines'):
# invoice_line = invoice_lines.new()
# invoice_line.invoice_id = invoice_id.id
# invoice_line.product_id = line.get('product_id')
# invoice_line._onchange_product_id()
# invoice_line.quantity = line.get('quantity')
# invoice_line.price_unit = line.get('price_unit')
# invoice_line.invoice_line_tax_ids = []
# invoice_line.invoice_line_tax_ids = [[6, 0, line.get('tax')]]
# if line.get('x_studio_field_ngXF8', False):
# invoice_line.x_studio_field_ngXF8 = line.get('x_studio_field_ngXF8')
# if line.get('x_studio_field_45LWE', False):
# invoice_line.x_studio_field_45LWE = line.get('x_studio_field_45LWE')
# invoice_id.invoice_line_ids = invoice_id.invoice_line_ids | invoice_line
# invoice_id._onchange_invoice_line_ids()
# invoice_id.action_invoice_open()
# invoice_id.write({'number': inv_data.get('number')})
|
"""The command for archiving a vacation."""
from dataclasses import dataclass
from typing import Iterable
from jupiter.core.domain.features import Feature
from jupiter.core.framework.base.entity_id import EntityId
from jupiter.core.framework.event import EventSource
from jupiter.core.framework.use_case import (
ProgressReporter,
UseCaseArgsBase,
)
from jupiter.core.use_cases.infra.use_cases import (
AppLoggedInMutationUseCase,
AppLoggedInUseCaseContext,
)
@dataclass
class VacationArchiveArgs(UseCaseArgsBase):
    """Arguments for archiving a vacation."""

    # The id of the vacation to archive.
    ref_id: EntityId
class VacationArchiveUseCase(AppLoggedInMutationUseCase[VacationArchiveArgs, None]):
    """The command for archiving a vacation."""

    @staticmethod
    def get_scoped_to_feature() -> Iterable[Feature] | Feature | None:
        """The feature the use case is scoped to."""
        return Feature.VACATIONS

    async def _perform_mutation(
        self,
        progress_reporter: ProgressReporter,
        context: AppLoggedInUseCaseContext,
        args: VacationArchiveArgs,
    ) -> None:
        """Execute the command's action."""
        async with self._domain_storage_engine.get_unit_of_work() as uow:
            vacation = await uow.vacation_repository.load_by_id(args.ref_id)
            # Archiving is a soft delete: the entity records the event
            # source and timestamp rather than being removed from storage.
            vacation = vacation.mark_archived(
                EventSource.CLI,
                self._time_provider.get_current_time(),
            )
            await uow.vacation_repository.save(vacation)
            await progress_reporter.mark_updated(vacation)
|
import numpy as np
import sys
import torch
class BouncingMNISTDataHandler(object):
    """Data Handler that creates Bouncing MNIST dataset on the fly.

    Digit templates are loaded from per-class ``.npy`` files under
    ``mnist/raw digit/`` and animated across a 64x64 canvas with a
    constant speed, bouncing elastically off the edges.
    """

    def __init__(self, batch_size, num_digits):
        self.seq_length_ = 20
        self.batch_size_ = batch_size
        self.image_size_ = 64
        self.num_digits_ = num_digits
        self.max_speed = 0.5  # with canvas size 1
        # The dataset is really infinite. This is just for validation.
        self.dataset_size_ = 1000000
        self.digit_size_ = 28
        # a list of numpy tensors of shape (1, digit_size_, digit_size_)
        self.data_ = []
        for i in range(10):
            try:
                self.data_.append(np.load('mnist/raw digit/{}.npy'.format(i)))
            except OSError:
                # Narrowed from a bare ``except:`` so that unrelated errors
                # (e.g. KeyboardInterrupt) are no longer swallowed here.
                print('Please set the correct path to MNIST dataset')
                sys.exit(1)
            else:
                self.data_[i] = self.data_[i].reshape(1, self.digit_size_, self.digit_size_)
        self.indices_ = np.arange(len(self.data_))
        self.row_ = 0
        np.random.shuffle(self.indices_)

    def GetBatchSize(self):
        """Number of sequences per batch."""
        return self.batch_size_

    def GetDatasetSize(self):
        """Nominal dataset size (generation is effectively infinite)."""
        return self.dataset_size_

    def GetSeqLength(self):
        """Total number of frames per sequence."""
        return self.seq_length_

    def Reset(self):
        """No-op; kept for interface compatibility."""
        pass

    def GetRandomTrajectory(self, batch_size):
        """Sample ``batch_size`` bouncing trajectories.

        Returns ``(start_y, start_x)``: int32 arrays of shape
        ``(seq_length_, batch_size)`` holding top-left pixel coordinates
        inside the ``image_size_ - digit_size_`` canvas.
        """
        length = self.seq_length_
        canvas_size = self.image_size_ - self.digit_size_
        # Initial position uniform random inside the box.
        y = np.random.rand(batch_size)
        x = np.random.rand(batch_size)
        # Random direction and speed.
        theta = np.random.rand(batch_size) * 2 * np.pi
        ro = np.random.rand(batch_size) * self.max_speed
        v_y = ro * np.sin(theta)
        v_x = ro * np.cos(theta)
        start_y = np.zeros((length, batch_size))
        start_x = np.zeros((length, batch_size))
        for i in range(length):
            # Take a step along velocity.
            y += v_y
            x += v_x
            # Bounce off edges. Vectorised with boolean masks; replaces the
            # original per-element Python loop but applies the same clamps
            # and sign flips in the same order.
            low = x <= 0
            x[low] = 0
            v_x[low] = -v_x[low]
            high = x >= 1.0
            x[high] = 1.0
            v_x[high] = -v_x[high]
            low = y <= 0
            y[low] = 0
            v_y[low] = -v_y[low]
            high = y >= 1.0
            y[high] = 1.0
            v_y[high] = -v_y[high]
            start_y[i, :] = y
            start_x[i, :] = x
        # Scale to the size of the canvas.
        start_y = (canvas_size * start_y).astype(np.int32)
        start_x = (canvas_size * start_x).astype(np.int32)
        return start_y, start_x

    def Overlap(self, a, b):
        """Put b on top of a."""
        return np.maximum(a, b)
        # return b

    def GetBatch(self):
        """Render one batch of sequences and split it in half.

        Returns ``(input, target)`` torch tensors of shape
        ``(seq_length_/2, batch_size_, 1, image_size_, image_size_)``.
        """
        start_y, start_x = self.GetRandomTrajectory(self.batch_size_ * self.num_digits_)
        # minibatch data
        data = np.zeros((self.seq_length_, self.batch_size_, 1, self.image_size_, self.image_size_), dtype=np.float32)
        for j in range(self.batch_size_):
            for n in range(self.num_digits_):
                # get random digit from dataset
                ind = self.indices_[self.row_]
                self.row_ += 1
                if self.row_ == len(self.data_):
                    self.row_ = 0
                    np.random.shuffle(self.indices_)
                digit_image = self.data_[ind]
                # paste the digit at its trajectory position in every frame
                for i in range(self.seq_length_):
                    top = start_y[i, j * self.num_digits_ + n]
                    left = start_x[i, j * self.num_digits_ + n]
                    bottom = top + self.digit_size_
                    right = left + self.digit_size_
                    data[i, j, 0, top:bottom, left:right] = self.Overlap(
                        data[i, j, 0, top:bottom, left:right], digit_image)
        # (A redundant no-op reshape of ``data`` was removed: the array is
        # already (seq_length_, batch_size_, 1, image_size_, image_size_).)
        len1 = self.seq_length_ // 2
        input_data = data[0:len1]
        target_data = data[len1:self.seq_length_]
        return (torch.tensor(input_data), torch.tensor(target_data))

    def generate_files(self):
        """Pre-render batches to mnist/mnist-<num_digits>/batch<i>.npy."""
        for i in range(int(self.dataset_size_ / self.batch_size_)):
            np.save('mnist/mnist-' + str(self.num_digits_) + '/batch' + str(i) + '.npy', self.GetBatch())
'''
def DisplayData(self, data, rec=None, fut=None, fig=1, case_id=0, output_file=None):
output_file1 = None
output_file2 = None
if output_file is not None:
name, ext = os.path.splitext(output_file)
output_file1 = '%s_original%s' % (name, ext)
output_file2 = '%s_recon%s' % (name, ext)
# get data
data = data[case_id, :].reshape(-1, self.image_size_, self.image_size_)
# get reconstruction and future sequences if exist
if rec is not None:
rec = rec[case_id, :].reshape(-1, self.image_size_, self.image_size_)
enc_seq_length = rec.shape[0]
if fut is not None:
fut = fut[case_id, :].reshape(-1, self.image_size_, self.image_size_)
if rec is None:
enc_seq_length = self.seq_length_ - fut.shape[0]
else:
assert enc_seq_length == self.seq_length_ - fut.shape[0]
num_rows = 1
# create figure for original sequence
plt.figure(2*fig, figsize=(20, 1))
plt.clf()
for i in range(self.seq_length_):
plt.subplot(num_rows, self.seq_length_, i+1)
plt.imshow(data[i, :, :], cmap=plt.cm.gray, interpolation="nearest")
plt.axis('off')
plt.draw()
if output_file1 is not None:
print(output_file1)
plt.savefig(output_file1, bbox_inches='tight')
# create figure for reconstuction and future sequences
plt.figure(2*fig+1, figsize=(20, 1))
plt.clf()
for i in range(self.seq_length_):
if rec is not None and i < enc_seq_length:
plt.subplot(num_rows, self.seq_length_, i + 1)
plt.imshow(rec[rec.shape[0] - i - 1, :, :], cmap=plt.cm.gray, interpolation="nearest")
if fut is not None and i >= enc_seq_length:
plt.subplot(num_rows, self.seq_length_, i + 1)
plt.imshow(fut[i - enc_seq_length, :, :], cmap=plt.cm.gray, interpolation="nearest")
plt.axis('off')
plt.draw()
if output_file2 is not None:
print(output_file2)
plt.savefig(output_file2, bbox_inches='tight')
else:
plt.pause(0.1)
'''
|
from rdflib import Graph
from oldman import SPARQLDataStore, ClientResourceManager, parse_graph_safely
from oldman.rest.controller import HTTPController
from os import path
import unittest
# Module-level test fixtures: the schema graph is parsed once from the
# bundled Turtle file, the data graph starts empty and is populated through
# the controller under test.
schema_graph = Graph()
schema_file = path.join(path.dirname(__file__), "controller-schema.ttl")
schema_graph = parse_graph_safely(schema_graph, schema_file, format="turtle")
context_file = "file://" + path.join(path.dirname(__file__), "controller-context.jsonld")
data_graph = Graph()
data_store = SPARQLDataStore(data_graph, schema_graph=schema_graph)
# Models use incremental IRIs so created resources get predictable ids.
data_store.create_model("Collection", context_file, iri_prefix="http://localhost/collections/",
                        incremental_iri=True)
data_store.create_model("Item", context_file, iri_prefix="http://localhost/items/", incremental_iri=True)
client_manager = ClientResourceManager(data_store)
client_manager.import_store_models()
collection_model = client_manager.get_model("Collection")
item_model = client_manager.get_model("Item")
collection1 = collection_model.create()
controller = HTTPController(client_manager)
class ControllerTest(unittest.TestCase):
    """Exercises HTTPController against an in-memory SPARQL data store.

    NOTE(review): this module is Python 2 code (print statements below).
    """

    def test_operation(self):
        """TODO: remove """
        # The POST operation appends a new member to the collection.
        operation = collection1.get_operation("POST")
        self.assertTrue(operation is not None)
        title = u"First item"
        item = item_model.new(title=title)
        #item_graph = Graph().parse(data=item.to_rdf(rdf_format="nt"), format="nt")
        #print item_graph.serialize(format="turtle")
        item_iri = item.id
        operation(collection1, new_resources=[item])
        print data_graph.serialize(format="turtle")
        # Re-fetch through the manager to check the item was persisted.
        item = client_manager.get(id=item_iri)
        self.assertTrue(item is not None)
        self.assertEquals(item.title, title)

    def test_normal_append_item(self):
        #TODO: test mutiple formats
        title = u"Append test"
        # Skolem IRI that should not be serialized
        skolem_iri = "http://localhost/.well-known/genid/3832"
        item = item_model.new(id=skolem_iri, title=title)
        # The same payload is posted in three serializations; each content
        # type must be accepted by the controller.
        payloads = {}
        payloads["application/ld+json"] = item.to_jsonld()
        payloads["application/json"] = item.to_json()
        payloads["text/turtle"] = item.to_rdf("turtle")
        for content_type in payloads:
            controller.post(collection1.id, content_type, payloads[content_type])
        #TODO: retrieve the IRI of the newly created resource
        items = list(item_model.filter(title=title))
        self.assertEquals(len(items), 1)
        retrieved_item = items[0]
        self.assertEquals(retrieved_item.title, title)
        # The skolem IRI must have been replaced by a minted one.
        self.assertNotEquals(retrieved_item.id, skolem_iri)
        print retrieved_item.id
        #TODO: test the member part
        retrieved_item.delete()

    def forbid_putting_new_resource_test(self):
        #TODO: implement it
        # NOTE(review): name does not start with "test", so unittest will
        # not collect this method as written.
        pass
|
from util_interval import make_interval, lower_bound, upper_bound, print_interval
from util_interval import add_interval, sub_interval, mul_interval, div_interval
def interval_width(x):
    """Half the distance between the interval's bounds."""
    lo = lower_bound(x)
    hi = upper_bound(x)
    return (hi - lo) / 2
# Sample intervals; note make_interval(7, 3) has its bounds written in
# reversed order — presumably intentional for the exercise, TODO confirm.
z1 = make_interval(2, 5)
z2 = make_interval(7, 3)
# Combine the two intervals with each arithmetic operation.
z3 = add_interval(z1, z2)
z4 = sub_interval(z1, z2)
z5 = mul_interval(z1, z2)
z6 = div_interval(z1, z2)
print_interval(z1)
print_interval(z2)
# For addition the width of the result equals the sum of the operand
# widths; compare against the widths of the other combinations below.
print(interval_width(z1) + interval_width(z2))
print(interval_width(z3))
print(interval_width(z4))
print(interval_width(z5))
print(interval_width(z6)) |
# imos method (difference array + prefix sums)
from sys import stdin
from itertools import accumulate
from operator import itemgetter
readline = stdin.readline
# N intervals (s, t) each with a channel/colour c, on positions up to 10^5.
N, C = map(int, readline().split())
stc = [tuple(map(int, readline().split())) for _ in range(N)]
# Sort by channel then start so that a same-channel interval starting
# exactly where the previous one ended can share a single recorder.
stc.sort(key=itemgetter(2, 0))
a = [0] * (10 ** 5 + 1)
pt, pc = -1, -1
for s, t, c in stc:
    if pt == s and pc == c:
        # Same channel continuing seamlessly: no extra overlap at s.
        a[s] += 1
    else:
        # Recording must begin slightly before s, hence s - 1.
        a[s - 1] += 1
    a[t] -= 1
    pt, pc = t, c
# Peak of the running sum = maximum simultaneous recordings needed.
print(max(accumulate(a[:-1])))
|
from typing import List
# YET TO FIGURE OUT DFS BASED SOLUTION FOR TOPOLOGICAL SORT THAT TAKES CARE OF THE CYCLES AS WELL
# class Solution:
# def findOrder(self, numCourses: int, prerequisites: List[List[int]]) -> List[int]:
# adjlist = {i: set() for i in range(numCourses)}
# for prereq in prerequisites:
# src, dest = prereq[0], prereq[1]
# adjlist[src].add(dest)
# stack = []
# seen = set()
# for key in adjlist:
# if key not in seen:
# dfs(adjlist, key, stack, seen)
# # print(stack)
# if len(stack) != numCourses:
# return []
# ans = []
# while stack:
# ans.append(stack.pop())
# return ans
#
#
# def dfs(adjlist, node, stack, seen):
# seen.add(node)
# flag = True
# for nbr in adjlist[node]:
# if nbr not in seen:
# dfs(adjlist, nbr, stack, seen)
# else:
# flag = False
# if flag:
# stack.append(node)
if __name__=="__main__":
    # NOTE(review): the Solution class above is entirely commented out, so
    # running this script currently raises NameError on ``Solution``.
    print(Solution().findOrder(2, [[1,0]]))
from django.shortcuts import render
from .RemoteModel.HomeStatistics import HomeStatistics
def index(request):
    """Render the RemoteApp main page with home statistics and control ids."""
    context = {
        "statistic": HomeStatistics(),
        "Controls": tuple(range(30)),
    }
    return render(request, "RemoteApp/main.html", context=context)
# Create your views here.
|
import requests
import base64
import pytest
import hashlib
from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_OAEP
from Crypto.Hash import SHA256
# Base endpoint of the secrets service under test.
URL = 'http://localhost:8181'
CREATE_URL = URL + "/v1-secrets/secrets/create"
REWRAP_URL = URL + "/v1-secrets/secrets/rewrap"
# Single-secret payload used by most tests; the "none" backend stores the
# value base64-encoded rather than encrypting it.
secret_data = {
    "type": "secret",
    "name": "secret1",
    "clearText": "hello",
    "backend": "none"
}
# Bulk payload: three secrets posted in one request via ?action=bulk.
secrets_bulk_data = {
    "data": [
        {
            "type": "secret",
            "name": "secret1",
            "clearText": "hello",
            "backend": "none"
        },
        {
            "type": "secret",
            "name": "secret2",
            "clearText": "world",
            "backend": "none"
        },
        {
            "type": "secret",
            "name": "secret3",
            "clearText": "!",
            "backend": "none"
        }
    ]
}
@pytest.fixture(scope="function")
def bulk_secret():
    """Bulk secrets payload fixture.

    Fixed: the scope was previously passed as a *function parameter*
    (``def bulk_secret(scope="function")``); pytest treats fixture-function
    parameters as requests for other fixtures, not as the fixture scope.
    The scope belongs in the ``@pytest.fixture`` decorator, mirroring
    ``single_secret`` below.
    """
    return secrets_bulk_data
@pytest.fixture(scope="function")
def single_secret():
    """Return a fresh copy of the single-secret payload.

    A shallow copy is returned (previously the shared module-level dict)
    because several tests mutate the payload in place ("backend",
    "keyName"), and those mutations leaked between tests through the
    shared object.
    """
    return dict(secret_data)
# Throwaway RSA keypair used only to exercise the rewrap API — it is
# deliberately public ("insecure"); never reuse it for anything real.
insecure_private_key = '''-----BEGIN RSA PRIVATE KEY-----
MIIEogIBAAKCAQEAlqXgL8UtupeafCFVQwckREfGN+KM3M+tiY0CLsd847w3B3MI
rwurSDvBRZMvriYz7LCQIrrXTri8XZC0LNvRdkkHr9HWNPwA1eB8DLRORPIp0H4I
9XwLHP76qaKJY2Af2vL8Oq0paSiSwtCaN983JNwyDXmgGKYv0K+6byUv6AVtiQS8
8kOylCnrSKkui7nzcFuoLR/RwuLCxoK9jmAGBNJCG/16u9eFnaElJ1kCcnS0XsdJ
Biy60lWgnMLwlGel0vGZXjTOdAF1xMHZAHSq2Y0k82brNvxLNQSdnV1TjU70rSYO
Li/hoNep978UR76Fv2ZdBY8Ft06N09N4JNanWwIDAQABAoIBAFDlEdWVFFE2R4aQ
f7BWjWr8/7vSs8F+47kRNzLXfIDt+L7PTsJwibFoJQivWNMzQH7A8SU1H5juKngz
1AyinX/fB3mqPFSHXgt7WCGaUM1FHJ8Qjs8DpRQU95VP6maqn3B7OmZnxezqFKT4
T1fhTUNF2rrRrN6Pnu1476vvVCJKtPJcAqG4IIE01jrvZ/jD1wiZ+s3fpJN0Q/j3
FEkWP0B+KPAbE9viEK+aKX0eO2Jkq7xZYgslQRV1TrCooQ5U2+/xBypGrggHloK/
5/apjteJxwljyZMBRFXoX3Yl6Y2y/TXg2fYTTKo323IVLx/080REYjOXcGujp5Sy
cXJ7SsECgYEAxrzXmfO9E718bjilUBT1t2fy2gch+tubDsQeMwXD57sIgSE4Sr7k
xkaHW6FfgA0rtj94CkMW00509ny7HkyaFNkwrkrC/0R/gUIo0E31fgxTM2cO3urI
QXFw1lmFVsE9/uppgF5L9ktSe8TJz7fMp8iHV+1N7FDyuoNSoFp6/bcCgYEAwg3f
Hni3I5JgRI6MX5j1HquUt76PqI7CYeqRmqcHBSg6d5u1Y0P2Fulh4gdYIX8QrGi2
5viSaTZQt9DVATF4pKs2XMPZc9QooudYTSUhRDAnRfdYFa0E56rtL2L/RXTbZj7S
jYdmMrMBvB9mY+RbLTeWK7yG53IzaidJVp6tY30CgYBo8zbkPRwffZRlXJKoTLlK
BqHv0451PF2RGa5dAXFoQZQHJTTl/BMyRfKbSAf3xnzL/I521OEL68XGmS3znT5N
PjkAAckiJtkyuG53OoQm8XlKjuUCgXgJX0/YUmQg4WHM6ZuXR7TTtwkzBUQR5p00
Cai3nUDmSAU2y7zpo36J1wKBgEZtVGGxu/27/RZEieuUDroP2YyKK4coMKHqyOdQ
4Tpc7ENGjqE1JBYSo4St161oeTupUWAoLLLklIzxzKx/MOLKhJNMPRpNkGX3AlQV
OqqNs2MwLpbHUXVm0mgVTMH/dDT6bd4RmuShlOqalsWANhsGBolfBbLv/nrzQSmf
sxvdAoGALwb3fP9ir2Fs3vHn4fCpiuNoCovWExbhH+UtQ/kDYuXsjt1Th7cxuLPF
FNH/hPpMSf5p6Gl4Ipl12s5U6FVYQlmuVlFgV8iUEKsSkMWdrvvx5X38RlgqQqvU
+7k/Qphbh1dQWKCpMXmeMxRWTtgaftz18zvou6k0CyCSNco6JZ4=
-----END RSA PRIVATE KEY-----'''
# Public half of the pair above; sent to the service as the rewrap key.
insecure_public_key = '''-----BEGIN PUBLIC KEY-----
MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAlqXgL8UtupeafCFVQwck
REfGN+KM3M+tiY0CLsd847w3B3MIrwurSDvBRZMvriYz7LCQIrrXTri8XZC0LNvR
dkkHr9HWNPwA1eB8DLRORPIp0H4I9XwLHP76qaKJY2Af2vL8Oq0paSiSwtCaN983
JNwyDXmgGKYv0K+6byUv6AVtiQS88kOylCnrSKkui7nzcFuoLR/RwuLCxoK9jmAG
BNJCG/16u9eFnaElJ1kCcnS0XsdJBiy60lWgnMLwlGel0vGZXjTOdAF1xMHZAHSq
2Y0k82brNvxLNQSdnV1TjU70rSYOLi/hoNep978UR76Fv2ZdBY8Ft06N09N4JNan
WwIDAQAB
-----END PUBLIC KEY-----'''
def get_expected_encrypted_value(p_key, value):
    """Encrypt *value* with the RSA key *p_key* using PKCS#1 OAEP.

    NOTE(review): this cipher uses OAEP's default hash (SHA-1), while
    get_decrypted_value below is built with hashAlgo=SHA256 — the two are
    not inverses of each other; confirm which hash is intended.
    """
    key = RSA.importKey(p_key)
    cipher = PKCS1_OAEP.new(key)
    return cipher.encrypt(value)
def get_decrypted_value(p_key, val):
    """Decrypt *val* with the RSA private key *p_key* (OAEP over SHA-256)."""
    key = RSA.importKey(p_key)
    cipher = PKCS1_OAEP.new(key, hashAlgo=SHA256)
    return cipher.decrypt(val)
def _post(url, json):
    """POST *json* to *url*, log status and body, and return the response."""
    response = requests.post(url, json=json, timeout=10.0)
    print(response.status_code)
    print(response.json())
    return response
def python_post_response(url, json):
    """POST and assert the request succeeded; return the parsed JSON body."""
    response = _post(url, json)
    assert response.status_code == requests.codes.ok
    assert response.status_code != 400
    return response.json()
def verify_python_bad_post_response(url, json):
    """POST and assert the service rejected the payload with a 400 error."""
    response = _post(url, json)
    body = response.json()
    assert response.status_code == 400
    assert body["type"] == "error"
def verify_plain_text_from_enc(data, expected_value=secret_data["clearText"]):
    """Decrypt the base64-encoded *data* with the insecure private key and
    compare the result to *expected_value*.

    NOTE(review): on Python 3 decrypt returns bytes while expected_value
    is str, so this comparison assumes Python 2 — confirm.
    """
    plain_text = get_decrypted_value(insecure_private_key,
                                     base64.b64decode(data))
    assert expected_value == plain_text
def md5_hex_digest(data):
    """Hex-encoded MD5 digest of *data* (used as the secret signature)."""
    return hashlib.md5(data).hexdigest()
def test_secrets_create_api_none_backend(single_secret):
    """The 'none' backend base64-encodes, blanks clearText, and signs."""
    body = python_post_response(CREATE_URL, single_secret)
    assert base64.b64encode(single_secret["clearText"]) == body["cipherText"]
    assert body["clearText"] == ""
    assert body["signature"] == md5_hex_digest(single_secret["clearText"])
def test_secrets_create_bulk_api_none_backend(bulk_secret):
    """Bulk create: each returned secret is base64 of its own clear text."""
    response = python_post_response(CREATE_URL + "?action=bulk", bulk_secret)
    for idx, secret in enumerate(response["data"]):
        expected = base64.b64encode(secrets_bulk_data["data"][idx]["clearText"])
        assert expected == secret["cipherText"]
        assert secret["clearText"] == ""
def test_secrets_rewrap_api_none_backend(single_secret):
    """Rewrap re-encrypts the stored secret under the supplied public key."""
    created = python_post_response(CREATE_URL, single_secret)
    created["rewrapKey"] = insecure_public_key
    rewrapped = python_post_response(REWRAP_URL, created)
    assert rewrapped["clearText"] == ""
    assert rewrapped["cipherText"] == ""
    verify_plain_text_from_enc(rewrapped["rewrapText"])
def test_secrets_rewrap_api_none_backend_invalid_signatures(single_secret):
    """A tampered signature must make the rewrap call fail with a 400."""
    created = python_post_response(CREATE_URL, single_secret)
    created["rewrapKey"] = insecure_public_key
    created["signature"] = md5_hex_digest("bad signature")
    verify_python_bad_post_response(REWRAP_URL, created)
def test_secrets_api_vault_backend_no_collisions(single_secret):
    """Encrypting the same clear text twice must not yield equal outputs."""
    single_secret["backend"] = "vault"
    single_secret["keyName"] = "rancher"
    first = python_post_response(CREATE_URL, single_secret)
    second = python_post_response(CREATE_URL, single_secret)
    assert first["cipherText"] != second["cipherText"]
    assert first["signature"] != second["signature"]
def test_secrets_rewrap_api_vault_backend(single_secret):
    """Vault-backed secrets must round-trip through the rewrap endpoint."""
    single_secret["backend"] = "vault"
    single_secret["keyName"] = "rancher"
    created = python_post_response(CREATE_URL, single_secret)
    created["rewrapKey"] = insecure_public_key
    rewrapped = python_post_response(REWRAP_URL, created)
    assert rewrapped["clearText"] == ""
    assert rewrapped["cipherText"] == ""
    verify_plain_text_from_enc(rewrapped["rewrapText"], single_secret["clearText"])
def test_secrets_rewrap_api_local_key_backend(single_secret):
    """Local-key-backed secrets must round-trip through rewrap."""
    single_secret["backend"] = "localkey"
    single_secret["keyName"] = "test_key"
    print(single_secret["clearText"])
    created = python_post_response(CREATE_URL, single_secret)
    created["rewrapKey"] = insecure_public_key
    rewrapped = python_post_response(REWRAP_URL, created)
    assert rewrapped["clearText"] == ""
    assert rewrapped["cipherText"] == ""
    verify_plain_text_from_enc(rewrapped["rewrapText"])
def test_secrets_local_key_backend_same_text_avoids_collisions(single_secret):
    """Two creations of identical clear text must not produce identical
    ciphertexts or signatures."""
    single_secret["backend"] = "localkey"
    single_secret["keyName"] = "test_key"
    print(single_secret["clearText"])
    first = python_post_response(CREATE_URL, single_secret)
    second = python_post_response(CREATE_URL, single_secret)
    assert first["cipherText"] != second["cipherText"]
    assert first["signature"] != second["signature"]
def test_secrets_rewrap_api_local_key_bad_signature_backend(single_secret):
    """Rewrap with a corrupt signature must be rejected with a 400."""
    single_secret["backend"] = "localkey"
    single_secret["keyName"] = "test_key"
    print(single_secret["clearText"])
    created = python_post_response(CREATE_URL, single_secret)
    created["rewrapKey"] = insecure_public_key
    created["signature"] = "itdontlookgood"
    verify_python_bad_post_response(REWRAP_URL, created)
def test_secrets_rewrap_bulk_api_none_backend(bulk_secret):
    """Bulk rewrap: every secret decrypts back to its original clear text."""
    created = python_post_response(CREATE_URL + "?action=bulk", bulk_secret)
    created["rewrapKey"] = insecure_public_key
    print(created)
    rewrapped = python_post_response(REWRAP_URL + "?action=bulk", created)
    for idx, secret in enumerate(rewrapped["data"]):
        assert secret["clearText"] == ""
        assert secret["cipherText"] == ""
        verify_plain_text_from_enc(
            secret["rewrapText"],
            secrets_bulk_data["data"][idx]["clearText"])
|
import discord
import random
from discord.ext import commands
# Footer lines picked at random for the error embed.
rfooter = [":(", "Press F to pay respects", "RIP", "no u", "Uhh...", "this is awkward", "'aight Imma head out"]
class ErrorHandler(commands.Cog):
    """Cog that reports any unhandled command error back to the user as an embed."""

    def __init__(self, bot):
        # Fix: the original wrote `bot.self = bot`, which attached an attribute
        # to the bot object instead of storing the bot on this cog.
        self.bot = bot

    @commands.Cog.listener()
    async def on_command_error(self, ctx, e):
        # Commands that define their own local error handler deal with the
        # error themselves; don't double-report.
        if hasattr(ctx.command, "on_error"):
            return
        # Fix: user-facing typo "occured" -> "occurred".
        embed = discord.Embed(title="An error occurred!", description=f"```{e}```")
        embed.set_footer(text=rfooter[random.randint(0, len(rfooter) - 1)])
        await ctx.reply(embed=embed)
def setup(bot):
    # Entry point used by discord.py's extension loader (bot.load_extension).
    bot.add_cog(ErrorHandler(bot))
|
"""
In this module determines metadata of data base
Also it contains declaration of Base Object for ORM objects in Application
Base object contains custom methods shared by every model.
Attributes:
NAMING_CONVENTION (dict): naming convention for SQLAlchemy auto name
generation.
metadata: SQLAlchemy object created by sqlalchemy.schema.MetaData
using specified naming convention
"""
import datetime
from decimal import *
from sqlalchemy.ext.declarative import as_declarative
from sqlalchemy.schema import MetaData
from sqlalchemy.orm.collections import InstrumentedList
# Recommended naming convention used by Alembic, as various different database
# providers will autogenerate vastly different names making migrations more
# difficult. See: http://alembic.zzzcomputing.com/en/latest/naming.html
# Deterministic names for indexes, constraints and keys (see the Alembic
# naming-convention docs referenced above) so migrations are portable
# across database backends.
NAMING_CONVENTION = {
    "ix": "ix_%(column_0_label)s",
    "uq": "uq_%(table_name)s_%(column_0_name)s",
    "ck": "ck_%(table_name)s_%(constraint_name)s",
    "fk": "fk_%(table_name)s_%(column_0_name)s_%(referred_table_name)s",
    "pk": "pk_%(table_name)s"
}
# Shared MetaData instance so every model picks up the convention.
metadata = MetaData(naming_convention=NAMING_CONVENTION)
@as_declarative(metadata=metadata)
class Base(object):
    """
    Declarative base for all ORM models in the application.

    Provides custom methods shared by all models:
        __repr__() - readable debug representation
        as_dict()  - compile an ORM model into a python dictionary
    """

    def __repr__(self):
        """Return a debug string of the form "<class (col = value, ...)>".

        Fix: the original printed the string (Py2 `print s`) and returned "",
        which breaks repr()/logging; a __repr__ must return the string.  Also
        closes the representation with ')>' to match the opening '<'.
        """
        s = '<%s(' % (self.__class__)
        for c in self.__table__.columns:
            s += '%s = %s, ' % (c.name, getattr(self, c.name))
        s += ')>'
        return s

    def as_dict(self, exclude=None, include=None, with_relations=None, depth=0):
        """Convert the model into a python dictionary.

        Relationships named in `with_relations` are unpacked one level deep.
        Example: model.as_dict(with_relations=["<relation1>", "<relation2>"])
        Please don't pass `depth` yourself -- it limits the recursion depth.

        Args:
            exclude: (list of str) fields to exclude; takes priority over include
            include: (list of str) fields to include (all columns when empty)
            with_relations: (list of str) relationship names to unpack
            depth: (int) internal recursion-depth marker

        Returns:
            dict mapping column names to values; datetime/date values are
            converted to ISO format and Decimal values to float.

        Note: defaults were changed from mutable `[]` literals (shared between
        calls) to None sentinels; passing explicit lists still works.
        """
        exclude = [] if exclude is None else exclude
        include = [] if include is None else include
        with_relations = [] if with_relations is None else with_relations
        data = {}
        if not depth:
            # Only unpack relationships at the top level to avoid cycles.
            rel_data = {}
            for key in with_relations:
                if key not in self.__mapper__.relationships.keys():
                    continue
                value = getattr(self, key, None)
                if value is not None:
                    if isinstance(value, InstrumentedList):
                        per_key_items = [item.as_dict(exclude=exclude,
                                                      include=include,
                                                      depth=depth + 1) for item in value]
                    else:
                        per_key_items = value.as_dict(exclude=exclude,
                                                      include=include,
                                                      depth=depth + 1)
                    rel_data[key] = per_key_items
            data.update(rel_data)
        for c in self.__table__.columns:
            if c.name in exclude:
                continue
            if include:
                if c.name not in include:
                    continue
            value = getattr(self, c.name)
            # Normalize non-JSON-friendly column types.
            if isinstance(value, (datetime.datetime, datetime.date)):
                value = value.isoformat()
            if isinstance(value, Decimal):
                value = float(value)
            data[c.name] = value
        return data
|
from collections import OrderedDict
def getPlotInfo():
    '''
    Define all plot information:
       * comparisons
       * binning info
       * variables info

    1. Variable information
    1.1. Key syntax of the dictionary plotInfo["varInfo"]:
       Possible key syntax:
          * <var>
            e.g. MET, Jet_mass_0 etc...
          * <var>_common
            defines common information for all variables starting with <var>
          * !<var>
            defines a composite variable calculated from previously
            defined variables
    1.2. Description of the variable information
       handleName: edm::Handle to be used in Event::getByLabel
                   corresponds to Type in edmDumpEventContent <AODfile>
       labelName : string to be used in Event::getByLabel
                   corresponds to Module in edmDumpEventContent <AODfile>
       accessor  : the accessor to get the variable from the EDM object
                   e.g. ".pt()"
       leaf      : leaf name in the nanoAOD, e.g. "MET_pt"
       leadingIdx: 0 for leading, 1 for subleading etc...
       name      : the name you want to see in the plot title and file name
       min       : minimum of the histogram if plotInfo["plotInfo"][minMaxBinning"] == "minMaxKeys"
       max       : maximum ........................................................................
       logscale  : True/False (y axis)
       plot      : True/False make plot of the variable
       expr      : If the variable is a composite variable, defines its expression using
                   #['var'] to refer to the variable var
                   e.g. for deltaPhi: (#['Jet_phi_0']-#['MET_phi'])%(2*np.pi)
    '''
    ## Dictionary storing all information about plots
    plotInfo = OrderedDict()

    ## Set here which comparison you want
    # Use names as defined in AOD_FORMATS in compare.py
    plotInfo["comparisons"] = {
        "MiniAODvsPFNanoAOD": 1,
        "MiniAODvsNanoAOD"  : 0,
        "NanoAODvsPFNanoAOD": 0
    }

    ## Binning info
    plotInfo["plotInfo"] = {
        "minMaxBinning": "minMaxKeys"
        # minMaxKeys         : from min to max keys of the plotted variable
        # zeroMaxPerHistogram: from zero to max of the histogram
        # minMaxPerHistogram : from min to max of the histogram
    }

    plotInfo["varInfo"] = OrderedDict()
    # Create a pointer to the subdictionary plotInfo["varInfo"]
    varInfo = plotInfo["varInfo"]

    ## Define some defaults
    varInfo["defaults"] = {
        "plot"     : False,
        "logscale" : True,
        "min"      : 0,
    }

    ## MET
    varInfo["MET_common"] = {
        "handleName": "vector<pat::MET>",
        "labelName" : "slimmedMETs",
        "leadingIdx": 0
    }
    varInfo["MET"] = {
        "name"      : "MET",
        "accessor"  : ".pt()",
        "leaf"      : "MET_pt",
        "max"       : 1500,
        # "plot"      : False
    }
    varInfo["MET_phi"] = {
        "name"      : "MET_phi",
        "accessor"  : ".phi()",
        "leaf"      : "MET_phi",
        "min"       : -3.15,
        "max"       : +3.15,
        "plot"      : False
    }

    # Jet
    varInfo["Jet_common"] = {
        "handleName": "vector<pat::Jet>",
        "labelName" : "slimmedJets"
    }
    # Jet mass
    varInfo["Jet_mass_common"] = {
        "accessor"  : ".mass()",
        "leaf"      : "Jet_mass"
    }
    varInfo["Jet_mass_0"] = {
        "name"      : "Leading jet mass",
        "leadingIdx": 0,
        "max"       : 300,
        # "plot"      : False
    }
    varInfo["Jet_mass_1"] = {
        "name"      : "Subleading jet mass",
        "leadingIdx": 1,
        "max"       : 300,
        # "plot"      : False
    }
    varInfo["Jet_mass_2"] = {
        "name"      : "Subsubleading jet mass",
        "leadingIdx": 2,
        "max"       : 150,
        # "plot"      : False
    }
    # Jet phi
    varInfo["Jet_phi_common"] = {
        "accessor"  : ".phi()",
        "leaf"      : "Jet_phi",
        "min"       : -3.15,
        "max"       : +3.15,
        "plot"      : False
    }
    varInfo["Jet_phi_0"] = {
        "name"      : "Leading jet phi",
        "leadingIdx": 0
    }
    varInfo["Jet_phi_1"] = {
        "name"      : "Subleading jet phi",
        "leadingIdx": 1
    }
    # Jet pt
    varInfo["Jet_pt_common"] = {
        "accessor"  : ".pt()",
        "leaf"      : "Jet_pt"
    }
    varInfo["Jet_pt_0"] = {
        "name"      : "Leading jet pt",
        "leadingIdx": 0,
        "max"       : 2000,
        # "plot"      : False
    }
    varInfo["Jet_pt_1"] = {
        "name"      : "Subleading jet pt",
        "leadingIdx": 1,
        "max"       : 2000,
        # "plot"      : False
    }
    varInfo["Jet_pt_2"] = {
        # Fix: capitalized for consistency with the other plot titles.
        "name"      : "Subsubleading jet pt",
        "leadingIdx": 2,
        "max"       : 1000,
        # "plot"      : False
    }

    # FatJet
    varInfo["FatJet_common"] = {
        "handleName": "vector<pat::Jet>",
        "labelName" : "slimmedJetsAK8"
    }
    # FatJet mass
    varInfo["FatJet_mass_common"] = {
        "accessor"  : ".mass()",
        "leaf"      : "FatJet_mass"
    }
    varInfo["FatJet_mass_0"] = {
        "name"      : "Leading fatjet mass",
        "leadingIdx": 0,
        "max"       : 300,
        "plot"      : True
    }
    varInfo["FatJet_mass_1"] = {
        "name"      : "Subleading fatjet mass",
        "leadingIdx": 1,
        "max"       : 300,
        # "plot"      : False
    }
    varInfo["FatJet_mass_2"] = {
        "name"      : "Subsubleading fatjet mass",
        "leadingIdx": 2,
        "max"       : 150,
        # "plot"      : False
    }
    # FatJet phi
    varInfo["FatJet_phi_common"] = {
        "accessor"  : ".phi()",
        "leaf"      : "FatJet_phi",
        "min"       : -3.15,
        "max"       : +3.15,
        "plot"      : False
    }
    varInfo["FatJet_phi_0"] = {
        "name"      : "Leading fatjet phi",
        "leadingIdx": 0
    }
    varInfo["FatJet_phi_1"] = {
        "name"      : "Subleading fatjet phi",
        "leadingIdx": 1
    }
    # FatJet pt
    varInfo["FatJet_pt_common"] = {
        "accessor"  : ".pt()",
        "leaf"      : "FatJet_pt"
    }
    varInfo["FatJet_pt_0"] = {
        "name"      : "Leading fatjet pt",
        "leadingIdx": 0,
        "max"       : 2000,
        # "plot"      : False
    }
    varInfo["FatJet_pt_1"] = {
        "name"      : "Subleading fatjet pt",
        "leadingIdx": 1,
        "max"       : 2000,
        # "plot"      : False
    }
    varInfo["FatJet_pt_2"] = {
        # Fix: "subsubleading fajet pt" -> capitalized and "fajet" typo corrected.
        "name"      : "Subsubleading fatjet pt",
        "leadingIdx": 2,
        "max"       : 1000,
        # "plot"      : False
    }

    # Combined variables
    varInfo["!deltaPhi_common"] = {
        "min"       : 0,
        "max"       : 6.3,
    }
    varInfo["!deltaPhi_METj0"] = {
        "name"      : "delta phi MET jet0",
        "expr"      : "(#['Jet_phi_0']-#['MET_phi'])%(2*np.pi)",
    }
    varInfo["!deltaPhi_METj1"] = {
        "name"      : "delta phi MET jet1",
        "expr"      : "(#['Jet_phi_1']-#['MET_phi'])%(2*np.pi)",
    }
    varInfo["!deltaPhi_minMETj"] = {
        "name"      : "min delta phi MET jets",
        "expr"      : "min( (#['Jet_phi_0']-#['MET_phi'])%(2*np.pi), (#['Jet_phi_1']-#['MET_phi'])%(2*np.pi) )",
    }

    return(plotInfo)
|
# version 1.1
import numpy as np
import pandas as pd
import seaborn as sn
import matplotlib.pyplot as plt
from sklearn.cluster import KMeans
from sklearn.metrics import calinski_harabaz_score, silhouette_score,davies_bouldin_score
import warnings
warnings.filterwarnings('ignore')
def cluster_range(X, clusterer, k_start, k_stop, actual=None):
    """
    Generate cluster labels and internal validation values for every k.

    Parameters
    ----------
    X : array
        Design matrix with each row corresponding to a point
    clusterer : clustering object
        The clustering method used; must expose `n_clusters`, `fit_predict`
        and (for the inertia criterion) `inertia_` (e.g. sklearn KMeans)
    k_start : integer
        Initial value to step through
    k_stop : integer
        Final value to step through (inclusive)
    actual : list, optional
        List of ground-truth labels.
        NOTE(review): currently accepted but UNUSED -- no external validation
        scores are computed despite the original docstring's claim.

    Returns
    ----------
    dictionary
        Keys 'chs', 'dbi', 'inertias', 'scs', each a list with one internal
        validation value per k in [k_start, k_stop]
    """
    # One list per internal validation criterion, filled in k order.
    chs, dbi, inertias, scs = [], [], [], []
    for k in range(k_start, k_stop+1):
        # Mutate the clusterer's k in place and refit from scratch.
        clusterer.n_clusters = k
        X_predict = clusterer.fit_predict(X)
        chs.append(calinski_harabaz_score(X, X_predict))  # Calinski-Harabasz Index
        dbi.append(davies_bouldin_score(X, X_predict))  # Davies-Bouldin Index
        inertias.append(clusterer.inertia_)  # Inertia or within-cluster sum-of-squares criterion
        scs.append(silhouette_score(X, X_predict))  # Silhouette Coefficient
    res = {'chs': chs,
           'dbi': dbi,
           'inertias': inertias,
           'scs': scs}
    return res
def plot_internal(inertias, chs, dbi, scs):
    """Plot the four internal validation curves against k on a 2x2 grid."""
    fig, ax = plt.subplots(2, 2, figsize=(15, 10))
    # k values start at 2 by convention of cluster_range.
    ks = np.arange(2, len(inertias) + 2)
    # (subplot position, series, line format, legend label, y-axis label)
    panels = [
        ((0, 0), inertias, '-o',  'Inertia',                 'Inertia'),
        ((0, 1), chs,      '-ro', 'Calinski-Harabasz Index', 'Calinski-Harabasz Index'),
        ((1, 0), dbi,      '-go', 'Davies-Bouldin Index',    'Davies-Bouldin Index'),
        ((1, 1), scs,      '-ko', 'Silhouette coefficient',  'Silhouette'),
    ]
    for (row, col), series, fmt, label, ylabel in panels:
        axis = ax[row, col]
        axis.plot(ks, series, fmt, label=label)
        axis.set_xlabel('$k$')
        axis.set_ylabel(ylabel)
    return ax
|
from abc import ABCMeta, abstractmethod
from typing import List, TypeVar, Type, Dict, Union
from common.database import Database
# T must be a model or one of its subclasses
T = TypeVar('T', bound='Model')
class Model(metaclass=ABCMeta):
    """Abstract base class for MongoDB-backed models.

    Subclasses must set `collection` (the Mongo collection name) and `_id`,
    and implement json() returning the document to persist.
    """
    # collection and _id will be string in the subclass
    collection: str
    _id: str

    # Avoid Warning
    def __init__(self, *args, **kwargs):
        # Accepts and ignores arbitrary arguments so subclasses can forward
        # extra document fields without signature errors.
        pass

    @abstractmethod
    def json(self) -> Dict:
        """Return the dictionary representation persisted to Mongo."""
        raise NotImplementedError

    @classmethod
    def all(cls: Type[T]) -> List[T]:
        # Fetch every document in the collection.
        items_from_db = Database.find(cls.collection, {})
        # create and return a list of items obtained from the query
        return [cls(**item) for item in items_from_db]

    @classmethod
    def get_by_id(cls: Type[T], _id: str) -> T:
        """Convenience wrapper for a lookup by primary key."""
        return cls.find_one_by("_id", _id)

    @classmethod
    def find_one_by(cls: Type[T], attribute: str, value: Union[str, Dict]) -> T:
        # NOTE(review): if Database.find_one returns None (no match), this
        # raises TypeError from cls(**None) -- confirm callers expect that.
        return cls(**Database.find_one(cls.collection, {attribute: value}))

    @classmethod
    def find_many_by(cls: Type[T], attribute: str, value: Union[str, Dict]) -> List[T]:
        """Return every matching document hydrated as a model instance."""
        return [cls(**item) for item in Database.find(cls.collection, {attribute: value})]

    def save_to_mongo(self):
        # upsert data keyed on _id
        Database.update(self.collection, {"_id": self._id}, self.json())

    def remove_from_mongo(self):
        # Delete this document by _id.
        Database.remove(self.collection, {"_id": self._id})
|
from django.db import models
from ckeditor.fields import RichTextField
# Create your models here.
class IndexSectionOne(models.Model):
    """Title + rich-text description block (admin label "Index Section One")."""
    class Meta:
        verbose_name_plural = 'Index Section One'
    title = models.CharField(max_length=250)
    description = RichTextField(blank=True)
    def __str__(self):
        # Shown in the Django admin change list.
        return self.title
class IndexSectionTwo(models.Model):
    """Image + rich-text description block (admin label "Index Section Two")."""
    class Meta:
        verbose_name_plural = 'Index Section Two'
    image = models.ImageField(upload_to='images')
    description = RichTextField(blank=True)
    def __str__(self):
        # NOTE(review): returns the (possibly long/blank) rich-text body.
        return self.description
class MediumImage(models.Model):
    """Image with a rich-text caption (`dec`)."""
    class Meta:
        verbose_name_plural = 'Medium Images'
    image = models.ImageField(upload_to='images')
    dec = RichTextField(blank=True)
    def __str__(self):
        return self.dec
class IndexSectionThree(models.Model):
    """Title, URL and rich-text description (admin label "Index Section Three")."""
    class Meta:
        verbose_name_plural = 'Index Section Three'
    title = models.CharField(max_length=250)
    url = models.TextField()
    description = RichTextField(blank=True)
    def __str__(self):
        return self.title
class IndexSectionFour(models.Model):
    """Title-only block (admin label "Index Section Four")."""
    class Meta:
        verbose_name_plural = 'Index Section Four'
    title = models.CharField(max_length=250)
    def __str__(self):
        return self.title
class Questions(models.Model):
    """FAQ entry: question text (`ques`) and rich-text answer (`ans`)."""
    class Meta:
        verbose_name_plural = 'Questions'
    ques = models.TextField()
    ans = RichTextField(blank=True)
    def __str__(self):
        return self.ques
class OrderSection(models.Model):
    """Title + rich-text description for the order page section."""
    class Meta:
        verbose_name_plural = 'Order Section'
    title = models.CharField(max_length=250)
    description = RichTextField(blank=True)
    def __str__(self):
        return self.title
class OrderStep(models.Model):
    """Single ordering step: title plus rich-text detail (`dec`)."""
    class Meta:
        verbose_name_plural = 'Order Steps'
    title = models.CharField(max_length=250)
    dec = RichTextField(blank=True)
    def __str__(self):
        return self.title
class TestimonialsSectionOne(models.Model):
    """Title + rich-text description for the first testimonials section."""
    class Meta:
        verbose_name_plural = 'Testimonials Section One'
    title = models.CharField(max_length=250)
    description = RichTextField(blank=True)
    def __str__(self):
        return self.title
class TestimonialsSectionTwo(models.Model):
    """Title-only block for the second testimonials section."""
    class Meta:
        verbose_name_plural = 'Testimonials Section Two'
    title = models.CharField(max_length=250)
    def __str__(self):
        return self.title
class Video(models.Model):
    """Embedded video, stored as a raw URL string."""
    class Meta:
        verbose_name_plural = 'Videos'
    url = models.TextField()
    def __str__(self):
        return self.url
class Testimonial(models.Model):
    """Customer testimonial: rich-text content and the author's address."""
    class Meta:
        verbose_name_plural = 'Testimonials'
    content = RichTextField(blank=True)
    address = RichTextField(blank=True)
    def __str__(self):
        return self.address
|
import pandas as pd
import tensorflow as tf
import numpy as np
class LogisticClassifier:
    """Binary softmax (logistic) classifier built on the TensorFlow 1.x graph API.

    Labels are expected as 0/1 and are converted internally to one-hot
    vectors of length 2.  trainModel() must be called before
    validateModel() or predict() (it builds self.x/self.y/self.pred and
    opens the session the other methods reuse).
    """

    start = 0  # mini-batch cursor into the training data

    def __init__(self, nfeatures):
        # nfeatures: number of input features per sample.
        self.nfeatures = nfeatures

    def __createBatch(self, features, labels, batch_size):
        """Return the next (batch_x, batch_y) slice; wrap around at the end."""
        self.end = self.start + batch_size
        batch_x = features[self.start:self.end]
        batch_y = labels[self.start:self.end]
        batch_x, batch_y = self.__reshape(batch_x, batch_y)
        self.start = self.end
        if self.end >= len(features):
            self.start = 0
        return batch_x, batch_y

    def __convertLabelsToOneHotVectors(self, labels):
        """Map label 0 -> [1, 0] and any other label -> [0, 1]."""
        return [[1, 0] if label == 0 else [0, 1] for label in labels]

    def __reshape(self, inputs, labels=None):
        """Reshape inputs to (-1, nfeatures) and labels to (-1, 2).

        Fix: use `is not None` -- `array != None` is an elementwise numpy
        comparison, which is invalid in a boolean context.
        """
        if inputs is not None:
            inputs = np.reshape(inputs, (-1, self.nfeatures))
        if labels is not None:
            labels = np.reshape(labels, (-1, 2))
        return inputs, labels

    def trainModel(self, input_features, labels):
        """Build the graph and train with mini-batch gradient descent.

        Fix: keeps the TensorFlow session open in self.sess so that
        validateModel()/predict() evaluate the *trained* weights.  The
        original re-ran tf.global_variables_initializer() in those methods,
        which reset W and b to zero before every evaluation.
        """
        # Hyper-parameters
        learning_rate = 0.06
        training_epochs = 100
        batch_size = 1000
        display_step = 1
        record_size = len(input_features)
        labels = self.__convertLabelsToOneHotVectors(labels)
        # Graph inputs
        self.x = tf.placeholder(tf.float32, [None, self.nfeatures])
        self.y = tf.placeholder(tf.float32, [None, 2], name="classes")
        # Model parameters (zero-initialized is fine for softmax regression)
        W = tf.Variable(tf.zeros([self.nfeatures, 2]))
        b = tf.Variable(tf.zeros([2]))
        # Softmax model and cross-entropy loss
        self.pred = tf.nn.softmax(tf.matmul(self.x, W) + b)
        cost = tf.reduce_mean(-tf.reduce_sum(self.y * tf.log(self.pred), reduction_indices=1))
        optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)
        init = tf.global_variables_initializer()
        input_features = np.asarray(input_features)
        labels = np.asarray(labels)
        # Persistent session, reused by validateModel()/predict().
        self.sess = tf.Session()
        self.sess.run(init)
        for epoch in range(training_epochs):
            print("Epoch--->" + str(epoch))
            avg_cost = 0.
            total_batch = int(record_size / batch_size)
            # Loop over all batches
            for i in range(total_batch):
                batch_xs, batch_ys = self.__createBatch(input_features, labels, batch_size)
                # Run optimization op (backprop) and cost op (to get loss value)
                _, c = self.sess.run([optimizer, cost],
                                     feed_dict={self.x: batch_xs, self.y: batch_ys})
                avg_cost += c / total_batch
            if (epoch + 1) % display_step == 0:
                print("Epoch:", '%04d' % (epoch + 1), "cost=", "{:.9f}".format(avg_cost))
        print("Optimization Finished!")

    def validateModel(self, test_inputs, test_labels):
        """Print accuracy on a held-out set; must be called after trainModel()."""
        test_labels = self.__convertLabelsToOneHotVectors(test_labels)
        test_inputs = np.asarray(test_inputs)
        test_labels = np.asarray(test_labels)
        test_inputs, test_labels = self.__reshape(test_inputs, test_labels)
        correct_prediction = tf.equal(tf.argmax(self.pred, 1), tf.argmax(self.y, 1))
        accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
        # Evaluate in the training session -- do NOT re-initialize variables.
        print("Accuracy:", accuracy.eval({self.x: test_inputs, self.y: test_labels},
                                         session=self.sess))

    def predict(self, test_inputs):
        """Return predicted class indices (0 or 1); call after trainModel()."""
        test_inputs = np.asarray(test_inputs)
        test_inputs = self.__reshape(test_inputs)[0]
        predict_op = tf.argmax(self.pred, 1)
        return predict_op.eval(feed_dict={self.x: test_inputs}, session=self.sess)
#!/usr/bin/env python
import numpy as np
import matplotlib.pyplot as plt
import math
from heapq import *
import sys
import rospy
from geometry_msgs.msg import Twist
from nav_msgs.msg import Odometry
# extra = 0
res = 1
r = 3.3 # wheels
l = 28.7
clr = 15 # clearance from the boundaries
extra = 22+clr
dt = 1
obstacle_space = []
def create_graph():
    """Build the 1110x1010 occupancy grid for the planner.

    Each cell maps to {'visited', 'g', 'valid', 'parent', 'neighbour'};
    cells inside any obstacle (inflated by the module-level `extra` margin)
    are marked invalid and appended to the module-level `obstacle_space`.

    Refactor: the 6 circle checks and 14 copy-pasted rectangle checks of the
    original are folded into data-driven loops with identical geometry.
    (The commented-out "rectangle 5" and wall sections of the original were
    dead code and are intentionally omitted.)
    """
    print("Creating graph")
    graph = {}
    # Circular obstacles: (center_x, center_y, radius) in map units.
    circles = [
        (390, 960, 40.5),
        (438, 736, 40.5),
        (390, 45, 40.5),
        (438, 274, 40.5),
        (149.5, 830.5, 79.5),    # table circle left
        (318.885, 830.5, 79.5),  # table circle right
    ]
    # Rectangular obstacles: (x_min, x_max, y_min, y_max) in map units.
    rects = [
        (685, 1110, 0, 35),
        (927, 1110, 35, 111),
        (779, 896, 35, 93),
        (474, 748, 35, 187),
        (983, 1026, 919, 1010),
        (832, 918, 827, 1010),
        (744, 1110, 621, 697),
        (1052, 1110, 449, 566),
        (1019, 1110, 363, 449),
        (1052, 1110, 178.75, 295.75),
        (438, 529, 315, 498),
        (529, 712, 265, 341),
        (784.5, 936.5, 267, 384),
        (149.5, 318.885, 751, 910),  # table square
    ]
    for i in range(int(1110 / res)):
        for j in range(int(1010 / res)):
            graph[(i, j)] = {'visited': False, 'g': np.inf, 'valid': True,
                             'parent': (0, 0),
                             'neighbour': [0.0, 0.0, 0.0, 0.0, 0.0, 0.0]}
            # Circle test: inside the radius inflated by `extra`.
            for (cx, cy, rad) in circles:
                if (i - round(cx / res)) ** 2 + (j - round(cy / res)) ** 2 <= ((rad / res) + extra) ** 2:
                    graph[(i, j)]['valid'] = False
                    obstacle_space.append((i, j))
            # Rectangle test: inside the box inflated by `extra` on all sides.
            for (x_min, x_max, y_min, y_max) in rects:
                if (y_min / res) - extra <= j <= (y_max / res) + extra and \
                        (x_min / res) - extra <= i <= (x_max / res) + extra:
                    graph[(i, j)]['valid'] = False
                    obstacle_space.append((i, j))
    return graph
def calculate_distance(goal, current):
    """Euclidean distance between the (x, y) components of two points."""
    dx = goal[0] - current[0]
    dy = goal[1] - current[1]
    return math.sqrt(dx * dx + dy * dy)
def goal_reached(current, goal):
    """True when the (truncated) current position is strictly within 5 units of goal."""
    dx = int(current[0]) - goal[0]
    dy = int(current[1]) - goal[1]
    return dx * dx + dy * dy < 25
def find_neighbour(current, ul, ur):
    """One differential-drive motion step from state (x, y, theta).

    ul/ur are the left/right wheel angular speeds (rad/s).  Uses the module
    globals r (wheel radius), l (wheel base) and dt (time step).
    Returns ([x, y, theta, vx, vy, omega], (x, y)) with x, y snapped to the
    grid and clamped into the 1110x1010 map.
    """
    x, y, theta = current[0], current[1], current[2]
    d_theta = (r / l) * (ur - ul) * dt
    v = (r / 2) * (ul + ur)
    dx = v * math.cos(theta + d_theta) * dt
    dy = v * math.sin(theta + d_theta) * dt
    x += dx
    y += dy
    theta += d_theta
    # Clamp into the map bounds, then snap to the integer grid.
    if x < 0:
        x = 0
    if y < 0:
        y = 0
    if x >= 1110:
        x = 1109
    if y >= 1010:
        y = 1009
    x = round(x)
    y = round(y)
    params = [x, y, theta, dx / dt, dy / dt, d_theta / dt]
    return params, (x, y)
def astar(graph, source, goal, rpm):
    """A* search over the occupancy grid using differential-drive primitives.

    Args:
        graph: grid from create_graph()
        source: (x, y) start cell
        goal: (x, y) goal cell
        rpm: (rpm1, rpm2) wheel speeds defining the 8-action space
    Returns:
        (min_distance, path, graph) where path runs goal -> source.
    """
    theta = 0
    (rpm1, rpm2) = rpm
    # Convert RPM to rad/s.
    rpm1 = 2 * math.pi * rpm1 / 60
    rpm2 = 2 * math.pi * rpm2 / 60
    (goal_x, goal_y) = goal
    graph[source]['visited'] = True
    graph[source]['neighbour'] = [source[0], source[1], 0.0, 0.0, 0.0, 0.0]
    num_nodes_visited = 1
    graph[source]['g'] = 0
    queue = []
    current = (source[0], source[1], theta)
    queue_distance = calculate_distance(goal, current) + graph[source]['g']
    heappush(queue, (queue_distance, current))
    # Eight wheel-speed combinations form the action space.
    action_space = [(0, rpm1), (rpm1, 0), (rpm1, rpm1), (0, rpm2),
                    (rpm2, 0), (rpm2, rpm2), (rpm1, rpm2), (rpm2, rpm1)]
    print("Exploring the nodes...")
    while (len(queue) != 0):
        current = heappop(queue)[1]
        # Clamp the popped state into the map bounds.
        # Fix: the original assigned into the tuple (`current[0] = 1109`),
        # which raises TypeError; mutate the list copy `crt` instead.
        crt = list(current)
        if crt[0] >= 1110:
            crt[0] = 1109
        if crt[1] >= 1010:
            crt[1] = 1009
        current = tuple(crt)
        if goal_reached(current, goal) == True:
            print("Goal reached")
            break
        for i, j in action_space:
            params, key = find_neighbour(current, i, j)
            neighbour = (params[0], params[1], params[2])
            # Clamp the neighbour's grid key as well.
            lst = list(key)
            if lst[0] >= 1110:
                lst[0] = 1109
            if lst[1] >= 1010:
                lst[1] = 1009
            key = tuple(lst)
            if graph[(key)]['valid'] == True:
                distance = calculate_distance(current, neighbour)
                if graph[key]['visited'] == False:
                    graph[key]['visited'] = True
                    num_nodes_visited += 1
                    graph[key]['parent'] = (current[0], current[1])
                    graph[key]['neighbour'] = params
                    graph[key]['g'] = graph[graph[key]['parent']]['g'] + distance
                    queue_distance = calculate_distance(goal, neighbour) + graph[key]['g']
                    heappush(queue, (queue_distance, neighbour))
    # Backtrack from the last expanded node to the source via parents.
    path = [(current[0], current[1])]
    parent = (current[0], current[1])
    while parent != source:
        parent = graph[path[len(path) - 1]]['parent']
        path.append(parent)
    min_distance = (graph[(goal_x, goal_y)]['g'])
    print("Total Number of Nodes Visited:", num_nodes_visited)
    return min_distance, path, graph
def simulate(path_list, graph):
    """Replay the planned path on the robot by publishing /cmd_vel twists.

    Each node in path_list indexes graph[...]['neighbour'] = [x, y, theta,
    vx, vy, omega]; its velocity is published for ~1 second before moving on.
    """
    print("Simulation started")
    rospy.init_node('Motion_command', anonymous=True)
    vel_pub = rospy.Publisher('/cmd_vel', Twist, queue_size=10)
    vel_msg = Twist()
    for p in path_list:
        node = graph[p]['neighbour']
        # Linear speed magnitude from the stored velocity components.
        v = math.sqrt(node[3]**2 + node[4]**2)
        w = node[5]
        r = rospy.Rate(1)
        # /100: presumably converts grid units (cm/s) to m/s -- TODO confirm.
        vel_msg.linear.x = v/100
        vel_msg.linear.y = 0
        vel_msg.linear.z = 0
        vel_msg.angular.x = 0
        vel_msg.angular.y = 0
        vel_msg.angular.z = w
        t0 = rospy.Time.now().to_sec()
        # Publish this node's velocity for one second (wall clock).
        while not rospy.is_shutdown():
            t1 = rospy.Time.now().to_sec()
            elapsed = t1 - t0
            print("elapsed: ", elapsed)
            if elapsed >= 1:
                break
            vel_pub.publish(vel_msg)
            print("published velocity: ", vel_msg.linear.x)
            r.sleep()
    # Stop the robot once the path is exhausted.
    vel_msg.linear.x = 0.0
    vel_msg.angular.z = 0.0
    vel_pub.publish(vel_msg)
if __name__ == "__main__":
x1,y1 = raw_input("Enter start point, with a space between x and y coordinates of start: ").split()
x1 = int(x1)
y1 = int(y1)
start = (x1,y1)
if x1 >= 1110 or x1 < 0 or y1 >= 1010 or y1 <0:
print("Invalid start state, exiting")
sys.exit(0)
p,q = raw_input("Enter goal point, with a space between x and y coordinates of goal: ").split()
p = int(p)
q = int(q)
goal = (p,q)
if p >= 1110 or p < 0 or q >= 1010 or q <0:
print("Invalid Goal state, exiting")
sys.exit(0)
rpm1, rpm2 = raw_input("Enter the two RPMS for the wheels: ").split()
rpm = (int(rpm1), int(rpm2))
g = create_graph()
points = [x for x in g.keys() if not (g[x]['valid'])]
x = [i[0] for i in points]
y = [i[1] for i in points]
for i in points:
if x1 == i[0] and y1 == i[1]:
print("Start point inside obstacle, exiting")
sys.exit(0)
if p == i[0] and q == i[1]:
print("Goal point inside obstacle, exiting")
sys.exit(0)
# plt.xlim(right=1110)
# plt.ylim(top=1010)
# plt.plot(x,y, 'k.')
# plt.plot(x1,y1,'xr')
# plt.plot(p,q,'xg')
min_distance, path, final_graph = astar(g, start, goal, rpm)
path = path[::-1]
# print(path)
# x = [i[0] for i in path]
# y = [i[1] for i in path]
# plt.plot(x,y, 'g-')
# plt.show()
try:
#Testing our function
simulate(path, final_graph)
except rospy.ROSInterruptException: pass
# simulate(path, final_graph)
#print("Minimum Distance from start to goal:", min_distance)
|
# 64-symbol output alphabet (A-Z, a-z, 0-9, '_', '?') indexed by 6-bit groups,
# analogous to a custom base64 table.
C64_TABLE = [
    'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U',
    'V', 'W', 'X', 'Y', 'Z', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't',
    'u', 'v', 'w', 'x', 'y', 'z', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '_', '?'
]
def encrypt(s: str):
    """Obfuscate *s* into a string over C64_TABLE (inverse of decrypt)."""
    # 8-bit big-endian bits of every UTF-8 byte, concatenated, then reversed.
    bits = "".join(bin(byte)[2:].zfill(8)
                   for byte in bytes(s.encode('utf8')))[::-1]
    length = len(bits)
    # Forward XOR chain, left to right: bit[k] ^= bit[k+1].
    for count in range(length - 1):
        bits = bits[:count] + str(int(bits[count]) ^
                                  int(bits[count + 1])) + bits[count + 1:]
    # 11 rounds of "move last bit to the front, then reverse the string".
    for i in range(11):
        bits = (bits[-1] + bits[:-1])[::-1]
    # De-interleave: even positions -> left, odd positions -> right.
    left = ""
    right = ""
    for i in range(length):
        if(not i % 2):
            left += bits[i]
        else:
            right += bits[i]
    # Sandwich: first half of right, then left XOR right, then second half of right.
    data = right[:len(right) // 2] + "".join(str(int(byte1) ^ int(byte2))
                                             for byte1, byte2 in zip(left, right)) + right[len(right) // 2:]
    # Pad with the alternating 0,1,0,... pattern; note this always appends
    # 1-6 bits (6 when the length is already a multiple of 6).
    data = data + "".join(str(byte % 2) for byte in range(6 - len(data) % 6))
    # Map each 6-bit group to its C64_TABLE symbol.
    result = "".join(C64_TABLE[int(data[i * 6:(i + 1) * 6], 2)]
                     for i in range(len(data) // 6))
    return result
def decrypt(s: str):
    """Invert encrypt(): recover the original UTF-8 string from *s*."""
    # Back to the bit string via each symbol's 6-bit table index.
    data = "".join(bin(C64_TABLE.index(font))[2:].zfill(6) for font in s)
    # Drop the pad bits added by encrypt (len % 8 trailing bits).
    data = data[:-(len(data) % 8)]
    data_len = len(data)
    # Undo the sandwich: the outer quarters are `right`; XOR of the middle
    # half with `right` recovers `left`.
    right = data[:data_len // 4] + data[-data_len // 4:]
    left = "".join(str(int(byte1) ^ int(byte2)) for byte1, byte2 in zip(
        data[data_len // 4:data_len // 4 * 3], right))
    # Re-interleave: even positions from left, odd positions from right.
    bits = ""
    for i in range(data_len):
        if(not i % 2):
            bits += left[i // 2]
        else:
            bits += right[i // 2]
    # Undo the rotate/reverse rounds and the XOR chain, right to left:
    # per step, 11 rounds of reverse-then-rotate-left, one XOR-undo at
    # position count-1, then a final reverse.
    for count in range(data_len - 1, 0, -1):
        for i in range(11):
            bits = bits[::-1]
            bits = (bits[1:] + bits[0])
        bits = bits[:count - 1] + \
            str(int(bits[count]) ^ int(bits[count - 1])) + bits[count:]
        bits = bits[::-1]
    # Reassemble 8-bit groups into bytes and decode as UTF-8.
    result = "".join(chr(int(bits[x * 8:(x + 1) * 8], 2))
                     for x in range(len(bits) // 8)).encode("latin1").decode('utf8')
    return result
|
# imports
try:
import tkinter as tk
except ImportError:
import Tkinter as tk
import os, urllib.request,subprocess, sys, tkinter.filedialog as fd, moviepy.editor as mp, time, glob, re
from tkinter import messagebox, Button, Entry, Label, Frame, Canvas, Listbox
from pytube import YouTube
from bs4 import BeautifulSoup
from threading import Thread
# global variables
directory = os.getcwd()  # target directory for downloads/conversions
current_search = []      # video URLs, index-aligned with the listbox rows
result_cap = 5           # maximum number of search results shown
# main loop and settings
root = tk.Tk()
root.geometry("600x300")
root.title('MP3 Downloader')
# UI Components
searchbar = Entry(root, width="70", borderwidth=5, relief=tk.SUNKEN)
searchbar.grid(row=0, column=0, sticky=tk.W)
search_results = Listbox(root, width="70")
search_results.grid(row=1, column=0, sticky=tk.W, padx=(5,5))
label_currentDir = Label(root, text=directory)
label_currentDir.grid(row=5, column=0, columnspan=2, sticky=tk.W)
label_notification = Label(root, text="Welcome!")
label_notification.grid(row=1, column=1, sticky=tk.W)
label_current_results = Label(root, text="No Searches yet")
# functions
def set_folder():
    """Ask the user for a download directory and store it.

    Keeps prompting until a non-empty directory is chosen, then updates the
    directory label and the module-level `directory`.
    """
    # BUGFIX: the original assigned to a local `directory`, so the
    # module-level variable was never actually updated.
    global directory
    tempdir = fd.askdirectory(parent=root, title='Please select a directory')
    while tempdir == "":
        tempdir = fd.askdirectory(parent=root, title='Please select a directory')
    label_currentDir['text'] = tempdir
    directory = tempdir
def download():
    """Download the selected search result and convert it to an MP3.

    Runs on a worker thread (see the Download button). Reads the target
    directory from label_currentDir and the URL from the listbox selection.
    """
    URL = ""
    try:
        URL = current_search[search_results.curselection()[0]]
        # Strip apostrophes so the title matches the filename pytube writes.
        title = re.sub('\'','', search_results.get(search_results.curselection()[0]))
        label_notification['text'] = "Downloading..."
        YouTube(URL).streams.first().download(label_currentDir['text'])
        label_notification['text'] = "Converting to mp3..."
        clip = mp.VideoFileClip(label_currentDir['text'] + "/" + title + ".mp4")
        clip.audio.write_audiofile(label_currentDir['text'] + "/" + title + ".mp3")
        label_notification['text'] = "Done"
        # Release the readers so the source .mp4 can be removed below.
        if clip.audio and clip.audio.reader:
            clip.reader.close()
            clip.audio.reader.close_proc()
        os.remove(label_currentDir['text'] + "/" + title + ".mp4")
    except Exception as err:
        # Any failure (no selection, network error, conversion error) lands here.
        label_notification['text'] = "No URL found!"
        print(err)
def find_video(term):
    """Search YouTube for *term* and fill the listbox with up to result_cap hits.

    Scrapes the results page HTML; runs on a worker thread (see Search button).
    """
    if term != "":
        try:
            label_notification['text'] = "Searching..."
            label_current_results['text'] = ""
            current_search.clear()
            search_results.delete(0,tk.END)
            top_link = ""  # NOTE(review): unused
            query = urllib.parse.quote(term)
            url = "https://www.youtube.com/results?search_query=" + query
            soup = BeautifulSoup(urllib.request.urlopen(url).read(), 'html.parser')
            counter = 0
            # 'yt-uix-tile-link' anchors are the individual video results.
            for video in soup.findAll(attrs={'class':'yt-uix-tile-link'}):
                if counter >= result_cap:
                    break
                counter += 1
                URL = 'https://www.youtube.com' + video['href']
                current_search.append(URL)
                search_results.insert(tk.END, YouTube(URL).title)
            label_notification['text'] = "Results Found!"
        except:
            # NOTE(review): bare except hides the real error; consider logging it.
            label_notification['text'] = "Error, try again."
    else:
        label_notification['text'] = "No term inputted."
# buttons
Button(root, text='Search', command=lambda : Thread(target = find_video, args = (searchbar.get(), )).start()).grid(row=0, column=1, sticky=tk.W)
Button(root, text='Download', command=lambda : Thread(target = download).start()).grid(row=3, column=0, sticky=tk.W, padx=(5,0), pady=(5,0))
# BUGFIX: the original used Thread(target=set_folder()) which CALLED
# set_folder on the GUI thread and handed the Thread its None return value,
# so the worker thread did nothing. Pass the function object itself.
Button(root, text='Set Folder', command=lambda : Thread(target = set_folder).start()).grid(row=4, column=0, sticky=tk.W, padx=(5,0), pady=(5,0))
# Begin Executing
root.mainloop()
|
#!/usr/bin/env python3
# Hardware Probe
# Copyright (c) 2020-2023, Simon Peter <probono@puredarwin.org>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Stethoscope Icon
# https://iconarchive.com/show/pry-system-icons-by-jonas-rask/Stethoscope-icon.html
# Artist: Jonas Rask Design
# Iconset: Pry System Icons (64 icons)
# License: Free for non-commercial use.
# Commercial usage: Not allowed
import sys, os, re, socket
import shutil
from datetime import datetime
from PyQt5 import QtWidgets, QtGui, QtCore # pkg install py37-qt5-widgets
# Plenty of TODOs and FIXMEs are sprinkled across this code.
# These are invitations for new contributors to implement or comment on how to best implement.
# These things are not necessarily hard, just no one had the time to do them so far.
# Translate this application using Qt .ts files without the need for compilation
import tstranslator
# FIXME: Do not import translations from outside of the application bundle
# which currently is difficult because we have all translations for all applications
# in the whole repository in the same .ts files
tstr = tstranslator.TsTranslator(os.path.dirname(__file__) + "/i18n", "")
def tr(input):
    """Translate *input* via the shared TsTranslator instance.

    NOTE(review): the parameter name shadows the built-in input(); kept
    as-is because renaming it could break keyword callers.
    """
    return tstr.tr(input)
#############################################################################
# Helper functions
#############################################################################
def internetCheckConnected(host="8.8.8.8", port=53, timeout=3):
    """Return True if a TCP connection to *host*:*port* succeeds within *timeout*.

    Host: 8.8.8.8 (google-public-dns-a.google.com)
    OpenPort: 53/tcp
    Service: domain (DNS/TCP)
    """
    try:
        socket.setdefaulttimeout(timeout)
        # Use the socket as a context manager so it is always closed
        # (the original leaked the connected socket).
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
            s.connect((host, port))
        return True
    except socket.error as ex:
        print(ex)
        return False
#############################################################################
# Initialization
# https://doc.qt.io/qt-5/qwizard.html
#############################################################################
# The single QApplication must exist before any widget is constructed.
app = QtWidgets.QApplication(sys.argv)
class Wizard(QtWidgets.QWizard, object):
    """Top-level wizard driving the probe -> privacy -> upload -> result flow."""
    def __init__(self):
        app.setOverrideCursor(QtGui.QCursor(QtCore.Qt.WaitCursor)) # It can take some time before we show the initial page, because hw-probe runs there
        print("Preparing wizard")
        super().__init__()
        # When True, nextId() jumps straight to the last page (the error page).
        self.should_show_last_page = False
        self.error_message_nice = tr("An unknown error occurred.")
        self.setWizardStyle(QtWidgets.QWizard.MacStyle)
        self.setPixmap(QtWidgets.QWizard.BackgroundPixmap, QtGui.QPixmap(os.path.dirname(__file__) + '/Stethoscope-icon.png'))
        self.setOption(QtWidgets.QWizard.ExtendedWatermarkPixmap, True) # Extend WatermarkPixmap all the way down to the window's edge; https://doc.qt.io/qt-5/qwizard.html#wizard-look-and-feel
        self.hw_probe_tool = '/usr/local/bin/hw-probe'  # external CLI that gathers the probe
        self.server_probe_url = None   # set by UploadPage after a successful upload
        self.local_probe_path = None   # set by IntroPage.run_probe_locally
        self.setWindowTitle(tr("Hardware Probe"))
        self.setFixedSize(600, 400)
        self.setSubTitleFormat(QtCore.Qt.RichText) # Allow HTML; Qt 5.14+ also have an option for Markdown
        # Translate the widgets in the UI objects in the Wizard
        self.setWindowTitle(tr(self.windowTitle()))
        for e in self.findChildren(QtCore.QObject, None, QtCore.Qt.FindChildrenRecursively):
            if hasattr(e, 'text') and hasattr(e, 'setText'):
                e.setText(tr(e.text()))
    def showErrorPage(self, message):
        """Append an ErrorPage and arrange for the next page change to land on it."""
        print("Show error page")
        self.addPage(ErrorPage())
        # It is not possible to directly jump to the last page from here, so we need to take a workaround
        self.should_show_last_page = True
        self.error_message_nice = message
        self.next()
    # When we are about to go to the next page, we need to check whether we have to show the error page instead
    def nextId(self):
        if self.should_show_last_page == True:
            return max(wizard.pageIds())  # the error page is always the last one added
        else:
            return self.currentId() + 1
    def playSound(self):
        """Best-effort playback of the bundled success sound via external ogg123."""
        print("Playing sound")
        soundfile = os.path.dirname(__file__) + '/success.ogg' # https://freesound.org/people/Leszek_Szary/sounds/171670/, licensed under CC0
        proc = QtCore.QProcess()
        command = 'ogg123'
        args = ['-q', soundfile]
        print(command, args)
        try:
            proc.startDetached(command, args)
        except:
            pass
wizard = Wizard()  # module-level wizard instance shared by all page classes
#############################################################################
# Privacy Information
#############################################################################
class PrivacyPage(QtWidgets.QWizardPage, object):
    """Wizard page showing the privacy terms that apply to uploading a probe."""
    def __init__(self):
        print("Privacy Information")
        super().__init__()
        self.setTitle(tr('Privacy Information'))
        # NOTE(review): "Pricacy" is a typo, but the string is also a
        # translation key — fix it together with the .ts files so lookups
        # do not break.
        self.setSubTitle(tr('Uploading a Hardware Probe is subject to the following Pricacy Terms.'))
        license_label = QtWidgets.QTextBrowser()
        license_layout = QtWidgets.QVBoxLayout(self)
        # Use a context manager so the file handle is closed deterministically
        # (the original leaked the handle returned by open()).
        with open(os.path.dirname(__file__) + '/intro.txt', 'r') as intro_file:
            license_text = intro_file.read()
        license_label.setText(license_text)
        font = wizard.font()
        font.setPointSize(9)
        license_label.setFont(font)
        license_layout.addWidget(license_label)
        additional_licenses_label = QtWidgets.QLabel()
        additional_licenses_label.setWordWrap(True)
        additional_licenses_label.setText(tr('Please see %s for more information.') % '<a href="https://bsd-hardware.info">https://bsd-hardware.info</a>')
        license_layout.addWidget(additional_licenses_label)
#############################################################################
# Intro page
#############################################################################
class IntroPage(QtWidgets.QWizardPage, object):
    """First page: explains the tool and runs hw-probe locally in the background."""
    def __init__(self):
        print("Preparing IntroPage")
        super().__init__()
        self.setTitle(tr('Hardware Probe'))
        self.setSubTitle(tr("""<p>This utility collects hardware details of your computer and can anonymously upload them to a public database.</p>
<p>This can help users and operating system developers to collaboratively debug hardware related issues, check for operating system compatibility and find drivers.</p>
<p>You will get a permanent probe URL to view and share collected information.</p><br><br><br>"""))
        layout = QtWidgets.QVBoxLayout(self)
        # layout.addWidget(center_widget, True) # True = add stretch vertically
        wizard.showHardwareProbeButton = QtWidgets.QPushButton(tr('Show Hardware Probe'), self)
        wizard.showHardwareProbeButton.clicked.connect(self.showHardwareProbeButtonClicked)
        # Disabled until run_probe_locally() has produced a local probe to show.
        wizard.showHardwareProbeButton.setDisabled(True)
        layout.addWidget(wizard.showHardwareProbeButton)
    def showHardwareProbeButtonClicked(self):
        """Open the locally generated probe with the external 'open' helper."""
        print("showHardwareProbeButtonClicked")
        print("self.local_probe_path: %s" % self.local_probe_path)
        proc = QtCore.QProcess()
        command = 'open'
        args = [self.local_probe_path]
        try:
            print("Starting %s %s" % (command, args))
            proc.startDetached(command, args)
        except:
            wizard.showErrorPage(tr("Failed to open the hardware probe."))
            return
    def initializePage(self):
        print("Displaying IntroPage")
        # Without this, the window does not get shown before run_probe_locally is done; why?
        workaroundtimer = QtCore.QTimer()
        workaroundtimer.singleShot(200, self.run_probe_locally)
    def run_probe_locally(self):
        """Run 'sudo hw-probe -all' and remember the path of the local probe."""
        proc = QtCore.QProcess()
        command = 'sudo'
        args = ["-A", "-E", self.wizard().hw_probe_tool, "-all"]
        try:
            print("Starting %s %s" % (command, args))
            proc.start(command, args)
        except:
            wizard.showErrorPage(tr("Failed to run the %s tool." % wizard.hw_probe_tool)) # This does not catch most cases of errors; hence see below
            return
        proc.waitForFinished()
        output_lines = proc.readAllStandardOutput().split("\n")
        err_lines = proc.readAllStandardError().split("\n")
        if len(output_lines) > 2:
            # Second-to-last stdout line looks like "...: /root/HW_PROBE/LATEST/hw.info"
            self.local_probe_path = str(output_lines[len(output_lines)-2], encoding='utf-8').split(":")[1].strip()
            print("self.local_probe_path: %s" % self.local_probe_path)
        else:
            wizard.showErrorPage(tr("Failed to run the %s tool." % wizard.hw_probe_tool)) # This catches most cases if something goes wrong
            return
        # Make the Hardware Probe owned by user for easy access by the user
        command = 'sudo'
        args = ["-A", "-E", "chown", "-R", os.environ.get('USER'), self.local_probe_path]
        try:
            print("Starting %s %s" % (command, args))
            proc.start(command, args)
        except:
            # BUGFIX: the original used "%x" (integer hex) with a string
            # username, which would itself raise TypeError; "%s" is correct.
            wizard.showErrorPage(tr(
                "Failed to set the owner to %s.") % os.environ.get('USER'))
            return
        proc.waitForFinished()
        wizard.showHardwareProbeButton.setDisabled(False)
        app.setOverrideCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
#############################################################################
# Installation page
#############################################################################
class UploadPage(QtWidgets.QWizardPage, object):
    """Page that uploads the probe via 'hw-probe -all -upload' and parses the URL."""
    def __init__(self):
        print("Preparing InstallationPage")
        super().__init__()
        self.setTitle(tr('Uploading Hardware Probe'))
        self.setSubTitle(tr('The Hardware Probe is being uploaded to the public database'))
        self.layout = QtWidgets.QVBoxLayout(self)
        wizard.progress = QtWidgets.QProgressBar(self)
        # Set the minimum, maximum and current values to get an indeterminate progress bar
        wizard.progress.setMaximum(0)
        wizard.progress.setMinimum(0)
        wizard.progress.setValue(0)
        self.layout.addWidget(wizard.progress, True)
    def initializePage(self):
        print("Displaying InstallationPage")
        # Hide all navigation buttons while the upload is running.
        wizard.setButtonLayout(
            [QtWidgets.QWizard.Stretch])
        if internetCheckConnected() == False:
            print("Offline?")
            wizard.showErrorPage(tr("You need an active internet connection in order to upload."))
            return
        # Without this, the progress bar does not get shown at all; why?
        workaroundtimer = QtCore.QTimer()
        workaroundtimer.singleShot(200, self.upload)
    def upload(self):
        """Run the upload synchronously and extract the 'Probe URL:' line."""
        print("Starting Upload")
        proc = QtCore.QProcess()
        command = "sudo"
        args = ["-A", "-E", wizard.hw_probe_tool, "-all", "-upload"]
        try:
            print("Starting %s %s" % (command, args))
            proc.start(command, args)
        except:
            wizard.showErrorPage(tr("Failed to upload using the %s tool." % wizard.hw_probe_tool)) # This does not catch most cases of errors; hence see below
            return
        proc.waitForFinished()
        # FIXME: What can we do so that the progress bar stays animated without the need for threading?
        output_lines = proc.readAllStandardOutput().split("\n")
        err_lines = proc.readAllStandardError().split("\n")
        if err_lines[0] != "":
            wizard.showErrorPage(str(err_lines[0], encoding='utf-8'))
            return
        elif len(output_lines) > 2:
            for line in output_lines:
                line = str(line, encoding='utf-8')
                print(line)
                if "Probe URL:" in line:
                    wizard.server_probe_url = line.replace("Probe URL:","").strip() # Probe URL: https://bsd-hardware.info/?probe=...
                    print("wizard.server_probe_url: %s" % wizard.server_probe_url)
        else:
            wizard.showErrorPage(tr("Failed to upload using the %s tool." % wizard.hw_probe_tool)) # This catches most cases if something goes wrong
            return
        wizard.next()
#############################################################################
# Success page
#############################################################################
class SuccessPage(QtWidgets.QWizardPage, object):
    """Final page shown after a successful upload, with a link to the probe."""
    def __init__(self):
        print("Preparing SuccessPage")
        super().__init__()
        self.timer = QtCore.QTimer() # Used to periodically check the available disks
    def initializePage(self):
        print("Displaying SuccessPage")
        # Only offer "Quit"; the flow is finished at this point.
        wizard.setButtonLayout(
            [QtWidgets.QWizard.Stretch, QtWidgets.QWizard.CancelButton])
        # wizard.playSound()
        self.setTitle(tr('Hardware Probe Uploaded'))
        self.setSubTitle(tr('Thank you for uploading your Hardware Probe.'))
        logo_pixmap = QtGui.QPixmap(os.path.dirname(__file__) + '/check.png').scaledToHeight(160, QtCore.Qt.SmoothTransformation)
        logo_label = QtWidgets.QLabel()
        logo_label.setPixmap(logo_pixmap)
        center_layout = QtWidgets.QHBoxLayout(self)
        center_layout.addStretch()
        center_layout.addWidget(logo_label)
        center_layout.addStretch()
        center_widget = QtWidgets.QWidget()
        center_widget.setLayout(center_layout)
        layout = QtWidgets.QVBoxLayout(self)
        layout.addWidget(center_widget, True) # True = add stretch vertically
        # label = QtWidgets.QLabel()
        # label.setText("You can view it at <a href='%s'>%s</a>" % (wizard.server_probe_url, wizard.server_probe_url))
        # label.setWordWrap(True)
        # layout.addWidget(label)
        wizard.showUploadedProbeButton = QtWidgets.QPushButton(tr('Show uploaded Hardware Probe'), self)
        wizard.showUploadedProbeButton.clicked.connect(self.showUploadedProbeButtonClicked)
        layout.addWidget(wizard.showUploadedProbeButton)
        self.setButtonText(wizard.CancelButton, tr("Quit"))
        wizard.setButtonLayout([QtWidgets.QWizard.Stretch, QtWidgets.QWizard.CancelButton])
    def showUploadedProbeButtonClicked(self):
        """Open the uploaded probe URL in a browser via the 'launch' helper."""
        print("showHardwareProbeButtonClicked")
        print("wizard.server_probe_url: %s" % wizard.server_probe_url)
        proc = QtCore.QProcess()
        # NOTE(review): 'launch' + Falkon looks platform-specific (helloSystem/
        # FreeBSD) — confirm this is the intended browser invocation.
        command = 'launch'
        args = ["Falkon", wizard.server_probe_url]
        try:
            print("Starting %s %s" % (command, args))
            proc.startDetached(command, args)
        except:
            wizard.showErrorPage(tr("Failed to open the uploaded hardware probe."))
            return
#############################################################################
# Error page
#############################################################################
class ErrorPage(QtWidgets.QWizardPage, object):
    """Terminal page that displays wizard.error_message_nice and offers Quit."""
    def __init__(self):
        print("Preparing ErrorPage")
        super().__init__()
        self.setTitle(tr('Error'))
        self.setSubTitle(tr('Hardware Probe was not successful.'))
        logo_pixmap = QtGui.QPixmap(os.path.dirname(__file__) + '/cross.png').scaledToHeight(160, QtCore.Qt.SmoothTransformation)
        logo_label = QtWidgets.QLabel()
        logo_label.setPixmap(logo_pixmap)
        center_layout = QtWidgets.QHBoxLayout(self)
        center_layout.addStretch()
        center_layout.addWidget(logo_label)
        center_layout.addStretch()
        center_widget = QtWidgets.QWidget()
        center_widget.setLayout(center_layout)
        self.layout = QtWidgets.QVBoxLayout(self)
        self.layout.addWidget(center_widget, True) # True = add stretch vertically
        self.label = QtWidgets.QLabel() # Putting it in initializePage would add another one each time the page is displayed when going back and forth
        self.layout.addWidget(self.label)
    def initializePage(self):
        print("Displaying ErrorPage")
        # Restore the normal cursor in case the wait cursor is still active.
        app.setOverrideCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
        wizard.showHardwareProbeButton.hide() # FIXME: Why is this needed?
        wizard.progress.hide() # FIXME: Why is this needed?
        # wizard.playSound()
        self.label.setWordWrap(True)
        self.label.clear()
        self.label.setText(wizard.error_message_nice)
        self.setButtonText(wizard.CancelButton, "Quit")
        wizard.setButtonLayout([QtWidgets.QWizard.Stretch, QtWidgets.QWizard.CancelButton])
#############################################################################
# Pages flow in the wizard
#############################################################################
# Instantiate the pages in display order; ErrorPage is appended on demand
# by Wizard.showErrorPage().
intro_page = IntroPage()
wizard.addPage(intro_page)
license_page = PrivacyPage()
wizard.addPage(license_page)
installation_page = UploadPage()
wizard.addPage(installation_page)
success_page = SuccessPage()
wizard.addPage(success_page)
wizard.show()
sys.exit(app.exec_())
|
from ex115.lib.interface import *
def arquivoExiste(nome):
    """Return True if the file *nome* exists and can be opened for reading."""
    try:
        # Context manager closes the handle even if an error follows.
        with open(nome, 'rt'):
            pass
    except FileNotFoundError:
        return False
    return True
def criarArquivo(nome):
    """Create an empty text file called *nome*, reporting success or failure."""
    try:
        # 'wt+' opens for write/read and creates the file if it is missing.
        with open(nome, 'wt+'):
            pass
    except OSError:
        print('Houve um error na criação do arquivo!')
    else:
        print(f'Arquivo {nome} criado com sucesso')
def lerArquivo(nome):
    """Print the registered users stored in *nome* (one 'name;age' per line)."""
    try:
        a = open(nome, 'rt')
    except OSError:
        # BUGFIX: the original kept a `finally: a.close()` that raised
        # NameError when open() failed, because `a` was never bound.
        print('ERRO ao ler arquivo!')
        return
    try:
        cabeçalho('CADASTRO DE USUÁRIOS')
        for linha in a:
            dado = linha.split(';')
            dado[1] = dado[1].replace('\n', '')
            print(f'{dado[0]:<30}{dado[1]:>3} anos')
    finally:
        a.close()
def cadastrar(arq, nome='DESCONHECIDO', idade=0):
    """Append one 'nome;idade' record to the file *arq*."""
    try:
        a = open(arq, 'at')  # append mode; creates the file if missing
    except OSError:
        print('Houve um erro na abertura do arquivo!')
        return
    try:
        a.write(f'{nome};{idade}\n')
    except OSError:
        print('Houve um ERRO ao escrever os dados!')
    else:
        print(f'Novo registro de {nome} adicionado.')
    finally:
        # Close on every path (the original only closed on the success path).
        a.close()
|
# Read n rows of comma-separated integers, keeping only the even values.
n = int(input())
even_matrix = [
    [number for number in map(int, input().split(',')) if number % 2 == 0]
    for _ in range(n)
]
print(even_matrix)
|
import pyttsx3
import speech_recognition as sr
import datetime
import wikipedia
import webbrowser
import os
import pywhatkit as kit
# Text-to-speech engine using the Windows SAPI5 backend; voices[1] selects
# the second installed voice.
machine = pyttsx3.init('sapi5')
voices = machine.getProperty('voices')
machine.setProperty('voice',voices[1].id)
def speak(audio):
    """Speak *audio* aloud and block until playback finishes."""
    machine.say(audio)
    machine.runAndWait()
def greet():
    """Greet the users according to the current local hour."""
    time = int(datetime.datetime.now().hour)
    # NOTE(review): the boundaries overlap at 12 and 17; the first matching
    # branch wins, so hour 12 counts as morning and 17 as afternoon.
    if time >= 0 and time <=12:
        speak('Good morning bhawna and chinmay')
    elif time >= 12 and time <=17:
        speak('Good afternoon bhawna and chinmay')
    else:
        speak('Good evening bhavna and chinmay')
    speak('hello bhavna and chinmay, i am lily . how may i help you')
def order():
    """Listen on the microphone and return the recognized text.

    Returns the literal string "none" when recognition fails, which the
    main loop then lower-cases and matches against its keywords.
    """
    c = sr.Recognizer()
    with sr.Microphone() as source:
        print('Listening...')
        c.pause_threshold = .5
        audio = c.listen(source)
    try:
        print('Recognizing...')
        # Online Google recognizer, Indian-English model.
        query = c.recognize_google(audio,language='en-in')
        print(f'user said:- {query}\n')
    except:
        # NOTE(review): bare except also swallows network errors.
        print('Say that again please...')
        return "none"
    return query
if __name__ == "__main__":
    greet()
    # Main command loop: listen, match keywords, act. Several branches
    # `break` out of the loop and end the program after one action.
    while True:
        query = order().lower()
        if 'wikipedia' in query:
            speak('Searching your choice')
            query = query.replace("wikipedia","")
            answer = wikipedia.summary(query, sentences=2)
            print('According to wikipedia')
            print(answer)
            speak('According to wikipedia')
            speak(answer)
        elif 'youtube' in query:
            kit.playonyt(query)
            break
        elif 'google' in query:
            kit.search(query)
        elif 'open zoom app' in query:
            filepath = "C:\\Users\\chinmay khanna\\AppData\\Roaming\\Zoom\\bin\\Zoom.exe"
            os.startfile(filepath)
            break
        elif 'open main' in query:
            filepath = "c:"
            os.startfile(filepath)
        elif 'open code' in query:
            filepath = "d:"
            os.startfile(filepath)
        elif 'open entertainment' in query:
            filepath = "e:"
            os.startfile(filepath)
        elif 'open movies' in query:
            filepath = "E:\\scaned pdf and movies\\movies"
            os.startfile(filepath)
        elif 'send message' in query:
            # Collect WhatsApp message details from the keyboard, not voice.
            while True:
                print("Enter the mobile number")
                speak("Enter the mobile number")
                num = input()
                print('Enter the message')
                speak('Enter the message')
                msg = input()
                print("Enter the time")
                speak("Enter the time")
                hr,sec = map(int,input().split())
                print("Type yes if you want to send the message")
                speak("Type yes if you want to send the message")
                check = input().lower()
                if check == "yes":
                    # NOTE(review): pywhatkit.sendwhatmsg takes (phone, message,
                    # hour, minute); the name `sec` suggests seconds but the
                    # value is used as minutes — confirm intended usage.
                    kit.sendwhatmsg(num,msg,hr,sec)
                    break
            break
        elif 'hello' in query:
            print('Hello Chinmay and Bhawna')
            speak('Hello Chinmay and Bhawna')
        elif 'how are you' in query:
            print('I am fine . how are you???')
            speak('I am fine . how are you?')
        elif 'i am fine' in query or 'i am good' in query:
            print('ohh good!! ok now tell me how can i help you???')
            speak('oh good . ok now tell me how can i help you?')
        elif 'bad mood' in query:
            print("don't worry.. everything will be alright.. you can see movies or songs on youtube tell me what should i open for you??")
            speak("don't worry . everything will be alright . you can see movies or songs on youtube tell me what should i open for you?")
        elif 'thank you' in query:
            print('mention not.. it is my duty to serve you a good results')
            speak('mention not . it is my duty to serve you a good results')
        elif 'go for a drive' in query:
            print('ok.. Sure..')
            speak('ok . sure .')
        elif 'but when' in query:
            print('when ever you want')
            speak('when ever you want')
        elif 'stop' in query:
            speak('yaa sure . have a great day ahead')
            break
        elif 'close' in query:
            speak('yaa sure . have a great day ahead')
            break
|
from django.conf import settings
from django.contrib.sites.models import Site, SiteManager
from django.db import models
class SiteManager(SiteManager):
    """Manager that bypasses Django's per-process Site cache while DEBUG is on."""
    # NOTE: the subclass deliberately reuses (shadows) the imported name.
    def get_current(self):
        # In DEBUG, a cached Site may be stale after DB edits; drop it first.
        if settings.DEBUG:
            self.clear_cache()
        return super().get_current()
class Site(Site):
    """Extension of django.contrib.sites.Site with per-site configuration fields."""
    # Address users can contact; also the default sender address below.
    contact_email = models.EmailField(default='contact@electis.app')
    sender_email = models.EmailField(default='contact@electis.app')
    # Site-wide permission toggles — presumably gating content creation and
    # result visibility; confirm against the views that read them.
    all_users_can_create = models.BooleanField(default=True)
    all_results_are_visible = models.BooleanField(default=True)
    # Link shown in the page footer.
    footer_url = models.CharField(max_length=255, default='https://electis.app')
    objects = SiteManager()
|
#DAY 11
#Problem : https://www.hackerrank.com/challenges/text-alignment/problem
thickness = int(input())  # odd value per the problem statement
c = 'H'  # fill character for the HackerRank logo
# Top Cone
for i in range(thickness):
    print((c * i).rjust(thickness - 1) + c + (c * i).ljust(thickness - 1))
# Top Pillars
for i in range(thickness + 1):
    print((c * thickness).center(thickness * 2) + (c * thickness).center(thickness * 6))
# Middle Belt
for i in range((thickness + 1) // 2):
    print((c * thickness * 5).center(thickness * 6))
# Bottom Pillars
for i in range(thickness + 1):
    print((c * thickness).center(thickness * 2) + (c * thickness).center(thickness * 6))
# Bottom Cone
for i in range(thickness):
    print(((c * (thickness - i - 1)).rjust(thickness) + c + (c * (thickness - i - 1)).ljust(thickness)).rjust(
        thickness * 6))
#Problem : https://www.hackerrank.com/challenges/text-wrap/problem
import textwrap
def wrap(string, max_width):
    """Break *string* into lines of at most *max_width* characters."""
    lines = textwrap.wrap(string, max_width)
    return "\n".join(lines)
if __name__ == '__main__':
    # Read the text and the target width, then print the wrapped paragraph.
    string, max_width = input(), int(input())
    result = wrap(string, max_width)
    print(result)
#Problem : https://www.hackerrank.com/challenges/python-string-formatting/problem
# Print 1..n as decimal, octal, uppercase hex and binary, each right-aligned
# to the width of n's binary representation.
n = int(input().strip())
width = len(bin(n)) - 2  # digits in the binary form of n
for i in range(1, n + 1):
    print('{0:{w}d} {0:{w}o} {0:{w}X} {0:{w}b}'.format(i, w=width))
#Problem : https://www.hackerrank.com/challenges/designer-door-mat/problem
N, M = map(int, input().split())  # rows and columns (M = 3*N per the problem)
# Top half: growing '.|.' pattern centred with '-' padding.
for i in range(1, N, 2):
    print(int((M - 3 * i) / 2) * '-' + (i * '.|.') + int((M - 3 * i) / 2) * '-')
print(int((M - 7) / 2) * '-' + 'WELCOME' + int((M - 7) / 2) * '-')
# Bottom half mirrors the top.
for i in range(N - 2, -1, -2):
    print(int((M - 3 * i) / 2) * '-' + (i * '.|.') + int((M - 3 * i) / 2) * '-')
#Problem : https://www.hackerrank.com/challenges/capitalize/problem
# Capitalize the first letter of every space-separated word.
# split(' ') (not split()) preserves runs of spaces, since empty strings
# capitalize to empty strings and are re-joined unchanged.
s = input()
print(' '.join(word.capitalize() for word in s.split(' ')))
#Problem : https://www.hackerrank.com/challenges/the-minion-game/problem
s = input().strip()
s_length = len(s)
vowel_list = ['A','E','I','O','U']
stuart_point = 0
kevin_point = 0
# Every substring starting at index i contributes (len - i) points; Kevin
# scores the ones starting with a vowel, Stuart the rest.
for i in range(s_length):
    if s[i] in vowel_list:
        kevin_point += s_length - i
    else:
        stuart_point += s_length - i
if stuart_point == kevin_point:
    print('Draw')
elif kevin_point > stuart_point:
    print('Kevin',kevin_point)
else:
    print('Stuart',stuart_point)
#Problem : https://www.hackerrank.com/challenges/py-introduction-to-sets/problem
n = input()  # element count; unused since set() handles duplicates itself
ar = map(int,input().split(' '))
ar=set(ar)
# Mean of the distinct values.
print(sum(ar) / len(ar))
#Problem : https://www.hackerrank.com/challenges/symmetric-difference/problem
# Print the symmetric difference of two integer sets in ascending order.
m = int(input())
set_a = set(map(int, input().split()))
n = int(input())
set_b = set(map(int, input().split()))
# `^` is the symmetric-difference operator: union of the one-sided differences.
for value in sorted(set_a ^ set_b):
    print(value)
#Problem : https://www.hackerrank.com/challenges/calendar-module/problem
import datetime
import calendar
# Print the upper-case weekday name for a date given as "MM DD YYYY".
m,d,y=map(int,input().split())
input_date = datetime.date(y,m,d)
print(calendar.day_name[input_date.weekday()].upper())
#Problem : https://www.hackerrank.com/challenges/collections-counter/problem
from collections import Counter
# Shoe shop: sum the prices of customers whose requested size is in stock.
x = int(input())  # number of shoes; implied by the sizes line
# Multiset of available sizes: O(1) lookup/decrement per customer instead of
# the original O(n) list membership test plus O(n) remove.
shoe_size = Counter(map(int, input().split()))
n = int(input())
sell = 0
for i in range(n):
    s, p = map(int, input().split())
    if shoe_size[s] > 0:
        sell = sell + p
        shoe_size[s] -= 1
print(sell)
#Problem : https://www.hackerrank.com/challenges/zipped/problem
# Average of each of n subjects across x students (HackerRank "Zipped!").
n, x = map(int,input().split())
ar = [0 for i in range(n)]
for i in range(x):
    temp_ar=list(map(float,input().split()))
    for j in range(n):
        ar[j] += temp_ar[j]
for i in range(n):
    print(ar[i]/x)
#Problem : https://www.hackerrank.com/challenges/input/problem
# BUGFIX: the original was Python 2 (`raw_input`, eval-ing `input()`); in
# Python 3 `raw_input` is a NameError and `input()` returns a string, so the
# polynomial expression must be evaluated explicitly.
x, k = map(int, input().split())
# NOTE(review): eval() of stdin is unsafe outside this contest context.
print(eval(input()) == k)
#Problem : https://www.hackerrank.com/challenges/python-sort-sort/problem
# Athlete Sort: stable sort of an n x m table on column k.
n, m = map(int,input().split())
ar = []
for i in range(n):
    ar.append(list(map(int,input().split())))
k = int(input())
ar = sorted(ar,key = lambda x:x[k])
for i in ar:
    # NOTE(review): list comprehension used purely for its print side effect.
    [print(x,end=' ') for x in i]
    print('')
#Problem : https://www.hackerrank.com/challenges/polar-coordinates/problem
import cmath
# Print the polar coordinates (modulus, phase) of a complex number from stdin.
z = complex(input())
p = cmath.polar(z)
print(p[0])
print(p[1])
|
from datetime import datetime, timedelta, tzinfo
from dateutil.parser import parse
class OneHot:
    """Encoders that turn scalars, booleans and times of day into fixed-width
    '0'/'1' strings containing a contiguous run of ones (one-hot-style vectors)."""

    def __init__(self):
        print("init")

    @staticmethod
    def clamp(n, smallest, largest):
        """Clamp *n* to the inclusive range [smallest, largest]."""
        return max(smallest, min(n, largest))

    @staticmethod
    def vec_from_pos(pos, value_width, width):
        """Return a string of length *width* with ~*value_width* ones centred
        on index *pos*; the run is clipped at either edge.

        Raises AssertionError when pos is outside [0, width) or when
        value_width >= width.
        """
        is_odd = 1 if value_width % 2 != 0 else 0
        if pos < 0 or pos >= width:
            raise AssertionError('pos must be in the range 0 < pos < width')
        if value_width >= width:
            raise AssertionError('value width must be smaller than the width')
        # calculate left/right 0 boundaries
        half_width = int((value_width - is_odd) / 2)
        left_pos = pos - half_width
        right_pos = pos + half_width
        # clip for the edges
        left_pos = 0 if left_pos < 0 else left_pos
        right_pos = width - is_odd if right_pos >= width else right_pos
        # work out the left/right overlaps for clipping against either edge
        overlap_left = pos - half_width
        overlap_right = (width - is_odd) - (pos + half_width)
        value_width_left = half_width + overlap_left if overlap_left < 0 else half_width
        value_width_right = half_width + overlap_right if overlap_right < 0 else half_width
        return ('0' * left_pos) + (('1' * value_width_left) + ('1' * is_odd) + ('1' * value_width_right)) \
            + ('0' * ((width - 1) - right_pos)) + ('0' * (1 - is_odd))

    @staticmethod
    def scalar_params(value, min_val=0, max_val=40, width=80, overlap=0):
        """Map *value* in [min_val, max_val) to a (pos, value_width, width)
        triple suitable for vec_from_pos(). Raises AssertionError on
        out-of-range input or when the range does not fit in *width*."""
        range_val = max_val - min_val
        if range_val >= width:
            raise AssertionError('Width of the vector must be larger than the value range')
        if value >= max_val:
            raise AssertionError('Value must be less than max')
        if value < min_val:
            raise AssertionError('Value must be larger or equal to min')
        overlap_offset = (value - min_val) * overlap
        value_width = int((width + ((range_val - 2) * overlap)) / range_val)
        pos = (value_width * (value - min_val))
        is_odd = 1 if value_width % 2 != 0 else 0
        offset = int((value_width - is_odd) / 2) - 1 + is_odd
        return pos + offset - overlap_offset, value_width, width

    @staticmethod
    def get_time_of_day_from_filetime(value, width=50, overlap=0):
        """Encode a millisecond UTC timestamp as a time-of-day vector."""
        dt = datetime.utcfromtimestamp(value / 1000)
        return OneHot.get_time_of_day(dt.isoformat(), width, overlap)

    @staticmethod
    def get_time_of_day_enum(value):
        """Bucket a millisecond UTC timestamp into 0..4
        (<8h, 8-12h, 12-14h, 14-18h, >=18h)."""
        dt = datetime.utcfromtimestamp(value / 1000)
        date = parse(dt.isoformat())
        hour = date.time().hour
        if hour < 8:
            result = 0
        elif hour < 12:
            result = 1
        elif hour < 14:
            result = 2
        elif hour < 18:
            result = 3
        else:
            result = 4
        return result

    @staticmethod
    def get_duration_scalar(value, width=50):
        """Encode a duration scalar in the fixed range [0, 100000)."""
        return OneHot.get_scalar(value, width, 0, 100000)

    @staticmethod
    def get_time_of_day(value, width=50, overlap=0):
        """Encode a parseable date/time string as a 5-bucket time-of-day vector."""
        date = parse(value)
        hour = date.time().hour
        if hour < 8:
            result = 0
        elif hour < 12:
            result = 1
        elif hour < 14:
            result = 2
        elif hour < 18:
            result = 3
        else:
            result = 4
        return OneHot.get_scalar(result, width, 0, 5, overlap)

    @staticmethod
    def get_boolean(value, width=10):
        """Encode a boolean (or 0/1) as a half-width run of ones."""
        pos, value_width, width = OneHot.scalar_params(value, 0, 2, width)
        return OneHot.vec_from_pos(pos, value_width, width)

    @staticmethod
    def get_scalar(value, width=250, min_val=0, max_val=100, overlap=2, clip=False):
        """Encode *value* from [min_val, max_val); clamp it first when clip=True.

        NOTE(review): clamp allows value == max_val, which scalar_params
        still rejects — confirm whether clipping should use max_val - 1.
        """
        if clip:
            value = OneHot.clamp(value, min_val, max_val)
        pos, value_width, width = OneHot.scalar_params(value, min_val, max_val, width, overlap)
        return OneHot.vec_from_pos(pos, value_width, width)

    @staticmethod
    def is_weekend(value, width=20):
        """Encode whether the parsed date string falls on a weekend."""
        date = parse(value)
        weekday = date.weekday()
        # BUGFIX: datetime.weekday() returns 0..6 (Monday=0 .. Sunday=6); the
        # original compared against 7 and 8, so this was always False.
        weekend = weekday >= 5
        return OneHot.get_boolean(weekend, width)
|
import datetime
from pushbullet import Pushbullet
# SECURITY(review): a live API token is hard-coded and committed to source.
# Rotate this key and load it from an environment variable or config file.
pb = Pushbullet('o.cLhE1SyrpsZYocwqDrTHUPbHGqrW8L74')
# crontab
# * * * * * /usr/bin/python3 /home/pi/weather/crontest.py
t = str(datetime.datetime.now().time())
# Append the current time so each cron run leaves a trace; the context
# manager closes the file even if the write fails.
with open('crontab.txt', 'a') as f:
    f.write(t)
push = pb.push_note('Cron Update', 'Cron worked')
|
import re
def part_1():
    """AoC 2018 day 3 part 1: count fabric squares claimed by >= 2 elves.

    Reads claims of the form "#id @ x,y: wxh" from ./input and prints the
    count of multiply-claimed squares.
    """
    size = 1000
    elf_map = [[[] for x in range(size)] for y in range(size)]
    # Fix: 'with' guarantees the file is closed even if parsing raises;
    # the old explicit close() was skipped on any exception.
    with open('./input', 'r', newline='\n') as input_file:
        for line in input_file:
            elems = re.split('@ |, |: |x |\n', line)
            offset = elems[1].split(',')
            sizes = elems[2].split('x')
            pos = elems[0][1:]  # claim id without the leading '#'
            for i in range(int(offset[0]), int(offset[0]) + int(sizes[0])):
                for j in range(int(offset[1]), int(offset[1]) + int(sizes[1])):
                    elf_map[i][j].append(pos)
    count_two = 0
    for i in range(size):
        for j in range(size):
            if len(elf_map[i][j]) >= 2:
                count_two += 1
    print("Count two: {}".format(count_two))
def part_2():
    """AoC 2018 day 3 part 2: find the only claim that overlaps no other.

    Prints the id of the single claim whose squares are never shared.
    """
    size = 1000
    allPos = []
    elf_map = [[[] for x in range(size)] for y in range(size)]
    # Fix: 'with' guarantees the file is closed even if parsing raises.
    with open('./input', 'r', newline='\n') as input_file:
        for line in input_file:
            elems = re.split('@ |, |: |x |\n', line)
            offset = elems[1].split(',')
            sizes = elems[2].split('x')
            pos = elems[0][1:]  # claim id without the leading '#'
            for i in range(int(offset[0]), int(offset[0]) + int(sizes[0])):
                for j in range(int(offset[1]), int(offset[1]) + int(sizes[1])):
                    elf_map[i][j].append(pos)
            allPos.append(pos)
    # Collect every claim that shares at least one square with another.
    listOfPos = []
    for i in range(size):
        for j in range(size):
            if len(elf_map[i][j]) >= 2:
                for elem in elf_map[i][j]:
                    listOfPos.append(elem)
    salida = list(set(allPos) - set(listOfPos))
    print("Best request: {}".format(salida[0]))
part_1()  # part one: count multiply-claimed squares
part_2()  # part two: find the only non-overlapping claim
# Enter your code here. Read input from STDIN. Print output to STDOUT
from itertools import product
# Read two space-separated integer lists and print their Cartesian product
# as space-separated tuples.
first = list(map(int, input().split()))
second = list(map(int, input().split()))
print(*product(first, second))
def split_bytes(word):
    """Split a 16-bit word into [high_byte, low_byte]."""
    high, low = word >> 8, word & 0xFF
    return [high, low]
def uint_to_bytevec(num, byte_count=0):
    """Convert an unsigned int to a big-endian list of byte values.

    The result is zero-padded on the left to at least *byte_count* bytes.
    Note: uint_to_bytevec(0) with the default byte_count returns [].
    """
    out = []
    # Collect bytes least-significant first, then reverse — avoids the
    # O(n) insert(0, ...) of the naive approach.
    while num > 0 or byte_count > 0:
        out.append(num & 0xFF)
        num >>= 8
        byte_count -= 1
    out.reverse()
    return out
def bytevec_to_uint(bytelist):
result = 0
for byte in bytelist:
result = result << 8
result += byte
return result
|
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 14 16:07:45 2020
@author: Soham Shah
"""
class Solution:
    def uniquePaths(self, m: int, n: int) -> int:
        """Count monotone lattice paths from the top-left to the bottom-right
        of an n-by-m grid, moving only right or down (LeetCode 62).

        Fixes: the original shadowed the comprehension variable ``i`` and
        rebuilt row 0 redundantly inside the main loop; edges are now
        initialised to 1 up front.
        """
        # dp[i][j] = number of paths reaching cell (i, j); any cell on the
        # first row/column has exactly one path.
        dp = [[1] * m for _ in range(n)]
        for i in range(1, n):
            for j in range(1, m):
                dp[i][j] = dp[i - 1][j] + dp[i][j - 1]
        return dp[-1][-1]
import secret
from bs4 import BeautifulSoup
from selenium import webdriver
# chromedriver setup (path is machine-specific)
driver = webdriver.Chrome('/Users/choehansol/Downloads/chromedriver')
driver.implicitly_wait(3)
# Navigate to the Naver login page
driver.get('https://nid.naver.com/nidlogin.login')
# Enter the id and password
# NOTE(review): find_element_by_* was removed in Selenium 4 — presumably this
# targets Selenium 3; confirm before upgrading the dependency.
driver.find_element_by_name('id').send_keys('Your-ID')
driver.find_element_by_name('pw').send_keys('Your-PASSWORD')
# Click the login button
driver.find_element_by_class_name('btn_global').click()
# Dismiss the "register frequently used browser" prompt
driver.find_element_by_class_name('btn_cancel').click()
# Navigate to Naver Mail and parse the rendered page
driver.get('https://mail.naver.com')
html = driver.page_source
soup = BeautifulSoup(html, 'html.parser')
# Collect the sender elements
sendlist = soup.find_all('div', 'name _ccr(lst.from) ')
# Remove spans whose class is 'blind' (screen-reader-only text)
[s.extract() for s in soup('span', {'class':'blind'})]
# Collect the mail subject elements
titlelist = soup.find_all('div', 'subject')
# Print every sender/subject pair
for i in range(len(sendlist)):
    print(sendlist[i].find('a').get_text())
    print(titlelist[i].find('strong').get_text())
    print()
# Flask / SQLAlchemy configuration for the 'liar' application.
# SECURITY NOTE(review): database credentials (including the commented-out
# remote URI below) are stored in plain text — move them to environment
# variables before deploying.
# SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://root:noway_5@47.101.140.135:3306/liar'
# local db
SQLALCHEMY_DATABASE_URI = 'mysql+pymysql://root:root@localhost:3306/liar'
SQLALCHEMY_COMMIT_ON_TEARDOWN = True
CSRF_ENABLED = True
# SECRET_KEY signs sessions/CSRF tokens — replace with a random value in prod.
SECRET_KEY = 'you-will-never-guess'
SQLALCHEMY_TRACK_MODIFICATIONS = False
JSON_AS_ASCII = False
# Result-set limits for the search endpoints.
SEARCH_NEWS_LIMITED = 20
SEARCH_SPEAKER_LIMITED = 20
SEARCH_PARTY_LIMITED = 20
SIM_LIMITED = 6
DEFAULT_MODEL_MODE = 5
|
#!/usr/bin/env python3
##
## GOOGLE KICK START, 2020
## ROUND H
#!
# One test case per line: "max_l cur_l sword".
j = int(input())
for i in range(1, j + 1):
    max_l, cur_l, sword = map(int, input().split(" "))
    # Strategy 1: descend to the sword, climb back out, then traverse the
    # full height: (cur_l - sword) + (max_l - sword) + cur_l.
    method_one = (cur_l - sword) + (max_l - sword) + cur_l
    # Strategy 2: climb straight down and back over the top: cur_l + max_l.
    method_two = cur_l + max_l
    # Output the cheaper of the two strategies.
    print("case #" + "%.0f" % i + ":", min(method_one, method_two))
|
#!/usr/bin/python3
import asyncio
import os
import traceback
import discord
from googleapiclient import discovery
from discord.ext import commands
# Bot token is supplied via the environment, never hard-coded.
BOT_TOKEN = os.getenv('BOT_TOKEN')
# Cog extensions loaded at start-up.
INIT_EXT = [
    'cogs.manage',
    'cogs.debug'
]
class MC_Server(commands.Bot):
    """Discord bot that manages a Minecraft server via cog extensions."""

    def __init__(self, command_prefix):
        super().__init__(command_prefix)
        # Load every configured cog; a failing cog is reported but not fatal.
        for extension in INIT_EXT:
            try:
                self.load_extension(extension)
            except Exception:
                print('cog loading error.')
                traceback.print_exc()

    async def on_ready(self):
        """Called once the gateway connection is up: set idle presence."""
        print('Bot logged in.')
        await self.change_presence(status=discord.Status.idle)
        self.prev_ip = None
if __name__ == '__main__':
    # Entry point: create the bot with '/' as command prefix and block on it.
    bot = MC_Server(command_prefix='/')
    bot.run(BOT_TOKEN)
|
#!/usr/bin/env python
from geometry_msgs.msg import Twist, Vector3, Pose, Point
from sensor_msgs.msg import LaserScan
from neato_node.msg import Bump
from visualization_msgs.msg import Marker
from nav_msgs.msg import Odometry
import rospy
from math import sin, cos, pi
from tf.transformations import euler_from_quaternion
class WallFollow(object):
    """Neato wall-following behaviour.

    Drives forward until a wall is detected in the forward scan window, then
    turns toward/away from it with a proportional controller to hold roughly
    ``wall_distance`` metres from the nearest wall point. The nearest wall
    point is also published as an rviz marker.
    """

    def __init__(self):
        # init node
        rospy.init_node('wall_follow_node')
        # init cmd_vel publisher
        self.cmd_vel_pub = rospy.Publisher('cmd_vel', Twist, queue_size=10)
        self.rate = rospy.Rate(10)
        # init subscribers
        self.scan_sub = rospy.Subscriber("/stable_scan", LaserScan, self.scan_cb)
        self.odom_sub = rospy.Subscriber("/odom", Odometry, self.odom_cb)
        # init cmd_vel
        self.cmd_vel = Twist(linear=Vector3(x=0), angular=Vector3(z=0))
        self.cmd_vel.linear.x = 0.0
        self.cmd_vel.angular.z = 0.0
        # set up rviz marker for wall
        self.position = Vector3(x=0, y=0, z=0)  # z is yaw
        self.marker = Marker(scale=Vector3(x=0.05, y=0.05, z=1.0))
        self.marker.color.a = 0.5
        self.marker.color.r = 1
        self.marker.header.frame_id = "odom"
        self.marker.type = Marker.POINTS
        self.marker_pub = rospy.Publisher('wall_marker', Marker, queue_size=10)
        # set params
        self.wall_distance = 1.0  # Stay about 1 meter from wall
        self.ang_speed = 0.5
        self.lin_speed = 0.1  # default linear speed
        self.wall = False  # True if a wall is visible
        self.direction = 1
        self.window = 45  # side scan window (degrees either side of forward)
        self.scan = []  # holds [distance, angle-in-radians] pairs
        self.kp = 0.2  # Proportional controller constant
        self.error = 0  # controller error

    def odom_cb(self, data):
        """Odometry callback: cache the planar pose (x, y, yaw)."""
        # Bug fix: euler_from_quaternion returns the full (roll, pitch, yaw)
        # tuple; the old code stored the whole tuple in position.z instead of
        # just the yaw angle.
        yaw = euler_from_quaternion((data.pose.pose.orientation.x,
                                     data.pose.pose.orientation.y,
                                     data.pose.pose.orientation.z,
                                     data.pose.pose.orientation.w))[2]
        self.position.x = data.pose.pose.position.x
        self.position.y = data.pose.pose.position.y
        self.position.z = yaw

    def scan_cb(self, data):
        """Laser-scan callback: collect nearby readings in the forward window
        and update the controller error from the closest wall point."""
        self.wall = False
        self.scan = []
        # Angles within +/- window degrees of straight ahead.
        angle_range = list(range(0, self.window)) + list(range(360 - self.window, 360))
        for angle in angle_range:
            distance = data.ranges[angle]
            if distance == 0.0:
                continue  # 0.0 means "no return" for this beam
            if distance < 2 * self.wall_distance:
                self.wall = True
                # Store the angle in radians, signed about straight ahead.
                if angle <= self.window:
                    self.scan.append([distance, angle * pi / 180])
                else:
                    self.scan.append([distance, (angle - 360) * pi / 180])
        if self.wall:
            # Closest reading first; its distance drives the P-controller.
            self.scan.sort(key=lambda item: item[0])
            self.error = self.wall_distance - self.scan[0][0]
            self.wall_marker()

    def wall_marker(self):
        """Append the closest wall point (laser frame) to the rviz marker."""
        self.marker.header.stamp = rospy.Time.now()
        x = self.scan[0][0] * cos(self.scan[0][1])
        y = self.scan[0][0] * sin(self.scan[0][1])
        point = Point(x, y, 0)
        self.marker.points.append(point)

    def run(self):
        """Main loop: steer toward the wall when one is seen, otherwise
        drive straight ahead at the default speed."""
        while not rospy.is_shutdown():
            if self.wall:
                # Turn direction depends on which side the wall is on.
                if self.scan[0][1] < 0.0:
                    self.direction = 1
                else:
                    self.direction = -1
                self.cmd_vel.angular.z = self.direction * self.kp * self.error
            else:
                self.cmd_vel.linear.x = self.lin_speed
                self.cmd_vel.angular.z = 0.0
            self.cmd_vel_pub.publish(self.cmd_vel)
            self.marker_pub.publish(self.marker)
            self.rate.sleep()
if __name__=='__main__':
    # Entry point: initialise the node and run the control loop until shutdown.
    wall_follow = WallFollow()
    wall_follow.run()
|
#-*- coding: utf-8 -*-
# 수정 필요 (데이터의 개수)
from sklearn.preprocessing import MinMaxScaler
from sklearn.model_selection import train_test_split
from keras.models import Sequential
from keras.layers import LSTM, Dense, Dropout, BatchNormalization
from keras.callbacks import EarlyStopping
import pandas as pd
import numpy as np
# Load the KOSPI 200 CSV, drop unused columns, and min-max scale the features.
batchSize = 1
file_path = './0802/data/kospi200test.csv'
repeat_num = 10
# num_epochs = 100
file_data = pd.read_csv(file_path, encoding='euc-kr')
# print(file_data.head())
file_data.drop('Unnamed: 7' ,axis=1, inplace=True)
file_data=file_data.sort_values(by=['일자']) # sort chronologically by date
file_data.drop('일자', axis=1, inplace=True)
file_data.drop('거래량', axis=1, inplace = True)
file_data.drop('환율(원/달러)', axis=1, inplace = True)
# Scale price columns and the volume column separately.
price = file_data.values[:,:-1]
scaler = MinMaxScaler()
scaler.fit(price)
mm_price = scaler.transform(price)
volume = file_data.values[:,-1:]
scaler.fit(volume)
mm_volume = scaler.transform(volume)
x = np.concatenate((mm_price, mm_volume), axis=1)
# y = x[:,3]  (y with MinMaxScaler applied)
# y = np.array(file_data['종가'].values)
y = file_data['종가'].values  # closing price is the prediction target
# print(x.shape) # (599,4)
# print(y.shape) # (599,)
size = 2
def split_5(seq, size):
    """Slice *seq* into overlapping windows of length *size*.

    Window i covers seq[i+1 : i+size+1], so the first element of seq is
    skipped and len(seq) - size windows are produced.

    Fix: removed the leftover debug ``print(type(aaa))``.

    @param seq: 1-D sequence (list or np.ndarray) to window.
    @param size: window length.
    @return: np.ndarray of shape (len(seq) - size, size).
    """
    windows = []
    for i in range(len(seq) - size):
        subset = seq[i + 1:(i + size) + 1]
        windows.append([item for item in subset])
    return np.array(windows)
y = split_5(y, size)
x = x[:-2] # drop the trailing rows so x and y line up
# Because y groups values in pairs, 598 samples remain.
print(x.shape) # (598, 4)
print(y.shape) # (598,2)
# Random train/validation/test split (60/20/20).
train_x, test_x , train_y, test_y = train_test_split(
    x, y, random_state = 82, test_size=0.4
)
val_x, test_x ,val_y, test_y = train_test_split(
    test_x, test_y, random_state = 82, test_size = 0.5
)
# Reshape to (samples, timesteps, 1) for the LSTM input.
train_x = train_x.reshape((train_x.shape[0], train_x.shape[1],1))
test_x = test_x.reshape((test_x.shape[0], test_x.shape[1],1))
val_x = val_x.reshape((val_x.shape[0], val_x.shape[1],1))
# # Reshape for the DNN variant
# train_x= train_x.reshape(356, 5*6)
# test_x= test_x.reshape(119, 5*6)
# val_x= val_x.reshape(119,5*6)
print(train_x.shape)
print(train_y.shape)
print('-'*10)
print(test_x.shape)
print(test_y.shape)
print('-'*10)
print(val_x.shape)
print(val_y.shape)
print('-'*10)
# x_predict = file_data.values[:5,:]
# print(x_predict)
# print(x_predict.shape)
# Stateful LSTM: batch_input_shape pins the batch size to batchSize,
# and the two-unit output predicts the next pair of closing prices.
model = Sequential()
model.add(LSTM(80, batch_input_shape=(batchSize,4,1), stateful=True))
# model.add(LSTM(119, batch_input_shape=(batchSize,4,1)))
model.add(Dense(100,activation='relu'))
# model.add(BatchNormalization())
# model.add(Dense(356))
# model.add(Dropout(0.3))
model.add(Dense(30))
# model.add(BatchNormalization())
# model.add(Dense(119))
# model.add(Dropout(0.2))
# model.add(Dense(475))
# model.add(BatchNormalization())
# model.add(Dense(231))
# model.add(Dropout(0.2))
model.add(Dense(58))
# model.add(BatchNormalization())
model.add(Dense(81))
# # model.add(Dropout(0.2))
# model.add(Dense(122))
# model.add(Dropout(0.2))
model.add(Dense(2))
model.compile(loss='mse', optimizer='adam', metrics=['mse'])
early_stopping = EarlyStopping(monitor='val_mean_squared_error', patience=3, mode='auto')
# Training loop for the stateful LSTM: states are reset between repeats.
for rp_id in range(repeat_num):
    print('num:' + str(rp_id))
    model.fit(train_x, train_y,
              epochs=10, batch_size=batchSize, verbose=2,
              shuffle=False, validation_data=(val_x, val_y), callbacks=[early_stopping]
    )
    model.reset_states()
# fit for the DNN variant
# model.fit(train_x, train_y, epochs=repeat_num, batch_size=batchSize, verbose=2, validation_data=(val_x, val_y), callbacks=[early_stopping])
loss, mse = model.evaluate(test_x, test_y, batch_size=batchSize)
print('mse:', mse)
model.reset_states()
# Predict the next two closing prices from the most recent row.
# NOTE(review): x_predict is built from the *unscaled* data while training
# used scaled features — presumably a bug; confirm before trusting output.
x_predict = file_data.values[-1:,:]
x_predict = x_predict.reshape((x_predict.shape[0], x_predict.shape[1],1))
# print(x_predict)
# print(x_predict.shape)
# print(x_predict[:,-1:])
y_predict = model.predict(x_predict)
# y_predict = scaler.inverse_transform(y_predict)
# y_predict = np.mean(y_predict)
print(y_predict)
|
import SocketServer
import logging
import threading
import yaml
import os
import time
import sys
import sudoku
# Socket receive chunk size in bytes.
buffer_size = 2048
# NOTE(review): 'global' at module level is a no-op; kept for history.
global total_workload
total_workload = 0
# Field separator for the wire protocol [service, id, puzzle, END].
SEPERATOR = ','
streamformat = "%(asctime)s %(name)s %(levelname)s: %(message)s"
# Log to a timestamped file under ./log; console handler is added in main.
logging.basicConfig(level=logging.DEBUG,
                    format=streamformat, filename='./log/server_' + time.strftime("%m%d-%H%M") + '.log',
                    filemode='w')
SERVICE_NAME = 'Sudoku_Service'
logger = logging.getLogger('root')
END_FLAG = 'END'
def read_file(input_file):
    """Read *input_file* and return (success, contents).

    Returns (False, None) when the file is missing or unreadable; errors are
    logged rather than raised.

    Fixes: the bare ``except:`` (which also swallowed KeyboardInterrupt) is
    narrowed to I/O errors, and the redundant ``f.close()`` inside the
    ``with`` block is removed.
    """
    read_data = None
    if not os.path.isfile(input_file):
        logger.error('cannot find ' + input_file + ' in the directory')
        return False, read_data
    try:
        with open(input_file, 'r') as f:
            read_data = f.read()
    except (IOError, OSError):
        logger.error('cannot read file: ' + input_file)
        return False, read_data
    return True, read_data
def load_config(config_file):
    """Load a YAML configuration file; returns (success, config_dict).

    Fix: ``except yaml.YAMLError, exc`` is Python-2-only syntax; ``as`` works
    on Python 2.6+ and Python 3.
    """
    # read config_file to string
    config = {}
    readable, read_data = read_file(config_file)
    if not readable: return False, config
    try:
        # SECURITY NOTE(review): yaml.load without a Loader can construct
        # arbitrary objects — use yaml.safe_load if the config source is
        # not fully trusted.
        config = yaml.load(read_data)
    except yaml.YAMLError as exc:
        error_pos = ""
        if hasattr(exc, 'problem_mark'):
            mark = exc.problem_mark
            error_pos = " at position: (%s:%s)" % (mark.line + 1, mark.column + 1)
        logger.error(
            "Error loading configuration file \'"
            + config_file + "\'" + error_pos
            + ": content format error: Failed to parse yaml format")
        return False, config
    return True, config
class ProcessTCPRequestHandler(SocketServer.BaseRequestHandler):
    """Handles one sudoku request in a forked child process.

    Wire format (comma-separated): [service, work_id, puzzle, 'END'].
    Reply: [work_id, solved_puzzle, computing_time_seconds].
    """

    def handle(self):
        # Bug fix: ``sys.platform is not 'win32'`` compared string *identity*,
        # which only worked by CPython interning accident; use equality.
        arrive_time = time.time() if sys.platform != 'win32' else time.clock()
        pid = str(os.getpid())
        puzzle_data = ''
        try:
            # Accumulate until the peer closes or the END flag arrives.
            while True:
                data = self.request.recv(buffer_size)
                puzzle_data = puzzle_data + str(data)
                if not data or str(data).endswith(SEPERATOR + END_FLAG):
                    break
            # recv [service,id,puzzle,'END']
            puzzle_data = puzzle_data.split(SEPERATOR)
            assert len(puzzle_data) == 4, 'Receive err message'
            service = puzzle_data[0]
            wid = puzzle_data[1]
            puzzle_data = puzzle_data[2]
            source = self.client_address[0]
            logger.debug('PID' + pid + ': receive message from ' + source)
            solved_puzzle = sudoku.solve(puzzle_data)
            finish_time = time.time() if sys.platform != 'win32' else time.clock()
            comp = finish_time - arrive_time
            # reply_msg = [work_id, result, computing_time]
            resp = SEPERATOR.join((wid, solved_puzzle, str(comp)))
            self.request.sendall(resp)
            logger.debug('PID{}: {} job is done.'.format(pid, wid))
        except Exception as e:
            # str(e) instead of the Python-2-only e.message attribute.
            logger.error('PID{}: {}'.format(pid, str(e)))
class ProcessTCPServer(SocketServer.ForkingMixIn, SocketServer.TCPServer):
    """TCP server that forks one child process per request (mixin order matters)."""
    pass
if __name__ == "__main__":
    # Port 0 means to select an arbitrary unused port
    logging.info('Loading configuration')
    config_file = 'server.cfg'
    config_is_ok, config = load_config(config_file)
    if not config_is_ok:
        logging.error('load config fail!! exit the program')
        exit(1)
    # Per-host logger: file level from config, plus a console handler.
    logger = logging.getLogger(config['host_name'])
    logger.setLevel(getattr(logging, config['file_log_level']))
    console = logging.StreamHandler(stream=sys.stdout)
    formatter = logging.Formatter(fmt=streamformat)
    console.setFormatter(formatter)
    console.setLevel(getattr(logging, config['console_log_level']))
    logger.addHandler(console)
    user = config['user']
    server_ip = config['server_ip']
    server_port = config['server_port']
    logger.info('Start service..%s' % SERVICE_NAME)
    server = ProcessTCPServer((server_ip, server_port), ProcessTCPRequestHandler)
    server_ip, server_port = server.server_address
    server_thread = threading.Thread(target=server.serve_forever)
    # Exit the server thread when the main thread terminates
    server_thread.daemon = True
    server_thread.start()
    logger.info('Server is ready')
    try:
        # Bug fix: the old ``while True: pass`` busy-waited at 100% CPU;
        # sleeping keeps the main thread alive at negligible cost.
        while True:
            time.sleep(1)
    except (KeyboardInterrupt, SystemExit):
        pass
    logger.debug('Server is shutting down')
    server.shutdown()
    server.server_close()
    # print 'Total workloads =', total_workload
    logger.info('Exit server')
    exit()
|
import math
import torch
import numpy as np
from . import functional as F
class BBoxer:
    """Anchor-box codec for a single-shot detector.

    Generates a multi-scale grid of anchor boxes (one set per backbone
    stride) and converts between ground-truth (bboxes, labels) and the
    per-anchor regression/classification targets via the project's
    ``functional`` helpers. Anchors are built lazily and cached in
    ``_anchor_bboxes``.
    """
    def __init__(
        self, image_size, areas, aspect_ratios, scale_ratios,
        backbone_strides, iou_threshold, score_threshold,
        nms_threshold, class_independent_nms, ignore_threshold=0.4
    ):
        # Anchors whose best IoU falls between ignore_threshold and
        # iou_threshold are presumably ignored during training — confirm in
        # functional.bbox_label_encode.
        self.ignore_threshold = ignore_threshold
        self.class_independent_nms = class_independent_nms
        self.areas = areas                      # anchor areas, one per feature level
        self.aspect_ratios = aspect_ratios      # width/height ratios per area
        self.scale_ratios = scale_ratios        # extra size multipliers per ratio
        self.backbone_strides = backbone_strides
        self.iou_threshold = iou_threshold
        self.score_threshold = score_threshold
        self.nms_threshold = nms_threshold
        self.image_size = torch.tensor(image_size, dtype=torch.float)
        # Lazily-computed caches (see the properties below).
        self._num_anchors = None
        self._anchor_bboxes = None
        self._sizes = None
    def cuda(self, device=None):
        # Move the cached anchors to GPU (forces anchor construction).
        self._anchor_bboxes = self.anchor_bboxes.cuda(device=device)
        return self
    def to(self, device=None):
        # Move the cached anchors to the given device.
        self._anchor_bboxes = self.anchor_bboxes.to(device=device)
        return self
    def cpu(self):
        # Move the cached anchors back to CPU.
        self._anchor_bboxes = self.anchor_bboxes.cpu()
        return self
    @property
    def num_anchors(self):
        """Number of anchors per grid cell (ratios x scales)."""
        if self._num_anchors is None:
            self._num_anchors = len(self.aspect_ratios) * len(self.scale_ratios)
        return self._num_anchors
    @property
    def sizes(self):
        """Anchor (w, h) sizes as a tensor of shape (areas, num_anchors, 2)."""
        if self._sizes is None:
            self._sizes = []
            for s in self.areas:
                for ar in self.aspect_ratios:
                    # Solve h*w = s with w/h = ar.
                    h = math.sqrt(s / ar)
                    w = ar * h
                    for sr in self.scale_ratios:
                        anchor_h = h * sr
                        anchor_w = w * sr
                        self._sizes.append([anchor_w, anchor_h])
            self._sizes = torch.tensor(self._sizes, dtype=torch.float).view(len(self.areas), -1, 2)
        return self._sizes
    @property
    def feature_map_sizes(self):
        """Feature-map (h, w) per backbone stride, rounded up."""
        return [(self.image_size / stride).ceil() for stride in self.backbone_strides]
    @property
    def anchor_bboxes(self):
        """All anchor boxes as an (N, 4) tensor of (x1, y1, x2, y2), cached."""
        if self._anchor_bboxes is None:
            self._anchor_bboxes = []
            for feature_map_size, anchor_size in zip(self.feature_map_sizes, self.sizes):
                grid_size = self.image_size / feature_map_size
                feature_map_h, feature_map_w = int(feature_map_size[0]), int(feature_map_size[1])
                # Cell-centre coordinates in image space (+0.5 centres them).
                xy = F.meshgrid(feature_map_w, feature_map_h) + 0.5
                xy = (xy * grid_size).view(feature_map_h, feature_map_w, 1, 2)
                xy = xy.expand(feature_map_h, feature_map_w, self.num_anchors, 2)
                wh = anchor_size.view(1, 1, self.num_anchors, 2)
                wh = wh.expand(feature_map_h, feature_map_w, self.num_anchors, 2)
                # Convert centre/size to corner coordinates.
                box = torch.cat([xy - wh / 2.0, xy + wh / 2.0], 3)
                self._anchor_bboxes.append(box.view(-1, 4))
            self._anchor_bboxes = torch.cat(self._anchor_bboxes, 0)
        return self._anchor_bboxes
    def encode(self, bboxes, labels):
        """Encode ground-truth boxes/labels into per-anchor training targets."""
        return F.bbox_label_encode(
            bboxes=bboxes,
            labels=labels,
            anchor_bboxes=self.anchor_bboxes,
            iou_threshold=self.iou_threshold,
            ignore_threshold=self.ignore_threshold)
    def decode(self, multi_bboxes, multi_labels):
        """Decode network outputs back into detections (with NMS)."""
        return F.bbox_label_decode(
            multi_bboxes=multi_bboxes,
            multi_labels=multi_labels,
            anchor_bboxes=self.anchor_bboxes,
            nms_threshold=self.nms_threshold,
            score_threshold=self.score_threshold,
            class_independent_nms=self.class_independent_nms)
class BBoxTransform(object):
    """Wraps an augmentation transform and anchor-encodes its output boxes.

    With probability ``p`` the wrapped transform is applied first; the
    (possibly transformed) bboxes/labels are then encoded against the anchor
    grid and attached to the sample as 'anchor_adjust' / 'anchor_logits'.
    """

    def __init__(self, transform, bboxer, p=1.):
        self.transform = transform
        self.bboxer = bboxer
        self.p = p

    def __call__(self, **data):
        apply_transform = np.random.random() < self.p
        if apply_transform:
            data = self.transform(**data)
        encoded_boxes, encoded_labels = self.bboxer.encode(
            bboxes=data['bboxes'], labels=data['labels'])
        data['anchor_adjust'] = encoded_boxes
        data['anchor_logits'] = encoded_labels
        return data
|
# coding: utf-8
# In[39]:
import sys
# Input listing CSV produced by the crawler (path is machine-specific).
file = open('/Users/sathish/misc/subboard_crawler/listing_all.csv')
from random import randint
import re
import json
import re
import requests
# Google geocoding endpoint; NOTE(review): no API key is supplied — confirm
# unauthenticated requests are still accepted by the API.
mapsapiurl = 'https://maps.googleapis.com/maps/api/geocode/json?address=[[origin]]'
def sanitizeAddress2(address):
    """Normalise an address string for use in a geocoding URL.

    Lower-cases the text, strips punctuation and unit words, removes stray
    backticks, and joins the remaining words with '+'.
    """
    cleaned = address.lower()
    for token in (',', '.', 'apt', 'lower', 'upper', '#'):
        cleaned = cleaned.replace(token, '')
    cleaned = re.sub('```', '', cleaned.strip())
    return cleaned.strip().replace(' ', '+')
def getLattitude(originaddr):
    """Geocode an address via the Google Maps API; returns (lat, lng).

    NOTE(review): raises KeyError/IndexError when the API returns no
    results — the caller's try/except is what absorbs that; confirm.
    """
    originaddr = sanitizeAddress2(originaddr)
    url = mapsapiurl.replace('[[origin]]',originaddr)
    page = requests.get(url)
    pagejson = json.loads(page.text)
    #print(pagejson)
    # First result's coordinates.
    lattitude = pagejson['results'][0]['geometry']['location']['lat']
    longitude = pagejson['results'][0]['geometry']['location']['lng']
    lat=lattitude
    longi= longitude
    return lat,longi
def sanitizeAddress(address):
    """Parse a raw address line into its components.

    Returns a dict with keys: number, address_1, city, state, zipcode and
    full_address. Assumes the line ends with "... city state zipcode" and
    that the street name occupies the second and third words.
    """
    text = address.replace('```', '').lower()
    for token in (',', '.', 'apt', 'lower', 'upper', '#'):
        text = text.replace(token, '')
    text = re.sub(' +', ' ', text)
    words = text.split()
    parsed = {}
    parsed['number'] = words[0]
    parsed['zipcode'] = words[-1]
    parsed['state'] = words[-2].upper()
    # Strip any digits that leaked into the city word before capitalising.
    parsed['city'] = ''.join(ch for ch in words[-3] if not ch.isdigit()).capitalize()
    parsed['address_1'] = ' '.join(words[1:3]).capitalize()
    parsed['full_address'] = ', '.join(
        [parsed['number'], parsed['address_1'], parsed['city'], parsed['state'], parsed['zipcode']])
    return parsed
jsonlist = []
houseId = 1
counter = 0
try:
for line in file:
counter+=1
if counter >100:
break
#print line
house = {}
columns = line.split(',')
house['address'] = sanitizeAddress(columns[1])
house['busstop_distance'] = columns[2]
house['busstop'] = columns[3].replace('```',"").replace('Buffalo',"").replace('NY',"").strip()
house['busstop_walking_time'] = columns[4]
house["overallBedbugRating"]= randint(1,5)
house["overallLandlordRating"]= randint(1,5)
house["overallrentRating"]= randint(1,5)
ratings = []
rating = {
"bedbugRating": randint(1,5),
"landlordRating": randint(1,5),
"rentRating": randint(1,5),
"comments": "Wow awesome machi!!"
}
ratings.append(rating)
house['ratings'] = ratings
lat,lng = getLattitude(columns[1])
print lat,lng
house["location"]= {"lat": lat,"lng": lng}
house["houseId"] = houseId
jsonlist.append(house)
houseId+=1
print k
except :
print "Unexpected error:", sys.exc_info()[0]
pass
filew = open('/Users/sathish/misc/subboard_crawler/listing.json','w')
pjson = str(jsonlist)
ojson = pjson.replace("'","\"")
filew.write(ojson)
print 'done'
# In[ ]:
|
# Generated by Django 2.0.2 on 2019-10-20 12:17
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated Django migration for the eLearning app.

    NOTE(review): the DateTimeField defaults below are *frozen* datetimes
    captured when makemigrations ran — presumably the models used
    ``default=datetime.now()`` instead of ``default=datetime.now`` or
    ``auto_now_add=True``, so every new row gets 2019-10-20 17:47. The fix
    belongs in models.py plus a fresh migration, not in this historical file.
    """

    dependencies = [
        ('eLearning', '0007_auto_20191020_1621'),
    ]
    operations = [
        migrations.AlterField(
            model_name='course',
            name='code',
            field=models.CharField(max_length=500, unique=True),
        ),
        migrations.AlterField(
            model_name='coursetutorial',
            name='last_update',
            field=models.DateTimeField(default=datetime.datetime(2019, 10, 20, 17, 47, 27, 596611)),
        ),
        migrations.AlterField(
            model_name='document',
            name='uploaded_at',
            field=models.DateTimeField(default=datetime.datetime(2019, 10, 20, 17, 47, 27, 602750)),
        ),
        migrations.AlterField(
            model_name='material',
            name='uploaded_at',
            field=models.DateTimeField(default=datetime.datetime(2019, 10, 20, 17, 47, 27, 595584)),
        ),
        migrations.AlterField(
            model_name='video',
            name='uploaded_at',
            field=models.DateTimeField(default=datetime.datetime(2019, 10, 20, 17, 47, 27, 581984)),
        ),
    ]
|
import maya.cmds as cmds
import maya.mel as mel
import glTools.rig.utils
import glTools.utils.channelState
import glTools.utils.curve
class ControlBuilder(object):
    def __init__(self):
        """
        ControlBuilder Class Initializer.

        Sets up the side-prefix -> Maya colour-index map, the list of
        supported control shape types (each must match a builder method of
        the same name on this class), and the valid control LOD tags.
        """
        # Colour Override: side prefix -> Maya overrideColor index
        # (lf = left, rt = right, cn = centre).
        self.overrideId = {'lf': 14,
                           'rt': 13,
                           'cn': 17}
        # Supported Control Types
        self.controlType = ['anchor',
                            'arch',
                            'arrow',
                            'arrowArc',
                            'arrowCircle',
                            'box',
                            'circle',
                            'corners',
                            'crescent',
                            'cross',
                            'diamond',
                            'eye',
                            'face',
                            'gear',
                            'hex',
                            'line',
                            'locator',
                            'pyramid',
                            'spiral',
                            'sphere',
                            'sphereAnchor',
                            'square',
                            'tab',
                            'teardrop',
                            'text']
        # Control LOD list
        self.controlLod = ['primary',
                           'secondary',
                           'tertiary']
def create(self,
controlType,
controlName,
translate=(0, 0, 0),
rotate=(0, 0, 0),
scale=1,
colour=0,
text='',
ctrlLod='primary'):
"""
This script builds curve control objects based on the arguments input by the user
@param controlType: Type of control to build
@type controlType: str
@param controlName: Name of the resulting curve control.
@type controlName: str
@param translate: Translational offset for control curve
@type translate: list or tuple
@param rotate: Rotational offset for control curve
@type rotate: list or tuple
@param scale: Scale offset for control curve
@type scale: list or tuple
@param colour: The colour of the control curve
@type colour: int
@param text: Text value for "text" type control curve
@type text: str
"""
# ==========
# - Checks -
# ==========
# Check controlName
nameInd = 1
origControlName = controlName
while cmds.objExists(controlName):
controlName = origControlName + str(nameInd)
nameInd += 1
# Check Control Type
if not self.controlType.count(controlType):
raise Exception('Unsupported control shape type("' + controlType + '")!!')
# ==================
# - Create Control -
# ==================
control = ''
if controlType == 'anchor':
control = self.anchor()
elif controlType == 'arch':
control = self.arch()
elif controlType == 'arrow':
control = self.arrow()
elif controlType == 'arrowArc':
control = self.arrowArc()
elif controlType == 'arrowCircle':
control = self.arrowCircle()
elif controlType == 'box':
control = self.box()
elif controlType == 'circle':
control = self.circle()
elif controlType == 'corners':
control = self.corners()
elif controlType == 'crescent':
control = self.crescent()
elif controlType == 'cross':
control = self.cross()
elif controlType == 'diamond':
control = self.diamond()
elif controlType == 'eye':
control = self.eye()
elif controlType == 'face':
control = self.face()
elif controlType == 'gear':
control = self.gear()
elif controlType == 'hex':
control = self.hex()
elif controlType == 'line':
control = self.line()
elif controlType == 'locator':
control = self.locator()
elif controlType == 'pyramid':
control = self.pyramid()
elif controlType == 'spiral':
control = self.spiral()
elif controlType == 'sphere':
control = self.sphere()
elif controlType == 'sphereAnchor':
control = self.sphereAnchor()
elif controlType == 'square':
control = self.square()
elif controlType == 'tab':
control = self.tab()
elif controlType == 'teardrop':
control = self.teardrop()
elif controlType == 'text':
control = self.text(text=text)
else:
raise Exception('Unsupported control shape type("' + controlType + '")!!')
# Get Controls Shape(s)
control = cmds.rename(control, controlName)
controlShape = cmds.listRelatives(control, s=1, ni=1, pa=True)
if not controlShape: raise Exception('No control shape found!')
for c in range(len(controlShape)):
# Rename Control Shape
ctrlShape = cmds.rename(controlShape[c], control + 'Shape' + str(c + 1))
# Reorder
cmds.reorder(ctrlShape, b=True)
# Assign Control Shape Colour
prefix = controlName.split('_')[0]
if colour:
cmds.setAttr(ctrlShape + '.overrideEnabled', 1)
cmds.setAttr(ctrlShape + '.overrideColor', colour)
elif self.overrideId.has_key(prefix):
cmds.setAttr(ctrlShape + '.overrideEnabled', 1)
cmds.setAttr(ctrlShape + '.overrideColor', self.overrideId[prefix])
# Position Control
cmds.move(translate[0], translate[1], translate[2], control, r=True)
cmds.rotate(rotate[0], rotate[1], rotate[2], control)
cmds.scale(scale, scale, scale, control)
# Freeze Pivot and Transforms
cmds.xform(control, ws=True, piv=[0, 0, 0])
cmds.makeIdentity(control, apply=True, translate=True, rotate=True, scale=True, normal=False)
# Set Channel States
glTools.utils.channelState.ChannelState().setFlags([0, 0, 0, 0, 0, 0, 0, 0, 0, 1], [control])
# ======================
# - Create Control LOD -
# ======================
glTools.rig.utils.tagCtrl(control, ctrlLod)
# =================
# - Return Result -
# =================
return str(control)
    def anchor(self):
        """
        Create anchor control object: a vertical line topped with a small
        rectangle, built as a single degree-1 curve. Returns the curve name.
        """
        # Create control object
        pts = [(0.000, 0.000, 0.000), (0.000, 0.826, 0.000), (0.087, 0.826, 0.000), (0.087, 1.000, 0.000),
               (-0.087, 1.000, 0.000), (-0.087, 0.826, 0.000), (0.000, 0.826, 0.000)]
        knots = range(len(pts))
        control = cmds.curve(d=1, p=pts, k=knots)
        # Return control name
        return control
    def arch(self):
        """
        Create arch control object: a half-cylinder outline drawn as a single
        degree-1 curve. Returns the curve name.
        """
        # Create control object
        pts = [(0.100, 0.000, -0.500), (-0.100, 0.000, -0.500), (-0.100, 0.250, -0.433), (-0.100, 0.433, -0.250),
               (-0.100, 0.500, 0.000), (-0.100, 0.433, 0.250), (-0.100, 0.250, 0.433), (-0.100, 0.000, 0.500),
               (0.100, 0.000, 0.500), (0.100, 0.250, 0.433), (0.100, 0.433, 0.250), (0.100, 0.500, 0.000),
               (0.100, 0.433, -0.250), (0.100, 0.250, -0.433), (0.100, 0.000, -0.500)]
        knots = [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, 13.0, 14.0]
        control = cmds.curve(d=1, p=pts, k=knots)
        # Return control name
        return control
    def arrow(self):
        """
        Create arrow control object: a flat single-headed arrow in the XZ
        plane, drawn as a closed degree-1 curve. Returns the curve name.
        """
        # Create control object
        pts = [(-0.333, 0.0, -1.0), (0.333, 0.0, -1.0), (0.333, 0.0, 0.333), (0.666, 0.0, 0.333),
               (0.0, 0.0, 1.0), (-0.666, 0.0, 0.333), (-0.333, 0.0, 0.333), (-0.333, 0.0, -1.0)]
        knots = range(len(pts))
        control = cmds.curve(d=1, p=pts, k=knots)
        # Return control name
        return control
    def arrowArc(self):
        """
        Create arrowArc control object: a curved double-ended arrow (arc with
        arrowheads) drawn as a single degree-1 curve. Returns the curve name.
        """
        # Create control object
        pts = [(0.0, 0.414, -0.854), (0.0, 0.487, -0.942), (0.0, 0.148, -0.941), (0.0, 0.226, -0.627),
               (0.0, 0.293, -0.708), (0.0, 0.542, -0.542), (0.0, 0.708, -0.293), (0.0, 0.767, 0.0), (0.0, 0.708, 0.293),
               (0.0, 0.542, 0.542), (0.0, 0.293, 0.708), (0.0, 0.235, 0.607), (0.0, 0.126, 0.914), (0.0, 0.445, 0.967),
               (0.0, 0.389, 0.871), (0.0, 0.678, 0.678), (0.0, 0.885, 0.367), (0.0, 0.958, 0.0), (0.0, 0.885, -0.367),
               (0.0, 0.678, -0.678), (0.0, 0.414, -0.854)]
        knots = [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0,
                 19.0, 20.0]
        control = cmds.curve(d=1, p=pts, k=knots)
        # Return control name
        return control
    def arrowCircle(self):
        """
        Create arrowCircle control object: a circle with four outward-pointing
        arrows (up/down/left/right), drawn as one closed degree-3 curve.
        Returns the curve name.
        """
        # Create control object
        pts = [(-0.654, -0.111, -0.000), (-0.639, -0.224, -0.000), (-0.585, -0.343, -0.000), (-0.477, -0.488, -0.000),
               (-0.353, -0.585, -0.000), (-0.224, -0.637, -0.000), (-0.111, -0.654, -0.000), (-0.111, -0.696, -0.000),
               (-0.111, -0.738, -0.000), (-0.111, -0.780, -0.000), (-0.148, -0.780, -0.000), (-0.186, -0.780, -0.000),
               (-0.223, -0.780, -0.000), (-0.148, -0.854, -0.000), (-0.007, -0.996, -0.000), (0.000, -1.003, -0.000),
               (0.007, -0.996, -0.000), (0.148, -0.854, -0.000), (0.223, -0.780, -0.000), (0.186, -0.780, -0.000),
               (0.148, -0.780, -0.000), (0.111, -0.780, -0.000), (0.111, -0.738, -0.000), (0.111, -0.696, -0.000),
               (0.111, -0.654, -0.000), (0.224, -0.639, -0.000), (0.343, -0.585, -0.000), (0.488, -0.477, -0.000),
               (0.585, -0.353, -0.000), (0.637, -0.224, -0.000), (0.654, -0.111, -0.000), (0.696, -0.111, -0.000),
               (0.738, -0.111, -0.000), (0.780, -0.111, -0.000), (0.780, -0.148, -0.000), (0.780, -0.186, -0.000),
               (0.780, -0.223, -0.000), (0.854, -0.148, -0.000), (0.990, -0.013, -0.000), (1.003, 0.000, 0.000),
               (0.990, 0.013, 0.000), (0.854, 0.148, 0.000), (0.780, 0.223, 0.000), (0.780, 0.186, 0.000),
               (0.780, 0.148, 0.000), (0.780, 0.111, 0.000), (0.738, 0.111, 0.000), (0.696, 0.111, 0.000),
               (0.654, 0.111, 0.000), (0.639, 0.224, 0.000), (0.585, 0.343, 0.000), (0.477, 0.488, 0.000),
               (0.353, 0.585, 0.000), (0.224, 0.637, 0.000), (0.111, 0.654, 0.000), (0.111, 0.696, 0.000),
               (0.111, 0.738, 0.000), (0.111, 0.780, 0.000), (0.148, 0.780, 0.000), (0.186, 0.780, 0.000),
               (0.223, 0.780, 0.000), (0.148, 0.854, 0.000), (0.013, 1.015, 0.000), (0.000, 1.184, 0.000),
               (-0.013, 1.015, 0.000), (-0.148, 0.854, 0.000), (-0.223, 0.780, 0.000), (-0.186, 0.780, 0.000),
               (-0.148, 0.780, 0.000), (-0.111, 0.780, 0.000), (-0.111, 0.738, 0.000), (-0.111, 0.696, 0.000),
               (-0.111, 0.654, 0.000), (-0.224, 0.639, 0.000), (-0.343, 0.585, 0.000), (-0.488, 0.477, 0.000),
               (-0.585, 0.353, 0.000), (-0.637, 0.224, 0.000), (-0.654, 0.111, 0.000), (-0.696, 0.111, 0.000),
               (-0.738, 0.111, 0.000), (-0.780, 0.111, 0.000), (-0.780, 0.148, 0.000), (-0.780, 0.186, 0.000),
               (-0.780, 0.223, 0.000), (-0.854, 0.148, 0.000), (-0.997, 0.006, 0.000), (-1.003, 0.000, 0.000),
               (-0.997, -0.006, -0.000), (-0.854, -0.148, -0.000), (-0.780, -0.223, -0.000), (-0.780, -0.186, -0.000),
               (-0.780, -0.148, -0.000), (-0.780, -0.111, -0.000), (-0.738, -0.111, -0.000), (-0.696, -0.111, -0.000),
               (-0.654, -0.111, -0.000)]
        # Repeated end knots give the degree-3 curve clamped ends.
        knots = [0.0, 0.0, 0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0,
                 17.0, 18.0, 19.0, 20.0, 21.0, 22.0, 23.0, 24.0, 25.0, 26.0, 27.0, 28.0, 29.0, 30.0, 31.0, 32.0, 33.0,
                 34.0, 35.0, 36.0, 37.0, 38.0, 39.0, 40.0, 41.0, 42.0, 43.0, 44.0, 45.0, 46.0, 47.0, 48.0, 49.0, 50.0,
                 51.0, 52.0, 53.0, 54.0, 55.0, 56.0, 57.0, 58.0, 59.0, 60.0, 61.0, 62.0, 63.0, 64.0, 65.0, 66.0, 67.0,
                 68.0, 69.0, 70.0, 71.0, 72.0, 73.0, 74.0, 75.0, 76.0, 77.0, 78.0, 79.0, 80.0, 81.0, 82.0, 83.0, 84.0,
                 85.0, 86.0, 87.0, 88.0, 89.0, 90.0, 91.0, 92.0, 93.0, 94.0, 94.0, 94.0]
        control = cmds.curve(d=3, p=pts, k=knots)
        # Return control name
        return control
def box(self):
    """
    Create a box (cube) control object.
    """
    # Trace all twelve cube edges with one continuous linear curve;
    # several corners are revisited so the curve never breaks.
    points = [(-0.5, 0.5, 0.5), (0.5, 0.5, 0.5), (0.5, -0.5, 0.5),
              (-0.5, -0.5, 0.5), (-0.5, 0.5, 0.5), (-0.5, 0.5, -0.5),
              (-0.5, -0.5, -0.5), (-0.5, -0.5, 0.5), (-0.5, 0.5, 0.5),
              (0.5, 0.5, 0.5), (0.5, 0.5, -0.5), (-0.5, 0.5, -0.5),
              (-0.5, -0.5, -0.5), (0.5, -0.5, -0.5), (0.5, 0.5, -0.5),
              (0.5, 0.5, 0.5), (0.5, -0.5, 0.5), (0.5, -0.5, -0.5)]
    # Degree-1 curves take one knot per CV.
    return cmds.curve(d=1, p=points, k=range(len(points)))
def circle(self):
    """
    Create circle control object.
    """
    # Degree-3 NURBS circle (8 spans, radius 0.5) facing +Z with history
    # disabled; cmds.circle returns [transform, historyNode], keep only
    # the transform.
    result = cmds.circle(c=(0, 0, 0), nr=(0, 0, 1), sw=360, r=0.5, d=3, ut=0, tol=0.01, s=8, ch=0)
    return result[0]
def corners(self):
    """
    Create corners control: four L-shaped corner curves combined under a
    single transform.
    """
    # Create the holder transform for all four corner shapes
    ctrl = cmds.createNode('transform')
    # The four corners are the same right-angle curve mirrored across the
    # X and Z axes, so build them in one loop instead of four copy-pasted
    # blocks (same shapes, same order as before).
    for sx, sz in [(1, 1), (-1, 1), (1, -1), (-1, -1)]:
        pts = [(sx * 0.9, 0.0, sz * 1.0), (sx * 1.0, 0.0, sz * 1.0), (sx * 1.0, 0.0, sz * 0.9)]
        cnr = cmds.curve(d=1, p=pts, k=range(3))
        cnrShape = cmds.listRelatives(cnr, s=True, ni=True)[0]
        cmds.parent(cnrShape, ctrl, s=True, r=True)
    return ctrl
def crescent(self):
    """
    Create Crescent control object.
    """
    # Create control object.
    # Degree-3 curve: the first three CVs are repeated as the last three so
    # the crescent outline closes back on itself; the knot vector extends
    # past [0, 1] to cover the overlapping span.
    control = cmds.curve(d=3, p=[(0.392, 0.392, -0.000), (-0.000, 0.554, -0.000), (-0.392, 0.392, -0.000),
                                 (-0.554, 0.000, -0.000), (-0.392, 0.228, -0.000), (-0.000, 0.323, -0.000),
                                 (0.392, 0.228, -0.000), (0.554, -0.000, 0.000), (0.392, 0.392, -0.000),
                                 (-0.000, 0.554, -0.000), (-0.392, 0.392, -0.000)],
                         k=[-0.25, -0.125, 0.0, 0.125, 0.25, 0.375, 0.5, 0.625, 0.75, 0.875, 1.0, 1.125, 1.25])
    # Return control
    return control
def cross(self):
    """
    Create a plus/cross-shaped control object.
    """
    # Closed outline of a plus sign in the XY plane; the first point is
    # repeated at the end to close the loop.
    points = [(-0.25, 0.75, 0), (0.25, 0.75, 0), (0.25, 0.25, 0), (0.75, 0.25, 0),
              (0.75, -0.25, 0), (0.25, -0.25, 0), (0.25, -0.75, 0), (-0.25, -0.75, 0),
              (-0.25, -0.25, 0), (-0.75, -0.25, 0), (-0.75, 0.25, 0), (-0.25, 0.25, 0), (-0.25, 0.75, 0)]
    return cmds.curve(d=1, p=points, k=range(len(points)))
def diamond(self):
    """
    Create diamond control object.
    """
    # Closed four-point diamond outline in the XY plane.
    points = [(0.0, 0.5, 0.0), (-0.25, 0.0, 0.0), (0.0, -0.5, 0.0), (0.25, 0.0, 0.0), (0.0, 0.5, 0.0)]
    return cmds.curve(d=1, p=points, k=range(len(points)))
def eye(self):
    """
    Create eye control object.
    """
    # Almond-shaped outline; the first three CVs are repeated at the end
    # to close the loop.
    points = [(1.000, 0.064, 0.000), (-0.000, 0.747, 0.000), (-1.000, 0.064, 0.000), (-1.000, 0.000, 0.000),
              (-1.000, -0.064, 0.000), (-0.000, -0.747, 0.000), (1.000, -0.064, 0.000), (1.000, -0.000, 0.000),
              (1.000, 0.064, 0.000), (-0.000, 0.747, 0.000), (-1.000, 0.064, 0.000)]
    return cmds.curve(d=1, p=points, k=range(len(points)))
def face(self):
    """
    Create face control object.
    """
    # Create control object.
    # Linear curve through the face-outline points; the first two CVs are
    # repeated at the end so the outline closes.
    pts = [(0.573, 0.863, 0.000), (-0.000, 1.047, 0.000), (-0.573, 0.863, 0.000), (-0.770, 0.266, 0.000),
           (-0.750, 0.000, 0.000), (-0.409, -0.656, 0.000), (-0.322, -0.953, 0.000), (-0.000, -1.020, 0.000),
           (0.322, -0.953, 0.000), (0.409, -0.656, 0.000), (0.750, -0.000, 0.000), (0.770, 0.266, 0.000),
           (0.573, 0.863, 0.000), (-0.000, 1.047, 0.000), (-0.573, 0.863, 0.000)]
    # One knot per CV for a degree-1 curve.
    knots = range(len(pts))
    control = cmds.curve(d=1, p=pts, k=knots)
    # Return control name
    return control
def gear(self):
    """
    Create gear control object.
    """
    # (Docstring previously said "face" -- copy-paste error; this builds a gear.)
    # Create control object: a closed degree-1 outline alternating between
    # inner and outer radii to form the gear teeth.
    pts = [(0.160, 0.810, -0.000), (0.460, 0.685, -0.000), (0.610, 0.805, -0.000), (0.805, 0.610, -0.000),
           (0.685, 0.460, -0.000), (0.810, 0.160, -0.000), (1.000, 0.140, -0.000), (1.000, -0.135, 0.000),
           (0.810, -0.160, 0.000), (0.685, -0.460, 0.000), (0.805, -0.610, 0.000), (0.610, -0.805, 0.000),
           (0.460, -0.685, 0.000), (0.160, -0.810, 0.000), (0.140, -1.000, 0.000), (-0.135, -1.000, 0.000),
           (-0.160, -0.810, 0.000), (-0.460, -0.685, 0.000), (-0.610, -0.805, 0.000), (-0.805, -0.610, 0.000),
           (-0.685, -0.460, 0.000), (-0.810, -0.160, 0.000), (-1.000, -0.140, 0.000), (-1.000, 0.135, -0.000),
           (-0.810, 0.160, -0.000), (-0.685, 0.460, -0.000), (-0.805, 0.610, -0.000), (-0.610, 0.805, -0.000),
           (-0.460, 0.685, -0.000), (-0.160, 0.810, -0.000), (-0.140, 1.000, -0.000), (0.135, 1.000, -0.000),
           (0.160, 0.810, -0.000)]
    # Uniform knots, one per CV (degree 1).
    knots = [0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0, 11.0, 12.0, 13.0, 14.0, 15.0, 16.0, 17.0, 18.0,
             19.0, 20.0, 21.0, 22.0, 23.0, 24.0, 25.0, 26.0, 27.0, 28.0, 29.0, 30.0, 31.0, 32.0]
    control = cmds.curve(d=1, p=pts, k=knots)
    # Return control name
    return control
def hex(self):
    """
    Create hexagonal control object.
    """
    # Six-sided outline; first point repeated at the end to close the loop.
    points = [(0.0, 1.0, 0.0), (0.333, 0.5, 0.0), (0.333, -0.5, 0.0), (0.0, -1.0, 0.0), (-0.333, -0.5, 0.0),
              (-0.333, 0.5, 0.0), (0.0, 1.0, 0.0)]
    return cmds.curve(d=1, p=points, k=range(len(points)))
def line(self):
    """
    Create line control object.
    """
    # Straight three-CV segment along +X.
    points = [(0, 0, 0), (0.5, 0, 0), (1, 0, 0)]
    return cmds.curve(d=1, p=points, k=range(len(points)))
def locator(self):
    """
    Create locator (axis cross) control object.
    """
    # Three axis segments drawn as one linear curve, passing back through
    # the origin between the X, Y and Z strokes.
    points = [(-0.5, 0.0, 0.0), (0.5, 0.0, 0.0), (0.0, 0.0, 0.0), (0.0, 0.5, 0.0), (0.0, -0.5, 0.0), (0.0, 0.0, 0.0),
              (0.0, 0.0, -0.5), (0.0, 0.0, 0.5)]
    return cmds.curve(d=1, p=points, k=range(len(points)))
def pyramid(self):
    """
    Create pyramid control object.
    """
    # Square base plus the edges up to the apex, traced as one curve.
    points = [(-0.5, -0.5, 0.5), (0.5, -0.5, 0.5), (0.5, -0.5, -0.5), (-0.5, -0.5, -0.5), (-0.5, -0.5, 0.5),
              (0.0, 0.5, 0.0), (-0.5, -0.5, -0.5), (0.5, -0.5, -0.5), (0.0, 0.5, 0.0), (0.5, -0.5, 0.5)]
    return cmds.curve(d=1, p=points, k=range(len(points)))
def sphere(self):
    """
    Create sphere control object.
    """
    # Create control object.
    # One continuous degree-1 curve tracing circles in the XZ plane (y=0),
    # the XY plane (z=0) and the YZ plane (x=0), so the control reads as a
    # sphere from any viewing angle.
    pts = [(0.5, 0.0, 0.0), (0.462, 0.0, 0.19), (0.35, 0.0, 0.35),
           (0.19, 0.0, 0.46), (0.0, 0.0, 0.5), (-0.19, 0.0, 0.46),
           (-0.35, 0.0, 0.35), (-0.46, 0.0, 0.19), (-0.5, 0.0, 0.0),
           (-0.46, 0.0, -0.19), (-0.35, 0.0, -0.35), (-0.19, 0.0, -0.46),
           (0.0, 0.0, -0.5), (0.19, 0.0, -0.46), (0.35, 0.0, -0.35),
           (0.46, 0.0, -0.19), (0.5, 0.0, 0.0), (0.46, -0.19, 0.0),
           (0.35, -0.35, 0.0), (0.19, -0.46, 0.0), (0.0, -0.5, 0.0),
           (-0.19, -0.46, 0.0), (-0.35, -0.35, 0.0), (-0.46, -0.19, 0.0),
           (-0.5, 0.0, 0.0), (-0.46, 0.19, 0.0), (-0.35, 0.35, 0.0),
           (-0.19, 0.46, 0.0), (0.0, 0.5, 0.0), (0.19, 0.46, 0.0),
           (0.35, 0.35, 0.0), (0.46, 0.19, 0.0), (0.5, 0.0, 0.0),
           (0.46, 0.0, 0.19), (0.35, 0.0, 0.35), (0.19, 0.0, 0.46),
           (0.0, 0.0, 0.5), (0.0, 0.24, 0.44), (0.0, 0.44, 0.24),
           (0.0, 0.5, 0.0), (0.0, 0.44, -0.24), (0.0, 0.24, -0.44),
           (0.0, 0.0, -0.5), (0.0, -0.24, -0.44), (0.0, -0.44, -0.24),
           (0.0, -0.5, 0.0), (0.0, -0.44, 0.24), (0.0, -0.24, 0.44),
           (0.0, 0.0, 0.5)]
    # One knot per CV for a degree-1 curve.
    knots = range(len(pts))
    control = cmds.curve(d=1, p=pts, k=knots)
    # Return control name
    return control
def sphereAnchor(self):
    """
    Create sphereAnchor control object.
    """
    # Create control object.
    # A small wire sphere (radius 0.05) centred at (0, 1, 0), with a final
    # CV at the origin so the curve drops a straight "anchor" line from the
    # sphere down to the pivot.
    pts = [(0.0, 1.0, -0.05), (0.0, 0.981, -0.0462), (0.0, 0.965, -0.035),
           (0.0, 0.954, -0.019), (0.0, 0.95, 0.0), (0.0, 0.954, 0.019),
           (0.0, 0.965, 0.035), (0.0, 0.981, 0.046), (0.0, 1.0, 0.05),
           (0.0, 1.019, 0.046), (0.0, 1.035, 0.035), (0.0, 1.046, 0.019),
           (0.0, 1.05, 0.0), (0.0, 1.046, -0.019), (0.0, 1.035, -0.035),
           (0.0, 1.019, -0.046), (0.0, 1.0, -0.05), (-0.019, 1.0, -0.046),
           (-0.035, 1.0, -0.035), (-0.046, 1.0, -0.019), (-0.05, 1.0, 0.0),
           (-0.046, 1.0, 0.019), (-0.035, 1.0, 0.035), (-0.019, 1.0, 0.046),
           (0.0, 1.0, 0.05), (0.019, 1.0, 0.046), (0.035, 1.0, 0.035),
           (0.046, 1.0, 0.019), (0.05, 1.0, 0.0), (0.046, 1.0, -0.019),
           (0.035, 1.0, -0.035), (0.019, 1.0, -0.046), (0.0, 1.0, -0.05),
           (0.0, 0.981, -0.046), (0.0, 0.965, -0.035), (0.0, 0.954, -0.019),
           (0.0, 0.95, 0.0), (0.024, 0.956, 0.0), (0.044, 0.976, 0.0),
           (0.05, 1.0, 0.0), (0.044, 1.024, 0.0), (0.024, 1.044, 0.0),
           (0.0, 1.05, 0.0), (-0.024, 1.044, 0.0), (-0.044, 1.024, 0.0),
           (-0.05, 1.0, 0.0), (-0.044, 0.976, 0.0), (-0.024, 0.956, 0.0),
           (0.0, 0.95, 0.0), (0.0, 0.0, 0.0)]
    # One knot per CV for a degree-1 curve.
    knots = range(len(pts))
    control = cmds.curve(d=1, p=pts, k=knots)
    # Return control name
    return control
def spiral(self):
    """
    Create spiral control object.
    """
    # Build Point Array.
    # CVs rise along +Y while circling outward with a growing radius.
    pts = [(0.0, 0.0, 0.0), (0.0, 0.1, 0.0), (0.0, 0.2, 0.0),
           (0.0, 0.28, 0.0), (0.0, 0.288, 0.0), (0.0, 0.325, 0.0),
           (0.0, 0.346, -0.05), (0.01, 0.35, -0.12), (0.13, 0.38, -0.11),
           (0.21, 0.4, -0.02), (0.16, 0.44, 0.14), (0.0, 0.46, 0.2),
           (-0.14, 0.5, 0.12), (-0.21, 0.5, -0.06), (-0.18, 0.525, -0.28),
           (0.0, 0.55, -0.39), (0.28, 0.576, -0.312), (0.4, 0.615, -0.09),
           (0.3, 0.67, 0.186), (0.0, 0.7, 0.28), (-0.28, 0.728, 0.187),
           (-0.4, 0.768, -0.09), (-0.336, 0.823, -0.428), (0.0, 0.847, -0.595),
           (0.425, 0.867, -0.486), (0.589, 0.9, -0.09), (0.435, 0.97, 0.311),
           (0.158, 0.997, 0.415), (0.0, 1.0, 0.407)]
    # Build Knot Array.
    # First and last knot values are repeated so the degree-3 curve is
    # clamped: it starts and ends exactly on the end CVs.
    knots = [0, 0]
    knots.extend(range(len(pts) - 2))
    knots.extend([len(pts) - 3, len(pts) - 3])
    degree = 3
    # Create control object
    control = cmds.curve(d=degree, p=pts, k=knots)
    # Return control name
    return control
def square(self):
    """
    Create square control object.
    """
    # Closed unit square in the XY plane.
    points = [(-0.5, 0.5, 0.0), (-0.5, -0.5, 0.0), (0.5, -0.5, 0.0), (0.5, 0.5, 0.0), (-0.5, 0.5, 0.0)]
    return cmds.curve(d=1, p=points, k=range(len(points)))
def tab(self):
    """
    Create tab control object.
    """
    # Degree-3 curve; the end knot values are tripled so the curve is
    # clamped to (passes exactly through) the first and last CVs.
    pts = [(-0.500, -0.500, 0.000), (-0.534, 0.356, 0.000), (-0.279, 0.500, -0.000), (0.279, 0.500, -0.000),
           (0.534, 0.356, 0.000), (0.500, -0.500, 0.000), (0.500, -0.500, 0.000), (0.500, -0.500, 0.000),
           (-0.500, -0.500, 0.000)]
    knots = [0.0, 0.0, 0.0, 0.166666666667, 0.333333333333, 0.5, 0.666666666667, 0.833333333333, 1.0, 1.0, 1.0]
    control = cmds.curve(d=3, p=pts, k=knots)
    # Return Control
    return control
def teardrop(self):
    """
    Create teardrop control object.
    """
    # Closed linear outline shaped like a drop hanging from its apex;
    # the first point is repeated at the end to close the loop.
    points = [(-0.000, 0.554, 0.000), (-0.015, 0.548, 0.000), (-0.554, 0.109, 0.000), (-0.392, -0.392, 0.000),
              (-0.000, -0.554, 0.000), (0.392, -0.392, 0.000), (0.554, 0.109, 0.000), (0.015, 0.548, 0.000),
              (-0.000, 0.554, 0.000)]
    return cmds.curve(d=1, p=points, k=range(len(points)))
def text(self, text='text'):
    """
    Create text control object: NURBS curves spelling out *text*, centred
    at the origin and normalised to roughly 1 unit in size.
    @param text: Text string
    @type text: str
    """
    # Check text string
    if not text: raise Exception('Empty string error!')
    # Create Text
    # textCurve = cmds.textCurves(ch=False,f='Arial',t=text)
    textCurve = cmds.textCurves(ch=False, f='Utopia-Bold', t=text)
    # Parent all letter shapes to a single transform
    textShapes = cmds.ls(cmds.listRelatives(textCurve, ad=True), type='nurbsCurve')
    for textShape in textShapes:
        # Un-parent the letter transform, freeze it so the shape keeps its
        # world position, then move the bare shape under the main transform
        # and discard the now-empty letter transform.
        textXform = cmds.listRelatives(textShape, p=True)[0]
        textXform = cmds.parent(textXform, w=True)
        cmds.makeIdentity(textXform, apply=True, t=True, r=True, s=True, n=False)
        cmds.parent(textShape, textCurve, r=True, s=True)
        cmds.delete(textXform)
    # Delete unused transforms
    textChildren = cmds.listRelatives(textCurve, c=True, type='transform')
    if textChildren: cmds.delete(textChildren)
    # Position text: centre the pivot, then move the object so that pivot
    # lands on the world origin.
    cmds.select(textCurve)
    mel.eval('CenterPivot')
    piv = cmds.xform(textCurve, q=True, ws=True, rp=True)
    cmds.move(-piv[0], -piv[1], -piv[2], textCurve, ws=True, r=True)
    # Scale text uniformly so its larger bounding-box dimension becomes 1 unit.
    width = (cmds.getAttr(textCurve[0] + '.boundingBoxMaxX') - cmds.getAttr(textCurve[0] + '.boundingBoxMinX'))
    height = (cmds.getAttr(textCurve[0] + '.boundingBoxMaxY') - cmds.getAttr(textCurve[0] + '.boundingBoxMinY'))
    if width > height:
        sc = 1.0 / width
    else:
        sc = 1.0 / height
    cmds.scale(sc, sc, sc, textCurve)
    # Freeze Transforms
    cmds.makeIdentity(textCurve, apply=True, t=True, r=True, s=True, n=False)
    # Return result
    return textCurve
def controlShape(self, transform, controlType, translate=(0, 0, 0), rotate=(0, 0, 0), scale=1, colour=-1, text='',
                 orient=True):
    """
    Add control shape to an existing transform.
    @param transform: Transform to add control shape to.
    @type transform: str
    @param controlType: Type of control to build.
    @type controlType: str
    @param translate: Translational offset for control curve.
    @type translate: list or tuple
    @param rotate: Rotational offset for control curve.
    @type rotate: list or tuple
    @param scale: Uniform scale offset for control curve (applied to all three axes).
    @type scale: float
    @param colour: The colour of the control curve (negative = derive from name prefix).
    @type colour: int
    @param text: Text value for "text" type control curve.
    @type text: str
    @param orient: Orient control to transform.
    @type orient: bool
    """
    # Create Control on a temporary transform; only its shapes survive.
    if controlType == 'text':
        control = self.create(controlType, 'temp_control_transform', text=text)
    else:
        control = self.create(controlType, 'temp_control_transform')
    controlShapeList = cmds.listRelatives(control, s=True)
    # Match Control to the target: with orient=False the rotation offset is
    # applied BEFORE parenting (world space); with orient=True it is applied
    # after, i.e. relative to the target transform.
    if not orient: cmds.setAttr(control + '.rotate', rotate[0], rotate[1], rotate[2])
    cmds.delete(cmds.pointConstraint(transform, control))
    cmds.parent(control, transform)
    cmds.setAttr(control + '.translate', translate[0], translate[1], translate[2])
    if orient: cmds.setAttr(control + '.rotate', rotate[0], rotate[1], rotate[2])
    cmds.setAttr(control + '.scale', scale, scale, scale)
    cmds.makeIdentity(control, apply=True, t=1, r=1, s=1, n=0)
    # Re-parent each shape under the target transform, rename to match the
    # transform, and push it to the bottom of the child list.
    for i in range(len(controlShapeList)):
        controlShapeList[i] = cmds.parent(controlShapeList[i], transform, r=True, s=True)[0]
        controlShapeList[i] = cmds.rename(controlShapeList[i], transform + 'Shape' + str(i + 1))
        cmds.reorder(controlShapeList[i], b=True)
    # Delete temp transform
    cmds.delete(control)
    # Colour Control
    self.colourControl(transform, colour)
    # Return result
    return controlShapeList
def colourControl(self, control, colour=-1):
    """
    Set the override colour for the specified control.
    Apply a colour by index, or if arg "colour" < 0, set colour based on name prefix.
    @param control: The control object to set the colour for.
    @type control: str
    @param colour: The colour of the control curve as an integer index.
    @type colour: int
    """
    # Get control transform: if a shape node was passed, climb to its parent.
    if not glTools.utils.transform.isTransform(control):
        controlParent = cmds.listRelatives(control, p=True)
        if not controlParent:
            raise Exception('Unable to determine controls transform!')
        control = controlParent[0]
    # Determine Colour from the name prefix via the overrideId table
    # (presumably cn/C = centre, lf/L = left, rt/R = right -- confirm
    # against self.overrideId); anything else falls back to index 17.
    if colour < 0:
        if control.startswith('cn') or control.startswith('C'):
            colour = self.overrideId['cn']
        elif control.startswith('lf') or control.startswith('L'):
            colour = self.overrideId['lf']
        elif control.startswith('rt') or control.startswith('R'):
            colour = self.overrideId['rt']
        else:
            colour = 17
    # Set Colour on every shape under the control transform.
    controlShapes = cmds.listRelatives(control, s=True)
    for controlShape in controlShapes:
        cmds.setAttr(controlShape + '.overrideEnabled', 1)
        cmds.setAttr(controlShape + '.overrideColor', colour)
    # Return result
    return colour
def anchorCurve(self, control, anchor, template=True, selectable=False):
    """
    Create an anchor curve for the specified control: a two-CV line whose
    first end stays on the control and whose second end follows *anchor*.
    @param control: The control object to create an anchor curve for.
    @type control: str
    @param anchor: The transform that the anchor curve will be attached to.
    @type anchor: str
    @param template: Set the anchor curve override type to template.
    @type template: bool
    @param selectable: NOTE(review): accepted but never read in this
                       implementation -- confirm intended behaviour.
    @type selectable: bool
    """
    # Check control
    if not cmds.objExists(control):
        raise Exception('Control "' + control + '" does not exist!')
    if not cmds.objExists(anchor):
        raise Exception('Anchor transform "' + anchor + '" does not exist!')
    # Create curve shape: a simple two-CV linear curve.
    crv = cmds.curve(p=[(0, 0, 0), (0, 1, 0)], k=[0, 1], d=1, n=control + 'Anchor')
    crvShape = cmds.listRelatives(crv, s=True, pa=True)
    if not crvShape:
        raise Exception('Unable to determine shape for curve "' + crv + '"!')
    # Create Curve Locators driving the two CVs; both start at the control
    # and are hidden.
    crvLoc = glTools.utils.curve.locatorCurve(crv, locatorScale=0.0, local=True, prefix=control)
    cmds.parent(crvLoc, control)
    cmds.setAttr(crvLoc[0] + '.t', 0, 0, 0)
    cmds.setAttr(crvLoc[1] + '.t', 0, 0, 0)
    cmds.setAttr(crvLoc[0] + '.v', 0)
    cmds.setAttr(crvLoc[1] + '.v', 0)
    # Rename and Parent curve shape under the control transform.
    crvShape = cmds.parent(crvShape[0], control, r=True, s=True)[0]
    crvShape = cmds.rename(crvShape, control + 'Shape0')
    cmds.reorder(crvShape, b=True)
    # Colour Curve Shape - Light Grey
    cmds.setAttr(crvShape + '.overrideEnabled', 1)
    cmds.setAttr(crvShape + '.overrideColor', 3) # Light Grey
    # Delete Original Curve Transform
    cmds.delete(crv)
    # Connect to anchor: the second CV's locator follows the anchor transform.
    cmds.pointConstraint(anchor, crvLoc[1])
    # Template
    if template: glTools.utils.base.displayOverride(crvShape, overrideEnable=1, overrideDisplay=1)
    # Set channel states
    glTools.utils.channelState.ChannelState().setFlags([2, 2, 2, 2, 2, 2, 2, 2, 2, 1], crvLoc)
    # Return result
    return crvShape
|
# -*- coding: utf-8 -*-
"""
Created on Sat Jul 1 22:18:39 2017
@author: Administrator
"""
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from collections import Counter
from sklearn.cluster import DBSCAN
from sklearn import metrics
from sklearn.preprocessing import StandardScaler
nrows = 10000  # row limit for quick test runs; set to None to load everything
#nrows = None
print('loading data starting...')
train = pd.read_csv(r'F:/data/biendata/MOBIKE_CUP_2017/data_process/train_process.csv', nrows=nrows)
test = pd.read_csv(r'F:/data/biendata/MOBIKE_CUP_2017/data_process/test_process.csv', nrows=nrows)
print('loading data finishing...')
# Density-based clustering (DBSCAN) on the ride start coordinates.
print('cluster data starting...')
db = DBSCAN(eps=0.3, min_samples=10).fit(train[['start_loc_lats','start_loc_lons']])
labels = db.labels_
# Number of clusters in labels, ignoring noise if present.
n_clusters_ = len(set(labels)) - (1 if -1 in labels else 0)
|
# Read the number of test cases, then for each case read two
# whitespace-separated integers and print their sum.
n = int(input())
for _ in range(n):
    # split() with no argument tolerates any run of whitespace between the
    # numbers (the original split(" ") broke on double spaces); the for-loop
    # also terminates cleanly for n <= 0 instead of looping forever.
    parts = input().split()
    print(int(parts[0]) + int(parts[1]))
|
#!/usr/bin/env python
#all imports
import os
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager
from flask.ext.openid import OpenID
from flask.ext.mail import Mail
from flask.ext.babel import Babel, lazy_gettext
from config import basedir, ADMINS, MAIL_SERVER, MAIL_PORT, MAIL_USERNAME, MAIL_PASSWORD
from momentjs import momentjs
# Create application
app = Flask(__name__)
app.config.from_object('config')  # settings loaded from the 'config' module
db = SQLAlchemy(app)
mail = Mail(app)
babel = Babel(app)
# Flask-Login setup: anonymous users are redirected to the 'login' view.
lm = LoginManager()
lm.init_app(app)
lm.login_view = 'login'
lm.login_message = lazy_gettext('Please log in to access this page.')
# OpenID store lives in the tmp directory under the project base dir.
oid = OpenID(app, os.path.join(basedir, 'tmp'))
if not app.debug:
    # Production only: email ERROR-level log records to the admins.
    import logging
    from logging.handlers import SMTPHandler
    credentials = None
    if MAIL_USERNAME or MAIL_PASSWORD:
        credentials = (MAIL_USERNAME, MAIL_PASSWORD)
    mail_handler = SMTPHandler((MAIL_SERVER, MAIL_PORT), 'no-reply@' + MAIL_SERVER, ADMINS, 'umbase failure', credentials)
    mail_handler.setLevel(logging.ERROR)
    app.logger.addHandler(mail_handler)
if not app.debug:
    # Production only: rotating file log (1 MB x 10 files) at INFO level.
    # NOTE(review): this guard duplicates the one above; both handlers could
    # live under a single `if not app.debug:` block.
    import logging
    from logging.handlers import RotatingFileHandler
    file_handler = RotatingFileHandler('tmp/umbase.log', 'a', 1*1024*1024, 10)
    file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s :%(lineno)d]'))
    app.logger.setLevel(logging.INFO)
    file_handler.setLevel(logging.INFO)
    app.logger.addHandler(file_handler)
    app.logger.info('umbase startup')
# Expose the momentjs helper to Jinja templates.
app.jinja_env.globals['momentjs']=momentjs
# Imported at the bottom, after `app` exists -- presumably to avoid a
# circular import with the views/models modules (standard Flask pattern).
from main import views, models
if __name__ == "__main__":
    app.run()
|
import pymysql

# Connect to the local 'teste' database; DictCursor returns rows as dicts.
connection = pymysql.connect(host='localhost',
                             user='root',
                             password='',
                             database='teste',
                             cursorclass=pymysql.cursors.DictCursor)
delete_query = "delete from employee where _id = 1"
try:
    # Fix: the cursor was never closed -- `with` closes it automatically.
    with connection.cursor() as cursor:
        cursor.execute(delete_query)
    connection.commit()
    print("Record delete")
except Exception as e:
    connection.rollback()
    print("Exception Occurred: ", e)
finally:
    # Fix: guarantee the connection is released even if rollback/print fail.
    connection.close()
def capitalize_first(text):
    """Return *text* with only its first character upper-cased.

    Unlike str.capitalize(), the remaining characters are left untouched
    (so 'hELLO' -> 'HELLO', not 'Hello'). Safe on the empty string.
    """
    # Replaces the original char-by-char loop (which also shadowed the
    # builtin `list` and kept an unused `size` variable).
    return text[:1].upper() + text[1:]


if __name__ == '__main__':
    # Read a word and echo it with a capitalised first letter, without a
    # trailing newline -- same output as the original script.
    raw_word = input('')
    print(capitalize_first(raw_word), end='')
|
from typing import List
from tinkoff.investments.api.base import BaseTinkoffInvestmentsAPI
from tinkoff.investments.model.portfolio import (
Currencies,
CurrencyPosition,
Portfolio,
PortfolioPosition,
)
from tinkoff.investments.model.user.accounts import BrokerAccountID
class PortfolioAPI(BaseTinkoffInvestmentsAPI):
    """Client for the /portfolio REST endpoints."""

    @staticmethod
    def _account_params(broker_account_id):
        # type: (BrokerAccountID) -> dict
        """Build the query params shared by every portfolio endpoint
        (previously duplicated in both public methods)."""
        if broker_account_id is not None:
            return {"brokerAccountId": broker_account_id}
        return {}

    async def get_positions(self, broker_account_id=None):
        # type: (BrokerAccountID) -> List[PortfolioPosition]
        """Return the portfolio positions, optionally for a specific account."""
        payload = await self._request(
            method="GET",
            path="/portfolio",
            params=self._account_params(broker_account_id),
        )
        return Portfolio.from_dict(payload).positions  # type: ignore

    async def get_currencies(self, broker_account_id=None):
        # type: (BrokerAccountID) -> List[CurrencyPosition]
        """Return the currency balances, optionally for a specific account."""
        payload = await self._request(
            method="GET",
            path="/portfolio/currencies",
            params=self._account_params(broker_account_id),
        )
        return Currencies.from_dict(payload).currencies  # type: ignore
|
import json
import re
import os
import datetime
from pprint import pprint
# Today's weekday name (e.g. "Monday"), used to pick the matching entry
# from the JSON config.
today_day = datetime.datetime.now()
day_of_the_week = today_day.strftime("%A")
# Module-level accumulators filled in by the functions below.
final_feature = {}
trigger_list = []
verify_list = []
def read_input(filename,day_of_the_week):
    """Load the JSON config *filename* and publish the entry for
    *day_of_the_week* into module-level globals (userverify, usertrigger,
    features, node, triggers) plus the top-level path/job_path values.
    (Python 2 module: uses dict.iteritems and print statements.)"""
    try:
        with open(filename, 'r') as data_file:
            data = json.load(data_file)
        global userverify
        global usertrigger
        global features
        global node
        global triggers
        global path
        global job_path
        path = data["path"]
        job_path = data["job_path"]
        # Only the entry whose key matches today's weekday is unpacked;
        # each day maps to a one-element list of settings dicts.
        for key, value in data.iteritems():
            if key == day_of_the_week:
                dict_data = data[day_of_the_week]
                userverify = dict_data[0].get('user_verify')
                node = dict_data[0].get('node')
                usertrigger = dict_data[0].get('user_trigger')
                features = dict_data[0].get('features')
                triggers = dict_data[0].get('triggers')
    except:
        # NOTE(review): bare except hides the actual failure (missing file
        # vs bad JSON vs missing key) -- consider catching
        # IOError/ValueError/KeyError separately.
        print 'COULD NOT LOAD:', filename
def parse_file(path):
    """Walk *path*, scan every .tcl file, and collect lines matching the
    psat_trigger / verify patterns into the global trigger_list and
    verify_list (whitespace and '{' characters stripped)."""
    global trigger_list
    global verify_list
    pattern = re.compile(r'{psat_trigger\S*\s{')
    pattern1 = re.compile(r'{verify\S*\s{')
    try:
        for subdir, dirs, files in os.walk(path):
            for file in files:
                if file.endswith(".tcl"):
                    # NOTE(review): uses path+'/'+file, not subdir -- files
                    # found in nested directories would open the wrong path.
                    # Confirm the .tcl files are expected only at top level.
                    x = path+'/'+file
                    with open(x) as f:
                        for line in f:
                            if pattern.search(line):
                                # NOTE(review): the alternation '{|{' lists
                                # '{' twice; possibly '}' was intended -- as
                                # written, closing braces are NOT removed.
                                list_new = re.sub(r'\s|{|{', r'', line)
                                trigger_list.append(list_new.strip())
                            if pattern1.search(line):
                                list_new1 = re.sub(r'\s|{|{', r'', line)
                                verify_list.append(list_new1.strip())
    except ValueError:
        # NOTE(review): os.walk/open do not normally raise ValueError, so
        # this handler is unlikely to ever fire -- verify the intended
        # failure mode.
        print 'No TCL file found in the given path:', path
def create_final_feature_list(features):
    """For each feature name, gather the entries of the global trigger_list
    and verify_list that contain the name as a substring, then append the
    user-supplied usertrigger/userverify globals, storing everything in the
    global final_feature dict."""
    for i in features:
        final_feature[i] = {}
        final_feature[i]['verification'] = []
        final_feature[i]['trigger'] = []
        # Substring match: any collected line mentioning the feature counts.
        for item in trigger_list:
            if i in item:
                final_feature[i]['trigger'].append(item)
        for item in verify_list:
            if i in item:
                final_feature[i]['verification'].append(item)
        # User-specified extras from the JSON config apply to every feature.
        final_feature[i]['trigger'].extend(usertrigger)
        final_feature[i]['verification'].extend(userverify)
if __name__ == "__main__":
    # Load today's config entry, scan the TCL files the config points at,
    # then print the per-feature trigger/verification map.
    filename = "input.json"
    read_input(filename,day_of_the_week)
    parse_file(path)
    create_final_feature_list(features)
    pprint(final_feature)
|
from kafka import KafkaProducer
import requests
import json
import time
producer = KafkaProducer(bootstrap_servers = 'localhost:9099')
cities=['London', 'Paris', 'Bern', 'Stockholm', 'Madrid', 'Vienna']
# URL pieces that never change, hoisted out of the polling loop.
api_address = 'http://api.openweathermap.org/data/2.5/weather?q='
appid= '&appid='
# NOTE(review): API key hard-coded in source -- move it to an environment
# variable or secret store before sharing/deploying this script.
api_key = '7aea328252d1145f04f11f48470d20d4'
# Poll current weather for each city forever, forwarding each JSON
# response to the 'api' Kafka topic as UTF-8 bytes.
while True:
    for city in cities:
        url = api_address + city + appid + api_key
        msg = requests.get(url).json()
        print(type(msg))
        # Fix: announce the message BEFORE sending; previously the
        # "Sending msg" line was printed only after the send and a 2 s
        # sleep, making the log order misleading.
        print("Sending msg \"{}\"".format(msg))
        producer.send('api', json.dumps(msg).encode('utf-8'))
        print("Message sent!")
        time.sleep(2)
|
"""
Ronda 1
problema 1
"""
def dameRes(casoActual):
    """Rearrange the word by placing each successive character at one end
    of the result: prepend it when it is >= the current first character,
    otherwise append it."""
    result = casoActual[0]
    for ch in casoActual[1:]:
        if ch >= result[0]:
            result = ch + result
        else:
            result = result + ch
    return result
# Read the number of cases, then print each answer in "Case #i: result"
# form (Python 2 script: raw_input / print statement).
t = int(raw_input())
for ti in range(1, t+1):
    # Each input line holds one word (only the first token is used).
    casoActual = [s for s in raw_input().split(" ")]
    #casoActual[0] = int(casoActual[0])
    print "Case #{}: {}".format(ti, dameRes(casoActual[0]))
"""Log some information about a specific construction file"""
import sys
from construction import ConstructionReader
def get_info(construction_file):
    """Print a summary of a construction file: section count, selections,
    palette blocks, and the position/size/contents of every section."""
    with ConstructionReader(construction_file) as construction:
        print(f'Section count: {len(construction.sections)}')
        for index, selection in enumerate(construction.selection):
            print(f'Selection {index}, {selection}')
        for index, block in enumerate(construction.palette):
            print(f'Block {index}, {block}')
        for index, (posx, posy, posz, sizex, sizey, sizez, _, _) in enumerate(construction.sections):
            print(f'Section {index}, Pos:({posx}, {posy}, {posz}), Size:({sizex}, {sizey}, {sizez})')
            section = construction.read(index)
            print('\t', section.blocks)
            for entity in section.entities:
                print('\t', entity)
            for entity in section.block_entities:
                print('\t', entity)
if __name__ == '__main__':
    # Each command-line argument is a construction file path: echo it,
    # then print its summary.
    for arg in sys.argv[1:]:
        print(arg)
        get_info(arg)
|
class Vector():
    """
    A fixed-size vector of floats.

    Constructor accepts:
      * list of floats      -> used directly as the vector values
      * int n               -> [0.0, 1.0, ..., n - 1]
      * (start, stop) tuple -> floats stepping by +/-1 from start toward
                               stop (stop excluded)

    Fix: +, - and reflected + now return a NEW Vector instead of mutating
    an operand in place (the previous behaviour violated the operator
    contract: `v1 + v2` silently changed v1).
    """

    def __init__(self, values):
        # type() comparisons (not isinstance) are deliberate: they reject
        # subclasses such as bool, matching only the supported formats.
        if type(values) == list:
            self._initList(values)
        elif type(values) == int:
            self._initInt(values)
        elif type(values) == tuple:
            self._initTuple(values)
        else:
            raise TypeError(
                "Param format ({}) not supported".format(type(values)))
        self.size = len(self.values)

    def _initTuple(self, values):
        """Fill self.values counting from values[0] toward values[1]
        (excluded), stepping by +1 or -1 depending on direction."""
        # Guard clause first (was a trailing else in the original).
        if len(values) != 2:
            raise ValueError("Tuple param must have exactly two values")
        my_values = []
        i = float(values[0])
        if i < values[1]:
            while i < values[1]:
                my_values.append(i)
                i += 1
        else:
            while i > values[1]:
                my_values.append(i)
                i -= 1
        self.values = my_values

    def _initInt(self, values):
        """Fill self.values with [0.0, 1.0, ..., values - 1] (empty for
        non-positive input, as before)."""
        self.values = [float(k) for k in range(values)]

    def _initList(self, values):
        """Use *values* directly after checking every element is a float."""
        for val in values:
            if type(val) != float:
                raise TypeError(
                    "Vectors Values's type must be {}".format(float))
        self.values = values

    def __iter__(self):
        # NOTE: a single shared cursor -- nested iteration over the same
        # instance is not supported (kept for backward compatibility).
        self.n = 0
        return self

    def __next__(self):
        if self.n < self.size:
            self.n += 1
            return self.values[self.n - 1]
        raise StopIteration

    def __str__(self):
        """Comma-separated values, e.g. '1.0, 2.0' (fixes the 'fisrt' typo
        in the old first-element flag)."""
        return ", ".join(str(val) for val in self.values)

    def _check_operand(self, other):
        """Raise unless *other* is a Vector of the same size."""
        if not isinstance(other, Vector):
            raise TypeError("a Vector can only be added to another Vector")
        if len(self.values) != len(other.values):
            # Fix: the two halves of this message previously joined without
            # a space ("addedto").
            raise ValueError(
                "a Vector can only be added "
                "to another vector of the same size")

    def __add__(self, other):
        """Return a new Vector of elementwise sums; operands unchanged."""
        self._check_operand(other)
        return Vector([a + b for a, b in zip(self.values, other.values)])

    def __radd__(self, other):
        """Reflected addition; mirrors __add__."""
        self._check_operand(other)
        return Vector([b + a for a, b in zip(self.values, other.values)])

    def __sub__(self, other):
        """Return a new Vector of elementwise differences; operands unchanged."""
        self._check_operand(other)
        return Vector([a - b for a, b in zip(self.values, other.values)])
if __name__ == "__main__":
    # Quick manual demo of the three constructor forms plus += / -=.
    my_vector = Vector([0.0, 1.0, 2.0, 3.0])
    my_vector2 = Vector(4)
    my_vector3 = Vector((3, 1))
    print("my_vector ==", my_vector)
    print("my_vector2==", my_vector2)
    print("my_vector3==", my_vector3)
    my_vector += my_vector2
    print("\nmy_vector after addition==", my_vector)
    my_vector -= my_vector2
    print("\nmy_vector after substraction==", my_vector)
|
number = int(input('Enter number : '))
count = 0
counter = 1
while counter <= number:
if number % counter ==0:
count +=1
counter +=1
if count == 2:
print(number, ' is a prime')
else:
print(number, 'is not a prime') |
from django.db import models
# Create your models here.
class User(models.Model):
    """A person who can own cars and ride as a passenger.

    Reverse relations declared on Car:
      user.owns             -> cars with this user as owner (one-to-many)
      user.back_seat_driver -> cars this user rides in (many-to-many)
    """
    name = models.CharField(max_length=45)
    created_at = models.DateTimeField(auto_now_add=True)  # set once on insert
    updated_at = models.DateTimeField(auto_now=True)  # refreshed on every save
    #objects object
    # one to many relationship
    # owns = [Car objects]
    # many to many relationship
    # back_seat_driver = [Car objects]
    def __repr__(self):
        return f'<User object: name={self.name} id={self.id}>'
class CarManager(models.Manager):
    """Manager that validates form data before creating a Car."""

    def validate_car(self, form_data):
        """Validate *form_data* (expects 'name' and 'owner_id' keys) and
        create the Car when everything passes.

        Returns {'status': True, 'car': car} on success, otherwise
        {'status': False, 'errors': [messages]}.
        """
        # (Leftover debug print statements removed.)
        errors = []
        if len(form_data['name']) < 1:
            # name was not there
            errors.append('Name must be present!')
        if not form_data['name'].isalpha():
            # NOTE(review): isalpha() is False for the empty string too, so
            # an empty name collects both messages (original behaviour kept).
            errors.append('Name can not have numbers!')
        owner = User.objects.filter(id=form_data['owner_id'])
        if len(owner) < 1:
            errors.append('Car must have an owner!')
        if not errors:
            # Passed validations: create and return the car.
            car = self.create(name=form_data['name'], owner=owner[0])
            return {'status': True, 'car': car}
        # Failed validations: hand the messages back to the view.
        return {'status': False, 'errors': errors}
class Car(models.Model):
    """A car with one owning User and any number of passenger Users."""
    name = models.CharField(max_length=45)
    # NOTE(review): no on_delete argument -- this targets Django < 2.0,
    # where ForeignKey implicitly cascaded; adding it now would be a
    # behavioural/schema decision for the project.
    owner = models.ForeignKey(User, related_name="owns")
    # owner = User object
    passengers = models.ManyToManyField(User, related_name="back_seat_driver")
    # passengers = [User objects]
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)
    objects = CarManager()

    def __repr__(self):
        # Fix: the previous implementation printed debug output (including
        # the raw related manager) as a side effect; __repr__ must only
        # build and return the string.
        return f'<Car object: name={self.name} owner={self.owner.name}>'
|
# Demo: print every key/value pair, each on its own line.
my_dict = {'color': 'red', 'value': 5, 'distance': 16.50}
for key, value in my_dict.items():
    print(key, value, sep='\n')
# Demo: greet every name in the list.
my_list = ['jimmy', 'reza', 'sohag', 'nayeem']
for name in my_list:
    print("My name is " + name)
print("playing with git")
# TO DO:
# -Let cpu choose a random item and make it the only correct answer.
# -Instead of "Guess a word", give a hint about an item like "It's a fruit and it's red. What am I thinking about?"
def GuessWork():
    """Word-guessing game: prompt until the player names one of the hidden
    words, then offer replays until they decline."""
    print("Welcome to \"Guesswork\" game.\n")
    playing = True
    while playing:
        words = ["apple", "berry", "pineapple", "strawberry"]
        # Keep prompting until a valid word comes in.
        while True:
            if input("Guess a word: ").lower() in words:
                break
            print("Try again")
        print("You guessed!\n")
        playing = False
        # Replay prompt: loop until a recognisable yes/no answer.
        while not playing:
            answer = input("Play again? (y/n): ").lower()
            if answer in ("no", "n"):
                break
            elif answer in ("yes", "y"):
                playing = True
            else:
                print("Wrong input")

GuessWork()
|
import time
from library import *
# Silence per-query SQL logging while benchmarking.
setSqlVerbose(False)
# Number of repetitions for each timed query run.
iterations = 250
def benchmarkQuery(query, *indexes):
    """Time *query* over `iterations` runs first without and then with the
    given indexes, printing both totals (seconds).

    @param query: SQL query string to benchmark.
    @param indexes: (table, column) pairs to drop up front and add back
                    before the second timing run.
    (Python 2 script: print statements, xrange.)
    """
    for table, index in indexes:
        try: # Delete index if it exists
            sqlQuery('ALTER TABLE ' + sqlBackticks(table) + ' DROP INDEX ' + sqlBackticks(index))
        except: # Index probably didn't exist
            pass
    print 'Running query without indexes.'
    t = time.time()
    for i in xrange(iterations):
        sqlQuery(query)
    timeWithout = time.time() - t
    # Now add indexes
    for table, index in indexes:
        sqlQuery('ALTER TABLE ' + sqlBackticks(table) + ' ADD INDEX (' + sqlBackticks(index) + ')')
    print 'Running query with indexes.'
    t = time.time()
    for i in xrange(iterations):
        sqlQuery(query)
    timeWith = time.time() - t
    # Totals: elapsed seconds without vs with the indexes in place.
    print timeWithout, timeWith
# Benchmark 1: aggregate over a join, indexed on the grouped column.
benchmarkQuery("""
-- All actors that have acted in 10 movies or more
-- Useful to know which actors are popular
SELECT actors.name, COUNT(actors.name) AS numberOfMovies FROM actors
NATURAL JOIN has_actors
GROUP BY actors.name
HAVING COUNT(actors.name) >= 10
""", ('actors', 'name'))
# Benchmark 2: date-range filter plus two IN-subqueries, indexed on the
# date column and on every column the subqueries filter by.
benchmarkQuery("""
-- Movies available in both english (spoken) and french (spoken) released in 1913
-- Useful if we're looking for content in certain languages to share with a household where some people speak french and some english,
-- while looking for a particular period
SELECT iid, title FROM items NATURAL JOIN video
WHERE `date` >= '1913-01-01' AND `date` <= '1913-12-31'
AND iid IN (
SELECT iid FROM has_languages WHERE type = 'spoken' AND iso = 'en'
) AND iid IN (
SELECT iid FROM has_languages WHERE type = 'spoken' AND iso = 'fr'
)
""", ('items', 'date'), ('has_languages', 'iid'), ('has_languages', 'type'), ('has_languages', 'iso'))
# Benchmark 3: correlated EXISTS subquery, indexed on the filter columns.
benchmarkQuery("""
-- List members who have not given back their items before the end of their reservation date, and are already in bad standing.
-- Useful when wanting to see the list of people whose membership should probably be ended.
SELECT members.pnid, name, email, balance FROM members NATURAL JOIN people WHERE standing = 'bad' AND EXISTS (
SELECT 1 FROM reserved_by
WHERE reserved_by.pnid = members.pnid
AND `to` < CURRENT_DATE
LIMIT 1 -- Only need one record to check existence
)
""", ('members', 'standing'), ('reserved_by', 'pnid'), ('reserved_by', 'to'))
|
from __future__ import division, print_function, absolute_import
from gym.envs.registration import register
import numpy as np
from NGSIM_env import utils
from NGSIM_env.envs.common.observation import observation_factory
from NGSIM_env import utils
from NGSIM_env.envs.common.abstract import AbstractEnv
from NGSIM_env.road.road import Road, RoadNetwork
from NGSIM_env.vehicle.behavior import IDMVehicle
from NGSIM_env.vehicle.humandriving import HumanLikeVehicle, NGSIMVehicle
from NGSIM_env.road.lane import LineType, StraightLane
from NGSIM_env.utils import *
from NGSIM_env.data.data_process import build_trajecotry
class NGSIMEnv(AbstractEnv):
    """
    A highway driving environment with NGSIM data.

    Rebuilds one recorded NGSIM scene ('us-101' or 'i-80') as a road network,
    replays the surrounding vehicles from their recorded trajectories, and
    lets the ego vehicle either follow a planned human-like trajectory or an
    IDM controller. All NGSIM quantities are recorded in feet; division by
    3.281 throughout converts to metres.
    """
    def __init__(self, scene, period, vehicle_id, IDM=False):
        # period / vehicle_id select which recorded trajectory becomes the ego.
        self.vehicle_id = vehicle_id
        self.scene = scene
        self.trajectory_set = build_trajecotry(scene, period, vehicle_id)
        self.ego_length = self.trajectory_set['ego']['length'] / 3.281
        self.ego_width = self.trajectory_set['ego']['width'] / 3.281
        self.ego_trajectory = self.trajectory_set['ego']['trajectory']
        self.duration = len(self.ego_trajectory) - 3
        # Every key except the first is a surrounding-vehicle id.
        # NOTE(review): assumes 'ego' is the first key of trajectory_set —
        # confirm against build_trajecotry's insertion order.
        self.surrounding_vehicles = list(self.trajectory_set.keys())
        self.surrounding_vehicles.pop(0)
        self.run_step = 0
        self.human = False
        self.IDM = IDM
        super(NGSIMEnv, self).__init__()

    def process_raw_trajectory(self, trajectory):
        # Convert one raw NGSIM trajectory into environment coordinates:
        # subtracts a 6 ft offset from the first axis, swaps the two position
        # axes, and converts positions and speed from feet(/s) to metres(/s).
        trajectory = np.array(trajectory)
        for i in range(trajectory.shape[0]):
            x = trajectory[i][0] - 6
            y = trajectory[i][1]
            speed = trajectory[i][2]
            trajectory[i][0] = y / 3.281
            trajectory[i][1] = x / 3.281
            trajectory[i][2] = speed / 3.281
        return trajectory

    def default_config(self):
        # Extend the base-class defaults with NGSIM-specific rendering and
        # observation settings.
        config = super().default_config()
        config.update({
            "observation": {"type": "Kinematics"},
            "vehicles_count": 10,
            "show_trajectories": True,
            "screen_width": 800,
            "screen_height": 300,
        })
        return config

    def reset(self, human=False, reset_time=1):
        '''
        Reset the environment at a given time (scene) and specify whether use human target
        '''
        self.human = human
        self._create_road()
        self._create_vehicles(reset_time)
        self.steps = 0
        return super(NGSIMEnv, self).reset()

    def _create_road(self):
        """
        Create a road composed of NGSIM road network
        """
        net = RoadNetwork()
        # Lane-marking shorthands: continuous, striped, none.
        c, s, n = LineType.CONTINUOUS_LINE, LineType.STRIPED, LineType.NONE
        if self.scene == 'us-101':
            length = 2150 / 3.281 # m
            width = 12 / 3.281 # m
            # Longitudinal boundaries of the three road sections (m).
            ends = [0, 560/3.281, (698+578+150)/3.281, length]
            # first section
            line_types = [[c, n], [s, n], [s, n], [s, n], [s, c]]
            for lane in range(5):
                origin = [ends[0], lane * width]
                end = [ends[1], lane * width]
                net.add_lane('s1', 's2', StraightLane(origin, end, width=width, line_types=line_types[lane]))
            # merge_in lanes
            net.add_lane('merge_in', 's2', StraightLane([480/3.281, 5.5*width], [ends[1], 5*width], width=width, line_types=[c, c], forbidden=True))
            # second section
            line_types = [[c, n], [s, n], [s, n], [s, n], [s, n], [s, c]]
            for lane in range(6):
                origin = [ends[1], lane * width]
                end = [ends[2], lane * width]
                net.add_lane('s2', 's3', StraightLane(origin, end, width=width, line_types=line_types[lane]))
            # third section
            line_types = [[c, n], [s, n], [s, n], [s, n], [s, c]]
            for lane in range(5):
                origin = [ends[2], lane * width]
                end = [ends[3], lane * width]
                net.add_lane('s3', 's4', StraightLane(origin, end, width=width, line_types=line_types[lane]))
            # merge_out lanes
            net.add_lane('s3', 'merge_out', StraightLane([ends[2], 5*width], [1550/3.281, 7*width], width=width, line_types=[c, c], forbidden=True))
            self.road = Road(network=net, np_random=self.np_random, record_history=self.config["show_trajectories"])
        elif self.scene == 'i-80':
            length = 1700 / 3.281
            lanes = 6
            width = 12 / 3.281
            # Four sections for i-80 (m).
            ends = [0, 600/3.281, 700/3.281, 900/3.281, length]
            # first section
            line_types = [[c, n], [s, n], [s, n], [s, n], [s, n], [s, c]]
            for lane in range(lanes):
                origin = [ends[0], lane * width]
                end = [ends[1], lane * width]
                net.add_lane('s1', 's2', StraightLane(origin, end, width=width, line_types=line_types[lane]))
            # merge_in lanes
            net.add_lane('s1', 's2', StraightLane([380/3.281, 7.1*width], [ends[1], 6*width], width=width, line_types=[c, c], forbidden=True))
            # second section
            line_types = [[c, n], [s, n], [s, n], [s, n], [s, n], [s, n]]
            for lane in range(lanes):
                origin = [ends[1], lane * width]
                end = [ends[2], lane * width]
                net.add_lane('s2', 's3', StraightLane(origin, end, width=width, line_types=line_types[lane]))
            # merge_in lanes
            net.add_lane('s2', 's3', StraightLane([ends[1], 6*width], [ends[2], 6*width], width=width, line_types=[s, c]))
            # third section
            line_types = [[c, n], [s, n], [s, n], [s, n], [s, n], [s, n]]
            for lane in range(lanes):
                origin = [ends[2], lane * width]
                end = [ends[3], lane * width]
                net.add_lane('s3', 's4', StraightLane(origin, end, width=width, line_types=line_types[lane]))
            # merge_in lane
            net.add_lane('s3', 's4', StraightLane([ends[2], 6*width], [ends[3], 5*width], width=width, line_types=[n, c]))
            # forth section
            line_types = [[c, n], [s, n], [s, n], [s, n], [s, n], [s, c]]
            for lane in range(lanes):
                origin = [ends[3], lane * width]
                end = [ends[4], lane * width]
                net.add_lane('s4', 's5', StraightLane(origin, end, width=width, line_types=line_types[lane]))
            self.road = Road(network=net, np_random=self.np_random, record_history=self.config["show_trajectories"])

    def _create_vehicles(self, reset_time):
        """
        Create ego vehicle and NGSIM vehicles and add them on the road.
        """
        whole_trajectory = self.process_raw_trajectory(self.ego_trajectory)
        ego_trajectory = whole_trajectory[reset_time:]
        # Initial acceleration from the speed difference over one 0.1 s frame.
        ego_acc = (whole_trajectory[reset_time][2] - whole_trajectory[reset_time-1][2]) / 0.1
        self.vehicle = HumanLikeVehicle.create(self.road, self.vehicle_id, ego_trajectory[0][:2], self.ego_length, self.ego_width,
                                               ego_trajectory, acc=ego_acc, velocity=ego_trajectory[0][2], human=self.human, IDM=self.IDM)
        self.road.vehicles.append(self.vehicle)
        for veh_id in self.surrounding_vehicles:
            other_trajectory = self.process_raw_trajectory(self.trajectory_set[veh_id]['trajectory'])[reset_time:]
            self.road.vehicles.append(NGSIMVehicle.create(self.road, veh_id, other_trajectory[0][:2], self.trajectory_set[veh_id]['length']/3.281,
                                                          self.trajectory_set[veh_id]['width']/3.281, other_trajectory, velocity=other_trajectory[0][2]))

    def step(self, action=None):
        """
        Perform a MDP step

        Returns (observation, accumulated trajectory features, terminal flag,
        info dict) — note the second element is the feature vector from
        _simulate(), not a scalar reward.
        """
        if self.road is None or self.vehicle is None:
            raise NotImplementedError("The road and vehicle must be initialized in the environment implementation")
        features = self._simulate(action)
        obs = self.observation.observe()
        terminal = self._is_terminal()
        info = {
            "velocity": self.vehicle.velocity,
            "crashed": self.vehicle.crashed,
            'offroad': not self.vehicle.on_road,
            "action": action,
            "time": self.time
        }
        return obs, features, terminal, info

    def _simulate(self, action):
        """
        Perform several steps of simulation with the planned trajectory

        action is (target_lateral, target_speed, horizon T) or None to follow
        the recorded human goal; features are summed over the horizon, except
        the last entry (human-likeness) which keeps its final value.
        """
        trajectory_features = []
        T = action[2] if action is not None else 5
        for i in range(int(T * self.SIMULATION_FREQUENCY)-1):
            # Plan the trajectory once, on the first simulation step.
            if i == 0:
                if action is not None: # sampled goal
                    self.vehicle.trajectory_planner(action[0], action[1], action[2])
                else: # human goal
                    # Target taken from the recorded trajectory T seconds
                    # ahead (10 frames per second in ngsim_traj).
                    self.vehicle.trajectory_planner(self.vehicle.ngsim_traj[self.vehicle.sim_steps+T*10][1],
                           (self.vehicle.ngsim_traj[self.vehicle.sim_steps+T*10][0]-self.vehicle.ngsim_traj[self.vehicle.sim_steps+T*10-1][0])/0.1, T)
                self.run_step = 1
            self.road.act(self.run_step)
            self.road.step(1/self.SIMULATION_FREQUENCY)
            self.time += 1
            self.run_step += 1
            features = self._features()
            trajectory_features.append(features)
            self._automatic_rendering()
            # Stop at terminal states
            if self.done or self._is_terminal():
                break
        self.enable_auto_render = False
        # Human-likeness is a state (last value), not accumulated like the rest.
        human_likeness = features[-1]
        # NOTE(review): `interaction` is computed but never used — confirm
        # whether it should replace the summed social-impact entry.
        interaction = np.max([feature[-2] for feature in trajectory_features])
        trajectory_features = np.sum(trajectory_features, axis=0)
        trajectory_features[-1] = human_likeness
        return trajectory_features

    def _features(self):
        """
        Hand-crafted features
        :return: the array of the defined features
        """
        # ego motion
        # Finite differences over the last few 0.1 s frames of the driven
        # trajectory; before t=3 there is not enough history, so use [0].
        ego_longitudial_positions = self.vehicle.traj.reshape(-1, 2)[self.time-3:, 0]
        ego_longitudial_speeds = (ego_longitudial_positions[1:] - ego_longitudial_positions[:-1]) / 0.1 if self.time >= 3 else [0]
        ego_longitudial_accs = (ego_longitudial_speeds[1:] - ego_longitudial_speeds[:-1]) / 0.1 if self.time >= 3 else [0]
        ego_longitudial_jerks = (ego_longitudial_accs[1:] - ego_longitudial_accs[:-1]) / 0.1 if self.time >= 3 else [0]
        ego_lateral_positions = self.vehicle.traj.reshape(-1, 2)[self.time-3:, 1]
        ego_lateral_speeds = (ego_lateral_positions[1:] - ego_lateral_positions[:-1]) / 0.1 if self.time >= 3 else [0]
        ego_lateral_accs = (ego_lateral_speeds[1:] - ego_lateral_speeds[:-1]) / 0.1 if self.time >= 3 else [0]
        # travel efficiency
        ego_speed = abs(ego_longitudial_speeds[-1])
        # comfort
        ego_longitudial_acc = ego_longitudial_accs[-1]
        ego_lateral_acc = ego_lateral_accs[-1]
        ego_longitudial_jerk = ego_longitudial_jerks[-1]
        # time headway front (THWF) and time headway behind (THWB)
        # 100 acts as an "infinite headway" default when no vehicle qualifies.
        THWFs = [100]; THWBs = [100]
        for v in self.road.vehicles:
            if v.position[0] > self.vehicle.position[0] and abs(v.position[1]-self.vehicle.position[1]) < self.vehicle.WIDTH and self.vehicle.velocity >= 1:
                THWF = (v.position[0] - self.vehicle.position[0]) / self.vehicle.velocity
                THWFs.append(THWF)
            elif v.position[0] < self.vehicle.position[0] and abs(v.position[1]-self.vehicle.position[1]) < self.vehicle.WIDTH and v.velocity >= 1:
                THWB = (self.vehicle.position[0] - v.position[0]) / v.velocity
                THWBs.append(THWB)
        # Exponential decay maps headway to (0, 1]: smaller headway -> larger risk value.
        THWF = np.exp(-min(THWFs))
        THWB = np.exp(-min(THWBs))
        # avoid collision
        collision = 1 if self.vehicle.crashed or not self.vehicle.on_road else 0
        # interaction (social) impact
        # Sum of deceleration magnitudes the ego forces onto overtaken vehicles.
        social_impact = 0
        for v in self.road.vehicles:
            if isinstance(v, NGSIMVehicle) and v.overtaken and v.velocity != 0:
                social_impact += np.abs(v.velocity - v.velocity_history[-1])/0.1 if v.velocity - v.velocity_history[-1] < 0 else 0
        # ego vehicle human-likeness
        ego_likeness = self.vehicle.calculate_human_likeness()
        # feature array
        fetures = np.array([ego_speed, abs(ego_longitudial_acc), abs(ego_lateral_acc), abs(ego_longitudial_jerk),
                            THWF, THWB, collision, social_impact, ego_likeness])
        return fetures

    def _is_terminal(self):
        """
        The episode is over if the ego vehicle crashed or go off road or the time is out.
        """
        return self.vehicle.crashed or self.time >= self.duration or self.vehicle.position[0] >= 2150/3.281 or not self.vehicle.on_road

    def sampling_space(self):
        """
        The target sampling space (longitudinal speed and lateral offset)

        Returns three candidate lateral offsets (one lane left, current, one
        lane right — 12 ft lane width) and 10 speeds within +/- 5 m/s of the
        current speed (floored at 0).
        """
        lane_center = self.vehicle.lane.start[1]
        current_y = self.vehicle.position[1]
        current_speed = self.vehicle.velocity
        lateral_offsets = np.array([lane_center-12/3.281, current_y, lane_center+12/3.281])
        min_speed = current_speed - 5 if current_speed > 5 else 0
        max_speed = current_speed + 5
        target_speeds = np.linspace(min_speed, max_speed, 10)
        return lateral_offsets, target_speeds
|
# Integer remainder demo: report a % b (message text is Korean).
a = 11113
b = 23
ret = a % b
message = '<%d>를 <%d>로 나누면 <%d>가 나머지로 남습니다.' % (a, b, ret)
print(message)
class Optimizer:
    """Base optimizer class."""

    def __init__(self, cost_function, ftol):
        """
        Initialize base Optimizer class.

        Args:
            cost_function: one dimensional function to optimize.
            ftol: cost function tolerance.
        """
        self.f_evals = 0  # number of cost-function evaluations performed
        self.cost_function = cost_function
        self.ftol = ftol

    def evaluate(self, x):
        """Evaluate the cost function at ``x`` and count the call in
        ``f_evals`` (the counter is bumped only after a successful call)."""
        result = self.cost_function(x)
        self.f_evals += 1
        return result

    def optimize(self):
        """Subclasses must implement the actual optimization loop."""
        raise NotImplementedError
# Nicholas Land / land.nicholas@outlook.com
# 10/5/2018
# Plight of friction
#TODO add more spells in ability selection and add them into level up function
import random
import json
# GAME MECHANICS
def welcome(mainCharList):
    """
    Run interactive character creation, appending the result to mainCharList.

    Appends, in order: name, clan, class, level, attack, defense, health,
    mana, luck, xp, gold. convertFromListToDict() later turns this flat list
    into the nested character dictionary.

    Returns the populated mainCharList.
    """
    mainCharList.append(str(input("Hello adventurer, welcome to Plight of Friction! What is your name? ")))
    print("Awesome, nice to meet you ", mainCharList[0])
    print("\n\nIn this game there are 3 main factions that rule Plight of Friction.")
    print("The 3 factions are: ")
    print("First there is the fire clan, who use the destructive power of fire as their core.")
    print("Next there is the water clan who uses the healing and power aspects of water.")
    print("Finally there is the forest clan who uses the power of earth, plants, and nature.")
    # BUG FIX: str.lower() returns a new string; the original discarded the
    # result, so capitalised input like "Fire" never matched any clan. The
    # retry prompts now lowercase their input too.
    elementDecision = str(input("What clan would you like to associate yourself with? (fire, water, or forest) ")).lower()
    while elementDecision not in ("fire", "water", "forest"):
        elementDecision = str(input("Please enter a valid clan or possibly check your spelling ")).lower()
    print("Congrats you chose the " + elementDecision + " clan!")
    mainCharList.append(elementDecision)
    print("\n\nIn this game there are 3 classes to choose from with each having their own niche in the game.")
    print("The classes are: ")
    print("Warrior: A class who relies raw strength and power to mercilessly defeat his enemies")
    print("Assassin: A class that uses deception and agility to outsmart his or her foes into defeat")
    print("Sorcerer: A class that taps into the arcane and mystic energy to conjure spells to eliminate enemies")
    # Starting stats per class: (attack, defense, health, mana, luck).
    startingStats = {
        "warrior": (20, .15, 200, 100, 5),
        "assassin": (14, .10, 120, 150, 40),
        "sorcerer": (16, .10, 150, 220, 15),
    }
    classDecision = str(input("What class would you like to become? ")).lower()
    while classDecision not in startingStats:
        classDecision = str(input("Please enter a valid class, or possibly check your previous spelling")).lower()
    print("Congrats you chose the " + classDecision.capitalize() + " class!")
    mainCharList.append(classDecision)
    attack, defense, health, mana, luck = startingStats[classDecision]
    mainCharList.append(1)        # level
    mainCharList.append(attack)   # attack
    mainCharList.append(defense)  # defense (fraction of damage absorbed)
    mainCharList.append(health)   # health
    mainCharList.append(mana)     # mana
    mainCharList.append(luck)     # luck (critical-strike chance, %)
    mainCharList.append(0)        # xp
    mainCharList.append(0)        # gold
    print(
        "\n\nYour character starts out at level 1, and as you kill more monsters you gain xp and you level up")
    print("right now you are level ", mainCharList[3])
    return mainCharList
def save(aCharDict):
    """
    Persist the character dictionary to charStats.json in the working dir.

    Refuses to save (and tells the player) when the 'stats' section is
    empty, i.e. no character has been created or loaded yet.
    """
    if aCharDict['stats']:
        # `with` closes the file on exit; the original's explicit close()
        # inside the with-block was redundant.
        with open('charStats.json', 'w') as outfile:
            json.dump(aCharDict, outfile)
        print("Character has been saved!")
    else:
        print("It seems you don't have a character file on selection, please load one or create one.")
def load(aCharDict):
    """
    Load charStats.json into aCharDict (updated in place).

    A corrupt or empty save file is reported rather than raised. A missing
    file still raises FileNotFoundError, matching the original behaviour
    (open() sits outside the try block).
    """
    with open('charStats.json', 'r') as readfile:
        try:
            data = json.load(readfile)
        # Narrowed from a bare `except:` so unrelated errors (including
        # KeyboardInterrupt) are no longer swallowed; json.JSONDecodeError
        # is a ValueError subclass.
        except ValueError:
            print("There are currently no characters saved.")
        else:
            aCharDict.update(data)
            print("Character has been loaded!")
# XP tiers: (target level, min xp inclusive, max xp exclusive, gold reward).
# A tier applies when xp falls in [min, max) and the character is still below
# that level. Tier 10 uses 601 because the original condition was `xp > 600`
# (xp values are integers — enemy xp rewards). The original's overlap at
# xp == 470 (levels 8 and 9 both matched) is resolved here: 470 is level 9.
_LEVEL_TIERS = [
    (2, 30, 70, 50),
    (3, 70, 100, 60),
    (4, 140, 190, 70),
    (5, 190, 250, 80),
    (6, 250, 320, 90),
    (7, 320, 390, 100),
    (8, 390, 470, 100),
    (9, 470, 600, 100),
    (10, 601, float('inf'), 150),
]


def _grantLevelTwoAbility(mainCharDict):
    """Add the class-specific level-2 ability to the character's spellbook."""
    charClass = mainCharDict['stats']['class']
    if charClass == "warrior":
        mainCharDict['abilities']['smash'] = {
            'name': "Smash",
            'mana': 60,
            'description': "You smash your weapon at the enemy's feet creating a shock that stuns them for a turn",
        }
    elif charClass == "assassin":
        mainCharDict['abilities']['shadowStep'] = {
            'name': "Shadow Step",
            'mana': 60,
            'description': "You flash behind your opponent slapping the side of their head and stunning them for a turn ",
        }
    elif charClass == "sorcerer":
        mainCharDict['abilities']['magicBind'] = {
            'name': "Magic Bind",
            'mana': 70,
            'description': "You cast a binding spell on the enemy stunning them for a turn",
        }


def levelUp(mainCharDict):
    """
    Apply any level-up the character's xp entitles them to.

    Each earned level multiplies attack/defense/health/mana by 1.3, awards
    tiered gold, and at level 2 grants the class's second ability. Mutates
    mainCharDict in place.

    BUG FIX: the original computed the new stats on local copies and never
    wrote them back, so leveling up changed nothing but the ability grant;
    all updates now go straight into the dictionary.
    """
    stats = mainCharDict['stats']
    print("You now have", stats['xp'], "experience")
    # Only the three playable classes receive stat buffs (as in the original,
    # which gated every buff behind an explicit class check).
    if stats['class'] not in ("warrior", "assassin", "sorcerer"):
        return
    xp = stats['xp']
    for targetLevel, minXp, maxXp, goldReward in _LEVEL_TIERS:
        if minXp <= xp < maxXp and stats['level'] < targetLevel:
            if targetLevel == 10:
                print("Congrats you are now level 10!")
                print("You are at the level cap of the game!")
            else:
                print("Congrats you are now level {}!".format(targetLevel))
            stats['level'] += 1
            stats['attack'] *= 1.3
            stats['defense'] *= 1.3
            stats['health'] *= 1.3
            stats['mana'] *= 1.3
            mainCharDict['inventory']['gold'] += goldReward
            if targetLevel == 2:
                _grantLevelTwoAbility(mainCharDict)
def convertFromListToDict(mainCharList, mainCharDict):
    """
    Copy the character-creation list produced by welcome() into the nested
    character dictionary, then grant the class's starting (level-1) ability.

    List layout: [name, clan, class, level, attack, defense, health, mana,
    luck, xp, gold]. mainCharDict must already have 'stats', 'inventory'
    and 'abilities' sections. (The original's stray debug
    print(mainCharDict) has been removed.)
    """
    statKeys = ('name', 'clan', 'class', 'level', 'attack', 'defense',
                'health', 'mana', 'luck', 'xp')
    for key, value in zip(statKeys, mainCharList):
        mainCharDict['stats'][key] = value
    mainCharDict['inventory']['gold'] = mainCharList[10]
    # Starter ability per class: dict key, display name, description.
    starterAbilities = {
        'warrior': ('crush', "crush",
                    "Your character jumps up and slams its weapon down on the enemy and deals 1.5x attack damage"),
        'sorcerer': ('energyBlast', "energy blast",
                     "Your character conjures a energy into a ball and sends it at the enemy dealing 1.5x atk dmg"),
        'assassin': ('gouge', "gouge",
                     "Your character runs up to the enemy and shoves its weapon into the back of the enemy"),
    }
    charClass = mainCharDict['stats']['class']
    if charClass in starterAbilities:
        abilityKey, displayName, description = starterAbilities[charClass]
        mainCharDict['abilities'][abilityKey] = {
            'name': displayName,
            'mana': 50,  # every starter ability costs 50 mana
            'description': description,
        }
def convertFromListToDictEnemy(enemyCharList, enemyCharDict):
    """
    Populate an enemy's 'stats' section from its creation list.

    List layout mirrors the player's: [name, clan, class, level, attack,
    defense, health, mana, luck, xp]. The gold reward is always twice the
    enemy's xp value.
    """
    keys = ('name', 'clan', 'class', 'level', 'attack', 'defense',
            'health', 'mana', 'luck', 'xp')
    stats = enemyCharDict['stats']
    for key, value in zip(keys, enemyCharList):
        stats[key] = value
    stats['gold'] = stats['xp'] * 2
def statDisplay(mainCharDict):
    """
    Interactively show the character's stats, inventory, or abilities.

    Loops until the player answers "no" to the continue prompt. Returns 0
    immediately (without displaying anything) when no character is loaded.
    """
    # A fully-created character has at least name/clan/class/level/attack;
    # fewer than 5 stat entries means creation or loading never happened.
    if(len(mainCharDict['stats']) < 5):
        print("You have no character on file, please load your character or create a new one")
        return 0
    trigger = False
    while( trigger == False):
        print("\nEnter 1 to look at character stats")
        print("Enter 2 to look at inventory")
        print("Enter 3 to look at your abilities")
        statChoice = int(input("Enter your choice:"))
        while( not( 1<=statChoice<=3)):
            print("Input not understood.")
            statChoice = int(input("Please enter a valid number"))
        if( statChoice == 1):
            print("\n\nStats for: ",mainCharDict['stats']['name'])
            print("clan:\t\t\t\t", mainCharDict['stats']['clan'])
            print("class:\t\t\t\t", mainCharDict['stats']['class'])
            print("level:\t\t\t\t", mainCharDict['stats']['level'])
            print("attack:\t\t\t\t", mainCharDict['stats']['attack'])
            print("defense:\t\t\t", mainCharDict['stats']['defense'])
            print("health:\t\t\t\t", mainCharDict['stats']['health'])
            print("mana:\t\t\t\t", mainCharDict['stats']['mana'])
            # 'luck' doubles as the critical-strike chance (see attackWithCrit).
            print("critical strike:\t", mainCharDict['stats']['luck'], "%\n\n")
        if( statChoice == 2):
            print("\nYour inventory:")
            for i in mainCharDict['inventory']:
                print(i, mainCharDict['inventory'][i])
            print()
        if( statChoice == 3):
            print("Your abilities:")
            for i in mainCharDict['abilities']:
                print(mainCharDict['abilities'][i]['name'])
                print("mana cost:", mainCharDict['abilities'][i]['mana'])
                print("description:",mainCharDict['abilities'][i]['description'])
                print()
        # Any answer other than exactly "no" keeps the menu loop running.
        exit = input("Do you want to look at anymore details about your character?(Enter yes or no):")
        if(exit == "no"):
            trigger = True
def warningForCharCreation():
    """
    Warn the player that creating a character overwrites any existing save.

    Returns False when the player enters "0" (go back to the main menu),
    True for any other input (proceed with character creation).
    """
    print("\n\n\n***WARNING*** if you have a previously saved file on this game then creating a new character will delete it")
    print("Enter 0 if you wish to go back to the main menu")
    print("Or")
    print("Enter literally any other number to continue forward with character creation")
    return input() != "0"
def getHealth(mainCharDict):
    """
    Return the character's maximum health for its class and level.

    Base health: warrior 200, assassin 120, otherwise (sorcerer) 150; each
    level beyond 1 multiplies it by 1.3, truncated to int at the end.
    """
    charClass = mainCharDict['stats']['class']
    charLevel = mainCharDict['stats']['level']
    # BUG FIX: the original wrote `health == 200` (a comparison, not an
    # assignment), so every warrior's max health computed as 0.
    if charClass == "warrior":
        health = 200
    elif charClass == "assassin":
        health = 120
    else:
        health = 150
    # range(0) at level 1 leaves the base value untouched.
    for _ in range(charLevel - 1):
        health *= 1.3
    return int(health)
def getMana(mainCharDict):
    """
    Return the character's maximum mana for its class and level.

    Base mana: warrior 100, assassin 150, otherwise (sorcerer) 220; each
    level beyond 1 multiplies it by 1.3, truncated to int at the end.
    """
    charClass = mainCharDict['stats']['class']
    charLevel = mainCharDict['stats']['level']
    # BUG FIX: the original wrote `mana == 100` (a comparison, not an
    # assignment), so every warrior's max mana computed as 0.
    if charClass == "warrior":
        mana = 100
    elif charClass == "assassin":
        mana = 150
    else:
        mana = 220
    # range(0) at level 1 leaves the base value untouched.
    for _ in range(charLevel - 1):
        mana *= 1.3
    return int(mana)
# COMBAT MECHANICS
def combatSim(mainCharDict, enemyDict):
    """
    Run one interactive, turn-based fight between the player and enemyDict.

    Loops reading menu choices (attack / ability / stats / potions / flee)
    until one side's health reaches 0 or the player runs away. Mutates
    mainCharDict in place (health, mana, potion counts, xp), resurrects a
    defeated player with 60 hp, then applies levelUp() and save().
    """
    mainCharName = mainCharDict['stats']['name']
    mainCharHealth = mainCharDict['stats']['health']
    mainCharMana = mainCharDict['stats']['mana']
    enemyName = enemyDict['stats']['name']
    enemyHealth = enemyDict['stats']['health']
    print("You are now fighting", enemyName,"starting out with", enemyHealth,"health")
    print("You start out the fight with", mainCharHealth, "health")
    trigger = 0
    while (trigger == False):
        print("\nEnter 1 to do a basic attack")
        print("Enter 2 to do a special ability")
        print("Enter 3 to look at stats and inventory")
        print("Enter 4 to use a health potion")
        print("Enter 5 to use a mana potion")
        print("Enter 6 to run like a loser")
        print("Your mana is: ", mainCharMana,"\n\n")
        userChoice = int(input("Your choice is: "))
        if ( userChoice == 1 ):
            # Both sides trade basic attacks: crit roll, then defense reduction.
            print(mainCharName, "uses a normal attack!")
            attack = attackWithCrit(mainCharDict)
            mainAttack = attackWithDef(attack, enemyDict)
            enemyHealth = enemyHealth - mainAttack
            print(mainCharName, "has attacked the enemy for", mainAttack, "damage!")
            print("The enemy has", enemyHealth, "health left! \n\n")
            print(enemyName + " uses basic attack!")
            enemyAttack = attackWithCrit(enemyDict)
            enemyMainAttack = attackWithDef(enemyAttack, mainCharDict)
            mainCharHealth = mainCharHealth - enemyMainAttack
            print(enemyName, "has attacked", mainCharName, "for", enemyMainAttack, "damage!")
            print(mainCharName + " has", mainCharHealth, "health left! \n\n")
        if ( userChoice == 2 ):
            moveResult = abilitySelection(mainCharDict)
            moveName = moveResult[0]
            moveDamage = moveResult[1]
            moveMana = moveResult[2]
            moveCondition = moveResult[3]
            # moveResult[4:8] are reserved buff/debuff slots, unused for now.
            # BUG FIX: was `>`, which refused a cast with exactly enough mana.
            if( mainCharMana >= moveMana ):
                mainCharMana = mainCharMana - moveMana
                mainAttack = attackWithDef(moveDamage,enemyDict)
                enemyHealth = enemyHealth - mainAttack
                print(mainCharName, "has used", moveName, "for", mainAttack, "!")
                if(moveCondition == "stun"):
                    # A stunned enemy skips its counter-attack this turn.
                    print(enemyName, "is now stunned! It cannot attack!")
                    print("... \nIt finally woke up!")
                else:
                    attack = attackWithCrit(enemyDict)
                    enemyAttack = attackWithDef(attack, mainCharDict)
                    print(enemyName, "uses normal attack for", enemyAttack, "!")
                    mainCharHealth = mainCharHealth - enemyAttack
                    print(mainCharName + " has", mainCharHealth, "health left! \n\n")
                print("The enemy now has", enemyHealth, "health!\n\n")
            else:
                print("You don't have enough mana! If you have mana potions drink them!")
        if( userChoice == 3 ):
            statDisplay(mainCharDict)
        if( userChoice == 4 ):
            maxHealth = getHealth(mainCharDict)
            # BUG FIX: potion counts were read once before the loop, so the
            # stale local let the player drink more potions than they owned;
            # re-read the inventory on every use.
            healthPot = mainCharDict['inventory'].get('health potion', -1)
            if( healthPot <= 0 ):
                print("You don't have any health potions in your inventory!")
            else:
                if (maxHealth == mainCharHealth):
                    print("You're already at full health!")
                else:
                    mainCharDict['inventory']['health potion'] = mainCharDict['inventory']['health potion'] - 1
                    mainCharHealth += 50
                    amountHealed = 50
                    if( mainCharHealth > maxHealth ):
                        # Clamp to max health; report only the effective heal.
                        amountHealed = ((mainCharHealth - maxHealth) - 50) * -1
                        mainCharHealth = maxHealth
                    print("You've been healed for", amountHealed, "health, your health is now", mainCharHealth)
        if( userChoice == 5 ):
            maxMana = getMana(mainCharDict)
            # Re-read the mana-potion count each use (same stale-local fix).
            manaPot = mainCharDict['inventory'].get('mana potion', -1)
            if (manaPot <= 0):
                print("You don't have any mana potions in your inventory!")
            else:
                if (maxMana == mainCharMana):
                    print("You're already at full mana!")
                else:
                    mainCharDict['inventory']['mana potion'] = mainCharDict['inventory']['mana potion'] - 1
                    mainCharMana += 50
                    amountHealed = 50
                    if (mainCharMana > maxMana):
                        amountHealed = ((mainCharMana - maxMana) - 50) * -1
                        mainCharMana = maxMana
                    print(amountHealed, "mana was restored! You now have", mainCharMana, "mana")
        if( userChoice == 6 ):
            print("You run away from the enemy and cower in fear")
            # BUG FIX: was `trigger == True` (a no-op comparison), so running
            # away never actually ended the fight.
            trigger = True
        if (enemyHealth <= 0):
            print(enemyName + " has been defeated!")
            mainCharDict['stats']['xp'] += enemyDict['stats']['xp']
            mainCharDict['stats']['health'] = mainCharHealth
            trigger = True
        if (mainCharHealth <= 0):
            print(mainCharName + " has been defeated!")
            mainCharDict['stats']['health'] = mainCharHealth
            trigger = True
    # Persist the fight's outcome, THEN check for resurrection.
    # BUG FIX: the original resurrected to 60 hp and then immediately
    # overwrote it with the (<= 0) end-of-fight health; it also printed the
    # health value where the player's name was clearly intended.
    mainCharDict['stats']['health'] = mainCharHealth
    mainCharDict['stats']['mana'] = mainCharMana
    if (mainCharDict['stats']['health'] <= 0):
        print("Well", mainCharName, "it looks like you've been defeated.")
        print("Luckily the gods of PLIGHT OF FRICTION have decided to resurrect you from the dead")
        print("You now start of back alive with 60hp, please be more prepared as you go into dungeons")
        mainCharDict['stats']['health'] = 60
    levelUp(mainCharDict)
    save(mainCharDict)
def attackWithCrit(aCharDict):
    """
    Return the character's attack damage, doubled on a critical strike.

    A crit lands when a roll of randint(0, 100) does not exceed the
    character's 'luck' stat, so luck is (roughly) the crit chance in %.
    """
    stats = aCharDict['stats']
    roll = random.randint(0, 100)
    if stats['luck'] < roll:
        return stats['attack']
    print("CRITICAL STRIKE!")
    return stats['attack'] * 2
def attackWithDef(attack, aCharDict):
    """Return attack damage after the defender's fractional 'defense' stat
    absorbs its share, truncated to an int."""
    absorbed = attack * aCharDict['stats']['defense']
    return int(attack - absorbed)
def abilitySelection(aCharDict):
    """
    Prompt the player to pick one of their learned abilities.

    Lists every ability in the character's spellbook with its mana cost and
    description, reads a 1-based menu choice, and returns the combat data
    list produced by abilityList() for the chosen ability.
    """
    spells = aCharDict['abilities']
    abilities = []  # ability keys in menu order, for index -> key lookup
    num = 1
    print("\nYour Spells:")
    for i in spells:
        print("\nEnter", num, "for:", i.upper())
        print("Info:")
        print(spells[i]['mana'], "mana cost")
        print("Description:", spells[i]['description'])
        abilities.append(i)
        num+=1
    abilityChoice = int(input("Please enter your choice: "))
    while(not(1<=abilityChoice<=len(abilities))):
        print("Your choice is not in your ability range")
        abilityChoice = int(input("Please enter your choice: "))
    # NOTE(review): this local shadows the function's own name — harmless
    # here, but worth renaming.
    abilitySelection = abilities[abilityChoice-1]
    print("You chose:", abilitySelection)
    return abilityList(abilitySelection,aCharDict)
def abilityList(selectedMove, aCharDict):
    """Return the stat list for a named ability, or None if the name is unknown.

    Returned list layout:
        [display name, damage, mana cost, enemy condition,
         enemy ATK delta, enemy DEF delta, own ATK delta, own DEF delta]

    Fixes: the original called selectedMove.lower() and discarded the result
    (a no-op on an immutable str), so matching has always been case-sensitive;
    the dead call is removed and the historical mixed-case keys are kept so
    existing callers keep working. The duplicated if/elif chain is replaced
    with a single lookup table.
    """
    attack = aCharDict['stats']['attack']
    # Warrior: crush lvl 1, smash lvl 2
    # Assassin: gouge lvl 1, shadowStep lvl 2
    # Sorcerer: energyBlast lvl 1, magic bind lvl 2
    moves = {
        "crush":       ("Crush",        1.5, 50, "none"),
        "gouge":       ("Gouge",        1.5, 50, "none"),
        "energyBlast": ("Energy Blast", 1.7, 50, "none"),
        "smash":       ("Smash",        1.5, 60, "stun"),
        "shadowStep":  ("Shadow Step",  1.5, 60, "stun"),
        "magic bind":  ("Magic Bind",   1.5, 60, "stun"),
    }
    if selectedMove not in moves:
        # Original fell off the end of the chain and implicitly returned None.
        return None
    name, multiplier, mana, condition = moves[selectedMove]
    # Trailing zeros: enemy ATK/DEF changes, then the caster's ATK/DEF changes.
    return [name, float(attack * multiplier), mana, condition, 0, 0, 0, 0]
# LEVEL MECHANICS
def caveSelection(mainCharDict):
    """Prompt for a cave depth and launch the matching fight.

    Bug fix: the original called response.lower() and discarded the result
    (strings are immutable), so any capitalised answer ("Shallow") could never
    match and the loop re-prompted forever; the re-prompted input was never
    lowered either. Both inputs are now normalised for real.
    """
    print("\n\nAlright you are now off on your adventure!\n\n")
    response = str(input("There are three caves in front of you, Would you like to go into the shallow one, average one, or deep one? "))
    response = response.lower()
    while True:
        if response == "shallow":
            shallowCaveFight(mainCharDict)
            return
        elif response == "average":
            averageCaveFight(mainCharDict)
            return
        elif response == "deep":
            deepCaveFight(mainCharDict)
            return
        response = str(input("Please input 'shallow', 'average', or 'deep' ")).lower()
def shallowCaveFight(mainCharDict):
    """Spawn a level-1 enemy (random class and element) and run combat.

    The enemy's element is drawn from the two elements other than the player's
    clan, so the matchup is never a mirror.
    """
    print("\nThis is a shallow cave fight")
    randEnemyElementPool = []
    randEnemyClassPool = ["warrior", "assassin", "sorcerer"]
    enemyList = []
    if (mainCharDict['stats']['clan'] == "water"):
        randEnemyElementPool.append("fire")
        randEnemyElementPool.append("forest")
    elif (mainCharDict['stats']['clan'] == "fire"):
        randEnemyElementPool.append("forest")
        randEnemyElementPool.append("water")
    else:
        randEnemyElementPool.append("water")
        randEnemyElementPool.append("fire")
    classDecider = random.randint(0, 2)
    elementDecider = random.randint(0, 1)
    enemyList.append("Small Enemy")
    enemyList.append(randEnemyElementPool[elementDecider])
    enemyList.append(randEnemyClassPool[classDecider])
    # Stat order expected by convertFromListToDictEnemy:
    # level, attack, defense, health, mana, luck, xp
    if (enemyList[2] == "warrior"):
        enemyList.append(1)    # level
        enemyList.append(10)   # attack
        enemyList.append(.5)   # defense
        enemyList.append(80)   # health
        enemyList.append(100)  # mana
        enemyList.append(5)    # luck
        enemyList.append(10)   # xp
    if (enemyList[2] == "assassin"):
        enemyList.append(1)    # level
        enemyList.append(7)    # attack
        enemyList.append(.3)   # defense
        enemyList.append(60)   # health
        enemyList.append(100)  # mana
        enemyList.append(15)   # luck
        enemyList.append(10)   # xp
    if (enemyList[2] == "sorcerer"):
        enemyList.append(1)    # level
        enemyList.append(8)    # attack
        enemyList.append(.5)   # defense
        enemyList.append(70)   # health
        enemyList.append(100)  # mana
        enemyList.append(5)    # luck
        enemyList.append(10)   # xp
    # Consistency fix: every other character dict in this file (player,
    # average/deep enemies) keys this slot "abilities"; this one said "moves",
    # which looks like a copy-paste slip.
    enemyCharDict = {'stats': {}, 'inventory': {}, "abilities": {}}
    convertFromListToDictEnemy(enemyList, enemyCharDict)
    combatSim(mainCharDict, enemyCharDict)
def averageCaveFight(mainCharDict):
    """Spawn a mid-tier enemy (random class and element) and run combat."""
    print("This is your average cave fight")
    randEnemyElementPool = []
    randEnemyClassPool = ["warrior", "assassin", "sorcerer"]
    enemyList = []
    if (mainCharDict['stats']['clan'] == "water"):
        randEnemyElementPool.append("fire")
        randEnemyElementPool.append("forest")
    elif (mainCharDict['stats']['clan'] == "fire"):
        randEnemyElementPool.append("forest")
        randEnemyElementPool.append("water")
    else:
        randEnemyElementPool.append("water")
        randEnemyElementPool.append("fire")
    classDecider = random.randint(0, 2)
    elementDecider = random.randint(0, 1)
    enemyList.append("Average Enemy")
    enemyList.append(randEnemyElementPool[elementDecider])
    enemyList.append(randEnemyClassPool[classDecider])
    # Stat order: level, attack, defense, health, mana, luck, xp
    if (enemyList[2] == "warrior"):
        enemyList.append(5)    # level
        enemyList.append(18)   # attack
        enemyList.append(.5)   # defense
        enemyList.append(170)  # health
        enemyList.append(100)  # mana
        enemyList.append(5)    # luck
        enemyList.append(40)   # xp
    if (enemyList[2] == "assassin"):
        # Fix: level was 1, matching the shallow-cave copy it was pasted from;
        # the warrior entry of this tier is level 5 — TODO confirm intended tier.
        enemyList.append(5)    # level
        enemyList.append(14)   # attack
        enemyList.append(.3)   # defense
        enemyList.append(145)  # health
        enemyList.append(100)  # mana
        enemyList.append(15)   # luck
        enemyList.append(40)   # xp
    if (enemyList[2] == "sorcerer"):
        # Fix: level was 1 (same copy-paste slip as the assassin entry).
        enemyList.append(5)    # level
        enemyList.append(16)   # attack
        enemyList.append(.5)   # defense
        enemyList.append(160)  # health
        enemyList.append(100)  # mana
        enemyList.append(5)    # luck
        enemyList.append(40)   # xp
    enemyCharDict = {'stats': {}, 'inventory': {}, "abilities": {}}
    convertFromListToDictEnemy(enemyList, enemyCharDict)
    combatSim(mainCharDict, enemyCharDict)
def deepCaveFight(mainCharDict):
    """Spawn the strongest enemy tier (random class and element) and run combat."""
    print("This is a deep cave fight")
    randEnemyElementPool = []
    randEnemyClassPool = ["warrior", "assassin", "sorcerer"]
    enemyList = []
    if (mainCharDict['stats']['clan'] == "water"):
        randEnemyElementPool.append("fire")
        randEnemyElementPool.append("forest")
    elif (mainCharDict['stats']['clan'] == "fire"):
        randEnemyElementPool.append("forest")
        randEnemyElementPool.append("water")
    else:
        randEnemyElementPool.append("water")
        randEnemyElementPool.append("fire")
    classDecider = random.randint(0, 2)
    elementDecider = random.randint(0, 1)
    enemyList.append("HUGE Enemy")
    enemyList.append(randEnemyElementPool[elementDecider])
    enemyList.append(randEnemyClassPool[classDecider])
    # Stat order: level, attack, defense, health, mana, luck, xp
    if (enemyList[2] == "warrior"):
        enemyList.append(5)    # level
        enemyList.append(28)   # attack
        enemyList.append(.5)   # defense
        enemyList.append(300)  # health
        enemyList.append(100)  # mana
        enemyList.append(5)    # luck
        enemyList.append(60)   # xp
    if (enemyList[2] == "assassin"):
        # Fix: level was 1 while the warrior of this tier is 5 — aligned to 5;
        # TODO confirm whether the deep tier should actually be higher still.
        enemyList.append(5)    # level
        enemyList.append(23)   # attack
        enemyList.append(.3)   # defense
        enemyList.append(250)  # health
        enemyList.append(100)  # mana
        enemyList.append(25)   # luck
        enemyList.append(60)   # xp
    if (enemyList[2] == "sorcerer"):
        # Fix: level was 1 (same copy-paste slip as the assassin entry).
        enemyList.append(5)    # level
        enemyList.append(25)   # attack
        enemyList.append(.5)   # defense
        enemyList.append(280)  # health
        enemyList.append(100)  # mana
        enemyList.append(5)    # luck
        enemyList.append(60)   # xp
    enemyCharDict = {'stats': {}, 'inventory': {}, "abilities": {}}
    convertFromListToDictEnemy(enemyList, enemyCharDict)
    combatSim(mainCharDict, enemyCharDict)
def shop(mainCharDict):
    """Run the potion shop: list wares, take an order, persist the purchase.

    Bug fixes:
      * the deducted gold was computed in a local and then the local was reset
        from the dict, so purchases were effectively free — the new balance is
        now written back to the inventory;
      * inventory.update({item: qty}) overwrote any stock the player already
        had — quantities now accumulate;
      * the post-purchase "not enough gold" check compared the *reset* balance
        and was dead code — replaced by a plain else branch.
    """
    print("**You walk into a dusty old shop, behind the counter sits an old store owner**")
    print("Hello traveller, I am the potion seller, please take a look at my wares\n")
    inventory = mainCharDict['inventory']
    currentGold = inventory['gold']
    print("You currently have", currentGold, "gold")
    shopDict = {"items": {"health potion": {"name": "health potion", "cost": 15},
                          "mana potion": {"name": "mana potion", "cost": 10}}}
    num = 1
    shopList = []
    print("Shop inventory:\n")
    for i in shopDict['items']:
        print("Enter", num, "for:")
        print(shopDict['items'][i]['name'])
        print("cost:", shopDict['items'][i]['cost'], "gold\n")
        shopList.append(shopDict['items'][i]['name'])
        num += 1
    itemIndex = int(input("Enter the item # of what you would like to buy: "))
    while (not (1 <= itemIndex <= len(shopList))):
        print("I'm sorry we don't have that item in stock, please enter a valid item #")
        itemIndex = int(input("Enter the item # of what you would like to buy: "))
    selectedItem = shopList[itemIndex - 1]
    selectedItemPrice = shopDict['items'][selectedItem]['cost']
    print("You chose", selectedItem)
    itemQuantity = int(input("How many " + selectedItem + "'s would you like? (Enter number amount)"))
    transactionTotal = itemQuantity * selectedItemPrice
    print("The total for your", itemQuantity, selectedItem + "(s) is", transactionTotal, "gold")
    buyDecision = input("Would you like to finalize this transaction? (Enter yes or no)")
    if (buyDecision == "yes"):
        if (currentGold >= transactionTotal):
            currentGold -= transactionTotal
            inventory['gold'] = currentGold
            inventory[selectedItem] = inventory.get(selectedItem, 0) + itemQuantity
            print("\nGood deal! Enjoy your items!")
            print("You have", currentGold, " gold left")
        else:
            print("\nWell dang traveller, looks like you don't have enough cheddar...")
            print("Come back next time with enough dough and maybe we'll talk then")
    else:
        print("Alright, then get the hell out of my shop and come back when you want to buy something")
    print("Here's your inventory:", inventory)
    save(mainCharDict)
def main():
    """Top-level menu loop; dispatches on the player's selection until quit."""
    running = True
    while running:
        print("\n")
        print("WELCOME TO PLIGHT OF FRICTION\n\n")
        print("Enter 0 if you would like to save the game and quit")
        print("Enter 1 if you would like to create a new character for the game")
        print("Enter 2 if you would like to load a character into the game")
        print("Enter 3 if you would like to go into dungeon mode")
        print("Enter 4 if you would like to visit the shop")
        print("Enter 5 if you want to view your stats")
        userControl = input("Please type in your selection: ")
        if userControl == "0":
            # save and quit
            save(mainChar)
            print("Thanks for playing! See you soon!")
            running = False
        elif userControl == "1":
            # create a new character (only after the overwrite warning)
            if warningForCharCreation():
                mainCharList = welcome([])
                convertFromListToDict(mainCharList, mainChar)
                statDisplay(mainChar)
        elif userControl == "2":
            # load character
            load(mainChar)
        elif userControl == "3":
            # cave selection to go into combat
            caveSelection(mainChar)
        elif userControl == "4":
            # go to shop
            shop(mainChar)
        elif userControl == "5":
            # look at stats on character
            statDisplay(mainChar)
# Shared player-state dict mutated in place by every menu action.
mainChar = {'stats': {}, 'inventory': {}, 'abilities': {}}

# Guard the entry point so the module can be imported (e.g. for testing)
# without immediately starting the interactive game loop.
if __name__ == "__main__":
    main()
|
from django.contrib import admin
from django.urls import path
from bandas import views
# Route table for the "bandas" app: band listing/CRUD plus album pages.
urlpatterns = [
    path("", views.HomeView.as_view(), name="home"),  # landing page
    path("bandas/", views.IndexView.as_view(), name="index"),  # band list
    path("albuns/", views.AlbunsView.as_view(), name="albuns"),  # album list
    path("bandas/<int:pk>", views.detail, name="detail"),  # band detail (function-based view)
    path("bandas/adicionar/", views.AdicionarView.as_view(), name="adicionar"),  # create band
    path("bandas/sucesso/", views.BandaCriadaView.as_view(), name="sucesso"),  # creation success page
    path("bandas/editar/<int:pk>", views.editar_banda, name="editar"),  # edit band
    path("bandas/apagar/<int:pk>", views.apagar_banda, name="apagar"),  # delete band
    path("bandas/adicionar-album/<int:pk>", views.criar_albuns, name="criar_album"),  # add album to a band
]
|
#!/usr/bin/env python
import os
import argparse
from FileProcessing import ProcessFile
def run_deamon(args):
    """Validate the CLI directories, then process every non-hidden file
    currently sitting in the input directory."""
    directories = validate_args(args)
    input_dir = directories["input"]
    # Absolute paths for each entry in the input directory, skipping hidden
    # (dot-prefixed) names.
    file_paths_to_process = [
        os.path.join(input_dir, name)
        for name in os.listdir(input_dir)
        if not name.startswith(".")
    ]
    # Do the work
    ProcessFile.process_files(file_paths_to_process, directories)
def create_arg_parser():
    """Build the CLI parser: four required directory arguments."""
    parser = argparse.ArgumentParser(description='A PDF Metadata Collector')
    required_dirs = (
        ('--input', 'The input directory where raw pdfs are uploaded for processing.'),
        ('--wip', 'The directory where pdfs are kept as they are being processed.'),
        ('--complete', 'The directory where fully processed pdfs are moved.'),
        ('--reject', 'The directory where media is moved when it cannot be processed'),
    )
    for flag, help_text in required_dirs:
        parser.add_argument(flag, help=help_text, required=True)
    return parser
def validate_args(args):
    """Check that all four directory arguments exist; return them as absolute paths.

    Raises Exception (chained to the underlying cause) if any directory is
    missing or the args cannot be read.
    """
    try:
        directories = {
            "input": args.input,
            "wip": args.wip,
            "complete": args.complete,
            "reject": args.reject
        }
        for key, directory in list(directories.items()):
            abs_path = os.path.abspath(directory)
            if not os.path.isdir(abs_path):
                raise Exception("The specified '{0}' directory '{1}' does not exist.".format(key, directory))
            # Normalise relative inputs to their absolute form.
            if directory != abs_path:
                directories[key] = abs_path
        return directories
    except Exception as ex:
        raise Exception("The arguments could not be validated.") from ex
|
"""
Copyright (c) 2016-2020 Keith Sterling http://www.keithsterling.com
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from programy.parser.exceptions import ParserException
from programy.utils.text.text import TextUtils
class TemplateGraph:
    """Parses AIML <template> content into a tree of template nodes.

    Node classes are looked up by tag name in the brain's template factory,
    so the supported tag set is whatever the factory has registered.
    """

    def __init__(self, aiml_parser):
        self._aiml_parser = aiml_parser
        self._template_factory = aiml_parser.brain.template_factory

    @property
    def aiml_parser(self):
        return self._aiml_parser

    @property
    def template_factory(self):
        return self._template_factory

    #
    # TEMPLATE_EXPRESSION ::== TEXT | TAG_EXPRESSION | (TEMPLATE_EXPRESSION)*
    #
    def parse_template_expression(self, pattern):
        """Parse *pattern* into a fresh base node tree and return the root."""
        node = self.get_base_node()
        node.parse_template_node(self, pattern)
        return node

    def get_node_class_by_name(self, name):
        """Return the registered node class for *name*.

        Raises ParserException when no such node is registered.
        """
        if self._template_factory.exists(name):
            return self._template_factory.new_node_class(name)
        raise ParserException("No node [%s] registered in Template Node Factory" % (name))

    # Helper function to return TemplateNode
    def get_base_node(self):
        base_class = self.get_node_class_by_name('base')
        return base_class()

    # Helper function to return TemplateWordNode
    def get_word_node(self, text):
        word_class = self.get_node_class_by_name('word')
        return word_class(text)

    def parse_tag_expression(self, expression, branch):
        """Parse one tag under *branch*; unknown tags become XML text nodes."""
        tag_name = TextUtils.tag_from_text(expression.tag)
        if self._template_factory.exists(tag_name):
            # Fix: the original special-cased tag_name == "condition" with an
            # if/else whose two branches were byte-identical; collapsed to the
            # single instantiation.
            node_instance = self._template_factory.new_node_class(tag_name)()
            node_instance.parse_expression(self, expression)
            branch.children.append(node_instance)
        else:
            self.parse_unknown_as_xml_node(expression, branch)

    #######################################################################################################
    # UNKNOWN NODE
    # When its a node we don't know, add it as a text node. This deals with html nodes creeping into the text
    def parse_unknown_as_xml_node(self, expression, branch):
        xml_node_class = self.get_node_class_by_name('xml')
        xml_node = xml_node_class()
        branch.children.append(xml_node)
        xml_node.parse_expression(self, expression)
|
outdir = 'WP_plots'
import os
# Create the output directory (and any parents) if missing. Replaces the
# shell call os.system('mkdir -p ...') with the portable stdlib equivalent.
os.makedirs(outdir, exist_ok=True)
outdir += '/'
from testing import makeEffPlots_async
# Summed DeepJet b-discriminator expression (all b-flavour output nodes).
dfudsprob='prob_isB+prob_isBB+prob_isLeptB'
# Tree-association list for the ttbar prediction sample.
infile='/eos/cms/store/cmst3/group/dehep/DeepJet/Predictions/Jan/DF_FT_fullRec_reg_BN/ttbar/tree_association.txt'
# Discriminator cuts defining the tight/medium/loose working points.
TWP = '>0.825'
MWP = '>0.345'
LWP = '>0.055'
# b-jet efficiency vs jet pT in ttbar (denominator: true b jets).
makeEffPlots_async(infile, #input file or file list
                   ['DJ: tight WP','DJ: medium WP','DJ: loose WP'], #legend names (needs to be list)
                   'jet_pt', #variable to plot
                   [dfudsprob+TWP,
                    dfudsprob+MWP,
                    dfudsprob+LWP],
                   'jet_pt>20 && (isB+isBB+isLeptonicB+isLeptonicB_C)', #cut to apply
                   'auto', #line color and style (e.g. 'red,dashed')
                   outdir+'DF_ID_jet_pt.pdf', #output file (pdf)
                   'jet p_{T} [GeV]', #xaxisname
                   'b jet efficiency' , #yaxisname (typo fix: was 'j jet efficiency'; this is the b-jet selection, cf. the eta/npv plots)
                   rebinfactor=5) #rebin by 5
# Light-jet (udsg) mistag efficiency vs jet pT in ttbar.
makeEffPlots_async(infile, #input file or file list
                   ['DJ: tight WP','DJ: medium WP','DJ: loose WP'], #legend names (needs to be list)
                   'jet_pt', #variable to plot
                   [dfudsprob+TWP,
                    dfudsprob+MWP,
                    dfudsprob+LWP],
                   'jet_pt>20 && (isUD + isS + isG)', #cut to apply
                   'auto', #line color and style (e.g. 'red,dashed')
                   outdir+'DF_ID_jet_pt_light.pdf', #output file (pdf)
                   'jet p_{T} [GeV]', #xaxisname
                   'light jet efficiency' , #yaxisname
                   rebinfactor=5) #rebin by 5
# b-jet efficiency vs jet eta in ttbar.
makeEffPlots_async(infile, #input file or file list
                   ['DJ: tight WP','DJ: medium WP','DJ: loose WP'], #legend names (needs to be list)
                   'jet_eta', #variable to plot
                   [dfudsprob+TWP,
                    dfudsprob+MWP,
                    dfudsprob+LWP],
                   'jet_pt>30 && (isB+isBB+isLeptonicB+isLeptonicB_C)', #cut to apply
                   'auto', #line color and style (e.g. 'red,dashed')
                   outdir+'DF_ID_jet_eta.pdf', #output file (pdf)
                   'jet #eta', #xaxisname
                   'b jet efficiency' , #yaxisname
                   rebinfactor=5) #rebin by 5
# Light-jet mistag efficiency vs jet eta in ttbar.
makeEffPlots_async(infile, #input file or file list
                   ['DJ: tight WP','DJ: medium WP','DJ: loose WP'], #legend names (needs to be list)
                   'jet_eta', #variable to plot
                   [dfudsprob+TWP,
                    dfudsprob+MWP,
                    dfudsprob+LWP],
                   'jet_pt>30 && (isUD + isS + isG)', #cut to apply
                   'auto', #line color and style (e.g. 'red,dashed')
                   outdir+'DF_ID_jet_eta_light.pdf', #output file (pdf)
                   'jet #eta', #xaxisname
                   'light jet efficiency' , #yaxisname
                   rebinfactor=5) #rebin by 5
# b-jet efficiency vs pile-up (npv) in ttbar.
makeEffPlots_async(infile, #input file or file list
                   ['DJ: tight WP','DJ: medium WP','DJ: loose WP'], #legend names (needs to be list)
                   'npv', #variable to plot
                   [dfudsprob+TWP,
                    dfudsprob+MWP,
                    dfudsprob+LWP],
                   'jet_pt>30 && (isB+isBB+isLeptonicB+isLeptonicB_C)', #cut to apply
                   'auto', #line color and style (e.g. 'red,dashed')
                   outdir+'DF_ID_jet_npv.pdf', #output file (pdf)
                   'npv', #xaxisname
                   'b jet efficiency' , #yaxisname
                   rebinfactor=5) #rebin by 5
# Light-jet mistag efficiency vs pile-up (npv) in ttbar.
makeEffPlots_async(infile, #input file or file list
                   ['DJ: tight WP','DJ: medium WP','DJ: loose WP'], #legend names (needs to be list)
                   'npv', #variable to plot (bug fix: was 'jet_pt', but the output name and x-axis label say npv)
                   [dfudsprob+TWP,
                    dfudsprob+MWP,
                    dfudsprob+LWP],
                   'jet_pt>30 && (isUD + isS + isG)', #cut to apply
                   'auto', #line color and style (e.g. 'red,dashed')
                   outdir+'DF_ID_jet_npv_light.pdf', #output file (pdf)
                   'npv', #xaxisname
                   'light jet efficiency' , #yaxisname
                   rebinfactor=5) #rebin by 5
# Switch to the QCD prediction sample for the remaining plots.
infile='/afs/cern.ch/user/j/jkiesele/eos_DeepJet/Predictions/Jan/DF_FT_fullRec_reg_BN/qcd_merged_PREDICTED/tree_association.txt'
# b-jet efficiency vs jet pT in QCD (no rebinning, fixed x range).
makeEffPlots_async(infile, #input file or file list
                   ['DJ: tight WP','DJ: medium WP','DJ: loose WP'], #legend names (needs to be list)
                   'jet_pt', #variable to plot
                   [dfudsprob+TWP,
                    dfudsprob+MWP,
                    dfudsprob+LWP],
                   'jet_pt>20 && (isB+isBB+isLeptonicB+isLeptonicB_C)', #cut to apply
                   'auto', #line color and style (e.g. 'red,dashed')
                   outdir+'DF_ID_jet_pt_QCD.pdf', #output file (pdf)
                   'jet p_{T} [GeV]', #xaxisname
                   'b jet efficiency' , #yaxisname
                   rebinfactor=1, Xmin=20., Xmax=900.)
# Light-jet mistag efficiency vs jet pT in QCD (log y scale).
makeEffPlots_async(infile, #input file or file list
                   ['DJ: tight WP','DJ: medium WP','DJ: loose WP'], #legend names (needs to be list)
                   'jet_pt', #variable to plot
                   [dfudsprob+TWP,
                    dfudsprob+MWP,
                    dfudsprob+LWP],
                   'jet_pt>20 && (isUD + isS + isG)', #cut to apply
                   'auto', #line color and style (e.g. 'red,dashed')
                   outdir+'DF_ID_jet_pt_light_QCD.pdf', #output file (pdf)
                   'jet p_{T} [GeV]', #xaxisname
                   'light jet efficiency' , #yaxisname
                   rebinfactor=1, SetLogY = True, minimum = 0.0005, maximum = 1.5, Xmin=20., Xmax=900.)
# b-jet efficiency vs |eta| in QCD, restricted to 30-150 GeV jets.
makeEffPlots_async(infile, #input file or file list
                   ['DJ: tight WP','DJ: medium WP','DJ: loose WP'], #legend names (needs to be list)
                   'fabs(jet_eta)', #variable to plot
                   [dfudsprob+TWP,
                    dfudsprob+MWP,
                    dfudsprob+LWP],
                   'jet_pt>30 && jet_pt <150 && (isB+isBB+isLeptonicB+isLeptonicB_C)', #cut to apply
                   'auto', #line color and style (e.g. 'red,dashed')
                   outdir+'DF_ID_jet_eta_QCD.pdf', #output file (pdf)
                   'jet #eta', #xaxisname
                   'b jet efficiency' , #yaxisname
                   rebinfactor=5) #rebin by 5
# Light-jet mistag efficiency vs |eta| in QCD (log y scale), 30-150 GeV jets.
makeEffPlots_async(infile, #input file or file list
                   ['DJ: tight WP','DJ: medium WP','DJ: loose WP'], #legend names (needs to be list)
                   'fabs(jet_eta)', #variable to plot
                   [dfudsprob+TWP,
                    dfudsprob+MWP,
                    dfudsprob+LWP],
                   'jet_pt>30 && jet_pt <150 && (isUD + isS + isG)', #cut to apply
                   'auto', #line color and style (e.g. 'red,dashed')
                   outdir+'DF_ID_jet_eta_light_QCD.pdf', #output file (pdf)
                   'jet #eta', #xaxisname
                   'light jet efficiency' , #yaxisname
                   rebinfactor=5, SetLogY = True, minimum = 0.0005, maximum = 1.5)
# b-jet efficiency vs pile-up (npv) in QCD, 30-150 GeV jets.
makeEffPlots_async(infile, #input file or file list
                   ['DJ: tight WP','DJ: medium WP','DJ: loose WP'], #legend names (needs to be list)
                   'npv', #variable to plot
                   [dfudsprob+TWP,
                    dfudsprob+MWP,
                    dfudsprob+LWP],
                   'jet_pt>30 && jet_pt <150 && (isB+isBB+isLeptonicB+isLeptonicB_C)', #cut to apply
                   'auto', #line color and style (e.g. 'red,dashed')
                   outdir+'DF_ID_jet_npv_QCD.pdf', #output file (pdf)
                   'npv', #xaxisname
                   'b jet efficiency' , #yaxisname
                   rebinfactor=5, Xmin=9, Xmax=65)
# Light-jet mistag efficiency vs pile-up (npv) in QCD (log y scale), 30-150 GeV jets.
makeEffPlots_async(infile, #input file or file list
                   ['DJ: tight WP','DJ: medium WP','DJ: loose WP'], #legend names (needs to be list)
                   'npv', #variable to plot
                   [dfudsprob+TWP,
                    dfudsprob+MWP,
                    dfudsprob+LWP],
                   'jet_pt>30 && jet_pt <150 &&(isUD + isS + isG)', #cut to apply
                   'auto', #line color and style (e.g. 'red,dashed')
                   outdir+'DF_ID_jet_npv_light_QCD.pdf', #output file (pdf)
                   'npv', #xaxisname
                   'light jet efficiency' , #yaxisname
                   rebinfactor=5, SetLogY = True, minimum = 0.0005, maximum = 1.5, Xmin=9, Xmax=65)
|
import json
from flask import Blueprint, render_template, request, redirect, url_for
from src.models.lessons.lesson import Lesson
from src.models.teachers.teacher import Teacher
from src.models.attendance.attendance import Attendance
__author__ = 'ahosha'
# Blueprint for all lesson CRUD routes; registered under the app elsewhere.
lesson_blueprint = Blueprint('lessons', __name__)
@lesson_blueprint.route('/')
def index():
    """Render the list of all lessons."""
    return render_template('lessons/lesson_index.jinja2', lessons=Lesson.all())
@lesson_blueprint.route('/new', methods=['GET', 'POST'])
def create_lesson():
    """Show the new-lesson form (GET) or persist a submitted lesson (POST)."""
    if request.method == 'POST':
        form = request.form
        new_lesson = Lesson(form['name'], form['teacher'], form['date'],
                            form['time'], form['lessontype'])
        new_lesson.save_to_mongo()
        return redirect(url_for('.index'))
    return render_template("lessons/new_lesson.jinja2", teachers=Teacher.all(),
                           types=Lesson.get_lesson_types())
@lesson_blueprint.route('/edit/<string:lesson_id>', methods=['GET', 'POST'])
def edit_lesson(lesson_id):
    """Edit an existing lesson: GET renders the form, POST applies the changes."""
    lesson = Lesson.get_by_id(lesson_id)
    if request.method == 'POST':
        form = request.form
        lesson.name = form['name']
        lesson.teacherusername = form['teacherusername']
        lesson.date = form['date']
        lesson.time = form['time']
        lesson.lessontype = form['lessontype']
        lesson.save_to_mongo()
        return redirect(url_for('.index'))
    return render_template("lessons/edit_lesson.jinja2", lesson=lesson,
                           teacherusername=lesson.teacherusername,
                           teachers=Teacher.all(),
                           types=Lesson.get_lesson_types(),
                           curlessontype=lesson.lessontype,
                           attendances=Attendance.get_by_lessonname(lesson.name))
@lesson_blueprint.route('/delete/<string:lesson_id>')
def delete_lesson(lesson_id):
    """Remove the lesson and return to the index."""
    Lesson.get_by_id(lesson_id).delete()
    return redirect(url_for('.index'))
@lesson_blueprint.route('/<string:lesson_id>')
def lesson_page(lesson_id):
    """Render a single lesson together with its attendance records."""
    lesson = Lesson.get_by_id(lesson_id)
    attendances = Attendance.get_by_lessonname(lesson.name)
    # Fix: the lesson was previously fetched from the database a second time
    # just for the template; reuse the object already loaded.
    return render_template('lessons/lesson.jinja2', lesson=lesson, attendances=attendances)
|
#!/usr/bin/env python3
# Documentation: https://docs.python.org/3/library/socket.html
import socket, json, sys
sys.path.append("..")
import socket_utils
import requests
HOST = ""    # Empty string means to listen on all IP's on the machine, also works with IPv6.
             # Note "0.0.0.0" also works but only with IPv4.
PORT = 63000 # Port to listen on (non-privileged ports are > 1023).
ADDRESS = (HOST, PORT)  # Bind target for the listening socket below.
def main():
    """Listen for Agent Pi connections and relay booking requests to the API.

    For each connection, reads a JSON credential payload and forwards it to
    the master-server REST API:
      - finish == 0 and password == "none" -> findBooking2
      - finish == 0 otherwise              -> findBooking
      - finish != 0                        -> returnCar
    The API's text response ("True" or anything else) is mapped onto a small
    JSON status object sent back to the Agent Pi.
    """
    # NOTE(review): master-server address is hard-coded and repeated below;
    # hoisted into one local — consider making it configurable.
    api_base = "http://220.244.177.218:5000/api/"
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.bind(ADDRESS)
        s.listen()
        print("Listening on {}...".format(ADDRESS))
        while True:
            print("Waiting for Agent Pi...")
            conn, addr = s.accept()
            with conn:
                print("Connected to {}".format(addr))
                print()
                user = socket_utils.recvJson(conn)
                param = {
                    'username' : user["username"],
                    'password' : user["password"],
                    'carid' : user["carid"]
                }
                print(user["date"])
                if (user["finish"] == 0):
                    # Login / booking lookup; endpoint depends on whether a
                    # password was supplied.
                    endpoint = "findBooking2" if user["password"] == "none" else "findBooking"
                    response = requests.get(api_base + endpoint, params = param)
                    print(response.text)
                    if (response.text == "True"):
                        socket_utils.sendJson(conn, { "authenticated": True })
                    else:
                        socket_utils.sendJson(conn, { "nope": True })
                else:
                    # Car return flow.
                    response = requests.get(api_base + "returnCar", params = param)
                    if (response.text == "True"):
                        socket_utils.sendJson(conn, { "returned": True })
                    else:
                        socket_utils.sendJson(conn, { "nobooking": True })
# Execute program (only when run directly, not on import).
if __name__ == "__main__":
    main()
|
import pymel.core as pm
from PyFlow.Core.Common import *
from PyFlow.Core import FunctionLibraryBase
from PyFlow.Core import IMPLEMENT_NODE
class MayaDisplayLib(FunctionLibraryBase):
    """PyFlow function library exposing Maya's current-unit queries and setters."""
    def __init__(self, packageName):
        super(MayaDisplayLib, self).__init__(packageName)
    @staticmethod
    @IMPLEMENT_NODE(returns=("StringPin", ""), meta={'Category': 'Display', 'Keywords': []})
    def currentLinearUnit(fullName=("BoolPin", False)):
        """Query Maya's current linear unit.

        The `f` flag presumably selects the full unit name over its
        abbreviation — TODO confirm against the pm.currentUnit docs.
        """
        return pm.currentUnit(query=True, linear=True, f=fullName)
    @staticmethod
    @IMPLEMENT_NODE(returns=None, nodeType=NodeTypes.Callable, meta={'Category': 'Display', 'Keywords': []})
    def setCurrentLinearUnit(unit=("StringPin", "cm", {"ValueList": ["mm", "millimeter", "cm", "centimeter", "m", "meter", "km", "kilometer", "in", "inch", "ft", "foot", "yd", "yard", "mi", "mile"]})):
        """Set Maya's linear unit to one of the values offered in the pin's list."""
        return pm.currentUnit(linear=unit)
    @staticmethod
    @IMPLEMENT_NODE(returns=("StringPin", ""), meta={'Category': 'Display', 'Keywords': []})
    def currentAngularUnit(fullName=("BoolPin", False)):
        """Query Maya's current angular unit (see currentLinearUnit re: `f`)."""
        return pm.currentUnit(query=True, angle=True, f=fullName)
    @staticmethod
    @IMPLEMENT_NODE(returns=None, nodeType=NodeTypes.Callable, meta={'Category': 'Display', 'Keywords': []})
    def setCurrentAngularUnit(unit=("StringPin", "deg", {"ValueList": ["deg", "degree", "rad", "radian"]})):
        """Set Maya's angular unit to degrees or radians."""
        return pm.currentUnit(angle=unit)
    @staticmethod
    @IMPLEMENT_NODE(returns=("StringPin", ""), meta={'Category': 'Display', 'Keywords': []})
    def currentTimeUnit():
        """Query Maya's current time unit (frame-rate setting)."""
        return pm.currentUnit(query=True, time=True)
|
import FWCore.ParameterSet.Config as cms
# cmsRun configuration comparing MET-cleaning strategies (HBHE reflagging,
# cleaned tcMET, and the HBHE noise event filter) on MinimumBias data.
process = cms.Process("METCLEAN")
process.load("FWCore.MessageService.MessageLogger_cfi")
process.MessageLogger.cerr.FwkReport.reportEvery = 100
# Histogram output for the comparison analyzer.
process.TFileService = cms.Service("TFileService",
                                   fileName = cms.string("metcleaningcomparison.root"),
                                   closeFileFast = cms.untracked.bool(False)
                                   )
#process.maxEvents = cms.untracked.PSet( output = cms.untracked.int32(100) )
process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )  # -1 = all events
#######################################################################
# Configure input source
process.source = cms.Source("PoolSource",
    # replace 'myfile.root' with the source file you want to use
    fileNames = cms.untracked.vstring(
#        'file:myfile.root'
        '/store/data/Commissioning10/MinimumBias/RECO/v9/000/135/131/3698D99C-705A-DF11-9275-000423D944FC.root'
    )
)
#######################################################################
# Configure trigger requirements
#process.load("L1TriggerConfig.L1GtConfigProducers.L1GtTriggerMaskTechTrigConfig_cff")
#process.load("HLTrigger.HLTfilters.hltLevel1GTSeed_cfi")
#process.hltLevel1GTSeed.L1TechTriggerSeeding = cms.bool(True)
#process.hltLevel1GTSeed.L1SeedsLogicalExpression = cms.string("${L1TTBITS}")
process.load("HLTrigger.HLTfilters.hltHighLevel_cfi")
#
# Noise data:
#
# Select jet-triggered events with no beam crossing (noise-enriched sample):
# jet trigger fired AND the BSC/BPTX trigger did not.
process.jetnobptx = process.hltHighLevel.clone(HLTPaths = cms.vstring("HLT_L1Jet10U_NoBPTX"))
process.bscorbptx = process.hltHighLevel.clone(HLTPaths = cms.vstring("HLT_L1_BscMinBiasOR_BptxPlusORMinus"))
process.trigpath = cms.Path(process.jetnobptx*(~process.bscorbptx))
#######################################################################
# Configure reflagger cleaning:
# Standard configurations
process.load('Configuration/StandardSequences/Services_cff')
process.load('Configuration/StandardSequences/GeometryExtended_cff')
process.load('Configuration/StandardSequences/MagneticField_AutoFromDBCurrent_cff')
process.load('Configuration/StandardSequences/Reconstruction_cff')
process.load('Configuration/StandardSequences/FrontierConditions_GlobalTag_cff')
process.load('Configuration/EventContent/EventContent_cff')
import JetMETAnalysis.HcalReflagging.RemoveAddSevLevel as RemoveAddSevLevel
# Register the user-defined flag bit with severity 10 so reflagged hits are
# treated as problematic downstream.
process.hcalRecAlgos=RemoveAddSevLevel.AddFlag(process.hcalRecAlgos,"UserDefinedBit0",10)
# HBHE RecHit reflagger
process.load("JetMETAnalysis/HcalReflagging/hbherechitreflaggerJETMET_cfi")
process.hbherecoReflagged = process.hbherechitreflaggerJETMET.clone()
process.hbherecoReflagged.debug=0
process.GlobalTag.globaltag ='GR10_P_V4::All'
# Set energy threshold for identifying noise
#process.hbherecoReflagged.Ethresh=0.5
# Set number of channels/hpd that must be above threshold in order for the HPD to be marked noisy
#process.hbherecoReflagged.Nhits=14
# Turn this on to check # of hits per RBX, rather than per HPD
#process.hbherecoReflagged.RBXflag=False
# Use the reflagged HBHE RecHits to make the CaloTowers
process.cleanTowerMaker = process.towerMaker.clone(hbheInput = "hbherecoReflagged")
process.cleanTowerMakerWithHO = process.towerMakerWithHO.clone(hbheInput = "hbherecoReflagged")
# Path and EndPath definitions
process.reflagging_step = cms.Path(process.hbherecoReflagged)
process.rereco_step = cms.Path(process.caloTowersRec*(process.recoJets*process.recoJetIds+process.recoTrackJets)*process.recoJetAssociations*process.metreco) # re-reco jets and met
#process.rereco_step = cms.Path(process.towerMaker*process.ak5CaloJets*process.met) # a simpler use case
#######################################################################
# Configure "tcMET" (Frank's/Avi's cleaning)
process.load("TCMETcleaned357.CleanedTCMETProducer.cleanedtcmetproducer_cfi")
process.tcMetClean.tcmetInputTag = cms.InputTag("met","","RECO") # recoCaloMETs_met__RECO
# Only the HCAL-based correction is enabled for this comparison.
process.tcMetClean.useHFcorrection = cms.bool(False)
process.tcMetClean.useECALcorrection = cms.bool(False)
process.tcMetClean.useHCALcorrection = cms.bool(True)
process.tcMetClean.alias = cms.string("TCcleanedCaloMET")
process.tcmetpath = cms.Path(process.tcMetClean)
#######################################################################
# Configure HCAL event filter cleaning
process.load('CommonTools/RecoAlgos/HBHENoiseFilter_cfi')
process.evfiltpath = cms.Path(process.HBHENoiseFilter)
# If you would like to also add a filter which cuts on the EMF of an RBX,
# we recommend that you add the line:
#process.HBHENoiseFilter.maxRBXEMF = cms.double(0.01)
#######################################################################
# Configure Endpath
# Output definition
process.output = cms.OutputModule(
    "PoolOutputModule",
    splitLevel = cms.untracked.int32(0),
#    outputCommands = process.RECOEventContent.outputCommands,
#    fileName = cms.untracked.string('/uscmst1b_scratch/lpc1/3DayLifetime/pdudero/output_file.root'),
    fileName = cms.untracked.string('pooloutput.root'),
    dataset = cms.untracked.PSet(
        dataTier = cms.untracked.string('RECO'),
        filterName = cms.untracked.string('')
    )
)
# Keep only the collections needed to compare original vs reflagged cleaning.
process.output.outputCommands = cms.untracked.vstring("drop *",
                                                      "keep CaloTowersSorted_*_*_*",
                                                      "keep *_TriggerResults_*_*",
                                                      "keep *_hbhereco_*_*",
                                                      "keep *_hbherecoReflagged_*_*",
                                                      "keep *_hfreco_*_*",
                                                      "keep *_hfrecoReflagged_*_*",
                                                      "keep recoCaloMETs_*_*_*",
                                                      "keep recoMETs_*_*_*")
#new rechit collection name is: HBHERecHitsSorted_hbherecoReflagged__METCLEAN
process.compare = cms.EDAnalyzer('METcleaningComparator',
dirtyInput = cms.untracked.PSet(
hbheRechitLabel = cms.untracked.InputTag("hbhereco"),
caloMETlabel = cms.untracked.InputTag("met","","RECO"),
# verbose = cms.untracked.bool(True)
),
tcmetCleanOutput = cms.untracked.PSet(
recoMETlabel = cms.untracked.InputTag("tcMetClean","","METCLEAN"),
# verbose = cms.untracked.bool(True)
),
reflagCleanOutput = cms.untracked.PSet(
hbheRechitLabel = cms.untracked.InputTag("hbherecoReflagged"),
caloMETlabel = cms.untracked.InputTag("met","","METCLEAN"),
# verbose = cms.untracked.bool(True)
),
evfiltCleanOutput = cms.untracked.PSet(
trgResultsLabel = cms.untracked.InputTag("TriggerResults","","METCLEAN"),
caloMETlabel = cms.untracked.InputTag("met","","METCLEAN"),
# verbose = cms.untracked.bool(True)
),
evfiltPathName = cms.untracked.string("evfiltpath"),
hbheFlagBit = cms.untracked.int32(31)
# bit 31 is UserDefinedBit0; this duplicates the setting inside hbherechitreflaggerJETMET_cfi.py
)
#######################################################################
# Schedule definition
process.out_step = cms.EndPath(process.compare*process.output)
process.schedule = cms.Schedule(process.trigpath,
process.tcmetpath,
process.reflagging_step,
process.rereco_step,
process.evfiltpath,
process.out_step)
|
class GPU_Discrete:
    """Reads a discrete GPU's name and temperature from sysfs dump files.

    ``get_temperature()`` returns a 3-element list::

        [current, minimum seen, maximum seen]

    Values are whatever integers the dump file contains (units not
    defined here — presumably degrees C; verify against the producer
    of ``sysfs/gpu_discrete_temp``).
    """

    def __init__(self):
        self.name = self.__name()          # human-readable GPU name
        self.temp_label = 'GPU Discrete'   # display label for this sensor
        self.temp_row = []                 # [cur, min, max]; empty until first read

    def get_name(self):
        """Return the GPU name parsed from the lspci-style dump line."""
        return self.name

    def get_temp_label(self):
        """Return the display label for this sensor."""
        return self.temp_label

    def get_temperature(self):
        """Refresh from the dump file and return [cur, min, max]."""
        self.__temperature()
        return self.temp_row

    def __name(self):
        # File holds one lspci-style line, e.g.
        # "01:00.0 VGA compatible controller: NVIDIA ...\n";
        # keep only the part after the controller tag.
        path = 'sysfs/gpu_discrete_name'
        with open(path, 'r') as f:
            gpu_name_str = f.readline()
        return gpu_name_str.split('VGA compatible controller: ')[1].rstrip('\n')

    def __temperature(self):
        # The last line of the dump file is the most recent sample.
        path = 'sysfs/gpu_discrete_temp'
        with open(path, 'r') as f:
            lines = f.readlines()
        if lines:
            temp_cur = int(lines[-1])
        elif self.temp_row:
            # Empty file: keep the last known reading.
            temp_cur = self.temp_row[0]
        else:
            # Empty file and no history yet: nothing to record.
            # (Original code raised IndexError here.)
            return
        if not self.temp_row:
            # First sample seeds current/min/max identically.
            self.temp_row = [temp_cur] * 3
        else:
            self.temp_row[0] = temp_cur
            # BUGFIX: track the minimum against the min slot [1]; the
            # original compared against the max slot [2], which wiped out
            # the recorded minimum whenever a higher reading arrived.
            self.temp_row[1] = min(temp_cur, self.temp_row[1])
            self.temp_row[2] = max(temp_cur, self.temp_row[2])
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.