index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
986,400 | e4a715eeea1024121fedbc4a4dba0b6973ce5113 | import subprocess,os,sys
from timeit import timeit
# timeit.timeit(stmt, setup, timer, number=??)
count = 10
def timingIt(prog):
    # Time `count` launches of the executable `prog` with timeit, discarding
    # the child's stdout.  The timed statement re-imports its dependencies
    # because timeit executes it in a fresh namespace.  (Python 2: print
    # statement below.)
    cmds = "import subprocess,os,sys;"
    cmds += "from timeit import timeit;"
    cmds += 'subprocess.call(["' + prog + '"],stdout=open(os.devnull, "w"))'
    print prog, timeit(cmds, number=count)
# Benchmark each compiled variant.  The trailing semicolons and the bare `1`
# expression at the end have no effect (leftovers).
timingIt("01.VectorsWithPrintf.exe");
timingIt("02.ArraysWithPrintf.exe");
timingIt("03.ArraysWithCout.exe");
timingIt("04.CWithPrintf.exe");
1
|
986,401 | 25ea6de399dc5476199d73f24e231d9a89a349ca | EPS = 0.00000001
if __name__ == "__main__":
    # Python 2 contest solution: T test cases; each case lists n groups of
    # hikers as (start, numHikers, velocity).  NOTE(review): despite the name,
    # "velocity" is multiplied by remaining distance, so it reads like time
    # per degree (larger = slower) — confirm against the problem statement.
    T = int(raw_input())
    memo = {}  # NOTE(review): never used below
    for cnt in xrange(T):
        n = int(raw_input())
        hikerInfo = []
        # STARTING POSITION, NUMBER OF CIRCLES, FASTEST POSITION
        for i in xrange(n):
            hikerInfo.append(map(int, raw_input().split()))
        # Expand each group into individual hikers with velocity + j.
        indInfo = []
        for start, numHikers, velocity in hikerInfo:
            for j in xrange(numHikers):
                indInfo.append({"start": start, "velocity": (velocity + j)})
        if len(indInfo) == 1:
            ans = 0
        else:
            # Time for hiker 0/1 to finish the first and second lap.
            firstLap1 = (360 - indInfo[0]["start"]) * indInfo[0]["velocity"]
            secondLap1 = firstLap1 + 360*indInfo[0]["velocity"]
            firstLap2 = (360 - indInfo[1]["start"]) * indInfo[1]["velocity"]
            secondLap2 = firstLap2 + 360*indInfo[1]["velocity"]
            # print firstLap1, secondLap1, firstLap2, secondLap2
            # One crossing can be avoided if either hiker finishes two laps
            # before the other finishes one — only the first two hikers are
            # considered here.
            if secondLap1 <= firstLap2 or secondLap2 <= firstLap1:
                ans = 1
            else:
                ans = 0
            #
        print "Case #%s: %s" % (cnt + 1, ans)
|
986,402 | 96b30501ea4bd19b8ea01da1ded25b7c87150dd1 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
"""
@Author :yicg
@Time : 2021/2/18 下午4:31
@Version : 1.0
@Description :
"""
import time

# 1. Basic operations
print(time.asctime())  # current time in fixed English format, e.g. Thu Feb 18 16:34:19 2021
print(time.time())  # current Unix timestamp, e.g. 1613637308.6508229
time.sleep(1)  # wait one second
print(time.localtime(1613637308.6508229))  # convert a timestamp into a struct_time tuple, e.g. time.struct_time(tm_year=2021, tm_mon=2, tm_mday=18, tm_hour=16, tm_min=35, tm_sec=8, tm_wday=3, tm_yday=49, tm_isdst=0)
print(time.localtime())  # struct_time tuple for the current moment
print(time.strftime("%Y-%m-%d %H:%M:%S",time.localtime()))  # format the current time, e.g. 2021-02-18 16:43:33
print("=================")
# 2. Compute the time two days ago
# current timestamp
now_timestamp=time.time()
# timestamp two days ago
before_timestamp=now_timestamp-60*60*24*2
# convert into a struct_time tuple
tump_time=time.localtime(before_timestamp)
print(time.strftime("%Y-%m-%d %H:%M:%S", tump_time))
print("=================")
|
986,403 | 489f4a9e178908812bfcf863dc9877b25b927c59 | import numpy as np
import cv2

# Load and display a sample image.  NOTE(review): cv2.imread returns None
# (no exception) when 'im1.jpg' is missing — imshow would then fail; confirm
# the file ships next to this script.
img = cv2.imread('im1.jpg')
cv2.imshow('original',img)
cv2.waitKey(0)  # block until any key is pressed
cv2.destroyAllWindows()
|
986,404 | 6ec44e03d75b794bea0f693466b6f85e2a85433c | from mpi4py import MPI
# Each rank contributes its own rank number to the reductions below.
comm=MPI.COMM_WORLD
rank=comm.Get_rank()
sendmsg=rank
print("I am rank {} I have sendmsg as {}".format(rank,sendmsg))
# reduce: only the root (rank 0) receives the sum; other ranks get None.
recvmsg1=comm.reduce(sendmsg,op=MPI.SUM,root=0)
if rank==0:
    print("rank zero",recvmsg1)
print("all reduce at rank {}".format(rank))
# allreduce: every rank receives the sum.
recvmsg2=comm.allreduce(sendmsg,op=MPI.SUM)
print("I am rank {} I have recvmsg2 op=MPI.SUM as {}".format(rank,recvmsg2)) |
986,405 | 93d1b40ff4803fb2846251325784c4b94961381b | from argparse import ArgumentParser
import logging
from run_tests import run_test
def main():
    """Entry point: run the configured test, or dump the parsed arguments.

    Command line:
        --testing    true/false flag (default: True).  The original used
                     ``type=bool``, which is broken with argparse: bool("False")
                     is True, so any explicit value enabled testing.
        --test_type  name of the test to run (default: "BaseNetworkGeneration").
    """
    parser = ArgumentParser()
    parser.add_argument(
        "--testing",
        # Bug fix: parse truthy spellings explicitly instead of type=bool.
        type=lambda s: s.strip().lower() in ("1", "true", "yes", "y", "on"),
        default=True
    )
    parser.add_argument(
        "--test_type",
        type=str,
        default="BaseNetworkGeneration"
    )
    args = parser.parse_args()
    if args.testing:
        run_test(test_type=args.test_type)
    else:
        print(f"Args: {args.testing}")
        # List the public attributes argparse produced.
        for d in dir(args):
            if d[0] != "_":
                print(d)


if __name__ == "__main__":
    main()
|
986,406 | 60a7f8efb0c344579cf9ac772163d45f15325f68 | import abjad
import baca
from akasha import library
#########################################################################################
########################################### 02 ##########################################
#########################################################################################
def make_empty_score():
    """Build this section's empty score, cached voices, and wrapped time
    signatures: 20 measures of series "A" with the listed fermata measures.

    Returns:
        (score, voices, time_signatures) triple consumed by make_score().
    """
    score = library.make_empty_score()
    voices = baca.section.cache_voices(score, library.voice_abbreviations)
    time_signatures = library.time_signatures(
        "A",
        count=20,
        fermata_measures=[3, 5, 8, 10, 14, 16, 18, 20],
        rotation=0,
    )
    time_signatures = baca.section.wrap(time_signatures)
    return score, voices, time_signatures
def GLOBALS(skips, rests):
    """Attach global-context indicators: metronome marks, moment and stage
    number labels on the skips, and fermatas on the global rests."""
    # (measure_index, indicator): metronome marks / accelerandi; indices are
    # written as "measure - 1" to keep the 1-based measure numbers readable.
    for index, item in (
        (1 - 1, "44"),
        (1 - 1, baca.Accelerando()),
        (3 - 1, "55"),
        (6 - 1, "44"),
        (6 - 1, baca.Accelerando()),
        (10 - 1, "55"),
    ):
        skip = skips[index]
        baca.metronome_mark(skip, item, manifests=library.manifests)
    moment_tokens = (
        (2, 5, "AB"),
        (3, 5, "B(A)"),
        (4, 6, "BC"),
        (5, 2, "C"),
        (6, 2, "AC"),
    )
    moment_markup = library.moment_markup(moment_tokens)
    baca.section.label_moment_numbers(skips, moment_markup)
    stage_tokens = (
        (1, 2 + 1),
        (3, 1 + 1),
        (5, 2 + 1),
        (7, 1 + 1),
        (9, 3 + 1),
        (11, 1 + 1),
        (13, 1 + 1),
        (15, 1 + 1),
    )
    stage_markup = library.stage_markup("02", stage_tokens)
    baca.section.label_stage_numbers(skips, stage_markup)
    # Fermata type ("long"/"short") per fermata measure.
    for index, string in (
        (3 - 1, "long"),
        (5 - 1, "long"),
        (8 - 1, "short"),
        (10 - 1, "short"),
        (14 - 1, "long"),
        (16 - 1, "long"),
        (18 - 1, "long"),
        (20 - 1, "short"),
    ):
        baca.global_fermata(rests[index], string)
def V1(voice, time_signatures):
    """Violin 1 rhythm: alternate measured rests with rhythm-maker output;
    each call covers the measure range given by time_signatures(i[, j]).
    Statement order is load-bearing — music is appended measure by measure.
    """
    music = baca.make_mmrests(time_signatures(1, 5))
    voice.extend(music)
    music = library.make_polyphony_rhythm(
        time_signatures(6, 7),
        force_rest_lts=~abjad.Pattern([0, 1, 2]),
    )
    voice.extend(music)
    music = baca.make_mmrests(time_signatures(8), head=voice.name)
    voice.extend(music)
    music = library.make_sparse_getato_rhythm(
        time_signatures(9),
        force_rest_tuplets=~abjad.Pattern([-2, -1]),
    )
    voice.extend(music)
    music = baca.make_mmrests(time_signatures(10), head=voice.name)
    voice.extend(music)
    music = library.make_accelerando_rhythm(
        time_signatures(11, 13),
        force_rest_lts=~abjad.Pattern([-11, -10, -8, -6, -4, -2, -1]),
        fuse_counts=[1, 2],
    )
    voice.extend(music)
    music = baca.make_mmrests(time_signatures(14), head=voice.name)
    voice.extend(music)
    music = library.make_accelerando_rhythm(
        time_signatures(15),
        force_rest_lts=~abjad.Pattern([0, 2, 3, -1]),
        fuse_counts=[1, 2],
    )
    voice.extend(music)
    music = baca.make_mmrests(time_signatures(16), head=voice.name)
    voice.extend(music)
    music = library.make_ritardando_rhythm(
        time_signatures(17),
        force_rest_lts=~abjad.Pattern([0, 2, -1]),
    )
    voice.extend(music)
    music = baca.make_mmrests(time_signatures(18, 20), head=voice.name)
    voice.extend(music)
def V2(voice, time_signatures):
    """Violin 2 rhythm; same measure-by-measure append pattern as V1."""
    music = baca.make_mmrests(time_signatures(1, 3))
    voice.extend(music)
    music = library.make_sparse_getato_rhythm(
        time_signatures(4),
        force_rest_tuplets=~abjad.Pattern([0, 1]),
    )
    voice.extend(music)
    music = baca.make_mmrests(time_signatures(5), head=voice.name)
    voice.extend(music)
    music = library.make_polyphony_rhythm(
        time_signatures(6, 7),
        force_rest_lts=~abjad.Pattern([2, 3, 4]),
    )
    voice.extend(music)
    music = baca.make_mmrests(time_signatures(8), head=voice.name)
    voice.extend(music)
    music = library.make_polyphony_rhythm(
        time_signatures(9),
        force_rest_lts=~abjad.Pattern([1, 2, 3]),
        rotation=-2,
    )
    voice.extend(music)
    music = baca.make_mmrests(time_signatures(10), head=voice.name)
    voice.extend(music)
    music = library.make_ritardando_rhythm(
        time_signatures(11, 13),
        force_rest_lts=~abjad.Pattern([-10, -8, -7, -5, -3, -2, -1]),
    )
    voice.extend(music)
    music = baca.make_mmrests(time_signatures(14), head=voice.name)
    voice.extend(music)
    music = library.make_ritardando_rhythm(
        time_signatures(15),
        force_rest_lts=~abjad.Pattern([0, 1, 4, -1]),
    )
    voice.extend(music)
    music = baca.make_mmrests(time_signatures(16), head=voice.name)
    voice.extend(music)
    music = library.make_accelerando_rhythm(
        time_signatures(17),
        force_rest_lts=~abjad.Pattern([0, 2, -1]),
        fuse_counts=[1, 2],
    )
    voice.extend(music)
    music = baca.make_mmrests(time_signatures(18), head=voice.name)
    voice.extend(music)
    music = library.make_ritardando_rhythm(
        time_signatures(19),
        force_rest_lts=~abjad.Pattern([0, 1, -1]),
    )
    voice.extend(music)
    music = baca.make_mmrests(time_signatures(20), head=voice.name)
    voice.extend(music)
def VA(voice, time_signatures):
    """Viola rhythm; same measure-by-measure append pattern as V1."""
    music = baca.make_mmrests(time_signatures(1, 3))
    voice.extend(music)
    music = library.make_sparse_getato_rhythm(
        time_signatures(4),
        force_rest_tuplets=~abjad.Pattern([-1]),
    )
    voice.extend(music)
    music = baca.make_mmrests(time_signatures(5), head=voice.name)
    voice.extend(music)
    music = library.make_polyphony_rhythm(
        time_signatures(6, 7),
        force_rest_lts=~abjad.Pattern([1, 2, 3]),
    )
    voice.extend(music)
    music = baca.make_mmrests(time_signatures(8), head=voice.name)
    voice.extend(music)
    music = library.make_polyphony_rhythm(
        time_signatures(9),
        force_rest_lts=~abjad.Pattern([2, 3, 4]),
        rotation=-2,
    )
    voice.extend(music)
    music = baca.make_mmrests(time_signatures(10), head=voice.name)
    voice.extend(music)
    music = library.make_polyphony_rhythm(
        time_signatures(11, 13),
        rotation=-4,
    )
    voice.extend(music)
    music = baca.make_mmrests(time_signatures(14), head=voice.name)
    voice.extend(music)
    music = library.make_polyphony_rhythm(
        time_signatures(15),
        rotation=-8,
    )
    voice.extend(music)
    music = baca.make_mmrests(time_signatures(16, 20), head=voice.name)
    voice.extend(music)
def VC(voice, time_signatures):
    """Cello rhythm: opens with the cello solo, then follows the same
    measure-by-measure append pattern as the other voices."""
    music = library.make_cello_solo_rhythm(
        time_signatures(1, 2),
        rotation=0,
    )
    voice.extend(music)
    music = baca.make_mmrests(time_signatures(3, 8), head=voice.name)
    voice.extend(music)
    music = library.make_polyphony_rhythm(
        time_signatures(9),
        force_rest_lts=~abjad.Pattern([0, 1, 2]),
        rotation=-2,
    )
    voice.extend(music)
    music = baca.make_mmrests(time_signatures(10), head=voice.name)
    voice.extend(music)
    music = library.make_polyphony_rhythm(
        time_signatures(11, 13),
        force_rest_lts=[-1],
        rotation=-6,
    )
    voice.extend(music)
    music = baca.make_mmrests(time_signatures(14), head=voice.name)
    voice.extend(music)
    music = library.make_polyphony_rhythm(
        time_signatures(15),
        force_rest_lts=[-1],
        rotation=-10,
    )
    voice.extend(music)
    music = baca.make_mmrests(time_signatures(16, 18), head=voice.name)
    voice.extend(music)
    music = library.make_sparse_getato_rhythm(
        time_signatures(19),
        force_rest_tuplets=~abjad.Pattern([1]),
    )
    voice.extend(music)
    music = baca.make_mmrests(time_signatures(20), head=voice.name)
    voice.extend(music)
def v1(m):
    """Violin 1 detail: pitches, dynamics, and material-annotation spanners,
    keyed by (1-based) measure number via the cached leaves `m`."""
    with baca.scope(m[6, 7]) as o:
        baca.pitches(o, "E4 F4 E+4", exact=True)
        library.material_annotation_spanner(o, "B")
    library.material_annotation_spanner(m[9], "A")
    with baca.scope(m[11, 13]) as o:
        library.material_annotation_spanner(o, "C")
        baca.dynamic(o.pleaf(0), "pp")
        baca.markup(o.pleaf(0), r"\baca-tasto-plus-xfb-markup")
    library.material_annotation_spanner(m[15], "C")
    library.material_annotation_spanner(m[17], "C")
    with baca.scope(m[11, 19]) as o:
        baca.pitches(o, "D5 E5")
def v2(m):
    """Violin 2 detail: pitches, dynamics, and material-annotation spanners,
    keyed by measure number via the cached leaves `m`."""
    library.material_annotation_spanner(m[4], "A")
    with baca.scope(m[6, 7]) as o:
        library.material_annotation_spanner(o, "B")
        baca.pitches(o, "D4 D~4 C4", exact=True)
    with baca.scope(m[9]) as o:
        library.material_annotation_spanner(o, "B")
        baca.pitches(o, "C#4 C#+4", exact=True)
    with baca.scope(m[11, 13]) as o:
        library.material_annotation_spanner(o, "C")
        baca.dynamic(o.pleaf(0), "pp")
        baca.markup(o.pleaf(0), r"\baca-tasto-plus-xfb-markup")
    library.material_annotation_spanner(m[15], "C")
    library.material_annotation_spanner(m[17], "C")
    library.material_annotation_spanner(m[19], "C")
    with baca.scope(m[11, 19]) as o:
        baca.pitches(o, "Bb4 C5")
def va(m):
    """Viola detail: staff lines, pitches, and material-annotation spanners,
    keyed by measure number via the cached leaves `m`."""
    with baca.scope(m[1, 3]) as o:
        baca.staff_lines(o.leaf(0), 5)
    library.material_annotation_spanner(m[4], "A")
    with baca.scope(m[6, 7]) as o:
        library.material_annotation_spanner(o, "B")
        baca.pitches(o, "Eb4 D4 E4", exact=True)
    library.material_annotation_spanner(m[9], "B")
    baca.pitches(m[9], "C4", exact=True)
    with baca.scope(m[11, 13]) as o:
        library.material_annotation_spanner(o, "B")
        baca.pitches(o, "D4 D+4 D#4 E4 F#4 F4", exact=True)
    library.material_annotation_spanner(m[15], "B")
    baca.pitches(m[15], "Eb4 D4", exact=True)
def vc(m):
    """Cello detail: solo pitches, dynamics, and material-annotation
    spanners, keyed by measure number via the cached leaves `m`."""
    with baca.scope(m[1, 2]) as o:
        library.cello_solo_pitches(function=o)
        library.material_annotation_spanner(o, "B")
        baca.dynamic(o.pleaf(0), "mp")
        baca.markup(o.pleaf(0), r"\baca-tasto-plus-half-scratch-markup")
    with baca.scope(m[9]) as o:
        library.material_annotation_spanner(o, "B")
        baca.pitches(o, "C4 C~4 B3", exact=True)
    with baca.scope(m[11, 13]) as o:
        library.material_annotation_spanner(o, "B")
        baca.pitches(o, "Bb3 Bb~3 A3 Ab3 G3 A3", exact=True)
    with baca.scope(m[15]) as o:
        library.material_annotation_spanner(o, "B")
        baca.pitches(o, "A3 A#3 B3", exact=True)
    library.material_annotation_spanner(m[19], "A")
def composites(cache):
    """Cross-voice detail applied to groups of cached measures at once:
    staccati, getato pitches, and per-group dynamics/markup."""
    with baca.scope(
        [cache["v2"][4], cache["va"][4], cache["v1"][9], cache["vc"][19]]
    ) as o:
        baca.staccato(baca.select.pheads(o))
        library.getato_pitches(-2, [0], function=o)
        for group in o:
            baca.dynamic(
                baca.select.pleaf(group, 0),
                "p",
            )
    with baca.scope([cache["v1"][6], cache["v2"][6], cache["va"][6]]) as o:
        for group in o:
            with baca.scope(group) as u:
                baca.dynamic(u.pleaf(0), "mp")
                baca.markup(u.pleaf(0), r"\baca-tasto-plus-half-scratch-markup")
@baca.build.timed("make_score")
def make_score(first_measure_number, previous_persistent_indicators):
    """Assemble the complete score for this section.

    Sets up the score, fills the global context and the four voices, reapplies
    persistent indicators from the previous section, then applies the
    per-voice and composite details via the cached leaves.
    NOTE(review): main() passes a third argument (environment.timing) —
    presumably consumed by the @baca.build.timed wrapper; confirm.
    """
    score, voices, time_signatures = make_empty_score()
    baca.section.set_up_score(
        score,
        time_signatures(),
        append_anchor_skip=True,
        always_make_global_rests=True,
        first_measure_number=first_measure_number,
        manifests=library.manifests,
        score_persistent_indicators=previous_persistent_indicators["Score"],
    )
    GLOBALS(score["Skips"], score["Rests"])
    V1(voices.v1, time_signatures)
    V2(voices.v2, time_signatures)
    VA(voices.va, time_signatures)
    VC(voices.vc, time_signatures)
    baca.section.reapply_persistent_indicators(
        voices,
        previous_persistent_indicators,
        manifests=library.manifests,
    )
    cache = baca.section.cache_leaves(
        score,
        len(time_signatures()),
        library.voice_abbreviations,
    )
    v1(cache["v1"])
    v2(cache["v2"])
    va(cache["va"])
    vc(cache["vc"])
    composites(cache)
    return score
def main():
    """Build, postprocess, and persist this section's LilyPond file."""
    environment = baca.build.read_environment(__file__, baca.build.argv())
    score = make_score(
        environment.first_measure_number,
        environment.previous_metadata["persistent_indicators"],
        environment.timing,
    )
    metadata = baca.section.postprocess_score(
        score,
        environment,
        library.manifests,
        always_make_global_rests=True,
        empty_fermata_measures=True,
        error_on_not_yet_pitched=True,
        fermata_extra_offset_y=4.5,
        global_rests_in_topmost_staff=True,
    )
    # Activate deactivated label tags so measure/moment/stage numbers print.
    baca.tags.activate(
        score,
        baca.tags.LOCAL_MEASURE_NUMBER,
        baca.tags.MOMENT_NUMBER,
        baca.tags.STAGE_NUMBER,
    )
    lilypond_file = baca.lilypond.file(
        score,
        include_layout_ly=True,
        includes=["../stylesheet.ily"],
    )
    baca.build.persist_lilypond_file(
        environment.arguments,
        environment.section_directory,
        environment.timing,
        lilypond_file,
        metadata,
    )


if __name__ == "__main__":
    main()
|
986,407 | c22298845895673b7e5cf5544183539bd760f171 | """
This is the virtual base class of retriever
"""
import os
import numpy as np
import torch
from metrics import *
from utils import *
class BaseRetriever(torch.nn.Module):
    """Virtual base class for document retrievers.

    Subclasses implement `forward` (per-document logits for each query) and
    `update` (parameter updates); loss computation, top-k prediction, and
    metric collection are shared here.
    """

    def __init__(self, documents, config):
        """
        Args:
            documents: a big dict for all documents (id -> content).
            config: experiment config; reads retriever_name, retriever_topk,
                and retriever_cheat.
        """
        super().__init__()
        self.documents = documents
        # Fixed ordering of document ids; logit columns index into this list.
        self.doc_list = list(self.documents.keys())
        self.config = config

    @cost
    def retrieve(self, queries, mode="test"):
        """Run retrieval for `queries`; returns the loss (0 outside training)."""
        # 1. calculate logits
        doc_logits, doc_labels = self.forward(queries)
        # 2. predict
        self.predict(queries, doc_logits, self.config.retriever_topk)
        # 3. compute_loss
        if mode == "train":
            loss = self.compute_loss(doc_logits, doc_labels)
        else:
            loss = 0
        return loss

    def compute_loss(self, logits, labels):
        """Cross-entropy loss over documents.

        For Albert/Bert retrievers, only the top-k logits per query are kept
        (with the gold label forced into the candidate set) before the
        cross-entropy is taken.

        Args:
            logits: torch tensor of per-document logits.
            labels: tensor of gold document indices.
        Returns:
            cross_entropy loss (scalar tensor).
        """
        # BUG FIX: the original test was `not in ["AlbertRetriever, BertRetriever"]`
        # — a single string containing both names — so no retriever name ever
        # matched and the top-k branch below was unreachable.
        if self.config.retriever_name not in ["AlbertRetriever", "BertRetriever"]:
            loss_fn = torch.nn.CrossEntropyLoss()
            loss = loss_fn(logits, labels)
        else:
            # select topk to do cross entropy
            new_logits = []
            new_labels = []
            for logit, label in zip(logits, labels):
                value, index = logit.topk(self.config.retriever_topk, largest=True)
                if label not in index:
                    # Guarantee the gold document is among the candidates.
                    index[-1] = label
                new_logits.append(logit[index])
                new_labels.append((index == label).nonzero())
            new_logits = torch.stack(new_logits)
            new_labels = torch.stack(new_labels).squeeze()
            loss_fn = torch.nn.CrossEntropyLoss()
            loss = loss_fn(new_logits, new_labels)
        return loss

    @cost
    def predict(self, queries, doc_logits, topk=-1):
        """Attach ranked candidates to each query dict, in place.

        Generated keys:
            - "doc_candidates": a list of tuple (id, score), top-k only
            - "doc_order": ranked list of the top-100 document ids
        """
        if topk == -1:
            topk = 100  # TODO: hard code here
        # doc_logits = to_numpy(doc_logits)
        # sort documents and keep only the top 100 per query
        for query, logit in zip(queries, doc_logits):
            value, index = logit.topk(100, largest=True)
            value = list(to_numpy(value))
            index = list(to_numpy(index))
            doc_order = [self.doc_list[i] for i in index]
            # pass only the top-k candidates on to the reader
            index = index[:topk]
            # optionally "cheat" by injecting a known positive document
            if self.config.retriever_cheat and 'doc_id' in query:
                pos_doc_list = query["pos_cand"]
                pos_doc = np.random.choice(pos_doc_list, size=1)
                p = self.doc_list.index(pos_doc)
                if p not in index:
                    index[-1] = p
            selected_docs = [(self.doc_list[i], logit[i]) for i in index]
            query["doc_candidates"] = selected_docs
            query["doc_order"] = doc_order

    def collect_metric(self, queries):
        """Compute top-k hit metrics (k in 1, 5, 10, 100) over `queries`."""
        metric_result = {}
        topk = [1, 5, 10, 100]
        for k in topk:
            metric_result["top%d" % k] = []
        for query in queries:
            doc_order = query["doc_order"]
            pos_doc_list = query["pos_cand"]  # positive labels
            # use the best-ranked positive document as the label
            pos, label = np.inf, "docid"
            for l in pos_doc_list:
                if l in doc_order:
                    p = doc_order.index(l)
                else:
                    p = 101  # ranked outside the stored top 100
                if p < pos:
                    pos = p
                    label = l
            result = topk_fn(doc_order, label, topk)
            for i, k in enumerate(topk):
                metric_result["top%d" % k].append(result[i])
        return metric_result

    def load(self, path, model_id):
        """Load {model_id}-retriever weights from {path}."""
        self.load_state_dict(torch.load(os.path.join(path, '{}-retriever'.format(model_id))))

    def save(self, path, model_id):
        """Save {model_id}-retriever weights in {path}."""
        torch.save(self.state_dict(), os.path.join(path, '{}-retriever'.format(model_id)))

    # YZ: For a retriever, subclasses only need to implement these two.
    def forward(self, queries):
        """
        Generated_keys:
            - "doc_logit": logit for every doc
        """
        pass

    def update(self, loss):
        """
        Update parameters
        """
        pass
|
986,408 | 4be70e87b58cab629f47098eecc769f2e9033f74 | import re
import unittest
# Parse reactor-reboot instructions: "on x=a..b,y=c..d,z=e..f".
# with open('../../input/day22') as f:
with open('../../input/day22-sample') as f:
    data = f.read().strip().split("\n")

p = re.compile(r'(\w+) .=(.*),.=(.*),.=(.*)')

ins = []
# Coordinate sets per axis, fed to compress() below.
X = set()
Y = set()
Z = set()
for line in data:
    m = p.match(line)
    switch = m.group(1) == 'on'
    x1, x2 = m.group(2).split('..')
    y1, y2 = m.group(3).split('..')
    z1, z2 = m.group(4).split('..')
    x1, x2, y1, y2, z1, z2 = int(x1), int(x2), int(y1), int(y2), int(z1), int(z2)
    ins.append((switch, x1, x2, y1, y2, z1, z2))
    # build a coordinate compressions
    x1, x2 = min(x1, x2), max(x1, x2)
    y1, y2 = min(y1, y2), max(y1, y2)
    z1, z2 = min(z1, z2), max(z1, z2)
    X.add(x1)
    X.add(x2)
    Y.add(y1)
    Y.add(y2)
    Z.add(z1)
    Z.add(z2)
def compress(c):
    """Coordinate-compress the values in *c*.

    Returns a pair ``(coord, llen)``: ``coord`` maps each original value to
    its rank in sorted order, and ``llen`` maps each rank to the gap up to
    the next value (``None`` for the largest value).
    """
    ordered = sorted(c)
    coord = {value: rank for rank, value in enumerate(ordered)}
    llen = {
        rank: nxt - cur
        for rank, (cur, nxt) in enumerate(zip(ordered, ordered[1:]))
    }
    if ordered:
        # The last rank has no successor.
        llen[len(ordered) - 1] = None
    return coord, llen
# Compress each axis independently: *c maps coordinate -> rank,
# *l maps rank -> gap to the next coordinate.
xc, xl = compress(X)
yc, yl = compress(Y)
zc, zl = compress(Z)
print(f'xc {xc}')
print(f'yc {yc}')
print(f'zc {zc}')
print(f'xl {xl}')
print(f'yl {yl}')
print(f'zl {zl}')
def solve(part):
    """Replay the instructions over compressed cells and return the total
    volume that is on.  Part 1 skips cuboids outside the ±50 region.

    NOTE(review): only range endpoints are added to the coordinate sets
    (not end+1), and the cell loops use exclusive ends `range(xc[x1], xc[x2])`
    — this looks off by one for inclusive input ranges; confirm against the
    expected sample answer.
    """
    on = set()
    for step, i in enumerate(ins):
        print(step, i)
        turn_on, x1, x2, y1, y2, z1, z2 = i
        if part == 1:
            # Part 1 only considers the initialization region.
            if x1 < -50 or y1 < -50 or z1 < -50:
                continue
            if x2 > 50 or y2 > 50 or z2 > 50:
                continue
        for x in range(xc[x1], xc[x2]):
            for y in range(yc[y1], yc[y2]):
                for z in range(zc[z1], zc[z2]):
                    print(f'x {x}, y {y}, z {z}')
                    print(f'x1 {x1}, y1 {y1}, z1 {z1}')
                    if turn_on:
                        # print('add', x, y, z)
                        on.add((x, y, z))
                    elif (x, y, z) in on:
                        on.remove((x, y, z))
    # print(on)
    # calculate on cube: each compressed cell contributes the product of the
    # per-axis gap lengths.
    ans = 0
    for x, y, z in on:
        assert x in xl, f'{x}, {xl}'
        l1 = xl[x]
        l2 = yl[y]
        l3 = zl[z]
        ans += l1 * l2 * l3
    return ans
def part1():
    """Part 1: restrict to the ±50 initialization region."""
    return solve(1)
def part2():
    """Part 2: full, unrestricted instruction list."""
    return solve(2)
class TestSum(unittest.TestCase):
    # Regression tests pinning the puzzle answers.  NOTE(review): the
    # expected value 0 in test2 looks like a placeholder — confirm.
    def test1(self):
        ans = part1()
        print(ans)
        assert ans == 615700, f'{ans}'

    def test2(self):
        ans = part2()
        print(ans)
        assert ans == 0, f'{ans}'
|
986,409 | ec5e1259841b8fe2aa51b90fe114b5769d654733 | # prepare_data()
# make_experiment()
# collect_result()
import re
import shutil
import subprocess
import sys
import fortranformat as ff
import logging
from saxs_experiment import LogPipe
import os
def prepare_data(all_files, tmpdir, method, verbose_logfile, mydirvariable):
    """Copy each structure's .pdb and .dat file into the working tree.

    For every base name in *all_files*, ``<mydirvariable>/<name>.pdb`` is
    copied to ``<tmpdir>/pdbs/`` and ``<name>.dat`` to ``<tmpdir>/dats/``.
    *method* and *verbose_logfile* are accepted for interface parity with the
    sibling helpers but are unused here.
    """
    for base_name in all_files:
        # not strict format for pdbs file
        source_pdb = f'{mydirvariable}/{base_name}.pdb'
        source_dat = f'{mydirvariable}/{base_name}.dat'
        shutil.copy(source_pdb, f'{tmpdir}/pdbs/')
        shutil.copy(source_dat, f'{tmpdir}/dats/')
def make_experiment(all_files, tmpdir, verbose, verbose_logfile, method, path, mydirvariable):
    """Write GAJOE's two input files from the prepared data and run GAJOE.

    Produces ``curve_gajoe.dat`` (experimental curve in Fortran fixed format)
    and ``juneom.eom`` (S grid plus one intensity column per structure), then
    launches the GAJOE binary at *path*; exits the process on failure.
    NOTE(review): *verbose* and *method* are accepted but unused here.
    """
    # Expected curve header/line shape:
    # Angular axis m01000.sax Datafile m21000.sub 21-Jun-2001
    # .0162755E+00 0.644075E+03 0.293106E+02
    with open(tmpdir + '/method/curve_gajoe.dat', 'w') as file_gajoe:
        file_gajoe.write(' Angular axis m01000.sax Datafile m21000.sub 21-Jun-2001\n')
        lineformat = ff.FortranRecordWriter('(1E12.6)')
        with open(tmpdir + '/method/curve.modified.dat') as file1:
            for line in file1:
                if not line.strip():
                    break
                data1 = float(line.split()[0])
                data2 = float(line.split()[1])
                data3 = float(line.split()[2])
                a = lineformat.write([data1])
                # Drop the leading character so the first column matches the
                # expected ".0162755E+00"-style layout.
                x = a[1:]
                b = lineformat.write([data2])
                c = lineformat.write([data3])
                file_gajoe.write(f' {x} {b} {c}\n')
    # Expected ensemble file shape:
    # S values num_lines
    # 0.000000E+00
    # ------
    # Curve no. 1
    # 0.309279E+08
    num_lines = sum(1 for line in open(mydirvariable + all_files[0] + ".dat")) - 2
    with open(tmpdir + '/method/juneom.eom', 'w') as file1:
        file1.write(f' S values {num_lines} \n')
        # S grid taken from the first structure's .dat file.
        with open(mydirvariable + all_files[0] + ".dat") as file2:
            for line in file2:
                if line.startswith('#'):
                    continue
                data = float(line.split()[0])
                lineformat = ff.FortranRecordWriter('(1E14.6)')
                b = lineformat.write([data])
                file1.write(f'{b}\n')
        # One intensity block (second column) per structure.
        for i, filename in enumerate(all_files, start=1):
            with open(mydirvariable + filename + ".dat") as file2:
                file1.write(f'Curve no. {i} \n')
                for line in file2:
                    if line.startswith('#'):
                        continue
                    data1 = float(line.split()[1])
                    lineformat = ff.FortranRecordWriter('(1E14.6)')
                    b = lineformat.write([data1])
                    file1.write(f'{b}\n')
    # GAJOE asks interactive questions; pipe `yes` into it to accept defaults.
    if verbose_logfile:
        logpipe = LogPipe(logging.DEBUG)
        logpipe_err = LogPipe(logging.ERROR)
        p1 = subprocess.Popen(['yes'], stdout=subprocess.PIPE)
        call = subprocess.Popen([path, f'{tmpdir}/method/curve_gajoe.dat', f'-i={tmpdir}/method/juneom.eom',
                                 '-t=5'], cwd=f'{tmpdir}/results/', stdin=p1.stdout,
                                stdout=logpipe, stderr=logpipe_err)
        call.communicate()
        logpipe.close()
        logpipe_err.close()
    else:
        p1 = subprocess.Popen(['yes'], stdout=subprocess.PIPE)
        call = subprocess.Popen([path, f'{tmpdir}/method/curve_gajoe.dat', f'-i={tmpdir}/method/juneom.eom',
                                 '-t=5'], cwd=f'{tmpdir}/results/', stdin=p1.stdout,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        call.communicate()
    if call.returncode:
        print(f'ERROR: GAJOE failed', file=sys.stderr)
        logging.error(f'GAJOE failed.')
        sys.exit(1)
def collect_results(tmpdir, all_files):
    """Parse GAJOE's log (``results/GA001/curve_1/logFile_001_1.log``).

    Picks up the reported Chi^2 and the per-structure weights from lines that
    start with an index like ``   2)``; structure order maps back into
    *all_files*.

    Returns:
        ``[(chi2, [(pdb_name, weight), ...])]`` — a one-element list so the
        shape matches the multi-curve form:
        ([chi2, [(structure, weight), ...]], [chi2, (), ...])
    """
    chi2 = None
    structure_weight = []
    # Raw string (idiom fix: '\s'/'\d' in a plain string is deprecated).
    m = re.compile(r'^\s*\d+\)')
    with open(tmpdir + '/results/GA001/curve_1/logFile_001_1.log') as file_gajoe:
        for line in file_gajoe:
            if '-- Chi^2 : ' in line:
                chi2 = float(line.split(':')[1])
            # weight lines look like:
            # 00002ethod/juneom.pd ~0.253.00
            # 00003ethod/juneom.pd ~0.172.00
            p = m.search(line)
            if p:
                parts = line.split()  # split once instead of per-field
                index = int(parts[1][:5]) - 1
                weight = float((parts[4][1:6]).strip('*'))
                structure_weight.append((all_files[index] + '.pdb', weight))
    return [(chi2, structure_weight)]
|
986,410 | 99f05120d9dac847d52c21155880031b96c86a7e | from bs4 import BeautifulSoup
import requests, lxml, html5lib
import pandas as pd
class nflScraper(object):
    """
    Base class for web scraping NFL data.
    Basic operations are:
    1. find a link
    2. find a table
    3. extract information from table.
    """
    # All pages are relative to this site.
    domain = "https://www.pro-football-reference.com"

    # Abstract-by-convention: subclasses must override every method,
    # including __init__ (instantiating this base class raises).
    def __init__(self):
        raise NotImplementedError

    def getLink(self):
        """Identify table link"""
        raise NotImplementedError

    def getTable(self):
        """Identify the table (in HTML)"""
        raise NotImplementedError

    def getStats(self):
        """Extract all statistics from the table"""
        raise NotImplementedError
|
986,411 | 4d94c43e797d7920c74fd2bb011f5f4177a11765 | Solution to [Validating and Parsing Email Addresses](https://www.hackerrank.com/challenges/validating-named-email-addresses) |
986,412 | fd1cc2138aee2d8baa32057e25d7c1b206a52e44 | import indiesolver
import numpy as np
import scipy
def evaluate(solution):
    """Objective for the optimizer: average spectral radius of the two-level
    iteration matrix over a grid of complex parameters lamdt.

    Writes solution["metrics"]["rho"] in place and returns the solution.
    NOTE(review): CollGaussRadau_Right is not imported in this file — it must
    be provided elsewhere at runtime; confirm the missing import.
    """
    x = solution["parameters"]
    mf = 3  # fine-level collocation nodes
    collf = CollGaussRadau_Right(num_nodes=mf, tleft=0.0, tright=1.0)
    # coll = CollGaussLobatto(num_nodes=m, tleft=0.0, tright=1.0)
    # coll = EquidistantNoLeft(num_nodes=m, tleft=0.0, tright=1.0)
    Qf = collf.Qmat[1:, 1:]
    Idf = np.eye(mf)
    QT = Qf.T
    # LU-factorize Q^T; the transposed upper factor serves as Qdelta.
    [_, _, U] = scipy.linalg.lu(QT, overwrite_a=True)
    Qdf = U.T
    mc = int((mf + 1) / 2)  # coarse-level node count
    collc = CollGaussRadau_Right(num_nodes=mc, tleft=0.0, tright=1.0)
    # coll = CollGaussLobatto(num_nodes=m, tleft=0.0, tright=1.0)
    # coll = EquidistantNoLeft(num_nodes=m, tleft=0.0, tright=1.0)
    Qc = collc.Qmat[1:, 1:]
    Idc = np.eye(mc)
    QT = Qc.T
    [_, _, U] = scipy.linalg.lu(QT, overwrite_a=True)
    Qdc = U.T
    # Row sums used to normalize the transfer matrices below.
    sum1r = x['x11r'] + x['x12r'] + x['x13r']
    sum2r = x['x21r'] + x['x22r'] + x['x23r']
    sum1i = x['x11i'] + x['x12i']
    sum2i = x['x21i'] + x['x22i']
    sum3i = x['x31i'] + x['x32i']
    if sum1r == 0.0 or sum2r == 0.0 or sum1i == 0.0 or sum2i == 0.0 or sum3i == 0.0:
        # Degenerate (non-normalizable) parameter set: return penalty value.
        solution["metrics"] = {}
        solution["metrics"]["rho"] = 99
        return solution
    # Restriction (3 -> 2) and interpolation (2 -> 3) with normalized rows.
    Tr = np.array(
        [
            [x['x11r'] / sum1r, x['x12r'] / sum1r, x['x13r'] / sum1r],
            [x['x21r'] / sum2r, x['x22r'] / sum2r, x['x23r'] / sum2r],
        ]
    )
    Ti = np.array(
        [
            [x['x11i'] / sum1i, x['x12i'] / sum1i],
            [x['x21i'] / sum2i, x['x22i'] / sum2i],
            [x['x31i'] / sum3i, x['x32i'] / sum3i],
        ]
    )
    # THIS WORKS REALLY WELL! No need to take imaginary parts in x, though (found minimum has zero imaginary parts)
    k = 0
    obj_val = 0.0
    for i in range(-8, 8):
        for l in range(-8, 8):
            k += 1
            lamdt = -(10**i) + 1j * 10**l
            C = Idf - lamdt * Qf
            Pf = Idf - lamdt * Qdf
            Rf = Idf - np.linalg.inv(Pf).dot(C)
            Pc = Idc - lamdt * Qdc
            Rc = Idf - Ti.dot(np.linalg.inv(Pc)).dot(Tr).dot(C)
            R = Rf.dot(Rc)
            # Spectral radius of the combined fine/coarse sweep.
            rhoR = max(abs(np.linalg.eigvals(R)))
            obj_val += rhoR
    obj_val /= k  # average over all sampled lamdt
    solution["metrics"] = {}
    solution["metrics"]["rho"] = obj_val
    return solution
# Initial guess for the five distinct parameter values reused below.
# y = [5.8054876, 8.46779587, 17.72188108, 6.75505219, 5.53129906]
y = [0.0, 0.0, 0.0, 0.0, 0.0]
# y = [1.0, 1.0, 1.0, 1.0, 1.0]
ymax = 20.0
ymin = -20.0
params = dict()
params['x11r'] = {'type': 'float', 'space': 'decision', 'min': ymin, 'max': ymax, 'init': y[0]}
params['x12r'] = {'type': 'float', 'space': 'decision', 'min': ymin, 'max': ymax, 'init': y[1]}
params['x13r'] = {'type': 'float', 'space': 'decision', 'min': ymin, 'max': ymax, 'init': y[2]}
params['x21r'] = {'type': 'float', 'space': 'decision', 'min': ymin, 'max': ymax, 'init': y[3]}
params['x22r'] = {'type': 'float', 'space': 'decision', 'min': ymin, 'max': ymax, 'init': y[4]}
params['x23r'] = {'type': 'float', 'space': 'decision', 'min': ymin, 'max': ymax, 'init': y[0]}
params['x11i'] = {'type': 'float', 'space': 'decision', 'min': ymin, 'max': ymax, 'init': y[1]}
params['x12i'] = {'type': 'float', 'space': 'decision', 'min': ymin, 'max': ymax, 'init': y[2]}
params['x21i'] = {'type': 'float', 'space': 'decision', 'min': ymin, 'max': ymax, 'init': y[3]}
params['x22i'] = {'type': 'float', 'space': 'decision', 'min': ymin, 'max': ymax, 'init': y[4]}
params['x31i'] = {'type': 'float', 'space': 'decision', 'min': ymin, 'max': ymax, 'init': y[3]}
params['x32i'] = {'type': 'float', 'space': 'decision', 'min': ymin, 'max': ymax, 'init': y[4]}
problem = {
    'problem_name': 'Qdelta_sum_ml',
    'parameters': params,
    'metrics': {'rho': {'type': 'objective', 'goal': 'minimize'}},
}
worker = indiesolver.indiesolver()
# NOTE(review): hard-coded service credential — consider moving to config/env.
worker.initialize("indiesolver.com", 8080, "dg8f5a0dd9ed")
reply = worker.create_problem(problem)
if reply["status"] != "success":
    print(reply)
    exit()
# Ask/evaluate/tell loop: 8 candidate solutions per iteration.
curr_min = 99
pars = None
for iteration in range(0, 12500):
    reply = worker.ask_new_solutions(8)
    solutions = dict()
    solutions["solutions"] = []
    if reply["status"] == "success":
        for solution in reply["solutions"]:
            solutions["solutions"].append(evaluate(solution))
            rho = solution["metrics"]["rho"]
            curr_min = min(curr_min, rho)
            if curr_min == rho:
                # New best: remember the parameter vector.
                pars = [solution["parameters"][k] for k in solution["parameters"]]
            if curr_min == rho or solution["ID"] % 1000 == 0:
                print(solution["ID"], curr_min, pars)
        worker.tell_metrics(solutions)
    else:
        print(reply)
        exit()
|
986,413 | d3e105d8cc972d42840f5a659ba3efb8a6276974 | import geoip2.database
# # create a reader object
# reader = geoip2.database.Reader('/home/yang/pyproject/i2p/GeoLite2-City.mmdb')
# # give the ip address
# response = reader.city('223.3.68.179')
# # get the attribution of the ip
# print(response.country.iso_code)
#
# print(response.location.latitude)
#
# print(response.location.longitude)
#
# reader.close()
def lookup_lat_and_long(ip):
    """Look up latitude/longitude for *ip* in the local GeoLite2 database.

    Args:
        ip: IPv4/IPv6 address string.
    Returns:
        {"lat": ..., "lng": ...} on success, or None when the address is not
        present in the database.
    """
    reader = geoip2.database.Reader('./GeoLite2-City.mmdb')
    try:
        response = reader.city(ip)
        return {"lat": response.location.latitude,
                "lng": response.location.longitude}
    except geoip2.errors.AddressNotFoundError:
        return None
    finally:
        # Bug fix: the original only closed the reader on the success path,
        # leaking the mmdb handle whenever the address was not found.
        reader.close()
if __name__ == '__main__':
    # Demo lookup.  NOTE(review): lookup_lat_and_long returns None for
    # unknown addresses, in which case result['lat'] raises TypeError —
    # confirm whether a guard is intended.
    result = lookup_lat_and_long('217.82.84.59')
    print(result['lat'])
|
986,414 | ac161f6441e8f5f66b22426b2a898b3cc3afebab | # -*- coding: utf-8 -*-
import nose
import unittest
import time
from suds import WebFault
from suds.client import Client
from suds.sax.element import Element
from authenticator import Authenticator
from creation import *
import db
import datetime
import urllib2
import base64
# Action ids exercised by the request/result round-trip tests below.
ids = [ 18559,
        64272,
        64309,
        64356,
        64393,
        64450,
        64471,
        64884,
        64915,
        64919,
        17891,
        49944,
        51758,
        51770,
        52967,
        53014,
        53459,
        54177,
        54208,
        54241,
        54450]
# Action.id values that have test_id = 1
actionIds = [18559,
49939,
51755,
52963,
53010,
53384,
53455,
53726,
54172,
54203,
54236,
54446,
54504,
54934,
55074,
62677,
62727,
62745,
62818,
62855,
62875,
63110,
63146,
63174,
63223,
63257,
63300,
63443,
63444,
63531,
63581,
63605,
63683,
63786,
63950,
64009,
64078,
64126,
64174,
64273,
64310,
64338,
64357,
64394,
64419,
64443,
64451,
64472,
64655,
64727,
64774,
64798,
64846,
64885,
64922,
64979,
64993,
65222,
65259,
65326,
65368,
65417,
65477,
65510,
65753,
65801,
65861,
66055,
66067,
66099,
66132,
66152,
66198,
66227,
66271,
66294,
66301,
66562,
66617,
66648,
66721,
66793,
66829,
66848,
66906,
66973,
67084,
67243,
67291,
67318,
67339,
67399,
67445,
67523,
67542,
67596,
67609,
67673,
67841,
67898,
67974,
68029,
68060,
68096,
68111,
68168,
68304,
68381,
68423,
68461,
68501,
68548,
68564,
68589,
68624,
68880,
68919,
68959,
68993,
69016,
69037,
69097,
69172,
69230,
69269,
69628,
69645,
69677,
69709,
69744,
69810,
69844,
69877,
69902,
69925,
69955,
69969,
69985,
69995,
70028,
70172,
70222,
70314,
70348,
70420,
70454,
70476,
70503,
70560,
70610,
70634,
70663,
70922,
70969,
71001,
71023,
71048,
71070,
71094,
71125,
71157,
71180,
71204,
71239,
71263,
71318,
71477,
71514,
71539,
71663,
71739,
71773,
71809,
71915,
71986,
72053,
72110,
72231,
72256,
72340,
72364,
72404,
72431,
72455,
72486,
72781,
72851,
72882,
72903,
72938,
72969,
72999,
73028]
class TestLisIntegration(unittest.TestCase):
    """Integration tests for the TMIS laboratory (LIS) SOAP web service.

    Python 2 / suds-based. Talks to two locally running SOAP endpoints
    (laboratory-integration and medipad) and seeds/cleans data through the
    `db` helper. `ids` is the module-level list of action ids defined above.
    """

    def setUp(self):
        # SOAP client for the laboratory-integration endpoint.
        url = 'http://localhost:8080/tmis-ws-laboratory/tmis-laboratory-integration?wsdl'
        self.client = Client(url, username='', password='', cache=None)
        # Second client for the medipad endpoint (used to create diagnostics).
        medipadWsUrl = 'http://localhost:8080/tmis-ws-medipad/tmis-medipad?wsdl'
        self.medipadClient = Client(medipadWsUrl, username='', password='', cache=None)
        # Authenticate
        authenticator = Authenticator()
        authData = authenticator.asAdmin()
        # Add auth headers (SOAP header carrying the admin auth token)
        tmisAuthNS = ('ta', 'http://korus.ru/tmis/auth')
        authHeader = Element('tmisAuthToken', ns=tmisAuthNS).setText(authData.authToken.id)
        self.medipadClient.set_options(soapheaders=authHeader)
        db.initialize()

    # Tests
    def testSetRequest(self):
        # Send an analysis request for every known action id; a fixed set of
        # known benign faults is tolerated, anything else is re-raised.
        for aid in ids:
            try:
                self.sendAnalysisRequest(aid)
            except WebFault, e:
                if "Action has no tests" in e.message:
                    pass
                elif "Action has no event" in e.message:
                    pass
                elif "No biomaterial found for request" in e.message:
                    pass
                else:
                    print e.message
                    raise e

    def testSetResults(self):
        # Post empty results for every known action id, tolerating the same
        # benign faults as testSetRequest (minus the biomaterial one).
        for aid in ids:
            try:
                self.setAnalysisResults(aid, "12345678", True, [], "76543241")
            except WebFault, e:
                if "Action has no tests" in e.message:
                    pass
                elif "Action has no event" in e.message:
                    pass
                else:
                    print e.message
                    raise e

    def testSetAnalysisResult(self):
        # Create fresh diagnostics for a fixed event, post one analysis
        # result per created action, then delete the actions again.
        eventId = 29801
        diagnosticTypeId = 5
        ids = createDiagnosticForPatient(self.medipadClient, eventId, diagnosticTypeId)
        for aid in ids:
            res = self.createAnalysisResult()
            self.setAnalysisResults(aid, "", True, [res], "")
            db.deleteAction(aid)

    def testSetSensitivityResult(self):
        eventId = 29801
        # Antibiotic sensitivity diagnostic type
        diagnosticTypeId = 482
        ids = createDiagnosticForPatient(self.medipadClient, eventId, diagnosticTypeId)
        for aid in ids:
            res = self.createSensitivityResult()
            self.setSensitivityResults(aid, "", True, [res], "")
            db.deleteAction(aid)

    def testInvalidRequestId(self):
        # Request id 0 does not exist, so the service must raise a fault.
        ex = False
        try:
            self.setAnalysisResults(0, "12345678", False, [], "76543241")
        except WebFault as wtf:
            ex = True
        self.assertTrue(ex)

    # Utils
    def sendAnalysisRequest(self, aid):
        # Fresh client per call: the client-laboratory endpoint differs from
        # the integration endpoint created in setUp.
        url = 'http://localhost:8080/tmis-ws-laboratory/tmis-client-laboratory?wsdl'
        client = Client(url, username='', password='', cache=None)
        client.service.sendAnalysisRequest(aid)

    def setAnalysisResults(self, aid, barcode, completed, results, defects):
        url = 'http://localhost:8080/tmis-ws-laboratory/tmis-laboratory-integration?wsdl'
        client = Client(url, username='', password='', cache=None)
        client.service.setAnalysisResults(aid, barcode, completed, results, defects)

    def setSensitivityResults(self, aid, barcode, completed, results, defects):
        url = 'http://localhost:8080/tmis-ws-laboratory/tmis-laboratory-integration?wsdl'
        client = Client(url, username='', password='', cache=None)
        client.service.setSensitivityResults(aid, barcode, completed, results, defects)

    def getImageBase64(self):
        # Reads a local test image and returns it base64-encoded.
        #url = 'http://localhost:4848/resource/community-theme/images/login-product_name_open.png'
        #u = urllib2.urlopen(url)
        u = open('1.png', 'r')
        data = u.read()
        u.close()
        print str(data.__class__)
        base64str = base64.b64encode(data)
        return base64str

    def createAnalysisResult(self):
        # Build a suds object matching the WSDL's `analysisResult` type.
        result = self.client.factory.create("analysisResult")
        result.code = "999"
        result.name = u"Тестовый показатель"
        result.endDate = datetime.datetime.now()
        result.norm = "0.1-0.3"
        result.normality = "NORM"
        result.unitCode = u"ммоль/л"
        result.value = "0.2"
        result.valueType = "NUMERIC"
        image = self.getImageBase64()
        result.imageValues = [image]
        return result

    def createSensitivityResult(self):
        # Build a suds object matching the WSDL's `antibioticSensitivity` type.
        result = self.client.factory.create("antibioticSensitivity")
        result.name = u"Тестовый показатель"
        result.value = "S"
        result.mic = "42"
        result.comment = u"Выявлена относительно низкая чувствительность"
        return result
if __name__ == '__main__':
    # Discover and run this module's tests through the nose runner.
    nose.main()
|
986,415 | e16e7b3bd9064cf4adb70a6e0883cdbdbc119551 | import re
import boto3
import pandas as pd
from botocore.exceptions import ClientError
from typing import List
BUCKET_NAME = 'sok-repository-eval-benchmarks'
client = boto3.client("s3")
def get_csv_files_in_s3_path(path: str, filter: str, get_s3_url: bool = False):
    """Return the CSV object keys under ``path`` whose key contains ``filter``.

    :param path: S3 key prefix to search under.
    :param filter: substring that must appear in the object key.
        (Parameter name kept for backward compatibility although it shadows
        the ``filter`` builtin.)
    :param get_s3_url: if True, return full ``s3://bucket/key`` URLs instead
        of bare keys.
    :return: list of matching keys (or URLs).
    """
    response = client.list_objects(Bucket=BUCKET_NAME, Prefix=path)
    files = []
    for content in response.get('Contents', []):
        filename = content.get('Key')
        if filter in filename and '.csv' in filename:
            if get_s3_url:
                # Bug fix: the URL must contain the object key (it previously
                # appended a literal placeholder), so that callers such as
                # pandas.read_csv can actually open the object.
                files.append(f"s3://{BUCKET_NAME}/{filename}")
            else:
                files.append(filename)
    return files
def get_folder_in_s3_path(path: str):
    """Return the immediate sub-folder prefixes under ``path`` in the bucket.

    Uses a '/' delimiter so S3 groups keys into ``CommonPrefixes`` entries
    ("folders"). Returns an empty list when the prefix has no sub-folders —
    S3 omits the ``CommonPrefixes`` key entirely in that case, which the
    original code turned into a TypeError by iterating ``None``.
    """
    result = client.list_objects_v2(Bucket=BUCKET_NAME, Prefix=path, Delimiter='/')
    folder_names = []
    for o in result.get('CommonPrefixes', []):
        folder_names.append(o.get('Prefix'))
    return folder_names
def get_most_recent_folder_from_s3_bucket():
    """
    Determines the name of the S3 folder that has the most recent timestamp in its name.
    The timestamp is formatted as YYYYMMDD_HHMMSS.
    :return: A string of the most recent folder based on its name (a timestamp).
    """
    s3 = boto3.resource('s3')
    bucket = s3.Bucket(BUCKET_NAME)
    # List only the bucket's top-level prefixes ("folders") via the delimiter.
    result = bucket.meta.client.list_objects(Bucket=bucket.name, Delimiter='/')
    folders = []
    # Folder names made up of digits and underscores, i.e. the timestamp
    # format; re.match anchors at the start of the name.
    date_pattern = re.compile(r"[0-9_]+")
    # NOTE(review): result.get('CommonPrefixes') is None when the bucket has
    # no top-level folders (TypeError here), and folders[0] raises IndexError
    # when nothing matches — assumes the bucket always contains at least one
    # timestamped run folder; confirm.
    for o in result.get('CommonPrefixes'):
        folder_name = o.get('Prefix')
        if re.match(date_pattern, folder_name):
            folders.append(folder_name)
    # Lexicographic sort suffices because the names are zero-padded timestamps.
    folders.sort(reverse=True)
    return folders[0]
def upload_file_to_s3_bucket(file_path: str, destination_path: str):
    """Upload a local file to ``destination_path`` in the benchmark bucket.

    Failures are reported on stderr but not re-raised (best-effort upload,
    as in the original). Bug fix: the caught ``ClientError`` was previously
    discarded, hiding the actual reason for the failure — it is now included
    in the message.
    """
    try:
        client.upload_file(file_path, BUCKET_NAME, destination_path)
    except ClientError as e:
        import sys
        print(f"Could not upload {file_path} to {destination_path} in bucket {BUCKET_NAME}: {e}", file=sys.stderr)
def get_labels_data_from_s3(name_filter: str, root_folder: str = None) -> (List[str], List[pd.DataFrame]):
    """Collect per-tool benchmark CSVs from S3.

    NOTE(review): despite the annotation, this actually returns a 3-tuple
    ``(labels, data, root_folder)`` on success and ``None`` when no data was
    found — callers must handle both cases; confirm and fix upstream.

    :param name_filter: substring the CSV key must contain (e.g. a program name).
    :param root_folder: timestamped run folder to read from; defaults to the
        most recent one in the bucket.
    """
    labels, data = [], []
    # get the folder with the most recent timestamp (e.g., 20200729_0949/)
    if root_folder is None:
        root_folder = get_most_recent_folder_from_s3_bucket()
    elif not root_folder.endswith('/'):
        root_folder += '/'
    # get all subfolders - this corresponds to the benchmarked tools (e.g., Cingulata, SEAL)
    tool_folders_path = get_folder_in_s3_path(root_folder)
    for tp in tool_folders_path:
        # get path to all CSV files that have 'name_filter' in its name
        s3_urls = get_csv_files_in_s3_path(tp, name_filter, get_s3_url=True)
        # throw an error if there is more than one CSV with 'cardio' in the folder as each benchmark run should exactly
        # produce one CSV file per program
        if len(s3_urls) == 0:
            # just skip any existing folder without a CSV file (e.g., the plot/ folder)
            continue
        elif len(s3_urls) > 1:
            raise ValueError(
                f"Error: More than one CSV file for '{name_filter}'' found!\nCreate a separate folder for each tool "
                f"configuration, e.g., SEAL-BFV, SEAL-CKKS.")
        # remove the directory (timestamped folder) segment from the tool's path
        tool_name = tp.replace(root_folder, "")
        # remove the trailing '/' from the tool's name (as it is a directory)
        if tool_name[-1] == '/':
            tool_name = tool_name[:-1]
        # use the tool's name as label for the plot
        labels.append(tool_name)
        # read the CSV data from S3
        data.append(pd.read_csv(s3_urls[0], delimiter=','))
    # call the plot
    if len(labels) == 0:
        import sys
        sys.stderr.write(f"ERROR: Plotting {name_filter} failed because no data is available!")
        return
    return labels, data, root_folder
|
986,416 | f72e860d6d4a3a33961195081f5e0dccf3c1476f | #!/usr/bin/env python3
# Copyright (c) 2002-2017, California Institute of Technology.
# All rights reserved. Based on Government Sponsored Research under contracts NAS7-1407 and/or NAS7-03001.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# 3. Neither the name of the California Institute of Technology (Caltech), its operating division the Jet Propulsion Laboratory (JPL),
# the National Aeronautics and Space Administration (NASA), nor the names of its contributors may be used to
# endorse or promote products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE CALIFORNIA INSTITUTE OF TECHNOLOGY BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test suite to make sure that mod_twms is properly handling TWMS errors.
"""
import os
import sys
import unittest2 as unittest
import xmlrunner
from shutil import copy, move, rmtree
from xml.etree import cElementTree as ElementTree
from oe_test_utils import check_response_code, check_tile_request, file_text_replace, get_layer_config, get_url, make_dir_tree, restart_apache, run_command
from optparse import OptionParser
base_url = 'http://localhost'
apache_conf_dir = '/etc/httpd/conf.d'
class TestModTwmsErrorHandling(unittest.TestCase):
    """Checks that mod_twms returns HTTP 400 ("Bad Request") for TWMS GetMap
    requests with missing or invalid parameters.

    All test methods are one-liners delegating to a shared assertion helper;
    the original repeated the same four lines verbatim in every test.
    """

    @classmethod
    def setUpClass(self):
        # Get the path of the test data -- we assume that the script is in the parent dir of the data dir
        oedata_path = os.path.join(os.getcwd(), 'twms_onearth_test_data')
        self.testdata_path = os.path.join(oedata_path, 'mod_twms')
        wmts_configs = ('wmts_cache_configs', 'wmts_cache_staging', 'test_imagery/cache_all_wmts.config')
        self.image_files_path = os.path.join(oedata_path, 'test_imagery')
        self.test_oe_config = os.path.join(oedata_path, 'oe_test.conf')
        self.test_apache_config = os.path.join(self.testdata_path, 'test_twms_err_apache.conf')
        template_dir, staging_dir, cache_config = wmts_configs
        # Make staging cache files dir
        template_path = os.path.join(oedata_path, template_dir)
        staging_path = os.path.join(oedata_path, staging_dir)
        cache_path = os.path.join(oedata_path, cache_config)
        make_dir_tree(staging_path)
        # Copy XML/MRF files to staging cache files dir, swapping in the location of the imagery files
        for file in [f for f in os.listdir(template_path) if os.path.isfile(os.path.join(template_path, f))]:
            file_text_replace(os.path.join(template_path, file), os.path.join(staging_path, file),
                              '{cache_path}', self.image_files_path)
        # Run oe_create_cache_config to make the cache config files
        cmd = 'oe_create_cache_config -cbd {0} {1}'.format(staging_path, cache_path)
        run_command(cmd)
        rmtree(staging_path)
        # Put the correct path into the Apache config (oe_test.conf)
        file_text_replace(self.test_oe_config, os.path.join('/etc/httpd/conf.d', os.path.basename(self.test_oe_config)), '{cache_path}', oedata_path)
        # Put the correct path into the Apache config (test_twms_err_apache.conf)
        file_text_replace(self.test_apache_config, os.path.join('/etc/httpd/conf.d', os.path.basename(self.test_apache_config)), '{cache_path}', self.testdata_path)
        restart_apache()

    def _assert_twms_bad_request(self, test_url, response_code=400, response_value='Bad Request'):
        """Issue `test_url` and assert the expected TWMS error response."""
        check_code = check_response_code(test_url, response_code, response_value)
        error = 'The TWMS response code does not match what\'s expected. URL: {0}, Expected Response Code: {1}'.format(test_url, response_code)
        self.assertTrue(check_code, error)

    # KVP Tests
    # http://localhost/onearth/test/wmts/wmts.cgi?layer=test_weekly_jpg&tilematrixset=EPSG4326_16km&Service=WMTS&Request=GetTile&Version=1.0.0&Format=image%2Fjpeg&TileMatrix=0&TileCol=0&TileRow=0&time=2012-02-22
    # http://localhost/onearth/test/twms/twms.cgi?request=GetMap&layers=test_weekly_jpg&srs=EPSG:4326&format=image%2Fjpeg&styles=&&width=512&height=512&bbox=-180,-198,108,90&TIME=2012-02-22

    # Missing parameters
    def test_missing_request(self):
        self._assert_twms_bad_request(base_url + '/mod_twms/twms.cgi?layers=test_weekly_jpg&srs=EPSG:4326&format=image%2Fjpeg&styles=&&width=512&height=512&bbox=-180,-198,108,90')

    def test_missing_layer(self):
        self._assert_twms_bad_request(base_url + '/mod_twms/twms.cgi?request=GetMap&srs=EPSG:4326&format=image%2Fjpeg&styles=&&width=512&height=512&bbox=-180,-198,108,90')

    def test_missing_format(self):
        self._assert_twms_bad_request(base_url + '/mod_twms/twms.cgi?request=GetMap&layers=test_weekly_jpg&srs=EPSG:4326&styles=&&width=512&height=512&bbox=-180,-198,108,90')

    def test_missing_tilematrixset(self):
        self._assert_twms_bad_request(base_url + '/mod_twms/twms.cgi?request=GetMap&layers=test_weekly_jpg&format=image%2Fjpeg&styles=&&width=512&height=512&bbox=-180,-198,108,90')

    def test_missing_heightwidth(self):
        self._assert_twms_bad_request(base_url + '/mod_twms/twms.cgi?request=GetMap&layers=test_weekly_jpg&srs=EPSG:4326&format=image%2Fjpeg&styles=&&bbox=-180,-198,108,90')

    def test_missing_bbox(self):
        self._assert_twms_bad_request(base_url + '/mod_twms/twms.cgi?request=GetMap&layers=test_weekly_jpg&srs=EPSG:4326&format=image%2Fjpeg&styles=&&width=512&height=512')

    # Invalid parameters
    def test_bad_request(self):
        self._assert_twms_bad_request(base_url + '/mod_twms/twms.cgi?request=NOTEXIST&layers=test_weekly_jpg&srs=EPSG:4326&format=image%2Fjpeg&styles=&&width=512&height=512&bbox=-180,-198,108,90')

    def test_bad_layer(self):
        self._assert_twms_bad_request(base_url + '/mod_twms/twms.cgi?request=GetMap&layers=bogus_layer&srs=EPSG:4326&format=image%2Fjpeg&styles=&&width=512&height=512&bbox=-180,-198,108,90')

    def test_bad_style(self):
        self._assert_twms_bad_request(base_url + '/mod_twms/twms.cgi?request=GetMap&layers=test_weekly_jpg&srs=EPSG:4326&format=image%2Fjpeg&styles=shaolin&&width=512&height=512&bbox=-180,-198,108,90')

    def test_bad_format(self):
        self._assert_twms_bad_request(base_url + '/mod_twms/twms.cgi?request=GetMap&layers=test_weekly_jpg&srs=EPSG:4326&format=image%2Fppng&styles=&&width=512&height=512&bbox=-180,-198,108,90')

    def test_bad_tilematrixset(self):
        self._assert_twms_bad_request(base_url + '/mod_twms/twms.cgi?request=GetMap&layers=test_weekly_jpg&srs=EPSG:4328&format=image%2Fjpeg&styles=&&width=512&height=512&bbox=-180,-198,108,90')

    def test_bad_heightwidth_value(self):
        self._assert_twms_bad_request(base_url + '/mod_twms/twms.cgi?request=GetMap&layers=test_weekly_jpg&srs=EPSG:4326&format=image%2Fjpeg&styles=&&width=0&height=-5&bbox=-180,-198,108,90')

    def test_bad_bbox_value(self):
        self._assert_twms_bad_request(base_url + '/mod_twms/twms.cgi?request=GetMap&layers=test_weekly_jpg&srs=EPSG:4326&format=image%2Fjpeg&styles=&&width=512&height=512&bbox=-180,-198,10')

    #def test_kvp_tilerow_out_of_range(self):
    #test_url = base_url + '/test_mod_twms_err/twms.cgi?layer=test_layer&version=1.0.0&service=wmts&request=gettile&format=image/png&tilematrixset=GoogleMapsCompatible_Level6&tilematrix=0&tilerow=5&tilecol=0'
    #test_wmts_error(self, test_url, 400, 'TileOutOfRange', 'TILEROW', 'TILEROW is out of range, maximum value is 0')

    #def test_kvp_tilecol_out_of_range(self):
    #test_url = base_url + '/test_mod_twms_err/twms.cgi?layer=test_layer&version=1.0.0&service=wmts&request=gettile&format=image/png&tilematrixset=GoogleMapsCompatible_Level6&tilematrix=0&tilerow=0&tilecol=5'
    #test_wmts_error(self, test_url, 400, 'TileOutOfRange', 'TILECOL', 'TILECOL is out of range, maximum value is 0')

    def test_bad_time_format(self):
        self._assert_twms_bad_request(base_url + '/mod_twms/twms.cgi?request=GetMap&layers=test_weekly_jpg&srs=EPSG:4326&format=image%2Fjpeg&styles=&&width=512&height=512&bbox=-180,-198,108,90&time=86753-09')

    @classmethod
    def tearDownClass(self):
        # Delete Apache test config
        os.remove(os.path.join('/etc/httpd/conf.d/' + os.path.basename(self.test_oe_config)))
        os.remove(os.path.join('/etc/httpd/conf.d/' + os.path.basename(self.test_apache_config)))
        restart_apache()
restart_apache()
if __name__ == '__main__':
    # CLI entry point: configure output file / Apache paths, then run the
    # suite with an XML test runner.
    # Parse options before running tests
    parser = OptionParser()
    parser.add_option('-o', '--output', action='store', type='string', dest='outfile', default='test_mod_twms_err_results.xml',
                      help='Specify XML output file (default is test_mod_twms_err_results.xml')
    parser.add_option('-s', '--start_server', action='store_true', dest='start_server', help='Load test configuration into Apache and quit (for debugging)')
    parser.add_option('-l', '--conf_location', action='store', dest='apache_conf_dir',
                      help='Apache config location to install test files to (default is /etc/httpd/conf.d)',
                      default=apache_conf_dir)
    parser.add_option('-u', '--base_url', action='store', dest='base_url',
                      help='Base url for the Apache install on this machine (default is http://localhost)', default=base_url)
    (options, args) = parser.parse_args()
    # Set the globals for these tests
    apache_conf_dir = options.apache_conf_dir
    base_url = options.base_url
    # --start_server option runs the test Apache setup, then quits.
    if options.start_server:
        TestModTwmsErrorHandling.setUpClass()
        sys.exit('Apache has been loaded with the test configuration. No tests run.')
    # Have to delete the arguments as they confuse unittest
    del sys.argv[1:]
    with open(options.outfile, 'wb') as f:
        print('\nStoring test results in "{0}"'.format(options.outfile))
        unittest.main(
            testRunner=xmlrunner.XMLTestRunner(output=f)
        )
|
986,417 | 6bbce1d9ea5e7027b3c1f4a77611a93b3d657172 | import django
from django import template
from django.template.base import Node, NodeList, TemplateSyntaxError, token_kwargs
from django.template.library import parse_bits
from django.utils.safestring import mark_safe
from django_components.component import registry
# Django < 2.1 compatibility
try:
    from django.template.base import TokenType
except ImportError:
    from django.template.base import TOKEN_TEXT, TOKEN_VAR, TOKEN_BLOCK

    # Shim mirroring the TokenType enum that Django 2.1 introduced, backed by
    # the old module-level constants.
    class TokenType:
        TEXT = TOKEN_TEXT
        VAR = TOKEN_VAR
        BLOCK = TOKEN_BLOCK

# Django < 2.0 compatibility
# parse_bits() gained keyword-only-argument parameters in Django 2.0; pick
# the matching keyword set for the installed version.
if django.VERSION > (2, 0):
    PARSE_BITS_DEFAULTS = {
        "varargs": None,
        "varkw": [],
        "defaults": None,
        "kwonly": [],
        "kwonly_defaults": None,
    }
else:
    PARSE_BITS_DEFAULTS = {
        "varargs": None,
        "varkw": [],
        "defaults": None,
    }

register = template.Library()

# render_context key under which rendered slot contents are stored, keyed by
# component instance, during a template render.
COMPONENT_CONTEXT_KEY = "component_context"
def get_components_from_registry(registry):
    """Return one freshly constructed instance per distinct component class
    registered in `registry` (a class registered under several names is
    instantiated only once)."""
    distinct_classes = set(registry.all().values())
    return [component_class() for component_class in distinct_classes]
@register.simple_tag(name="component_dependencies")
def component_dependencies_tag():
    """Render both the CSS and JS dependency tags."""
    # Collect the rendered dependency markup of every registered component.
    rendered_dependencies = []
    for component in get_components_from_registry(registry):
        rendered_dependencies.append(component.render_dependencies())
    # mark_safe: the markup comes from component classes, not user input.
    return mark_safe("\n".join(rendered_dependencies))
@register.simple_tag(name="component_css_dependencies")
def component_css_dependencies_tag():
    """Render the CSS tags."""
    # Collect only the CSS dependency markup of every registered component.
    rendered_dependencies = []
    for component in get_components_from_registry(registry):
        rendered_dependencies.append(component.render_css_dependencies())
    # mark_safe: the markup comes from component classes, not user input.
    return mark_safe("\n".join(rendered_dependencies))
@register.simple_tag(name="component_js_dependencies")
def component_js_dependencies_tag():
    """Render the JS tags."""
    # Collect only the JS dependency markup of every registered component.
    rendered_dependencies = []
    for component in get_components_from_registry(registry):
        rendered_dependencies.append(component.render_js_dependencies())
    # mark_safe: the markup comes from component classes, not user input.
    return mark_safe("\n".join(rendered_dependencies))
@register.simple_tag(name="component")
def component_tag(name, *args, **kwargs):
    """Render the component registered under `name` inline, forwarding any
    extra tag arguments to its render() method."""
    component_class = registry.get(name)
    component = component_class()
    return component.render(*args, **kwargs)
class SlotNode(Node):
    """Template node for {% slot %}: renders its contents and stores the
    result in the render context under the owning component, instead of
    emitting it directly."""

    def __init__(self, name, nodelist, component=None):
        self.name, self.nodelist, self.component = name, nodelist, component

    def __repr__(self):
        return "<Slot Node: %s. Contents: %r>" % (self.name, self.nodelist)

    def render(self, context):
        # Ensure the per-render storage (component -> {slot name -> html})
        # exists before writing into it.
        if COMPONENT_CONTEXT_KEY not in context.render_context:
            context.render_context[COMPONENT_CONTEXT_KEY] = {}
        if self.component not in context.render_context[COMPONENT_CONTEXT_KEY]:
            context.render_context[COMPONENT_CONTEXT_KEY][self.component] = {}
        rendered_slot = self.nodelist.render(context)
        # Stash the rendered content for the component to pick up later; the
        # slot node itself contributes nothing to the output.
        if self.component:
            context.render_context[COMPONENT_CONTEXT_KEY][self.component][
                self.name
            ] = rendered_slot
        return ""
@register.tag("slot")
def do_slot(parser, token, component=None):
    """Parse a ``{% slot "name" %}...{% endslot %}`` block.

    :param component: component instance owning the slot when the tag appears
        inside a ``{% component_block %}`` (passed by do_component).
    :return: a SlotNode holding the slot's name and parsed contents.
    :raises TemplateSyntaxError: when the tag does not have exactly one argument.
    """
    bits = token.split_contents()
    if len(bits) != 2:
        raise TemplateSyntaxError("'%s' tag takes only one argument" % bits[0])
    # Bug fix: accept the slot name in either double or single quotes —
    # previously only '"' was stripped, so {% slot 'name' %} kept its quotes.
    slot_name = bits[1].strip("\"'")
    nodelist = parser.parse(parse_until=["endslot"])
    parser.delete_first_token()
    return SlotNode(slot_name, nodelist, component=component)
class ComponentNode(Node):
    """Template node for {% component_block %}: renders the child slots into
    the render context, then renders the component with those filled slots."""

    def __init__(self, component, extra_context, slots):
        extra_context = extra_context or {}
        self.component, self.extra_context, self.slots = component, extra_context, slots

    def __repr__(self):
        return "<Component Node: %s. Contents: %r>" % (self.component, self.slots)

    def render(self, context):
        # Resolve the tag's keyword arguments against the current context.
        extra_context = {
            key: filter_expression.resolve(context)
            for key, filter_expression in self.extra_context.items()
        }
        context.update(extra_context)
        # Rendering the slot nodes stores their HTML in render_context under
        # COMPONENT_CONTEXT_KEY (see SlotNode.render); it emits nothing here.
        self.slots.render(context)
        if COMPONENT_CONTEXT_KEY in context.render_context:
            slots_filled = context.render_context[COMPONENT_CONTEXT_KEY][self.component]
            return self.component.render(slots_filled=slots_filled, **context.flatten())
        # No slots were rendered at all (e.g. an empty component block).
        return self.component.render()
@register.tag("component_block")
def do_component(parser, token):
    """
    {% component_block "name" variable="value" variable2="value2" ... %}

    Parses the block tag, instantiates the named component, builds its extra
    context from the tag's keyword arguments, and collects any nested
    {% slot %} blocks up to the matching {% endcomponent_block %}.
    :raises TemplateSyntaxError: when the component name is missing or unquoted.
    """
    bits = token.split_contents()
    tag_args, tag_kwargs = parse_bits(
        parser=parser,
        bits=bits,
        params=["tag_name", "component_name"],
        takes_context=False,
        name="component_block",
        **PARSE_BITS_DEFAULTS
    )
    tag_name = tag_args.pop(0)
    if len(bits) < 2:
        raise TemplateSyntaxError(
            "Call the '%s' tag with a component name as the first parameter" % tag_name
        )
    component_name = bits[1]
    if not component_name.startswith(('"', "'")) or not component_name.endswith(
        ('"', "'")
    ):
        raise TemplateSyntaxError(
            "Component name '%s' should be in quotes" % component_name
        )
    # Bug fix: strip whichever quote style was used. The check above accepts
    # single quotes, but previously only '"' was stripped, so a
    # single-quoted name reached the registry still wrapped in quotes.
    component_name = component_name.strip("\"'")
    component_class = registry.get(component_name)
    component = component_class()
    extra_context = {}
    if len(bits) > 2:
        # Remaining bits are key="value" pairs passed to the component.
        extra_context = component.context(**token_kwargs(bits[2:], parser))
    slots_filled = NodeList()
    tag_name = bits[0]
    # Scan forward through the block, collecting {% slot %} sub-blocks until
    # the closing {% endcomponent_block %}; other tokens are skipped.
    while tag_name != "endcomponent_block":
        token = parser.next_token()
        if token.token_type != TokenType.BLOCK:
            continue
        tag_name = token.split_contents()[0]
        if tag_name == "slot":
            slots_filled += do_slot(parser, token, component=component)
        elif tag_name == "endcomponent_block":
            break
    return ComponentNode(component, extra_context, slots_filled)
|
986,418 | a93c08285db122733789db6871b4dbb8fb9b8f37 | from django.db import models
# Create your models here.
class Search(models.Model):
    """Stores a single search topic entered by a user."""
    # Free-text topic, capped at 100 characters.
    topic = models.CharField(max_length=100)
986,419 | 0c72656d3872c3af5c9fc5339432e68ebe5673b1 | import pandas as pd
import seaborn as sns
import matplotlib
import matplotlib.pyplot as plt
# Korean font setup so matplotlib can render Hangul labels
import platform
print(platform.system())
if platform.system() == 'Windows':
    matplotlib.rcParams['font.family'] = 'Malgun Gothic'  # use 'Malgun Gothic' on Windows
else:
    matplotlib.rcParams['font.family'] = 'NanumGothicCoding'  # use 'NanumGothicCoding' elsewhere
matplotlib.rcParams['axes.unicode_minus'] = False  # keep minus signs renderable with a Hangul font
# pandas display settings
pd.set_option('display.max_columns', 20)  # max number of columns to print
pd.set_option('display.unicode.east_asian_width', True)  # align East-Asian-width unicode output
pd.set_option('display.width', 600)  # console output width
# Load the two source files: nationwide pre-sale apartment prices
# 2015-2019 (UTF-8) and 2013-2015/09 (EUC-KR, 2-row header).
df_2015_2019 = pd.read_csv('전국 전체 분양가격(2015_2019).csv', encoding='utf-8')
print(df_2015_2019.shape, '\n', df_2015_2019.head(), '\n', df_2015_2019.tail())
df_2013_2015 = pd.read_csv('전국 평균 분양가격(2015년 09월까지).csv',
                           encoding='euc-kr',
                           skiprows=1,
                           header=0, engine='python')
print(df_2013_2015.shape, '\n', df_2013_2015.head(), '\n', df_2013_2015.tail())
print(df_2013_2015.columns)
# Drop columns from index 24 onward (summary-statistics columns)
df_2013_2015 = df_2013_2015.drop(columns=df_2013_2015.columns[24:])
print(df_2013_2015.columns)
year = df_2013_2015.iloc[0]
# Forward-fill missing year values (the year appears only once per group)
year = year.fillna(method='ffill')
year
month = df_2013_2015.iloc[1]
print(year, '\n', month)
# Combine the year row and month row into "YYYY M" column labels
for i, y in enumerate(year):
    if i > 1:
        year[i] = ' '.join([str(year[i]), '{:,.0f}'.format(month[i])])
year[1] = '시군구'
print(year)
df_2013_2015.columns = year
print(df_2013_2015)
# Remove summary-statistics rows
df_2013_2015 = df_2013_2015.drop(df_2013_2015.index[[0, 1, 2, 10, 12, 22]])
print(df_2013_2015)
df_2013_2015.loc[4, '구분'] = ''
df_2013_2015.loc[14, '구분'] = ''
print(df_2013_2015)
# Build a new region column by merging province (구분) and district (시군구)
# Replace missing values with empty strings first
df_2013_2015['구분'] = df_2013_2015['구분'].fillna('')
df_2013_2015.시군구 = df_2013_2015.시군구.fillna('')
print(df_2013_2015)
df_2013_2015['지역명'] = df_2013_2015.구분 + df_2013_2015.시군구
#
print(df_2013_2015)
print(df_2013_2015.drop(['구분', '시군구'], axis=1))
df_2013_2015 = df_2013_2015.drop(['구분', '시군구'], axis=1)
# Wide-to-long reshaping starts here
print("df_2013_2015", '\n', df_2013_2015, '\n')
melt_columns = df_2013_2015.columns.copy()
print(melt_columns, type(melt_columns))
# df_2013_2015 = pd.melt(df_2013_2015, id_vars=['지역명'],
# value_vars=['2013 12', '2014 1', '2014 2', '2014 3', '2014 4',
# '2014 5', '2014 6', '2014 7', '2014 8', '2014 9', '2014 10', '2014 11',
# '2014 12', '2015 1', '2015 2', '2015 3', '2015 4', '2015 5', '2015 6',
# '2015 7', '2015 8', '2015 9'])
# Convert the Index to a plain list (dropping the trailing region column)
melt_columns = melt_columns[:len(melt_columns)-1].tolist()
print("melt_columns", '\n', melt_columns)
df_2013_2015 = pd.melt(df_2013_2015, id_vars=['지역명'],
                       value_vars=melt_columns)
print(df_2013_2015.head())
# Rename the melted columns: region / period / price
df_2013_2015.columns = ['지역명', '기간', '분양가']
print(df_2013_2015.head())
# Split the period ("YYYY M") into separate year and month columns
df_2013_2015['연도'] = df_2013_2015['기간'].apply(lambda year_month: year_month.split(' ')[0])
df_2013_2015['월'] = df_2013_2015['기간'].apply(lambda year_month: year_month.split(' ')[1])
print(df_2013_2015.head())
print(df_2015_2019.head())
print(df_2015_2019.info(), '\n\n')
print(df_2013_2015.info(), '\n\n')
# Year/month were parsed as strings; cast to int for plotting/grouping
df_2013_2015.연도 = df_2013_2015.연도.astype(int)
df_2013_2015.월 = df_2013_2015.월.astype(int)
print(df_2013_2015.info(), '\n\n')
# 2x2 overview: box/bar plots of price per region for both datasets
plt.figure(figsize=(18, 10))
plt.subplot(221)
sns.boxplot(data=df_2013_2015, x='지역명', y='분양가', hue='연도')
plt.subplot(222)
sns.barplot(data=df_2013_2015, x='지역명', y='분양가', hue='연도')
plt.subplot(223)
sns.boxplot(data=df_2015_2019, x='지역명', y='평당분양가격', hue='연도')
plt.subplot(224)
sns.barplot(data=df_2015_2019, x='지역명', y='평당분양가격', hue='연도')
plt.suptitle("2013-2015, 2015-2019년 지역별 평당분양가격", size=20)
plt.show()
# Align column names between the two datasets before concatenating
print(df_2013_2015.columns, '\n\n', df_2015_2019.columns, '\n\n')
df_2013_2015_prepare = df_2013_2015[['지역명', '연도', '월', '분양가']]
total_columns = ['지역명', '연도', '월', '평당분양가격']
df_2013_2015_prepare.columns = total_columns
df_2015_2019_prepare = df_2015_2019[['지역명', '연도', '월', '평당분양가격']]
print(df_2013_2015_prepare.head(), '\n\n', df_2015_2019_prepare.head())
print(df_2013_2015_prepare.shape, '\n\n', df_2015_2019_prepare.shape)
# Concatenate into one 2013-2019 dataset
df_2013_2019 = pd.concat([df_2013_2015_prepare, df_2015_2019_prepare])
print("total : ", df_2013_2019.shape)
# Visualize nationwide new private apartment price trends, 2013 - Nov 2019
df_year_mean = df_2013_2019.groupby(['연도'])['평당분양가격'].mean()
print(df_year_mean)
fig = plt.figure(figsize=(15, 12))
ax1 = fig.add_subplot(2, 2, 1)
ax2 = fig.add_subplot(2, 2, 2)
ax3 = fig.add_subplot(2, 2, 3)
ax4 = fig.add_subplot(2, 2, 4)
plt.subplots_adjust(left=0.1, right=0.9, bottom=0.1, top=0.87, wspace=0.5, hspace=0.5)  # set margins
df_year_mean.plot.bar(rot=0, ax=ax1)
sns.barplot(data=df_2013_2019, x='연도', y='평당분양가격', ax=ax2)
df_2013_2019[['연도', '지역명', '평당분양가격']].boxplot(by=['연도'], ax=ax3)
# Daegu-only subset for the last panel
df_2013_2019_daegu = df_2013_2019.loc[df_2013_2019.지역명 == '대구']
sns.boxplot(x='연도', y='평당분양가격', data=df_2013_2019_daegu, ax=ax4)
ax1.set_title('연도별 평균 평당분양가격 - bar')
ax2.set_title('연도별 평균 평당분양가격 - seaborn')
ax3.set_title('연도별 평균 평당분양가격 - boxplot')
ax4.set_title('대구 연도별 평균 평당분양가격 - boxplot')
fig.suptitle('연도별 평균 평당분양가격')
plt.show()
|
986,420 | 2895bbc6da5091e0bdd5b4f2b16e82183824ff03 | #coding=utf-8
import RPi.GPIO as GPIO
import time
# Servo control pin (BOARD numbering) and PWM setup.
servopin = 18
GPIO.setmode(GPIO.BOARD)
GPIO.setup(servopin, GPIO.OUT, initial=False)
p = GPIO.PWM(servopin,50) # 50 Hz — the standard hobby-servo frame rate
p.start(0)
# Give the servo time to settle before the first command.
time.sleep(2)
def turn(s, t):
    """Sweep the servo from angle `s` to angle `t` in 10-degree steps.

    Duty cycle mapping: 2.5% at 0 degrees up to 12.5% at 180 degrees, on the
    module-level 50 Hz PWM channel `p`.

    Bug fix: the original had no `else` between the two sweep loops, so an
    upward sweep (s <= t) fell through into the downward loop and immediately
    swept back past the target. The two directions are now mutually exclusive.
    """
    k = s
    if s <= t:
        # Sweep upward toward the target angle.
        while k <= t:
            p.ChangeDutyCycle(2.5 + 10 * k / 180)  # command the angle
            time.sleep(0.02)                       # let the 20 ms frame complete
            p.ChangeDutyCycle(0)                   # stop pulsing (idle signal)
            time.sleep(0.02)
            k += 10
    else:
        # Sweep downward toward the target angle.
        while k >= t:
            p.ChangeDutyCycle(2.5 + 10 * k / 180)  # command the angle
            time.sleep(0.02)                       # let the 20 ms frame complete
            p.ChangeDutyCycle(0)                   # stop pulsing (idle signal)
            time.sleep(0.02)
            k -= 10
|
986,421 | 7008dc1f4bad0bf31d9a41e445fe7488fff4f1ac | import math
def solve_c(path="E:\\Users\\Neta\\Desktop\\dashyts\\googlejam.txt"):
    """Solve every 'bathroom stalls' case listed in the file at *path*.

    File format: first line is the number of cases T; each following line
    holds "N K" (N stalls, K people).  For each case the answer is the
    (max, min) distance pair for the K-th person, printed in the Code Jam
    "Case #i: max min" format.

    Fixes: the input file is now closed via ``with`` (the original leaked
    the handle), and the printed lines are also returned as a list so the
    function is testable; callers that ignored the old ``None`` return
    are unaffected.
    """
    answers = []
    with open(path, "r") as f:
        num_cases = int(f.readline())
        for case in range(1, num_cases + 1):
            n_str, k_str = f.readline().split()
            n, k = int(n_str), int(k_str)
            # after k-1 people, the stalls are split into 2**floor(log2(k))
            # gaps at the current halving depth
            segments = 2 ** math.floor(math.log2(k))
            remaining = n + 1 - k
            # size of the gap the k-th person splits
            gap = math.ceil(remaining / segments) - 1
            if gap % 2 == 0:
                line = "Case #" + str(case) + ": " + str(gap // 2) + " " + str(gap // 2)
            else:
                line = "Case #" + str(case) + ": " + str(gap // 2 + 1) + " " + str(gap // 2)
            print(line)
            answers.append(line)
    return answers
|
986,422 | 21336c984053c7f4827320b54f601a315b522e02 | import sys
import os
import numpy as np
import skimage.io as skio
import matplotlib.pyplot as plt
import skimage as ski
import math
def returnRowsAndCols(image):
    """Return the (rows, cols) dimensions of a 2-D image array."""
    rows, cols = image.shape[0], image.shape[1]
    return rows, cols
def negativefunc(originalImage):
    """Return the photometric negative of a float image in [0, 1].

    Every pixel p maps to (2-1) - p = 1 - p.  The original looped over
    ``range(0, rows-1)`` / ``range(0, cols-1)``, leaving the last row and
    last column of the output stuck at zero; the vectorised form covers
    every pixel and returns a float array like the original did.
    """
    return 1.0 - np.asarray(originalImage, dtype=float)
# Sharpening Part
def makeAnewImageWithSomeValues(image, slidingWindowSize, value):
    """Return *image* padded on every side with the constant *value*.

    The padding width is half the sliding-window size in each dimension,
    so a window centred on any original pixel fits inside the result.
    The per-pixel copy loop of the original is replaced with a single
    slice assignment (same result, O(1) Python statements).
    """
    pad_r = int(slidingWindowSize[0] / 2)
    pad_c = int(slidingWindowSize[1] / 2)
    h, w = image.shape[0], image.shape[1]
    padded = np.full((h + 2 * pad_r, w + 2 * pad_c), value, image.dtype)
    padded[pad_r:pad_r + h, pad_c:pad_c + w] = image
    return padded
def giveAWindowSizedArrayOnGivenPoint(padded_image, sliding_window_size, index):
    """Extract the window of *sliding_window_size* whose top-left corner
    sits at *index* in the padded image.

    Returns a float array (matching the original's np.zeros buffer).
    """
    win_rows, win_cols = sliding_window_size
    row0, col0 = index
    window = np.zeros(sliding_window_size)
    window[:, :] = padded_image[row0:row0 + win_rows, col0:col0 + win_cols]
    return window
def applyFilter(window_original_image, filterToApply):
    """Return the correlation of the window with the filter, truncated to int.

    Element-wise product summed over the whole window (the original's
    double Python loop, vectorised with NumPy).
    """
    products = np.asarray(window_original_image) * np.asarray(filterToApply)
    return int(np.sum(products))
def traverseWithslidingWindowSize(original_image, slidingWindowSize, padded_image, filterToApply, typeOfFilter):
    """Slide the window over every pixel and apply the selected filter.

    typeOfFilter: 1=median, 2=average, 3=laplacian, 4=sobel,
    5=erosion, 6=dilation.
    NOTE(review): the median/average/laplacian/sobel helpers are not
    defined in this file chunk; the branches stay lazy so only the
    filter types actually used (5 and 6 in this script) need to resolve.
    """
    height, width = original_image.shape[0], original_image.shape[1]
    result = np.zeros(original_image.shape)
    for row in range(height):
        for col in range(width):
            window = giveAWindowSizedArrayOnGivenPoint(padded_image, slidingWindowSize, (row, col))
            if typeOfFilter == 1:
                result[row, col] = medianFilter(window)
            elif typeOfFilter == 2:
                result[row, col] = averageFilter(window, filterToApply)
            elif typeOfFilter == 3:
                result[row, col] = laplacianFilter(window, filterToApply)
            elif typeOfFilter == 4:
                result[row, col] = sobelFilter(window, filterToApply)
            elif typeOfFilter == 5:
                result[row, col] = erosion(window, filterToApply)
            elif typeOfFilter == 6:
                result[row, col] = dillation(window, filterToApply)
    return result
def printImage(image):
    """Print the pixel values row by row, one blank line between rows."""
    for row in image:
        for value in row:
            print(value, end=" ")
        print("\n")
# ------------------------------------------------------------------------------------------------------------------
# Morphology Part
def number_of_nonzero_elements(window):
    """Count the entries of *window* that are exactly 1.

    NOTE(review): despite the name, only values equal to 1 are counted
    (matching the original loop's ``== 1`` test).  That is correct for
    the binary structuring elements used in this file, but values like
    2 are NOT counted - confirm before reusing elsewhere.
    """
    return int(np.sum(np.asarray(window) == 1))
def fit_hit_miss(window, filter_to_apply):
    """Classify the structuring-element overlap: 0 = fit, 1 = hit, 2 = miss."""
    overlap = applyFilter(window, filter_to_apply)
    if overlap == number_of_nonzero_elements(filter_to_apply):
        return 0  # fit: every structuring-element pixel is covered
    if overlap > 0:
        return 1  # hit: at least one pixel is covered
    return 2      # miss: no overlap at all
def erosion(window, filter_to_apply):
    """Erosion output for one window: 1 only when the element fully fits."""
    return 1 if fit_hit_miss(window, filter_to_apply) == 0 else 0
def apply_erosion(original_image, filter_to_apply, filter_size):
    """Erode the image: zero-pad, then slide the structuring element (mode 5)."""
    padded = makeAnewImageWithSomeValues(original_image, filter_size, 0)
    return traverseWithslidingWindowSize(original_image, filter_size, padded, filter_to_apply, 5)
# 5 stands for erosion
def dillation(window, filter_to_apply):
    """Dilation output for one window: 1 on a fit or a hit, 0 on a miss."""
    return 0 if fit_hit_miss(window, filter_to_apply) == 2 else 1
def apply_dillation(original_image, filter_to_apply, filter_size):
    """Dilate the image: zero-pad, then slide the structuring element (mode 6)."""
    padded = makeAnewImageWithSomeValues(original_image, filter_size, 0)
    return traverseWithslidingWindowSize(original_image, filter_size, padded, filter_to_apply, 6)
# 6 stands for dillation
def apply_opening(original_image, filter_to_apply, filter_size):
    """Morphological opening: erosion followed by dilation."""
    eroded = apply_erosion(original_image, filter_to_apply, filter_size)
    return apply_dillation(eroded, filter_to_apply, filter_size)
def apply_closing(original_image, filter_to_apply, filter_size):
    """Morphological closing: dilation followed by erosion."""
    dilated = apply_dillation(original_image, filter_to_apply, filter_size)
    return apply_erosion(dilated, filter_to_apply, filter_size)
# def apply_region_filling(original_image, point, structuring_element, structuring_element_size):
# temp =
def make_structuring_element(size, value):
    """Return a structuring element of the given shape filled with *value*."""
    return np.full(size, value)
# ------------------------------------------------------------------------------------------------------------------
# Task 1
# 1
image = skio.imread("morphology tasks/fingerprint-1.tif", as_gray=True)
image1 = np.asarray(image)
image1 = ski.img_as_float(image1)
# 2
structuring_element_size = [3, 3]
structuring_element = make_structuring_element(structuring_element_size, 1)
# 3
image2 = apply_opening(image1, structuring_element, structuring_element_size)
# 4
image3 = apply_closing(image2, structuring_element, structuring_element_size)
# 5
# plt.imshow(image1, cmap="gray")
# plt.show()
# plt.imshow(image2, cmap="gray")
# plt.show()
# plt.imshow(image3, cmap="gray")
# plt.show()
printImage(image1)
# ------------------------------------------------------------------------------------------------------------------
# Task 2
# # 1
# image4 = skio.imread("morphology tasks/wires.tif", as_gray=True)
# image5 = np.asarray(image4)
# image5 = ski.img_as_float(image4)
# # 2
# structuring_element_size = [15, 15]
# structuring_element = make_structuring_element(structuring_element_size, 1)
# image6 = apply_closing(image5, structuring_element, structuring_element_size)
# # 3
# structuring_element_size = [53, 53]
# structuring_element = make_structuring_element(structuring_element_size, 1)
# image7 = apply_closing(image5, structuring_element, structuring_element_size)
# # 4
# plt.imshow(image5, cmap="gray")
# plt.show()
# plt.imshow(image6, cmap="gray")
# plt.show()
# plt.imshow(image7, cmap="gray")
# plt.show()
# ------------------------------------------------------------------------------------------------------------------
# Task 3
# # 1
# image8 = skio.imread("morphology tasks/FigP0919(UTK).tif", as_gray=True)
# image9 = np.asarray(image8)
# image9 = ski.img_as_float(image9)
# # 2
# structuring_element_size = [3, 3]
# structuring_element = make_structuring_element(structuring_element_size, 1)
# image10 = apply_erosion(image9, structuring_element, structuring_element_size)
# image11 = image9-image10
# # 3
# plt.imshow(image9, cmap="gray")
# plt.show()
# plt.imshow(image11, cmap="gray")
# plt.show()
# ------------------------------------------------------------------------------------------------------------------
|
986,423 | 81781d5f67f2e3fc2159c7ea7db51d4767936057 | # North Sea example
# =================
#
# .. highlight:: python
#
# This example gives an overview of how to set up a tidal model of the North Sea.
#
# In this demo, we are working with geographic data and so need to import a number of
# additional packages and configure for the right timezones and map projection. It is
# common to use UTC as the time zone, since it is used by most observation data sets and
# domains often cover multiple time zones. For the map projection, we use the UTM
# coordinate system, which subdivides the surface of the Earth into zones and applies a
# tangent plane approximation within each zone. In our case, UTM zone 30 is the
# appropriate one.
from thetis import *
import thetis.coordsys as coordsys
import thetis.forcing as forcing
import csv
import os
# All timestamps are handled in UTC; geographic coordinates use UTM zone 30.
sim_tz = timezone.pytz.utc
coord_system = coordsys.UTMCoordinateSystem(utm_zone=30)
# Having imported all the relevant packages, the first thing we need is a mesh of the
# domain of interest. This part is skipped for the purposes of this demo, but details
# can be found in the corresponding
# `example <https://github.com/thetisproject/thetis/tree/master/examples/north_sea>`__
# in the Thetis source code. It involves extracting coastline data from the GSHHS
# coastline data set :cite:`GSHHS:1996` and using this to generate a mesh using qmesh
# :cite:`qmesh:2018`. The mesh is stored as a GMSH file named `north_sea.msh` and is
# plotted below.
#
# .. figure:: north_sea_mesh.png
# :figwidth: 80%
# :align: center
#
# Note that the boundary segments are given different tags, depending on whether they
# correspond to open ocean (tag 100) or coasts (tag 200). This is because we impose
# different boundary conditions in each case.
#
# We set up the (UTM) mesh and calculate its longitude-latitude coordinates as follows:
# Load the UTM mesh and derive per-node longitude/latitude fields from it.
mesh2d = Mesh("north_sea.msh")
lonlat = coord_system.get_mesh_lonlat_function(mesh2d)
lon, lat = lonlat
# With the mesh, we can now move on to set up fields defined upon it. For the
# bathymetry data, we use the
# `ETOPO1 data set <https://www.ngdc.noaa.gov/mgg/global>`__
# :cite:`ETOPO1:2009`, :cite:`ETOPO1tech:2009`.
# A NetCDF file containing such data for the North Sea can be downloaded from the
# webpage, stored as `etopo1.nc` and then interpolated onto the unstructured mesh
# using SciPy. An `interpolate_bathymetry` script is provided in the
# `example <https://github.com/thetisproject/thetis/tree/master/examples/north_sea>`__
# in the Thetis source code, which follows the
# `recommendations in the Firedrake documentation for interpolating data <https://firedrakeproject.org/interpolation.html#interpolation-from-external-data>`__.
# However, the NetCDF file cannot be included here for
# copyright reasons, so we insteady provide a HDF5 file containing the data already
# interpolated onto the mesh. Note that HDF5 files currently have to be saved and
# loaded using the same number of processors. The bathymetry field was generated by
# a serial run, so the following will not work in parallel.
# P1 (continuous, degree-1) function space for scalar fields on the mesh.
P1_2d = get_functionspace(mesh2d, "CG", 1)
bathymetry_2d = Function(P1_2d, name="Bathymetry")
# Load the pre-interpolated bathymetry from HDF5 (must match the number of
# processors used to write it - serial here) and copy it onto our function.
with CheckpointFile("north_sea_bathymetry.h5", "r") as f:
    m = f.load_mesh("firedrake_default")
    g = f.load_function(m, "Bathymetry")
    bathymetry_2d.assign(g)
# The resulting bathymetry field is plotted below.
#
# .. figure:: north_sea_bathymetry.png
# :figwidth: 80%
# :align: center
#
# Observe that the plot also includes eight orange crosses. These indicate tide
# gauges where we would like to compare our tidal model against real data. For
# details on obtaining such data, we refer to the
# `example <https://github.com/thetisproject/thetis/tree/master/examples/north_sea>`__
# in the source code. For the purposes of this demo, we have included a CSV file
# named `stations_elev.csv` containing the gauge locations. We can read it as follows:
def read_station_data(path="stations_elev.csv"):
    """Read tide-gauge station locations from a CSV file.

    The file must have ``name``, ``latitude`` and ``longitude`` columns
    (extra spaces after delimiters are tolerated).

    :kwarg path: CSV file to read; defaults to the demo's station list,
        so existing no-argument callers are unaffected.
    :returns: dict mapping station name to a ``(latitude, longitude)`` tuple.
    """
    with open(path, "r") as csvfile:
        reader = csv.DictReader(csvfile, delimiter=",", skipinitialspace=True)
        return {
            row["name"]: (float(row["latitude"]), float(row["longitude"]))
            for row in reader
        }
# We also require fields for the Manning friction coefficient and the Coriolis forcing.
# These can be set up as follows:
# Spatially-uniform Manning bottom-friction coefficient.
manning_2d = Function(P1_2d, name="Manning coefficient")
manning_2d.assign(3.0e-02)
# omega is presumably Earth's rotation rate in rad/s; the interpolated field
# is the Coriolis parameter f = 2*omega*sin(latitude).
omega = 7.292e-05
coriolis_2d = Function(P1_2d, name="Coriolis forcing")
coriolis_2d.interpolate(2 * omega * sin(lat * pi / 180.0))
# We also need to choose a time window of interest and discretise it appropriately.
# We arbitrarily choose the simulation to start at 00:00 UTC on 15th January 2022
# and end exactly three days later. We are using a fairly coarse mesh (and will use
# an implicit time integration scheme) and so can get away with using timesteps of
# one hour.
# Three simulated days starting 2022-01-15 00:00 UTC, one-hour timesteps,
# with outputs exported every hour.
start_date = datetime.datetime(2022, 1, 15, tzinfo=sim_tz)
end_date = datetime.datetime(2022, 1, 18, tzinfo=sim_tz)
dt = 3600.0
t_export = 3600.0
# We are now in a position where we can create the Thetis solver object and pass it
# all of the above parameters. We choose the implicit time integration scheme DIRK22
# because it is more suitable than the default Crank-Nicolson integrator in the case
# where we take large timesteps. (Crank-Nicolson is asymptotically unstable.)
# Configure the 2D shallow-water solver: P1DG-P1DG elements, Coriolis and
# Manning friction from the fields above, and an implicit DIRK22 integrator
# (stable for the large 1 h timestep, unlike Crank-Nicolson).
solver_obj = solver2d.FlowSolver2d(mesh2d, bathymetry_2d)
options = solver_obj.options
options.element_family = "dg-dg"
options.polynomial_degree = 1
options.coriolis_frequency = coriolis_2d
options.manning_drag_coefficient = manning_2d
options.horizontal_velocity_scale = Constant(1.5)
options.use_lax_friedrichs_velocity = True
options.simulation_export_time = t_export
options.simulation_initial_date = start_date
options.simulation_end_date = end_date
options.swe_timestepper_type = "DIRK22"
options.swe_timestepper_options.use_semi_implicit_linearization = True
options.timestep = dt
options.fields_to_export = ["elev_2d", "uv_2d"]
options.fields_to_export_hdf5 = []
solver_obj.create_equations()
# To extract free surface elevation timeseries at the tide gauges, we add in
# some :class:`TimeSeriesCallback2D` instances. We need to provide the solver
# object, the field names to be evaluated, the UTM coordinates and finally the
# name of each tide gauge.
# Register one elevation time-series callback per tide gauge, converting
# each station's lon/lat to UTM coordinates first.
for name, (sta_lat, sta_lon) in read_station_data().items():
    sta_x, sta_y = coord_system.to_xy(sta_lon, sta_lat)
    cb = TimeSeriesCallback2D(
        solver_obj,
        ["elev_2d"],
        sta_x,
        sta_y,
        name,
        append_to_log=False,
    )
    solver_obj.add_callback(cb)
# We still need to add a crucially important component to our tidal model...
# the tides! To do this, we make use of the
# `TPXO tidal forcing data set <https://www.tpxo.net/>`__ :cite:`TPXO:2002`.
# In order for this demo to work you will need to obtain NetCDF files for
# the forcing data as described on the
# `TPXO access page <https://www.tpxo.net/tpxo-products-and-registration>`__.
# We recommend that you store them in a subdirectory `tpxo` of a directory
# either located in a subdirectory `data` or referenced by the environment
# variable `$DATA`.
#
# With the data in place, we can set up a Firedrake :class:`Function` to control
# the elevation forcings on ocean boundaries and pass them into a
# :class:`TPXOTidalBoundaryForcing` instance.
# TPXO NetCDF files are expected under $DATA/tpxo (or ./data/tpxo).
data_dir = os.path.join(os.environ.get("DATA", "./data"), "tpxo")
if not os.path.exists(data_dir):
    raise IOError(f"Forcing data directory {data_dir} does not exist")
# Tidal constituents used to synthesise the boundary elevation signal.
forcing_constituents = ["Q1", "O1", "P1", "K1", "N2", "M2", "S2", "K2"]
elev_tide_2d = Function(P1_2d, name="Tidal elevation")
# The forcing object writes interpolated TPXO elevations into elev_tide_2d
# on the open-ocean boundary (tag 100).
tbnd = forcing.TPXOTidalBoundaryForcing(
    elev_tide_2d,
    start_date,
    coord_system,
    data_dir=data_dir,
    constituents=forcing_constituents,
    boundary_ids=[100],
)
# Set the tidal field at time zero (of the simulation).
tbnd.set_tidal_field(0.0)
# As mentioned above, the forcing data drives the boundary conditions on
# boundary segments with tag 100. For open ocean boundaries in sufficiently deep
# open water, it is usually sufficient to use a zero boundary condition for the
# velocity because its magnitude is not significant. We pass this information to
# the solver object as follows:
# Open-ocean boundary (tag 100): forced elevation, zero velocity.
solver_obj.bnd_functions["shallow_water"] = {
    100: {"elev": elev_tide_2d, "uv": Constant(as_vector([0, 0]))},
}
# Note that we have assumed a fully "spun-up" tidal model here. It is standard
# practice to "spin-up" the model from a state of rest, slowly introducing the
# tidal forcings over one or two simulated weeks. For such preparatory runs, we
# need to modify the boundary condition expressions slightly. See the example in
# the source code for details on this. After a two week spin-up period, we obtain
# the following free surface elevation field (as well as a velocity field).
#
# .. figure:: north_sea_init.png
# :figwidth: 80%
# :align: center
#
# For the purposes of this demo, we have included HDF5 files containing spun-up
# elevation and velocity fields in the `outputs_spinup` directory. These can be
# used to initialise the model as follows. Again, the spun-up HDF5 data were
# generated by a serial run, so this demo will not work in parallel.
# Resume from day 14 of the spin-up run and reset the simulation clock to 0.
solver_obj.load_state(14, outputdir="outputs_spinup", t=0, iteration=0)
# The spin-up run was exported to HDF5 at daily intervals, so the first argument
# indicates that we resume on the fifteenth day (counting from zero). The last
# two keyword arguments are used to reset the clock for the subsequent simulation.
#
# The final ingredient that we need is a callback function that updates the tidal
# forcings as the simulation progresses. With that, we are ready to run the model!
def update_forcings(t):
    # Refresh the TPXO tidal elevation on the forced boundary at
    # simulation time t; the solver calls this before each step.
    tbnd.set_tidal_field(t)
solver_obj.iterate(update_forcings=update_forcings)
# The elevation timeseries at the tide gauges should be as shown in the following
# plot, along with in-situ data from the
# `CMEMS catalogue <http://www.marineinsitu.eu/access-data/>`__ :cite:`CMEMS:2022`.
# Observe that the tidal cycles are well matched. The magnitudes are not so well
# matched. These results are generated on a coarse mesh, for the purposes of having
# a demo that can be run in a short amount of time. In order to more accurately
# approximate the observations, it would be beneficial to use a finer mesh. It could
# also be beneficial to calibrate the various parameters that define the tidal model,
# for example the Manning friction coefficient.
#
# .. figure:: north_sea_elev_ts.png
# :figwidth: 100%
# :align: center
#
# Scripts for generating all of the figures in this demo can be found in the
# `example <https://github.com/thetisproject/thetis/tree/master/examples/north_sea>`__
# in the source code.
#
# This tutorial can be dowloaded as a Python script `here <demo_2d_north_sea.py>`__.
#
#
# .. rubric:: References
#
# .. bibliography:: demo_references.bib
# :filter: docname in docnames
|
986,424 | 91f695e2a981d21a031f1f41ee9c80da9494447e | from experience_replay import PrioritizedExperienceReplay
import numpy as np
from numpy import clip
from numpy.random import rand, randint
from atari_preprocessing import ProcessedAtariEnv
from deep_q_networks import DeepQNetwork
import tensorflow as tf
from tensorflow import math
argmax = math.argmax
import time
import os
from log_training import QLearningLogger
class BaseQAgent:
    """
    ******************
    **  BaseQAgent  **
    ******************
    Base class for logged training and testing of a deep Q-learning agent.
    Methods for decision making and performing parameter updates will be defined in subclasses.
    -----------
    Parameters:
    -----------
    env: object;
        OpenAI gym learning environment
    memory: object;
        an instance of PrioritizedExperienceReplay for storage and sampling of an agent's experiences
    policy_network: object;
        an instance of DeepQNetwork for parameter updates and decision making
    target_network: object;
        an instance of DeepQNetwork for the estimation of target values for parameter updates
    frame_shape: tuple;
        the shape of one frame rendered by the environment
    save_path: string;
        the path in which the training logs will be stored
    logger: object;
        an instance of QLearningLogger for detailed documentation of the learning progress
    """
    # NOTE(review): the env/memory defaults are evaluated once at class
    # definition time and shared across instances - confirm agents are
    # always constructed with explicit env/memory arguments.
    def __init__(self,
                 env = ProcessedAtariEnv(),
                 memory = PrioritizedExperienceReplay(),
                 policy_network = None,
                 target_network = None,
                 frame_shape = (84, 84),
                 save_path = None,
                 logger = QLearningLogger):
        self.env = env
        self.memory = memory
        self.policy_network = policy_network
        self.target_network = target_network
        self.num_stacked_frames = memory.num_stacked_frames
        self.save_path = None
        if save_path is not None:
            self.save_path = save_path
        self.logger = logger(self.save_path)
        # internal variables
        # state buffer shaped (1, stacked_frames, H, W) so it can be fed
        # to the network without reshaping
        self._current_state = np.zeros((1, self.num_stacked_frames, *frame_shape), dtype = np.uint8)
        self._step_counter = 0
        # logging variables
        self._q_values = []
        self._losses = []
        self._score = 0.0
    # the following three hooks are implemented by subclasses
    def _batch_update(self):
        pass
    def _predict_current_q_values(self):
        pass
    def _make_decision(self):
        pass
    def _random_warmup(self, num_steps):
        """Prefill the replay memory with num_steps random transitions"""
        # NOTE(review): uses self.num_actions, which this base class never
        # sets (only the epsilon agents do) - confirm before calling on a
        # subclass that does not define it.
        new_frame = self.env.reset()
        reward = 0.0
        action = 0
        done = False
        self.memory.add_experience(action, reward, new_frame, 1, done)
        for i in range(num_steps):
            action = np.random.randint(self.num_actions)
            new_frame, reward, done, _ = self.env.step(action)
            self.memory.add_experience(action, reward, new_frame, 1, done)
            if done:
                new_frame = self.env.reset()
                self.memory.add_experience(0, 0.0, new_frame, 1, False)
        self.memory.add_experience(0, 0.0, new_frame, 1, True)
    def _start_episode(self, test = False):
        """Start episode with a number of num_stacked_frames no-ops
        to get the first state of an episode"""
        # NOTE(review): add_experience is called here with 4 positional
        # arguments, while every other call site in this class passes 5
        # (an extra '1' before done) - confirm the memory API; one of
        # these call shapes looks stale.
        self.env.true_reset()
        new_frame = self.env.reset()
        if not test:
            self.memory.add_experience(0, 0.0, new_frame, False)
        for i in range(self.num_stacked_frames):
            self._current_state[0, i] = new_frame
            new_frame, reward, done, _ = self.env.step(0)
            if not test:
                self.memory.add_experience(0, reward, new_frame, done)
    def _update_target_model(self):
        """Copy current weights of the policy network to the target network"""
        self.target_network.model.set_weights(self.policy_network.model.get_weights())
    def _pretrain(self, pretrain_steps, target_interval):
        """Pretrain the agent on its memory (prefilled with demonstrations)"""
        for step in range(pretrain_steps):
            self._batch_update(pretrain = True)
            if step % target_interval == 0:
                self.logger.save_model(self.policy_network.model)
                print('\nStep: %i' %(step))
                self._update_target_model()
                print('Validation Score: %f' %(self.test()[0]))
                print('\n')
    def train(self,
              num_episodes = 100,
              num_steps = 500000,
              max_steps_per_episode = 10000,
              target_interval = 10000,
              learning_interval = 4,
              frame_skip = 1,
              warmup_steps = None,
              pretrain_steps = None,
              output_freq = 50,
              save_freq = 5,
              store_memory = False):
        """
        Train a deep Q-learning agent
        -----------
        Parameters:
        -----------
        num_episodes: integer;
            the minimum number of training episodes
        num_steps: integer;
            the minimum number of training steps
        max_steps_per_episode: integer;
            the maximum number of steps per episode
        target_interval: integer;
            the length of the interval in which the target network is kept unchanged
        learning_interval: integer;
            in every learning_interval-th step a parameter update is performed
        frame_skip: integer;
            the number of frames in which chosen actions are repeated
            (Attention: in many environments this technique is already handled, so the default is 1)
        warmup_steps: integer;
            the number of random transitions for prefilling the memory
        pretrain_steps: integer;
            the number of steps to pretrain the agent on its memory (prefilled with demonstrations)
        output_freq: integer;
            the frequency of outputting the current training logs
        save_freq: integer;
            the frequency of saving the current training logs
        store_memory: boolean;
            whether to store the whole memory of the agent for later use (Attention: this file may be very large)
        """
        # prefill memory with random transitions if requested
        if warmup_steps is not None:
            self._random_warmup(warmup_steps)
        # pretrain the agent on its own memory
        if pretrain_steps is not None:
            self._pretrain(pretrain_steps, target_interval)
        # logging initialization
        self._score, self._q_values, self._losses = 0., [], []
        raw_frames = np.zeros(shape = (max_steps_per_episode, *self.env._unprocessed_frame.shape), dtype = np.uint8)
        episode_idx = 0
        # run until BOTH the episode and the step budget are exhausted
        while episode_idx < num_episodes or self._step_counter < num_steps:
            # reset environment and get first state
            self._start_episode()
            for i in range(max_steps_per_episode):
                #-------------------------------------------------------------------------------#
                #####################
                # Interactive Phase #
                #####################
                # choose an action, observe reactions of the environment and
                # add this experience to the agent's memory
                if self._step_counter % frame_skip == 0:
                    action = self._make_decision()
                new_frame, reward, done, _ = self.env.step(action)
                self.memory.add_experience(action, reward, new_frame, 1, done)
                # update current state (shift the frame stack left by one)
                self._current_state[0, :(self.num_stacked_frames-1)] = self._current_state[0, 1:]
                self._current_state[0, self.num_stacked_frames-1] = new_frame
                #-------------------------------------------------------------------------------#
                #-------------------------------------------------------------------------------#
                ##################
                # Learning Phase #
                ##################
                # perform a parameter update of the current policy model
                if self._step_counter % learning_interval == 0:
                    self._batch_update()
                # update the target model
                if self._step_counter % target_interval == 0:
                    self._update_target_model()
                #-------------------------------------------------------------------------------#
                # logging
                self._score += self.env._unprocessed_reward
                raw_frames[i] = self.env._unprocessed_frame
                self._step_counter += 1
                if self.env.was_real_done:
                    self.logger.add_episode_logs(self._step_counter, self._score, self._q_values, self._losses, raw_frames[:i])
                    self._score, self._q_values, self._losses = 0., [], []
                    break
                # 'done' without was_real_done means a life was lost:
                # soft-reset and keep playing the same logical episode
                if done:
                    self.env.reset()
            if not self.env.was_real_done:
                self.memory.add_experience(action, reward, new_frame, 1, True)
                self.logger.add_episode_logs(self._step_counter, self._score, self._q_values, self._losses, raw_frames[:i])
                self._score, self._q_values, self._losses = 0., [], []
            if episode_idx%(num_episodes/output_freq)==0:
                validation_score, validation_frames = self.test(record = True, max_steps_per_episode = max_steps_per_episode)
                #validation_score, validation_frames = 0, []
                lower_idx = int(clip(episode_idx-(num_episodes/output_freq)+1, 0, num_episodes-1))
                self.logger.show_progress(lower_idx, episode_idx, validation_score, validation_frames, self.policy_network.model)
            if episode_idx%(num_episodes/save_freq)==0:
                self.logger.make_plots()
                self.logger.save_all(self.policy_network.model, self.memory, store_memory)
            episode_idx += 1
        print('==========================\ntraining session completed\n==========================\n\n\n=======\nSummary\n======='
            )
        self.logger.show_progress(0, num_episodes, summary = True)
        self.logger.make_plots()
        self.logger.save_all(self.policy_network.model, self.memory, store_memory)
    def test(self, record = False, eps = 0.001, max_steps_per_episode = 10000):
        """
        Test a deep Q-learning agent in one episode
        using an epsilon-greedy strategy
        -----------
        Parameters:
        -----------
        record: boolean;
            whether to record the frames of the episode
        eps: float;
            the probability of choosing a random action
        max_steps_per_episode: integer;
            the maximum number of steps per episode
        """
        frames = []
        done = False
        total_reward = 0
        reward = 0
        self.env.true_reset()
        new_frame, _, _, _ = self.env.step(0)
        t = 0
        while not self.env.was_real_done and t<max_steps_per_episode:
            # soft-reset after a lost life; the logical episode continues
            if done:
                self.env.reset()
            if record:
                frames.append(self.env._unprocessed_frame)
            action = self._make_decision(test_eps = eps)
            new_frame, reward, done, info = self.env.step(action)
            total_reward += self.env._unprocessed_reward
            # update current state (shift the frame stack left by one)
            self._current_state[0, :(self.num_stacked_frames-1)] = self._current_state[0, 1:]
            self._current_state[0, self.num_stacked_frames-1] = new_frame
            t += 1
        return(total_reward, frames)
class DQNAgent(BaseQAgent):
def __init__(self,
env = ProcessedAtariEnv(),
memory = PrioritizedExperienceReplay(),
policy_network = None,#DeepQNetwork(),
target_network = None,#DeepQNetwork(),
frame_shape = (84, 84),
save_path = None,
discount_factor = 0.99,
n_step = None,
double_q = False,
expert_memory = None,
prioritized_replay = False
):
BaseQAgent.__init__(self, env, memory, policy_network, target_network, frame_shape, save_path)
self._idx_range = np.arange(self.memory.batch_size, dtype = np.int32)
self.discount_factor = discount_factor
self.n_step = n_step
self.expert_memory = expert_memory
self.double_q = double_q
self.prioritized_replay = prioritized_replay
def _get_mini_batch(self, expert = False):
if expert:
memory = self.expert_memory
else:
memory = self.memory
if self.n_step is None:
(self.state_1_batch,
self.action_batch,
self.reward_batch,
self.state_2_batch,
self.done_batch,
self.batch_weights) = memory.get_mini_batch()
else:
(self.state_1_batch,
self.action_batch,
self.reward_batch,
self.state_2_batch,
self.done_batch,
self.n_step_return_batch,
self.state_n_batch,
self.n_done_batch,
self.batch_weights) = memory.get_mini_batch([self.n_step, self.discount_factor])
def _get_target_action_values(self):
# predict all action values for the next states using the target network
target_values = self.target_network.predict(self.state_2_batch)
if not self.double_q:
optimal_actions = argmax(target_values, axis = 1)
else:
# get optimal actions in the next states with respect to the policy network
optimal_actions = self.policy_network.get_optimal_actions(self.state_2_batch)
# get target values corresponding to the optimal actions
optimal_action_values = tf.reduce_sum(tf.multiply(target_values, tf.one_hot(optimal_actions, target_values.shape[1])), axis = 1)
# compute the new target action values using the Bellman-equation
target_action_values = self.reward_batch + (1 - self.done_batch) * self.discount_factor * optimal_action_values
if self.n_step is None:
n_step_target_action_values = None
else:
# predict all action values for the next states using the target network
n_step_target_values = self.target_network.predict(self.state_n_batch)
if not self.double_q:
n_step_optimal_actions = argmax(n_step_target_values, axis = 1)
else:
# get optimal actions in the next states with respect to the policy network
n_step_optimal_actions = self.policy_network.get_optimal_actions(self.state_n_batch)
# get target values corresponding to the optimal actions
n_step_optimal_action_values = tf.reduce_sum(tf.multiply(n_step_target_values,
tf.one_hot(n_step_optimal_actions, n_step_target_values.shape[1])), axis = 1)
# compute the new target action values using the Bellman-equation
n_step_target_action_values = self.n_step_return_batch + (1 - self.n_done_batch) * (self.discount_factor)**(self.n_step) * n_step_optimal_action_values
return(target_action_values, n_step_target_action_values)
def _choose_memory(self):
memory_weight = self.memory._priority_tree.get_total_weight()
expert_weight = self.expert_memory._priority_tree.get_total_weight()
return(np.random.binomial(size = 1, n = 1, p = expert_weight/(expert_weight + memory_weight)) == 1)
def _batch_update(self, pretrain = False):
# decide, from which memory to sample
expert = False
if self.expert_memory is not None:
expert = self._choose_memory()
if pretrain and self.expert_memory is not None:
expert = True
# sample mini batch
self._get_mini_batch(expert)
# compute target values
target_action_values, n_step_target_action_values = self._get_target_action_values()
# train the policy network using the target values and obtain the loss
mean_loss, losses = self.policy_network.train(self.state_1_batch, self.action_batch, target_action_values, n_step_target_action_values, expert, self.batch_weights)
self._losses.append(mean_loss)
# update priorities according to losses
if self.prioritized_replay:
if expert:
self.expert_memory.update_mini_batch_priorities(losses.numpy())
else:
self.memory.update_mini_batch_priorities(losses.numpy())
    def _predict_current_q_values(self):
        """Cache the policy network's Q-value vector for the current state.

        predict() returns a batch of predictions; indexing with [0] keeps
        the 1-D Q-value vector for the single current state.
        """
        self._current_predictions = self.policy_network.predict(self._current_state)[0]
class EpsilonGreedyAgent(BaseQAgent):
    """Q-agent with a fixed epsilon-greedy action policy.

    With probability ``epsilon`` a uniformly random action is taken,
    otherwise the greedy action with respect to the policy network's
    predicted Q-values.
    """
    def __init__(self,
                 env = ProcessedAtariEnv(),
                 memory = PrioritizedExperienceReplay(),
                 policy_network = None,
                 target_network = None,
                 frame_shape = (84, 84),
                 save_path = None,
                 num_actions = 4,
                 epsilon = 0.05):
        BaseQAgent.__init__(self, env, memory, policy_network, target_network, frame_shape, save_path)
        self.epsilon = epsilon
        self.num_actions = num_actions
    def _make_decision(self):
        """Return the index of the action to take in the current state."""
        if rand() < self.epsilon:
            # explore: uniformly random action
            return(randint(self.num_actions))
        else:
            # exploit: greedy action w.r.t. the predicted Q-values
            self._predict_current_q_values()
            # _current_predictions is a 1-D vector (see
            # _predict_current_q_values; the annealing agent indexes it
            # with a scalar), so argmax must be called without `axis = 1`
            # -- the original call raised on 1-D input and also left a
            # stray debug print in place.
            return(int(argmax(self._current_predictions)))
class EpsilonAnnealingAgent(BaseQAgent):
    """Q-agent whose exploration rate epsilon follows a piecewise-linear
    annealing schedule.

    Each schedule row is [start_eps, end_eps, num_steps]; rows are applied
    one after another (the per-row step counts are converted to cumulative
    step indices in __init__).
    """
    def __init__(self,
                 env = ProcessedAtariEnv(),
                 memory = PrioritizedExperienceReplay(),
                 policy_network = None,#DeepQNetwork(),
                 target_network = None,#DeepQNetwork(),
                 frame_shape = (84, 84),
                 save_path = None,
                 num_actions = 4,
                 eps_schedule = [[1.0, 0.1, 1000000],
                                 [0.1, 0.001, 5000000]]):
        # NOTE(review): mutable default argument -- instances share the
        # default schedule list unless callers pass their own.
        BaseQAgent.__init__(self, env, memory, policy_network, target_network, frame_shape, save_path)
        self.eps_schedule = np.array(eps_schedule)
        # turn per-row step counts into absolute (cumulative) step indices
        self.eps_schedule[:,2] = np.cumsum(self.eps_schedule[:,2])
        # step index at which the currently active schedule row started
        self.eps_lag = 0
        self.num_actions = num_actions
    def _get_current_epsilon(self):
        """Return epsilon for the current step, dropping the leading
        schedule row once it is exhausted."""
        if self._step_counter > self.eps_schedule[0, 2] and self.eps_schedule.shape[0] > 1:
            self.eps_schedule = np.delete(self.eps_schedule, 0, 0)
            self.eps_lag = self._step_counter
        max_eps, min_eps, eps_steps = self.eps_schedule[0]
        # linear interpolation from max_eps down to min_eps over the row
        epsilon = max_eps - min(1, (self._step_counter - self.eps_lag) / (eps_steps - self.eps_lag)) * (max_eps - min_eps)
        return(epsilon)
    def _make_decision(self, test_eps = None):
        """Epsilon-greedy decision; *test_eps* overrides the schedule
        (used for evaluation runs)."""
        if test_eps is not None:
            epsilon = test_eps
        else:
            epsilon = self._get_current_epsilon()
        if rand() < epsilon:
            return(randint(self.num_actions))
        else:
            self._predict_current_q_values()
            action = int(argmax(self._current_predictions))
            # record the greedy Q-value for monitoring
            self._q_values.append(self._current_predictions[action])
            return(action)
class EpsGreedyDQNAgent(EpsilonGreedyAgent, DQNAgent):
    """Fixed-epsilon greedy agent combined with DQN learning.

    NOTE(review): both base __init__s are invoked explicitly (not via
    super()), so BaseQAgent.__init__ runs twice -- presumably harmless,
    but confirm it has no one-shot side effects.
    """
    def __init__(self,
                 env = ProcessedAtariEnv(),
                 memory = PrioritizedExperienceReplay(),
                 policy_network = None,#DeepQNetwork(),
                 target_network = None, #DeepQNetwork(),
                 frame_shape = (84, 84),
                 save_path = None,
                 discount_factor = 0.99,
                 n_step = None,
                 double_q = False,
                 expert_memory = None,
                 prioritized_replay = False,
                 num_actions = 4,
                 epsilon = 0.05):
        EpsilonGreedyAgent.__init__(self, env, memory, policy_network, target_network, frame_shape, save_path, num_actions, epsilon)
        DQNAgent.__init__(self, env, memory, policy_network, target_network, frame_shape, save_path, discount_factor, n_step, double_q, expert_memory, prioritized_replay)
class EpsAnnDQNAgent(EpsilonAnnealingAgent, DQNAgent):
    """Epsilon-annealing agent combined with DQN learning.

    NOTE(review): as with EpsGreedyDQNAgent, both base __init__s are
    called explicitly, so BaseQAgent.__init__ runs twice.
    """
    def __init__(self,
                 env = ProcessedAtariEnv(),
                 memory = PrioritizedExperienceReplay(),
                 policy_network = None, #DeepQNetwork(),
                 target_network = None, #DeepQNetwork(),
                 frame_shape = (84, 84),
                 save_path = None,
                 discount_factor = 0.99,
                 n_step = None,
                 double_q = False,
                 expert_memory = None,
                 prioritized_replay = False,
                 num_actions = 4,
                 eps_schedule = [[1.0, 0.1, 1000000],
                                 [0.1, 0.001, 5000000]]
                 ):
        EpsilonAnnealingAgent.__init__(self, env, memory, policy_network, target_network, frame_shape, save_path, num_actions, eps_schedule)
        DQNAgent.__init__(self, env, memory, policy_network, target_network, frame_shape, save_path, discount_factor, n_step, double_q, expert_memory, prioritized_replay)
|
986,425 | bd61fe6523c58bcc6692016d8da60f5417d5c376 | def main():
print("hello my name is Matheus Guelfi")
main() |
986,426 | bf2ddb7a6878c6875d367714181271dc624f2a55 |
# 4x4 (2n x 2n with n = 2) sample input for flippingMatrix below.
matrix = [
    [112, 42, 83, 119],
    [56, 125, 56, 49],
    [15, 78, 101, 43],
    [62, 98, 114, 108]
]
def flippingMatrix(matrix):
    """Return the maximal sum of the upper-left n x n quadrant achievable
    by reversing whole rows and/or whole columns of a 2n x 2n matrix.

    Key insight: by row/column reversals, each upper-left cell (i, j) can
    be made to hold any of its four mirror cells -- (i, j),
    (i, 2n-1-j), (2n-1-i, j), (2n-1-i, 2n-1-j) -- and these choices are
    independent, so the optimum is the sum of the per-cell maxima.

    The original implementation was half-finished: it compared with `<`
    instead of `>`, never updated `max_value`, and called
    `_reverse_column(matrix, )` without its required column argument.
    """
    n = len(matrix) // 2
    last = 2 * n - 1  # index of the mirrored row/column
    total = 0
    for i in range(n):
        for j in range(n):
            total += max(
                matrix[i][j],
                matrix[i][last - j],
                matrix[last - i][j],
                matrix[last - i][last - j],
            )
    return total
def _compute_sum(matrix):
    """Return the sum of the entries in the upper-left quadrant."""
    half = int(len(matrix) / 2)
    return sum(value for line in matrix[:half] for value in line[:half])
def _reverse_column(matrix, col):
    """Reverse the order of the entries of column *col* in place."""
    values = [line[col] for line in matrix]
    for line, value in zip(matrix, reversed(values)):
        line[col] = value
def _reverse_row(matrix, row):
    """Reverse the order of the entries of row *row* in place."""
    matrix[row] = matrix[row][::-1]
# Solve the sample matrix when run as a script.
if __name__ == '__main__':
    print(flippingMatrix(matrix))
|
986,427 | 5c599c58f29d79d7c7fd611cfbef00d0b2e193f9 | # Generated by Django 2.1.7 on 2019-04-04 14:06
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an explicit ``order`` field to AnswerOption, Question and Task
    and make it each model's default ordering.

    Auto-generated by Django; avoid editing the operations by hand.
    """
    dependencies = [("khetha", "0002_schema_sketch")]
    operations = [
        migrations.AlterModelOptions(
            name="answeroption", options={"ordering": ["order"]}
        ),
        migrations.AlterModelOptions(name="question", options={"ordering": ["order"]}),
        migrations.AlterModelOptions(name="task", options={"ordering": ["order"]}),
        migrations.AddField(
            model_name="answeroption",
            name="order",
            field=models.PositiveIntegerField(default=0),
        ),
        migrations.AddField(
            model_name="question",
            name="order",
            field=models.PositiveIntegerField(default=0),
        ),
        migrations.AddField(
            model_name="task",
            name="order",
            field=models.PositiveIntegerField(default=0),
        ),
    ]
|
986,428 | 7bec7793eca7a8fc621dbf5d48ca890785fcb193 | from apps.users import dm_users
from flask import redirect, session, jsonify
from apps.users.a_schemas import UserSchema
from server.connection import update
from functools import wraps
from apps.logs import o_logs
from apps.users import m_users
def user_active_required(func):
    """Decorator: run the view only for a logged-in, *active* user.

    Anyone without a session username, or whose account is inactive, is
    redirected to the landing page instead.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        if 'username' not in session:
            return redirect("/")
        user = dm_users.get_user_by_email(session['username'])
        if not user.status:
            return redirect("/")
        return func(*args, **kwargs)
    return wrapper
def get_all_users_jsonify():
    """Serialize all users with UserSchema and return a JSON response.

    NOTE(review): accessing ``.data`` on dump() is marshmallow < 3
    behaviour -- confirm the pinned marshmallow version.
    """
    user_db = dm_users.get_all_users()
    schema_user = UserSchema(many=True)
    user = schema_user.dump(user_db).data
    return jsonify(user)
def serialize_user_form():
    """Return the user form definition as a JSON response.

    NOTE: this function was previously defined twice, byte-for-byte
    identically; the redundant second definition (which silently shadowed
    the first) has been removed.
    """
    user_form = m_users.user_form()
    return jsonify(user_form)
|
986,429 | 50b24385df344259552a593cd60c5433d500072a | import os, io, json, shutil
from django.core.exceptions import SuspiciousFileOperation
from cmsplugin_cascade import app_settings
import tempfile
try:
import czipfile as zipfile
except ImportError:
import zipfile
def unzip_archive(label, zip_ref):
    """Extract a (fontello) icon-font zip archive below the configured
    icon_font_root and load its config.json.

    Args:
        label: human readable name of the upload, used in error messages.
        zip_ref: an open ZipFile-like object.

    Returns:
        Tuple of (font folder path relative to icon_font_root, parsed
        config.json data).

    Raises:
        SuspiciousFileOperation: if the archive has no common top-level
            folder, or extraction/parsing fails.
    """
    common_prefix = os.path.commonprefix(zip_ref.namelist())
    if not common_prefix:
        raise SuspiciousFileOperation("The zip archive {} is not packed correctly".format(label))
    icon_font_root = app_settings.CMSPLUGIN_CASCADE['icon_font_root']
    temp_folder = None
    try:
        try:
            os.makedirs(icon_font_root)
        except os.error:
            pass  # the directory exists already
        temp_folder = tempfile.mkdtemp(prefix='', dir=icon_font_root)
        for member in zip_ref.infolist():
            zip_ref.extract(member, temp_folder)
        font_folder = os.path.join(temp_folder, common_prefix)
        # this is specific to fontello
        with io.open(os.path.join(font_folder, 'config.json'), 'r') as fh:
            config_data = json.load(fh)
    except Exception as exc:
        # Only clean up if the temp folder was actually created: the
        # original code raised UnboundLocalError here (masking the real
        # error) whenever mkdtemp itself failed.
        if temp_folder is not None:
            shutil.rmtree(temp_folder, ignore_errors=True)
        raise SuspiciousFileOperation("Can not unzip uploaded archive {}: {}".format(label, exc))
    return os.path.relpath(font_folder, icon_font_root), config_data
|
986,430 | c0011ac48c4f8caa314e17b35d9e3445eb8879cb | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2019 Arne Köhn <arne@chark.eu>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Usage: vimeo_linker.py [options]
Searches for videos on the ACL vimeo channel and tries to link them
to papers.
As the vimeo api is not public, you need to copy the
vimeo_apikeys.py.dummy to vimeo_apikeys.py and fill in your API keys
obtained from vimeo.
Arguments:
--cache-vimeo Cache the results obtained from vimeo. For debugging.
--cache-matchings Cache the paper matchings. For debugging.
--from-year=N Papers starting at this year are considered for matching. Newer year speeds up the matching. [default: 2013]
"""
import difflib
import vimeo
import time
import pickle
import os
import re
from anthology import Anthology
from docopt import docopt
# ruff: noqa: F403, F405, E741
from vimeo_apikeys import *
def checkVideo(paper):
    """Return True iff the paper already has a video attachment."""
    return any(att["type"] == "video" for att in paper.attachments)
# --- command line parsing and API/anthology setup ---------------------------
args = docopt(__doc__)
fromYear = int(args["--from-year"])
cacheVimeo = args["--cache-vimeo"]
cacheMatchings = args["--cache-matchings"]
v = vimeo.VimeoClient(token=personalAccessToken, key=clientId, secret=apiSecret)
allpapers = Anthology(importdir="../data/").papers
print("number of papers in anthology: ", len(allpapers))
# only consider sufficiently recent papers (speeds up the matching)
papers = {k: v for k, v in allpapers.items() if int(v.attrib["year"]) > fromYear}
print(
    "number of papers in anthology without video after " + str(fromYear) + ": ",
    len(papers),
)
# --- fetch all video names/links from the ACL vimeo channel -----------------
requestUrl = "/users/46432367/videos?per_page=100"
cont = True
nameUrls = {}  # video name -> video link
numRequests = 0
fetchUrls = True
if cacheVimeo and os.path.isfile("videos.pickle"):
    # reuse cached API results instead of re-fetching (debugging aid)
    nameUrls = pickle.load(open("videos.pickle", "rb"))
    fetchUrls = False
while cont and fetchUrls:
    res = v.get(requestUrl)
    if res is None:
        print("Result was None; sleeping and trying again")
        time.sleep(2)
        continue
    numRequests += 1
    j = res.json()
    if not res.ok:
        print("could not fetch videos from vimeo API!")
        exit(1)
    for elem in j["data"]:
        nameUrls[elem["name"]] = elem["link"]
    # follow pagination until the API reports no next page
    requestUrl = j["paging"]["next"]
    print(requestUrl)
    cont = requestUrl is not None
    # seems to be needed to not run into read timeouts.
    time.sleep(1)
if fetchUrls and cacheVimeo:
    pickle.dump(nameUrls, open("videos.pickle", "wb"))
notFounds = []  # (video name, url) pairs without a matching paper
result = ""  # tab-separated report of successful matches
matcher = difflib.SequenceMatcher()
def trySubstringMatch(videoName):
    """Find a paper whose (sufficiently long) title occurs literally in
    *videoName*; return (paper id, paper) or None."""
    for paper_id, paper in papers.items():
        if paper.is_volume:
            continue
        title = paper.get_title("plain")
        # short titles produce too many false positives
        if len(title) > 20 and title.lower() in videoName:
            return (paper_id, paper)
    return None
def tryMatch(name):
    """Fuzzy-match *name* against all paper titles; return (paper id,
    paper) for the first title with similarity ratio > 0.8, else None."""
    matcher.set_seq1(name)
    for paper_id, paper in papers.items():
        if paper.is_volume:
            continue
        matcher.set_seq2(paper.get_title("plain").lower())
        # cheap upper bounds first; the exact ratio only as a last resort
        if (matcher.real_quick_ratio() > 0.8
                and matcher.quick_ratio() > 0.8
                and matcher.ratio() > 0.8):
            return (paper_id, paper)
    return None
# --- match video names against paper titles ---------------------------------
num_elems = len(nameUrls)
id_video = []  # (anthology id, video url) pairs
computeMatch = True
if os.path.isfile("videos_papers.pickle") and cacheMatchings:
    # reuse cached matchings (debugging aid)
    id_video = pickle.load(open("videos_papers.pickle", "rb"))
    computeMatch = False
if computeMatch:
    i = 1
    for name, url in nameUrls.items():
        print(i, " of ", num_elems, end="")
        i += 1
        # clean tacl tag and author list
        if name.startswith("[TACL]"):
            name = name[6:]
        if "---" in name:
            name = name.split("---")[0]
        found = False
        # skip video names that are obviously not paper titles
        # because they don't contain spaces.
        if " " in name:
            name = name.lower()
            # exact substring match first, then fuzzy matching
            res = trySubstringMatch(name)
            if res is None:
                res = tryMatch(name)
            # try to remove author list appended by : or by -
            if res is None:
                if ":" in name:
                    res = tryMatch(name.rsplit(":", 1)[0])
            if res is None:
                if "-" in name:
                    res = tryMatch(name.rsplit("-", 1)[0])
            if res is not None:
                (idx, p) = res
                if checkVideo(p):
                    print("video already exists, skipping ...")
                    continue
                title = p.get_title("plain")
                print("found title " + title + " for video " + name)
                result += (
                    title
                    + "\t"
                    + idx
                    + "\t"
                    + url
                    + "\t"
                    + name
                    + "\t"
                    + p.get_booktitle("plain")
                    + "\n"
                )
                id_video.append((idx, url))
            else:
                notFounds.append((name, url))
                print("no paper found for video" + name)
# --- write the match / no-match reports -------------------------------------
f = open("/tmp/vimeo_videos.csv", "w")
f.write("title\tACL-ID\tvideo url\tvideo name\tpaper title\n")
f.write(result)
f.close()
f = open("/tmp/vimeo_not_matched.csv", "w")
for name, url in notFounds:
    f.write(url + "\t" + name + "\n")
f.close()
if computeMatch and cacheMatchings:
    pickle.dump(id_video, open("videos_papers.pickle", "wb"))
# --- insert <video> tags into the anthology XML files -----------------------
venues = set([x[0].split("-")[0] for x in id_video])
id_video_dict = {id: video for id, video in id_video}
paperidre = re.compile(r".*<url>(\w\d\d-\d\d\d\d)</url>")
has_video = False
idx = "notset"
for v in venues:
    with open("../data/xml/" + v + ".xml") as f:
        with open("../data/xml/" + v + ".xml.new", "w") as out:
            for l in f:
                m = paperidre.match(l)
                if m:
                    idx = m[1]  # v+"-"+m[1]
                    has_video = idx in id_video_dict
                if has_video and r"</paper>" in l:
                    out.write(
                        ' <video href="' + id_video_dict[idx] + '" tag="video"/>\n'
                    )
                    has_video = False
                out.write(l)
|
986,431 | 573e7c3904f02c0b7998a02497355d09d3b6c20d | # -*- coding: utf-8 -*-
import unittest
from typestruct import Packet, types, Endian
from dataclasses import dataclass
@dataclass
class ExamplePacket(Packet):
    # Exercises one field of every supported type; the wire layout follows
    # the declaration order below. Field names deliberately shadow
    # builtins -- this is test-only code.
    bool: types.Bool
    int8: types.int8
    int16: types.int16
    int32: types.int32
    int64: types.int64
    bool2: types.Bool
    uint8: types.uint8
    uint16: types.uint16
    uint32: types.uint32
    uint64: types.uint64
    array: types.Slice(2)  # fixed-length slice of 2 bytes
    vararray: types.Varlength(lambda s: s.int8)  # length taken from the int8 field
@dataclass
class Int64Packet(Packet):
    # Minimal packet with one signed 64-bit field (used by endianness tests).
    value: types.int64
@dataclass
class VarlengthPacket(Packet):
    # Packet whose payload length is carried by its own first field.
    length: types.int8
    value: types.Varlength(lambda x: x.length)
class TestSerialization(unittest.TestCase):
    """Packet -> bytes serialization, including endianness handling."""
    def test_serialization(self):
        # every field type at once, default (little) endianness
        packet = ExamplePacket(True,
                               0x01, 0x0102, 0x01020304, 0x0102030405060708,
                               False,
                               0x01, 0x0102, 0x01020304, 0x0102030405060708,
                               b"\xde\xad", b"\x01")
        self.assertEqual(packet.serialize(), b'\x01\x01\x02\x01\x04\x03\x02'
                         b'\x01\x08\x07\x06\x05\x04\x03\x02\x01\x00\x01\x02'
                         b'\x01\x04\x03\x02\x01\x08\x07\x06\x05\x04\x03\x02'
                         b'\x01\xde\xad\x01')
    def test_big_endian(self):
        packet = Int64Packet(0x0102030405060708)
        self.assertEqual(packet.serialize(endian=Endian.BIG_ENDIAN),
                         b'\x01\x02\x03\x04\x05\x06\x07\x08')
    def test_little_endian(self):
        packet = Int64Packet(0x0102030405060708)
        self.assertEqual(packet.serialize(endian=Endian.LITTLE_ENDIAN),
                         b'\x08\x07\x06\x05\x04\x03\x02\x01')
    def test_varlength(self):
        # the length prefix is emitted before the variable-length payload
        packet = VarlengthPacket(2, b"\xde\xad")
        self.assertEqual(packet.serialize(endian=Endian.BIG_ENDIAN),
                         b"\x02\xde\xad")
        packet = VarlengthPacket(4, b"\xde\xad\xbe\xef")
        self.assertEqual(packet.serialize(endian=Endian.BIG_ENDIAN),
                         b"\x04\xde\xad\xbe\xef")
class TestDeserialization(unittest.TestCase):
    """bytes -> Packet deserialization, including endianness handling."""
    def test_deserialization(self):
        packet = ExamplePacket.deserialize(b'\x01\x01\x02\x01\x04\x03\x02\x01'
                                           b'\x08\x07\x06\x05\x04\x03\x02\x01'
                                           b'\x00\x01\x02\x01\x04\x03\x02\x01'
                                           b'\x08\x07\x06\x05\x04\x03\x02\x01'
                                           b'\xde\xad\x01')
        target = ExamplePacket(True,
                               0x01, 0x0102, 0x01020304, 0x0102030405060708,
                               False,
                               0x01, 0x0102, 0x01020304, 0x0102030405060708,
                               b"\xde\xad", b"\x01")
        self.assertEqual(packet, target)
    def test_big_endian(self):
        packet = Int64Packet.deserialize(b'\x01\x02\x03\x04\x05\x06\x07\x08',
                                         endian=Endian.BIG_ENDIAN)
        self.assertEqual(packet.value, 0x0102030405060708)
    def test_little_endian(self):
        # same input bytes as above, opposite byte order
        packet = Int64Packet.deserialize(b'\x01\x02\x03\x04\x05\x06\x07\x08',
                                         endian=Endian.LITTLE_ENDIAN)
        self.assertEqual(packet.value, 0x0807060504030201)
    def test_varlength(self):
        packet = VarlengthPacket.deserialize(b"\x02\xde\xad")
        self.assertEqual(packet.length, 0x02)
        self.assertEqual(packet.value, b'\xde\xad')
        packet = VarlengthPacket.deserialize(b"\x04\xde\xad\xbe\xef")
        self.assertEqual(packet.length, 0x04)
        self.assertEqual(packet.value, b'\xde\xad\xbe\xef')
|
986,432 | b5025289ef465e1bbe4c15232ec34a57c855a4ab | import numpy as np
from sklearn import datasets
# Load the iris dataset and split it into train/test subsets.
iris = datasets.load_iris()
iris_data = iris.data
iris_labels = iris.target
#print(iris_data[0], iris_data[79], iris_data[100])
#print(iris_labels[0], iris_labels[79], iris_labels[100])
# fixed seed -> reproducible shuffle/split
np.random.seed(42)
indices = np.random.permutation(len(iris_data))
# NOTE(review): despite the name, these 12 samples form the *test* set;
# the remaining samples are used as the learn set.
n_training_samples = 12
learnset_data = iris_data[indices[:-n_training_samples]]
learnset_labels = iris_labels[indices[:-n_training_samples]]
testset_data = iris_data[indices[-n_training_samples:]]
testset_labels = iris_labels[indices[-n_training_samples:]]
#print(learnset_data[:4], learnset_labels[:4])
#print(testset_data[:4], testset_labels[:4])
def distance(instance1, instance2):
    """Euclidean distance between two feature vectors (any array-likes)."""
    # coerce lists/tuples to arrays so subtraction broadcasts elementwise
    diff = np.array(instance1) - np.array(instance2)
    return np.linalg.norm(diff)
#print(distance([3, 5], [1, 1]))
#print(distance(learnset_data[3], learnset_data[44]))
def get_neighbors(training_set,
                  labels,
                  test_instance,
                  k,
                  distance=distance):
    """Return the k nearest neighbors of *test_instance*.

    Each neighbor is a 3-tuple (instance, dist, label):
    instance -- the training sample itself,
    dist     -- its distance to *test_instance* (computed by *distance*),
    label    -- the sample's class label.
    """
    scored = [(sample, distance(test_instance, sample), label)
              for sample, label in zip(training_set, labels)]
    scored.sort(key=lambda entry: entry[1])
    return scored[:k]
# Smoke run: query the 3 nearest neighbors for the first five test
# samples (the result is intentionally discarded).
for i in range(5):
    neighbors = get_neighbors(learnset_data,
                              learnset_labels,
                              testset_data[i],
                              3,
                              distance=distance)
from collections import Counter
def vote(neighbors):
    """Plain majority vote over the neighbors' labels (3rd tuple entry)."""
    tally = Counter(neighbor[2] for neighbor in neighbors)
    return tally.most_common(1)[0][0]
# Majority vote for every test sample (diagnostic print kept below).
for i in range(n_training_samples):
    neighbors = get_neighbors(learnset_data,
                              learnset_labels,
                              testset_data[i],
                              3,
                              distance=distance)
    # print("index: ", i, ", result of vote: ", vote(neighbors), ", label: ", testset_labels[i], ", data: ", testset_data[i])
def vote_prob(neighbors):
    """Majority vote plus the winner's share of all votes.

    Returns (winner_label, fraction_of_votes_for_winner).
    """
    tally = Counter(neighbor[2] for neighbor in neighbors)
    (winner, winner_votes), = tally.most_common(1)
    # every neighbor contributes exactly one vote
    return winner, winner_votes / len(neighbors)
# Probability-weighted vote for every test sample (k = 5).
for i in range(n_training_samples):
    neighbors = get_neighbors(learnset_data,
                              learnset_labels,
                              testset_data[i],
                              5,
                              distance=distance)
    # print("index: ", i,", vote_prob: ", vote_prob(neighbors),", label: ", testset_labels[i],", data: ", testset_data[i])
def vote_harmonic_weights(neighbors, all_results=True):
    """Weighted vote where the i-th nearest neighbor contributes 1/(i+1).

    Returns (winner, normalized (label, weight) list) when *all_results*
    is true, otherwise (winner, winner_share_of_total_weight).
    """
    tally = Counter()
    for rank, neighbor in enumerate(neighbors, start=1):
        tally[neighbor[2]] += 1 / rank
    ranked = tally.most_common()
    (winner, winner_weight), = tally.most_common(1)
    if not all_results:
        return winner, winner_weight / sum(weight for _, weight in ranked)
    # normalize the weights so they sum to 1, then re-rank
    total = sum(tally.values(), 0.0)
    for label in tally:
        tally[label] /= total
    return winner, tally.most_common()
# Harmonic-weighted vote for every test sample (k = 6).
for i in range(n_training_samples):
    neighbors = get_neighbors(learnset_data,
                              learnset_labels,
                              testset_data[i],
                              6,
                              distance=distance)
    # print("index: ", i,", result of vote: ", vote_harmonic_weights(neighbors, all_results=True))
def vote_distance_weights(neighbors, all_results=True):
    """Weighted vote where each neighbor contributes 1/(dist**2 + 1).

    Returns (winner, normalized (label, weight) list) when *all_results*
    is true, otherwise (winner, winner_share_of_total_weight).
    """
    tally = Counter()
    for neighbor in neighbors:
        dist, label = neighbor[1], neighbor[2]
        # closer neighbors get larger weight; +1 avoids division by zero
        tally[label] += 1 / (dist**2 + 1)
    ranked = tally.most_common()
    (winner, winner_weight), = tally.most_common(1)
    if not all_results:
        return winner, winner_weight / sum(weight for _, weight in ranked)
    # normalize the weights so they sum to 1, then re-rank
    total = sum(tally.values(), 0.0)
    for label in tally:
        tally[label] /= total
    return winner, tally.most_common()
# Distance-weighted vote for every test sample (k = 6).
for i in range(n_training_samples):
    neighbors = get_neighbors(learnset_data,
                              learnset_labels,
                              testset_data[i],
                              6,
                              distance=distance)
    # print("index: ", i, ", result of vote: ", vote_distance_weights(neighbors, all_results=True))
# Tiny toy dataset for the fruit-classification demo below.
train_set = [(1, 2, 2),
             (-3, -2, 0),
             (1, 1, 3),
             (-3, -3, -1),
             (-3, -2, -0.5),
             (0, 0.3, 0.8),
             (-0.5, 0.6, 0.7),
             (0, 0, 0)
             ]
labels = ['apple', 'banana', 'apple',
          'banana', 'apple', "orange",
          'orange', 'orange']
# NOTE(review): k is unused -- the demo loop below hard-codes k=2 in its
# get_neighbors call.
k = 1
for test_instance in [(0, 0, 0), (2, 2, 2),
(-3, -1, 0), (0, 1, 0.9),
(1, 1.5, 1.8), (0.9, 0.8, 1.6)]:
neighbors = get_neighbors(train_set,
labels,
test_instance,
2)
print("vote distance weights: ", vote_distance_weights(neighbors)) |
986,433 | c080439d910f4c7fedb959bcfcc6adececccb867 | #autor:Roberto Ortega Ortega
"""validarNombre:
algoritmo que segun el las opciones introducidas, redireccionara las
acciones a otro algoritmo, para asi continuar con el proceso de montaje"""
from src.aleatorioConocido import aleatorioConocido
from src.aleatorioNuevo import aleatorioNuevo
from src.secuencialConocido import secuencialConocido
from src.secuencialNuevo import secuencialNuevo
def validarNombre(selectorGui, nombre, opcion, conocido):
    """Close the selector window and dispatch to the assembly routine
    matching the chosen mode (sequential vs. random, new vs. known)."""
    selectorGui.destroy()
    if conocido == 1 and opcion == 0:
        secuencialNuevo(nombre, 1)
    elif conocido == 1:
        aleatorioNuevo(nombre)
    elif opcion == 0:
        secuencialConocido(nombre, 1)
    else:
        aleatorioConocido(nombre)
986,434 | 1dc68d4368d9f0f563ef26464b69b76071b0c7f0 | afgelegde_km = float(input("Hoeveel km leg je jaarlijks af? "))
verbruik = float(input("Hoeveel liter verbruik je per 100km? "))
prijs_per_liter = float(input("Hoeveel kost brandstof per liter? "))
totale_kosten = afgelegde_km * (verbruik/100) * prijs_per_liter
kostprijs_km = verbruik / 100 * prijs_per_liter
print("De totale kosten per jaar voor het opgegeven aantal km is: €" + str(totale_kosten), "en de kostprijs per km is: €" + str(kostprijs_km)) |
986,435 | 96dfbfd8155cbc38f5b81532613a71c26876d1d1 | #!/usr/bin/env python3
import os
import numpy
from distutils.core import Extension, setup
import distutils_pytest
# seems that this will clean build every time, might make more sense to just have a lightweight wrapper & precompiled lib?
# C++ extension module: all sources are compiled into one 'humanleague'
# module; the unit tests are compiled in as well.
cppmodule = Extension(
    'humanleague',
    define_macros = [('MAJOR_VERSION', '2'),
                     ('MINOR_VERSION', '0'),
                     ('PATCH_VERSION', '0'),
                     ('NPY_NO_DEPRECATED_API', 'NPY_1_7_API_VERSION')
                    ],
    extra_compile_args=['-Wall', '-std=c++11'],
    include_dirs = ['.', '/usr/include', '/usr/local/include', numpy.get_include()],
    # libraries = [':humanleague.so'],
    # library_dirs = ['/usr/local/lib','../src'],
    sources = ['src/Sobol.cpp',
               'src/SobolImpl.cpp',
               'src/QIS.cpp',
               'src/QISI.cpp',
               'src/QIWS.cpp',
               'src/GQIWS.cpp',
               'src/StatFuncs.cpp',
               'src/NDArrayUtils.cpp',
               'src/Index.cpp',
               'src/Integerise.cpp',
               'src/UnitTester.cpp',
               'src/TestNDArray.cpp',
               'src/TestQIWS.cpp',
               'src/TestSobol.cpp',
               'src/TestStatFuncs.cpp',
               'src/TestIndex.cpp',
               'src/TestSlice.cpp',
               'src/TestReduce.cpp',
               'humanleague/Object.cpp',
               'humanleague/py_api.cpp'],
    # for now safer to put up with full rebuilds every time
    depends = ['Object.h', 'Array.h']
)
setup(
name = 'humanleague',
version = '2.0.0',
description = 'microsynthesis using quasirandom sampling',
author = 'Andrew Smith',
author_email = 'a.p.smith@leeds.ac.uk',
url = '',
long_description = '''
microsynthesis using quasirandom sampling and/or IPF
''',
ext_modules = [cppmodule],
# these settings appear not to be required
# tests_require=['nose'],
# test_suite='tests',
)
|
986,436 | 10d53aade8b00306de47bef73850045478bb75bc | from django.db import models
from django.conf import settings
from datetime import datetime
from rest_framework.reverse import reverse as api_reverse
class Task(models.Model):
    """A contract work item, optionally assigned to a user."""
    name = models.CharField(max_length=200)
    user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, blank=True, null=True)
    description = models.TextField()
    contractId = models.IntegerField()
    status = models.IntegerField()
    # Pass the callable itself, NOT datetime.now(): calling it here would
    # evaluate once at import time, stamping every Task with the server
    # start time instead of its actual creation time.
    timeOfInit = models.DateTimeField(default=datetime.now)
    noSubtasks = models.IntegerField(blank=True, null=True)
    blocker = models.CharField(max_length=200, blank=True, null=True)
    note = models.CharField(max_length=250, blank=True, null=True)
    def __str__(self):
        return self.name
    @property
    def owner(self):
        # alias for the assigned user (used e.g. by permission checks)
        return self.user
class Blocker(models.Model):
    """A named reason a task can be blocked."""
    name = models.CharField(max_length=200)
    def __str__(self):
        return self.name
class Subtask(models.Model):
    """A unit of work belonging to a Task, optionally assigned to a user."""
    name = models.CharField(max_length=200)
    user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, blank=True, null=True)
    description = models.TextField()
    task = models.ForeignKey(Task, on_delete=models.CASCADE)
    status = models.IntegerField()
    def __str__(self):
        return self.name
class Skill(models.Model):
    """A named skill that can be attached to subtasks and users."""
    name = models.CharField(max_length=200)
    def __str__(self):
        return self.name
class SubtaskSkill(models.Model):
    """Many-to-many join table between subtasks and the skills they need."""
    subtask = models.ForeignKey(Subtask, on_delete=models.CASCADE)
    skill = models.ForeignKey(Skill, on_delete=models.CASCADE)
class UserSkill(models.Model):
    """Many-to-many join table between users and the skills they have."""
    user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
    skill = models.ForeignKey(Skill, on_delete=models.CASCADE)
986,437 | 28a8d81c442d6b06784b81a74b1555486043c2b1 | from flask import Flask, render_template, request, redirect, session # Import Flask to allow us to create our app.
app = Flask(__name__) # Global variable __name__ tells Flask whether or not we are running the file
# directly, or importing it as a module.
# NOTE(review): hard-coded secret key -- move to config/env for production.
app.secret_key = 'bcxjnmkbnhbfrvsghfn'
@app.route('/')
def index():
    """Render the landing page."""
    return render_template("index.html")
app.run(debug=True) # Run the app in debug mode.
|
986,438 | fd376c909598143d2f508b70e80c9916f508df18 | from collections import Counter
def function_length_frequency(func, hrange):
    """Tally len(func(n)) for every n in *hrange*.

    Returns (length, count) pairs sorted most-frequent first.
    """
    lengths = [len(func(n)) for n in hrange]
    return Counter(lengths).most_common()
if __name__ == '__main__':
    from executable_hailstone_library import hailstone
    # Report the most common hailstone (Collatz) sequence length for
    # 1 <= n < upto.
    upto = 100000
    hlen, freq = function_length_frequency(hailstone, range(1, upto))[0]
    print("The length of hailstone sequence that is most common for\n"
          "hailstone(n) where 1<=n<%i, is %i. It occurs %i times."
          % (upto, hlen, freq))
986,439 | ef459201d8abd580093c3157d9e1800b988bb188 |
# class header
class _RAP():
    """Dictionary entry for the noun "RAP" (auto-generated vocabulary class)."""
    def __init__(self,):
        self.name = "RAP"
        self.definitions = [u'a type of popular music with a strong rhythm in which the words are spoken, not sung: ', u'a statement accusing someone of a crime, or the punishment that someone is given for a crime: ', u'a judgment or a reaction: ', u'a sudden short noise, especially one made by hitting a hard surface: ']
        self.parents = []
        # NOTE(review): "childen" is a typo for "children", but it is part
        # of the class's external attribute interface, so it is kept.
        self.childen = []
        self.properties = []
        self.jsondata = {}
        self.specie = 'nouns'
    def run(self, obj1 = [], obj2 = []):
        """Return the JSON payload; obj1/obj2 are accepted but ignored.

        NOTE(review): mutable default arguments -- harmless here since
        they are never mutated.
        """
        return self.jsondata
|
986,440 | d58a662dab81e43f9ec1a3f79a6182ecd8f86836 | from django.test import TestCase
from .models import *
import pandas as pd
import requests
import json
# Create your tests here.
def create_data():
    """Bulk-load subway/education-facility records from the CSV export
    into the TotalAddress model, one row at a time.
    """
    result = pd.read_csv('../data/data_adg/지하철_교육시설포함_최종.txt', sep=",", encoding="utf-8")
    # result = pd.DataFrame(result,columns=["result_id","address", "gu", "dong", "mac_score", "lot_score",
    # "burgerking_score",'seveneleven_score', 'cu_score', 'gs25_score', 'starbucks_score', 'lottecinema_score',
    # 'megabox_score', 'cgv_score', 'total_score', 'high_oil', 'oil' 'oil2', 'total', 'catch', 'percent', 'mac_count',
    # 'lot_count', 'burgerking_count', 'seveneleven_count','cu_count', 'gs25_count','starbucks_count','lottecinema_count','megabox_count',
    # 'cgv_count', 'total_count','first','second', 'third'])
    # print(result.loc[5])
    # print(result.loc[5]['address'])
    for i in range(len(result.index)):
        print(result.loc[i]['address'])
        # map every CSV column onto the matching model field
        a = TotalAddress(
            address = result.loc[i]['address'],
            gu = result.loc[i]['gu'],
            dong = result.loc[i]['dong'],
            mac_score = float(result.loc[i]['mac_score']),
            lot_score = float(result.loc[i]['lot_score']),
            burgerking_score = float(result.loc[i]['burgerking_score']),
            seveneleven_score = float(result.loc[i]['seveneleven_score']),
            cu_score = float(result.loc[i]['cu_score']),
            gs25_score = float(result.loc[i]['gs25_score']),
            starbucks_score = float(result.loc[i]['starbucks_score']),
            lottecinema_score = float(result.loc[i]['lottecinema_score']),
            megabox_score = float(result.loc[i]['megabox_score']),
            cgv_score = float(result.loc[i]['cgv_score']),
            total_score = float(result.loc[i]['total_score']),
            high_oil = float(result.loc[i]['high_oil']),
            oil = float(result.loc[i]['oil']),
            oil2 = float(result.loc[i]['oil2']),
            total = int(result.loc[i]['total']),
            catch = int(result.loc[i]['catch']),
            percent = float(result.loc[i]['percent']),
            mac_count = int(result.loc[i]['mac_count']),
            lot_count = int(result.loc[i]['lot_count']),
            burgerking_count = int(result.loc[i]['burgerking_count']),
            seveneleven_count = int(result.loc[i]['seveneleven_count']),
            cu_count = int(result.loc[i]['cu_count']),
            gs25_count = int(result.loc[i]['gs25_count']),
            starbucks_count = int(result.loc[i]['starbucks_count']),
            lottecinema_count = int(result.loc[i]['lottecinema_count']),
            megabox_count = int(result.loc[i]['megabox_count']),
            cgv_count = int(result.loc[i]['cgv_count']),
            total_count = int(result.loc[i]['total_count']),
            first = result.loc[i]['first'],
            second = result.loc[i]['second'],
            third = result.loc[i]['third'],
            first_mapx = result.loc[i]['first_mapx'],
            first_mapy = result.loc[i]['first_mapy'],
            second_mapx = result.loc[i]['second_mapx'],
            second_mapy = result.loc[i]['second_mapy'],
            third_mapx = result.loc[i]['third_mapx'],
            third_mapy = result.loc[i]['third_mapy'],
            elementary_count= result.loc[i]['elementary_count'],
            middle_count=result.loc[i]['middle_count'],
            high_count=result.loc[i]['high_count'],
            park_count=result.loc[i]['park_count'],
            station=result.loc[i]['station']
        )
        print(i, '/', len(result))
        a.save()
    else:
        # for/else: runs once the loop completes without a break
        print("create data done")
create_data() |
986,441 | 252e5edc7a1ec8ddf53f1e632e5efbc9390181a2 | x=[1,2,3,4]
y=[5,6,7,8]
a=0
b=0
for i in range(len(x)):
if i%2==0:
a=a+x[i]
for k in range(len(y)):
if k%2!=0:
b=b+y[k]
print(a+b) |
986,442 | bd34535000e9551e915945ed077bc43d30b77fe9 | import requests
import argparse
from termcolor import colored
import time
print(colored("""
_ _ _ _ _ _
| || | |_| |_ _ __ | (_)_ _____
| __ | _| _| '_ \ | | \ V / -_)
|_||_|\__|\__| .__/ |_|_|\_/\___|
|_|
#C0ded By Red Virus
""","red"))
# Command line interface: -Live/--Live takes the URL to probe.
parser = argparse.ArgumentParser("""
Live : Check If Url Up Or Down
ex:python3 Httplive.py --live http://google.com
""")
parser.add_argument("-Live","--Live")
args = parser.parse_args()
url = args.Live
def httplive(url):
    """Probe *url* and print whether it is up or down.

    Prints "Http Live" only for HTTP status 200; other status codes
    produce no output at all. A refused connection is reported as down
    (other request errors, e.g. timeouts, propagate).
    """
    try:
        req = requests.get(url)
        if req.status_code == 200:
            print(colored("Http Live: ","green"),url)
    except requests.exceptions.ConnectionError:
        print(colored("Http Down: ","red"),url)
if url !=None:
print("Please Wait ...")
time.sleep(3)
httplive(url) |
986,443 | e17fa09f6b9bb247c9e7483a69f8bd1bc6aa8793 | #!/usr/bin/python3
pip install -r requirements.txt
|
986,444 | 4a7233b40d06d7458d450f3965f085b570e668d3 | import requests
#Opening file
books_rest = requests.get('http://pulse-rest-testing.herokuapp.com/books')
print(books_rest.json()[0])
print(books_rest.text)
print(books_rest.status_code)
print(books_rest.headers)
#First script
payload = {'title': 'Kobzar', 'author': 'Taras Shevchenko'}
r = requests.post('http://pulse-rest-testing.herokuapp.com/books', data=payload)
print(r.text)
print(r.json()['id'])
id_my_book = r.json()['id']
books_rest = requests.get(f'http://pulse-rest-testing.herokuapp.com/books/{id_my_book}')
assert books_rest.json()['id'] == id_my_book and books_rest.json()['title'] == 'Kobzar' and books_rest.json()['author'] == 'Taras Shevchenko'
print('Book has been created successfully')
payload = {'title': "Zahar Berkut", 'author': "Ivan Franko"}
z = requests.put(f'http://pulse-rest-testing.herokuapp.com/books/{id_my_book}', data=payload)
print(z.text)
books_rest = requests.get(f'http://pulse-rest-testing.herokuapp.com/books/{id_my_book}')
assert books_rest.json()['id'] == id_my_book and books_rest.json()['title'] == "Zahar Berkut" and books_rest.json()['author'] == "Ivan Franko"
print('Book has been changed successfully')
print(books_rest.json())
z = requests.delete(f'http://pulse-rest-testing.herokuapp.com/books/{id_my_book}')
assert z.status_code == 204
print('Book has been deleted successfully')
#Second script
payload = {'title': 'Hamlet', 'author': 'William Shakespeare'}
books_rest = requests.post('http://pulse-rest-testing.herokuapp.com/books', data=payload)
print(books_rest.text)
print(books_rest.json()['id'])
id_my_book = books_rest.json()['id']
k = requests.get(f'http://pulse-rest-testing.herokuapp.com/books/{id_my_book}')
assert k.json()['id'] == id_my_book and k.json()['title'] == 'Hamlet' and k.json()['author'] == 'William Shakespeare'
print('Book has been created successfully')
payload = {'name': 'Horace', 'type': 'Main', 'level': 1, 'book': f'{id_my_book}'}
books_rest = requests.post('http://pulse-rest-testing.herokuapp.com/roles', data=payload)
id_person = books_rest.json()['id']
books_rest = requests.get(f'http://pulse-rest-testing.herokuapp.com/roles/{id_person}')
assert books_rest.json()['id'] == id_person and books_rest.json()['name'] == 'Horace' and books_rest.json()['type'] == 'Main' and books_rest.json()[
'level'] == 1
print('Role has been created successfully')
payload = {'name': 'Polonius', 'type': 'Secondary', 'level': 2, 'book': f'{id_my_book}'}
books_rest = requests.put(f'http://pulse-rest-testing.herokuapp.com/roles/{id_person}', data=payload)
assert books_rest.json()['id'] == id_person and books_rest.json()['name'] == 'Polonius' and books_rest.json()['type'] == 'Secondary' and books_rest.json()[
'level'] == 2
print('Role has been changed successfully')
print(books_rest.json())
j = requests.delete(f'http://pulse-rest-testing.herokuapp.com/books/{id_my_book}')
assert j.status_code == 204
print('Book has been deleted successfully')
j = requests.delete(f'http://pulse-rest-testing.herokuapp.com/roles/{id_person}')
assert j.status_code == 204
print('Role has been deleted successfully')
|
986,445 | 10a448260898f39ec04501d93b6d6cfeabdb149f | print("Welcome to the rollercoaster!")
# Interactive ticket-pricing script: height gate, age-based fare, optional
# photo surcharge.
height = int(input("What is your height in cm? "))
bill = 0
if height >= 120:
    print("You can ride")
    age = int(input("How old are u? "))
    if age < 12:
        bill = 5
        print(f"Children pay ${bill}")
    elif age <= 18:
        bill = 7
        print(f"Youth pay ${bill}")
    elif 45 <= age <= 55:
        # The ride itself is free for this age band (a photo still costs).
        print("Dont worry, its on us.")
    else:
        bill = 12
        print(f"Adults pay ${bill}")
    photo = input("Do you want a picture?\n")
    # BUG FIX: the original tested photo.lower()[0] == 'y', which raised
    # IndexError when the user just pressed Enter.  startswith('y') accepts
    # the same answers and tolerates empty input.
    if photo.lower().startswith('y'):
        bill += 3
    if bill > 0:
        print(f"You final bill is ${bill}")
    else:
        print("It's FREE :)")
else:
    print("Sorry :( you can't ride")
|
986,446 | 40a492927dd0acf8c7b1a83fa0cbf231626ed9e8 | # -*- coding: utf-8 -*-
"""
Created on Thu May 10 16:17:20 2018
@author: HUDSON
"""
"""
Constroi a estrutura de uma rede neural artificial
"""
from pybrain.structure import FeedForwardNetwork
from pybrain.structure import LinearLayer, SigmoidLayer, BiasUnit
from pybrain.structure import FullConnection
rede = FeedForwardNetwork()
# Layers: 2 linear inputs, 3 sigmoid hidden units, 1 sigmoid output,
# plus one bias unit feeding each of the hidden and output layers.
#cria a camada de entrada (input layer)
camadaEntrada = LinearLayer(2)
#cria a camada oculta (hidden layer)
camadaOculta = SigmoidLayer(3)
#cria camada saida (output layer)
camadaSaida = SigmoidLayer(1)
bias1 = BiasUnit()
bias2 = BiasUnit()
# Register the modules with the network.
rede.addModule(camadaEntrada)
rede.addModule(camadaOculta)
rede.addModule(camadaSaida)
rede.addModule(bias1)
rede.addModule(bias2)
# Fully connected links input->hidden and hidden->output, plus the biases.
entradaOculta = FullConnection(camadaEntrada, camadaOculta)
ocultaSaida = FullConnection(camadaOculta, camadaSaida)
biasOculta = FullConnection(bias1, camadaOculta)
biasSaida = FullConnection(bias2, camadaSaida)
# BUG FIX: the connections were created but never registered with the
# network, so sortModules() finalized a topology with no weights between
# the layers.  PyBrain requires addConnection() before sortModules().
rede.addConnection(entradaOculta)
rede.addConnection(ocultaSaida)
rede.addConnection(biasOculta)
rede.addConnection(biasSaida)
# Finalize the topology (sorts modules, initializes the parameter arrays).
rede.sortModules()
#print(rede)
#print(entradaOculta.params)
#print(ocultaSaida.params)
#print(biasOculta.params)
#print(biasSaida.params)
|
986,447 | 8faf3b175aceece7fe3c82517b3b90be538eb16a | '''initialize'''
from .shapes import tetrisShape
from .gameboard import InnerBoard, ExternalBoard, SidePanel |
986,448 | e76f85a035659b04534e816567a748f7876dd7c5 | """
Name : c12_09_roll_a_dice.py
Book : Python for Finance (2nd ed.)
Publisher: Packt Publishing Ltd.
Author : Yuxing Yan
Date : 6/6/2017
email : yany@canisius.edu
paulyxy@hotmail.com
"""
import random
def rollDice():
    """Return a single uniformly random die face (1-6 inclusive)."""
    return random.randint(1, 6)

# Roll the die n-1 times with a fixed seed so the run is reproducible.
# (The original while-loop ran from i=1 while i<n, i.e. nine iterations.)
n = 10
random.seed(123)
result = [rollDice() for _ in range(n - 1)]
print(result)
986,449 | 3228b74dd15ce0534c2e01f2e366376ca20d2b39 | import pandas as pd
import sys
# Assign globally unique consecutive row ids across the train/valid/test
# splits of dataset <path>, then also write the concatenated full set.
path = sys.argv[1]
train = pd.read_csv(path + '/' + path + '.train_modified.csv', low_memory=False)
test = pd.read_csv(path + '/' + path + '.test_modified.csv', low_memory=False)
valid = pd.read_csv(path + '/' + path + '.valid_modified.csv', low_memory=False)
# Ids run train -> valid -> test with no gaps.  Assigning a range directly
# is equivalent to the old single-column-DataFrame assignment (read_csv
# yields a default RangeIndex) and avoids the index-alignment indirection.
train['id'] = range(len(train))
train.to_csv(path + '/train.csv', index=False)
valid['id'] = range(len(train), len(train) + len(valid))
valid.to_csv(path + '/valid.csv', index=False)
test['id'] = range(len(train) + len(valid), len(train) + len(valid) + len(test))
test.to_csv(path + '/test.csv', index=False)
# BUG FIX: DataFrame.append was deprecated in pandas 1.4 and removed in
# 2.0; pd.concat is the supported way to stack the splits.  The written
# CSV is identical (index=False, so row labels never mattered).
full = pd.concat([train, valid, test], ignore_index=True)
full.to_csv(path + '/full.csv', index=False)
986,450 | 5ce36e8ce45e5ef5b419c5fd7db03f5edd2d87c8 | from .environment import Environment
class Grassland(Environment):
    """A grassland biome: holds up to 22 animals and 15 plants."""

    def __init__(self, name):
        Environment.__init__(self, name, animal_max=22, plant_max=15)

    def animal_count(self):
        """Describe how many animals currently live here."""
        return f"This place has {len(self.animals)} animals in it"

    def add_animal(self, animal):
        """Add *animal* if it likes grass; otherwise explain the rejection."""
        try:
            if not animal.likes_grass:
                # Funnel "dislikes grass" into the same handler as "has no
                # likes_grass attribute at all".
                raise AttributeError()
            self.animals.append(animal)
        except AttributeError:
            print("Only animals that like grass can be added to the grassland.")
            input("\n\nPress any key to continue...")

    def add_plant(self, plant):
        """Add *plant* to the grassland (no suitability filtering is applied)."""
        try:
            self.plants.append(plant)
        except AttributeError:
            raise AttributeError(
                "Cannot add plants that require brackish water or stagnant water to a river biome")

    def __str__(self):
        return f'{self.name}'
|
986,451 | 633075ea98229f72c492a97dbaa19971847b7fb4 | import discord
import random
from discord.ext import commands
from main import bot
from cogs.errorhandler import rfooter
class myhelp(commands.HelpCommand):
    """Embed-based replacement for the default discord.py help command."""

    def get_command_signature(self, command):
        """Render ``<prefix><qualified name> <signature>`` for embed titles."""
        return f"{self.clean_prefix}{command.qualified_name} {command.signature}"

    #the help command
    async def send_bot_help(self, mapping: dict=None):
        """Send one embed listing every command, grouped per cog."""
        embed = discord.Embed(title="Help")
        for cog, command in mapping.items():
            s = [f"**{c.qualified_name}** {c.help}" for c in command]
            #if there are commands in this cog
            if s:
                #using getattr() here to avoid raising an error
                cog_name = getattr(cog, "qualified_name", "No Category")
                embed.add_field(name=cog_name, value="\n".join(s), inline=False)
        embed.set_footer(text=f"Requested by {str(self.context.message.author)}")
        await self.context.reply(embed=embed)

    #the command specific help command
    async def send_command_help(self, command: str=None):
        """Send one embed describing a single command (help text + aliases)."""
        embed = discord.Embed(title=self.get_command_signature(command))
        embed.add_field(name="Help", value=command.help, inline=False)
        a = command.aliases
        #if the command has aliases
        if a:
            embed.add_field(name="Aliases", value=", ".join(a), inline=False)
        embed.set_footer(text=f"Requested by {str(self.context.message.author)}")
        await self.context.reply(embed=embed)

    #help command specific error handler
    async def send_error_message(self, e):
        """Report a help lookup failure with a random footer line."""
        embed = discord.Embed(title="An error occured!", description=f"```{e}```")
        # random.choice is the idiomatic equivalent of indexing rfooter with
        # randint(0, len(rfooter)-1), as the original did.
        embed.set_footer(text=random.choice(rfooter))
        await self.context.reply(embed=embed)
class utility(commands.Cog):
    """Utility commands; also installs the custom help command on load."""

    def __init__(self, bot):
        self.bot = bot
        # Keep a reference to the previous help command so it could be
        # restored later.  NOTE(review): it is never restored on cog
        # unload — confirm whether that is intended.
        self.help_command = bot.help_command
        bot.help_command = myhelp(command_attrs={"aliases": ["h"]})
        bot.help_command.cog = self

    @commands.command()
    async def ping(self, ctx):
        """
        Gets the bot's latency to Discord
        """
        await ctx.message.add_reaction("🏓")
        # NOTE(review): 1000*round(latency, 3) can show float noise such as
        # 123.00000000000001; round(bot.latency*1000) would be cleaner.
        embed = discord.Embed(title="Pong!", description=f"{1000*round(bot.latency, 3)} ms")
        embed.set_footer(text=f"Requested by {str(ctx.message.author)}")
        await ctx.reply(embed=embed)
def setup(bot):
    """Extension entry point used by bot.load_extension()."""
    cog = utility(bot)
    bot.add_cog(cog)
|
986,452 | 71d4b2053e68b157b1b5c1bbae384b1d60e6568b | # Generated by Django 3.2.5 on 2021-07-29 08:33
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates Task plus its Type and
    Status lookup tables.  Do not edit field definitions by hand —
    regenerate with ``manage.py makemigrations`` instead.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Task',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('summary', models.TextField(max_length=64, verbose_name='Заголовок')),
                ('description', models.TextField(max_length=3000, verbose_name='Описание')),
                # NOTE(review): DateTimeField ignores max_length, and the
                # verbose_name 'Useless' looks accidental — confirm.
                ('created_at', models.DateTimeField(max_length=30, null=True, verbose_name='Useless')),
            ],
        ),
        migrations.CreateModel(
            name='Type',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('all_types', models.CharField(choices=[('task', 'task'), ('bug', 'bug'), ('enchantment', 'enchantment'), ('research', 'research')], default='task', max_length=16)),
                ('type', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='type', to='webapp.task', verbose_name='Type')),
            ],
        ),
        migrations.CreateModel(
            name='Status',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('all_statuses', models.CharField(choices=[('new', 'new'), ('in_progress', 'in_progress'), ('pause', 'pause'), ('done', 'done')], default='new', max_length=16)),
                ('status', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='status', to='webapp.task', verbose_name='Status')),
            ],
        ),
    ]
|
986,453 | c723784ceb1f0a7c699f95e1c8469ea32a4d78b2 | #imports
import discord
from random import shuffle
import configparser
import random
import requests
import aiohttp
import traceback
import sys
import re
import json
import time
import asyncio
import os
from datetime import datetime
from discord import game
from random import randint
from discord.ext import commands
#bot stuff
# All accepted command prefixes (this is a tuple — the parentheses are
# implicit in the bare comma-separated assignment).
bot_prefix = ':', '-', '@CommuniBot ', 'CommuniBot ', '@CommuniBot#4412 ', 'CommuniBot#4412', 'communibot'
bot = commands.Bot(command_prefix=bot_prefix)
# The stock help command is replaced by the custom -help embed below.
bot.remove_command('help')
start_time = time.time()
starttime2 = time.ctime(int(time.time()))
# Read bot secrets (e.g. the token) from config.ini.
config = configparser.ConfigParser()
config.sections()
config.read('config.ini')
# NOTE(review): sections() is called twice and its return value is never
# used — both calls look like leftovers.
config.sections()
Secrets = config['Secrets']
#events
@bot.event
async def on_ready():
    # Fires once the gateway session is established: record the start time
    # and advertise the server count in the presence line.
    bot.start_time = time.time()
    print("Online.")
    presence = discord.Game(name='on {} servers | -help.'.format(len(bot.servers)))
    await bot.change_presence(game=presence, status=discord.Status.online)
#functions
def owner(ctx):
    """Command check: true only for the hard-coded bot-owner user id."""
    author = ctx.message.author
    return author.id == '276043503514025984'
#commands
#owner
@bot.command(pass_context=True)
@commands.check(owner)
async def say(ctx, *, text: str = None):
    """Say something.
    Usage: -say <text>
    Example: -say Potatoes!
    Permission: Bot Owner
    """
    # Echo the text, then delete the invoking message so only the bot speaks.
    await bot.say(text)
    await bot.delete_message(ctx.message)
@bot.command(pass_context=True)
@commands.check(owner)
async def shutdown(ctx):
    """Shutdown.
    Usage: -shutdown
    Permission: Bot Owner
    """
    await bot.say('Shutting down.')
    await bot.delete_message(ctx.message)
    await bot.logout()
@bot.command(pass_context=True)
@commands.check(owner)
async def servers(ctx):
    """See the list of servers.
    Usage: -servers
    Permission: Bot Owner
    """
    # str.join builds the listing in linear time; the original concatenated
    # with += in a loop.  Output is byte-identical, including the trailing
    # newline after the last server name.
    msg = "".join(f"{server}\n" for server in bot.servers)
    embed=discord.Embed(description=f'{msg}', color=0x2874A6)
    await bot.delete_message(ctx.message)
    await bot.say(embed=embed)
@bot.command(pass_context=True)
@commands.check(owner)
async def leaveserver(ctx, serverid: str):
    """Leave a server.
    Usage: -leaveserver <Server ID>
    Example: -leaveserver 123456789
    Permission: Bot Owner
    """
    server = bot.get_server(serverid)
    if server:
        await bot.leave_server(server)
        msg = '{} = Left server!'.format(server.name)
    else:
        msg1 = ':x: I could not find the ID of that server or you just forgot to say ID of the server!'
        return await bot.say(msg1)
    await bot.say(msg)
#ping
@bot.command(pass_context=True)
async def ping(ctx):
    '''Check how fast the bot responds.
    Usage: -ping
    '''
    # Time a single "typing" indicator round-trip as the latency probe.
    start = time.perf_counter()
    await bot.send_typing(ctx.message.channel)
    elapsed_ms = round((time.perf_counter() - start) * 1000)
    embed = discord.Embed(description='Pong! {} milliseconds.'.format(elapsed_ms), color=0x2874A6)
    await bot.say(embed=embed)
@bot.command(pass_context=True)
async def prefixes(ctx):
    '''Check all of CommuniBot's prefixes.
    Usage: -prefixes
    '''
    # Static listing; keep in sync with the bot_prefix tuple above.
    description = ('My prefix are: **-**, **:**,**@CommuniBot**, **CommuniBot**, '
                   '**@CommuniBot#4412**, **CommuniBot#4412** and **communibot**.')
    embed = discord.Embed(description=description, color=0x2874A6)
    await bot.say(embed=embed)
#help
@bot.command(pass_context=True)
async def help(ctx):
    '''See all of the commands from here.
    Usage: -help
    '''
    # Hand-maintained menu text.  NOTE(review): it advertises commands
    # (-uptime, -moderation, -server, -utilities, -piglatin, ...) that must
    # exist elsewhere in the file — keep this string in sync with them.
    embed=discord.Embed(description='Help\nPrefixes: -prefixes\n-ping - Shows the amount of milliseconds taken to respond.\n-info - Shows information about CommuniBot!\n-uptime - Shows the uptime status of CommuniBot!\n\n\n-jokes + \nShows a list of joke commands.\n\n-actions + \nShows a list of action commands.\n\n-moderation +\nShows a list of moderation commands.\n\n-fun +\nShows a list of fun commands.\n\n-server +\nLists commands about the server.\n\n-utilities +\nShows a list of commands about utilities.', color=0x2874A6)
    await bot.say(embed=embed)
@bot.group(pass_context=True)
async def fun(ctx):
    '''See all of the fun commands from here.
    Usage: -fun
    '''
    # Show the static fun-commands menu only when no subcommand was given.
    if ctx.invoked_subcommand is None:
        embed=discord.Embed(description='Fun commands\n\n\n-coinflip\nThe bot chooses between heads or tails.\n\n-8ball\nUse the magic 8ball!\n\n-comic\nShows a random comic.\n\n-cat\nShows a random cat picture.\n\n-dog\nShows a random dog picture.\n\n-say\nSay anything after the command and it will repeat it back.\n\n-choose\nMake CommuniBot choose over three or more things.\n\n-roll\nRoll any number above one.\n\n-roast\nRoast someone with a burning statement.\n\n-memes\nRandomizes between lots of memes.\n\n-piglatin\nTranslate your text into piglatin.', color=0x2874A6)
        await bot.say(embed=embed)
@bot.command(pass_context=True)
async def coinflip(ctx):
    '''Flip a coin to either land on heads or tails.
    Usage: -coinflip
    '''
    # Fair 50/50 pick between the two faces.
    outcome = random.choice(('Heads!', 'Tails!'))
    await bot.say(outcome)
@bot.command(name="8ball", pass_context=True, aliases=['eightball'])
async def _8ball(ctx, *, question : str):
    '''Ask the bot a question and it will answer.
    Usage: -8ball <text>?
    Example: -8ball Is this question a question?
    '''
    # With pass_context=True the first parameter receives the Context; the
    # original named it `self`, which obscured that.
    # Three answer buckets: affirmative, non-committal, negative.
    responses = [["Signs point to yes.", "Yes.", "Without a doubt.", "As I see it, yes.", "You may rely on it.", "It is decidedly so.", "Yes - definitely.", "It is certain.", "Most likely.", "Outlook good."],
                 ["Reply hazy, try again.", "Concentrate and ask again.", "Better not tell you now.", "Cannot predict now.", "Ask again later."],
                 ["My sources say no.", "Outlook not so good.", "Very doubtful.", "My reply is no.", "Don't count on it."]]
    if "?" not in question:
        await ctx.bot.say("That doesn't look like a question. You need to put a question mark at the end of your sentence.")
    else:
        # Draw a bucket first, then an answer inside it (same two-step
        # draw as the original).
        await ctx.bot.say(":8ball:" + random.choice(random.choice(responses)))
@bot.command(pass_context=True, no_pm=True)
async def comic(ctx):
    '''Check out a random comic.
    Usage: -comic
    '''
    # Pull a random strip's metadata from the xkcd JSON API.
    # NOTE(review): the upper bound 1800 is hard-coded and falls behind as
    # new strips are published.
    api = "https://xkcd.com/{}/info.0.json".format(random.randint(1, 1800))
    async with aiohttp.ClientSession() as session:
        async with session.get(api) as r:
            response = await r.json()
            embed = discord.Embed(title="Comic", description=response["title"], color=0xFF0000)
            embed.set_image(url=response["img"])
            await bot.say(embed=embed)
@bot.command(pass_context=True)
async def cat(ctx):
    '''Check out a random cat.
    Usage: -cat
    '''
    # random.cat returns {"file": <image url>} on success.
    api = 'https://random.cat/meow'
    async with aiohttp.ClientSession() as session:
        async with session.get(api) as r:
            if r.status == 200:
                response = await r.json()
                embed = discord.Embed(title="Cat", description="Here's your random cat image", color=0xFF0000)
                embed.set_author(name=f"{ctx.message.author.display_name}", icon_url=f"{ctx.message.author.avatar_url}")
                embed.set_image(url=response['file'])
                await bot.say(embed=embed)
            else:
                await bot.say('Could not access random.cat API!')
@bot.command(pass_context=True)
async def dog(ctx):
    '''Check out a random dog.
    Usage: -dog
    '''
    api = "https://api.thedogapi.co.uk/v2/dog.php"
    async with aiohttp.ClientSession() as session:
        async with session.get(api) as r:
            if r.status == 200:
                response = await r.json()
                embed = discord.Embed(title="Dog", description="Here's your random Dog", color=0xFF0000)
                embed.set_author(name=f"{ctx.message.author.display_name}", icon_url=f"{ctx.message.author.avatar_url}")
                embed.set_image(url=response['data'][0]["url"])
                await bot.say(embed=embed)
            else:
                # BUG FIX: this path referenced an undefined name
                # `colorfail`, so any non-200 reply raised NameError instead
                # of sending the error embed.  Red matches the other embeds.
                x = "Could not find a dog :sad:!"
                embed = discord.Embed(title='Error', color=0xFF0000)
                embed.description = x
                await bot.say(embed=embed)
@bot.command(name='choose', aliases=['select','choice'],pass_context=True)
async def choose(ctx, *args):
    '''Make the bot choose over two or more things.
    Usage: -choose <one> <two> <three> <etc>
    Example: -choose Potatoes Tomatoes Unicorns Carrots
    '''
    # BUG FIX: the old signature was (self, ctx, *args).  With
    # pass_context=True the Context landed in `self`, the user's FIRST
    # option landed in `ctx`, so random.choice(args) could never pick the
    # first option and crashed with IndexError when only one was given.
    choice = random.choice(args)
    await bot.say(f'**`{choice}`**')
@bot.command(pass_context=True)
async def roll(ctx, number: int=100):
    '''Rolls a random number. If no number is put in, it will choose 100. It must be higher than one.
    Usage: -roll <number>
    Example: -roll 1242
    '''
    # Guard clause: nothing at or below one can be rolled.
    if number <= 1:
        await bot.say(f"{ctx.message.author.mention} Please insert a number higher than one.")
        return
    await bot.say(f"{ctx.message.author.mention} | :game_die: {randint(1, number)}")
@bot.command(pass_context=True)
async def roast(ctx, person: discord.Member):
    '''Roast someone.
    Usage: -roast <@person>
    Example: -roast @Pointless
    '''
    # Each template has exactly one {} slot, filled with the target's
    # mention below.
    roast_possible_responses = ["{}, your ass must be pretty jealous of all the shit that comes out of your mouth.","{}, some day you'll go far, and I hope you stay there.","{}, I'm trying my absolute hardest to see things from your perspective, but I just can't get my head that far up my ass.","{}, I'm not a protocolgist, but I know an asshole when I see one.","{}, Do yourself a favor and ignore anyone who tels you to be yourself. Bad idea in your case.","{}, Everyone's entitled to act stupid once in awhile, but you really abuse the privilege.","{}, Can you die of constipation? I ask because I'm worried about how full of shit you are.","{}, Sorry, I didn't get that. I don't speak bullshit.","{}, There are some remarkably dumb people in this world. Thanks for helping me understand that.","{}, I could eat a bowl of alphabet soup and shit out a smarter statement than whatever you just said.","{}, You always bring me so much joy, as soon as you leave the room.","{}, I'd tell you how I really feel, but I wasn't born with enough middle fingers to express myself in this case.","{}, You have the right to remain silent because whatever you say will probably be stupid anyway.","{}, your family tree must be a cactuss because you're all a bunch of pricks.","{}, You'll never be the man your mom is.","{}, If laughter is the best medicine, your face must be curing the world.","{}, scientists say the universe is made up of neutrons, protons and electrons. They forgot to mention morons, as you are one.","{}, if you really want to know about mistakes, you should ask your parents.","{}, I thought of you today. It reminded me to take the garbage out.","{}, you're such a beautiful, intelligent, wonderful person. Oh I'm sorry, I thought we were having a lying competition.","{}, I may love to shop but I'm not buying your bullshit.","{}, I just stepped in something that was smarter than you, and smelled better too."]
    roast_current_response = random.choice(roast_possible_responses)
    await bot.say(roast_current_response.format(person.mention))
@bot.command(pass_context=True, aliases=['meme'])
async def memes(ctx):
'''Randomizes between lots of memes
Usage: -memes
'''
memes_possible_responses = ['{} https://upload.wikimedia.org/wikipedia/commons/a/ab/Lolcat_in_folder.jpg','{} https://upload.wikimedia.org/wikipedia/commons/1/1a/Cat_crying_%28Lolcat%29.jpg','{} http://i0.kym-cdn.com/photos/images/original/001/284/242/202.jpg','{} https://i.ytimg.com/vi/6HA2D3LsJQs/hqdefault.jpg','{} http://www.lolcats.com/images/u/11/43/lolcatsdotcomsyucc7vghgeu3ygu.jpg','{} https://shesaid.com/wp-content/uploads/2016/09/7Ak9p.jpg','{} https://i.ytimg.com/vi/Bkco3bE2tg8/hqdefault.jpg','{} https://vignette.wikia.nocookie.net/epicrapbattlesofhistory/images/1/15/LOLCATS-Cloud.jpeg/revision/latest?cb=20140723224315','{} https://i.chzbgr.com/full/9089826560/h07C1DAA9/','{} https://media.mnn.com/assets/images/2012/11/lolcat_main_0.jpg','{} https://i.pinimg.com/736x/aa/3c/0f/aa3c0f3dd59210f9953a5c1c0d46c2d6--funny-pets-funny-animals.jpg','{} https://img.scoop.it/NhznTvgS8CGETQOQgXJ3DDl72eJkfbmt4t8yenImKBVvK0kTmF0xjctABnaLJIm9','{} https://www.oddee.com/wp-content/uploads/_media/imgs/articles2/a97873_rsz_drunkdial.jpg','{} https://i.chzbgr.com/full/9107324928/h0A65249F/','{} https://i.ytimg.com/vi/IaK6EUSUG4I/hqdefault.jpg','{} https://blogs.loc.gov/digitalpreservation/files/2014/07/864385794_40eef8f22b_z1.jpg','{} https://i.chzbgr.com/full/9101861888/h2C7008DC/','{} http://i0.kym-cdn.com/photos/images/facebook/001/031/948/10b.jpg','{} http://i0.kym-cdn.com/photos/images/facebook/000/559/061/d94.png','{} https://pics.me.me/my-cat-made-this-face-when-i-meowed-back-at-8561541.png','{} http://www.lolcats.com/images/u/12/43/lolcatsdotcomnapkin.jpg','{} https://longlivethekitty.com/wp-content/uploads/lolcat_airplane.jpg','{} http://www.lolcats.com/images/u/08/39/lolcatsdotcomly2r5yakozqlbhmn.jpg','{} http://firstmonday.org/ojs/index.php/fm/article/viewFile/5391/4103/40381','{} https://bighugelabs.com/img/lolcat-sample.jpg','{} http://static.wixstatic.com/media/8e31f964a29559e19acfb44ea027ab0c.wix_mp_1024','{} 
http://www.rationalitynow.com/blog/wp-content/uploads/2009/12/owlcatl.jpg','{} http://i0.kym-cdn.com/photos/images/facebook/000/519/843/833.jpg','{} http://runt-of-the-web.com/wordpress/wp-content/uploads/2017/01/wrong-answer-you-aint-cheat.jpg','{} http://i.imgur.com/yYT55QA.jpg','{} http://i0.kym-cdn.com/photos/images/original/001/209/914/6b4.jpg','{} http://i0.kym-cdn.com/photos/images/original/001/209/916/fe7.jpg','{} https://i.imgur.com/XuFg46x.jpg','{} https://i.imgur.com/vlA7u5k.jpg','{} https://stepcdn.com/assets/2017-02/03/11/8e3r2/trump-twitter-700x.jpg','{} http://runt-of-the-web.com/wordpress/wp-content/uploads/2017/01/adding-salt-to-your-drama.jpg','{} http://i0.kym-cdn.com/photos/images/original/001/211/181/422.jpg','{} https://pics.me.me/when-you-use-furthermore-in-your-essay-im-missing-the-25131584.png','{} https://pics.me.me/when-you-use-furthermore-in-your-essay-sprinkle-dat-extra-16049743.png','{} https://ecdn.teacherspayteachers.com/thumbitem/-Salt-Bae-Meme-Writing-Freebie-2978990-1485278672/original-2978990-1.jpg','{} https://pics.me.me/when-black-parents-add-an-apostrophe-to-their-childs-name-11763958.png','{} http://i0.kym-cdn.com/photos/images/facebook/001/209/136/1ef.png','{} https://pics.me.me/your-head-salt-bae-who-won-the-meme-battle-for-13363207.png','http://runt-of-the-web.com/wordpress/wp-content/uploads/2017/01/Caucasian-on-your-cv.jpg','{} https://lh3.googleusercontent.com/vnCrrk7gvVIoLQsV4HnLiabPXqKA7ls86cm-2Snuk-B9NOup-OtblK8UXYdo3qhBIk7SqtOTUEVpIOMnYmAzJ_H1jKIsJ8ElWPipvAkUthqAxhtwG1ar3ANnuFdC5pTbeNrqb8Q-','{} https://pics.me.me/cheating-lies-deceit-ent-unfaithful-god-when-he-was-creating-11587232.png','{} https://pics.me.me/mexico-autodeciaraodny-autobusas-rapid-a-pre-playas-de-tijuana-ropuerto-11675269.png','{} https://pics.me.me/thedukeofmeines-edukeof-memes-saltbae-911-jetfeul-steelbeams-twintowers-bush-proof-12396857.png','{} https://static.boredpanda.com/blog/wp-content/uploads/2017/01/818286176889085952-png__700.jpg','{} 
https://i.imgflip.com/vgh66.jpg','{} http://s2.quickmeme.com/img/b0/b0039e31a5f5ff0fbf9336d47e5d3ec2a80232f3e31e10883c15dbc66be3809d.jpg','{} http://weknowmemes.com/generator/uploads/generated/g1365444091774137766.jpg','{} https://i.chzbgr.com/original/1738866432/hC0106396/','{} http://www.imagefully.com/wp-content/uploads/2015/08/I-Dunno-Lol-Dog-Image.jpg','{} http://images4.fanpop.com/image/photos/15900000/lol-dogs-dogs-15905734-500-375.jpg','{} http://4.bp.blogspot.com/-Rny6ymoavqs/UAhodFDkDPI/AAAAAAAAsiU/8nUf9LUjGyw/s1600/funny-dog-pictures-there-there-ugly-bald-puppy.jpg','{} http://images6.fanpop.com/image/photos/37300000/Funny-Dogs-dogs-37339100-421-428.jpg','{} https://ilifejourney.files.wordpress.com/2012/10/dog-and-spiders.jpg','{} https://i1.wp.com/thefunniestpictures.com/wp-content/uploads/2014/08/Funny-Dog-1.jpg?fit=499%2C334&ssl=1','{} https://ci.memecdn.com/722962.jpg','{} https://collarfolk.com/wp-content/uploads/2017/05/8963bb3fdd1f319b0154cc646a0de37a.jpg','{} https://memegenerator.net/img/instances/500x/64586542/oh-por-deus.jpg','{} https://imgfave.azureedge.net/image_cache/1383619315754765.jpg','{} https://www.seabreeze.com.au/Img/Photos/Other/3722545.jpg','{} http://blogs.discovermagazine.com/discoblog/files/2012/10/dog_meme.jpeg','{} https://static.fjcdn.com/pictures/Lol_98ff89_2584253.jpg','{} http://i0.kym-cdn.com/photos/images/facebook/000/151/934/imade40cakes128548225192353750.jpg','{} http://s2.quickmeme.com/img/a7/a70f44decdb833e94ed530c63cce6775182c03a2f8d5f8301114b52f9724ce80.jpg','{} http://funnyanimalphoto.com/wp-content/uploads/2013/10/dog_loves_bacon.jpg?bd03d3','{} http://s2.quickmeme.com/img/fc/fc02f94bf37ff24f18337ac7de31631ef2b35296e87409184aef259c94f53d1d.jpg','{} https://i.imgur.com/u7mM6mE.jpg', '{} https://i.pinimg.com/736x/71/27/71/712771dd7c68cb9c3ccccc69a9f2e953--bit.jpg','{} https://cdn.discordapp.com/attachments/393566779269709824/396739756437929984/doggie.gif\nCredit to @Windfave#5304.']
memes_current_response = random.choice(memes_possible_responses)
await bot.say(memes_current_response.format(ctx.message.author.mention))
#info
@bot.command(pass_context=True, aliases=['botinfo','information','botinformation','binfo','boti','binformation'])
async def info(ctx):
    '''Tells you information about the bot.
    Usage: -info
    '''
    # Break the uptime down into weeks/days/hours/minutes/seconds.
    second = time.time() - start_time
    minute, second = divmod(second, 60)
    hour, minute = divmod(minute, 60)
    day, hour = divmod(hour, 24)
    week, day = divmod(day, 7)
    join = discord.Embed(description= '',title = 'Information about CommuniBot', colour = 0xFFFF);
    join.add_field(name = '__Information__', value = f"This bot was made in discord.py and was created by <@276043503514025984> (Pointless). It is for a bot that has moderation, fun commands, memes and more. It aims to make communities get less bots in total, so it doesn't look like there's too much bots in the Member list.", inline=True);
    join.add_field(name = '__Creator__', value = f"<@276043503514025984> - Created the the bot and all the commands, except for the ones who created some.", inline=True);
    join.add_field(name = '__Helped__', value = f"<@338600456383234058> - Created -say (for owner) and -poll command.", inline=True)
    join.add_field(name = '__Status__', value = f"Uptime: I've been online for %d week(s), %d day(s), %d hour(s), %d minute(s), %d second(s)!" % (week, day, hour, minute, second) + "\nTotal Servers: {} servers.".format(len(bot.servers)), inline=True)
    join.add_field(name = '__Links__', value = f"Invite link for the bot: https://discordapp.com/oauth2/authorize?client_id=406890237604790302&scope=bot&permissions=2146958591\nInvite link for the support server: https://discord.gg/Fz2pKVE\nLink for Discord Bots: https://discordbots.org/bot/406890237604790302\nLink for Github page: https://github.com/P01nt-Less/CommuniBot\nLink for Reddit page: https://www.reddit.com/r/The_Community/", inline=True)
    # BUG FIX: the original had a trailing `await bot.say(embed=embed)`
    # after this return — unreachable, and `embed` was never defined.  The
    # unused local `member` was dropped too.
    return await bot.say(embed = join);
#actions
# BUG FIX: the decorator said `Pass_context=True` (capital P).  The
# misspelled kwarg was silently ignored, so pass_context defaulted to
# False, `ctx` received the first word of the message instead of the
# Context, and `ctx.invoked_subcommand` raised AttributeError.
@bot.group(pass_context=True)
async def actions(ctx):
    '''Shows the action commands.
    Usage: -action
    '''
    # Show the static action-commands menu only when no subcommand was given.
    if ctx.invoked_subcommand is None:
        embed=discord.Embed(description='Action commands\n\n\n-punch\nPunches someone you\'d like to do that to.\n\n-legkick\nKicks anyone you like.\n\n-hug\nHugs anyone you like.\n\n-kiss\nKiss anyone you like.\n\n-uppercut\nUppercut anybody you like.\n\n-wave\nWave at anyone you\'d like to.\n\n-smile\nJust smile.\n\n-frown\nFrown. :(\n\n-slap\nSlap anyone you like.\n\n-stab\nStab people! Muehehehe!\n\n-murder\nMurder someone...\n\n-shoot\nShoot a person! Dun dun dunn!!', color=0x2874A6)
        await bot.say(embed=embed)
def _register_action(name, summary, template, target_slots=1):
    """Build and register one canned action command.

    name         -- command name typed after the prefix
    summary      -- first help line (e.g. "Punch someone.")
    template     -- format string; slot 0 takes the author's mention, any
                    remaining slots are filled with the target's mention
    target_slots -- how many template slots the target fills
                    (0 = the command takes no target, e.g. -smile)
    """
    if target_slots:
        async def action(ctx, person: discord.Member):
            mentions = [person.mention] * target_slots
            await bot.say(template.format(ctx.message.author.mention, *mentions))
        action.__doc__ = (f"{summary}\n    Usage: -{name} <@person>\n"
                          f"    Example: -{name} @Pointless\n    ")
    else:
        async def action(ctx):
            await bot.say(template.format(ctx.message.author.mention))
        action.__doc__ = f"{summary}\n    Usage: -{name}\n    "
    bot.command(name=name, pass_context=True)(action)

# (name, summary, template, target slots).  The twelve commands below had
# identical copy-pasted bodies, so they are generated from one table; the
# posted messages are byte-identical to the originals.  The originals also
# wrapped each one-element response list in random.choice(), which was a
# no-op and is gone.
for _name, _summary, _template, _slots in [
    ("punch", "Punch someone.", "{} punched {} in the face, in the face.", 1),
    ("legkick", "Kick someone.", "{} kicked {} in the leg.", 1),
    ("hug", "Hug someone.", "{} hugged {} tightly.", 1),
    ("kiss", "Kiss someone.", "{} kissed {} on the cheek.", 1),
    ("uppercut", "Uppercut someone.", "{} uppercut {} and turned {} into a giraffe.", 2),
    ("wave", "Wave at someone.", "{} waved at {} with a smile on their face.", 1),
    ("smile", "Just smile.", "{} smiled.", 0),
    ("frown", "Just frown.", "{} frowned.", 0),
    ("slap", "Slap someone.", "{} slapped {}'s face making it red. ", 1),
    ("stab", "Stab someone.", "{} stabbed {} in the heart making his last feeling pain.", 1),
    ("murder", "Murder someone.", "{} murdered {} with no one knowing anything that happened.", 1),
    ("shoot", "Shoot someone.", "{} shot {} straight in the head making them collapse onto the floor.", 1),
]:
    _register_action(_name, _summary, _template, _slots)
#jokes
@bot.group(pass_context=True)
async def jokes(ctx):
    # Bare "-jokes" (no subcommand) prints the category overview embed.
    if ctx.invoked_subcommand is not None:
        return
    overview = discord.Embed(description='Joke commands\n\n\n-insovietrussia\nThis command randomizes between lots of "In Soviet Russia" jokes.\n\n-chucknorris\nRandomizes between lots of Chuck Norris jokes/facts.\n\n-dadjoke\nRandomizes between lots of really not funny bad Dad jokes!', color=0x2874A6)
    await bot.say(embed=overview)
@bot.command(pass_context=True)
async def insovietrussia(ctx):
    '''Randomizes between lots of insovietrussia jokes.
    Usage: -insovietrussia
    '''
    # Fixed joke pool; one entry is picked uniformly at random per call.
    jokes_pool = ["In America, you drive a car. In Soviet Russia, a car drives you!", "In America the president gets assassinated. In soviet Russia, the president assassinates you!", "In America, you throw a grenade! In Soviet Russia, a grenade throws you!", "In America, you eat food. In Soviet Russia, food eats you!", "In America, you write 'R'. In Soviet Russia, you write 'Я'!", "In America, you break the law. In Soviet Russia, the law breaks you!", "Roses are red, Violets are blue, In Soviet Russia, a poem writes you!", "In America, the Grinch steals Christmas. In Soviet Russia, Christmas steals the Grinch!", "In America, you laugh at jokes. In Soviet Russia, jokes laugh at you!", "In America, Jesus sacrifices for you. In Soviet Russia, you sacrifice for Jesus!", "In America, Russians spy on you. In Soviet Russia, you spy on Russians!", "In America, you find Waldo. In Soviet Russia, Waldo finds you!", "In America, you call the police. In Soviet Russia, police calls you!", "In America, you watch TV. In Soviet Russia, the TV watches you!", "In America, you eat a cookie. But in Soviet Russia, the cookie eats you!", "In America, you play games. In Soviet Russia, games play you!"]
    await bot.say(random.choice(jokes_pool))
@bot.command(pass_context=True)
async def chucknorris(ctx):
    '''Randomizes between lots of Chuck Norris jokes.
    Usage: -chucknorris
    '''
    # Fixed joke pool; one entry is chosen uniformly at random per invocation.
    # NOTE(review): this literal appears to contain a stray line break inside
    # the "M.C. Hammer" entry — verify the source file, as-is that would be a
    # syntax error in Python.
    chucknorris_possible_responses = ["Chuck Norris threw a grenade and killed 50 people, then it exploded.", "Chuck Norris can kill two stones with one bird.", "Chuck Norris can pick oranges from an apple tree and make the best lemonade you've ever tasted.", "Once a cobra bit Chuck Norris' leg. After five days of excruciating pain, the cobra died.", "When a zombie apocalypse starts, Chuck Norris doesn't try to survive. The zombies do.", "Chuck Norris can hear sign language.", "Chuck Norris beat the sun in a staring contest.","It is considered a great accomplishment to go down Niagara Falls in a wooden barrel. Chuck Norris can go up Niagara Falls in a cardboard box.", "Chuck Norris was once on Celebrity Wheel of Fortune and was the first to spin. The next 29 minutes of the show consisted of everyone standing around awkwardly, waiting for the wheel to stop.", "Giraffes were created when Chuck Norris uppercut a horse.", "When the Bogeyman goes to sleep every night he checks his closet for Chuck Norris.", "When Chuck Norris was in middle school, his English teacher assigned an essay: \'What is courage?\' He received an A+ for turning in a blank page with only his name at the top.", "Chuck Norris will never have a heart attack... even a heart isn't foolish enough to attack Chuck Norris.", "Chuck Norris can kill your imaginary friends.", "Chuck can set ants on fire with a magnifying glass. At night.", "Chuck Norris once went to mars. Thats why there are no signs of life.", "When Bruce Banner gets mad he turns into the Hulk. When the Hulk gets mad he turns into Chuck Norris. When Chuck Norris gets mad, run.", "Chuck Norris is the reason Waldo is hiding.", "Chuck Norris is the only person that can punch a cyclops between the eye.", "When Chuck Norris enters a room, he doesn't turn the lights on, he turns the dark off.", "M.C. 
Hammer learned the hard way that Chuck Norris can touch this.", "Chuck Norris can build a snowman out of rain.", "Chuck Norris was once charged with three attempted murders in Boulder County, but the Judge quickly dropped the charges because Chuck Norris does not \'attempt\' murder.", "Leading hand sanitizers claim they can kill 99.9 percent of germs. Chuck Norris can kill 100 percent of whatever the hell he wants.", "Chuck Norris's computer has no \'backspace\' button, Chuck Norris doesn't make mistakes.", "Chuck Norris makes onions cry.", "Chuck Norris tells Simon what to do.", "Chuck Norris plays Russian roulette with a fully loaded revolver... and wins."]
    chucknorris_current_response = random.choice(chucknorris_possible_responses)
    await bot.say(chucknorris_current_response)
@bot.command(pass_context=True)
async def dadjoke(ctx):
    '''Randomizes between lots of terrible Dad jokes.
    Usage: -dadjoke
    '''
    # Fixed joke pool; one entry is chosen uniformly at random per invocation.
    # NOTE(review): the locals reuse the chucknorris_* names (copy-paste) and
    # the literal appears to contain a stray line break inside the "circus
    # fire" entry — verify the source file, as-is that would be a syntax error.
    chucknorris_possible_responses = ["What time did the man go to the dentist? Tooth hurt-y.","A ham sandwich walks into a bar and orders a beer. \nBartender says, 'Sorry we don't serve food here.'","Whenever the cashier at the grocery store asks my dad if he would like the milk in a bag he replies, 'No, just leave it in the carton!'","Me: 'Dad, make me a sandwich!' \nDad: 'Poof, You’re a sandwich!'","How do you make a Kleenex dance? Put a little boogie in it!","Two peanuts were walking down the street. One was a salted.","We were getting fast food when the lady at the window said, 'Any condiments?' My dad responded, 'Compliments? You look very nice today!'","My dad’s name is Phil, and whenever I finish eating and say, 'Dad, I’m full,' he always replies, 'No, I’m full; you're Ruby.'","I'm reading a book about anti-gravity. It's impossible to put down!","You're American when you go into the bathroom, and you're American when you come out, but do you know what you are while you're in there? European.","Did you know the first French fries weren't actually cooked in France? They were cooked in Greece.","Want to hear a joke about a piece of paper? Never mind... it's tearable."," I just watched a documentary about beavers. It was the best dam show I ever saw!","Spring is here! I got so excited I wet my plants!","I bought some shoes from a drug dealer. I don't know what he laced them with, but I was tripping all day!","When a dad drives past a graveyard: Did you know that's a popular cemetery? Yep, people are just dying to get in there!","Why did the invisible man turn down the job offer? He couldn't see himself doing it.","I used to have a job at a calendar factory but I got the sack because I took a couple of days off.","How do you make holy water? You boil the hell out of it.","MOM: 'How do I look?' DAD: 'With your eyes.'","What did the horse say after it tripped? 'Help! I’ve fallen and I can’t giddyup!'","Did you hear about the circus fire? 
It was in tents!","Don't trust atoms. They make up everything!","What do you call a cow with two legs? Lean beef. If the cow has no legs, then it’s ground beef.","How many tickles does it take to make an octopus laugh? Ten-tickles.","I’m only familiar with 25 letters in the English language. I don’t know why.","What's the best part about living in Switzerland? I don't know, but the flag is a big plus.","What do prisoners use to call each other? Cell phones.","Why couldn't the bike standup by itself? It was two tired.","What do you call a dog that can do magic? A Labracadabrador.","The fattest knight at King Arthur’s round table was Sir Cumference. He acquired his size from too much pi."," Did you see they made round bails of hay illegal in Wisconsin? It’s because the cows weren’t getting a square meal.","SERVER: 'Sorry about your wait.' DAD: 'Are you saying I’m fat?'","You know what the loudest pet you can get is? A trumpet.","I was interrogated over the theft of a cheese toastie. Man, they really grilled me.","What do you get when you cross a snowman with a vampire? Frostbite.","What do you call a deer with no eyes? No idea!","Can February March? No, but April May!","Why can't you hear a pterodactyl go to the bathroom? Because the pee is silent."," What does a zombie vegetarian eat? 'GRRRAAAAAIIIINNNNS!'","Why wasn't the woman happy with the velcro she bought? It was a total ripoff.","What do you call someone with no body and no nose? Nobody knows.","You heard of that new band 1023MB? They're good but they haven't got a gig yet.","Why did the crab never share? Because he's shellfish."]
    chucknorris_current_response = random.choice(chucknorris_possible_responses)
    await bot.say(chucknorris_current_response)
#moderation
@bot.group(pass_context=True)
async def moderation(ctx):
    '''Shows a list of moderation commands.
    Usage: -moderation
    '''
    # Only respond when no subcommand was invoked.
    if ctx.invoked_subcommand is not None:
        return
    overview = discord.Embed(description='Moderation commands\n\n-kick\n-kick <username mentioned>\nKick someone.\nNeeds permission kick_members.\n\n-ban\n-ban <mentioned username>\nBan someone.\nNeeds ban_members permission.\n\n-clear\n-clear <2 or over>\nClears the amount of messages you want to be cleared.\nNeeds permission manage_messages.\n\n-mute\n-mute <username mentioned>\nmute someone.\nNeeds permission manage_messages.\n\n-unmute\n-unmute <username mentioned>\nunmute someone.\nNeeds permission manage_messages.\n\n-unban\n-unban <mentioned username>\nUnban someone.\nNeeds ban_members permission.', color=0x2874A6)
    await bot.say(embed=overview)
@bot.command(pass_context = True)
async def kick(ctx, *, member : discord.Member = None):
    '''Kick someone
    Usage: -kick <@person>
    Example: -kick @Pointless
    Permission: kick_members
    '''
    # Caller must hold kick_members on this server.
    if not ctx.message.author.server_permissions.kick_members:
        embed = discord.Embed(description=':x: You don\'t have enough permissions for this: kick_members.', color=0xFF0000)
        await bot.delete_message(ctx.message)
        await bot.say(embed=embed)
        return
    if not member:
        await bot.delete_message(ctx.message)
        return await bot.say(ctx.message.author.mention + ", specify a user to kick!")
    try:
        await bot.kick(member)
    except Exception as e:
        # Discord raises Forbidden when the target outranks the bot.
        if 'Privilege is too low' in str(e):
            # BUGFIX: the original error text said "ban" inside the kick command.
            embed = discord.Embed(description=":x: The person you are trying to kick has high permissions.", color=0xFF0000)
            await bot.delete_message(ctx.message)
            return await bot.say(embed=embed)
    embed = discord.Embed(description="**%s** has been kicked!" % member.name, color=0xFF0000)
    # BUGFIX: delete the invoking message exactly once — the original deleted
    # it both inside the try and again on the success path, so the second
    # call raised NotFound.
    await bot.delete_message(ctx.message)
    return await bot.say(embed=embed)
@bot.command(pass_context = True)
async def ban(ctx, *, member : discord.Member = None, reason:str=None):
    '''Ban someone.
    Usage: -ban <@person>
    Example: -ban @Pointless
    Permission: ban_members
    '''
    # NOTE(review): with the consume-rest `*, member` signature the `reason`
    # parameter can never be filled by the command parser; kept for
    # interface compatibility.
    if not ctx.message.author.server_permissions.ban_members:
        embed = discord.Embed(description=':x: You don\'t have enough permissions for this: ban_members.', color=0xFF0000)
        await bot.say(embed=embed)
        await bot.delete_message(ctx.message)
        return
    if not member:
        await bot.delete_message(ctx.message)
        return await bot.say(ctx.message.author.mention + ", specify a user to ban!")
    try:
        await bot.ban(member)
    except Exception as e:
        # Forbidden: the target's top role outranks the bot's.
        if 'Privilege is too low' in str(e):
            embed = discord.Embed(description=":x: The person you are trying to ban has high permissions.", color=0xFF0000)
            await bot.delete_message(ctx.message)
            return await bot.say(embed=embed)
    embed = discord.Embed(description="**%s** has been banned!" % member.name, color=0xFF0000)
    # BUGFIX: delete the invoking message exactly once on the success path
    # (the original deleted it inside the try and again afterwards, making
    # the second call raise NotFound).
    await bot.delete_message(ctx.message)
    return await bot.say(embed=embed)
@bot.command(pass_context = True, aliases=['uban'])
async def unban(ctx, *, member : discord.Member = None):
    '''Unban someone.
    Usage: -unban <@person>
    Example: -unban @Pointless
    Permission: ban_members
    '''
    if not ctx.message.author.server_permissions.ban_members:
        embed = discord.Embed(description=':x: You don\'t have enough permissions for this: ban_members.', color=0xFF0000)
        await bot.say(embed=embed)
        await bot.delete_message(ctx.message)
        return
    if not member:
        return await bot.say(ctx.message.author.mention + ", specify a user to unban!")
    try:
        # BUGFIX: Client.unban takes (server, member); the original passed
        # only the member (compare the correct two-argument call in softban).
        await bot.unban(ctx.message.server, member)
    except Exception as e:
        if 'Privilege is too low' in str(e):
            # BUGFIX: the original error text said "ban" inside the unban command.
            embed = discord.Embed(description=":x: The person you are trying to unban has high permissions.", color=0xFF0000)
            await bot.delete_message(ctx.message)
            return await bot.say(embed=embed)
    embed = discord.Embed(description="**%s** has been unbanned!" % member.name, color=0xFF0000)
    # BUGFIX: delete the invoking message exactly once — the original deleted
    # it before the try and again inside it.
    await bot.delete_message(ctx.message)
    return await bot.say(embed=embed)
@bot.command(pass_context=True, aliases=['purge','prune'])
async def clear(ctx, amount:int):
    '''Clear messages of over 2 or more.
    Usage: -clear <number>
    Example: -clear 15
    Permission: manage_messages
    '''
    author = ctx.message.author
    if not author.server_permissions.manage_messages:
        denied = discord.Embed(description=':x: You don\'t have enough permissions for this: manage_messages.', color=0xFF0000)
        await bot.say(embed=denied)
        await bot.delete_message(ctx.message)
        return
    # Bulk-delete the requested number of messages from this channel.
    removed = await bot.purge_from(ctx.message.channel, limit=amount)
    await asyncio.sleep(0.1)
    try:
        # Post a short-lived confirmation, then clean it up after 5 seconds.
        notice = await bot.say("{}, I have deleted {} messages.".format(author.mention, len(removed)))
        await asyncio.sleep(5)
        await bot.delete_message(notice)
    except:
        # Best-effort: the confirmation may already be gone.
        pass
@bot.command(pass_context=True, no_pm=True)
async def mute(ctx, *, member : discord.Member, reason:str=None):
    '''Mute someone.
    Usage: -mute <@person>
    Example: -mute @Pointless
    Permission: manage_messages
    '''
    if not ctx.message.author.server_permissions.manage_messages:
        # BUGFIX: the original permission-denied embed reused the addrole
        # success text ("I added the role %s to %s") with unfilled %s
        # placeholders; use the same denial wording as unmute. (The inert
        # `set_footer=` keyword, which Embed() ignores, was dropped.)
        denied = discord.Embed(description=f"**{ctx.message.author}, You do not have the manage_messages permission.**", color=0xFF0000)
        await bot.delete_message(ctx.message)
        return await bot.say(embed=denied)
    # Deny send_messages for this member in the current channel only.
    overwrite = discord.PermissionOverwrite()
    overwrite.send_messages = False
    await bot.edit_channel_permissions(ctx.message.channel, member, overwrite)
    embed = discord.Embed(description="**%s** has been muted!" % member.name, color=0xFF0000)
    # BUGFIX: delete the invoking message exactly once (the original deleted
    # it twice, so the second call raised NotFound).
    await bot.delete_message(ctx.message)
    return await bot.say(embed=embed)
@bot.command(pass_context=True, no_pm=True, aliases=['umute'])
async def unmute(ctx, *, member : discord.Member):
    '''Unmute someone.
    Usage: -unmute <@person>
    Example: -unmute @Pointless
    Permission: manage_messages
    '''
    if not ctx.message.author.server_permissions.manage_messages:
        # (The inert `set_footer=` keyword, which Embed() ignores, was dropped.)
        denied = discord.Embed(description=f"**{ctx.message.author}, You do not have the manage_messages permission.**", color=0xFF0000)
        await bot.delete_message(ctx.message)
        return await bot.say(embed=denied)
    # Restore send_messages for this member in the current channel.
    overwrite = discord.PermissionOverwrite()
    overwrite.send_messages = True
    await bot.edit_channel_permissions(ctx.message.channel, member, overwrite)
    embed = discord.Embed(description="**%s** has been unmuted!" % member.mention, color=0xFF0000)
    # BUGFIX: delete the invoking message exactly once (the original deleted
    # it twice, so the second call raised NotFound).
    await bot.delete_message(ctx.message)
    return await bot.say(embed=embed)
@bot.command(pass_context = True, aliases=['sban'])
async def softban(ctx, *, member : discord.Member = None):
    '''Softban someone.
    Usage: -softban <@person>
    Example: -softban @Pointless
    Permission: ban_members
    '''
    if not ctx.message.author.server_permissions.ban_members:
        # (The inert `set_footer=` keyword, which Embed() ignores, was dropped.)
        denied = discord.Embed(description=f"**{ctx.message.author}, You do not have the ban_members permission.**", color=0xFF0000)
        await bot.delete_message(ctx.message)
        return await bot.say(embed=denied)
    if not member:
        missing = discord.Embed(description=f"**{ctx.message.author}, Specify a user to ban next time you use this command.**", color=0xFF0000)
        await bot.delete_message(ctx.message)
        return await bot.say(embed=missing)
    try:
        # Ban then immediately unban: clears recent messages without a
        # lasting ban.
        await bot.ban(member)
        await bot.unban(member.server, member)
    except Exception as e:
        if 'Privilege is too low' in str(e):
            failed = discord.Embed(description=f"**{ctx.message.author}, The person you are trying to soft-ban has high permissions.**", color=0xFF0000)
            await bot.delete_message(ctx.message)
            return await bot.say(embed=failed)
    embed = discord.Embed(description="**%s** has been soft-banned!" % member.name, color=0xFF0000)
    # BUGFIX: delete the invoking message exactly once (the original deleted
    # it inside the try and again afterwards, raising NotFound).
    await bot.delete_message(ctx.message)
    return await bot.say(embed=embed)
@bot.command(pass_context=True, no_pm=True)
async def removerole(ctx, user: discord.Member, *, role):
    '''Remove a role from someone.
    Usage: -removerole <@person> <role>
    Example: -removerole @Pointless Memer
    Permission: manage_roles
    '''
    if not ctx.message.author.server_permissions.manage_roles:
        # (The inert `set_footer=` keyword, which Embed() ignores, was dropped.)
        denied = discord.Embed(description=f"**{ctx.message.author}, You do not have the manage_roles permission.**", color=0xFF0000)
        return await bot.say(embed=denied)
    # Resolve the role by name on this server (case-sensitive match).
    await bot.remove_roles(user, discord.utils.get(ctx.message.server.roles, name=role))
    # BUGFIX: fixed the mismatched ** markdown in the confirmation text
    # (now matches addrole's wording).
    done = discord.Embed(description=f"**{ctx.message.author}, I removed the role {role} from {user.mention}.**", color=0xFF0000)
    # BUGFIX: delete the invoking message *before* returning — the original
    # placed this call after `return`, so it was unreachable.
    await bot.delete_message(ctx.message)
    return await bot.say(embed=done)
@bot.command(pass_context=True, no_pm=True)
async def addrole(ctx, user: discord.Member, *, role):
    '''Add a role to someone.
    Usage: -addrole <@person> <role>
    Example: -addrole @Pointless Memer
    Permission: manage_roles
    '''
    if not ctx.message.author.server_permissions.manage_roles:
        # (The inert `set_footer=` keyword, which Embed() ignores, was dropped.)
        denied = discord.Embed(description=f"**{ctx.message.author}, You do not have the manage_roles permission.**", color=0xFF0000)
        return await bot.say(embed=denied)
    # Resolve the role by name on this server (case-sensitive match).
    await bot.add_roles(user, discord.utils.get(ctx.message.server.roles, name=role))
    done = discord.Embed(description=f"**{ctx.message.author}, I added the role {role} to {user.mention}.**", color=0xFF0000)
    # BUGFIX: delete the invoking message *before* returning — both
    # delete_message calls in the original sat after `return` and were
    # unreachable.
    await bot.delete_message(ctx.message)
    return await bot.say(embed=done)
@bot.command(pass_context = True, no_pm = True)
async def announce(ctx, *, text: str):
    '''Announce something.
    Usage: -announce <text>
    Example: -announce I like potatoes!
    Permission: Administrator
    '''
    # Admin-only broadcast: the invoking message is removed and the text is
    # reposted inside an embed.
    if not ctx.message.author.server_permissions.administrator:
        denied = discord.Embed(description=f"**{ctx.message.author}, You do not have the administrator permission.**", color=0xFF0000, footer=f'Command executed by: {ctx.message.author}')
        return await bot.say(embed=denied)
    announcement = discord.Embed(title="__Announcement__", description=text, color=0xFF0000, footer=f'Announcement by: {ctx.message.author}.')
    await bot.delete_message(ctx.message)
    await bot.say(embed=announcement)
#server
@bot.group(pass_context=True)
async def server(ctx):
    '''Shows a list of server commands.
    Usage: -server
    Example: -server
    '''
    # Only respond when no subcommand was invoked.
    if ctx.invoked_subcommand is not None:
        return
    overview = discord.Embed(description='Server commands\n\n-serverinfo\nCheck the info about the server.\n\n-bans\nLists the banned people on the server.\n\n-userinfo\nCheck info about you or other users.', color=0x2874A6)
    await bot.say(embed=overview)
@bot.command(pass_context = True)
async def serverinfo(ctx):
    '''Shows information about the server.
    Usage: -serverinfo
    '''
    server = ctx.message.server
    roles = [x.name for x in server.role_hierarchy]
    role_length = len(roles)
    if role_length > 50:  # too many roles to list them all
        roles = roles[:50]
        # BUGFIX: report the real total — the original used len(roles)
        # *after* truncation, so it always printed "Displaying[50/50]".
        roles.append('>>>> Displaying[50/%s] Roles' % role_length)
    roles = ', '.join(roles)
    channelz = len(server.channels)
    time = str(server.created_at).split(' ')[0]  # keep the date, drop the clock
    join = discord.Embed(description='%s ' % (str(server)), title='Server Name', colour=0xFFFF)
    join.set_thumbnail(url=server.icon_url)
    join.add_field(name='__Owner__', value=str(server.owner) + '\n' + server.owner.id, inline=True)
    join.add_field(name='__ID__', value=str(server.id), inline=True)
    join.add_field(name='__Member Count__', value=str(server.member_count), inline=True)
    join.add_field(name='__Text/Voice Channels__', value=str(channelz), inline=True)
    join.add_field(name='__Roles (%s)__' % str(role_length), value=roles, inline=True)
    join.add_field(name='__Region__', value=str(server.region), inline=True)
    join.add_field(name='__AFK Timeout__', value=str(server.afk_timeout), inline=True)
    join.add_field(name='__AFK Channel__', value=str(server.afk_channel), inline=True)
    join.add_field(name='__Verification Level__', value=str(server.verification_level), inline=True)
    join.add_field(name='__Custom Emotes__', value=len(server.emojis), inline=True)
    join.add_field(name='__Channels__', value=len(server.channels), inline=True)
    join.add_field(name='__Features:__', value=ctx.message.server.features, inline=True)
    join.set_footer(text='Created at: %s' % time)
    return await bot.say(embed=join)
@bot.command(pass_context = True, aliases=['banlist'])
async def bans(ctx):
    '''Shows a list of bans.
    Usage: -bans
    '''
    banned_users = await bot.get_bans(ctx.message.server)
    names = '\n'.join(u.name for u in banned_users)
    # Empty join means the ban list is empty.
    if len(names) == 0:
        names = 'No one is banned.'
    listing = discord.Embed(title = "List of banned people:", description = names, color = 0xFFFFF)
    return await bot.say(embed = listing)
@bot.command(pass_context=True)
async def userinfo(ctx, user: discord.Member = None):
    '''Shows information about a user.
    Usage: -userinfo <@person>
    Example: -userinfo @Pointless
    '''
    # Default to the invoking user when no member is mentioned.
    # (Removed the dead `member = discord.User` assignment the original had.)
    if not user:
        user = ctx.message.author
    join = discord.Embed(description='%s ' % (str(user)), title='__Full Name__', colour=0xFFFF)
    join.set_thumbnail(url=user.avatar_url)
    join.add_field(name='__Name__', value=str(user.name), inline=True)
    join.add_field(name='__ID__', value=str(user.id), inline=True)
    join.add_field(name='__Nickname__', value=str(user.nick), inline=True)
    join.add_field(name='__Account Created At__', value=str(user.created_at), inline=True)
    join.add_field(name='__Joined Server At__', value=str(user.joined_at), inline=True)
    join.add_field(name='__Game__', value=str(user.game), inline=True)
    # Simplified the original's `str("%s") % join(...)` wrapper — same output.
    join.add_field(name='__Roles__', value=", ".join([role.name for role in user.roles if role.name != "@everyone"]), inline=True)
    join.add_field(name='__Avatar URL__', value=str(user.avatar_url), inline=True)
    join.add_field(name='__Status__', value=str(user.status), inline=True)
    join.add_field(name='__Highest Role__', value=str(user.top_role), inline=True)
    return await bot.say(embed=join)
#utilities
@bot.group(pass_context=True)
async def utilities(ctx):
    '''Shows a list of utility commands.
    Usage: -utilities
    '''
    # Only respond when no subcommand was invoked.
    if ctx.invoked_subcommand is not None:
        return
    overview = discord.Embed(description='Utility commands\n\n\n-avatar\nGet the avatar link of a user.\n\n-poll\nCreate a poll with the thumbs up, shrug and thumbs down reaction.\n\n-embed\nEmbed a message so CommuniBot can say what you wanted.\n\n-translate\nTranslate from one language to another. Supported list of languages: https://tech.yandex.com/translate/doc/dg/concepts/api-overview-docpage/#languages \n\n-urbandict\nSearch definitions in the Urban Dictionary.\n\n-math +\nShows a list of mathematic commands.\n\n-conversion +\nShows a list of conversion commands.', color=0x2874A6)
    await bot.say(embed=overview)
@bot.command(pass_context=True, no_pm=True)
async def avatar(ctx, member : discord.Member = None):
    '''Get the avatar of a member.
    Usage: -avatar <@person>
    Example: -avatar @Pointless
    '''
    # Default to the invoking user when no member is mentioned.
    target = member if member is not None else ctx.message.author
    await bot.say(target.avatar_url)
@bot.command(pass_context=True)
async def poll(ctx,*, message: str):
    '''Creates a poll.
    Usage: -poll <text>
    Example: -poll Do you like CommuniBot?
    '''
    author = ctx.message.author
    # Embed the question, credit the author, then seed the three vote reactions.
    ballot = discord.Embed(color = author.color, timestamp = datetime.utcnow())
    ballot.set_author(name = "Poll", icon_url = author.avatar_url)
    ballot.description = message
    ballot.set_footer(text = author.name)
    posted = await bot.say(embed = ballot)
    for emoji in ("👍", "🤷", "👎"):
        await bot.add_reaction(posted, emoji)
@bot.command(pass_context=True, aliases=['tr'])
async def translate(ctx, tl, *words: str):
    '''Translate something. Supported list of languages: https://tech.yandex.com/translate/doc/dg/concepts/api-overview-docpage/#languages
    Usage: translate <from>-<to>
    Example: translate en-pl sandwich
    '''
    # `tl` is the Yandex language-pair string, e.g. "en-pl"; the remaining
    # arguments are re-joined into the text to translate.
    words = ' '.join(words)
    # SECURITY NOTE(review): the Yandex API key is hard-coded in this URL —
    # it should be moved into the same secrets store as the bot token.
    answer = requests.get("https://translate.yandex.net/api/v1.5/tr.json/translate?key=trnsl.1.1.20170315T092303Z.ece41a1716ebea56.a289d8de3dc45f8ed21e3be5b2ab96e378f684fa&text={0}&lang={1}".format(words,tl)).json()
    # [2:-2] strips the surrounding ['...'] of the single-element list repr.
    await bot.say("{0} {1}".format(ctx.message.author.mention, str(answer["text"])[2:-2]))
@bot.command(pass_context=True, aliases=['urbandictionary','urbandict','udict','udictionary','udefine','urbandefine'])
async def urban(ctx,*msg):
    '''Search definitions in the Urban Dictionary.
    Usage: -urban <word>
    '''
    word = ' '.join(msg)
    api = "http://api.urbandictionary.com/v0/define"
    response = requests.get(api, params=[("term", word)]).json()
    if len(response["list"]) == 0:
        # BUGFIX: the original called undefined `client.say`; the bot object
        # is named `bot` everywhere else in this file.
        return await bot.say("Could not find that word!")
    embed = discord.Embed(title = ":mag: Search Word", description = word, color = 0xFFF00)
    embed.add_field(name = "Top definition:", value = response['list'][0]['definition'])
    embed.add_field(name = "Examples:", value = response['list'][0]["example"])
    embed.set_footer(text = "Tags: " + ', '.join(response['tags']))
    await bot.say(embed = embed)
@bot.command(name='_-', pass_context=True)
async def _correction(ctx):
    '''-_- Correction.
    '''
    # Intentionally a no-op: registering "-_-" stops the emoticon from being
    # treated as an unknown command.
    pass
@bot.group(pass_context=True, aliases=['maths','mathematics','mathematic','calculation'])
async def math(ctx):
    '''Shows a list of math commands.
    Usage: -math
    '''
    # Only respond when no subcommand was invoked.
    if ctx.invoked_subcommand is not None:
        return
    overview = discord.Embed(description='Math commands\n\n\n-add\nAdd two numbers together.\n\n-subtract\nSubtract two numbers.\n\n-multiply\nMultiply two numbers together.\n\n-divide\nDivide two numbers together\n\n-modulo\nFind the remainder of a number when divided together.\n\n-exponent\nFind a number to the power of another number.', color=0x2874A6)
    await bot.say(embed=overview)
@bot.command(pass_context=True, aliases=['addition'])
async def add(ctx, number1: int, number2: int):
    '''Add two numbers together.
    Usage: -add <a> <b>
    Example: -add 2 2
    '''
    total = number1 + number2
    await bot.say(total)
@bot.command(pass_context=True, aliases=['subtraction','minus'])
async def subtract(ctx, number1: int, number2: int):
    '''Subtract two numbers.
    Usage: -subtract <a> <b>
    Example: -subtract 4 2
    '''
    difference = number1 - number2
    await bot.say(difference)
@bot.command(pass_context=True, aliases=['times','multiplication'])
async def multiply(ctx, number1: int, number2: int):
    '''Multiply two numbers together.
    Usage: -multiply <a> <b>
    Example: -multiply 2 2
    '''
    product = number1 * number2
    await bot.say(product)
@bot.command(pass_context=True, aliases=['division','share'])
async def divide(ctx, number1: int, number2: int):
    '''Divide two numbers.
    Usage: -divide <a> <b>
    Example: -divide 10 5
    '''
    quotient = number1 / number2
    await bot.say(quotient)
@bot.command(pass_context=True, aliases=['remainder','modulus'])
async def modulo(ctx, number1: int, number2: int):
    '''Find the remainder of when you divide two numbers together.
    Usage: -modulo <a> <b>
    Example: -modulo 7 3
    '''
    remainder = number1 % number2
    await bot.say(remainder)
@bot.command(pass_context=True, aliases=['power'])
async def exponent(ctx, number1: int, number2: int):
    '''Find the power of a number how many times you like.
    Usage: -exponent <a> <b>
    Example: -exponent 4 2
    '''
    power = number1 ** number2
    await bot.say(power)
@bot.group(pass_context=True, aliases=['convert'])
async def conversion(ctx):
    '''Shows a list of commands that converts from one unit of whatever to another.
    Usage: -conversion
    '''
    # Only respond when no subcommand was invoked.
    if ctx.invoked_subcommand is not None:
        return
    overview = discord.Embed(description='Conversion commands\n\n\n-temperature +\nShows a list of temperature conversion commands.', color=0x2874A6)
    await bot.say(embed=overview)
@bot.group(pass_context=True)
async def temperature(ctx):
    '''Shows a list of temperature conversion commands.
    Usage: -temperature
    '''
    if ctx.invoked_subcommand is not None:
        return
    overview = discord.Embed(description='Temperature Conversion commands\n\n\n-centigrade +\nShows a list of centigrade commands.\n\n-kelvin +\nShows a list of Kelvin commands.\n\n-fahrenheit +\nShows a list of Fahrenheit commands.\n\n-rankine +\nShows a list of Rankine commands.', color=0x2874A6)
    await bot.say(embed=overview)
@bot.group(pass_context=True)
async def centigrade(ctx):
    '''Shows a list of centigrade conversion commands.
    Usage: -centigrade
    '''
    if ctx.invoked_subcommand is not None:
        return
    overview = discord.Embed(description='Centigrade Conversion commands\n\n\n-centigrade-fahrenheit\nConverts Centigrade to Fahrenheit.\n\n-centigrade-kelvin\nConverts Centigrade to Kelvin.\n\n-centigrade-rankine\nConverts Centigrade to Rankine.', color=0x2874A6)
    await bot.say(embed=overview)
@bot.group(pass_context=True)
async def kelvin(ctx):
    '''Shows a list of Kelvin conversion commands.
    Usage: -kelvin
    '''
    if ctx.invoked_subcommand is not None:
        return
    overview = discord.Embed(description='Kelvin Conversion commands\n\n\n-kelvin-fahrenheit\nConverts Kelvin to Fahrenheit.\n\n-kelvin-centigrade\nConverts Kelvin to Centigrade.\n\n-kelvin-rankine\nConverts Kelvin to Rankine.', color=0x2874A6)
    await bot.say(embed=overview)
@bot.group(pass_context=True)
async def fahrenheit(ctx):
    '''Shows a list of Fahrenheit conversion commands.
    Usage: -fahrenheit
    '''
    if ctx.invoked_subcommand is not None:
        return
    overview = discord.Embed(description='Fahrenheit Conversion commands\n\n\n-fahrenheit-centigrade\nConverts Fahrenheit to Centigrade.\n\n-fahrenheit-kelvin\nConverts Fahrenheit to Kelvin.\n\n-fahrenheit-rankine\nConverts Fahrenheit to Rankine.', color=0x2874A6)
    await bot.say(embed=overview)
@bot.group(pass_context=True)
async def rankine(ctx):
    '''Shows a list of Rankine conversion commands.
    Usage: -rankine
    '''
    if ctx.invoked_subcommand is not None:
        return
    overview = discord.Embed(description='Rankine Conversion commands\n\n\n-rankine-fahrenheit\nConverts Rankine to Fahrenheit.\n\n-rankine-kelvin\nConverts Rankine to Kelvin.\n\n-rankine-centigrade\nConverts Rankine to Centigrade..', color=0x2874A6)
    await bot.say(embed=overview)
@bot.command(name='centigrade-fahrenheit',pass_context=True)
async def centigrade_fahrenheit(ctx, number: int):
    '''Convert Centigrade to Fahrenheit
    Usage: -centigrade-fahrenheit <number>
    Example: -centigrade-fahrenheit 10
    '''
    # F = C * 1.8 + 32
    converted = (number * 1.8) + 32
    await bot.say(str(converted) + '°F')
@bot.command(name='centigrade-kelvin',pass_context=True)
async def centigrade_kelvin(ctx, number: int):
    '''Convert Centigrade to Kelvin
    Usage: -centigrade-kelvin <number>
    Example: -centigrade-kelvin 10
    '''
    # K = C + 273.15
    converted = number + 273.15
    await bot.say(str(converted) + '°K')
@bot.command(name='centigrade-rankine',pass_context=True)
async def centigrade_rankine(ctx, number: int):
    '''Convert Centigrade to Rankine
    Usage: -centigrade-rankine <number>
    Example: -centigrade-rankine 10
    '''
    # R = (C + 273.15) * 9/5
    converted = (number + 273.15) * (9/5)
    await bot.say(str(converted) + '°R')
@bot.command(name='fahrenheit-centigrade',pass_context=True)
async def fahrenheit_centigrade(ctx, number: int):
    '''Convert Fahrenheit to Centigrade
    Usage: -fahrenheit-centigrade <number>
    Example: -fahrenheit-centigrade 10
    '''
    # C = (F - 32) / 1.8
    converted = (number - 32) / 1.8
    await bot.say(str(converted) + '°C')
@bot.command(name='fahrenheit-kelvin',pass_context=True)
async def fahrenheit_kelvin(ctx, number: int):
    '''Convert Fahrenheit to Kelvin
    Usage: -fahrenheit-kelvin <number>
    Example: -fahrenheit-kelvin 10
    '''
    # BUGFIX: K = (F - 32) / 1.8 + 273.15 — the original omitted the
    # +273.15 offset and so returned degrees Celsius labelled as Kelvin.
    await bot.say(str((number - 32) / 1.8 + 273.15) + '°K')
@bot.command(name='fahrenheit-rankine',pass_context=True)
async def fahrenheit_rankine(ctx, number: int):
    '''Convert Fahrenheit to Rankine
    Usage: -fahrenheit-rankine <number>
    Example: -fahrenheit-rankine 10
    '''
    # R = F + 459.67
    rankine_degrees = number + 459.67
    await bot.say('{0}°R'.format(rankine_degrees))
@bot.command(name='kelvin-fahrenheit',pass_context=True)
async def kelvin_fahrenheit(ctx, number: int):
    '''Convert Kelvin to Fahrenheit
    Usage: -kelvin-fahrenheit <number>
    Example: -kelvin-fahrenheit 10
    '''
    # F = K * 9/5 - 459.67
    fahrenheit = (number * (9/5)) - 459.67
    await bot.say('{0}°F'.format(fahrenheit))
@bot.command(name='kelvin-centigrade',pass_context=True)
async def kelvin_centigrade(ctx, number: int):
    '''Convert Kelvin to Centigrade
    Usage: -kelvin-Centigrade <number>
    Example: -kelvin-Centigrade 10
    '''
    # C = K - 273.15
    centigrade = number - 273.15
    await bot.say('{0}°C'.format(centigrade))
@bot.command(name='kelvin-rankine',pass_context=True)
async def kelvin_rankine(ctx, number: int):
    '''Convert Kelvin to Rankine
    Usage: -kelvin-rankine <number>
    Example: -kelvin-rankine 10
    '''
    # R = K * 9/5
    rankine_degrees = number * (9/5)
    await bot.say('{0}°R'.format(rankine_degrees))
@bot.command(name='rankine-fahrenheit',pass_context=True)
async def rankine_fahrenheit(ctx, number: int):
    '''Convert Rankine to Fahrenheit
    Usage: -rankine-fahreneheit <number>
    Example: -rankine-fahrenheit 10
    '''
    # F = R - 459.67
    fahrenheit = number - 459.67
    await bot.say('{0}°F'.format(fahrenheit))
@bot.command(name='rankine-centigrade',pass_context=True)
async def rankine_centigrade(ctx, number: int):
    '''Convert Rankine to Centigrade
    Usage: -rankine-centigrade <number>
    Example: -rankine-centigrade 10
    '''
    # C = (R - 491.67) * 5/9
    centigrade = (number - 491.67) * (5/9)
    await bot.say('{0}°C'.format(centigrade))
@bot.command(name='rankine-kelvin',pass_context=True)
async def rankine_kelvin(ctx, number: int):
    '''Convert Rankine to Kelvin
    Usage: -rankine-kelvin <number>
    Example: -rankine-kelvin 10
    '''
    # K = R * 5/9
    kelvin = number * (5/9)
    await bot.say('{0}°K'.format(kelvin))
#token
bot.run(Secrets['Token']) |
986,454 | 39bf8f973810c62dbd32859696a1b7e3064b3298 | # Generated by Django 3.0.7 on 2020-06-19 10:27
from django.db import migrations
class Migration(migrations.Migration):
    # Schema-only migration: renames the DetaljiPretrage field `naziv`
    # to `naziv_detalji`.  No data transformation is involved.
    dependencies = [
        ('detalji_za_pretragu', '0001_initial'),
    ]
    operations = [
        migrations.RenameField(
            model_name='detaljipretrage',
            old_name='naziv',
            new_name='naziv_detalji',
        ),
    ]
|
986,455 | c422b8aea6f92106ce1fdf1f9960f04283a3821f | # -*- coding: utf-8 -*-
''' Data Quality Checking Module
This module contains functions for identifying corrupt or bad quality data.
'''
import numpy as np
from solardatatools.utilities import local_median_regression_with_seasonal,\
local_quantile_regression_with_seasonal
def daily_missing_data_simple(data_matrix, threshold=0.2,
                              return_density_signal=False):
    """
    Flag days in a PV power matrix that are not missing significant data.

    A day passes when the fraction of its entries that are non-NaN and larger
    than 0.5% of the estimated system capacity exceeds ``threshold``.  In a
    typical healthy data set roughly 40-60% of each day's measurements are
    non-zero, so the default 20% cutoff is conservative.

    :param data_matrix: numpy.array, a matrix containing PV power signals
        (one column per day)
    :param threshold: float, minimum data density for a day to pass
    :param return_density_signal: bool, also return the per-day density
    :return: boolean array (True = day passes), optionally with the density
    """
    nan_mask = np.isnan(data_matrix)
    # Estimate system capacity as the 95th percentile of observed power.
    capacity_estimate = np.quantile(data_matrix[~nan_mask], 0.95)
    filled = np.where(nan_mask, 0., data_matrix)
    is_measurement = filled > 0.005 * capacity_estimate
    density_signal = np.sum(is_measurement, axis=0) / data_matrix.shape[0]
    good_days = density_signal > threshold
    if return_density_signal:
        return good_days, density_signal
    return good_days
def daily_missing_data_advanced(data_matrix, threshold=0.2,
                                return_density_signal=False,
                                return_fit=False):
    """
    Score each day of a PV power matrix by its data density relative to a
    seasonal baseline.

    The per-day density (fraction of entries above 2% of estimated capacity)
    is divided by a 90th-percentile seasonal quantile fit, so a score near 1
    means a typical, complete day.

    :param data_matrix: numpy.array, a matrix containing PV power signals
    :param threshold: float, minimum density for a day to be used in the fit
    :param return_density_signal: bool, also return the raw density signal
    :param return_fit: bool, also return the fitted seasonal baseline
    :return: scores array, optionally as a tuple with density and/or fit
    """
    nan_mask = np.isnan(data_matrix)
    capacity_estimate = np.quantile(data_matrix[~nan_mask], 0.95)
    filled = np.where(nan_mask, 0., data_matrix)
    is_measurement = filled > 0.02 * capacity_estimate
    density_signal = np.sum(is_measurement, axis=0) / data_matrix.shape[0]
    use_days = density_signal > threshold
    # Fit the seasonal upper-quantile baseline through the dense days only.
    fit_signal = local_quantile_regression_with_seasonal(
        density_signal,
        use_ixs=use_days,
        tau=0.9,
        solver='MOSEK'
    )
    scores = density_signal / fit_signal
    out = [scores]
    if return_density_signal:
        out.append(density_signal)
    if return_fit:
        out.append(fit_signal)
    return out[0] if len(out) == 1 else tuple(out)
def dataset_quality_score(data_matrix, threshold=0.2, good_days=None,
                          use_advanced=True):
    """
    Score a complete data set as the fraction of its days that pass the
    missing-data test.  A score of 1 means every day passes.

    :param data_matrix: numpy.array, a matrix containing PV power signals
    :param threshold: float, the threshold to identify good days
    :param good_days: optional precomputed per-day result; computed when None
    :param use_advanced: select the advanced or the simple daily test
    :return: the score, a float between 0 and 1
    """
    if good_days is None:
        daily_test = (daily_missing_data_advanced if use_advanced
                      else daily_missing_data_simple)
        good_days = daily_test(data_matrix, threshold=threshold)
    return np.sum(good_days) / data_matrix.shape[1]
|
986,456 | 1da86ed225eed48d9f1ccd73419a290520d09f1a | import sys
import re
import networkx as nx
known_edges = """
1 2
2 3
3 5
5 8
8 13
"""
edges = """
1 2,7
2 1,3,14
3 5,6
4 6,8
5 3,8,10
6 1,4,10,11
7 1,2,9
8 10,13,15
9 5,7,10
10 7,11,12
11 2,3,6,9
12 4,5,7
13 8,12
14 15
15 11,16
16 13
"""
edge_data = []
for a, bs in re.findall(r'(?m)^(\d+) ([\d,]+)$', known_edges + edges):
for b in bs.split(","):
a = int(a)
b = int(b)
if a == b:
continue
edge_data.append([a, b])
g = nx.DiGraph()
g.add_edges_from(edge_data)
start = 1
end = 13
if len(sys.argv) > 1:
start = int(sys.argv[1])
if len(sys.argv) > 2:
end = int(sys.argv[2])
print(nx.shortest_path(g, start, end))
|
986,457 | efd5a59831ecf5a488f3a73c59c93cec56414764 | a=int(input('enter the value of a'))
b=int(input('enter the value of b'))
if a>b:
print("a is greater than b")
else:
print("b is greater than a")
dl309@soetcse:~/lekhana$ python3 pg123.py
enter the value of a23
enter the value of b56
b is greater than a
|
986,458 | 7ec1ae74abde00d229d7792fc48a6c403352e980 | #-Runs PCR-GLOBWB using the ERA-Interim Reanalysis over the period 1979-2010 inclusive
# using mean daily temperature and daily GPCP-corrected rainfall totals as well as monthly reference
# potential evapotranspiration based on Penman-Monteith
# NOTE: the model is run by a generic water balance model that takes daily input, including reference potential evapotranspiration
# 1) daily meto input is retrieved by symbolic links
# monthly potential evapotranspiration is scaled on the basis of Hamon's temperature based method;
# 2) the model is initially run over the entire, available reanalysis period until the cell-based average
# specific runoff from the land surface converges within 1% for the last year (note that runoff over the
# water surface is instantaneaous and does not change from year-to-year); for thespin-up, the report interval
# is set to yearly only, except for the last year of the reanalysis period; for the definite run, all years are reported
# on a daily timestep and stored;
# 3) in the definite run, also the routing model is applied (standard parameterization); from the runs, monthly storages
# are retrieved for comparsion with GRACE
##-Modules used
import os, sys,shutil, math, calendar,tarfile,datetime, zlib, zipfile
import pcraster as pcr
import pcraster.framework as pcrm
from pcrBasicFunctions import *
from pcrglobwb_WaterBodies_forsubsample import *
from random import choice
#-functions
def estimateBankFulDischarge(Q, constant=9.8874, exponent=0.8363):
    """Empirical power-law estimate of bankfull discharge from the
    long-term mean discharge Q (same units as Q)."""
    bankfull = constant * (Q ** exponent)
    return bankfull
def extractWaterBodyParameterization(zipFileName,extractionPath,matchList):
    """Extract every archive member whose name contains any string in
    *matchList* into *extractionPath*, flattening directory structure
    (only the base file name is kept).

    FIX: use context managers so the archive and the output files are
    closed even when an extraction fails part-way through; the original
    leaked the handles on any exception.
    """
    with zipfile.ZipFile(zipFileName) as zipArchive:
        # Collect matching member names first, mirroring the original
        # two-pass behaviour.
        matchedFiles = [name for name in zipArchive.namelist()
                        if any(match in name for match in matchList)]
        for sourceFileName in matchedFiles:
            # Flatten: write under extractionPath using the base name only.
            targetFileName = os.path.join(extractionPath, os.path.split(sourceFileName)[1])
            with open(targetFileName, 'wb') as tempFile:
                tempFile.write(zipArchive.read(sourceFileName))
#-Main
#-Initialization
#-forcing dataset to be used
forcingDataSet= 'VIC_0.5'
domainStr= 'Africa'
#-date management: start and end date and corresponding time steps
# and duration (days) of a model step
duration= 1.
timeSec= 86400
startYear= 2000
endYear= 2005
yearRange= range(startYear,endYear)
lastYear= endYear-1
initYear= startYear-1
initYears= range(startYear,startYear+1)
julianDay= [1,32,60,91,121,152,182,213,244,274,305,335,366]
addDay= [0,0,1,1,1,1,1,1,1,1,1,1,1]
#-paths
#-results and storage
mapsDir= 'maps/0.5deg'
tempDir= 'temp'
pathIni= 'initialconditions'
pathRes= 'results/0.5deg'
pathResMonthly= 'resmonthly'
pathResZip= 'reszip/0.5deg'
pathRemote= '/forcings/'
#-model outputs for import and archiving
resStackList= ['qloc','qw','qc','wst','vel','fldd','fldf','wata','watd','rest','mbe','mbr']
qlocStart= 0
iniOutVarStart= 2
addOutVarStart= 4
extFile= '.001'
# - archives with daily results stored as tar.gz and zip file with parameterization
dailyArchiveFileName= 'pcrglobwb_%s_%04d.tar.gz'
reservoirParameterizationZipFileName= 'reservoirparameterization.zip'
##-file names used by the routing model, this is messy!
#-initialization: boolean settings
noReservoirs= False
testLocalWaterBalance= False
reportLocalWaterBalance= False
getSurfaceWaterAttributes= True
initializeRoutingModel= False
#-number of runs to initialize the model
nrRunsInitialization= 1
#-weight to update long-term average discharge and bankful values
updateQWeight= 0.2
#-maps: clone map and cell area
clone= pcr.readmap(os.path.join(mapsDir,'cloneAfrica.map'))
#-set sample domain
xMin= -25.5
xMax= 57.5
yMin= -35.0
yMax= 37.5
sampleResolution= 0.5
resampleRatio= 1.
#-class to clip map during read
clippedRead= pcrPartialRead(xMin,xMax,yMin,yMax,sampleResolution,resampleRatio,clone)
cellAreaMap= os.path.join(mapsDir,'cellarea30.map')
cellArea= clippedRead.get(cellAreaMap,'scalar')
#-channel and floodplain characteristics
LDDMap= os.path.join(mapsDir,'lddsound_30min.map')
LDD= pcr.lddrepair(clippedRead.get(LDDMap,'ldd'))
channelGradient= clippedRead.get(os.path.join(mapsDir,'globalgradchannel.map'))
channelWidth= clippedRead.get(os.path.join(mapsDir,'channel_width.map'))
channelLength= clippedRead.get(os.path.join(mapsDir,'channel_length.map'))
channelDepth= clippedRead.get(os.path.join(mapsDir,'channel_depth.map'))
floodplainMask= pcr.spatial(pcr.boolean(1)) # NOTE: set to zero for static, to one for dynamic floodplains
channelManN= 0.04
floodplainManN= 0.10
#-flood plain parameterization
#-root of file name with maps of relative elvation above floodplain
# and associated fractions
relZFileName= 'elev%04d.map'
areaFractions=[0.0,0.01,0.05,0.10,0.20,0.30,0.40,\
0.50,0.60,0.70,0.80,0.90,1.00]
# reduction parameter of smoothing interval and error threshold
reductionKK= 0.5
criterionKK= 40.
#-modelSignature
if pcr.cellvalue(pcr.mapmaximum(pcr.scalar(floodplainMask)),1)[0] == 1:
modelSignature= forcingDataSet+'_dynamic-routing'
else:
modelSignature= forcingDataSet+'_static-routing'
if noReservoirs:
modelSignature= modelSignature+'_noreservoirs'
modelSignature+= '_%s' % domainStr
#-stacks of specific runoff and direct flux over water surface [m per unit area and per time step]
# for model input and root of maps of initial storage [m3], discharge [m3/s], flood depth [m] and
# flood fraction [m2/m2]
landSurfaceQFileName= os.path.join(pathRes,'qloc')
waterSurfaceQFileName= os.path.join(pathRes,'qw')
actualStorageFileName= os.path.join(pathRes,'wst')
QFileName= os.path.join(pathRes,'qc')
flowVelocityFileName= os.path.join(pathRes,'vel')
floodedDepthFileName= os.path.join(pathRes,'fldd')
floodedFractionFileName= os.path.join(pathRes,'fldf')
surfaceWaterAreaFileName= os.path.join(pathRes,'wata')
surfaceWaterDepthFileName= os.path.join(pathRes,'watd')
surfaceWaterResidenceTimeFileName= os.path.join(pathRes,'rest')
mbeFileName= os.path.join(pathRes,'mbe.map')
mbrFileName= os.path.join(pathRes,'mbr.map')
#-Start
#-echo to screen and file
currentDate= datetime.datetime.now()
logFileName= 'pcr-globwb_%s_%04d-%02d-%02d_30min.log' % (modelSignature,currentDate.year,currentDate.month,currentDate.day)
logFile= open(logFileName,'w')
wStr= 'Running routing for PCR-GLOBWB large-scale hydrological model forced with %s over %d-%d inclusive\n' %\
(modelSignature.replace('_',' '),startYear,lastYear)
print wStr
logFile.write(wStr+'\n')
#-create output directory for initial conditions
try:
os.makedirs(os.path.join(pathIni,modelSignature))
except:
pass
#-create temporary output directory
try:
createOutputDirs([tempDir])
except:
pass
#-read in maps with water body parameterization for initialization
extractWaterBodyParameterization(reservoirParameterizationZipFileName,tempDir,[str(initYear)])
#-initialize the model if specified
if initializeRoutingModel:
#-initialization
pathIniSource= os.path.join(pathIni,modelSignature,'ini_%04d' % initYear)
if not os.path.exists(pathIniSource):
createOutputDirs([pathIniSource])
#-long-term values of evapotranspiration and discharge from monthly files
wStr= '* initialization: extracting long-term evaporation and discharge for %d-%d' % (min(initYears),max(initYears))
print wStr
logFile.write(wStr+'\n')
#-fraction water surface needed to initialize long-term discharge and open water evaporation
fractionWater= clippedRead.get(os.path.join(tempDir,'fracwat_%d.map' % initYear))
#-discharge in m3/s, runoff over water surface in m3/year
averageQ= pcr.scalar(0)
averageQWat= pcr.scalar(0)
yearCnt= 0
for year in initYears:
yearCnt+= 1
#-extract input from archive
dailyArchiveInputFileName= dailyArchiveFileName % (forcingDataSet,year)
print '- processing archive %s' % dailyArchiveInputFileName
extractDailyData(os.path.join(pathRemote,forcingDataSet),dailyArchiveInputFileName,pathRes,\
['q1mon','q2mon','q3mon','qwmon'],calendar.isleap(year))
#-update long-term values
for month in xrange(1,13):
day= julianDay[month]
if calendar.isleap(year):
day+= addDay[month]
day-= 1
monthLength= calendar.monthrange(year,month)[1]
#-local runoff and evaporation in m3/month
qloc= pcr.scalar(0)
qwat= pcr.scalar(0)
for fileRoot in ['q1mon','q2mon','q3mon']:
qloc+= (1.-fractionWater)*cellArea*clippedRead.get(generateNameT(os.path.join(pathRes,fileRoot),day))
for fileRoot in ['qwmon']:
qwat+= fractionWater*cellArea*clippedRead.get(generateNameT(os.path.join(pathRes,fileRoot),day))
averageQ+= pcr.accuthresholdflux(LDD,qloc+pcr.max(0.,qwat),pcr.max(0.,-qwat))/(12*monthLength*24*3600)
averageQWat+= qwat
#-update maps for number of years and compute bankful discharge
averageQ/= yearCnt
averageQIni= averageQ
averageQ= pcr.ifthenelse(clippedRead.get(os.path.join(tempDir,'waterbodyoutlet_%d.map' % initYear),'nominal') != 0,\
averageQ,0)
averageQWat/= yearCnt
bankfulQ= estimateBankFulDischarge(averageQ)
#-report maps
targetFileName= os.path.join(pathIniSource,'qavg_longterm.map')
pcr.report(averageQ,targetFileName)
targetFileName= os.path.join(pathIniSource,'qbank_longterm.map')
pcr.report(bankfulQ,targetFileName)
targetFileName= os.path.join(pathIniSource,'qwat_avg_longterm.map')
pcr.report(averageQWat,targetFileName)
#-define initital values for waterbodies
#-initializing class of water bodies for the first year
if not initializeRoutingModel:
pathIniSource= os.path.join(pathIni,modelSignature,'ini_%04d' % startYear)
fractionWater= clippedRead.get(os.path.join(tempDir,'fracwat_%d.map' % initYear))
waterBodies= pcrglobWaterBodies(os.path.join(tempDir,'waterbodyid_%d.map' % initYear),\
os.path.join(tempDir,'waterbodyoutlet_%d.map' % initYear),os.path.join(tempDir,'waterbodytype_%d.map' % initYear),\
os.path.join(mapsDir,'channel_width.map'),os.path.join(pathIniSource,'qavg_longterm.map'),\
os.path.join(pathIniSource,'qbank_longterm.map'),LDDMap,os.path.join(tempDir,'reservoirparameters_%d_30min_.tbl' % initYear),timeSec,clippedRead)
#-create initial files if the routing model is initialized
if initializeRoutingModel:
for variable in resStackList[iniOutVarStart:addOutVarStart]:
targetFileName= os.path.join(pathIniSource,pcrm.generateNameT(variable,0).replace('.000','.ini'))
if 'qc' in variable:
pcr.report(pcr.scalar(0),targetFileName)
elif 'wst'in variable:
waterBodiesType= pcr.nominal(waterBodies.returnMapValue(pcr.spatial(pcr.scalar(0.)),waterBodies.type))
endorheicLakes= pcr.ifthenelse((pcr.areatotal(pcr.scalar(waterBodies.outlet != 0),waterBodies.distribution) == 0) & \
(waterBodies.distribution != 0),waterBodies.distribution,0)
#-storage of water bodies is assigned at outlets
#-storage over endorheic lakes
actualStorage= pcr.ifthen(endorheicLakes != 0,\
pcr.upstream(LDD,pcr.ifthenelse((endorheicLakes == 0) & ((waterBodies.distribution == 0) | (waterBodies.outlet != 0)),\
averageQ,0)))
actualStorage= pcr.ifthen((waterBodies.location != 0) & (endorheicLakes != 0),\
pcr.max(0.,pcr.areatotal(pcr.cover(actualStorage*timeSec*365.25+averageQWat,0),endorheicLakes)))
#-storage over other water bodies
actualStorage= pcr.cover(actualStorage,\
pcr.ifthen((waterBodiesType == 2) & (waterBodies.location != 0),\
waterBodies.returnMapValue(pcr.spatial(pcr.scalar(0.)),waterBodies.capacity*waterBodies.maxLimit)),\
pcr.ifthen((waterBodiesType == 1) & (waterBodies.location != 0),\
pcr.areatotal(fractionWater*cellArea,waterBodies.distribution)*(averageQ/(waterBodies.cLake*channelWidth))**(2./3.)))
actualStorage= pcr.ifthen(waterBodies.distribution != 0,pcr.cover(actualStorage,0))
#-storage over rivers
actualStorage= pcr.cover(actualStorage,channelLength* (channelManN*averageQ*channelWidth**(2./3.)*channelGradient**-0.5 )**0.6)
pcr.report(actualStorage,targetFileName)
#-initial conditions created, execute the followingl until exitCode becomes true
# set run counter and completion identifier
runCnt= 0
exitCode= False
while not exitCode:
#-pre-processing: selection of year and in- and output locations depends on
# whether the model is initialized or actually ran for a time series
createOutputDirs([pathRes])
if initializeRoutingModel:
#-initialization: take random year from selection and set in and output of initial conditions
# to year prior to start run
wStr= 'initialization routing model'
year= choice(initYears)
pathIniSource=os.path.join(pathIni,modelSignature,'ini_%04d' % initYear)
if (runCnt+1) < nrRunsInitialization:
pathIniTarget= pathIniSource
else:
pathIniTarget= os.path.join(pathIni,modelSignature,'ini_%04d' % (startYear))
createOutputDirs([pathIniTarget])
else:
wStr= 'actual run'
#-run the model for actual sequence of years, output location incremented by one year
year= yearRange.pop(0)
pathIniSource=os.path.join(pathIni,modelSignature,'ini_%04d' % year)
pathIniTarget= os.path.join(pathIni,modelSignature,'ini_%04d' % (year+1))
createOutputDirs([pathIniTarget])
#-initializing, calling and running the model:
#-set start and end date and number of days
startDate= datetime.datetime(year,1,1)
endDate= datetime.datetime(year,12,31)
yearDays= endDate.toordinal()-startDate.toordinal()+1
##-extract input from archive
dailyArchiveInputFileName= dailyArchiveFileName % (forcingDataSet,year)
print '* processing archive %s' % dailyArchiveInputFileName
#extractDailyData(os.path.join(pathRemote,forcingDataSet),dailyArchiveInputFileName,pathRes,\
# resStackList[qlocStart:iniOutVarStart],calendar.isleap(year))
#-copy initial conditions
copyIniFiles(pathIniSource,pathRes,['ini'])
#-retrieve average long-term discharge and estimate bankful discharge
averageQ= pcr.readmap(os.path.join(pathIniSource,'qavg_longterm.map'))
bankfulQ= estimateBankFulDischarge(averageQ)
targetFileName= os.path.join(pathIniSource,'qbank_longterm.map')
pcr.report(bankfulQ,targetFileName)
#-call model
execfile('pcr_rout_dynfloodplain_v3_forsubsample.py')
routingscheme= pcrglobRoutingDynFloodPlains(startDate,endDate)
dynRouting = pcrm.DynamicFramework(routingscheme,lastTimeStep= int((endDate.toordinal()-startDate.toordinal()+1)/duration),\
firstTimestep= 1)
dynRouting.run()
#-run completed, update waterbodies for current year and post-process
currentDate= datetime.datetime.now()
wStr= '* run:%6d, year:%6d, end: %04d-%02d-%02d %02d:%02d %30s' %\
(runCnt,year,currentDate.year,currentDate.month,currentDate.day,currentDate.hour,currentDate.minute,wStr)
#-update waterbody parameters
if not initializeRoutingModel:
#-read in maps with water body parameterization for initialization and initialize water bodies for next year
createOutputDirs([tempDir])
extractWaterBodyParameterization(reservoirParameterizationZipFileName,tempDir,[str(year)])
#-update long-term average discharge when initialization complete, resetting it to zero for changed outlets
if runCnt >= 0:
if initializeRoutingModel:
changedOutlet= (waterBodies.outlet != 0) ^ (clippedRead.get(os.path.join(tempDir,'waterbodyoutlet_%d.map' % initYear)) != 0)
else:
changedOutlet= (waterBodies.outlet != 0) ^ (clippedRead.get(os.path.join(tempDir,'waterbodyoutlet_%d.map' % year)) != 0)
targetFileName= os.path.join(pathIniTarget,'updated_outlet.map')
pcr.report(changedOutlet,targetFileName)
try:
targetFileName= os.path.join(pathIniTarget,'qavg_longterm.map')
print targetFileName
Q= stackAverage(pathRes,resStackList[iniOutVarStart],1,yearDays)
print 1
averageQ= (1.-updateQWeight)*averageQ+updateQWeight*Q
print 2
#averageQ= pcr.ifthenelse(changedOutlet,averageQIni,averageQ)
print 3
pcr.report(averageQ,targetFileName)
#-update wStr
wStr+= 'total runoff [km3]: %.2e' % \
pcr.cellvalue(pcr.maptotal(pcr.ifthen(LDD == 5,Q*1.e-9*yearDays*timeSec)),1)[0]
except:
print sys.exc_info()
wStr+= '\n - long-term discharge has not been updated!'
if not initializeRoutingModel:
#-read in maps with water body parameterization for initialization and initialize water bodies for next year
del waterBodies
waterBodies= pcrglobWaterBodies(os.path.join(tempDir,'waterbodyid_%d.map' % year),\
os.path.join(tempDir,'waterbodyoutlet_%d.map' % year),os.path.join(tempDir,'waterbodytype_%d.map' % year),\
os.path.join(mapsDir,'channel_width.map'),os.path.join(pathIniSource,'qavg_longterm.map'),\
os.path.join(pathIniSource,'qbank_longterm.map'),LDDMap,os.path.join(tempDir,'reservoirparameters_%d_30min_.tbl' % year),timeSec,clippedRead)
fractionWater= clippedRead.get(os.path.join(tempDir,'fracwat_%d.map' % year))
wStr+= '\n - waterbodies updated for %d' % (year+1)
#-write log string to screen and file
print wStr
wStr+= '\n'
logFile.write(wStr)
logFile.flush()
#-update initial conditions
targetFileName=generateNameT(actualStorageFileName,0).replace('.000','.ini')
shutil.copy(targetFileName,targetFileName.replace('.ini','org.ini'))
actualStorage= pcr.readmap(targetFileName)
actualStorage= pcr.ifthenelse(waterBodies.distribution != 0,\
pcr.ifthenelse(waterBodies.location != 0, pcr.areatotal(actualStorage,waterBodies.distribution),0.),\
actualStorage)
pcr.report(actualStorage,targetFileName)
targetFileName= generateNameT(QFileName,0).replace('.000','.ini')
shutil.copy(targetFileName,targetFileName.replace('.ini','org.ini'))
Q= pcr.readmap(targetFileName)
Q= pcr.ifthenelse(waterBodies.distribution != 0,\
pcr.ifthenelse(waterBodies.location != 0, Q,0),Q)
pcr.report(Q,targetFileName)
#-move initial velocity estimate
sourceFileName= generateNameT(flowVelocityFileName,0).replace('.000','.ini')
targetFileName= sourceFileName.replace(pathRes,pathIniSource)
shutil.move(sourceFileName,targetFileName)
#-copy initial conditions and archive output
copyIniFiles(pathRes,pathIniTarget,['ini'])
if not initializeRoutingModel:
dailyArchiveOutputFileName= os.path.join(pathResZip,\
dailyArchiveFileName % (modelSignature,year))
print '\t* archiving daily output to %s' % dailyArchiveOutputFileName
dailyArchive= tarfile.open(dailyArchiveOutputFileName,'w:gz')
#-add files
for fileName in os.listdir(pathRes):
sourceFileName= os.path.join(pathRes,fileName)
if not '.ini' in fileName:
for variable in resStackList[iniOutVarStart:]:
if variable in fileName:
dailyArchive.add(sourceFileName,fileName)
for fileName in os.listdir(pathIniSource):
sourceFileName= os.path.join(pathIniSource,fileName)
dailyArchive.add(sourceFileName,sourceFileName)
dailyArchive.close()
'''
try:
print '\t* transferring output',
shutil.move(os.path.join(pathResZip,dailyArchiveFileName % (modelSignature,year)),\
os.path.join(pathRemote,forcingDataSet,dailyArchiveFileName % (modelSignature,year)))
print
except:
print 'has not succeeded'
'''
#-increment runCnt and reset initialization
# and decide to stop run when no actual years are left
runCnt+= 1
if runCnt >= nrRunsInitialization: initializeRoutingModel= False
if len(yearRange) == 0: exitCode= True
#-close log file, empty output directories and terminate
#createOutputDirs([pathRes,tempDir])
#-close log file and moving files
wStr= 'run terminated at %s' % datetime.datetime.now()
print wStr
logFile.write(wStr+'\n')
logFile.close()
"""print 'moving output'
for fileName in os.listdir(pathResZip):
if '.zip' in fileName or '.tar' in fileName:
sourceFileName= os.path.join(pathResZip,fileName)
targetFileName= os.path.join(pathRemote,forcingDataSet,fileName)
shutil.move(sourceFileName,targetFileName)
shutil.copy(logFileName,os.path.join(pathRemote,forcingDataSet,logFileName))
print 'all processed!'
"""
|
986,459 | 945cdcdba30dbcf5e82f7ba8eb7c5e0b4e9d3176 | # -*- coding: utf-8 -*-
from . import engineering_bom_batch
from . import engineering_bom
from . import engineering_bom_component
from . import engineering_bom_adjacency
from . import engineering_bom_diff
from . import engineering_part_diff
|
986,460 | 712e69b1e7f06f3e4ab351179a179514aecd1834 | # Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
# this solution avoids extra space
class Solution(object):
    """Lowest common ancestor of two nodes p and q in a binary tree,
    computed in O(1) extra space (beyond recursion) by threading a pair of
    booleans (found_p, found_q) up the recursion and recording the split
    node in self.lca."""
    def LCA(self, root, p, q):
        # Returns (found_p, found_q) for the subtree rooted at `root`.
        # Sets self.lca at the first node where both targets are covered.
        # Nodes are matched by value (root.val == p.val), so the tree is
        # assumed to have unique values — TODO confirm with the caller.
        #base cases: empty subtree contains neither target
        if not root:
            return False, False
        if not root.left and not root.right:
            if root.val == p.val:
                return True, False
            if root.val == q.val:
                return False, True
            # A leaf matching neither falls through; both recursive calls
            # below hit the `not root` base case and return (False, False).
        #left subtree
        left_p, left_q = self.LCA(root.left, p, q)
        #if both p and q in left subtree no need to recurse on the right side
        # (self.lca was already set in that deeper call)
        if left_p and left_q:
            return True, True
        #right subtree
        right_p, right_q = self.LCA(root.right, p, q)
        if right_p and right_q:
            return True, True
        #cases for current node being one of p or q:
        # an ancestor of the other target is its own LCA
        if root.val == p.val:
            if left_q or right_q:
                self.lca = root
                return True, True
            else:
                return True, False
        if root.val == q.val:
            if left_p or right_p:
                self.lca = root
                return True, True
            else:
                return False, True
        # p and q found in different subtrees: this node is the split point
        if (left_p and right_q) or (left_q and right_p):
            self.lca = root
            return True, True
        # exactly one target found somewhere below: propagate upward
        if left_p or right_p:
            return True, False
        if left_q or right_q:
            return False, True
        return False, False
    def lowestCommonAncestor(self, root, p, q):
        """
        :type root: TreeNode
        :type p: TreeNode
        :type q: TreeNode
        :rtype: TreeNode
        """
        # Run the recursive search; it stores the answer in self.lca
        # (None when no common ancestor exists).
        self.lca = None
        self.LCA(root, p, q)
        return self.lca
|
986,461 | 64fcb88d4949dfb6ae6cb237f5765766533346fb | #!/usr/bin/env python3
# Apply one Collatz step to the integer read from stdin and print the result.
value = int(input())
if value % 2:
    # Odd: 3n + 1.
    value = value * 3 + 1
else:
    # Even: halve.
    value //= 2
print(value)
|
986,462 | 0e04be1dfc78a91a388d0e761a5652a9ebbf9976 | from django.conf.urls import url
from . import views
# urlpatterns是被django自动识别的路由列表变量
urlpatterns = [
# 每个路由信息都需要使用url函数来构造
# url(路径, 视图)
url(r'^weather/([a-z]+)/(\d{4})/$', views.weather),
]
|
986,463 | 4b5d60b743d464a93ca041dbcd4e3db0260eb48f | # -*- coding: utf8 -*-
from ffman.ffmpeg.framework.ffprobe import *
from ffman.ffmpeg.framework.parameters import *
class AVProb(object):
    """Thin wrapper around ffprobe that queries scalar properties of a
    media file.

    NOTE(review): every getter appends its flags to the shared ``self.input``,
    so querying more than one property on the same instance accumulates
    parameters across calls — presumably one instance is meant to serve a
    single query.  TODO confirm before relying on repeated calls.
    """
    def __init__(self, inFile):
        # Quote the path so file names with spaces survive the command line.
        self.input = Input('"'+inFile+'"')
        self.input.add_formatparam('-hide_banner', None)
        self.output = Output(None)

    def _query(self, params, cast):
        """Append (flag, value) pairs to the probe input, run ffprobe, and
        cast a single-line result; multi-line output is returned raw.
        Factored out of the six getters, which all repeated this pattern."""
        for flag, value in params:
            self.input.add_formatparam(flag, value)
        result = self.run()
        if len(result) == 1:
            result = cast(result[0].decode('utf-8'))
        return result

    def getVideoLen(self):
        """Container duration in seconds (float)."""
        return self._query([
            ('-v', 'error'),
            ('-show_entries', 'format=duration'),
            ('-of', 'default=noprint_wrappers=1:nokey=1'),
        ], float)

    def getVideoSize(self):
        """File size in bytes (int)."""
        return self._query([
            ('-v', 'error'),
            ('-show_entries', 'format=size'),
            ('-of', 'default=noprint_wrappers=1:nokey=1'),
        ], int)

    def getVideoWidth(self):
        """Width in pixels of the first video stream (int)."""
        return self._query([
            ('-v', 'error'),
            ('-select_streams', 'v:0'),
            ('-show_entries', 'stream=width'),
            ('-of', 'default=noprint_wrappers=1:nokey=1'),
        ], int)

    def getVideoHeight(self):
        """Height in pixels of the first video stream (int)."""
        return self._query([
            ('-v', 'error'),
            ('-select_streams', 'v:0'),
            ('-show_entries', 'stream=height'),
            ('-of', 'default=noprint_wrappers=1:nokey=1'),
        ], int)

    def getVideoFrameRate(self):
        """Average frame rate of the first video stream, evaluated from the
        'num/den' fraction that ffprobe reports."""
        # NOTE(review): eval() on subprocess output is a security smell; it is
        # tolerable only because the value comes from ffprobe itself.  An
        # explicit fraction parse would be safer — kept as-is to preserve
        # behaviour exactly.
        return self._query([
            ('-v', 'error'),
            ('-select_streams', 'v:0'),
            ('-show_entries', 'stream=avg_frame_rate'),
            ('-of', 'default=noprint_wrappers=1:nokey=1'),
        ], eval)

    def getVideoFrames(self):
        """Number of decoded frames in the first video stream (int).  Slow:
        -count_frames forces a full decode of the stream."""
        return self._query([
            ('-v', 'error'),
            ('-count_frames', None),
            ('-select_streams', 'v:0'),
            ('-show_entries', 'stream=nb_read_frames'),
            ('-of', 'default=noprint_wrappers=1:nokey=1'),
        ], int)

    def run(self):
        """Execute ffprobe with the accumulated parameters and return its
        output lines (elements are byte strings, per the .decode() use)."""
        return FFProbe('ffprobe', self.input, self.output).run()
def main():
    """Demo entry point: print the frame count of a local sample file."""
    probe = AVProb("input.mp4")
    print(probe.getVideoFrames())


if __name__ == '__main__':
    main()
986,464 | 7e85209c19785ccfbb4ef33fa2ec8e9194601e94 | #########################################################
## CS 4750 (Fall 2018), Assignment #2 ##
## Script File Name: fst.py ##
## Student Name: Benxin Niu ##
## Login Name: bn2645 ##
## MUN #: 201518321 ##
#########################################################
import os
import sys
import collections
# The class the provide features like composition of two FST
# The class that provides features like composition of two FST
class FstComposer:
    """Builds the composition of two FSTs via the product construction,
    with epsilon ('-') handling; the result is in self.composed_fst."""

    def __init__(self, F1, F2):
        # Upper bound on the number of product states.
        self.num_state_validator = F1.num_state * F2.num_state
        # BUG FIX: the original used F1.chars.extend(F2.chars), which mutates
        # F1.chars in place and returns None, so the composed FST was built
        # with the bogus alphabet str(None).  Concatenate instead and pass a
        # space-separated string, which FST.__init__ splits back into a list.
        self.chars_validator = F1.chars + F2.chars
        self.composed_fst = FST(self.num_state_validator,
                                ' '.join(self.chars_validator))
        self.__compose(F1, F2)

    # Compose two FST F1, F2
    def __compose(self, F1, F2):
        """Create the product states, then wire transitions between every
        ordered pair of product states."""
        new_states = self.__compose_states(F1, F2)
        for q1_q2 in new_states:
            for q3_q4 in new_states:
                self.__add_transitions(F1, F2, q1_q2, q3_q4, new_states)

    # Add transitions to the composed FST
    def __add_transitions(self, F1, F2, q1_q2, q3_q4, new_states):
        """Add composed transitions from product state q1_q2 to q3_q4."""
        q1, q2 = q1_q2["state"][0], q1_q2["state"][1]
        q3, q4 = q3_q4["state"][0], q3_q4["state"][1]
        for trans1 in F1.transitions:
            for trans2 in F2.transitions:
                if trans1.source == q1 and trans1.dest == q3 and trans2.source == q2 and trans2.dest == q4:
                    # F1's output symbol matches F2's input symbol: chain them.
                    if trans1.upper == trans2.lower:
                        new_transition = Transition(q1_q2["new_s_name"], trans1.lower, trans2.upper, q3_q4["new_s_name"])
                        self.composed_fst.add_transitions(new_transition)
                    # F1 emits epsilon: advance F1 only; F2 stays in q2.
                    if trans1.upper == "-" and trans2.lower != "-":
                        dest = self.__find_new_name(new_states, [str(q3), str(q2)])
                        new_transition = Transition(q1_q2["new_s_name"], trans1.lower, "-", dest)
                        self.composed_fst.add_transitions(new_transition)
                    # F2 consumes epsilon: advance F2 only; F1 stays in q1.
                    if trans1.upper != "-" and trans2.lower == "-":
                        dest = self.__find_new_name(new_states, [str(q1), str(q4)])
                        new_transition = Transition(q1_q2["new_s_name"], "-", trans2.upper, dest)
                        self.composed_fst.add_transitions(new_transition)

    # add new states to the composed FST
    def __compose_states(self, F1, F2):
        """Create one product state per (F1 state, F2 state) pair; a product
        state is final iff both component states are final."""
        states = list()
        index = 1
        for s1, f1 in F1.states.items():
            for s2, f2 in F2.states.items():
                s_info = dict()
                s_info["state"] = [s1, s2]
                s_info["final"] = f1 and f2
                s_info["new_s_name"] = str(index)
                index = index + 1
                # BUG FIX: FST.add_state marks a state final only when its
                # second argument equals the string "F"; the original passed
                # the boolean s_info["final"], so no composed state was ever
                # final.  Translate the boolean to the expected marker.
                self.composed_fst.add_state(s_info["new_s_name"],
                                            "F" if s_info["final"] else "N")
                states.append(s_info)
        return states

    def __find_new_name(self, states_list, state):
        """Return the renamed product state for the [q_F1, q_F2] pair
        (None when the pair is unknown)."""
        for s in states_list:
            if s["state"] == state:
                return s["new_s_name"]
# provide static method that read a fst file and generate a FST instance
class FstGenerator:
    """Factory that parses an FST description file into an FST instance."""

    def __init__(self):
        pass

    @staticmethod
    def read_fst(fn):
        # File format (inferred from the parsing below):
        #   first line: "<num_states> <alphabet-token>"
        #   unindented lines: a state declaration "<name> <flag>"
        #   indented lines: a transition "<lower> <upper> <dest>" out of
        #   the most recently declared state (the loop-carried `s`).
        # NOTE(review): relies on the `os` module being imported at the
        # top of the file.
        if os.path.isfile(fn):
            try:
                open_file = open(fn, "r")
                lines = open_file.readlines()
                num, chars = lines.pop(0).split()
                fst_instance = FST(num, chars)
                for line in lines:
                    if not line.startswith(' '):
                        s, f = line.strip().split()
                        fst_instance.add_state(s, f)
                    elif line.startswith(' '):
                        l, u, d = line.strip().split()
                        # `s` is the state parsed on the last unindented line.
                        transition = Transition(s, l, u, d)
                        fst_instance.add_transitions(transition)
                # fst_instance.print_fst_info()
                return fst_instance
            except IOError:
                raise IOError("Failure in opening {}".format(fn))
        else:
            raise AssertionError('file does not exist or is not a file! {} \n'.format(fn))
# Representation of a FST
class FST:
    """A finite-state transducer: named states with finality flags, plus
    transitions carrying a lower-tape and an upper-tape symbol."""

    def __init__(self, num, chars):
        self.num_state = int(num)                 # declared number of states
        self.chars = chars.split()                # alphabet symbols
        self.states = collections.OrderedDict()   # state name -> is_final
        self.transitions = list()                 # Transition objects

    def add_state(self, state, f):
        # A state is final exactly when its flag is the string "F".
        is_final = f == "F"
        self.states[state] = is_final

    def add_transitions(self, transition):
        self.transitions.append(transition)

    def list_states(self):
        # Debug helper: dump all states and their finality.
        print("FST has {} states".format(self.num_state))
        for s, f in self.states.items():
            print ("state: {}, is final: {}".format(s, f))

    def list_transitions(self):
        # Debug helper: dump the alphabet and every transition.
        print("FST has {} strings".format(self.chars))
        for t in self.transitions:
            t.print_trans()

    def print_fst_info(self):
        print ("Composed FST has total of {} states, {} transitions".format(len(self.states), len(self.transitions)))

    def reconstruct_upper(self, s):
        # Print every upper-tape string producible for lower string s,
        # starting the search from state "1".
        self.__reconstruct_u(s, "", "", "1")

    def reconstruct_lower(self, s):
        # Print every lower-tape string producible for upper string s,
        # starting the search from state "1".
        self.__reconstruct_l(s, "", "", "1")

    # recursive reconstruction upper form
    def __reconstruct_u(self, word, lower, upper, state):
        # The sentinel "@@" marks that the whole input word has been
        # consumed; accept (print) only when the current state is final.
        if lower == "@@" and self.states[state] is True:
            print upper
        elif lower == "@@" and self.states[state] is False:
            return
        else:
            for trans in self.transitions:
                # Transition extends the matched prefix without finishing it...
                if trans.source == str(state) and (word.startswith(lower + trans.lower) and word != lower + trans.lower):
                    self.__reconstruct_u(word, lower+trans.lower, upper+trans.upper, trans.dest)
                # ...or completes the word exactly.
                elif trans.source == str(state) and word == lower+trans.lower:
                    self.__reconstruct_u(word, "@@", upper+trans.upper, trans.dest)
            return

    # recursive reconstruction upper form
    def __reconstruct_l(self, word, upper, lower, state):
        # Mirror of __reconstruct_u with the tapes swapped.
        if upper == "@@" and self.states[state] is True:
            print lower
        elif upper == "@@" and self.states[state] is False:
            return
        else:
            for trans in self.transitions:
                if trans.source == str(state) and (word.startswith(upper + trans.upper) and word != upper + trans.upper):
                    self.__reconstruct_l(word, upper+trans.upper, lower+trans.lower, trans.dest)
                elif trans.source == str(state) and word == upper+trans.upper:
                    self.__reconstruct_l(word, "@@", lower+trans.lower, trans.dest)
            return
# Representation of a Transition
class Transition:
    """One arc of an FST: maps a lower-tape symbol to an upper-tape
    symbol while moving from *source* to *dest* ('-' appears to denote
    the epsilon symbol elsewhere in this file)."""

    def __init__(self, s, l, u, d):
        self.source = s   # source state name
        self.lower = l    # lower-tape symbol
        self.upper = u    # upper-tape symbol
        self.dest = d     # destination state name

    def print_trans(self):
        # Debug helper: one-line description of this transition.
        print("source: {}, lower: {}, upper: {}, dest: {}"
              .format(self.source, self.lower, self.upper, self.dest))
|
986,465 | b30fb35a4f152cb2d32ef25a4017208aabadceef | import csv
import os
import shutil
from PIL import Image
from pprint import pprint
# Source/destination folders and the square output resolution.
labelpath = r'C:\Users\Administrator\Desktop\Coding_stuff\facial_recognition\data\label\label.csv'
imagepath = r'C:\Users\Administrator\Desktop\Coding_stuff\facial_recognition\data\image\cropped'
destpath = r'C:\Users\Administrator\Desktop\Coding_stuff\facial_recognition\data\image\resized'
res = 224

# Resize every cropped image to res x res and save it under destpath.
for _dirpath, _dirnames, filenames in os.walk(imagepath):
    for filename in filenames:
        picture = Image.open(os.path.join(imagepath, filename))
        picture = picture.resize((res, res))
        picture.save(os.path.join(destpath, filename))
|
986,466 | ccbf8ee87a835481d6f915333d68598a0996f748 | from mods import config_path
# config_path() presumably prepares sys.path so D26 is importable -- TODO confirm.
config_path()
from D26 import uconfig
# Exclude these features from the training feature list.
uconfig.features.train = [x for x in uconfig.features.train if not x in ["mult","nPho","nNeuHad","nChHad"]]
|
986,467 | 20c5dbd807065b4e6c3a4f8ed88fe2b0f7231dbc | # Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from telemetry import test
from telemetry.core import util
from measurements import blink_perf
class BlinkPerfAll(test.Test):
  """Runs every Blink test under third_party/WebKit/PerformanceTests."""
  tag = 'all'
  test = blink_perf.BlinkPerfMeasurement

  def CreatePageSet(self, options):
    # Build the page set from the whole PerformanceTests tree.
    path = os.path.join(util.GetChromiumSrcDir(),
                        'third_party', 'WebKit', 'PerformanceTests')
    return blink_perf.CreatePageSetFromPath(path)
class BlinkPerfAnimation(test.Test):
  """Runs only the Blink Animation performance tests."""
  tag = 'animation'
  test = blink_perf.BlinkPerfMeasurement

  def CreatePageSet(self, options):
    path = os.path.join(util.GetChromiumSrcDir(),
        'third_party', 'WebKit', 'PerformanceTests', 'Animation')
    return blink_perf.CreatePageSetFromPath(path)
class BlinkPerfWebAnimations(test.Test):
  """Runs the Animation tests with the web-animations CSS flag enabled."""
  tag = 'web_animations'
  test = blink_perf.BlinkPerfMeasurement

  def CreatePageSet(self, options):
    # Same pages as BlinkPerfAnimation; only the browser flags differ.
    path = os.path.join(util.GetChromiumSrcDir(),
        'third_party', 'WebKit', 'PerformanceTests', 'Animation')
    return blink_perf.CreatePageSetFromPath(path)

  def CustomizeBrowserOptions(self, options):
    options.AppendExtraBrowserArgs('--enable-web-animations-css')
|
986,468 | 8c1172e38813500ece98c1370ef84bf621deb690 | message = "Hello World"
# Demo of common str operations: replace, slicing, count, f-strings.
new_message = message.replace("World", "Universe")
print(message[:4])             # first four characters
print(message.count("Hello"))  # occurrences of a substring
print(new_message)

intro = "Game"
outro = "Time"
# f-string interpolation, including a method call inside the braces.
broadcast = f"{intro}, {outro.upper()}. Lets GO!"
print(broadcast)
# print(dir(outro))
|
986,469 | 64c001d0da88b9c28e3a5f51b0c20b0557c496f4 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Coded by Rendy Andhika
"""
ngapai bosq? mau recode?
tinggal pake aja susah amat sih?!
"""
# Bail out early with install instructions when `requests` is missing.
try:
    import os, requests, time
except ModuleNotFoundError:
    print("\nSepertinya module requests BELUM Di Install")
    print("$ pip install requests\n")
    exit()
os.system('clear')  # clear the terminal before drawing the banner
# ANSI escape codes: cyan, red, green, white.
c=('\033[1;36m')
r=('\033[1;31m')
g=('\033[1;32m')
w=('\033[1;37m')
print("""%s
█████████ Pencipta: Yoga Wira
█▄█████▄█ ●▬▬▬▬▬▬▬▬▬๑۩۩๑▬▬▬▬▬▬▬▬●
█▼▼▼▼▼\033[1;94m- _ --_--
\033[1;96m█
\033[1;96m█ \033[1;94m-_-_- -_ -_-_- \033[1;93mDark Spam Call%s
\033[1;96m█
\033[1;96m█▲▲▲▲▲ \033[1;94m-_ - _
\033[1;96m█████████ «°°°°°°°°°°✧°°°°°°°°°°»
\033[1;96m ██ ██
\033[1;95m¤°¤°¤°¤°¤°¤°¤°¤°¤°¤°¤°¤°¤°¤°¤°¤°¤°¤°¤°¤ ¤°¤°¤°¤°¤°¤°¤
%s\033[1;93mAuthor: Mr.BV%s
%s\033[1;93mFB: Yoga Wira%s
%s\033[1;93mGithub: Rahasia Jancok%s
%s\033[1;93mTEAM: Brother (B.VH/Brother_ID)%s
\033[1;95m¤°¤°¤°¤°¤°¤°¤°¤°¤°¤°¤°¤°¤°¤°¤°¤ ¤°¤°¤°¤°¤°¤°¤°¤°¤°¤°¤
%sMASUKAN NOMOR ANDA "62" (EX: 628XXXXXX)%s
¤NOTE¤ Jika Terjadi Eror Atau Bug Silahkan Cht Author"""%(c,r,g,r,g,r,g,r,g,r,w,r))
print("%s[*] Klik ENTER Untuk Melewati Step%s"%(g,g))
# Read up to three target phone numbers and the number of spam rounds.
no1 = input("[?] NOMOR TARGET 1 => %s"%(w))
no2 = input("%s[?] NOMOR TARGET 2 => %s"%(g,w))
no3 = input("%s[?] NOMOR TARGET 3 => %s"%(g,w))
jlmh=int(input("%s[?] JUMLAH SPAM => %s"%(g,w)))
# OTP payloads for the Grab phone endpoint; method CALL requests a voice call.
dt1={'method':'CALL','countryCode':'id','phoneNumber':no1,'templateID':'pax_android_production'}
dt2={'method':'CALL','countryCode':'id','phoneNumber':no2,'templateID':'pax_android_production'}
dt3={'method':'CALL','countryCode':'id','phoneNumber':no3,'templateID':'pax_android_production'}
try:
    print()
    print("%s[-] RESULT:%s"%(r,w))
    for i in range(jlmh):
        print("[!] PLEASE WAIT...")
        # A successful OTP request echoes a "challengeID" field back.
        idk=("challengeID")
        # One voice-call OTP request per target number.
        r1 = requests.post('https://api.grab.com/grabid/v1/phone/otp',data=dt1)
        r2 = requests.post('https://api.grab.com/grabid/v1/phone/otp',data=dt2)
        r3 = requests.post('https://api.grab.com/grabid/v1/phone/otp',data=dt3)
        # Report success/failure per target based on the response body.
        if str(idk) in str(r1.text):
            print("[+] TARGET1 BERHASIL")
        else:
            print("[-] TARGET1 GAGAL")
        if str(idk) in str(r2.text):
            print("[+] TARGET2 BERHASIL")
        else:
            print("[-] TARGET2 GAGAL")
        if str(idk) in str(r3.text):
            print("[+] TARGET3 BERHASIL")
        else:
            print("[-] TARGET3 GAGAL")
        print("="*30)
        time.sleep(1)
except KeyboardInterrupt:
    print("%ssampai jumpa gan..."%(c))
986,470 | c4071ca13f3a2bfbbb01f5fcee8d6f0ebd883b37 | import os, json
from selenium import webdriver
# Launch Firefox and load the free proxy listing page.
browser = webdriver.Firefox()
url = r'https://free-proxy-list.net'
browser.get(url)
# JS snippet that clicks the DataTables "next page" control.
click_next = r'proxylisttable_next.click()'
# Accumulates scraped proxies as ip -> port.
ip_list = {}
def find_elements(browser):
    """Return the ('even', 'odd') row elements of the proxy table."""
    evens = browser.find_elements_by_class_name('even')
    odds = browser.find_elements_by_class_name('odd')
    return evens, odds
def parse_elements(elements):
    # Extract the IP (first cell) and port (second cell) from each table
    # row and record them in the module-level ip_list dict (ip -> port).
    for elem in elements:
        td_tags = elem.find_elements_by_tag_name('td')
        ip = td_tags[0].text
        port = td_tags[1].text
        ip_list[ip] = port
def store_ip(browser, click_next):
    """Scrape one page of the proxy table into the module-level ip_list
    dict, then advance the table to its next page via JS.

    FIX: removed the dead `ip_list = []` local -- it was never used and
    misleadingly shadowed the module-level ip_list dict that
    parse_elements actually writes to.
    """
    even_elements, odd_elements = find_elements(browser)
    parse_elements(even_elements)
    parse_elements(odd_elements)
    browser.execute_script(click_next)
# Walk through 15 pages of the table, scraping each into ip_list.
for i in range(15):
    store_ip(browser, click_next)
browser.close()

# Persist the scraped proxies as JSON (ip -> port).
with open(os.path.join('data', 'proxies.json'), 'wt') as f:
    json.dump(ip_list, f, indent=4)
986,471 | 9f400c3d0486fdedde3302392519832133c6ebeb | import numpy as np
import matplotlib
import matplotlib.pyplot as plt
from random import randint
from time import time
def main():
    """Run a single birthday-paradox trial with n=4000 and print the result."""
    # birthday paradox
    print(birthday_paradox_A(4000))
    # birthday_paradox_B_plot_C()
def birthday_paradox_A(n):
    """Draw uniform ints from [1, n] until a repeat occurs; return the
    total number of draws (including the final, colliding one)."""
    numbers = set()
    while True:
        x = randint(1, n)
        if x in numbers:
            break
        numbers.add(x)
    # +1 accounts for the colliding draw that ended the loop.
    return len(numbers) + 1

def birthday_paradox_B(m, n):
    """Run m independent trials of birthday_paradox_A(n).

    Returns (x, y, trials): x is the list of distinct observed stopping
    points (with 1 added), y[i] is the empirical CDF evaluated at x[i],
    and trials is the raw list of stopping points.
    """
    trials = list()
    # BUG FIX: the original iterated range(1, m), running only m-1
    # trials while still dividing by m below, so the empirical CDF
    # could never reach 1.0. Run exactly m trials.
    for i in range(m):
        trials.append(birthday_paradox_A(n))
    x = set(trials)
    x.add(1)
    x = list(x)
    y = [sum(j <= i for j in trials)/m for i in x]
    return x, y, trials
def birthday_paradox_B_plot_C():
    """Run 300 trials with n=4000 and plot the empirical CDF of the
    stopping point (number of draws until the first repeat)."""
    m = 300
    n = 4000
    start = time()
    x,y,trials = birthday_paradox_B(m,n)
    print("trials = \n" + str(trials))
    print("Running time = "+ str(time()-start))
    plt.scatter(x,y,s=5,c='red')
    plt.xlim(0,max(x))
    plt.ylim(0,1)
    plt.title('Birthday paradox\'s cumulative density plot')
    plt.xlabel('number of trials k')
    plt.ylabel('fraction of succeeded experiment')
    plt.show()
    # Sample mean of the stopping point.
    print("Expected number of k = "+ str(sum(trials)/len(trials)))
def birthday_paradox_C():
    # Placeholder: part C has no separate implementation.
    pass
def loop_in_D(m, all_n):
    """Time birthday_paradox_B(m, n) for each n in all_n; return the
    elapsed seconds per n, in order."""
    timings = []
    for n in all_n:
        started = time()
        birthday_paradox_B(m, n)
        timings.append(time() - started)
    return timings
def birthday_paradox_D():
    """Time birthday_paradox_B over growing n for three trial counts m,
    then plot running time versus n."""
    # all_n = [4000*i for i in range(1,250)]
    all_n = [i for i in range(100000,1000000+1,100000)]
    # all_n[0] = 4000
    m = 300
    y_300 = loop_in_D(m,all_n)
    m = 5000
    y_5000 = loop_in_D(m,all_n)
    m = 10000
    y_10000 = loop_in_D(m,all_n)
    # plt.set_prop_cycle( cycler('color', colr_list) )
    # NOTE(review): set_color_cycle was removed in matplotlib 2.x; modern
    # code should use set_prop_cycle (as hinted above) -- confirm the
    # matplotlib version in use.
    plt.gca().set_color_cycle(['red', 'green', 'blue'])
    plt.plot(all_n,y_300)
    plt.plot(all_n,y_5000)
    plt.plot(all_n,y_10000)
    plt.title('Running Time Experiment')
    plt.xlabel('Value of n')
    plt.ylabel('Number of seconds')
    plt.legend(['m = 300', 'm = 5000', 'm = 10000'], loc='upper left')
    plt.show()
# Script entry point.
if __name__ == "__main__":
    main()
986,472 | 29866d116c0678cb36864efec04d868e24e7d9f1 | #a
a,b=map(int,input().split())
# Case 1: a already ends in 0 -- reuse/extend its trailing zeros so the
# result has at least b zeros at the end.
if a%10==0:
    a=str(a)
    c=0
    for i in range(len(a)-1,-1,-1):
        if a[i]=='0':
            c+=1
    # NOTE(review): the loop above counts EVERY zero digit, not just the
    # trailing run, because it never breaks on a non-zero digit -- verify
    # this is intended (e.g. a=1010 gives c=2).
    if b<=c:
        print(a)
    else:
        a=a[:-c]
        a=a+'0'*b
        print(a)
# Case 2: the last digit divides 10 (i.e. 1, 2 or 5) -- search multiples
# of 10**b for the first one divisible by a.
elif 10%(a%10)==0:
    no=int('1'+'0'*b)
    while True:
        if no%a==0:
            print(no)
            break
        else:
            no+=int('1'+'0'*b)
# Case 3: otherwise appending b zeros (a * 10**b) already works.
else:
    print(str(a)+b*'0')
|
986,473 | b1c3954aaabe29dd26ba5d59496ba698ad901b08 | import arcpy
arcpy.env.overwriteOutput = True # allow existing outputs to be overwritten
arcpy.env.workspace ="D:/ShapesPruebasSegmentacionUrbana"
# Table views: zone/AEU assignments and the AEU-to-block (manzana) table.
arcpy.MakeTableView_management("D:/ShapesPruebasSegmentacionUrbana/ShapesFinal/zona_aeu", "zona_aeu_x")
arcpy.MakeTableView_management("D:/ShapesPruebasSegmentacionUrbana/ShapesFinal/MZS_AEU.dbf", "aeu_manzana")
where_inicial = " UBIGEO=\'020601\' "
# For each (UBIGEO, ZONA, AEU) record: build WHERE clauses selecting its
# blocks, routes and dwellings, render the styled layers on the map
# template, and export one PDF sketch (croquis) per AEU.
with arcpy.arcpy.da.SearchCursor("zona_aeu_x", ['UBIGEO', 'ZONA','AEU'],where_inicial) as cursor1:
    for row1 in cursor1:
        where_segundo = ' "UBIGEO"=\'' +str(row1[0])+' \' AND "ZONA" =\''+str(row1[1])+'\' AND AEU='+str(row1[2])
        where_rutas =where_segundo
        # USOLOCAL 1 or 3: restrict the dwellings layer to those uses.
        where_viviendas = where_segundo+ ' AND ( USOLOCAL=1 OR USOLOCAL=3 ) '
        i=0
        # Accumulate OR-joined clauses over every block in this AEU.
        with arcpy.arcpy.da.SearchCursor("aeu_manzana", ['UBIGEO', 'ZONA', 'IDMANZANA','AEU'],where_segundo) as cursor3:
            for row3 in cursor3:
                if i==0:
                    where_temporal3 = ' "FIRST_TB_M"=\'' + str(row3[2]) + '\''
                    where_temporal=' "IDMANZANA"=\'' + str(row3[2]) + '\''
                    where_temporal2=' "MANZANA"=\''+str(row3[2][11:])+'\''
                else:
                    where_temporal = where_temporal+' OR "IDMANZANA"=\'' + str(row3[2]) + '\''
                    where_temporal2 = where_temporal2+' OR "MANZANA"=\''+str(row3[2][11:])+'\''
                i=i+1
        where = where_temporal
        print where
        # Single-block AEUs use the FIRST_TB_M clause; otherwise select by AEU.
        if i==1:
            where_mapa=where_temporal3
        else:
            where_mapa = ' "MZS_AEU_AE"=' + str(row1[2]) + ' AND "TB_MZS_TRA"=\''+str(row1[0])+'\' AND "TB_MZS_T_1"=\''+str(row1[1])+'\''
        print where_mapa
        where_rutas = where_rutas+ ' AND ('+where_temporal2+')'
        where_viviendas = where_viviendas+' AND ('+where_temporal2+')'
        #where_viviendas=' UBIGEO=\'021806\' AND "ZONA" =\'00800\' AND "MANZANA"=\'001O\' AND ( USOLOCAL=1 OR USOLOCAL=3 ) '
        mxd = arcpy.mapping.MapDocument(r"D:/ShapesPruebasSegmentacionUrbana/mxd/segmentacion5.mxd")
        df = arcpy.mapping.ListDataFrames(mxd, "Layers")[0]
        # Feature layers filtered to this AEU's blocks, routes and dwellings.
        arcpy.MakeFeatureLayer_management("D:/ShapesPruebasSegmentacionUrbana/Manzanas/TB_MZS_TRABAJO.shp", "manzanas",where)
        arcpy.MakeFeatureLayer_management("D:/ShapesPruebasSegmentacionUrbana/Rutas/TB_RUTAS.shp","rutas",where_rutas)
        arcpy.MakeFeatureLayer_management(r"D:/ShapesPruebasSegmentacionUrbana/Viviendas/TB_VIVIENDAS_U_TRABAJO.shp", "viviendas",where_viviendas)
        #arcpy.SelectLayerByAttribute_management("rutas_temporal", "NEW_SELECTION", where)
        #arcpy.SelectLayerByAttribute_management("viviendas_temporal", "NEW_SELECTION", where_viviendas)
        #arcpy.SelectLayerByAttribute_management("manzanas_temporal", "NEW_SELECTION", where)
        lyrFile1 = arcpy.mapping.Layer("rutas")
        lyrFile2 = arcpy.mapping.Layer("viviendas")
        lyrFile3 = arcpy.mapping.Layer("manzanas")
        # Apply the standard symbology to each generated layer.
        arcpy.ApplySymbologyFromLayer_management(lyrFile1,
                                                 "D:/ShapesPruebasSegmentacionUrbana/Layers/rutas_final.lyr")
        arcpy.ApplySymbologyFromLayer_management(lyrFile2,
                                                 "D:/ShapesPruebasSegmentacionUrbana/Layers/vivienda_final.lyr")
        arcpy.ApplySymbologyFromLayer_management(lyrFile3,
                                                 "D:/ShapesPruebasSegmentacionUrbana/Layers/manzana_final.lyr")
        arcpy.mapping.AddLayer(df, lyrFile1)
        arcpy.RefreshActiveView()
        # Label dwellings with their order number within the AEU.
        if lyrFile2.supports("LABELCLASSES"):
            for lblclass in lyrFile2.labelClasses:
                # lblclass.className = "[ORDEN]"
                lblclass.expression = "[OR_VIV_AEU]"
                lblclass.showClassLabels = True
        lyrFile2.showLabels = True
        arcpy.RefreshActiveView()
        arcpy.mapping.AddLayer(df, lyrFile2)
        arcpy.RefreshActiveView()
        arcpy.mapping.AddLayer(df, lyrFile3)
        arcpy.RefreshActiveView()
        # for el in arcpy.mapping.ListDataFrames(mxd):
        #     print el.name
        # for el2 in arcpy.mapping.ListLayers(mxd):
        #     print el2.name
        # Use data-driven pages to export one PDF per selected index page.
        ddp = mxd.dataDrivenPages
        indexLayer = ddp.indexLayer
        arcpy.SelectLayerByAttribute_management(indexLayer, "NEW_SELECTION", where_mapa)
        for indexPage in ddp.selectedPages:
            ddp.currentPageID = indexPage
            ddp.exportToPDF(r"D:/ShapesPruebasSegmentacionUrbana/Croquis/Croquis"+str(row1[0])+str(row1[1])+str(row1[2])+".pdf", "CURRENT")
        #arcpy.mapping.ExportToPNG(mxd,"D:/ShapesPruebasSegmentacionUrbana/Croquis/CroquiseEJEMPLO.png")
        # arcpy.mapping.PrintMap(mxd, r"\\pincullo\CANONiR4051-OEDS", df)
        # Remove the per-AEU layers and release the map document before
        # the next iteration.
        arcpy.mapping.RemoveLayer(df, lyrFile1)
        arcpy.mapping.RemoveLayer(df, lyrFile2)
        arcpy.mapping.RemoveLayer(df, lyrFile3)
        # arcpy.mapping.RemoveLayer(df,lyrFile3)
        del mxd
        del df
|
986,474 | 4b39cfd9888e984c49bc5bd5977661d0795c66cd | '''
Сколько совпадает чисел
Даны три целых числа. Определите, сколько среди них совпадающих.
Программа должна вывести одно из чисел: 3 (если все совпадают),
2 (если два совпадает) или 0 (если все числа различны).
'''
a, b, c = int(input()), int(input()), int(input())
# Map the number of distinct values to the answer:
# 1 distinct -> all three coincide (3); 2 distinct -> one pair (2);
# 3 distinct -> no coincidences (0).
distinct = len({a, b, c})
if distinct == 1:
    result = 3
elif distinct == 2:
    result = 2
else:
    result = 0
print(result)
|
986,475 | b7bc776cf5dbd68b81add53c44c11480494dbccb | #!/usr/bin/python
import json
import requests
from requests.exceptions import MissingSchema
"""
Stand alone module with all the functionality to grab
geolocation data from the http://api.sba.gov site and
write it into a file.
"""
def read_url(url):
    """
    Tries to open a url, read its response and return it. If there is a
    problem connecting to the url or if there is a non-successful status
    code returned from the server, an error in JSON is returned.
    :param url: str
    :return: str
    """
    try:
        response = requests.get(url)
    except requests.ConnectionError:
        content = '{"error": "Bad Connection"}'
    except MissingSchema:  # The url does not exist / is malformed
        content = '{"error": "Bad Url"}'
    else:
        if response.status_code == 200:
            content = response.text
        else:
            # Propagate the server's reason phrase as the error message.
            content = '{"error": "' + response.reason + '"}'
    # NOTE(review): other requests exceptions (Timeout, TooManyRedirects,
    # ...) propagate uncaught -- confirm that is intended.
    return content
def pretty_print_content(content):
    """Re-serialize *content* (a JSON string) with sorted keys and
    4-space indentation.

    If *content* is not valid JSON, a JSON-encoded error message is
    returned instead.

    :param content: str
    :return: str
    """
    error_response = '{"error": "Invalid JSON"}'
    try:
        document = json.loads(content)
    except ValueError:
        return error_response
    return json.dumps(document, sort_keys=True, indent=4,
                      separators=(',', ': '))
def get_contents_of_urls(urls):
    """Fetch every URL in *urls*, parse each response as JSON and
    concatenate the parsed lists into one combined list.

    :param urls: list
    :return: list
    """
    combined = []
    for address in urls:
        raw = read_url(address)
        combined.extend(json.loads(raw))
    return combined
def write_urls_to_file(urls, file_name):
    """
    Reads the content found at each url in the list and
    writes it into a file.
    :param urls: list
    :param file_name: str
    :return:
    """
    with open(file_name, 'w') as file_handler:
        for url in urls:
            content = read_url(url)
            pretty_content = pretty_print_content(content)
            # Pretty-printed blobs are concatenated with no separator.
            file_handler.write(pretty_content)
# Main program to run on command line.
if __name__ == "__main__":
    # Two-letter codes for all 50 states plus DC.
    states = [
        "AL", "AK", "AZ", "AR", "CA", "CO", "CT", "DC", "DE", "FL", "GA",
        "HI", "ID", "IL", "IN", "IA", "KS", "KY", "LA", "ME", "MD",
        "MA", "MI", "MN", "MS", "MO", "MT", "NE", "NV", "NH", "NJ",
        "NM", "NY", "NC", "ND", "OH", "OK", "OR", "PA", "RI", "SC",
        "SD", "TN", "TX", "UT", "VT", "VA", "WA", "WV", "WI", "WY"
    ]
    base_url = "http://api.sba.gov/geodata/city_county_links_for_state_of/"
    # One geodata URL per state, fetched and written into one local file.
    state_urls = [base_url + state + '.json' for state in states]
    states_file_name = 'states_data.txt'
    write_urls_to_file(state_urls, states_file_name)
986,476 | abdb59ea3afe450c12cb7748ba84cc121d7a8440 | import os
base_dir = '/mac/okehara/dev/keras_teset'
# Train/test/validation image directories under the project root.
train_dir = os.path.join(base_dir, 'train')
test_dir = os.path.join(base_dir, 'test')
validation_dir = os.path.join(base_dir, 'validation')
# from keras import layers
# from keras import models
#
# model = models.Sequential()
# model.add(layers.Conv2D(filters=32,
# kernel_size=(3, 3),
# activation='relu',
# input_shape=(150, 150, 3)))
# model.add(layers.MaxPool2D((2,2)))
# model.add(layers.Conv2D(64,
# (3,3),
# activation='relu'))
# model.add(layers.MaxPool2D((2,2)))
# model.add(layers.Conv2D(128,
# (3,3),
# activation='relu'))
# model.add(layers.MaxPool2D((2,2)))
# model.add(layers.Conv2D(128,
# (3,3),
# activation='relu'))
# model.add(layers.MaxPool2D((2,2)))
# model.add(layers.Flatten())
# model.add(layers.Dropout(0.5))
# model.add(layers.Dense(512, activation='relu'))
# model.add(layers.Dense(1, activation='sigmoid'))
#
#
# print(model.summary())
#
# from keras import optimizers
#
# model.compile(loss='binary_crossentropy',
# optimizer=optimizers.RMSprop(lr=1e-4),
# metrics=['acc'])
# model.save('cats_and_dogs_small_2.h5')
from keras import layers
from keras import models
# Load the previously trained CNN from disk and show its architecture.
model = models.load_model('cats_and_dogs_small_2.h5')
print(model.summary())
from keras.preprocessing import image
import numpy as np
img_path = 'test/cats/cat.1700.jpg'
# Load one test image, convert it to a batched float tensor
# (1, 150, 150, channels) and scale pixel values into [0, 1].
img = image.load_img(img_path,
                     target_size=(150, 150))
img_tensor = image.img_to_array(img)
img_tensor = np.expand_dims(img_tensor, axis=0)
img_tensor /= 255.
print(img_tensor.shape)
import matplotlib.pyplot as plt
# plt.imshow(img_tensor[0])
# plt.show()
# Build a model mapping the input image to the activations of the first
# eight layers, then run the prepared test image through it.
layer_outputs = [layer.output for layer in model.layers[:8]]
activation_model = models.Model(inputs=model.input, outputs=layer_outputs)
activations = activation_model.predict(img_tensor)
# first_layer_activation = activations[0]
# print(first_layer_activation.shape)
# plt.matshow(first_layer_activation[0, :, :, 7], cmap='viridis')
# plt.show()
# visualize activation
# Collect the names of the layers being visualized (for plot titles).
layer_names = []
for layer in model.layers[:8]:
    layer_names.append(layer.name)
image_per_row = 16
# For each layer, tile its feature maps into a grid of image_per_row
# columns and display the grid as a single image.
for layer_name, layer_activation in zip(layer_names, activations):
    print(layer_activation.shape)
    n_features = layer_activation.shape[-1]  # number of channels
    size = layer_activation.shape[1]         # feature-map side length
    n_cols = n_features//image_per_row
    display_grid = np.zeros((size*n_cols, image_per_row*size))
    for col in range(n_cols):
        for row in range(image_per_row):
            channel_image = layer_activation[0,
                                             :, :,
                                             col*image_per_row+row]
            # Normalize each channel to mean 128 / std 64 and clip into
            # displayable 8-bit range.
            channel_image -= channel_image.mean()
            channel_image /= channel_image.std()
            channel_image *= 64
            channel_image += 128
            channel_image = np.clip(channel_image, 0, 255).astype('uint8')
            display_grid[col*size:(col+1)*size,
                         row*size:(row+1)*size] = channel_image
    # Scale the figure so each feature map occupies one "unit".
    scale = 1./size
    plt.figure(figsize=(scale*display_grid.shape[1],
                        scale*display_grid.shape[0]))
    plt.title(layer_name)
    plt.grid(False)
    plt.imshow(display_grid, aspect='auto', cmap='viridis')
    plt.show()
986,477 | b240ca220aaa866d44f20e6a6b10824cc8cacf17 | # gearman-collectd-plugin - gearmand_info.py
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; only version 2 of the License is applicable.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors:
# Garret Heaton <powdahound at gmail.com>
#
# About this plugin:
# This plugin uses collectd's Python plugin to record Gearmand information.
#
# collectd:
# http://collectd.org
# Gearman:
# http://gearman.org
# collectd-python:
# http://collectd.org/documentation/manpages/collectd-python.5.shtml
import collectd
import socket
# Host to connect to. Override in config by specifying 'Host'.
GEARMAND_HOST = 'localhost'
# Port to connect on. Override in config by specifying 'Port'.
GEARMAND_PORT = 4730
# Verbose logging on/off. Override in config by specifying 'Verbose'.
VERBOSE_LOGGING = False
def fetch_status():
    """Connect to Gearmand server and request status.

    Returns a dict mapping function name -> {'total', 'running',
    'available_workers'} (all values as strings), or None when the
    connection fails.
    """
    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect((GEARMAND_HOST, GEARMAND_PORT))
        log_verbose('Connected to Gearmand at %s:%s' % (GEARMAND_HOST, GEARMAND_PORT))
    except socket.error, e:
        collectd.error('gearmand_info plugin: Error connecting to %s:%d - %r'
                       % (GEARMAND_HOST, GEARMAND_PORT, e))
        return None
    fp = s.makefile('r')
    log_verbose('Sending info command')
    # Admin protocol: send "status"; each reply line is
    # "<function>\t<total>\t<running>\t<available_workers>", terminated
    # by a single "." line.
    s.sendall('status\r\n')
    status = {}
    while True:
        data = fp.readline().strip()
        log_verbose('Received data: %r' % data)
        if not data or data == '.':
            break
        function, total, running, available_workers = data.split('\t')
        status[function] = {
            'total': total,
            'running': running,
            'available_workers': available_workers}
    s.close()
    return status
def configure_callback(conf):
    """Receive configuration block and update the module-level settings."""
    global GEARMAND_HOST, GEARMAND_PORT, VERBOSE_LOGGING
    for node in conf.children:
        if node.key == 'Host':
            GEARMAND_HOST = node.values[0]
        elif node.key == 'Port':
            GEARMAND_PORT = int(node.values[0])
        elif node.key == 'Verbose':
            # NOTE(review): bool() on any non-empty string is True;
            # assumes collectd delivers this value as a real boolean.
            VERBOSE_LOGGING = bool(node.values[0])
        else:
            collectd.warning('gearmand_info plugin: Unknown config key: %s.'
                             % node.key)
    log_verbose('Configured with host=%s, port=%s' % (GEARMAND_HOST, GEARMAND_PORT))
def dispatch_value(info, key, type, type_instance=None):
    """Read a key from info response data and dispatch a value.

    Warns (without raising) when the key is absent; type_instance
    defaults to the key name.
    """
    if key not in info:
        collectd.warning('gearmand_info plugin: Info key not found: %s' % key)
        return
    if not type_instance:
        type_instance = key
    value = int(info[key])
    log_verbose('Sending value: %s=%s' % (type_instance, value))
    # Emit the metric through collectd's value-dispatch mechanism.
    val = collectd.Values(plugin='gearmand')
    val.type = type
    val.type_instance = type_instance
    val.values = [value]
    val.dispatch()
def read_callback():
    """Collectd read callback: poll Gearmand and dispatch per-function gauges."""
    log_verbose('Read callback called')
    status = fetch_status()
    if not status:
        collectd.error('gearmand_info plugin: No status received')
        return
    # function stats
    for function, info in status.items():
        dispatch_value(info, 'total', 'gauge', "func-%s-total" % function)
        dispatch_value(info, 'running', 'gauge', "func-%s-running" % function)
        dispatch_value(info, 'available_workers', 'gauge',
                       "func-%s-available_workers" % function)
def log_verbose(msg):
    # Forward msg to collectd.info only while verbose logging is enabled.
    if VERBOSE_LOGGING:
        collectd.info('gearmand_info plugin [verbose]: %s' % msg)
# register callbacks
collectd.register_config(configure_callback)
collectd.register_read(read_callback)
|
986,478 | e0cf5e6ab0fc4d76e0d47d2d8072babe6cf5c54b | # -*- coding: utf-8 -*-
# ===========================================================================
# The waveform and spectrogram plot adapted from:
# [librosa](https://github.com/bmcfee/librosa)
# Copyright (c) 2016, librosa development team.
# Modified work Copyright 2016-2017 TrungNT
# ===========================================================================
from __future__ import absolute_import, division, print_function
import colorsys
import copy
import io
import itertools
import os
import sys
import warnings
from collections import Mapping, OrderedDict, defaultdict
from contextlib import contextmanager
from numbers import Number
from typing import List, Optional, Tuple, Union
import numpy as np
from odin.utils import as_tuple
from odin.visual.heatmap_plot import *
from odin.visual.histogram_plot import *
from odin.visual.plot_utils import *
from odin.visual.scatter_plot import *
from odin.visual.stats_plot import *
from scipy import stats
from six import string_types
from six.moves import range, zip
try:
import matplotlib as mpl
import seaborn # import seaborn for pretty plot
import tensorflow as tf
from matplotlib import pyplot as plt
except ImportError:
seaborn = None
mpl = None
plt = None
tf = None
# ===========================================================================
# Helper for spectrogram
# ===========================================================================
def time_ticks(locs, *args, **kwargs):  # pylint: disable=star-args
  r'''Plot time-formatted axis ticks.

  Parameters
  ----------
  locs : np.ndarray
      Either the tick locations themselves, or -- when a second
      positional argument is given -- the tick locations with
      ``args[0]`` holding the corresponding time-stamps.
  n_ticks : int > 0 or None
      Show this number of ticks (evenly spaced).
      If None, all ticks are displayed.
      Default: 5
  axis : 'x' or 'y'
      Which axis should the ticks be plotted on?
      Default: 'x'
  time_fmt : None or {'ms', 's', 'm', 'h'}
      - 'ms': milliseconds  (eg, 241ms)
      - 's': seconds  (eg, 1.43s)
      - 'm': minutes  (eg, 1:02)
      - 'h': hours  (eg, 1:02:03)
      If None, the format is selected automatically from the range of
      the time values.
      Default: None
  kwargs : additional keyword arguments.
      See `matplotlib.pyplot.xticks` or `yticks` for details.

  Returns
  -------
  locs
  labels
      Locations and labels of tick marks

  See Also
  --------
  matplotlib.pyplot.xticks
  matplotlib.pyplot.yticks

  Examples
  --------
  >>> # Tick at pre-computed beat times
  >>> librosa.display.specshow(S)
  >>> librosa.display.time_ticks(beat_times)
  >>> # Set the locations of the time stamps
  >>> librosa.display.time_ticks(locations, timestamps)
  >>> # Format in seconds
  >>> librosa.display.time_ticks(beat_times, time_fmt='s')
  >>> # Tick along the y axis
  >>> librosa.display.time_ticks(beat_times, axis='y')
  '''
  n_ticks = kwargs.pop('n_ticks', 5)
  axis = kwargs.pop('axis', 'x')
  time_fmt = kwargs.pop('time_fmt', None)

  if axis == 'x':
    ticker = plt.xticks
  elif axis == 'y':
    ticker = plt.yticks
  else:
    raise ValueError("axis must be either 'x' or 'y'.")

  if len(args) > 0:
    # Explicit time-stamps were provided; `locs` are the positions.
    times = args[0]
  else:
    # Only time-stamps were given; positions are their indices.
    times = locs
    locs = np.arange(len(times))

  if n_ticks is not None:
    # Slice the locations and labels evenly between 0 and the last point
    positions = np.linspace(0, len(locs) - 1, n_ticks,
                            endpoint=True).astype(int)
    locs = locs[positions]
    # NOTE(review): fancy indexing requires `times` (and `locs`) to be
    # ndarrays; plain lists would raise TypeError here -- confirm callers.
    times = times[positions]

  # Format the labels by time
  formats = {
      'ms':
          lambda t: '{:d}ms'.format(int(1e3 * t)),
      's':
          '{:0.2f}s'.format,
      'm':
          lambda t: '{:d}:{:02d}'.format(int(t / 6e1), int(np.mod(t, 6e1))),
      'h':
          lambda t: '{:d}:{:02d}:{:02d}'.format(int(
              t / 3.6e3), int(np.mod(t / 6e1, 6e1)), int(np.mod(t, 6e1)))
  }

  if time_fmt is None:
    # Pick the coarsest unit that keeps the largest value readable.
    if max(times) > 3.6e3:
      time_fmt = 'h'
    elif max(times) > 6e1:
      time_fmt = 'm'
    elif max(times) > 1.0:
      time_fmt = 's'
    else:
      time_fmt = 'ms'
  elif time_fmt not in formats:
    raise ValueError('Invalid format: {:s}'.format(time_fmt))

  times = [formats[time_fmt](t) for t in times]

  return ticker(locs, times, **kwargs)
def _cmap(data):
  r'''Get a default colormap from the given data.

  If the data is boolean, use a black and white colormap.
  If the data has both positive and negative values,
  use a diverging colormap ('coolwarm').
  Otherwise, use a sequential map: either cubehelix or 'OrRd'.

  Parameters
  ----------
  data : np.ndarray
      Input data

  Returns
  -------
  cmap : matplotlib.colors.Colormap
      - If `data` has dtype=boolean, `cmap` is 'gray_r'
      - If `data` has only positive or only negative values,
        `cmap` is 'OrRd' (no seaborn) or a cubehelix palette
      - If `data` has both positive and negatives, `cmap` is 'coolwarm'

  See Also
  --------
  matplotlib.pyplot.colormaps
  seaborn.cubehelix_palette
  '''
  _HAS_SEABORN = False
  try:
    # Importing seaborn mutates matplotlib's rcParams; snapshot and
    # restore them so the import has no global side effect.
    _matplotlibrc = copy.deepcopy(mpl.rcParams)
    import seaborn as sns
    _HAS_SEABORN = True
    mpl.rcParams.update(**_matplotlibrc)
  except ImportError:
    pass

  data = np.atleast_1d(data)

  if data.dtype == 'bool':
    return plt.get_cmap('gray_r')

  data = data[np.isfinite(data)]

  # Use robust (2nd/98th percentile) bounds to judge the data's sign,
  # so a few outliers don't flip the colormap choice.
  robust = True
  if robust:
    min_p, max_p = 2, 98
  else:
    min_p, max_p = 0, 100

  max_val = np.percentile(data, max_p)
  min_val = np.percentile(data, min_p)

  # Single-signed data gets a sequential map; mixed signs a diverging one.
  if min_val >= 0 or max_val <= 0:
    if _HAS_SEABORN:
      return sns.cubehelix_palette(light=1.0, as_cmap=True)
    else:
      return plt.get_cmap('OrRd')

  return plt.get_cmap('coolwarm')
# ===========================================================================
# Plotting methods
# ===========================================================================
@contextmanager
def figure(nrow=8, ncol=8, dpi=180, show=False, tight_layout=True, title=''):
  """Context manager yielding a new matplotlib figure; on exit it sets
  the suptitle and optionally shows the figure."""
  # Scale the grid size (in "boxes") to a figure size in inches.
  inches_for_box = 2.4
  if nrow != ncol:
    # NOTE(review): this branch looks buggy -- nrow is overwritten from
    # ncol, and ncol is then derived from the NEW nrow (scaled twice),
    # so the caller's nrow is discarded entirely. Confirm the intended
    # behavior for non-square grids.
    nrow = inches_for_box * ncol
    ncol = inches_for_box * nrow
  else:
    nrow = inches_for_box * nrow
    ncol = inches_for_box * ncol
  nrow += 1.2  # for the title
  fig = plt.figure(figsize=(ncol, nrow), dpi=dpi)
  yield fig
  plt.suptitle(title)
  if show:
    plot_show(block=True, tight_layout=tight_layout)
def merge_figures(nrow, ncol):
  # Placeholder: merging multiple figures is not implemented yet.
  pass
def fig2data(fig):
  """Render *fig* and return its pixels as a uint8 numpy array.

  The canvas is drawn, its ARGB byte buffer is wrapped (zero-copy) in a
  numpy array, reshaped, and the channel axis rolled by 3 so the output
  channel order is RGBA.

  NOTE(review): the (w, h, 4) shape of the original is preserved;
  matplotlib renders row-major (height first), so confirm the intended
  orientation for non-square figures.
  """
  fig.canvas.draw()
  w, h = fig.canvas.get_width_height()
  # FIX: np.fromstring is deprecated for binary data; np.frombuffer is
  # the documented, behavior-identical (and copy-free) replacement.
  buf = np.frombuffer(fig.canvas.tostring_argb(), dtype=np.uint8)
  buf = buf.reshape(w, h, 4)
  # ARGB -> RGBA: shift channels so alpha ends up last.
  buf = np.roll(buf, 3, axis=2)
  return buf
def data2fig(data):
  """Create a new figure displaying *data* via plt.imshow; return the figure."""
  fig = plt.figure()
  plt.imshow(data)
  return fig
def plot_figure(nrows=8, ncols=8, dpi=180):
  # Create a figure of ncols x nrows inches at the given DPI.
  fig = plt.figure(figsize=(ncols, nrows), dpi=dpi)
  return fig
def plot_title(title, fontsize=12):
  # Set the figure-level suptitle (title is coerced to str).
  plt.suptitle(str(title), fontsize=fontsize)
def subplot(*arg, **kwargs):
  """Create a subplot via plt.subplot; an optional 'title' keyword sets
  the subplot title."""
  ax = plt.subplot(*arg)
  if 'title' in kwargs:
    ax.set_title(kwargs['title'])
  return ax
def plot_frame(ax=None, left=None, right=None, top=None, bottom=None):
  """ Turn on, off the frame (i.e. the bounding box of an axis) """
  ax = to_axis(ax)
  # Toggle each spine only when its flag was explicitly provided.
  for side, flag in (('top', top), ('right', right),
                     ('bottom', bottom), ('left', left)):
    if flag is not None:
      ax.spines[side].set_visible(bool(flag))
  return ax
def plot_aspect(aspect=None, adjustable=None, ax=None):
  r"""Set the data aspect ratio of an axis.

  aspect : {'auto', 'equal'} or num
    'auto' automatic; fill the position rectangle with data
    'equal' same scaling from data to plot units for x and y
    num a circle will be stretched such that the height is num times
    the width. aspect=1 is the same as aspect='equal'.
  adjustable : None or {'box', 'datalim'}, optional
    If not None, this defines which parameter will be adjusted to
    meet the required aspect. See set_adjustable for further details.
  """
  ax = to_axis(ax)
  if aspect is not None and adjustable is None:
    # NOTE(review): `ax.axis(...)` only accepts the string forms ('auto',
    # 'equal', ...); numeric aspects reach `set_aspect` only when
    # `adjustable` is supplied -- confirm this is the intended routing.
    ax.axis(aspect)
  else:
    ax.set_aspect(aspect, adjustable)
  return ax
@contextmanager
def plot_gridSpec(nrow, ncol, wspace=None, hspace=None):
  """Context manager yielding a `plt.GridSpec` of `nrow` x `ncol` cells.

  Example
  -------
  grid = plt.GridSpec(2, 3, wspace=0.4, hspace=0.3)
  plt.subplot(grid[0, 0])
  plt.subplot(grid[0, 1:])
  plt.subplot(grid[1, :2])
  plt.subplot(grid[1, 2])
  """
  yield plt.GridSpec(nrows=nrow, ncols=ncol, wspace=wspace, hspace=hspace)
def plot_gridSubplot(shape, loc, colspan=1, rowspan=1):
  """Shortcut for `plt.subplot2grid`.

  Example
  -------
  ax1 = plt.subplot2grid((3, 3), (0, 0))
  ax2 = plt.subplot2grid((3, 3), (0, 1), colspan=2)
  ax3 = plt.subplot2grid((3, 3), (1, 0), colspan=2, rowspan=2)
  ax4 = plt.subplot2grid((3, 3), (1, 2), rowspan=2)
  """
  return plt.subplot2grid(shape, loc, colspan=colspan, rowspan=rowspan)
def plot_subplot(*args):
  """Thin wrapper around `plt.subplot`; forwards all positional arguments."""
  return plt.subplot(*args)
def set_labels(ax, title=None, xlabel=None, ylabel=None):
  """Apply any of title/xlabel/ylabel to `ax`, skipping those left as None."""
  for setter, value in ((ax.set_title, title),
                        (ax.set_xlabel, xlabel),
                        (ax.set_ylabel, ylabel)):
    if value is not None:
      setter(value)
def plot_vline(x, ymin=0., ymax=1., color='r', ax=None):
  """Draw a vertical line at `x` on `ax` (current axis when None)."""
  if ax is None:
    ax = plt.gca()
  ax.axvline(x=x, ymin=ymin, ymax=ymax, color=color, linewidth=1, alpha=0.6)
  return ax
def plot_comparison_track(Xs,
                          legends,
                          tick_labels,
                          line_colors=None,
                          line_styles=None,
                          linewidth=1.,
                          marker_size=33,
                          marker_styles=None,
                          fontsize=10,
                          draw_label=True,
                          title=None):
  """ Plot multiple series for comparison

  Parameters
  ----------
  Xs : list (tuple) of series
    the list that contain list of data points
  legends : list of string
    name for each series
  tick_labels : list of string
    name for each data points
  line_colors : None or list
    optional per-series line colors
  line_styles : None or list
    optional per-series line styles (default '--')
  draw_label : bool
    if True, drawing text of actual value of each point on top of it

  Raises
  ------
  ValueError
    if the number of series, legends, or tick labels disagree
  """
  if len(Xs) != len(legends):
    raise ValueError("Number of series (len(Xs)) is: %d different from "
                     "number of legends: %d" % (len(Xs), len(legends)))
  nb_series = len(Xs)
  if len(Xs[0]) != len(tick_labels):
    # BUGFIX: the original wrote `len(Xs[0], len(tick_labels))` -- a
    # misplaced parenthesis that made this error path raise a TypeError
    # instead of the intended ValueError message.
    raise ValueError("Number of points for each series is: %d different from "
                     "number of xticks' labels: %d" %
                     (len(Xs[0]), len(tick_labels)))
  nb_points = len(Xs[0])
  # ====== some default styles ====== #
  default_marker_styles = ['o', '^', 's', '*', '+', 'X', '|', 'D', 'H', '8']
  if marker_styles is None and nb_series <= len(default_marker_styles):
    marker_styles = default_marker_styles[:nb_series]
  # ====== init ====== #
  point_colors = []
  inited = False
  handlers = []
  # ====== plotting ====== #
  for idx, X in enumerate(Xs):
    kwargs = {}
    if line_colors is not None:
      kwargs['color'] = line_colors[idx]
    if line_styles is not None:
      kwargs['linestyle'] = line_styles[idx]
    else:
      kwargs['linestyle'] = '--'
    # lines
    handlers.append(plt.plot(X, linewidth=linewidth, **kwargs)[0])
    # points
    ax = plt.gca()
    for i, j in enumerate(X):
      style = 'o' if marker_styles is None else marker_styles[idx]
      if not inited:
        # first series: remember the color assigned to each point so the
        # same point position in later series reuses it
        p = plt.scatter(i, j, s=marker_size, marker=style)
        point_colors.append(p.get_facecolor()[0])
      else:
        p = plt.scatter(i,
                        j,
                        s=marker_size,
                        marker=style,
                        color=point_colors[i])
      if draw_label:
        ax.text(i,
                1.01 * j,
                s=str(j),
                ha='center',
                va='bottom',
                fontsize=fontsize)
    inited = True
  # ====== legends and tick labels ====== #
  plt.gca().set_xticks(np.arange(len(tick_labels)))
  plt.gca().set_xticklabels(tick_labels, rotation=-60, fontsize=fontsize)
  plt.legend(handlers,
             legends,
             bbox_to_anchor=(1.05, 1),
             loc=2,
             borderaxespad=0.,
             fontsize=fontsize)
  if title is not None:
    plt.suptitle(title)
def plot_gaussian_mixture(x,
                          gmm,
                          bins=80,
                          fontsize=12,
                          linewidth=2,
                          show_pdf=False,
                          show_probability=False,
                          show_components=True,
                          legend=True,
                          ax=None,
                          title=None):
  """Plot a histogram of 1-D data `x` overlaid with a fitted 1-D
  `sklearn.mixture.GaussianMixture`.

  Parameters
  ----------
  x : np.ndarray
      the data; flattened before plotting.
  gmm : fitted GaussianMixture with ``gmm.means_.shape[1] == 1``.
  bins : int
      number of histogram bins (overlay curves use 12x more points).
  show_pdf : bool
      draw the mixture likelihood curve, rescaled to histogram counts.
  show_probability : bool
      draw each component's posterior probability on a twin axis.
  show_components : bool
      draw each component's Gaussian PDF on a twin axis.
  legend : bool
      show legends on both axes.

  Returns
  -------
  matplotlib Axes
  """
  import seaborn as sns
  from odin.utils import as_tuple, catch_warnings_ignore
  from scipy import stats
  from sklearn.mixture import GaussianMixture
  ax = to_axis(ax, is_3D=False)
  n_points = int(bins * 12)
  assert gmm.means_.shape[1] == 1, "Only support plotting 1-D series GMM"
  x = x.ravel()
  # sort components by mean so colors are assigned left-to-right
  order = np.argsort(gmm.means_.ravel())
  means_ = gmm.means_.ravel()[order]
  precision_ = gmm.precisions_.ravel()[order]
  colors = sns.color_palette(n_colors=gmm.n_components + 2)
  # ====== Histogram ====== #
  count, bins = plot_histogram(x=x,
                               bins=int(bins),
                               ax=ax,
                               normalize=False,
                               kde=False,
                               range_0_1=False,
                               covariance_factor=0.25,
                               centerlize=False,
                               fontsize=fontsize,
                               alpha=0.25,
                               title=title)
  ax.set_ylabel("Histogram Count", fontsize=fontsize)
  ax.set_xlim((np.min(x), np.max(x)))
  ax.set_xticks(
      np.linspace(start=np.min(x), stop=np.max(x), num=5, dtype='float32'))
  ax.set_yticks(
      np.linspace(start=np.min(count), stop=np.max(count), num=5,
                  dtype='int32'))
  # ====== GMM PDF ====== #
  x_ = np.linspace(np.min(bins), np.max(bins), n_points)
  y_ = np.exp(gmm.score_samples(x_[:, np.newaxis]))
  # rescale likelihood into the histogram's count range for overlay
  y_ = (y_ - np.min(y_)) / (np.max(y_) - np.min(y_)) * np.max(count)
  if show_pdf:
    ax.plot(x_,
            y_,
            color='red',
            linestyle='-',
            linewidth=linewidth * 1.2,
            alpha=0.6,
            label="GMM log-likelihood")
  # ====== GMM probability ====== #
  twinx = None
  ymax = 0.0
  if show_probability:
    if twinx is None:
      twinx = ax.twinx()
    y_ = gmm.predict_proba(x_[:, np.newaxis])
    for idx, (c, j) in enumerate(zip(colors, y_.T)):
      twinx.plot(x_,
                 j,
                 color=c,
                 linestyle='--',
                 linewidth=linewidth,
                 alpha=0.8,
                 label=r"$p_{\#%d}(x)$" % idx)
    ymax = max(ymax, np.max(y_))
  # ====== draw the each Gaussian bell ====== #
  if show_components:
    if twinx is None:
      twinx = ax.twinx()
    for idx, (c, m, p) in enumerate(zip(colors, means_, precision_)):
      with catch_warnings_ignore(Warning):
        # precision -> stddev: sigma = sqrt(1 / precision)
        j = stats.norm.pdf(x_, m, np.sqrt(1 / p))
      twinx.plot(x_,
                 j,
                 color=c,
                 linestyle='-',
                 linewidth=linewidth,
                 label=r"$PDF_{\#%d}$" % idx)
      # mean, top of the bell
      twinx.scatter(x_[np.argmax(j)],
                    np.max(j),
                    s=88,
                    alpha=0.8,
                    linewidth=0,
                    color=c)
      ymax = max(ymax, np.max(j))
    twinx.set_ylabel("Probability Density", fontsize=fontsize)
    twinx.grid(False)
  # set the limit for twinx
  if twinx is not None:
    twinx.set_ylim(0.0, ymax * 1.05)
  # ====== show legend ====== #
  if twinx is not None:
    # color the twin axis to match the first component
    twinx.yaxis.label.set_color(colors[0])
    twinx.tick_params(axis='y', colors=colors[0])
  if legend:
    ax.legend(fontsize=fontsize)
    if twinx is not None:
      twinx.legend(fontsize=fontsize)
  return ax
def plot(x, y=None, ax=None, color='b', lw=1, **kwargs):
  '''Plot the amplitude envelope of a waveform.
  '''
  if ax is None:
    ax = plt.gca()
  args = (x,) if y is None else (x, y)
  ax.plot(*args, c=color, lw=lw, **kwargs)
  return ax
def plot_ellipses(mean, sigma, color, alpha=0.75, ax=None):
  """ Plot an ellipse in 2-D
  If the data is more than 2-D, you can use PCA before
  fitting the GMM.

  Parameters
  ----------
  mean : array of shape (2,) -- ellipse center.
  sigma : array of shape (2, 2)
      NOTE(review): squared elementwise below to form `covariances`,
      which implies `sigma` holds standard deviations rather than an
      actual covariance matrix -- confirm against callers.
  color : any matplotlib color.
  alpha : float -- ellipse transparency.
  ax : None or matplotlib Axes (defaults to current axis).
  """
  # ====== prepare ====== #
  mean = mean.ravel()
  assert len(mean) == 2, "mean must be vector of size 2"
  assert sigma.shape == (2, 2), "sigma must be matrix of shape (2, 2)"
  if ax is None:
    ax = plt.gca()
  covariances = sigma**2
  # ====== create the ellipses ====== #
  # eigendecomposition: eigenvalues give axis lengths, eigenvectors give
  # the orientation of the ellipse
  v, w = np.linalg.eigh(covariances)
  u = w[0] / np.linalg.norm(w[0])
  angle = np.arctan2(u[1], u[0])
  angle = 180 * angle / np.pi  # convert to degrees
  v = 2. * np.sqrt(2.) * np.sqrt(v)
  ell = mpl.patches.Ellipse(mean, v[0], v[1], 180 + angle, color=color)
  ell.set_clip_box(ax.bbox)
  ell.set_alpha(alpha)
  ax.add_artist(ell)
def plot_indices(idx, x=None, ax=None, alpha=0.3, ymin=0., ymax=1.):
  """Draw a red vertical line at each position where `idx` is truthy.

  `x` supplies the x-coordinates (defaults to 0..len(idx)-1); `idx` must
  expose `.shape` (e.g. a numpy array) when `x` is None.
  """
  if ax is None:
    ax = plt.gca()
  if x is None:
    x = range(idx.shape[0])
  for flag, pos in zip(idx, x):
    if flag:
      ax.axvline(x=pos, ymin=ymin, ymax=ymax, color='r', linewidth=1,
                 alpha=alpha)
  return ax
def plot_multiple_features(features,
                           order=None,
                           title=None,
                           fig_width=4,
                           sharex=False):
  """ Plot a series of 1D and 2D in the same scale for comparison

  Parameters
  ----------
  features: Mapping
    Python Mapping from name (string type) to feature matrix (`numpy.ndarray`)
  order: None or list of string
    if None, the order is keys of `features` sorted in alphabet order,
    else, plot the features or subset of features based on the name
    specified in `order`
  title: None or string
    title for the figure
  fig_width: int
    figure width in inches (height grows with the number of features)
  sharex: bool
    if True, remove the vertical space between subplots

  Note
  ----
  delta or delta delta features should have suffix: '_d1' and '_d2'
  """
  # preferred display order for well-known feature names; anything not
  # listed here is appended afterwards in sorted order
  known_order = [
      # For audio processing
      'raw',
      'stft_energy',
      'stft_energy_d1',
      'stft_energy_d2',
      'frames_energy',
      'frames_energy_d1',
      'frames_energy_d2',
      'energy',
      'energy_d1',
      'energy_d2',
      'vad',
      'sad',
      'sap',
      'sap_d1',
      'sap_d2',
      'pitch',
      'pitch_d1',
      'pitch_d2',
      'loudness',
      'loudness_d1',
      'loudness_d2',
      'f0',
      'f0_d1',
      'f0_d2',
      'spec',
      'spec_d1',
      'spec_d2',
      'mspec',
      'mspec_d1',
      'mspec_d2',
      'mfcc',
      'mfcc_d1',
      'mfcc_d2',
      'sdc',
      'qspec',
      'qspec_d1',
      'qspec_d2',
      'qmspec',
      'qmspec_d1',
      'qmspec_d2',
      'qmfcc',
      'qmfcc_d1',
      'qmfcc_d2',
      'bnf',
      'bnf_d1',
      'bnf_d2',
      'ivec',
      'ivec_d1',
      'ivec_d2',
      # For image processing
      # For video processing
  ]
  if isinstance(features, (tuple, list)):
    features = OrderedDict(features)
  if not isinstance(features, Mapping):
    raise ValueError("`features` must be mapping from name -> feature_matrix.")
  # ====== check order or create default order ====== #
  if order is not None:
    order = [str(o) for o in order]
  else:
    if isinstance(features, OrderedDict):
      order = features.keys()
    else:
      keys = sorted(features.keys(
      ) if isinstance(features, Mapping) else [k for k, v in features])
      order = []
      for name in known_order:
        if name in keys:
          order.append(name)
      # add the remain keys
      for name in keys:
        if name not in order:
          order.append(name)
  # ====== get all numpy array ====== #
  # keep only ndarray features with at most 4 dimensions
  features = [(name, features[name])
              for name in order
              if name in features and isinstance(features[name], np.ndarray) and
              features[name].ndim <= 4]
  plt.figure(figsize=(int(fig_width), len(features)))
  for i, (name, X) in enumerate(features):
    X = X.astype('float32')
    plt.subplot(len(features), 1, i + 1)
    # flatten 2D features with one dim equal to 1
    if X.ndim == 2 and any(s == 1 for s in X.shape):
      X = X.ravel()
    # check valid dimension and select appropriate plot
    if X.ndim == 1:
      plt.plot(X)
      plt.xlim(0, len(X))
      plt.ylabel(name, fontsize=6)
    elif X.ndim == 2:  # transpose to frequency x time
      plot_spectrogram(X.T, title=name)
    elif X.ndim == 3:
      plt.imshow(X)
      plt.xticks(())
      plt.yticks(())
      plt.ylabel(name, fontsize=6)
    else:
      raise RuntimeError("No support for >= 3D features.")
    # auto, equal
    plt.gca().set_aspect(aspect='auto')
    # plt.axis('off')
    plt.xticks(())
    # plt.yticks(())
    plt.tick_params(axis='y', size=6, labelsize=4, color='r', pad=0, length=2)
    # add title to the first subplot
    if i == 0 and title is not None:
      plt.title(str(title), fontsize=8)
  if sharex:
    plt.subplots_adjust(hspace=0)
def plot_spectrogram(x,
                     vad=None,
                     ax=None,
                     colorbar=False,
                     linewidth=0.5,
                     vmin='auto',
                     vmax='auto',
                     title=None):
  ''' Plotting spectrogram

  Parameters
  ----------
  x : np.ndarray
      2D array
  vad : np.ndarray, list
      1D array, a red line will be draw at vad=1.
  ax : matplotlib.Axis
      create by fig.add_subplot, or plt.subplots
  colorbar : bool, 'all'
      whether adding colorbar to plot, if colorbar='all', call this
      methods after you add all subplots will create big colorbar
      for all your plots
  vmin, vmax : 'auto' or float
      color scale limits; 'auto' uses the data min/max
  title : None or str
      used (with the array shape) as the y-axis label

  Returns
  -------
  matplotlib Axis with the spectrogram image drawn

  Notes
  -----
  Make sure nrow and ncol in add_subplot is int or this error will show up
   - ValueError: The truth value of an array with more than one element is
      ambiguous. Use a.any() or a.all()
  '''
  if vmin == 'auto':
    vmin = np.min(x)
  if vmax == 'auto':
    vmax = np.max(x)
  # colormap = _cmap(x)
  # colormap = 'spectral'
  colormap = 'nipy_spectral'
  if x.ndim > 2:
    raise ValueError('No support for > 2D')
  elif x.ndim == 1:
    x = x[:, None]
  if vad is not None:
    vad = np.asarray(vad).ravel()
    if len(vad) != x.shape[1]:
      raise ValueError('Length of VAD must equal to signal length, but '
                       'length[vad]={} != length[signal]={}'.format(
                           len(vad), x.shape[1]))
    # normalize vad
    # BUGFIX: `np.cast[np.bool](vad)` relies on aliases removed from modern
    # NumPy (np.bool in 1.24, np.cast in 1.25); astype(bool) is equivalent.
    vad = vad.astype(bool)
  ax = to_axis(ax, is_3D=False)
  ax.set_aspect('equal', 'box')
  # ax.tick_params(axis='both', which='major', labelsize=6)
  ax.set_xticks([])
  ax.set_yticks([])
  # ax.axis('off')
  if title is not None:
    ax.set_ylabel(str(title) + '-' + str(x.shape), fontsize=6)
  img = ax.imshow(x,
                  cmap=colormap,
                  interpolation='kaiser',
                  alpha=0.9,
                  vmin=vmin,
                  vmax=vmax,
                  origin='lower')
  # img = ax.pcolorfast(x, cmap=colormap, alpha=0.9)
  # ====== draw vad vertical line ====== #
  if vad is not None:
    for i, j in enumerate(vad):
      if j:
        ax.axvline(x=i,
                   ymin=0,
                   ymax=1,
                   color='r',
                   linewidth=linewidth,
                   alpha=0.3)
  # plt.grid(True)
  if colorbar == 'all':
    # one shared colorbar spanning every axis of the parent figure
    fig = ax.get_figure()
    axes = fig.get_axes()
    fig.colorbar(img, ax=axes)
  elif colorbar:
    plt.colorbar(img, ax=ax)
  return ax
def plot_images(X: np.ndarray,
                grids: Optional[Tuple[int, int]] = None,
                image_shape: Optional[Tuple[int, int]] = None,
                image_spacing: Optional[Tuple[int, int]] = None,
                ax: Optional['Axes'] = None,
                fontsize: int = 12,
                title: Optional[str] = None):
  r"""Tile images in X together into a single image for plotting

  Parameters
  ----------
  X : np.ndarray
      2D-gray images with shape `[batch_dim, height, width]`
      or 3D-color images `[batch_dim, color, height, width]`
  grids : Optional[Tuple[int, int]], optional
      number of rows and columns, by default None
  image_shape : Optional[Tuple[int, int]], optional
      resized shape of images, by default None
  image_spacing : Optional[Tuple[int, int]], optional
      space betwen rows and columns of images, by default None
  ax : Optional[Axes], optional
      target axis; resolved by `to_axis2D` when None
  fontsize : int
      font size for the optional title
  title : Optional[str]
      axis title

  Returns
  -------
  the matplotlib Axes with the tiled image drawn and the axis turned off
  """
  # NOTE(review): `cmap` is only assigned for ndim in {2, 3, 4}; any other
  # rank would raise UnboundLocalError at imshow -- confirm inputs are
  # validated upstream (e.g. inside `tile_raster_images`).
  if X.ndim == 3 or X.ndim == 2:
    cmap = plt.cm.Greys_r
  elif X.ndim == 4:
    cmap = None
  X = tile_raster_images(X,
                         grids=grids,
                         image_shape=image_shape,
                         image_spacing=image_spacing)
  ax = to_axis2D(ax)
  ax.imshow(X, cmap=cmap)
  if title is not None:
    ax.set_title(str(title), fontsize=fontsize, fontweight='regular')
  ax.axis('off')
  return ax
def plot_images_old(x, fig=None, titles=None, show=False):
  '''Tile a batch of images into one figure (legacy implementation).

  x : 2D-gray or 3D-color images
      for color image the color channel is second dimension
  fig : None or Figure -- reused when given, otherwise a new one is made.
  titles : None, str or list of str -- one title per image.
  show : bool -- if True, display the figure and wait for <Enter>;
      otherwise return the figure.
  '''
  if x.ndim == 3 or x.ndim == 2:
    cmap = plt.cm.Greys_r
  elif x.ndim == 4:
    cmap = None
    # move the color channel last: (n, c, h, w) -> (n, h, w, c)
    shape = x.shape[2:] + (x.shape[1],)
    x = np.vstack([i.T.reshape((-1,) + shape) for i in x])
  else:
    raise ValueError('NO support for %d dimensions image!' % x.ndim)
  if x.ndim == 2:
    ncols = 1
    nrows = 1
  else:
    # near-square grid large enough for all images
    ncols = int(np.ceil(np.sqrt(x.shape[0])))
    nrows = int(ncols)
  if fig is None:
    fig = plt.figure()
  if titles is not None:
    if not isinstance(titles, (tuple, list)):
      titles = [titles]
    if len(titles) != x.shape[0]:
      raise ValueError('Titles must have the same length with'
                       'the number of images!')
  # NOTE(review): the flat index uses `i * ncols + j` while `i` ranges over
  # ncols and `j` over nrows -- equivalent only because nrows == ncols here.
  for i in range(ncols):
    for j in range(nrows):
      idx = i * ncols + j
      if idx < x.shape[0]:
        subplot = fig.add_subplot(nrows, ncols, idx + 1)
        subplot.imshow(x[idx], cmap=cmap)
        if titles is not None:
          subplot.set_title(titles[idx])
        subplot.axis('off')
  if show:
    # plt.tight_layout()
    plt.show(block=True)
    input('<Enter> to close the figure ...')
  else:
    return fig
def plot_hinton(matrix, max_weight=None, ax=None):
  '''Draw a Hinton diagram for visualizing the values of a 2D array
  (e.g. a weight matrix):
    Positive: white
    Negative: black
  squares, and the size of each square represents the magnitude of each value.
  * Note: performance significant decrease as array size > 50*50
  Example:
    W = np.random.rand(10,10)
    hinton_plot(W)
  '''
  ax = ax if ax is not None else plt.gca()
  if not max_weight:
    # default scale: nearest power of two above the largest magnitude
    max_weight = 2**np.ceil(np.log(np.abs(matrix).max()) / np.log(2))
  ax.patch.set_facecolor('gray')
  ax.set_aspect('equal', 'box')
  ax.xaxis.set_major_locator(plt.NullLocator())
  ax.yaxis.set_major_locator(plt.NullLocator())
  for (x, y), w in np.ndenumerate(matrix):
    color = 'white' if w > 0 else 'black'
    # NOTE(review): `max_weight` is computed but never used when sizing the
    # squares -- size depends only on sqrt(|w|); confirm intended.
    size = np.sqrt(np.abs(w))
    rect = plt.Rectangle([x - size / 2, y - size / 2],
                         size,
                         size,
                         facecolor=color,
                         edgecolor=color)
    ax.add_patch(rect)
  ax.autoscale_view()
  ax.invert_yaxis()
  return ax
# ===========================================================================
# Helper methods
# ===========================================================================
def plot_show(block=True, tight_layout=False):
  """Render all pending figures, optionally tightening layout, then close."""
  if tight_layout:
    plt.tight_layout()
  plt.show(block=block)
  if not block:
    # non-blocking show returns immediately; hold the figures open until
    # the user presses <enter>
    input('<enter> to close all plots')
  plt.close('all')
# ===========================================================================
# Detection plot
# ===========================================================================
def _ppndf(cum_prob):
  """ @Original code from NIST
  The input to this function is a cumulative probability.
  The output from this function is the Normal deviate
  that corresponds to that probability.

  i.e. a rational-polynomial approximation of the inverse standard normal
  CDF (the "probit" function), used here to warp DET-curve axes.
  """
  # Constants of the two rational approximations; the regimes are split at
  # |cum_prob - 0.5| = SPLIT.
  SPLIT = 0.42
  A0 = 2.5066282388
  A1 = -18.6150006252
  A2 = 41.3911977353
  A3 = -25.4410604963
  B1 = -8.4735109309
  B2 = 23.0833674374
  B3 = -21.0622410182
  B4 = 3.1308290983
  C0 = -2.7871893113
  C1 = -2.2979647913
  C2 = 4.8501412713
  C3 = 2.3212127685
  D1 = 3.5438892476
  D2 = 1.6370678189
  # ====== preprocess ====== #
  cum_prob = np.array(cum_prob)
  eps = np.finfo(cum_prob.dtype).eps
  # clip away exact 0/1 which would map to -inf/+inf
  cum_prob = np.clip(cum_prob, eps, 1 - eps)
  adj_prob = cum_prob - 0.5
  # ====== init ====== #
  R = np.empty_like(cum_prob)
  norm_dev = np.empty_like(cum_prob)
  # ====== transform ====== #
  centerindexes = np.argwhere(np.abs(adj_prob) <= SPLIT).ravel()
  tailindexes = np.argwhere(np.abs(adj_prob) > SPLIT).ravel()
  # do centerstuff first: ratio of two polynomials in adj_prob**2
  R[centerindexes] = adj_prob[centerindexes] * adj_prob[centerindexes]
  norm_dev[centerindexes] = adj_prob[centerindexes] * \
      (((A3 * R[centerindexes] + A2) * R[centerindexes] + A1) * R[centerindexes] + A0)
  norm_dev[centerindexes] = norm_dev[centerindexes] /\
      ((((B4 * R[centerindexes] + B3) * R[centerindexes] + B2) * R[centerindexes] + B1) * R[centerindexes] + 1.0)
  #find left and right tails
  right = np.argwhere(cum_prob[tailindexes] > 0.5).ravel()
  left = np.argwhere(cum_prob[tailindexes] < 0.5).ravel()
  # do tail stuff: polynomial in sqrt(-log(tail mass))
  R[tailindexes] = cum_prob[tailindexes]
  R[tailindexes[right]] = 1 - cum_prob[tailindexes[right]]
  R[tailindexes] = np.sqrt((-1.0) * np.log(R[tailindexes]))
  norm_dev[tailindexes] = ((
      (C3 * R[tailindexes] + C2) * R[tailindexes] + C1) * R[tailindexes] + C0)
  norm_dev[tailindexes] = norm_dev[tailindexes] / (
      (D2 * R[tailindexes] + D1) * R[tailindexes] + 1.0)
  # swap sign on left tail
  norm_dev[tailindexes[left]] = norm_dev[tailindexes[left]] * -1.0
  return norm_dev
def plot_detection_curve(x,
                         y,
                         curve,
                         xlims=None,
                         ylims=None,
                         ax=None,
                         labels=None,
                         legend=True,
                         title=None,
                         linewidth=1.2,
                         pointsize=8.0):
  """Plot DET / ROC (PRC not implemented) detection curves.

  Parameters
  ----------
  x: array, or list|tuple of array
      if list or tuple of array is given, plot multiple curves at once
  y: array, or list|tuple of array
      if list or tuple of array is given, plot multiple curves at once
  curve: {'det', 'roc', 'prc'}
      det: detection error trade-off
      roc: receiver operating curve
      prc: precision-recall curve
  xlims : (xmin, xmax) in float
      for DET, `xlims` should be in [0, 1]
  ylims : (ymin, ymax) in float
      for DET, `ylims` should be in [0, 1]
  labels: {list of str}
      labels in case ploting multiple curves
  legend : bool -- show the legend box.
  title : None or str
  linewidth, pointsize : float -- styling of curves and minDCF points.

  Note
  ----
  for 'det': xaxis is FPR - Pfa, and yxais is FNR - Pmiss
  for 'roc': xaxis is FPR - Pfa, and yaxis is TPR
  for 'prc': xaxis is, yaxis is
  """
  from odin import backend as K
  # ====== preprocessing ====== #
  if not isinstance(x, (tuple, list)):
    x = (x,)
  if not isinstance(y, (tuple, list)):
    y = (y,)
  if len(x) != len(y):
    raise ValueError(
        "Given %d series for `x`, but only get %d series for `y`." %
        (len(x), len(y)))
  if not isinstance(labels, (tuple, list)):
    labels = (labels,)
  labels = as_tuple(labels, N=len(x))
  linewidth = float(linewidth)
  # ====== const ====== #
  eps = np.finfo(x[0].dtype).eps
  xticks, xticklabels = None, None
  yticks, yticklabels = None, None
  xlabel, ylabel = None, None
  lines = []
  points = []
  # ====== check input arguments ====== #
  curve = curve.lower()
  if curve not in ('det', 'roc', 'prc'):
    raise ValueError("`curve` can only be: 'det', 'roc', or 'prc'")
  if ax is None:
    ax = plt.gca()
  # ====== select DET curve style ====== #
  if curve == 'det':
    xlabel = "False Alarm probability (in %)"
    ylabel = "Miss probability (in %)"
    # 0.00001, 0.00002,
    # , 0.99995, 0.99998, 0.99999
    xticks = np.array([
        0.00005, 0.0001, 0.0002, 0.0005, 0.001, 0.002, 0.005, 0.01, 0.02, 0.05,
        0.1, 0.2, 0.4, 0.6, 0.8, 0.9, 0.95, 0.98, 0.99, 0.995, 0.998, 0.999,
        0.9995, 0.9998, 0.9999
    ])
    xticklabels = [
        str(i)[:-2] if '.0' == str(i)[-2:] else
        (str(i) if i > 99.99 else str(i)) for i in xticks * 100
    ]
    # snap the requested (or data-derived) limits outward to tick positions
    if xlims is None:
      xlims = (max(min(np.min(i) for i in x),
                   xticks[0]), min(max(np.max(i) for i in x), xticks[-1]))
    xlims = (
        [val for i, val in enumerate(xticks) if val <= xlims[0] or i == 0][-1] +
        eps, [
            val for i, val in enumerate(xticks)
            if val >= xlims[1] or i == len(xticks) - 1
        ][0] - eps)
    if ylims is None:
      ylims = (max(min(np.min(i) for i in y),
                   xticks[0]), min(max(np.max(i) for i in y), xticks[-1]))
    ylims = (
        [val for i, val in enumerate(xticks) if val <= ylims[0] or i == 0][-1] +
        eps, [
            val for i, val in enumerate(xticks)
            if val >= ylims[1] or i == len(xticks) - 1
        ][0] - eps)
    # convert to log scale (warp probabilities to normal-deviate axis)
    xticks = _ppndf(xticks)
    yticks, yticklabels = xticks, xticklabels
    xlims, ylims = _ppndf(xlims), _ppndf(ylims)
    # main line
    # TODO: add EER value later
    name_fmt = lambda name, dcf, eer: ('EER=%.2f;minDCF=%.2f' % (eer * 100, dcf * 100)) \
        if name is None else \
        ('%s (EER=%.2f;minDCF=%.2f)' % (name, eer * 100, dcf * 100))
    labels_new = []
    for count, (Pfa, Pmiss, name) in enumerate(zip(x, y, labels)):
      eer = K.metrics.compute_EER(Pfa=Pfa, Pmiss=Pmiss)
      # DCF point
      dcf, Pfa_opt, Pmiss_opt = K.metrics.compute_minDCF(Pfa=Pfa, Pmiss=Pmiss)
      Pfa_opt = _ppndf((Pfa_opt,))
      Pmiss_opt = _ppndf((Pmiss_opt,))
      points.append(((Pfa_opt, Pmiss_opt), {'s': pointsize}))
      # det curve
      Pfa = _ppndf(Pfa)
      Pmiss = _ppndf(Pmiss)
      name = name_fmt(name, eer, dcf)
      lines.append(((Pfa, Pmiss), {
          'lw': linewidth,
          'label': name,
          'linestyle': '-' if count % 2 == 0 else '-.'
      }))
      labels_new.append(name)
    labels = labels_new
  # ====== select ROC curve style ====== #
  elif curve == 'roc':
    xlabel = "False Positive probability"
    ylabel = "True Positive probability"
    xlims = (0, 1)
    ylims = (0, 1)
    # roc
    name_fmt = lambda name, auc: ('AUC=%.2f' % auc) if name is None else \
        ('%s (AUC=%.2f)' % (name, auc))
    labels_new = []
    for count, (i, j, name) in enumerate(zip(x, y, labels)):
      auc = K.metrics.compute_AUC(i, j)
      name = name_fmt(name, auc)
      lines.append([(i, j), {
          'lw': linewidth,
          'label': name,
          'linestyle': '-' if count % 2 == 0 else '-.'
      }])
      labels_new.append(name)
    labels = labels_new
    # diagonal (chance line)
    lines.append([(xlims, ylims), {
        'lw': 0.8,
        'linestyle': '-.',
        'color': 'black'
    }])
  # ====== select ROC curve style ====== #
  elif curve == 'prc':
    raise NotImplementedError
  # ====== ploting ====== #
  fontsize = 9
  if xticks is not None:
    ax.set_xticks(xticks)
  if xticklabels is not None:
    ax.set_xticklabels(xticklabels, rotation=-60, fontsize=fontsize)
  if yticks is not None:
    ax.set_yticks(yticks)
  if yticklabels is not None:
    ax.set_yticklabels(yticklabels, fontsize=fontsize)
  # axes labels
  ax.set_xlabel(xlabel, fontsize=12)
  ax.set_ylabel(ylabel, fontsize=12)
  # plot all lines
  for args, kwargs in lines:
    ax.plot(*args, **kwargs)
  # plot all points
  for arg, kwargs in points:
    ax.scatter(*arg, **kwargs)
  if xlims is not None:
    ax.set_xlim(xlims)
  if ylims is not None:
    ax.set_ylim(ylims)
  ax.grid(color='black', linestyle='--', linewidth=0.4)
  if title is not None:
    ax.set_title(title, fontsize=fontsize + 2)
  # legend
  if legend and any(i is not None for i in labels):
    plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
# ===========================================================================
# Micro-control
# ===========================================================================
def plot_colorbar(colormap,
                  vmin=0,
                  vmax=1,
                  ax=None,
                  orientation='vertical',
                  tick_location=None,
                  tick_labels=None,
                  label=None):
  """Add a standalone colorbar to a figure or a single axis.

  Parameters
  ----------
  colormap : string, ColorMap type
  vmin : float
  vmax : float
  ax : {None, matplotlib.figure.Figure or matplotlib.axes.Axes}
      if `Figure` is given, show the color bar in the right hand side or
      top side of the figure based on the `orientation`
  orientation : {'vertical', 'horizontal'}
  tick_location : None or sequence of tick positions
  tick_labels : None or sequence of tick label strings
  label : text label

  Returns
  -------
  the created colorbar object
  """
  # `string_types` comes from the file-level six/compat import
  if isinstance(colormap, string_types):
    cmap = mpl.cm.get_cmap(name=colormap)
  else:
    cmap = colormap
  norm = mpl.colors.Normalize(vmin=vmin, vmax=vmax)
  # ====== add colorbar for the whole figure ====== #
  if ax is None or isinstance(ax, mpl.figure.Figure):
    fig = plt.gcf() if ax is None else ax
    # carve out a thin strip on the right (or top) of the figure
    if orientation == 'vertical':
      cbar_ax = fig.add_axes([0.92, 0.15, 0.02, 0.7])
    else:
      cbar_ax = fig.add_axes([0.15, 0.92, 0.7, 0.02])
    cb1 = mpl.colorbar.ColorbarBase(cbar_ax,
                                    cmap=cmap,
                                    norm=norm,
                                    orientation=orientation)
  # ====== add colorbar for only 1 Axes ====== #
  elif isinstance(ax, mpl.axes.Axes):
    mappable = plt.cm.ScalarMappable(cmap=cmap, norm=norm)
    mappable.set_array([])  # no idea why we need this
    cb1 = plt.colorbar(mappable,
                       ax=ax,
                       pad=0.03 if orientation == 'vertical' else 0.1,
                       shrink=0.7,
                       aspect=25)
  # ====== no idea ====== #
  else:
    raise ValueError("No support for `ax` type: %s" % str(type(ax)))
  # ====== final configuration ====== #
  if tick_location is not None:
    cb1.set_ticks(tick_location)
  if tick_labels is not None:
    cb1.set_ticklabels(tick_labels)
  if label is not None:
    cb1.set_label(str(label))
  return cb1
# ===========================================================================
# Shortcut
# ===========================================================================
def plot_close():
  """Close every open matplotlib figure."""
  plt.close('all')
def plot_to_image(figure: plt.Figure, close_figure: bool = True) -> tf.Tensor:
  """Convert the figure to a PNG image tensor for TensorBoard.

  Parameters
  ----------
  figure : plt.Figure
      the figure to rasterize.
  close_figure : bool
      if True, close the figure after rendering (prevents it from being
      displayed directly inside a notebook).

  Returns
  -------
  tf.Tensor
      uint8 tensor of shape (1, height, width, 4).
  """
  # Save the plot to a PNG in memory.
  buf = io.BytesIO()
  figure.savefig(buf, format='png')
  # BUGFIX: the `close_figure` flag was previously ignored (the figure was
  # always closed); honor the parameter.
  if close_figure:
    plt.close(figure)
  buf.seek(0)
  # Convert PNG buffer to TF image
  image = tf.image.decode_png(buf.getvalue(), channels=4)
  # Add the batch dimension
  image = tf.expand_dims(image, 0)
  return image
def plot_save(path='/tmp/tmp.pdf',
              figs=None,
              dpi=180,
              tight_plot=False,
              clear_all=True,
              log=False,
              transparent=False):
  """Save figures (default: every currently open figure) to `path`.

  Parameters
  ----------
  path : str
      output path; a '.pdf' extension produces a single multi-page PDF,
      any other extension saves one image file per figure.
  figs : None or list of Figure
      figures to save; None selects all open figures.
  dpi : int
      resolution forwarded to `savefig`.
  tight_plot : bool
      call `plt.tight_layout()` before saving.
  clear_all : bool
      if True, remove all saved figures from current figure list
      in matplotlib
  log : bool
      write the saved paths to stderr.
  transparent : bool
      forwarded to `savefig`.
  """
  if tight_plot:
    plt.tight_layout()
  if os.path.exists(path) and os.path.isfile(path):
    os.remove(path)
  if figs is None:
    figs = [plt.figure(n) for n in plt.get_fignums()]
  # ====== saving PDF file ====== #
  # BUGFIX: test the extension with endswith() -- the old substring check
  # ('.pdf' in path.lower()) wrongly treated names like 'x.pdf.png' as PDF.
  if path.lower().endswith('.pdf'):
    saved_path = [path]
    try:
      from matplotlib.backends.backend_pdf import PdfPages
      pp = PdfPages(path)
      for fig in figs:
        fig.savefig(pp,
                    dpi=dpi,
                    transparent=transparent,
                    format='pdf',
                    bbox_inches="tight")
      pp.close()
    except Exception as e:
      sys.stderr.write('Cannot save figures to pdf, error:%s \n' % str(e))
  # ====== saving PNG file ====== #
  else:
    saved_path = []
    path = os.path.splitext(path)
    ext = path[-1][1:].lower()
    path = path[0]
    kwargs = dict(dpi=dpi, bbox_inches="tight")
    for idx, fig in enumerate(figs):
      # number the outputs only when saving more than one figure
      if len(figs) > 1:
        out_path = path + ('.%d.' % idx) + ext
      else:
        out_path = path + '.' + ext
      fig.savefig(out_path, transparent=transparent, **kwargs)
      saved_path.append(out_path)
  # ====== clean ====== #
  if log:
    sys.stderr.write('Saved figures to:%s \n' % ', '.join(saved_path))
  if clear_all:
    plt.close('all')
def plot_save_show(path,
                   figs=None,
                   dpi=180,
                   tight_plot=False,
                   clear_all=True,
                   log=True):
  """Save figures via `plot_save`, then open the result in macOS Preview.

  NOTE(review): macOS-only (hard-coded `open -a Preview`), and `path` is
  interpolated unquoted into `os.system` -- paths containing spaces or
  shell metacharacters will break or be shell-interpreted; consider
  `subprocess.run(['open', '-a', ..., path])`.
  """
  plot_save(path, figs, dpi, tight_plot, clear_all, log)
  os.system('open -a /Applications/Preview.app %s' % path)
|
986,479 | 44991cc0c929e66cf70d1db1fddd040cf5b4a412 | from django.contrib import admin
from embed_video.admin import AdminVideoMixin
from .models import Song, VideoQuery, Choices, ActualSong
class SongAdmin(AdminVideoMixin, admin.ModelAdmin):
    """Song admin page."""
    # columns shown in the change list
    list_display = ("video", "name", "author", "duration_video")

admin.site.register(Song, SongAdmin)
class VideoQueryAdmin(admin.ModelAdmin):
    """Queue admin page."""
    # columns shown in the change list
    list_display = ("selection_time", "selected_video", "user_choices")

admin.site.register(VideoQuery, VideoQueryAdmin)
class ChoicesAdmin(admin.ModelAdmin):
    """Votes admin page."""
    # columns shown in the change list
    list_display = ("VideoInQuery", "voted_user", "voted_time")

admin.site.register(Choices, ChoicesAdmin)
class ActualSongAdmin(admin.ModelAdmin):
    """Currently-playing song admin page."""
    # columns shown in the change list
    list_display = ("video", "time_duration")

admin.site.register(ActualSong, ActualSongAdmin)
# Register your models here.
|
986,480 | 0f434b71630a9cb4e4a0a3212ffdc65cf2057806 | from rest_framework import routers
from .api import LeadViewSet
# NOTE(review): this assignment shadows the imported `rest_framework.routers`
# module with a DefaultRouter instance; renaming the variable to `router`
# would be clearer.
routers = routers.DefaultRouter()
# register LeadViewSet at 'api/lead' with basename "lead"
routers.register('api/lead',LeadViewSet,"lead")
# expose the router-generated URL patterns to Django
urlpatterns = routers.urls
986,481 | afce968234ea595bbe32b98f12bcd1769f1dd105 | #!/usr/bin/env python
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from codecs import open
from setuptools import setup, find_packages
# Extension version; bump on every release.
VERSION = "0.2.17"

# Trove classifiers describing maturity, audience, supported Python
# versions, and license.
CLASSIFIERS = [
    'Development Status :: 3 - Alpha',
    'Intended Audience :: Developers',
    'Programming Language :: Python',
    'Programming Language :: Python :: 2',
    'Programming Language :: Python :: 2.7',
    'Programming Language :: Python :: 3',
    'Programming Language :: Python :: 3.4',
    'Programming Language :: Python :: 3.5',
    'Programming Language :: Python :: 3.6',
    'License :: OSI Approved :: MIT License',
]

# Runtime requirements beyond the Azure CLI itself (currently none).
DEPENDENCIES = []

setup(
    name='hack',
    version=VERSION,
    description='Commands to quickly create web space and resources for hack projects.',
    long_description='An Azure CLI Extension to quickly create and delete common hack project resources.',
    license='MIT',
    author='Christopher Harrison',
    author_email='chrhar@microsoft.com',
    url='https://github.com/Azure/azure-cli-extensions',
    classifiers=CLASSIFIERS,
    # azext_metadata.json marks this package as an Azure CLI extension
    package_data={'azext_hack': ['azext_metadata.json']},
    packages=find_packages(),
    install_requires=DEPENDENCIES
)
|
986,482 | 99561a17e389d817f2636345ade0bc7c63d7b85f | from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.http import HttpResponse, JsonResponse
from django.conf import settings
from django.shortcuts import render, redirect, get_object_or_404
import logging
import json
from django.urls import reverse
from arevelcore.amplitude import AmplitudeLogger
from workspace.services import *
LOG = logging.getLogger(__name__)
amp = AmplitudeLogger()
def landing(request):
    """Landing page: authenticated users are redirected to their docs list,
    anonymous visitors get the marketing page."""
    logging.info("Homepage req")
    # return HttpResponse("Hello, Arevel.")
    # return render(request, "index.html")
    # NOTE(review): `is_authenticated` is a property (not callable) on
    # Django >= 1.10 and calling it fails on >= 2.0; this code implies an
    # older Django -- confirm before upgrading.
    if request.user.is_authenticated():
        return redirect(reverse('docs_list'))
    return render(request, "landing.html")
@login_required
def docs_list(request):
    """List all documents owned by the current user."""
    docs = Docs.objects.filter(owner=request.user)
    return render(request, 'docs_list.html', context={'docs': docs})
@login_required
def docs_create(request):
    """Create a new document for the current user and redirect to it.

    Also logs a 'create doc' analytics event with trigger='manual'.
    """
    doc = create_doc(request.user)
    amp.log_event(amp.create_event(request, "create doc", {
        'doc': doc.uuid,
        'trigger': 'manual'
    }, user=request.user))
    return redirect(doc.get_absolute_url())
@login_required
def latest_doc(request):
    """Redirect to one of the user's docs, creating one if none exist."""
    # Automatically create or get the first doc and redirect there.
    # So users start in the detail view rather than the list view.
    # NOTE(review): order_by("date_updated").first() returns the LEAST
    # recently updated doc; "-date_updated" would return the newest --
    # confirm which is intended given the function name.
    doc = Docs.objects.filter(owner=request.user).order_by("date_updated").first()
    if not doc:
        doc = create_doc(request.user)
        # analytics: doc was auto-created rather than user-initiated
        amp.log_event(amp.create_event(request, "create doc", {
            'doc': doc.uuid,
            'trigger': 'auto'
        }, user=request.user))
    return redirect(doc.get_absolute_url())
@login_required
def doc_details(request, uuid):
    """Render a document (GET) or persist edits to it (POST).

    404s on unknown uuid; `has_document_permission` is expected to reject
    users without access before any read/write happens.
    """
    doc = get_object_or_404(Docs, uuid=uuid)
    has_document_permission(request, doc)
    if request.method == 'POST':
        # Since this is json format, should be safe against script injection...
        raw_contents = request.POST.get('contents', '')
        contents = json.loads(raw_contents)
        doc.name = request.POST.get('name', '');
        doc.contents = contents
        doc.save()
        amp.log_event(amp.create_event(request, "update doc", {
            'doc': doc.uuid
        }, user=request.user))
        return HttpResponse("OK")
    return render(request, "doc.html", context={'doc': doc})
def config(request):
    """Expose deployment flags as JSON for the frontend."""
    logging.info("Config req")
    payload = {
        "is_prod": settings.IS_PROD,
        "debug": settings.DEBUG,
    }
    return JsonResponse(payload)
def ycdemo(request):
    """Serve the YC demo page."""
    logging.info("YC Demo")
    return render(request, 'yc.html')
def healthcheck(request):
    """Liveness-probe endpoint: always answers 200 OK."""
    return HttpResponse("OK")
def keepalive(request):
    """Keep-alive ping endpoint: always answers 200 OK."""
    return HttpResponse("OK")
986,483 | 9e90cd2c83906481156889028d06b7598f8f9260 | import matplotlib.pyplot as plt
import numpy as np
import os
import glob
import mn.plot.genplotlib as genplotlib
class CibData:
    """Bundles one movie's parameter and results files and turns them into a
    plain dict of metadata plus the measured cibarium fraction."""

    def __init__(self, paramsfile, resultsfile):
        self.paramsfile = paramsfile
        self.resultsfile = resultsfile

    def Gendata(self):
        """Return a dict holding the movie name (cwd basename), every
        "key,value" pair found in the params file (with 'fps' coerced to
        int), and — when the results file exists — the open-cibarium
        fraction under 'frac'."""
        data = {'moviename': os.path.basename(os.path.abspath('.'))}
        if os.path.exists(self.paramsfile):
            for line in open(self.paramsfile):
                key = line.split(',')[0]
                data[key] = line.split(',')[1].strip('\n')
            data['fps'] = int(data['fps'])
        if os.path.exists(self.resultsfile):
            data['frac'] = findfraction(self.resultsfile)
        return data
def loadresultsfile(fname):
    """Parse an ImageJ "multimeasure" results text file.

    Skips the header row and returns a float array with one row per
    measured frame, keeping columns 1, 3 and 4 (area, slice, length).
    Meant for files produced by the ImageJ macro "automeasure.txt".
    """
    wanted_columns = (1, 3, 4)
    return np.loadtxt(fname, skiprows=1, usecols=wanted_columns)
def findfraction(resultsfile):
    """Mean fraction of the cibarium that is open, averaged over slices.

    Each results row is (area, slice, length); rows with length == 0 carry
    an area measurement, the others a length measurement.

    Fix: the per-slice dict used to be rebuilt from scratch and re-assigned
    on *every* row, so only the last measurement of each slice survived and
    an area/length pair could never coexist (the final division always
    raised IndexError). Measurements now accumulate per slice. Debug prints
    removed.
    """
    data = loadresultsfile(resultsfile)
    d = {}
    for datum in data:
        area, slicenum, length = datum
        slicestr = str(slicenum)
        if slicestr not in d:
            d[slicestr] = {'area': [], 'length': []}
        if length == 0:
            d[slicestr]['area'].append(area)
        else:
            d[slicestr]['length'].append(length)
    fraction = []
    # One fraction per slice: first area divided by first length.
    for slicestr, value in d.iteritems():
        fraction.append(value['area'][0] / value['length'][0])
    return np.mean(fraction)
def batchfiles(fdir='.'):
    """Change into `fdir` and return the sorted absolute paths of its entries.

    Absolute (rather than relative) paths let downstream code chdir freely
    without invalidating the returned names.
    """
    os.chdir(fdir)
    entries = (os.path.abspath(entry) for entry in glob.iglob('*'))
    return sorted(entries)
def makenewdir(newdir):
    """Create `newdir` (and parents), ignoring only "already exists".

    Fix: the old except clause silently swallowed *every* OSError
    (permission errors, invalid paths, ...); anything other than EEXIST
    (errno 17) now propagates to the caller.
    """
    try:
        os.makedirs(newdir)
    except OSError as e:
        if e.errno != 17:
            raise
def genresfile_moviefolder():
    """Path of the shared results file when cwd is a movie folder.

    The 'summary' directory lives two levels above the movie folder and is
    created on demand.
    """
    grandparent = os.path.dirname(os.path.dirname(os.path.abspath('.')))
    resdir = os.path.join(grandparent, 'summary')
    makenewdir(resdir)
    return os.path.join(resdir, 'cibresults.txt')
def genresfile_datafolder():
    """Path of the shared results file when cwd is the data folder.

    The 'summary' directory lives one level above cwd and is created on
    demand.
    """
    parent = os.path.dirname(os.path.abspath('.'))
    resdir = os.path.join(parent, 'summary')
    makenewdir(resdir)
    return os.path.join(resdir, 'cibresults.txt')
def writeresults(dict):
    """Append one movie's row to the shared results file.

    Writes the CSV header first if the file does not exist yet. Movies
    whose dict lacks 'frac' or 'condition' (no results collected) are
    silently skipped.
    """
    resfile = genresfile_moviefolder()
    if not os.path.isfile(resfile):
        with open(resfile, 'w') as out:
            out.write('Movie' + ',' + 'NormCibArea,Condition' + '\n')
    with open(resfile, 'a') as out:
        try:
            out.write(dict['moviename'] + ',' + str(dict['frac']) + ',' + dict['condition'] + '\n')
        except KeyError:
            pass
# ---- Batch driver: summarize every movie folder, then plot ---------------
#PARAMS_FILE = 'cibareaparams'
PARAMS_FILE = 'params'
RESULTS_FILE = 'results1.txt'
# Start from a clean summary file so reruns do not append duplicate rows.
x = genresfile_datafolder()
if os.path.exists(x) == True:
    os.remove(x)
# Visit every movie folder under cwd and append its row to the summary.
names = batchfiles('.')
for name in names:
    print os.path.basename(name)
    os.chdir(name)
    params = CibData(PARAMS_FILE, RESULTS_FILE)
    cibdict = params.Gendata()
    writeresults(cibdict)
# Plot the collected fractions with the genplotlib helpers.
os.chdir('../../summary')
d = genplotlib.gendict_cibarea_circ('cibresults.txt')
md = genplotlib.genlist(d)
k = d.keys()
genplotlib.plotdata(d, md, k, 'b', 'NormCibArea', 'Fraction of cibarium open', ymin=0, ylim=100)
plt.savefig('cibareacirc')
#plt.figure()
#e = genplotlib.gendict_cibarea_dur('cibresults.txt')
#me = genplotlib.genlist(e)
#genplotlib.plotdata(e, me, k, 'b', 'Duration', 'Duration of Drinking', ymin=0, ylim=200)
#plt.savefig('duration')
#try:
#a =
#f = findfraction()
#list.append(f)
#except IOError as e:
#if e.errno == 2:
#continue
#print(list)
|
986,484 | cab7ee75ffb42e73895da865424006bbdd51d29d | prompt = ("\nPlease enter your name.")
# Greet each visitor by their capitalized name until they enter the quit key.
prompt += "\nEnter q to quit."
while True:
    guest = input(prompt)
    if guest == 'q':
        break
    print(guest.title())
986,485 | 87ab31b8f156b076bb2b7c46985eb6bcef200891 | import re
import os
import pdfplumber
import codecs
import pandas as pd
# Accumulators filled by info_coolect() for every processed PDF and dumped
# into a single Excel sheet at the end of the script.
# Court-order (case) numbers
number = list()
# Court-order dates
date = list()
# Court (judicial district) numbers
court_number = list()
def info_coolect(pdf_file):
    """Extract the court-order number, date and court number from the first
    page of `pdf_file` and append them to the module-level accumulators
    (`number`, `date`, `court_number`).

    Fixes: on a failed match this used to `return next`, handing the
    *builtin* `next` function back to the caller instead of returning
    nothing; the PDF is now also closed before that early return instead
    of being leaked.
    """
    # Read the first page of the PDF as one string.
    pdf = pdfplumber.open(pdf_file)
    page = pdf.pages[0]
    text = page.extract_text()
    # Two key phrases occur in the documents; the 35 characters after the
    # match hold the order number and the date.
    string = r'судебный приказ[ \n]\S*[ \n]\№'
    second_string = r'исполнительный лист[ \n]\S*[ \n]\№'
    if re.search(string, text):
        target_string = [value for value in text[re.search(string, text).span()[1]:re.search(string, text).span()[1] + 35].split(' ') if value]
    elif re.search(second_string, text):
        target_string = [value for value in text[re.search(second_string, text).span()[1]:re.search(second_string, text).span()[1] + 35].split(' ') if value]
    else:
        print('Номер и/или дата судебного приказа не найдена')
        pdf.close()
        return
    # Edge case: sometimes the letter prefix and the digits of the order
    # number are separated by a space and land in two tokens.
    if len(target_string[0].replace('\n', '')) >= 3:
        # Order number, then order date.
        number.append(target_string[0].replace('\n', ''))
        date.append(target_string[2].replace(',', '').replace('\n', ''))
    else:
        # Glue the split order number back together; the date shifts by one.
        number.append(target_string[0].replace('\n', '') + target_string[1].replace('\n', ''))
        date.append(target_string[3].replace(',', ''))
    # Court number: the documents use two different label variants.
    if text.find('Судебный участок мирового судьи №') != -1:
        court_number.append(text[text.find('Судебный участок мирового судьи №') + 32:text.find(
            'Судебный участок мирового судьи №') + 38].split(' ')[1])
    elif text.find('Судебный участок №') != -1:
        court_number.append(text[text.find('Судебный участок №') + 19:text.find(
            'Судебный участок №') + 21].split()[0])
    else:
        print('Номер участка не найден')
    pdf.close()
# Folder paths to scan, one per line, read from a UTF-8 (with BOM) text file.
paths = list()
with codecs.open('пути к папкам.txt', 'r', 'utf_8_sig' ) as f:
    for path in f.readlines():
        # Strip newline / carriage-return characters before storing.
        paths.append(path.replace('\n', '').replace('\r', ''))
# Walk every listed folder recursively and feed each file to the extractor.
for path in paths:
    for root, dirs, files in os.walk(os.path.abspath(path)):
        for file in files:
            print(os.path.join(root, file))
            info_coolect(os.path.join(root, file))
# Assemble the collected columns and save them as one Excel sheet.
info_collection = pd.DataFrame()
info_collection['№ приказа'] = number
info_collection['Дата'] = date
info_collection['№ участка'] = court_number
info_collection.to_excel('как угодно.xlsx')
986,486 | dc0fee3f9a6069ec9e76fe009462d39eb276cadf | #!/usr/bin/env python
# code:UTF-8
# @Author : Sasuke
import unittest,time
from testfarm.test_program.app.weixin.element.main_page import HomePage
from testfarm.test_program.app.weixin.element.master_page import MasterPage
from testfarm.test_program.app.weixin.element.statement_page import Statement
from testfarm.test_program.conf.decorator import testcase, setup, teardown
from testfarm.test_program.app.student.login.object_page.login_page import LoginPage
from testfarm.test_program.app.weixin.element.teacher_manage import TeacherManage
from testfarm.test_program.app.weixin.element.public import Get_Cash
class Set_offer(unittest.TestCase):
    """Teacher management: rename an invited teacher, then cancel invitations."""
    @classmethod
    @setup
    def setUp(cls):
        """Launch the app and build the page objects the test drives."""
        cls.home = HomePage()
        cls.login_page = LoginPage()
        cls.master_page = MasterPage()
        cls.statement_page = Statement()
        cls.tea_manage = TeacherManage()
        cls.public = Get_Cash()
    @classmethod
    @teardown
    def tearDown(cls):
        pass
    @testcase
    def test_offer(self):
        # Navigate: WeChat admin centre -> account management -> teacher management.
        self.public.enter_wxzx()
        self.home.account_management()
        self.home.click_teacher_manage()
        if self.tea_manage.wait_teacher_manage_page():
            self.change_name()  # rename the invited teacher
            self.canael_invite()  # cancel an invitation
            self.canael_invite()  # cancel the next invitation
        else:
            print('\n\t❌error:进入页面错误,没有进入\n')
        # Navigate back out to the index page.
        self.master_page.go_back()
        self.master_page.get_btn()
        self.master_page.go_back()
        if self.login_page.wait_check_page_index():
            self.login_page.click_weixin()
    def change_name(self):
        # Open the last teacher (still in "inviting" state) and edit the name.
        tea_ele_list = self.tea_manage.get_all_teacher()[1]
        tea_ele_list[-1].click()
        if self.tea_manage.wait_teacher_info_page():
            self.tea_manage.change_name_in_tea_info()
            self.tea_manage.save_change()
            if self.tea_manage.wait_teacher_manage_page():
                info_list = self.tea_manage.get_all_teacher()[0]
                # Both name variants must now appear in the refreshed list.
                if '时晓 邀请中 -' in str(info_list) and '小时 邀请中 -' in str(info_list):
                    print('✅姓名修改成功')
                else:
                    print('\n\t❌error:姓名修改失败,请进行检查\n')
            else:
                print('\n\t❌error:进入页面错误,没有进入\n')
        else:
            print('\n\t❌error:进入页面错误,没有进入\n')
    def canael_invite(self):
        # NOTE(review): name is a typo for "cancel_invite" — kept for callers.
        all_tea_ele_list = self.tea_manage.get_all_teacher()[1]
        all_tea_ele_list[-1].click()
        if self.tea_manage.wait_teacher_info_page():
            tea_name = self.tea_manage.get_tea_name()
            # Trigger + dismiss the cancel dialog, then trigger + confirm it.
            self.tea_manage.invite_cancel()
            self.tea_manage.click_cancel()
            self.tea_manage.invite_cancel()
            self.tea_manage.click_ensure()
            info_list = self.tea_manage.get_all_teacher()[0]
            # The teacher must no longer be listed as "inviting".
            if '%s 邀请中 -' % tea_name not in str(info_list):
                print('✅取消邀请成功')
            else:
                print('\n\t❌error:取消邀请失败,请进行检查\n')
        else:
            print('\n\t❌error:进入页面错误,没有进入\n')
986,487 | a68f8a8f95882f62af30feb1a0aec4cd0fbcbae2 | # Generated by Django 2.0.1 on 2020-01-27 04:37
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated: adds the "page not found" link-text fields (default,
    # Azerbaijani and English variants) to the Settings model.

    dependencies = [
        ('core', '0033_auto_20200127_0428'),
    ]

    operations = [
        migrations.AddField(
            model_name='settings',
            name='not_found_url',
            field=models.CharField(blank=True, default='Go To Homepage', max_length=300, null=True),
        ),
        migrations.AddField(
            model_name='settings',
            name='not_found_url_az',
            field=models.CharField(blank=True, default='Go To Homepage', max_length=300, null=True),
        ),
        migrations.AddField(
            model_name='settings',
            name='not_found_url_en',
            field=models.CharField(blank=True, default='Go To Homepage', max_length=300, null=True),
        ),
    ]
|
986,488 | 8d3d19e00cc922651d56112a898c885dcb2a6709 | from django.http import HttpResponse
from django.template.loader import get_template
from Domain.models import Boda
from Domain.models import Lugar
from .models import FiestaEvento, AlimentoCarrito, Alimento, EntretenimientoCarrito, Entretenimiento
from Ceremonia.models import CeremoniaEvento
from django.contrib.auth.decorators import login_required
from django.contrib.auth import logout
from django.shortcuts import redirect
from Domain.models import *
from .utils import getPriceFormat
@login_required(login_url='index')
def fiestaDashboardView(request, user_id, boda_id, fiesta_id):
    """Dashboard for the wedding party ("fiesta") of one wedding.

    GET renders the party page (venue, food, entertainment, photos, with
    display-formatted prices); POST mutates the party according to the
    submitted ``btn_value`` action (add/remove venue, photos, food,
    entertainment), keeps the wedding total in sync, and re-renders.

    NOTE(review): the GET and POST branches duplicate the price-formatting
    and index-rebuilding passes — a candidate for helper extraction once
    covered by tests.
    """
    # A user may only open their own dashboard.
    if str(request.user) != str(user_id):
        logout(request)
        return redirect('index')
    user_id = request.user
    enamorado = Enamorado.objects.get(User_id=user_id)
    # The requester may be either partner ("enamorado") of the wedding.
    try:
        boda2 = Boda.objects.get(Enamorado1_id=enamorado.id)
    except:
        boda2 = Boda.objects.get(Enamorado2_id=enamorado.id)
    if str(boda2.id) != str(boda_id):
        return redirect('tableroResumen')
    boda = Boda.objects.filter(id__exact=boda_id)
    fiesta = FiestaEvento.objects.get(Boda_id=boda_id)
    ceremonia = CeremoniaEvento.objects.get(Boda_id=boda_id)
    alimento = AlimentoCarrito.objects.filter(FiestaEvento_id=fiesta.id)
    entretenimiento = EntretenimientoCarrito.objects.filter(FiestaEvento_id=fiesta.id)
    indices_alimentos = []
    indices_entretenimientos = []
    # Flash-message tuples: (visible?, text).
    mensaje_error = (False, "")
    mensaje_succes = (False, "")
    mensaje_delete = (False, "")
    subtotal = ""
    if str(fiesta_id) != str(fiesta.id):
        return redirect('tableroResumen')
    # Pre-format venue prices for display.
    Lugares = Lugar.objects.filter(tipo='fiesta')
    for indexLugar in range(0, len(Lugares)):
        precio = getPriceFormat(Lugares[indexLugar].precio)
        Lugares[indexLugar].precioSTR = precio
    if fiesta.Lugar:
        fiesta.Lugar.precioSTR = getPriceFormat(fiesta.Lugar.precio)
    alimento = AlimentoCarrito.objects.filter(FiestaEvento_id=fiesta.id)
    entretenimiento = EntretenimientoCarrito.objects.filter(FiestaEvento_id=fiesta.id)
    indices_alimentos = []
    indices_entretenimientos = []
    # NOTE(review): these flags count carrito rows across *all* fiestas, not
    # just this one — possibly a bug; they are recomputed per-fiesta below.
    count_entre = EntretenimientoCarrito.objects.all()
    count_comida = AlimentoCarrito.objects.all()
    flag_entre = len(count_entre) > 0
    flag_comida = len(count_comida) > 0
    flag_place = fiesta.Lugar != None
    flag_foto = fiesta.Fotos
    Fiesta = None
    if request.method == 'GET':
        # Read-only path: format prices, rebuild the selected-item indices
        # and render the page.
        Alimentos = Alimento.objects.all()
        Entretenimientos = Entretenimiento.objects.all()
        alimento = AlimentoCarrito.objects.filter(FiestaEvento_id=fiesta.id)
        entretenimiento = EntretenimientoCarrito.objects.filter(FiestaEvento_id=fiesta.id)
        for indexAlimento in range(0, len(Alimentos)):
            precio = getPriceFormat(Alimentos[indexAlimento].precio)
            Alimentos[indexAlimento].precioSTR = precio
        for food in alimento:
            precio = getPriceFormat(food.Alimento.precio)
            precioSub = getPriceFormat(food.subtotal)
            food.subtotal = precioSub
            food.Alimento.precioSTR = precio
        for indexEntre in range(0, len(Entretenimientos)):
            precio = getPriceFormat(Entretenimientos[indexEntre].precio)
            Entretenimientos[indexEntre].precioSTR = precio
        for entre in entretenimiento:
            precio = getPriceFormat(entre.Entretenimiento.precio)
            entre.Entretenimiento.precioSTR = precio
        indices_alimentos.clear()
        indices_entretenimientos.clear()
        if alimento.count() > 0:
            for a in alimento:
                indices_alimentos.append(a.Alimento.id)
        if entretenimiento.count() > 0:
            for e in entretenimiento:
                indices_entretenimientos.append(e.Entretenimiento.id)
        if boda_id != 0:
            boda = Boda.objects.filter(id__exact=boda_id)
        template = get_template('Fiesta/fiesta.html')
        size_alimentos = len(indices_alimentos)
        size_entre = len(indices_entretenimientos)
        limite = 0
        flag_comida = size_alimentos > limite
        flag_entre = size_entre > limite
        context = {
            'Lugares': Lugares,
            'flag_place': flag_place,
            'flag_foto': flag_foto,
            'flag_entre': flag_entre,
            'flag_comida': flag_comida,
            'fiesta': fiesta,
            'alimento': alimento,
            'Alimentos': Alimentos,
            'indices_alimentos': indices_alimentos,
            'entretenimiento': entretenimiento,
            'Entretenimientos': Entretenimientos,
            'indices_entretenimientos': indices_entretenimientos,
            'precio': getPriceFormat(fiesta.precio),
            'user_id': user_id,
            'boda_id': boda_id,
            'fiesta_id': fiesta_id,
            'ceremonia_id': ceremonia.id,
            'enamoradoNombre': boda[0].Enamorado1,
            'enamoradoNombre2': boda[0].Enamorado2
        }
        return HttpResponse(template.render(context, request))
    if request.method == 'POST':
        # Mutation path: dispatch on the submitted button value.
        fiesta = FiestaEvento.objects.get(Boda_id=boda_id)
        boda = Boda.objects.get(id=boda_id)
        value_btn = request.POST.get('btn_value')
        Alimentos = Alimento.objects.all()
        alimento = AlimentoCarrito.objects.filter(FiestaEvento_id=fiesta.id)
        Entretenimientos = Entretenimiento.objects.all()
        entretenimiento = EntretenimientoCarrito.objects.filter(FiestaEvento_id=fiesta.id)
        if alimento.count() > 0:
            indices_alimentos.clear()
            for a in alimento:
                indices_alimentos.append(a.Alimento.id)
        if entretenimiento.count() > 0:
            for e in entretenimiento:
                indices_entretenimientos.append(e.Entretenimiento.id)
        # Assign a venue (only if none is set yet).
        if value_btn == "add_place":
            if flag_place == False:
                id_place = request.POST.get('id_place')
                price = request.POST.get('price')
                fiesta.Lugar_id = id_place
                fiesta.precio = fiesta.precio + int(price)
                flag_place = True
                boda.precio = int(boda.precio) + int(price)
                boda.save()
                fiesta.save()
                mensaje_succes = (True, "Lugar para tu fiesta correctamente asignado")
            else:
                mensaje_error = (True, "Ya tienes un lugar asignado para tu fiesta")
        # Toggle photos on (idempotent-guarded).
        if value_btn == "add_foto":
            if fiesta.Fotos == False:
                flag_foto = True
                fiesta.Fotos = True
                fiesta.save()
                mensaje_succes = (True, "Fotos para tu fiesta correctamente agregadas")
            else:
                mensaje_error = (True, "Las fotos para tu fiesta ya fueron agregadas")
        # Add a food item (quantity * unit price) to the cart.
        if value_btn == "add_comida":
            cantidad_comida = request.POST.get('cantidad_comida')
            flag_comida = True
            id_comida = request.POST.get('id_comida')
            if int(id_comida) not in indices_alimentos:
                price = request.POST.get('price')
                comida_inst = Alimento.objects.filter(id__exact=id_comida)
                subtotal = (int(price) * int(cantidad_comida))
                comida = AlimentoCarrito(FiestaEvento=fiesta, Alimento=comida_inst[0], Cantidad=cantidad_comida, subtotal=subtotal)
                fiesta.precio = fiesta.precio + subtotal
                boda.precio = boda.precio + int(subtotal)
                boda.save()
                fiesta.save()
                comida.save()
                alimento = AlimentoCarrito.objects.filter(FiestaEvento_id=fiesta.id)
                mensaje_succes = (True, "Alimento para tu fiesta correctamente agregado")
                indices_alimentos.clear()
                if alimento.count() > 0:
                    for a in alimento:
                        indices_alimentos.append(a.Alimento.id)
            else:
                mensaje_error = (True, "Este alimento ya fue agregado")
        # Add an entertainment item to the cart.
        if value_btn == "add_entretenimiento":
            flag_entre = True
            id_entre = request.POST.get('id_entretenimiento')
            price = request.POST.get('price')
            if int(id_entre) not in indices_entretenimientos:
                fiesta.precio = fiesta.precio + int(price)
                boda.precio = boda.precio + int(price)
                boda.save()
                fiesta.save()
                entre_inst = Entretenimiento.objects.filter(id__exact=id_entre)
                entre = EntretenimientoCarrito(FiestaEvento=fiesta, Entretenimiento=entre_inst[0])
                entre.save()
                entretenimiento = EntretenimientoCarrito.objects.filter(FiestaEvento_id=fiesta.id)
                indices_entretenimientos.clear()
                mensaje_succes = (True, "Entretenimiento para tu fiesta correctamente agregado")
                if entretenimiento.count() > 0:
                    for e in entretenimiento:
                        indices_entretenimientos.append(e.Entretenimiento.id)
            else:
                mensaje_error = (True, "Este entretenimiento ya fue agregado")
        # Remove the venue and refund its price.
        if value_btn == "delete_lugar":
            if flag_place == True:
                fiesta.Lugar = None
                price = request.POST.get('price')
                fiesta.precio = fiesta.precio - int(price)
                boda.precio = boda.precio - int(price)
                flag_place = False
                fiesta.save()
                boda.save()
                mensaje_delete = (True, "Lugar eliminado correctamente")
            else:
                mensaje_error = (True, "Este lugar ya fue eliminado")
        # Toggle photos off.
        if value_btn == "delete_foto":
            if flag_foto == True:
                fiesta.Fotos = False
                flag_foto = False
                fiesta.save()
                mensaje_delete = (True, "Fotos eliminadas correctamente")
            else:
                mensaje_error = (True, "Las fotos ya fueron removidas")
        # Remove a food item and refund its subtotal.
        if value_btn == "delete_comida":
            comida_id = request.POST.get('comida_id')
            if int(comida_id) in indices_alimentos:
                alimento_carrito_id = request.POST.get('alimento_carrito_id')
                alimentocarrito = AlimentoCarrito.objects.filter(id__exact=alimento_carrito_id)
                cantidad = alimentocarrito[0].Cantidad
                subtotal = alimentocarrito[0].subtotal
                alimentocarrito.delete()
                alimento = AlimentoCarrito.objects.filter(FiestaEvento_id=fiesta.id)
                indices_alimentos.clear()
                fiesta.precio = int(fiesta.precio) - (int(subtotal))
                boda.precio = boda.precio - int(subtotal)
                boda.save()
                fiesta.save()
                mensaje_delete = (True, "Alimento eliminado correctamente")
                if alimento.count() > 0:
                    for a in alimento:
                        indices_alimentos.append(a.Alimento.id)
            else:
                mensaje_error = (True, "Este alimento ya fue eliminado")
        # Remove an entertainment item and refund its price.
        if value_btn == "delete_entretenimiento":
            entretenimiento_id = request.POST.get('entretenimiento_id')
            if int(entretenimiento_id) in indices_entretenimientos:
                entretenimiento_carrito_id = request.POST.get('entretenimiento_carrito_id')
                entre = Entretenimiento.objects.get(id=entretenimiento_id)
                entretenimientocarrito = EntretenimientoCarrito.objects.filter(id__exact=entretenimiento_carrito_id)
                entretenimientocarrito.delete()
                entretenimiento = EntretenimientoCarrito.objects.filter(FiestaEvento_id=fiesta.id)
                indices_entretenimientos.clear()
                price = request.POST.get('price')
                fiesta.precio = int(fiesta.precio) - int(price)
                boda.precio = boda.precio - int(price)
                boda.save()
                fiesta.save()
                mensaje_delete = (True, "Entrenimiento eliminado correctamente")
                if entretenimiento.count() > 0:
                    for e in entretenimiento:
                        indices_entretenimientos.append(e.Entretenimiento.id)
            else:
                mensaje_error = (True, "Este entretenimiento ya fue eliminado")
        # Re-run the display formatting/flag passes and render the result.
        Lugares = Lugar.objects.filter(tipo='fiesta')
        for indexLugar in range(0, len(Lugares)):
            precio = getPriceFormat(Lugares[indexLugar].precio)
            Lugares[indexLugar].precioSTR = precio
        if fiesta.Lugar:
            fiesta.Lugar.precioSTR = getPriceFormat(fiesta.Lugar.precio)
        for indexAlimento in range(0, len(Alimentos)):
            precio = getPriceFormat(Alimentos[indexAlimento].precio)
            Alimentos[indexAlimento].precioSTR = precio
        for food in alimento:
            precio = getPriceFormat(food.Alimento.precio)
            precioSub = getPriceFormat(food.subtotal)
            food.subtotal = precioSub
            food.Alimento.precioSTR = precio
        for indexEntre in range(0, len(Entretenimientos)):
            precio = getPriceFormat(Entretenimientos[indexEntre].precio)
            Entretenimientos[indexEntre].precioSTR = precio
        for entre in entretenimiento:
            precio = getPriceFormat(entre.Entretenimiento.precio)
            entre.Entretenimiento.precioSTR = precio
        size_alimentos = len(indices_alimentos)
        size_entre = len(indices_entretenimientos)
        limite = 0
        if size_alimentos > limite:
            flag_comida = True
        else:
            flag_comida = False
        if size_entre > limite:
            flag_entre = True
        else:
            flag_entre = False
        template = get_template('Fiesta/fiesta.html')
        context = {
            'Lugares': Lugares,
            'flag_place': flag_place,
            'flag_foto': flag_foto,
            'flag_entre': flag_entre,
            'flag_comida': flag_comida,
            'fiesta': fiesta,
            'alimento': alimento,
            'Alimentos': Alimentos,
            'indices_alimentos': indices_alimentos,
            'entretenimiento': entretenimiento,
            'Entretenimientos': Entretenimientos,
            'indices_entretenimientos': indices_entretenimientos,
            'mensaje': mensaje_error,
            'mensaje_succes': mensaje_succes,
            'mensaje_delete': mensaje_delete,
            'precio': getPriceFormat(fiesta.precio),
            'user_id': user_id,
            'boda_id': boda_id,
            'fiesta_id': fiesta_id,
            'ceremonia_id': ceremonia.id,
            'enamoradoNombre': boda.Enamorado1,
            'enamoradoNombre2': boda.Enamorado2
        }
        return HttpResponse(template.render(context, request))
|
986,489 | a971489a75d066a1c5929c69ca3687c2ba99cf70 | import os
import cv2
import numpy as np
import random
# structure of the bounding boxes should be (left, top, right, bottom, label)
def random_horizontal_flip(img, bounding_boxes):
    """With probability 0.5, mirror `img` left-right and reflect the boxes.

    `bounding_boxes` rows are (left, top, right, bottom, label); the array
    is modified in place when a flip happens. Fix: the box-width correction
    referenced an undefined name (`bboxes`), raising NameError on every
    flip — it now uses the `bounding_boxes` parameter.
    """
    p = 0.5
    # (cx, cy, cx, cy): image centre repeated to line up with the box columns.
    img_center = np.array(img.shape[:2])[::-1] / 2
    img_center = np.hstack((img_center, img_center))
    if random.random() < p:
        img = img[:, ::-1, :]
        # Reflect the two x coordinates about the vertical centre line...
        bounding_boxes[:, [0, 2]] += 2 * (img_center[[0, 2]] - bounding_boxes[:, [0, 2]])
        # ...which swaps left/right; shift by the width to restore left < right.
        box_width = abs(bounding_boxes[:, 0] - bounding_boxes[:, 2])
        bounding_boxes[:, 0] -= box_width
        bounding_boxes[:, 2] += box_width
    return img, bounding_boxes
def change_brightness(img):
    """Return `img` with its HSV value (brightness) channel replaced by a
    random constant drawn uniformly from [50, 200]."""
    hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
    brightness = random.randint(50, 200)
    hsv[:, :, 2] = brightness
    return cv2.cvtColor(hsv, cv2.COLOR_HSV2BGR)
def original_crop(img, box):
    """Crop `img` to the ground-truth box; `box` is (top, bottom, left, right)."""
    print(img.shape)
    top, bottom, left, right = box
    return img[top:bottom, left:right]
def change_crop(img, box):
    """Return a randomly jittered crop around `box` (top, bottom, left, right).

    Each edge is shifted by up to ~1/4 of the box size and clamped to the
    image bounds. Fix: the horizontal jitter range was derived from the box
    *height* (`round(height / scale)`); it now uses the box width. Debug
    prints removed.
    """
    img_height, img_width, _ = img.shape
    height = box[1] - box[0]
    width = box[3] - box[2]
    scale = 4
    change_height = round(height / scale)
    change_width = round(width / scale)
    # Same draw order as before: top, bottom, left, right.
    jitter = [
        random.randint(-change_height, change_height),
        random.randint(-change_height, change_height),
        random.randint(-change_width, change_width),
        random.randint(-change_width, change_width),
    ]
    newbox = [edge + delta for edge, delta in zip(box, jitter)]
    # Clamp vertical edges into [0, img_height - 1] ...
    for i in (0, 1):
        newbox[i] = min(max(newbox[i], 0), img_height - 1)
    # ... and horizontal edges into [0, img_width - 1].
    for i in (2, 3):
        newbox[i] = min(max(newbox[i], 0), img_width - 1)
    return img[newbox[0]:newbox[1], newbox[2]:newbox[3]]
def flip_image_vert(img):
    """Return `img` mirrored top-to-bottom (cv2 flip code 0)."""
    return cv2.flip(img, 0)
def flip_image_hor(img):
    """Return `img` mirrored left-to-right (cv2 flip code 1)."""
    return cv2.flip(img, 1)
rootdir = '/home/student/dataset/'
writedir = '/home/student/Desktop/'
label = 'tomatosoup'  # NOTE(review): unused below — confirm before removing
# first we extract all the index in the folder
for dirs, subdirs, files in os.walk(rootdir):
    for dir in subdirs:
        root_dir = dirs + dir + '/'
        print(dir)
        files_index = []
        # Collect the distinct numeric stems ("<index>.jpg" / "<index>.csv").
        for files in os.walk(root_dir):
            for file in files[2]:
                split_holder = file.split('.')
                index_value = int(split_holder[0])
                if index_value not in files_index:
                    files_index.append(index_value)
        files_index.sort()
        # NOTE(review): created in the cwd, but outputs are written under
        # writedir below; also raises if the folder already exists — confirm.
        os.makedirs('final_images_' + dir)
        # loop through all the corresponding image and csv file
        for file_index in files_index:
            file_image = str(file_index) + str('.jpg')
            file_csv = str(file_index) + str('.csv')
            # top bottom left right
            bounding_box = []
            # open csv file and extract bounding box parameters
            with open(root_dir + file_csv) as f:
                for row in f:
                    bounding_box.append(int(row))
            print(bounding_box)
            # load the image
            image = cv2.imread(root_dir + file_image)
            # change brightness
            image_bright = change_brightness(image)
            # save original crop image (resized to 150x150)
            original_image = original_crop(image, bounding_box)
            write_name = str(file_index) + '-original.jpg'
            print(writedir + 'final_images_' + dir + '/' + write_name)
            original_image_resize = cv2.resize(original_image, (150,150))
            cv2.imwrite(os.path.join(writedir + 'final_images_' + dir + '/' + write_name), original_image_resize)
            # cv2.imshow('Original Crop', original_image_resize)
            # cv2.waitKey(0)
            # save brightness changed image; NOTE(review): the *_resize result
            # is computed but the UN-resized crop is written — confirm intent
            crop_bright = original_crop(image_bright, bounding_box)
            write_name = str(file_index) + '-bright.jpg'
            crop_bright_resize = cv2.resize(crop_bright, (150,150))
            cv2.imwrite(os.path.join(writedir + 'final_images_' + dir + '/' + write_name), crop_bright)
            # cv2.imshow('Bright Crop', crop_bright_resize)
            # cv2.waitKey(0)
            # save random cropped image (same resize-but-write-original pattern)
            image_crop = change_crop(image, bounding_box)
            write_name = str(file_index) + '-crop.jpg'
            image_crop_resize = cv2.resize(image_crop, (150,150))
            cv2.imwrite(os.path.join(writedir + 'final_images_' + dir + '/' + write_name), image_crop)
            # cv2.imshow('Random Crop', image_crop_resize)
            # cv2.waitKey(0)
            # save horizontally flipped image
            flipped_image = flip_image_hor(original_image)
            write_name = str(file_index) + '-flip.jpg'
            flipped_image_resize = cv2.resize(flipped_image, (150,150))
            cv2.imwrite(os.path.join(writedir + 'final_images_' + dir + '/' + write_name), flipped_image)
            # cv2.imshow('Flip Crop', flipped_image_resize)
            # cv2.waitKey(0)
|
986,490 | dc7c5fa7226e48bdafb28845a8f608d40305c331 | class P1:
def m1(self):
print("m1 method")
class P2:
    # Same method name as P1 — used to demonstrate MRO resolution below.
    def m1(self):
        print("m2 method")

class child(P1, P2):
    # Inherits m1 from both parents; Python's MRO picks P1 (listed first).
    pass

obj = child()
obj.m1()  # prints "m1 method" (resolved via P1)
|
986,491 | 1b8dc1c75cf7fa93904f4bb941f6b905d0aab1d0 | from ..base import AutoEncoderBase
from ..config import Config
from itertools import chain
import torch
from torch import nn, Size, Tensor
import torchvision.transforms as trans
from typing import Callable, List
class AutoEncoder(AutoEncoderBase):
    """Fully connected autoencoder over flattened 2-D inputs.

    The encoder maps ``H*W -> hidden[0] -> ... -> hidden[-1]`` with ReLU
    between layers; the decoder mirrors it back and ends in Tanh, so raw
    outputs live in [-1, 1] (see :meth:`to_image`).

    Fix: the constructor used to call ``hidden.pop()``, mutating the
    *shared* mutable default list — every later instantiation with the
    default silently lost its innermost layer. The bottleneck size is now
    read without mutating the argument.
    """

    def __init__(
        self,
        input_dim: Size,
        config: Config,
        hidden: List[int] = [128, 64, 12, 2]
    ) -> None:
        super().__init__()
        input_dim_flat = input_dim[-1] * input_dim[-2]
        len_ = len(hidden) - 1
        # (ReLU, Linear) pairs between consecutive hidden widths.
        enc = [(nn.ReLU(True), nn.Linear(hidden[i], hidden[i + 1])) for i in range(len_)]
        self.encoder = nn.Sequential(
            nn.Linear(input_dim_flat, hidden[0]),
            *chain.from_iterable(enc)
        )
        # Decoder mirrors the encoder layer-for-layer.
        dec = [(nn.Linear(hidden[i + 1], hidden[i]), nn.ReLU(True)) for i in reversed(range(len_))]
        self.decoder = nn.Sequential(
            *chain.from_iterable(dec),
            nn.Linear(hidden[0], input_dim_flat),
            nn.Tanh(),
        )
        self.input_dim_flat = input_dim_flat
        # Bottleneck width; read (do NOT pop) so the default list stays intact.
        self._encoded_dim = hidden[-1]
        self.to(config.device)
        config.initializer(self)

    def encode(self, x: Tensor) -> Tensor:
        """Flatten the trailing two dims and run the encoder."""
        shape = x.shape
        return self.encoder(x.view(*shape[:-2], self.input_dim_flat))

    def decode(self, z: Tensor, old_shape: Size = None) -> Tensor:
        """Run the decoder; reshape to `old_shape` when one is given."""
        z = self.decoder(z)
        if old_shape is None:
            return z
        return z.view(old_shape)

    def to_image(self, x: Tensor) -> Tensor:
        """Map Tanh output from [-1, 1] to clamped [0, 1] pixel values."""
        return torch.clamp(0.5 * (x + 1.0), 0.0, 1.0)

    def forward(self, x: Tensor) -> Tensor:
        """Full round trip: encode, then decode back to the input shape."""
        return self.decode(self.encode(x), old_shape=x.shape)
def transformer() -> Callable:
    """Return the torchvision preprocessing pipeline: tensor conversion plus
    per-channel normalization into [-1, 1] (matches the decoder's Tanh range)."""
    normalize = trans.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))
    return trans.Compose([trans.ToTensor(), normalize])
|
986,492 | 8250dc7a50d3f3d2c0b08d7cd61d010037f52fae | print("For equation: ax^2 + bx + c = 0: ")
def solve_quadratic(a, b, c):
    """Return the real roots of a*x^2 + b*x + c = 0 as a tuple.

    () when the discriminant is negative, a 1-tuple for a double root, and
    a 2-tuple (larger-numerator root first) otherwise. Fix: the original
    wrote ``-b / 2*a``, which Python parses as ``(-b / 2) * a``; the
    denominator is now correctly ``(2 * a)``.
    """
    d = b ** 2 - 4 * a * c
    if d < 0:
        return ()
    if d == 0:
        return (-b / (2 * a),)
    root = d ** 0.5
    return ((-b + root) / (2 * a), (-b - root) / (2 * a))


if __name__ == "__main__":
    # Same interactive behavior as before, now guarded so import is safe.
    a = int(input("Enter a: "))
    b = int(input("Enter b: "))
    c = int(input("Enter c: "))
    roots = solve_quadratic(a, b, c)
    if not roots:
        print("The equation has no solution")
    elif len(roots) == 1:
        print("The equation has one solution: x = ", roots[0])
    else:
        print("The equation has two solutions: x = ", roots[0], "or: x = ", roots[1])
|
986,493 | 2a0f72da4883a9d5becf0c4924af1fb362a40699 | # coding=utf-8
# This is part of the ProFormA Editor
#
# This proformaEditor was created by the eCULT-Team of Ostfalia University
# http://ostfalia.de/cms/de/ecult/
# The software is distributed under a CC BY-SA 3.0 Creative Commons license
# https://creativecommons.org/licenses/by-sa/3.0/
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
# INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
# PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# script for running whole testsuite
#
# @copyright 2018 Ostfalia Hochschule fuer angewandte Wissenschaften
# @author Karin Borm <k.borm@ostfalia.de>
import editor
import time
#from testcases import xsd_094_Unittest
#from testcases import javaUnittest
#from testcases import setlxDgUnittest
#from testcases import pythonUnittest
import unittest
# Firefox does not pass tests, so only Chrome runs by default.
only_one_browser = True

# Discover every "*test*.py" case under the testcases package.
loader = unittest.TestLoader()
start_dir = 'testcases'
# suite = loader.discover(start_dir, "java*.py")
suite = loader.discover(start_dir, "*test*.py")
runner = unittest.TextTestRunner(verbosity=2)

# First (and normally only) pass: Chrome.
editor.browser = "Chrome"
print("----------------------------------------------------")
print("run test with * " + editor.browser + " * ")
print("----------------------------------------------------")
runner.run(suite)

# Optional second pass with Firefox (disabled via the flag above).
if not only_one_browser:
    time.sleep(3);
    editor.browser = "Firefox"
    print("----------------------------------------------------")
    print("run test with * " + editor.browser + " * ")
    print("----------------------------------------------------")
    runner.run(suite)
|
986,494 | 920c7284f060a278551d1c7f8d6b125facc8a053 | # -*- coding: utf-8 -*-
import numpy as np
from pyorderedfuzzy.ofnumbers.ofnumber import OFNumber
from pyorderedfuzzy.ofmodels.ofseries import OFSeries, init_from_scalar_values
__author__ = "amarszalek"
def ofnormal(mu, sig2, s2, p):
    """Draw one random OFNumber centred on `mu`.

    `sig2` carries per-branch variances, `s2` the variance of an additive
    scalar term, and `p` the probability of keeping the branch order of
    `eta` (vs. swapping its f/g branches).  NOTE(review): distribution
    details inferred from the code — confirm against the model reference.
    """
    dim = mu.branch_f.dim
    # Shift mu by c so both branches become strictly positive before the
    # multiplicative noise is applied; c is subtracted again at the end.
    minf = np.min(mu.branch_f.fvalue_y)
    ming = np.min(mu.branch_g.fvalue_y)
    c = 0.0
    if min(minf, ming) <= 0.0:
        c = np.abs(min(minf, ming)) + 1.0
    eta = OFNumber(np.ones(dim) * c, np.ones(dim) * c) + mu
    # Per-coordinate multiplicative factors ~ N(1, sqrt(sig2)/eta); entries
    # with non-positive variance degrade to the deterministic factor 1.
    x = np.zeros(dim)
    y = np.zeros(dim)
    for i in range(dim):
        if sig2.branch_f.fvalue_y[i] <= 0.0:
            x[i] = 1.0
        else:
            x[i] = np.random.normal(1, np.sqrt(sig2.branch_f.fvalue_y[i]) / eta.branch_f.fvalue_y[i])
        if sig2.branch_g.fvalue_y[i] <= 0.0:
            y[i] = 1.0
        else:
            y[i] = np.random.normal(1, np.sqrt(sig2.branch_g.fvalue_y[i]) / eta.branch_g.fvalue_y[i])
    r = np.random.random()
    s = np.random.normal(0, np.sqrt(s2))
    # With probability p keep eta's branch order, otherwise swap f and g.
    if r < p:
        ksi = OFNumber(x, y) * eta + s
    else:
        ksi = OFNumber(x, y) * OFNumber(eta.branch_g, eta.branch_f) + s
    return ksi - c
def ofnormal_sample(n, mu, sig2, s2, p):
    """Draw ``n`` independent ofnormal variates and wrap them in an OFSeries."""
    draws = [ofnormal(mu, sig2, s2, p) for _ in range(n)]
    return OFSeries(draws)
def ofnormal_scalar(mu, sig, dim):
    """Draw a scalar N(mu, sig) value and lift it to a crisp OFNumber of size dim."""
    draw = np.random.normal(mu, sig)
    # both branches carry the same constant value, i.e. a crisp number
    return OFNumber(np.full(dim, draw), np.full(dim, draw))
def ofnormal_scalar_sample(n, mu, sig, dim):
    """Draw ``n`` scalar N(mu, sig) values and build the corresponding OFSeries."""
    draws = np.random.normal(loc=mu, scale=sig, size=n)
    return init_from_scalar_values(draws, dim)
|
986,495 | 5a44b66013bf4b9d361062c0d0e317a4f3bcb414 | import discord
from discord.ext.commands import Bot
from discord.ext import commands
import asyncio
import time
import random
# NOTE(review): two clients are created but only `client` (the command bot)
# is ever run; `Client` appears unused -- confirm before removing.
Client = discord.Client()
client = commands.Bot(command_prefix = "$")
# Discord user IDs allowed to use privileged commands such as $givememoney
viplist = ("246993761673019394", "361888183505649664")
# valid coin-flip guesses ("kop" = heads, "munt" = tails)
allowlist = ("kop","munt")
# True while a $Removecoins confirmation is pending
isresetting = False
# flat text ledgers read at startup: alternating userID / value lines
with open("coins.txt") as obj:
    coins = obj.read().splitlines()
with open("date.txt") as obj:
    date = obj.read().splitlines()
@client.event
async def on_ready():
    # fires once the gateway session is ready; advertise the $info command
    # in the bot's "playing" status
    print("Bot is online en heeft tot nu toe geen foutmeldingen")
    await client.change_presence(game=discord.Game(name="$info"))
@client.event
async def on_message(message):
    """Dispatch every incoming chat message to the coin-flip economy commands.

    Balances live in the module-level ``coins`` list (alternating
    userID / amount entries, persisted via update_coins()); last daily-claim
    dates live in the ``date`` list (persisted via update_date()).
    """
    global isresetting  # fix: assignments below otherwise created a dead local
    if message.content == "$Removecoins":
        isresetting = True
        await client.send_message(message.channel, "Zeker weten? Zeg `COINSREMOVE` om door te gaan! Of zeg `STOP` om te stoppen!")
        UserID = message.author.id
    if message.content == "COINSREMOVE":
        if isresetting:
            await client.send_message(message.channel, "ok")
            isresetting = False
    if message.content == "$info":
        userID = message.author.id
        await client.send_message(message.channel, "Hey <@%s>! Ik ben Coin Flipper! Ik ben een bot waarmee je geld kan verdienen (En verliezen!) door te tossen! Om alle commands te zien, gebruik ``$Commands!``\nCoin Flipper is gemaakt met de progammeertaal **Python 3.6 IDLE.**\nDe maker van de bot is **@Stanlin#8449** op Discord!" % (userID))
    if message.content == "$commands":
        await client.send_message(message.channel, "**LET OP: Coin Flipper is gevoelig voor caps lock!**\nDit zijn mijn commands:\n**$commands** *Deze command heb jij net gebruikt, om alle commands te zien!*\n**$coinflip <jouw keuze van kant> [aantal munten]** *Gebruik dit om te tossen! Kies tussen 'munt' en 'kop' en vul een aantal coins in waarmee je wil gokken! Voorbeeld: $coinflip kop 10*\n**$info** *Gebruik deze command om de informatie van de bot te zien!*\n **$daily** *Met deze command kan je je dagelijkse 250 coins claimen!*\n**$coins** *Deze command vertelt jou hoeveel coins jij in je portemonnee hebt zitten!* *")
    if message.content == "$coins":
        try:
            userID = message.author.id
            # ValueError from coins.index means the user has no ledger entry yet
            user_coins = coins[coins.index(userID)+1]
            await client.send_message(message.channel, "<@%s>, jij hebt op dit moment %s coins in je portemonnee!" % (userID, user_coins))
        except ValueError:
            await client.send_message(message.channel, "<@%s>, jij hebt op dit moment 0 coins in je portemonnee!" % (userID))
    if message.content.startswith("$coinflip"):
        userID = message.author.id
        args = message.content.split(" ")
        # fix: the original assigned the unbound method `args[1].upper` to an
        # unused variable; the real guess comparison happens below
        answer = random.choice(["Kop", "Munt", "Kop", "Munt"])
        money = int(args[2])
        user_coins = coins[coins.index(userID)+1]
        if int(user_coins) < int(money):
            await client.send_message(message.channel, "<@%s>, je hebt niet genoeg coins in je portemonnee om deze actie uit te voeren!" % (userID))
        else:
            if userID in coins:
                if args[1].upper() == answer.upper():
                    await client.send_message(message.channel, "<@%s>, je hebt gewonnen! %s was het antwoord! Je hebt %s coins gewonnen, die zijn nu toegevoegd aan jouw portemonnee! :money_mouth:" % (userID, answer, money))
                    user_coins = coins[coins.index(userID)+1]
                    newcoins = int(user_coins) + money
                    # replace the user's two ledger entries (id, amount)
                    start = coins.index(userID)
                    coins.pop(start)
                    coins.pop(start)
                    coins.append(userID)
                    coins.append(newcoins)
                    update_coins()
                else:  # fix: was `elif ... is not ...` (identity test on fresh strings)
                    await client.send_message(message.channel, "<@%s>, je hebt verloren! %s was het antwoord. Je hebt %s coins verloren! :slight_frown:" % (userID, answer, money))
                    user_coins = coins[coins.index(userID)+1]
                    newcoinsl = int(user_coins) - money
                    start = coins.index(userID)
                    coins.pop(start)
                    coins.pop(start)
                    coins.append(userID)
                    coins.append(int(newcoinsl))
                    update_coins()
            else:
                userID = message.author.id
                await client.send_message(message.channel, "<@%s>, jij hebt geen coins! Gebruik **$daily** om je dagelijkse coins te krijgen!" % (userID))
    if message.content.startswith("$givememoney"):
        userID = message.author.id
        if userID in viplist:
            args = message.content.split(" ")
            password = args[1]
            if password == "tryhard":
                await client.send_message(message.channel, "Jij gold digger... Je hebt zojuist 10000 coins ontvangen, omdat ik je speciaal vind!")
                if userID in coins:
                    user_coins = coins[coins.index(userID)+1]
                    newcoins = int(user_coins) + int(10000)
                    start = coins.index(userID)
                    coins.pop(start)
                    coins.pop(start)
                    coins.append(userID)
                    coins.append(int(newcoins))
                    update_coins()
                else:
                    coins.append(userID)
                    coins.append(int(10000))
                    update_coins()
            else:
                await client.send_message(message.channel, "verkeerd wachtwoord!")
        else:
            await client.send_message(message.channel,"<@%s>, mooi geprobeerd! Jij staat niet op mijn VIP lijst :wink:" % (userID))
    if message.content.startswith("$transfer"):
        args = message.content.split(" ")
        usernewID = int(args[2])
        userID = message.author.id
        moneytransfer = int(args[1])
        correctformat = str("@") + str(args[2])  # fix: original line had an unbalanced '('
        # fix: was `is not` on an int vs a string -- always true.  NOTE(review):
        # even with `!=` a bare id can never equal "@<id>", so this check always
        # rejects; the intended mention-format validation needs rethinking.
        if str(usernewID) != correctformat:
            await client.send_message(message.channel, "<@%s>, dat was geen persoon! Zeker weten dat je de ID goed had?" % (userID))
        else:
            userID = message.author.id
            if userID in coins:
                user_coins = coins[coins.index(userID)+1]
                # fix: compared the str balance against the *recipient id*
                # (args[2]); the amount being sent is args[1] / moneytransfer
                if int(user_coins) < moneytransfer:
                    await client.send_message(message.channel, "@<%s>, je hebt hier niet genoeg coins voor!" % (userID))
                else:
                    # NOTE(review): only the sender is debited; the recipient is
                    # never credited -- TODO complete the transfer
                    newcoinsl = int(user_coins) - moneytransfer
                    start = coins.index(userID)
                    coins.pop(start)
                    coins.pop(start)
                    coins.append(userID)
                    coins.append(int(newcoinsl))
                    update_coins()
            else:
                # fix: this call was truncated mid-line in the original
                await client.send_message(message.channel, "<@%s>, jij hebt nog geen portemonnee, dus je kan geen coins versturen!" % (userID))
    if message.content == "$daily":
        import datetime
        today = datetime.date.today()
        userID = message.author.id
        if userID in date:
            userdate = date[date.index(userID)+1]
            if str(userdate) == str(today):
                await client.send_message(message.channel, "<@%s>, je hebt je dagelijkse 250 coins voor vandaag algeclaimed! Kom morgen terug!" % (userID))
            else:  # fix: was `elif ... is not ...` (identity test on fresh strings)
                if userID in coins:
                    user_coins = coins[coins.index(userID)+1]
                    newcoins = int(user_coins) + int(250)
                    start = coins.index(userID)
                    coins.pop(start)
                    coins.pop(start)
                    coins.append(userID)
                    coins.append(int(newcoins))
                    update_coins()
                    await client.send_message(message.channel, "<@%s>, je hebt je dagelijkse 250 coins geclaimed!" % (userID))
                else:
                    coins.append(userID)
                    coins.append(int(250))
                    update_coins()
                    await client.send_message(message.channel, "<@%s>, je hebt je dagelijkse 250 coins geclaimed!" % (userID))
                # fix: the original popped `date` with the *coins* index (or an
                # undefined name in the no-coins branch); use the user's own
                # position in the date ledger
                dstart = date.index(userID)
                date.pop(dstart)
                date.pop(dstart)
                date.append(userID)
                date.append(str(today))
                update_date()
        else:
            if userID in coins:
                user_coins = coins[coins.index(userID)+1]
                newcoins = int(user_coins) + int(250)
                start = coins.index(userID)
                coins.pop(start)
                coins.pop(start)
                coins.append(userID)
                coins.append(int(newcoins))
                update_coins()
                await client.send_message(message.channel, "<@%s>, je hebt je dagelijkse 250 coins geclaimed!" % (userID))
                date.append(userID)
                date.append(str(today))
                update_date()
            else:
                coins.append(userID)
                coins.append(int(250))
                update_coins()
                await client.send_message(message.channel, "<@%s>, je hebt je dagelijkse 250 coins geclaimed!" % (userID))
                date.append(userID)
                date.append(str(today))
                update_date()
def update_coins():
    """Persist the in-memory coins ledger to coins.txt, one entry per line."""
    with open("coins.txt", "w") as ledger:
        ledger.writelines(str(entry) + "\n" for entry in coins)
def update_date():
    """Persist the in-memory daily-claim ledger to date.txt, one entry per line."""
    with open("date.txt", "w") as ledger:
        ledger.writelines(str(entry) + "\n" for entry in date)
# blocking call: starts the bot's event loop ("BOT_TOKEN" is a placeholder)
client.run("BOT_TOKEN")
|
986,496 | 6995f1f09e25013c0b627782400bef20a7b75786 | class Shape():
def __init__(self, cd):
self.cd = cd
def area(self):
return self.cd
class Square(Shape):
    """Square with side length ``cd``; its area is the side squared."""

    def __init__(self, cd):
        # fix: the original called Shape.__init__ explicitly and then
        # redundantly re-assigned self.cd; super() already stores it
        super().__init__(cd)

    def area(self):
        """Return cd squared."""
        return self.cd * self.cd
# demo: a square with side 3 has area 9
demo_square = Square(3)
print(demo_square.area())
|
986,497 | 438720c1705ac7abd457242d6001b58822da17ec | from django.urls import path
from . import views
# URL routes for the employee app; names are used for reverse() lookups
urlpatterns = [
    path('' , views.employee_list, name='list'),  # list all employees
    path('form/', views.employee_form, name='form'),  # blank create form
    path('<int:id>/', views.employee_form, name='edit'),  # same view, prefilled for editing
    path('delete/<int:id>/', views.employee_delete, name='delete'),  # remove by primary key
] |
986,498 | dd24b68e8e4126a74ec12984eb4a10fc67d59ab7 | #!/Users/mnicolls/Documents/Work/Madrigal/bin/python
import sys, os, traceback
import cgi
import time
class madCalculatorService:
"""madCalculatorService is the class that allows remote access to the "madCalculator.py":../scripts/madCalculator.py.html script.
Like all my python cgi scripts, madCalculatorService has the following structure: the entire cgi is
contained in one class, with a main function at the end which serves simply to call the __init__
function of the class. This __init__ function is responsible for calling all other class methods.
It is made up of a single try block, with the purpose of reporting all exceptions in well-formatted
text to both the user and the administrator. The __init__ function first makes sure the pythonlib
can be found. It then calls setScriptState to validate the the cgi arguments, which are simply the
arguments for the isprint command.
If any uncaught exception is thrown, its caught by the __init__ try block. If its an MadrigalError,
additional information is available. The catch blocks attempt to display the error message on the screen
by backing out of of large number of possible tags, which might prevent its display (in any case, the error
message will always be available in the page source. The formatted error message is also sent to the email
address given in the siteTab.txt metadata file.
This script is not meant to be used directly by a user, and thus is named Service. It is meant to be used by
scripting languages such as Matlab that want to call madCalculator via the web
Input cgi arguments:
1. year - int (required)
2. month - int (required)
3. day - int (required)
4. hour - int (required)
5. min - int (required)
6. sec - int (required)
7. startLat - Starting geodetic latitude, -90 to 90 (required)
8. endLat - Ending geodetic latitude, -90 to 90 (required)
9. stepLat - Latitude step (0.1 to 90) (required)
10. startLong - Starting geodetic longitude, -180 to 180 (required)
11. endLong - Ending geodetic longitude, -180 to 180 (required)
12. stepLong - Longitude step (0.1 to 180) (required)
13. startAlt - Starting geodetic altitude, >= 0 (required)
14. endAlt - Ending geodetic altitude, > 0 (required)
15. stepAlt - Altitude step (>= 0.1) (required)
16. parms - comma delimited string of Madrigal parameters desired (required)
17. oneD - string in form <parm>,<value> This argument allows the user to
set any number of one-D parameters to be used in the calculation.
Value must be parameter name, comma, value as double.
Example: &oneD=kinst,31.0&oneD=elm,45.0
(optional - 0 or more allowed)
Returns comma-delimited data, one line for each combination of lat, long, and alt,
with the following fields:
1. latitude
2. longitude
3. altitude
4. Values for each Madrigal parameter listed in argument parms, separated by whitespace
Calls script "madCalculator.py":../scripts/madCalculator.py.html.
If error, returns error description
Change history:
Written by "Bill Rideout":mailto:wrideout@haystack.mit.edu Feb. 6, 2004
$Id: madCalculatorService.py,v 1.11 2008/10/03 19:34:55 brideout Exp $
"""
# constants
__scriptName = 'madCalculatorService'
def __init__(self):
"""__init__ run the entire madCalculatorService script. All other functions are private and called by __init__.
Inputs: None
Returns: void
Affects: Ouputs getInstrument data as a service.
Exceptions: None.
"""
# catch any exception, and write an appropriate message to user and to admin
try:
# check if pythonlibpath env variable exists
# written 'PYTHON' + 'LIBPATH' to stop automatic replacement during setup
temp = os.environ.get('PYTHON' + 'LIBPATH')
if temp != None:
sys.path.append(temp)
# append path madroot/lib (needed only if python not installed by setup)
sys.path.append('/Users/mnicolls/Documents/Work/Madrigal/lib/python')
# prepare to handle MadrigalError
import madrigal.admin
except ImportError:
# Fatal error - madpy library not found
print "Content-Type: text/html"
print
print "Unable to import the madrigal python library - please alert the sys admin!"
sys.exit(0)
try:
# set flag as to whether script headers have been written
self.scriptHeaders = 0
self.setScriptState()
# create needed Madrigal objects
self.createObjects()
# output madCalculator
self.madCalculator()
except madrigal.admin.MadrigalError, e:
# handle a MadrigalError
errStr = 'Error occurred in script ' + self.__scriptName + '.'
err = traceback.format_exception(sys.exc_info()[0],
sys.exc_info()[1],
sys.exc_info()[2])
for errItem in err:
errStr = errStr + '\n' + str(errItem)
# add info about called form:
if self.madForm != None:
errStr = errStr + 'Form elements\n'
for key in self.madForm.keys():
errStr = errStr + '\n' + str(key)
errStr = errStr + ' = ' + str(self.madForm.getvalue(key))
if self.scriptHeaders == 0: # not yet printed
print "Content-Type: text/plain"
print
print errStr
self.admin = madrigal.admin.MadrigalNotify()
self.admin.sendAlert('\n' + errStr,
'Error running ' + self.__scriptName)
print 'Your system administrator has been notified.'
except SystemExit:
sys.exit(0)
except:
# handle a normal error
errStr = 'Error occurred in script ' + self.__scriptName + '.'
err = traceback.format_exception(sys.exc_info()[0],
sys.exc_info()[1],
sys.exc_info()[2])
for errItem in err:
errStr = errStr + '\n' + str(errItem)
# add info about called form:
if self.madForm != None:
errStr = errStr + 'Form elements\n'
for key in self.madForm.keys():
errStr = errStr + '\n' + str(key)
errStr = errStr + ' = ' + str(self.madForm.getvalue(key))
if self.scriptHeaders == 0: # not yet printed
print "Content-Type: text/plain"
print
print errStr
self.admin = madrigal.admin.MadrigalNotify()
self.admin.sendAlert('\n' + errStr,
'Error running ' + self.__scriptName)
print 'Your system administrator has been notified.'
# end __init__
def setScriptState(self):
#create a form object
self.madForm = cgi.FieldStorage()
print "Content-Type: text/plain\n"
sys.stdout.flush()
if not self.madForm.has_key('year'):
print 'This cgi script was called without the proper arguments.\n'
sys.exit(0)
else:
# get start time
self.year = self.madForm.getvalue('year')
self.month = self.madForm.getvalue('month')
self.day = self.madForm.getvalue('day')
self.hour = self.madForm.getvalue('hour')
self.min = self.madForm.getvalue('min')
self.sec = self.madForm.getvalue('sec')
# get geometric limits
self.startLat = self.madForm.getvalue('startLat')
self.endLat = self.madForm.getvalue('endLat')
self.stepLat = self.madForm.getvalue('stepLat')
self.startLong = self.madForm.getvalue('startLong')
self.endLong = self.madForm.getvalue('endLong')
self.stepLong = self.madForm.getvalue('stepLong')
self.startAlt = self.madForm.getvalue('startAlt')
self.endAlt = self.madForm.getvalue('endAlt')
self.stepAlt = self.madForm.getvalue('stepAlt')
# get parms
self.parms = self.madForm.getvalue('parms')
# get oneD list
self.oneDList = self.madForm.getlist('oneD')
def createObjects(self):
# all states require a MadrigalDB object
import madrigal.metadata
self.madDBObj = madrigal.metadata.MadrigalDB()
def madCalculator(self):
# create cmd
cmd = self.madDBObj.getMadroot() + '/bin/madCalculator.py '
# append time
dateStr = '%i/%i/%i' % (int(self.month), int(self.day), int(self.year))
timeStr = '%i:%i:%i' % (int(self.hour), int(self.min), int(self.sec))
cmd += ' --date=%s ' % (dateStr)
cmd += ' --time=%s ' % (timeStr)
# append geometric data
cmd += ' --startLat=%f ' % (float(self.startLat))
cmd += ' --endLat=%f ' % (float(self.endLat))
cmd += ' --stepLat=%f ' % (float(self.stepLat))
cmd += ' --startLong=%f ' % (float(self.startLong))
cmd += ' --endLong=%f ' % (float(self.endLong))
cmd += ' --stepLong=%f ' % (float(self.stepLong))
cmd += ' --startAlt=%f ' % (float(self.startAlt))
cmd += ' --endAlt=%f ' % (float(self.endAlt))
cmd += ' --stepAlt=%f ' % (float(self.stepAlt))
# append parms
cmd += ' --parms=%s ' % (self.parms)
# append oneD
for item in self.oneDList:
cmd += ' --oneD=%s ' % (item)
os.system(cmd)
if __name__ == '__main__':
    # Entry point for the madCalculatorService CGI script
    # (the original comment said "madLogin" -- a copy/paste leftover)
    # This script only calls the init function of the class madCalculatorService
    # All work is done by the init function
    madCalculatorService()
|
986,499 | 0d021bc30afd46fba140ee16f964eeab73cd16ba | #!/usr/bin/python
# coding: utf-8
from __future__ import print_function
from __future__ import division
from math import sqrt
from itertools import count, islice
import random
import math
import sys
#nr_of_tasks = int(raw_input())
# NOTE(review): bins/bins2 appear unused in this script -- confirm before removing
bins = []
bins2 = []
# candidate value ranges: 16-bit (2^14..2^15-1) and 32-bit (2^30..2^31-1)
range_small = (16384,32767)
range_large = (1073741824,2147483647)
# a 32-character binary string: '1' followed by 31 zeros (apparently unused)
b = "10000000000000000000000000000000"
def check_if_prime(val):
    """Deterministic trial-division primality test.

    Returns True iff ``val`` is prime.  Runs in O(sqrt(val)) divisions.
    Fixes the original, which reported 1 as prime (the while loop never ran)
    and raised on negative input via math.sqrt.
    """
    if val < 2:
        # 0, 1 and negatives are not prime
        return False
    if val == 2:
        return True
    if val % 2 == 0:
        return False
    # test only odd candidate divisors up to sqrt(val)
    i = 3
    sqrt_val = math.sqrt(val)
    while i <= sqrt_val:
        if val % i == 0:
            return False
        i = i+2
    return True
def p_test(val):
    """Probabilistic (Fermat-style) primality check using three random rounds.

    Returns False for val <= 1; otherwise returns True when every random base
    satisfies base**(val-1) == 1 (mod val).  False positives are possible.
    """
    if val <= 1:
        return False
    for _ in range(3):
        base = random.randint(2, val) - 1
        if pow(base, val - 1, val) != 1:
            return False
    return True
def check_value(val):
    """Return True iff the digit string ``val`` is (probably) composite when
    interpreted in every base from 2 through 10, i.e. p_test rejects all of them."""
    return not any(p_test(int(val, base)) for base in range(2, 11))
jam_coins = []  # candidate "jamcoins": binary strings composite in every base 2..10
def get_divisior_list(val):
    """Find one nontrivial divisor of ``val`` interpreted in each base 2..10.

    For each base, the smallest divisor below 500000 is collected.  Returns
    the list of 9 divisors (one per base) on success, or False if any base's
    value had no divisor below the bound.

    Fix: the original assigned an unused local ``count = 2`` per base, which
    also shadowed the ``count`` imported from itertools at module level.
    """
    output = []
    for base in range(2, 10 + 1):
        value = int(val, base)
        for candidate in range(2, 500000):
            if value % candidate == 0:
                output.append(candidate)
                break
    # only a complete set of witnesses (one per base) counts as success
    if len(output) == 9:
        return output
    return False
counter = 0
# scan the 32-bit range for jamcoins: each candidate string is i's binary
# digits with a '1' appended, so every coin starts and ends with '1';
# stop after collecting 5000 candidates (Python 2: xrange)
for i in xrange(range_large[0],range_large[1]):
    bin_val = "".join([bin(i)[2:], "1"])
    if check_value(bin_val):
        counter = counter + 1
        jam_coins.append(bin_val)
    if len(jam_coins) == 5000:
        break
print("Case #{task}:".format(task=1))
lc = 0
# print up to 500 coins together with one divisor witness per base 2..10
for cj in jam_coins:
    list = get_divisior_list(cj)  # NOTE(review): shadows the builtin `list`
    if not list == False:
        lc = lc + 1
        print("{} {}".format(cj, " ".join([str(item) for item in list])))
    if lc == 500:
        break
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.