id stringlengths 1 8 | text stringlengths 6 1.05M | dataset_id stringclasses 1
value |
|---|---|---|
5104142 | from django.urls import path
from . import views
# URL configuration for the billing app.
# app_name namespaces these routes so callers reverse them as
# "billing:<name>", e.g. reverse("billing:checkout_success").
app_name = "billing"
urlpatterns = [
    # Create a checkout session for the object identified by slug + pk
    # (presumably a Stripe Checkout session, given the webhook below — confirm).
    path(
        "create-checkout-session/<slug:slug>/<int:pk>/",
        views.CreateCheckoutSessionView.as_view(),
        name="create_checkout_session",
    ),
    # Page shown after a successful checkout (likely the checkout redirect
    # target configured when the session is created).
    path(
        "checkout-success/",
        views.CheckoutSuccessView.as_view(),
        name="checkout_success",
    ),
    # Create a billing-portal session for the current customer.
    path(
        "create-portal-session/",
        views.CreatePortalView.as_view(),
        name="create_portal_session",
    ),
    # Stripe webhook receiver (function-based view, no URL parameters).
    path("stripe/webhook/", views.stripe_webhook_view, name="stripe_webhook"),
]
| StarcoderdataPython |
1618735 | import numpy as np
# Split a GMT multi-segment plate-outline file into one coordinate file per
# plate, named after the plate and headed by the plate name.
data = np.loadtxt('GSRM_plate_outlines.gmt', dtype=str)
# Swap the two columns (reverses column order of the 2-column table).
data = np.flip(data, 1)
# Locate the starting position of each plate: GMT segment headers carry a '>'
# marker, which after the flip sits in column 1 with the plate name in column 0.
bnds_index, = np.where(data[:, 1] == '>')
n = len(bnds_index)
# Separate the boundaries of each plate and write each to its own file.
for i in range(n):
    vi = bnds_index[i]   # row of this plate's '>' header line
    j1 = vi + 1          # first coordinate row of this plate
    # Rows up to (but not including) the next header belong to this plate;
    # the last plate extends to the end of the array.  (Bug fix: the original
    # used data[j1:-1] for the last plate, silently dropping its final row.)
    j2 = bnds_index[i + 1] if i < n - 1 else len(data)
    np.savetxt(data[vi][0], data[j1:j2], fmt='%10s %9s', header=data[vi][0], comments='')
1921502 | # SPDX-License-Identifier: Apache-2.0
# Copyright 2016 Eotvos Lorand University, Budapest, Hungary
from compiler_log_warnings_errors import addError, addWarning
from utils.codegen import format_expr, format_type, format_statement, format_declaration, to_c_bool
from compiler_common import statement_buffer_value, generate_var_name, get_hdr_name, unique_everseen
import functools
#[ #include "dpdk_lib.h"
#[ #include "util_packet.h"
#[ #include "gen_include.h"
#[ #include "hdr_fld.h"
#[ #include "hdr_fld_sprintf.h"
# Emit header_instance_names[]: maps each header instance index to its name
# (one C string literal per header instance known to the HLIR).
#{ const char* header_instance_names[HEADER_COUNT] = {
for hdr in hlir.header_instances:
    #[ "${hdr.name}",
#} };
#[
# Emit field_names[]: the short name of every field, grouped per header
# instance; entry order lines up with the FLD(...) field enumeration.
#{ const char* field_names[FIELD_COUNT] = {
for hdr in hlir.header_instances:
    #[ // ${hdr.name}
    for fld in hdr.urtype.fields:
        #[ "${fld.short_name}", // ${hdr.name}.${fld.name}
#} };
#[
# Emit hdr_infos[]: one hdr_info_t per header instance with its byte width,
# cumulative byte offset, metadata flag, variable-width field handle and
# first/last field handles.
#{ const hdr_info_t hdr_infos[HEADER_COUNT] = {
byte_offsets = ["0"]   # C sub-expressions summed to form each header's offset
fldidx = 0
for idx, hdr in enumerate(hlir.header_instances):
    hdrt = hdr.urtype
    flds = hdrt.fields
    size = hdrt.size if 'size' in hdrt else 0
    byte_width = hdrt('byte_width', 0)
    # Collect variable-width fields; a sentinel is emitted if there is none.
    vw = list(hdr.urtype.fields.filter(lambda fld: fld('is_vw', False)))
    vwfld_name = f'FLD({hdr.name},{vw[0].name})' if vw else 'NO_VW_FIELD_PRESENT'
    #[ // header ${hdr.name}
    #{ {
    #[ .idx = ${idx},
    #[ .name = "${hdr.name}",
    #[ .byte_width = ${byte_width}, // ${size} bits, ${size/8.0} bytes
    #[ .byte_offset = ${"+".join(byte_offsets)},
    #[ .is_metadata = ${to_c_bool('is_metadata' in hdrt and hdrt.is_metadata)},
    #[ .var_width_field = ${vwfld_name},
    if len(flds) == 0:
        #[ // TODO set .first_fld so that it cannot cause any problems
        #[ // TODO set .last_fld so that it cannot cause any problems
    else:
        #[ .first_fld = FLD(${hdr.name},${flds[0].name}),
        #[ .last_fld = FLD(${hdr.name},${flds[-1].name}),
    #} },
    #[
    # Append this header's width so later headers' offsets accumulate.
    byte_offsets += [f'{byte_width}']
    fldidx += len(flds)   # running field count (not consumed by this table)
if len(hlir.header_instances) == 0:
    #[ {}, // dummy
#} };
# Emit fld_infos[]: per-field metadata (owning header instance, bit size,
# byte width, bit/byte offsets, variable-width flag and a 32-bit load mask).
#{ const fld_info_t fld_infos[FIELD_COUNT] = {
hdr_startidxs = {}   # header name -> index of its first field in the FLD enum
fldidx = 0
for hdr in hlir.header_instances:
    for fld in hdr.urtype.fields:
        fldidx += 1
        if hdr.name not in hdr_startidxs:
            # NOTE(review): fldidx is incremented before being recorded, so
            # the stored start index is 1-based; confirm this matches how
            # stk_infos[].start_fld is consumed.
            hdr_startidxs[hdr.name] = fldidx
        # All metadata fields are attributed to the all_metadatas instance.
        is_meta = hdr.urtype.is_metadata
        #[ // field ${hdr.name}.${fld.name}
        #{ {
        #[ .header_instance = HDR(${'all_metadatas' if is_meta else hdr.name}),
        #[ .size = ${fld.urtype.size},
        #[ .byte_width = to_bytes(${fld.urtype.size}),
        #[ .bit_offset = ${fld.offset} % 8,
        #[ .byte_offset = ${fld.offset} / 8,
        #[ .is_vw = ${to_c_bool(fld.is_vw)},
        # A 32-bit load mask is only emitted for fields of at most 32 bits.
        if fld.urtype.size <= 32:
            not0 = 0xffffffff
            if is_meta:
                # Metadata: mask selects the low `size` bits.
                mask = not0 >> (32 - fld.urtype.size)
                mask_txt = f'0x{mask:08x}'
            else:
                # Packet field: the top `size` bits of a 32-bit word, shifted
                # down by the field's bit offset within its starting byte.
                shift_up = (32 - fld.urtype.size) % 32
                top_bits = (not0 << shift_up) & not0
                mask = top_bits >> (fld.offset % 8)
                mask_txt = f'0x{mask:08x}'
            # Human-readable binary rendering, grouped into four octets.
            binary_txt = '_'.join(f'{mask:032b}'[i:i+8] for i in range(0, 32, 8))
            #[ .mask = ${mask_txt}, // ${fld.urtype.size}b at offset ${fld.offset//8}B+${fld.offset%8}b: 0b${binary_txt}
        else:
            #[ // .mask ignored: ${fld.urtype.size}b field is restricted to be byte aligned (over 32b)
        #} },
        #[
#} };
# Emit stk_infos[]: per-header-stack info (declared depth, field count of one
# element, and the header/field handles of the stack's first element).
#{ const stk_info_t stk_infos[STACK_COUNT] = {
for stk in hlir.header_stacks:
    stk0 = f'{stk.name}_0'   # instance name of the stack's first element
    #[ // stack ${stk.name}
    #{ {
    #[ .size = ${stk.urtype.stk_size.value},
    #[ .fld_count = ${len(stk.type.elementType.urtype.fields)},
    #[ .start_hdr = HDR(${stk0}),
    #[ .start_fld = ${hdr_startidxs[stk0]},
    #} },
    #[
#} };
#[
# Emit sprintf_hdr(): debug-only dispatcher that selects the per-header
# pretty-printer by comparing header names at run time.
#{ const char* sprintf_hdr(char* out, packet_descriptor_t* pd, header_descriptor_t* hdr) {
#{ #ifdef T4P4S_DEBUG
#[ const char* name = hdr_infos[hdr->type].name;
for hdr in unique_everseen(hlir.header_instances):
    #[ if (!strcmp("${hdr.name}", name)) return sprintf_hdr_${hdr.name}(out, pd, hdr);
#} #endif
#[ return NULL; // should never happen
#} }
#[
# Emit one sprintf_hdr_<name>() per header instance: formats every field of
# that header into `out` for debug logging (debug builds only).
for hdr in unique_everseen(hlir.header_instances):
    # Limit on how many bytes of a long field are printed.
    field_size_print_limit = 12
    #{ const char* sprintf_hdr_${hdr.name}(char* out, packet_descriptor_t* pd, header_descriptor_t* hdr) {
    #{ #ifdef T4P4S_DEBUG
    #[ char* ptr = out;
    #[ int idx = 0;
    #[ uint8_t* hdrptr = pd->headers[HDR(${hdr.name})].pointer;
    for fld in hdr.urtype.fields:
        name = fld.name
        # Pre-rendered size label (currently unused below).
        size_fmt = f'T4LIT({fld.size//8}) "B"' if fld.size%8 == 0 else f'T4LIT({fld.size}) "b"'
        # Small fixed-width fields are printed as a numeric value.
        if fld.size <= 32 and not fld.is_vw:
            is_aligned = fld.size%8 == 0 and fld.offset%8 == 0
            #[ uint32_t val_$name = GET32(src_pkt(pd), FLD(${hdr.name},$name));
            # NOTE(review): generated C tests `val > 1 << size` (i.e. strictly
            # greater than 2^size); `>=` would also flag exactly 2^size — confirm
            # the intended "too large" semantics.
            #[ bool ${name}_is_too_large = ${fld.size} < 32 && val_$name > 1 << ${fld.size};
            #[ const char* ${name}_is_too_large_txt = ${name}_is_too_large ? T4LIT(!too large!,error) : "";
            # Values above 9 are also shown in hexadecimal.
            #{ if (val_$name > 9) {
            #[ idx += sprintf(ptr + idx, "." T4LIT(${fld.short_name},field) "/" T4LIT(%d) "%s=%s" T4LIT(%d) "=0x" T4LIT(%0${fld.size//4}x,bytes) " ",
            #[ ${fld.size // (8 if is_aligned else 1)}, ${to_c_bool(is_aligned)} ? "B" : "b",
            #[ ${name}_is_too_large_txt, val_$name, val_$name);
            #[ } else {
            #[ idx += sprintf(ptr + idx, "." T4LIT(${fld.short_name},field) "/" T4LIT(%d) "%s=%s" T4LIT(%d) " ",
            #[ ${fld.size // (8 if is_aligned else 1)}, ${to_c_bool(is_aligned)} ? "B" : "b",
            #[ ${name}_is_too_large_txt, val_$name);
            #} }
        else:
            # Wide or variable-width fields are dumped as (length-limited) bytes.
            #[ field_instance_e fld_$name = FLD(${hdr.name},${fld.name});
            #[ int size_$name = fld_infos[fld_$name].is_vw ? pd->headers[hdr->type].vw_size : ${fld.size};
            #[ idx += sprintf(ptr + idx, "." T4LIT(${fld.short_name},field) "/%s" T4LIT(%d) "%s=" T4COLOR(T4LIGHT_bytes),
            #[ ${to_c_bool(fld.is_vw)} ? "vw" : "", size_$name / (size_$name % 8 == 0 ? 8 : 1), size_$name % 8 == 0 ? "B" : "b");
            #[ idx += dbg_sprint_bytes_limit(ptr + idx, hdrptr + fld_infos[FLD(${hdr.name},$name)].byte_offset, size_$name/8, ${field_size_print_limit}, "_");
            #[ idx += sprintf(ptr + idx, T4COLOR(T4LIGHT_off) " ");
    #} #endif
    #[ return out;
    #} }
    #[
# Emit get_fld_vw_size(): looks up the header instance owning a field and
# returns that header's current variable-width size from the packet descriptor.
#{ int get_fld_vw_size(field_instance_e fld, packet_descriptor_t* pd) {
#[ header_instance_e hdr = fld_infos[fld].header_instance;
#[ return pd->headers[hdr].vw_size;
#} }
#[
| StarcoderdataPython |
94527 | <filename>iron/pythonlib/SimulationBasics.py
import random, re, sys
import SequenceBasics
import TranscriptomeBasics
from SerializeBasics import encode_64, decode_64
class RandomBiallelicTranscriptomeEmitter:
  """Emit transcripts/reads from one of two allelic transcriptomes.

  A transcript name is always drawn from transcriptome1; the per-transcript
  probability transcriptome1_rho[name] then decides whether the emitted
  sequence comes from transcriptome1 (allele 1) or transcriptome2 (allele 2).
  """
  def __init__(self,transcriptome1=None,transcriptome2=None):
    self.transcriptome1 = transcriptome1
    self.transcriptome2 = transcriptome2
    self.gene_names = None #convert a transcript name to a gene name
    self.name2locus = None #convert a transcript name to a locus number
    self.emissions_report = {}       # name -> [allele1_count, allele2_count]
    self.transcriptome1_rho = {}     # name -> P(emit from transcriptome1)
    # Gaussian fragmentation parameters (mu/sigma/minimum), used for
    # paired-end and long reads; None disables fragmentation.
    self.gaussian_fragmentation = None
    # initialize rho to 0.5 (unbiased) for every known transcript
    if transcriptome1:
      for n in transcriptome1.transcripts:
        self.transcriptome1_rho[n] = 0.5
  def read_serialized(self,instring):
    """Restore state from a string produced by get_serialized()."""
    a = decode_64(instring)
    if a['transcriptome1']:
      self.transcriptome1 = TranscriptomeBasics.Transcriptome()
      self.transcriptome1.read_serialized(a['transcriptome1'])
    else:
      self.transcriptome1 = a['transcriptome1']
    if a['transcriptome2']:
      self.transcriptome2 = TranscriptomeBasics.Transcriptome()
      self.transcriptome2.read_serialized(a['transcriptome2'])
    else:
      self.transcriptome2 = a['transcriptome2']
    self.emissions_report = a['emissions_report']
    self.transcriptome1_rho = a['transcriptome1_rho']
    self.gaussian_fragmentation = a['gaussian_fragmentation']
    self.gene_names = a['gene_names']
    self.name2locus = a['name2locus']
  def get_serialized(self):
    """Serialize the full emitter state (transcriptomes included)."""
    a = {}
    if self.transcriptome1:
      a['transcriptome1'] = self.transcriptome1.get_serialized()
    else:
      a['transcriptome1'] = self.transcriptome1
    if self.transcriptome2:
      a['transcriptome2'] = self.transcriptome2.get_serialized()
    else:
      a['transcriptome2'] = self.transcriptome2
    a['emissions_report'] = self.emissions_report
    a['transcriptome1_rho'] = self.transcriptome1_rho
    a['gaussian_fragmentation'] = self.gaussian_fragmentation
    a['gene_names'] = self.gene_names
    a['name2locus'] = self.name2locus
    return encode_64(a)
  def set_no_fragmentation(self):
    """Disable fragmentation; emitters return whole (flipped) sequences."""
    self.gaussian_fragmentation = None
  def set_gaussian_fragmentation(self,mu,sigma,minimum):
    """Fragment lengths are drawn as max(minimum, int(N(mu, sigma)))."""
    self.gaussian_fragmentation = {'mu': mu, 'sigma': sigma, 'minimum': minimum}
  def set_gaussian_fragmentation_default_hiseq(self):
    """Short-insert (HiSeq-like) fragment length defaults."""
    self.set_gaussian_fragmentation(290,290,150)
  def set_gaussian_fragmentation_default_pacbio(self):
    """Long-read (PacBio-like) fragment length defaults."""
    self.set_gaussian_fragmentation(4000,2000,500)
  def set_transcriptome1_rho(self,rho_dict):
    """Replace the per-transcript allele-1 probabilities wholesale."""
    self.transcriptome1_rho = rho_dict
  def _fragment_length(self):
    """Draw one fragment length from the configured Gaussian (helper)."""
    g = self.gaussian_fragmentation
    return max(g['minimum'], int(random.gauss(g['mu'], g['sigma'])))
  def emit(self):
    """Return [name, sequence], choosing the allele by transcriptome1_rho.

    If expression has been set for the transcriptome, get_random() already
    samples according to it.  Tallies the chosen allele in emissions_report.
    """
    name = self.transcriptome1.get_random()
    if name not in self.emissions_report:
      self.emissions_report[name] = [0,0]
    rnum = random.random()
    if rnum < self.transcriptome1_rho[name]:
      self.emissions_report[name][0] += 1
      return [name, self.transcriptome1.transcripts[name]]
    self.emissions_report[name][1] += 1
    return [name, self.transcriptome2.transcripts[name]]
  def emit_paired_short_read(self,read_length):
    """Return [name, left_read, right_read] simulating a paired-end read.

    The emitted sequence is randomly flipped, optionally fragmented, and read
    from both ends; short ends are padded with 'N' up to read_length.
    """
    [name,seq] = self.emit()
    flipped_seq = random_flip(seq)
    frag_seq = flipped_seq
    if self.gaussian_fragmentation:
      frag_len = self._fragment_length()
      if frag_len == 0:
        return [name, 'N'*read_length, 'N'*read_length]
      frag_seq = random_fragment(flipped_seq,frag_len)
    l1 = frag_seq[0:read_length]
    if len(l1) < read_length:
      l1 = l1 + 'N'*(read_length-len(l1))
    rc_frag_seq = SequenceBasics.rc(frag_seq)
    r1 = rc_frag_seq[0:read_length]
    if len(r1) < read_length:
      r1 = r1 + 'N'*(read_length-len(r1))
    return [name,l1,r1]
  def emit_long_read(self):
    """Return [name, sequence] as a randomly flipped, optionally fragmented read."""
    [name, seq] = self.emit()
    flipped_seq = random_flip(seq)
    if not self.gaussian_fragmentation:
      return [name,flipped_seq]
    return [name, random_fragment(flipped_seq, self._fragment_length())]
  def emit_short_read(self,read_length):
    """Return [name, read]: a read_length fragment of the rho-selected allele.

    Bug fix: the original body called self.em.emit_short_read(), but self.em
    was never created (its assignment in __init__ was commented out), so this
    method always raised AttributeError.  Routing through self.emit() performs
    the same rho-based allele choice and also keeps emissions_report
    consistent with the other emitters.
    """
    [name, seq] = self.emit()
    frag = random_fragment(seq, read_length)
    return [name, random_flip(frag)]
def random_flip(seq):
  """Return seq unchanged or reverse-complemented, each with probability 0.5."""
  keep_forward = random.random() < 0.5
  return seq if keep_forward else SequenceBasics.rc(seq)
class RandomBiallelicGenomeEmitter:
  """Build two diploid genome copies from a reference FASTA and a VCF.

  Each VCF variant is randomly phased onto one of the two copies; the phase
  table records which allele went to which copy.
  """
  def __init__(self,genomefasta,vcffile):
    # Variants indexed as chrom -> 1-based position -> {'ref','alt'}.
    self.var_by_chr = {}
    with open(vcffile) as handle:
      for raw in handle:
        raw = raw.rstrip()
        if re.match('^#',raw): continue  # skip VCF header lines
        fields = raw.split("\t")
        chrom, pos = fields[0], int(fields[1])
        self.var_by_chr.setdefault(chrom, {})[pos] = {
          'ref': fields[3],
          'alt': fields[4],
        }
    self.ref_genome = SequenceBasics.read_fasta_into_hash(genomefasta)
  def emit_genomes(self):
    """Randomly phase variants and return [genome1_fasta, genome2_fasta, phase].

    phase is a tab-separated table of chrom, position, and the allele placed
    on each genome copy.
    """
    phase_lines = ["#Chromosome Genome1Allele Genome2Allele\n"]
    genome1_parts = []
    genome2_parts = []
    for chrom in self.var_by_chr:
      if chrom not in self.ref_genome: continue
      # Work on mutable copies of the (upper-cased) reference chromosome.
      seq_a = list(self.ref_genome[chrom][:].upper())
      seq_b = list(self.ref_genome[chrom][:].upper())
      for pos in self.var_by_chr[chrom]:
        entry = self.var_by_chr[chrom][pos]
        # Coin flip decides which copy receives the reference allele.
        if random.random() < 0.5:
          first, second = entry['ref'], entry['alt']
        else:
          first, second = entry['alt'], entry['ref']
        seq_a[pos-1] = first
        seq_b[pos-1] = second
        phase_lines.append(chrom + "\t" + str(pos) + "\t" + first + "\t" + second + "\n")
      genome1_parts.append(">" + chrom + "\n" + ''.join(seq_a) + "\n")
      genome2_parts.append(">" + chrom + "\n" + ''.join(seq_b) + "\n")
    return [''.join(genome1_parts), ''.join(genome2_parts), ''.join(phase_lines)]
class RandomTranscriptomeEmitter:
  """Emit whole transcripts or simulated reads from a single transcriptome."""
  def __init__(self,in_transcriptome):
    self.transcriptome = in_transcriptome
    self.gaussian_fragmentation = None  # dict with mu/sigma/minimum, or None
    self.emissions_report = {}          # transcript name -> emission count
  def emit(self):
    """Pick a transcript (weighted by any configured expression) -> [name, seq]."""
    name = self.transcriptome.get_random()
    self.emissions_report[name] = self.emissions_report.get(name, 0) + 1
    return [name, self.transcriptome.transcripts[name]]
  def set_no_fragmentation(self):
    """Disable fragmentation; emitters return whole (flipped) sequences."""
    self.gaussian_fragmentation = None
  def set_gaussian_fragmentation(self,mu,sigma,minimum):
    """Fragment lengths are drawn as max(minimum, int(N(mu, sigma)))."""
    self.gaussian_fragmentation = {'mu': mu, 'sigma': sigma, 'minimum': minimum}
  def set_gaussian_fragmentation_default_hiseq(self):
    """Short-insert (HiSeq-like) fragment length defaults."""
    self.set_gaussian_fragmentation(290,290,150)
  def set_gaussian_fragmentation_default_pacbio(self):
    """Long-read (PacBio-like) fragment length defaults."""
    self.set_gaussian_fragmentation(4000,2000,500)
  def _fragment_length(self):
    """Draw one fragment length from the configured Gaussian (helper)."""
    params = self.gaussian_fragmentation
    return max(params['minimum'], int(random.gauss(params['mu'], params['sigma'])))
  def emit_long_read(self):
    """Return [name, seq]: a randomly flipped, optionally fragmented transcript."""
    name, seq = self.emit()
    oriented = random_flip(seq)
    if not self.gaussian_fragmentation:
      return [name, oriented]
    return [name, random_fragment(oriented, self._fragment_length())]
  def emit_short_read(self,read_length):
    """Return [name, read]: a read_length fragment in random orientation."""
    name, _seq = self.emit()
    fragment = random_fragment(self.transcriptome.transcripts[name], read_length)
    return [name, random_flip(fragment)]
  def emit_paired_short_read(self,read_length):
    """Return [name, left_read, right_read] simulating a paired-end read.

    The transcript is randomly flipped, optionally fragmented, and read from
    both ends; short ends are padded with 'N' up to read_length.
    """
    name, seq = self.emit()
    fragment = random_flip(seq)
    if self.gaussian_fragmentation:
      frag_len = self._fragment_length()
      if frag_len == 0:
        return [name, 'N'*read_length, 'N'*read_length]
      fragment = random_fragment(fragment, frag_len)
    left = fragment[0:read_length].ljust(read_length, 'N')
    right = SequenceBasics.rc(fragment)[0:read_length].ljust(read_length, 'N')
    return [name, left, right]
def random_fragment(seq,frag_length):
  """Return a uniformly-placed substring of seq with length frag_length.

  If frag_length exceeds len(seq), the whole sequence is returned unchanged
  (and no random number is consumed).
  """
  if frag_length > len(seq):
    return seq
  start = random.randint(0, len(seq) - frag_length)
  return seq[start:start + frag_length]
# emits a random nucleotide sequence
# user can set parameters one at a time as desired
#
# - set parameters
#     set_gc_content(float, default 0.5)
#     set_length_range(min_len, max_len)
#     set_length(length)
#
# emit() - outputs the sequence
#
class RandomSequenceEmitter:
  """Emit random nucleotide sequences with configurable length and GC content."""
  def __init__(self,bothlens=200):
    # Default: fixed-length sequences of `bothlens` bases, balanced GC.
    self.sequence_length_min = bothlens
    self.sequence_length_max = bothlens
    self.gc_content = 0.5
  def emit(self):
    """Return one random sequence whose length lies in the configured range."""
    length = random.randint(self.sequence_length_min, self.sequence_length_max)
    return ''.join(random_nucleotide_gc(self.gc_content) for _ in range(length))
  def set_length_range(self,minl,maxl):
    """Emit lengths uniformly between minl and maxl (inclusive)."""
    self.sequence_length_min = minl
    self.sequence_length_max = maxl
  def set_length(self,onelen):
    """Emit sequences of exactly onelen bases."""
    self.sequence_length_min = onelen
    self.sequence_length_max = onelen
  def set_gc_content(self,gc_content):
    """Set the per-base probability of emitting G or C."""
    self.gc_content = gc_content
def random_nucleotide_gc(gc_content):
  """Return one random base: G or C with probability gc_content, else A or T."""
  if random.random() < float(gc_content):
    pair = ('G', 'C')
  else:
    pair = ('A', 'T')
  # Second draw picks uniformly within the chosen pair.
  return pair[0] if random.random() < 0.5 else pair[1]
# Whatever the input nucleotide, change it to a different one.
def different_nucleotide(nt):
  """Return a random nucleotide different from nt.

  Inputs not in {A, C, T, G} (including lowercase) are returned unchanged.
  """
  bases = ['A','C','T','G']
  if nt not in bases: return nt
  others = [base for base in bases if base != nt.upper()]
  # Sanity check: removing one base from four must leave exactly three.
  if len(others) != 3:
    sys.stderr.write("ERROR: strange length array with nt "+nt+"\n")
    sys.exit()
  random.shuffle(others)
  return others[0]
| StarcoderdataPython |
6669157 | from collections import OrderedDict
# ---------------------------------------------------------------------------
# Structure database: maps each structure name to the VASP file holding its
# GGA-relaxed geometry, plus the directory the files live in.
# ---------------------------------------------------------------------------
MgO_structures = OrderedDict()
MgO_structures['structure_db_dir'] = 'test_LammpsStructuralMinimization'
for _structure_name in ('MgO_NaCl_unit', 'MgO_NaCl_fr_a', 'MgO_NaCl_fr_c',
                        'MgO_NaCl_sh', 'MgO_NaCl_001s'):
    _entry = OrderedDict()
    _entry['filename'] = _structure_name + '.gga.relax.vasp'
    MgO_structures[_structure_name] = _entry

# ---------------------------------------------------------------------------
# Potential section: defines the formalism (Buckingham) for MgO.
# Charge parameters are expected in the order of ['potential']['symbols'];
# pair parameters follow the derived pairing order MgMg, MgO, OO.
# ---------------------------------------------------------------------------
MgO_LewisCatlow = OrderedDict()
MgO_LewisCatlow['potential'] = OrderedDict([
    ('potential_type', 'buckingham'),
    ('symbols', ['Mg', 'O']),
])
MgO_LewisCatlow['parameters'] = OrderedDict([
    ('chrg_Mg', 2.0),
    ('chrg_O', -2.0),
    ('MgMg_A', 0.0),
    ('MgMg_rho', 0.5),
    ('MgMg_C', 0.0),
    ('MgO_A', 821.6),
    ('MgO_rho', 0.3242),
    ('MgO_C', 0.0),
    ('OO_A', 2274.00),
    ('OO_rho', 0.1490),
    ('OO_C', 27.88),
])
| StarcoderdataPython |
4922047 | <reponame>Juan7655/simulacion_teoria_de_colas<filename>main.py
import time
import matplotlib.pyplot as plt
from src import manager
def waiting_times(server, n, ciclos=1):
    """Run `ciclos` cycles of server `server` with `n` inputs; return elapsed seconds."""
    started = time.time()
    man.run(server, n, ciclos)
    return time.time() - started
def complexity(serv):
    """Time the simulation for growing input sizes and plot size vs. runtime."""
    max_input = 1000
    sizes = list(range(100, max_input, 50))
    durations = []
    for size in sizes:
        elapsed = waiting_times(serv, size)
        durations.append(elapsed)
        print(elapsed)
    plt.plot(sizes, durations)
    plt.xlabel("Input size for server " + str(serv))
    plt.ylabel("Time (s)")
    plt.show()
def show_graphs(list_servers=None, input_size=1000):
    """Show queue-behavior graphs for one or more servers.

    list_servers: a single server index (int), a list of indices, or None to
    show every server known to the manager.
    input_size: number of simulated inputs per run (was a hard-coded local;
    now a parameter with the original value as default — backward compatible).
    """
    if list_servers is None:
        list_servers = man.get_server_index_list()
    # isinstance instead of `type(x) == int`: idiomatic and accepts subclasses.
    if isinstance(list_servers, int):
        man.run(list_servers, input_size, show_graph=True)
    else:
        for server in list_servers:
            man.run(server, input_size, show_graph=True)
if __name__ == '__main__':
    man = manager.Manager()
    # man.run(13, 100, show_graph=True)
    ## To display the graphs of the queue behaviour:
    # show_graphs([3, 6, 9])  # shows the graphs for the servers given in the list
    # show_graphs(9)  # a single server does not need to be wrapped in a list
    show_graphs()  # with no arguments, shows the graphs for every server
    ## To measure execution times (computational complexity):
    # complexity(12)
    ## To measure the time taken to execute one or several runs of a server:
    # time = waiting_times(12, 100, ciclos=10)
    # print(time)
| StarcoderdataPython |
1953497 | import requests
import re
import logging
import datetime
import trafilatura
from bs4 import BeautifulSoup
class Request():
def __init__(self, lang = 'en', country = 'US', timeout_sec = 60):
self.lang = lang.lower()
self.country = country.upper()
self.http_header = self.set_http_header()
self.cookies = self.set_cookies()
self.timeout_sec = timeout_sec
def set_http_header(self):
""" Change header of a request so it looks like the request originates from an actual computer rather than a bot"""
return {f"User-Agent" : "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/11.1.2 Safari/605.1.15",
"Accept" : "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
"Accept-Language" : f"{self.lang}-{self.country},{self.lang};q=0.5"}
def set_cookies(self):
""" Cookies to try to accept/ignore accept or other consent popups/pages"""
return {'BCPermissionLevel': 'PERSONAL',
'CONSENT': 'YES+42'
}
class GoogleNews(Request):
def __init__(self, keyword, use_rss = False, use_quotes = True):
super().__init__()
# replace space with space character in html
self.keyword = keyword.replace(' ', '%20')
# use rss feed or not
self.use_rss = use_rss
# basw url to scrape google news http website
self.base_url = 'https://news.google.com'
# use quotes from exact search match
self.use_quotes = use_quotes
# create url by adding the keyword to the News url, either base url or rss
self.url = self.create_url()
def create_url(self):
""" Create URL by adding the base url with the remaining url components including the keyword"""
if self.use_quotes:
return f'{self.base_url}/search/section/q/"{self.keyword}"'
else:
return f'{self.base_url}/search/section/q/{self.keyword}'
def get_raw_news(self):
""" Make a get request to retrieve the html content"""
try:
return requests.get(url = self.url, headers = self.http_header, cookies = self.cookies, timeout = self.timeout_sec)
except Exception as e:
print(e)
return None
def parse_news(self, html, invalid_href_terms = ["google", "youtube"]):
"""Parse out news items with meta data"""
# set html as attribute of Query
self.html = html
# to check if link is present
links_checker = []
# maintain a list of URL ID that Google used so we don't process the same twice
links_ID_checker = []
# create beautifulsoup object
bsObj = BeautifulSoup(self.html, "lxml")
# create subset of html text that contain link data from link ID
html_stripped = self.html.replace('\n', '')
html_link_text = ""
html_word_counter = 0
for script in re.findall(r'<script.*?</script>' , html_stripped):
count = script.count('null')
if count > html_word_counter:
html_word_counter = count
html_link_text = script
# find all anchor, i.e. links
valid_links = []
for anchor in bsObj.findAll("a"):
# check if the anchor is atttached to an image; we don't need them
if len(anchor.findAll("img")) > 0:
continue
# find anchors with valid href
try:
href = anchor.attrs["href"]
# check in url contains a link to google page such as google or youtube; we don't want them
valid_url = True
for invalid_term in invalid_href_terms:
if invalid_term in href:
logging.warning("Link contains invalid href term {}, skipping".format(invalid_term))
valid_url = False
# check if links contains invalid words
if valid_url == True:
# check if url starts with a dot or with a / => internal google links, we don't need them
# if href.startswith("./articles/") and ('?' not in href): <- old method to find news articles urls.
if href.startswith("./articles/"):
"""
For each news link Google provides an news links ID. The Url can then be traced back later in a script area with that link ID.
Unfortunately, Google does not provide the link close to the original news title content
"""
try:
# get the URL LINK ID + the unix time stemp
link_id = re.search(r'/articles/(.*)', href).group(1)
# delelete whatever comes after the link ID, for example ?hl=en-US&gl=US&ceid=US%3Aen
link_id = re.sub(r'\?.*', '', link_id)
if link_id not in links_ID_checker:
# add to tracker
links_ID_checker.append(link_id)
# get URL text
url_text_area = re.search(link_id + r'.*?"http.*?"', html_link_text).group(0)
# get URL text area large
url_text_area_large = re.search(link_id + r'.*' + link_id, html_link_text).group(0)
# get the URL
url = re.search(r'"http.*?"', url_text_area).group(0).strip('"')
logging.debug("Found URL: {}".format(url))
# check if url contains invalid tokens
valid_url = True
for invalid_term in invalid_href_terms:
if invalid_term in url:
valid_url = False
break
# if not valid url then we need to skip it, for instance, if forbes or smartereum
if not valid_url:
logging.warning("Link contains invalid href term {}, skipping".format(invalid_term))
continue
"""
Extract time stamp from script file that is located where we also extract the url from, that is in the scripts file below the page
News articles do not always have the '4 weeks ago' text with the news, then the old method would find an incorrect time
"""
# get the unix time stamp
time_stamp = re.search(r'\[[0-9]+\]',url_text_area).group(0)
# strip the brackets
time_stamp = time_stamp.lstrip('[').rstrip(']')
logging.debug('Found time stamp: {}'.format(time_stamp))
# convert unix time stamp
datetime_datetime = datetime.datetime.fromtimestamp(int(time_stamp)).strftime('%Y-%m-%d')
logging.debug('converted to datetime object : {}'.format(datetime_datetime))
# save to variable
url_date = str(datetime_datetime)
else:
logging.info("Link ID already processed: {}".format(link_id))
continue
except Exception as e:
logging.error("Error retrieving url or time from from html content: {}".format(e))
continue
"""
parse title
"""
try:
link_title = re.search(link_id + r'"],"(.*?)","' ,url_text_area).group(1)
logging.debug("Found link title: {}".format(link_title))
if '|' in link_title:
link_title = link_title.split('|')[0].strip()
logging.debug('Updated link title: {}'.format(link_title))
except Exception as e:
logging.error("Error retrieving url publisher name: {}".format(e))
continue
"""
parse publisher name
"""
try:
publisher_name = re.search(r'Go to (.*?)"', url_text_area_large).group(1).strip()
logging.debug("Found publisher's name: {}".format(publisher_name))
except Exception as e:
logging.error('Failed to extract publisher name from html content: {}'.format(e))
publisher_name = None
# save to list
valid_links.append([link_title, url, publisher_name, url_date])
except Exception as e:
# logging.warning('Failed to extract valid href attribute: ' + str(e))
continue
logging.debug("Number of urls found: " + str(len(valid_links)))
# return dictionary
news_items = {}
# add links to total list
for link in valid_links:
if link[1] not in links_checker:
links_checker.append(link[1])
# add to return dictionary
news_items[link[1]] = { 'title' : link[0],
'url' : link[1],
'publisher' : link[2],
'date' : link[3],
'datetime' : datetime.datetime.now()}
return news_items
class NewsArticle(Request):
def __init__(self, url):
super().__init__()
self.url = url
self.raw_html = None
self.content = None
self.status_code = None
self.timed_out = None
def parse_main_content(self, include_comments = False, include_tables = False, allow_status_codes = [200]):
"""
Parse main content from website.
"""
try:
# get html
request = requests.get(url = self.url, headers = self.http_header, cookies = self.cookies, timeout = self.timeout_sec)
# status code
self.status_code = request.status_code
if self.status_code in allow_status_codes:
# raw html
self.raw_html = request.text
# get content of the main text
self.content = trafilatura.extract(self.raw_html, include_comments = include_comments, include_tables = include_tables)
else:
logging.warning(f'Skipping status code {self.status_code}')
except Exception as e:
logging.error(e)
self.timed_out = True
return {'raw_html' : self.raw_html, 'content' : self.content, 'status_code' : self.status_code, 'timed_out' : self.timed_out, 'datetime' : datetime.datetime.now()}
| StarcoderdataPython |
6524270 | # Copyright (c) 2020 Oracle, Inc.
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl.
#
import io
import os
import json
import logging
import subprocess
import oci
from fdk import response
def execute_shell_command(cmd):
    """Run `cmd` (a list of argv tokens) and return its decoded stdout.

    Returns None (implicitly) if the command cannot be run; the error is
    logged rather than raised — callers treat this as best-effort.
    """
    try:
        return (subprocess.run(cmd, stdout=subprocess.PIPE).stdout.decode('utf-8'))
    except Exception as error:
        # Bug fix: the original f-string was f'... str({error})', which rendered
        # literally as "str(<message>)" in the log output.
        logging.getLogger().info(f'Exception while executing shell commands - {error}')
class oci_cli_actions():
    """Helpers that download a Generic Artifact and deploy it to OKE via helm."""
    def __init__(self,region,signer):
        """Init with a region and resource principal signer"""
        self.region = region
        self.signer = signer
    def download_artifact(self,artifact_repo_id,artifact_path,artifact_version):
        """Stream the artifact (repo id + path + version) into /tmp/<artifact_path>.

        Errors are logged and swallowed (best-effort, matching the original).
        """
        try:
            logging.getLogger().info("Downloading the artifact")
            oci_artifact_client = oci.generic_artifacts_content.GenericArtifactsContentClient(config={'region': self.region}, signer = self.signer)
            get_generic_artifact_content_by_path_response = oci_artifact_client.get_generic_artifact_content_by_path(
                repository_id=artifact_repo_id,
                artifact_path=artifact_path,
                version=artifact_version
            )
            logging.getLogger().info(f"attemtping to write to path /tmp/{artifact_path}")
            # Stream in 1 MiB chunks to avoid holding the whole artifact in memory.
            with open(f'/tmp/{artifact_path}', 'wb') as target_file:
                for chunk in get_generic_artifact_content_by_path_response.data.raw.stream(1024 * 1024, decode_content=False):
                    target_file.write(chunk)
            outcome = execute_shell_command(['ls','-ltrh','/tmp/'])
            logging.getLogger().info("Temp file information " + str(outcome))
        except Exception as error:
            logging.getLogger().info(f'Exception while downloading artifact - {error}')
    def oke_deployment(self,oke_cluster_id,artifact_path,artifact_version):
        """Create a kubeconfig for the OKE cluster and `helm upgrade --install` the chart."""
        try:
            ce_client = oci.container_engine.ContainerEngineClient(config={'region': self.region}, signer=self.signer)
            config_response = ce_client.create_kubeconfig(oke_cluster_id)
            config_path="/tmp/kubeconfig"
            with open(config_path, 'w') as file:
                file.write(config_response.data.text)
            os.environ['KUBECONFIG'] = config_path
            # kubectl/helm warn on (or refuse) group-readable kubeconfigs.
            outcome = execute_shell_command(['chmod','go-r',config_path])
            # Derive the helm release name from the artifact file name.
            # Bug fix: the original used artifact_path.strip('.zip'), which strips
            # any of the characters '.', 'z', 'i', 'p' from BOTH ends of the
            # string (e.g. 'zip_app.zip' -> '_app'), not the literal '.zip'
            # suffix.  Remove only the trailing '.zip'.
            base_name = artifact_path[:-len('.zip')] if artifact_path.endswith('.zip') else artifact_path
            chart_name = base_name.replace("_","-")
            logging.getLogger().info(f"Attempting Helm install with version {artifact_version}")
            outcome = execute_shell_command(['helm','history',chart_name])
            logging.getLogger().info("helm current history - " + str(outcome))
            outcome = execute_shell_command(['helm','upgrade','--install',chart_name,f'/tmp/{artifact_path}'])
            outcome = execute_shell_command(['helm','history',chart_name])
            logging.getLogger().info("helm post deployment history - " + str(outcome))
        except Exception as error:
            logging.getLogger().info(f'Exception while deploying to OKE - {error}')
def handler(ctx, data: io.BytesIO=None):
    """OCI Functions entry point.

    Receives an artifact-update event, downloads the referenced Generic
    Artifact, and helm-deploys it to the OKE cluster named by environment
    variables `oci_region` and `oke_cluster_id`.
    """
    try:
        body = json.loads(data.getvalue())
        logging.getLogger().info("Fetching the information")
        # Event payload: first record's current state identifies the artifact.
        current = body[0]['data']['stateChange']['current']
        artifact_repo_id = current['repositoryId']
        artifact_path = current['artifactPath']
        artifact_version = current['version']
        region = os.environ['oci_region']
        oke_cluster_id = os.environ['oke_cluster_id']
        signer = oci.auth.signers.get_resource_principals_signer()
        os.environ['OCI_CLI_AUTH']="resource_principal" #set OCI CLI to use resource_principal authorization
        logging.getLogger().info(f'Input Params Repo = {artifact_repo_id} Path = {artifact_path}, Version = {artifact_version}')
        artifact_handler = oci_cli_actions(region,signer)
        artifact_handler.download_artifact(artifact_repo_id,artifact_path,artifact_version)
        artifact_handler.oke_deployment(oke_cluster_id,artifact_path,artifact_version)
        return response.Response(
            ctx,
            response_data=json.dumps({"status": "Hello World! with customImage"}),
            headers={"Content-Type": "application/json"})
    except Exception as error:
        # Bug fix: the original f-strings were f'... str({error})', rendering
        # literally as "str(<message>)" in both the log and the JSON response.
        logging.getLogger().info(f'Exception - {error}')
        return response.Response(
            ctx,
            response_data=json.dumps({"status": f'Exception - {error}'}),
            headers={"Content-Type": "application/json"})
12857199 | import pyhf
from pyhf.parameters import ParamViewer
def test_paramviewer_simple_nonbatched(backend):
    """A ParamViewer over a flat parameter vector selects the right slices."""
    parameters = pyhf.tensorlib.astensor([1, 2, 3, 4, 5, 6, 7])
    viewer = ParamViewer(
        pyhf.tensorlib.shape(parameters),
        {'hello': {'slice': slice(0, 2)}, 'world': {'slice': slice(5, 7)}},
        ['hello', 'world'],
    )

    selected = viewer.get(parameters)
    hello_slice, world_slice = viewer.slices[0], viewer.slices[1]

    assert pyhf.tensorlib.tolist(selected[hello_slice]) == [1, 2]
    assert pyhf.tensorlib.tolist(selected[world_slice]) == [6, 7]
    assert pyhf.tensorlib.tolist(selected) == [1, 2, 6, 7]
def test_paramviewer_simple_batched(backend):
    """A ParamViewer over batched parameters keeps the batch dimension."""
    parameters = pyhf.tensorlib.astensor(
        [[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]]
    )
    viewer = ParamViewer(
        pyhf.tensorlib.shape(parameters),
        {'hello': {'slice': slice(0, 2)}, 'world': {'slice': slice(3, 4)}},
        ['hello', 'world'],
    )

    selected = viewer.get(parameters)

    assert isinstance(viewer.index_selection, list)
    # first dimension is the batch dimension
    assert all(len(selection) == 3 for selection in viewer.index_selection)

    assert pyhf.tensorlib.shape(selected) == (3, 3)
    assert pyhf.tensorlib.tolist(selected[viewer.slices[0]]) == [[1, 5, 9], [2, 6, 10]]
    assert pyhf.tensorlib.tolist(selected[viewer.slices[1]]) == [[4, 8, 12]]
    assert pyhf.tensorlib.tolist(selected) == [[1, 5, 9], [2, 6, 10], [4, 8, 12]]
| StarcoderdataPython |
# coding: utf-8
"""
Demo for the live reloading of Flask server.
"""
from __future__ import absolute_import
# Standard imports
import os
import sys
# External imports
import flask
def main():
    """Run the live-reload demo: start a watcher thread, then a Flask server.

    :return: None.
    """
    try:
        # Walk three directory levels up from this file to find `src`, so
        # the demo can import `aoiklivereload` without the user having to
        # set PYTHONPATH.
        this_file = os.path.abspath(__file__)
        demo_dir = os.path.dirname(this_file)
        package_dir = os.path.dirname(demo_dir)
        src_path = os.path.dirname(package_dir)

        if src_path not in sys.path:
            sys.path.append(src_path)

        # Import reloader class (only possible after the sys.path tweak).
        from aoiklivereload import LiveReloader

        # Create reloader and start its watcher thread.
        reloader = LiveReloader()
        reloader.start_watcher_thread()

        server_host = '0.0.0.0'
        server_port = 8000

        print(
            '# ----- Run server -----\nHost: {}\nPort: {}'.format(
                server_host, server_port
            )
        )

        flask_app = flask.Flask(__name__)

        @flask_app.route('/')
        def hello_handler():  # pylint: disable=unused-variable
            """Return the demo response body."""
            return 'hello'

        flask_app.run(
            host=server_host,
            port=server_port,
            # Disable Flask's own reloader; LiveReloader handles reloads.
            debug=False,
        )
    except KeyboardInterrupt:
        # Ctrl-C is the normal way to stop the demo; not an error.
        pass
# If is run as main module
if __name__ == '__main__':
# Call main function
exit(main())
| StarcoderdataPython |
from pandac.PandaModules import *
from direct.showbase import PythonUtil
# Screen positions for the five card target slots, keyed by slot index.
TARGET_POS = {
    4: Vec3(0.85, 0, 0.0),
    3: Vec3(0.6, 0, 0.42),
    2: Vec3(0.27, 0, 0.6),
    1: Vec3(-0.08, 0, 0.63),
    0: Vec3(-0.59, 0, 0.29),
}

# Seats around the table; DEALER plus seven player positions.
FACES = PythonUtil.Enum('DEALER,ONE,TWO,THREE,FOUR,FIVE,SIX,SEVEN')

# Screen positions for each occupied seat (SEVEN has no spot here).
FACE_SPOT_POS = {
    FACES.DEALER: (-1.0, 0, 0.6),
    FACES.ONE: (-1.15, 0, -0.3),
    FACES.TWO: (-0.96, 0, -0.61),
    FACES.THREE: (-0.65, 0, -0.8),
    FACES.FOUR: (0.65, 0, -0.8),
    FACES.FIVE: (0.96, 0, -0.61),
    FACES.SIX: (1.15, 0, -0.3),
}

FINGER_RANGES = [[-26, -16], [-3, 8], [23, 32], [52, 60]]

PLAYER_ACTIONS = PythonUtil.Enum('JoinGame,UnjoinGame,RejoinGame,Resign,Leave,Continue,Progress')
GAME_ACTIONS = PythonUtil.Enum('AskForContinue,NotifyOfWin,NotifyOfLoss')
CONTINUE_OPTIONS = PythonUtil.Enum('Resign,Continue,Rejoin,Leave')

# Timing parameters (seconds -- presumably; TODO confirm against callers).
GameTimeDelay = 5
RoundTimeDelay = 5
RoundTimeLimit = 90
RoundContinueWait = 10
import django
from {{ cookiecutter.pkg_name }} import context_processors
def test_django_version():
    """The django_version context processor reports the running version.

    It must return a dictionary mapping 'django_version' to the current
    Django version string.
    """
    expected = {'django_version': django.get_version()}
    assert context_processors.django_version(None) == expected
| StarcoderdataPython |
3472495 | <gh_stars>0
import setuptools

# Pin the encoding: without it, reading README.md depends on the
# platform's locale default (e.g. cp1252 on Windows), which breaks the
# build whenever the README contains non-ASCII characters.
with open("README.md", encoding="utf-8") as f:
    long_description = f.read()

setuptools.setup(
    name="twarc-csv",
    version="0.2.0",
    url="https://github.com/docnow/twarc-csv",
    author="<NAME>",
    author_email="<EMAIL>",
    py_modules=["twarc_csv"],
    description="A twarc plugin to output Twitter data as CSV",
    long_description=long_description,
    long_description_content_type="text/markdown",
    python_requires=">=3.3",
    install_requires=[
        "twarc>=2.0.12",
        "pandas>=1.2.3",
        "more-itertools>=8.7.0",
        "tqdm>=4.59.0",
    ],
    setup_requires=["pytest-runner"],
    tests_require=["pytest"],
    entry_points="""
    [twarc.plugins]
    csv=twarc_csv:csv
    """,
)
| StarcoderdataPython |
import copy
import json
import pickle
import os
from typing import List
from pretty_midi import PrettyMIDI, Instrument, Note
from .Chord import Chord
from ..utils.string import STATIC_DIR
from ..utils.utils import compute_distance, compute_destination, Logging, read_lib
from ..utils.constants import *
from ..settings import static_storage
class ChordProgression:
    """A chord progression plus its metadata.

    Chords are stored internally as a 2-D list of ``Chord`` objects
    (``self._progression``: one inner list per bar).  The ``progression``
    property exposes the same structure as scale degrees (ints) relative
    to ``meta['tonic']``/``meta['mode']``, with a small cache to avoid
    recomputation.
    """

    def __init__(self, type=None, tonic=None, metre=None, mode=None, source=None, saved_in_source_base=False):
        """Create an empty progression with the given metadata.

        ``saved_in_source_base`` marks progressions whose note data lives
        in the external source library rather than in ``_progression``.
        """
        self.meta = {"source": source, "type": type, "tonic": tonic, "metre": metre, "mode": mode}
        self._progression = []
        self.progression_class = {
            'type': 'unknown',  # positions, e.g., 'verse', 'chorus', ...
            'pattern': 'unknown',  # e.g., 'I-vi-IV-V', ...
            'cycle': 'unknown',  # number
            'progression-style': 'unknown',  # 'pop', 'edm', 'dark', ...
            'chord-style': 'unknown',  # 'classy', 'emotional', 'standard'
            'performing-style': 'unknown',  # 'arpeggio'
            'rhythm': 'unknown',  # 'fast-back-and-force', 'fast-same-time', 'slow'
            'epic-endings': 'unknown',  # 'True', 'False'
            'melodic': 'unknown',  # 'True', 'False'
            'folder-id': 'unknown',
            'duplicate-id': 'unknown'
        }
        # Translate the raw section label via type_dict (defined at module
        # bottom); unknown labels are silently ignored.
        try:
            self.progression_class['type'] = type_dict[type]
        except:
            pass
        self.appeared_time = 1
        self.appeared_in_other_songs = 0
        self.reliability = -1
        self.saved_in_source_base = saved_in_source_base
        self.cache = {
            '2d-root': None
        }

    # chords are stored as Chord Class
    # switch to root note and output the progression in a easy-read way
    @property
    def progression(self):
        """2-D list of scale degrees (cached in ``self.cache['2d-root']``)."""
        if not self.cache['2d-root']:
            prog = []
            for bar_chords in self._progression:
                bar_roots = []
                for chord in bar_chords:
                    if chord.root == -1:
                        # -1 marks a rest / undefined chord; rendered as 0.
                        bar_roots.append(0)
                    else:
                        root = chord.root
                        bar_roots.append(compute_distance(tonic=self.meta['tonic'], this=root, mode=self.meta['mode']))
                prog.append(bar_roots)
            self.cache['2d-root'] = prog
            return prog
        else:
            return self.cache['2d-root']

    @progression.setter
    def progression(self, new):
        """Assign a 2-D progression.

        Accepts either a 2-D list of ``Chord`` objects (stored as-is) or a
        2-D list of scale-degree ints, which are converted to diatonic
        triads using the current tonic and mode.  Invalidates the cache.
        """
        self.cache['2d-root'] = None
        if type(new[0][0]) is not int:
            self._progression = new
        # not recommended to assign numbers to _progression
        else:
            prog = []
            for bar_roots in new:
                bar_chords = []
                for order in bar_roots:
                    if not self.meta['tonic']:
                        raise Exception('cannot convert numbers to chords before tonic assigned')
                    root = compute_destination(tonic=self.meta['tonic'], order=order, mode=self.meta['mode'])
                    # Pick the diatonic triad quality for the degree.
                    if self.meta['mode'] == 'M':
                        if order == 1 or order == 4 or order == 5:
                            attr = [MAJ_TRIAD, -1, -1, -1]
                        elif order == 2 or order == 3 or order == 6:
                            attr = [MIN_TRIAD, -1, -1, -1]
                        elif order == 7:
                            attr = [DIM_TRIAD, -1, -1, -1]
                        else:
                            attr = [-1, -1, -1, -1]
                    elif self.meta['mode'] == 'm':
                        if order == 1 or order == 4 or order == 5:
                            attr = [MIN_TRIAD, -1, -1, -1]
                        elif order == 3 or order == 6 or order == 7:
                            attr = [MAJ_TRIAD, -1, -1, -1]
                        elif order == 2:
                            attr = [DIM_TRIAD, -1, -1, -1]
                        else:
                            attr = [-1, -1, -1, -1]
                    else:
                        attr = [-1, -1, -1, -1]
                    chord = Chord(root=root, attr=attr)
                    bar_chords.append(chord)
                prog.append(bar_chords)
            self._progression = prog

    @property
    def type(self):
        # Canonical section constant (see progression_class['type']).
        return self.progression_class['type']

    @property
    def id(self):
        return self.progression_class['duplicate-id']

    @type.setter
    def type(self, new_type):
        self.progression_class['type'] = new_type

    # all progression getters
    def get(self, only_degree=False, flattened=False, only_root=False):
        """Return the progression in one of several representations.

        :param only_root: elements are plain scale-degree numbers.
        :param only_degree: elements are ``Chord`` objects whose ``root``
            is a scale-degree number instead of a note name.
        :param flattened: return a flat list instead of a list per bar.
        """
        if only_root:  # element is a number
            if flattened:
                return self.get_chord_progression_only_root_flattened()
            else:
                return self.get_chord_progression_only_root()
        elif only_degree:  # element is a Chord class, but Chord.root is a number
            if flattened:
                return self.get_chord_progression_only_degree_flattened()
            else:
                return self.get_chord_progression_only_degree()
        else:
            if flattened:
                return self.get_chord_progression_flattened()
            else:
                return self.get_chord_progression()

    # differences between progression.getter: this method returns the exact chord, not number (order)
    def get_chord_progression(self):
        return self._progression

    def get_chord_progression_only_degree(self):
        """Return new ``Chord`` objects with roots replaced by degrees."""
        prog = []
        for bar_chords in self._progression:
            bar_roots = []
            for chord in bar_chords:
                if chord.root != -1:
                    root = chord.root
                    number = compute_distance(tonic=self.meta['tonic'], this=root, mode=self.meta['mode'])
                    new_chord = Chord(root=number, attr=[chord.type, chord.inversion, chord.sus, chord.add])
                else:
                    new_chord = Chord(root=-1, attr=[chord.type, chord.inversion, chord.sus, chord.add])
                bar_roots.append(new_chord)
            prog.append(bar_roots)
        return prog

    def get_chord_progression_only_root(self):
        return self.progression

    def get_chord_progression_flattened(self):
        return self.__flat_progression(self.get_chord_progression())

    def get_chord_progression_only_degree_flattened(self):
        return self.__flat_progression(self.get_chord_progression_only_degree())

    def get_chord_progression_only_root_flattened(self):
        return self.__flat_progression(self.progression)

    @staticmethod
    def __flat_progression(before):
        # Concatenate the per-bar lists into one flat list.
        after = []
        for bar_prog in before:
            after += bar_prog
        return after

    def to_midi(self, tempo=120, instrument=PIANO, tonic=None, lib=None):
        """Render the progression to a :class:`PrettyMIDI` object.

        If the progression is stored in the source library
        (``saved_in_source_base``), notes are read from ``lib`` (loaded via
        ``read_lib()`` when not supplied); otherwise chords are rendered
        from ``_progression``, transposed from the stored tonic to
        ``tonic``.  Returns None/False on failure.
        """
        if not self.progression:
            Logging.error("Progression not assigned!")
            return None
        if not tonic:
            tonic = self.meta['tonic']
        midi = PrettyMIDI()
        # One progression step lasts half a beat at the given tempo.
        unit_length = 30 / tempo
        ins = Instrument(instrument)
        if not self.saved_in_source_base:
            current_pos = 0
            for i in self.get_chord_progression():
                memo = -1
                length = 0
                for j in i:
                    if j == memo:
                        # Repeated chord: extend the held duration.
                        length += unit_length
                    else:
                        if memo != -1:
                            for pitch in memo.to_midi_pitch(
                                    tonic=self.__key_changer(self.meta['tonic'], memo.root, tonic)):
                                note = Note(pitch=pitch, velocity=80, start=current_pos, end=current_pos + length)
                                ins.notes.append(note)
                            current_pos += length
                        length = unit_length
                        memo = j
                # Flush the last held chord of the bar.
                for pitch in memo.to_midi_pitch(tonic=self.__key_changer(self.meta['tonic'], memo.root, tonic)):
                    note = Note(pitch=pitch, velocity=80, start=current_pos, end=current_pos + length)
                    ins.notes.append(note)
                current_pos += length
        else:
            if lib is None:
                lib = read_lib()
            try:
                all_notes = lib[self.meta['source']]
            except:
                Logging.error('Progression with source name {n} '
                              'cannot be find in library! '
                              'Call set_in_lib(in_lib=False) to generate MIDI '
                              'by progression list itself'.format(n=self.meta['source']))
                return False
            # Library entries are (start, end, pitch, velocity) tuples in
            # progression-step units -- presumably; TODO confirm.
            for note in all_notes:
                ins.notes.append(Note(start=note[0] * unit_length,
                                      end=note[1] * unit_length,
                                      pitch=note[2],
                                      velocity=note[3]))
        midi.instruments.append(ins)
        return midi

    def __key_changer(self, original_tonic: str, root: str, new_tonic: str):
        # Transpose `root` by the interval from original_tonic to new_tonic.
        if root == -1:
            return None
        order = compute_distance(original_tonic, new_tonic, mode=self.meta['mode'])
        return compute_destination(tonic=root, order=order, mode=self.meta['mode'])

    # setters
    def set_mode(self, mode):
        self.meta["mode"] = mode

    def set_metre(self, metre):
        self.meta["metre"] = metre

    def set_tonic(self, tonic):
        self.meta["tonic"] = tonic

    def set_source(self, source):
        self.meta["source"] = source

    def set_type(self, type):
        # Unknown labels clear both the canonical and raw type fields.
        try:
            self.type = type_dict[type]
            self.meta["type"] = type
        except:
            self.type = None
            self.meta['type'] = None

    def set_appeared_time(self, time):
        self.appeared_time = time

    def set_appeared_in_other_songs(self, time):
        self.appeared_in_other_songs = time

    def set_reliability(self, reliability):
        self.reliability = reliability

    def set_progression_class(self, progression_class):
        # Accepts a single-quoted dict string; normalized to JSON first.
        self.progression_class = dict(json.loads(progression_class.replace('\'', '"')))

    def set_in_lib(self, in_lib):
        self.saved_in_source_base = True if in_lib else False

    def add_cache(self):
        # NOTE(review): identical to set_cache() below -- likely a leftover
        # duplicate kept for backward compatibility.
        self.cache = {
            '2d-root': None
        }

    def __iter__(self):
        """Yield every ``Chord`` in order, bar by bar."""
        if self.progression is None:
            Logging.error("Progression not assigned!")
            return None
        for i in self.get_chord_progression():
            for j in i:
                yield j

    def __len__(self):
        # Total number of chord steps across all bars.
        count = 0
        for i in self:
            count += 1
        return count

    def __contains__(self, item):
        """Substring-style search for a chord/degree sequence.

        ``item`` may be a digit string, a number, a ``Chord``, or a list of
        any of those; the matching representation of this progression is
        chosen from the first element's type.
        """
        if type(item) is str:
            if item.isdigit():
                item = int(item)
            else:
                Logging.error("'Item in ChordProgression': item type cannot be recognized!")
                return False
        if type(item) is int or type(item) is float or type(item) is Chord:
            item = [item]
        if type(item) is list:
            if len(item) > len(self.get(flattened=True)):
                return False
            if type(item[0]) is Chord and type(item[0].root) is str:
                ori_prog = self.get(flattened=True)
            elif type(item[0]) is Chord and (type(item[0].root) is int or type(item[0].root) is float):
                ori_prog = self.get(flattened=True, only_degree=True)
            elif type(item[0]) is int or type(item[0]) is float:
                ori_prog = self.get(flattened=True, only_root=True)
            else:
                Logging.error("'item in ChordProgression': item type cannot be recognized!")
                return False
            # Slide a window of len(item) over the flattened progression.
            all_slices = [ori_prog[i:i + len(item)] for i in range(len(ori_prog) - len(item) + 1)]
            for slice in all_slices:
                if slice == item:
                    return True
            else:
                return False

    def set_cache(self):
        self.cache = {
            '2d-root': None
        }

    def __getitem__(self, item):
        raise SyntaxError('Syntax "ChordProgression[key]" should not be used because the type of the return is '
                          'ambiguous.')

    def __setitem__(self, key, value):
        pass

    def __add__(self, other):
        pass

    def __eq__(self, other):
        """Equal when type, mode and degree sequence all match."""
        if self.meta['type'] != other.meta['type']:
            return False
        if self.meta['mode'] != other.meta['mode']:
            return False
        if self.get(flattened=True, only_degree=True) != other.get(flattened=True, only_degree=True):
            return False
        return True

    def __ne__(self, other):
        pass

    def __bool__(self):
        # NOTE(review): returns None, so bool(progression) would raise
        # TypeError at runtime -- placeholder left unimplemented.
        pass

    def __str__(self):
        """Human-readable dump: metadata, then degree and chord grids."""
        str_ = "Chord Progression\n"
        str_ += "-Source: " + self.__print_accept_none(self.meta["source"]) + "\n"
        str_ += "-Source Type: " + self.__print_accept_none(self.meta["type"]) + "\n"
        str_ += "-Source Tonic: " + self.__print_accept_none(self.meta["tonic"]) + "\n"
        str_ += "-Source Metre: " + self.__print_accept_none(self.meta["metre"]) + "\n"
        str_ += "-Source Mode: " + self.__print_accept_none(self.meta["mode"]) + " (M for Major and m for minor)" + "\n"
        str_ += "-Appeared Times: " + self.__print_accept_none(self.appeared_time) + "\n"
        str_ += "-Appeared In Other Songs: " + self.__print_accept_none(self.appeared_in_other_songs) + "\n"
        str_ += "-Reliability: " + self.__print_accept_none(self.reliability) + "\n"
        str_ += "-Progression Class: " + self.__print_accept_none(self.progression_class) + "\n"
        str_ += "Numbered: " + "\n"
        str_ += "| "
        count = 0
        for i in self.progression:
            if count % 8 == 0 and count != 0:
                str_ += "\n| "
            memo = -1
            for j in i:
                if j == memo:
                    # Repeated value shown as a dash (held chord).
                    str_ += "-"
                else:
                    str_ += str(j)
                    memo = j
            str_ += " | "
            count += 1
        # NOTE(review): `count` is intentionally not reset here, so the
        # chord grid's line wrapping continues from the numbered grid.
        str_ += "\nChord: \n| "
        for i in self._progression:
            if count % 8 == 0 and count != 0:
                str_ += "\n| "
            memo = -1
            for j in i:
                if str(j) == memo:
                    str_ += "-"
                else:
                    str_ += str(j)
                    memo = str(j)
            str_ += " | "
            count += 1
        return str_ + "\n"

    @staticmethod
    def __print_accept_none(value):
        # str() that tolerates None (renders as the string 'None').
        return str(value) if value is not None else 'None'
def read_progressions(progression_file='progressions.pcls', span=False):
    """Load pickled progressions from static storage.

    Falls back to scanning ``STATIC_DIR`` for files whose name contains
    ``progression_file`` when the direct lookup fails.  With ``span=True``
    each loaded progression is additionally expanded into time-scaled
    (x2 / /2) variants via ``span_progression``.

    :return: a list or dict of progressions (shape depends on what was
        pickled), or None when nothing matching is found.
    """
    class RenameUnpickler(pickle.Unpickler):
        # Old pickles reference the pre-package module paths; remap them
        # onto the current ``chorderator`` package layout.
        def find_class(self, module, name):
            renamed_module = module
            if module == "chords.Chord":
                renamed_module = "chorderator.chords.Chord"
            if module == "chords.ChordProgression":
                renamed_module = "chorderator.chords.ChordProgression"
            return super(RenameUnpickler, self).find_class(renamed_module, name)

    def renamed_load(file_obj):
        return RenameUnpickler(file_obj).load()

    def span_progression(progression):
        # Produce half-speed and double-speed variants of a progression.

        def flatten(before):
            after = []
            for bar_prog in before:
                after += bar_prog
            return after

        def restore(before, bar_length=8):
            # Re-split a flat chord list back into bars of `bar_length`.
            after = []
            if len(before) % bar_length != 0:
                raise Exception("Can't span progression: length {l} cannot be restored with bar length {b}"
                                .format(l=len(before), b=bar_length))
            for i in range(len(before) // bar_length):
                after.append(before[i:i + bar_length])
            return after

        def mul(chord_list, scale=2):
            # Stretch: repeat every chord `scale` times.
            flattened = flatten(chord_list)
            new_chord_list = []
            for chord in flattened:
                for i in range(scale):
                    new_chord_list.append(copy.deepcopy(chord))
            return restore(new_chord_list, bar_length=len(chord_list[0]))

        def div(chord_list, scale=2):
            # Compress: keep every `scale`-th chord.
            flattened = flatten(chord_list)
            if len(flattened) % scale != 0:
                raise Exception("Can't span progression: length {l} cannot be divide by scale {s}"
                                .format(l=len(flattened), s=scale))
            new_chord_list = []
            for i in range(len(flattened) // scale):
                new_chord_list.append(copy.deepcopy(flattened[i * scale]))
            return restore(new_chord_list, bar_length=len(chord_list[0]))

        # Low-reliability progressions are passed through unchanged.
        if progression.reliability <= 0.5:
            return [progression]
        # Only 4- and 8-bar progressions (8 steps per bar) get variants.
        if len(progression) // 8 == 4 or len(progression) // 8 == 8:
            p_mul2 = copy.deepcopy(progression)
            p_mul2.progression = mul(p_mul2.get())
            p_mul2.meta['source'] = p_mul2.meta['source'] + 'modx2'
            p_mul2.progression_class['cycle'] *= 2 if type(p_mul2.progression_class['cycle']) is int else 1
            p_div2 = copy.deepcopy(progression)
            p_div2.progression = div(p_div2.get())
            p_div2.meta['source'] = p_div2.meta['source'] + 'mod/2'
            p_div2.progression_class['cycle'] *= 2 if type(p_div2.progression_class['cycle']) is int else 1
            return [progression, p_mul2, p_div2]
        else:
            return [progression]

    Logging.info('start read progressions from {f}'.format(f=progression_file))
    try:
        file = open(static_storage[progression_file], "rb")
        progression_list = renamed_load(file)
        file.close()
    except:
        # Fallback: merge every pickle in STATIC_DIR whose filename
        # contains the requested name (sharded storage).
        all_file_names = []
        for file in os.listdir(STATIC_DIR):
            if progression_file in file:
                all_file_names.append(file)
        if len(all_file_names) == 0:
            Logging.error('cannot recognize progression_file "{n}"'.format(n=progression_file))
            return None
        progression_list = {}
        for name in all_file_names:
            file = open(STATIC_DIR + name, "rb")
            progression_list.update(renamed_load(file))
            file.close()
    if span:
        if type(progression_list) is list:
            new_progression_list = []
            for prog in progression_list:
                new_progression_list += span_progression(prog)
            progression_list = new_progression_list
        elif type(progression_list) is dict:
            for item in progression_list.items():
                new_progression_list = []
                for prog in item[1]:
                    new_progression_list += span_progression(prog)
                progression_list[item[0]] = new_progression_list
        else:
            pass
    Logging.info('read progressions done')
    return progression_list
# Abandoned!
def query_progression(progression_list, source=None, type=None, tonic=None, mode=None, metre=None, times=None,
                      other_times=None, reliability=None):
    """Filter progressions by the given criteria.

    Each criterion is applied only when it is truthy (so ``0`` / ``''`` /
    ``None`` all mean "no filter"), and filters are combined with AND.
    Returns the (possibly unchanged) filtered list.
    """
    if source:
        progression_list = [p for p in progression_list if p.meta["source"] == source]
    if type:
        progression_list = [p for p in progression_list
                            if p.meta["type"] == type or p.type == type]
    if tonic:
        progression_list = [p for p in progression_list if p.meta["tonic"] == tonic]
    if mode:
        progression_list = [p for p in progression_list if p.meta["mode"] == mode]
    if metre:
        progression_list = [p for p in progression_list if p.meta["metre"] == metre]
    if times:
        progression_list = [p for p in progression_list if p.appeared_time == times]
    if other_times:
        progression_list = [p for p in progression_list
                            if p.appeared_in_other_songs == other_times]
    if reliability:
        progression_list = [p for p in progression_list if p.reliability == reliability]
    return progression_list
def print_progression_list(progression_list: "List[ChordProgression]", limit=None):
    """Print progressions from a nested list, up to ``limit`` items.

    ``limit`` defaults to ``len(progression_list)`` (the number of outer
    groups -- preserved from the original behaviour).  A "Total" line with
    the outer-list length is always printed at the end.
    """
    limit = len(progression_list) if limit is None else limit
    count = 0
    for progression_concate in progression_list:
        for progression in progression_concate:
            print(progression)
            count += 1
            # BUG FIX: the original `break` only exited the inner loop, so
            # the limit was never enforced across outer groups (and once
            # count passed limit, `count == limit` never matched again).
            if count >= limit:
                break
        if count >= limit:
            break
    print("Total: " + str(len(progression_list)) + "\n")
# Map raw section labels (as they appear in source annotations) onto the
# canonical section constants imported from utils.constants.  Several
# synonyms collapse onto one constant (e.g. 'refrain' -> CHORUS,
# 'coda'/'ending' -> OUTRO).
type_dict = {
    "fadein": FADEIN,
    "intro": INTRO,
    "intro-a": INTRO,
    "intro-b": INTRO,
    "pre-verse": PREVERSE,
    "preverse": PREVERSE,
    "verse": VERSE,
    "pre-chorus": PRECHORUS,
    "prechorus": PRECHORUS,
    "chorus": CHORUS,
    "refrain": CHORUS,
    "bridge": BRIDGE,
    "trans": TRANS,
    "transition": TRANS,
    "interlude": INTERLUDE,
    "instrumental": INSTRUMENTAL,
    "solo": SOLO,
    "outro": OUTRO,
    "coda": OUTRO,
    "ending": OUTRO,
    "fadeout": FADEOUT,
}
# Manual smoke test: build a four-bar I/IV progression in D major and
# print its textual representation.
if __name__ == '__main__':
    cp = ChordProgression(type="", metre="", mode="M", tonic="D", source="")
    cp.progression = [[1, 1, 1, 1, 4, 4, 4, 4], [1, 1, 1, 1, 4, 4, 4, 4], [1, 1, 1, 1, 4, 4, 4, 4],
                      [1, 1, 1, 1, 4, 4, 4, 4], ]
    print(cp)
    # listen(cp.to_midi(tempo=70, tonic="A", mode="M", instrument=SHAKUHACHI))
| StarcoderdataPython |
import tkinter as tk
from tkinter import ttk
import matplotlib.pyplot as plt
import os
from mdtools.plotting_class import FilePlotting
LARGE_FONT = ("Verdana", 10)
class MDAnalysis(tk.Tk):
    """Root window that stacks the application's pages in one grid cell."""

    def __init__(self, *args, **kwargs):
        tk.Tk.__init__(self, *args, **kwargs)
        tk.Tk.wm_title(self, "Analysis of MD simulations")
        container = ttk.Frame(self, width=200, height=200)
        self.frames = {}
        # Build each page once; all pages share the same grid cell so
        # raising one hides the others.
        for page_cls in (MainPage, PageOne):
            page = page_cls(container, self)
            self.frames[page_cls] = page
            page.grid(row=0, column=0, sticky="nsew")
        self.show_frame(MainPage)

    def show_frame(self, cont):
        """Raise the requested page to the top of the stacking order."""
        self.frames[cont].tkraise()
class MainPage(tk.Frame):
    """Main control page: simulation parameters plus plotting buttons.

    NOTE(review): most widgets are created without an explicit parent, so
    they attach to the default Tk root rather than this frame -- kept
    as-is because the layout visibly depends on it.
    """

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        self.particles = tk.IntVar()
        self.steps = tk.IntVar()

        # Entry fields
        # self. included in these 2 variables because otherwise it was not working
        self.steps.set(10000)
        __steps_label = ttk.Label(text="N(steps)")
        __steps_label.grid(row=0, column=6)
        __steps = ttk.Entry(textvariable=self.steps, width=6)
        __steps.grid(row=0, column=7, pady=5)

        self.particles.set(1000)
        __particles_label = ttk.Label(text="Particles")
        __particles_label.grid(row=0, column=8)
        __particles = ttk.Entry(textvariable=self.particles, width=4)
        __particles.grid(row=0, column=9, pady=5)

        rho = tk.DoubleVar()
        rho.set(0.5)
        __rho_label = ttk.Label(text="\u03C1")
        __rho_label.grid(row=0, column=10)
        __rho = ttk.Entry(textvariable=rho, width=6)
        __rho.grid(row=0, column=11, pady=5)

        t = tk.DoubleVar()
        t.set(0.5)
        __t_label = ttk.Label(text="T")
        __t_label.grid(row=0, column=12)
        __t = ttk.Entry(textvariable=t, width=6)
        __t.grid(row=0, column=13, pady=5)

        a = tk.DoubleVar()
        a.set(0.5)
        __a_label = ttk.Label(text="a")
        __a_label.grid(row=0, column=14)
        __a = ttk.Entry(textvariable=a, width=6)
        __a.grid(row=0, column=15, pady=5)

        n = tk.IntVar()
        n.set(12)
        __n_label = ttk.Label(text="n")
        __n_label.grid(row=0, column=16)
        __n = ttk.Entry(textvariable=n, width=2)
        __n.grid(row=0, column=17, pady=5)

        obj = FilePlotting(self.steps.get(), self.particles.get())
        os.chdir('../../Archives of Data/')

        # Buttons
        # plotting entries
        energies_label = ttk.Label(text="Energies", font=LARGE_FONT)
        energies_label.grid(row=0, column=0, columnspan=2, pady=5)
        stat_label = ttk.Label(text="Statistical Analysis", font=LARGE_FONT)
        stat_label.grid(row=0, column=3, columnspan=3, pady=5)
        # todo: this is a hack. find better sol'n
        empty_label = ttk.Label(text="     ")
        empty_label.grid(row=0, column=2)
        self.grid_columnconfigure(2, minsize=1000)

        # Energies vs time for single runs
        # BUG FIX: the original passed command=obj.potential_data(...),
        # which CALLED the plot function at construction time and bound
        # its return value as the callback; wrap in lambda like the
        # sibling buttons so it runs on click.
        u_en = ttk.Button(text="U",
                          command=lambda: obj.potential_data(rho.get(), t.get(), n.get(), a.get()))
        u_en.grid(row=1, column=0)
        k_en = ttk.Button(text="K")
        k_en.grid(row=1, column=1)
        total_en = ttk.Button(text="U+K")
        total_en.grid(row=2, column=0)
        # BUG FIX: same immediate-call defect as the "U" button above.
        all_en = ttk.Button(text="All",
                            command=lambda: obj.energy_plots(rho.get(), t.get(), n.get(), a.get()))
        all_en.grid(row=2, column=1)

        # Statistical Quantities
        # tkinter variables not converted to Python variables
        rdf = ttk.Button(text="RDF",
                         command=lambda: obj.rdf(rho.get(), t.get(), n.get(), a.get()))
        rdf.grid(row=1, column=3)
        msd = ttk.Button(text="MSD",
                         command=lambda: obj.msd(rho.get(), t.get(), n.get(), a.get()))
        msd.grid(row=1, column=4)
        vaf = ttk.Button(text="VAF",
                         command=lambda: obj.vaf(rho.get(), t.get(), n.get(), a.get()))
        vaf.grid(row=1, column=5)
        dif_plot = ttk.Button(text="D vs a",
                              command=lambda: obj.diffusion_plot(rho.get(), t.get(), n.get(), a.get()))
        dif_plot.grid(row=2, column=3)
        pc = ttk.Button(text="Pc",
                        command=lambda: obj.pc(rho.get(), t.get(), n.get(), a.get()))
        pc.grid(row=2, column=4)

        # Allows multiple figures to be stacked and then plotted
        # use tk.Button since ttk has no easy way for bg/fg manipulation
        plot_button = tk.Button(text="PLOT", bg="blue",
                                command=lambda: plt.show())
        plot_button.grid(row=1, column=7, padx=5)
        clear_figure = tk.Button(text="Clear Fig", bg="red",
                                 command=lambda: plt.clf())
        clear_figure.grid(row=1, column=8, padx=5)

        # No-Data plots
        no_data_label = ttk.Label(text="Theoretical", font=LARGE_FONT)
        no_data_label.grid(row=4, column=0, columnspan=2)
        # todo: this is a hack. find better sol'n
        empty_label = ttk.Label(text="     ")
        empty_label.grid(row=3, column=0)
        self.grid_rowconfigure(3, minsize=1000)

        # Buttons
        potential = ttk.Button(text="Potential",
                               command=lambda: obj.potential(n.get(), a.get()))
        potential.grid(row=5, column=0)
        force = ttk.Button(text="Force",
                           command=lambda: obj.force(n.get(), a.get()))
        force.grid(row=5, column=1)
        rdf2 = ttk.Button(text="RDF2",
                          command=lambda: obj.rdf_2(n.get(), a.get()))
        rdf2.grid(row=6, column=0)

        avg_q_label = ttk.Label(text="Average Quantities", font=LARGE_FONT)
        avg_q_label.grid(row=5, column=3, columnspan=3)
        three_d_label = ttk.Label(text="3D", font=LARGE_FONT)
        three_d_label.grid(row=9, column=0, columnspan=2)
class PageOne(tk.Frame):
    """Placeholder secondary page with a button back to the main page."""

    def __init__(self, parent, controller):
        tk.Frame.__init__(self, parent)
        title = ttk.Label(self, text="Page 1", font=LARGE_FONT)
        title.grid(row=0, column=0, pady=10, padx=10)
        back_button = ttk.Button(
            self,
            text="Back to Home",
            command=lambda: controller.show_frame(MainPage),
        )
        back_button.grid(row=1, column=0)
# Build the root window and hand control to the Tk event loop.
app = MDAnalysis()
app.mainloop()
| StarcoderdataPython |
import os
import sys
import numpy as np
from scipy.ndimage import measurements
path = os.path.dirname(os.path.dirname(os.path.dirname(os.path.
abspath(__file__))))
if path not in sys.path:
sys.path.append(path)
from CM.CM_TUW0.rem_mk_dir import rm_file
from CM.CM_TUW1.read_raster import raster_array
from CM.CM_TUW4 import run_cm as CM4
from CM.CM_TUW19 import run_cm as CM19
def distribuition_costs(invest_Euro, maxDHdem, features_path, hdm_1st, hdm_last, MS_1st, pixT,
                        DHT, costT, coh_area_raster, hdm_dh_reg_last_year,
                        label_raster, struct=np.ones((3, 3))):
    """Select district-heating coherent areas under a grid cost ceiling.

    For each pixel threshold in ``pixT``, coherent regions are computed
    from the heat-demand raster; a region is accepted when its specific
    cost (total investment / total demand) is at most ``costT`` and its
    demand at least ``DHT``.  Accepted regions, the masked demand raster
    and the region labels are written out via CM19.

    Raises ValueError when no region qualifies, or when more than 100 do.
    NOTE(review): ``MS_1st`` and several intermediate arrays
    (``hdm_1st_arr``, ``ave_dem_dh_reg``) are computed but not used/returned
    in the visible code -- presumably consumed by a later revision.
    """
    rm_file(coh_area_raster, hdm_dh_reg_last_year, label_raster)
    invest_Euro_arr = raster_array(invest_Euro)
    maxDHdem_arr = raster_array(maxDHdem)
    hdm_arr, geo_transform = raster_array(hdm_last, return_gt=True)
    rast_origin = geo_transform[0], geo_transform[3]
    coh_areas = np.zeros_like(maxDHdem_arr, 'int8')
    # Pixels with zero maxDHdem must never glue two regions together.
    reg_filter = maxDHdem_arr.astype(bool).astype('int8')
    for pix_threshold in pixT:
        # calculate coherent regions with given thresholds and cut them to
        # LAU2 levels
        DH_Regions = CM4.main(hdm_arr, features_path, pix_threshold, DHT,
                              None, rast_origin, only_return_areas=True)
        # multiplication with reg_filter required to follow maxDHdem
        # pattern and separate connection of regions with pixels that have
        # value of zero in maxDHdem
        result = DH_Regions.astype(int) * reg_filter
        labels, nr_coherent = measurements.label(result, structure=struct)
        if nr_coherent == 0:
            break
        for i in range(1, nr_coherent + 1):
            temp = labels == i
            q = np.sum(maxDHdem_arr[temp])
            q_inv = np.sum(invest_Euro_arr[temp])
            q_spec_cost = q_inv / q
            if q_spec_cost <= costT and q >= DHT:
                # Accept the region and remove its demand from further
                # threshold passes.
                coh_areas[temp] = 1
                hdm_arr[temp] = 0
        labels = None
        nr_coherent = None
    hdm_last_arr = raster_array(hdm_last)
    hdm_1st_arr = raster_array(hdm_1st)
    labels, numLabels = measurements.label(coh_areas, structure=struct)
    if numLabels == 0:
        raise ValueError('For the provided grid cost ceiling, no district '
                         'heating potential area can be realized!')
    if numLabels > 100:
        raise ValueError('For the given scenario, we found more than 100 '
                         'coherent areas. Please reduce the size of your '
                         'selection and run the scenario again!')
    hdm_in_dh_reg = hdm_last_arr * coh_areas
    CM19.main(coh_area_raster, geo_transform, 'int8', coh_areas)
    CM19.main(hdm_dh_reg_last_year, geo_transform, "float64", hdm_in_dh_reg)
    CM19.main(label_raster, geo_transform, "int16", labels)
    # average demand in dh regions: sum_demand/sum_area_of_dhReg
    # MWh/ha
    ave_dem_dh_reg = np.sum(hdm_in_dh_reg) / np.sum(coh_areas)
| StarcoderdataPython |
import argparse
import contextlib
import functools
import itertools
import os
import shutil
import time
from pathlib import Path
import numpy as np
import torch
import torch.multiprocessing as mp
import torch.nn.functional as F
from torch.utils.tensorboard import SummaryWriter
import cargan
###############################################################################
# Train
###############################################################################
def train(rank, name, directory, datasets, checkpoint, gpu):
    """Train the CARGAN vocoder (generator + discriminator) on *datasets*.

    Args:
        rank: Process rank for distributed training, or None for single-GPU
        name: Unique name of this training run (used for final evaluation)
        directory: Output directory for checkpoints, samples and tensorboard
        datasets: Datasets to train on
        checkpoint: Optional directory containing checkpoints to resume from
        gpu: Index of the GPU this process trains on

    Only the rank-0 (or single-GPU) process logs, validates, generates
    samples and saves checkpoints; all processes train.
    """
    ###############
    # Load models #
    ###############

    netG = cargan.GENERATOR()
    netD = cargan.DISCRIMINATOR()
    fft = cargan.preprocess.mels.MelSpectrogram()
    if cargan.LOSS_PITCH_DISCRIMINATOR:
        pitchD = cargan.model.pitch.PitchDiscriminator()

    #############
    # Multi-GPU #
    #############

    device = torch.device(f'cuda:{gpu}')
    fft = fft.to(device)
    netG.to(device)
    netD.to(device)
    if cargan.LOSS_PITCH_DISCRIMINATOR:
        pitchD.to(device)
    if rank is not None:
        netG = torch.nn.parallel.DistributedDataParallel(
            netG, device_ids=[gpu])
        netD = torch.nn.parallel.DistributedDataParallel(
            netD, device_ids=[gpu])
        # Keep handles to the raw modules for saving/loading state dicts
        netG_unwrapped = netG.module
        netD_unwrapped = netD.module
        if cargan.LOSS_PITCH_DISCRIMINATOR:
            pitchD = torch.nn.parallel.DistributedDataParallel(
                pitchD,
                device_ids=[gpu])
            pitchD_unwrapped = pitchD.module
    else:
        netG_unwrapped = netG
        netD_unwrapped = netD
        if cargan.LOSS_PITCH_DISCRIMINATOR:
            pitchD_unwrapped = pitchD

    ######################
    # Create tensorboard #
    ######################

    if not rank:
        writer = SummaryWriter(str(directory))

    #####################
    # Create optimizers #
    #####################

    optG = cargan.OPTIMIZER(netG.parameters())
    optD = cargan.OPTIMIZER(netD.parameters())
    if cargan.LOSS_PITCH_DISCRIMINATOR:
        optP = cargan.OPTIMIZER(pitchD.parameters())

    ########################################
    # Maybe start from previous checkpoint #
    ########################################

    if checkpoint is not None:
        print('Loading from checkpoint...')
        # Pick the newest checkpoint by the step number in the filename
        epochs = [
            int(f.stem.split('-')[1])
            for f in checkpoint.glob('checkpoint-*.pt')]
        epochs.sort()
        latest = f'{epochs[-1]:08d}'
        netG_unwrapped.load_state_dict(
            torch.load(checkpoint / f'netG-{latest}.pt', map_location=device))
        netD_unwrapped.load_state_dict(
            torch.load(checkpoint / f'netD-{latest}.pt', map_location=device))
        if cargan.LOSS_PITCH_DISCRIMINATOR:
            pitchD_unwrapped.load_state_dict(
                torch.load(checkpoint / f'pitchD-{latest}.pt', map_location=device))
        ckpt = torch.load(
            checkpoint / f'checkpoint-{latest}.pt',
            map_location=device)
        optG.load_state_dict(ckpt['optG'])
        optD.load_state_dict(ckpt['optD'])
        start_epoch, steps = ckpt['epoch'], ckpt['steps']
    else:
        start_epoch, steps = -1, 0

    #####################
    # Create schedulers #
    #####################

    scheduler_fn = functools.partial(
        torch.optim.lr_scheduler.ExponentialLR,
        gamma=.999,
        last_epoch=start_epoch if checkpoint is not None else -1)
    scheduler_g = scheduler_fn(optG)
    scheduler_d = scheduler_fn(optD)
    if cargan.LOSS_PITCH_DISCRIMINATOR:
        scheduler_p = scheduler_fn(optP)

    #######################
    # Create data loaders #
    #######################

    # Seed all RNGs for reproducible batching
    np.random.seed(cargan.RANDOM_SEED)
    torch.cuda.manual_seed(cargan.RANDOM_SEED)
    torch.manual_seed(cargan.RANDOM_SEED)
    train_loader, val_loader, test_loader = cargan.data.loaders(
        datasets,
        rank is not None)

    #######################################
    # Write original audio to tensorboard #
    #######################################

    test_data = []
    if not rank:
        for i, (features, audio, _, _) in enumerate(test_loader):
            x_t = audio.to(device)
            s_t = features.to(device)
            test_data.append((s_t, x_t))
            writer.add_audio(
                f"original/sample_{i}.wav",
                x_t.squeeze().cpu(),
                0,
                sample_rate=cargan.SAMPLE_RATE)
            if i == cargan.NUM_TEST_SAMPLES - 1:
                break

    #########
    # Train #
    #########

    log_start = time.time()
    best_mel_error = np.inf
    if cargan.LOSS_CREPE:
        crepe_loss = cargan.loss.CREPEPerceptualLoss().to(device)

    # enable cudnn autotuner to speed up training
    torch.backends.cudnn.benchmark = True

    # Flag used to exit both loops once MAX_STEPS is reached so that the
    # final evaluation at the bottom still runs.  (Previously this was a
    # bare `return`, which made the "Evaluate final model" code after the
    # infinite epoch loop unreachable.)
    finished = False
    for epoch in itertools.count(start_epoch):
        for iterno, (features, audio, pitch, periodicity) in enumerate(train_loader):
            x_t = audio.to(device)
            s_t = features.to(device)

            # Maybe split signal into autoregressive context + target chunk
            if cargan.AUTOREGRESSIVE:
                ar = x_t[:, :, :cargan.AR_INPUT_SIZE]
                x_t = x_t[:, :, cargan.AR_INPUT_SIZE:]
            else:
                ar = None
            s_t1 = s_t

            # Move pitch to device if we will use it
            # NOTE(review): the [2::4] stride presumably aligns pitch frames
            # with the generated chunk -- confirm against the data loader.
            if cargan.LOSS_PITCH_DISCRIMINATOR:
                pitch = pitch[2::4].to(device)

            netG.train()
            x_pred_t = netG(s_t1, ar)
            s_pred_t = fft(x_pred_t)
            mel_error = F.l1_loss(s_pred_t, s_t[:, :cargan.NUM_MELS])

            # Discriminator input (optionally prepend autoregressive context)
            if ar is not None and cargan.AR_DISCRIM:
                d_ar = ar[:, :, -cargan.AR_INPUT_SIZE_DISCRIM:]
                d_t = torch.cat([d_ar, x_t], dim=2)
                d_pred_t = torch.cat([d_ar, x_pred_t], dim=2)
            else:
                d_t, d_pred_t = x_t, x_pred_t

            #######################
            # Train Discriminator #
            #######################

            if cargan.PITCH_COND_DISCRIM:
                # Condition the discriminator on interpolated pitch/periodicity
                # NOTE(review): if LOSS_PITCH_DISCRIMINATOR is also enabled,
                # pitch was already sliced and moved above -- these two flags
                # may not compose; confirm the intended configurations.
                pitch = pitch.to(device)
                periodicity = periodicity.to(device)
                interp_fn = functools.partial(
                    torch.nn.functional.interpolate,
                    size=cargan.TRAIN_AUDIO_LENGTH,
                    mode='linear',
                    align_corners=False)
                pitch_interp = interp_fn(pitch)
                period_interp = interp_fn(periodicity)
                d_pred_t = torch.cat(
                    [d_pred_t, pitch_interp, period_interp],
                    dim=1)
                d_t = torch.cat(
                    [d_t, pitch_interp, period_interp],
                    dim=1)
            if cargan.LOSS_ADVERSARIAL:
                D_fake_det = netD(d_pred_t.detach())
                D_real = netD(d_t)

                # Least-squares GAN objective: real -> 1, fake -> 0
                loss_D = 0
                for scale in D_fake_det:
                    loss_D += F.mse_loss(scale[-1], torch.zeros_like(scale[-1]))
                for scale in D_real:
                    loss_D += F.mse_loss(scale[-1], torch.ones_like(scale[-1]))

                netD.zero_grad()
                loss_D.backward()
                optD.step()

                if not rank:
                    writer.add_scalar("train_loss/discriminator", loss_D.item(), steps)

            # Pitch discriminator
            if cargan.LOSS_PITCH_DISCRIMINATOR:
                P_fake = pitchD(d_pred_t.detach())
                P_real = pitchD(d_t)
                p_t = cargan.preprocess.pitch.log_hz_to_bins(pitch.flatten())
                # Generated audio is labelled with the last pitch bin
                p_pred_t = torch.ones_like(p_t) * (cargan.PITCH_BINS - 1)
                loss_DP = torch.nn.functional.cross_entropy(P_real, p_t)
                loss_DP += torch.nn.functional.cross_entropy(P_fake, p_pred_t)

                pitchD.zero_grad()
                loss_DP.backward()
                optP.step()

            ###################
            # Train Generator #
            ###################

            loss_G = 0
            D_fake = netD(d_pred_t)
            D_real = netD(d_t)
            for scale in D_fake:
                loss_G += F.mse_loss(scale[-1], torch.ones_like(scale[-1]))

            # L1 error on mel spectrogram
            if cargan.LOSS_MEL_ERROR:
                loss_G += cargan.LOSS_MEL_ERROR_WEIGHT * mel_error

            # L1 error on waveform
            if cargan.LOSS_WAVEFORM_ERROR:
                wave_loss = torch.nn.functional.l1_loss(x_t, x_pred_t)
                loss_G += cargan.LOSS_WAVEFORM_ERROR_WEIGHT * wave_loss

            # Feature matching loss over intermediate discriminator activations
            if cargan.LOSS_FEAT_MATCH:
                loss_feat = 0
                for i in range(len(D_fake)):
                    for j in range(len(D_fake[i]) - 1):
                        loss_feat += \
                            F.l1_loss(D_fake[i][j], D_real[i][j].detach())
                loss_G += cargan.LOSS_FEAT_MATCH_WEIGHT * loss_feat
                if not rank:
                    writer.add_scalar("train_loss/feature_matching", loss_feat.item(), steps)

            # CREPE perceptual loss
            if cargan.LOSS_CREPE:
                pitch_loss = crepe_loss(x_pred_t.squeeze(1), x_t.squeeze(1))
                loss_G += cargan.LOSS_CREPE_WEIGHT * pitch_loss
                if not rank:
                    writer.add_scalar('train_loss/crepe', pitch_loss.item(), steps)

            # Pitch classification discriminator
            if cargan.LOSS_PITCH_DISCRIMINATOR:
                P_fake = pitchD(x_pred_t.detach())
                p_t = cargan.preprocess.pitch.log_hz_to_bins(pitch.flatten())
                loss_GP = torch.nn.functional.cross_entropy(P_fake, p_t)
                loss_G += cargan.LOSS_PITCH_DISCRIMINATOR_WEIGHT * loss_GP

            netG.zero_grad()
            loss_G.backward()
            optG.step()

            ###########
            # Logging #
            ###########

            if not rank:
                writer.add_scalar("train_loss/generator", loss_G.item(), steps)
                writer.add_scalar("train_loss/mel_reconstruction", mel_error.item(), steps)
                if cargan.LOSS_PITCH_DISCRIMINATOR:
                    writer.add_scalar('train_loss/discriminator-pitch', loss_DP.item(), steps)
                    writer.add_scalar('train_loss/generator-pitch', loss_GP.item(), steps)
            if steps % cargan.INTERVAL_LOG == 0 and not rank:
                log = (
                    f"Epoch {epoch} ({iterno}/{len(train_loader)}) | Steps {steps} | "
                    f"ms/batch {1e3 * (time.time() - log_start) / cargan.INTERVAL_LOG:5.2f} | "
                )
                print(log)
                log_start = time.time()

            ##############
            # Validation #
            ##############

            if steps % cargan.INTERVAL_VALID == 0 and not rank:
                val_start = time.time()
                netG.eval()
                mel_errors = []
                for i, (features, audio, _, _) in enumerate(val_loader):
                    with torch.no_grad():
                        x_t = audio.to(device)
                        s_t = features.to(device)

                        # Maybe split signal
                        if cargan.AUTOREGRESSIVE:
                            ar = x_t[:, :, :cargan.AR_INPUT_SIZE]
                            x_t = x_t[:, :, cargan.AR_INPUT_SIZE:]
                        else:
                            ar = None
                        s_t1 = s_t

                        x_pred_t = netG(s_t1, ar)
                        s_pred_t = fft(x_pred_t)
                        mel_errors.append(F.l1_loss(s_pred_t, s_t[:, :cargan.NUM_MELS]).item())
                mel_error = np.asarray(mel_errors).mean(0)
                writer.add_scalar("val_loss/mel_reconstruction", mel_error, steps)
                if mel_error < best_mel_error:
                    best_mel_error = mel_error
                    print(f"Saving best model @ {best_mel_error:5.4f}...")
                    torch.save(netG_unwrapped.state_dict(), directory / "best_netG.pt")
                    torch.save(netD_unwrapped.state_dict(), directory / "best_netD.pt")
                    if cargan.LOSS_PITCH_DISCRIMINATOR:
                        torch.save(pitchD_unwrapped.state_dict(), directory / "best_pitchD.pt")
                print("-" * 100)
                print("Took %5.4fs to run validation loop" % (time.time() - val_start))
                print("-" * 100)

            ########################################
            # Generate samples                     #
            ########################################

            if (steps % cargan.INTERVAL_SAMPLE == 0 and
                not rank
            ):
                save_start = time.time()
                netG_unwrapped.eval()
                for i, (s_t, _) in enumerate(test_data):
                    with torch.no_grad():
                        if cargan.AUTOREGRESSIVE:
                            pred_audio = cargan.ar_loop(netG_unwrapped, s_t)
                        else:
                            pred_audio = netG_unwrapped(s_t)
                    writer.add_audio(
                        f"generated/sample_{i}.wav",
                        pred_audio.squeeze().cpu(),
                        steps,
                        sample_rate=cargan.SAMPLE_RATE)
                print("-" * 100)
                print("Took %5.4fs to generate samples" % (time.time() - save_start))
                print("-" * 100)

            ########################################
            # Save checkpoint                      #
            ########################################

            if steps % cargan.INTERVAL_SAVE == 0 and not rank:
                save_start = time.time()

                # Save checkpoint
                torch.save(
                    netG_unwrapped.state_dict(),
                    directory / f'netG-{steps:08d}.pt')
                torch.save(
                    netD_unwrapped.state_dict(),
                    directory / f'netD-{steps:08d}.pt')
                if cargan.LOSS_PITCH_DISCRIMINATOR:
                    torch.save(
                        pitchD_unwrapped.state_dict(),
                        directory / f'pitchD-{steps:08d}.pt')
                torch.save({
                    'epoch': epoch,
                    'steps': steps,
                    'optG': optG.state_dict(),
                    'optD': optD.state_dict(),
                }, directory / f'checkpoint-{steps:08d}.pt')

                print('-' * 100)
                print('Took %5.4fs to save checkpoint' % (time.time() - save_start))
                print('-' * 100)

            ########################################
            # Evaluate pitch                       #
            ########################################

            if (steps % cargan.INTERVAL_PITCH == 0 and
                not rank
            ):
                pitch_start = time.time()
                netG_unwrapped.eval()

                # Setup metrics
                metrics = cargan.evaluate.objective.metrics.Pitch()

                # Pitch and periodicity extraction
                pitch_fn = functools.partial(
                    cargan.preprocess.pitch.from_audio,
                    gpu=gpu)

                # Setup data loader
                for i, (features, _, pitch, periodicity) in enumerate(test_loader):
                    # Stored pitch is log2-scaled; undo that here
                    pitch = 2 ** pitch.to(device)
                    periodicity = periodicity.to(device)

                    # Evaluate only a few samples
                    if i >= cargan.NUM_PITCH_SAMPLES:
                        break

                    # Vocode
                    features = features.to(device)
                    if cargan.AUTOREGRESSIVE:
                        vocoded = cargan.ar_loop(netG_unwrapped, features)
                    else:
                        with torch.no_grad():
                            vocoded = netG_unwrapped(features)

                    # Estimate pitch
                    pred_pitch, pred_periodicity = pitch_fn(vocoded.squeeze(0))

                    # Update metrics
                    metrics.update(
                        pitch.squeeze(0),
                        periodicity.squeeze(0),
                        pred_pitch,
                        pred_periodicity)
                results = metrics()
                if not rank:
                    writer.add_scalar('train_loss/pitch-rmse', results['pitch'], steps)
                    writer.add_scalar('train_loss/periodicity-rmse', results['periodicity'], steps)
                    writer.add_scalar('train_loss/f1', results['f1'], steps)
                    writer.add_scalar('train_loss/precision', results['precision'], steps)
                    writer.add_scalar('train_loss/recall', results['recall'], steps)
                print("-" * 100)
                print("Took %5.4fs to evaluate pitch" % (time.time() - pitch_start))
                print("-" * 100)

            ########################################
            # Waveform MSE                         #
            ########################################

            if steps % cargan.INTERVAL_WAVEFORM == 0 and not rank:
                with torch.no_grad():
                    wave_loss = torch.nn.functional.mse_loss(x_t, x_pred_t)
                writer.add_scalar('train_loss/waveform_mse', wave_loss.item(), steps)

            ########################################
            # Phase error                          #
            ########################################

            if (steps % cargan.INTERVAL_PHASE == 0 and
                not rank and
                (not cargan.AUTOREGRESSIVE or cargan.CHUNK_SIZE >= cargan.NUM_FFT)
            ):
                metrics = cargan.evaluate.objective.metrics.Phase()
                with torch.no_grad():
                    metrics.update(x_t, x_pred_t)
                writer.add_scalar('train_loss/phase_error', metrics(), steps)

            if steps >= cargan.MAX_STEPS:
                finished = True
                break
            steps += 1
        if finished:
            break
        scheduler_d.step()
        scheduler_g.step()
        if cargan.LOSS_PITCH_DISCRIMINATOR:
            scheduler_p.step()

    # Evaluate final model from the most recent saved generator checkpoint
    if not rank:
        last_save_step = steps - steps % cargan.INTERVAL_SAVE
        checkpoint = directory / f'netG-{last_save_step:08d}.pt'
        cargan.evaluate.objective.from_datasets(
            name,
            datasets,
            checkpoint,
            gpu)
def train_ddp(rank, name, directory, datasets, checkpoint, gpus):
    """Train with distributed data parallelism"""
    # One process per GPU: `rank` indexes into `gpus` to pick this process's
    # device, and ddp_context sets up / tears down the NCCL process group.
    with ddp_context(rank, len(gpus)):
        train(rank, name, directory, datasets, checkpoint, gpus[rank])
###############################################################################
# Utilities
###############################################################################
@contextlib.contextmanager
def ddp_context(rank, world_size):
    """Context manager for distributed data parallelism

    Args:
        rank: Integer rank of this process within the process group
        world_size: Total number of processes in the group
    """
    # Setup ddp
    # NOTE(review): master address/port are hard-coded; this assumes
    # single-node training and that port 12355 is free -- confirm before
    # any multi-node use.
    os.environ['MASTER_ADDR'] = 'localhost'
    os.environ['MASTER_PORT'] = '12355'
    torch.distributed.init_process_group(
        "nccl",
        init_method="env://",
        world_size=world_size,
        rank=rank)
    try:
        # Execute user code
        yield
    finally:
        # Close ddp (always torn down, even if training raises)
        torch.distributed.destroy_process_group()
###############################################################################
# Entry point
###############################################################################
def main(name, checkpoint, datasets, overwrite, gpus):
    """Set up the run directory and launch training.

    Args:
        name: Unique name of this training run
        checkpoint: Optional checkpoint directory to resume from
        datasets: Datasets to train on
        overwrite: Whether to delete a previous run of the same name
        gpus: GPU indices; more than one spawns one DDP process per GPU
    """
    # Optionally overwrite training with same name
    directory = cargan.RUNS_DIR / name
    if directory.exists() and overwrite:
        shutil.rmtree(directory)

    # Create output directory
    directory.mkdir(parents=True, exist_ok=True)

    # Save configuration
    # Snapshot the hyperparameters alongside the run for reproducibility
    config_file = Path(__file__).parent / 'constants.py'
    shutil.copyfile(config_file, directory / 'constants.py')

    # Distributed data parallelism
    if len(gpus) > 1:
        mp.spawn(
            train_ddp,
            args=(name, directory, datasets, checkpoint, gpus),
            nprocs=len(gpus),
            join=True)
    else:
        # Single-GPU training: rank None disables DDP wrapping in train()
        train(None, name, directory, datasets, checkpoint, gpus[0])
def parse_args():
    """Parse command-line arguments for a training run.

    Returns an argparse.Namespace with: name, datasets, checkpoint,
    overwrite, and gpus.
    """
    parser = argparse.ArgumentParser()
    add = parser.add_argument
    add('--name',
        required=True,
        help='A unique name to give to this training')
    add('--datasets',
        nargs='+',
        required=True,
        help='The datasets to use for training')
    add('--checkpoint',
        type=Path,
        help='Optional checkpoint to start training from')
    add('--overwrite',
        action='store_true',
        help='Whether to overwrite the previous training of the same name')
    add('--gpus',
        type=int,
        nargs='+',
        required=True,
        help='The gpus to run training on')
    return parser.parse_args()
# Script entry point: expand the parsed CLI namespace into keyword arguments.
if __name__ == '__main__':
    main(**vars(parse_args()))
| StarcoderdataPython |
1679299 | <gh_stars>1-10
# Definition for a binary tree node.
class TreeNode:
    """Binary tree node holding a value and optional left/right children."""

    def __init__(self, x):
        # Children start empty; callers attach subtrees after construction.
        self.val, self.left, self.right = x, None, None
class LevelNode:
    """Pairs a tree node with its 1-based depth for BFS bookkeeping."""

    def __init__(self, order, node):
        self.order, self.node = order, node
class Solution:
    def levelOrder(self, root):
        """Return the node values of a binary tree grouped level by level.

        :type root: TreeNode
        :rtype: List[List[int]]
        """
        # Local import keeps the module free of extra top-level dependencies.
        from collections import deque

        if not root:
            return []
        result = []
        frontier = deque([root])
        # Process one full level per iteration; deque.popleft() is O(1),
        # replacing the original O(n) list.pop(0) per node and removing the
        # need for per-node depth bookkeeping (LevelNode).
        while frontier:
            level_vals = []
            for _ in range(len(frontier)):
                node = frontier.popleft()
                level_vals.append(node.val)
                if node.left:
                    frontier.append(node.left)
                if node.right:
                    frontier.append(node.right)
            result.append(level_vals)
        return result
| StarcoderdataPython |
110767 | <reponame>sgino209/cloudOCR<gh_stars>0
from sys import stdout
from os import path
from AbbyyOnlineSdk import *
# Recognize a file at filePath and save result to resultFilePath
def recognizeFile(processor, filePath, resultFilePath, language, outputFormat):
    """Upload filePath to the ABBYY Cloud OCR service, poll until the task
    finishes, download the result to resultFilePath, and return the
    prediction dict.

    NOTE(review): on a failed upload (task is None) this returns None, not
    the empty dict initialized below -- callers should handle both.
    `time` is presumably provided by AbbyyOnlineSdk's star import; verify.
    """
    prediction = {}
    print "Uploading.."
    settings = ProcessingSettings()
    settings.Language = language
    settings.OutputFormat = outputFormat
    task = processor.ProcessImage(filePath, settings)
    if task is None:
        print "Error"
        return
    print "Id = %s" % task.Id
    print "Status = %s" % task.Status

    # Wait for the task to be completed
    stdout.write("Waiting..")
    # Note: it's recommended that your application waits at least 2 seconds
    # before making the first getTaskStatus request and also between such requests
    # for the same task. Making requests more often will not improve your
    # application performance.
    # Note: if your application queues several files and waits for them
    # it's recommended that you use listFinishedTasks instead (which is described
    # at http://ocrsdk.com/documentation/apireference/listFinishedTasks/).
    while task.IsActive():
        time.sleep(5)
        stdout.write(".")
        task = processor.GetTaskStatus(task)

    print "Status = %s" % task.Status

    if task.Status == "Completed":
        if task.DownloadUrl is not None:
            prediction = processor.DownloadResult(task, resultFilePath)
            print "Result was written to %s" % resultFilePath
    else:
        print "Error processing task"

    return prediction
def main_abbyy(args):
prediction = {}
# Load ABBYY SDK and feed credentials:
processor = AbbyyOnlineSdk()
processor.ApplicationId = args.abbyy_appid
processor.Password = <PASSWORD>
if args.img_type == 'URL':
print('Sending URL paths to ABBYY is not supported, please upload a local image file')
else:
sourceFile = args.img_path
targetFile = path.join(args.res_path, path.basename(sourceFile) + '.abbyy.out')
language = 'English'
outputFormat = 'txt'
if path.isfile(sourceFile):
prediction = recognizeFile(processor, sourceFile, targetFile, language, outputFormat)
else:
print "No such file: %s" % sourceFile
return prediction
| StarcoderdataPython |
1912677 | <filename>packages/datetime/lektor_datetime.py
from lektor.pluginsystem import Plugin
from datetime import datetime
def parse_date(str_date, fmt):
    """Parse *str_date* using the strptime format *fmt* into a datetime."""
    parsed = datetime.strptime(str_date, fmt)
    return parsed
class DatetimePlugin(Plugin):
    """Lektor plugin exposing date parsing to Jinja templates."""

    # Lektor hook: runs for every template render and makes parse_date
    # callable from templates as `parse_date(string, format)`.
    def on_process_template_context(self, context, **extra):
        context['parse_date'] = parse_date
| StarcoderdataPython |
1867175 | <gh_stars>1-10
'''
Read DCS json in an iterated way
@author: avinashvarna
'''
from __future__ import print_function
import os
#import ijson.backends.yajl2_cffi as ijson
import ujson
import codecs
import datetime
from indic_transliteration import sanscript
def iter_sentences():
    """Load dcs_sentences.json eagerly and yield every doc whose _id marks
    it as a sentence record.

    ujson loads the whole "docs" list at once; the commented-out streaming
    ijson variant it replaced has been removed.
    """
    start = datetime.datetime.now()
    print("Loading file started at", start)
    with codecs.open('dcs_sentences.json', "rb", "utf8") as f:
        doc_list = ujson.load(f)["docs"]
    end = datetime.datetime.now()
    # Fix: the original printed `start` for the finish time as well.
    print("Loading file finished at", end, "took", end-start)
    for doc in doc_list:
        if doc["_id"].startswith("sentence_"):
            yield doc
# For every sentence, write its id, raw text, and the SLP1-transliterated
# roots of its analysis decomposition -- one comma-separated line each.
start = datetime.datetime.now()
# Fix: typo "processign" in the progress message.
print("Starting processing at", start)
with codecs.open('sent_roots.txt', "wb", "utf8") as out:
    for i, doc in enumerate(iter_sentences()):
        if "dcsAnalysisDecomposition" in doc:
            out.write(str(doc["dcsId"]) + ", ")
            out.write(doc["text"] + ", ")
            # Collect the root of every analysed word group
            s = ""
            for decomp in doc["dcsAnalysisDecomposition"]:
                for group in decomp:
                    if "root" in group:
                        s += " " + group["root"]
            # Convert from IAST to SLP1 transliteration before writing
            s_trans = sanscript.transliterate(s, sanscript.IAST, sanscript.SLP1)
            out.write(s_trans + "\n")
#            if i == 150: break
end = datetime.datetime.now()
print("Processing finished at", start, "took", end-start) | StarcoderdataPython |
3550844 | <reponame>ColtonBarr/aigt
import os
import time
import cv2
import sys
import numpy
import random
import argparse
import logging
import pyigtl
FLAGS = None
def main():
    """Serve neural-network predictions over OpenIGTLink.

    Dynamically imports the user-supplied network module (which must define
    a class named after the module, exposing loadModel() and predict()),
    then relays IMAGE messages from an incoming pyigtl client connection
    through the model to an outgoing pyigtl server until a STOP string
    message arrives or no image is received for FLAGS.timeout seconds.
    """
    # Import the network module from the parent of the model directory.
    try:
        networkModuleName = FLAGS.network_module_name
        sys.path.append(os.path.join(FLAGS.model_directory,os.pardir))
        importStatement = "from " + networkModuleName + " import " + networkModuleName + " as NeuralNetwork"
        exec(importStatement,globals())
    except ModuleNotFoundError:
        # NOTE(review): the message reports FLAGS.model_name although the
        # failure concerns the network module; errorMessage is printed but
        # execution then continues and will fail at NeuralNetwork() below.
        logging.info("Could not find model folder " + str(FLAGS.model_name))
        errorMessage = "Could not find model folder " + str(FLAGS.model_name)
        print(errorMessage)
    # Silence TensorFlow C++ logging before the model is constructed.
    os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
    model_name = FLAGS.model_name
    modelFolder =FLAGS.model_directory
    # NOTE(review): currentTime is never used afterwards.
    currentTime = time.time()
    model = NeuralNetwork()
    model.loadModel(modelFolder,model_name)
    print("Server starting...")
    # "localhost" means serve only locally; otherwise bind externally.
    if FLAGS.outgoing_host == "localhost":
        server = pyigtl.OpenIGTLinkServer(port=FLAGS.outgoing_port,local_server=True)
    else:
        server = pyigtl.OpenIGTLinkServer(port=FLAGS.outgoing_port, local_server=False)
    server.start()
    print("Server running on " + str(server.host) + " : " + str(server.port) + "...")
    print("Client starting...")
    client = pyigtl.OpenIGTLinkClient(host=FLAGS.incoming_host, port=FLAGS.incoming_port)
    client.start()
    print(FLAGS.incoming_host)
    print(FLAGS.incoming_port)
    print("Client running...")
    lastMessageTime = time.time()
    ImageReceived = False
    frameCount = 0
    try:
        # Run until the first image has arrived AND the stream then goes
        # quiet for longer than the timeout.
        while (not ImageReceived) or (ImageReceived and time.time() - lastMessageTime < FLAGS.timeout):
            #if server.is_connected() and client.is_connected():
            messages = client.get_latest_messages()
            if len(messages) > 0:
                for message in messages:
                    if message._message_type == "IMAGE":
                        frameCount +=1
                        ImageReceived = True
                        lastMessageTime = time.time()
                        # Take the first channel/frame of the image payload.
                        image = message.image
                        image = image[0]
                        print(time.time())
                        (networkOutput) = model.predict(image)
                        print(time.time())
                        # Wrap the prediction in the message type the client
                        # expects; TRANSFORM output is not implemented yet.
                        if FLAGS.output_type == 'STRING':
                            labelMessage = pyigtl.StringMessage(networkOutput, device_name=FLAGS.device_name)
                            server.send_message(labelMessage)
                        elif FLAGS.output_type == 'IMAGE':
                            labelMessage = pyigtl.ImageMessage(networkOutput, device_name=FLAGS.device_name)
                            server.send_message(labelMessage)
                        elif FLAGS.output_type == 'TRANSFORM':
                            pass
                        print(frameCount)
                    if message._message_type == "STRING":
                        print("Received stop message")
                        text = message.string
                        if text == "STOP":
                            client.stop()
                            server.stop()
            else:
                pass
            # Poll at 4 Hz to avoid busy-waiting.
            time.sleep(0.25)
    except KeyboardInterrupt:
        pass
# CLI: model selection, OpenIGTLink connection endpoints, and output options.
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--network_module_name',
        type=str,
        default='',
        help='Name of module that defines the model and the predict function.'
    )
    parser.add_argument(
        '--model_name',
        type=str,
        default='',
        help='Name of model.'
    )
    parser.add_argument(
        '--model_directory',
        type=str,
        default='',
        help='Location of model.'
    )
    parser.add_argument(
        '--output_type',
        type=str,
        default='IMAGE',
        help='type of output your model generates'
    )
    parser.add_argument(
        '--timeout',
        type=int,
        default=15,
        help='Number of seconds before network stops waiting for new image'
    )
    # Fix: the four connection arguments below had help texts copy-pasted
    # from the model arguments; they now describe what they configure.
    parser.add_argument(
        '--incoming_host',
        type=str,
        default='localhost',
        help='Host name or IP of the OpenIGTLink server that sends images.'
    )
    parser.add_argument(
        '--incoming_port',
        type=int,
        default=18946,
        help='Port of the incoming OpenIGTLink image connection.'
    )
    parser.add_argument(
        '--outgoing_host',
        type=str,
        default='localhost',
        help='Host for the outgoing OpenIGTLink server; "localhost" binds a local-only server.'
    )
    parser.add_argument(
        '--outgoing_port',
        type=int,
        default=18944,
        help='Port on which the outgoing OpenIGTLink server serves predictions.'
    )
    parser.add_argument(
        '--device_name',
        type=str,
        default='LabelNode',
        help='The name of the node that the network output should be sent to'
    )
    FLAGS, unparsed = parser.parse_known_args()
main() | StarcoderdataPython |
5057349 | <filename>h2o-bindings/bin/custom/R/gen_isolationforest.py
def update_param(name, param):
    """Customize a generated parameter definition.

    For 'stopping_metric', restrict the allowed values and return the
    mutated *param*; any other name returns None (param untouched).
    """
    if name != 'stopping_metric':
        return None  # param untouched
    param['values'] = ['AUTO', 'anomaly_score']
    return param
extensions = dict(
required_params=['training_frame', 'x'],
validate_required_params="",
set_required_params="""
parms$training_frame <- training_frame
if(!missing(x))
parms$ignored_columns <- .verify_datacols(training_frame, x)$cols_ignore
""",
)
doc = dict(
preamble="""
Trains an Isolation Forest model
""",
params=dict(
x="""A vector containing the \code{character} names of the predictors in the model."""
),
)
| StarcoderdataPython |
6608020 | # DO NOT EDIT -- GENERATED BY CMake -- Change the CMakeLists.txt file if needed
# Source-group listings for the Firestore client (file is CMake-generated).
firestore_client_HDRS = [
    "field_path.h",
]
firestore_client_SRCS = [
    "field_path.cc",
]
| StarcoderdataPython |
6419323 | <reponame>audinowho/Maybe
import sqlite3
class PokeSQL:
    """Thin query layer over a pokedex SQLite database (veekun schema).

    All queries are parameterized; name lookups are case-insensitive and
    match English display names (local_language_id = 9).
    """

    def __init__(self, cursor):
        # sqlite3 cursor connected to the pokedex database.
        self.c = cursor

    def _lookup_id(self, query, name):
        """Run a single-parameter *query*; return the first column of the
        first row, or -1 when nothing matches."""
        rows = self.c.execute(query, (name,)).fetchall()
        return rows[0][0] if rows else -1

    def dexNum(self, name):
        """National dex number for an English species name (-1 if unknown)."""
        return self._lookup_id(
            'SELECT pokemon_species_id '
            'FROM pokemon_species_names '
            'WHERE LOWER(name)=LOWER(?) '
            'AND local_language_id=9', name)

    def moveNum(self, name):
        """Move id for an English move name (-1 if unknown)."""
        return self._lookup_id(
            'SELECT move_id '
            'FROM move_names '
            'WHERE LOWER(name)=LOWER(?) '
            'AND local_language_id=9', name)

    def typeNum(self, name):
        """Type id for an English type name (-1 if unknown)."""
        return self._lookup_id(
            'SELECT type_id '
            'FROM type_names '
            'WHERE LOWER(name)=LOWER(?) '
            'AND local_language_id=9', name)

    def abilityNum(self, name):
        """Ability id for an English ability name (-1 if unknown)."""
        return self._lookup_id(
            'SELECT ability_id '
            'FROM ability_names '
            'WHERE LOWER(name)=LOWER(?) '
            'AND local_language_id=9', name)

    def movePool(self, p_id, lv=100):
        """(move_id, name, level) level-up moves for pokemon *p_id* learned
        at or below level *lv* (version group 16), ordered by level."""
        q = self.c.execute(
            'SELECT M.move_id, N.name, M.level '
            'FROM pokemon_moves M '
            'JOIN move_names N ON M.move_id = N.move_id '
            'WHERE M.pokemon_id=? '
            'AND M.version_group_id=16 '
            'AND M.level<=? '
            'AND M.pokemon_move_method_id=1 '
            'AND N.local_language_id=9 '
            'ORDER BY M.level', (p_id, lv))
        return list(q)

    def learnSet(self, p_id, lv=100):
        """Gen I-IV pokemon that learn move *p_id* by level-up at or below
        *lv*, as (pokemon_id, name, [levels]) tuples.

        Consecutive rows for the same pokemon are merged into one entry
        holding all learn levels.
        """
        res = []
        q = self.c.execute(
            'SELECT M.pokemon_id, N.name, M.level '
            'FROM pokemon_moves M '
            'JOIN pokemon_species_names N ON M.pokemon_id = N.pokemon_species_id '
            'WHERE M.move_id=? '
            'AND M.version_group_id=16 '
            'AND M.level<=? '
            'AND M.pokemon_id<=493 '
            'AND M.pokemon_move_method_id=1 '
            'AND N.local_language_id=9 '
            'ORDER BY M.pokemon_id', (p_id, lv))
        for pokemon_id, name, level in q:
            if res and res[-1][0] == pokemon_id:
                res[-1][2].append(level)
            else:
                res.append((pokemon_id, name, [level]))
        return res

    def abilityPool(self, p_id):
        """(ability_id, name, is_hidden) rows for pokemon *p_id*, regular
        abilities before hidden ones."""
        q = self.c.execute(
            'SELECT M.ability_id, N.name, M.is_hidden '
            'FROM pokemon_abilities M '
            'JOIN ability_names N ON M.ability_id = N.ability_id '
            'WHERE M.pokemon_id=? '
            'AND N.local_language_id=9 '
            'ORDER BY M.is_hidden', (p_id,))
        return list(q)

    def abilitySet(self, p_id):
        """(pokemon_id, name, is_hidden) rows for every gen I-IV pokemon
        that can have ability *p_id*."""
        q = self.c.execute(
            'SELECT M.pokemon_id, N.name, M.is_hidden '
            'FROM pokemon_abilities M '
            'JOIN pokemon_species_names N ON M.pokemon_id = N.pokemon_species_id '
            'WHERE M.ability_id=? '
            'AND M.pokemon_id<=493 '
            'AND N.local_language_id=9 '
            'ORDER BY M.pokemon_id', (p_id,))
        return list(q)

    def typeSet(self, p_id):
        """(pokemon_id, name) rows for every gen I-IV pokemon of type
        *p_id*."""
        q = self.c.execute(
            'SELECT M.pokemon_id, N.name '
            'FROM pokemon_types M '
            'JOIN pokemon_species_names N ON M.pokemon_id = N.pokemon_species_id '
            'WHERE M.type_id=? '
            'AND M.pokemon_id<=493 '
            'AND N.local_language_id=9 '
            'ORDER BY M.pokemon_id', (p_id,))
        return list(q)
# conn = sqlite3.connect('pokedex.sqlite')
# c = conn.cursor()
# db = PokeSQL(c)
# ret = db.typeSet(db.typeNum("dragon"))
# print(ret)
| StarcoderdataPython |
11246233 | # Copyright 2016 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from vitrage.common.constants import EntityCategory
from vitrage.common.constants import VertexProperties as VProps
from vitrage.datasources import CINDER_VOLUME_DATASOURCE
from vitrage.datasources.heat.stack import HEAT_STACK_DATASOURCE
from vitrage.datasources import NOVA_HOST_DATASOURCE
from vitrage.datasources import NOVA_INSTANCE_DATASOURCE
from vitrage.datasources import NOVA_ZONE_DATASOURCE
from vitrage.tests.functional.datasources.base import TestDataSourcesBase
from vitrage.tests.mocks import mock_driver
class TestHeatStack(TestDataSourcesBase):
    """Functional test for the Vitrage heat-stack datasource."""

    # Driver datasources enabled for this test run, registered under the
    # 'datasources' config group in setUpClass.
    DATASOURCES_OPTS = [
        cfg.ListOpt('types',
                    default=[HEAT_STACK_DATASOURCE,
                             NOVA_HOST_DATASOURCE,
                             NOVA_INSTANCE_DATASOURCE,
                             NOVA_ZONE_DATASOURCE,
                             CINDER_VOLUME_DATASOURCE],
                    help='Names of supported driver data sources'),
        cfg.ListOpt('path',
                    default=['vitrage.datasources'],
                    help='base path for data sources')
    ]

    # noinspection PyPep8Naming
    @classmethod
    def setUpClass(cls):
        # Build a fresh oslo.config ConfigOpts with the processor and
        # datasource options, then load the datasources once per class.
        super(TestHeatStack, cls).setUpClass()
        cls.conf = cfg.ConfigOpts()
        cls.conf.register_opts(cls.PROCESSOR_OPTS, group='entity_graph')
        cls.conf.register_opts(cls.DATASOURCES_OPTS, group='datasources')
        cls.load_datasources(cls.conf)

    def test_heat_stack_validity(self):
        """A mocked heat-stack snapshot event adds a stack, an instance,
        and a volume vertex, with the stack linked to both."""
        # Setup
        processor = self._create_processor_with_graph(self.conf)
        self.assertEqual(self._num_total_expected_vertices(),
                         len(processor.entity_graph))

        spec_list = mock_driver.simple_stack_generators(
            stack_num=1,
            instance_and_volume_num=1,
            snapshot_events=1)
        static_events = mock_driver.generate_random_events_list(spec_list)
        heat_stack_event = static_events[0]

        # Action
        processor.process_event(heat_stack_event)

        # Test assertions
        # The event adds 3 vertices: one stack, one instance, one volume.
        self.assertEqual(self._num_total_expected_vertices() + 3,
                         len(processor.entity_graph))

        stack_vertices = processor.entity_graph.get_vertices(
            vertex_attr_filter={
                VProps.CATEGORY: EntityCategory.RESOURCE,
                VProps.TYPE: HEAT_STACK_DATASOURCE
            })
        self.assertEqual(1, len(stack_vertices))

        instance_vertices = processor.entity_graph.get_vertices(
            vertex_attr_filter={
                VProps.CATEGORY: EntityCategory.RESOURCE,
                VProps.TYPE: NOVA_INSTANCE_DATASOURCE
            })
        self.assertEqual(self.NUM_INSTANCES + 1, len(instance_vertices))

        cinder_vertices = processor.entity_graph.get_vertices(
            vertex_attr_filter={
                VProps.CATEGORY: EntityCategory.RESOURCE,
                VProps.TYPE: CINDER_VOLUME_DATASOURCE
            })
        self.assertEqual(1, len(cinder_vertices))

        # The stack should be connected to exactly its instance and volume.
        stack_neighbors = processor.entity_graph.neighbors(
            stack_vertices[0].vertex_id)
        self.assertEqual(2, len(stack_neighbors))
| StarcoderdataPython |
5010584 | <reponame>SwarmRoboticsSUSTechOPAL/RosEv3devDemo
# python3
import rospy
from std_msgs.msg import String
import getch
def talker():
    """Publish single keypresses on the 'chatter' topic until ROS shuts down."""
    pub = rospy.Publisher('chatter', String, queue_size=1)
    rospy.init_node('talker', anonymous=True)
    while not rospy.is_shutdown():
        # getch.getch() blocks until a key is pressed; no rate limiting needed.
        key = getch.getch()
        hello_str = key + str(rospy.get_time())
        rospy.loginfo(hello_str)
        # NOTE(review): only the bare key is published; the timestamped
        # hello_str is log-only.
        pub.publish(key)
# Entry point: run the publisher; a Ctrl-C during rospy.sleep raises
# ROSInterruptException, which we swallow for a clean exit.
if __name__ == '__main__':
    try:
        talker()
    except rospy.ROSInterruptException:
pass | StarcoderdataPython |
11232647 | #!-*-coding:utf-8-*-
import sys
# import PyQt4 QtCore and QtGui modules
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5 import uic
from pylinac import PicketFence
from pylinac import picketfence
from PicketFens import Ui_MainWindow
import matplotlib.pyplot as plt
from pylinac import geometry
class MainWindow(QMainWindow, Ui_MainWindow):
    """MainWindow inherits QMainWindow"""

    def __init__(self, parent=None):
        super(MainWindow, self).__init__(parent)
        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)

    def OpenFile(self):
        """Ask the user for a picket-fence image and run the pylinac
        PicketFence analysis, plotting intermediate histograms.

        NOTE(review): much of this method is experimentation -- it plots an
        error histogram and prints internals rather than producing a report;
        the commented-out publish_pdf/plot calls suggest unfinished work.
        """
        options = QFileDialog.Options()
        options |= QFileDialog.DontUseNativeDialog
        fileName, _ = QFileDialog.getOpenFileName(self, "QFileDialog.getOpenFileName()", "",
                                                  "All Files (*);;DICOM Files (*.dcm)", options=options)
        if fileName:
            leeds = PicketFence(fileName)
            # leeds = PicketFence(fileName, filter=1)
            # leeds.analyze(tolerance=0.2, action_tolerance=0.03, hdmlc=True, invert=False)
            leeds.analyze(orientation='Up-Down', tolerance=0.2, action_tolerance=0.1, hdmlc=True)
            # Build settings for both orientations; d1 (Left-Right) drives
            # the manual picket/MLC inspection below.
            d = picketfence.Settings(orientation='Up-Down', tolerance=0.2, action_tolerance=0.1, hdmlc=True,
                                     image=leeds.image, log_fits=None)
            d1 = picketfence.Settings(orientation='Left-Right', tolerance=0.2, action_tolerance=0.1, hdmlc=True,
                                      image=leeds.image, log_fits=None)
            m = picketfence.PicketManager(image=leeds.image, settings=d1, num_pickets=leeds.num_pickets)
            plt.hist(m.error_hist()[2])
            #oo = picketfence.Picket(image=leeds.image, settings=d1, approximate_idx=m.num_pickets,
            #                        spacing=m.mean_spacing)
            oo = picketfence.Picket(image=leeds.image, settings=d1, approximate_idx=m.num_pickets,
                                    spacing=m.mean_spacing)
            print(len(m.error_hist()[2]))
            plt.plot(m.passed)
            print(len(oo.mlc_meas))
            plt.show()
            # Inspect a single MLC measurement pair from the picket.
            rr = picketfence.MLCMeas(point1=oo.mlc_meas[1], point2=oo.mlc_meas[9], settings=d1)
            print(oo.mlc_meas[1])
            print(rr.passed)
            print(oo.abs_median_error)
            # (Removed a large tangle of commented-out experimentation:
            # results()/publish_pdf calls, geometry.Point plotting, and
            # mean-profile histograms.)

    def __del__(self):
        # Drop the generated UI reference on teardown.
        self.ui = None
# -----------------------------------------------------#
if __name__ == '__main__':
    # create the Qt application object (must exist before any widget)
    app = QApplication(sys.argv)
    app.setApplicationName('PicketFens')
    # create and show the main window
    w = MainWindow()
    w.setWindowTitle('PicketFens')
    w.show()
    # connection
    # QObject.connect( app, SIGNAL( 'lastWindowClosed()' ), app, SLOT( 'quit()' ) )
    # enter the Qt event loop and exit with its return code
    sys.exit(app.exec_())
| StarcoderdataPython |
244856 | #!/usr/bin/env python3
import scapy.all as scapy
import optparse
def get_arguments():
    """Parse command-line options for the ARP scanner.

    Returns:
        optparse.Values with attribute ``ip`` holding the target IP or
        CIDR range (e.g. ``'10.0.0.1/24'``).  Exits with a usage error
        (via ``parser.error``) when ``-t/--target`` is missing.
    """
    parser = optparse.OptionParser()
    # fixed help-text typos: "or or" -> "or", "e.q" -> "e.g."
    parser.add_option("-t", "--target", dest="ip",
                      help="Target IP or range of IPs you want to scan\n e.g.: '**.**.**.1/24'")
    (options, arguments) = parser.parse_args()
    if not options.ip:
        parser.error("[-] Please specify an IP or range of IPs, use --help for more info.")
    return options
def scan(ip):
    """ARP-scan *ip* (single address or CIDR range) on the local network.

    Returns a list of ``{"ip": ..., "mac": ...}`` dicts, one per host that
    answered the broadcast ARP request within the 1 s timeout.
    """
    request = scapy.Ether(dst="ff:ff:ff:ff:ff:ff") / scapy.ARP(pdst=ip)
    answered = scapy.srp(request, timeout=1, verbose=False)[0]
    return [{"ip": reply[1].psrc, "mac": reply[1].hwsrc} for reply in answered]
def ls(my_list_of_dicts):
    """Print a tab-separated IP/MAC table, one row per discovered client."""
    print("IP\t\t\tMAC")
    for entry in my_list_of_dicts:
        print(entry["ip"] + "\t\t" + entry["mac"])
# Script entry: parse the -t/--target option, then ARP-sweep that range.
options = get_arguments()
target_client_list = scan(options.ip)
ls(target_client_list) | StarcoderdataPython |
6645795 | <reponame>okadalabipr/biomass
from dataclasses import make_dataclass
from typing import Dict, List
# Ordered list of model species; list position defines each species'
# index in the state vector.
NAMES: List[str] = [
    "TGFb",
    "Rec",
    "TGFb_pRec",
    "S2",
    "S3",
    "S4",
    "ppS2_ppS2_ppS2",
    "ppS3_ppS3_ppS3",
    "S4_S4_S4",
    "pS2",
    "pS3",
    "ppS2",
    "ppS3",
    "ppS2_ppS2_S4",
    "ppS2_ppS2_ppS3",
    "ppS2_ppS3_ppS3",
    "ppS3_ppS3_S4",
    "ppS2_ppS3_S4",
    "ppS3_S4_S4",
    "ppS2_S4_S4",
    "gene",
]
# Total number of species in the model.
NUM: int = len(NAMES)
# Frozen dataclass type whose fields are the species names; it also
# exposes NAMES and NUM as class-level attributes.
Species = make_dataclass(
    cls_name="Species",
    fields=[(name, int) for name in NAMES],
    namespace={"NAMES": NAMES, "NUM": NUM},
    frozen=True,
)
# V maps each species name (as an attribute) to its state-vector index.
_name_to_index: Dict[str, int] = {name: index for index, name in enumerate(NAMES)}
V = Species(**_name_to_index)
del _name_to_index
| StarcoderdataPython |
3466043 | <gh_stars>10-100
#!/usr/bin/python3
## Tommy
from botbase import *
# Regexes for scraping the Regionalverband Saarbruecken press release:
# weekday wording for today's new cases ("meldet heute <n>")
_saarbruecken_cc1 = re.compile(r"Das Gesundheitsamt des Regionalverbandes meldet heute ([0-9.]+|\w+)")
# Sunday wording: combined Saturday + Sunday new-case figure
_saarbruecken_cc2 = re.compile(r"Das Gesundheitsamt des Regionalverbandes meldet am Samstag (?:[0-9.]+|\w+) und am (?:heutigen )?Sonntag weitere ([0-9.]+|\w+) neue Coronafรคlle")
# cumulative case count ("Insgesamt liegen im Regionalverband <n>")
_saarbruecken_c = re.compile(r"Insgesamt liegen im Regionalverband ([0-9.]+)")
# cumulative death count (two alternative sentence forms)
_saarbruecken_d = re.compile(r"Die Anzahl der Todesfรคlle, die im Zusammenhang mit dem Coronavirus stehen, (?:liegt bei |steigt (?:damit )?auf )(?:insgesamt )?([0-9.]+)")
# newly reported deaths ("<n> weitere Todesfรคlle gemeldet")
_saarbruecken_dd = re.compile(r"([0-9.]+|\w+) weiterer?n? Todesf(?:a|รค)lle? gemeldet")
def saarbruecken(sheets):
    """Scrape the Regionalverband Saarbruecken website for current COVID
    case/death figures and push them to the spreadsheet via `update`.

    :param sheets: spreadsheet handle passed through to botbase's `update`
    :returns: True on success (exceptions propagate to the scheduler)
    """
    import locale
    # German locale so date/number parsing matches the page's formatting
    locale.setlocale(locale.LC_TIME, "de_DE.UTF-8")
    domain = "https://www.regionalverband-saarbruecken.de"
    soup = get_soup(domain)
    # find the teaser box linking to today's case-statistics press release
    for item in soup.find_all("div", {"class":"col-sm-4"}):
        if "Fallzahl-Statistik aus dem Regionalverband" in item.text:
            link_url = item.find("a")["href"]
            break
    link_soup = get_soup(link_url)
    date = link_soup.find("time").get("datetime")
    check_date(date, "Saarbruecken")
    # Sundays use a different sentence (combined Sat+Sun figure)
    weekday = datetime.datetime.today().weekday()
    _saarbruecken_cc = _saarbruecken_cc2 if weekday == 6 else _saarbruecken_cc1
    content = link_soup.text
    #print(content)
    # cc = new cases, c = cumulative cases, d = cumulative deaths (optional),
    # dd = newly reported deaths (only meaningful when d was found)
    cc = force_int(_saarbruecken_cc.search(content).group(1))
    c = force_int(_saarbruecken_c.search(content).group(1))
    d = force_int(_saarbruecken_d.search(content).group(1)) if _saarbruecken_d.search(content) else None
    dd = force_int(_saarbruecken_dd.search(content).group(1)) if _saarbruecken_dd.search(content) and d is not None else None
    comment = "Bot <NAME>" if d is None else "Bot"
    update(sheets, 10041, c=c, cc=cc, d=d, dd=dd, comment=comment)
    return True
# Register the scraper with the bot scheduler for district id 10041
# (presumably the numeric args are a daily 15:41-18:11 time window plus a
# retry/interval value — confirm against botbase.Task).
schedule.append(Task(15, 41, 18, 11, 360, saarbruecken, 10041))
# Allow running this scraper standalone against the live spreadsheet.
if __name__ == '__main__': saarbruecken(googlesheets())
| StarcoderdataPython |
12844283 | <filename>python3/11.py
#! /usr/bin/env python3
# Puzzle part selector: 1 = adjacent-neighbour rules, 2 = line-of-sight rules.
part = 1
def read_input():
    """Load the puzzle input as a list of whitespace-stripped lines."""
    with open('../inputs/input11.txt') as fp:
        return [row.strip() for row in fp.readlines()]
class Seat:
    """One cell of the seating grid.

    State characters: 'L' empty seat, '#' occupied seat, '.' floor,
    '_'/'|' sentinel edge cells added by pad_grid.  Neighbour lookups read
    the module-level `seating` grid, and `get_new_state` consults the
    module-level `part` flag.
    """
    def __init__(self, x, y, state):
        self.x = x
        self.y = y
        self.state = state
    def __str__(self):
        # the state char doubles as the cell's string form (used by hash_seating)
        return self.state
    def isEdge(self):
        return self.state in '_|'
    def isFloor(self):
        return self.state == '.'
    def isEmptySeat(self):
        return self.state == 'L'
    def isFilledSeat(self):
        return self.state == '#'
    # @returns Seat[] -- the 8 immediately adjacent cells (part 1 rules)
    def neighbours(self):
        global seating
        neighbs = {
            'W': seating[self.y][self.x - 1],
            'E': seating[self.y][self.x + 1],
            'S': seating[self.y + 1][self.x],
            'N': seating[self.y - 1][self.x],
            'NW': seating[self.y - 1][self.x - 1],
            'SW': seating[self.y + 1][self.x - 1],
            'NE': seating[self.y - 1][self.x + 1],
            'SE': seating[self.y + 1][self.x + 1]
        }
        return list(neighbs.values())
    # @returns Seat[] -- first visible non-floor cell in each of the
    # 8 directions (part 2 rules)
    def line_of_sight_seats(self):
        dirs = {
            'N': (-1,0),
            'NE': (-1,1),
            'E': (0,1),
            'SE': (1,1),
            'S': (1,0),
            'SW': (1,-1),
            'W': (0,-1),
            'NW': (-1,-1)
        }
        # look for first filled, empty or edge seat in a direction
        def look_at_seat(direction):
            pos = (self.y, self.x)
            # walk until something other than floor is hit; the sentinel
            # border guarantees termination before going out of bounds
            while 1:
                pos = (pos[0] + direction[0], pos[1] + direction[1])
                seat = seating[pos[0]][pos[1]]
                if not seat.isFloor():
                    return seat
        return [look_at_seat(direction) for direction in list(dirs.values())]
    def get_new_state(self):
        """Return this cell's state char for the next generation."""
        # skip floors and edges -- they never change
        if self.isEdge() or self.isFloor():
            return self.state
        # part 1: 8 adjacent cells, tolerance 4; part 2: line of sight, tolerance 5
        if part == 1:
            tolerance = 4
            filled_neighbours = [nb for nb in self.neighbours() if nb.isFilledSeat()]
        else:
            tolerance = 5
            filled_neighbours = [nb for nb in self.line_of_sight_seats() if nb.isFilledSeat()]
        # node empty and no filled neighbs -> filled
        if self.isEmptySeat() and len(filled_neighbours) == 0:
            return '#'
        # node filled and tolerance+ filled neighbs -> empty
        elif self.isFilledSeat() and len(filled_neighbours) >= tolerance:
            return 'L'
        return self.state
def hash_seating(seating):
    """Flatten the seating grid into one string (row-major) so two grid
    states can be compared cheaply.

    @returns {String}
    """
    return "".join(str(cell) for row in seating for cell in row)
def pad_grid(grid):
    """Frame *grid* with sentinel cells ('|' sides, '_' top/bottom) so
    neighbour lookups can never index out of bounds.

    @param {string[]} grid
    """
    framed = ["|" + row + "|" for row in grid]
    sentinel_row = "_" * len(framed[0])
    return [sentinel_row] + framed + [sentinel_row]
# Pad the puzzle grid with sentinel cells, then build the 2D Seat arrays.
diagram = pad_grid(read_input())
# set up two 2D arrays, for current and next state
seating = []
next_seating = []
# fill initial seating (row-major: seating[y][x])
for y, line in enumerate(diagram):
    seating += [[]]
    for x, char in enumerate(line):
        seating[y] += [Seat(x, y, char)]
def run_step(i):
    """One iteration of time: rebuild `next_seating` from the current
    module-level `seating` grid (i is the step counter, unused here)."""
    global seating, next_seating
    next_seating = [
        [Seat(cell.x, cell.y, cell.get_new_state()) for cell in row]
        for row in seating
    ]
# Iterate the automaton; the grid is stable once two consecutive snapshots
# hash identically, at which point the occupied-seat count is printed.
i = 0
while 1:
    i += 1
    run_step(i)
    # progress...
    if i % 20 == 0:
        print(i, hash_seating(next_seating))
    if hash_seating(seating) == hash_seating(next_seating):
        # part 1 - number of full seats, once stable - 2183
        # part 2 - same - 1990
        print(hash_seating(seating).count("#"), "full seats")
        break
    else:
        # shift seating states before next loop
        seating, next_seating = next_seating, []
| StarcoderdataPython |
382527 | <reponame>Vman45/LHA
from pyext import RuntimeModule
from actions.action_callback import ActionCallback
class Action:
    """Value object describing a runnable action: its display name, grouping
    tags, the callback invoked when it fires, and the runtime module used to
    parse the callback's raw arguments."""
    name: str
    # NOTE(review): `[str]` is an unconventional annotation; List[str] was
    # likely intended (same below in __init__) -- confirm before changing.
    tags: [str]
    callback: ActionCallback
    callback_arguments_parser: RuntimeModule
    def __init__(
        self,
        name: str,
        tags: [str],
        callback: ActionCallback,
        callback_arguments_parser: RuntimeModule
    ):
        """Store the action's metadata and collaborators verbatim."""
        self.name = name
        self.tags = tags
        self.callback = callback
        self.callback_arguments_parser = callback_arguments_parser
| StarcoderdataPython |
11341736 | import pandas as pd
import numpy as np
import math
from typing import Tuple
from scipy.integrate import solve_ivp
import matplotlib.pyplot as plt
from function_approximation import rbf_approx, approx_nonlin_func
def read_vectorfield_data(dir_path="../data/", base_filename="linear_vectorfield_data") -> Tuple[np.ndarray, np.ndarray]:
    """
    Reads the 2 files containing the vector field data
    :param dir_path: path of the directory containing the 2 files
    :param base_filename: common part of the name in the 2 files, then the suffix "_x0.txt" or "_x1.txt" is added
    :returns: the data contained in the 2 files in the form of 2 numpy ndarrays
    """
    x0, x1 = (
        pd.read_csv(dir_path + base_filename + suffix, sep=' ', header=None).to_numpy()
        for suffix in ("_x0.txt", "_x1.txt")
    )
    return x0, x1
def estimate_vectors(delta_t: float, x0=None, x1=None) -> np.ndarray:
    """
    Estimates the vector field using the finite-difference formula
    :param delta_t: the time difference used as denominator of the time-difference formula
    :param x0: the data at beginning of time delta
    :param x1: the data at end of time delta
    :returns: an approximation of the vectors s.t. v(x0_k) = x1_k
    """
    # fall back to the on-disk dataset when no data was supplied
    if x0 is None or x1 is None:
        x0, x1 = read_vectorfield_data()
    # finite-difference estimate of the field
    return (x1 - x0) / delta_t
def create_phase_portrait_matrix(A: np.ndarray, title_suffix: str, save_plots=False,
                                 save_path: str = None, display=True):
    """
    Plots the phase portrait of the linear system Ax
    :param A: system's matrix (2x2 in our case)
    :param title_suffix: suffix to add to the title (currently unused here)
    :param save_plots: if True, saves the plot to save_path
    :param save_path: path where to save the plot if save_plots is True
    :param display: if True, display the plot
    """
    w = 10  # half-width of the plotted square window
    Y, X = np.mgrid[-w:w:100j, -w:w:100j]
    eigenvalues = np.linalg.eigvals(A)
    print("Eigenvalues of A: ", eigenvalues)
    # linear vector field A*x, evaluated on the whole grid at once
    UV = A @ np.row_stack([X.ravel(), Y.ravel()])
    U = UV[0, :].reshape(X.shape)
    V = UV[1, :].reshape(X.shape)
    plt.figure(figsize=(10, 10))
    plt.streamplot(X, Y, U, V, density=1.0)
    # BUGFIX: save before show -- plt.show() closes the figure in
    # non-interactive backends, so saving afterwards wrote an empty canvas
    if save_plots:
        plt.savefig(save_path)
    if display:
        plt.show()
def solve_trajectory(x0, x1, funct, args, find_best_dt=False, end_time=0.1, plot=False):
    """
    Solves initial value point problem for a whole dataset of points, up to a certain moment in time
    :param x0: the data at time 0
    :param x1: the data at unknown time step after 0
    :param funct: to get derivative for next steps generation
    :param args: extra arguments forwarded to `funct` by solve_ivp
    :param find_best_dt: if True also the dt where we have lowest MSE is searched
    :param end_time: end time for the simulation
    :param plot: boolean to produce a scatter plot of the trajectory (orange) with the final x1 points in blue
    :returns: points at time end_time, best point in time (getting lowest MSE), lowest MSE
    """
    # initialize variables for find_best_dt procedure
    best_dt = -1
    best_mse = math.inf
    x1_pred = []
    # fixate some times where system must be evaluated (100 uniform samples)
    t_eval = np.linspace(0, end_time, 100)
    sols = []
    for i in range(len(x0)):
        sol = solve_ivp(funct, [0, end_time], x0[i], args=args, t_eval=t_eval)  # solve initial value problem for a given point
        x1_pred.append([sol.y[0, -1], sol.y[1, -1]])  # save the final solution
        if find_best_dt:
            # to find best dt then all the different snapshots in time have to be saved
            sols.append(sol.y)
        # plot the trajectory (orange) and ground truth end point (blue)
        if plot:
            plt.scatter(x1[i, 0], x1[i, 1], c='blue', s=10)
            plt.scatter(sol.y[0, :], sol.y[1, :], c='orange', s=4)
    if find_best_dt:
        # try all the different moments in time, check if it is the best time
        for i in range(len(t_eval)):
            # predicted positions of every point at time t_eval[i]
            pred = [[sols[el][0][i], sols[el][1][i]] for el in range(len(sols))]
            mse = np.mean(np.linalg.norm(pred - x1, axis=1)**2)
            # if mse found is best yet, update the variables
            if mse < best_mse:
                best_mse = mse
                best_dt = t_eval[i]
    if plot:
        plt.rcParams["figure.figsize"] = (14,14)
        plt.show()
    return x1_pred, best_dt, best_mse
def find_best_rbf_configuration(x0, x1, dt=0.1, end_time=0.5):
    """
    grid search over various different eps and n_bases values, returning the whole configuration with lowest MSE
    :param x0: data at time 0
    :param x1: data after a certain unknown dt
    :param dt: dt to approximate the vector field between x0 and x1
    :param end_time: total time of solve_ivp system solving trajectory
    :return: best mse found with the configuration, including eps, n_bases, dt at which the mse was found, centers
    """
    final_best_mse, final_best_eps, final_best_n_bases, final_best_dt = math.inf, -1, -1, -1  # initialize variables
    n_bases_trials = [int(i) for i in np.linspace(100, 1001, 20)]  # define search space for n_bases
    for n_bases in n_bases_trials:
        # sample n_bases distinct data points to act as the RBF centers
        centers = x0[np.random.choice(range(x0.shape[0]), replace=False, size=n_bases)]  # define centers
        for eps in (0.3, 0.5, 0.7, 1.0, 5.0, 10.0, 20.0):
            v = estimate_vectors(dt, x0, x1)  # estimate vector field
            # fit the RBF approximation of the field, then integrate each
            # point forward and score against x1
            C, res, _, _, _, eps, phi = approx_nonlin_func(data=(x0,v), n_bases=n_bases, eps=eps, centers=centers)
            x1_pred, best_dt, best_mse = solve_trajectory(x0, x1, rbf_approx, find_best_dt=True, args=[centers, eps, C], end_time=end_time, plot=False)
            if final_best_mse > best_mse:  # if new mse is better then update all return variables
                final_best_mse, final_best_eps, final_best_n_bases, final_best_dt, final_centers = best_mse, eps, n_bases, best_dt, centers
    print(f"Printing best configuration: eps = {final_best_eps} - n_bases = {final_best_n_bases} - dt = {final_best_dt} giving MSE = {final_best_mse}")
    return final_best_mse, final_best_eps, final_best_n_bases, final_best_dt, final_centers
def create_phase_portrait_derivative(funct, args, title_suffix: str, save_plots=False,
                                     save_path: str = None, display=True, fig_size=10, w=4.5):
    """
    Plots the phase portrait given a 'funct' that gives the derivatives for a certain point
    :param funct: given a 2d point gives back the 2 derivatives
    :param args: extra arguments forwarded to funct
    :param title_suffix: suffix to add to the plot title
    :param save_plots: if True, saves the plot to save_path
    :param save_path: path where to save the plot if save_plots is True
    :param display: if True, display the plot
    :param fig_size: gives width and height of plotted figure
    :param w: useful for defining range for setting Y and X
    """
    # setting up grid width/height
    Y, X = np.mgrid[-w:w:100j, -w:w:100j]
    U, V = [], []
    # evaluate the derivative field point by point over the grid
    for x2 in X[0]:
        for x1 in Y[:, 0]:
            res = funct(0, np.array([x1, x2]), *args)
            U.append(res[0][0])
            V.append(res[0][1])
    U = np.reshape(U, X.shape)
    V = np.reshape(V, X.shape)
    plt.figure(figsize=(fig_size, fig_size))
    plt.streamplot(X, Y, U, V, density=2)
    plt.title(f"{title_suffix}")
    # BUGFIX: save before show -- plt.show() closes the figure in
    # non-interactive backends, so saving afterwards wrote an empty canvas
    if save_plots:
        plt.savefig(save_path)
    if display:
        plt.show()
| StarcoderdataPython |
1714069 | <filename>Level_1/01_Prison_Labor_Dodgers/solution.py
def solution(x, y):
    """Returns ID that is only present in one of the two lists passed as args
    Args:
        x: list of prisoner IDs
        y: list of prisoner IDs
    Returns:
        int value of the additional prisoner ID
    """
    try:
        # symmetric difference: IDs present in exactly one of the lists
        diff = set(x) ^ set(y)
    except TypeError:
        raise TypeError("Args must be lists of IDs")
    if not diff:
        raise ValueError("Args have same set of IDs. One additional ID expected.")
    if len(diff) > 1:
        raise ValueError("More than one additional ID found: %s One expected." % list(diff))
    return diff.pop()
| StarcoderdataPython |
11305194 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from __future__ import annotations
import bs4
class DocumentLine:
    """One physical line of a Document, parsed as XML/HTML via BeautifulSoup.

    Tracks the line's index and its text-offset span (`start`..`end`) within
    the concatenated text of the whole document (offsets are over the
    tag-stripped text, not the raw line).
    """
    def __init__(self, doc: Document, line: str, previous: DocumentLine = None):
        self.doc = doc
        self.line = line
        # 0-based line index, chained from the previous line
        self.idx = previous.idx + 1 if previous else 0
        self._xml_repr = self._build_xml_repr(line)
        # length of the tag-stripped text of this line
        self.length = len(self._xml_repr.text)
        # character offsets of this line within the document's full text
        self.start = previous.end + 1 if previous else 0
        self.end = self.start + self.length - 1
    @property
    def text(self):
        """Tag-stripped text content of this line."""
        return self._xml_repr.text
    @property
    def is_gold(self):
        """Whether the owning document is a gold-standard annotation."""
        return self.doc.is_gold
    def _bounds_check_text_idx(self, idx: int) -> int:
        # clamp idx into [0, len(text)]
        if idx < 0:
            idx = 0
        if idx > len(self.text):
            idx = len(self.text)
        return idx
    def text_at(self, start: int, end: int):
        """Return text[start:end], with both indices clamped into range."""
        start = self._bounds_check_text_idx(start)
        end = self._bounds_check_text_idx(end)
        return self.text[start:end]
    def get_tags_with_name(self, tag_name: str):
        """Return all tags named *tag_name* on this line, wrapped as
        TagInContext objects bound to this line."""
        tags = self._xml_repr.find_all(tag_name)
        return list(map(lambda t: TagInContext(tag=t, doc_line=self), tags))
    @staticmethod
    def _build_xml_repr(line: str) -> bs4.BeautifulSoup:
        # parse with lxml for speed/leniency
        return bs4.BeautifulSoup(line, "lxml")
class Document:
    """An annotated document read from disk, split into DocumentLine objects
    that are chained so each line knows its offset in the whole document."""
    def __init__(self, path: str, basename: str = "", is_gold: bool = False):
        self.path = path
        self.basename = basename
        # True when this document holds gold-standard annotations
        self.is_gold = is_gold
        self.lines = self._read_lines()
    def _read_lines(self) -> [DocumentLine]:
        """Read the file and convert its lines (newlines kept) to DocumentLines."""
        with open(self.path) as f:
            lines = f.read().splitlines(keepends=True)
        return self._lines_to_doc_lines(lines)
    def _lines_to_doc_lines(self, lines: [str]) -> [DocumentLine]:
        """Wrap raw lines, threading each line's predecessor so offsets chain."""
        result = []
        previous = None
        for line in lines:
            doc_line = DocumentLine(doc=self, line=line, previous=previous)
            result.append(doc_line)
            previous = doc_line
        return result
    def line_at(self, idx: int) -> DocumentLine:
        """Return the DocumentLine at 0-based index *idx*."""
        return self.lines[idx]
class TagInContext:
    """A bs4 tag together with the DocumentLine it occurs on, exposing the
    tag's character span within the line and the whole document."""
    def __init__(self, tag: bs4.Tag, doc_line: DocumentLine):
        self.tag = tag
        self.doc_line = doc_line
        # character range this tag's text occupies within the line's text
        self._span = self._determine_line_pos()
    def _determine_line_pos(self) -> range:
        """Compute the tag's text span within the line as a range object."""
        start = self._count_chars_preceding(self.tag)
        return range(start, start + len(self.text))
    def _count_chars_preceding(self, tag: bs4.Tag, count: int = 0) -> int:
        # walk backwards through the parse tree summing the lengths of all
        # text nodes that precede this tag on the line
        previous = tag.previous
        if type(previous) == bs4.NavigableString:
            return self._count_chars_preceding(previous, count + len(previous))
        elif previous is None:
            return count
        else:
            return self._count_chars_preceding(previous, count)
    @property
    def doc(self) -> Document:
        return self.doc_line.doc
    @property
    def idx_of_line(self):
        return self.doc_line.idx
    @property
    def text(self) -> str:
        """Tag-stripped text content of the tag."""
        return self.tag.text
    @property
    def line(self) -> str:
        """Raw source line this tag occurs on."""
        return self.doc_line.line
    @property
    def is_gold(self) -> bool:
        return self.doc_line.is_gold
    @property
    def start_in_line(self):
        return self._span[0]
    @property
    def end_in_line(self):
        return self._span[-1]
    @property
    def start_in_doc_text(self) -> int:
        return self.doc_line.start + self.start_in_line
    @property
    def end_in_doc_text(self) -> int:
        # NOTE(review): this adds the *line end* offset rather than the line
        # start (cf. start_in_doc_text above) -- verify this is intentional.
        return self.doc_line.end + self.end_in_line
    def text_before(self, length: int) -> str:
        """Up to *length* characters of line text preceding the tag."""
        return self.doc_line.text_at(self.start_in_line - length, self.start_in_line)
    def text_after(self, length: int) -> str:
        """Up to *length* characters of line text following the tag."""
        return self.doc_line.text_at(self.end_in_line + 1, self.end_in_line + length)
    def attr(self, k: str) -> str:
        """Return attribute *k* of the tag, or '' when absent."""
        return self.tag.attrs.get(k, "")
    def overlaps(self, other: TagInContext) -> bool:
        """True when the two tags' line spans share at least one position."""
        return bool(set(self._span).intersection(set(other._span)))
| StarcoderdataPython |
3537778 | <gh_stars>0
#!/usr/bin/env python3
###############################################################################
# #
# RMG - Reaction Mechanism Generator #
# #
# Copyright (c) 2002-2019 Prof. <NAME> (<EMAIL>), #
# Prof. <NAME> (<EMAIL>) and the RMG Team (<EMAIL>) #
# #
# Permission is hereby granted, free of charge, to any person obtaining a #
# copy of this software and associated documentation files (the 'Software'), #
# to deal in the Software without restriction, including without limitation #
# the rights to use, copy, modify, merge, publish, distribute, sublicense, #
# and/or sell copies of the Software, and to permit persons to whom the #
# Software is furnished to do so, subject to the following conditions: #
# #
# The above copyright notice and this permission notice shall be included in #
# all copies or substantial portions of the Software. #
# #
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING #
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER #
# DEALINGS IN THE SOFTWARE. #
# #
###############################################################################
"""
Arkane TeraChem module
Used to parse TeraChem output files
"""
import logging
import math
import os.path
import numpy as np
import rmgpy.constants as constants
from rmgpy.statmech import HarmonicOscillator, Conformer
from arkane.common import check_conformer_energy, get_element_mass, symbol_by_number
from arkane.exceptions import LogError
from arkane.ess.log import Log
################################################################################
class TeraChemLog(Log):
"""
Represent a log file from TeraChem. The attribute `path` refers to the
location on disk of the TeraChem log file of interest. Methods are provided
to extract a variety of information into Arkane classes and/or NumPy arrays.
"""
def __init__(self, path):
super(TeraChemLog, self).__init__(path)
def get_number_of_atoms(self):
"""
Return the number of atoms in the molecular configuration used in the TeraChem output file.
Accepted output files: TeraChem's log file, xyz format file, TeraChem's output.geometry file.
"""
n_atoms = 0
with open(self.path, 'r') as f:
file_extension = os.path.splitext(self.path)[1]
if file_extension == '.xyz':
n_atoms = int(f.readline())
else:
line = f.readline()
while line and n_atoms == 0:
if 'Total atoms:' in line:
n_atoms = int(line.split()[-1])
elif '****** QM coordinates ******' in line \
or 'Type X Y Z Mass' in line:
line = f.readline()
while line != '\n':
n_atoms += 1
line = f.readline()
line = f.readline()
return n_atoms
def load_force_constant_matrix(self):
"""
Return the force constant matrix (in Cartesian coordinates) from the
TeraChem log file. If multiple such matrices are identified,
only the last is returned. The units of the returned force constants
are J/m^2. If no force constant matrix can be found in the log file, ``None`` is returned.
"""
force = None
n_atoms = self.get_number_of_atoms()
n_rows = n_atoms * 3
with open(self.path, 'r') as f:
line = f.readline()
while line != '':
# Read force constant matrix
if '*** Hessian Matrix (Hartree/Bohr^2) ***' in line:
force = np.zeros((n_rows, n_rows), np.float64)
for i in range(int(math.ceil(n_rows / 6.0))):
# Matrix element rows
for j in range(n_rows):
line = f.readline()
while len(line.split()) not in [4, 7]:
# This is a header row
line = f.readline()
data = line.split()
for k in range(len(data) - 1):
force[j, i * 6 + k] = float(data[k + 1])
# Convert from atomic units (Hartree/Bohr^2) to SI (J/m^2)
force *= 4.35974417e-18 / 5.291772108e-11 ** 2
line = f.readline()
return force
def load_geometry(self):
"""
Return the optimum geometry of the molecular configuration from the
TeraChem log file. If multiple such geometries are identified, only the
last is returned.
"""
coords, numbers, masses = list(), list(), list()
with open(self.path) as f:
lines = f.readlines()
num_of_atoms = None # used to verify the result
if os.path.splitext(self.path)[1] == '.xyz':
skip_line = False
for line in lines:
if not skip_line and line.rstrip():
if len(line.split()) == 1 and line[0].isdigit():
num_of_atoms = int(line.rstrip())
skip_line = True # the next line is just a comment, skip it
continue
splits = line.split()
coords.append([float(c) for c in splits[1:]])
mass, num = get_element_mass(splits[0])
masses.append(mass)
numbers.append(num)
if skip_line:
skip_line = False
coords, numbers, masses = list(), list(), list()
else:
for i, line in enumerate(lines):
if 'Type X Y Z Mass' in line:
# this is an output.geometry file
j = i + 1
while lines[j].strip():
# example: ' C 0.6640965100 0.0039526500 0.0710079300 12.0000000000'
# or: ' C 0.512276 -0.516064 0.779232'
splits = lines[j].split()
coords.append([float(c) for c in splits[1:-1]])
masses.append(float(splits[-1]))
numbers.append(list(symbol_by_number.keys())[list(symbol_by_number.values()).index(splits[0])])
j += 1
break
if '*** Reference Geometry ***' in line:
# this is an output.out file, e.g., from a freq run
j = i + 2
while lines[j].strip():
# example: ' C 0.512276 -0.516064 0.779232'
splits = lines[j].split()
coords.append([float(c) for c in splits[1:]])
mass, num = get_element_mass(splits[0])
masses.append(mass)
numbers.append(num)
j += 1
break
coords = np.array(coords, np.float64)
numbers = np.array(numbers, np.int)
masses = np.array(masses, np.float64)
if len(coords) == 0 or len(numbers) == 0 or len(masses) == 0 \
or ((len(coords) != num_of_atoms or len(numbers) != num_of_atoms or len(masses) != num_of_atoms)
and num_of_atoms is not None):
raise LogError(f'Unable to read atoms from TeraChem geometry output file {self.path}. '
f'If this is a TeraChem optimization log file, try using either the '
f'frequencies calculation log file (important if torsion modes exist) or '
f'the "output.geometry" or a ".xyz" file instead.')
return coords, numbers, masses
def load_conformer(self, symmetry=None, spin_multiplicity=0, optical_isomers=None, label=''):
"""
Load the molecular degree of freedom data from an output file created as the result of a
TeraChem "Freq" calculation. As TeraChem's guess of the external symmetry number might not always correct,
you can use the `symmetry` parameter to substitute your own value;
if not provided, the value in the TeraChem output file will be adopted.
"""
modes, unscaled_freqs = list(), list()
converged = False
if optical_isomers is None:
_optical_isomers = self.get_symmetry_properties()[0]
if optical_isomers is None:
optical_isomers = _optical_isomers
with open(self.path, 'r') as f:
line = f.readline()
while line != '':
# Read spin multiplicity if not explicitly given
if 'Spin multiplicity' in line and spin_multiplicity == 0 and len(line.split()) == 3:
spin_multiplicity = int(float(line.split()[-1]))
logging.debug(f'Conformer {label} is assigned a spin multiplicity of {spin_multiplicity}')
# Read vibrational modes
elif 'Mode Eigenvalue(AU) Frequency(cm-1)' in line:
line = f.readline()
while line != '\n':
# example:
# 'Mode Eigenvalue(AU) Frequency(cm-1) Intensity(km/mol) Vib.Temp(K) ZPE(AU) ...'
# ' 1 0.0331810528 170.5666870932 52.2294230772 245.3982965841 0.0003885795 ...'
if 'i' not in line.split()[2]:
# only consider non-imaginary frequencies in this function
unscaled_freqs.append(float(line.split()[2]))
line = f.readline()
if 'Vibrational Frequencies/Thermochemical Analysis' in line:
converged = True
line = f.readline()
if not len(unscaled_freqs):
raise LogError(f'Could not read frequencies from TeraChem log file {self.path}')
if not converged:
raise LogError(f'TeraChem job {self.path} did not converge.')
modes.append(HarmonicOscillator(frequencies=(unscaled_freqs, "cm^-1")))
return Conformer(E0=(0.0, "kJ/mol"), modes=modes, spin_multiplicity=spin_multiplicity,
optical_isomers=optical_isomers), unscaled_freqs
def load_energy(self, zpe_scale_factor=1.):
"""
Load the energy in J/mol from a TeraChem log file. Only the last energy
in the file is returned, unless the log file represents a frequencies calculation,
in which case the first energy is returned. The zero-point energy is *not* included
in the returned value.
"""
e_elect, return_first = None, False
with open(self.path, 'r') as f:
lines = f.readlines()
for i, line in enumerate(lines):
if 'FREQUENCY ANALYSIS' in line:
return_first = True
if 'Ground state energy (a.u.):' in line:
e_elect = float(lines[i + 1].strip())
if return_first:
break
if 'FINAL ENERGY:' in line:
# example: 'FINAL ENERGY: -114.5008455547 a.u.'
e_elect = float(line.split()[2])
if return_first:
break
if e_elect is None:
raise LogError(f'Unable to find energy in TeraChem output file {self.path}.')
return e_elect * constants.E_h * constants.Na
def load_zero_point_energy(self):
"""
Load the unscaled zero-point energy in J/mol from a TeraChem log file.
"""
zpe = None
with open(self.path, 'r') as f:
for line in f:
if 'Vibrational zero-point energy (ZPE)' in line:
# example:
# 'Vibrational zero-point energy (ZPE) = 243113.467652369843563065 J/mol = 0.09259703 AU'
zpe = float(line.split('J/mol')[0].split()[-1])
logging.debug(f'ZPE is {zpe}')
if zpe is not None:
return zpe
else:
raise LogError(f'Unable to find zero-point energy in TeraChem output file {self.path}.')
def load_scan_energies(self):
"""
Extract the optimized energies in J/mol from a TeraChem torsional scan log file.
"""
v_list = list()
with open(self.path, 'r') as f:
lines = f.readlines()
v_index, expected_num_of_points = 0, 0
for line in lines:
if 'Scan Cycle' in line:
# example: '-=#=- Scan Cycle 5/37 -=#=-'
v_index += 1
if not expected_num_of_points:
expected_num_of_points = int(line.split()[3].split('/')[1])
if 'Optimized Energy:' in line:
# example: '-=#=- Optimized Energy: -155.0315243910 a.u.'
v = float(line.split()[3])
if len(v_list) == v_index - 1:
# append this point, it is in order
v_list.append(v)
elif len(v_list) < v_index - 1:
# seems like points in this scan are missing... add None's instead,
# later they'll be removed along with the corresponding angles
v_list.extend([None] * (v_index - 1 - len(v_list)))
else:
# we added more points that we should have, something is wrong with the log file or this method
raise LogError(f'Could not parse scan energies from {self.path}')
logging.info(' Assuming {0} is the output from a TeraChem PES scan...'.format(os.path.basename(self.path)))
v_list = np.array(v_list, np.float64)
# check to see if the scanlog indicates that one of the reacting species may not be the lowest energy conformer
check_conformer_energy(v_list, self.path)
# Adjust energies to be relative to minimum energy conformer
# Also convert units from Hartree/particle to J/mol
v_list -= np.min(v_list)
v_list *= constants.E_h * constants.Na
angles = np.arange(0.0, 2 * math.pi + 0.00001, 2 * math.pi / (len(v_list) - 1), np.float64)
# remove None's:
indices_to_pop = [v_list.index[entry] for entry in v_list if entry is None]
for i in reversed(indices_to_pop):
v_list.pop(i)
angles.pop(i)
if v_index != expected_num_of_points:
raise LogError(f'Expected to find {expected_num_of_points} scan points in TeraChem scan log file '
f'{self.path}, but found: {v_index}')
return v_list, angles
def load_negative_frequency(self):
"""
Return the imaginary frequency from a transition state frequency
calculation in cm^-1.
"""
frequency = None
with open(self.path, 'r') as f:
line = f.readline()
while line != '':
# Read vibrational modes
if 'Mode Eigenvalue(AU) Frequency(cm-1)' in line:
line = f.readline()
# example:
# 'Mode Eigenvalue(AU) Frequency(cm-1) Intensity(km/mol) Vib.Temp(K) ZPE(AU) ...'
# ' 1 0.0331810528 170.5666870932i 52.2294230772 245.3982965841 0.0003885795 ...'
frequency = -1 * float(line.split()[2][:-1]) # remove 'i'
break
f.readline()
if frequency is None:
raise LogError(f'Unable to find imaginary frequency in TeraChem output file {self.path}.')
return frequency
    def load_scan_pivot_atoms(self):
        """Not implemented for TeraChem.

        Raises:
            NotImplementedError: Always; TeraChem logs do not provide this data.
        """
        raise NotImplementedError('The load_scan_pivot_atoms method is not implemented for TeraChem Logs')
    def load_scan_frozen_atoms(self):
        """Not implemented for TeraChem.

        Raises:
            NotImplementedError: Always; TeraChem logs do not provide this data.
        """
        raise NotImplementedError('The load_scan_frozen_atoms method is not implemented for TeraChem Logs')
    def get_D1_diagnostic(self):
        """Not implemented for TeraChem.

        Raises:
            NotImplementedError: Always; the D1 diagnostic is unavailable here.
        """
        raise NotImplementedError('The get_D1_diagnostic method is not implemented for TeraChem Logs')
    def get_T1_diagnostic(self):
        """Not implemented for TeraChem.

        Raises:
            NotImplementedError: Always; the T1 diagnostic is unavailable here.
        """
        raise NotImplementedError('The get_T1_diagnostic method is not implemented for TeraChem Logs')
| StarcoderdataPython |
11225828 | # -*- coding: utf-8 -*-
"""
Created on Mon Aug 3 16:03:09 2020
@author: Jimit.Dholakia
"""
import requests
import os
import time
import urllib.parse
# Force IST timestamps regardless of the host timezone (time.tzset is Unix-only).
os.environ['TZ'] = 'Asia/Kolkata'
time.tzset()

# Fetch the emoji-name -> image-URL mapping from the GitHub REST API.
url = os.getenv('GITHUB_API_URL', 'https://api.github.com') + '/emojis'
print('Emojis URL:', url)
r = requests.get(url)
x = r.json()
keys = list(x.keys())

meta_info = '''<meta name="author" content="<NAME>">
<meta name="keywords" content="GitHub Markdown Emoji Cheatsheet">
<!-- HTML Meta Tags -->
<meta name="description" content="Complete list of GitHub Markdown Emoji Codes">
<!-- Google / Search Engine Tags -->
<meta itemprop="name" content="GitHub Markdown Cheatsheet">
<meta itemprop="description" content="Complete list of GitHub Markdown Emoji Codes">
<meta itemprop="image" content="meta_img.png">
<!-- Facebook Meta Tags -->
<meta property="og:url" content="https://jimit105.github.io/github-emoji-cheatsheet">
<meta property="og:type" content="website">
<meta property="og:title" content="GitHub Markdown Cheatsheet">
<meta property="og:description" content="Complete list of GitHub Markdown Emoji Codes">
<meta property="og:image" content="meta_img.png">
<!-- Twitter Meta Tags -->
<meta name="twitter:card" content="summary_large_image">
<meta property="twitter:url" content="https://jimit105.github.io/github-emoji-cheatsheet/">
<meta name="twitter:title" content="GitHub Markdown Cheatsheet">
<meta name="twitter:description" content="Complete list of GitHub Markdown Emoji Codes">
<meta name="twitter:image" content="meta_img.png">
'''

current_time = time.strftime('%b %d, %Y %X %Z', time.localtime())

action_badge = '[](https://github.com/jimit105/github-emoji-cheatsheet/actions)'
# Bug fix: the header line previously contained unbalanced quotes (a syntax
# error, with only the '-brightgreen)' badge suffix surviving). The
# "last updated" shields.io badge is reconstructed here, URL-encoding the
# timestamp so spaces survive inside the badge URL.
last_updated_badge = ('')
header = '## GitHub Emoji Cheatsheet \n\n' + action_badge + '\n' + last_updated_badge + '\n\n'

# Render every emoji as a two-column markdown table row: icon | code.
complete_text = meta_info + header + '|Icon|Emoji Code|' + '\n' + '|---|---|' + '\n'
for key in keys:
    text = '|:' + key + ':|`:' + key + ':`|' + '\n'
    complete_text += text

with open('README.md', 'w') as f:
    f.write(complete_text)

print('Emoji Update Complete')
| StarcoderdataPython |
372449 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2021 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .op import Operator, operator_registry
from .tensor import Tensor
# tf 1.0 changes tf.pack -> tf.stack
# tf.stack(values, axis=0, name='stack')
# Packs the list of tensors in values into a tensor with
# rank one higher than each tensor in values,
# by packing them along the axis dimension.
# An int. The axis to stack along. Defaults to the first dimension. Negative values wrap around,
# so the valid range is [-(D+1), D]
# See also tf.concat, tf.tile, tf.repeat.
@operator_registry(operator_type='Pack')
class Pack(Operator):
    """Stack a list of tensors along a new axis (TensorFlow Pack / tf.stack)."""

    def __init__(self):
        super().__init__()

    def set_attr(self, framework, node):
        """Copy the stacking attributes from a framework node definition."""
        if framework != 'tensorflow':
            return
        # 'axis' is the dimension to stack along; 'N' is the tensor count.
        for attr_name in ('axis', 'N'):
            self._attr[attr_name] = node.attr[attr_name].i
| StarcoderdataPython |
1899897 | <filename>python/helpers/window_func.py
from typing import Union

import numpy as np
import scipy
import scipy.signal
import scipy.special
def window_func(name: str, m: int, **kwargs: Union[float, int]) -> np.ndarray:
    """Design a window for a given window function.

    Parameters
    ----------
    name: str
        name of the window, can be any of the following:

            "bartlett" : Bartlett window
            "barthann" : Bartlett-Hann window
            "blackman" : Blackman window
            "blackmanharris" : Blackman-Harris window
            "flattop" : Flat-top window
            "gauss" : Gaussian window with parameter alpha (default: 2.5)
            "hamming" : Hamming window
            "hann" : Hann window
            "kaiser" : Kaiser window with parameter beta (default: 0.5)
            "lanczos" : Lanczos window
            "nuttall" : Blackman-Nuttall window
            "rect" : Rectangular window
            "triang" : Triangular window

    m: int
        number of points in the window
    kwargs: Union[float, int]
        window parameter(s) (if any), passed as ``param=<value>``

    Returns
    -------
    np.ndarray
        designed window as a 1-D array of length ``m``
    """
    # Normalized sample positions 0 .. 1 inclusive (m points). Using
    # np.arange(m) instead of np.arange(m - 1) fixes an off-by-one that
    # produced windows of only m - 1 points (inconsistent with "rect").
    p = np.arange(m) / (m - 1)
    if name == "bartlett":
        w = 1 - np.abs((np.arange(m) - (m - 1) / 2) / ((m - 1) / 2))
    elif name in ["barthann", "barthannwin"]:
        w = 0.62 - 0.48 * np.abs(p - 0.5) - 0.38 * np.cos(2 * np.pi * p)
    elif name == "blackman":
        w = 0.42 - 0.5 * np.cos(2 * np.pi * p) + 0.08 * np.cos(4 * np.pi * p)
    elif name == "blackmanharris":
        w = 0.35875 - 0.48829 * np.cos(2 * np.pi * p) + 0.14128 * np.cos(4 * np.pi * p) \
            - 0.01168 * np.cos(6 * np.pi * p)
    elif name in ["bohman", "bohmanwin"]:
        w = (1 - np.abs(p * 2 - 1)) * np.cos(np.pi * np.abs(p * 2 - 1)) + (1 / np.pi) \
            * np.sin(np.pi * np.abs(p * 2 - 1))
    elif name in ["flattop", "flattopwin"]:
        w = 0.2157 - 0.4163 * np.cos(2 * np.pi * p) + 0.2783 * np.cos(4 * np.pi * p) \
            - 0.0837 * np.cos(6 * np.pi * p) + 0.0060 * np.cos(8 * np.pi * p)
    elif name in ["gauss", "gausswin"]:
        if "param" not in kwargs.keys():
            kwargs["param"] = 2.5
        w = np.exp(-0.5 * (kwargs["param"] * 2 * (p - 0.5)) ** 2)
    elif name == "hamming":
        w = 0.54 - 0.46 * np.cos(2 * np.pi * p)
    elif name == "hann":
        w = 0.5 - 0.5 * np.cos(2 * np.pi * p)
    elif name == "kaiser":
        if "param" not in kwargs.keys():
            kwargs["param"] = 0.5
        # Bug fix: the Kaiser window is defined with the zeroth-order modified
        # Bessel function i0, not the Bessel function of the first kind (jv).
        w = scipy.special.i0(kwargs["param"] * np.sqrt(1 - (2 * p - 1) ** 2)) \
            / scipy.special.i0(kwargs["param"])
    elif name == "lanczos":
        # np.sinc is the normalized sinc sin(pi x) / (pi x) with sinc(0) == 1,
        # so no NaN patching at the midpoint is required.
        w = np.sinc(2 * p - 1)
    elif name in ["nuttall", "nuttallwin"]:
        w = 0.3635819 - 0.4891775 * np.cos(2 * np.pi * p) + 0.1365995 * np.cos(4 * np.pi * p) \
            - 0.0106411 * np.cos(6 * np.pi * p)
    elif name in ["rect", "rectwin"]:
        w = np.ones(m)
    elif name == "triang":
        w = 1 - np.abs((np.arange(m) - (m - 1) / 2) / ((m + 1) / 2))
    else:
        # Fall back to SciPy for unknown windows. Bug fix: extra parameters
        # must be bundled into a tuple together with the window name instead of
        # unpacking the kwargs dict (which passed the *keys* positionally).
        # fftbins=False requests a symmetric window, matching the closed-form
        # branches above.
        window = (name, *kwargs.values()) if kwargs else name
        w = scipy.signal.get_window(window, m, fftbins=False)
    return w.ravel()
9639492 | <gh_stars>0
# -*- coding: utf-8 -*-
import numpy as np
import random
class EndingRule:
    """Decides whether an 8x8 two-player board game has ended and who won.

    Board convention (inferred from usage): positive cells belong to player 1,
    negative cells to player -1, and 0 marks an empty cell.
    """

    def __init__(self):
        self.ending_message = False
        # Rules checked on every turn: board full, or one side wiped out.
        self.base_ending_rule = [self.full_board, self.only_one_side]
        # NOTE(review): self.one_line and self.one_line_num are not defined in
        # this class, so evaluating these two attributes raises AttributeError
        # when EndingRule() is constructed -- presumably they are provided by a
        # subclass or were removed. Verify before relying on the constructor.
        self.ending_condition_list = {0: self.nothing, 1: self.one_line}
        self.ending_option_list = [self.one_line_num, self.check_available_place]
        self.placement_type = None
        self.winner = None
        self.playing = True
        self.board = None
        self.game_data = None
        self.rule_list = []
        self.rule = None
        self.ending_option = None
        self.flag = True
        self.type = None

    def nothing(self):
        """No-op ending condition."""
        pass

    def set(self, game_data, placement_data):
        """
        Data setting
        """
        self.game_data = game_data
        self.board = np.array(placement_data.board)
        self.rule = 1  # game_data.ending_rule
        self.type = game_data.rule[int(placement_data.obj_number) - 1]["type"]

    def check_ending(self, game_data, placement_data):
        """
        Check game is ending
        :param game_data: rule data
        :param placement_data: user placement, current board
        :return: is ending, winner
        :rtype: bool, int
        """
        self.set(game_data, placement_data)
        self.check_available_place()
        if not self.playing:
            self.count_stone()
            return self.playing, self.winner
        for function in self.base_ending_rule:
            function()
            if not self.playing:
                self.count_stone()
                return self.playing, self.winner
        return self.playing, 0

    def full_board(self):
        """
        Check board is full (playing stays True while any cell is empty)
        """
        self.playing = np.any(self.board == 0)

    def only_one_side(self):
        """
        Check there is only one user stone on board
        """
        self.playing = np.any(self.board < 0)

    def check_range(self, x, y):
        """
        Check x,y in board range
        :param x: placement x
        :type x: int
        :param y: placement y
        :type y: int
        :rtype: bool
        """
        return (0 <= x < len(self.board)) and (0 <= y < len(self.board))

    def check_available_place(self):
        """
        Check if there is an available placement position; sets
        self.playing to False when neither player can place.
        """
        pos = []
        pos2 = []
        for x, line in enumerate(self.board):
            for y, i in enumerate(line):
                if i < 0:
                    pos.append((x, y))
                    pos2.append((x, y))
        available = None
        available2 = None
        # available position for each rule type
        if self.type == 'add':
            _, available = self.get_stones(pos, 0, 0)
            print('available', available)
        else:
            _, available = self.get_stones(pos, 0, 0)
            _, available2 = self.get_stones(pos2, 0, 1)
            print('available2', available2)
        if not (available or available2):
            self.playing = False

    def get_stones(self, pos, whose, space):
        """
        get available placement position
        :param pos: user's stones in board (consumed while searching)
        :type pos: list of tuple
        :param whose: num of user to get position (0: empty, 1: positive, -1: negative)
        :type whose: int
        :param space: distance of move (0: adjacent ring, 1: two-cell ring)
        :type space: int
        :return: result, available position
        :rtype: bool, list of tuple
        """
        eight_dir_pos = []
        result = None
        x_list = []
        y_list = []
        if space == 0:
            x_list = y_list = [-1, 0, 1]
        elif space == 1:
            x_list = y_list = [-2, -1, 0, 1, 2]
        while not eight_dir_pos:
            if not pos:
                break
            # Bug fix: pick one candidate cell and remove *it* from the pool.
            # Previously ``pos`` itself was rebound to the chosen tuple and
            # ``pos.remove(pos)`` raised AttributeError (tuples have no remove).
            cell = random.choice(pos)
            pos.remove(cell)
            for x in x_list:
                for y in y_list:
                    if space == 0:
                        if x == 0 and y == 0:
                            continue
                    elif space == 1:
                        # Skip the inner ring; only distance-2 moves count here.
                        if abs(x) <= 1 and abs(y) <= 1:
                            continue
                    next_x = cell[0] + x
                    next_y = cell[1] + y
                    if next_x > 7 or next_x < 0 or next_y > 7 or next_y < 0:
                        continue
                    if whose == 0:
                        if self.board[next_x][next_y] == 0:
                            eight_dir_pos.append((next_x, next_y))
                            result = True
                    elif whose == 1:
                        if self.board[next_x][next_y] > 0:
                            eight_dir_pos.append((next_x, next_y))
                            result = True
                    elif whose == -1:
                        if self.board[next_x][next_y] < 0:
                            eight_dir_pos.append((next_x, next_y))
                            result = True
        return result, eight_dir_pos

    def count_stone(self):
        """
        Count both users' stones to determine the winner
        (1, -1, or 0 for a tie).
        """
        if (self.board > 0).sum() > (self.board < 0).sum():
            self.winner = 1
        elif (self.board > 0).sum() < (self.board < 0).sum():
            self.winner = -1
        else:
            self.winner = 0
| StarcoderdataPython |
3538658 | <reponame>houfu/pdpc-decisions
from pdpc_decisions import classes
def test_pdpcdecision_item_str(decisions_test_items):
    # str() of an item combines the decision date and the respondent's name.
    assert str(decisions_test_items[1][0]) == "PDPCDecisionItem: 2016-04-21 Institution of Engineers, Singapore"
def test_get_text_as_paragraphs(decisions_gold):
    """CorpusDocument renders its paragraphs with or without paragraph marks."""
    document = classes.CorpusDocument(decisions_gold[0])
    source_paragraphs = [
        ('This is the first paragraph.', '1.'),
        ('This is the second paragraph.', '2.'),
        ('This is the third paragraph.', '3.'),
    ]
    document.paragraphs = [classes.Paragraph(text, mark) for text, mark in source_paragraphs]
    plain = [text for text, _ in source_paragraphs]
    marked = ['{} {}'.format(mark, text) for text, mark in source_paragraphs]
    assert document.get_text_as_paragraphs() == plain
    assert document.get_text_as_paragraphs(True) == marked
    assert document.get_text() == ' '.join(plain)
    assert document.get_text(True) == ' '.join(marked)
def test_paragraph():
    """Paragraph stringifies as 'Paragraph: <mark> <text>', with 'NA,' when unmarked."""
    marked = classes.Paragraph('ABCDFEF G', '1.')
    assert str(marked) == 'Paragraph: 1. ABCDFEF G'
    unmarked = classes.Paragraph('ABCDFEG')
    assert str(unmarked) == 'Paragraph: NA, ABCDFEG'
def test_corpus_document(decisions_gold):
    # An empty document reports zero length and no source.
    test = classes.CorpusDocument()
    assert str(test) == "CorpusDocument:0, source: None"
    # A document built from a gold decision reports respondent and date.
    document = classes.CorpusDocument(decisions_gold[0])
    assert str(document) == "CorpusDocument:0, source: Avant Logistic Service, 2019-08-02"
    # Documents are iterable.
    assert iter(document)
def test_pdffile(options_test, decisions_gold):
    # PDFFile is usable as a context manager and yields a truthy handle.
    with classes.PDFFile(decisions_gold[0], options=options_test) as file:
        assert file
| StarcoderdataPython |
6543231 | <filename>client.py
# coding: utf-8
import socket
import json
import time
import threading
class Client(object):
    """Chat client (Python 2).

    Connects to the chat server, registers a nickname, and either prints
    incoming messages to stdout (CLI mode) or pushes them onto a
    caller-supplied queue (library mode).
    """
    def __init__(self, addr="127.0.0.1", port=12345, nickname=None, queue=None):
        """\
        @param addr: server IP address
        @param port: server port
        @param nickname: nickname to register with
        @param queue: message queue; when provided, received messages are put
            on this queue instead of being printed, so the class can back
            clients other than this command-line one
        """
        self.addr = addr
        self.port = port
        self.run = True
        self.queue = queue
        self.nickname = nickname
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.connect()
        if not self.sock:
            return
    def connect(self, nickname=None):
        """\
        Connect to the server, set the nickname and join the chat room.
        """
        try:
            self.sock.connect((self.addr, self.port))
            self.sock.settimeout(3)
        except socket.error, e:
            # errno 10061: WSAECONNREFUSED ("connection refused" on Windows)
            if e.errno == 10061:
                print "Couldn't established connection with %s:%d"%(self.addr, self.port)
                return
            else:
                raise
        else:
            if self.nickname:
                pass
            elif nickname:
                self.nickname = nickname
            else:
                self.nickname = raw_input("Set your nickname: ").decode("GBK").encode("utf-8") # prompt for a nickname; console input is GBK-encoded
            self.sock.send(json.dumps({"action": "join",
                                       "time": time.time(),
                                       "nickname": self.nickname}))
    def receive_loop(self):
        """\
        Listener loop.
        When a message queue was supplied, received messages are put on it
        (library mode); otherwise (CLI mode) they are formatted and written
        to stdout.
        """
        try:
            while self.run:
                try:
                    data = json.loads(self.sock.recv(1024))
                    if self.queue:
                        self.queue.put(data) # push the message onto the queue
                    else:
                        if data["type"] == "message":
                            x = time.localtime(data["time"])
                            tstr = time.strftime('%Y-%m-%d %H:%M:%S',x)
                            print u"{n}@{addr} {time}\n{msg}\n".format(n=data["nickname"],
                                                                      addr=data["address"],
                                                                      time=tstr,
                                                                      msg=data["msg"])
                        elif data["type"] == "info":
                            print "INFO:", data["msg"]
                except KeyboardInterrupt:
                    self.run = False
                except socket.timeout:
                    pass
        except socket.error, e:
            # errno 10053: WSAECONNABORTED (connection aborted)
            if e.errno == 10053:
                self.run = False
                print "Server closed"
    def send_loop(self):
        """\
        Message-sending loop for the CLI client: read lines from stdin and
        forward them to the server until EOF or Ctrl-C.
        """
        try:
            while self.run:
                s = raw_input().decode("gbk").encode("utf-8")
                self.sock.send(json.dumps({"action": "send",
                                           "time": time.time(),
                                           "msg": s}))
        except (EOFError, KeyboardInterrupt):
            self.sock.send(json.dumps({"action": "quit", "time": time.time()}))
            self.run = False
    def send_message(self, msg):
        """\
        Public API for sending a single message.
        """
        self.sock.send(json.dumps({"action": "send",
                                   "time": time.time(),
                                   "msg": msg}))
    def get_local_addr(self):
        # Return the local (ip, port) pair, normalizing "localhost" to 127.0.0.1.
        addr = self.sock.getsockname()
        if addr[0] == "localhost":
            return "127.0.0.1", addr[1]
        else:
            return addr
    def main(self):
        """\
        Main entry point of the CLI client: run the receive and send loops
        on two threads and wait for both to finish.
        """
        funclist = [self.receive_loop, self.send_loop]
        thrlist = [threading.Thread(target=f)for f in funclist]
        map(lambda x: x.start(), thrlist)
        try:
            map(lambda x: x.join(), thrlist)
        except KeyboardInterrupt:
            pass
    def __del__(self):
        self.sock.close()
if __name__ == '__main__':
    # Launch the command-line chat client against a local server.
    client = Client(addr="localhost", port=12345)
    client.main()
| StarcoderdataPython |
378265 | <filename>fem/utilities/dock_table/table_widget.py
"""
dock_table.table_widget
Table widget
author: <NAME>
"""
from __future__ import print_function, absolute_import
from qtpy import QtCore, QtWidgets, QtGui
try:
from .dock_table_ui import Ui_DockWidget
from .dock_data_table import DockDataTable
except SystemError:
from dock_table_ui import Ui_DockWidget
from dock_data_table import DockDataTable
from fem.utilities.command_dispatcher.action_signal import ActionSignal
class TableWidget(QtWidgets.QDockWidget):
    """Dock widget wrapping a data table view with add/insert/delete and
    move up/down buttons.

    Button clicks and edits are re-emitted through ActionSignal objects
    (``add``, ``remove``, ``insert``, ``set_data``, ``set_rows``, ``up``,
    ``down``) so an external controller can own the underlying data model;
    after each emitted action the view is refreshed.
    """
    def __init__(self, *args):
        super(TableWidget, self).__init__(*args)
        self.ui = Ui_DockWidget()
        self.ui.setupUi(self)
        self._table_view = self.ui.tableView
        self._header = self._table_view.horizontalHeader()
        """:type: QtGui.QHeaderView"""
        # Right-clicking the header opens a "set number of rows" dialog.
        self._header.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
        self._header.customContextMenuRequested.connect(self._header_context_menu)
        # Wire the edit buttons to the internal handlers below.
        self.ui.pushButton_add.clicked.connect(self._add)
        self.ui.pushButton_delete.clicked.connect(self._remove)
        self.ui.pushButton_insert.clicked.connect(self._insert)
        self.ui.pushButton_up.clicked.connect(self._up)
        self.ui.pushButton_down.clicked.connect(self._down)
        # Outgoing signals a controller can subscribe to.
        self.add = ActionSignal()
        self.remove = ActionSignal()
        self.insert = ActionSignal()
        self.set_data = ActionSignal()
        self.set_rows = ActionSignal()
        self.up = ActionSignal()
        self.down = ActionSignal()
        # Expose selected table-view callables/signals directly.
        self.copy = self._table_view.copy
        self.paste = self._table_view.paste
        self.right_click = self._table_view.right_click
        self._table_view.set_data.connect(self._set_data)
        # Keep the original title bar so it can be restored after hiding.
        self._title_bar_widget = self.titleBarWidget()
        self.row_changed = self._table_view.row_changed
    def hide_title_bar(self):
        """Replace the title bar with an empty widget (hides it)."""
        self.setTitleBarWidget(QtWidgets.QWidget())
    def show_title_bar(self):
        """Restore the title bar captured at construction time."""
        self.setTitleBarWidget(self._title_bar_widget)
    def set_editable_columns(self, int_set):
        """Restrict in-place editing to the given set of column indices."""
        assert isinstance(int_set, set)
        self._table_view.set_editable_columns(int_set)
    def _set_data(self, index, value, role):
        # Forward a cell edit to the set_data signal as ((row, col), value).
        row = index.row()
        column = index.column()
        try:
            # Qt4-style QVariant payloads need an explicit conversion.
            value = value.toString()
        except AttributeError:
            pass
        self.set_data.emit((row, column), str(value))
        result = self.set_data.results[0]
        self._table_view.update_all()
        # if result is True:
        #     self._table_view.data_changed.emit(row, column)
        return result
    def setup_data(self, data, headers=None):
        """Attach the backing data (and optional headers) to the view."""
        self._table_view.setup_data(data, headers)
    # def set_headers(self, headers):
    #     self._table_view.set_headers(headers)
    def _add(self):
        # Append a row via the controller, then refresh the view.
        self.add.emit()
        self._table_view.update_all()
    def _remove(self):
        # Remove the currently selected row; no-op when nothing is selected.
        row = self._table_view.current_row()
        if row < 0:
            return
        self.remove.emit(row)
        self._table_view.update_all()
    def _insert(self):
        # Insert before the selected row (or at the top when none selected).
        row = self._table_view.current_row()
        if row < 0:
            row = 0
        self.insert.emit(row)
        self._table_view.update_all()
    def _up(self):
        # Move the selected row up one position.
        row = self._table_view.current_row()
        if row < 0:
            row = 0
        self.up.emit(row)
        self._table_view.update_all()
    def _down(self):
        # Move the selected row down one position.
        row = self._table_view.current_row()
        if row < 0:
            row = 0
        self.down.emit(row)
        self._table_view.update_all()
    def update_all(self):
        """Refresh the entire table view."""
        self._table_view.update_all()
    def hide_buttons(self):
        """Hide all row-editing buttons (read-only presentation)."""
        self.ui.pushButton_add.hide()
        self.ui.pushButton_insert.hide()
        self.ui.pushButton_delete.hide()
        self.ui.pushButton_up.hide()
        self.ui.pushButton_down.hide()
    def show_buttons(self):
        """Show all row-editing buttons."""
        self.ui.pushButton_add.show()
        self.ui.pushButton_insert.show()
        self.ui.pushButton_delete.show()
        self.ui.pushButton_up.show()
        self.ui.pushButton_down.show()
    def set_selection(self, selections):
        """Select the given cells/rows in the view."""
        self._table_view.set_selection(selections)
    def set_focus(self, focus):
        self._table_view.setFocus(focus)
    def select_last_row(self):
        self._table_view.select_last_row()
    def selection(self):
        """Return the current selection from the view."""
        return self._table_view.selection()
    def current_row(self):
        return self._table_view.current_row()
    def current_column(self):
        return self._table_view.current_column()
    def row_count(self):
        return self._table_view.row_count()
    def column_count(self):
        return self._table_view.column_count()
    def set_table_item_delegate(self, delegate):
        """Install a custom item delegate on the table view."""
        self._table_view.setItemDelegate(delegate)
    def _mouse_release_event(self, event):
        # Delegate to the base QTableView implementation.
        return QtWidgets.QTableView.mouseReleaseEvent(self._table_view, event)
    def _header_context_menu(self, pos):
        # Ask the user for a row count and forward it through set_rows.
        # global_pos = self.mapToGlobal(pos)
        rows, ok = QtWidgets.QInputDialog.getText(self, "Enter number of rows.", "Rows:", QtWidgets.QLineEdit.Normal)
        try:
            rows = int(rows)
        except (ValueError, TypeError):
            return
        if ok is False:
            return
        self.set_rows.emit(rows)
    def select_and_edit(self, index):
        """Select the given index and immediately open its editor."""
        self._table_view.select_and_edit(index)
    def select(self, index):
        self._table_view.select(index)
| StarcoderdataPython |
3460219 | from enum import IntEnum
class Color:
    """Named RGB color tuples (red, green, blue; each channel 0-255)."""
    BLACK = (0, 0, 0)
    WHITE = (255, 255, 255)
    RED = (255, 0, 0)
    GREEN = (0, 255, 0)
    BLUE = (0, 0, 255)
    SKY_BLUE = (41, 173, 255)
    GRASS_GREEN = (0, 168, 68)
    OCEAN_BLUE = (60, 188, 252)
    DARK_BLUE = (0, 64, 88)
    WOOD_BROWN = (172, 124, 0)
    GRAY = (120, 120, 120)
    LIGHT_GRAY = (188, 188, 188)
    LIGHT_BLUE = (104, 136, 252)
    LIGHT_PINK = (248, 164, 192)
    TEAL = (0, 136, 136)
class Window:
    """Display configuration constants."""
    WIDTH = 640  # window width in pixels
    HEIGHT = 480  # window height in pixels
    FPS = 60  # target frames per second
class Direction(IntEnum):
    """Movement directions; integer values allow direct use as indices.

    Fix: the last member previously had a stray dataset artifact fused onto
    its line, which made the module unparsable.
    """
    LEFT = 0
    RIGHT = 1
    UP = 2
    DOWN = 3
    NONE = 4  # no movement
1781730 | <filename>bittensor/_config/config_impl.py<gh_stars>10-100
"""
Implementation of the config class, which manages the config of different bittensor modules.
"""
# The MIT License (MIT)
# Copyright ยฉ 2021 <NAME>
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the โSoftwareโ), to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of
# the Software.
# THE SOFTWARE IS PROVIDED โAS ISโ, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
import yaml
from munch import Munch
class Config(Munch):
    """
    Manages the configuration of the different bittensor modules as a
    dict-like (Munch) object with YAML-based string rendering.
    """
    def __init__(self, loaded_config=None):
        super().__init__()
        if loaded_config:
            raise NotImplementedError('Function load_from_relative_path is not fully implemented.')

    def __repr__(self) -> str:
        return str(self)

    def __str__(self) -> str:
        return "\n" + yaml.dump(self.toDict())

    def to_string(self, items) -> str:
        """Render the given Munch-like ``items`` as a YAML string."""
        return "\n" + yaml.dump(items.toDict())

    def update_with_kwargs(self, kwargs):
        """Merge the given keyword mapping into this config."""
        for name, value in kwargs.items():
            self[name] = value
| StarcoderdataPython |
1767890 | from os.path import isfile
from psi_apps.utils.json_helper import json_loads
from psi_apps.utils.basic_response import \
(ok_resp, err_resp)
def load_file_contents(fpath):
    """Given a file path, open the file and return the contents"""
    if not isfile(fpath):
        return err_resp('File not found: %s' % fpath)
    try:
        with open(fpath, 'rb') as fh:
            return ok_resp(fh.read())
    except IOError as ex_obj:
        return err_resp('Could not read file: %s\nError: %s' % (fpath, ex_obj))
def load_file_as_json(fpath):
    """Given a file path, open the file and convert it to an OrderedDict"""
    if not isfile(fpath):
        return err_resp('File not found: %s' % fpath)
    try:
        with open(fpath, 'r') as fh:
            fcontents = fh.read()
    except IOError as ex_obj:
        return err_resp('Could not read file: %s\nError: %s' % (fpath, ex_obj))
    json_info = json_loads(fcontents)
    if json_info.success:
        return ok_resp(json_info.result_obj)
    return err_resp(json_info.err_msg)
| StarcoderdataPython |
8063813 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from mavencoord import MavenCoord
class MavenVersionDb:
    """ This class serves as a dependency database so we can lookup
        versions of packages that have been registered already.
    """
    def __init__(self):
        # Maps "group:artifact" -> version string.
        self._db = {}
        # Coordinate ids already warned about, so each conflict is reported once.
        self._warnings = set()

    def parseFile(self, depsfile):
        """ _depsfile_ is the path to a filename that will be read and processed
            in order to inject all dependencies.

            The format of the file is similar to the one produced by the following
            maven command:

              $ mvn dependency:tree -DoutputType=text -Doutput=deps.txt

            The only difference is that the very first line, which references the
            coord we are in, should be commented out with '#' character.
        """
        with open(depsfile, 'rt') as f:
            for line in f:
                line = line.strip()
                if line.startswith('#') or (len(line) == 0):
                    continue
                # Strip the ASCII tree-drawing prefix produced by dependency:tree.
                self.register(line.lstrip('=|+- \\'))
        return True

    def register(self, coord):
        """ Register given coord (or list of coords) in the database.
            Returns the normalized MavenCoord (version pinned to the first
            registered version on conflict).
        """
        if isinstance(coord, list):
            for m in coord:
                self.register(m)
            return
        m = MavenCoord(coord)
        myId = m.group + ':' + m.artifact
        if (myId in self._db) and (self._db[myId] != m.version):
            # Conflict: keep the first registered version and warn once.
            self.dependencyWarningOnce(m.id, self._db[myId])
            m.version = self._db[myId]
            return m
        self._db[myId] = m.version
        return m

    def findOrRegister(self, coord):
        """ Return the registered coord, registering it first when unknown. """
        normCoord = self.find(coord)
        if normCoord is None:
            return self.register(coord)
        return normCoord

    def find(self, coord):
        """ Finds a coordinate that matches given group and artifact;
            returns None when it has not been registered.
        """
        coord = MavenCoord(coord)
        coord.version = self.getVersionFor(coord.group, coord.artifact)
        if not coord.version:
            return None
        return coord

    def getVersionFor(self, group, artifact):
        """ Get default version for given group and artifact (None if absent). """
        return self._db.get(group + ':' + artifact, None)

    def hasVersionFor(self, group, artifact):
        """ True when a version is registered for group:artifact. """
        # Bug fix: dict has no .has() method (the old call raised
        # AttributeError); use the `in` operator instead.
        return (group + ':' + artifact) in self._db

    def dependencyWarningOnce(self, coord, existingVersion):
        """ Show a dependency conflict warning, at most once per coord. """
        if coord in self._warnings:
            return
        self._warnings.add(coord)
        print(
            "WARNING: Unhandled dependency conflict for %s (expecting version '%s')" % (
                coord,
                existingVersion
            )
        )
6485649 | <reponame>shauryachawla/misc
#!/usr/bin/env python2
from sys import stdin

# Read n (how many numbers follow) and k (the divisor) from the first line.
n = raw_input().split(' ')
k = int(n[1])
n = int(n[0])
ans = 0
# Count how many of the next n integers are divisible by k.
for i in range(0, n):
    t = int ( stdin.readline() )
    if (t%k) == 0: ans += 1
print (ans)
| StarcoderdataPython |
260086 | <filename>user/views.py
from django.shortcuts import render, redirect
from django.contrib import messages
from .forms import UserRegisterForm, UserUpdateForm, ProfileUpdateForm
from django.contrib.auth.decorators import login_required
from django.contrib.auth import login, authenticate
from django.contrib.sites.shortcuts import get_current_site
from django.utils.encoding import force_bytes, force_text
from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode
from django.template.loader import render_to_string
from .tokens import account_activation_token
from django.core.mail import EmailMessage
from django.http import HttpResponse
from instagram.models import User
def register(request):
    """Handle user sign-up with email-based account activation."""
    if request.method == 'POST':
        form = UserRegisterForm(request.POST)
        if form.is_valid():
            # Create the user inactive until the email address is confirmed.
            user = form.save(commit=False)
            user.is_active = False
            user.save()
            current_site = get_current_site(request)
            username = form.cleaned_data.get('username')
            messages.success(request, f'Account created for {username}!,Please confirm your email address to complete the registration')
            mail_subject = 'Activate your INSTAGRAM account.'
            # The activation link carries the base64-encoded user pk plus a
            # one-time token tied to the user's current state.
            message = render_to_string('acc_active_email.html', {
                'user': user,
                'domain': current_site.domain,
                'uid': urlsafe_base64_encode(force_bytes(user.pk)),
                'token': account_activation_token.make_token(user),
            })
            to_email = form.cleaned_data.get('email')
            email = EmailMessage(
                mail_subject, message, to=[to_email]
            )
            email.send()
            return redirect('login')
    else:
        form = UserRegisterForm()
    return render(request, 'users/register.html', {'form': form})
def activate(request, uidb64, token):
    """Activate a user account from an emailed confirmation link."""
    try:
        # Decode the user pk from the link; any malformed value yields user=None.
        uid = force_text(urlsafe_base64_decode(uidb64))
        user = User.objects.get(pk=uid)
    except(TypeError, ValueError, OverflowError, User.DoesNotExist):
        user = None
    if user is not None and account_activation_token.check_token(user, token):
        # Valid token: enable the account and log the user straight in.
        user.is_active = True
        user.save()
        login(request, user)
        messages.success(request, f'Thank you for your email confirmation. Now you can login your account.!')
        return redirect('login')
    else:
        messages.success(request, f'Activation link is invalid!')
        return redirect('login')
@login_required
def profile(request):
    # NOTE(review): dead code -- this view is shadowed by the second
    # `profile` definition below and is never the one Django resolves.
    return render(request, 'users/profile.html')
@login_required
def profile(request):
    """Display and update the logged-in user's account and profile forms."""
    if request.method == 'POST':
        u_form = UserUpdateForm(request.POST, instance=request.user)
        p_form = ProfileUpdateForm(request.POST,
                                   request.FILES,
                                   instance=request.user.profile)
        if u_form.is_valid() and p_form.is_valid():
            u_form.save()
            p_form.save()
            messages.success(request, f'Your account has been updated!')
            # Redirect after POST to avoid duplicate submissions on refresh.
            return redirect('profile')
    else:
        u_form = UserUpdateForm(instance=request.user)
        # Bug fix: pre-populate the profile form with the current profile.
        # Previously the GET branch created an empty ProfileUpdateForm()
        # (inconsistent with the POST branch), so the form never showed the
        # user's existing profile data.
        p_form = ProfileUpdateForm(instance=request.user.profile)
    context = {
        'u_form': u_form,
        'p_form': p_form
    }
    return render(request, 'users/profile.html', context)
3477346 | <gh_stars>10-100
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of the SPORCO package. Details of the copyright
# and user license can be found in the 'LICENSE.txt' file distributed
# with the package.
"""
Convolutional Dictionary Learning
=================================
This example demonstrates the use of :class:`.fista.cbpdndl.ConvBPDNDictLearn` for learning a convolutional dictionary from a set of colour training images :cite:`wohlberg-2016-convolutional`, using FISTA solvers for both sparse coding :cite:`chalasani-2013-fast` :cite:`wohlberg-2016-efficient` and dictionary update steps :cite:`garcia-2017-convolutional`.
"""
from __future__ import print_function
from builtins import input
from builtins import range
import pyfftw # See https://github.com/pyFFTW/pyFFTW/issues/40
import numpy as np
from sporco.fista import cbpdndl
from sporco import util
from sporco import plot
"""
Load training images.
"""
exim = util.ExampleImages(scaled=True, zoom=0.5)
img1 = exim.image('barbara.png', idxexp=np.s_[10:522, 100:612])
img2 = exim.image('kodim23.png', idxexp=np.s_[:, 60:572])
img3 = exim.image('monarch.png', idxexp=np.s_[:, 160:672])
S = np.stack((img1, img2, img3), axis=3)
"""
Highpass filter training images.
"""
npd = 16
fltlmbd = 5
sl, sh = util.tikhonov_filter(S, fltlmbd, npd)
"""
Construct initial dictionary.
"""
np.random.seed(12345)
D0 = np.random.randn(16, 16, 3, 96)
"""
Set regularization parameter and options for dictionary learning solver. Note the multi-scale dictionary filter sizes.
"""
lmbda = 0.2
L_sc = 360.0
L_du = 50.0
dsz = ((8, 8, 3, 32), (12, 12, 3, 32), (16, 16, 3, 32))
opt = cbpdndl.ConvBPDNDictLearn.Options({'Verbose': True,
'MaxMainIter': 200, 'DictSize': dsz,
'CBPDN': {'BackTrack': {'Enabled': True }, 'L': L_sc},
'CCMOD': {'BackTrack': {'Enabled': True }, 'L': L_du } })
"""
Create solver object and solve.
"""
d = cbpdndl.ConvBPDNDictLearn(D0, sh, lmbda, opt)
D1 = d.solve()
print("ConvBPDNDictLearn solve time: %.2fs" % d.timer.elapsed('solve'))
"""
Display initial and final dictionaries.
"""
D1 = D1.squeeze()
fig = plot.figure(figsize=(14, 7))
plot.subplot(1, 2, 1)
plot.imview(util.tiledict(D0), fig=fig, title='D0')
plot.subplot(1, 2, 2)
plot.imview(util.tiledict(D1, dsz), fig=fig, title='D1')
fig.show()
"""
Get iterations statistics from solver object and plot functional value, residuals, and automatically adjusted gradient step parameters against the iteration number.
"""
its = d.getitstat()
fig = plot.figure(figsize=(20, 5))
plot.subplot(1, 3, 1)
plot.plot(its.ObjFun, fig=fig, xlbl='Iterations', ylbl='Functional')
plot.subplot(1, 3, 2)
plot.plot(np.vstack((its.X_Rsdl, its.D_Rsdl)).T,
fig=fig, ptyp='semilogy', xlbl='Iterations', ylbl='Residual',
lgnd=['X', 'D'])
plot.subplot(1, 3, 3)
plot.plot(np.vstack((its.X_L, its.D_L)).T, fig=fig, xlbl='Iterations',
ylbl='Inverse of Gradient Step Parameter', ptyp='semilogy',
lgnd=['$\L_X$', '$\L_D$'])
fig.show()
# Wait for enter on keyboard
input()
| StarcoderdataPython |
270885 | """
*Comparison Operator*
"""
import jax.numpy as jnp
from ._operator import ArrayOperator
class ComparisonOperator(ArrayOperator):
    """
    Namespace of element-wise comparison operators.

    Each attribute is a plain alias of the corresponding :mod:`jax.numpy`
    function, so they are called directly, e.g.
    ``ComparisonOperator.Equal(a, b)``.
    """
    Equal = jnp.equal
    NotEqual = jnp.not_equal
    IsClose = jnp.isclose  # element-wise equality within a tolerance
    GreaterThan = jnp.greater
    GreaterEqualThan = jnp.greater_equal
    LesserThan = jnp.less
    LesserEqualThan = jnp.less_equal
    Where = jnp.where  # predicate evaluation (select per element)
    Condition = jnp.extract  # #[TODO] also predicate eval? compare with Where
    # [TODO] document arity differences between the aliases above
    allclose = jnp.allclose  # scalar result: True iff *all* elements are close
| StarcoderdataPython |
6686267 |
def load_population(initPopFile=None, decoder=None):
    """Load a previously saved population from ``initPopFile``.

    Each line is semicolon separated: ``generation;fitness;...;candidateIds``,
    where ``candidateIds`` is a Python list literal.

    Args:
        initPopFile (str): file name with the population to be loaded.
        decoder (Decoder): decoder used to map candidate ids to indexes.

    Returns:
        tuple: ``(num_generations, population, fitness)`` where
            num_generations (int): generation to resume from (0 when no
                file is given -- previously this raised a NameError),
            population (list): list of candidate sets,
            fitness (list): fitness value per candidate (as read, i.e. str).
    """
    import ast

    population = []
    fitness = []
    num_generations = 0  # fix: was unbound when initPopFile is None
    if initPopFile is not None:
        with open(initPopFile, 'r') as file:
            for line in file:
                fields = line.split(';')
                # resume from the generation after the last one recorded
                num_generations = int(fields[0]) + 1
                fitness.append(fields[1])
                # literal_eval instead of eval: the file may come from
                # outside and only a plain list literal is expected here
                candidateIds = ast.literal_eval(fields[3])
                candidate = set(decoder.decode_candidate_ids_to_index(candidateIds))
                population.append(candidate)
        # no explicit close: the 'with' block already closed the file
    return num_generations, population, fitness
def save_all_results(population, num_generations, num_evaluations, args):
    """
    Append the state of the current generation to the results file.

    One semicolon-separated line is written per candidate with:
        - number of the generation
        - fitness of the candidate
        - the encoded candidate
        - the decoded candidate (reactions)
    Before the first generation, a configuration header is written as well.

    Args:
        population (list): the population of Individuals
        num_generations (int): the number of elapsed generations
        num_evaluations (int): the number of evaluations already performed
        args (dict): a dictionary of keyword arguments

    Notes:
        Required keyword arguments in args:
        - *results_file* -- the file path of the result file
        - *configuration* -- the configuration of the EA algorithm
    """
    print("save results of generation:" + str(num_generations))
    resultFile = args["results_file"]
    config = args["configuration"]
    decoder = config.get_decoder()
    # 'with' guarantees the handle is closed even if a decode step raises
    with open(resultFile, 'a') as file:
        # save the optimization configuration once, before the first generation
        if num_generations == 0:
            file.write("population_size;candidate_max_size;crossover_rate; mutation_rate;new_candidates_rate; num_elites\n")
            file.write(";".join(map(str, config.get_ea_configurations().get_default_config())))
            # fix: terminate the config line -- previously the column header
            # below was appended to the same line
            file.write("\n")
            file.write("Generation;Fitness;Candidate;Reactions\n")
        # save all candidates of the population
        for ind in population:
            solution_decoded = decoder.decode_candidate(ind.candidate)
            file.write(("{0};{1};{2};{3} \n").format(num_generations, ind.fitness, ind.candidate, solution_decoded))
| StarcoderdataPython |
8049120 | <filename>gym_multi_car_racing/__init__.py
from .multi_car_racing import MultiCarRacing
from gym.envs.registration import register
# Expose the multi-agent racing environment through Gym's registry so it can
# be instantiated with gym.make('MultiCarRacing-v0').
register(
    id='MultiCarRacing-v0',
    entry_point='gym_multi_car_racing:MultiCarRacing',
    max_episode_steps=1000,  # hard episode cap enforced by Gym's TimeLimit wrapper
    reward_threshold=900  # score at which the task is considered solved
)
| StarcoderdataPython |
12803094 | from django.db import models
class Aluno(models.Model):
    """Student record with basic personal and identification data."""
    nome = models.CharField(max_length=30)  # full name
    rg = models.CharField(max_length=9)  # Brazilian identity card (RG) number
    cpf = models.CharField(max_length=11)  # Brazilian taxpayer id (CPF); length fits unmasked digits
    data_nascimento = models.DateField()  # date of birth
    def __str__(self):
        return self.nome
class Curso(models.Model):
    """A course offering, classified by difficulty level."""
    # (db value, display label) pairs. NOTE(review): the labels are mojibake
    # of 'Básico'/'Intermediário'/'Avançado' -- the source file's encoding
    # should be fixed, but the stored values ('B'/'I'/'A') are unaffected.
    NIVEL = (
        ('B', 'Bรกsico'),
        ('I', 'Intermediรกrio'),
        ('A', 'Avanรงado')
    )
    cod = models.CharField(max_length=10)  # course code
    descricao = models.CharField(max_length=100)  # course description
    nivel = models.CharField(
        max_length=1,
        choices=NIVEL,
        blank=False,
        null=False,
        default='B'  # new courses start at the basic level
    )
    def __str__(self):
        return self.descricao
class Matricula(models.Model):
    """Enrollment linking a student to a course for a given class period."""
    # (db value, display label) pairs for the daily period of the class
    PERIODO = (
        ('M', 'Matutino'),
        ('V', 'Vespertino'),
        ('N', 'Noturno')
    )
    # deleting a student or course removes its enrollments as well
    aluno = models.ForeignKey(Aluno, on_delete=models.CASCADE)
    curso = models.ForeignKey(Curso, on_delete=models.CASCADE)
    periodo = models.CharField(
        max_length=1,
        choices=PERIODO,
        blank=False,
        null=False,
        default='M'  # morning period by default
    )
| StarcoderdataPython |
4936331 | from flask import Flask, request, abort, make_response
from flask_httpauth import HTTPBasicAuth
import os, requests, json
from linebot import (
LineBotApi, WebhookHandler
)
from linebot.exceptions import (
InvalidSignatureError, LineBotApiError
)
from linebot.models import (
MessageEvent, TextMessage, TextSendMessage,
)
app = Flask(__name__)
CHANNEL_SECRET = os.environ.get('provider_channel_secret')
CHANNEL_ACCESS_TOKEN = os.environ.get('provider_top_channel_access_token')
USER_TOP_TOKEN = os.environ.get('user_top_id')
USER_RAM_TOKEN = os.environ.get('user_ram_id')
line_bot_api = LineBotApi(CHANNEL_ACCESS_TOKEN)
handler = WebhookHandler(CHANNEL_SECRET)
db = ''
@app.route('/line_callback', methods=['POST'])
def line_callback():
    """LINE platform webhook: verify the request signature, dispatch events."""
    signature = request.headers['X-Line-Signature']
    body = request.get_data(as_text=True)
    app.logger.info("Request body: " + body)
    try:
        # dispatches to the @handler.add(...) callbacks registered below
        handler.handle(body, signature)
    except InvalidSignatureError:
        # signature does not match the channel secret -> reject the request
        abort(400)
    return request.data
@handler.add(MessageEvent, message=TextMessage)
def handle_message(event):
    """Buffer the latest text message from a LINE user in the module-level 'db'."""
    global db
    text = event.message.text  # message text sent by the user
    # NOTE(review): 'db' is a single module-level string, so concurrent
    # messages overwrite each other -- confirm this single-slot buffer is intended.
    db = text
@app.route('/')
def index():
    # WARNING(review): this response exposes the channel secret and a user id
    # over HTTP -- almost certainly a debugging leftover; remove before
    # deploying anywhere public.
    return CHANNEL_SECRET + '\n' + USER_RAM_TOKEN
@app.route('/hello', methods=['GET'])
def hello():
    """Return a greeting built from the ``name`` query-string parameter.

    A missing ``name`` yields None and raises TypeError on concatenation,
    matching the original handler.
    """
    return 'hello ' + request.args.get('name')
@app.route('/callback', methods=['POST'])
def callback():
    """Echo back the posted JSON payload's id/name/message fields as text."""
    payload = request.get_json()
    json_string = json.dumps(payload)
    data_dict = json.loads(json_string)  # round-trip normalises the payload
    lines = [
        'json string: ' + json_string,
        'user id: ' + data_dict['id'],
        'user name: ' + data_dict['name'],
        'user message: ' + data_dict['message'],
    ]
    return '\n'.join(lines)
@app.route('/push_to_line', methods=['POST'])
def push_to_line():
    """Relay the 'message' field of a posted JSON payload to the LINE user."""
    payload = request.get_json()
    json_string = json.dumps(payload)
    data_dict = json.loads(json_string)  # round-trip normalises the payload
    user_name = data_dict['name']  # NOTE(review): read but never used
    user_id = data_dict['id']  # NOTE(review): read but never used
    user_message = data_dict['message']
    line_bot_api.push_message(USER_RAM_TOKEN, TextSendMessage(text=user_message))
    result = {'status':'success'}
    # NOTE(review): prints a credential to stdout -- debugging leftover
    print(CHANNEL_ACCESS_TOKEN)
    return json.dumps(result)
@app.route('/message_checker', methods=['POST'])
def message_checker():
    """Pop-and-return the buffered LINE message, or 'NONE' when empty.

    Reading clears the buffer, so each message is delivered to at most one
    caller. NOTE(review): the check-then-clear on the global is not atomic
    under concurrent requests.
    """
    global db
    if db != '':
        temp = db
        db = ''
        return temp
    else:
        return 'NONE'
if __name__ == "__main__":
    # development entry point: Flask's built-in server
    app.run()
| StarcoderdataPython |
9660935 | <gh_stars>0
from django.shortcuts import render
from django.views.generic.base import RedirectView
# Create your views here.
def index(request):
    """Render the landing page with an empty context."""
    return render(request, "index.html", context={})
from django.views.generic import TemplateView  # NOTE(review): mid-file import; conventionally belongs at the top
class Home(TemplateView):
    """Static home page."""
    template_name = "home.html"
class LoginView(TemplateView):
    """Static login page (template only; no auth logic here)."""
    template_name = "core/login.html"
class About(TemplateView):
    """Static about page."""
    template_name = "core/about.html"
class LoginRedirectView(RedirectView):
    """Redirect to the servers list."""
    # NOTE(review): pattern_name is normally a URL pattern to reverse; it is
    # ignored here because get_redirect_url is overridden -- confirm intent.
    pattern_name = "redirect-to-login"
    def get_redirect_url(self, *args, **kwargs):
        return "/servers"
| StarcoderdataPython |
5074619 | __all__ = ["parser"]
from icevision.all import *
def parser(data_dir: Path):
    """Build a Pascal-VOC XML parser for the odFridgeObjects dataset.

    Args:
        data_dir: root folder containing the extracted ``odFridgeObjects``
            dataset (with ``annotations`` and ``images`` sub-folders).

    Returns:
        An icevision ``VocXmlParser`` configured with the four fridge classes.
    """
    parser = parsers.VocXmlParser(
        annotations_dir=data_dir / "odFridgeObjects/annotations",
        images_dir=data_dir / "odFridgeObjects/images",
        class_map=ClassMap(["milk_bottle", "carton", "can", "water_bottle"]),
    )
    return parser
| StarcoderdataPython |
9664835 | # coding:utf-8
from __future__ import unicode_literals,division,print_function
__author__ = 'timmyliang'
__email__ = '<EMAIL>'
__date__ = '2020-04-29 17:07:57'
"""
"""
import os
import sys
DIR = os.path.dirname(__file__)
MODULE = os.path.join(DIR,"..","QBinding","_vendor")
if MODULE not in sys.path:
sys.path.append(MODULE)
# import QBinding
from Qt import QtWidgets
from Qt import QtCore
from Qt import QtGui
class WidgetTest(QtWidgets.QWidget):
    """Demonstrates injecting Qt signals into a class via locals().update().

    The commented-out lines are the conventional signal declarations; the
    dict + locals().update() route reproduces a Python 2 pitfall: with
    ``unicode_literals`` imported, the dict keys become unicode strings and
    Qt's meta-object machinery misbehaves (see the note after the class).
    """
    # ComputedSignal = QtCore.Signal()
    # StateSignal = QtCore.Signal()
    _var_dict = {}
    _var_dict["ComputedSignal"] = QtCore.Signal()
    _var_dict["StateSignal"] = QtCore.Signal()
    locals().update(_var_dict)
    def __init__(self):
        super(WidgetTest, self).__init__()
        # ! RuntimeError: maximum recursion depth exceeded while calling a Python object
        # chain StateSignal -> ComputedSignal, then trigger the chain once
        getattr(self,"StateSignal").connect(getattr(self,"ComputedSignal").emit)
        self.ComputedSignal.connect(lambda:print("computed signal"))
        getattr(self,"StateSignal").emit()
        # ! Empty Property (dynamic property list comes back empty under the bug)
        self.setProperty("test","hello,world")
        print(self.dynamicPropertyNames())
# ! remove unicode_literals will fix all the problems or Add b to the _var_dict key
if __name__ == '__main__':
app = QtWidgets.QApplication(sys.argv)
widget = WidgetTest()
widget.show()
sys.exit(app.exec_()) | StarcoderdataPython |
3239274 | <reponame>isabella232/cauliflowervest
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Duplicity keypair client."""
# Because of OSS
# pylint: disable=g-line-too-long
from cauliflowervest import settings as base_settings
from cauliflowervest.client import base_client
class DuplicityClient(base_client.CauliflowerVestClient):
  """Client to perform Duplicity key-pair escrow operations."""

  ESCROW_PATH = '/duplicity/'
  PASSPHRASE_KEY = 'key_pair'
  REQUIRED_METADATA = base_settings.DUPLICITY_REQUIRED_PROPERTIES

  # Alias the RetrieveSecret method for naming consistency.
  # pylint: disable=g-bad-name
  RetrieveKeyPair = base_client.CauliflowerVestClient.RetrieveSecret

  def UploadKeyPair(self, volume_uuid, key_pair, metadata):
    """Escrow key_pair for volume_uuid, attaching metadata to the request."""
    self._metadata = metadata
    super(DuplicityClient, self).UploadPassphrase(volume_uuid, key_pair)
| StarcoderdataPython |
def grades():
    """Prompt for 5 subject marks and print a letter grade for the total."""
    count = 0
    total = 0
    # collect exactly five numeric marks, re-prompting on non-numeric input
    while count < 5:
        grade = input(f'Enter marks for subject {count+1}: ')
        if grade.isdigit():
            count+=1
            total = total + int(grade)
        else:
            print("Enter numbers only..")
    # NOTE(review): the thresholds below are applied to the *sum* of the five
    # marks, not the average -- if each subject is out of 100 the total can
    # reach 500, so this looks like a bug; confirm the intended scale.
    # (int(total) is redundant: total is already an int.)
    if int(total)>=90:
        print("A*")
    elif int(total)>=80:
        print("A")
    elif int(total)>=70:
        print("B")
    elif int(total)>=60:
        print("C")
    elif int(total)>=50:
        print("D")
    else:
        print("Fail")
def evenOdd():
    """Prompt until a non-negative integer is entered, then report its parity."""
    while True:
        number = input("Enter a number: ")
        if number.isdigit():
            break
        print("Please enter a number..")
    print("Even Number" if int(number) % 2 == 0 else "Odd number")
def printLengthOfList(items=None):
    """Print (and return) the number of elements in *items*.

    Args:
        items (list | None): sequence to measure; defaults to the original
            demo list, so the old no-argument call keeps working.

    Returns:
        int: the element count.
    """
    if items is None:
        items = [1, 2, 3, "python", "java"]
    print("Printing the length of the list", items)
    length = len(items)  # built-in len() replaces the manual counting loop
    print(length)
    return length
def sumList(items=None):
    """Print (and return) the sum of all int-convertible elements of *items*.

    Elements that cannot be converted with ``int()`` (e.g. non-numeric
    strings) are skipped, matching the original demo's behaviour.

    Args:
        items (list | None): values to sum; defaults to the original demo list.

    Returns:
        int: total of the convertible elements.
    """
    if items is None:
        items = [1, 2, 3, "python", "java", "javascript", "34", "43"]
    print("summing all the numbers from the list", items)
    total = 0
    for item in items:
        try:
            total += int(item)
        except (TypeError, ValueError):
            # non-numeric entry: ignore it, as the demo intends
            continue
    print(total)
    return total
def largest(numbers=None):
    """Print (and return) the largest value in *numbers*.

    Args:
        numbers (list | None): values to scan; defaults to the original demo
            list. An empty input yields 0, as the old code did.

    Returns:
        The maximum element (0 for an empty input).
    """
    if numbers is None:
        numbers = [1, 23, 123, 4, 5, 34, 234, 33, 4, 44]
    print("Printing largest number from the list", numbers)
    # max() handles all-negative inputs correctly, unlike the old
    # start-from-zero scan; default=0 preserves the empty-list result
    result = max(numbers, default=0)
    print(result)
    return result
def lessThenFive(numbers=None, limit=5):
    """Print (and return) all values of *numbers* strictly below *limit*.

    Args:
        numbers (list | None): values to filter; defaults to the original
            demo list, so the old no-argument call keeps working.
        limit (int): exclusive upper bound (5 by default, as before).

    Returns:
        list: the values below *limit*, in input order.
    """
    if numbers is None:
        numbers = [1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89]
    print("Printing number less than five from the list", numbers)
    below = [value for value in numbers if value < limit]
    for value in below:
        print(value)
    return below
if __name__ == "__main__":
    # Interactive demo: run each exercise in turn (the first two block on
    # keyboard input).
    grades()
    print("")
    evenOdd()
    print("")
    printLengthOfList()
    print("")
    sumList()
    print("")
    largest()
    print("")
    lessThenFive()
| StarcoderdataPython |
8075808 | # -*- coding: utf-8 -*-
from builtins import object
from django import forms
from django.conf import settings
from django.contrib.admin import widgets as admin_widgets
from django.utils.http import quote
from django.utils.safestring import mark_safe
if "django_thumbor" in settings.INSTALLED_APPS:
from django_thumbor import generate_url
else:
from django.template import Context, Template
from ..templatetags.stachoutils_extras import truncate
STATICS = {
'js': [
'django_stachoutils/js/jquery.charcounter.js',
'django_stachoutils/js/jquery.fieldselection.js',
'django_stachoutils/js/editeur.js',
]
}
class TexteEditeur(forms.Textarea):
    """Text-editor widget rendered from a custom template, with JS assets."""
    template_name = 'django_stachoutils/forms/widgets/texte_editeur.html'
    def _media(self):
        # expose the editor's javascript assets through Django's form-media system
        return forms.Media(**STATICS)
    media = property(_media)
class AdminTexteEditeur(admin_widgets.AdminTextareaWidget, TexteEditeur):
    """Admin flavour of TexteEditeur (inherits the admin textarea styling)."""
    pass
class TextareaCounter(AdminTexteEditeur):
    """Admin textarea with an optional live character counter (jQuery charCounter)."""
    def __init__(self, max_signs=None, direction='up', attrs=None):
        # max_signs: character budget shown by the counter (None disables it)
        # direction: 'up' counts typed characters, 'down' counts remaining
        self.max_signs = max_signs
        self.direction = direction
        super(TextareaCounter, self).__init__(attrs)
    def render(self, name, value, attrs=None, renderer=None):
        """Render the textarea, appending the counter's inline <script> when enabled."""
        textarea = super(TextareaCounter, self).render(name, value, attrs)
        jscounter = ''
        if self.max_signs:
            label = "signe(s)"
            if self.direction == 'down':
                label += ' restant(s)'
            # NOTE(review): attrs['id'] assumes attrs is provided and carries
            # an id (Django's form machinery normally guarantees this).
            jscounter = (
                '<script type="text/javascript">'
                '$(document).ready(function () { '
                '$("#%(id)s").charCounter(%(max_signs)d, {'
                'container: "<div></div>",'
                'direction: "%(direction)s",'
                'format: "%%1 %(label)s",'
                'limit: false,'
                'limit_class: "error"'
                '});'
                '});'
                '</script>' % {
                    'id': attrs['id'],
                    'direction': self.direction,
                    'label': label,
                    'max_signs': self.max_signs})
        return mark_safe(textarea + jscounter)
class TextInputCounter(forms.TextInput):
    """Single-line text input with an optional live character counter.

    Mirrors TextareaCounter (same jQuery plugin), but renders the counter in
    a <span> next to a plain text input.
    """
    def __init__(self, max_signs=None, direction='up', attrs=None):
        self.max_signs = max_signs
        self.direction = direction
        super(TextInputCounter, self).__init__(attrs)
    def render(self, name, value, attrs=None, renderer=None):
        """Render the input, appending the counter's inline <script> when enabled."""
        textinput = super(TextInputCounter, self).render(name, value, attrs)
        jscounter = ''
        if self.max_signs:
            label = "signe(s)"
            if self.direction == 'down':
                label += ' restant(s)'
            jscounter = (
                '<script type="text/javascript">'
                '$(document).ready(function () { '
                '$("#%(id)s").charCounter(%(max_signs)d, {'
                'container: "<span></span>",'
                'direction: "%(direction)s",'
                'format: "%%1 %(label)s",'
                'limit: false,'
                'limit_class: "error"'
                '});'
                '});'
                '</script>' % {
                    'id': attrs['id'],
                    'direction': self.direction,
                    'label': label,
                    'max_signs': self.max_signs})
        return mark_safe(textinput + jscounter)
    def _media(self):
        # same JS assets as the textarea editor widgets
        return forms.Media(**STATICS)
    media = property(_media)
class ImageDroppableHiddenInput(forms.HiddenInput):
    """Hidden FK input with a drag-and-drop image preview UI.

    The caller is expected to set related_model / related_fieldname (and
    optionally image_container_html / message) after instantiation.
    """
    def __init__(self, *args, **kwargs):
        super(ImageDroppableHiddenInput, self).__init__(*args, **kwargs)
        self.related_model, self.related_fieldname = None, None
        self.image_container_html = ''
        self.message = ''
    class Media(object):
        css = {
            'all': ('django_stachoutils/css/forms.css',)
        }
        js = ('django_stachoutils/js/forms.js',)
    def render(self, name, value, attrs=None, renderer=None):
        """Render the hidden input wrapped in the droppable preview markup."""
        # NOTE(review): attrs is deliberately discarded here (attrs=None is
        # passed to super) -- confirm the caller's attrs really should be dropped.
        hidden_input = super(ImageDroppableHiddenInput, self).render(name, value, attrs=None)
        image_tag = '<img />'
        display_name = '-'
        if value:
            rel_obj = self.related_model.objects.get(pk=value)  # TODO: use a get-or-none helper (may raise DoesNotExist)
            image_tag = self._get_thumbnail(rel_obj)
            display_name = truncate(str(rel_obj), 23)
        tag = (
            '<div class="droppableHiddenInput">%s'
            ' <div>%s</div>'
            ' <div class="droppableContainer"><span class="delete" title="Vider l\'emplacement"></span>%s'
            '  <div class="droppable"><div class="draggable">%s</div></div>'
            ' </div>'
            ' <div class="message">%s</div>'
            '</div>' % (hidden_input, display_name, self.image_container_html, image_tag, self.message)
        )
        return mark_safe(tag)
    def _get_thumbnail(self, rel_obj):
        """Build a thumbnail <img> for the related object's image field."""
        image_field = getattr(rel_obj, self.related_fieldname)
        # prefer thumbor when installed; otherwise fall back to sorl-thumbnail tags
        if "django_thumbor" in settings.INSTALLED_APPS:
            return get_thumbor_thumbnail_tag(image_field)
        else:
            t = Template('{% load thumbnail %}{% thumbnail img_field "120" as im %}<img src="{{ im.url }}"'
                         'width="{{ im.width }}" height="{{ im.height }}">{% endthumbnail %}')
            d = {"img_field": image_field}
            return mark_safe(t.render(Context(d)))
def get_thumbor_thumbnail_tag(image, width=120):
    """Return a safe <img> tag for *image* resized to *width* via thumbor."""
    return mark_safe('<img src="%s" width="%d">' % (
        get_thumbor_thumbnail_url(image, width=width),
        width))
def get_thumbor_thumbnail_url(image, **kwargs):
    """Build the thumbor URL for *image*, picking internal/external server.

    When the storage exposes a key() for the file, the storage key and the
    internal thumbor server are used; otherwise the public URL and the
    external server.
    """
    storage = image.storage
    thumbor_server = settings.THUMBOR_SERVER_EXTERNAL
    url = quote(image.url)
    if hasattr(storage, "key"):
        try:
            url = storage.key(image.name)
        except NotImplementedError:
            # storage advertises key() but cannot provide one: keep the public URL
            pass
        else:
            thumbor_server = settings.THUMBOR_SERVER
    return generate_url(url, thumbor_server=thumbor_server, **kwargs)
| StarcoderdataPython |
6465682 | # -*- coding: utf-8 -*-
import os
import sys
import smbus
import time
import datetime
import RPi.GPIO as GPIO
import Yi2k_ctrl
import subprocess
import gpsd
import threading
import runpy
import argparse
import Adafruit_Nokia_LCD as LCD
import Adafruit_GPIO.SPI as SPI
import lcd_menu as menu
from queue import Queue
from PIL import Image
from PIL import ImageDraw
from PIL import ImageFont
cam_range=0b00001111
global cams_up
# Set Rpi.GPIO to BCM mode
GPIO.setmode(GPIO.BCM)
GPIO.cleanup()
# Channel used to receive MCP interrupts
int_pin = 17
# Set this channel as input
GPIO.setup(int_pin, GPIO.IN)
# Channel used for the buzzer
buzzer_pin = 22
GPIO.setup(buzzer_pin, GPIO.OUT)
# Raspberry Pi hardware SPI config:
DC = 23
RST = 24
SPI_PORT = 0
SPI_DEVICE = 0
#bus = smbus.SMBus(0) # Rev 1 Pi uses 0
bus = smbus.SMBus(1) # Rev 2 Pi uses 1
Keypressed = False
DEVICE = 0x20 # Device address (A0-A2)
IODIRA = 0x00 # IO direction (0 = output, 1 = input (Default))
IODIRB = 0x01
IOPOLA = 0x02 # IO polarity (0 = normal, 1 = inverse)
IOPOLB = 0x03
GPINTENA =0x04 # Interrupt on change (0 = disable, 1 = enable)
GPINTENB =0x05
DEFVALA = 0x06 # Default comparison for interrupt on change (interrupts on opposite)
DEFVALB = 0x07
INTCONA = 0x08 # Interrupt control (0 = interrupt on change from previous, 1 = interrupt on change from DEFVAL)
INTCONB = 0x09
IOCON = 0x0A # IO Configuration: bank/mirror/seqop/disslw/haen/odr/intpol/notimp
#IOCON 0x0B // same as 0x0A
GPPUA = 0x0C # Pull-up resistor (0 = disabled, 1 = enabled)
GPPUB = 0x0D
INFTFA = 0x0E # Interrupt flag (read only) : (0 = no interrupt, 1 = pin caused interrupt)
INFTFB = 0x0F
INTCAPA = 0x10 # Interrupt capture (read only) : value of GPIO at time of last interrupt
INTCAPB = 0x11
GPIOA = 0x12 # Port value. Write to change, read to obtain value
GPIOB = 0x13
OLLATA = 0x14 # Output latch. Write to latch output.
OLLATB = 0x15
# Set all GPA pins as outputs by setting
# all bits of IODIRA register to 0
bus.write_byte_data(DEVICE,IODIRA,0x00)
# Set output all 7 output bits to 0
bus.write_byte_data(DEVICE,OLLATA,0)
# Set GPIOA polarity to normal
bus.write_byte_data(DEVICE, IOPOLA, 0)
# Set GPIOB pins as inputs
bus.write_byte_data(DEVICE,IODIRB, 0xFF)
# Enable pull up resistor on GPIOB
bus.write_byte_data(DEVICE, GPPUB, 0xFF)
#Set GPIOB polarity as inverted
bus.write_byte_data(DEVICE,IOPOLB, 0xFF)
# no mirror interrupts, disable sequential mode, active HIGH
bus.write_byte_data(DEVICE, IOCON, 0b01100010)
#Enable interrupt on port B
bus.write_byte_data(DEVICE, GPINTENB, 0xFF)
"""
for MyData in range(1,16):
# Count from 1 to 8 which in binary will count
# from 001 to 111
bus.write_byte_data(DEVICE,OLLATA,0x1)
time.sleep(1)
bus.write_byte_data(DEVICE,OLLATA,0x0)
time.sleep(1)
"""
def my_callback(channel):
#print('This is a edge event callback function!')
print('Edge detected on channel %s'%channel)
#print('This is run in a different thread to your main program')
global Keypressed
Keypressed = True
# add rising edge detection on a channel
GPIO.add_event_detect(int_pin, GPIO.RISING, callback=my_callback)
#reset interrupt on mcp, or an already active interrupt
#would disable a new one, rendering the mcp unusable.
bus.read_byte_data(DEVICE, INTCAPB)
bus.read_byte_data(DEVICE, INTCAPA)
#Hall sensor pin
hall_pin=25
# Set this channel as input
GPIO.setup(hall_pin, GPIO.IN)
GPIO.setup(hall_pin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
def hall_callback(hall_pin):
print('Edge detected on pin %s' %hall_pin)
cams_takePic(MyCams,logqueue, cam_range)
lcd_write_text("Picture", 1)
GPIO.add_event_detect(hall_pin, GPIO.FALLING, callback=hall_callback)
"""
void handleKeypress ()
{
unsigned int keyValue = 0;
delay (100); // de-bounce before we re-enable interrupts
keyPressed = false; // ready for next time through the interrupt service routine
digitalWrite (ISR_INDICATOR, LOW); // debugging
// Read port values, as required. Note that this re-arms the interrupts.
if (expanderRead (INFTFA))
keyValue |= expanderRead (INTCAPA) << 8; // read value at time of interrupt
if (expanderRead (INFTFB))
keyValue |= expanderRead (INTCAPB); // port B is in low-order byte
Serial.println ("Button states");
Serial.println ("0 1");
Serial.println ("0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5");
// display which buttons were down at the time of the interrupt
for (byte button = 0; button < 16; button++)
{
// this key down?
if (keyValue & (1 << button))
Serial.print ("1 ");
else
Serial.print ("0 ");
} // end of for each button
Serial.println ();
// if a switch is now pressed, turn LED on (key down event)
if (keyValue)
{
time = millis (); // remember when
digitalWrite (ONBOARD_LED, HIGH); // on-board LED
} // end if
} // end of handleKeypress
"""
def handleKeyPress():
    """Decode which front-panel button raised the MCP23017 interrupt and act on it.

    Reads the interrupt-capture register (which also re-arms the expander's
    interrupt), then dispatches on the captured bit mask. Navigation keys
    only set the key* flags; the main loop performs the menu action.
    """
    print("In handleKeyPress function")
    KeyValue = 0
    time.sleep(0.1)  # crude de-bounce before reading the capture register
    global Keypressed, keyDown, keyUp, keySelect, keyBack
    Keypressed = False
    if bus.read_byte_data(DEVICE, INFTFB):
        KeyValue |= bus.read_byte_data(DEVICE, INTCAPB)
    #clear interrupt
    print(bin(KeyValue))
    if KeyValue & 0b1:
        print("Power up button")
        # fix: cams_power_up expects (cameras_obj, cams); the camera object
        # was missing, so cam_range was being passed as the camera object
        cams_power_up(MyCams, cam_range)
        lcd_write_text("Powering up...", 10)
        lcd_write_text("Cams ready", 5)
    elif KeyValue & 0b10:
        print("Power down button")
        cams_power_down(MyCams, cam_range)  # fix: pass the camera object first
        lcd_write_text("Pwr cam down..", 5)
    elif KeyValue & 0b100:
        print("Shutter button")
        # fix: argument order is (cameras_obj, log_queue, cams, pic_id);
        # pic_count and cam_range were swapped
        cams_takePic(MyCams, logqueue, cam_range, pic_count)
        #lcd_write_text("Picture", 1)
    elif KeyValue & 0b1000:
        print("Select button")
        keySelect = True
    elif KeyValue & 0b10000:
        print("Down button")
        keyDown = True
    elif KeyValue & 0b100000:
        print("Up button")
        keyUp = True
    elif KeyValue & 0b1000000:
        print("Back button")
        keyBack = True
    #reset interrupt on mcp
    bus.read_byte_data(DEVICE, INTCAPB)
    bus.read_byte_data(DEVICE, INTCAPA)
# Hardware SPI usage:
disp = LCD.PCD8544(DC, RST, spi=SPI.SpiDev(SPI_PORT, SPI_DEVICE, max_speed_hz=4000000))
# Software SPI usage (defaults to bit-bang SPI interface):
#disp = LCD.PCD8544(DC, RST, SCLK, DIN, CS)
# Initialize library.
disp.begin(contrast=50)
# Clear display.
disp.clear()
disp.display()
def splash_boot(pause_time=5):
# Create blank image for drawing.
# Make sure to create image with mode '1' for 1-bit color.
boot = Image.new('1', (LCD.LCDWIDTH, LCD.LCDHEIGHT))
# Get drawing object to draw on image.
draw = ImageDraw.Draw(boot)
# Draw a white filled box to clear the image.
draw.rectangle((0,0,LCD.LCDWIDTH,LCD.LCDHEIGHT), outline=255, fill=255)
# Load default font.
font = ImageFont.load_default()
# Alternatively load a TTF font.
# Some nice fonts to try: http://www.dafont.com/bitmap.php
# font = ImageFont.truetype('Minecraftia.ttf', 8)
# Write some text.
draw.text((0,8), ' Welcome to ', font=font)
draw.text((0,16), 'V4MPOD v2.01', font=font)
# Display image.
disp.image(boot)
disp.display()
time.sleep(pause_time)
# Clear display.
disp.clear()
disp.display()
def lcd_write_text(lcdtext, timeout=None):
disp.clear()
disp.display()
image = Image.new('1', (LCD.LCDWIDTH, LCD.LCDHEIGHT))
draw = ImageDraw.Draw(image)
font = ImageFont.load_default()
draw.rectangle((0,0,LCD.LCDWIDTH,LCD.LCDHEIGHT), outline=255, fill=255)
# Write some text.
draw.text((0,8), lcdtext, font=font)
disp.image(image)
disp.display()
if timeout:
time.sleep(timeout)
disp.clear()
disp.display()
def beep(duration=0.2, pause=0.2, repeat=0):
for _rpt in range(repeat +1):
GPIO.output(buzzer_pin,1)
time.sleep(duration)
GPIO.output(buzzer_pin,0)
time.sleep(pause)
def cams_takePic(cameras_obj, log_queue, cams=None, pic_id=1):
    """Trigger a picture on the given camera mask, log and beep the result.

    NOTE(review): pic_id is accepted but never used in this body -- confirm
    whether it was meant to be logged.
    """
    pic_answer = cameras_obj.takePic(cams)
    #pic_answer is a tuple: timestamp, pic_return, cam, status
    log_queue.put(str(pic_answer[0]) + "," + str(pic_answer[1]) + "," + str(pic_answer[2]) + "," + pic_answer[3] + "\n")
    if pic_answer[3] == "ok":
        beep(0.1)  # short beep: success
    else:
        beep(0.4, 0.1, 2)  # long triple beep: failure
    return pic_answer[1]
def picLoop(cam, pic_nbr, pause, logqueue):
    """Timelapse worker: take pic_nbr-1 pictures, pausing between shots.

    Stops early when the thread's 'do_run' attribute is set to False.
    NOTE(review): the 'cam' parameter is ignored -- the global cam_range is
    used instead; confirm which is intended.
    """
    t = threading.currentThread()
    for i in range(pic_nbr-1):
        if getattr(t, "do_run", True):
            cams_takePic(MyCams, logqueue, cam_range, i)
            time.sleep(pause)
        else:
            break
def start_Timelapse():
    """Start the timelapse worker thread (effectively unbounded: 100000 shots)."""
    global timelapsethread
    timelapsethread=threading.Thread(target=picLoop, args=(cam_range, 100000, 1.3, logqueue,), name="Picloop")
    timelapsethread.start()
def stop_Timelapse():
    """Ask the timelapse worker to stop after its current shot."""
    global timelapsethread
    timelapsethread.do_run = False
def cams_arduino_connect(camera_obj):
    """Open the serial link to the Arduino camera controller and log the result."""
    timestamp, answer = camera_obj.connect()
    logfile.write(str(timestamp) + "," + "Arduino connection: " + "," + str(answer) + "\n")
    return answer
def cams_power_up(cameras_obj, cams=None):
    """Power up the cameras selected by the 'cams' bit mask; log the outcome."""
    timestamp, answer, cams = cameras_obj.power_up(cams)
    logfile.write(str(timestamp) + "," + str(answer) + "," + str(cams) + "\n")
    return answer
def cams_power_down(cameras_obj, cams=None):
    """Power down the cameras selected by the 'cams' bit mask; log the outcome."""
    timestamp, answer, cams = cameras_obj.power_down(cams)
    logfile.write(str(timestamp) + "," + "Power down" + "," + str(cams) + "\n")
    return answer
def start_gnss_log():
subprocess.call(["gpspipe -d -R -o ~/Documents/Sessions_V4MPOD/`date +%Y-%m-%d_%H.%M.%S`.nmea"], shell=True)
def stop_gnss_log():
subprocess.call(["killall", "gpspipe"])
def show_log():
lcd_write_text(logfile.readline()[-4:], 5)
menu.display_img(current_img, disp)
def gnss_fix():
gpsd.connect()
mode=0
while mode < 3:
try:
packet=gpsd.get_current()
mode = packet.mode
except:
pass
print("Gnss Fix")
time.sleep(10)
def gnss_localization():
gpsd.connect()
timer= 0
while timer < 5:
try:
packet=gpsd.get_current()
gnss_info = str(packet.position()[0]) + "\n" + str(packet.position()[1]) + "\n" + packet.time[-13:]
lcd_write_text(gnss_info, 1)
except:
gnss_info = "Error"
timer +=1
menu.display_img(current_img, disp)
print(packet.position())
print(packet.time)
def cams_set_clocks(cameras_obj):
timestamp, answer = cameras_obj.set_clocks()
if answer:
logfile.write(str(timestamp) + "," + "Yi set clock: OK" + "\n")
beep(0.1)
else:
logfile.write(str(timestamp) + "," + "Yi set clock: Can't set clock, communication error" + "\n")
beep(0.4, 0.1, 2)
def cams_send_settings(cameras_obj):
answer = cameras_obj.send_settings()
if answer[1]:
logfile.write(str(answer[0]) + "," + "Yi send settings: OK" + "\n")
beep(0.1)
else:
logfile.write(str(answer[0]) + "," + "Yi send settings: Can't send settings, communication error" + "\n")
beep(0.4, 0.1, 2)
def arg_parser():
"""parse the command line"""
parser = argparse.ArgumentParser(description="Main V4MPOD software")
parser.add_argument("-i", "--interactive", help="Interactive mode to be able to use the command line", action="store_true")
args = parser.parse_args()
if args.interactive:
print("entering interactive mode")
global keepRunning
keepRunning=False
def exit_loop():
global keepRunning
keepRunning=False
def power_down_pi():
exit_prog()
os.system("sudo shutdown -h now")
def exit_prog():
#TODO : stop the flush thread
global keepRunning
#try:
#reset interrupt on mcp
bus.read_byte_data(DEVICE, INTCAPB)
bus.read_byte_data(DEVICE, INTCAPA)
bus.close()
stop_gnss_log()
logfile.write("Exiting" + "\n")
logfile.close()
flushthread.do_run = False
GPIO.cleanup()
print("Exiting program A")
keepRunning=False
#except:
# print("Erreur en quittant")
print("Exiting program B")
print("Exiting V4MPOD")
sys.exit()
def open_file():
global flushthread
now=datetime.datetime.now()
filename = os.path.expanduser("~") + "/Documents/Sessions_V4MPOD/cam_log_" + now.strftime("%Y-%m-%d_%H.%M.%S") + ".log"
logfile=open(filename, "w")
flushthread=threading.Thread(target=flush_log, args=(logqueue,), name="flushlog")
flushthread.start()
return logfile
def new_session():
#TODO vรฉrifier l'รฉtat du compteur de photo aprรจs crรฉation d'une nouvelle session
global logfile
logfile.write("Close logfile" + "\n")
logfile.close()
flushthread.do_run = False
#stop gnss log
stop_gnss_log()
#start new logfile
logfile = open_file()
#start new gnss log
start_gnss_log()
def flush_log(logqueue):
#since Python 3.4 file are inheritable. I think it's why
#flush() alone doesn't work in this thread.
t = threading.currentThread()
while getattr(t, "do_run", True):
time.sleep(10)
try:
while not logqueue.empty():
#logfile.write(logqueue.get())
logline = logqueue.get()
print (logline)
logfile.write(logline)
logqueue.task_done()
logfile.flush()
os.fsync(logfile.fileno())
except:
None
def menu_previous_line():
    """Move the menu cursor up one entry at the current menu level.

    menuA layout: menuA[0] is the top-level entry list, menuA[-2] the
    per-level cursor positions, menuA[-1] the active level.
    NOTE(review): the bare 'except' silently swallows any IndexError/KeyError
    from malformed menu state -- consider narrowing it.
    """
    global menuA
    pos = menuA[-2]
    level = menuA[-1]
    try:
        if pos[level] > 0:
            # move up one line and refresh the display
            menuA[-2][menuA[-1]] -= 1  # update the cursor position
            if level == 0:
                print(menuA[0][menuA[-2][0]])
            if level == 1:
                print(menuA[menuA[-2][level - 1] + 1][menuA[-2][level]])
    except:
        None
def menu_next_line():
    """Move the menu cursor down one entry, bounded by the current list length.

    Same data layout and bare-except caveat as menu_previous_line.
    """
    global menuA
    pos = menuA[-2]
    level = menuA[-1]
    try:
        if level == 0 and (pos[0]+1 < len(menuA[0])):
            menuA[-2][level] += 1  # update the cursor position
            print(menuA[0][menuA[-2][0]])
        if level==1 and (pos[level]+1 < len(menuA[pos[level-1]+1])):
            # move down one line and refresh the display
            menuA[-2][level] += 1  # update the cursor position
            print(menuA[menuA[-2][level-1]+1][menuA[-2][level]])
    except:
        None
menuA = [[{"Name":"Take Pic", "Func":"cams_takePic", "Param":"MyCams, logqueue, cam_range"},
{"Name":"Power up Cams", "Func":"cams_power_up", "Param":"MyCams, cam_range"},
{"Name":"Power down Cams", "Func":"cams_power_down", "Param":"MyCams, cam_range"},
{"Name":"Start TimeLapse", "Func":"start_Timelapse", "Param":""},
{"Name":"Stop TimeLapse", "Func":"stop_Timelapse", "Param":""},
{"Name":"Start cam log", "Func":"logfile=open_file", "Param":""},
{"Name":"Stop Gnss log", "Func":"stop_gnss_log", "Param":""},
{"Name":"GNSS Info", "Func":"gnss_localization", "Param":""},
{"Name":"Set Yi settings", "Func":"cams_send_settings", "Param":"MyCams"},
{"Name":"Set Yi clock", "Func":"cams_set_clocks", "Param":"MyCams"},
{"Name":"Start new session", "Func":"new_session", "Param":""},
{"Name":"Exit", "Func":"exit_prog", "Param":""},
{"Name":"Power off PI", "Func":"power_down_pi", "Param":""},
],
[0, 0], 0]
# Main
splash_boot()
gnss_fix()
keyDown = False
keyUp = False
keySelect = False
keyBack = False
keepRunning=True
flushthread = None
timelapsethread = None
Timelapse = False
cams_up = False
pic_count = 0
logqueue=Queue(maxsize=0)
back=menu.create_blanck_img()
img_menu_top = menu.create_full_img(menuA[0])
current_img=menu.select_line(img_menu_top, back, 1, disp)
start_gnss_log()
logfile=open_file()
MyCams = Yi2k_ctrl.Yi2K_cam_ctrl('/dev/ttyACM0', '115200', cam_range)
cams_arduino_connect(MyCams)
#check if interactive mode is enabled
arg_parser()
# Loop until user presses CTRL-C
while keepRunning:
if Keypressed:
handleKeyPress()
if keyDown:
keyDown=False
menu_next_line()
current_img=menu.select_line(img_menu_top, back, (menuA[-2][0])+1, disp)
if keyUp:
keyUp=False
menu_previous_line()
current_img=menu.select_line(img_menu_top, back, (menuA[-2][0])+1, disp)
if keySelect:
keySelect=False
exec(menuA[0][menuA[-2][0]]["Func"] + "(" + menuA[0][menuA[-2][0]]["Param"] +")")
print("exec done")
| StarcoderdataPython |
3277250 | from utils.db.mongo_orm import *
# ็ฑปๅๅฎไน collection
class TestReportDetail(Model):
    """Mongo document storing the per-test-case detail rows of a test report."""
    class Meta:
        database = db
        collection = 'testReportDetail'
    # fields
    _id = ObjectIdField()  # reportDetailId
    reportId = ObjectIdField()
    projectId = ObjectIdField()
    testSuiteId = ObjectIdField()
    testCaseId = ObjectIdField()
    resultDetail = DictField()  # raw result payload for the test case
    createAt = DateField()
    def __str__(self):
        return "reportId:{} - testSuiteId:{} - testCaseId:{}".format(self.reportId, self.testSuiteId, self.testCaseId)
if __name__ == '__main__':
pass
| StarcoderdataPython |
6550376 | #<NAME>
#Codewars : @Kunalpod
#Problem name: Simple Pig Latin
#Problem level: 5 kyu
def pig_it(text):
    """Translate *text* into Pig Latin, leaving punctuation tokens untouched.

    For each whitespace-separated word, the first letter is moved to the end
    and 'ay' is appended; tokens that are not purely alphabetic (e.g. '!' or
    '?') are kept as-is. Fixes the original's IndexError on empty input and
    its mangling of punctuation that is not at the end of the sentence.

    >>> pig_it('Pig latin is cool')
    'igPay atinlay siay oolcay'
    """
    words = []
    for word in text.split():
        if word.isalpha():
            words.append(word[1:] + word[0] + 'ay')
        else:
            words.append(word)
    return ' '.join(words)
| StarcoderdataPython |
11372841 | <filename>bowtie/tests/test_layout.py
"""Test layout functionality."""
# pylint: disable=redefined-outer-name,protected-access
import pytest
from bowtie import App
from bowtie.control import Button
from bowtie.exceptions import GridIndexError, NoUnusedCellsError, SpanOverlapError
def check_all_cells_used(view):
"""Check if all cells are used."""
with pytest.raises(NoUnusedCellsError):
view._available_cell()
def count_used_cells(view):
"""Count number of used cells."""
return sum(len(list(x.cells)) for x in view._spans.keys())
@pytest.fixture(scope='module')
def buttons():
    """Four buttons."""
    # module-scoped: the same four Button components are shared by all tests.
    return [Button() for _ in range(4)]
def test_add_list(buttons):
    """Append button to existing cell."""
    app = App()
    app[0, 0] = buttons[0]
    # ``+=`` on an already-populated cell should append, not replace.
    app[0, 0] += buttons[1]
def test_set_tuple(buttons):
    """Set tuple of components to cell."""
    # A tuple on the right-hand side places both components in one cell.
    app = App()
    app[0, 0] = buttons[0], buttons[1]
def test_set_list(buttons):
    """Set list of components to cell."""
    # A list should behave exactly like a tuple assignment.
    app = App()
    app[0, 0] = [buttons[0], buttons[1]]
def test_append_no_init(buttons):
    """Append button to cell without component."""
    # ``+=`` needs an existing entry to extend; an empty cell raises KeyError.
    app = App()
    with pytest.raises(KeyError):
        app[0, 0] += buttons[0]
def test_append_to_partial(buttons):
    """Append button to partial cell."""
    app = App(columns=2)
    # ``app[0]`` spans the whole row, so the single cell (0, 0) is not a key
    # on its own...
    app[0] = buttons[0]
    with pytest.raises(KeyError):
        app[0, 0] += buttons[1]
    # ...and assigning to it would overlap the existing row-wide span.
    with pytest.raises(SpanOverlapError):
        app[0, 0] = buttons[1]
def test_append_to_partial_superset(buttons):
    """Append button to partial cell."""
    app = App(columns=2)
    app[0, 0] = buttons[0]
    # Appending to the whole row must fail: it is a superset of the span
    # actually registered at (0, 0).
    with pytest.raises(Exception):
        app[0] += buttons[1]
def test_all_used(buttons):
    """Test all cells are used."""
    # Filling via add() should consume every cell of a 2x2 grid.
    app = App(rows=2, columns=2)
    for i in range(4):
        app.add(buttons[i])
    check_all_cells_used(app._root)
    # Filling via explicit indices should be equivalent.
    app = App(rows=2, columns=2)
    app[0, 0] = buttons[0]
    app[0, 1] = buttons[1]
    app[1, 0] = buttons[2]
    app[1, 1] = buttons[3]
    check_all_cells_used(app._root)
    # With no free cell left, add() appends to the last-used cell.
    app.add(buttons[2])
    assert len(app[1, 1]) == 2
    # Same behaviour when row 0 was filled with a single row-wide span.
    app = App(rows=2, columns=2)
    app[0] = buttons[0]
    app[1, 0] = buttons[2]
    app[1, 1] = buttons[3]
    check_all_cells_used(app._root)
    app.add(buttons[2])
    assert len(app[1, 1]) == 2
def test_used(buttons):
    """Test cell usage checks."""
    app = App(rows=2, columns=2)
    for i in range(3):
        app.add(buttons[i])
    # Re-assigning used cells must be allowed; the spellings below (plain
    # index, row slice, column slice) all address single cells.
    app[0, 0] = buttons[3]
    app[0:1, 1] = buttons[3]
    app[1, 0:1] = buttons[3]
    app[1, 1] = buttons[3]
def test_grid_index(buttons):
    """Test grid indexing checks."""
    app = App(rows=2, columns=2)
    # Negative indices wrap like Python sequences: -1 and -2 are valid on a
    # 2-row grid, -5 and 2 are out of range.
    with pytest.raises(GridIndexError):
        app[-5] = buttons[0]
    app[-1] = buttons[0]
    with pytest.raises(GridIndexError):
        app[2] = buttons[0]
    app[1] = buttons[0]
def test_getitem(buttons):
    """Test grid indexing checks."""
    but = buttons[0]
    app = App(rows=2, columns=2)
    # Invalid keys: out-of-range index, 3-tuple, non-integer indices and
    # slices, and slices with a step.
    with pytest.raises(GridIndexError):
        app[3] = but
    with pytest.raises(GridIndexError):
        app[1, 2, 3] = but
    with pytest.raises(GridIndexError):
        # pylint: disable=invalid-slice-index
        app['a':3] = but
    with pytest.raises(GridIndexError):
        app['a'] = but
    with pytest.raises(GridIndexError):
        app[3, 'a'] = but
    with pytest.raises(GridIndexError):
        app['a', 3] = but
    with pytest.raises(GridIndexError):
        app[0, 0::2] = but
    with pytest.raises(GridIndexError):
        app[0, 1:-1:-1] = but
    # A 1-tuple selects the whole row (2 cells).
    app[1,] = but
    assert count_used_cells(app._root) == 2
    app[0, :] = but
    assert count_used_cells(app._root) == 4
    # Explicit slices: single cell, then the remaining L-shaped region.
    app = App(rows=2, columns=2)
    app[0:1, 1:2] = but
    assert count_used_cells(app._root) == 1
    app[1:, 0:] = but
    assert count_used_cells(app._root) == 3
    # Negative row index plus column slice.
    app = App(rows=2, columns=2)
    app[-1, :2] = but
    assert count_used_cells(app._root) == 2
    app = App(rows=1, columns=2)
    app[0, :2] = but
    assert count_used_cells(app._root) == 2
    app = App(rows=1, columns=2)
    app[0] = but
    assert count_used_cells(app._root) == 2
    # Row slice covering the whole grid.
    app = App(rows=2, columns=2)
    app[:2] = but
    assert count_used_cells(app._root) == 4
| StarcoderdataPython |
8086111 | import geohash
class HashConstants(object):
    """Lookup table tying geohash precision to approximate cell size."""
    # Maps the number of significant bits of a 64-bit integer geohash to the
    # approximate radius of the resulting cell -- fewer bits means a coarser,
    # larger cell.  NOTE(review): the unit appears to be metres; confirm.
    BIT_RADIUS_MAP = {
        52: 0.5971,
        50: 1.1943,
        48: 2.3889,
        46: 4.7774,
        44: 9.5547,
        42: 19.1095,
        40: 38.2189,
        38: 76.4378,
        36: 152.8757,
        34: 305.751,
        32: 611.5028,
        30: 1223.0056,
        28: 2446.0112,
        26: 4892.0224,
        24: 9784.0449,
        22: 19568.0898,
        20: 39136.1797,
    }
class LocationManager(object):
    """Geo-indexed message store backed by a Redis sorted set.

    Messages live in the sorted set ``msg`` with their 64-bit integer geohash
    as the score, so a radius query becomes a handful of ZRANGEBYSCORE scans
    over the cell containing the query point and its eight neighbours.
    """

    def __init__(self, db):
        self.redis = db
        # Invert bits -> radius into radius -> bits for radius lookups.
        # (dict.items() rather than the Python-2-only iteritems().)
        self.RADIUS_BIT_MAP = {v: k for k, v in
                               HashConstants.BIT_RADIUS_MAP.items()}

    def _encode(self, lat, lon):
        # 64-bit integer geohash of the coordinate.
        return geohash.encode_uint64(lat, lon)

    def _expand(self, ui64, depth):
        return geohash.expand_uint64(ui64, depth)

    def _decode(self, key):
        return geohash.decode_uint64(key)

    def add(self, lat, lon, msg):
        """Store *msg* at the given coordinate."""
        self.redis.zadd('msg', self._encode(lat, lon), msg)

    def rem(self, lat, lon, msg):
        """Remove *msg* from the store.

        Bug fix: ZREM removes by *member*, not by score, so the message
        itself is required.  The original body referenced an undefined
        ``msg`` name (always NameError) and also passed the geohash score as
        if it were a member.  ``lat``/``lon`` are kept in the signature for
        symmetry with add() although they are no longer needed.
        """
        self.redis.zrem('msg', msg)

    def _search(self, lat, lon, start=0, num=0, radius=9.5547):
        """Return (msg, score) pairs within roughly *radius* of the point.

        ``start``/``num`` are accepted but currently unused.  *radius* is
        snapped to the nearest precision available in RADIUS_BIT_MAP.
        """
        rbm = self.RADIUS_BIT_MAP
        if radius in rbm:
            depth = rbm[radius]
        else:
            depth = rbm[min(rbm.keys(), key=lambda k: abs(k - radius))]
        # Truncate the full-precision hash to the cell at this depth.
        cell = self._encode(lat, lon) >> (64 - depth)
        results = []
        for lower in self._get_neighbors(cell, depth):
            upper = lower + 1
            # NOTE(review): a cell hash of 0 is silently skipped here;
            # presumably acceptable for the area served -- confirm.
            if upper and lower:
                lo_score = lower << (64 - depth)
                hi_score = upper << (64 - depth)
                results.extend(self.redis.zrangebyscore(
                    'msg', lo_score, hi_score, withscores=True))
        return results

    def _get_neighbors(self, _hash, depth):
        """Return the 8 neighbouring cells of *_hash* plus *_hash* itself."""
        n = self._move(_hash, 0, 1, depth)
        e = self._move(_hash, 1, 0, depth)
        s = self._move(_hash, 0, -1, depth)
        w = self._move(_hash, -1, 0, depth)
        nw = self._move(_hash, -1, 1, depth)
        ne = self._move(_hash, 1, 1, depth)
        se = self._move(_hash, 1, -1, depth)
        sw = self._move(_hash, -1, -1, depth)
        return [n, e, s, w, ne, nw, se, sw, _hash]

    def _move(self, _hash, x, y, depth):
        """Shift the cell hash by (x, y) steps on the interleaved grid."""
        if x and y:
            t = self._movex(_hash, x, depth)
            return self._movey(t, y, depth)
        elif x:
            return self._movex(_hash, x, depth)
        elif y:
            return self._movey(_hash, y, depth)

    def _movex(self, _hash, d, depth):
        """Step one cell east (d>0) or west (d<0) at the given depth.

        Geohash bits interleave longitude (odd mask 0xaaaa...) and latitude
        (even mask 0x5555...); the trick below increments only the
        longitude bits, letting the carry ripple within that bit set.
        """
        if not d:
            return 0
        x = _hash & 0xaaaaaaaaaaaaaaaa
        y = _hash & 0x5555555555555555
        zz = 0x5555555555555555 >> (64 - depth)
        if d > 0:
            x += zz + 1
        else:
            x = x | zz
            x -= zz + 1
        # Wrap around at the grid edge for this depth.
        x &= 0xaaaaaaaaaaaaaaaa >> (64 - depth)
        return x | y

    def _movey(self, _hash, d, depth):
        """Step one cell north (d>0) or south (d<0); mirror of _movex."""
        if not d:
            return 0
        x = _hash & 0xaaaaaaaaaaaaaaaa
        y = _hash & 0x5555555555555555
        zz = 0xaaaaaaaaaaaaaaaa >> (64 - depth)
        if d > 0:
            y += zz + 1
        else:
            y = y | zz
            y -= zz + 1
        y &= 0x5555555555555555 >> (64 - depth)
        return x | y
def test():
    # Manual smoke test (Python 2 syntax): seed three Chicago-area points
    # into a fake redis and query at several radii.
    from app.utilities.redis import TestRedis
    redis = TestRedis()
    m = LocationManager(redis)
    m.add(41.8781, -87.6298, 'Chicago')
    m.add(41.9436, -87.6584, 'Lakeview')
    m.add(41.7959, -87.9756, 'Westmont')
    # Wide radius should find everything; tighter radii progressively fewer.
    print m._search(41.95, -87.65, 0, 3, radius=20000)
    print m._search(41.95, -87.65, 0, 3, radius=1000)
    print m._search(41.95, -87.65, 0, 3, radius=100)
    print m._search(41.886, -87.628, 0, 3, radius=10000)
    print m._search(41.794796, -87.974327, 0, 3, radius=1000)
    print m._search(41.9434, -87.657849, 0, 3, radius=300)
    print m._search(41.9413, -87.654270, 0, 3, radius=20)
| StarcoderdataPython |
1887209 | ##############################################################################
#
# Copyright (c) 2004 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
r"""Regular expression pattern normalizing output checker
The pattern-normalizing output checker extends the default output checker with
an option to normalize the expected and actual output.
You specify a sequence of patterns and replacements. The replacements are
applied to the expected and actual outputs before calling the default outputs
checker. Let's look at an example. In this example, we have some times and
addresses:
>>> want = '''\
... <object object at 0xb7f14438>
... completed in 1.234 seconds.
... <BLANKLINE>
... <object object at 0xb7f14440>
... completed in 123.234 seconds.
... <BLANKLINE>
... <object object at 0xb7f14448>
... completed in .234 seconds.
... <BLANKLINE>
... <object object at 0xb7f14450>
... completed in 1.234 seconds.
... <BLANKLINE>
... '''
>>> got = '''\
... <object object at 0xb7f14458>
... completed in 1.235 seconds.
...
... <object object at 0xb7f14460>
... completed in 123.233 seconds.
...
... <object object at 0xb7f14468>
... completed in .231 seconds.
...
... <object object at 0xb7f14470>
... completed in 1.23 seconds.
...
... '''
We may wish to consider these two strings to match, even though they differ in
actual addresses and times. The default output checker will consider them
different:
>>> doctest.OutputChecker().check_output(want, got, 0)
False
We'll use the RENormalizing to normalize both the wanted and gotten strings to
ignore differences in times and addresses:
>>> import re
>>> checker = RENormalizing([
... (re.compile('[0-9]*[.][0-9]* seconds'), '<SOME NUMBER OF> seconds'),
... (re.compile('at 0x[0-9a-f]+'), 'at <SOME ADDRESS>'),
... ])
>>> checker.check_output(want, got, 0)
True
Usual OutputChecker options work as expected:
>>> want_ellided = '''\
... <object object at 0xb7f14438>
... completed in 1.234 seconds.
... ...
... <object object at 0xb7f14450>
... completed in 1.234 seconds.
... <BLANKLINE>
... '''
>>> checker.check_output(want_ellided, got, 0)
False
>>> checker.check_output(want_ellided, got, doctest.ELLIPSIS)
True
When we get differences, we output them with normalized text:
>>> source = '''\
... >>> do_something()
... <object object at 0xb7f14438>
... completed in 1.234 seconds.
... ...
... <object object at 0xb7f14450>
... completed in 1.234 seconds.
... <BLANKLINE>
... '''
>>> example = doctest.Example(source, want_ellided)
>>> print checker.output_difference(example, got, 0)
Expected:
<object object at <SOME ADDRESS>>
completed in <SOME NUMBER OF> seconds.
...
<object object at <SOME ADDRESS>>
completed in <SOME NUMBER OF> seconds.
<BLANKLINE>
Got:
<object object at <SOME ADDRESS>>
completed in <SOME NUMBER OF> seconds.
<BLANKLINE>
<object object at <SOME ADDRESS>>
completed in <SOME NUMBER OF> seconds.
<BLANKLINE>
<object object at <SOME ADDRESS>>
completed in <SOME NUMBER OF> seconds.
<BLANKLINE>
<object object at <SOME ADDRESS>>
completed in <SOME NUMBER OF> seconds.
<BLANKLINE>
<BLANKLINE>
>>> print checker.output_difference(example, got,
... doctest.REPORT_NDIFF)
Differences (ndiff with -expected +actual):
- <object object at <SOME ADDRESS>>
- completed in <SOME NUMBER OF> seconds.
- ...
<object object at <SOME ADDRESS>>
completed in <SOME NUMBER OF> seconds.
<BLANKLINE>
+ <object object at <SOME ADDRESS>>
+ completed in <SOME NUMBER OF> seconds.
+ <BLANKLINE>
+ <object object at <SOME ADDRESS>>
+ completed in <SOME NUMBER OF> seconds.
+ <BLANKLINE>
+ <object object at <SOME ADDRESS>>
+ completed in <SOME NUMBER OF> seconds.
+ <BLANKLINE>
<BLANKLINE>
If the wanted text is empty, however, we don't transform the actual output.
This is usful when writing tests. We leave the expected output empty, run
the test, and use the actual output as expected, after reviewing it.
>>> source = '''\
... >>> do_something()
... '''
>>> example = doctest.Example(source, '\n')
>>> print checker.output_difference(example, got, 0)
Expected:
<BLANKLINE>
Got:
<object object at 0xb7f14458>
completed in 1.235 seconds.
<BLANKLINE>
<object object at 0xb7f14460>
completed in 123.233 seconds.
<BLANKLINE>
<object object at 0xb7f14468>
completed in .231 seconds.
<BLANKLINE>
<object object at 0xb7f14470>
completed in 1.23 seconds.
<BLANKLINE>
<BLANKLINE>
$Id: renormalizing.py 66267 2006-03-31 09:40:54Z BjornT $
"""
import doctest
class RENormalizing(doctest.OutputChecker):
    """Pattern-normalizing output checker.

    Wraps the standard ``doctest.OutputChecker`` and applies every
    ``(compiled_regex, replacement)`` pair from *patterns* to both the
    expected and the actual output before comparing them.
    """

    def __init__(self, patterns):
        self.patterns = patterns

    def check_output(self, want, got, optionflags):
        # Fast path: textually identical output needs no normalization.
        if want == got:
            return True
        for regexp, replacement in self.patterns:
            want = regexp.sub(replacement, want)
            got = regexp.sub(replacement, got)
        return doctest.OutputChecker.check_output(
            self, want, got, optionflags)

    def output_difference(self, example, got, optionflags):
        raw_want = example.want
        # An empty expectation usually means the test is still being
        # written, so show the raw output for the author to paste back in.
        if not raw_want.strip():
            return doctest.OutputChecker.output_difference(
                self, example, got, optionflags)
        # The base class reads the expectation from ``example.want`` and
        # offers no transformation hook, so normalize a copy, splice it in
        # temporarily, and restore the original afterwards.
        want = raw_want
        for regexp, replacement in self.patterns:
            want = regexp.sub(replacement, want)
            got = regexp.sub(replacement, got)
        example.want = want
        result = doctest.OutputChecker.output_difference(
            self, example, got, optionflags)
        example.want = raw_want
        return result
| StarcoderdataPython |
9732433 | <gh_stars>0
import os
def modis_operation(list_folder, source, path):
    """Clip every MODIS mosaic GeoTIFF under *path* to the region shapefile.

    *source* is a directory listing; the unique date stamps found in its
    ``.tif`` entries are appended to *list_folder* (mutated in place) and one
    gdalwarp clip is run per stamp.
    """
    print(path)
    # Pick file-name prefixes according to which layer directory we are in.
    if path.endswith("Layer1/"):
        clipName = "clip1_"
        mosaicName = "Mosaic1_"
    if path.endswith("Layer5/"):
        clipName = "clip5_"
        mosaicName = "Mosaic5_"
    # Collect the unique date stamps (characters 8-16 of each .tif name).
    for entry in source:
        stamp = entry[8:16]
        if entry.endswith(".tif") and stamp not in list_folder:
            list_folder.append(stamp)
    for stamp in list_folder:
        os.system("gdalwarp -co COMPRESS=DEFLATE -dstalpha -cutline alla_lan.shp -crop_to_cutline " + path + mosaicName + stamp + "_.tif " + path + clipName + stamp + ".tif")
        #os.system("rm " + path + "/Mosaic*")
# Clip the "Layer1" and "Layer5" products located next to this script.
modis_operation([], os.listdir("./Layer1"), os.path.dirname(os.path.realpath(__file__))+"/Layer1/")
modis_operation([], os.listdir("./Layer5"), os.path.dirname(os.path.realpath(__file__))+"/Layer5/")
| StarcoderdataPython |
from time import sleep
# Ask the driver how fast they were going (km/h).
velocidade = float(input("Qual a velocidade vocรช estรก percorrendo? "))
print("Calculando....")
# Purely cosmetic pause before showing the verdict.
sleep(3)
# Above the 80 km/h limit: fine of R$ 7 per km/h of excess.
if velocidade > 80:
    multa = (velocidade-80) * 7
    print("\033[:31m MULTADO!!\033[m Vocรช ultrapassou o limite de 80km/h deverรก pagar a multa de \033[:31mR${:.2f}\033[m".format(multa))
else:
    print("\033[:33m PARABENS!!! \033[m vocรช andou dentro do limite estabelecido")
print("="*20, "FIM DO CONTATADOR", "="*20)
8093621 | import os
from base64 import b32encode
from flask_login import LoginManager, UserMixin
import pyotp
# Single shared Flask-Login manager for the app.
loginmanager = LoginManager()
# TOTP secret comes from the environment; raises KeyError if unset.
secret = os.environ["TOTP_SECRET"]
# pyotp expects a base32-encoded secret.
totp = pyotp.TOTP( b32encode( bytes(secret, "utf8") ) )
# Drop the plaintext secret from module globals once the TOTP is built.
del(secret)
class OnlyUser(UserMixin):
    # The application has exactly one account; Flask-Login still needs a
    # user object exposing an ``id``.
    def __init__(self):
        self.id = "0"
@loginmanager.user_loader
def load_user(id):
    """Flask-Login user loader: the app has exactly one user, with id "0"."""
    return OnlyUser() if id == "0" else None
| StarcoderdataPython |
339075 | <gh_stars>1-10
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from cinder import context as cinder_context
import cinder.exception
from cinder import objects
objects.register_all()
from cinder.db.sqlalchemy import api as cinder_db
from cinder import rpc
from cinder.transfer import api as transfer_api
from oslo_config import cfg
from oschown import base
from oschown import exception
# Not sure why but this has to happen at import time where our config
# is set properly
cfg.CONF([], project='cinder')
CONF = cfg.CONF
# Messaging must be initialised before the transfer API can be used.
rpc.init(CONF)
TRANSFER_API = transfer_api.API()
LOG = logging.getLogger(__name__)
class CinderResource(base.ChownableResource):
    """A single Cinder volume that can be transferred between projects."""

    def __init__(self, volume):
        self._volume = volume
        self._admin_ctx = cinder_context.get_admin_context()
        self._deps = []
        self._collect_instances()

    def _collect_instances(self):
        # An attached volume can only move together with the instance(s) it
        # is attached to, so record those instances as dependencies.
        if self._volume.volume_attachment:
            for attachment in self._volume.volume_attachment:
                LOG.info('Cinder volume %s requires attached instance %s' % (
                    self._volume.id, attachment.instance_uuid))
                self._deps.append('nova:%s' % attachment.instance_uuid)

    @property
    def dependencies(self):
        return self._deps

    @property
    def identifier(self):
        return 'cinder:%s' % self._volume['id']

    def _set_vol_state(self, state):
        # Bug fix: the original ignored ``state`` and always wrote
        # 'available', so chown() could never restore the volume's original
        # status afterwards.
        cinder_db.volume_update(self._admin_ctx, self._volume['id'],
                                {'status': state})

    def chown(self, context):
        orig_state = self._volume['status']
        # NOTE(danms): The transfer API trivially blocks the operation
        # on in-use volumes. So, we cheat here and hack the status so
        # that we can push it through and then reset it when we are done.
        # It would be nice if we could get an ignore_state=True flag to the
        # transfer api create method for this usage, but not sure if the
        # cinder people would be up for that.
        # FIXME(danms): Log/print a warning here if we are operating
        # on an in-use volume.
        self._set_vol_state('available')
        try:
            transfer_spec = TRANSFER_API.create(self._admin_ctx,
                                                self._volume['id'],
                                                'oschown')
            user_ctx = cinder_context.RequestContext(
                context.target_user_id,
                context.target_project_id)
            TRANSFER_API.accept(user_ctx,
                                transfer_spec['id'],
                                transfer_spec['auth_key'])
        finally:
            # Restore whatever status the volume had before the transfer.
            self._set_vol_state(orig_state)
class CinderProject(base.ChownableProject):
    """Chown plugin covering Cinder volumes."""

    def __init__(self):
        super(CinderProject, self).__init__()

    @property
    def name(self):
        return 'cinder'

    def collect_resource_by_id(self, context, resource_id):
        """Look up one volume by id and register it as a chownable resource."""
        admin_ctx = cinder_context.get_admin_context()
        try:
            volume = objects.Volume.get_by_id(admin_ctx, resource_id)
        except cinder.exception.VolumeNotFound:
            raise exception.UnableToResolveResources(
                'Cinder volume %s not found' % resource_id)
        found = CinderResource(volume)
        self._resources.append(found)
        return found
| StarcoderdataPython |
1774214 | # Advent Of Code 2016, day 1, part 2
# http://adventofcode.com/2016/day/1
# solution by ByteCommander, 2016-12-01
# Puzzle input: one line of comma-separated turn+distance steps, e.g. "R2, L3".
data = open("inputs/aoc2016_1.txt").read()
dirs = [(0, 1), (1, 0), (0, -1),
        (-1, 0)]  # dirs[0]: North, [1]: East, ... - (x,y) steps
# Parse each step into [turn, distance]; turn is -1 for left, +1 for right.
instr = [[-1 if s[0] == "L" else 1, int(s[1:])] for s in data.split(", ")]
cd, x, y = 0, 0, 0  # cd indexes ``dirs``; start facing North at the origin
track = [(x, y)]
for d, l in instr:
    cd = (cd + d) % 4
    # Walk one unit at a time so every intermediate position is recorded --
    # part 2 asks for the first position visited twice.
    for i in range(l):
        x += dirs[cd][0]
        y += dirs[cd][1]
        if (x, y) in track:
            print(
                "Answer: first coordinates visited twice are {} steps away ({}|{})"
                .format(abs(x) + abs(y), y, x))
            exit()
        track.append((x, y))
| StarcoderdataPython |
4871560 | <filename>utils/converter.py
# -*- coding: utf-8 -*-
# Created on Sun Mar 15 00:51:53 2020
# @author: arthurd
import os
def open_files(dirpath, ext="conllu"):
    """Get the path of all files in a directory.

    Parameters
    ----------
    dirpath : string
        Name or path to the directory where the files you want to
        open are saved.
    ext : string, optional
        The files extension you want to open. The default is "conllu".

    Returns
    -------
    list
        A list of the path of all files saved in the directory.

    Raises
    ------
    FileNotFoundError
        If *dirpath* does not exist.

    Example
    -------
        >>> dir_name = "path/to/your/directory"
        >>> files = open_files(dir_name, ext="geojson")
        >>> files
            ['fr_gsd-ud-dev.conllu',
             'fr_gsd-ud-train.conllu',
             'fr_gsd-ud-test.conllu']
    """
    try:
        entries = os.listdir(dirpath)
    except FileNotFoundError as err:
        # Bug fix: the original did ``raise e('...')``, which *calls* the
        # exception instance -- exceptions are not callable, so callers got
        # a TypeError instead of the intended error.
        raise FileNotFoundError(
            'the path to the directory was not found: {}'.format(dirpath)
        ) from err
    files_list = []
    for name in entries:
        if name.endswith(ext):
            files_list.append(os.path.join(dirpath, name))
    return files_list
if __name__ == "__main__":

    from conllu import conllu2txt

    # Load all conllu files in directory
    print("Getting the path to conllu files...")
    dirpath = 'conversion'
    files = open_files(dirpath)
    print("Done.")

    # Convert the conllu files to txt, one output file per input.
    print("\nConverting...")
    for filename in files:
        # Show only the base name, not the whole path.
        print("\tConverting {}".format(filename.split(os.sep)[-1]))
        conllu2txt(filename)
    print("Done.")
| StarcoderdataPython |
3423740 | <gh_stars>1-10
# Read three integers and report whether they are a permutation of 5, 5, 7.
A, B, C = map(int, input().split())
print('YES' if sorted([A, B, C]) == [5, 5, 7] else 'NO')
| StarcoderdataPython |
# remember to call with --py-files person.py to include this one
from person import Person
from pyspark import SparkContext, SparkConf
sc = SparkContext()
# textfile gives us lines, now we call Person's parse method
people=sc.textFile("../../data/people.txt").map(Person().parse)
# find number of males and number of females
# first get tuples like: ('M',1),('F',1) ... then reduce by key
people.map(lambda t: (t.gender,1)).reduceByKey(lambda x,y:x+y).collect()
# now you do number of people per programming language
# let's do youngest person per gender: map to (gender, age) and keep the min.
people.map(lambda t: (t.gender,t.age )).reduceByKey(lambda x,y:min(x,y)).collect()
| StarcoderdataPython |
399686 | <filename>tests/test_inflection_es.py<gh_stars>100-1000
# -*- fundamental -*-
#
# Tests for parsing inflection tables
#
# Copyright (c) 2021 <NAME>. See file LICENSE and https://ylonen.org
import unittest
import json
from wikitextprocessor import Wtp
from wiktextract import WiktionaryConfig
from wiktextract.inflection import parse_inflection_section
class InflTests(unittest.TestCase):
    def setUp(self):
        # Fresh wikitext-processor context and config for every test, pointed
        # at a dummy page/section; large maxDiff so failing table comparisons
        # are shown in full.
        self.maxDiff = 100000
        self.ctx = Wtp()
        self.config = WiktionaryConfig()
        self.ctx.start_page("testpage")
        self.ctx.start_section("English")
    def xinfl(self, word, lang, pos, section, text):
        """Runs a single inflection table parsing test, and returns ``data``."""
        # Re-point the shared parser context at the word under test before
        # parsing the wikitext of its inflection section.
        self.ctx.start_page(word)
        self.ctx.start_section(lang)
        self.ctx.start_subsection(pos)
        tree = self.ctx.parse(text)
        data = {}
        # parse_inflection_section() populates ``data`` in place with the
        # parsed inflection information.
        parse_inflection_section(self.config, self.ctx, data, word, lang, pos,
                                 section, tree)
        return data
def test_Spanish_verb1(self):
ret = self.xinfl("interdecir", "Spanish", "verb", "Conjugation", """
<div class="NavFrame">
<div class="NavHead" align="center"> Conjugation of <i class="Latn+mention" lang="es">[[interdecir#Spanish|interdecir]]</i> (irregular; e-i alternation) (See [[Appendix:Spanish verbs]])</div>
<div class="NavContent">
{| style="background%3A%23F9F9F9%3Btext-align%3Acenter%3Bwidth%3A100%25"
|-
! colspan="3" style="background%3A%23e2e4c0" | <span title="infinitivo">infinitive</span>
| colspan="5" | <span class="Latn+form-of+lang-es+inf-form-of++++origin-interdecir+++" lang="es">[[interdecir#Spanish|interdecir]]</span>
|-
! colspan="3" style="background%3A%23e2e4c0" | <span title="gerundio">gerund</span>
| colspan="5" | <span class="Latn+form-of+lang-es+ger-form-of++++origin-interdecir+++" lang="es">[[interdiciendo#Spanish|interdiciendo]]</span>
|-
! rowspan="3" colspan="2" style="background%3A%23e2e4c0" | <span title="participio+%28pasado%29">past participle</span>
| colspan="2" style="background%3A%23e2e4c0" |
! colspan="2" style="background%3A%23e2e4c0" | <span title="masculino">masculine</span>
! colspan="2" style="background%3A%23e2e4c0" | <span title="femenino">feminine</span>
|-
! colspan="2" style="background%3A%23e2e4c0" | singular
| colspan="2" | <span class="Latn+form-of+lang-es+m%7Cs%7Cpast%7Cpart-form-of++++origin-interdecir+++" lang="es">[[interdicho#Spanish|interdicho]]</span>
| colspan="2" | <span class="Latn+form-of+lang-es+f%7Cs%7Cpast%7Cpart-form-of++++origin-interdecir+++" lang="es">[[interdicha#Spanish|interdicha]]</span>
|-
! colspan="2" style="background%3A%23e2e4c0" | plural
| colspan="2" | <span class="Latn+form-of+lang-es+m%7Cp%7Cpast%7Cpart-form-of++++origin-interdecir+++" lang="es">[[interdichos#Spanish|interdichos]]</span>
| colspan="2" | <span class="Latn+form-of+lang-es+f%7Cp%7Cpast%7Cpart-form-of++++origin-interdecir+++" lang="es">[[interdichas#Spanish|interdichas]]</span>
|-
! colspan="2" rowspan="2" style="background%3A%23DEDEDE" |
! colspan="3" style="background%3A%23DEDEDE" | singular
! colspan="3" style="background%3A%23DEDEDE" | plural
|-
! style="background%3A%23DEDEDE" | 1st person
! style="background%3A%23DEDEDE" | 2nd person
! style="background%3A%23DEDEDE" | 3rd person
! style="background%3A%23DEDEDE" | 1st person
! style="background%3A%23DEDEDE" | 2nd person
! style="background%3A%23DEDEDE" | 3rd person
|-
! rowspan="6" style="background%3A%23c0cfe4" | <span title="indicativo">indicative</span>
! style="background%3A%23ECECEC%3Bwidth%3A12.5%25" |
! style="background%3A%23ECECEC%3Bwidth%3A12.5%25" | yo
! style="background%3A%23ECECEC%3Bwidth%3A12.5%25" | tรบ<br>vos
! style="background%3A%23ECECEC%3Bwidth%3A12.5%25" | รฉl/ella/ello<br>usted
! style="background%3A%23ECECEC%3Bwidth%3A12.5%25" | nosotros<br>nosotras
! style="background%3A%23ECECEC%3Bwidth%3A12.5%25" | vosotros<br>vosotras
! style="background%3A%23ECECEC%3Bwidth%3A12.5%25" | ellos/ellas<br>ustedes
|-
! style="height%3A3em%3Bbackground%3A%23ECECEC" | <span title="presente+de+indicativo">present</span>
| <span class="Latn+form-of+lang-es+1%7Cs%7Cpres%7Cind-form-of++++origin-interdecir+++" lang="es">[[interdigo#Spanish|interdigo]]</span>
| <span class="Latn+form-of+lang-es+2%7Cs%7Cpres%7Cind-form-of++++origin-interdecir+++" lang="es">[[interdices#Spanish|interdices]]</span><sup><sup>tรบ</sup></sup><br><span class="Latn+form-of+lang-es+2%7Cs%7Cvoseo%7Cpres%7Cind-form-of++++origin-interdecir+++" lang="es">[[interdecรญs#Spanish|interdecรญs]]</span><sup><sup>vos</sup></sup>
| <span class="Latn+form-of+lang-es+3%7Cs%7Cpres%7Cind-form-of++++origin-interdecir+++" lang="es">[[interdice#Spanish|interdice]]</span>
| <span class="Latn+form-of+lang-es+1%7Cp%7Cpres%7Cind-form-of++++origin-interdecir+++" lang="es">[[interdecimos#Spanish|interdecimos]]</span>
| <span class="Latn+form-of+lang-es+2%7Cp%7Cpres%7Cind-form-of++++origin-interdecir+++" lang="es">[[interdecรญs#Spanish|interdecรญs]]</span>
| <span class="Latn+form-of+lang-es+3%7Cp%7Cpres%7Cind-form-of++++origin-interdecir+++" lang="es">[[interdicen#Spanish|interdicen]]</span>
|-
! style="height%3A3em%3Bbackground%3A%23ECECEC" | <span title="pret%C3%A9rito+imperfecto+%28copr%C3%A9terito%29">imperfect</span>
| <span class="Latn+form-of+lang-es+1%7Cs%7Cimpf%7Cind-form-of++++origin-interdecir+++" lang="es">[[interdecรญa#Spanish|interdecรญa]]</span>
| <span class="Latn+form-of+lang-es+2%7Cs%7Cimpf%7Cind-form-of++++origin-interdecir+++" lang="es">[[interdecรญas#Spanish|interdecรญas]]</span>
| <span class="Latn+form-of+lang-es+3%7Cs%7Cimpf%7Cind-form-of++++origin-interdecir+++" lang="es">[[interdecรญa#Spanish|interdecรญa]]</span>
| <span class="Latn+form-of+lang-es+1%7Cp%7Cimpf%7Cind-form-of++++origin-interdecir+++" lang="es">[[interdecรญamos#Spanish|interdecรญamos]]</span>
| <span class="Latn+form-of+lang-es+2%7Cp%7Cimpf%7Cind-form-of++++origin-interdecir+++" lang="es">[[interdecรญais#Spanish|interdecรญais]]</span>
| <span class="Latn+form-of+lang-es+3%7Cp%7Cimpf%7Cind-form-of++++origin-interdecir+++" lang="es">[[interdecรญan#Spanish|interdecรญan]]</span>
|-
! style="height%3A3em%3Bbackground%3A%23ECECEC" | <span title="pret%C3%A9rito+perfecto+simple+%28pret%C3%A9rito+indefinido%29">preterite</span>
| <span class="Latn+form-of+lang-es+1%7Cs%7Cpret%7Cind-form-of++++origin-interdecir+++" lang="es">[[interdije#Spanish|interdije]]</span>
| <span class="Latn+form-of+lang-es+2%7Cs%7Cpret%7Cind-form-of++++origin-interdecir+++" lang="es">[[interdijiste#Spanish|interdijiste]]</span>
| <span class="Latn+form-of+lang-es+3%7Cs%7Cpret%7Cind-form-of++++origin-interdecir+++" lang="es">[[interdijo#Spanish|interdijo]]</span>
| <span class="Latn+form-of+lang-es+1%7Cp%7Cpret%7Cind-form-of++++origin-interdecir+++" lang="es">[[interdijimos#Spanish|interdijimos]]</span>
| <span class="Latn+form-of+lang-es+2%7Cp%7Cpret%7Cind-form-of++++origin-interdecir+++" lang="es">[[interdijisteis#Spanish|interdijisteis]]</span>
| <span class="Latn+form-of+lang-es+3%7Cp%7Cpret%7Cind-form-of++++origin-interdecir+++" lang="es">[[interdijeron#Spanish|interdijeron]]</span>
|-
! style="height%3A3em%3Bbackground%3A%23ECECEC" | <span title="futuro+simple+%28futuro+imperfecto%29">future</span>
| <span class="Latn+form-of+lang-es+1%7Cs%7Cfut%7Cind-form-of++++origin-interdecir+++" lang="es">[[interdirรฉ#Spanish|interdirรฉ]]</span>
| <span class="Latn+form-of+lang-es+2%7Cs%7Cfut%7Cind-form-of++++origin-interdecir+++" lang="es">[[interdirรกs#Spanish|interdirรกs]]</span>
| <span class="Latn+form-of+lang-es+3%7Cs%7Cfut%7Cind-form-of++++origin-interdecir+++" lang="es">[[interdirรก#Spanish|interdirรก]]</span>
| <span class="Latn+form-of+lang-es+1%7Cp%7Cfut%7Cind-form-of++++origin-interdecir+++" lang="es">[[interdiremos#Spanish|interdiremos]]</span>
| <span class="Latn+form-of+lang-es+2%7Cp%7Cfut%7Cind-form-of++++origin-interdecir+++" lang="es">[[interdirรฉis#Spanish|interdirรฉis]]</span>
| <span class="Latn+form-of+lang-es+3%7Cp%7Cfut%7Cind-form-of++++origin-interdecir+++" lang="es">[[interdirรกn#Spanish|interdirรกn]]</span>
|-
! style="height%3A3em%3Bbackground%3A%23ECECEC" | <span title="condicional+simple+%28pospret%C3%A9rito+de+modo+indicativo%29">conditional</span>
| <span class="Latn+form-of+lang-es+1%7Cs%7Ccond-form-of++++origin-interdecir+++" lang="es">[[interdirรญa#Spanish|interdirรญa]]</span>
| <span class="Latn+form-of+lang-es+2%7Cs%7Ccond-form-of++++origin-interdecir+++" lang="es">[[interdirรญas#Spanish|interdirรญas]]</span>
| <span class="Latn+form-of+lang-es+3%7Cs%7Ccond-form-of++++origin-interdecir+++" lang="es">[[interdirรญa#Spanish|interdirรญa]]</span>
| <span class="Latn+form-of+lang-es+1%7Cp%7Ccond-form-of++++origin-interdecir+++" lang="es">[[interdirรญamos#Spanish|interdirรญamos]]</span>
| <span class="Latn+form-of+lang-es+2%7Cp%7Ccond-form-of++++origin-interdecir+++" lang="es">[[interdirรญais#Spanish|interdirรญais]]</span>
| <span class="Latn+form-of+lang-es+3%7Cp%7Ccond-form-of++++origin-interdecir+++" lang="es">[[interdirรญan#Spanish|interdirรญan]]</span>
|-
! style="background%3A%23DEDEDE%3Bheight%3A.75em" colspan="8" |
|-
! rowspan="5" style="background%3A%23c0e4c0" | <span title="subjuntivo">subjunctive</span>
! style="background%3A%23ECECEC" |
! style="background%3A%23ECECEC" | yo
! style="background%3A%23ECECEC" | tรบ<br>vos
! style="background%3A%23ECECEC" | รฉl/ella/ello<br>usted
! style="background%3A%23ECECEC" | nosotros<br>nosotras
! style="background%3A%23ECECEC" | vosotros<br>vosotras
! style="background%3A%23ECECEC" | ellos/ellas<br>ustedes
|-
! style="height%3A3em%3Bbackground%3A%23ECECEC" | <span title="presente+de+subjuntivo">present</span>
| <span class="Latn+form-of+lang-es+1%7Cs%7Cpres%7Csub-form-of++++origin-interdecir+++" lang="es">[[interdiga#Spanish|interdiga]]</span>
| <span class="Latn+form-of+lang-es+2%7Cs%7Cpres%7Csub-form-of++++origin-interdecir+++" lang="es">[[interdigas#Spanish|interdigas]]</span><sup><sup>tรบ</sup></sup><br><span class="Latn+form-of+lang-es+2%7Cs%7Cvoseo%7Cpres%7Csub-form-of++++origin-interdecir+++" lang="es">[[interdigรกs#Spanish|interdigรกs]]</span><sup><sup>vos<sup style="color%3Ared">2</sup></sup></sup>
| <span class="Latn+form-of+lang-es+3%7Cs%7Cpres%7Csub-form-of++++origin-interdecir+++" lang="es">[[interdiga#Spanish|interdiga]]</span>
| <span class="Latn+form-of+lang-es+1%7Cp%7Cpres%7Csub-form-of++++origin-interdecir+++" lang="es">[[interdigamos#Spanish|interdigamos]]</span>
| <span class="Latn+form-of+lang-es+2%7Cp%7Cpres%7Csub-form-of++++origin-interdecir+++" lang="es">[[interdigรกis#Spanish|interdigรกis]]</span>
| <span class="Latn+form-of+lang-es+3%7Cp%7Cpres%7Csub-form-of++++origin-interdecir+++" lang="es">[[interdigan#Spanish|interdigan]]</span>
|-
! style="height%3A3em%3Bbackground%3A%23ECECEC" | <span title="pret%C3%A9rito+imperfecto+de+subjuntivo">imperfect</span><br>(ra)
| <span class="Latn+form-of+lang-es+1%7Cs%7Cimpf%7Csub-form-of++++origin-interdecir+++" lang="es">[[interdijera#Spanish|interdijera]]</span>
| <span class="Latn+form-of+lang-es+2%7Cs%7Cimpf%7Csub-form-of++++origin-interdecir+++" lang="es">[[interdijeras#Spanish|interdijeras]]</span>
| <span class="Latn+form-of+lang-es+3%7Cs%7Cimpf%7Csub-form-of++++origin-interdecir+++" lang="es">[[interdijera#Spanish|interdijera]]</span>
| <span class="Latn+form-of+lang-es+1%7Cp%7Cimpf%7Csub-form-of++++origin-interdecir+++" lang="es">[[interdijรฉramos#Spanish|interdijรฉramos]]</span>
| <span class="Latn+form-of+lang-es+2%7Cp%7Cimpf%7Csub-form-of++++origin-interdecir+++" lang="es">[[interdijerais#Spanish|interdijerais]]</span>
| <span class="Latn+form-of+lang-es+3%7Cp%7Cimpf%7Csub-form-of++++origin-interdecir+++" lang="es">[[interdijeran#Spanish|interdijeran]]</span>
|-
! style="height%3A3em%3Bbackground%3A%23ECECEC" | <span title="pret%C3%A9rito+imperfecto+de+subjuntivo">imperfect</span><br>(se)
| <span class="Latn+form-of+lang-es+1%7Cs%7Cimpf%7Csub-form-of++++origin-interdecir+++" lang="es">[[interdijese#Spanish|interdijese]]</span>
| <span class="Latn+form-of+lang-es+2%7Cs%7Cimpf%7Csub-form-of++++origin-interdecir+++" lang="es">[[interdijeses#Spanish|interdijeses]]</span>
| <span class="Latn+form-of+lang-es+3%7Cs%7Cimpf%7Csub-form-of++++origin-interdecir+++" lang="es">[[interdijese#Spanish|interdijese]]</span>
| <span class="Latn+form-of+lang-es+1%7Cp%7Cimpf%7Csub-form-of++++origin-interdecir+++" lang="es">[[interdijรฉsemos#Spanish|interdijรฉsemos]]</span>
| <span class="Latn+form-of+lang-es+2%7Cp%7Cimpf%7Csub-form-of++++origin-interdecir+++" lang="es">[[interdijeseis#Spanish|interdijeseis]]</span>
| <span class="Latn+form-of+lang-es+3%7Cp%7Cimpf%7Csub-form-of++++origin-interdecir+++" lang="es">[[interdijesen#Spanish|interdijesen]]</span>
|-
! style="height%3A3em%3Bbackground%3A%23ECECEC" | <span title="futuro+simple+de+subjuntivo+%28futuro+de+subjuntivo%29">future</span><sup style="color%3Ared">1</sup>
| <span class="Latn+form-of+lang-es+1%7Cs%7Cfut%7Csub-form-of++++origin-interdecir+++" lang="es">[[interdijere#Spanish|interdijere]]</span>
| <span class="Latn+form-of+lang-es+2%7Cs%7Cfut%7Csub-form-of++++origin-interdecir+++" lang="es">[[interdijeres#Spanish|interdijeres]]</span>
| <span class="Latn+form-of+lang-es+3%7Cs%7Cfut%7Csub-form-of++++origin-interdecir+++" lang="es">[[interdijere#Spanish|interdijere]]</span>
| <span class="Latn+form-of+lang-es+1%7Cp%7Cfut%7Csub-form-of++++origin-interdecir+++" lang="es">[[interdijรฉremos#Spanish|interdijรฉremos]]</span>
| <span class="Latn+form-of+lang-es+2%7Cp%7Cfut%7Csub-form-of++++origin-interdecir+++" lang="es">[[interdijereis#Spanish|interdijereis]]</span>
| <span class="Latn+form-of+lang-es+3%7Cp%7Cfut%7Csub-form-of++++origin-interdecir+++" lang="es">[[interdijeren#Spanish|interdijeren]]</span>
|-
! style="background%3A%23DEDEDE%3Bheight%3A.75em" colspan="8" |
|-
! rowspan="6" style="background%3A%23e4d4c0" | <span title="imperativo">imperative</span>
! style="background%3A%23ECECEC" |
! style="background%3A%23ECECEC" | โ
! style="background%3A%23ECECEC" | tรบ<br>vos
! style="background%3A%23ECECEC" | usted
! style="background%3A%23ECECEC" | nosotros<br>nosotras
! style="background%3A%23ECECEC" | vosotros<br>vosotras
! style="background%3A%23ECECEC" | ustedes
|-
! style="height%3A3em%3Bbackground%3A%23ECECEC" | <span title="imperativo+afirmativo">affirmative</span>
|
| <span class="Latn+form-of+lang-es+2%7Cs%7Cimp-form-of++++origin-interdecir+++" lang="es">[[interdice#Spanish|interdice]]</span><sup><sup>tรบ</sup></sup><br><span class="Latn+form-of+lang-es+2%7Cs%7Cvoseo%7Cimp-form-of++++origin-interdecir+++" lang="es">[[interdecรญ#Spanish|interdecรญ]]</span><sup><sup>vos</sup></sup>
| <span class="Latn+form-of+lang-es+3%7Cs%7Cimp-form-of++++origin-interdecir+++" lang="es">[[interdiga#Spanish|interdiga]]</span>
| <span class="Latn+form-of+lang-es+1%7Cp%7Cimp-form-of++++origin-interdecir+++" lang="es">[[interdigamos#Spanish|interdigamos]]</span>
| <span class="Latn+form-of+lang-es+2%7Cp%7Cimp-form-of++++origin-interdecir+++" lang="es">[[interdecid#Spanish|interdecid]]</span>
| <span class="Latn+form-of+lang-es+3%7Cp%7Cimp-form-of++++origin-interdecir+++" lang="es">[[interdigan#Spanish|interdigan]]</span>
|-
! style="height%3A3em%3Bbackground%3A%23ECECEC" | <span title="imperativo+negativo">negative</span>
|
| <span class="Latn" lang="es">[[no#Spanish|no]] [[interdigas#Spanish|interdigas]]</span>
| <span class="Latn" lang="es">[[no#Spanish|no]] [[interdiga#Spanish|interdiga]]</span>
| <span class="Latn" lang="es">[[no#Spanish|no]] [[interdigamos#Spanish|interdigamos]]</span>
| <span class="Latn" lang="es">[[no#Spanish|no]] [[interdigรกis#Spanish|interdigรกis]]</span>
| <span class="Latn" lang="es">[[no#Spanish|no]] [[interdigan#Spanish|interdigan]]</span>
|}
<div style="width%3A100%25%3Btext-align%3Aleft%3Bbackground%3A%23d9ebff">
<div style="display%3Ainline-block%3Btext-align%3Aleft%3Bpadding-left%3A1em%3Bpadding-right%3A1em">
<sup style="color%3A+red">1</sup>Mostly obsolete form, now mainly used in legal jargon.<br><sup style="color%3A+red">2</sup>Argentine and Uruguayan <i class="Latn+mention" lang="es">[[voseo#Spanish|voseo]]</i> prefers the <i class="Latn+mention" lang="es">[[tรบ#Spanish|tรบ]]</i> form for the present subjunctive.
</div></div>
</div></div>
[[Category:Spanish verbs ending in -ir|INTERDECIR]][[Category:Spanish irregular verbs|INTERDECIR]][[Category:Spanish verbs with e-i alternation|INTERDECIR]]
""")
expected = {
"forms": [
{
"form": "e-i alternation",
"source": "Conjugation title",
"tags": ["class"],
},
{
"form": "interdecir",
"source": "Conjugation",
"tags": [
"infinitive"
]
},
{
"form": "interdiciendo",
"source": "Conjugation",
"tags": [
"gerund"
]
},
{
"form": "interdicho",
"source": "Conjugation",
"tags": [
"masculine",
"participle",
"past",
"singular"
]
},
{
"form": "interdicha",
"source": "Conjugation",
"tags": [
"feminine",
"participle",
"past",
"singular"
]
},
{
"form": "interdichos",
"source": "Conjugation",
"tags": [
"masculine",
"participle",
"past",
"plural"
]
},
{
"form": "interdichas",
"source": "Conjugation",
"tags": [
"feminine",
"participle",
"past",
"plural"
]
},
{
"form": "interdigo",
"source": "Conjugation",
"tags": [
"first-person",
"indicative",
"present",
"singular"
]
},
{
"form": "interdices",
"source": "Conjugation",
"tags": [
"indicative",
"informal",
"present",
"second-person",
"singular"
]
},
{
"form": "interdecรญs",
"source": "Conjugation",
"tags": [
"indicative",
"informal",
"present",
"second-person",
"singular",
"vos-form"
]
},
{
"form": "interdice",
"source": "Conjugation",
"tags": [
"indicative",
"present",
"singular",
"third-person"
]
},
{
"form": "interdecimos",
"source": "Conjugation",
"tags": [
"first-person",
"indicative",
"plural",
"present"
]
},
{
"form": "interdecรญs",
"source": "Conjugation",
"tags": [
"indicative",
"plural",
"present",
"second-person"
]
},
{
"form": "interdicen",
"source": "Conjugation",
"tags": [
"indicative",
"plural",
"present",
"third-person"
]
},
{
"form": "interdecรญa",
"source": "Conjugation",
"tags": [
"first-person",
"imperfect",
"indicative",
"singular"
]
},
{
"form": "interdecรญas",
"source": "Conjugation",
"tags": [
"imperfect",
"indicative",
"second-person",
"singular"
]
},
{
"form": "interdecรญa",
"source": "Conjugation",
"tags": [
"imperfect",
"indicative",
"singular",
"third-person"
]
},
{
"form": "interdecรญamos",
"source": "Conjugation",
"tags": [
"first-person",
"imperfect",
"indicative",
"plural"
]
},
{
"form": "interdecรญais",
"source": "Conjugation",
"tags": [
"imperfect",
"indicative",
"plural",
"second-person"
]
},
{
"form": "interdecรญan",
"source": "Conjugation",
"tags": [
"imperfect",
"indicative",
"plural",
"third-person"
]
},
{
"form": "interdije",
"source": "Conjugation",
"tags": [
"first-person",
"indicative",
"preterite",
"singular"
]
},
{
"form": "interdijiste",
"source": "Conjugation",
"tags": [
"indicative",
"preterite",
"second-person",
"singular"
]
},
{
"form": "interdijo",
"source": "Conjugation",
"tags": [
"indicative",
"preterite",
"singular",
"third-person"
]
},
{
"form": "interdijimos",
"source": "Conjugation",
"tags": [
"first-person",
"indicative",
"plural",
"preterite"
]
},
{
"form": "interdijisteis",
"source": "Conjugation",
"tags": [
"indicative",
"plural",
"preterite",
"second-person"
]
},
{
"form": "interdijeron",
"source": "Conjugation",
"tags": [
"indicative",
"plural",
"preterite",
"third-person"
]
},
{
"form": "interdirรฉ",
"source": "Conjugation",
"tags": [
"first-person",
"future",
"indicative",
"singular"
]
},
{
"form": "interdirรกs",
"source": "Conjugation",
"tags": [
"future",
"indicative",
"second-person",
"singular"
]
},
{
"form": "interdirรก",
"source": "Conjugation",
"tags": [
"future",
"indicative",
"singular",
"third-person"
]
},
{
"form": "interdiremos",
"source": "Conjugation",
"tags": [
"first-person",
"future",
"indicative",
"plural"
]
},
{
"form": "interdirรฉis",
"source": "Conjugation",
"tags": [
"future",
"indicative",
"plural",
"second-person"
]
},
{
"form": "interdirรกn",
"source": "Conjugation",
"tags": [
"future",
"indicative",
"plural",
"third-person"
]
},
{
"form": "interdirรญa",
"source": "Conjugation",
"tags": [
"conditional",
"first-person",
"indicative",
"singular"
]
},
{
"form": "interdirรญas",
"source": "Conjugation",
"tags": [
"conditional",
"indicative",
"second-person",
"singular"
]
},
{
"form": "interdirรญa",
"source": "Conjugation",
"tags": [
"conditional",
"indicative",
"singular",
"third-person"
]
},
{
"form": "interdirรญamos",
"source": "Conjugation",
"tags": [
"conditional",
"first-person",
"indicative",
"plural"
]
},
{
"form": "interdirรญais",
"source": "Conjugation",
"tags": [
"conditional",
"indicative",
"plural",
"second-person"
]
},
{
"form": "interdirรญan",
"source": "Conjugation",
"tags": [
"conditional",
"indicative",
"plural",
"third-person"
]
},
{
"form": "interdiga",
"source": "Conjugation",
"tags": [
"first-person",
"present",
"singular",
"subjunctive"
]
},
{
"form": "interdigas",
"source": "Conjugation",
"tags": [
"informal",
"present",
"second-person",
"singular",
"subjunctive"
]
},
{
"form": "interdigรกs",
"source": "Conjugation",
"tags": [
"informal",
"present",
"second-person",
"singular",
"subjunctive",
"vos-form"
]
},
{
"form": "interdiga",
"source": "Conjugation",
"tags": [
"present",
"singular",
"subjunctive",
"third-person"
]
},
{
"form": "interdigamos",
"source": "Conjugation",
"tags": [
"first-person",
"plural",
"present",
"subjunctive"
]
},
{
"form": "interdigรกis",
"source": "Conjugation",
"tags": [
"plural",
"present",
"second-person",
"subjunctive"
]
},
{
"form": "interdigan",
"source": "Conjugation",
"tags": [
"plural",
"present",
"subjunctive",
"third-person"
]
},
{
"form": "interdijera",
"source": "Conjugation",
"tags": [
"first-person",
"imperfect",
"singular",
"subjunctive"
]
},
{
"form": "interdijeras",
"source": "Conjugation",
"tags": [
"imperfect",
"second-person",
"singular",
"subjunctive"
]
},
{
"form": "interdijera",
"source": "Conjugation",
"tags": [
"imperfect",
"singular",
"subjunctive",
"third-person"
]
},
{
"form": "interdijรฉramos",
"source": "Conjugation",
"tags": [
"first-person",
"imperfect",
"plural",
"subjunctive"
]
},
{
"form": "interdijerais",
"source": "Conjugation",
"tags": [
"imperfect",
"plural",
"second-person",
"subjunctive"
]
},
{
"form": "interdijeran",
"source": "Conjugation",
"tags": [
"imperfect",
"plural",
"subjunctive",
"third-person"
]
},
{
"form": "interdijese",
"source": "Conjugation",
"tags": [
"first-person",
"imperfect",
"imperfect-se",
"singular",
"subjunctive"
]
},
{
"form": "interdijeses",
"source": "Conjugation",
"tags": [
"imperfect",
"imperfect-se",
"second-person",
"singular",
"subjunctive"
]
},
{
"form": "interdijese",
"source": "Conjugation",
"tags": [
"imperfect",
"imperfect-se",
"singular",
"subjunctive",
"third-person"
]
},
{
"form": "interdijรฉsemos",
"source": "Conjugation",
"tags": [
"first-person",
"imperfect",
"imperfect-se",
"plural",
"subjunctive"
]
},
{
"form": "interdijeseis",
"source": "Conjugation",
"tags": [
"imperfect",
"imperfect-se",
"plural",
"second-person",
"subjunctive"
]
},
{
"form": "interdijesen",
"source": "Conjugation",
"tags": [
"imperfect",
"imperfect-se",
"plural",
"subjunctive",
"third-person"
]
},
{
"form": "interdijere",
"source": "Conjugation",
"tags": [
"first-person",
"future",
"singular",
"subjunctive"
]
},
{
"form": "interdijeres",
"source": "Conjugation",
"tags": [
"future",
"second-person",
"singular",
"subjunctive"
]
},
{
"form": "interdijere",
"source": "Conjugation",
"tags": [
"future",
"singular",
"subjunctive",
"third-person"
]
},
{
"form": "interdijรฉremos",
"source": "Conjugation",
"tags": [
"first-person",
"future",
"plural",
"subjunctive"
]
},
{
"form": "interdijereis",
"source": "Conjugation",
"tags": [
"future",
"plural",
"second-person",
"subjunctive"
]
},
{
"form": "interdijeren",
"source": "Conjugation",
"tags": [
"future",
"plural",
"subjunctive",
"third-person"
]
},
{
"form": "interdice",
"source": "Conjugation",
"tags": [
"imperative",
"informal",
"second-person",
"singular"
]
},
{
"form": "interdecรญ",
"source": "Conjugation",
"tags": [
"imperative",
"informal",
"second-person",
"singular",
"vos-form"
]
},
{
"form": "interdiga",
"source": "Conjugation",
"tags": [
"imperative",
"singular",
"third-person"
]
},
{
"form": "interdigamos",
"source": "Conjugation",
"tags": [
"first-person",
"imperative",
"plural"
]
},
{
"form": "interdecid",
"source": "Conjugation",
"tags": [
"imperative",
"plural",
"second-person"
]
},
{
"form": "interdigan",
"source": "Conjugation",
"tags": [
"imperative",
"plural",
"third-person"
]
},
{
"form": "no interdigas",
"source": "Conjugation",
"tags": [
"imperative",
"negative",
"second-person",
"singular"
]
},
{
"form": "no interdiga",
"source": "Conjugation",
"tags": [
"imperative",
"negative",
"singular",
"third-person"
]
},
{
"form": "no interdigamos",
"source": "Conjugation",
"tags": [
"first-person",
"imperative",
"negative",
"plural"
]
},
{
"form": "no interdigรกis",
"source": "Conjugation",
"tags": [
"imperative",
"negative",
"plural",
"second-person"
]
},
{
"form": "no interdigan",
"source": "Conjugation",
"tags": [
"imperative",
"negative",
"plural",
"third-person"
]
},
{
"form": "irregular",
"source": "Conjugation title",
"tags": ["word-tags"],
},
],
}
self.assertEqual(expected, ret)
def test_Spanish_verb2(self):
# This has ^vos^2 in second-person singular subjunctive
ret = self.xinfl("apoltronarse", "Spanish", "verb", "Conjugation", """
<div class="NavFrame">
<div class="NavHead" align="center"> Conjugation of <i class="Latn+mention" lang="es">[[apoltronarse#Spanish|apoltronarse]]</i> (See [[Appendix:Spanish verbs]])</div>
<div class="NavContent">
{| style="background%3A%23F9F9F9%3Btext-align%3Acenter%3Bwidth%3A100%25"
|-
! colspan="3" style="background%3A%23e2e4c0" | <span title="infinitivo">infinitive</span>
| colspan="5" | <span class="Latn+form-of+lang-es+inf-form-of++++origin-apoltronarse+++" lang="es">[[apoltronarse#Spanish|apoltronarse]]</span>
|-
! colspan="3" style="background%3A%23e2e4c0" | <span title="gerundio">gerund</span>
| colspan="5" | <span class="Latn+form-of+lang-es+ger-form-of++++origin-apoltronarse+++" lang="es">[[apoltronรกndose#Spanish|apoltronรกndose]]</span>
|-
! rowspan="3" colspan="2" style="background%3A%23e2e4c0" | <span title="participio+%28pasado%29">past participle</span>
| colspan="2" style="background%3A%23e2e4c0" |
! colspan="2" style="background%3A%23e2e4c0" | <span title="masculino">masculine</span>
! colspan="2" style="background%3A%23e2e4c0" | <span title="femenino">feminine</span>
|-
! colspan="2" style="background%3A%23e2e4c0" | singular
| colspan="2" | <span class="Latn+form-of+lang-es+m%7Cs%7Cpast%7Cpart-form-of++++origin-apoltronarse+++" lang="es">[[apoltronado#Spanish|apoltronado]]</span>
| colspan="2" | <span class="Latn+form-of+lang-es+f%7Cs%7Cpast%7Cpart-form-of++++origin-apoltronarse+++" lang="es">[[apoltronada#Spanish|apoltronada]]</span>
|-
! colspan="2" style="background%3A%23e2e4c0" | plural
| colspan="2" | <span class="Latn+form-of+lang-es+m%7Cp%7Cpast%7Cpart-form-of++++origin-apoltronarse+++" lang="es">[[apoltronados#Spanish|apoltronados]]</span>
| colspan="2" | <span class="Latn+form-of+lang-es+f%7Cp%7Cpast%7Cpart-form-of++++origin-apoltronarse+++" lang="es">[[apoltronadas#Spanish|apoltronadas]]</span>
|-
! colspan="2" rowspan="2" style="background%3A%23DEDEDE" |
! colspan="3" style="background%3A%23DEDEDE" | singular
! colspan="3" style="background%3A%23DEDEDE" | plural
|-
! style="background%3A%23DEDEDE" | 1st person
! style="background%3A%23DEDEDE" | 2nd person
! style="background%3A%23DEDEDE" | 3rd person
! style="background%3A%23DEDEDE" | 1st person
! style="background%3A%23DEDEDE" | 2nd person
! style="background%3A%23DEDEDE" | 3rd person
|-
! rowspan="6" style="background%3A%23c0cfe4" | <span title="indicativo">indicative</span>
! style="background%3A%23ECECEC%3Bwidth%3A12.5%25" |
! style="background%3A%23ECECEC%3Bwidth%3A12.5%25" | yo
! style="background%3A%23ECECEC%3Bwidth%3A12.5%25" | tรบ<br>vos
! style="background%3A%23ECECEC%3Bwidth%3A12.5%25" | รฉl/ella/ello<br>usted
! style="background%3A%23ECECEC%3Bwidth%3A12.5%25" | nosotros<br>nosotras
! style="background%3A%23ECECEC%3Bwidth%3A12.5%25" | vosotros<br>vosotras
! style="background%3A%23ECECEC%3Bwidth%3A12.5%25" | ellos/ellas<br>ustedes
|-
! style="height%3A3em%3Bbackground%3A%23ECECEC" | <span title="presente+de+indicativo">present</span>
| <span class="Latn" lang="es">[[me#Spanish|me]] [[apoltrono#Spanish|apoltrono]]</span>
| <span class="Latn" lang="es">[[te#Spanish|te]] [[apoltronas#Spanish|apoltronas]]</span><sup><sup>tรบ</sup></sup><br><span class="Latn" lang="es">[[te#Spanish|te]] [[apoltronรกs#Spanish|apoltronรกs]]</span><sup><sup>vos</sup></sup>
| <span class="Latn" lang="es">[[se#Spanish|se]] [[apoltrona#Spanish|apoltrona]]</span>
| <span class="Latn" lang="es">[[nos#Spanish|nos]] [[apoltronamos#Spanish|apoltronamos]]</span>
| <span class="Latn" lang="es">[[os#Spanish|os]] [[apoltronรกis#Spanish|apoltronรกis]]</span>
| <span class="Latn" lang="es">[[se#Spanish|se]] [[apoltronan#Spanish|apoltronan]]</span>
|-
! style="height%3A3em%3Bbackground%3A%23ECECEC" | <span title="pret%C3%A9rito+imperfecto+%28copr%C3%A9terito%29">imperfect</span>
| <span class="Latn" lang="es">[[me#Spanish|me]] [[apoltronaba#Spanish|apoltronaba]]</span>
| <span class="Latn" lang="es">[[te#Spanish|te]] [[apoltronabas#Spanish|apoltronabas]]</span>
| <span class="Latn" lang="es">[[se#Spanish|se]] [[apoltronaba#Spanish|apoltronaba]]</span>
| <span class="Latn" lang="es">[[nos#Spanish|nos]] [[apoltronรกbamos#Spanish|apoltronรกbamos]]</span>
| <span class="Latn" lang="es">[[os#Spanish|os]] [[apoltronabais#Spanish|apoltronabais]]</span>
| <span class="Latn" lang="es">[[se#Spanish|se]] [[apoltronaban#Spanish|apoltronaban]]</span>
|-
! style="height%3A3em%3Bbackground%3A%23ECECEC" | <span title="pret%C3%A9rito+perfecto+simple+%28pret%C3%A9rito+indefinido%29">preterite</span>
| <span class="Latn" lang="es">[[me#Spanish|me]] [[apoltronรฉ#Spanish|apoltronรฉ]]</span>
| <span class="Latn" lang="es">[[te#Spanish|te]] [[apoltronaste#Spanish|apoltronaste]]</span>
| <span class="Latn" lang="es">[[se#Spanish|se]] [[apoltronรณ#Spanish|apoltronรณ]]</span>
| <span class="Latn" lang="es">[[nos#Spanish|nos]] [[apoltronamos#Spanish|apoltronamos]]</span>
| <span class="Latn" lang="es">[[os#Spanish|os]] [[apoltronasteis#Spanish|apoltronasteis]]</span>
| <span class="Latn" lang="es">[[se#Spanish|se]] [[apoltronaron#Spanish|apoltronaron]]</span>
|-
! style="height%3A3em%3Bbackground%3A%23ECECEC" | <span title="futuro+simple+%28futuro+imperfecto%29">future</span>
| <span class="Latn" lang="es">[[me#Spanish|me]] [[apoltronarรฉ#Spanish|apoltronarรฉ]]</span>
| <span class="Latn" lang="es">[[te#Spanish|te]] [[apoltronarรกs#Spanish|apoltronarรกs]]</span>
| <span class="Latn" lang="es">[[se#Spanish|se]] [[apoltronarรก#Spanish|apoltronarรก]]</span>
| <span class="Latn" lang="es">[[nos#Spanish|nos]] [[apoltronaremos#Spanish|apoltronaremos]]</span>
| <span class="Latn" lang="es">[[os#Spanish|os]] [[apoltronarรฉis#Spanish|apoltronarรฉis]]</span>
| <span class="Latn" lang="es">[[se#Spanish|se]] [[apoltronarรกn#Spanish|apoltronarรกn]]</span>
|-
! style="height%3A3em%3Bbackground%3A%23ECECEC" | <span title="condicional+simple+%28pospret%C3%A9rito+de+modo+indicativo%29">conditional</span>
| <span class="Latn" lang="es">[[me#Spanish|me]] [[apoltronarรญa#Spanish|apoltronarรญa]]</span>
| <span class="Latn" lang="es">[[te#Spanish|te]] [[apoltronarรญas#Spanish|apoltronarรญas]]</span>
| <span class="Latn" lang="es">[[se#Spanish|se]] [[apoltronarรญa#Spanish|apoltronarรญa]]</span>
| <span class="Latn" lang="es">[[nos#Spanish|nos]] [[apoltronarรญamos#Spanish|apoltronarรญamos]]</span>
| <span class="Latn" lang="es">[[os#Spanish|os]] [[apoltronarรญais#Spanish|apoltronarรญais]]</span>
| <span class="Latn" lang="es">[[se#Spanish|se]] [[apoltronarรญan#Spanish|apoltronarรญan]]</span>
|-
! style="background%3A%23DEDEDE%3Bheight%3A.75em" colspan="8" |
|-
! rowspan="5" style="background%3A%23c0e4c0" | <span title="subjuntivo">subjunctive</span>
! style="background%3A%23ECECEC" |
! style="background%3A%23ECECEC" | yo
! style="background%3A%23ECECEC" | tรบ<br>vos
! style="background%3A%23ECECEC" | รฉl/ella/ello<br>usted
! style="background%3A%23ECECEC" | nosotros<br>nosotras
! style="background%3A%23ECECEC" | vosotros<br>vosotras
! style="background%3A%23ECECEC" | ellos/ellas<br>ustedes
|-
! style="height%3A3em%3Bbackground%3A%23ECECEC" | <span title="presente+de+subjuntivo">present</span>
| <span class="Latn" lang="es">[[me#Spanish|me]] [[apoltrone#Spanish|apoltrone]]</span>
| <span class="Latn" lang="es">[[te#Spanish|te]] [[apoltrones#Spanish|apoltrones]]</span><sup><sup>tรบ</sup></sup><br><span class="Latn" lang="es">[[te#Spanish|te]] [[apoltronรฉs#Spanish|apoltronรฉs]]</span><sup><sup>vos<sup style="color%3Ared">2</sup></sup></sup>
| <span class="Latn" lang="es">[[se#Spanish|se]] [[apoltrone#Spanish|apoltrone]]</span>
| <span class="Latn" lang="es">[[nos#Spanish|nos]] [[apoltronemos#Spanish|apoltronemos]]</span>
| <span class="Latn" lang="es">[[os#Spanish|os]] [[apoltronรฉis#Spanish|apoltronรฉis]]</span>
| <span class="Latn" lang="es">[[se#Spanish|se]] [[apoltronen#Spanish|apoltronen]]</span>
|-
! style="height%3A3em%3Bbackground%3A%23ECECEC" | <span title="pret%C3%A9rito+imperfecto+de+subjuntivo">imperfect</span><br>(ra)
| <span class="Latn" lang="es">[[me#Spanish|me]] [[apoltronara#Spanish|apoltronara]]</span>
| <span class="Latn" lang="es">[[te#Spanish|te]] [[apoltronaras#Spanish|apoltronaras]]</span>
| <span class="Latn" lang="es">[[se#Spanish|se]] [[apoltronara#Spanish|apoltronara]]</span>
| <span class="Latn" lang="es">[[nos#Spanish|nos]] [[apoltronรกramos#Spanish|apoltronรกramos]]</span>
| <span class="Latn" lang="es">[[os#Spanish|os]] [[apoltronarais#Spanish|apoltronarais]]</span>
| <span class="Latn" lang="es">[[se#Spanish|se]] [[apoltronaran#Spanish|apoltronaran]]</span>
|-
! style="height%3A3em%3Bbackground%3A%23ECECEC" | <span title="pret%C3%A9rito+imperfecto+de+subjuntivo">imperfect</span><br>(se)
| <span class="Latn" lang="es">[[me#Spanish|me]] [[apoltronase#Spanish|apoltronase]]</span>
| <span class="Latn" lang="es">[[te#Spanish|te]] [[apoltronases#Spanish|apoltronases]]</span>
| <span class="Latn" lang="es">[[se#Spanish|se]] [[apoltronase#Spanish|apoltronase]]</span>
| <span class="Latn" lang="es">[[nos#Spanish|nos]] [[apoltronรกsemos#Spanish|apoltronรกsemos]]</span>
| <span class="Latn" lang="es">[[os#Spanish|os]] [[apoltronaseis#Spanish|apoltronaseis]]</span>
| <span class="Latn" lang="es">[[se#Spanish|se]] [[apoltronasen#Spanish|apoltronasen]]</span>
|-
! style="height%3A3em%3Bbackground%3A%23ECECEC" | <span title="futuro+simple+de+subjuntivo+%28futuro+de+subjuntivo%29">future</span><sup style="color%3Ared">1</sup>
| <span class="Latn" lang="es">[[me#Spanish|me]] [[apoltronare#Spanish|apoltronare]]</span>
| <span class="Latn" lang="es">[[te#Spanish|te]] [[apoltronares#Spanish|apoltronares]]</span>
| <span class="Latn" lang="es">[[se#Spanish|se]] [[apoltronare#Spanish|apoltronare]]</span>
| <span class="Latn" lang="es">[[nos#Spanish|nos]] [[apoltronรกremos#Spanish|apoltronรกremos]]</span>
| <span class="Latn" lang="es">[[os#Spanish|os]] [[apoltronareis#Spanish|apoltronareis]]</span>
| <span class="Latn" lang="es">[[se#Spanish|se]] [[apoltronaren#Spanish|apoltronaren]]</span>
|-
! style="background%3A%23DEDEDE%3Bheight%3A.75em" colspan="8" |
|-
! rowspan="6" style="background%3A%23e4d4c0" | <span title="imperativo">imperative</span>
! style="background%3A%23ECECEC" |
! style="background%3A%23ECECEC" | โ
! style="background%3A%23ECECEC" | tรบ<br>vos
! style="background%3A%23ECECEC" | usted
! style="background%3A%23ECECEC" | nosotros<br>nosotras
! style="background%3A%23ECECEC" | vosotros<br>vosotras
! style="background%3A%23ECECEC" | ustedes
|-
! style="height%3A3em%3Bbackground%3A%23ECECEC" | <span title="imperativo+afirmativo">affirmative</span>
|
| <span class="Latn+form-of+lang-es+2%7Cs%7Cimp-form-of++++origin-apoltronarse+++" lang="es">[[apoltrรณnate#Spanish|apoltrรณnate]]</span><sup><sup>tรบ</sup></sup><br><span class="Latn+form-of+lang-es+2%7Cs%7Cvoseo%7Cimp-form-of++++origin-apoltronarse+++" lang="es">[[apoltronate#Spanish|apoltronate]]</span><sup><sup>vos</sup></sup>
| <span class="Latn+form-of+lang-es+3%7Cs%7Cimp-form-of++++origin-apoltronarse+++" lang="es">[[apoltrรณnese#Spanish|apoltrรณnese]]</span>
| <span class="Latn+form-of+lang-es+1%7Cp%7Cimp-form-of++++origin-apoltronarse+++" lang="es">[[apoltronรฉmonos#Spanish|apoltronรฉmonos]]</span>
| <span class="Latn+form-of+lang-es+2%7Cp%7Cimp-form-of++++origin-apoltronarse+++" lang="es">[[apoltronaos#Spanish|apoltronaos]]</span>
| <span class="Latn+form-of+lang-es+3%7Cp%7Cimp-form-of++++origin-apoltronarse+++" lang="es">[[apoltrรณnense#Spanish|apoltrรณnense]]</span>
|-
! style="height%3A3em%3Bbackground%3A%23ECECEC" | <span title="imperativo+negativo">negative</span>
|
| <span class="Latn" lang="es">[[no#Spanish|no]] [[te#Spanish|te]] [[apoltrones#Spanish|apoltrones]]</span>
| <span class="Latn" lang="es">[[no#Spanish|no]] [[se#Spanish|se]] [[apoltrone#Spanish|apoltrone]]</span>
| <span class="Latn" lang="es">[[no#Spanish|no]] [[nos#Spanish|nos]] [[apoltronemos#Spanish|apoltronemos]]</span>
| <span class="Latn" lang="es">[[no#Spanish|no]] [[os#Spanish|os]] [[apoltronรฉis#Spanish|apoltronรฉis]]</span>
| <span class="Latn" lang="es">[[no#Spanish|no]] [[se#Spanish|se]] [[apoltronen#Spanish|apoltronen]]</span>
|}
<div style="width%3A100%25%3Btext-align%3Aleft%3Bbackground%3A%23d9ebff">
<div style="display%3Ainline-block%3Btext-align%3Aleft%3Bpadding-left%3A1em%3Bpadding-right%3A1em">
<sup style="color%3A+red">1</sup>Mostly obsolete form, now mainly used in legal jargon.<br><sup style="color%3A+red">2</sup>Argentine and Uruguayan <i class="Latn+mention" lang="es">[[voseo#Spanish|voseo]]</i> prefers the <i class="Latn+mention" lang="es">[[tรบ#Spanish|tรบ]]</i> form for the present subjunctive.
</div></div>
</div></div>
<div class="NavFrame">
<div class="NavHead" align="center"> Selected combined forms of <i class="Latn+mention" lang="es">[[apoltronarse#Spanish|apoltronarse]]</i></div>
<div class="NavContent">
{| class="inflection-table" style="background%3A%23F9F9F9%3Btext-align%3Acenter%3Bwidth%3A100%25"
|-
! colspan="2" rowspan="2" style="background%3A%23DEDEDE" |
! colspan="3" style="background%3A%23DEDEDE" | singular
! colspan="3" style="background%3A%23DEDEDE" | plural
|-
! style="background%3A%23DEDEDE" | 1st person
! style="background%3A%23DEDEDE" | 2nd person
! style="background%3A%23DEDEDE" | 3rd person
! style="background%3A%23DEDEDE" | 1st person
! style="background%3A%23DEDEDE" | 2nd person
! style="background%3A%23DEDEDE" | 3rd person
|-
! rowspan="2" style="background%3A%23c0cfe4" | Infinitives
|-
! style="height%3A3em%3Bbackground%3A%23ECECEC" | accusative
| <span class="Latn+form-of+lang-es+me%7Cinf%7Ccombined-form-of++++origin-apoltronarse+++" lang="es">[[apoltronarme#Spanish|apoltronarme]]</span>
| <span class="Latn+form-of+lang-es+te%7Cinf%7Ccombined-form-of++++origin-apoltronarse+++" lang="es">[[apoltronarte#Spanish|apoltronarte]]</span>
| <span class="Latn+form-of+lang-es+se%7Cinf%7Ccombined-form-of++++origin-apoltronarse+++" lang="es">[[apoltronarse#Spanish|apoltronarse]]</span>
| <span class="Latn+form-of+lang-es+nos%7Cinf%7Ccombined-form-of++++origin-apoltronarse+++" lang="es">[[apoltronarnos#Spanish|apoltronarnos]]</span>
| <span class="Latn+form-of+lang-es+os%7Cinf%7Ccombined-form-of++++origin-apoltronarse+++" lang="es">[[apoltronaros#Spanish|apoltronaros]]</span>
| <span class="Latn+form-of+lang-es+se%7Cinf%7Ccombined-form-of++++origin-apoltronarse+++" lang="es">[[apoltronarse#Spanish|apoltronarse]]</span>
|-
! style="background%3A%23DEDEDE%3Bheight%3A.35em" colspan="8" |
|-
! rowspan="2" style="background%3A%23d0cfa4" | gerunds
|-
! style="height%3A3em%3Bbackground%3A%23ECECEC" | accusative
| <span class="Latn+form-of+lang-es+me%7Cgerund%7Ccombined-form-of++++origin-apoltronarse+++" lang="es">[[apoltronรกndome#Spanish|apoltronรกndome]]</span>
| <span class="Latn+form-of+lang-es+te%7Cgerund%7Ccombined-form-of++++origin-apoltronarse+++" lang="es">[[apoltronรกndote#Spanish|apoltronรกndote]]</span>
| <span class="Latn+form-of+lang-es+se%7Cgerund%7Ccombined-form-of++++origin-apoltronarse+++" lang="es">[[apoltronรกndose#Spanish|apoltronรกndose]]</span>
| <span class="Latn+form-of+lang-es+nos%7Cgerund%7Ccombined-form-of++++origin-apoltronarse+++" lang="es">[[apoltronรกndonos#Spanish|apoltronรกndonos]]</span>
| <span class="Latn+form-of+lang-es+os%7Cgerund%7Ccombined-form-of++++origin-apoltronarse+++" lang="es">[[apoltronรกndoos#Spanish|apoltronรกndoos]]</span>
| <span class="Latn+form-of+lang-es+se%7Cgerund%7Ccombined-form-of++++origin-apoltronarse+++" lang="es">[[apoltronรกndose#Spanish|apoltronรกndose]]</span>
|-
! style="background%3A%23DEDEDE%3Bheight%3A.35em" colspan="8" |
|-
! rowspan="2" style="background%3A%23f2caa4" | with positive imperatives
|-
! style="height%3A3em%3Bbackground%3A%23ECECEC" | accusative
| ''not used''
| <span class="Latn+form-of+lang-es+te%7Cimp%7C2s%7Ccombined-form-of++++origin-apoltronarse+++" lang="es">[[apoltrรณnate#Spanish|apoltrรณnate]]</span>
| <span class="Latn+form-of+lang-es+se%7Cimp%7C3s%7Ccombined-form-of++++origin-apoltronarse+++" lang="es">[[apoltrรณnese#Spanish|apoltrรณnese]]</span>
| <span class="Latn+form-of+lang-es+nos%7Cimp%7C1p%7Ccombined-form-of++++origin-apoltronarse+++" lang="es">[[apoltronรฉmonos#Spanish|apoltronรฉmonos]]</span>
| <span class="Latn+form-of+lang-es+os%7Cimp%7C2p%7Ccombined-form-of++++origin-apoltronarse+++" lang="es">[[apoltronaos#Spanish|apoltronaos]]</span>
| <span class="Latn+form-of+lang-es+se%7Cimp%7C3p%7Ccombined-form-of++++origin-apoltronarse+++" lang="es">[[apoltrรณnense#Spanish|apoltrรณnense]]</span>
|}
</div></div>
[[Category:Spanish verbs ending in -ar|APOLTRONARSE]][[Category:Spanish reflexive verbs|APOLTRONARSE]]
""")
expected = {
"forms": [
{
"form": "apoltronarse",
"source": "Conjugation",
"tags": [
"infinitive"
]
},
{
"form": "apoltronรกndose",
"source": "Conjugation",
"tags": [
"gerund"
]
},
{
"form": "apoltronado",
"source": "Conjugation",
"tags": [
"masculine",
"participle",
"past",
"singular"
]
},
{
"form": "apoltronada",
"source": "Conjugation",
"tags": [
"feminine",
"participle",
"past",
"singular"
]
},
{
"form": "apoltronados",
"source": "Conjugation",
"tags": [
"masculine",
"participle",
"past",
"plural"
]
},
{
"form": "apoltronadas",
"source": "Conjugation",
"tags": [
"feminine",
"participle",
"past",
"plural"
]
},
{
"form": "me apoltrono",
"source": "Conjugation",
"tags": [
"first-person",
"indicative",
"present",
"singular"
]
},
{
"form": "te apoltronas",
"source": "Conjugation",
"tags": [
"indicative",
"informal",
"present",
"second-person",
"singular"
]
},
{
"form": "te apoltronรกs",
"source": "Conjugation",
"tags": [
"indicative",
"informal",
"present",
"second-person",
"singular",
"vos-form"
]
},
{
"form": "se apoltrona",
"source": "Conjugation",
"tags": [
"indicative",
"present",
"singular",
"third-person"
]
},
{
"form": "nos apoltronamos",
"source": "Conjugation",
"tags": [
"first-person",
"indicative",
"plural",
"present"
]
},
{
"form": "os apoltronรกis",
"source": "Conjugation",
"tags": [
"indicative",
"plural",
"present",
"second-person"
]
},
{
"form": "se apoltronan",
"source": "Conjugation",
"tags": [
"indicative",
"plural",
"present",
"third-person"
]
},
{
"form": "me apoltronaba",
"source": "Conjugation",
"tags": [
"first-person",
"imperfect",
"indicative",
"singular"
]
},
{
"form": "te apoltronabas",
"source": "Conjugation",
"tags": [
"imperfect",
"indicative",
"second-person",
"singular"
]
},
{
"form": "se apoltronaba",
"source": "Conjugation",
"tags": [
"imperfect",
"indicative",
"singular",
"third-person"
]
},
{
"form": "nos apoltronรกbamos",
"source": "Conjugation",
"tags": [
"first-person",
"imperfect",
"indicative",
"plural"
]
},
{
"form": "os apoltronabais",
"source": "Conjugation",
"tags": [
"imperfect",
"indicative",
"plural",
"second-person"
]
},
{
"form": "se apoltronaban",
"source": "Conjugation",
"tags": [
"imperfect",
"indicative",
"plural",
"third-person"
]
},
{
"form": "me apoltronรฉ",
"source": "Conjugation",
"tags": [
"first-person",
"indicative",
"preterite",
"singular"
]
},
{
"form": "te apoltronaste",
"source": "Conjugation",
"tags": [
"indicative",
"preterite",
"second-person",
"singular"
]
},
{
"form": "se apoltronรณ",
"source": "Conjugation",
"tags": [
"indicative",
"preterite",
"singular",
"third-person"
]
},
{
"form": "nos apoltronamos",
"source": "Conjugation",
"tags": [
"first-person",
"indicative",
"plural",
"preterite"
]
},
{
"form": "os apoltronasteis",
"source": "Conjugation",
"tags": [
"indicative",
"plural",
"preterite",
"second-person"
]
},
{
"form": "se apoltronaron",
"source": "Conjugation",
"tags": [
"indicative",
"plural",
"preterite",
"third-person"
]
},
{
"form": "me apoltronarรฉ",
"source": "Conjugation",
"tags": [
"first-person",
"future",
"indicative",
"singular"
]
},
{
"form": "te apoltronarรกs",
"source": "Conjugation",
"tags": [
"future",
"indicative",
"second-person",
"singular"
]
},
{
"form": "se apoltronarรก",
"source": "Conjugation",
"tags": [
"future",
"indicative",
"singular",
"third-person"
]
},
{
"form": "nos apoltronaremos",
"source": "Conjugation",
"tags": [
"first-person",
"future",
"indicative",
"plural"
]
},
{
"form": "os apoltronarรฉis",
"source": "Conjugation",
"tags": [
"future",
"indicative",
"plural",
"second-person"
]
},
{
"form": "se apoltronarรกn",
"source": "Conjugation",
"tags": [
"future",
"indicative",
"plural",
"third-person"
]
},
{
"form": "me apoltronarรญa",
"source": "Conjugation",
"tags": [
"conditional",
"first-person",
"indicative",
"singular"
]
},
{
"form": "te apoltronarรญas",
"source": "Conjugation",
"tags": [
"conditional",
"indicative",
"second-person",
"singular"
]
},
{
"form": "se apoltronarรญa",
"source": "Conjugation",
"tags": [
"conditional",
"indicative",
"singular",
"third-person"
]
},
{
"form": "nos apoltronarรญamos",
"source": "Conjugation",
"tags": [
"conditional",
"first-person",
"indicative",
"plural"
]
},
{
"form": "os apoltronarรญais",
"source": "Conjugation",
"tags": [
"conditional",
"indicative",
"plural",
"second-person"
]
},
{
"form": "se apoltronarรญan",
"source": "Conjugation",
"tags": [
"conditional",
"indicative",
"plural",
"third-person"
]
},
{
"form": "me apoltrone",
"source": "Conjugation",
"tags": [
"first-person",
"present",
"singular",
"subjunctive"
]
},
{
"form": "te apoltrones",
"source": "Conjugation",
"tags": [
"informal",
"present",
"second-person",
"singular",
"subjunctive"
]
},
{
"form": "te apoltronรฉs",
"source": "Conjugation",
"tags": [
"informal",
"present",
"second-person",
"singular",
"subjunctive",
"vos-form"
]
},
{
"form": "se apoltrone",
"source": "Conjugation",
"tags": [
"present",
"singular",
"subjunctive",
"third-person"
]
},
{
"form": "nos apoltronemos",
"source": "Conjugation",
"tags": [
"first-person",
"plural",
"present",
"subjunctive"
]
},
{
"form": "os apoltronรฉis",
"source": "Conjugation",
"tags": [
"plural",
"present",
"second-person",
"subjunctive"
]
},
{
"form": "se apoltronen",
"source": "Conjugation",
"tags": [
"plural",
"present",
"subjunctive",
"third-person"
]
},
{
"form": "me apoltronara",
"source": "Conjugation",
"tags": [
"first-person",
"imperfect",
"singular",
"subjunctive"
]
},
{
"form": "te apoltronaras",
"source": "Conjugation",
"tags": [
"imperfect",
"second-person",
"singular",
"subjunctive"
]
},
{
"form": "se apoltronara",
"source": "Conjugation",
"tags": [
"imperfect",
"singular",
"subjunctive",
"third-person"
]
},
{
"form": "nos apoltronรกramos",
"source": "Conjugation",
"tags": [
"first-person",
"imperfect",
"plural",
"subjunctive"
]
},
{
"form": "os apoltronarais",
"source": "Conjugation",
"tags": [
"imperfect",
"plural",
"second-person",
"subjunctive"
]
},
{
"form": "se apoltronaran",
"source": "Conjugation",
"tags": [
"imperfect",
"plural",
"subjunctive",
"third-person"
]
},
{
"form": "me apoltronase",
"source": "Conjugation",
"tags": [
"first-person",
"imperfect",
"imperfect-se",
"singular",
"subjunctive"
]
},
{
"form": "te apoltronases",
"source": "Conjugation",
"tags": [
"imperfect",
"imperfect-se",
"second-person",
"singular",
"subjunctive"
]
},
{
"form": "se apoltronase",
"source": "Conjugation",
"tags": [
"imperfect",
"imperfect-se",
"singular",
"subjunctive",
"third-person"
]
},
{
"form": "nos apoltronรกsemos",
"source": "Conjugation",
"tags": [
"first-person",
"imperfect",
"imperfect-se",
"plural",
"subjunctive"
]
},
{
"form": "os apoltronaseis",
"source": "Conjugation",
"tags": [
"imperfect",
"imperfect-se",
"plural",
"second-person",
"subjunctive"
]
},
{
"form": "se apoltronasen",
"source": "Conjugation",
"tags": [
"imperfect",
"imperfect-se",
"plural",
"subjunctive",
"third-person"
]
},
{
"form": "me apoltronare",
"source": "Conjugation",
"tags": [
"first-person",
"future",
"singular",
"subjunctive"
]
},
{
"form": "te apoltronares",
"source": "Conjugation",
"tags": [
"future",
"second-person",
"singular",
"subjunctive"
]
},
{
"form": "se apoltronare",
"source": "Conjugation",
"tags": [
"future",
"singular",
"subjunctive",
"third-person"
]
},
{
"form": "nos apoltronรกremos",
"source": "Conjugation",
"tags": [
"first-person",
"future",
"plural",
"subjunctive"
]
},
{
"form": "os apoltronareis",
"source": "Conjugation",
"tags": [
"future",
"plural",
"second-person",
"subjunctive"
]
},
{
"form": "se apoltronaren",
"source": "Conjugation",
"tags": [
"future",
"plural",
"subjunctive",
"third-person"
]
},
{
"form": "apoltrรณnate",
"source": "Conjugation",
"tags": [
"imperative",
"informal",
"second-person",
"singular"
]
},
{
"form": "apoltronate",
"source": "Conjugation",
"tags": [
"imperative",
"informal",
"second-person",
"singular",
"vos-form"
]
},
{
"form": "apoltrรณnese",
"source": "Conjugation",
"tags": [
"imperative",
"singular",
"third-person"
]
},
{
"form": "apoltronรฉmonos",
"source": "Conjugation",
"tags": [
"first-person",
"imperative",
"plural"
]
},
{
"form": "apoltronaos",
"source": "Conjugation",
"tags": [
"imperative",
"plural",
"second-person"
]
},
{
"form": "apoltrรณnense",
"source": "Conjugation",
"tags": [
"imperative",
"plural",
"third-person"
]
},
{
"form": "no te apoltrones",
"source": "Conjugation",
"tags": [
"imperative",
"negative",
"second-person",
"singular"
]
},
{
"form": "no se apoltrone",
"source": "Conjugation",
"tags": [
"imperative",
"negative",
"singular",
"third-person"
]
},
{
"form": "no nos apoltronemos",
"source": "Conjugation",
"tags": [
"first-person",
"imperative",
"negative",
"plural"
]
},
{
"form": "no os apoltronรฉis",
"source": "Conjugation",
"tags": [
"imperative",
"negative",
"plural",
"second-person"
]
},
{
"form": "no se apoltronen",
"source": "Conjugation",
"tags": [
"imperative",
"negative",
"plural",
"third-person"
]
},
{
"form": "apoltronarme",
"source": "Conjugation",
"tags": [
"accusative",
"combined-form",
"infinitive",
"object-first-person",
"object-singular"
]
},
{
"form": "apoltronarte",
"source": "Conjugation",
"tags": [
"accusative",
"combined-form",
"infinitive",
"object-second-person",
"object-singular"
]
},
{
"form": "apoltronarse",
"source": "Conjugation",
"tags": [
"accusative",
"combined-form",
"infinitive",
"object-singular",
"object-third-person"
]
},
{
"form": "apoltronarnos",
"source": "Conjugation",
"tags": [
"accusative",
"combined-form",
"infinitive",
"object-first-person",
"object-plural"
]
},
{
"form": "apoltronaros",
"source": "Conjugation",
"tags": [
"accusative",
"combined-form",
"infinitive",
"object-plural",
"object-second-person"
]
},
{
"form": "apoltronarse",
"source": "Conjugation",
"tags": [
"accusative",
"combined-form",
"infinitive",
"object-plural",
"object-third-person"
]
},
{
"form": "apoltronรกndome",
"source": "Conjugation",
"tags": [
"accusative",
"combined-form",
"gerund",
"object-first-person",
"object-singular"
]
},
{
"form": "apoltronรกndote",
"source": "Conjugation",
"tags": [
"accusative",
"combined-form",
"gerund",
"object-second-person",
"object-singular"
]
},
{
"form": "apoltronรกndose",
"source": "Conjugation",
"tags": [
"accusative",
"combined-form",
"gerund",
"object-singular",
"object-third-person"
]
},
{
"form": "apoltronรกndonos",
"source": "Conjugation",
"tags": [
"accusative",
"combined-form",
"gerund",
"object-first-person",
"object-plural"
]
},
{
"form": "apoltronรกndoos",
"source": "Conjugation",
"tags": [
"accusative",
"combined-form",
"gerund",
"object-plural",
"object-second-person"
]
},
{
"form": "apoltronรกndose",
"source": "Conjugation",
"tags": [
"accusative",
"combined-form",
"gerund",
"object-plural",
"object-third-person"
]
},
{'form': '-',
'source': 'Conjugation',
'tags': ['accusative',
'combined-form',
'object-first-person',
'object-singular',
'with-positive-imperative']},
{
"form": "apoltrรณnate",
"source": "Conjugation",
"tags": [
"accusative",
"combined-form",
"object-second-person",
"object-singular",
"with-positive-imperative"
]
},
{
"form": "apoltrรณnese",
"source": "Conjugation",
"tags": [
"accusative",
"combined-form",
"object-singular",
"object-third-person",
"with-positive-imperative"
]
},
{
"form": "apoltronรฉmonos",
"source": "Conjugation",
"tags": [
"accusative",
"combined-form",
"object-first-person",
"object-plural",
"with-positive-imperative"
]
},
{
"form": "apoltronaos",
"source": "Conjugation",
"tags": [
"accusative",
"combined-form",
"object-plural",
"object-second-person",
"with-positive-imperative"
]
},
{
"form": "apoltrรณnense",
"source": "Conjugation",
"tags": [
"accusative",
"combined-form",
"object-plural",
"object-third-person",
"with-positive-imperative"
]
}
],
}
self.assertEqual(expected, ret)
| StarcoderdataPython |
8062003 | <filename>aoc/year2019/day1/day1.py
from aocd import data
from aoc.utils import ints
def fuel(mass: int) -> int:
    """Fuel required to launch a module of the given mass (AoC 2019 day 1, part 1)."""
    thirds = mass // 3
    return thirds - 2
def total_fuel(mass: int) -> int:
    """Fuel for a module plus the fuel needed to carry that fuel (part 2).

    Repeatedly applies mass // 3 - 2 until the requirement drops to zero
    or below, accumulating each positive step.
    """
    total = 0
    remaining = mass // 3 - 2
    while remaining > 0:
        total += remaining
        remaining = remaining // 3 - 2
    return total
# part 1: total fuel needed, one fuel computation per module mass in the input
print(sum(map(fuel, ints(data))))
# part 2: additionally account for the fuel required to carry the fuel itself
print(sum(map(total_fuel, ints(data))))
| StarcoderdataPython |
import subprocess
from glob import glob
from pathlib import Path

# Paths are resolved relative to this script so it works from any CWD.
cwd = Path(__file__).parent
name = 'atlas'
output = cwd / '../../dist/data'
file = output / 'atlas.mbtiles'
# recursive=True is required for '**' to actually match nested directories;
# without it glob treats '**' like '*'.  Sort for a deterministic join order.
tiles = sorted(glob(str((cwd / '../../data/**/*.mbtiles').resolve()), recursive=True))

if __name__ == '__main__':
    # Rebuild the combined tileset from scratch.
    file.unlink(missing_ok=True)
    output.mkdir(parents=True, exist_ok=True)
    subprocess.run([
        'tile-join',
        f'--name={name}',
        '--no-tile-size-limit',
        '--force',
        f'--output={file}',
        *tiles,
    ], check=True)  # fail loudly if tile-join is missing or exits non-zero
| StarcoderdataPython |
3216755 | <reponame>bozonhiggsa/BotTelegram_ImageStyleTransfer
import os
import urllib.request
from PIL import Image
import numpy as np
import tensorflow as tf
import tensorflow_hub as hub
import config
def save_image_from_message(message, telbot):
    """Download the photo attached to *message* and store it locally.

    Returns the stored file name (``<file_id>.jpg``) inside
    config.result_storage_path.
    """
    image_id = get_image_id_from_message(message)
    # ask Telegram where the file lives on its servers
    file_path = telbot.get_file(image_id).file_path
    # direct-download URL for the bot API file endpoint
    image_url = "https://api.telegram.org/file/bot{0}/{1}".format(config.TOKEN, file_path)
    # ensure the temporary storage folder exists (exist_ok avoids the
    # racy exists()-then-makedirs pattern)
    os.makedirs(config.result_storage_path, exist_ok=True)
    # retrieve and save image
    image_name = "{0}.jpg".format(image_id)
    urllib.request.urlretrieve(image_url, "{0}/{1}".format(config.result_storage_path, image_name))
    return image_name
def get_image_id_from_message(message):
    """Return the file_id of the last photo size in the message.

    Telegram supplies several sizes of the same photo; the last entry in
    ``message.photo`` is the biggest one.
    """
    return message.photo[-1].file_id
def handle_image(image_name, cid):
    """Stylize a stored content image with the chat's chosen style image.

    image_name: file name of the content image inside config.result_storage_path.
    cid: chat id used to look up that chat's style selection in config.
    Returns the file name of the saved stylized image.
    """
    # Resolve the style image: either a bundled "standard" style chosen by
    # number, or a custom style image previously stored for this chat.
    if config.dict_styles_established[cid] == 'standard style established':
        style_number = config.dict_styles[cid]
        # one-shot selection: drop the pending style number for this chat
        del config.dict_styles[cid]
        style_image_name = "handled_style{0}.jpg".format(style_number)
        style_image = Image.open("{0}/{1}".format(config.result_storage_path, style_image_name))
    else:
        style_image_name = config.dict_styles_established[cid]
        style_image = Image.open("{0}/{1}".format(config.result_storage_path, style_image_name))
    # Square-crop the style image to 256x256, then scale pixels to [0, 1]
    # floats with a leading batch axis.
    style_image = image_to_square(style_image, 256)
    style_img = np.array(style_image)
    style_img = style_img.astype(np.float32)[np.newaxis, ...] / 255.
    # Content image: shrink to at most 384px wide (aspect preserved) and
    # convert the same way.
    content_image = Image.open("{0}/{1}".format(config.result_storage_path, image_name))
    image_resized = image_reduce(content_image, 384)
    content_img = np.array(image_resized)
    content_img = content_img.astype(np.float32)[np.newaxis, ...] / 255.
    # NOTE(review): hub.load runs on every call, which re-resolves the model
    # each time; consider caching at module level -- confirm memory budget first.
    hub_module = hub.load('https://tfhub.dev/google/magenta/arbitrary-image-stylization-v1-256/2')
    outputs = hub_module(tf.constant(content_img), tf.constant(style_img))
    stylized_image = outputs[0]
    # Drop the batch axis and convert [0, 1] floats back to uint8 pixels.
    result_img = tf.squeeze(stylized_image)
    result_im = tf.cast(result_img * 255, tf.uint8)
    result_img_pillow = Image.fromarray(result_im.numpy())
    image_name_new = "handled_image_" + image_name
    result_img_pillow.save("{0}/{1}".format(config.result_storage_path, image_name_new))
    return image_name_new
def cleanup_remove_images(image_name, image_name_new, style_image_name):
    """Delete the temporary files created while processing one request."""
    # the original content image and its stylized result are always removed
    for temp_name in (image_name, image_name_new):
        os.remove('{0}/{1}'.format(config.result_storage_path, temp_name))
    # bundled standard styles are kept cached; only custom uploads are removed
    if style_image_name != 'standard style established':
        os.remove('{0}/{1}'.format(config.result_storage_path, style_image_name))
def get_save_style_image(number):
    """Ensure the 256x256 squared version of built-in style ``number`` exists
    in the result storage folder and return its filename."""
    # Create the temporary storage folder on first use.
    if not os.path.exists(config.result_storage_path):
        os.makedirs(config.result_storage_path)
    handled_name = "handled_style{0}.jpg".format(number)
    handled_path = "{0}/{1}".format(config.result_storage_path, handled_name)
    if not os.path.exists(handled_path):
        # Crop/resize the bundled style image once and cache the result on disk.
        source = Image.open("static/style{0}.jpg".format(number))
        squared = image_to_square(source, 256)
        squared.save(handled_path)
    return handled_name
def image_to_square(image_name, image_size):
    """Center-crop an image to a square and resize it to image_size x image_size."""
    width, height = image_name.width, image_name.height
    if width > height:
        # Landscape: cut equal strips off the left and right.
        left = width // 2 - height // 2
        cropped = image_name.crop((left, 0, left + height, height))
    elif height > width:
        # Portrait: cut equal strips off the top and bottom.
        top = height // 2 - width // 2
        cropped = image_name.crop((0, top, width, top + width))
    else:
        # Already square; only the resize is needed.
        cropped = image_name
    return cropped.resize((image_size, image_size))
def image_reduce(image_name, width_size):
    """Proportionally shrink an image so its width is at most ``width_size``.

    Square images become width_size x width_size; wider images are scaled
    keeping their aspect ratio; images already narrow enough are returned
    unchanged.
    """
    width = image_name.width
    height = image_name.height
    # FIX: the original used bitwise `&`, which by operator precedence parsed
    # as `width == (height & width) > width_size` and could wrongly square
    # non-square images (e.g. width=8, height=12 since 8 & 12 == 8).
    if width == height and width > width_size:
        image_resized = image_name.resize((width_size, width_size))
    elif width > width_size:
        factor = width / width_size
        image_resized = image_name.resize((width_size, round(height / factor)))
    else:
        image_resized = image_name
    return image_resized
| StarcoderdataPython |
39423 | <filename>alembic/versions/0b7ccbfa8f7c_add_order_and_hide_from_menu_to_page_.py
"""Add order and hide_from_menu to Page model
Revision ID: 0b7ccbfa8f7c
Revises: <KEY>
Create Date: 2016-03-23 16:33:44.047433
"""
# revision identifiers, used by Alembic.
revision = '0b7ccbfa8f7c'
down_revision = '<KEY>'  # NOTE(review): placeholder -- the real parent revision id was redacted
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Apply the migration: add nullable `hide_from_menu` and `order` columns to `pages`."""
    ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('pages', schema=None) as batch_op:
        batch_op.add_column(sa.Column('hide_from_menu', sa.Boolean(), nullable=True))
        batch_op.add_column(sa.Column('order', sa.Integer(), nullable=True))
    ### end Alembic commands ###
def downgrade():
    """Revert the migration: drop the columns added by upgrade()."""
    ### commands auto generated by Alembic - please adjust! ###
    with op.batch_alter_table('pages', schema=None) as batch_op:
        batch_op.drop_column('order')
        batch_op.drop_column('hide_from_menu')
    ### end Alembic commands ###
| StarcoderdataPython |
11239481 | <gh_stars>1-10
from kg.checkers import * ### @import
@chk.get_one_input
def get_one_input(file, **kwargs):
    """Parse one test case from the input file: a length n, then n integers."""
    n = int(next(file))
    a = list(map(int, next(file).strip().split()))
    # A bad input file is a judge-side failure (Fail), not a contestant error.
    ensure(len(a) == n, "Invalid length in input", exc=Fail)
    return a
@chk.get_output_for_input
def get_output_for_input(file, a, **kwargs):
    """Parse a contestant's answer: a count m followed by m integers."""
    try:
        m = int(next(file).rstrip())
        b = list(map(int, next(file).rstrip().split(' ')))
    except Exception as e:
        raise ParseError("Failed to get a sequence") from e
    # Malformed contestant output is a wrong answer, not a judge failure.
    ensure(m >= 0, "Invalid length", exc=WA)
    ensure(len(b) == m, lambda: WA(f"Expected {m} numbers but got {len(b)}"))
    return b
@chk.get_judge_data_for_input
def get_judge_data_for_input(file, a, **kwargs):
    """Read the judge's expected answer: the optimal achievable length."""
    return int(next(file))
@set_multi_checker(no_extra_chars=True)
@default_return(1.0)
def check_solution(a, b, ans, **kwargs):
    """Accept b iff it is a distinct-valued subsequence of a with optimal length ans."""
    # Greedy scan: b must appear, in order, inside a.
    matched = 0
    for value in a:
        if matched < len(b) and value == b[matched]:
            matched += 1
    ensure(matched == len(b), "Not a subsequence!", exc=WA)
    # All chosen values must be pairwise different.
    ensure(len(set(b)) == len(b), "Values not unique!", exc=WA)
    if len(b) < ans:
        raise WA("Suboptimal solution")
    if len(b) > ans:
        raise Fail("Judge data incorrect!")
if __name__ == '__main__': chk(title="Split")
| StarcoderdataPython |
1672352 | <filename>examples/rmg/commented/input.py
# Data sources
database(
# overrides RMG thermo calculation of RMG with these values.
# libraries found at http://rmg.mit.edu/database/thermo/libraries/
# if species exist in multiple libraries, the earlier libraries overwrite the
# previous values
thermoLibraries=['BurkeH2O2', 'primaryThermoLibrary', 'DFT_QCI_thermo', 'CBS_QB3_1dHR'],
# overrides RMG kinetics estimation if needed in the core of RMG.
# list of libraries found at http://rmg.mit.edu/database/kinetics/libraries/
# libraries can be input as either a string or tuple of form ('library_name',True/False)
# where a `True` indicates that all unused reactions will be automatically added
# to the chemkin file at the end of the simulation. Placing just string values
# defaults the tuple to `False`. The string input is sufficient in almost
# all situations
reactionLibraries=[('C3', False)],
# seed mechanisms are reactionLibraries that are forced into the initial mechanism
# in addition to species listed in this input file.
# This is helpful for reducing run time for species you know will appear in
# the mechanism.
seedMechanisms=['BurkeH2O2inN2', 'ERC-FoundationFuelv0.9'],
# this is normally not changed in general RMG runs. Usually used for testing with
# outside kinetics databases
kineticsDepositories='default',
# lists specific families used to generate the model. 'default' uses a list of
# families from RMG-Database/input/kinetics/families/recommended.py
# a visual list of families is available in PDF form at RMG-database/families
kineticsFamilies='default',
# specifies how RMG calculates rates. currently, the only option is 'rate rules'
kineticsEstimator='rate rules',
)
# List of species
# list initial and expected species below to automatically put them into the core mechanism.
# 'structure' can utilize method of SMILES("put_SMILES_here"),
# adjacencyList("""put_adj_list_here"""), or InChI("put_InChI_here")
# for molecular oxygen, use the smiles string [O][O] so the triplet form is used
species(
label='butane',
reactive=True, # this parameter is optional if true
structure=SMILES("CCCC"),
)
species(
label='O2',
structure=SMILES("[O][O]"),
)
species(
label='N2',
reactive=False,
structure=adjacencyList("""
1 N u0 p1 c0 {2,T}
2 N u0 p1 c0 {1,T}
"""),
)
# You can list species not initially in reactor to make sure RMG includes them in the mechanism
species(
label='QOOH',
reactive=True,
structure=SMILES("OOCC[CH]C")
)
species(
label='CO2',
reactive=True,
structure=SMILES("O=C=O")
)
# Reaction systems
# currently RMG models only constant temperature and pressure as homogeneous batch reactors.
# two options are: simpleReactor for gas phase or liquidReactor for liquid phase
# use can use multiple reactors in an input file for each condition you want to test.
simpleReactor(
# specifies reaction temperature with units
temperature=(700, 'K'),
# specifies reaction pressure with units
pressure=(10.0, 'bar'),
# list initial mole fractions of compounds using the label from the 'species' label.
# RMG will normalize if sum/=1
initialMoleFractions={
"N2": 4,
"O2": 1,
"butane": 1. / 6.5,
},
# the following two values specify when to determine the final output model
# only one must be specified
# the first condition to be satisfied will terminate the process
terminationConversion={
'butane': .99,
},
terminationTime=(40, 's'),
# the next two optional values specify how RMG computes sensitivities of
# rate coefficients with respect to species concentrations.
# sensitivity contains a list of species' labels to conduct sensitivity analysis on.
# sensitvityThreshold is the required sensitiviy to be recorded in the csv output file
# sensitivity=['CH4'],
# sensitivityThreshold=0.0001,
)
# liquidReactor(
# temperature=(500, 'K'),
# initialConcentrations={
# "N2": 4,
# "O2": 1,
# "CO": 1,
# },
# terminationConversion=None,
# terminationTime=(3600, 's'),
# sensitivity=None,
# sensitivityThreshold=1e-3
# )
# liquid reactors also have solvents, you can specify one solvent
# list of solvents available at : http://rmg.mit.edu/database/solvation/libraries/solvent/
# solvation('water')
# determines absolute and relative tolerances for ODE solver and sensitivities.
# normally this doesn't cause many issues and is modified after other issues are
# ruled out
simulator(
atol=1e-16,
rtol=1e-8,
# sens_atol=1e-6,
# sens_rtol=1e-4,
)
# used to add species to the model and to reduce memory usage by removing unimportant additional species.
# all relative values are normalized by a characteristic flux at that time point
model(
# determines the relative flux to put a species into the core.
# A smaller value will result in a larger, more complex model
# when running a new model, it is recommended to start with higher values and then decrease to converge on the model
toleranceMoveToCore=0.1,
# comment out the next three terms to disable pruning
# determines the relative flux needed to not remove species from the model.
# Lower values will keep more species and utilize more memory
toleranceKeepInEdge=0.01,
# determines when to stop a ODE run to add a species.
# Lower values will improve speed.
# if it is too low, may never get to the end simulation to prune species.
toleranceInterruptSimulation=1,
# number of edge species needed to accumulate before pruning occurs
# larger values require more memory and will prune less often
maximumEdgeSpecies=100000,
# minimum number of core species needed before pruning occurs.
    # this prevents pruning while the kinetic model is still far from complete
minCoreSizeForPrune=50,
# make sure that the pruned edge species have existed for a set number of RMG iterations.
# the user can specify to increase it from the default value of 2
minSpeciesExistIterationsForPrune=2,
# filter the reactions during the enlarge step to omit species from reacting if their
# concentration are deemed to be too low
filterReactions=False,
# for bimolecular reactions, will only allow them to react if
# filterThreshold*C_A*C_B > toleranceMoveToCore*characteristic_rate
# and if filterReactions=True
filterThreshold=1e8,
)
options(
# provides a name for the seed mechanism produced at the end of an rmg run default is 'Seed'
name='SeedName',
# if True every iteration it saves the current model as libraries/seeds
# (and deletes the old one)
# Unlike HTML this is inexpensive time-wise
# note a seed mechanism will be generated at the end of a completed run and some incomplete
# runs even if this is set as False
generateSeedEachIteration=True,
# If True the mechanism will also be saved directly as kinetics and thermo libraries in the database
saveSeedToDatabase=False,
# only option is 'si'
units='si',
# how often you want to save restart files.
# takes significant amount of time. comment out if you don't want to save
saveRestartPeriod=None,
# Draws images of species and reactions and saves the model output to HTML.
# May consume extra memory when running large models.
generateOutputHTML=True,
# generates plots of the RMG's performance statistics. Not helpful if you just want a model.
generatePlots=False,
# saves mole fraction of species in 'solver/' to help you create plots
saveSimulationProfiles=False,
# gets RMG to output comments on where kinetics were obtained in the chemkin file.
# useful for debugging kinetics but increases memory usage of the chemkin output file
verboseComments=False,
# gets RMG to generate edge species chemkin files. Uses lots of memory in output.
# Helpful for seeing why some reaction are not appearing in core model.
saveEdgeSpecies=False,
# Sets a time limit in the form DD:HH:MM:SS after which the RMG job will stop. Useful for profiling on jobs that
# do not converge.
# wallTime = '00:00:00',
# Forces RMG to import library reactions as reversible (default). Otherwise, if set to True, RMG will import library
    # reactions while keeping the reversibility as is.
keepIrreversible=False,
    # Allows families with three products to react in the reverse direction (default).
trimolecularProductReversible=True,
)
# optional module allows for correction to unimolecular reaction rates at low pressures and/or temperatures.
pressureDependence(
# two methods available: 'modified strong collision' is faster and less accurate than 'reservoir state'
method='modified strong collision',
# these two categories determine how fine energy is descretized.
# more grains increases accuracy but takes longer
maximumGrainSize=(0.5, 'kcal/mol'),
minimumNumberOfGrains=250,
# the conditions for the rate to be output over
# parameter order is: low_value, high_value, units, internal points
temperatures=(300, 2200, 'K', 2),
pressures=(0.01, 100, 'bar', 3),
# The two options for interpolation are 'PDepArrhenius' (no extra arguments) and
# 'Chebyshev' which is followed by the number of basis sets in
# Temperature and Pressure. These values must be less than the number of
# internal points specified above
interpolation=('Chebyshev', 6, 4),
# turns off pressure dependence for molecules with number of atoms greater than the number specified below
# this is due to faster internal rate of energy transfer for larger molecules
maximumAtoms=15,
)
# optional block adds constraints on what RMG can output.
# This is helpful for improving the efficiency of RMG, but wrong inputs can lead to many errors.
generatedSpeciesConstraints(
# allows exceptions to the following restrictions
allowed=['input species', 'seed mechanisms', 'reaction libraries'],
# maximum number of each atom in a molecule
maximumCarbonAtoms=4,
maximumOxygenAtoms=7,
maximumNitrogenAtoms=0,
maximumSiliconAtoms=0,
maximumSulfurAtoms=0,
# max number of non-hydrogen atoms
# maximumHeavyAtoms=20,
# maximum radicals on a molecule
maximumRadicalElectrons=1,
# maximum number of singlet carbenes (lone pair on a carbon atom) in a molecule
maximumSingletCarbenes=1,
# maximum number of radicals on a molecule with a singlet carbene
# should be lower than maximumRadicalElectrons in order to have an effect
maximumCarbeneRadicals=0,
# If this is false or missing, RMG will throw an error if the more less-stable form of O2 is entered
# which doesn't react in the RMG system. normally input O2 as triplet with SMILES [O][O]
# allowSingletO2=False,
# maximum allowed number of non-normal isotope atoms:
# maximumIsotopicAtoms=2,
)
# optional block allows thermo to be estimated through quantum calculations
# quantumMechanics(
# # the software package for calculations...can use 'mopac' or 'gaussian' if installed
# software='mopac',
# # methods available for calculations. 'pm2' 'pm3' or 'pm7' (last for mopac only)
# method='pm3',
# # where to store calculations
# fileStore='QMfiles',
# # where to store temporary run files
# scratchDirectory=None,
# # onlyCyclics allows linear molecules to be calculated using Benson group additivity... need to verify
# onlyCyclics=True,
# # how many radicals should be utilized in the calculation.
# # If the amount of radicals is more than this, RMG will use hydrogen bond incrementation method
# maxRadicalNumber=0,
# )
# optional block allows thermo to be estimated through ML estimator
# mlEstimator(
# thermo=True,
# # Name of folder containing ML architecture and parameters in database
# name='main',
# # Limits on atom numbers
# minHeavyAtoms=1,
# maxHeavyAtoms=None,
# minCarbonAtoms=0,
# maxCarbonAtoms=None,
# minOxygenAtoms=0,
# maxOxygenAtoms=None,
# minNitrogenAtoms=0,
# maxNitrogenAtoms=None,
# # Limits on cycles
# onlyCyclics=False,
# minCycleOverlap=0, # specifies the minimum number of atoms that must be shared between any two cycles
# # If the estimated uncertainty of the thermo prediction is greater than
# # any of these values, then don't use the ML estimate
# H298UncertaintyCutoff=(3.0, 'kcal/mol'),
# S298UncertaintyCutoff=(2.0, 'cal/(mol*K)'),
# CpUncertaintyCutoff=(2.0, 'cal/(mol*K)')
# )
| StarcoderdataPython |
3247315 | <reponame>ukraine-war-info/ukraine-news-bot<gh_stars>1-10
from datetime import datetime
from typing import Any
import rich
from rich.table import Table
class timer:
    """Records its creation time so elapsed seconds can be read back later."""

    def __init__(self) -> None:
        self.time = datetime.now()  # start timestamp

    def getTime(self) -> float:
        """Return seconds elapsed since this timer was created.

        FIX: the annotation previously claimed ``str``, but
        ``timedelta.total_seconds()`` returns a float.
        """
        delta = datetime.now() - self.time
        return delta.total_seconds()
class Console:
    """Console logger built on ``rich`` with optional file persistence and named timers."""

    def __init__(self, saveToFile: bool = False) -> None:
        self.timers: dict = {}  # timer name -> timer instance (see `timer` class)
        self.save = saveToFile  # when True, every message is appended under ./logs/

    def _init_grid(self) -> "Table":
        """Build the two-column rich grid used to render timestamp + message."""
        grid = Table.grid()
        grid.add_column()
        grid.add_column()
        return grid

    def _get_time(self) -> str:
        """Current wall-clock time as a rich-markup-styled ``[HH:MM:SS]`` prefix."""
        return datetime.now().strftime("[grey23][bold][[/bold]%H:%M:%S[bold]][/bold][/grey23] ")

    def toFile(self, toLog, type: str) -> None:
        """Append a message to today's log file when saving is enabled.

        ``toLog`` may be a plain string or an iterable of message parts.
        FIX: the original always did ``" ".join(map(str, toLog))``, so the
        plain strings passed by time()/timeEnd()/timeLog() had every
        character joined with spaces ("T i m e r ...").
        """
        if not self.save:
            return
        from pathlib import Path  # local import keeps module-level deps unchanged
        Path("./logs").mkdir(parents=True, exist_ok=True)  # robustness: dir may not exist yet
        now = datetime.now()
        filename = now.strftime("./logs/%Y-%m-%d") + "-log.txt"
        logTime = now.strftime("%Y-%m-%d %H:%M:%S")
        message = toLog if isinstance(toLog, str) else " ".join(map(str, toLog))
        with open(filename, "a") as f:
            f.write(f"{logTime} [{type}] {message}\n")

    def log(self, *toLog: Any) -> None:
        """Print an unstyled message."""
        grid = self._init_grid()
        grid.add_row(self._get_time(), " ".join(map(str, toLog)))
        rich.print(grid)
        self.toFile(toLog, "LOG")

    def info(self, *toLog: Any) -> None:
        """Print a cyan INFO-tagged message."""
        grid = self._init_grid()
        message = " ".join(map(str, toLog))
        grid.add_row(self._get_time(), f"[white on cyan] INFO [/white on cyan] [cyan bold]{message}[/cyan bold]")
        rich.print(grid)
        self.toFile(toLog, "INFO")

    def warn(self, *toWarn: Any) -> None:
        """Print an orange WARN-tagged message."""
        grid = self._init_grid()
        message = " ".join(map(str, toWarn))
        grid.add_row(self._get_time(), f"[white on orange_red1] WARN [/white on orange_red1] [yellow]{message}[/yellow]")
        rich.print(grid)
        self.toFile(toWarn, "WARN")

    def error(self, *toError: Any) -> None:
        """Print a red ERROR-tagged message."""
        grid = self._init_grid()
        message = " ".join(map(str, toError))
        grid.add_row(self._get_time(), f"[white on red] ERROR [/white on red] [red bold]{message}[/red bold]")
        rich.print(grid)
        self.toFile(toError, "ERROR")

    def clear(self) -> None:
        """Clear the terminal via ANSI escape codes."""
        print("\033[H\033[J")

    def time(self, timerName: str = "timer") -> None:
        """Start (or restart) the named timer."""
        self.timers[timerName] = timer()
        self.toFile(f"Timer {timerName} started.", "TIME")

    def timeEnd(self, timerName: str = "timer") -> "float | None":
        """Stop the named timer and return its elapsed seconds (None if unknown)."""
        clock = self.timers.pop(timerName, None)
        if clock is None:
            return None
        seconds = clock.getTime()
        self.toFile(f"Timer {timerName} ended. Time: {seconds} seconds.", "TIME")
        return seconds

    def timeLog(self, timerName: str = "timer") -> "float | None":
        """Return the named timer's current elapsed seconds without stopping it."""
        clock = self.timers.get(timerName)
        if clock is None:
            return None
        seconds = clock.getTime()
        self.toFile(f"Timer {timerName} logged at {seconds}.", "TIME")
        return seconds
98191 | # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import hashlib
import json
import re
import unittest
from collections import OrderedDict
from enum import Enum
from pants.base.hash_utils import CoercingEncoder, hash_all, json_hash
from pants.util.ordered_set import OrderedSet
class TestHashUtils(unittest.TestCase):
    def test_hash_all(self):
        """hash_all should sha1 the concatenation of its string inputs."""
        digest = hashlib.sha1(b"jakejones").hexdigest()
        self.assertEqual(digest, hash_all(["jake", "jones"]))
class CoercingJsonEncodingTest(unittest.TestCase):
    """Pins the JSON text produced by CoercingEncoder, which canonicalizes
    inputs (sorting collections, stringifying non-string dict keys) so the
    encoded form is deterministic."""
    @staticmethod
    def _coercing_json_encode(o):
        # Helper: encode through the canonicalizing encoder under test.
        return json.dumps(o, cls=CoercingEncoder)
    def test_normal_object_encoding(self):
        self.assertEqual(self._coercing_json_encode({}), "{}")
        self.assertEqual(self._coercing_json_encode(()), "[]")
        self.assertEqual(self._coercing_json_encode([]), "[]")
        self.assertEqual(self._coercing_json_encode(set()), "[]")
        self.assertEqual(self._coercing_json_encode([{}]), "[{}]")
        self.assertEqual(self._coercing_json_encode([("a", 3)]), '[["a", 3]]')
        self.assertEqual(self._coercing_json_encode({"a": 3}), '{"a": 3}')
        self.assertEqual(self._coercing_json_encode([{"a": 3}]), '[{"a": 3}]')
        self.assertEqual(self._coercing_json_encode({1}), "[1]")
    def test_rejects_ordered_dict(self):
        # Already-ordered collections are rejected rather than silently re-sorted.
        with self.assertRaisesRegex(
            TypeError, r"CoercingEncoder does not support OrderedDict inputs"
        ):
            self._coercing_json_encode(OrderedDict([("a", 3)]))
    def test_non_string_dict_key_coercion(self):
        # Tuple keys become their JSON-encoded string form.
        self.assertEqual(
            self._coercing_json_encode({("a", "b"): "asdf"}), r'{"[\"a\", \"b\"]": "asdf"}'
        )
    def test_string_like_dict_key_coercion(self):
        self.assertEqual(self._coercing_json_encode({"a": 3}), '{"a": 3}')
        self.assertEqual(self._coercing_json_encode({b"a": 3}), '{"a": 3}')
    def test_nested_dict_key_coercion(self):
        self.assertEqual(self._coercing_json_encode({(1,): {(2,): 3}}), '{"[1]": {"[2]": 3}}')
    def test_collection_ordering(self):
        # Unordered collections are emitted in sorted order; sequences keep theirs.
        self.assertEqual(self._coercing_json_encode({2, 1, 3}), "[1, 2, 3]")
        self.assertEqual(self._coercing_json_encode({"b": 4, "a": 3}), '{"a": 3, "b": 4}')
        self.assertEqual(self._coercing_json_encode([("b", 4), ("a", 3)]), '[["b", 4], ["a", 3]]')
        self.assertEqual(self._coercing_json_encode([{"b": 4, "a": 3}]), '[{"b": 4, "a": 3}]')
    def test_enum(self) -> None:
        # Enum members encode as their values.
        class Test(Enum):
            dog = 0
            cat = 1
            pig = 2
        self.assertEqual(self._coercing_json_encode([Test.dog, Test.cat, Test.pig]), "[0, 1, 2]")
class JsonHashingTest(unittest.TestCase):
    """Pins the sha1 hex digests produced by json_hash so that hashing of
    canonicalized JSON stays stable across releases."""
    def test_known_checksums(self):
        """Check a laundry list of supported inputs to stable_json_sha1().
        This checks both that the method can successfully handle the type of input object, but also
        that the hash of specific objects remains stable.
        """
        self.assertEqual(json_hash({}), "bf21a9e8fbc5a3846fb05b4fa0859e0917b2202f")
        self.assertEqual(json_hash(()), "97d170e1550eee4afc0af065b78cda302a97674c")
        self.assertEqual(json_hash([]), "97d170e1550eee4afc0af065b78cda302a97674c")
        self.assertEqual(json_hash(set()), "97d170e1550eee4afc0af065b78cda302a97674c")
        self.assertEqual(json_hash([{}]), "4e9950a1f2305f56d358cad23f28203fb3aacbef")
        self.assertEqual(json_hash([("a", 3)]), "d6abed2e53c1595fb3075ecbe020365a47af1f6f")
        self.assertEqual(json_hash({"a": 3}), "9e0e6d8a99c72daf40337183358cbef91bba7311")
        self.assertEqual(json_hash([{"a": 3}]), "8f4e36849a0b8fbe9c4a822c80fbee047c65458a")
        self.assertEqual(json_hash({1}), "f629ae44b7b3dcfed444d363e626edf411ec69a8")
    def test_rejects_ordered_collections(self):
        # Mirrors the encoder tests: pre-ordered inputs are a type error.
        with self.assertRaisesRegex(
            TypeError, re.escape("CoercingEncoder does not support OrderedDict inputs")
        ):
            json_hash(OrderedDict([("a", 3)]))
        with self.assertRaisesRegex(
            TypeError, re.escape("CoercingEncoder does not support OrderedSet inputs")
        ):
            json_hash(OrderedSet([3]))
    def test_non_string_dict_key_checksum(self):
        self.assertEqual(
            json_hash({("a", "b"): "asdf"}), "45deafcfa78a92522166c77b24f5faaf9f3f5c5a"
        )
    def test_string_like_dict_key_checksum(self):
        # str and bytes keys coerce to the same canonical form, hence equal hashes.
        self.assertEqual(json_hash({"a": 3}), "9e0e6d8a99c72daf40337183358cbef91bba7311")
        self.assertEqual(json_hash({b"a": 3}), "9e0e6d8a99c72daf40337183358cbef91bba7311")
    def test_nested_dict_checksum(self):
        self.assertEqual(json_hash({(1,): {(2,): 3}}), "63124afed13c4a92eb908fe95c1792528abe3621")
    def test_checksum_ordering(self):
        # Unordered collections hash identically regardless of insertion order.
        self.assertEqual(json_hash({2, 1, 3}), "a01eda32e4e0b1393274e91d1b3e9ecfc5eaba85")
        self.assertEqual(json_hash({"b": 4, "a": 3}), "6348df9579e7a72f6ec3fb37751db73b2c97a135")
        self.assertEqual(
            json_hash([("b", 4), ("a", 3)]), "8e72bb976e71ea81887eb94730655fe49c454d0c"
        )
        self.assertEqual(json_hash([{"b": 4, "a": 3}]), "4735d702f51fb8a98edb9f6f3eb3df1d6d38a77f")
| StarcoderdataPython |
6443417 | import logging
import os
from cryptography.fernet import Fernet
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from shhh.api import api
from shhh.extensions import db, scheduler
def register_blueprints(app):
    """Register application blueprints."""
    # Currently only the API blueprint; extend here as more blueprints appear.
    app.register_blueprint(api)
def create_app(env=os.environ.get("FLASK_ENV")):
    """Application factory: configure logging, pick a config object for `env`,
    wire up the database/scheduler extensions, and return the Flask app.
    NOTE(review): the default `env` is captured once at import time, not per
    call -- confirm this is intended for long-lived processes.
    """
    logging.basicConfig(
        level=logging.INFO,
        format=("[%(asctime)s] [sev %(levelno)s] [%(levelname)s] "
                "[%(name)s]> %(message)s"),
        datefmt="%a, %d %b %Y %H:%M:%S")
    # Disable werkzeug logging below WARNING.
    logging.getLogger("werkzeug").setLevel(logging.WARNING)
    app = Flask(__name__)
    app.logger.info(f"Loading env {env}")
    # Map deployment environments to config objects; unknown envs fall back to production.
    configurations = {
        "dev-local": "shhh.config.DefaultConfig",
        "testing": "shhh.config.TestConfig",
        "dev-docker": "shhh.config.DockerConfig",
        "heroku": "shhh.config.HerokuConfig",
        "production": "shhh.config.ProductionConfig",
    }
    app.config.from_object(
        configurations.get(env, "shhh.config.ProductionConfig"))
    db.init_app(app)
    scheduler.init_app(app)
    with app.app_context():
        register_blueprints(app)
        db.create_all()
        scheduler.start()
    # Imported late, presumably to avoid a circular import -- verify.
    from shhh import views
    return app
| StarcoderdataPython |
12848129 | ####################################
# File name: models.py #
# Author: <NAME> #
####################################
from rss_skill import db
class Feed(db.Model):
    """SQLAlchemy model for a tracked RSS feed."""
    __tablename__ = 'feed'
    rss_i = db.Column(db.Integer, primary_key=True, autoincrement=True)  # surrogate key
    name = db.Column(db.Text, nullable=False)  # human-readable feed name
    link = db.Column(db.Text, nullable=False)  # feed URL
    # article_1/article_2 presumably hold the two most recently seen article
    # links -- confirm against the code that updates them.
    article_1 = db.Column(db.Text, nullable=False)
    article_2 = db.Column(db.Text, nullable=False)
    post = db.Column(db.String(32), nullable=False)  # NOTE(review): semantics unclear; initialized empty
    def __init__(self, name, link, article_1, article_2):
        self.name = name
        self.link = link
        self.article_1 = article_1
        self.article_2 = article_2
        self.post = ""
    def __repr__(self):
        return f'Feed {self.rss_i}: {self.name}'
| StarcoderdataPython |
6524801 | <reponame>euribates/advent_of_code_2018
#!/usr/bin/env python3
import tools
if __name__ == "__main__":
with open('input.txt', 'r') as fh:
data = fh.read().strip()
dl = tools.DL(data)
dl.reduce()
solution = len(dl)
print(f'Solution of part 1: {solution}')
| StarcoderdataPython |
11279361 | <reponame>UserBlackBox/ctf-functions<filename>setup.py<gh_stars>0
from setuptools import setup, find_packages

# Use the README as the long description shown on PyPI.
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()

setup(
    name="ctf_functions",
    packages=find_packages(),
    version="0.1.0",
    entry_points={
    },
    author="<NAME>",
    author_email="<EMAIL>",
    description="Python library with functions useful for CTF data analysis/decoding",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/UserBlackBox/ctf_functions",
    classifiers=[
        "Programming Language :: Python :: 3",
        "Operating System :: OS Independent",
        # FIX: a missing comma here previously concatenated this classifier
        # with the next one into a single invalid classifier string.
        "License :: OSI Approved :: BSD License",
        "Natural Language :: English",
    ],
    python_requires='>=3.8',
    install_requires=[]
)
| StarcoderdataPython |
4918587 | <filename>2017/03.py
#! /usr/bin/env python3
import itertools as it
import sys
import time
from typing import Dict, Generator, List, Tuple
# Runtime flags, toggled by command-line arguments below.
test = False
debug = False
stdin = False
INFILENAME = "inputs/03.txt"
for arg in sys.argv:
    if arg == "--test":
        test = True
        INFILENAME = "inputs/03.test.txt"
    if arg == "--debug":
        debug = True
    if arg == "--stdin":
        stdin = True
# Utilities
def grid_adjs(
    coord: Tuple[int, ...],
    bounds: Tuple[Tuple[int, int], ...] = None,
    inclusive: bool = True,
) -> Generator[Tuple[int, ...], None, None]:
    """Yield all adjacent cells (including diagonals) of an N-dimensional coord.

    When `bounds` is given as (low, high) per dimension, neighbors outside it
    are skipped; `inclusive` controls whether the bound values themselves count.
    """
    # Every combination of -1/0/+1 offsets across the N dimensions.
    for delta in it.product((-1, 0, 1), repeat=len(coord)):
        if not any(delta):
            continue  # the all-zero offset is the cell itself
        neighbor = tuple(c + d for c, d in zip(coord, delta))
        if bounds is not None:
            if inclusive:
                ok = all(low <= v <= high for v, (low, high) in zip(neighbor, bounds))
            else:
                ok = all(low < v < high for v, (low, high) in zip(neighbor, bounds))
            if not ok:
                continue
        yield neighbor
def manhattan(x1: int, y1: int, x2: int = 0, y2: int = 0) -> int:
    """Manhattan (L1) distance between (x1, y1) and (x2, y2); defaults to the origin."""
    dx = x1 - x2
    dy = y1 - y2
    return abs(dx) + abs(dy)
print(f"\n{'=' * 30}\n")
# Input parsing
input_start = time.time()
if stdin:
    lines: List[str] = [l.strip() for l in sys.stdin.readlines()]
else:
    with open(INFILENAME) as f:
        lines: List[str] = [l.strip() for l in f.readlines()]
# This puzzle's input is a single integer on the first line.
X = int(lines[0])
input_end = time.time()
# Shared
########################################################################################
# No shared precomputation for this puzzle; timers kept for the debug report.
shared_start = time.time()
shared_end = time.time()
# Part 1
########################################################################################
print("Part 1:")
def part1() -> int:
    """Walk the square spiral outward cell by cell up to value X, then return
    the Manhattan distance from X's cell back to the origin."""
    grid = {}
    x = 0
    y = 0
    d = (1, 0)  # current travel direction (dx, dy); the spiral starts heading right
    for i in range(1, X + 1):
        grid[(x, y)] = i
        x += d[0]
        y += d[1]
        # Turn at each corner of the square spiral (the four corner conditions
        # below correspond to the four sides of the current ring).
        if -(x - 1) == y and x > 0:
            d = (0, 1)
        elif x == y and x > 0:
            d = (-1, 0)
        elif x < 0 and -x == y:
            d = (0, -1)
        elif x == y and x < 0:
            d = (1, 0)
    # NOTE(review): the last cell written is X itself, so this scan could be
    # replaced by remembering the final coordinate.
    for (a, b), v in grid.items():
        if v == X:
            return manhattan(0, 0, a, b)
    assert False  # unreachable: X is always placed during the walk
# Time and print the part-1 answer, guarding against previously-failed guesses.
part1_start = time.time()
ans_part1 = part1()
part1_end = time.time()
print(ans_part1)
# Store the attempts that failed here.
tries = [537, 538, 589]
print("Tries Part 1:", tries)
assert ans_part1 not in tries, "Same as an incorrect answer!"
# Regression Test
assert test or ans_part1 == 419
# Part 2
########################################################################################
print("\nPart 2:")
def part2() -> int:
    """Walk the same spiral, but store in each cell the sum of its already
    filled neighbors; return the first stored value that exceeds X."""
    ans = 0
    grid: Dict[Tuple[int, int], int] = {(0, 0): 1}
    def sum_adjs(coord):
        # Sum the values of all already-populated neighbors of `coord`.
        s = 0
        for a in grid_adjs(coord):
            if grid.get(a):
                s += grid[a]
        return s
    # Start one step right of the origin, already turning upward.
    x = 1
    y = 0
    d = (0, 1)
    for _ in range(1, X + 1):
        grid[(x, y)] = sum_adjs((x, y))
        if grid[(x, y)] > X:
            return grid[(x, y)]
        x += d[0]
        y += d[1]
        # Same corner-turn logic as part1().
        if -(x - 1) == y and x > 0:
            d = (0, 1)
        elif x == y and x > 0:
            d = (-1, 0)
        elif x < 0 and -x == y:
            d = (0, -1)
        elif x == y and x < 0:
            d = (1, 0)
    return ans  # only reached if no value exceeded X within X steps
# Time and print the part-2 answer, then optionally dump timing stats.
part2_start = time.time()
ans_part2 = part2()
part2_end = time.time()
print(ans_part2)
# Store the attempts that failed here.
tries2 = []
print("Tries Part 2:", tries2)
assert ans_part2 not in tries2, "Same as an incorrect answer!"
# Regression Test
assert test or ans_part2 == 295229
if debug:
    # Per-phase wall-clock timings, printed only with --debug.
    input_parsing = input_end - input_start
    shared = shared_end - shared_start
    part1_time = part1_end - part1_start
    part2_time = part2_end - part2_start
    print()
    print("DEBUG:")
    print(f"Input parsing: {input_parsing * 1000}ms")
    print(f"Shared: {shared * 1000}ms")
    print(f"Part 1: {part1_time * 1000}ms")
    print(f"Part 2: {part2_time * 1000}ms")
    print(f"TOTAL: {(input_parsing + shared + part1_time + part2_time) * 1000}ms")
| StarcoderdataPython |
11316775 | """Package with shell specific actions, each shell class should
implement `from_shell`, `to_shell`, `app_alias`, `put_to_history` and
`get_aliases` methods.
"""
import os
#from psutil import Process
from .bash import Bash
from .fish import Fish
from .generic import Generic
from .powershell import Powershell
from .tcsh import Tcsh
from .zsh import Zsh
# Maps a shell executable name to its handler class; Tcsh covers both csh and
# tcsh, Powershell covers both Windows PowerShell and pwsh.
shells = {
    "bash": Bash,
    "fish": Fish,
    "zsh": Zsh,
    "csh": Tcsh,
    "tcsh": Tcsh,
    "powershell": Powershell,
    "pwsh": Powershell,
}
def _get_shell_from_env():
    """Return a shell instance named by $TF_SHELL, or None when unset/unknown."""
    shell_cls = shells.get(os.environ.get("TF_SHELL"))
    if shell_cls is not None:
        return shell_cls()
def _get_shell_from_proc():
    """Detect the parent process's shell via /proc, falling back to Generic.

    FIX: /proc/<pid>/exe only exists on Linux -- guard the readlink so other
    platforms (and vanished parents) degrade to Generic instead of raising.
    """
    try:
        proc = os.readlink('/proc/%d/exe' % os.getppid())
    except OSError:
        return Generic()
    name = os.path.basename(proc)
    if name in shells:
        return shells[name]()
    return Generic()
shell = _get_shell_from_env() or _get_shell_from_proc()
| StarcoderdataPython |
227646 | # coding:utf-8
from crossknight.sqink import createLogger
from crossknight.sqink.domain import isUuid
from crossknight.sqink.domain import NoteStatus
from crossknight.sqink.markdown import renderHtml
from crossknight.sqink.plist import marshalNote
from crossknight.sqink.plist import unmarshalNote
from crossknight.sqink.provider import AConnectionError
from crossknight.sqink.provider import InvalidProxyError
from crossknight.sqink.provider import RemoteNoteProvider
from crossknight.sqink.provider import TokenExpiredError
from dropbox import create_session
from dropbox import Dropbox
from dropbox.exceptions import ApiError
from dropbox.exceptions import InternalServerError
from dropbox.files import FileMetadata
from dropbox.files import WriteMode
from dropbox.oauth import DropboxOAuth2FlowNoRedirect
from requests.exceptions import ConnectionError
from requests.exceptions import HTTPError
from requests.exceptions import ProxyError
_LOG = createLogger("dbox")
def _proxies(proxyHost, proxyPort, proxyUser, proxyPassword):
proxies = {}
if proxyHost:
proxy = proxyHost + ":" + str(proxyPort or 80)
if proxyUser:
proxy = proxyUser + ":" + (proxyPassword or "") + "@" + proxy
proxies["http"] = proxy
proxies["https"] = proxy
return proxies
def online(f):
    """Raises InvalidProxyError when a HTTP proxy is required, or AConnectionError when connection fails.

    FIX: apply functools.wraps so the wrapped function keeps its
    __name__/__doc__ metadata.
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except ProxyError:
            raise InvalidProxyError
        except ConnectionError:
            raise AConnectionError
    return wrapper
def expires(f):
    """Decorator raising TokenExpiredError when the token is expired/revoked.

    Dropbox surfaces an expired/revoked authorization as InternalServerError;
    translate it to the provider-level TokenExpiredError.  functools.wraps
    preserves the wrapped callable's metadata for logging/debugging.
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except InternalServerError:
            raise TokenExpiredError
    return wrapper
class SyncFolder:
    """Well-known Dropbox app-folder layouts that notes can sync against."""
    # Day One journal layout (note files use the ".doentry" extension).
    DayOne = "/apps/Day One/journal.dayone"
    # Native Narrate layout.
    Narrate = "/apps/Narrate"
class DropboxAuthorizator:
    """Handles the Dropbox OAuth2 (no-redirect) flow and folder bootstrap."""
    # App credentials (redacted in this copy of the source).
    __APP_KEY = "<KEY>"
    __APP_SECRET = "<KEY>"
    def __init__(self, proxyHost=None, proxyPort=None, proxyUser=None, proxyPassword=None):
        """Create the OAuth flow, routing its HTTP traffic through the proxy if given."""
        self.__proxies = _proxies(proxyHost, proxyPort, proxyUser, proxyPassword)
        self.__oauth = DropboxOAuth2FlowNoRedirect(self.__APP_KEY, self.__APP_SECRET)
        self.__oauth.requests_session.proxies = self.__proxies
    def authorizationUrl(self):
        """Return the URL the user must visit to obtain an authorization code."""
        return self.__oauth.start()
    @online
    def authorize(self, code):
        """Exchange the user-provided authorization *code* for an access token.

        Raises TokenExpiredError when Dropbox rejects the code (HTTPError).
        """
        try:
            return self.__oauth.finish(code.strip()).access_token
        except HTTPError:
            raise TokenExpiredError
    def checkFolder(self, accessToken, folder):
        """Ensure the sync folder structure exists; return True on success."""
        try:
            client = Dropbox(accessToken, session=create_session(proxies=self.__proxies))
            self.__createFolder(client, folder + "/entries/deleted")
            self.__createFolder(client, folder + "/photos/deleted")
        except ApiError:
            return False
        return True
    @staticmethod
    def __createFolder(client, path):
        """Create *path* remotely, treating an existing folder as success."""
        _LOG.info("Creating folder: %s", path)
        try:
            client.files_create_folder(path)
        except ApiError as e:
            # A folder-conflict means it already exists, which is fine.
            if e.error.is_path() and e.error.get_path().is_conflict() and e.error.get_path().get_conflict().is_folder():
                _LOG.info("Folder %s already exists", path)
            else:
                raise e
class FileEntry:
    """Location (folder + name) and modification time of one remote file."""
    def __init__(self, folder, name, lastModified):
        self.folder = folder
        self.name = name
        self.lastModified = lastModified
    @staticmethod
    def fromMetadata(metadata):
        """Build a FileEntry from Dropbox metadata; None for non-file entries."""
        if not isinstance(metadata, FileMetadata):
            return None
        head, sep, tail = metadata.path_display.rpartition("/")
        # Root-level files (no separator, or empty head) live in "/".
        folder = (head or "/") if sep else "/"
        return FileEntry(folder, tail, metadata.client_modified)
class DropboxNoteProvider(RemoteNoteProvider):
    """RemoteNoteProvider backed by a Dropbox folder.

    Layout: notes under ``<folder>/entries``, tombstones of removed notes
    under ``<folder>/entries/deleted``, JPEG photos under
    ``<folder>/photos``.  When *folder* is the Day One journal, note files
    carry the ".doentry" extension.
    """
    __DAY_ONE_EXTENSION = ".doentry"
    def __init__(self, accessToken, folder, proxyHost=None, proxyPort=None, proxyUser=None, proxyPassword=None):
        """Create a provider for *folder* authenticated with *accessToken*."""
        proxies = _proxies(proxyHost, proxyPort, proxyUser, proxyPassword)
        self.__token = accessToken
        self.__basePath = folder
        self.__notesPath = folder + "/entries"
        self.__removedNotesPath = self.__notesPath + "/deleted"
        self.__photosPath = folder + "/photos"
        self.__client = Dropbox(self.__token, session=create_session(proxies=proxies))
        # uuid -> NoteStatus from the last sync(); consulted by get/add/
        # update/remove to decide whether photos or tombstones need work.
        self.__notesCache = {}
        self.__dayOneFlavor = folder == SyncFolder.DayOne
    @online
    @expires
    def sync(self):
        """List every note/photo/tombstone and return {uuid: NoteStatus}.

        Also repairs inconsistencies between live notes and tombstones:
        a tombstone at least as new as the note deletes the note (and its
        photo) remotely; an older tombstone is itself deleted.
        """
        _LOG.info("Listing all notes and photos")
        folder = self.__client.files_list_folder(self.__basePath, recursive=True)
        files = list(filter(lambda e: e is not None, map(FileEntry.fromMetadata, folder.entries)))
        # Page through the listing until Dropbox reports no more entries.
        while folder.has_more:
            folder = self.__client.files_list_folder_continue(folder.cursor)
            files.extend(filter(lambda e: e is not None, map(FileEntry.fromMetadata, folder.entries)))
        notes = {}
        # Live notes: files in /entries whose (normalized) name is a UUID.
        for file in filter(lambda f: f.folder == self.__notesPath and isUuid(self.__normalizeNoteName(f.name)), files):
            uuid = self.__normalizeNoteName(file.name)
            notes[uuid] = NoteStatus(uuid, file.lastModified)
        # Photos: "<uuid>.jpg" files in /photos; flag the matching note.
        for file in filter(lambda f: f.folder == self.__photosPath and f.name.endswith(".jpg"), files):
            uuid = file.name[:-4]
            if uuid in notes:
                notes[uuid].hasPhoto = True
        # Tombstones: files in /entries/deleted.
        for file in filter(lambda f: f.folder == self.__removedNotesPath and isUuid(self.__normalizeNoteName(f.name)),
                files):
            uuid = self.__normalizeNoteName(file.name)
            if uuid in notes:
                if file.lastModified >= notes[uuid].lastModified:
                    # Tombstone wins: remove the live note (and photo) remotely.
                    _LOG.warning("Sync integrity check deleting note: %s", uuid)
                    try:
                        self.__client.files_delete(self.__notePath(uuid))
                    except ApiError:
                        _LOG.warning("Note %s not found", uuid)
                    if notes[uuid].hasPhoto:
                        _LOG.warning("Sync integrity check deleting photo: %s", uuid)
                        try:
                            self.__client.files_delete(self.__photoPath(uuid))
                        except ApiError:
                            _LOG.warning("Photo %s not found", uuid)
                    del notes[uuid]
                else:
                    # Live note wins: drop the stale tombstone and keep the note.
                    _LOG.warning("Sync integrity check deleting REMOVED note: %s", uuid)
                    try:
                        self.__client.files_delete(self.__removedNotePath(uuid))
                    except ApiError:
                        _LOG.warning("REMOVED note %s not found", uuid)
                    continue
            notes[uuid] = NoteStatus(uuid, file.lastModified, True)
        self.__notesCache = notes
        return notes
    @online
    @expires
    def get(self, uuid):
        """Download note *uuid* (plus photo, if any) and return it rendered."""
        _LOG.info("Getting note: %s", uuid)
        metadata, response = self.__client.files_download(self.__notePath(uuid))
        with response:
            note = unmarshalNote(response.content, metadata.client_modified)
        # Fetch the photo unless the cache says this note has none.
        if uuid not in self.__notesCache or self.__notesCache[uuid].hasPhoto:
            _LOG.info("Getting photo: %s", uuid)
            try:
                with self.__client.files_download(self.__photoPath(uuid))[1] as response:
                    note.photo = response.content
            except ApiError as e:
                # A missing photo is tolerated; anything else propagates.
                if e.error.is_path() and e.error.get_path().is_not_found():
                    _LOG.warning("Photo %s does not exist", uuid)
                else:
                    raise e
        return renderHtml(note)
    @online
    @expires
    def add(self, note):
        """Upload a new note (add-only mode) and reconcile photo/tombstone."""
        uuid = note.uuid
        _LOG.info("Adding note: %s", uuid)
        self.__uploadFile(self.__notePath(uuid), note.lastModified, marshalNote(note), overwrite=False)
        if note.photo:
            _LOG.info("Adding photo: %s", uuid)
            self.__uploadFile(self.__photoPath(uuid), note.lastModified, note.photo)
        elif uuid in self.__notesCache and self.__notesCache[uuid].hasPhoto:
            # The note no longer has a photo but the remote side does.
            _LOG.info("Deleting photo: %s", uuid)
            try:
                self.__client.files_delete(self.__photoPath(uuid))
            except ApiError:
                _LOG.warning("Photo %s not found", uuid)
        # Clean removed note if exists
        if uuid in self.__notesCache and self.__notesCache[uuid].removed:
            _LOG.info("Deleting REMOVED note: %s", uuid)
            try:
                self.__client.files_delete(self.__removedNotePath(uuid))
            except ApiError:
                _LOG.warning("REMOVED note %s not found", uuid)
    @online
    @expires
    def update(self, note):
        """Replace an existing note's content and reconcile its photo."""
        uuid = note.uuid
        # Check if note exists
        if self.__notesCache and (uuid not in self.__notesCache or self.__notesCache[uuid].removed):
            raise RuntimeError("Note[uuid=%s] does not exist" % uuid)
        # Dropbox does not update the client_modifed date when the content hasn't changed. So the note is removed first.
        _LOG.info("Trying to delete old note before updating: %s", uuid)
        try:
            self.__client.files_delete(self.__notePath(uuid))
        except ApiError:
            pass
        _LOG.info("Updating note: %s", uuid)
        self.__uploadFile(self.__notePath(uuid), note.lastModified, marshalNote(note))
        if note.photo:
            _LOG.info("Updating photo: %s", uuid)
            self.__uploadFile(self.__photoPath(uuid), note.lastModified, note.photo)
        elif uuid not in self.__notesCache or self.__notesCache[uuid].hasPhoto:
            # No local photo: make sure any remote photo is removed too.
            _LOG.info("Deleting photo: %s", uuid)
            try:
                self.__client.files_delete(self.__photoPath(uuid))
            except ApiError:
                _LOG.warning("Photo %s not found", uuid)
    @online
    @expires
    def remove(self, note):
        """Delete a note and its photo remotely, then write a tombstone."""
        uuid = note.uuid
        # Remove note if exists
        if uuid in self.__notesCache and not self.__notesCache[uuid].removed:
            _LOG.info("Deleting note: %s", uuid)
            try:
                self.__client.files_delete(self.__notePath(uuid))
            except ApiError:
                _LOG.warning("Note %s not found", uuid)
        # Remove photo if exists
        if uuid in self.__notesCache and self.__notesCache[uuid].hasPhoto:
            _LOG.info("Deleting photo: %s", uuid)
            try:
                self.__client.files_delete(self.__photoPath(uuid))
            except ApiError:
                _LOG.warning("Photo %s not found", uuid)
        # The tombstone is an empty file whose mtime marks the removal time.
        _LOG.info("Adding REMOVED note: %s", uuid)
        self.__uploadFile(self.__removedNotePath(uuid), note.lastModified, b"")
    def __uploadFile(self, path, lastModified, content, overwrite=True):
        """Upload *content* to *path*, stamping it with *lastModified*."""
        mode = WriteMode.overwrite if overwrite else WriteMode.add
        self.__client.files_upload(content, path, mode=mode, client_modified=lastModified)
    def __normalizeNoteName(self, name):
        """Strip the Day One extension from *name* when syncing a Day One folder."""
        if self.__dayOneFlavor and name.endswith(self.__DAY_ONE_EXTENSION):
            name = name[:-(len(self.__DAY_ONE_EXTENSION))]
        return name
    def __buildNotePath(self, parentPath, uuid):
        """Return the remote path for note *uuid* under *parentPath*."""
        path = parentPath + "/" + uuid
        if self.__dayOneFlavor:
            path += self.__DAY_ONE_EXTENSION
        return path
    def __notePath(self, uuid):
        """Remote path of the live note file for *uuid*."""
        return self.__buildNotePath(self.__notesPath, uuid)
    def __removedNotePath(self, uuid):
        """Remote path of the tombstone file for *uuid*."""
        return self.__buildNotePath(self.__removedNotesPath, uuid)
    def __photoPath(self, uuid):
        """Remote path of the JPEG photo for *uuid*."""
        return self.__photosPath + "/" + uuid + ".jpg"
| StarcoderdataPython |
5138445 | <reponame>leelige/mindspore
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""ckpt_util"""
import os
from mindspore.train.serialization import save_checkpoint, load_checkpoint, load_param_into_net
def save_ckpt(args, G, D, epoch):
    """Save generator and discriminator checkpoints for *epoch*.

    Writes ``G_{epoch}.ckpt`` (and, when *D* is given, ``D_{epoch}.ckpt``)
    into ``args.ckpt_dir``.  Previously *D* was accepted but never saved,
    so the discriminator could not be restored by load_ckpt.
    """
    # TODO: remove stale checkpoints from earlier epochs to bound disk use.
    save_checkpoint(G, os.path.join(args.ckpt_dir, f"G_{epoch}.ckpt"))
    if D is not None:
        save_checkpoint(D, os.path.join(args.ckpt_dir, f"D_{epoch}.ckpt"))
def load_ckpt(args, G, D, epoch):
    """Load the *epoch* checkpoints into G and (optionally) D.

    No-op when ``args.ckpt_dir`` is None; the discriminator branch is
    additionally skipped when *D* is None.
    """
    if args.ckpt_dir is not None:
        param_G = load_checkpoint(os.path.join(
            args.ckpt_dir, f"G_{epoch}.ckpt"))
        load_param_into_net(G, param_G)
    if args.ckpt_dir is not None and D is not None:
        # Bug fix: the discriminator was previously loaded from the
        # generator file ("G_{epoch}.ckpt"), silently restoring the wrong
        # weights into D.
        param_D = load_checkpoint(os.path.join(
            args.ckpt_dir, f"D_{epoch}.ckpt"))
        load_param_into_net(D, param_D)
| StarcoderdataPython |
41519 | <filename>aiomatrix/dispatcher/storage/room_events/engines/__init__.py<gh_stars>1-10
from .sqlite import SqliteEventStorageEngine
| StarcoderdataPython |
60913 | import unittest
import hail as hl
from lib.model.seqr_mt_schema import SeqrVariantSchema
from tests.data.sample_vep import VEP_DATA, DERIVED_DATA
class TestSeqrModel(unittest.TestCase):
    """Tests for SeqrVariantSchema annotations over a small 1kg VCF fixture."""
    def _get_filtered_mt(self, rsid='rs35471880'):
        """Load the fixture VCF, keep only rows matching *rsid*, split multiallelics."""
        mt = hl.import_vcf('tests/data/1kg_30variants.vcf.bgz')
        mt = hl.split_multi(mt.filter_rows(mt.rsid == rsid))
        return mt
    def test_variant_derived_fields(self):
        """Derived per-variant fields must match the precomputed DERIVED_DATA."""
        rsid = 'rs35471880'
        mt = self._get_filtered_mt(rsid).annotate_rows(**VEP_DATA[rsid])
        seqr_schema = SeqrVariantSchema(mt)
        # Chain every annotation builder, then materialize them all at once.
        seqr_schema.sorted_transcript_consequences().doc_id(length=512).variant_id().contig().pos().start().end().ref().alt() \
            .pos().xstart().xstop().xpos().transcript_consequence_terms().transcript_ids().main_transcript().gene_ids() \
            .coding_gene_ids().domains().ac().af().an().annotate_all()
        mt = seqr_schema.select_annotated_mt()
        obj = mt.rows().collect()[0]
        # Cannot do a nested compare because of nested hail objects, so do one by one.
        fields = ['AC', 'AF', 'AN', 'codingGeneIds', 'docId', 'domains', 'end', 'geneIds', 'ref', 'alt', 'start',
                  'variantId', 'transcriptIds', 'xpos', 'xstart', 'xstop', 'contig']
        for field in fields:
            self.assertEqual(obj[field], DERIVED_DATA[rsid][field])
        self.assertEqual(obj['mainTranscript']['transcript_id'], DERIVED_DATA[rsid]['mainTranscript']['transcript_id'])
    def test_variant_genotypes(self):
        """Per-sample genotype structs (num_alt/gq/ab/dp) must match expectations."""
        mt = self._get_filtered_mt()
        seqr_schema = SeqrVariantSchema(mt)
        mt = seqr_schema.genotypes().select_annotated_mt()
        genotypes = mt.rows().collect()[0].genotypes
        actual = {gen['sample_id']: dict(gen) for gen in genotypes}
        expected = {'HG00731': {'num_alt': 0, 'gq': 99, 'ab': 0.0, 'dp': 73.0, 'sample_id': 'HG00731'},
                    'HG00732': {'num_alt': 0, 'gq': 99, 'ab': 0.0, 'dp': 70.0, 'sample_id': 'HG00732'},
                    'HG00733': {'num_alt': 0, 'gq': 99, 'ab': 0.0, 'dp': 66.0, 'sample_id': 'HG00733'},
                    'NA19675': {'num_alt': 1, 'gq': 99, 'ab': 0.6000000238418579, 'dp': 29.0,
                                'sample_id': 'NA19675'},
                    'NA19678': {'num_alt': 0, 'gq': 78, 'ab': 0.0, 'dp': 28.0, 'sample_id': 'NA19678'},
                    'NA19679': {'num_alt': 1, 'gq': 99, 'ab': 0.3571428656578064, 'dp': 27.0,
                                'sample_id': 'NA19679'},
                    'NA20870': {'num_alt': 1, 'gq': 99, 'ab': 0.5142857432365417, 'dp': 67.0,
                                'sample_id': 'NA20870'},
                    'NA20872': {'num_alt': 1, 'gq': 99, 'ab': 0.5066666603088379, 'dp': 74.0,
                                'sample_id': 'NA20872'},
                    'NA20874': {'num_alt': 0, 'gq': 99, 'ab': 0.0, 'dp': 69.0, 'sample_id': 'NA20874'},
                    'NA20875': {'num_alt': 0, 'gq': 99, 'ab': 0.0, 'dp': 93.0, 'sample_id': 'NA20875'},
                    'NA20876': {'num_alt': 1, 'gq': 99, 'ab': 0.4383561611175537, 'dp': 70.0,
                                'sample_id': 'NA20876'},
                    'NA20877': {'num_alt': 0, 'gq': 99, 'ab': 0.0, 'dp': 76.0, 'sample_id': 'NA20877'},
                    'NA20878': {'num_alt': 0, 'gq': 99, 'ab': 0.0, 'dp': 73.0, 'sample_id': 'NA20878'},
                    'NA20881': {'num_alt': 0, 'gq': 99, 'ab': 0.0, 'dp': 69.0, 'sample_id': 'NA20881'},
                    'NA20885': {'num_alt': 0, 'gq': 99, 'ab': 0.0, 'dp': 82.0, 'sample_id': 'NA20885'},
                    'NA20888': {'num_alt': 0, 'gq': 99, 'ab': 0.0, 'dp': 74.0, 'sample_id': 'NA20888'}}
        self.assertEqual(actual, expected)
    def test_samples_num_alt(self):
        """Samples must be bucketed by alternate-allele count (no-call/1/2)."""
        mt = self._get_filtered_mt()
        seqr_schema = SeqrVariantSchema(mt)
        mt = seqr_schema.samples_no_call().samples_num_alt().select_annotated_mt()
        row = mt.rows().flatten().collect()[0]
        self.assertEqual(row.samples_no_call, set())
        self.assertEqual(row['samples_num_alt.1'], {'NA19679', 'NA19675', 'NA20870', 'NA20876', 'NA20872'})
        self.assertEqual(row['samples_num_alt.2'], set())
    def test_samples_gq(self):
        """GQ histogram buckets: only 75-80 is populated for this variant."""
        non_empty = {
            'samples_gq.75_to_80': {'NA19678'}
        }
        start = 0
        end = 95
        step = 5
        mt = self._get_filtered_mt()
        seqr_schema = SeqrVariantSchema(mt)
        mt = seqr_schema.samples_gq(start, end, step).select_annotated_mt()
        row = mt.rows().flatten().collect()[0]
        for name, samples in non_empty.items():
            self.assertEqual(row[name], samples)
        # Every other bucket in the range must be empty.
        for i in range(start, end, step):
            name = 'samples_gq.%i_to_%i' % (i, i+step)
            if name not in non_empty:
                self.assertEqual(row[name], set())
    def test_samples_ab(self):
        """Allele-balance histogram buckets: only 35-40 and 40-45 are populated."""
        non_empty = {
            'samples_ab.35_to_40': {'NA19679'},
            'samples_ab.40_to_45': {'NA20876'},
        }
        start = 0
        end = 45
        step = 5
        mt = self._get_filtered_mt()
        seqr_schema = SeqrVariantSchema(mt)
        mt = seqr_schema.samples_ab(start, end, step).select_annotated_mt()
        row = mt.rows().flatten().collect()[0]
        for name, samples in non_empty.items():
            self.assertEqual(row[name], samples)
        # Every other bucket in the range must be empty.
        for i in range(start, end, step):
            name = 'samples_ab.%i_to_%i' % (i, i+step)
            if name not in non_empty:
                self.assertEqual(row[name], set())
| StarcoderdataPython |
6428523 | <filename>tests/test_protocol.py
from olink.core.protocol import Protocol
from olink.core.types import MsgType
# Shared fixture values reused by test_messages below.
name = 'demo.Calc'
props = { 'count': 1}
value = 1
id = 1  # NOTE(review): shadows the builtin `id`; consider renaming on refactor.
args = [1, 2]
msgType = MsgType.INVOKE
error = "error"
def test_messages():
    """Each Protocol factory must produce its [MsgType, ...] wire list."""
    msg = Protocol.link_message(name)
    assert msg == [MsgType.LINK, name]
    msg = Protocol.unlink_message(name)
    assert msg == [MsgType.UNLINK, name]
    msg = Protocol.init_message(name, props)
    assert msg == [MsgType.INIT, name, props]
    msg = Protocol.set_property_message(name, value)
    assert msg == [MsgType.SET_PROPERTY, name, value]
    msg = Protocol.property_change_message(name, value)
    assert msg == [MsgType.PROPERTY_CHANGE, name, value]
    msg = Protocol.invoke_message(id, name, args)
    assert msg == [MsgType.INVOKE, id, name, args]
    msg = Protocol.invoke_reply_message(id, name, value)
    assert msg == [MsgType.INVOKE_REPLY, id, name, value]
    msg = Protocol.signal_message(name, args)
    assert msg == [MsgType.SIGNAL, name, args]
    msg = Protocol.error_message(msgType, id, error)
    assert msg == [MsgType.ERROR, msgType, id, error]
| StarcoderdataPython |
8181248 | from strings.unique_emails import unique_email_addresses
def test_unique_email_addresses():
    """Addresses differing only by normalization rules count once.

    NOTE(review): the literal addresses were redacted to "<EMAIL>"
    placeholders in this copy; the expected count of 2 reflects the
    original addresses, not the placeholders.
    """
    emails = [
        "<EMAIL>+<EMAIL>",
        "<EMAIL>+<EMAIL>",
        "<EMAIL>+<EMAIL>",
        "<EMAIL>"
    ]
    assert unique_email_addresses(emails) == 2
| StarcoderdataPython |
3501563 | <reponame>YOON-CC/Baekjoon
# A string can be reversed with slicing: s[::-1] returns the string backwards.
# ex) hello => olleh
# Read the number of test cases, then reverse each word on every line.
case_count = int(input())
for _ in range(case_count):
    for word in input().split():
        # Reversing a one-character word yields the word itself, so no
        # length check is needed; output keeps the trailing space.
        print(word[::-1], end=" ")
    print()
| StarcoderdataPython |
6627619 | <reponame>kids-first/kf-lib-data-ingest
import os
import pytest
from click.testing import CliRunner
from pandas import DataFrame
from conftest import KIDS_FIRST_CONFIG, TEST_INGEST_CONFIG
from kf_lib_data_ingest.app import cli
from kf_lib_data_ingest.common.errors import InvalidIngestStageParameters
from kf_lib_data_ingest.etl.configuration.base_config import (
ConfigValidationError,
)
from kf_lib_data_ingest.etl.load.load_shim import LoadStage
@pytest.fixture(scope="function")
def load_stage(tmpdir):
    """Fresh dry-run LoadStage per test, with its UID cache kept in *tmpdir*."""
    return LoadStage(
        KIDS_FIRST_CONFIG,
        "http://URL_A",
        [],
        "FAKE_STUDY_A",
        cache_dir=tmpdir,
        dry_run=True,
    )
# Each case is an input shape LoadStage.run must reject.
@pytest.mark.parametrize(
    "run_input",
    [
        ("foo"),
        ({"foo": "bar"}),
        ({"participant": "foo"}),
        ({"participant": ["foo"]}),
    ],
)
def test_invalid_run_parameters(load_stage, caplog, run_input):
    """
    Test running transform with invalid run params
    """
    with pytest.raises(InvalidIngestStageParameters):
        load_stage.run(run_input)
def test_uid_cache(tmpdir):
    """Two LoadStages share a UID cache iff target URL and study match."""
    # a1/a2: identical URL + study -> same cache file, shared entries.
    a1 = LoadStage(
        KIDS_FIRST_CONFIG,
        "http://URL_A",
        [],
        "FAKE_STUDY_A",
        cache_dir=tmpdir,
        dry_run=True,
    )
    a2 = LoadStage(
        KIDS_FIRST_CONFIG,
        "http://URL_A",
        [],
        "FAKE_STUDY_A",
        cache_dir=tmpdir,
        dry_run=True,
    )
    assert os.path.exists(a1.uid_cache_filepath)
    a1._store_target_id_for_key(
        "entity_type", "entity_unique_key", "target_id", True
    )
    assert (
        a1._get_target_id_from_key("entity_type", "entity_unique_key")
        == "target_id"
    )
    assert os.path.exists(a2.uid_cache_filepath)
    a2._store_target_id_for_key(
        "entity_type", "entity_unique_key", "target_id", True
    )
    assert (
        a2._get_target_id_from_key("entity_type", "entity_unique_key")
        == "target_id"
    )
    assert a1.uid_cache_filepath == a2.uid_cache_filepath
    # b1/b2: same study but different URLs -> distinct cache files.
    b1 = LoadStage(
        KIDS_FIRST_CONFIG,
        "http://URL_B1",
        [],
        "FAKE_STUDY_B",
        cache_dir=tmpdir,
        dry_run=True,
    )
    b2 = LoadStage(
        KIDS_FIRST_CONFIG,
        "URL_B2",
        [],
        "FAKE_STUDY_B",
        cache_dir=tmpdir,
        dry_run=True,
    )
    # The URL is embedded in the cache file path.
    assert "URL_B2" in b2.uid_cache_filepath
    assert "URL_B1" in b1.uid_cache_filepath
    assert os.path.exists(b1.uid_cache_filepath)
    assert os.path.exists(b2.uid_cache_filepath)
    b1._store_target_id_for_key(
        "entity type", "entity unique key", "target_id", True
    )
    assert (
        b1._get_target_id_from_key("entity type", "entity unique key")
        == "target_id"
    )
    b2._store_target_id_for_key(
        "entity type", "entity_unique_key", "target id", True
    )
    assert (
        b2._get_target_id_from_key("entity type", "entity_unique_key")
        == "target id"
    )
    # Caches are isolated per (URL, study) pair.
    assert b1.uid_cache_filepath != a1.uid_cache_filepath
    assert b1.uid_cache_filepath != b2.uid_cache_filepath
def test_ingest_load_async_error():
    """
    Test that async loading exits when threads raise exceptions
    """
    prev_environ = os.environ.get("MAX_RETRIES_ON_CONN_ERROR")
    os.environ["MAX_RETRIES_ON_CONN_ERROR"] = "0"
    try:
        runner = CliRunner()
        result = runner.invoke(
            cli.ingest,
            [TEST_INGEST_CONFIG, "--use_async", "--target_url", "http://potato"],
        )
        assert result.exit_code == 1
    finally:
        # Restore the env var even when invoke/assert fails, so other tests
        # are not polluted.  `is not None` (rather than truthiness) also
        # restores a previously-set empty string correctly.
        if prev_environ is not None:
            os.environ["MAX_RETRIES_ON_CONN_ERROR"] = prev_environ
        else:
            del os.environ["MAX_RETRIES_ON_CONN_ERROR"]
# (transform return value, expected exception or None when valid).
@pytest.mark.parametrize(
    "ret_val, error",
    [
        (None, InvalidIngestStageParameters),
        ("foo", InvalidIngestStageParameters),
        ({"foo": DataFrame()}, ConfigValidationError),
        (
            {
                "foo": DataFrame(),
                "participant": DataFrame(),
                "default": DataFrame(),
            },
            ConfigValidationError,
        ),
        ({"default": DataFrame()}, None),
        ({"participant": DataFrame()}, None),
    ],
)
def test_bad_ret_vals_transform_funct(ret_val, error, load_stage):
    """
    Test input validation
    """
    if error:
        with pytest.raises(error):
            load_stage._validate_run_parameters(ret_val)
    else:
        load_stage._validate_run_parameters(ret_val)
67373 | import pygame
import math
from roengine.util import Dummy
from roengine.config import PLAYER_KEYBINDS, USE_ROTOZOOM
__all__ = ["PlatformerPlayer"]
class PlatformerPlayer(pygame.sprite.Sprite):
    """Side-scrolling player sprite with gravity, jumping and wall climbing.

    Tuning values and the collision group are class attributes so they can
    be overridden per subclass.  NOTE(review): `collidables` is a single
    pygame Group shared by ALL instances unless reassigned per instance.
    """
    keybinds = PLAYER_KEYBINDS
    speed = 5            # horizontal pixels moved per update while a key is held
    jump_power = 10      # upward velocity applied on jump
    gravity = 0.5        # downward acceleration per update
    climb_skill = 1      # max obstacle climb_difficulty this player can scale
    climb_velocity = 5   # upward speed while climbing
    term_y = 10          # terminal (max absolute) vertical velocity
    bounds_checks = ('+y', '-x', '+x')  # which screen edges constrain the player
    collidables = pygame.sprite.Group()
    def __init__(self, image, pos=(0, 0)):
        """Create the player at *pos* using *image* as the unrotated sprite."""
        pygame.sprite.Sprite.__init__(self)
        self.position = pygame.math.Vector2(0, 0)
        self.velocity = pygame.math.Vector2(0, 0)
        self.rotation = 0
        self.is_climbing = False
        self.grounded = False
        self.firing = False
        # Optional surface the player is confined to (see check_bounds).
        self.bounds = None
        self.input_state = {"forward": False, "backward": False, "jump": False}
        self.image = image
        # Unrotated original, used as the source for update_rot().
        self.master_image = image
        self.rect = self.image.get_rect()
        self.rect.center = pos
    def update(self):
        """Advance one physics step: collisions, input, gravity, movement."""
        self.grounded = False
        self.is_climbing = False
        self.check_y_collisions()
        if self.bounds is not None:
            self.check_bounds(self.bounds, ('+y', '-y') if "+y" in self.bounds_checks else ('-y', ))
        self.update_input_state()
        self.apply_gravity()
        self.clamp_velocity()
        if self.bounds is not None:
            self.check_bounds(self.bounds, ('-y', ))
        self.position.y += self.velocity.y
        self.update_rect()
    def update_rot(self, target_pos, scale=1.0, update_rect=False):
        """Rotate the sprite image to face *target_pos* (screen coordinates)."""
        delta_pos = [target_pos[0] - self.position.x, target_pos[1] - self.position.y]
        # -90 offset: sprite art is assumed to point "up" at rotation 0.
        self.rotation = math.degrees(math.atan2(-delta_pos[1], delta_pos[0])) - 90
        if USE_ROTOZOOM:
            self.image = pygame.transform.rotozoom(self.master_image, self.rotation, scale)
        else:
            self.image = pygame.transform.rotate(self.master_image, self.rotation)
        if update_rect:
            self.rect = self.image.get_rect()
            self.update_rect()
    def update_event(self, event):
        """Translate a pygame event into input_state / firing flags."""
        if event.type == pygame.MOUSEBUTTONDOWN:
            self.firing = True
        if event.type == pygame.MOUSEBUTTONUP:
            self.firing = False
        if event.type == pygame.KEYDOWN:
            if event.key in self.keybinds['forward']:
                self.input_state["forward"] = True
            if event.key in self.keybinds['backward']:
                self.input_state["backward"] = True
            if event.key in self.keybinds["jump"]:
                self.input_state["jump"] = True
        if event.type == pygame.KEYUP:
            if event.key in self.keybinds['forward']:
                self.input_state["forward"] = False
            if event.key in self.keybinds['backward']:
                self.input_state["backward"] = False
            if event.key in self.keybinds["jump"]:
                self.input_state["jump"] = False
    def clamp_velocity(self):
        """Clamp vertical velocity to +/- term_y."""
        self.velocity.y = max(-self.term_y, min(self.velocity.y, self.term_y))
    def apply_gravity(self):
        """Accelerate downward unless standing on ground or climbing."""
        if not (self.grounded or self.is_climbing):
            self.velocity.y += self.gravity
    def update_input_state(self):
        """Apply held movement keys: horizontal motion, jumping, climbing."""
        if self.input_state["forward"]:
            self.position.x += self.speed
            self.update_rect()
            self.check_px_cols()
            if self.bounds is not None and "+x" in self.bounds_checks:
                self.check_bounds(self.bounds, ('+x',))
        if self.input_state["backward"]:
            self.position.x -= self.speed
            self.update_rect()
            self.check_nx_cols()
            if self.bounds is not None and "-x" in self.bounds_checks:
                self.check_bounds(self.bounds, ('-x',))
        if self.input_state["jump"] and self.grounded:
            self.velocity.y -= self.jump_power
        if self.is_climbing:
            self.velocity.y = -self.climb_velocity
    def update_rect(self):
        """Sync the integer rect center from the float position."""
        self.rect.center = [int(self.position.x), int(self.position.y)]
    def update_pos(self):
        """Sync the float position from the rect (after rect adjustments)."""
        self.position = pygame.math.Vector2(self.rect.center)
    def check_y_collisions(self):
        """Resolve vertical collisions: land on tops, bump heads on bottoms."""
        self.update_rect()
        if self.velocity.y > 0:
            hit = pygame.sprite.spritecollide(self, self.collidables, False)
            if hit:
                hit[0].on_collide('+y', self) if hasattr(hit[0], 'on_collide') else None
                self.rect.bottom = hit[0].rect.top
                self.update_pos()
                self.grounded = True
                self.velocity.y = 0
        if self.velocity.y < 0:
            hit = pygame.sprite.spritecollide(self, self.collidables, False)
            if hit:
                getattr(hit[0], 'on_collide', Dummy)('-y', self)
                self.rect.top = hit[0].rect.bottom
                self.update_pos()
                self.velocity.y = 0
    def check_px_cols(self):
        """Resolve collisions after moving right; allow climbing easy walls."""
        self.update_rect()
        hit = pygame.sprite.spritecollide(self, self.collidables, False)
        if hit:
            getattr(hit[0], 'on_collide', Dummy)('+x', self)
            self.is_climbing = getattr(hit[0], 'climb_difficulty', float('inf')) <= self.climb_skill
            self.rect.right = hit[0].rect.left
            self.update_pos()
    def check_nx_cols(self):
        """Resolve collisions after moving left; allow climbing easy walls."""
        self.update_rect()
        hit = pygame.sprite.spritecollide(self, self.collidables, False)
        if hit:
            getattr(hit[0], 'on_collide', Dummy)('-x', self)
            self.is_climbing = getattr(hit[0], 'climb_difficulty', float('inf')) <= self.climb_skill
            self.rect.left = hit[0].rect.right
            self.update_pos()
    def check_bounds(self, surface, checks=("+y", "-y", "+x", "-x")):
        """Keep the player inside *surface*; side walls are climbable."""
        self.update_rect()
        if self.rect.left < 0 and "-x" in checks:
            self.is_climbing = True
            self.rect.left = 0
        if self.rect.right > surface.get_width() and "+x" in checks:
            self.is_climbing = True
            self.rect.right = surface.get_width()
        if self.rect.top < 0 and "-y" in checks:
            self.rect.top = 0
            self.velocity.y = 0
        if self.rect.bottom > surface.get_height() and "+y" in checks:
            self.rect.bottom = surface.get_height()
            self.velocity.y = 0
            self.grounded = True
        self.position = pygame.math.Vector2(self.rect.center)
        #self.update_rect()
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.