text
stringlengths 12
1.05M
| repo_name
stringlengths 5
86
| path
stringlengths 4
191
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 12
1.05M
| keyword
listlengths 1
23
| text_hash
stringlengths 64
64
|
|---|---|---|---|---|---|---|---|
# -*- coding: utf-8 -*-
"""Tests for the PyBEL processor."""
import os
from urllib import request
from pybel import BELGraph
from pybel.dsl import *
from pybel.language import Entity
from pybel.io import from_nodelink_file
from pybel.examples import egf_graph
from indra.statements import *
from indra.sources import bel
from indra.sources.bel import processor as pb
from indra.sources.bel.api import process_cbn_jgif_file, process_pybel_graph, \
small_corpus_url
from indra.databases import hgnc_client
from indra.statements.validate import assert_valid_statement
# Module-level groundings for MAP2K1 (MEK), shared by the get_agent tests
# below. Resolved once at import time via the HGNC client.
mek_hgnc_id = hgnc_client.get_hgnc_id('MAP2K1')
mek_up_id = hgnc_client.get_uniprot_id(mek_hgnc_id)
def test_pybel_neighborhood_query():
    """Query the small corpus around TP63 and check context annotations.

    NOTE(review): downloads the small corpus from a remote URL, so this
    test needs network connectivity.
    """
    bp = bel.process_pybel_neighborhood(['TP63'],
                                        network_type='graph_jsongz_url',
                                        network_file=small_corpus_url)
    assert bp.statements
    # Every extracted statement must pass INDRA's schema validation
    for stmt in bp.statements:
        assert_valid_statement(stmt)
    # All statements from this corpus carry the same biological context
    assert all([s.evidence[0].context is not None
                for s in bp.statements])
    assert all([s.evidence[0].context.cell_line.name == 'MCF 10A'
                for s in bp.statements])
    # Locate statement about epidermis development
    stmt = [st for st in bp.statements if st.agent_list()[1].name ==
            'epidermis development'][0]
    assert repr(stmt.evidence[0].context) == str(stmt.evidence[0].context)
    # The full BioContext (location, cell line/type, organ, disease,
    # species) should round-trip exactly as annotated in the corpus
    assert stmt.evidence[0].context == BioContext(
        location=RefContext(name="Cytoplasm",
                            db_refs={'MESH': 'D003593'}),
        cell_line=RefContext(name="MCF 10A",
                             db_refs={'EFO': '0001200'}),
        cell_type=RefContext(name="keratinocyte",
                             db_refs={'CL': 'CL:0000312'}),
        organ=RefContext(name="colon",
                         db_refs={'UBERON': 'UBERON:0001155'}),
        disease=RefContext(name="cancer",
                           db_refs={'DOID': 'DOID:162'}),
        species=RefContext(name="Rattus norvegicus",
                           db_refs={'TAXONOMY': '10116'})), \
        stmt.evidence[0].context
    # Test annotation manager
    assert bp.annot_manager.get_mapping('Species', '9606') == \
        'Homo sapiens'
def test_pybel_readme_example():
    """Smoke-test the neighborhood query shown in the README."""
    proc = bel.process_pybel_neighborhood(['KRAS', 'BRAF'])
    assert proc.statements
def test_process_pybel():
    """The PyBEL example EGF graph yields at least one statement."""
    processor = bel.process_pybel_graph(egf_graph)
    assert processor.statements
def test_process_jgif():
    """Process a CBN JGIF file downloaded from S3.

    The downloaded file is removed even if processing raises, so a broken
    or partial download cannot poison subsequent runs.
    """
    test_file_url = 'https://s3.amazonaws.com/bigmech/travis/Hox-2.0-Hs.jgf'
    test_file = 'Hox-2.0-Hs.jgf'
    if not os.path.exists(test_file):
        request.urlretrieve(url=test_file_url, filename=test_file)
    try:
        pbp = process_cbn_jgif_file(test_file)
    finally:
        # Clean up
        os.remove(test_file)
    assert len(pbp.statements) == 26, len(pbp.statements)
    assert isinstance(pbp.statements[0], Statement)
    assert all(s.evidence[0].source_api == 'bel' for s in pbp.statements)
def test_nodelink_json():
    """Process a node-link JSON graph downloaded from S3.

    The downloaded file is removed even if processing raises, so a broken
    or partial download cannot poison subsequent runs.
    """
    test_file_url = \
        'https://s3.amazonaws.com/bigmech/travis/Hox-2.0-Hs_nljson.json'
    test_file = 'Hox-2.0-Hs_nljson.json'
    if not os.path.exists(test_file):
        request.urlretrieve(url=test_file_url, filename=test_file)
    try:
        pbp = process_pybel_graph(from_nodelink_file(test_file))
    finally:
        # Clean up
        os.remove(test_file)
    # Changed to 24, not really sure how to debug this one
    assert len(pbp.statements) == 24, (len(pbp.statements), pbp.statements)
    assert isinstance(pbp.statements[0], Statement)
    assert all(s.evidence[0].source_api == 'bel' for s in pbp.statements)
def test_get_agent_hgnc():
    """An HGNC protein grounds correctly by name and by identifier."""
    by_name = Protein(name='MAP2K1', namespace='HGNC')
    # With an identifier present, the (wrong) name should be overridden
    by_id = Protein(name='Foo', namespace='HGNC', identifier='6840')
    for node in (by_name, by_id):
        agent = pb.get_agent(node, {})
        assert isinstance(agent, Agent)
        assert agent.name == 'MAP2K1', agent
        assert agent.db_refs.get('HGNC') == mek_hgnc_id
        assert agent.db_refs.get('UP') == mek_up_id
def test_get_agent_up():
    """A UniProt-grounded node maps back to its HGNC name and id."""
    node = Protein(namespace='UP', identifier='Q02750')
    agent = pb.get_agent(node, {})
    assert isinstance(agent, Agent)
    assert agent.name == 'MAP2K1'
    assert agent.db_refs.get('HGNC') == mek_hgnc_id
    assert agent.db_refs.get('UP') == mek_up_id
def test_get_agent_egid():
    """An Entrez gene id is mapped to HGNC and UniProt groundings."""
    agent = pb.get_agent(Protein(name='5008', namespace='EGID'))
    assert isinstance(agent, Agent)
    assert agent.name == 'OSM'
    assert len(agent.db_refs) == 3
    expected_refs = {'EGID': '5008', 'HGNC': '8506', 'UP': 'P13725'}
    for db, ref in expected_refs.items():
        assert agent.db_refs[db] == ref
def test_get_agent_mgi():
    """A mouse (MGI) protein grounds only to UniProt."""
    agent = pb.get_agent(Protein(namespace='MGI', name='Nr1h3'), {})
    assert isinstance(agent, Agent)
    assert agent.name == 'Nr1h3'
    assert len(agent.db_refs) == 1
    assert agent.db_refs.get('UP') == 'Q9Z0Y9', agent.db_refs
def test_get_agent_rgd():
    """A rat (RGD) protein grounds only to UniProt."""
    agent = pb.get_agent(Protein(namespace='RGD', name='Tp53'), {})
    assert isinstance(agent, Agent)
    assert agent.name == 'Tp53'
    assert len(agent.db_refs) == 1
    assert agent.db_refs.get('UP') == 'P10361', agent.db_refs
def test_get_agent_sfam():
    """A Selventa protein family is mapped to its FamPlex equivalent."""
    family = Protein(namespace='SFAM', name='PRKC Family')
    agent = pb.get_agent(family)
    assert isinstance(agent, Agent)
    assert agent.name == 'PKC'
    assert len(agent.db_refs) == 2
    assert agent.db_refs['SFAM'] == 'PRKC Family'
    assert agent.db_refs['FPLX'] == 'PKC'
def test_get_agent_sdis():
    """A Selventa disease keeps its SDIS grounding as-is."""
    agent = pb.get_agent(Pathology(namespace='SDIS', name='metastasis'))
    assert isinstance(agent, Agent)
    assert agent.name == 'metastasis'
    assert len(agent.db_refs) == 1
    assert agent.db_refs['SDIS'] == 'metastasis'
def test_get_agent_chebi():
    """A ChEBI abundance is grounded to a CHEBI-prefixed id."""
    agent = pb.get_agent(Abundance(namespace='CHEBI', name='nitric oxide'))
    assert isinstance(agent, Agent)
    assert agent.name == 'nitric oxide'
    assert len(agent.db_refs) == 1
    assert agent.db_refs['CHEBI'] == 'CHEBI:16480'
def test_get_agent_schem():
    """A Selventa chemical keeps its SCHEM grounding as-is."""
    agent = pb.get_agent(Abundance(namespace='SCHEM', name='Promegestone'))
    assert isinstance(agent, Agent)
    assert agent.name == 'Promegestone'
    assert len(agent.db_refs) == 1
    assert agent.db_refs['SCHEM'] == 'Promegestone'
def test_get_agent_mirna():
    """MicroRNAs ground to MIRBASE/HGNC regardless of input namespace."""
    nodes = [
        MicroRna(namespace='HGNC', name='MIRLET7A1'),
        MicroRna(namespace='HGNC', name='MIRLET7A1', identifier='31476'),
        MicroRna(namespace='MIRBASE', name='hsa-let-7a-1'),
    ]
    # All three inputs should resolve to the same grounded agent
    for node in nodes:
        agent = pb.get_agent(node, {})
        assert isinstance(agent, Agent)
        assert agent.name == 'MIRLET7A1'
        assert agent.db_refs.get('MIRBASE') == 'MI0000060'
        assert agent.db_refs.get('HGNC') == '31476'
def test_get_agent_fusion():
    """Fusion proteins are not handled and yield no Agent."""
    fusion = ProteinFusion(
        partner_5p=Protein(namespace='HGNC', name='BCR'),
        partner_3p=Protein(namespace='HGNC', name='ABL1'),
    )
    assert pb.get_agent(fusion) is None
def test_get_agent_up_no_id():
    """A UP node with a name but no identifier cannot be grounded."""
    node = Protein(name='MAP2K1', namespace='UP')
    assert pb.get_agent(node, {}) is None
def test_get_agent_meshpp():
    """MESHPP biological processes are grounded to MESH."""
    agent = pb.get_agent(bioprocess(name='Apoptosis', namespace='MESHPP'))
    assert isinstance(agent, Agent)
    assert agent.name == 'Apoptosis'
    assert 'MESH' in agent.db_refs
def test_get_agent_meshd():
    """MESHD disease terms are grounded to MESH."""
    agent = pb.get_agent(bioprocess(name='Hyperoxia', namespace='MESHD'))
    assert isinstance(agent, Agent)
    assert agent.name == 'Hyperoxia'
    assert 'MESH' in agent.db_refs
def test_get_agent_with_mods():
    """pmod variants become ModConditions with optional residue/position."""
    # (variant, expected residue, expected position)
    cases = [
        (pmod('Ph'), None, None),
        (pmod('Ph', code='Ser'), 'S', None),
        (pmod('Ph', position=218), None, '218'),
        (pmod('Ph', position=218, code='Ser'), 'S', '218'),
    ]
    for variant, residue, position in cases:
        node = Protein(name='MAP2K1', namespace='HGNC', variants=[variant])
        agent = pb.get_agent(node, {})
        assert isinstance(agent, Agent)
        assert len(agent.mods) == 1
        mod = agent.mods[0]
        assert mod.mod_type == 'phosphorylation'
        if residue is None:
            assert not mod.residue
        else:
            assert mod.residue == residue
        if position is None:
            assert not mod.position
        else:
            assert mod.position == position
def test_get_agent_with_muts():
    """An hgvs variant becomes a MutCondition on the Agent."""
    node = Protein(name='MAP2K1', namespace='HGNC',
                   variants=[hgvs('p.Val600Glu')])
    agent = pb.get_agent(node, {})
    assert isinstance(agent, Agent)
    assert len(agent.mutations) == 1
    mutation = agent.mutations[0]
    assert mutation.position == '600'
    assert mutation.residue_from == 'V'
    assert mutation.residue_to == 'E'
def test_get_agent_with_activity():
    """Edge activity data becomes an ActivityCondition on the Agent."""
    node = Protein(name='MAP2K1', namespace='HGNC')
    agent = pb.get_agent(node, activity('act'))
    assert isinstance(agent, Agent)
    assert isinstance(agent.activity, ActivityCondition)
    assert agent.activity.activity_type == 'activity'
    assert agent.activity.is_active
def test_get_agent_complex():
    """A complex becomes its first member with the rest bound to it."""
    mek = Protein(name='MAP2K1', namespace='HGNC')
    erk = Protein(name='MAPK1', namespace='HGNC',
                  variants=[pmod('Ph', position=185, code='Thr')])
    agent = pb.get_agent(complex_abundance([mek, erk]))
    assert isinstance(agent, Agent)
    assert agent.name == 'MAP2K1'
    assert len(agent.bound_conditions) == 1
    cond = agent.bound_conditions[0]
    assert isinstance(cond, BoundCondition)
    assert cond.is_bound is True
    bound = cond.agent
    assert bound.name == 'MAPK1'
    # The bound partner keeps its modification conditions
    assert len(bound.mods) == 1
    mod = bound.mods[0]
    assert mod.mod_type == 'phosphorylation'
    assert mod.residue == 'T'
    assert mod.position == '185'
def test_get_agent_complex_none_agent():
    """If one of the agents in the complex can't be obtained (e.g., an
    unhandled namespace), then the complex itself should be None."""
    # First case: the prime agent is ungroundable; second: the bound one.
    for mek_ns, erk_ns in (('FOO', 'HGNC'), ('HGNC', 'FOO')):
        mek = Protein(name='MAP2K1', namespace=mek_ns)
        erk = Protein(name='MAPK1', namespace=erk_ns,
                      variants=[pmod('Ph', position=185, code='Thr')])
        assert pb.get_agent(complex_abundance([mek, erk])) is None
def test_get_agent_named_complex_go():
    """Named (GO) complexes are currently unhandled and yield None."""
    # TODO: Handle named complexes and map to FamPlex where possible
    node = NamedComplexAbundance(namespace='GOCCID', name='0043509')
    assert pb.get_agent(node) is None
def test_get_agent_with_translocation():
    """A translocation on the edge sets the Agent's location to the
    to-location."""
    node = Protein(name='MAPK1', namespace='HGNC')
    # Some example edge data
    edge = translocation(
        from_loc=Entity(namespace='GOCC', name='intracellular'),
        to_loc=Entity(namespace='GOCC', name='extracellular space'),
    )
    agent = pb.get_agent(node, edge)
    assert isinstance(agent, Agent)
    assert agent.name == 'MAPK1'
    assert agent.location == 'extracellular space'
def test_phosphorylation_one_site_with_evidence():
    """A directlyIncreases edge onto a single pmod yields one
    Phosphorylation statement with full evidence metadata."""
    mek = Protein(name='MAP2K1', namespace='HGNC')
    erk = Protein(name='MAPK1', namespace='HGNC',
                  variants=[pmod('Ph', position=185, code='Thr')])
    g = BELGraph()
    g.annotation_list['TextLocation'] = {'Abstract'}
    ev_text = 'Some evidence.'
    ev_pmid = '123456'
    edge_hash = g.add_directly_increases(
        mek, erk, evidence=ev_text,
        citation=ev_pmid,
        annotations={"TextLocation": 'Abstract'},
    )
    pbp = bel.process_pybel_graph(g)
    assert pbp.statements
    assert len(pbp.statements) == 1
    assert isinstance(pbp.statements[0], Phosphorylation)
    assert pbp.statements[0].residue == 'T'
    assert pbp.statements[0].position == '185'
    enz = pbp.statements[0].enz
    sub = pbp.statements[0].sub
    assert enz.name == 'MAP2K1'
    assert enz.mods == []
    # The pmod is consumed by the Phosphorylation statement itself, so the
    # substrate agent carries no residual modification conditions.
    assert sub.name == 'MAPK1'
    assert sub.mods == []
    # Check evidence
    assert len(pbp.statements[0].evidence) == 1
    ev = pbp.statements[0].evidence[0]
    assert ev.source_api == 'bel'
    assert ev.source_id == edge_hash
    assert ev.pmid == ev_pmid, (ev.pmid, ev_pmid)
    assert ev.text == ev_text
    # The original BEL string is preserved verbatim in the annotations
    assert ev.annotations == {
        'bel': 'p(HGNC:MAP2K1) directlyIncreases '
        'p(HGNC:MAPK1, pmod(go:0006468 ! "protein phosphorylation", Thr, 185))'
    }
    assert ev.epistemics == {'direct': True, 'section_type': 'abstract'}
def test_doi_evidence():
    """Test processing edges with DOI citations."""
    graph = BELGraph()
    graph.annotation_list['TextLocation'] = {'Abstract'}
    doi = '123456'
    graph.add_directly_increases(
        Protein(name='MAP2K1', namespace='HGNC'),
        Protein(name='MAPK1', namespace='HGNC'),
        evidence='Some evidence.',
        citation=('doi', doi),
        annotations={"TextLocation": 'Abstract'},
    )
    pbp = bel.process_pybel_graph(graph)
    assert pbp.statements
    assert len(pbp.statements) == 1
    assert len(pbp.statements[0].evidence) == 1
    ev = pbp.statements[0].evidence[0]
    # DOI citations populate text_refs rather than pmid
    assert ev.pmid is None
    assert 'DOI' in ev.text_refs
    assert ev.text_refs['DOI'] == doi
def test_phosphorylation_two_sites():
    """Two pmods on the substrate produce one statement per site."""
    mek = Protein(name='MAP2K1', namespace='HGNC')
    erk = Protein(name='MAPK1', namespace='HGNC',
                  variants=[pmod('Ph', position=185, code='Thr'),
                            pmod('Ph', position=187, code='Tyr')])
    graph = BELGraph()
    graph.add_directly_increases(mek, erk, evidence="Some evidence.",
                                 citation='123456')
    pbp = bel.process_pybel_graph(graph)
    assert pbp.statements
    assert len(pbp.statements) == 2
    first, second = pbp.statements
    assert (first.residue, first.position) == ('T', '185')
    assert (second.residue, second.position) == ('Y', '187')
    # Neither substrate carries residual mods once the sites are split out
    assert first.sub.mods == []
    assert second.sub.mods == []
    assert len(pbp.statements[0].evidence) == 1
def test_regulate_amount1_prot_obj():
    """p -> p with no modifiers maps to IncreaseAmount."""
    graph = BELGraph()
    graph.add_increases(Protein(name='MAP2K1', namespace='HGNC'),
                        Protein(name='MAPK1', namespace='HGNC'),
                        evidence="Some evidence.", citation='123456')
    pbp = bel.process_pybel_graph(graph)
    assert pbp.statements
    assert len(pbp.statements) == 1
    assert isinstance(pbp.statements[0], IncreaseAmount)
    assert len(pbp.statements[0].evidence) == 1
def test_regulate_amount2_rna_obj():
    """p -> rna maps to IncreaseAmount."""
    # FIXME: Create a transcription-specific statement for p->rna
    graph = BELGraph()
    graph.add_increases(Protein(name='MAP2K1', namespace='HGNC'),
                        rna(name='MAPK1', namespace='HGNC'),
                        evidence="Some evidence.", citation='123456')
    pbp = bel.process_pybel_graph(graph)
    assert pbp.statements
    assert len(pbp.statements) == 1
    assert isinstance(pbp.statements[0], IncreaseAmount)
    assert len(pbp.statements[0].evidence) == 1
def test_regulate_amount3_deg():
    """p -> deg(p) maps to DecreaseAmount."""
    # FIXME: Create a stability-specific statement for p->deg(p(Foo))
    graph = BELGraph()
    graph.add_increases(Protein(name='MAP2K1', namespace='HGNC'),
                        Protein(name='MAPK1', namespace='HGNC'),
                        object_modifier=degradation(),
                        evidence="Some evidence.", citation='123456')
    pbp = bel.process_pybel_graph(graph)
    assert pbp.statements
    assert len(pbp.statements) == 1
    assert isinstance(pbp.statements[0], DecreaseAmount), pbp.statements[0]
    assert len(pbp.statements[0].evidence) == 1
def test_regulate_amount4_subj_act():
    """A subject activity modifier becomes an ActivityCondition."""
    mek = Protein(name='MAP2K1', namespace='HGNC')
    erk = Protein(name='MAPK1', namespace='HGNC')
    # Named and unnamed activities map to specific / generic types
    cases = ((activity(name='tscript'), 'transcription'),
             (activity(), 'activity'))
    for modifier, expected_type in cases:
        graph = BELGraph()
        graph.add_increases(mek, erk, source_modifier=modifier,
                            evidence="Some evidence.", citation='123456')
        pbp = bel.process_pybel_graph(graph)
        assert pbp.statements
        assert len(pbp.statements) == 1
        assert isinstance(pbp.statements[0], IncreaseAmount)
        subj = pbp.statements[0].subj
        assert subj.name == 'MAP2K1'
        assert subj.activity is not None
        assert isinstance(subj.activity, ActivityCondition), \
            subj.activity.__class__
        assert subj.activity.activity_type == expected_type
        assert subj.activity.is_active
        assert len(pbp.statements[0].evidence) == 1
def test_regulate_activity():
    """kin -> kin maps to Activation with obj_activity 'kinase'."""
    graph = BELGraph()
    graph.add_increases(Protein(name='MAP2K1', namespace='HGNC'),
                        Protein(name='MAPK1', namespace='HGNC'),
                        source_modifier=activity(name='kin'),
                        target_modifier=activity(name='kin'),
                        evidence="Some evidence.", citation='123456')
    pbp = bel.process_pybel_graph(graph)
    assert pbp.statements
    assert len(pbp.statements) == 1
    stmt = pbp.statements[0]
    assert isinstance(stmt, Activation), stmt.__class__
    subject = stmt.subj
    assert subject.name == 'MAP2K1'
    assert isinstance(subject.activity, ActivityCondition)
    assert subject.activity.activity_type == 'kinase'
    assert subject.activity.is_active
    # The object's activity lands in obj_activity, not on the agent
    assert stmt.obj.name == 'MAPK1'
    assert stmt.obj.activity is None
    assert stmt.obj_activity == 'kinase'
    assert len(stmt.evidence) == 1
def test_active_form():
    """p(mod) -> act(p) of the same protein maps to ActiveForm."""
    modified = Protein(name='TP53', namespace='HGNC',
                       variants=[pmod('Ph', position=33, code='Ser')])
    plain = Protein(name='TP53', namespace='HGNC')
    graph = BELGraph()
    graph.add_increases(modified, plain,
                        target_modifier=activity(name='tscript'),
                        evidence="Some evidence.", citation='123456')
    pbp = bel.process_pybel_graph(graph)
    assert pbp.statements
    assert len(pbp.statements) == 1
    stmt = pbp.statements[0]
    assert isinstance(stmt, ActiveForm)
    assert stmt.activity == 'transcription'
    assert stmt.is_active is True
    agent = stmt.agent
    assert agent.name == 'TP53'
    assert len(agent.mods) == 1
    mod = agent.mods[0]
    assert mod.mod_type == 'phosphorylation'
    assert mod.residue == 'S'
    assert mod.position == '33'
    assert len(stmt.evidence) == 1
def test_gef():
    """act(p) directlyIncreases gtp(p) maps to a Gef statement."""
    graph = BELGraph()
    graph.add_directly_increases(Protein(name='SOS1', namespace='HGNC'),
                                 Protein(name='KRAS', namespace='HGNC'),
                                 source_modifier=activity(),
                                 target_modifier=activity(name='gtp'),
                                 evidence="Some evidence.",
                                 citation='123456')
    pbp = bel.process_pybel_graph(graph)
    assert pbp.statements
    assert len(pbp.statements) == 1
    stmt = pbp.statements[0]
    assert isinstance(stmt, Gef)
    assert stmt.gef.name == 'SOS1'
    assert stmt.ras.name == 'KRAS'
    assert stmt.gef.activity.activity_type == 'activity'
    assert stmt.gef.activity.is_active is True
    assert stmt.ras.activity is None
    assert len(stmt.evidence) == 1
def test_indirect_gef_is_activation():
    """An indirect GEF-like edge maps to Activation, not Gef."""
    graph = BELGraph()
    graph.add_increases(Protein(name='SOS1', namespace='HGNC'),
                        Protein(name='KRAS', namespace='HGNC'),
                        source_modifier=activity(),
                        target_modifier=activity(name='gtp'),
                        evidence="Some evidence.", citation='123456')
    pbp = bel.process_pybel_graph(graph)
    assert pbp.statements
    assert len(pbp.statements) == 1
    stmt = pbp.statements[0]
    assert isinstance(stmt, Activation)
    assert stmt.subj.name == 'SOS1'
    assert stmt.obj.name == 'KRAS'
    assert stmt.subj.activity.activity_type == 'activity'
    assert stmt.subj.activity.is_active is True
    assert stmt.obj.activity is None
    assert stmt.obj_activity == 'gtpbound'
    assert len(stmt.evidence) == 1
def test_gap():
    """act(p) directlyDecreases gtp(p) maps to a Gap statement."""
    rasa1 = Protein(name='RASA1', namespace='HGNC')
    kras = Protein(name='KRAS', namespace='HGNC')
    graph = BELGraph()
    graph.add_directly_decreases(rasa1, kras,
                                 source_modifier=activity(),
                                 target_modifier=activity(name='gtp'),
                                 evidence="Some evidence.",
                                 citation='123456')
    pbp = bel.process_pybel_graph(graph)
    assert pbp.statements
    assert len(pbp.statements) == 1
    stmt = pbp.statements[0]
    assert isinstance(stmt, Gap)
    assert stmt.gap.name == 'RASA1'
    assert stmt.ras.name == 'KRAS'
    assert stmt.gap.activity.activity_type == 'activity'
    assert stmt.gap.activity.is_active is True
    assert stmt.ras.activity is None
    assert len(stmt.evidence) == 1
def test_activation_bioprocess():
    """p -> bp maps to Activation of the biological process."""
    graph = BELGraph()
    graph.add_increases(Protein(name='BAX', namespace='HGNC'),
                        bioprocess(name='apoptotic process',
                                   namespace='GOBP'),
                        evidence="Some evidence.", citation='123456')
    pbp = bel.process_pybel_graph(graph)
    assert pbp.statements
    assert len(pbp.statements) == 1
    stmt = pbp.statements[0]
    assert isinstance(stmt, Activation)
    assert stmt.subj.name == 'BAX'
    assert stmt.obj.name == 'apoptotic process'
    assert 'GO' in stmt.obj.db_refs
    assert len(stmt.evidence) == 1
def test_gtpactivation():
    """gtp(p) directlyIncreases kin(p) maps to GtpActivation."""
    graph = BELGraph()
    graph.add_directly_increases(Protein(name='KRAS', namespace='HGNC'),
                                 Protein(name='BRAF', namespace='HGNC'),
                                 source_modifier=activity(name='gtp'),
                                 target_modifier=activity(name='kin'),
                                 evidence="Some evidence.",
                                 citation='123456')
    pbp = bel.process_pybel_graph(graph)
    assert pbp.statements
    assert len(pbp.statements) == 1
    stmt = pbp.statements[0]
    assert isinstance(stmt, GtpActivation), stmt
    assert stmt.subj.name == 'KRAS'
    assert stmt.subj.activity.activity_type == 'gtpbound'
    assert stmt.subj.activity.is_active is True
    assert stmt.obj.name == 'BRAF'
    assert stmt.obj.activity is None
    assert stmt.obj_activity == 'kinase'
    assert len(stmt.evidence) == 1
def test_conversion():
    """An enzyme controlling a reaction maps to a Conversion statement."""
    enz = Protein(name='PLCG1', namespace='HGNC')
    react_1 = abundance('SCHEM',
                        '1-Phosphatidyl-D-myo-inositol 4,5-bisphosphate')
    p1 = abundance('SCHEM', 'Diacylglycerol')
    p2 = abundance('SCHEM', 'Inositol 1,4,5-trisphosphate')
    rxn = reaction(
        reactants=react_1,
        products=[p1, p2],
    )
    g = BELGraph()
    g.add_directly_increases(enz, rxn,
                             source_modifier=activity(),
                             evidence="Some evidence.", citation='123456')
    pbp = bel.process_pybel_graph(g)
    assert pbp.statements
    assert len(pbp.statements) == 1
    stmt = pbp.statements[0]
    assert isinstance(stmt, Conversion)
    # The controller keeps its generic activity condition
    assert stmt.subj.name == 'PLCG1'
    assert stmt.subj.activity is not None
    assert stmt.subj.activity.activity_type is not None
    assert stmt.subj.activity.activity_type == 'activity', f'Got: {stmt.subj.activity.activity_type}'
    assert stmt.subj.activity.is_active is True
    assert len(stmt.obj_from) == 1
    assert isinstance(stmt.obj_from[0], Agent)
    assert stmt.obj_from[0].name == '1-Phosphatidyl-D-myo-inositol ' \
        '4,5-bisphosphate'
    assert len(stmt.obj_to) == 2
    # why do these not appear in alphabetical order?
    # PyBEL sorts the nodes based on their BEL, and
    # Inositol 1,4,5-trisphosphate gets quoted.
    assert stmt.obj_to[0].name == 'Inositol 1,4,5-trisphosphate'
    assert stmt.obj_to[1].name == 'Diacylglycerol'
    assert len(stmt.evidence) == 1
def test_controlled_transloc_loc_cond():
    """Controlled translocations are currently not handled."""
    transloc = translocation(
        from_loc=Entity(namespace='GOCC', name='intracellular'),
        to_loc=Entity(namespace='GOCC', name='extracellular space'),
    )
    graph = BELGraph()
    graph.add_increases(Protein(name='MAP2K1', namespace='HGNC'),
                        Protein(name='MAPK1', namespace='HGNC'),
                        target_modifier=transloc,
                        evidence="Some evidence.", citation='123456')
    pbp = bel.process_pybel_graph(graph)
    assert not pbp.statements, pbp.statements
def test_subject_transloc_loc_cond():
    """Translocations of the subject are treated as location conditions on
    the subject (using the to_loc location as the condition)."""
    transloc = translocation(
        from_loc=Entity(namespace='GOCC', name='intracellular'),
        to_loc=Entity(namespace='GOCC', name='extracellular space'),
    )
    graph = BELGraph()
    graph.add_increases(Protein(name='MAP2K1', namespace='HGNC'),
                        Protein(name='MAPK1', namespace='HGNC'),
                        source_modifier=transloc,
                        evidence="Some evidence.", citation='123456')
    pbp = bel.process_pybel_graph(graph)
    assert pbp.statements
    assert len(pbp.statements) == 1
    stmt = pbp.statements[0]
    assert isinstance(stmt, IncreaseAmount)
    assert stmt.subj.name == 'MAP2K1'
    assert stmt.subj.location is not None
    assert stmt.subj.location == 'extracellular space'
    assert stmt.obj.name == 'MAPK1'
def test_subject_transloc_active_form():
    """An ActiveForm whose subject is a translocation draws on the
    to-location of the subject."""
    transloc = translocation(
        from_loc=Entity(namespace='GOCC', name='intracellular'),
        to_loc=Entity(namespace='GOCC', name='extracellular space'),
    )
    graph = BELGraph()
    # Same protein on both ends, so this becomes an ActiveForm
    graph.add_increases(Protein(name='MAP2K1', namespace='HGNC'),
                        Protein(name='MAP2K1', namespace='HGNC'),
                        source_modifier=transloc,
                        target_modifier=activity(name='kin'),
                        evidence="Some evidence.", citation='123456')
    pbp = bel.process_pybel_graph(graph)
    assert pbp.statements
    assert len(pbp.statements) == 1
    stmt = pbp.statements[0]
    assert isinstance(stmt, ActiveForm)
    assert stmt.agent.name == 'MAP2K1'
    assert stmt.agent.location == 'extracellular space'
    assert stmt.agent.activity is None
    assert stmt.activity == 'kinase'
    assert stmt.is_active is True
def test_complex_stmt_with_activation():
    """A complex subject yields both a Complex statement and an Activation
    whose subject carries the bound partner."""
    raf = Protein(name='BRAF', namespace='HGNC')
    mek = Protein(name='MAP2K1', namespace='HGNC')
    erk = Protein(name='MAPK1', namespace='HGNC')
    graph = BELGraph()
    graph.add_directly_increases(complex_abundance([raf, mek]), erk,
                                 target_modifier=activity(name='kin'),
                                 evidence="Some evidence.",
                                 citation='123456')
    pbp = bel.process_pybel_graph(graph)
    assert pbp.statements
    assert len(pbp.statements) == 2
    cplx_stmt, act_stmt = pbp.statements
    assert isinstance(cplx_stmt, Complex)
    assert len(cplx_stmt.agent_list()) == 2
    assert sorted(ag.name for ag in cplx_stmt.agent_list()) == \
        ['BRAF', 'MAP2K1']
    assert cplx_stmt.evidence
    assert isinstance(act_stmt, Activation)
    assert act_stmt.subj.name == 'BRAF'
    assert act_stmt.subj.bound_conditions[0].agent.name == 'MAP2K1'
    assert act_stmt.obj.name == 'MAPK1'
    assert act_stmt.obj.activity is None
    assert act_stmt.obj_activity == 'kinase'
def test_process_bel_stmts():
    """process_bel_stmt parses single BEL statement strings."""
    proc = bel.process_bel_stmt('p(HGNC:MDM2) directlyDecreases '
                                'tscript(p(HGNC:TP53))')
    assert len(proc.statements) == 1
    stmt = proc.statements[0]
    assert isinstance(stmt, Inhibition), proc.statements
    assert stmt.subj.name == 'MDM2', proc.statements
    assert stmt.obj.name == 'TP53', proc.statements
    proc = bel.process_bel_stmt('a(CHEBI:lipoprotein) increases '
                                'bp(GOBP:"inflammatory response")')
    assert len(proc.statements) == 1
    stmt = proc.statements[0]
    assert isinstance(stmt, Activation), proc.statements
    assert stmt.subj.name == 'lipoprotein', proc.statements
    assert stmt.obj.name == 'inflammatory response', proc.statements
|
sorgerlab/belpy
|
indra/tests/test_pybel_api.py
|
Python
|
mit
| 30,089
|
[
"Pybel"
] |
9adebb0d4ac3378e682f81cccb76478a28a5b4341846c0e1e8229ad2ed6a65ad
|
#!/usr/bin/env python
'''
COPYRIGHT 2013 RPS ASA
This file is part of Wicken.
Wicken is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Wicken is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Wicken. If not, see <http://www.gnu.org/licenses/>.
@author David Stuebe <dstuebe@asasscience.com>
@file dogma.py
@date 06/03/13
@description The dogma module provides a metaclass based approach to mapping a flat name
space for class properties to any storage format and metadata schema. Example classes are
implemented for a dictionary storage. A particular mapping from the flat namespace used for
the properties to the metadata schema must be provided at runtime.
'''
from __future__ import absolute_import, print_function, division
from six import iteritems, with_metaclass
import re
from .exceptions import DogmaGetterSetterException
from .exceptions import DogmaMetaClassException
from .exceptions import DogmaDeleteException
class Tenets(object):
    # Data descriptor mapping a flat property name (a 'belief') to a
    # schema-specific key (a 'teaching'). All access is delegated to the
    # owning Dogma instance's _get/_set/_del methods; any failure is
    # re-raised wrapped with the property and class name for context.
    def __init__(self, belief, teaching, doc, options=None):
        '''
        belief is a string which will become a property of a particular dogma object
        teaching is the string, collection or object that is used by the _set and
        _get method of this Dogma to map a belief about what a metadata element should be
        called by IOOS to a particular schema, say ISO XML or NetCDF CF.
        '''
        self.belief = belief
        self.teaching = teaching
        # Descriptor docstring shown via help() on the generated property
        self.__doc__ = doc
        self.options = options or {}
    def __get__(self, dogma, objtype=None):
        # Delegate to the instance's storage-specific getter
        try:
            return dogma._get(self.teaching, self.options)
        except Exception as ex:
            exception_string = ''
            exception_string += '''Error getting the '%s' property of the class '%s'\n''' % (self.belief, dogma.__class__.__name__)
            #exception_string += '''Instance data object status: '%s'\n''' % dogma._dataObject
            exception_string += '''Get operation raised exception: '%s' ''' % ex.__repr__()
            raise DogmaGetterSetterException(exception_string)
    def __set__(self, dogma, value):
        # Delegate to the instance's storage-specific setter
        try:
            dogma._set(self.teaching, value, self.options)
        except Exception as ex:
            exception_string = ''
            exception_string += '''Error setting the '%s' property of the class '%s'\n''' % (self.belief, dogma.__class__.__name__)
            #exception_string += '''Instance data object status: '%s'\n''' % dogma._dataObject
            exception_string += '''Set operation raised exception: '%s' ''' % ex.__repr__()
            raise DogmaGetterSetterException(exception_string)
    def __delete__(self, dogma):
        # Delegate to the instance's storage-specific deleter; note this
        # raises DogmaDeleteException rather than DogmaGetterSetterException
        try:
            dogma._del(self.teaching, self.options)
        except Exception as ex:
            exception_string = ''
            exception_string += '''Error deleting the '%s' property of the class '%s'\n''' % (self.belief, dogma.__class__.__name__)
            #exception_string += '''Instance data object status: '%s'\n''' % dogma._dataObject
            exception_string += '''Delete operation raised exception: '%s' ''' % ex.__repr__()
            raise DogmaDeleteException(exception_string)
class MetaReligion(type):
    """
    Designed for working with metadata and all of the strong personal convictions that go
    with it.

    Calling a class with this metaclass builds a new subclass on the fly
    whose properties (Tenets descriptors) are generated from the supplied
    beliefs mapping, then instantiates that subclass.
    """
    # Valid names: word characters and hyphens, no whitespace. A raw
    # string is used so '\w' is not an (invalid) string escape, and the
    # pattern is compiled once instead of on every call.
    _NAME_RE = re.compile(r'^[\w-]+$')

    def __call__(cls, religion, beliefs, *args, **kwargs):
        '''
        cls is the base class which new properties will be added to
        religion is the unique prefix for that class and its beliefs (properties)
        beliefs is a dictionary that maps property names (IOOS metadata) to a particular schema (ISO, CF, etc)
        @TODO - store the clsTypes so that they are only generated once - but how are they identified?
        '''
        clsName = religion + cls.__name__
        clsDict = {}
        if MetaReligion._NAME_RE.match(religion) is None:
            raise DogmaMetaClassException('''Blasphemy! The name of your metadata religion (class name prefix: '%s') must be alpha numeric with no whitespace''' % religion)
        clsDict['_religion'] = religion
        clsDict['_beliefs'] = beliefs
        clsDict['_fixup_belief'] = cls._fixup_belief
        for origbelief, teaching in iteritems(beliefs):
            belief, opts = cls._fixup_belief(origbelief)
            if isinstance(teaching, dict):
                # store old name
                teaching['original_name'] = origbelief
                doc = teaching.get('desc', '')
                teaching = teaching['query']
            else:
                doc = cls._create_doc(belief, teaching)
            # use a class method from the Dogma class to validate/transform the teaching
            teaching = cls._validate_teaching(belief, teaching, *args, **kwargs)
            clsDict[belief] = Tenets(belief, teaching, doc=doc, options=opts)
        valid_property_names = tuple(beliefs.keys())

        def obj_setter(self, k, v):
            # Only private attributes and the generated belief properties
            # may be assigned on instances of the generated class.
            if not k.startswith('_') and k not in valid_property_names:
                raise AttributeError('''Blasphemy! You can't create the new beliefs (property %s) on an instance of %s - only god can create properties when the class is defined''' % (k, clsName))
            super(Dogma, self).__setattr__(k, v)
        clsDict['__setattr__'] = obj_setter
        clsType = MetaReligion.__new__(MetaReligion, clsName, (cls,), clsDict)
        # Finally allow the instantiation to occur, but slip in our new class type
        obj = super(MetaReligion, clsType).__call__(religion, beliefs, *args, **kwargs)
        return obj

    @classmethod
    def _fixup_belief(cls, belief):
        """
        Transforms beliefs into valid strings if possible and parses any options.
        Beliefs will always be lowercased.
        If it has spaces, they are converted to underscores ex: "Sensor Names" -> "sensor_names"
        Options:
        Multiple values: a belief with a suffix of "*" will allow multiple values returned from the get
        when used with MultipleXmlDogma.
        """
        extra = {}
        belief = belief.lower()
        if ' ' in belief:
            belief = belief.replace(' ', '_')
        if belief.endswith("*"):
            belief = belief[:-1]
            extra['multiple'] = True
        # check for invalid characters in the belief which is used as a property name
        if MetaReligion._NAME_RE.match(belief) is None:
            raise DogmaMetaClassException('''blasphemous belief! (property name: '%s') - even god can not make properties with non-alpha-numeric symbols or whitespace''' % belief)
        if belief.startswith('_'):
            raise DogmaMetaClassException('''Blasphemous belief! (property name: '%s') - even god can not make properties that start with an underscore''' % belief)
        return belief, extra
class Dogma(with_metaclass(MetaReligion, object)):
    """Base class for metadata accessors.

    The MetaReligion metaclass turns each entry of ``beliefs`` into a
    property on a synthesized subclass; subclasses of Dogma implement the
    ``_get``/``_set``/``_del`` hooks against their backing ``dataObject``.
    """
    def __init__(self, religion, beliefs, dataObject):
        # religion/beliefs were already consumed by the metaclass; only the
        # backing data object is kept on the instance.
        self._dataObject = dataObject
    def _get(self, key):
        # Backend read hook; concrete subclasses must override.
        raise NotImplementedError('_get Method is not implemented in the Dogma Base Class!')
    def _set(self, key, value):
        # Backend write hook; concrete subclasses must override.
        raise NotImplementedError('_set Method is not implemented in the Dogma Base Class!')
    def _del(self, key):
        # Backend delete hook; concrete subclasses must override.
        raise NotImplementedError('_del Method is not implemented in the Dogma Base Class!')
    @classmethod
    def _validate_teaching(cls, belief, teaching, *args, **kwargs):
        """
        Default implementation of the validation method for the teaching objects used as
        keys in the _get and _set methods
        """
        return teaching
    @classmethod
    def _create_doc(cls, belief, teaching):
        """
        Default implementation to create a doc string for a tenet
        """
        return '''This is the belief that '%s' is the true name for '%s' as taught by the class %s''' % (belief, teaching, cls.__name__)
|
ioos/wicken
|
wicken/dogma.py
|
Python
|
apache-2.0
| 8,388
|
[
"NetCDF"
] |
e064e8a86c39215543944a282c057771f6040ba306e2881f09b18eb227992745
|
"""Timeseries model using FSL's gaussian least squares."""
import re
import os.path as op
import numpy as np
from scipy import stats, signal
import pandas as pd
import nibabel as nib
import matplotlib.pyplot as plt
from moss import glm
from moss.mosaic import Mosaic
import seaborn as sns
from nipype import Node, MapNode, Workflow, IdentityInterface
from nipype.interfaces import fsl
from nipype.interfaces.base import (BaseInterface,
BaseInterfaceInputSpec,
InputMultiPath, OutputMultiPath,
TraitedSpec, File, traits,
isdefined)
import lyman
from lyman.tools import ManyOutFiles, SaveParameters, nii_to_png
def create_timeseries_model_workflow(name="model", exp_info=None):
    """Build a nipype workflow that fits the timeseries model with FSL.

    Parameters
    ----------
    name : str
        Name assigned to the returned Workflow.
    exp_info : dict or None
        Experiment parameters; lyman defaults are used when None.

    Returns
    -------
    (Workflow, Node, Node)
        The workflow plus its IdentityInterface input and output nodes.
    """
    # Default experiment parameters for generating graph image, testing, etc.
    if exp_info is None:
        exp_info = lyman.default_experiment_parameters()
    # Define constant inputs
    inputs = ["realign_file", "nuisance_file", "artifact_file", "timeseries"]
    # Possibly add the design and regressor files to the inputs
    if exp_info["design_name"] is not None:
        inputs.append("design_file")
    if exp_info["regressor_file"] is not None:
        inputs.append("regressor_file")
    # Define the workflow inputs
    inputnode = Node(IdentityInterface(inputs), "inputs")
    # Set up the experimental design
    modelsetup = MapNode(ModelSetup(exp_info=exp_info),
                         ["timeseries", "realign_file",
                          "nuisance_file", "artifact_file"],
                         "modelsetup")
    # For some nodes, make it possible to request extra memory
    mem_request = {"qsub_args": "-l h_vmem=%dG" % exp_info["memory_request"]}
    # Use film_gls to estimate the timeseries model
    modelestimate = MapNode(fsl.FILMGLS(smooth_autocorr=True,
                                        mask_size=5,
                                        threshold=100),
                            ["design_file", "in_file", "tcon_file"],
                            "modelestimate")
    modelestimate.plugin_args = mem_request
    # Compute summary statistics about the model fit
    modelsummary = MapNode(ModelSummary(),
                           ["design_matrix_pkl",
                            "timeseries",
                            "pe_files"],
                           "modelsummary")
    modelsummary.plugin_args = mem_request
    # Save the experiment info for this run
    saveparams = MapNode(SaveParameters(exp_info=exp_info),
                         "in_file", "saveparams")
    # Report on the results of the model
    # Note: see below for a conditional iterfield
    modelreport = MapNode(ModelReport(),
                          ["timeseries", "sigmasquareds_file",
                           "tsnr_file", "r2_files"],
                          "modelreport")
    # Define the workflow outputs
    outputnode = Node(IdentityInterface(["results",
                                         "copes",
                                         "varcopes",
                                         "zstats",
                                         "r2_files",
                                         "ss_files",
                                         "tsnr_file",
                                         "report",
                                         "design_mat",
                                         "contrast_mat",
                                         "design_pkl",
                                         "design_report",
                                         "json_file"]),
                      "outputs")
    # Define the workflow and connect the nodes
    model = Workflow(name=name)
    model.connect([
        (inputnode, modelsetup,
            [("realign_file", "realign_file"),
             ("nuisance_file", "nuisance_file"),
             ("artifact_file", "artifact_file"),
             ("timeseries", "timeseries")]),
        (inputnode, modelestimate,
            [("timeseries", "in_file")]),
        (inputnode, saveparams,
            [("timeseries", "in_file")]),
        (modelsetup, modelestimate,
            [("design_matrix_file", "design_file"),
             ("contrast_file", "tcon_file")]),
        (modelsetup, modelsummary,
            [("design_matrix_pkl", "design_matrix_pkl")]),
        (inputnode, modelsummary,
            [("timeseries", "timeseries")]),
        (modelestimate, modelsummary,
            [("param_estimates", "pe_files")]),
        (inputnode, modelreport,
            [("timeseries", "timeseries")]),
        (modelestimate, modelreport,
            [("sigmasquareds", "sigmasquareds_file")]),
        (modelsummary, modelreport,
            [("r2_files", "r2_files"),
             ("tsnr_file", "tsnr_file")]),
        (modelsetup, outputnode,
            [("design_matrix_file", "design_mat"),
             ("contrast_file", "contrast_mat"),
             ("design_matrix_pkl", "design_pkl"),
             ("report", "design_report")]),
        (saveparams, outputnode,
            [("json_file", "json_file")]),
        (modelestimate, outputnode,
            [("results_dir", "results"),
             ("copes", "copes"),
             ("varcopes", "varcopes"),
             ("zstats", "zstats")]),
        (modelsummary, outputnode,
            [("r2_files", "r2_files"),
             ("ss_files", "ss_files"),
             ("tsnr_file", "tsnr_file")]),
        (modelreport, outputnode,
            [("out_files", "report")]),
    ])
    # Optional connections: only wired when the experiment defines them.
    if exp_info["design_name"] is not None:
        model.connect(inputnode, "design_file",
                      modelsetup, "design_file")
    if exp_info["regressor_file"] is not None:
        model.connect(inputnode, "regressor_file",
                      modelsetup, "regressor_file")
    if exp_info["contrasts"]:
        model.connect(modelestimate, "zstats",
                      modelreport, "zstat_files")
        modelreport.iterfield.append("zstat_files")
    return model, inputnode, outputnode
# =========================================================================== #
class ModelSetupInput(BaseInterfaceInputSpec):
    """Input traits for ModelSetup: experiment parameters and per-run files."""
    exp_info = traits.Dict()
    timeseries = File(exists=True)
    design_file = File(exists=True)
    realign_file = File(exists=True)
    nuisance_file = File(exists=True)
    artifact_file = File(exists=True)
    regressor_file = File(exists=True)
class ModelSetupOutput(TraitedSpec):
    """Output traits for ModelSetup: FSL-format design files and report images."""
    design_matrix_file = File(exists=True)
    contrast_file = File(exists=True)
    design_matrix_pkl = File(exists=True)
    report = OutputMultiPath(File(exists=True))
class ModelSetup(BaseInterface):
    """Nipype interface that builds the GLM design and writes FSL design files.

    Reads the run's timeseries, design, confound, artifact, and regressor
    inputs, assembles a ``glm.DesignMatrix``, renders summary images, and
    writes ``design.mat``/``design.con``/``design.pkl`` to the working dir.
    """
    input_spec = ModelSetupInput
    output_spec = ModelSetupOutput
    def _run_interface(self, runtime):
        # Get all the information for the design
        design_kwargs = self.build_design_information()
        # Initialize the design matrix object
        X = glm.DesignMatrix(**design_kwargs)
        # Report on the design
        self.design_report(self.inputs.exp_info, X, design_kwargs)
        # Write out the design object as a pkl to pass to the report function
        X.to_pickle("design.pkl")
        # Finally, write out the design files in FSL format
        X.to_fsl_files("design", self.inputs.exp_info["contrasts"])
        return runtime
    def build_design_information(self):
        """Collect design, confound, artifact, and regressor information.

        Returns a dict of keyword arguments for ``glm.DesignMatrix``.
        Raises ValueError on an unknown confound source name.
        """
        # Load in the design information
        exp_info = self.inputs.exp_info
        tr = self.inputs.exp_info["TR"]
        # Derive the length of the scan and run number from the timeseries
        ntp = nib.load(self.inputs.timeseries).shape[-1]
        # Raw string: "\d" in a plain literal is an invalid escape on
        # modern Python. Assumes the filename encodes "run_<n>".
        run = int(re.search(r"run_(\d+)", self.inputs.timeseries).group(1))
        # Get the experimental design
        if isdefined(self.inputs.design_file):
            design = pd.read_csv(self.inputs.design_file)
            design = design[design["run"] == run]
        else:
            design = None
        # Get confound information to add to the model
        confounds = []
        sources = exp_info["confound_sources"]
        bad_sources = set(sources) - set(["motion", "wm", "brain"])
        if bad_sources:
            msg = ("Invalid confound source specification: {}"
                   .format(list(bad_sources)))
            raise ValueError(msg)
        # Get the motion correction parameters
        if "motion" in sources:
            realign = pd.read_csv(self.inputs.realign_file)
            realign = realign.filter(regex="rot|trans").apply(stats.zscore)
            confounds.append(realign)
        # Get the anatomical nuisance sources
        nuisance = pd.read_csv(self.inputs.nuisance_file).apply(stats.zscore)
        if "wm" in sources:
            wm = nuisance.filter(regex="wm")
            confounds.append(wm)
        if "brain" in sources:
            brain = nuisance["brain"]
            confounds.append(brain)
        # Combine the different confound sources
        if confounds:
            confounds = pd.concat(confounds, axis=1)
        else:
            confounds = None
        # Get the image artifacts
        if exp_info["remove_artifacts"]:
            artifacts = pd.read_csv(self.inputs.artifact_file).max(axis=1)
        else:
            artifacts = None
        # Get the additional model regressors
        if isdefined(self.inputs.regressor_file):
            regressors = pd.read_csv(self.inputs.regressor_file)
            regressors = regressors[regressors["run"] == run]
            regressors = regressors.drop("run", axis=1)
            if exp_info["regressor_names"] is not None:
                regressors = regressors[exp_info["regressor_names"]]
            # Index regressors by acquisition time in seconds
            regressors.index = np.arange(ntp) * tr
        else:
            regressors = None
        # Set up the HRF model
        hrf = getattr(glm, exp_info["hrf_model"])
        hrf = hrf(exp_info["temporal_deriv"], tr, **exp_info["hrf_params"])
        # Build a dict of keyword arguments for the design matrix
        design_kwargs = dict(design=design,
                             hrf_model=hrf,
                             ntp=ntp,
                             tr=tr,
                             confounds=confounds,
                             artifacts=artifacts,
                             regressors=regressors,
                             condition_names=exp_info["condition_names"],
                             confound_pca=exp_info["confound_pca"],
                             hpf_cutoff=exp_info["hpf_cutoff"])
        return design_kwargs
    def design_report(self, exp_info, X, design_kwargs):
        """Generate static images summarizing the design."""
        # Plot the design itself
        design_png = op.abspath("design.png")
        X.plot(fname=design_png, close=True)
        with sns.axes_style("whitegrid"):
            # Plot the eigenvalue spectrum
            svd_png = op.abspath("design_singular_values.png")
            X.plot_singular_values(fname=svd_png, close=True)
        # Plot the correlations between design elements and confounds
        corr_png = op.abspath("design_correlation.png")
        if design_kwargs["design"] is None:
            # No task design: write an empty placeholder so downstream
            # file expectations still hold.
            with open(corr_png, "wb"):
                pass
        else:
            X.plot_confound_correlation(fname=corr_png, close=True)
        # Build a list of images summarizing the model
        report = [design_png, corr_png, svd_png]
        # Now plot the information loss from the high-pass filter
        design_kwargs["hpf_cutoff"] = None
        X_unfiltered = glm.DesignMatrix(**design_kwargs)
        tr = design_kwargs["tr"]
        ntp = design_kwargs["ntp"]
        # Plot for each contrast
        for i, (name, cols, weights) in enumerate(exp_info["contrasts"], 1):
            # Compute the contrast predictors
            C = X.contrast_vector(cols, weights)
            y_filt = X.design_matrix.dot(C)
            y_unfilt = X_unfiltered.design_matrix.dot(C)
            # Compute the spectral density for filtered and unfiltered
            fs, pxx_filt = signal.welch(y_filt, 1. / tr, nperseg=ntp)
            fs, pxx_unfilt = signal.welch(y_unfilt, 1. / tr, nperseg=ntp)
            # Draw the spectral density
            with sns.axes_style("whitegrid"):
                f, ax = plt.subplots(figsize=(9, 3))
                ax.fill_between(fs, pxx_unfilt, color="#C41E3A")
                ax.axvline(1.0 / exp_info["hpf_cutoff"], c=".3", ls=":", lw=1.5)
                ax.fill_between(fs, pxx_filt, color=".5")
            # Label the plot
            ax.set(xlabel="Frequency",
                   ylabel="Spectral Density",
                   xlim=(0, .15))
            plt.tight_layout()
            # Save the plot
            fname = op.abspath("cope%d_filter.png" % i)
            f.savefig(fname, dpi=100)
            plt.close(f)
            report.append(fname)
        # Store the report files for later
        self.report_files = report
    def _list_outputs(self):
        """Map the files written by _run_interface onto the output spec."""
        outputs = self._outputs().get()
        outputs["report"] = self.report_files
        outputs["contrast_file"] = op.abspath("design.con")
        outputs["design_matrix_pkl"] = op.abspath("design.pkl")
        outputs["design_matrix_file"] = op.abspath("design.mat")
        return outputs
class ModelSummaryInput(BaseInterfaceInputSpec):
    """Input traits for ModelSummary: pickled design, timeseries, and PEs."""
    design_matrix_pkl = File(exists=True)
    timeseries = File(exists=True)
    pe_files = InputMultiPath(File(exists=True))
class ModelSummaryOutput(TraitedSpec):
    """Output traits for ModelSummary: R^2, sum-of-squares, and tSNR images."""
    r2_files = OutputMultiPath(File(exists=True))
    ss_files = OutputMultiPath(File(exists=True))
    tsnr_file = File(exists=True)
class ModelSummary(BaseInterface):
    """Nipype interface computing fit-quality summaries for the GLM.

    Produces full/main/confound R^2 maps, residual sum-of-squares maps,
    and a residual tSNR image from the design and parameter estimates.
    """
    input_spec = ModelSummaryInput
    output_spec = ModelSummaryOutput
    def _run_interface(self, runtime):
        # Load the design matrix object
        X = glm.DesignMatrix.from_pickle(self.inputs.design_matrix_pkl)
        # Load and de-mean the timeseries
        ts_img = nib.load(self.inputs.timeseries)
        ts_aff, ts_header = ts_img.get_affine(), ts_img.get_header()
        y = ts_img.get_data()
        ybar = y.mean(axis=-1)[..., np.newaxis]
        y -= ybar
        self.y = y
        # Store the image attributes
        self.affine = ts_aff
        self.header = ts_header
        # Load the parameter estimates, make 4D, and concatenate
        pes = [nib.load(f).get_data() for f in self.inputs.pe_files]
        pes = [pe[..., np.newaxis] for pe in pes]
        pes = np.concatenate(pes, axis=-1)
        # Compute and save the total sum of squares
        self.sstot = np.sum(np.square(y), axis=-1)
        self.save_image(self.sstot, "sstot")
        # Compute the full model r squared
        yhat_full = self.dot_by_slice(X, pes)
        ss_full, r2_full = self.compute_r2(yhat_full)
        self.save_image(ss_full, "ssres_full")
        self.save_image(r2_full, "r2_full")
        del yhat_full, r2_full
        # Compute the main model r squared
        yhat_main = self.dot_by_slice(X, pes, "main")
        ss_main, r2_main = self.compute_r2(yhat_main)
        self.save_image(ss_main, "ssres_main")
        self.save_image(r2_main, "r2_main")
        del yhat_main, r2_main
        # Compute the confound model r squared
        yhat_confound = self.dot_by_slice(X, pes, "confound")
        _, r2_confound = self.compute_r2(yhat_confound)
        self.save_image(r2_confound, "r2_confound")
        del yhat_confound, r2_confound
        # Compute and save the residual tSNR
        # NOTE(review): len(y) is the size of the first spatial axis, not
        # the number of timepoints (y.shape[-1]) -- confirm this divisor
        # is intentional before relying on tSNR magnitudes. Preserved as-is.
        std = np.sqrt(ss_full / len(y))
        tsnr = np.squeeze(ybar) / std
        tsnr = np.nan_to_num(tsnr)
        self.save_image(tsnr, "tsnr")
        return runtime
    def save_image(self, data, fname):
        """Save data to the output structure."""
        img = nib.Nifti1Image(data, self.affine, self.header)
        img.to_filename(fname + ".nii.gz")
    def dot_by_slice(self, X, pes, component=None):
        """Broadcast a dot product by image slices to balance speed/memory."""
        if component is not None:
            # Mask out the columns not belonging to this design component
            pes = pes * getattr(X, component + "_vector").T[np.newaxis,
                                                            np.newaxis, :, :]
        # Set up the output data structure
        n_x, n_y, n_z, n_pe = pes.shape
        n_t = X.design_matrix.shape[0]
        out = np.empty((n_x, n_y, n_z, n_t))
        # Do the dot product, broadcasted for each Z slice
        for k in range(n_z):
            slice_pe = pes[:, :, k, :].reshape(-1, n_pe).T
            slice_dot = X.design_matrix.values.dot(slice_pe)
            out[:, :, k, :] = slice_dot.T.reshape(n_x, n_y, n_t)
        return out
    def compute_r2(self, yhat):
        """Efficiently compute the coefficient of determination (R^2).

        Accumulates the residual sum of squares frame-by-frame to avoid
        materializing the full 4D residual array.
        """
        ssres = np.zeros_like(self.sstot)
        n_frames = yhat.shape[-1]
        # range() instead of Python-2-only xrange() for Py3 compatibility
        for tr in range(n_frames):
            ssres += np.square(yhat[..., tr] - self.y[..., tr])
        r2 = 1 - ssres / self.sstot
        return ssres, r2
    def _list_outputs(self):
        """Map the images written by _run_interface onto the output spec."""
        outputs = self._outputs().get()
        outputs["r2_files"] = [op.abspath("r2_full.nii.gz"),
                               op.abspath("r2_main.nii.gz"),
                               op.abspath("r2_confound.nii.gz")]
        outputs["ss_files"] = [op.abspath("sstot.nii.gz"),
                               op.abspath("ssres_full.nii.gz"),
                               op.abspath("ssres_main.nii.gz")]
        outputs["tsnr_file"] = op.abspath("tsnr.nii.gz")
        return outputs
class ModelReportInput(BaseInterfaceInputSpec):
    """Input traits for ModelReport; zstat_files is optional (conditional iterfield)."""
    timeseries = File(exists=True)
    sigmasquareds_file = File(exists=True)
    tsnr_file = File(exists=True)
    zstat_files = InputMultiPath(File(exists=True))
    r2_files = InputMultiPath(File(exists=True))
class ModelReport(BaseInterface):
    """Nipype interface that renders QC mosaic images of the model fit."""
    input_spec = ModelReportInput
    output_spec = ManyOutFiles
    def _run_interface(self, runtime):
        # Load the sigmasquareds and use it to infer the model mask
        var_img = nib.load(self.inputs.sigmasquareds_file).get_data()
        self.mask = (var_img > 0).astype(np.int16)
        # Load the timeseries and take the mean over time for a background
        ts_img = nib.load(self.inputs.timeseries)
        self.mean = nib.Nifti1Image(ts_img.get_data().mean(axis=-1),
                                    ts_img.get_affine(),
                                    ts_img.get_header())
        # Set up the output list
        self.out_files = []
        # Plot the data
        self.plot_residuals()
        self.plot_rsquareds()
        self.plot_tsnr()
        # zstats are only connected when the experiment defines contrasts
        if isdefined(self.inputs.zstat_files):
            self.plot_zstats()
        return runtime
    def plot_residuals(self):
        """Plot the variance of the model residuals across time."""
        ss = self.inputs.sigmasquareds_file
        m = Mosaic(self.mean, ss, self.mask, step=1)
        m.plot_overlay("cube:.8:.2", 0, alpha=.6, fmt="%d")
        png_name = nii_to_png(ss)
        m.savefig(png_name)
        m.close()
        self.out_files.append(png_name)
    def plot_tsnr(self):
        """Plot the residual tSNR map as a mosaic overlay."""
        tsnr = self.inputs.tsnr_file
        m = Mosaic(self.mean, tsnr, self.mask, step=1)
        m.plot_overlay("cube:1.9:.5", 0, alpha=1, fmt="%d")
        png_name = nii_to_png(tsnr)
        m.savefig(png_name)
        m.close()
        self.out_files.append(png_name)
    def plot_rsquareds(self):
        """Plot the full, main, and confound R squared maps."""
        cmaps = ["cube:2:0", "cube:2.6:0", "cube:1.5:0"]
        for r2_file, cmap in zip(self.inputs.r2_files, cmaps):
            m = Mosaic(self.mean, r2_file, self.mask, step=1)
            m.plot_overlay(cmap, 0, alpha=.6)
            png_name = nii_to_png(r2_file)
            m.savefig(png_name)
            m.close()
            self.out_files.append(png_name)
    def plot_zstats(self):
        """Plot the positive and negative z stats with a low threshold."""
        for z_file in self.inputs.zstat_files:
            m = Mosaic(self.mean, z_file, self.mask, step=1)
            m.plot_activation(pos_cmap="Reds_r", neg_cmap="Blues",
                              thresh=1.7, alpha=.85)
            png_name = nii_to_png(z_file)
            m.savefig(png_name)
            m.close()
            self.out_files.append(png_name)
    def _list_outputs(self):
        """Return the accumulated report image paths."""
        outputs = self._outputs().get()
        outputs["out_files"] = self.out_files
        return outputs
|
tuqc/lyman
|
lyman/workflows/model.py
|
Python
|
bsd-3-clause
| 20,567
|
[
"Gaussian"
] |
2cc2119b8fe9339dae2e56d391ba9016c50aed9600f52368d0ad65ddc2b12017
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Module to test fitting routines
"""
from __future__ import (absolute_import, unicode_literals, division,
print_function)
import os.path
import pytest
import numpy as np
from numpy import linalg
from numpy.testing.utils import assert_allclose, assert_almost_equal
from . import irafutil
from .. import models
from ..core import Fittable2DModel, Parameter
from ..fitting import *
from ...utils import NumpyRNGContext
from ...utils.data import get_pkg_data_filename
from .utils import ignore_non_integer_warning
from ...stats import sigma_clip
from ...utils.exceptions import AstropyUserWarning
from ..fitting import populate_entry_points
import warnings
try:
from scipy import optimize
HAS_SCIPY = True
except ImportError:
HAS_SCIPY = False
HAS_MOCK = True
try:
from unittest import mock
except ImportError:
try:
import mock
except ImportError:
HAS_MOCK = False
try:
from pkg_resources import EntryPoint
HAS_PKG = True
except ImportError:
HAS_PKG = False
fitters = [SimplexLSQFitter, SLSQPLSQFitter]
_RANDOM_SEED = 0x1337
class TestPolynomial2D(object):
    """Tests for 2D polynomial fitting."""
    def setup_class(self):
        # Exact polynomial surface on a 5x5 grid; linear fits should be exact.
        self.model = models.Polynomial2D(2)
        self.y, self.x = np.mgrid[:5, :5]
        def poly2(x, y):
            return 1 + 2 * x + 3 * x ** 2 + 4 * y + 5 * y ** 2 + 6 * x * y
        self.z = poly2(self.x, self.y)
        self.fitter = LinearLSQFitter()
    def test_poly2D_fitting(self):
        """Fitted parameters agree with a direct lstsq on the derivatives."""
        v = self.model.fit_deriv(x=self.x, y=self.y)
        p = linalg.lstsq(v, self.z.flatten())[0]
        new_model = self.fitter(self.model, self.x, self.y, self.z)
        assert_allclose(new_model.parameters, p)
    def test_eval(self):
        """The fitted model reproduces the noiseless input surface."""
        new_model = self.fitter(self.model, self.x, self.y, self.z)
        assert_allclose(new_model(self.x, self.y), self.z)
    @pytest.mark.skipif('not HAS_SCIPY')
    def test_polynomial2D_nonlinear_fitting(self):
        """A nonlinear fitter recovers the true coefficients from a perturbed start."""
        self.model.parameters = [.6, 1.8, 2.9, 3.7, 4.9, 6.7]
        nlfitter = LevMarLSQFitter()
        new_model = nlfitter(self.model, self.x, self.y, self.z)
        assert_allclose(new_model.parameters, [1, 2, 3, 4, 5, 6])
class TestICheb2D(object):
    """
    Tests 2D Chebyshev polynomial fitting
    Create a 2D polynomial (z) using Polynomial2DModel and default coefficients
    Fit z using a ICheb2D model
    Evaluate the ICheb2D polynomial and compare with the initial z
    """
    def setup_class(self):
        # Reference polynomial surface plus the Chebyshev model under test.
        self.pmodel = models.Polynomial2D(2)
        self.y, self.x = np.mgrid[:5, :5]
        self.z = self.pmodel(self.x, self.y)
        self.cheb2 = models.Chebyshev2D(2, 2)
        self.fitter = LinearLSQFitter()
    def test_default_params(self):
        self.cheb2.parameters = np.arange(9)
        # Expected fitted parameters (precomputed reference values).
        p = np.array([1344., 1772., 400., 1860., 2448., 552., 432., 568.,
                      128.])
        z = self.cheb2(self.x, self.y)
        model = self.fitter(self.cheb2, self.x, self.y, z)
        assert_almost_equal(model.parameters, p)
    def test_poly2D_cheb2D(self):
        """The fitted Chebyshev model reproduces the polynomial surface."""
        model = self.fitter(self.cheb2, self.x, self.y, self.z)
        z1 = model(self.x, self.y)
        assert_almost_equal(self.z, z1)
    @pytest.mark.skipif('not HAS_SCIPY')
    def test_chebyshev2D_nonlinear_fitting(self):
        """LevMar recovers the true coefficients from a perturbed start."""
        cheb2d = models.Chebyshev2D(2, 2)
        cheb2d.parameters = np.arange(9)
        z = cheb2d(self.x, self.y)
        cheb2d.parameters = [0.1, .6, 1.8, 2.9, 3.7, 4.9, 6.7, 7.5, 8.9]
        nlfitter = LevMarLSQFitter()
        model = nlfitter(cheb2d, self.x, self.y, z)
        assert_allclose(model.parameters, [0, 1, 2, 3, 4, 5, 6, 7, 8],
                        atol=10**-9)
    @pytest.mark.skipif('not HAS_SCIPY')
    def test_chebyshev2D_nonlinear_fitting_with_weights(self):
        """Same recovery with a uniform weights array supplied."""
        cheb2d = models.Chebyshev2D(2, 2)
        cheb2d.parameters = np.arange(9)
        z = cheb2d(self.x, self.y)
        cheb2d.parameters = [0.1, .6, 1.8, 2.9, 3.7, 4.9, 6.7, 7.5, 8.9]
        nlfitter = LevMarLSQFitter()
        weights = np.ones_like(self.y)
        model = nlfitter(cheb2d, self.x, self.y, z, weights=weights)
        assert_allclose(model.parameters, [0, 1, 2, 3, 4, 5, 6, 7, 8],
                        atol=10**-9)
@pytest.mark.skipif('not HAS_SCIPY')
class TestJointFitter(object):
    """
    Tests the joint fitting routine using 2 gaussian models
    """
    def setup_class(self):
        """
        Create 2 gaussian models and some data with noise.
        Create a fitter for the two models keeping the amplitude parameter
        common for the two models.
        """
        self.g1 = models.Gaussian1D(10, mean=14.9, stddev=.3)
        self.g2 = models.Gaussian1D(10, mean=13, stddev=.4)
        self.jf = JointFitter([self.g1, self.g2],
                              {self.g1: ['amplitude'],
                               self.g2: ['amplitude']}, [9.8])
        self.x = np.arange(10, 20, .1)
        y1 = self.g1(self.x)
        y2 = self.g2(self.x)
        # Fixed seed keeps the noisy data (and hence the fit) reproducible.
        with NumpyRNGContext(_RANDOM_SEED):
            n = np.random.randn(100)
        self.ny1 = y1 + 2 * n
        self.ny2 = y2 + 2 * n
        self.jf(self.x, self.ny1, self.x, self.ny2)
    def test_joint_parameter(self):
        """
        Tests that the amplitude of the two models is the same
        """
        assert_allclose(self.jf.fitparams[0], self.g1.parameters[0])
        assert_allclose(self.jf.fitparams[0], self.g2.parameters[0])
    def test_joint_fitter(self):
        """
        Tests the fitting routine with similar procedure.
        Compares the fitted parameters.
        """
        p1 = [14.9, .3]
        p2 = [13, .4]
        A = 9.8
        p = np.r_[A, p1, p2]
        # Reference implementation: shared-amplitude double gaussian fit
        # done directly with scipy.optimize.leastsq.
        def model(A, p, x):
            return A * np.exp(-0.5 / p[1] ** 2 * (x - p[0]) ** 2)
        def errfunc(p, x1, y1, x2, y2):
            return np.ravel(np.r_[model(p[0], p[1:3], x1) - y1,
                                  model(p[0], p[3:], x2) - y2])
        coeff, _ = optimize.leastsq(errfunc, p,
                                    args=(self.x, self.ny1, self.x, self.ny2))
        assert_allclose(coeff, self.jf.fitparams, rtol=10 ** (-2))
class TestLinearLSQFitter(object):
    """Tests for the LinearLSQFitter, including model sets and fixed parameters."""
    def test_chebyshev1D(self):
        """Tests fitting a 1D Chebyshev polynomial to some real world data."""
        test_file = get_pkg_data_filename(os.path.join('data',
                                                       'idcompspec.fits'))
        with open(test_file) as f:
            lines = f.read()
            reclist = lines.split('begin')
        record = irafutil.IdentifyRecord(reclist[1])
        coeffs = record.coeff
        order = int(record.fields['order'])
        # IRAF orders are 1-based; astropy degrees are 0-based.
        initial_model = models.Chebyshev1D(order - 1,
                                           domain=record.get_range())
        fitter = LinearLSQFitter()
        fitted_model = fitter(initial_model, record.x, record.z)
        assert_allclose(fitted_model.parameters, np.array(coeffs),
                        rtol=10e-2)
    def test_linear_fit_model_set(self):
        """Tests fitting multiple models simultaneously."""
        init_model = models.Polynomial1D(degree=2, c0=[1, 1], n_models=2)
        x = np.arange(10)
        y_expected = init_model(x, model_set_axis=False)
        assert y_expected.shape == (2, 10)
        # Add a bit of random noise
        with NumpyRNGContext(_RANDOM_SEED):
            y = y_expected + np.random.normal(0, 0.01, size=y_expected.shape)
        fitter = LinearLSQFitter()
        fitted_model = fitter(init_model, x, y)
        assert_allclose(fitted_model(x, model_set_axis=False), y_expected,
                        rtol=1e-1)
    def test_linear_fit_2d_model_set(self):
        """Tests fitted multiple 2-D models simultaneously."""
        init_model = models.Polynomial2D(degree=2, c0_0=[1, 1], n_models=2)
        x = np.arange(10)
        y = np.arange(10)
        z_expected = init_model(x, y, model_set_axis=False)
        assert z_expected.shape == (2, 10)
        # Add a bit of random noise
        with NumpyRNGContext(_RANDOM_SEED):
            z = z_expected + np.random.normal(0, 0.01, size=z_expected.shape)
        fitter = LinearLSQFitter()
        fitted_model = fitter(init_model, x, y, z)
        assert_allclose(fitted_model(x, y, model_set_axis=False), z_expected,
                        rtol=1e-1)
    def test_linear_fit_fixed_parameter(self):
        """
        Tests fitting a polynomial model with a fixed parameter (issue #6135).
        """
        init_model = models.Polynomial1D(degree=2, c1=1)
        init_model.c1.fixed = True
        x = np.arange(10)
        y = 2 + x + 0.5*x*x
        fitter = LinearLSQFitter()
        fitted_model = fitter(init_model, x, y)
        assert_allclose(fitted_model.parameters, [2., 1., 0.5], atol=1e-14)
    def test_linear_fit_model_set_fixed_parameter(self):
        """
        Tests fitting a polynomial model set with a fixed parameter (#6135).
        """
        init_model = models.Polynomial1D(degree=2, c1=[1, -2], n_models=2)
        init_model.c1.fixed = True
        x = np.arange(10)
        yy = np.array([2 + x + 0.5*x*x, -2*x])
        fitter = LinearLSQFitter()
        fitted_model = fitter(init_model, x, yy)
        assert_allclose(fitted_model.c0, [2., 0.], atol=1e-14)
        assert_allclose(fitted_model.c1, [1., -2.], atol=1e-14)
        assert_allclose(fitted_model.c2, [0.5, 0.], atol=1e-14)
    def test_linear_fit_2d_model_set_fixed_parameters(self):
        """
        Tests fitting a 2d polynomial model set with fixed parameters (#6135).
        """
        init_model = models.Polynomial2D(degree=2, c1_0=[1, 2], c0_1=[-0.5, 1],
                                         n_models=2,
                                         fixed={'c1_0' : True, 'c0_1' : True})
        x, y = np.mgrid[0:5, 0:5]
        zz = np.array([1+x-0.5*y+0.1*x*x, 2*x+y-0.2*y*y])
        fitter = LinearLSQFitter()
        fitted_model = fitter(init_model, x, y, zz)
        assert_allclose(fitted_model(x, y, model_set_axis=False), zz,
                        atol=1e-14)
@pytest.mark.skipif('not HAS_SCIPY')
class TestNonLinearFitters(object):
    """Tests non-linear least squares fitting and the SLSQP algorithm."""
    def setup_class(self):
        # Noisy gaussian data with a fixed seed so fits are reproducible.
        self.initial_values = [100, 5, 1]
        self.xdata = np.arange(0, 10, 0.1)
        sigma = 4. * np.ones_like(self.xdata)
        with NumpyRNGContext(_RANDOM_SEED):
            yerror = np.random.normal(0, sigma)
        def func(p, x):
            return p[0] * np.exp(-0.5 / p[2] ** 2 * (x - p[1]) ** 2)
        self.ydata = func(self.initial_values, self.xdata) + yerror
        self.gauss = models.Gaussian1D(100, 5, stddev=1)
    def test_estimated_vs_analytic_deriv(self):
        """
        Runs `LevMarLSQFitter` with estimated and analytic derivatives of a
        `Gaussian1D`.
        """
        fitter = LevMarLSQFitter()
        model = fitter(self.gauss, self.xdata, self.ydata)
        g1e = models.Gaussian1D(100, 5.0, stddev=1)
        efitter = LevMarLSQFitter()
        emodel = efitter(g1e, self.xdata, self.ydata, estimate_jacobian=True)
        assert_allclose(model.parameters, emodel.parameters, rtol=10 ** (-3))
    def test_estimated_vs_analytic_deriv_with_weights(self):
        """
        Runs `LevMarLSQFitter` with estimated and analytic derivatives of a
        `Gaussian1D`.
        """
        weights = 1.0 / (self.ydata / 10.)
        fitter = LevMarLSQFitter()
        model = fitter(self.gauss, self.xdata, self.ydata, weights=weights)
        g1e = models.Gaussian1D(100, 5.0, stddev=1)
        efitter = LevMarLSQFitter()
        emodel = efitter(g1e, self.xdata, self.ydata, weights=weights, estimate_jacobian=True)
        assert_allclose(model.parameters, emodel.parameters, rtol=10 ** (-3))
    def test_with_optimize(self):
        """
        Tests results from `LevMarLSQFitter` against `scipy.optimize.leastsq`.
        """
        fitter = LevMarLSQFitter()
        model = fitter(self.gauss, self.xdata, self.ydata,
                       estimate_jacobian=True)
        def func(p, x):
            return p[0] * np.exp(-0.5 / p[2] ** 2 * (x - p[1]) ** 2)
        def errfunc(p, x, y):
            return func(p, x) - y
        result = optimize.leastsq(errfunc, self.initial_values,
                                  args=(self.xdata, self.ydata))
        assert_allclose(model.parameters, result[0], rtol=10 ** (-3))
    def test_with_weights(self):
        """
        Tests results from `LevMarLSQFitter` with weights.
        """
        # part 1: weights are equal to 1
        fitter = LevMarLSQFitter()
        model = fitter(self.gauss, self.xdata, self.ydata,
                       estimate_jacobian=True)
        withw = fitter(self.gauss, self.xdata, self.ydata,
                       estimate_jacobian=True, weights=np.ones_like(self.xdata))
        assert_allclose(model.parameters, withw.parameters, rtol=10 ** (-4))
        # part 2: weights are 0 or 1 (effectively, they are a mask)
        weights = np.zeros_like(self.xdata)
        weights[::2] = 1.
        mask = weights >= 1.
        model = fitter(self.gauss, self.xdata[mask], self.ydata[mask],
                       estimate_jacobian=True)
        withw = fitter(self.gauss, self.xdata, self.ydata,
                       estimate_jacobian=True, weights=weights)
        assert_allclose(model.parameters, withw.parameters, rtol=10 ** (-4))
    @pytest.mark.parametrize('fitter_class', fitters)
    def test_fitter_against_LevMar(self, fitter_class):
        """Tests results from non-linear fitters against `LevMarLSQFitter`."""
        levmar = LevMarLSQFitter()
        fitter = fitter_class()
        with ignore_non_integer_warning():
            new_model = fitter(self.gauss, self.xdata, self.ydata)
        model = levmar(self.gauss, self.xdata, self.ydata)
        assert_allclose(model.parameters, new_model.parameters,
                        rtol=10 ** (-4))
    def test_LSQ_SLSQP_with_constraints(self):
        """
        Runs `LevMarLSQFitter` and `SLSQPLSQFitter` on a model with
        constraints.
        """
        g1 = models.Gaussian1D(100, 5, stddev=1)
        g1.mean.fixed = True
        fitter = LevMarLSQFitter()
        fslsqp = SLSQPLSQFitter()
        with ignore_non_integer_warning():
            slsqp_model = fslsqp(g1, self.xdata, self.ydata)
        model = fitter(g1, self.xdata, self.ydata)
        assert_allclose(model.parameters, slsqp_model.parameters,
                        rtol=10 ** (-4))
    def test_simplex_lsq_fitter(self):
        """A basic test for the `SimplexLSQ` fitter."""
        class Rosenbrock(Fittable2DModel):
            a = Parameter()
            b = Parameter()
            @staticmethod
            def evaluate(x, y, a, b):
                return (a - x) ** 2 + b * (y - x ** 2) ** 2
        x = y = np.linspace(-3.0, 3.0, 100)
        with NumpyRNGContext(_RANDOM_SEED):
            z = Rosenbrock.evaluate(x, y, 1.0, 100.0)
            z += np.random.normal(0., 0.1, size=z.shape)
        fitter = SimplexLSQFitter()
        r_i = Rosenbrock(1, 100)
        r_f = fitter(r_i, x, y, z)
        assert_allclose(r_f.parameters, [1.0, 100.0], rtol=1e-2)
    def test_param_cov(self):
        """
        Tests that the 'param_cov' fit_info entry gets the right answer for
        *linear* least squares, where the answer is exact
        """
        a = 2
        b = 100
        with NumpyRNGContext(_RANDOM_SEED):
            x = np.linspace(0, 1, 100)
            # y scatter is amplitude ~1 to make sure covarience is
            # non-negligible
            y = x*a + b + np.random.randn(len(x))
        # first compute the ordinary least squares covariance matrix
        # NOTE(review): np.matrix is deprecated in modern NumPy; this test
        # relies on its operator semantics, so it is preserved as-is.
        X = np.matrix(np.vstack([x, np.ones(len(x))]).T)
        beta = np.linalg.inv(X.T * X) * X.T * np.matrix(y).T
        s2 = np.sum((y - (X * beta).A.ravel())**2) / (len(y) - len(beta))
        olscov = np.linalg.inv(X.T * X) * s2
        # now do the non-linear least squares fit
        mod = models.Linear1D(a, b)
        fitter = LevMarLSQFitter()
        fmod = fitter(mod, x, y)
        assert_allclose(fmod.parameters, beta.A.ravel())
        assert_allclose(olscov, fitter.fit_info['param_cov'])
@pytest.mark.skipif('not HAS_MOCK')
@pytest.mark.skipif('not HAS_PKG')
class TestEntryPoint(object):
    """Tests population of fitting with entry point fitters"""
    def setup_class(self):
        self.exception_not_thrown = Exception("The test should not have gotten here. There was no exception thrown")
    def successfulimport(self):
        # This should work
        class goodclass(Fitter):
            __name__ = "GoodClass"
        return goodclass
    def raiseimporterror(self):
        # This should fail as it raises an Import Error
        raise ImportError
    def returnbadfunc(self):
        def badfunc():
            # This should import but it should fail type check
            pass
        return badfunc
    def returnbadclass(self):
        # This should import but it should fail subclass type check
        class badclass(object):
            pass
        return badclass
    def test_working(self):
        """This should work fine"""
        mock_entry_working = mock.create_autospec(EntryPoint)
        mock_entry_working.name = "Working"
        mock_entry_working.load = self.successfulimport
        populate_entry_points([mock_entry_working])
    def test_import_error(self):
        """This raises an import error on load to test that it is handled correctly"""
        with warnings.catch_warnings():
            # Promote warnings to exceptions so the emitted warning is caught
            warnings.filterwarnings('error')
            try:
                mock_entry_importerror = mock.create_autospec(EntryPoint)
                mock_entry_importerror.name = "IErr"
                mock_entry_importerror.load = self.raiseimporterror
                populate_entry_points([mock_entry_importerror])
            except AstropyUserWarning as w:
                if "ImportError" in w.args[0]:  # any error for this case should have this in it.
                    pass
                else:
                    raise w
            else:
                raise self.exception_not_thrown
    def test_bad_func(self):
        """This returns a function which fails the type check"""
        with warnings.catch_warnings():
            warnings.filterwarnings('error')
            try:
                mock_entry_badfunc = mock.create_autospec(EntryPoint)
                mock_entry_badfunc.name = "BadFunc"
                mock_entry_badfunc.load = self.returnbadfunc
                populate_entry_points([mock_entry_badfunc])
            except AstropyUserWarning as w:
                if "Class" in w.args[0]:  # any error for this case should have this in it.
                    pass
                else:
                    raise w
            else:
                raise self.exception_not_thrown
    def test_bad_class(self):
        """This returns a class which doesn't inherit from Fitter"""
        with warnings.catch_warnings():
            warnings.filterwarnings('error')
            try:
                mock_entry_badclass = mock.create_autospec(EntryPoint)
                mock_entry_badclass.name = "BadClass"
                mock_entry_badclass.load = self.returnbadclass
                populate_entry_points([mock_entry_badclass])
            except AstropyUserWarning as w:
                if 'modeling.Fitter' in w.args[0]:  # any error for this case should have this in it.
                    pass
                else:
                    raise w
            else:
                raise self.exception_not_thrown
@pytest.mark.skipif('not HAS_SCIPY')
class Test1DFittingWithOutlierRemoval(object):
    """Outlier-resistant 1D Gaussian fits via FittingWithOutlierRemoval."""

    def setup_class(self):
        # Clean 1D Gaussian data; parameters are (amplitude, mean, stddev).
        self.x = np.linspace(-5., 5., 200)
        self.model_params = (3.0, 1.3, 0.8)

        def func(p, x):
            # Gaussian evaluated directly (independent of astropy.modeling)
            # so the target curve is known exactly.
            return p[0]*np.exp(-0.5*(x - p[1])**2/p[2]**2)

        self.y = func(self.model_params, self.x)

    def test_with_fitters_and_sigma_clip(self):
        import scipy.stats as stats

        np.random.seed(0)
        # Mark ~25% of the points as outliers and perturb them strongly,
        # on top of mild noise on every point.
        c = stats.bernoulli.rvs(0.25, size=self.x.shape)
        self.y += (np.random.normal(0., 0.2, self.x.shape) +
                   c*np.random.normal(3.0, 5.0, self.x.shape))

        g_init = models.Gaussian1D(amplitude=1., mean=0, stddev=1.)
        # test with Levenberg-Marquardt Least Squares fitter
        fit = FittingWithOutlierRemoval(LevMarLSQFitter(), sigma_clip,
                                        niter=3, sigma=3.0)
        _, fitted_model = fit(g_init, self.x, self.y)
        assert_allclose(fitted_model.parameters, self.model_params, rtol=1e-1)
        # test with Sequential Least Squares Programming fitter
        fit = FittingWithOutlierRemoval(SLSQPLSQFitter(), sigma_clip,
                                        niter=3, sigma=3.0)
        _, fitted_model = fit(g_init, self.x, self.y)
        assert_allclose(fitted_model.parameters, self.model_params, rtol=1e-1)
        # test with Simplex LSQ fitter
        fit = FittingWithOutlierRemoval(SimplexLSQFitter(), sigma_clip,
                                        niter=3, sigma=3.0)
        _, fitted_model = fit(g_init, self.x, self.y)
        assert_allclose(fitted_model.parameters, self.model_params, atol=1e-1)
@pytest.mark.skipif('not HAS_SCIPY')
class Test2DFittingWithOutlierRemoval(object):
    """Outlier-resistant 2D Gaussian fits via FittingWithOutlierRemoval."""

    def setup_class(self):
        self.y, self.x = np.mgrid[-3:3:128j, -3:3:128j]
        # (amplitude, x_mean, y_mean, x_stddev, y_stddev)
        self.model_params = (3.0, 1.0, 0.0, 0.8, 0.8)

        def Gaussian_2D(p, pos):
            # Direct 2D Gaussian; ``pos`` is stacked as [y, x].
            return p[0]*np.exp(-0.5*(pos[0] - p[2])**2 / p[4]**2 -
                               0.5*(pos[1] - p[1])**2 / p[3]**2)

        self.z = Gaussian_2D(self.model_params, np.array([self.y, self.x]))

    def initial_guess(self, data, pos):
        """computes the centroid of the data as the initial guess for the
        center position"""
        y = pos[0]
        x = pos[1]
        # Intensity-weighted means give the centroid in world coordinates.
        wx = x * data
        wy = y * data
        total_intensity = np.sum(data)
        x_mean = np.sum(wx) / total_intensity
        y_mean = np.sum(wy) / total_intensity
        # Convert the centroid into pixel indices so the amplitude can be
        # read off the data array.
        x_to_pixel = x[0].size / (x[x[0].size - 1][x[0].size - 1] - x[0][0])
        y_to_pixel = y[0].size / (y[y[0].size - 1][y[0].size - 1] - y[0][0])
        x_pos = np.around(x_mean * x_to_pixel + x[0].size / 2.).astype(int)
        y_pos = np.around(y_mean * y_to_pixel + y[0].size / 2.).astype(int)
        amplitude = data[y_pos][x_pos]
        return amplitude, x_mean, y_mean

    def test_with_fitters_and_sigma_clip(self):
        import scipy.stats as stats

        np.random.seed(0)
        # Flag ~25% of pixels as outliers with large perturbations, plus
        # mild noise everywhere.
        c = stats.bernoulli.rvs(0.25, size=self.z.shape)
        self.z += (np.random.normal(0., 0.2, self.z.shape) +
                   c*np.random.normal(self.z, 2.0, self.z.shape))

        guess = self.initial_guess(self.z, np.array([self.y, self.x]))
        g2_init = models.Gaussian2D(amplitude=guess[0], x_mean=guess[1],
                                    y_mean=guess[2], x_stddev=0.75,
                                    y_stddev=1.25)
        # test with Levenberg-Marquardt Least Squares fitter
        fit = FittingWithOutlierRemoval(LevMarLSQFitter(), sigma_clip,
                                        niter=3, sigma=3.)
        _, fitted_model = fit(g2_init, self.x, self.y, self.z)
        assert_allclose(fitted_model.parameters[0:5], self.model_params,
                        atol=1e-1)
        # test with Sequential Least Squares Programming fitter
        fit = FittingWithOutlierRemoval(SLSQPLSQFitter(), sigma_clip, niter=3,
                                        sigma=3.)
        _, fitted_model = fit(g2_init, self.x, self.y, self.z)
        assert_allclose(fitted_model.parameters[0:5], self.model_params,
                        atol=1e-1)
        # test with Simplex LSQ fitter
        fit = FittingWithOutlierRemoval(SimplexLSQFitter(), sigma_clip,
                                        niter=3, sigma=3.)
        _, fitted_model = fit(g2_init, self.x, self.y, self.z)
        assert_allclose(fitted_model.parameters[0:5], self.model_params,
                        atol=1e-1)
@pytest.mark.skipif('not HAS_SCIPY')
def test_fitters_with_weights():
    """Issue #5737 """
    # NOTE(review): no explicit ``weights=`` argument is passed below; the
    # test fits noisy data with LevMarLSQFitter. Confirm against issue
    # #5737 whether weighted fitting was meant to be exercised here.
    Xin, Yin = np.mgrid[0:21, 0:21]
    fitter = LevMarLSQFitter()

    with NumpyRNGContext(_RANDOM_SEED):
        zsig = np.random.normal(0, 0.01, size=Xin.shape)

    # Non-linear model
    g2 = models.Gaussian2D(10, 10, 9, 2, 3)
    z = g2(Xin, Yin)
    gmod = fitter(models.Gaussian2D(15, 7, 8, 1.3, 1.2), Xin, Yin, z + zsig)
    assert_allclose(gmod.parameters, g2.parameters, atol=10 ** (-2))

    # Linear model
    p2 = models.Polynomial2D(3)
    p2.parameters = np.arange(10)/1.2
    z = p2(Xin, Yin)
    pmod = fitter(models.Polynomial2D(3), Xin, Yin, z + zsig)
    assert_allclose(pmod.parameters, p2.parameters, atol=10 ** (-2))
@pytest.mark.skipif('not HAS_SCIPY')
def test_fitters_interface():
    """
    Test that **kwargs work with all optimizers.
    This is a basic smoke test.
    """
    levmar = LevMarLSQFitter()
    slsqp = SLSQPLSQFitter()
    simplex = SimplexLSQFitter()

    # Each optimizer accepts a different subset of keyword arguments.
    kwargs = {'maxiter': 77, 'verblevel': 1, 'epsilon': 1e-2, 'acc': 1e-6}
    simplex_kwargs = {'maxiter': 77, 'verblevel': 1, 'acc': 1e-6}
    model = models.Gaussian1D(10, 4, .3)
    x = np.arange(21)
    y = model(x)

    slsqp_model = slsqp(model, x, y, **kwargs)
    simplex_model = simplex(model, x, y, **simplex_kwargs)
    # LevMar does not accept 'verblevel'; drop it before fitting.
    kwargs.pop('verblevel')
    lm_model = levmar(model, x, y, **kwargs)
|
kelle/astropy
|
astropy/modeling/tests/test_fitters.py
|
Python
|
bsd-3-clause
| 25,717
|
[
"Gaussian"
] |
8bbefc179f1217e710e173340be1108307c68a1701363df4ed03f03d56c6388a
|
import decimal
import gc
import itertools
import multiprocessing
import weakref
import sqlalchemy as sa
from sqlalchemy import ForeignKey
from sqlalchemy import inspect
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import select
from sqlalchemy import String
from sqlalchemy import testing
from sqlalchemy import Unicode
from sqlalchemy import util
from sqlalchemy.orm import aliased
from sqlalchemy.orm import clear_mappers
from sqlalchemy.orm import configure_mappers
from sqlalchemy.orm import create_session
from sqlalchemy.orm import join as orm_join
from sqlalchemy.orm import joinedload
from sqlalchemy.orm import Load
from sqlalchemy.orm import mapper
from sqlalchemy.orm import relationship
from sqlalchemy.orm import selectinload
from sqlalchemy.orm import Session
from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm import subqueryload
from sqlalchemy.orm.mapper import _mapper_registry
from sqlalchemy.orm.session import _sessions
from sqlalchemy.processors import to_decimal_processor_factory
from sqlalchemy.processors import to_unicode_processor_factory
from sqlalchemy.sql import column
from sqlalchemy.sql import util as sql_util
from sqlalchemy.sql.visitors import cloned_traverse
from sqlalchemy.sql.visitors import replacement_traverse
from sqlalchemy.testing import engines
from sqlalchemy.testing import eq_
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.schema import Column
from sqlalchemy.testing.schema import Table
from sqlalchemy.testing.util import gc_collect
from ..orm import _fixtures
class A(fixtures.ComparableEntity):
    # Module-level fixture entity, mapped ad hoc by individual tests.
    pass


class B(fixtures.ComparableEntity):
    # Companion fixture entity, typically the related side of A.
    pass


class ASub(A):
    # Subclass of A used by the polymorphic / inheritance tests.
    pass
def assert_cycles(expected=0):
    """Decorator factory: assert the wrapped callable creates no more than
    ``expected`` unreachable (cyclic) objects per invocation."""

    def decorate(fn):
        def go():
            # Warmup run: configure mappers, populate caches, etc.
            fn()

            # Several collections appear necessary to fully flush out
            # pre-existing garbage before measuring.
            for _ in range(3):
                gc_collect()

            try:
                # The measured run.
                return fn()
            finally:
                leaked = gc_collect()
                assert leaked <= expected
                gc_collect()

        return go

    return decorate
def profile_memory(
    maxtimes=250, assert_no_sessions=True, get_num_objects=None
):
    """Decorator factory: run the decorated function repeatedly in a child
    process and assert that the live-object count eventually plateaus.

    :param maxtimes: upper bound on total iterations (consumed in batches
        of five).
    :param assert_no_sessions: also assert the global ``_sessions``
        registry is empty after each batch.
    :param get_num_objects: optional callable used instead of
        ``len(gc.get_objects())`` as the growth metric.
    """

    def decorate(func):
        # run the test N times. if length of gc.get_objects()
        # keeps growing, assert false

        def get_objects_skipping_sqlite_issue():
            # pysqlite keeps adding weakref objects which only
            # get reset after 220 iterations. We'd like to keep these
            # tests under 50 iterations and ideally about ten, so
            # just filter them out so that we get a "flatline" more quickly.
            if testing.against("sqlite+pysqlite"):
                return [
                    o
                    for o in gc.get_objects()
                    if not isinstance(o, weakref.ref)
                ]
            else:
                return gc.get_objects()

        def profile(queue, func_args):
            # give testing.db a brand new pool and don't
            # touch the existing pool, since closing a socket
            # in the subprocess can affect the parent
            testing.db.pool = testing.db.pool.recreate()
            gc_collect()

            samples = []
            max_ = 0
            max_grew_for = 0
            success = False
            until_maxtimes = 0
            while True:
                if until_maxtimes >= maxtimes // 5:
                    break
                # Run five iterations per batch, recording the object
                # count after each call.
                for x in range(5):
                    func(*func_args)
                    gc_collect()
                    samples.append(
                        get_num_objects()
                        if get_num_objects is not None
                        else len(get_objects_skipping_sqlite_issue())
                    )

                if assert_no_sessions:
                    assert len(_sessions) == 0

                # queue.put(('samples', samples))

                latest_max = max(samples[-5:])
                if latest_max > max_:
                    # High-water mark grew: record it and keep iterating.
                    queue.put(
                        (
                            "status",
                            "Max grew from %s to %s, max has "
                            "grown for %s samples"
                            % (max_, latest_max, max_grew_for),
                        )
                    )
                    max_ = latest_max
                    max_grew_for += 1
                    until_maxtimes += 1
                    continue
                else:
                    # Mark held steady; declare success once it has
                    # plateaued for as many batches as it previously grew.
                    queue.put(
                        (
                            "status",
                            "Max remained at %s, %s more attempts left"
                            % (max_, max_grew_for),
                        )
                    )
                    max_grew_for -= 1
                    if max_grew_for == 0:
                        success = True
                        break

            if not success:
                queue.put(
                    (
                        "result",
                        False,
                        "Ran for a total of %d times, memory kept "
                        "growing: %r" % (maxtimes, samples),
                    )
                )
            else:
                queue.put(("result", True, "success"))

        def run_in_process(*func_args):
            # Execute ``profile`` in a separate process so its allocations
            # cannot pollute the parent; stream status rows over a queue.
            queue = multiprocessing.Queue()
            proc = multiprocessing.Process(
                target=profile, args=(queue, func_args)
            )
            proc.start()
            while True:
                row = queue.get()
                typ = row[0]
                if typ == "samples":
                    print("sample gc sizes:", row[1])
                elif typ == "status":
                    print(row[1])
                elif typ == "result":
                    break
                else:
                    assert False, "can't parse row"
            proc.join()
            # Re-raise the child's verdict in the parent process.
            assert row[1], row[2]

        return run_in_process

    return decorate
def assert_no_mappers():
    # After clearing mappers and forcing a collection, the global mapper
    # registry must be empty (no mapper objects leaked).
    clear_mappers()
    gc_collect()
    assert len(_mapper_registry) == 0
class EnsureZeroed(fixtures.ORMTest):
    """Base fixture: start each test with empty session/mapper registries
    and a reaper-less engine so pool bookkeeping doesn't perturb the
    memory measurements."""

    def setup(self):
        _sessions.clear()
        _mapper_registry.clear()
        self.engine = engines.testing_engine(options={"use_reaper": False})
class MemUsageTest(EnsureZeroed):
    """Memory-growth checks that do not require a database backend."""

    __tags__ = ("memory_intensive",)
    __requires__ = ("cpython",)

    def test_type_compile(self):
        from sqlalchemy.dialects.sqlite.base import dialect as SQLiteDialect

        cast = sa.cast(column("x"), sa.Integer)

        @profile_memory()
        def go():
            # Compiling the same expression against fresh dialect
            # instances must not accumulate per-dialect state.
            dialect = SQLiteDialect()
            cast.compile(dialect=dialect)

        go()

    @testing.requires.cextensions
    def test_DecimalResultProcessor_init(self):
        @profile_memory()
        def go():
            to_decimal_processor_factory({}, 10)

        go()

    @testing.requires.cextensions
    def test_DecimalResultProcessor_process(self):
        @profile_memory()
        def go():
            to_decimal_processor_factory(decimal.Decimal, 10)(1.2)

        go()

    @testing.requires.cextensions
    def test_UnicodeResultProcessor_init(self):
        @profile_memory()
        def go():
            to_unicode_processor_factory("utf8")

        go()

    def test_ad_hoc_types(self):
        """test storage of bind processors, result processors
        in dialect-wide registry."""

        from sqlalchemy.dialects import mysql, postgresql, sqlite
        from sqlalchemy import types

        eng = engines.testing_engine()
        for args in (
            (types.Integer,),
            (types.String,),
            (types.PickleType,),
            (types.Enum, "a", "b", "c"),
            (sqlite.DATETIME,),
            (postgresql.ENUM, "a", "b", "c"),
            (types.Interval,),
            (postgresql.INTERVAL,),
            (mysql.VARCHAR,),
        ):

            @profile_memory()
            def go():
                type_ = args[0](*args[1:])
                bp = type_._cached_bind_processor(eng.dialect)
                rp = type_._cached_result_processor(eng.dialect, 0)
                bp, rp  # strong reference

            go()

        assert not eng.dialect._type_memos

    @testing.fails()
    def test_fixture_failure(self):
        # Sanity check: a function that genuinely leaks must make
        # profile_memory fail (hence @testing.fails).
        class Foo(object):
            pass

        stuff = []

        @profile_memory(maxtimes=20)
        def go():
            stuff.extend(Foo() for i in range(100))

        go()
class MemUsageWBackendTest(EnsureZeroed):
__tags__ = ("memory_intensive",)
__requires__ = "cpython", "memory_process_intensive"
__sparse_backend__ = True
# ensure a pure growing test trips the assertion
@testing.fails_if(lambda: True)
def test_fixture(self):
class Foo(object):
pass
x = []
@profile_memory(maxtimes=10)
def go():
x[-1:] = [Foo(), Foo(), Foo(), Foo(), Foo(), Foo()]
go()
def test_session(self):
metadata = MetaData(self.engine)
table1 = Table(
"mytable",
metadata,
Column(
"col1",
Integer,
primary_key=True,
test_needs_autoincrement=True,
),
Column("col2", String(30)),
)
table2 = Table(
"mytable2",
metadata,
Column(
"col1",
Integer,
primary_key=True,
test_needs_autoincrement=True,
),
Column("col2", String(30)),
Column("col3", Integer, ForeignKey("mytable.col1")),
)
metadata.create_all()
m1 = mapper(
A,
table1,
properties={
"bs": relationship(
B, cascade="all, delete", order_by=table2.c.col1
)
},
)
m2 = mapper(B, table2)
@profile_memory()
def go():
sess = create_session()
a1 = A(col2="a1")
a2 = A(col2="a2")
a3 = A(col2="a3")
a1.bs.append(B(col2="b1"))
a1.bs.append(B(col2="b2"))
a3.bs.append(B(col2="b3"))
for x in [a1, a2, a3]:
sess.add(x)
sess.flush()
sess.expunge_all()
alist = sess.query(A).order_by(A.col1).all()
eq_(
[
A(col2="a1", bs=[B(col2="b1"), B(col2="b2")]),
A(col2="a2", bs=[]),
A(col2="a3", bs=[B(col2="b3")]),
],
alist,
)
for a in alist:
sess.delete(a)
sess.flush()
go()
metadata.drop_all()
del m1, m2
assert_no_mappers()
    def test_sessionmaker(self):
        # Creating and discarding a sessionmaker + session per iteration
        # must not leave memory behind.
        @profile_memory()
        def go():
            sessmaker = sessionmaker(bind=self.engine)
            sess = sessmaker()
            r = sess.execute(select([1]))
            r.close()
            sess.close()
            del sess
            del sessmaker

        go()
@testing.emits_warning("Compiled statement cache for mapper.*")
@testing.emits_warning("Compiled statement cache for lazy loader.*")
@testing.crashes("sqlite", ":memory: connection not suitable here")
def test_orm_many_engines(self):
metadata = MetaData(self.engine)
table1 = Table(
"mytable",
metadata,
Column(
"col1",
Integer,
primary_key=True,
test_needs_autoincrement=True,
),
Column("col2", String(30)),
)
table2 = Table(
"mytable2",
metadata,
Column(
"col1",
Integer,
primary_key=True,
test_needs_autoincrement=True,
),
Column("col2", String(30)),
Column("col3", Integer, ForeignKey("mytable.col1")),
)
metadata.create_all()
m1 = mapper(
A,
table1,
properties={
"bs": relationship(
B, cascade="all, delete", order_by=table2.c.col1
)
},
_compiled_cache_size=50,
)
m2 = mapper(B, table2, _compiled_cache_size=50)
@profile_memory()
def go():
engine = engines.testing_engine(
options={
"logging_name": "FOO",
"pool_logging_name": "BAR",
"use_reaper": False,
}
)
sess = create_session(bind=engine)
a1 = A(col2="a1")
a2 = A(col2="a2")
a3 = A(col2="a3")
a1.bs.append(B(col2="b1"))
a1.bs.append(B(col2="b2"))
a3.bs.append(B(col2="b3"))
for x in [a1, a2, a3]:
sess.add(x)
sess.flush()
sess.expunge_all()
alist = sess.query(A).order_by(A.col1).all()
eq_(
[
A(col2="a1", bs=[B(col2="b1"), B(col2="b2")]),
A(col2="a2", bs=[]),
A(col2="a3", bs=[B(col2="b3")]),
],
alist,
)
for a in alist:
sess.delete(a)
sess.flush()
sess.close()
engine.dispose()
go()
metadata.drop_all()
del m1, m2
assert_no_mappers()
@testing.emits_warning("Compiled statement cache for.*")
def test_many_updates(self):
metadata = MetaData(self.engine)
wide_table = Table(
"t",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
*[Column("col%d" % i, Integer) for i in range(10)]
)
class Wide(object):
pass
mapper(Wide, wide_table, _compiled_cache_size=10)
metadata.create_all()
session = create_session()
w1 = Wide()
session.add(w1)
session.flush()
session.close()
del session
counter = [1]
@profile_memory()
def go():
session = create_session()
w1 = session.query(Wide).first()
x = counter[0]
dec = 10
while dec > 0:
# trying to count in binary here,
# works enough to trip the test case
if pow(2, dec) < x:
setattr(w1, "col%d" % dec, counter[0])
x -= pow(2, dec)
dec -= 1
session.flush()
session.close()
counter[0] += 1
try:
go()
finally:
metadata.drop_all()
@testing.requires.savepoints
@testing.provide_metadata
def test_savepoints(self):
metadata = self.metadata
some_table = Table(
"t",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
)
class SomeClass(object):
pass
mapper(SomeClass, some_table)
metadata.create_all()
session = Session(testing.db)
target_strings = (
session.connection().dialect.identifier_preparer._strings
)
session.close()
@profile_memory(
assert_no_sessions=False,
get_num_objects=lambda: len(target_strings),
)
def go():
session = Session(testing.db)
with session.transaction:
sc = SomeClass()
session.add(sc)
with session.begin_nested():
session.query(SomeClass).first()
go()
@testing.crashes("mysql+cymysql", "blocking")
def test_unicode_warnings(self):
metadata = MetaData(self.engine)
table1 = Table(
"mytable",
metadata,
Column(
"col1",
Integer,
primary_key=True,
test_needs_autoincrement=True,
),
Column("col2", Unicode(30)),
)
metadata.create_all()
i = [1]
# the times here is cranked way up so that we can see
# pysqlite clearing out its internal buffer and allow
# the test to pass
@testing.emits_warning()
@profile_memory()
def go():
# execute with a non-unicode object. a warning is emitted,
# this warning shouldn't clog up memory.
self.engine.execute(
table1.select().where(table1.c.col2 == "foo%d" % i[0])
)
i[0] += 1
try:
go()
finally:
metadata.drop_all()
    def test_warnings_util(self):
        # util.warn_limited with ever-changing parameters must not
        # accumulate memory (e.g. via warning registries).
        counter = itertools.count()
        import warnings

        warnings.filterwarnings("ignore", "memusage warning.*")

        @profile_memory()
        def go():
            util.warn_limited(
                "memusage warning, param1: %s, param2: %s",
                (next(counter), next(counter)),
            )

        go()
def test_mapper_reset(self):
metadata = MetaData(self.engine)
table1 = Table(
"mytable",
metadata,
Column(
"col1",
Integer,
primary_key=True,
test_needs_autoincrement=True,
),
Column("col2", String(30)),
)
table2 = Table(
"mytable2",
metadata,
Column(
"col1",
Integer,
primary_key=True,
test_needs_autoincrement=True,
),
Column("col2", String(30)),
Column("col3", Integer, ForeignKey("mytable.col1")),
)
@profile_memory()
def go():
mapper(
A,
table1,
properties={"bs": relationship(B, order_by=table2.c.col1)},
)
mapper(B, table2)
sess = create_session()
a1 = A(col2="a1")
a2 = A(col2="a2")
a3 = A(col2="a3")
a1.bs.append(B(col2="b1"))
a1.bs.append(B(col2="b2"))
a3.bs.append(B(col2="b3"))
for x in [a1, a2, a3]:
sess.add(x)
sess.flush()
sess.expunge_all()
alist = sess.query(A).order_by(A.col1).all()
eq_(
[
A(col2="a1", bs=[B(col2="b1"), B(col2="b2")]),
A(col2="a2", bs=[]),
A(col2="a3", bs=[B(col2="b3")]),
],
alist,
)
for a in alist:
sess.delete(a)
sess.flush()
sess.close()
clear_mappers()
metadata.create_all()
try:
go()
finally:
metadata.drop_all()
assert_no_mappers()
def test_alias_pathing(self):
metadata = MetaData(self.engine)
a = Table(
"a",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("bid", Integer, ForeignKey("b.id")),
Column("type", String(30)),
)
asub = Table(
"asub",
metadata,
Column("id", Integer, ForeignKey("a.id"), primary_key=True),
Column("data", String(30)),
)
b = Table(
"b",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
)
mapper(A, a, polymorphic_identity="a", polymorphic_on=a.c.type)
mapper(ASub, asub, inherits=A, polymorphic_identity="asub")
mapper(B, b, properties={"as_": relationship(A)})
metadata.create_all()
sess = Session()
a1 = ASub(data="a1")
a2 = ASub(data="a2")
a3 = ASub(data="a3")
b1 = B(as_=[a1, a2, a3])
sess.add(b1)
sess.commit()
del sess
# sqlite has a slow enough growth here
# that we have to run it more times to see the
# "dip" again
@profile_memory(maxtimes=120)
def go():
sess = Session()
sess.query(B).options(subqueryload(B.as_.of_type(ASub))).all()
sess.close()
try:
go()
finally:
metadata.drop_all()
clear_mappers()
    def test_path_registry(self):
        # Path-registry lookups keyed on freshly created aliased entities
        # must not grow memory across iterations.
        metadata = MetaData()
        a = Table(
            "a",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("foo", Integer),
            Column("bar", Integer),
        )
        b = Table(
            "b",
            metadata,
            Column("id", Integer, primary_key=True),
            Column("a_id", ForeignKey("a.id")),
        )
        m1 = mapper(A, a, properties={"bs": relationship(B)})
        mapper(B, b)

        @profile_memory()
        def go():
            ma = sa.inspect(aliased(A))
            m1._path_registry[m1.attrs.bs][ma][m1.attrs.bar]

        go()

        clear_mappers()
def test_with_inheritance(self):
metadata = MetaData(self.engine)
table1 = Table(
"mytable",
metadata,
Column(
"col1",
Integer,
primary_key=True,
test_needs_autoincrement=True,
),
Column("col2", String(30)),
)
table2 = Table(
"mytable2",
metadata,
Column(
"col1",
Integer,
ForeignKey("mytable.col1"),
primary_key=True,
test_needs_autoincrement=True,
),
Column("col3", String(30)),
)
@profile_memory()
def go():
class A(fixtures.ComparableEntity):
pass
class B(A):
pass
mapper(
A,
table1,
polymorphic_on=table1.c.col2,
polymorphic_identity="a",
)
mapper(B, table2, inherits=A, polymorphic_identity="b")
sess = create_session()
a1 = A()
a2 = A()
b1 = B(col3="b1")
b2 = B(col3="b2")
for x in [a1, a2, b1, b2]:
sess.add(x)
sess.flush()
sess.expunge_all()
alist = sess.query(A).order_by(A.col1).all()
eq_([A(), A(), B(col3="b1"), B(col3="b2")], alist)
for a in alist:
sess.delete(a)
sess.flush()
# don't need to clear_mappers()
del B
del A
metadata.create_all()
try:
go()
finally:
metadata.drop_all()
assert_no_mappers()
def test_with_manytomany(self):
metadata = MetaData(self.engine)
table1 = Table(
"mytable",
metadata,
Column(
"col1",
Integer,
primary_key=True,
test_needs_autoincrement=True,
),
Column("col2", String(30)),
)
table2 = Table(
"mytable2",
metadata,
Column(
"col1",
Integer,
primary_key=True,
test_needs_autoincrement=True,
),
Column("col2", String(30)),
)
table3 = Table(
"t1tot2",
metadata,
Column("t1", Integer, ForeignKey("mytable.col1")),
Column("t2", Integer, ForeignKey("mytable2.col1")),
)
@profile_memory()
def go():
class A(fixtures.ComparableEntity):
pass
class B(fixtures.ComparableEntity):
pass
mapper(
A,
table1,
properties={
"bs": relationship(
B, secondary=table3, backref="as", order_by=table3.c.t1
)
},
)
mapper(B, table2)
sess = create_session()
a1 = A(col2="a1")
a2 = A(col2="a2")
b1 = B(col2="b1")
b2 = B(col2="b2")
a1.bs.append(b1)
a2.bs.append(b2)
for x in [a1, a2]:
sess.add(x)
sess.flush()
sess.expunge_all()
alist = sess.query(A).order_by(A.col1).all()
eq_([A(bs=[B(col2="b1")]), A(bs=[B(col2="b2")])], alist)
for a in alist:
sess.delete(a)
sess.flush()
# don't need to clear_mappers()
del B
del A
metadata.create_all()
try:
go()
finally:
metadata.drop_all()
assert_no_mappers()
    @testing.provide_metadata
    def test_key_fallback_result(self):
        # Indexing a result row by a Column of the *original* table while
        # selecting from an alias exercises the "key fallback" lookup
        # path; repeated use must not leak.
        e = self.engine
        m = self.metadata
        t = Table("t", m, Column("x", Integer), Column("y", Integer))
        m.create_all(e)
        e.execute(t.insert(), {"x": 1, "y": 1})

        @profile_memory()
        def go():
            r = e.execute(t.alias().select())
            for row in r:
                row[t.c.x]

        go()
def test_many_discarded_relationships(self):
"""a use case that really isn't supported, nonetheless we can
guard against memleaks here so why not"""
m1 = MetaData()
t1 = Table("t1", m1, Column("id", Integer, primary_key=True))
t2 = Table(
"t2",
m1,
Column("id", Integer, primary_key=True),
Column("t1id", ForeignKey("t1.id")),
)
class T1(object):
pass
t1_mapper = mapper(T1, t1)
@testing.emits_warning()
@profile_memory()
def go():
class T2(object):
pass
t2_mapper = mapper(T2, t2)
t1_mapper.add_property("bar", relationship(t2_mapper))
s1 = Session()
# this causes the path_registry to be invoked
s1.query(t1_mapper)._compile_context()
go()
# fails on newer versions of pysqlite due to unusual memory behavior
# in pysqlite itself. background at:
# http://thread.gmane.org/gmane.comp.python.db.pysqlite.user/2290
@testing.crashes("mysql+cymysql", "blocking")
def test_join_cache(self):
metadata = MetaData(self.engine)
table1 = Table(
"table1",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("data", String(30)),
)
table2 = Table(
"table2",
metadata,
Column(
"id", Integer, primary_key=True, test_needs_autoincrement=True
),
Column("data", String(30)),
Column("t1id", Integer, ForeignKey("table1.id")),
)
class Foo(object):
pass
class Bar(object):
pass
mapper(
Foo, table1, properties={"bars": relationship(mapper(Bar, table2))}
)
metadata.create_all()
session = sessionmaker()
@profile_memory()
def go():
s = table2.select()
sess = session()
sess.query(Foo).join((s, Foo.bars)).all()
sess.rollback()
try:
go()
finally:
metadata.drop_all()
class CycleTest(_fixtures.FixtureTest):
__tags__ = ("memory_intensive",)
__requires__ = ("cpython",)
run_setup_mappers = "once"
run_inserts = "once"
run_deletes = None
@classmethod
def setup_mappers(cls):
cls._setup_stock_mapping()
def test_query(self):
User, Address = self.classes("User", "Address")
configure_mappers()
s = Session()
@assert_cycles()
def go():
return s.query(User).all()
go()
    def test_raise_from(self):
        # util.raise_ with an explicit cause must not create reference
        # cycles via the chained exception/traceback objects.
        @assert_cycles()
        def go():
            try:
                try:
                    raise KeyError("foo")
                except KeyError as ke:
                    util.raise_(Exception("oops"), from_=ke)
            except Exception as err: # noqa
                pass

        go()
def test_query_alias(self):
User, Address = self.classes("User", "Address")
configure_mappers()
s = Session()
u1 = aliased(User)
@assert_cycles()
def go():
s.query(u1).all()
go()
def test_entity_path_w_aliased(self):
User, Address = self.classes("User", "Address")
configure_mappers()
@assert_cycles()
def go():
u1 = aliased(User)
inspect(u1)._path_registry[User.addresses.property]
go()
def test_orm_objects_from_query(self):
User, Address = self.classes("User", "Address")
configure_mappers()
s = Session()
def generate():
objects = s.query(User).filter(User.id == 7).all()
gc_collect()
return objects
@assert_cycles()
def go():
generate()
go()
def test_orm_objects_from_query_w_selectinload(self):
User, Address = self.classes("User", "Address")
s = Session()
def generate():
objects = s.query(User).options(selectinload(User.addresses)).all()
gc_collect()
return objects
@assert_cycles()
def go():
generate()
go()
def test_selectinload_option_unbound(self):
User, Address = self.classes("User", "Address")
@assert_cycles()
def go():
selectinload(User.addresses)
go()
def test_selectinload_option_bound(self):
User, Address = self.classes("User", "Address")
@assert_cycles()
def go():
Load(User).selectinload(User.addresses)
go()
def test_orm_path(self):
User, Address = self.classes("User", "Address")
@assert_cycles()
def go():
inspect(User)._path_registry[User.addresses.property][
inspect(Address)
]
go()
def test_joinedload_option_unbound(self):
User, Address = self.classes("User", "Address")
@assert_cycles()
def go():
joinedload(User.addresses)
go()
def test_joinedload_option_bound(self):
User, Address = self.classes("User", "Address")
@assert_cycles()
def go():
Load(User).joinedload(User.addresses)
go()
def test_orm_objects_from_query_w_joinedload(self):
User, Address = self.classes("User", "Address")
s = Session()
def generate():
objects = s.query(User).options(joinedload(User.addresses)).all()
gc_collect()
return objects
@assert_cycles()
def go():
generate()
go()
def test_query_filtered(self):
User, Address = self.classes("User", "Address")
s = Session()
@assert_cycles()
def go():
return s.query(User).filter(User.id == 7).all()
go()
def test_query_joins(self):
User, Address = self.classes("User", "Address")
s = Session()
# cycles here are due to ClauseElement._cloned_set
@assert_cycles(3)
def go():
s.query(User).join(User.addresses).all()
go()
def test_query_joinedload(self):
User, Address = self.classes("User", "Address")
s = Session()
def generate():
s.query(User).options(joinedload(User.addresses)).all()
# cycles here are due to ClauseElement._cloned_set and Load.context
@assert_cycles(28)
def go():
generate()
go()
def test_plain_join(self):
users, addresses = self.tables("users", "addresses")
@assert_cycles()
def go():
str(users.join(addresses))
go()
def test_plain_join_select(self):
users, addresses = self.tables("users", "addresses")
# cycles here are due to ClauseElement._cloned_set
@assert_cycles(6)
def go():
s = select([users]).select_from(users.join(addresses))
s._froms
go()
def test_orm_join(self):
User, Address = self.classes("User", "Address")
@assert_cycles()
def go():
str(orm_join(User, Address, User.addresses))
go()
def test_join_via_query_relationship(self):
User, Address = self.classes("User", "Address")
configure_mappers()
s = Session()
@assert_cycles()
def go():
s.query(User).join(User.addresses)
go()
def test_join_via_query_to_entity(self):
User, Address = self.classes("User", "Address")
configure_mappers()
s = Session()
@assert_cycles()
def go():
s.query(User).join(Address)
go()
def test_core_select(self):
User, Address = self.classes("User", "Address")
configure_mappers()
s = Session()
stmt = s.query(User).join(User.addresses).statement
@assert_cycles()
def go():
s.execute(stmt)
go()
def test_adapt_statement_replacement_traversal(self):
User, Address = self.classes("User", "Address")
statement = select([User]).select_from(
orm_join(User, Address, User.addresses)
)
@assert_cycles()
def go():
replacement_traverse(statement, {}, lambda x: None)
go()
def test_adapt_statement_cloned_traversal(self):
User, Address = self.classes("User", "Address")
statement = select([User]).select_from(
orm_join(User, Address, User.addresses)
)
@assert_cycles()
def go():
cloned_traverse(statement, {}, {})
go()
def test_column_adapter_lookup(self):
User, Address = self.classes("User", "Address")
u1 = aliased(User)
@assert_cycles()
def go():
adapter = sql_util.ColumnAdapter(inspect(u1).selectable)
adapter.columns[User.id]
go()
def test_orm_aliased(self):
User, Address = self.classes("User", "Address")
@assert_cycles()
def go():
u1 = aliased(User)
inspect(u1)
go()
    @testing.fails
    def test_the_counter(self):
        # Sanity check: a deliberately self-referencing list must trip the
        # cycle assertion (hence @testing.fails).
        @assert_cycles()
        def go():
            x = []
            x.append(x)

        go()
    def test_weak_sequence(self):
        class Foo(object):
            pass

        f = Foo()

        # Wrapping an object in util.WeakSequence must not create cycles.
        @assert_cycles()
        def go():
            util.WeakSequence([f])

        go()
@testing.provide_metadata
def test_optimized_get(self):
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base(metadata=self.metadata)
class Employee(Base):
__tablename__ = "employee"
id = Column(
Integer, primary_key=True, test_needs_autoincrement=True
)
type = Column(String(10))
__mapper_args__ = {"polymorphic_on": type}
class Engineer(Employee):
__tablename__ = " engineer"
id = Column(ForeignKey("employee.id"), primary_key=True)
engineer_name = Column(String(50))
__mapper_args__ = {"polymorphic_identity": "engineer"}
Base.metadata.create_all(testing.db)
s = Session(testing.db)
s.add(Engineer(engineer_name="wally"))
s.commit()
s.close()
@assert_cycles()
def go():
e1 = s.query(Employee).first()
e1.engineer_name
go()
    def test_visit_binary_product(self):
        a, b, q, e, f, j, r = [column(chr_) for chr_ in "abqefjr"]

        from sqlalchemy import and_, func
        from sqlalchemy.sql.util import visit_binary_product

        expr = and_((a + b) == q + func.sum(e + f), j == r)

        def visit(expr, left, right):
            # No-op visitor; only the traversal itself is under test.
            pass

        # Traversing the expression tree must not leave reference cycles.
        @assert_cycles()
        def go():
            visit_binary_product(visit, expr)

        go()
|
kawamon/hue
|
desktop/core/ext-py/SQLAlchemy-1.3.17/test/aaa_profiling/test_memusage.py
|
Python
|
apache-2.0
| 37,168
|
[
"VisIt"
] |
efb1a45fc3024520c3dac091598b158e48732c9247f77eab3d33812752979011
|
# -*- coding: utf-8 -*-
# Ambry Bundle Library File
# Use this file for code that may be imported into other bundles
class RandomSourcePipe(object):
    """Source pipe that yields 1000 synthetic rows of mixed column types,
    preceded by a header row, for exercising bundle ingestion."""

    def __init__(self, bundle, source=None):
        # ``bundle`` is part of the pipe interface but is not retained.
        if source:
            # NOTE(review): assumes source.time holds a year-like value and
            # source.space a region code — confirm against the bundle schema.
            self.year = int(source.time)
            self.space = source.space
        else:
            # Defaults used when no source record is supplied.
            self.year = 2010
            self.space = 'CA'  # stored but not referenced in __iter__

    def __iter__(self):
        import uuid
        import random
        from datetime import date
        from geoid import civick
        from collections import OrderedDict

        # Value pools for the categorical and geoid components.
        categorical = ['red', 'blue', 'green', 'yellow', 'black']
        states = list(range(1,5))
        counties = list(range(1, 5))
        tracts = list(range(1, 6))
        bgs = list(range(1, 6))

        rc = random.choice

        for i in range(1000):
            row = OrderedDict()
            row['uuid'] = str(uuid.uuid4())
            row['int'] = i % 100
            row['float'] = random.random() * 100
            row['categorical'] = rc(categorical)
            row['ordinal'] = random.randint(0, 10)
            row['gaussian'] = random.gauss(100, 15)
            row['triangle'] = random.triangular(500, 1500, 1000)
            row['exponential'] = random.expovariate(.001)
            row['year'] = self.year
            row['date'] = date(self.year, random.randint(1, 12), random.randint(1, 28))
            # Random census block-group geoid rendered via geoid.civick.
            row['bg_gvid'] = str(civick.Blockgroup(rc(states), rc(counties), rc(tracts), rc(bgs)))
            if i == 0:
                # Emit the header (column names) before the first data row.
                yield list(row.keys())
            yield list(row.values())
|
CivicKnowledge/ambry
|
test/bundle_tests/build.example.com/coverage/lib.py
|
Python
|
bsd-2-clause
| 1,571
|
[
"Gaussian"
] |
792e11e6c40483bff7c97761a2bf1d7edcb4565d90bb0d2b19a778755297b8fb
|
# -*- coding: utf-8 -*-
##
## CreateInflowFileFromERAInterimRunoff.py
## spt_lsm_autorapid_process
##
## Created by Alan D. Snow (adapted from CreateInflowFileFromECMWFRunoff.py).
## Copyright © 2015-2016 Alan D Snow. All rights reserved.
## License: BSD-3 Clause
import csv
import netCDF4 as NET
import numpy as NUM
import os
class CreateInflowFileFromERAInterimRunoff(object):
    # NOTE: legacy Python 2 code (print statements, xrange, long, csv file
    # opened in "rb"); it will not run unmodified under Python 3.
    def __init__(self):
        """Define the tool (tool name is the name of the class)."""
        self.label = "Create Inflow File From ERA Interim Runoff"
        self.description = ("Creates RAPID NetCDF input of water inflow "
                            "based on ERA Interim runoff results and "
                            "previously created weight table.")
        # Required weight-table columns, in this exact order.
        self.header_wt = ['StreamID', 'area_sqm', 'lon_index', 'lat_index', 'npoints']
        # Accepted dimension/variable layouts of the input runoff NetCDF
        # (index 0: short names with "RO"; index 1: long names with "ro").
        self.dims_oi = [['lon', 'lat', 'time'], ['longitude', 'latitude', 'time']]
        self.vars_oi = [["lon", "lat", "time", "RO"], ['longitude', 'latitude', 'time', 'ro']]
        # Expected number of time steps per input file, by cadence.
        self.length_time = {"Daily": 1, "3-Hourly": 8}
        self.errorMessages = ["Missing Variable 'time'",
                              "Incorrect dimensions in the input ERA Interim runoff file.",
                              "Incorrect variables in the input ERA Interim runoff file.",
                              "Incorrect time variable in the input ERA Interim runoff file",
                              "Incorrect number of columns in the weight table",
                              "No or incorrect header in the weight table",
                              "Incorrect sequence of rows in the weight table"]

    def dataValidation(self, in_nc):
        """Check the necessary dimensions and variables in the input netcdf data"""
        vars_oi_index = None
        data_nc = NET.Dataset(in_nc)
        # Whole-list comparison: dimension names must exactly match one of
        # the accepted layouts (Python 2: .keys() returns a list).
        dims = data_nc.dimensions.keys()
        if dims not in self.dims_oi:
            raise Exception(self.errorMessages[1])
        vars = data_nc.variables.keys()
        if vars == self.vars_oi[0]:
            vars_oi_index = 0
        elif vars == self.vars_oi[1]:
            vars_oi_index = 1
        else:
            raise Exception(self.errorMessages[2])
        # NOTE(review): data_nc is never closed here — confirm intentional.
        # Returned index selects which naming scheme in self.vars_oi applies.
        return vars_oi_index

    def dataIdentify(self, in_nc, vars_oi_index):
        """Check if the data is daily (one value) or 3 hourly"""
        data_nc = NET.Dataset(in_nc)
        name_time = self.vars_oi[vars_oi_index][2]
        time = data_nc.variables[name_time][:]
        if len(time) == self.length_time["Daily"]:
            return "Daily"
        diff = NUM.unique(NUM.diff(time))
        data_nc.close()
        time_interval_3hr = NUM.array([3.0],dtype=float)
        # Every consecutive pair of time steps must be exactly 3 hours apart.
        if (diff == time_interval_3hr).all():
            return "3-Hourly"
        else:
            return None

    def readInWeightTable(self, in_weight_table):
        """
        Read in weight table
        """
        print "Reading the weight table..."
        # Column-name -> list-of-values; rows are kept as parallel lists.
        self.dict_list = {self.header_wt[0]:[], self.header_wt[1]:[], self.header_wt[2]:[],
                          self.header_wt[3]:[], self.header_wt[4]:[]}
        with open(in_weight_table, "rb") as csvfile:
            reader = csv.reader(csvfile)
            self.count = 0
            for row in reader:
                if self.count == 0:
                    #check number of columns in the weight table
                    if len(row) < len(self.header_wt):
                        raise Exception(self.errorMessages[4])
                    #check header
                    if row[1:len(self.header_wt)] != self.header_wt[1:]:
                        raise Exception(self.errorMessages[5])
                    self.count += 1
                else:
                    for i in xrange(len(self.header_wt)):
                        self.dict_list[self.header_wt[i]].append(row[i])
                    self.count += 1
        # Number of distinct stream IDs (= rows of the output inflow matrix).
        self.size_streamID = len(set(self.dict_list[self.header_wt[0]]))

    def generateOutputInflowFile(self, out_nc, in_weight_table, tot_size_time):
        """
        Generate inflow file for RAPID
        """
        self.readInWeightTable(in_weight_table)
        # Create output inflow netcdf data
        print "Generating inflow file"
        # data_out_nc = NET.Dataset(out_nc, "w") # by default format = "NETCDF4"
        data_out_nc = NET.Dataset(out_nc, "w", format = "NETCDF3_CLASSIC")
        dim_Time = data_out_nc.createDimension('Time', tot_size_time)
        dim_RiverID = data_out_nc.createDimension('rivid', self.size_streamID)
        var_m3_riv = data_out_nc.createVariable('m3_riv', 'f4',
                                                ('Time', 'rivid'),
                                                fill_value=0)
        data_out_nc.close()
        #empty list to be read in later
        self.dict_list = {}

    def execute(self, nc_file_list, index_list, in_weight_table,
                out_nc, grid_type):
        """The source code of the tool."""
        if not os.path.exists(out_nc):
            print "ERROR: Outfile has not been created. You need to run: generateOutputInflowFile function ..."
            raise Exception("ERROR: Outfile has not been created. You need to run: generateOutputInflowFile function ...")
        if len(nc_file_list) != len(index_list):
            print "ERROR: Number of runoff files not equal to number of indices ..."
            raise Exception("ERROR: Number of runoff files not equal to number of indices ...")
        self.readInWeightTable(in_weight_table)
        lon_ind_all = [long(i) for i in self.dict_list[self.header_wt[2]]]
        lat_ind_all = [long(j) for j in self.dict_list[self.header_wt[3]]]
        # Obtain a subset of runoff data based on the indices in the weight table
        min_lon_ind_all = min(lon_ind_all)
        max_lon_ind_all = max(lon_ind_all)
        min_lat_ind_all = min(lat_ind_all)
        max_lat_ind_all = max(lat_ind_all)
        index_new = []
        data_out_nc = NET.Dataset(out_nc, "a", format = "NETCDF3_CLASSIC")
        # Validate the netcdf dataset
        vars_oi_index = self.dataValidation(nc_file_list[0])
        id_data = self.dataIdentify(nc_file_list[0], vars_oi_index)
        if id_data is None:
            raise Exception(self.errorMessages[3])
        #combine inflow data
        for nc_file_array_index, nc_file in enumerate(nc_file_list):
            index = index_list[nc_file_array_index]
            '''Calculate water inflows'''
            print "Calculating water inflows for", os.path.basename(nc_file) , grid_type, "..."
            ''' Read the netcdf dataset'''
            data_in_nc = NET.Dataset(nc_file)
            time = data_in_nc.variables[self.vars_oi[vars_oi_index][2]][:]
            # Check the size of time variable in the netcdf data
            size_time = len(time)
            if size_time != self.length_time[id_data]:
                raise Exception(self.errorMessages[3])
            # Read only the bounding box of cells referenced by the weight table.
            data_subset_all = data_in_nc.variables[self.vars_oi[vars_oi_index][3]][:, min_lat_ind_all:max_lat_ind_all+1, min_lon_ind_all:max_lon_ind_all+1]
            data_in_nc.close()
            len_time_subset_all = data_subset_all.shape[0]
            len_lat_subset_all = data_subset_all.shape[1]
            len_lon_subset_all = data_subset_all.shape[2]
            # Flatten the lat/lon grid so each cell is addressed by one index.
            data_subset_all = data_subset_all.reshape(len_time_subset_all, (len_lat_subset_all * len_lon_subset_all))
            # compute new indices based on the data_subset_all
            if not index_new:
                for r in range(0,self.count-1):
                    ind_lat_orig = lat_ind_all[r]
                    ind_lon_orig = lon_ind_all[r]
                    index_new.append((ind_lat_orig - min_lat_ind_all)*len_lon_subset_all + (ind_lon_orig - min_lon_ind_all))
            # obtain a new subset of data
            data_subset_new = data_subset_all[:,index_new]
            # start compute inflow
            pointer = 0
            for stream_index in xrange(self.size_streamID):
                npoints = int(self.dict_list[self.header_wt[4]][pointer])
                # Check if all npoints points correspond to the same streamID
                if len(set(self.dict_list[self.header_wt[0]][pointer : (pointer + npoints)])) != 1:
                    print "ROW INDEX", pointer
                    print "COMID", self.dict_list[self.header_wt[0]][pointer]
                    raise Exception(self.errorMessages[2])
                area_sqm_npoints = [float(k) for k in self.dict_list[self.header_wt[1]][pointer : (pointer + npoints)]]
                area_sqm_npoints = NUM.array(area_sqm_npoints)
                area_sqm_npoints = area_sqm_npoints.reshape(1, npoints)
                data_goal = data_subset_new[:, pointer:(pointer + npoints)]
                if grid_type == 't255':
                    #A) ERA Interim Low Res (T255) - data is cumulative
                    data_goal = data_goal.astype(NUM.float32)
                    #from time 3/6/9/12 (time zero not included, so assumed to be zero)
                    ro_first_half = NUM.concatenate([data_goal[0:1,], NUM.subtract(data_goal[1:4,], data_goal[0:3,])])
                    #from time 15/18/21/24 (time restarts at time 12, assumed to be zero)
                    ro_second_half = NUM.concatenate([data_goal[4:5,], NUM.subtract(data_goal[5:,], data_goal[4:7,])])
                    ro_stream = NUM.concatenate([ro_first_half, ro_second_half]) * area_sqm_npoints
                else:
                    #A) ERA Interim High Res (T511) - data is incremental
                    #from time 3/6/9/12/15/18/21/24
                    ro_stream = data_goal * area_sqm_npoints
                # Sum over the weighted cells to get inflow per stream per step.
                data_out_nc.variables['m3_riv'][index*size_time:(index+1)*size_time,stream_index] = ro_stream.sum(axis = 1)
                pointer += npoints
        # close the input and output netcdf datasets
        data_out_nc.close()
|
erdc-cm/spt_lsm_autorapid_process
|
imports/CreateInflowFileFromERAInterimRunoff.py
|
Python
|
bsd-3-clause
| 10,290
|
[
"NetCDF"
] |
f6b1a89073d06609840edee0982859ee84a0cf9d45af6d0284f93324019bb74d
|
#!/usr/bin/env python
#########################################################################################
#
# Convert binary spinal cord segmentation to trilinear-interpolated segmentation. Instead of simply re-interpolating
# the image, this function oversample the binary mask, then smooth along centerline (to remove step-effects), then
# downsample back to native resolution.
#
# ---------------------------------------------------------------------------------------
# Copyright (c) 2013 Polytechnique Montreal <www.neuro.polymtl.ca>
# Authors: Julien Cohen-Adad
# Modified: 2014-07-06
#
# About the license: see the file LICENSE.TXT
#########################################################################################
import sys
import os
import getopt
import time

import numpy as np

from spinalcordtoolbox.image import Image, generate_output_file, convert
from spinalcordtoolbox.utils.sys import init_sct, run_proc, __data_dir__, printv
from spinalcordtoolbox.utils.fs import tmp_create, check_file_exist, rmtree, extract_fname
class Param:
    """Default processing parameters for the binary-to-trilinear conversion."""
    def __init__(self):
        self.debug = 0  # when nonzero, use bundled test data instead of CLI args
        self.smoothing_sigma = 5  # sigma (in voxels) of the centerline smoothing
        self.interp_factor = 1  # interpolation factor. Works fine with 1 (i.e., no interpolation required).
        self.suffix = '_trilin'  # output suffix
        self.remove_temp_files = 1  # 1 = delete tmp folder at the end
        self.verbose = 1
# main
# =======================================================================================================================
def main():
    """Convert a binary spinal cord segmentation to a trilinear-interpolated one.

    Workflow: parse CLI options, copy the input into a tmp folder, upsample
    with linear interpolation, smooth along the centerline, downsample back
    to native resolution, then write the output next to the input with the
    '_trilin' suffix.
    """
    # Initialization
    fname_data = ''
    interp_factor = param.interp_factor
    remove_temp_files = param.remove_temp_files
    verbose = param.verbose
    suffix = param.suffix
    smoothing_sigma = param.smoothing_sigma

    # start timer
    start_time = time.time()

    # Parameters for debug mode
    if param.debug:
        fname_data = os.path.join(__data_dir__, 'sct_testing_data', 't2', 't2_seg.nii.gz')
        remove_temp_files = 0
        param.mask_size = 10
    else:
        # Check input parameters
        try:
            opts, args = getopt.getopt(sys.argv[1:], 'hi:v:r:s:')
        except getopt.GetoptError:
            usage()
            raise SystemExit(2)
        if not opts:
            usage()
            raise SystemExit(2)
        for opt, arg in opts:
            if opt == '-h':
                usage()
                return
            elif opt in ('-i'):
                fname_data = arg
            elif opt in ('-r'):
                remove_temp_files = int(arg)
            elif opt in ('-s'):
                # kept as a string; only re-serialized with str() downstream
                smoothing_sigma = arg
            elif opt in ('-v'):
                verbose = int(arg)

    # display usage if a mandatory argument is not provided
    if fname_data == '':
        usage()
        raise SystemExit(2)

    # printv(arguments)
    printv('\nCheck parameters:')
    printv('  segmentation ........... ' + fname_data)
    printv('  interp factor .......... ' + str(interp_factor))
    printv('  smoothing sigma ........ ' + str(smoothing_sigma))

    # check existence of input files
    printv('\nCheck existence of input files...')
    check_file_exist(fname_data, verbose)

    # Extract path, file and extension
    # NOTE(review): extract_fname is not imported at the top of this file —
    # confirm it is in scope (it lives in spinalcordtoolbox.utils.fs).
    path_data, file_data, ext_data = extract_fname(fname_data)

    path_tmp = tmp_create(basename="binary_to_trilinear")

    printv('\nCopying input data to tmp folder and convert to nii...', param.verbose)
    im_input = convert(Image(fname_data))
    im_input.save(os.path.join(path_tmp, "data.nii"), mutable=True, verbose=param.verbose)

    # go to tmp folder
    curdir = os.getcwd()
    os.chdir(path_tmp)

    # Get dimensions of data
    printv('\nGet dimensions of data...', verbose)
    nx, ny, nz, nt, px, py, pz, pt = Image('data.nii').dim
    printv('.. ' + str(nx) + ' x ' + str(ny) + ' x ' + str(nz), verbose)

    # upsample data
    printv('\nUpsample data...', verbose)
    run_proc(["sct_resample",
              "-i", "data.nii",
              "-x", "linear",
              "-vox", str(nx * interp_factor) + 'x' + str(ny * interp_factor) + 'x' + str(nz * interp_factor),
              "-o", "data_up.nii"], verbose)

    # Smooth along centerline
    printv('\nSmooth along centerline...', verbose)
    run_proc(["sct_smooth_spinalcord",
              "-i", "data_up.nii",
              "-s", "data_up.nii",
              "-smooth", str(smoothing_sigma),
              "-r", str(remove_temp_files),
              "-v", str(verbose)], verbose)

    # downsample data
    printv('\nDownsample data...', verbose)
    run_proc(["sct_resample",
              "-i", "data_up_smooth.nii",
              "-x", "linear",
              "-vox", str(nx) + 'x' + str(ny) + 'x' + str(nz),
              "-o", "data_up_smooth_down.nii"], verbose)

    # come back
    os.chdir(curdir)

    # Generate output files
    printv('\nGenerate output files...')
    fname_out = generate_output_file(os.path.join(path_tmp, "data_up_smooth_down.nii"), '' + file_data + suffix + ext_data)

    # Delete temporary files
    if remove_temp_files == 1:
        printv('\nRemove temporary files...')
        rmtree(path_tmp)

    # display elapsed time
    elapsed_time = time.time() - start_time
    printv('\nFinished! Elapsed time: ' + str(int(np.round(elapsed_time))) + 's')

    # to view results
    printv('\nTo view results, type:')
    printv('fslview ' + file_data + ' ' + file_data + suffix + ' &\n')
# printv(usage)
# ==========================================================================================
def usage():
    """Print the command-line help text for this script."""
    print('\n'
          '' + os.path.basename(__file__) + '\n'
          '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n'
          'Part of the Spinal Cord Toolbox <https://sourceforge.net/projects/spinalcordtoolbox>\n'
          '\n'
          'DESCRIPTION\n'
          '  Convert binary spinal cord segmentation to trilinear-interpolated segmentation. Instead of simply\n'
          '  re-interpolating the image, this function oversamples the binary mask, smoothes along centerline\n'
          '  (to remove step-effects), then downsamples back to native resolution.\n'
          '\n'
          'USAGE\n'
          '  ' + os.path.basename(__file__) + ' -i <bin_seg>\n'
          '\n'
          'MANDATORY ARGUMENTS\n'
          '  -i <bin_seg>      binary segmentation of spinal cord\n'
          '\n'
          'OPTIONAL ARGUMENTS\n'
          '  -s                sigma of the smoothing Gaussian kernel (in voxel). Default=' + str(param_default.smoothing_sigma) + '\n'
          '  -r {0,1}          remove temporary files. Default=' + str(param_default.remove_temp_files) + '\n'
          '  -v {0,1}          verbose. Default=' + str(param_default.verbose) + '\n'
          '  -h                help. Show this message\n'
          '\n'
          'EXAMPLE\n'
          '  ' + os.path.basename(__file__) + ' -i segmentation.nii \n')
# =======================================================================================================================
# Start program
# =======================================================================================================================
if __name__ == "__main__":
    init_sct()
    # initialize parameters
    param = Param()
    # pristine copy whose values usage() reports as the defaults
    param_default = Param()
    # call main function
    main()
|
neuropoly/spinalcordtoolbox
|
spinalcordtoolbox/scripts/isct_convert_binary_to_trilinear.py
|
Python
|
mit
| 7,321
|
[
"Gaussian"
] |
6671dfd4fb85a8bd2e87adf454ba423d759db1bcf36544d9bd8086214e872f49
|
#!/usr/bin/env python
import datetime
import gevent
import os
import re
import urllib2
import urlparse
from gevent import monkey
monkey.patch_all()
from flask import Flask, render_template, send_from_directory, request, make_response
from lxml.html import parse
# initialization
app = Flask(__name__)  # WSGI app; stdlib is gevent-patched (monkey.patch_all above)
#app.config.update(
#    DEBUG=True,
#)
# controllers
@app.route('/favicon.ico')
def favicon():
    """Serve the favicon with a long (30240-minute, ~3-week) client cache."""
    return add_cache_headers(send_from_directory(os.path.join(app.root_path, 'static'), 'ico/favicon.ico'), 30240)
@app.route('/css/<filename>')
def css(filename):
    """Serve a static stylesheet with a long (~3-week) client cache."""
    return add_cache_headers(send_from_directory(os.path.join(app.root_path, 'static'), "css/%s" % filename), 30240)
@app.route('/js/<filename>')
def js(filename):
    """Serve a static script with a long (~3-week) client cache."""
    return add_cache_headers(send_from_directory(os.path.join(app.root_path, 'static'), "js/%s" % filename), 30240)
@app.errorhandler(404)
def page_not_found(error):
    """Render the cached 404 page.

    Fix: Flask invokes error handlers with the error object as a positional
    argument, so the handler must accept one parameter; the original
    zero-argument signature raised TypeError whenever a 404 was served.
    """
    return add_cache_headers(render_template('404.html'), 30240), 404
@app.route("/")
def index():
    """Render the landing page, pre-filling the slug field when supplied."""
    # request.args.get with a default collapses the "missing -> None" case
    # straight to the empty string.
    value = request.args.get('slug', "")
    return add_cache_headers(render_template('index.html', value=value), 60)
@app.route("/about")
def about():
    """Render the static About page with a long client cache."""
    return add_cache_headers(render_template('about.html'), 30240)
@app.route("/slug/<path:slug>")
def check(slug):
    """Run all service checks for the given GOV.UK slug (short 5-min cache)."""
    return add_cache_headers(service_check("/%s" % slug), 5)
# Helper functions
def extract_service_domain_from_link(link):
    """Return (True, domain) when *link* is on *.service.gov.uk, else
    (False, explanatory message)."""
    domain = urlparse.urlparse(link).netloc
    # Guard clause: anything outside the service.gov.uk zone fails fast.
    if not re.match('^.*\.service\.gov\.uk$', domain):
        return False, "The link is not to something on the service.gov.uk domain"
    return True, domain
def find_link_from_slug(govuk_slug):
    """Fetch the GOV.UK page for *govuk_slug* and locate the service link.

    Looks first for a 'Start now'/'Apply now' anchor inside .get-started,
    then for a form.get-started action. Returns (True, link) on success,
    (False, explanatory message/URL) on failure.
    """
    try:
        service_link = None
        html = urllib2.urlopen("https://www.gov.uk%s" % govuk_slug)
        doc = parse(html).getroot()
        for link in doc.cssselect('.get-started a'):
            if link.text_content() == 'Start now' or link.text_content() == 'Apply now':
                service_link = link.get('href')
        if service_link is not None:
            return True, service_link
        # Fall back to a start form whose action points at the service.
        for form in doc.cssselect('form.get-started'):
            service_link = form.get('action')
        if service_link is not None:
            return True, service_link
        return False, "Could not find 'Start now' link on https://www.gov.uk%s" % govuk_slug
    except IOError:
        # Network failure or non-2xx response from GOV.UK.
        return False, "https://www.gov.uk%s" % govuk_slug
def header_dict(headers):
    """Turn raw 'Name: value' header lines into a dict keyed by the
    lower-cased header name, values stripped of trailing whitespace.
    Later duplicates overwrite earlier ones."""
    pairs = (header.split(': ', 1) for header in headers)
    return {name.lower(): value.rstrip() for name, value in pairs}
def format_output(status, title, description):
    """Render one check result (pass/fail, title, HTML description) as HTML."""
    return render_template('check.html', status=status, title=title, description=description)
def datetime_filter(value, format_string='%d/%m/%Y %H:%M'):
    """Jinja filter: format a datetime object.

    Fix: the parameter was originally named ``datetime``, shadowing the
    imported module inside the function body; renamed to ``value``.
    Jinja invokes filters positionally, so callers are unaffected.
    """
    return value.strftime(format_string)
# Register the filter under the name used in templates: {{ x|datetime }}
app.jinja_env.filters['datetime'] = datetime_filter
def add_cache_headers(response, minutes):
    """Wrap *response* in a Flask response and add Expires / Cache-Control
    headers allowing public caching for *minutes* minutes."""
    response = make_response(response)
    then = datetime.datetime.utcnow() + datetime.timedelta(minutes=minutes)
    rfc822 = then.strftime("%a, %d %b %Y %H:%M:%S +0000")
    response.headers.add('Expires', rfc822)
    # max-age is expressed in seconds, hence the * 60.
    response.headers.add(
        'Cache-Control', 'public,max-age=%d' % int(60 * minutes))
    return response
# Service checks
def check_bare_ssl_domain_redirects_to_slug(domain, slug):
    """Check that https://<domain>/ redirects back to the GOV.UK start page.

    Returns (passed, title, description). Fixes: the original assigned
    ``correct_location`` twice (duplicate statement) and ended with an
    if/else whose two branches returned the same tuple apart from the
    boolean — both collapsed here without behavior change.
    """
    correct_location = "https://www.gov.uk%s" % slug
    bare_domain = "https://%s/" % domain
    url = urllib2.urlopen(bare_domain)
    location = url.geturl()  # final URL after following any redirects
    check_title = "The bare service domain should redirect back to the GOV.UK start page"
    check_description = """
    In order to make sure that all transactions begin and end on GOV.UK, it is important that
    the bare domain (<a href='%s'>%s</a>) redirects back to the GOV.UK start page (<a href='%s'>%s</a>), so that if users are
    typing the URL from memory, they get a consistent user experience and their browser does
    not cache the wrong entry page.
    """ % (bare_domain, bare_domain, correct_location, correct_location)
    return location == correct_location, check_title, check_description
def check_listening_on_http(domain):
    """Check that plain-HTTP access is either refused or redirected to HTTPS.

    Returns (passed, title, description).
    """
    check_title = "The service should enforce SSL"
    check_description = """
    Users must have confidence that any information they are submitting to a service, including
    pages they visit, is not available to a 3rd-party. In order to enforce this, the service should
    either reject non-SSL connections, or should immediately redirect them to secured connection via SSL.
    """
    try:
        url = urllib2.urlopen("http://%s/" % domain, timeout=1)
        parsed_url = urlparse.urlparse(url.geturl())
        if parsed_url.scheme == 'https':
            return True, "%s (Service redirects HTTP to HTTPS)" % check_title, check_description
        else:
            return False, check_title, check_description
    except IOError:
        # Connection refused/timed out: not listening on HTTP at all — a pass.
        return True, "%s (Service does not listen on HTTP)" % check_title, check_description
def check_for_HSTS_header(link):
    """Check that the service sets a Strict-Transport-Security header.

    Returns (passed, title, description).
    """
    check_title = "The service should set a Strict-Transport-Security (HSTS) header"
    check_description = """
    To reduce the chance that traffic for a user can be intercepted, the service
    should notify the browser that in future it should only use secure connections.
    It can do this by setting an HTTP Header called 'Strict-Transport-Security'.
    """
    try:
        url = urllib2.urlopen(link)
        headers = header_dict(url.info().headers)
        if 'strict-transport-security' in headers.keys():
            return True, check_title, check_description
        else:
            return False, check_title, check_description
    except urllib2.HTTPError as e:
        return False, check_title, "Error: %s" % e
def check_for_www(domain):
    """Check that the user-facing domain has the canonical
    www.{service}.service.gov.uk shape. Returns (passed, title, description)."""
    check_title = "The service domain format should be www.{service}.service.gov.uk"
    check_description = """
    The Service Manual states that Users must interact with a single domain and that it
    will be www.{service}.service.gov.uk. It is permissible to create extra domains for
    example for Content Delivery Networks, Assets or Administration, however the user-facing
    domain should be prefixed by www.
    """
    passed = re.match('^www\.[^.]+\.service\.gov\.uk$', domain) is not None
    return passed, check_title, check_description
def check_for_robots_txt(domain):
    """Check that the service serves a text/plain robots.txt.

    Returns (passed, title, description).
    """
    check_title = "The service should have a robots.txt file"
    check_description = """
    Every service hosted on a service.gov.uk domain must have a robots.txt file asking search engines
    not to index any part of the site. More details can be found on the <a href='http://www.robotstxt.org/faq/prevent.html'>Web Robots pages</a>
    """
    try:
        url = urllib2.urlopen("https://%s/robots.txt" % domain)
        headers = header_dict(url.info().headers)
        # startswith() tolerates a charset suffix, e.g. "text/plain; charset=utf-8".
        if headers['content-type'].startswith("text/plain"):
            return True, check_title, check_description
        else:
            return False, check_title, "The robots.txt file exists, but is %s rather than text/plain." % headers['content-type']
    except urllib2.HTTPError as e:
        return False, check_title, "Could not find robots.txt (Error: %s)" % e
def check_cookies(link):
    """Check every Set-Cookie header on *link* for the Secure and HttpOnly
    attributes and for scoping to the exact service domain.

    Returns (passed, title, description); each failing cookie appends its
    details to the description. Fix: the HttpOnly failure fragment ended
    with a stray '<' ("<br /><"), emitting broken HTML in the report.
    """
    failed = False
    check_title = "Cookies should be Secure, HttpOnly and scoped to the service domain"
    check_description = """
    Cookies used on www.{service}.service.gov.uk must be scoped to the originating domain only.
    Cookies must not be scoped to the domain servicename.service.gov.uk. Cookies must be sent with
    the <code>Secure</code> attribute and should, where appropriate, be sent with the <code>HttpOnly</code>
    attribute. These flags <a href='https://en.wikipedia.org/wiki/HTTP_cookie#Secure_and_HttpOnly'>provide additional assurances
    about how cookies will be handled by browsers.</a>
    """
    domain = extract_service_domain_from_link(link)[1]
    cookie_domain = "domain=" + domain
    url = urllib2.urlopen(link)
    headers = url.info().headers
    for header in headers:
        key, value = header.rstrip().split(': ', 1)
        if key.lower() == 'set-cookie':
            # Attributes are compared lower-cased, e.g. "secure", "httponly".
            cookie_settings = value.lower().split('; ')
            if 'httponly' not in cookie_settings:
                check_description += "<br /><br />HttpOnly is not set<br />"
                check_description += " Set-Cookie: %s<br />" % value
                failed = True
            if 'secure' not in cookie_settings:
                check_description += "<br /><br />Secure is not set<br />"
                check_description += " Set-Cookie: %s<br />" % value
                failed = True
            if cookie_domain not in cookie_settings:
                check_description += "<br /><br />Cookie not scoped to domain=%s<br />" % domain
                check_description += " Set-Cookie: %s<br />" % value
                failed = True
    if failed:
        return False, check_title, check_description
    else:
        return True, check_title, check_description
# Main logic process
def service_check(slug):
    """Run the full suite of checks for a GOV.UK start-page *slug* and
    render the aggregated HTML report."""
    output = ""
    result, link = find_link_from_slug(slug)
    if result:
        output += format_output(result,
                                "The GOV.UK start page should link to the service",
                                """All transactions should start on GOV.UK with a transaction start page.
                                You supplied the start page of <a href='https://www.gov.uk%s'>https://www.gov.uk%s</a>
                                which appears to link to a service: <a href='%s'>%s</a>
                                """ % (slug, slug, link, link))
        result, domain = extract_service_domain_from_link(link)
        if result:
            # Run all domain/link checks concurrently via gevent greenlets.
            checks = [
                gevent.spawn(check_bare_ssl_domain_redirects_to_slug, domain, slug),
                gevent.spawn(check_listening_on_http, domain),
                gevent.spawn(check_for_www, domain),
                gevent.spawn(check_for_HSTS_header, link),
                gevent.spawn(check_for_robots_txt, domain),
                gevent.spawn(check_cookies, link)
            ]
            gevent.joinall(checks)
            # Each check returns (passed, title, description).
            for check in checks:
                status, message, description = check.value
                output += "%s\n" % format_output(status, message, description)
        else:
            output += format_output(result,
                                    "The GOV.UK start page should link to service on a service.gov.uk domain",
                                    """You supplied the start page of <a href='https://www.gov.uk%s'>https://www.gov.uk%s</a>
                                    which appears to have a 'Start now' button, but it does not link to something on the
                                    service.gov.uk domain as it points to <a href='%s'>%s</a>.""" % (slug, slug, link, link))
    else:
        output += format_output(result,
                                "The GOV.UK start page should link to the service",
                                """All transactions should start on GOV.UK with a transaction start page.
                                You supplied the start page of <a href='https://www.gov.uk%s'>https://www.gov.uk%s</a>,
                                but either the page does not exist, or I cannot find a 'Start now' link on this
                                page pointing to a service.""" % (slug, slug))
    return render_template('service_check.html', output=output, link=link, checked_at=datetime.datetime.now())
# launch
if __name__ == "__main__":
    # Heroku-style port configuration; defaults to 5000 for local runs.
    port = int(os.environ.get("PORT", 5000))
    app.run(host='0.0.0.0', port=port, debug=True)  # debug=True: development only
|
alphagov/service-domain-checker
|
webapp.py
|
Python
|
mit
| 11,942
|
[
"VisIt"
] |
981b23474125c19340652776f12cf1a9560c291070e9ad3e0119c37925f9284d
|
"""
Single page performance tests for LMS.
"""
from nose.plugins.attrib import attr
from common.test.acceptance.fixtures.course import CourseFixture, CourseUpdateDesc, XBlockFixtureDesc
from common.test.acceptance.pages.common.auto_auth import AutoAuthPage
from common.test.acceptance.pages.common.logout import LogoutPage
from common.test.acceptance.pages.lms.course_info import CourseInfoPage
from common.test.acceptance.pages.lms.courseware import CoursewarePage
from common.test.acceptance.pages.lms.dashboard import DashboardPage
from common.test.acceptance.pages.lms.login import LoginPage
from common.test.acceptance.pages.lms.progress import ProgressPage
from common.test.acceptance.tests.helpers import UniqueCourseTest, load_data_str
@attr(har_mode='explicit')
class LmsPerformanceTest(UniqueCourseTest):
    """
    Base class to capture LMS performance with HTTP Archives.
    """
    # Fixed credentials for the auto-authenticated test student.
    username = 'test_student'
    email = 'student101@example.com'

    def setUp(self):
        """
        Setup course
        """
        super(LmsPerformanceTest, self).setUp()

        # Install a course with sections/problems, tabs, updates, and handouts
        course_fix = CourseFixture(
            self.course_info['org'], self.course_info['number'],
            self.course_info['run'], self.course_info['display_name']
        )

        course_fix.add_update(CourseUpdateDesc(date='January 29, 2014', content='Test course update1'))
        course_fix.add_update(CourseUpdateDesc(date='January 30, 2014', content='Test course update2'))
        course_fix.add_update(CourseUpdateDesc(date='January 31, 2014', content='Test course update3'))

        course_fix.add_children(
            XBlockFixtureDesc('chapter', 'Test Section 1').add_children(
                XBlockFixtureDesc('sequential', 'Test Subsection 1').add_children(
                    XBlockFixtureDesc('problem', 'Test Problem 1', data=load_data_str('multiple_choice.xml')),
                    XBlockFixtureDesc('problem', 'Test Problem 2', data=load_data_str('formula_problem.xml')),
                    XBlockFixtureDesc('html', 'Test HTML', data="<html>Html child text</html>"),
                )
            ),
            XBlockFixtureDesc('chapter', 'Test Section 2').add_children(
                XBlockFixtureDesc('sequential', 'Test Subsection 2').add_children(
                    XBlockFixtureDesc('html', 'Html Child', data="<html>Html child text</html>")
                )
            ),
            XBlockFixtureDesc('chapter', 'Test Section 3').add_children(
                XBlockFixtureDesc('sequential', 'Test Subsection 3').add_children(
                    XBlockFixtureDesc('problem', 'Test Problem 3')
                )
            )
        ).install()

        # Log in and enroll the test user in the freshly installed course.
        AutoAuthPage(self.browser, username=self.username, email=self.email, course_id=self.course_id).visit()

    def _make_har_file(self, page):
        """
        Visit page and make HAR file.
        """
        har_name = '{page}_{course}'.format(page=type(page).__name__, course=self.course_info['number'])
        self.har_capturer.add_page(self.browser, har_name)
        page.visit()
        self.har_capturer.save_har(self.browser, har_name)

    def test_visit_coursware(self):
        """
        Produce a HAR for loading the Coursware page.
        """
        courseware_page = CoursewarePage(self.browser, self.course_id)
        self._make_har_file(courseware_page)

    def test_visit_dashboard(self):
        """
        Produce a HAR for loading the Dashboard page.
        """
        dashboard_page = DashboardPage(self.browser)
        self._make_har_file(dashboard_page)

    def test_visit_course_info(self):
        """
        Produce a HAR for loading the Course Info page.
        """
        course_info_page = CourseInfoPage(self.browser, self.course_id)
        self._make_har_file(course_info_page)

    def test_visit_login_page(self):
        """
        Produce a HAR for loading the Login page.
        """
        login_page = LoginPage(self.browser)

        # Logout previously logged in user to be able to see Login page.
        LogoutPage(self.browser).visit()
        self._make_har_file(login_page)

    def test_visit_progress_page(self):
        """
        Produce a HAR for loading the Progress page.
        """
        progress_page = ProgressPage(self.browser, self.course_id)
        self._make_har_file(progress_page)
|
Stanford-Online/edx-platform
|
common/test/acceptance/performance/test_lms_performance.py
|
Python
|
agpl-3.0
| 4,431
|
[
"VisIt"
] |
0f9c497ddb4a8f64f408393c310228318fed9308329eba7454b19b298b18299c
|
from collections import namedtuple
import os
import ase.io
import ase.db
homedir = os.path.expanduser('~')
materials_dir = homedir + '/Documents/Data/materials'
data_dir = homedir + '/Documents/Data/datasets/sulfur_clusters'

# Point group assigned to each computed sulfur allotrope geometry.
pointgroups = {'S8':'D4d', 'S7_branched':'Cs', 'S7_ring':'Cs', 'S6_branched':'Cs',
               'S6_buckled':'C2v', 'S6_stack_S3':'D3h', 'S6_chain_63':'C1',
               'S5_ring':'Cs', 'S4_eclipsed':'C2v', 'S4_buckled':'D2d', 'S3_ring':'D3h',
               'S3_bent':'C2v','S2':'Dinfh'}
# pointgroup 'S4_C2h':'C2h' removed from dict as this species is unstable (negative frequencies)

# Map point group -> [geometry class, rotational symmetry number].
rot_sym = {'C1':['nonlinear',1], 'Cs':['nonlinear',1], 'C2v':['nonlinear',2], 'C2h':['nonlinear',2],
           'D3h':['nonlinear',6], 'D2d':['nonlinear',4],'D4d':['nonlinear',8],
           'Dinfh':['linear',2]}

# NOTE(review): this docstring sits mid-module (after the constants) and is a
# no-op expression here; consider moving it to the top of the file.
"""Generate a JSON database for computed sulfur allotropes for thermochemical modelling.
The database format is that used by ASE, with the following custom data fields:
frequencies: List of vibrational mode frequencies, including zero-frequency modes, in cm-1
geometry: takes the values 'linear', 'nonlinear', 'monatomic'. Needed for calculation of rotational energy.
symmetry: symmetry number; integer number of equivalent rotations.
pointgroup: string indicating point group of molecule.
"""

# Lightweight record type for one allotrope entry.
Species = namedtuple('Species', 'id structure_path frequencies geometry symmetry pointgroup')
def vibs_from(path, abs_path=False):
    """Parse vibrational mode frequencies (cm-1) from a vibration output file.

    Lines are skipped until the " Mode number" table header; each subsequent
    line with exactly 4 whitespace-separated columns contributes its second
    column as a frequency. Parsing stops at the first non-4-column line.

    Fix: the original test ``line[0:13] == " Mode number"`` compared a
    13-character slice against a 12-character literal, so the header could
    never match a normally newline-terminated line and the table was
    silently skipped; replaced with ``str.startswith``.

    :param path: file path, relative to ``materials_dir`` unless *abs_path*
    :param abs_path: treat *path* as a complete path when True
    :return: list of float frequencies (possibly empty)
    """
    if abs_path:
        full_path = path
    else:
        full_path = materials_dir + '/' + path
    vib_list = []
    with open(full_path, 'r') as f:
        for line in f:
            if line.startswith(" Mode number"):
                break
        for line in f:
            if len(line.split()) == 4:
                vib_list.append(float(line.split()[1]))
            else:
                break
    return vib_list
def main():
    """Build per-functional JSON databases of sulfur allotropes, then write a
    frequency-scaled (x0.96) copy of the PBE0 data set.

    For each exchange-correlation functional, read the optimised geometry and
    vibrational frequencies of every species in ``pointgroups`` and store them
    in ``sulfur_<functional>.json`` together with the geometry class and
    rotational symmetry number from ``rot_sym``. Species whose calculation
    output files are missing are silently skipped.
    """
    for functional in 'PBEsol', 'PBE0', 'LDA', 'B3LYP':
        c = ase.db.connect('sulfur_' + functional.lower() + '.json')
        calc_dir = data_dir + '/' + functional
        # BUG FIX: dict.iteritems() exists only on Python 2; items() works on
        # both Python 2 and 3 with identical behavior here.
        for species, pointgroup in pointgroups.items():
            try:
                atoms = ase.io.read(calc_dir + '/' + species + '/vibs/basic.central.out')
                vibs = vibs_from(calc_dir + '/' + species + '/vibs/basic.vib.out', abs_path=True)
                c.write(species, atoms, data={'frequencies': vibs,
                                              'geometry': rot_sym[pointgroup][0],
                                              'symmetry': rot_sym[pointgroup][1],
                                              'pointgroup': pointgroup
                                              })
            except IOError:
                # Calculation output missing for this species/functional combination.
                pass
    # Empirical 0.96 scaling of the PBE0 frequencies into a separate database.
    c = ase.db.connect('sulfur_pbe0.json')
    c_96 = ase.db.connect('sulfur_pbe0_96.json')
    for allotrope in c.select():
        atoms = c.get_atoms(allotrope.id)
        data = allotrope.data
        data.update({'frequencies': [v * 0.96 for v in data['frequencies']]})
        c_96.write(allotrope.id, atoms, data=data)
if __name__ == '__main__':
    main()
|
WMD-group/sulfur-model
|
scripts/gen_sulfur_db.py
|
Python
|
gpl-3.0
| 3,122
|
[
"ASE"
] |
4e6cb9123486cb30388d0eb734563eaef413397e6a0c6400ab3b52c2c0d9c66f
|
"""
Author: Roger Labbe
Copyright: 2014
This code performs various basic statistics functions for the
Kalman and Bayesian Filters in Python book. Much of this code
is non-optimal; production code should call the equivalent scipy.stats
functions. I wrote the code in this form to make explicit how the
computations are done. The scipy.stats module has many more useful functions
than what I have written here. In some cases, however, my code is significantly
faster, at least on my machine. For example, gaussian average 794 ns, whereas
stats.norm(), using the frozen form, averages 116 microseconds per call.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import math
import numpy as np
import numpy.linalg as linalg
import matplotlib.pyplot as plt
import scipy.sparse as sp
import scipy.sparse.linalg as spln
import scipy.stats
from scipy.stats import norm
from matplotlib.patches import Ellipse
_two_pi = 2*math.pi
def gaussian(x, mean, var):
    """Normal probability density of N(mean, var) evaluated at ``x``.

    Equivalent to ``scipy.stats.norm(mean, math.sqrt(var)).pdf(x)`` but much
    faster, at the cost of flexibility.

    Parameters
    ----------
    x : scalar or array-like
        Point(s) at which to evaluate the density.
    mean : scalar
        Mean of the Gaussian.
    var : scalar
        Variance of the Gaussian.

    Returns
    -------
    probability : float, or array-like
        Density of N(mean, var) at ``x``; an array input yields an array.

    Examples
    --------
    gaussian(3, 1, 2)
    gaussian([3, 4, 3, 2, 1], 1, 2)
    """
    residual = np.asarray(x) - mean
    return np.exp(-0.5 * residual**2 / var) / np.sqrt(2 * math.pi * var)
def mul (mean1, var1, mean2, var2):
    """Multiply the Gaussians N(mean1, var1) and N(mean2, var2).

    Returns the (mean, var) tuple of the product density; var1 and var2 are
    variances (sigma squared in the usual parlance).
    """
    combined_mean = (var1 * mean2 + var2 * mean1) / (var1 + var2)
    combined_var = 1 / (1 / var1 + 1 / var2)
    return (combined_mean, combined_var)
def add (mean1, var1, mean2, var2):
    """Sum of the independent Gaussians N(mean1, var1) and N(mean2, var2).

    Means and variances simply add; returns the (mean, var) tuple.
    """
    return (mean1 + mean2, var1 + var2)
def multivariate_gaussian(x, mu, cov):
    """ This is designed to replace scipy.stats.multivariate_normal
    which is not available before version 0.14. You may either pass in a
    multivariate set of data:
    multivariate_gaussian (array([1,1]), array([3,4]), eye(2)*1.4)
    multivariate_gaussian (array([1,1,1]), array([3,4,5]), 1.4)
    or unidimensional data:
    multivariate_gaussian(1, 3, 1.4)
    In the multivariate case if cov is a scalar it is interpreted as eye(n)*cov
    The function gaussian() implements the 1D (univariate) case, and is much
    faster than this function.
    equivalent calls:
    multivariate_gaussian(1, 2, 3)
    scipy.stats.multivariate_normal(2,3).pdf(1)

    Parameters
    ----------
    x : scalar, or np.array-like
        Value to compute the probability for. May be a scalar if univariate,
        or any type that can be converted to an np.array (list, tuple, etc).
    mu : scalar, or np.array-like
        Mean of the Gaussian. Same conversion rules as x.
    cov : scalar, or np.array-like
        Covariance of the Gaussian; a scalar is expanded to eye(n)*cov.

    Returns
    -------
    probability : float
        probability for x for the Gaussian (mu, cov)
    """
    # (A stray no-op expression `scipy.stats.multivariate_normal` was removed
    # here; it evaluated an attribute and discarded the result.)
    # Force inputs to at least 1-D arrays. np.array(..., copy=False) raises on
    # NumPy >= 2.0 whenever a copy is required, so use asarray/atleast_1d.
    x = np.atleast_1d(np.asarray(x))
    mu = np.atleast_1d(np.asarray(mu))
    nx = len(mu)
    cov = _to_cov(cov, nx)
    # log-space normalisation constant via slogdet for numerical stability
    norm_coeff = nx*math.log(2*math.pi) + np.linalg.slogdet(cov)[1]
    err = x - mu
    if (sp.issparse(cov)):
        numerator = spln.spsolve(cov, err).T.dot(err)
    else:
        numerator = np.linalg.solve(cov, err).T.dot(err)
    return math.exp(-0.5*(norm_coeff + numerator))
def plot_gaussian(mean, variance,
                  mean_line=False,
                  xlim=None,
                  xlabel=None,
                  ylabel=None):
    """Plot the pdf of the normal distribution N(mean, variance).

    mean_line : if True, draw a vertical line at x = mean
    xlim : optional (low, high) tuple for the x axis; when omitted the range
        covering the 0.1%..99.9% quantiles is used
    xlabel, ylabel : optional axis labels
    """
    std = math.sqrt(variance)
    dist = scipy.stats.norm(mean, std)
    if xlim is not None:
        low, high = xlim[0], xlim[1]
    else:
        low, high = dist.ppf(0.001), dist.ppf(0.999)
    xs = np.arange(low, high, (high - low) / 1000)
    plt.plot(xs, dist.pdf(xs))
    plt.xlim((low, high))
    if mean_line:
        plt.axvline(mean)
    if xlabel:
        plt.xlabel(xlabel)
    if ylabel:
        plt.ylabel(ylabel)
def covariance_ellipse(P, deviations=1):
    """Return the ellipse parameters for a 2-D covariance matrix.

    Parameters
    ----------
    P : nd.array shape (2,2)
        covariance matrix
    deviations : int (optional, default = 1)
        number of standard deviations spanned by the ellipse radii

    Returns
    -------
    (angle_radians, width_radius, height_radius)
    """
    U, singular_values, _ = linalg.svd(P)
    angle = math.atan2(U[1, 0], U[0, 0])
    half_width = deviations * math.sqrt(singular_values[0])
    half_height = deviations * math.sqrt(singular_values[1])
    # SVD returns singular values in descending order, so this always holds.
    assert half_width >= half_height
    return (angle, half_width, half_height)
def is_inside_ellipse(x, y, ex, ey, orientation, width, height):
    """Return True if the point (x, y) lies inside (or on) the ellipse.

    The ellipse is centred at (ex, ey), rotated by ``orientation`` radians,
    with radii ``width`` and ``height``. The point is translated into the
    ellipse frame and rotated by -orientation so the axes align.

    BUG FIX: the original live return ignored ex/ey entirely (the centre
    offset was only applied in unreachable dead code after the return, which
    additionally rotated in the wrong direction). Callers passing ex=ey=0
    see identical behavior.
    """
    co = np.cos(orientation)
    so = np.sin(orientation)
    dx = x - ex
    dy = y - ey
    # rotate (dx, dy) by -orientation into the axis-aligned ellipse frame
    xx = dx * co + dy * so
    yy = dy * co - dx * so
    return (xx / width)**2 + (yy / height)**2 <= 1.
def plot_covariance_ellipse(mean, cov=None, variance = 1.0,
                            ellipse=None, title=None, axis_equal=True,
                            facecolor='none', edgecolor='#004080',
                            alpha=1.0, xlim=None, ylim=None):
    """ plots the covariance ellipse where
    mean is a (x,y) tuple for the mean of the covariance (center of ellipse)
    cov is a 2x2 covariance matrix.
    variance is the normal sigma^2 that we want to plot. If list-like,
    ellipses for all ellipses will be ploted. E.g. [1,2] will plot the
    sigma^2 = 1 and sigma^2 = 2 ellipses.
    ellipse is a (angle,width,height) tuple containing the angle in radians,
    and width and height radii.
    You may provide either cov or ellipse, but not both.
    plt.show() is not called, allowing you to plot multiple things on the
    same figure.
    """
    # Exactly one of cov / ellipse must be supplied.
    assert cov is None or ellipse is None
    assert not (cov is None and ellipse is None)
    if cov is not None:
        ellipse = covariance_ellipse(cov)
    if axis_equal:
        plt.axis('equal')
    if title is not None:
        plt.title (title)
    # Normalize the scalar case so a single loop handles both forms.
    if np.isscalar(variance):
        variance = [variance]
    ax = plt.gca()
    angle = np.degrees(ellipse[0])
    # covariance_ellipse returns radii; matplotlib's Ellipse wants diameters.
    width = ellipse[1] * 2.
    height = ellipse[2] * 2.
    for var in variance:
        # Radii scale with the standard deviation (sqrt of requested sigma^2).
        sd = np.sqrt(var)
        e = Ellipse(xy=mean, width=sd*width, height=sd*height, angle=angle,
                    facecolor=facecolor,
                    edgecolor=edgecolor,
                    alpha=alpha,
                    lw=2)
        ax.add_patch(e)
    plt.scatter(mean[0], mean[1], marker='+') # mark the center
    if xlim is not None:
        ax.set_xlim(xlim)
    if ylim is not None:
        ax.set_ylim(ylim)
def _to_cov(x,n):
""" If x is a scalar, returns a covariance matrix generated from it
as the identity matrix multiplied by x. The dimension will be nxn.
If x is already a numpy array then it is returned unchanged.
"""
try:
x.shape
if type(x) != np.ndarray:
x = np.asarray(x)[0]
return x
except:
return np.eye(n) * x
def do_plot_test():
    """Monte-Carlo sanity check of the ellipse helpers.

    Draws 5000 samples from a 2-D Gaussian, colours them by whether they fall
    inside the 2-sigma covariance ellipse, prints the ellipse parameters and
    the inside fraction, and overlays the ellipse on a scatter plot.
    """
    from numpy.random import multivariate_normal
    cov = np.array([[32, 15], [15., 40.]])
    xs, ys = multivariate_normal(mean=(0, 0), cov=cov, size=5000).T
    deviations = 2
    angle, width, height = covariance_ellipse(cov, deviations)
    print (np.degrees(angle), width, height)
    inside = 0
    colors = []
    for px, py in zip(xs, ys):
        if is_inside_ellipse(px, py, 0, 0, angle, width, height):
            colors.append('b')
            inside += 1
        else:
            colors.append('r')
    plt.scatter(xs, ys, alpha=0.2, c=colors)
    plt.axis('equal')
    plot_covariance_ellipse(mean=(0., 0.),
                            cov=cov,
                            variance=deviations * deviations,
                            facecolor='none')
    print (inside / len(xs))
from numpy.linalg import inv
from numpy import asarray, dot
def multivariate_multiply(m1, c1, m2, c2):
    """Multiply the multivariate Gaussians N(m1, c1) and N(m2, c2).

    Returns the (mean, covariance) of the product density using the standard
    information-weighted combination.
    """
    cov1 = asarray(c1)
    cov2 = asarray(c2)
    mean1 = asarray(m1)
    mean2 = asarray(m2)
    sum_inverse = inv(cov1 + cov2)
    combined_cov = dot(cov1, sum_inverse).dot(cov2)
    combined_mean = (dot(cov2, sum_inverse).dot(mean1) +
                     dot(cov1, sum_inverse).dot(mean2))
    return combined_mean, combined_cov
def norm_cdf (x_range, mu, var=1, std=None):
    """Probability mass of a Gaussian within a range of values.

    Parameters
    ----------
    x_range : (float, float)
        endpoints of the interval (either order)
    mu : float
        mean of the Gaussian
    var : float, optional
        variance of the Gaussian; ignored if std is given
    std : float, optional
        standard deviation of the Gaussian; overrides var

    Returns
    -------
    probability : float
        probability that the Gaussian lies within x_range, e.g. .1 for 10%.
    """
    if std is None:
        std = math.sqrt(var)
    lo, hi = x_range
    # abs() makes the result independent of the endpoint order.
    return abs(norm.cdf(lo, loc=mu, scale=std) - norm.cdf(hi, loc=mu, scale=std))
def test_norm_cdf():
    """Validate norm_cdf against the 68-95-99.7 rule."""
    mu = 5
    std = 3
    var = std * std
    # one sigma: both endpoint orders and both var/std call styles
    assert abs(norm_cdf((mu - std, mu + std), mu, var) - .6827) < .0001
    assert abs(norm_cdf((mu + std, mu - std), mu, std=std) - .6827) < .0001
    # half of the one-sigma interval
    assert abs(norm_cdf((mu + std, mu), mu, var) - .6827 / 2) < .0001
    # two and three sigma
    assert abs(norm_cdf((mu - 2 * std, mu + 2 * std), mu, var) - .9545) < .0001
    assert abs(norm_cdf((mu - 3 * std, mu + 3 * std), mu, var) - .9973) < .0001
if __name__ == '__main__':
    # Ad-hoc smoke tests; also produces a demonstration plot at the end.
    test_norm_cdf ()
    do_plot_test()
    #test_gaussian()
    # test conversion of scalar to covariance matrix
    x = multivariate_gaussian(np.array([1,1]), np.array([3,4]), np.eye(2)*1.4)
    x2 = multivariate_gaussian(np.array([1,1]), np.array([3,4]), 1.4)
    assert x == x2
    # test univarate case
    rv = norm(loc = 1., scale = np.sqrt(2.3))
    x2 = multivariate_gaussian(1.2, 1., 2.3)
    x3 = gaussian(1.2, 1., 2.3)
    assert rv.pdf(1.2) == x2
    assert abs(x2- x3) < 0.00000001
    cov = np.array([[1.0, 1.0],
                    [1.0, 1.1]])
    plt.figure()
    # NOTE(review): P is assigned but unused; the plot below uses `cov`.
    P = np.array([[2,0],[0,2]])
    plot_covariance_ellipse((2,7), cov=cov, variance=[1,2], facecolor='g', title='my title', alpha=.2)
    plt.show()
    print("all tests passed")
|
maxiee/MyCodes
|
KalmanAndBesianFiltersInPython/Chapter6_multivariate_kalman_filter/utils/stats.py
|
Python
|
gpl-3.0
| 11,807
|
[
"Gaussian"
] |
85866ee41b20d420258acc2ed3ef78fdab9bbed8094827c0757161a6de52c2cb
|
import math
import os
import sys
import httplib2
import requests
from apiclient.discovery import build
from apiclient.errors import HttpError
from oauth2client.client import flow_from_clientsecrets
from oauth2client.file import Storage
from oauth2client.tools import argparser, run_flow
from bs4 import BeautifulSoup
BASE_URL = "https://www.youtube.com/channel/"
CLIENT_SECRETS_FILE = "client_secrets.json"
YOUTUBE_READ_WRITE_SCOPE = "https://www.googleapis.com/auth/youtube"
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
MISSING_CLIENT_SECRETS_MESSAGE = """
WARNING: Please configure OAuth 2.0
To make this sample run you will need to populate the client_secrets.json file
found at:
{}
with information from the Developers Console
https://console.developers.google.com/
For more information about the client_secrets.json file format, please visit:
https://developers.google.com/api-client-library/python/guide/aaa_client_secrets
""".format(os.path.abspath(os.path.join(os.path.dirname(__file__), CLIENT_SECRETS_FILE)))
def retrieve_youtube_subscriptions():
    """Fetch every page of the authenticated user's subscription list.

    Pages through the YouTube Data API, printing progress per page, and
    returns the parsed channel rows sorted alphabetically. On an HttpError
    the message is printed and None is returned (callers should treat that
    as fatal).
    """
    # BUG FIX: accumulate in a local list instead of reading/mutating the
    # module-level ``all_channels``, which only exists when run as a script
    # (importing this module and calling the function raised NameError).
    channels_found = []
    try:
        next_page_token = ''
        subs_iteration = 0
        while True:
            subscriptions_response = youtube_subscriptions(next_page_token)
            subs_iteration += 1
            total_results = subscriptions_response['pageInfo']['totalResults']
            results_per_page = subscriptions_response['pageInfo']['resultsPerPage']
            total_iterations = math.ceil(total_results / results_per_page)
            # BUG FIX: the trailing ``0`` was previously passed as a fourth,
            # ignored .format() argument — a misplaced round(..., 0) paren.
            # round() with no digits prints the same integer value.
            print('Subscriptions iteration: {} of {} ({}%)'.format(
                subs_iteration,
                total_iterations,
                round(subs_iteration / total_iterations * 100)))
            next_page_token = get_next_page(subscriptions_response)
            channels = parse_youtube_subscriptions(subscriptions_response)
            channels_found.extend(channels)
            if not next_page_token:
                break
        channels_found.sort()
        return channels_found
    except HttpError as err:
        print("An HTTP error {} occurred:\n{}".format(err.resp.status, err.content))
def get_authenticated_service():
    """Return an authorized YouTube Data API client.

    OAuth2 credentials are cached in '<script>-oauth2.json' alongside the
    script; if missing or invalid, an interactive OAuth flow is run using
    client_secrets.json (see MISSING_CLIENT_SECRETS_MESSAGE).
    """
    storage = Storage("{}-oauth2.json".format(sys.argv[0]))
    credentials = storage.get()
    if credentials is None or credentials.invalid:
        flow = flow_from_clientsecrets(CLIENT_SECRETS_FILE,
                                       scope=YOUTUBE_READ_WRITE_SCOPE,
                                       message=MISSING_CLIENT_SECRETS_MESSAGE)
        args = argparser.parse_args()
        credentials = run_flow(flow, storage, args)
    return build(YOUTUBE_API_SERVICE_NAME,
                 YOUTUBE_API_VERSION,
                 http=credentials.authorize(httplib2.Http()))
def youtube_subscriptions(next_page_token):
    """Fetch one page (up to 50 items) of the user's subscriptions.

    Uses the module-level ``youtube`` client. Pass '' for the first page or
    a nextPageToken from a previous response for subsequent pages.
    """
    subscriptions_response = youtube.subscriptions().list(
        part='snippet,contentDetails',
        mine=True,
        maxResults=50,
        order='alphabetical',
        pageToken=next_page_token).execute()
    return subscriptions_response
def get_next_page(subscriptions_response):
    """Return the response's nextPageToken, or '' on the last page."""
    return subscriptions_response.get('nextPageToken', '')
def parse_youtube_subscriptions(subscriptions_response):
    """Convert one subscriptions API page into HTML <tr> row strings.

    For each subscribed channel, resolve its uploads playlist and take the
    publish date of the most recent upload. Rows start with that date so a
    plain sort() on the collected strings orders channels by last upload.
    Uses the module-level ``youtube`` client.
    """
    channels = []
    for subscriptions_result in subscriptions_response.get("items", []):
        if subscriptions_result["snippet"]["resourceId"]["kind"] == "youtube#channel":
            title = subscriptions_result["snippet"]["title"]
            channel_id = subscriptions_result["snippet"]["resourceId"]["channelId"]
            # Look up the channel's "uploads" playlist id.
            results = youtube.channels().list(
                part="contentDetails",
                id=channel_id
            ).execute()
            if(len(results["items"]) > 0):
                uploads_playlist_id = results["items"][0]["contentDetails"]["relatedPlaylists"]["uploads"]
                # First playlist item is the most recent upload.
                results = youtube.playlistItems().list(
                    part="snippet",
                    playlistId=uploads_playlist_id
                ).execute()
                if(len(results["items"]) > 0):
                    last_upload_date = results["items"][0]["snippet"]["publishedAt"]
                    channels.append("<tr><td>{}</td><td><a href=\"{}{}\">{}</a></td><td>{}</td></tr>".format(last_upload_date, BASE_URL, channel_id, title, scrape_about_page_links(channel_id)))
    return channels
# Scrape about page since this doesn't appear to be in the youtube api anywhere
def scrape_about_page_links(channel_id):
    """Scrape the external links advertised on a channel's About page.

    Returns the links concatenated as '<a ...>title</a><br />' fragments,
    de-duplicated while preserving page order.
    """
    page = requests.get("{}{}/about".format(BASE_URL, channel_id))
    soup = BeautifulSoup(page.content, 'html.parser')
    # Locate the anchor tags in the channel banner and channel description
    links = soup.find_all(class_="channel-links-item")
    formatted_links = []
    for link in links:
        a = link.select("a")[0]
        title = a["title"]
        href = a["href"]
        formatted_links.append("<a href=\"{}{}\">{}</a><br />".format(BASE_URL, href, title))
    # BUG FIX: ''.join(set(...)) produced a nondeterministic link ordering
    # between runs; dict.fromkeys() de-duplicates but keeps insertion order.
    return ''.join(dict.fromkeys(formatted_links))
if __name__ == "__main__":
    all_channels = []
    youtube = get_authenticated_service()
    # Emit a self-contained HTML report to stdout: progress text in a <pre>
    # block, then one table row per subscribed channel.
    print('<html>')
    print('<head>')
    print('<title>Dormant YouTube Subscriptions</title>')
    print('<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css">')
    print('<script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/js/bootstrap.min.js"></script>')
    print('</head>')
    print('<body>')
    print('<pre>')
    print('Retrieving subscriptions:')
    all_channels = retrieve_youtube_subscriptions()
    print('Retrieval complete')
    print('Subscriptions found: {}'.format(len(all_channels)))
    print('</pre>')
    print('<table class="table">')
    print('<thead><tr><th>Last Upload Date</th><th>Channel</th><th>Links</th></tr></thead>')
    print('<tbody>')
    # Rows begin with the last-upload date, so the sorted list prints oldest first.
    [print(channel) for channel in all_channels]
    print('</tbody>')
    print('</table>')
    print('</body>')
    print('</html>')
|
haxorjim/dormant-youtube-subscriptions
|
youtube-tools.py
|
Python
|
mit
| 6,416
|
[
"VisIt"
] |
56f417ee5c3ddb38dd493b2f1279b9adbf75fa6ec27881f41d8adac77c684144
|
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Mpiblast(AutotoolsPackage):
    """mpiBLAST is a freely available, open-source, parallel implementation of
    NCBI BLAST"""
    homepage = "http://www.mpiblast.org/"
    url = "http://www.mpiblast.org/downloads/files/mpiBLAST-1.6.0.tgz"
    # 32-hex (md5) checksum of the 1.6.0 tarball
    version('1.6.0', '707105ccd56825db776b50bfd81cecd5')
    # Apply the patch file bundled with this package directory.
    patch('mpiBLAST-1.6.0-patch-110806')
    depends_on('mpi')
    def configure_args(self):
        # Point the build at the MPI installation selected by the spec.
        args = ['--with-mpi=%s' % self.spec['mpi'].prefix]
        return args
    def build(self, spec, prefix):
        # The 'ncbi' target must be built before the default target.
        make('ncbi')
        make()
    def setup_environment(self, spack_env, run_env):
        # Pre-seed configure's compiler detection with the MPI wrappers.
        spack_env.set('ac_cv_path_CC', self.spec['mpi'].mpicc)
        spack_env.set('ac_cv_path_CXX', self.spec['mpi'].mpicxx)
|
EmreAtes/spack
|
var/spack/repos/builtin/packages/mpiblast/package.py
|
Python
|
lgpl-2.1
| 1,997
|
[
"BLAST"
] |
ccadbbea827093934d5384cfeee61d08cdb3a7813d9473c1c8936272d616f5d2
|
#
# @file TestSBMLConvert.py
# @brief SBMLConvert unit tests
#
# @author Akiya Jouraku (Python conversion)
# @author Ben Bornstein
#
# $Id$
# $HeadURL$
#
# ====== WARNING ===== WARNING ===== WARNING ===== WARNING ===== WARNING ======
#
# DO NOT EDIT THIS FILE.
#
# This file was generated automatically by converting the file located at
# src/sbml/test/TestSBMLConvert.c
# using the conversion program dev/utilities/translateTests/translateTests.pl.
# Any changes made here will be lost the next time the file is regenerated.
#
# -----------------------------------------------------------------------------
# This file is part of libSBML. Please visit http://sbml.org for more
# information about SBML, and the latest version of libSBML.
#
# Copyright 2005-2010 California Institute of Technology.
# Copyright 2002-2005 California Institute of Technology and
# Japan Science and Technology Corporation.
#
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation. A copy of the license agreement is provided
# in the file named "LICENSE.txt" included with this software distribution
# and also available online as http://sbml.org/software/libsbml/license.html
# -----------------------------------------------------------------------------
import sys
import unittest
import libsbml
class TestSBMLConvert(unittest.TestCase):
  """Auto-generated regression tests for libSBML level/version conversion.

  Each test builds a small SBMLDocument, calls setLevelAndVersion(), and
  checks the success flag and/or the converted content. This file is
  generated from src/sbml/test/TestSBMLConvert.c (see the header comment);
  do not edit it by hand. The boolean third argument to setLevelAndVersion
  is presumably the 'strict' conversion flag — TODO confirm against the
  libSBML API documentation.
  """
  def test_SBMLConvert_addModifiersToReaction(self):
    d = libsbml.SBMLDocument(1,2)
    m = d.createModel()
    r = m.createReaction()
    kl = r.createKineticLaw()
    kl.setFormula( "k1*S1*S2*S3*S4*S5")
    s1 = m.createSpecies()
    s1.setId( "S1" )
    s2 = m.createSpecies()
    s2.setId( "S2")
    s3 = m.createSpecies()
    s3.setId( "S3")
    s4 = m.createSpecies()
    s4.setId( "S4")
    s5 = m.createSpecies()
    s5.setId( "S5")
    sr1 = r.createReactant()
    sr2 = r.createReactant()
    sr3 = r.createProduct()
    sr1.setSpecies( "S1")
    sr2.setSpecies( "S2")
    sr3.setSpecies( "S5")
    self.assert_( r.getNumModifiers() == 0 )
    self.assert_( d.setLevelAndVersion(2,1,False) == True )
    self.assert_( d.getLevel() == 2 )
    self.assert_( d.getVersion() == 1 )
    self.assert_( m.getReaction(0).getNumModifiers() == 2 )
    ssr1 = m.getReaction(0).getModifier(0)
    ssr2 = m.getReaction(0).getModifier(1)
    self.assert_(( "S3" == ssr1.getSpecies() ))
    self.assert_(( "S4" == ssr2.getSpecies() ))
    # Translated C-style cleanup: releases the document reference.
    _dummyList = [ d ]; _dummyList[:] = []; del _dummyList
    pass
  def test_SBMLConvert_convertFromL3(self):
    d = libsbml.SBMLDocument(3,1)
    m = d.createModel()
    sid = "C";
    c = m.createCompartment()
    c.setId(sid)
    c.setSize(1.2)
    c.setConstant(True)
    c.setSpatialDimensions(3.4)
    self.assert_( d.setLevelAndVersion(1,1,True) == False )
    self.assert_( d.setLevelAndVersion(1,2,True) == False )
    self.assert_( d.setLevelAndVersion(2,1,True) == False )
    self.assert_( d.setLevelAndVersion(2,2,True) == False )
    self.assert_( d.setLevelAndVersion(2,3,True) == False )
    self.assert_( d.setLevelAndVersion(2,4,True) == False )
    self.assert_( d.setLevelAndVersion(3,1,True) == True )
    pass
  def test_SBMLConvert_convertFromL3_conversionFactor(self):
    d = libsbml.SBMLDocument(3,1)
    m = d.createModel()
    sid = "P";
    m.setConversionFactor(sid)
    c = m.createParameter()
    c.setId(sid)
    c.setConstant(True)
    self.assert_( d.setLevelAndVersion(1,1,True) == False )
    self.assert_( d.setLevelAndVersion(1,2,True) == False )
    self.assert_( d.setLevelAndVersion(2,1,True) == False )
    self.assert_( d.setLevelAndVersion(2,2,True) == False )
    self.assert_( d.setLevelAndVersion(2,3,True) == False )
    self.assert_( d.setLevelAndVersion(2,4,True) == False )
    self.assert_( d.setLevelAndVersion(3,1,True) == True )
    pass
  def test_SBMLConvert_convertFromL3_initialValue(self):
    d = libsbml.SBMLDocument(3,1)
    m = d.createModel()
    e = m.createEvent()
    t = e.createTrigger()
    t.setInitialValue(0)
    self.assert_( d.setLevelAndVersion(1,1,False) == False )
    self.assert_( d.setLevelAndVersion(1,2,False) == False )
    self.assert_( d.setLevelAndVersion(2,1,False) == False )
    self.assert_( d.setLevelAndVersion(2,2,False) == False )
    self.assert_( d.setLevelAndVersion(2,3,False) == False )
    self.assert_( d.setLevelAndVersion(2,4,False) == False )
    self.assert_( d.setLevelAndVersion(3,1,False) == True )
    pass
  def test_SBMLConvert_convertFromL3_modelUnits(self):
    d = libsbml.SBMLDocument(3,1)
    m = d.createModel()
    m.setVolumeUnits( "litre")
    self.assert_( m.getNumUnitDefinitions() == 0 )
    self.assert_( d.setLevelAndVersion(1,2,False) == True )
    m = d.getModel()
    self.assert_( m.getNumUnitDefinitions() == 1 )
    ud = m.getUnitDefinition(0)
    self.assert_(( "volume" == ud.getId() ))
    self.assert_( ud.getNumUnits() == 1 )
    self.assert_( ud.getUnit(0).getKind() == libsbml.UNIT_KIND_LITRE )
    pass
  def test_SBMLConvert_convertFromL3_persistent(self):
    d = libsbml.SBMLDocument(3,1)
    m = d.createModel()
    e = m.createEvent()
    t = e.createTrigger()
    t.setPersistent(0)
    self.assert_( d.setLevelAndVersion(1,1,False) == False )
    self.assert_( d.setLevelAndVersion(1,2,False) == False )
    self.assert_( d.setLevelAndVersion(2,1,False) == False )
    self.assert_( d.setLevelAndVersion(2,2,False) == False )
    self.assert_( d.setLevelAndVersion(2,3,False) == False )
    self.assert_( d.setLevelAndVersion(2,4,False) == False )
    self.assert_( d.setLevelAndVersion(3,1,False) == True )
    pass
  def test_SBMLConvert_convertFromL3_priority(self):
    d = libsbml.SBMLDocument(3,1)
    m = d.createModel()
    e = m.createEvent()
    p = e.createPriority()
    self.assert_( d.setLevelAndVersion(1,1,False) == False )
    self.assert_( d.setLevelAndVersion(1,2,False) == False )
    self.assert_( d.setLevelAndVersion(2,1,False) == True )
    self.assert_( d.setLevelAndVersion(2,2,False) == True )
    self.assert_( d.setLevelAndVersion(2,3,False) == True )
    self.assert_( d.setLevelAndVersion(2,4,False) == True )
    self.assert_( d.setLevelAndVersion(3,1,False) == True )
    pass
  def test_SBMLConvert_convertToL1_SBMLDocument(self):
    d = libsbml.SBMLDocument(2,1)
    self.assert_( d.setLevelAndVersion(1,2,False) == True )
    self.assert_( d.getLevel() == 1 )
    self.assert_( d.getVersion() == 2 )
    _dummyList = [ d ]; _dummyList[:] = []; del _dummyList
    pass
  def test_SBMLConvert_convertToL1_Species_Amount(self):
    d = libsbml.SBMLDocument(2,1)
    m = d.createModel()
    sid = "C";
    c = libsbml.Compartment(2,4)
    s = libsbml.Species(2,4)
    c.setId(sid)
    m.addCompartment(c)
    s.setCompartment(sid)
    s.setInitialAmount(2.34)
    m.addSpecies(s)
    self.assert_( d.setLevelAndVersion(1,2,True) == True )
    self.assert_( s.getInitialAmount() == 2.34 )
    _dummyList = [ d ]; _dummyList[:] = []; del _dummyList
    pass
  def test_SBMLConvert_convertToL1_Species_Concentration(self):
    d = libsbml.SBMLDocument(2,1)
    m = d.createModel()
    sid = "C";
    c = libsbml.Compartment(2,1)
    s = libsbml.Species(2,1)
    c.setId(sid)
    c.setSize(1.2)
    m.addCompartment(c)
    s.setId( "s" )
    s.setCompartment(sid)
    s.setInitialConcentration(2.34)
    m.addSpecies(s)
    self.assert_( d.setLevelAndVersion(1,2,True) == True )
    s1 = m.getSpecies(0)
    self.assert_( s1 != None )
    self.assert_(( "C" == s1.getCompartment() ))
    self.assert_( m.getCompartment( "C").getSize() == 1.2 )
    self.assert_( s1.getInitialConcentration() == 2.34 )
    self.assert_( s1.isSetInitialConcentration() == True )
    _dummyList = [ d ]; _dummyList[:] = []; del _dummyList
    pass
  def test_SBMLConvert_convertToL2_SBMLDocument(self):
    d = libsbml.SBMLDocument(1,2)
    self.assert_( d.setLevelAndVersion(2,1,False) == True )
    self.assert_( d.getLevel() == 2 )
    self.assert_( d.getVersion() == 1 )
    self.assert_( d.setLevelAndVersion(2,2,False) == True )
    self.assert_( d.getLevel() == 2 )
    self.assert_( d.getVersion() == 2 )
    self.assert_( d.setLevelAndVersion(2,3,False) == True )
    self.assert_( d.getLevel() == 2 )
    self.assert_( d.getVersion() == 3 )
    _dummyList = [ d ]; _dummyList[:] = []; del _dummyList
    pass
  def test_SBMLConvert_convertToL2v4_DuplicateAnnotations_doc(self):
    d = libsbml.SBMLDocument(2,1)
    d.createModel()
    annotation = "<rdf/>\n<rdf/>";
    i = (d).setAnnotation(annotation)
    self.assert_( d.getLevel() == 2 )
    self.assert_( d.getVersion() == 1 )
    self.assert_( (d).getAnnotation().getNumChildren() == 2 )
    self.assert_( d.setLevelAndVersion(2,4,True) == True )
    self.assert_( d.getLevel() == 2 )
    self.assert_( d.getVersion() == 4 )
    self.assert_( (d).getAnnotation().getNumChildren() == 1 )
    _dummyList = [ d ]; _dummyList[:] = []; del _dummyList
    pass
  def test_SBMLConvert_convertToL2v4_DuplicateAnnotations_model(self):
    d = libsbml.SBMLDocument(2,1)
    m = d.createModel()
    annotation = "<rdf/>\n<rdf/>";
    i = (m).setAnnotation(annotation)
    self.assert_( d.getLevel() == 2 )
    self.assert_( d.getVersion() == 1 )
    self.assert_( (m).getAnnotation().getNumChildren() == 2 )
    self.assert_( d.setLevelAndVersion(2,4,True) == True )
    self.assert_( d.getLevel() == 2 )
    self.assert_( d.getVersion() == 4 )
    m = d.getModel()
    self.assert_( (m).getAnnotation().getNumChildren() == 1 )
    _dummyList = [ d ]; _dummyList[:] = []; del _dummyList
    pass
  def test_SBMLConvert_convertToL3_compartment(self):
    d = libsbml.SBMLDocument(2,2)
    m = d.createModel()
    sid = "C";
    c = m.createCompartment()
    c.setId(sid)
    self.assert_( d.setLevelAndVersion(3,1,False) == True )
    c1 = m.getCompartment(0)
    self.assert_( c1.hasRequiredAttributes() == 1 )
    _dummyList = [ d ]; _dummyList[:] = []; del _dummyList
    pass
  def test_SBMLConvert_convertToL3_defaultUnits(self):
    d = libsbml.SBMLDocument(1,2)
    m = d.createModel()
    sid = "C";
    c = m.createCompartment()
    c.setId(sid)
    c.setSize(1.2)
    c.setUnits( "volume")
    self.assert_( m.getNumUnitDefinitions() == 0 )
    self.assert_( d.setLevelAndVersion(3,1,True) == True )
    self.assert_( m.getNumUnitDefinitions() == 2 )
    ud = m.getUnitDefinition(0)
    self.assert_( ud != None )
    self.assert_(( "volume" == ud.getId() ))
    self.assert_( ud.getNumUnits() == 1 )
    u = ud.getUnit(0)
    self.assert_( u.getKind() == libsbml.UNIT_KIND_LITRE )
    self.assert_( u.getExponent() == 1 )
    self.assert_( u.getMultiplier() == 1 )
    self.assert_( u.getScale() == 0 )
    ud = m.getUnitDefinition(1)
    self.assert_( ud != None )
    self.assert_(( "time" == ud.getId() ))
    self.assert_( ud.getNumUnits() == 1 )
    u = ud.getUnit(0)
    self.assert_( u.getKind() == libsbml.UNIT_KIND_SECOND )
    self.assert_( u.getExponent() == 1 )
    self.assert_( u.getMultiplier() == 1 )
    self.assert_( u.getScale() == 0 )
    self.assert_(( "time" == m.getTimeUnits() ))
    _dummyList = [ d ]; _dummyList[:] = []; del _dummyList
    pass
  def test_SBMLConvert_convertToL3_event(self):
    d = libsbml.SBMLDocument(2,2)
    m = d.createModel()
    e = m.createEvent()
    self.assert_( d.setLevelAndVersion(3,1,False) == True )
    e1 = m.getEvent(0)
    self.assert_( e1.hasRequiredAttributes() == 1 )
    _dummyList = [ d ]; _dummyList[:] = []; del _dummyList
    pass
  def test_SBMLConvert_convertToL3_localParameters(self):
    d = libsbml.SBMLDocument(1,2)
    m = d.createModel()
    c = m.createCompartment()
    c.setId( "c" )
    s = m.createSpecies()
    s.setId( "s")
    s.setCompartment( "c")
    r = m.createReaction()
    sr = r.createReactant()
    sr.setSpecies( "s")
    kl = r.createKineticLaw()
    kl.setFormula( "s*k")
    p = kl.createParameter()
    p.setId( "k")
    self.assert_( kl.getNumLocalParameters() == 0 )
    self.assert_( d.setLevelAndVersion(3,1,False) == True )
    m = d.getModel()
    r = m.getReaction(0)
    kl = r.getKineticLaw()
    self.assert_( kl.getNumLocalParameters() == 1 )
    lp = kl.getLocalParameter(0)
    _dummyList = [ d ]; _dummyList[:] = []; del _dummyList
    pass
  def test_SBMLConvert_convertToL3_parameter(self):
    d = libsbml.SBMLDocument(2,2)
    m = d.createModel()
    sid = "C";
    p = m.createParameter()
    p.setId(sid)
    self.assert_( d.setLevelAndVersion(3,1,False) == True )
    p1 = m.getParameter(0)
    self.assert_( p1.hasRequiredAttributes() == 1 )
    _dummyList = [ d ]; _dummyList[:] = []; del _dummyList
    pass
  def test_SBMLConvert_convertToL3_product(self):
    d = libsbml.SBMLDocument(2,2)
    m = d.createModel()
    r = m.createReaction()
    sr = r.createProduct()
    sr.setSpecies( "s" )
    self.assert_( d.setLevelAndVersion(3,1,False) == True )
    sr1 = m.getReaction(0).getProduct(0)
    self.assert_( sr1.hasRequiredAttributes() == 1 )
    _dummyList = [ d ]; _dummyList[:] = []; del _dummyList
    pass
  def test_SBMLConvert_convertToL3_reactant(self):
    d = libsbml.SBMLDocument(2,2)
    m = d.createModel()
    r = m.createReaction()
    sr = r.createReactant()
    sr.setSpecies( "s" )
    self.assert_( d.setLevelAndVersion(3,1,False) == True )
    sr1 = m.getReaction(0).getReactant(0)
    self.assert_( sr1.hasRequiredAttributes() == 1 )
    _dummyList = [ d ]; _dummyList[:] = []; del _dummyList
    pass
  def test_SBMLConvert_convertToL3_reaction(self):
    d = libsbml.SBMLDocument(2,2)
    m = d.createModel()
    sid = "C";
    r = m.createReaction()
    r.setId(sid)
    self.assert_( d.setLevelAndVersion(3,1,False) == True )
    r1 = m.getReaction(0)
    self.assert_( r1.hasRequiredAttributes() == 1 )
    _dummyList = [ d ]; _dummyList[:] = []; del _dummyList
    pass
  def test_SBMLConvert_convertToL3_species(self):
    d = libsbml.SBMLDocument(2,2)
    m = d.createModel()
    sid = "C";
    s = m.createSpecies()
    s.setId(sid)
    s.setCompartment( "comp")
    self.assert_( d.setLevelAndVersion(3,1,False) == True )
    s1 = m.getSpecies(0)
    self.assert_( s1.hasRequiredAttributes() == 1 )
    _dummyList = [ d ]; _dummyList[:] = []; del _dummyList
    pass
  def test_SBMLConvert_convertToL3_stoichiometryMath(self):
    d = libsbml.SBMLDocument(2,1)
    m = d.createModel()
    c = m.createCompartment()
    c.setId( "c" )
    s = m.createSpecies()
    s.setId( "s")
    s.setCompartment( "c")
    r = m.createReaction()
    sr = r.createReactant()
    sr.setSpecies( "s")
    sm = sr.createStoichiometryMath()
    ast = libsbml.parseFormula("c*2")
    sm.setMath(ast)
    self.assert_( m.getNumRules() == 0 )
    self.assert_( sr.isSetId() == False )
    self.assert_( d.setLevelAndVersion(3,1,False) == True )
    m = d.getModel()
    r = m.getReaction(0)
    sr = r.getReactant(0)
    # L3 has no StoichiometryMath: the converter must emit an assignment
    # rule targeting a newly generated species-reference id.
    self.assert_( m.getNumRules() == 1 )
    self.assert_( sr.isSetId() == True )
    rule = m.getRule(0)
    self.assert_( sr.getId() == rule.getVariable() )
    _dummyList = [ d ]; _dummyList[:] = []; del _dummyList
    pass
  def test_SBMLConvert_convertToL3_trigger(self):
    d = libsbml.SBMLDocument(2,2)
    m = d.createModel()
    e = m.createEvent()
    t = e.createTrigger()
    self.assert_( d.setLevelAndVersion(3,1,False) == True )
    t1 = m.getEvent(0).getTrigger()
    self.assert_( t1.hasRequiredAttributes() == 1 )
    _dummyList = [ d ]; _dummyList[:] = []; del _dummyList
    pass
  def test_SBMLConvert_convertToL3_unit(self):
    d = libsbml.SBMLDocument(2,2)
    m = d.createModel()
    sid = "C";
    ud = m.createUnitDefinition()
    ud.setId(sid)
    u = ud.createUnit()
    u.setKind(libsbml.UNIT_KIND_MOLE)
    self.assert_( d.setLevelAndVersion(3,1,False) == True )
    u1 = m.getUnitDefinition(0).getUnit(0)
    self.assert_( u1.hasRequiredAttributes() == 1 )
    _dummyList = [ d ]; _dummyList[:] = []; del _dummyList
    pass
  def test_SBMLConvert_invalidLevelVersion(self):
    d = libsbml.SBMLDocument(2,1)
    m = d.createModel()
    sid = "C";
    c = m.createCompartment()
    c.setId(sid)
    c.setSize(1.2)
    c.setUnits( "volume")
    # Nonexistent level/version combinations must be rejected.
    self.assert_( d.setLevelAndVersion(1,3,True) == False )
    self.assert_( d.setLevelAndVersion(2,5,True) == False )
    self.assert_( d.setLevelAndVersion(3,2,True) == False )
    self.assert_( d.setLevelAndVersion(4,1,True) == False )
    pass
def suite():
  """Build and return the test suite for TestSBMLConvert."""
  suite = unittest.TestSuite()
  # unittest.makeSuite was deprecated in Python 3.11 and removed in 3.13;
  # TestLoader.loadTestsFromTestCase is the supported equivalent.
  suite.addTest(unittest.TestLoader().loadTestsFromTestCase(TestSBMLConvert))
  return suite
if __name__ == "__main__":
  # Run the suite and report success/failure through the process exit code.
  outcome = unittest.TextTestRunner(verbosity=1).run(suite())
  sys.exit(0 if outcome.wasSuccessful() else 1)
|
alexholehouse/SBMLIntegrator
|
libsbml-5.0.0/src/bindings/python/test/sbml/TestSBMLConvert.py
|
Python
|
gpl-3.0
| 16,780
|
[
"VisIt"
] |
c6e60c87d1658948c638efb52b85c1b6515ef3dda3d549ee770ef5b0469e0beb
|
from twisted.internet import defer, reactor
from nevow import livepage, loaders, tags, rend, static, entities
from nevow.livepage import js, document
testFrameNode = js.testFrameNode
contentDocument = testFrameNode.contentDocument
import os.path
resourceDirectory = os.path.dirname(__file__)
class Driver(object):
    """Steps through a livetest suite, one test per client round-trip.

    Each test is an (action, target, parameter) tuple.  The matching
    ``action_*`` method returns a list of JavaScript fragments (strings and
    nevow ``js`` expressions) that is flattened and sent to the browser; the
    embedded ``livepage.handler`` callbacks report back via ``passed`` /
    ``failed``, which in turn pull the next test from the generator.
    """
    def __init__(self, suite):
        # suite: iterable of (action, target, parameter) tuples.
        self.suite = suite
        # results: test index -> True (passed) / False (failed).
        self.results = {}
        self.state = 0
        # Generator yielding one JS payload per test; advanced by nextTest().
        self.iterator = self.drive()
        self._notifications = []
    # Class-level counters/handle; rebound per instance on first use.
    passes = 0
    failures = 0
    _handle = None
    def setHandle(self, handle):
        """Attach the livepage client handle used to push scripts."""
        self._handle = handle
    def action_post(self, action, target, parameter, callWhenDone=None):
        """POST form *target* with *parameter* fields; pass if the browser lands on the form's action URL."""
        if callWhenDone is None:
            callWhenDone = "function (){}"
        def observePosting(client, location, destination):
            if location.endswith(destination):
                self.passed()
            else:
                self.failed()
        return [
            "var targetForm = ", contentDocument[target], ";",
            "var postTarget = ", js.targetForm.action, ";",
            [(js.targetForm[key].value, ' = "', value, '";')
                for (key, value) in parameter.items()],
            "addLoadObserver(function () {",
            livepage.handler(
                observePosting,
                contentDocument.location,
                js.postTarget),
            "});",
            js.sendSubmitEvent(js.targetForm, js(callWhenDone))]
    def action_submit(self, action, target, parameter):
        """Like action_post, but also passes when the submit event fires."""
        def observeSubmission(client):
            self.passed()
        return self.action_post(action, target, parameter, self._handle.flt([
            "function() {",
            livepage.handler(observeSubmission),
            "}"], quote=False))
    def action_follow(self, action, target, parameter):
        """Follow the link with DOM id *target*; pass if the browser reaches its href."""
        def observeFollowing(client, location, destination):
            if location.endswith(destination):
                self.passed()
            else:
                self.failed()
        return [
            "var theTargetNode = ",
            contentDocument.getElementById(target), ";",
            "var theDestinationAddress = theTargetNode.href;",
            "addLoadObserver(function() {",
            livepage.handler(
                observeFollowing,
                contentDocument.location,
                js.theDestinationAddress),
            "});",
            js.setContentLocation(js.theDestinationAddress)]
    def action_click(self, action, target, parameter):
        """Dispatch a click event at *target*; pass when the click callback fires."""
        def observeClicking(client):
            self.passed()
        return [
            "var theClickObservation = function() {",
            livepage.handler(observeClicking), "};",
            js.sendClickEvent(
                target,
                js.theClickObservation)]
    def action_visit(self, action, target, parameter):
        """Navigate the test frame to *target*; pass if the final location matches."""
        ## TODO: Figure out how to detect a 404 using javascript
        def observeLoading(client, location):
            if location.endswith(target):
                self.passed()
            else:
                self.failed()
        return ["addLoadObserver(function() {",
            livepage.handler(
                observeLoading,
                contentDocument.location),
            "});",
            js.setContentLocation(target)]
    def action_assert(self, action, target, parameter):
        """Pass if the innerHTML of node *target* equals *parameter*."""
        def observeNodeContents(client, contents):
            if contents == parameter:
                self.passed()
            else:
                self.failed()
        return [livepage.handler(
            observeNodeContents,
            contentDocument.getElementById(target).innerHTML, bubble=True)]
    def action_call(self, action, target, parameter):
        """Call *target*(handle); its Deferred outcome decides pass/fail. Sends no JS."""
        target(self._handle).addCallback(
            lambda result: self.passed()
        ).addErrback(
            lambda result: self.failed())
        return ''
    def drive(self):
        """Generator: yield the JS payload for each test, then fire completion Deferreds."""
        for i, test in enumerate(self.suite):
            self.state = i
            self.action, self.target, self.parameter = test
            yield getattr(self, 'action_%s' % self.action)(*test)
        for notify in self._notifications:
            notify.callback(self.results)
    def notifyWhenTestsComplete(self):
        """Return a Deferred fired with the results dict when the suite finishes."""
        self._notifications.append(defer.Deferred())
        return self._notifications[-1]
    def nextTest(self):
        """Advance the generator and push the next test's script to the client."""
        try:
            # NOTE(review): .next() is Python 2 only; next(self.iterator)
            # would be needed on Python 3 — confirm target interpreter.
            test = self.iterator.next()
        except StopIteration:
            return
        assert self._handle is not None, "nextTest cannot be called before handle is set!"
        self._handle.sendScript(
            self._handle.flt(
                test, quote=False))
    def passed(self):
        """Record a pass, update the UI counters, and schedule the next test."""
        self.results[self.state] = True
        self.passes += 1
        self._handle.set(
            'test-passes', self.passes)
        self._handle.sendScript(
            js.passed(self.state))
        self.nextTest()
    def failed(self):
        """Record a failure, update the UI counters, and schedule the next test."""
        self.results[self.state] = False
        self.failures += 1
        self._handle.set(
            'test-failures', self.failures)
        self._handle.sendScript(
            js.failed(self.state))
        self.nextTest()
class Tester(livepage.LivePage):
    """LivePage that renders a progress table for a test suite and runs it.

    Construct with the suite (list of (action, target, parameter) tuples);
    rendering the ``table`` directive kicks off a Driver against the
    connected client.
    """
    addSlash = True
    # Static assets served alongside the page.
    child_css = static.File(os.path.join(resourceDirectory, 'livetest.css'))
    child_scripts = static.File(os.path.join(resourceDirectory, 'livetest.js'))
    child_postscripts = static.File(os.path.join(resourceDirectory, 'livetest-postscripts.js'))
    docFactory = loaders.stan(tags.html[
        tags.head[
            tags.script(src="scripts"),
            tags.link(rel="stylesheet", type="text/css", href="css")],
        tags.body[
            tags.table(id="testprogress")[
                tags.tr[
                    tags.th["Tests"], tags.th["Pass"], tags.th["Fail"]],
                tags.tr[
                    tags.td(id="test-status")["Running"],
                    tags.td(id="test-passes", _class="test-passes")[entities.nbsp],
                    tags.td(id="test-failures", _class="test-failures")[entities.nbsp]]],
            tags.table(id="testresults", render=tags.directive('table'))[
                tags.tr(pattern="item", render=tags.directive('test'))[
                    tags.td[tags.slot('action')], tags.td[tags.slot('target')], tags.td[tags.slot('parameter')]]],
            tags.iframe(id="testframe", src="asdf"),
            tags.script(src="postscripts"),
            livepage.glue]])
    def render_table(self, ctx, suite):
        """Start a Driver for *suite* on this client, then render the rows."""
        self.testId = 0
        driver = Driver(suite)
        handle = livepage.IClientHandle(ctx)
        driver.notifyWhenTestsComplete().addCallback(self.testsComplete, handle)
        driver.setHandle(handle)
        driver.nextTest()
        return rend.sequence(ctx, suite)
    def render_test(self, ctx, test):
        """Render one suite row, tagging it with a sequential test-<n> id."""
        ctx.tag(id=("test-", self.testId))
        action, target, parameter = test
        ctx.fillSlots('action', action)
        ctx.fillSlots('target', str(target))
        ctx.fillSlots('parameter', parameter)
        self.testId += 1
        return ctx.tag
    def testsComplete(self, results, handle):
        """Flip the status cell to 'Complete' when the Driver finishes."""
        handle.set('test-status', 'Complete')
def callMe(client):
    """Sample 'call' action: round-trip a handler through the client.

    Sends a script containing a livepage handler; the Deferred fires with
    'success' once the browser invokes it.
    """
    d = defer.Deferred()
    def callMePlease(client):
        d.callback('success')
    client.sendScript(client.flt(
        livepage.handler(callMePlease, bubble=True), quote=False))
    return d
class TestTests(rend.Page):
    """Self-test page: fixture child pages plus a Tester running a sample suite."""
    docFactory = loaders.stan(tags.html[tags.a(href="/testtests/tests/")["Run tests"]])
    # Fixed the original stray comma after method="POST" — it produced
    # invalid HTML (a bogus "," attribute on the form element).
    child_foo = '<html><body><div id="body">foo</div><form method="POST" name="theForm" action="postTarget"><input name="blah" /></form></body></html>'
    child_bar = "bar"
    child_baz = '<html><body onclick="alert(event.clientX);alert( event.clientY);"><div id="body">toot</div><a id="nextPage" href="foo" onclick="alert(\'clicked\')">Foo</a></body></html>'
    child_clickHandler = """<html>
    <body>
    <a id="theClicker" onclick="this.innerHTML='Clicked'">Click me!</a>
    </body>
    </html>"""
    def child_postTarget(self, ctx):
        """Echo the posted 'blah' argument back in a minimal page."""
        return rend.Page(docFactory=loaders.stan(tags.html[tags.body(id="body")[str(ctx.arg('blah'))]]))
    # NOTE: the original also defined a child_tests() method here; it was
    # immediately shadowed by the class attribute below and therefore dead
    # code, so it has been removed.
    child_tests = Tester([
        ('visit', '/testtests/foo', ''),
        ('visit', '/testtests/bar', ''),
        ('visit', '/testtests/baz', ''),
        ('assert', 'body', 'toot'),
        ('follow', 'nextPage', ''),
        ('assert', 'body', 'foo'),
        ('post', 'theForm', dict(blah="blah")),
        ('assert', 'body', 'blah'),
        ('visit', '/testtests/clickHandler', ''),
        ('click', 'theClicker', ''),
        ('assert', 'theClicker', 'Clicked'),
        ('call', callMe, ''),
    ])
def createResource():
    """twistd plugin entry point: serve the livetest self-test page."""
    return TestTests()
|
tquilian/exelearningTest
|
nevow/livetest.py
|
Python
|
gpl-2.0
| 8,724
|
[
"VisIt"
] |
e37703838ea50010c7ac55e8383391531c9111d96bd08c70b7e465194a590db5
|
# $Id$
#
# Copyright (C) 2002-2006 greg Landrum and Rational Discovery LLC
#
# @@ All Rights Reserved @@
# This file is part of the RDKit.
# The contents are covered by the terms of the BSD license
# which is included in the file license.txt, found at the root
# of the RDKit source tree.
#
"""unit testing code for the EState indices
validation values are from the paper (JCICS _31_ 76-81 (1991))
"""
from __future__ import print_function
import unittest
from six import StringIO
import numpy as np
from rdkit import Chem
from rdkit.Chem import EState
class TestCase(unittest.TestCase):
  """Regression tests for EState indices against the published values."""
  def _compareEstates(self, val1, val2, msg, tol=1e-2):
    # Element-wise comparison of two index arrays within tolerance *tol*.
    maxV = max(abs(val1 - val2))
    self.assertLess(maxV, tol, msg)
  def _validate(self, vals, places=2, tol=1e-2, debug=False):
    # vals: sequence of (SMILES, expected-index-list) pairs.  Also checks
    # the min/max/abs summary descriptors against the expected vector.
    for smi, ans in vals:
      ans = np.array(ans)
      mol = Chem.MolFromSmiles(smi)
      inds = EState.EStateIndices(mol)
      if debug: # pragma: nocover
        print(inds)
      self._compareEstates(ans, inds, 'bad EStates for smiles: {0}'.format(smi), tol=tol)
      self.assertLess(abs(EState.MaxEStateIndex(mol) - max(ans)), tol)
      self.assertLess(abs(EState.MinEStateIndex(mol) - min(ans)), tol)
      self.assertLess(abs(EState.MaxAbsEStateIndex(mol) - max(abs(ans))), tol)
      self.assertLess(abs(EState.MinAbsEStateIndex(mol) - min(abs(ans))), tol)
  def test_simpleMolecules(self):
    # Straight-chain alkanes of increasing length.
    data = [
      ('CCCC', [2.18, 1.32, 1.32, 2.18]),
      ('CCCCC', [2.21, 1.34, 1.39, 1.34, 2.21]),
      ('CCCCCCC', [2.24, 1.36, 1.42, 1.44, 1.42, 1.36, 2.24]),
      ('CCCCCCCCCC', [2.27, 1.37, 1.44, 1.46, 1.47, 1.47, 1.46, 1.44, 1.37, 2.27]),
    ]
    self._validate(data)
  def test_isomers(self):
    # Hexane isomers: branching shifts the indices.
    data = [
      ('CCCCCC', [2.23, 1.36, 1.41, 1.41, 1.36, 2.23]),
      ('CCC(C)CC', [2.23, 1.33, 0.94, 2.28, 1.33, 2.23]),
      ('CC(C)CCC', [2.25, 0.90, 2.25, 1.38, 1.33, 2.22]),
      ('CC(C)(C)CC', [2.24, 0.54, 2.24, 2.24, 1.27, 2.20]),
    ]
    self._validate(data)
  def test_heteroatoms1(self):
    # Ethers and halogen-substituted isobutanes.
    data = [
      ('CCCCOCCCC', [2.18, 1.24, 1.21, 0.95, 5.31, 0.95, 1.21, 1.24, 2.18]),
      ('CCC(C)OC(C)CC', [2.15, 1.12, 0.43, 2.12, 5.54, 0.43, 2.12, 1.12, 2.15]),
      ('CC(C)(C)OC(C)(C)C', [2.07, -0.02, 2.07, 2.07, 5.63, -0.02, 2.07, 2.07, 2.07]),
      ('CC(C)CC', [2.22, 0.88, 2.22, 1.31, 2.20]),
      ('CC(C)CN', [2.10, 0.66, 2.10, 0.81, 5.17]),
      ('CC(C)CO', [1.97, 0.44, 1.97, 0.31, 8.14]),
      ('CC(C)CF', [1.85, 0.22, 1.85, -0.19, 11.11]),
      ('CC(C)CCl', [2.09, 0.65, 2.09, 0.78, 5.34]),
      ('CC(C)CBr', [2.17, 0.80, 2.17, 1.11, 3.31]),
      ('CC(C)CI', [2.21, 0.87, 2.21, 1.28, 2.38]),
    ]
    self._validate(data, debug=False)
  def test_heteroatoms2(self):
    data = [
      ('CC(N)C(=O)O', [1.42, -0.73, 4.84, -0.96, 9.57, 7.86]),
      ('CCOCC', [1.99, 0.84, 4.83, 0.84, 1.99]),
      ('CCSCC', [2.17, 1.26, 1.96, 1.26, 2.17]), # NOTE: this doesn't match the values in the paper
      ('CC(=O)OC', [1.36, -0.24, 9.59, 4.11, 1.35]),
      ('CC(=S)OC', [1.73, 0.59, 4.47, 4.48, 1.56]),
    ]
    self._validate(data, debug=False)
  def test_aromatics(self):
    # aromatics with heteroatoms
    data = [
      ('Fc1ccc(C)cc1', [12.09, -0.17, 1.45, 1.75, 1.09, 1.93, 1.75, 1.45]),
      ('Clc1ccc(C)cc1', [5.61, 0.80, 1.89, 1.99, 1.24, 2.04, 1.99, 1.89]),
      ('Brc1ccc(C)cc1', [3.35, 1.14, 2.04, 2.07, 1.30, 2.08, 2.07, 2.04]),
      ('Ic1ccc(C)cc1', [2.30, 1.30, 2.10, 2.11, 1.32, 2.09, 2.11, 2.10]),
    ]
    self._validate(data, debug=False)
  def test_GetPrincipleQuantumNumber(self):
    # Each (nmin, nmax) atomic-number range maps to principal quantum number 1..7.
    for principalQN, (nmin, nmax) in enumerate(
        [(1, 2), (3, 10), (11, 18), (19, 36), (37, 54), (55, 86), (87, 120)], 1):
      for n in range(nmin, nmax + 1):
        self.assertEqual(EState.GetPrincipleQuantumNumber(n), principalQN)
  def test_cacheEstate(self):
    # EStateIndices caches its result on the molecule as _eStateIndices.
    mol = Chem.MolFromSmiles('CCCC')
    expected = [2.18, 1.32, 1.32, 2.18]
    # The mol object has no information about E-states
    self.assertFalse(hasattr(mol, '_eStateIndices'))
    inds = EState.EStateIndices(mol)
    self._compareEstates(inds, expected, 'cacheTest')
    # We now have E-states stored with the molecule
    self.assertTrue(hasattr(mol, '_eStateIndices'))
    # Let's make sure that we skip the calculation next time if force is False
    mol._eStateIndices = 'cached'
    self.assertTrue(hasattr(mol, '_eStateIndices'))
    inds = EState.EStateIndices(mol, force=False)
    self.assertEqual(inds, 'cached')
    # But with force (default) we calculate again
    inds = EState.EStateIndices(mol)
    self._compareEstates(inds, expected, 'cacheTest')
    self._compareEstates(mol._eStateIndices, expected, 'cacheTest')
  def test_exampleCode(self):
    # We make sure that the example code runs
    from rdkit.TestRunner import redirect_stdout
    f = StringIO()
    with redirect_stdout(f):
      EState.EState._exampleCode()
    s = f.getvalue()
    self.assertIn('CC(N)C(=O)O', s)
# Run the EState regression suite when executed directly.
if __name__ == '__main__': # pragma: nocover
  unittest.main()
|
jandom/rdkit
|
rdkit/Chem/EState/UnitTestEState.py
|
Python
|
bsd-3-clause
| 5,001
|
[
"RDKit"
] |
23446bb8b4cf2b02702ee04691c6779d172804c8d0a385c966e6b35693c80c96
|
#! /usr/bin/env python
# FIXME: it has to be seen if this is any useful
# FIXME: to bring back to life
from DIRAC.Core.Base.Script import parseCommandLine
parseCommandLine()
from DIRAC.Resources.Catalog.FileCatalog import FileCatalog
from DIRAC.Core.Utilities.File import makeGuid
from DIRAC.Core.Utilities.Adler import stringAdler
from types import *
import unittest,time,os,shutil,sys
# The catalog client to exercise must be given as the single CLI argument.
if len(sys.argv) < 2:
  # Parenthesised print behaves identically on Python 2 and 3 for a single
  # argument; the original bare print statement was Python 2 only.
  print( 'Usage: TestCatalogPlugIn.py CatalogClient' )
  sys.exit()
else:
  catalogClientToTest = sys.argv[1]
class CatalogPlugInTestCase(unittest.TestCase):
  """ Base class for the CatalogPlugin test case """
  # NOTE(review): this module uses Python 2 only constructs (print statement
  # at module level, xrange, dict.has_key) — it will not run on Python 3.
  def setUp(self):
    # Metadata keys expected from the catalog for files and directories.
    self.fullMetadata = ['Status', 'ChecksumType', 'OwnerRole', 'CreationDate', 'Checksum', 'ModificationDate', 'OwnerDN', 'Mode', 'GUID', 'Size']
    self.dirMetadata = self.fullMetadata + ['NumberOfSubPaths']
    self.fileMetadata = self.fullMetadata + ['NumberOfLinks']
    self.catalog = FileCatalog(catalogs=[catalogClientToTest])
    valid = self.catalog.isOK()
    self.assertTrue(valid)
    self.destDir = '/lhcb/test/unit-test/TestCatalogPlugin'
    self.link = "%s/link" % self.destDir
    # Clean the existing directory
    self.cleanDirectory()
    res = self.catalog.createDirectory(self.destDir)
    returnValue = self.parseResult(res,self.destDir)
    # Register some files to work with
    self.numberOfFiles = 2
    self.files = []
    for i in xrange(self.numberOfFiles):
      lfn = "%s/testFile_%d" % (self.destDir,i)
      res = self.registerFile(lfn)
      self.assertTrue(res)
      self.files.append(lfn)
  def registerFile(self,lfn):
    # Register a dummy 10 MB replica for *lfn* at a fake storage element.
    pfn = 'protocol://host:port/storage/path%s' % lfn
    size = 10000000
    se = 'DIRAC-storage'
    guid = makeGuid()
    adler = stringAdler(guid)
    fileDict = {}
    fileDict[lfn] = {'PFN':pfn,'Size':size,'SE':se,'GUID':guid,'Checksum':adler}
    res = self.catalog.addFile(fileDict)
    return self.parseResult(res,lfn)
  def parseResult(self,res,path):
    # Assert the S_OK/Successful structure and return the entry for *path*.
    self.assertTrue(res['OK'])
    self.assertTrue(res['Value'])
    self.assertTrue(res['Value']['Successful'])
    self.assertTrue(res['Value']['Successful'].has_key(path))
    return res['Value']['Successful'][path]
  def parseError(self,res,path):
    # Assert the S_OK/Failed structure and return the error for *path*.
    self.assertTrue(res['OK'])
    self.assertTrue(res['Value'])
    self.assertTrue(res['Value']['Failed'])
    self.assertTrue(res['Value']['Failed'].has_key(path))
    return res['Value']['Failed'][path]
  def cleanDirectory(self):
    # Remove all registered replicas/files and then the directory itself.
    res = self.catalog.exists(self.destDir)
    returnValue = self.parseResult(res,self.destDir)
    if not returnValue:
      return
    res = self.catalog.listDirectory(self.destDir)
    returnValue = self.parseResult(res,self.destDir)
    toRemove = returnValue['Files'].keys()
    if toRemove:
      self.purgeFiles(toRemove)
    res = self.catalog.removeDirectory(self.destDir)
    returnValue = self.parseResult(res,self.destDir)
    self.assertTrue(returnValue)
  def purgeFiles(self,lfns):
    # Remove every replica of each lfn, then the catalog entry itself.
    for lfn in lfns:
      res = self.catalog.getReplicas(lfn,True)
      replicas = self.parseResult(res,lfn)
      for se,pfn in replicas.items():
        repDict = {}
        repDict[lfn] = {'PFN':pfn,'SE':se}
        res = self.catalog.removeReplica(repDict)
        self.parseResult(res,lfn)
      res = self.catalog.removeFile(lfn)
      self.parseResult(res,lfn)
  def tearDown(self):
    self.cleanDirectory()
class FileTestCase(CatalogPlugInTestCase):
  """File-level catalog operations, each probed with a file, a missing path
  and (where meaningful) a directory."""
  def test_isFile(self):
    # Test isFile with a file
    res = self.catalog.isFile(self.files[0])
    returnValue = self.parseResult(res,self.files[0])
    self.assertTrue(returnValue)
    # Test isFile for missing path
    res = self.catalog.isFile(self.files[0][:-1])
    error = self.parseError(res,self.files[0][:-1])
    self.assertEqual(error,"No such file or directory")
    # Test isFile with a directory
    res = self.catalog.isFile(self.destDir)
    returnValue = self.parseResult(res,self.destDir)
    self.assertFalse(returnValue)
  def test_getFileMetadata(self):
    # Test getFileMetadata with a file
    res = self.catalog.getFileMetadata(self.files[0])
    returnValue = self.parseResult(res,self.files[0])
    self.assertEqual(returnValue['Status'],'-')
    self.assertEqual(returnValue['Size'],10000000)
    self.metadata = ['Status', 'ChecksumType', 'NumberOfLinks', 'CreationDate', 'Checksum', 'ModificationDate', 'Mode', 'GUID', 'Size']
    for key in self.metadata:
      self.assertTrue(returnValue.has_key(key))
    # Test getFileMetadata for missing path
    res = self.catalog.getFileMetadata(self.files[0][:-1])
    error = self.parseError(res,self.files[0][:-1])
    self.assertEqual(error,"No such file or directory")
    # Test getFileMetadata with a directory
    res = self.catalog.getFileMetadata(self.destDir)
    returnValue = self.parseResult(res,self.destDir)
    self.assertEqual(returnValue['Status'],'-')
    self.assertEqual(returnValue['Size'],0)
    self.metadata = ['Status', 'ChecksumType', 'NumberOfLinks', 'CreationDate', 'Checksum', 'ModificationDate', 'Mode', 'GUID', 'Size']
    for key in self.metadata:
      self.assertTrue(returnValue.has_key(key))
  def test_getFileSize(self):
    # Test getFileSize with a file
    res = self.catalog.getFileSize(self.files[0])
    returnValue = self.parseResult(res,self.files[0])
    self.assertEqual(returnValue,10000000)
    # Test getFileSize for missing path
    res = self.catalog.getFileSize(self.files[0][:-1])
    error = self.parseError(res,self.files[0][:-1])
    self.assertEqual(error,"No such file or directory")
    # Test getFileSize with a directory
    res = self.catalog.getFileSize(self.destDir)
    returnValue = self.parseResult(res,self.destDir)
    self.assertEqual(returnValue,0)
  def test_getReplicas(self):
    # Test getReplicas with a file
    res = self.catalog.getReplicas(self.files[0])
    returnValue = self.parseResult(res,self.files[0])
    self.assertEqual(returnValue.keys(),['DIRAC-storage'])
    self.assertEqual(returnValue.values(),['protocol://host:port/storage/path%s' % self.files[0]])
    # Test getReplicas for missing path
    res = self.catalog.getReplicas(self.files[0][:-1])
    error = self.parseError(res,self.files[0][:-1])
    self.assertEqual(error,"No such file or directory")
    # Test getReplicas with a directory
    res = self.catalog.getReplicas(self.destDir)
    error = self.parseError(res,self.destDir)
    # TODO return an error (currently 'File has zero replicas')
    #self.assertEqual(error,"Supplied path not a file")
  def test_getReplicaStatus(self):
    # Test getReplicaStatus with a file with existing replica
    replicaDict = {}
    replicaDict[self.files[0]] = 'DIRAC-storage'
    res = self.catalog.getReplicaStatus(replicaDict)
    returnValue = self.parseResult(res,self.files[0])
    self.assertEqual(returnValue,'U')
    # Test getReplicaStatus with a file with non-existing replica
    replicaDict = {}
    replicaDict[self.files[0]] = 'Missing'
    res = self.catalog.getReplicaStatus(replicaDict)
    error = self.parseError(res,self.files[0])
    self.assertEqual(error,"No replica at supplied site")
    # Test getReplicaStatus for missing path
    res = self.catalog.getReplicaStatus(self.files[0][:-1])
    error = self.parseError(res,self.files[0][:-1])
    self.assertEqual(error,"No such file or directory")
    # Test getReplicaStatus with a directory
    res = self.catalog.getReplicas(self.destDir)
    error = self.parseError(res,self.destDir)
    # TODO return an error (currently 'File has zero replicas')
    #self.assertEqual(error,"Supplied path not a file")
  def test_exists(self):
    # Test exists with a file
    res = self.catalog.exists(self.files[0])
    returnValue = self.parseResult(res,self.files[0])
    self.assertTrue(returnValue)
    # Test exists for missing path
    res = self.catalog.exists(self.files[0][:-1])
    returnValue = self.parseResult(res,self.files[0][:-1])
    self.assertFalse(returnValue)
    # Test exists with a directory
    res = self.catalog.exists(self.destDir)
    returnValue = self.parseResult(res,self.destDir)
    self.assertTrue(returnValue)
  def test_addReplica(self):
    # Test getReplicas with a file
    res = self.catalog.getReplicas(self.files[0])
    returnValue = self.parseResult(res,self.files[0])
    self.assertEqual(returnValue.keys(),['DIRAC-storage'])
    self.assertEqual(returnValue.values(),['protocol://host:port/storage/path%s' % self.files[0]])
    # Test the addReplica with a file
    registrationDict = {}
    registrationDict[self.files[0]] = {'SE':'DIRAC-storage2','PFN':'protocol2://host:port/storage/path%s' % self.files[0]}
    res = self.catalog.addReplica(registrationDict)
    returnValue = self.parseResult(res,self.files[0])
    self.assertTrue(returnValue)
    # Check the addReplica worked correctly
    res = self.catalog.getReplicas(self.files[0])
    returnValue = self.parseResult(res,self.files[0])
    self.assertEqual(sorted(returnValue.keys()),sorted(['DIRAC-storage','DIRAC-storage2']))
    self.assertEqual(sorted(returnValue.values()),sorted(['protocol://host:port/storage/path%s' % self.files[0], 'protocol2://host:port/storage/path%s' % self.files[0]]))
    # Test the addReplica with a non-existant file
    registrationDict = {}
    registrationDict[self.files[0][:-1]] = {'SE':'DIRAC-storage3','PFN':'protocol3://host:port/storage/path%s' % self.files[0]}
    res = self.catalog.addReplica(registrationDict)
    error = self.parseError(res,self.files[0][:-1])
    # TODO When the master fails it should return an error in FileCatalog
    #self.assertEqual(error,"No such file or directory")
  def test_setReplicaStatus(self):
    # Test setReplicaStatus with a file
    lfnDict = {}
    lfnDict[self.files[0]] = {'PFN': 'protocol://host:port/storage/path%s' % self.files[0],'SE':'DIRAC-storage' ,'Status':'P'}
    res = self.catalog.setReplicaStatus(lfnDict)
    returnValue = self.parseResult(res,self.files[0])
    self.assertTrue(returnValue)
    # Check the setReplicaStatus worked correctly
    res = self.catalog.getReplicas(self.files[0])
    returnValue = self.parseResult(res,self.files[0])
    self.assertFalse(returnValue)
    #time.sleep(2)
    # Test setReplicaStatus with a file
    lfnDict = {}
    lfnDict[self.files[0]] = {'PFN': 'protocol://host:port/storage/path%s' % self.files[0],'SE':'DIRAC-storage' ,'Status':'U'}
    res = self.catalog.setReplicaStatus(lfnDict)
    returnValue = self.parseResult(res,self.files[0])
    self.assertTrue(returnValue)
    # Check the setReplicaStatus worked correctly
    res = self.catalog.getReplicas(self.files[0])
    returnValue = self.parseResult(res,self.files[0])
    self.assertEqual(returnValue.keys(),['DIRAC-storage'])
    self.assertEqual(returnValue.values(),['protocol://host:port/storage/path%s' % self.files[0]])
    # Test setReplicaStatus with non-existant file
    lfnDict = {}
    lfnDict[self.files[0][:-1]] = {'PFN': 'protocol://host:port/storage/path%s' % self.files[0][:-1],'SE':'DIRAC-storage' ,'Status':'U'}
    res = self.catalog.setReplicaStatus(lfnDict)
    error = self.parseError(res,self.files[0][:-1])
    # TODO When the master fails it should return an error in FileCatalog
    #self.assertEqual(error,"No such file or directory")
  def test_setReplicaHost(self):
    # Test setReplicaHost with a file
    lfnDict = {}
    lfnDict[self.files[0]] = {'PFN': 'protocol://host:port/storage/path%s' % self.files[0],'SE':'DIRAC-storage' ,'NewSE':'DIRAC-storage2'}
    res = self.catalog.setReplicaHost(lfnDict)
    returnValue = self.parseResult(res,self.files[0])
    self.assertTrue(returnValue)
    # Check the setReplicaHost worked correctly
    res = self.catalog.getReplicas(self.files[0])
    returnValue = self.parseResult(res,self.files[0])
    self.assertEqual(returnValue.keys(),['DIRAC-storage2'])
    self.assertEqual(returnValue.values(),['protocol://host:port/storage/path%s' % self.files[0]])
    # Test setReplicaHost with non-existant file
    lfnDict = {}
    lfnDict[self.files[0][:-1]] = {'PFN': 'protocol://host:port/storage/path%s' % self.files[0][:-1],'SE':'DIRAC-storage' ,'NewSE':'DIRAC-storage2'}
    res = self.catalog.setReplicaHost(lfnDict)
    error = self.parseError(res,self.files[0][:-1])
    # TODO When the master fails it should return an error in FileCatalog
    #self.assertEqual(error,"No such file or directory")
class DirectoryTestCase(CatalogPlugInTestCase):
  """Directory-level catalog operations, each probed with a directory, a
  file and a missing path."""
  def test_isDirectory(self):
    # Test isDirectory with a directory
    res = self.catalog.isDirectory(self.destDir)
    returnValue = self.parseResult(res,self.destDir)
    self.assertTrue(returnValue)
    # Test isDirectory with a file
    res = self.catalog.isDirectory(self.files[0])
    returnValue = self.parseResult(res,self.files[0])
    self.assertFalse(returnValue)
    # Test isDirectory for missing path
    res = self.catalog.isDirectory(self.files[0][:-1])
    error = self.parseError(res,self.files[0][:-1])
    self.assertEqual(error,"No such file or directory")
  def test_getDirectoryMetadata(self):
    # Test getDirectoryMetadata with a directory
    res = self.catalog.getDirectoryMetadata(self.destDir)
    returnValue = self.parseResult(res,self.destDir)
    self.assertEqual(returnValue['Status'],'-')
    self.assertEqual(returnValue['Size'],0)
    self.assertEqual(returnValue['NumberOfSubPaths'],self.numberOfFiles)
    for key in self.dirMetadata:
      self.assertTrue(returnValue.has_key(key))
    # Test getDirectoryMetadata with a file
    res = self.catalog.getDirectoryMetadata(self.files[0])
    returnValue = self.parseResult(res,self.files[0])
    self.assertEqual(returnValue['Status'],'-')
    self.assertEqual(returnValue['Size'],10000000)
    for key in self.dirMetadata:
      self.assertTrue(returnValue.has_key(key))
    # Test getDirectoryMetadata for missing path
    res = self.catalog.getDirectoryMetadata(self.files[0][:-1])
    error = self.parseError(res,self.files[0][:-1])
    self.assertEqual(error,"No such file or directory")
  def test_listDirectory(self):
    # Test listDirectory for directory
    res = self.catalog.listDirectory(self.destDir,True)
    returnValue = self.parseResult(res,self.destDir)
    self.assertEqual(returnValue.keys(),['Files','SubDirs','Links'])
    self.assertFalse(returnValue['SubDirs'])
    self.assertFalse(returnValue['Links'])
    self.assertEqual(sorted(returnValue['Files'].keys()),sorted(self.files))
    directoryFiles = returnValue['Files']
    for lfn,fileDict in directoryFiles.items():
      self.assertTrue(fileDict.has_key('Replicas'))
      self.assertEqual(len(fileDict['Replicas']),1)
      self.assertTrue(fileDict.has_key('MetaData'))
      for key in self.fileMetadata:
        self.assertTrue(fileDict['MetaData'].has_key(key))
    # Test listDirectory for a file
    res = self.catalog.listDirectory(self.files[0],True)
    error = self.parseError(res,self.files[0])
    self.assertEqual(error,"Not a directory")
    # Test listDirectory for missing path
    res = self.catalog.listDirectory(self.files[0][:-1])
    error = self.parseError(res,self.files[0][:-1])
    self.assertEqual(error,"No such file or directory")
  def test_getDirectoryReplicas(self):
    # Test getDirectoryReplicas for directory
    res = self.catalog.getDirectoryReplicas(self.destDir,True)
    returnValue = self.parseResult(res,self.destDir)
    self.assertTrue(returnValue.has_key(self.files[0]))
    fileReplicas = returnValue[self.files[0]]
    self.assertEqual(fileReplicas.keys(),['DIRAC-storage'])
    self.assertEqual(fileReplicas.values(),['protocol://host:port/storage/path%s' % self.files[0]])
    # Test getDirectoryReplicas for a file
    res = self.catalog.getDirectoryReplicas(self.files[0],True)
    error = self.parseError(res,self.files[0])
    self.assertEqual(error,"Not a directory")
    # Test getDirectoryReplicas for missing path
    res = self.catalog.getDirectoryReplicas(self.files[0][:-1])
    error = self.parseError(res,self.files[0][:-1])
    self.assertEqual(error,"No such file or directory")
  def test_getDirectorySize(self):
    # Test getDirectorySize for directory
    res = self.catalog.getDirectorySize(self.destDir)
    returnValue = self.parseResult(res,self.destDir)
    for key in ['Files','TotalSize','SubDirs','ClosedDirs','SiteUsage']:
      self.assertTrue(returnValue.has_key(key))
    self.assertEqual(returnValue['Files'],self.numberOfFiles)
    self.assertEqual(returnValue['TotalSize'],(self.numberOfFiles*10000000))
    #TODO create a sub dir, check, close it, check
    self.assertFalse(returnValue['SubDirs'])
    self.assertFalse(returnValue['ClosedDirs'])
    usage = returnValue['SiteUsage']
    self.assertEqual(usage.keys(),['DIRAC-storage'])
    self.assertEqual(usage['DIRAC-storage']['Files'],self.numberOfFiles)
    self.assertEqual(usage['DIRAC-storage']['Size'],(self.numberOfFiles*10000000))
    # Test getDirectorySize for a file
    res = self.catalog.getDirectorySize(self.files[0])
    error = self.parseError(res,self.files[0])
    self.assertEqual(error,"Not a directory")
    # Test getDirectorySize for missing path
    res = self.catalog.getDirectorySize(self.files[0][:-1])
    error = self.parseError(res,self.files[0][:-1])
    self.assertEqual(error,"No such file or directory")
class LinkTestCase(CatalogPlugInTestCase):
  """Placeholder: link operations are not covered yet."""
  #'createLink','removeLink','isLink','readLink'
  pass
class DatasetTestCase(CatalogPlugInTestCase):
  """Placeholder: dataset operations are not covered yet."""
  #'removeDataset','removeFileFromDataset','createDataset'
  pass
if __name__ == '__main__':
  #TODO getDirectoryMetadata and getFileMetadata should be merged
  #TODO Fix the return structure of write operations from FileCatalog
  # Only the file-level tests are enabled; the directory suite is kept
  # commented out below.
  suite = unittest.defaultTestLoader.loadTestsFromTestCase(FileTestCase)
  #suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(FileTestCase))
  #suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(DirectoryTestCase))
  testResult = unittest.TextTestRunner(verbosity=2).run(suite)
|
Andrew-McNab-UK/DIRAC
|
tests/Integration/Resources/Catalog/FIXME_Test_CatalogPlugin.py
|
Python
|
gpl-3.0
| 18,087
|
[
"DIRAC"
] |
15d9b17acdca5cb24b75e4cd4d7e1a46e66bf35dd63d2308405bef46249eab72
|
"""
A setuptools based setup module.
See:
https://packaging.python.org/guides/distributing-packages-using-setuptools/
https://github.com/pypa/sampleproject
"""
"""
THIS FILE IS USED IN THE DOCS BUILDING
DO NOT DELETE!
"""
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
import os
# io.open is needed for projects that support Python 2.7
# It ensures open() defaults to text mode with universal newlines,
# and accepts an argument to specify the text encoding
# Python 3 only projects can skip this import
from io import open
from src.GridCal.__version__ import __GridCal_VERSION__
# Resolve paths relative to this setup.py so the build works from any CWD.
here = os.path.abspath(os.path.dirname(__file__))
# Get the long description from the README file
with open(os.path.join(here, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()
# The short description (PyPI "Summary" field) is kept in doc/about.rst.
with open(os.path.join(here, 'doc', 'about.rst'), encoding='utf-8') as f:
    description = f.read()
# create the file MANIFEST.in
# f = open("MANIFEST.in", "w")
# for root, dirs, files in os.walk(os.path.join('src', 'GridCal')):
#     # path = root.split(os.sep)
#     for file in files:
#         print(len(root) * '---', file)
#         if file.endswith('.bim') or 'docs_build' in root:
#             line = 'include ' + os.path.join(root, file)
#             f.write(line + '\n')
# f.close()
# Discover packages under src/GridCal, then re-prefix each with the base
# path (the project does not use a package_dir mapping).
base_path = os.path.join('src', 'GridCal')
packages = find_packages(where=base_path, exclude=['docs', 'test'])
packages = [os.path.join(base_path, p) for p in packages]
# Runtime dependencies installed by pip alongside GridCal.
# BUGFIX: "pytest>=3.8" was previously listed twice; it is kept once.
dependencies = ["PySide2>=5.15",  # for now, 5.14 breaks the UI generation for development
                "numpy>=1.14.0",
                "scipy>=1.0.0",
                "networkx>=2.1",
                "pandas>=0.22",
                "xlwt>=1.3.0",
                "xlrd>=1.1.0",
                "matplotlib>=2.1.1",
                "qtconsole>=4.5.4",
                "pyDOE>=0.3.8",
                "pySOT>=0.2.1",
                "openpyxl>=2.4.9",
                "smopy>=0.0.6",
                "chardet>=3.0.4",
                "scikit-learn>=0.18",
                "geopy>=1.16",
                "pytest>=3.8",
                "h5py>=2.9.0",
                "numba>=0.54",
                "folium"]
# Package metadata for PyPI.  Arguments marked "Required" must be present
# for upload; "Optional" fields may be omitted.  See:
# https://packaging.python.org/specifications/core-metadata/
setup(
    # Distribution name as registered on PyPI ($ pip install GridCal).
    name='GridCal',  # Required
    # Single-sourced from src/GridCal/__version__.py (PEP 440 compliant).
    version=__GridCal_VERSION__,  # Required
    # One-line summary, read from doc/about.rst above.
    description=description,  # Optional
    # Body text shown on the PyPI project page, read from README.md above.
    long_description=long_description,  # Optional
    # long_description_content_type='text/markdown', # Optional
    # Project homepage ("Home-Page" metadata field).
    url='https://github.com/SanPen/GridCal',  # Optional
    author='Santiago Peñate Vera and Michel Lavoie',  # Optional
    author_email='santiago.penate.vera@gmail.com',  # Optional
    # Trove classifiers; see https://pypi.org/classifiers/
    # BUGFIX: removed 'Programming Language :: Python :: 3.5' and ':: 3.6'
    # classifiers, which contradicted python_requires='>=3.7' below.
    classifiers=[  # Optional
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers and electrical engineers',
        'Topic :: Software Development :: Power Systems',
        'License :: OSI Approved :: GPLv3',
        'Programming Language :: Python :: 3.7',
    ],
    # Whitespace-separated keyword string shown on the project page.
    keywords='power systems planning',  # Optional
    # Package list computed above from src/GridCal.
    packages=packages,  # Required
    include_package_data=True,
    # Enforced by pip at install time (unlike the classifiers above).
    python_requires='>=3.7',
    # Runtime dependencies, defined in the `dependencies` list above.
    install_requires=dependencies,
    # extras_require={ # Optional
    #     'dev': ['check-manifest'],
    #     'test': ['coverage'],
    # },
    # package_data=package_data,
    # data_files=[('my_data', ['data/data_file'])], # Optional
    # entry_points={ # Optional
    #     'console_scripts': [
    #         'sample=sample:main',
    #     ],
    # },
    # project_urls='', # optional
)
|
SanPen/GridCal
|
setup.py
|
Python
|
lgpl-3.0
| 9,976
|
[
"VisIt"
] |
dc93abe0677db2e60ffa729d233595597b119f62b9eb40e8d2956b4ed81f3ee8
|
import re, math
from django.contrib.gis.db import models
from gass.bering.utils import *
class Station(models.Model):
    '''
    Ablatometer station, idealized i.e. designation B01 is re-used each year.
    '''
    # Unique short site designation (stored lowercase; see clean() below).
    site = models.CharField(max_length=255, unique=True)
    operational = models.BooleanField(help_text="Indicates that the station data should be reported")
    upload_path = models.TextField(help_text="File system path to the file or directory where data are uploaded")
    single_file = models.BooleanField(help_text="Indicates that data uploads are aggregate in a single file, specified by the record's upload_path", default=True)
    utc_offset = models.IntegerField(help_text="The UTC offset, in hours, positive or negative")
    init_height_cm = models.FloatField(verbose_name='initial sensor height in cm', help_text="The initial height of the instrument box, in centimeters")
    def __unicode__(self):
        # Python 2 string representation: the site designation.
        return str(self.site)
    def __str__(self):
        # Python 3 string representation: the site designation.
        return str(self.site)
    def clean(self, *args, **kwargs):
        '''
        Validates and/or cleans input before saving to the databse.
        '''
        # Force site names to be lowercase
        self.site = str(self.site).lower()
class SiteVisit(models.Model):
    '''
    An in situ field visit, particularly notable where ablatometer height was
    adjusted manually.
    '''
    site = models.ForeignKey(Station, to_field='site')
    datetime = models.DateTimeField(help_text="Date and time of site visit, in UTC")
    ablato_adjusted = models.BooleanField(verbose_name='ablatometer was adjusted', default=True)
    ablato_height_cm = models.FloatField(verbose_name='ablatometer height after adjustment in cm', blank=True, null=True)
    notes = models.TextField()
    class Meta:
        get_latest_by = 'datetime'
    def __unicode__(self):
        # BUGFIX: self.site is a Station instance and has no upper() method;
        # use the raw FK value self.site_id (the site string), as the
        # Campaign model below already does.
        # NOTE(review): ablato_height_cm is nullable, so %d may still raise a
        # TypeError for visits without an adjustment — confirm intended use.
        return '%s: %d (%s)' % (self.site_id.upper(), self.ablato_height_cm,
                                self.datetime.strftime('%Y-%m-%d %H:%M:%S'))
class Campaign(models.Model):
    '''
    Ablation measurement campaign.
    '''
    site = models.ForeignKey(Station, to_field='site')
    season = models.IntegerField(help_text="The year the ablatometer was deployed")
    deployment = models.DateField(help_text="Date of the deployment")
    recovery = models.DateField(help_text="Date of recovery")
    region = models.CharField(max_length=255, help_text="General description of the deployed location e.g. Tashalich Arm")
    has_uplink = models.BooleanField(help_text="Indicates that the instrument was equipped with a satellite uplink")
    site_visits = models.ManyToManyField(SiteVisit, blank=True, null=True)
    class Meta:
        get_latest_by = 'deployment'
        # One campaign per station per year.
        unique_together = ('site', 'season')
    def __unicode__(self):
        # e.g. "B01 (2011)" — site_id is the raw FK string value.
        return '%s (%d)' % (self.site_id.upper(), self.season)
class Ablation(models.Model):
    '''
    Ablation measurement.
    '''
    # GeoDjango manager enabling spatial queries on the point field.
    objects = models.GeoManager()
    # NOTE(review): help_text typo "Indiates" is a runtime string, left as-is.
    valid = models.BooleanField(editable=False, help_text="Indiates whether the observation record is valid (this flag set by instrument only)")
    site = models.ForeignKey(Station, to_field='site')
    sats = models.IntegerField(verbose_name='satellites', help_text='Number of satellites')
    hdop = models.FloatField(help_text='Horizontal dilution of precision (HDOP)', null=True)
    time = models.TimeField(help_text="This is a naive time, not time-zone aware")
    date = models.DateField()
    datetime = models.DateTimeField(help_text='Date and time of measurement from GPS')
    lat = models.FloatField(help_text='Latitude (Deg, Dec. Min. N)')
    lng = models.FloatField(help_text='Longitude (Deg, Dec. Min. W)')
    gps_valid = models.BooleanField(help_text="Indicates whether the GPS measurements are valid", default=True)
    elev = models.FloatField(verbose_name='altitude (m)')
    rng_cm = models.FloatField(verbose_name='acoustic range (cm)')
    rng_cm_valid = models.BooleanField(help_text="Indicates whether the range measurement is valid", default=True)
    above = models.IntegerField(verbose_name='irradiance')
    below = models.IntegerField(verbose_name='reflectance')
    wind_spd = models.FloatField(verbose_name='wind speed (m/s)')
    temp_C = models.FloatField(verbose_name='temperature (C)')
    volts = models.FloatField(verbose_name='battery voltage (V)')
    point = models.PointField(srid=4326)
    class Meta:
        get_latest_by = 'datetime'
    def __unicode__(self):
        # e.g. "[b01] 2011-06-01 at 12:00:00"
        return '[%s] %s at %s' % (str(self.site_id),
                                  str(self.date), str(self.time))
    @classmethod
    def get_field_names(self, string=None):
        '''
        Returns a list of field names that match an optional string that can be
        parsed as a regular expression.
        '''
        # NOTE(review): declared @classmethod but the first parameter is named
        # "self"; it actually receives the class object.
        names = self._meta.get_all_field_names()
        if string:
            return [name for name in names if re.compile(string).match(name) != None]
        else:
            return [name for name in names]
    @classmethod
    def get_base_field_names(self):
        # Core measurement fields shared by all stations (e.g. for exports).
        return ('site_id', 'sats', 'hdop', 'datetime', 'lat', 'lng', 'elev',
                'rng_cm', 'above', 'below', 'wind_spd', 'temp_C', 'volts')
    def get_previous_record(self, *args, **kwargs):
        '''
        Returns the nearest same-site Ablation record earlier in time, found
        within a +/- 1 hour window, or None if no adjacent record exists.
        '''
        td = datetime.timedelta(hours=1)
        foretime = self.datetime + td # An hour later
        backtime = self.datetime - td # An hour earlier
        # Get a window of observations around this sorted datetime descending
        window = Ablation.objects.filter(site__exact=self.site,
            datetime__range=(backtime, foretime)).order_by('-datetime')
        # Find the first adjacent record earlier in time
        if len(window) > 1:
            for each in window:
                if each.datetime.replace(tzinfo=UTC()) < self.datetime:
                    return each
        elif len(window) == 1:
            # Only if the record is previous in time...
            if window[0].datetime.replace(tzinfo=UTC()) < self.datetime:
                return window[0]
            else: return None
        # No adjacent measurements in time
        elif len(window) == 0: return None
        # NOTE(review): unreachable (len() is never negative) and returns the
        # ValueError class instead of raising it.
        else: return ValueError
    def clean(self, *args, **kwargs):
        '''
        Accepts a tzinfo keyword argument where tzinfo is an instance of
        datetime.tzinfo that can be passed to the replace() method.
        '''
        # Raw instrument uploads arrive as strings; coerce each field once.
        if isinstance(self.valid, str):
            if self.valid == 'A': self.valid = True
            else: self.valid = False
        if isinstance(self.lng, str):
            # For now, force the negation of longitude values (raw data don't distinguish)
            self.lng = -float(Lng(self.lng).value)
        if isinstance(self.lat, str):
            self.lat = Lat(self.lat).value
        if isinstance(self.date, str):
            self.date = Date(self.date).value
        if isinstance(self.time, str):
            # Django does not support timezone-aware times, only datetimes
            self.time = Time(self.time).value
        # NOTE(review): raises KeyError unless a tzinfo kwarg is supplied.
        self.datetime = datetime.datetime.combine(self.date,
            self.time).replace(tzinfo=kwargs['tzinfo'])
        # For now, force the negation of longitude values
        # NOTE(review): if lng arrived as a string it was already negated
        # above, so this would negate it a second time — confirm with callers.
        self.point = 'POINT(%s %s)' % (-float(self.lng), self.lat)
        self.rng_cm = float(self.rng_cm)
        self.check_flags()
    def check_flags(self, *args, **kwargs):
        '''
        A validation procedure setting gps_valid and rng_cm_valid flags.
        '''
        last = self.get_previous_record()
        # TEST: Sufficient satellite constellation?
        if self.sats < 3:
            self.gps_valid = False
        # No test for hdop; do that in database queries where concerned
        # TEST: Obviously bogus acoustic measurements (greater than 600 cm)?
        if self.rng_cm > 600.0:
            self.rng_cm_valid = False
        try:
            if last is None: return None
        except UnboundLocalError:
            # NOTE(review): dead branch — "last" is always bound above.
            return None
        datetime_diff = abs((self.datetime - last.datetime.replace(tzinfo=UTC())).seconds)
        rng_cm_diff = self.rng_cm - last.rng_cm
        # TEST: Indpendent measurements?
        if datetime_diff < 1600:
            # Expected that measurements no more frequent than every 20 minutes
            self.gps_valid = False
        # TEST: Likely bogus acoustic measurements?
        if datetime_diff < (60*60*3) and rng_cm_diff > 5.0 and last.rng_cm < 600.0:
            # Closely-separated measurements in time (less than 3 hours)
            # with more than 5 cm melt are likely invalid
            self.rng_cm_valid = False
        # TEST: Likely bogus acoustic measurements?
        if not last.rng_cm_valid and rng_cm_diff > 0.0:
            # The last range measurement was invalid and this one is greater
            self.rng_cm_valid = False
    def geographic_distance(self, obj):
        '''
        Calculates the distance, in meters, between the position of this
        measurement and another.
        Accepts:
            obj {Ablation} Another Ablation model instance
        Returns:
            {Float} The net migration, in meters, between the two observations
        '''
        lat_m_per_degree = 111412.0
        lng_m_per_degree = 55800.0
        # 111,412 m/degree of latitude at 60 degrees north latitude
        # (from National Geospatial Intelligence Agency)
        # 55,800 m/degree of longitude at 60 degrees north latitude
        # (from National Geospatial Intelligence Agency)
        # http://msi.nga.mil/MSISiteContent/StaticFiles/Calculators/degree.html
        lat_diff_m = abs(lat_m_per_degree*(self.lat - obj.lat))
        lng_diff_m = abs(lng_m_per_degree*(self.lng - obj.lng))
        # Simple distance estimate in meters between last and last observation
        distance_m = math.sqrt((lat_diff_m*lat_diff_m) + (lng_diff_m*lng_diff_m))
        return distance_m
class B1Ablation(models.Model):
    '''Ablation measurement at GASS B01; nearly identical to B2Ablation (which adds elev).'''
    satellites = models.IntegerField('Number of Satellites')
    hdop = models.FloatField('Dilution of Precision', null=True)
    time = models.TimeField('Time')
    date = models.DateField('Date')
    # datetime = models.DateTimeField('Date and Time', unique=True)
    # We tried making datetime unique before but that led to
    # database errors that could not be resolved when there
    # was an attempt to insert a duplicate record
    datetime = models.DateTimeField('Date and Time', primary_key=True)
    # By using datetime as the primary key, we ensure that:
    #   a) Duplicate records in the raw data are entered as one
    #       record with the most recent meteorological and
    #       ablation data
    #   b) When updating the database, we can pull records from
    #       a consolidated file of all existing records without
    #       fear of re-inserting records already in the database
    lat = models.DecimalField('Latitude (Deg, Dec. Min. N)', max_digits=8, decimal_places=5)
    lng = models.DecimalField('Longitude (Deg, Dec. Min. W)', max_digits=9, decimal_places=5)
    gps_ok = models.BooleanField('Position is Valid')
    acoustic_range_cm = models.DecimalField('Acoustic Range (cm)', max_digits=5, decimal_places=2)
    optical_range_cm = models.DecimalField('Optical Range (cm)', max_digits=10, decimal_places=5)
    # sensor_height = models.DecimalField('Sensor Height at Installation', max_digits=5, decimal_places=2)
    ablation_ok = models.BooleanField('Ablation is Valid')
    # BUGFIX: verbose names read 'Irradiance)' / 'Reflectance)' with a stray
    # closing parenthesis; corrected labels below.
    top_light = models.IntegerField('Irradiance')
    bottom_light = models.IntegerField('Reflectance')
    wind_m_s = models.DecimalField('Wind Speed (m/s)', max_digits=5, decimal_places=2)
    # temp_C is allowed to be null because negative temperature measurements
    # currently can't be handled (hardware issue)
    temp_C = models.DecimalField('Temperature (C)', max_digits=4, decimal_places=1, null=True)
    voltage = models.FloatField('Battery Voltage (V)')
    def __unicode__(self):
        return str(self.date) + ', ' + str(self.time)
class B2Ablation(models.Model):
    '''Ablation measurement at GASS B02; almost identical to B1Ablation model (has elevation).'''
    satellites = models.IntegerField('Number of Satellites')
    hdop = models.FloatField('Dilution of Precision', null=True)
    time = models.TimeField('Time')
    date = models.DateField('Date')
    # datetime is the primary key: de-duplicates raw uploads and makes
    # re-imports idempotent (see B1Ablation for the full rationale).
    datetime = models.DateTimeField('Date and Time', primary_key=True)
    lat = models.DecimalField('Latitude (Deg, Dec. Min. N)', max_digits=8, decimal_places=5)
    lng = models.DecimalField('Longitude (Deg, Dec. Min. W)', max_digits=9, decimal_places=5)
    elev = models.DecimalField('Elevation', max_digits=4, decimal_places=1, blank=True, null=True)
    gps_ok = models.BooleanField('Position is Valid')
    acoustic_range_cm = models.DecimalField('Acoustic Range (cm)', max_digits=5, decimal_places=2)
    optical_range_cm = models.DecimalField('Optical Range (cm)', max_digits=10, decimal_places=5)
    # sensor_height = models.DecimalField('Sensor Height at Installation', max_digits=5, decimal_places=2)
    ablation_ok = models.BooleanField('Ablation is Valid')
    # BUGFIX: stray ')' removed from the 'Irradiance' / 'Reflectance' labels.
    top_light = models.IntegerField('Irradiance')
    bottom_light = models.IntegerField('Reflectance')
    wind_m_s = models.DecimalField('Wind Speed (m/s)', max_digits=5, decimal_places=2)
    temp_C = models.DecimalField('Temperature (C)', max_digits=4, decimal_places=1, null=True)
    voltage = models.FloatField('Battery Voltage (V)')
    def __unicode__(self):
        return str(self.date) + ', ' + str(self.time)
class B4Ablation(models.Model):
    '''Ablation measurement at GASS B04; identical to B1Ablation model.'''
    satellites = models.IntegerField('Number of Satellites')
    hdop = models.FloatField('Dilution of Precision', null=True)
    time = models.TimeField('Time')
    date = models.DateField('Date')
    # datetime is the primary key: de-duplicates raw uploads and makes
    # re-imports idempotent (see B1Ablation for the full rationale).
    datetime = models.DateTimeField('Date and Time', primary_key=True)
    lat = models.DecimalField('Latitude (Deg, Dec. Min. N)', max_digits=8, decimal_places=5)
    lng = models.DecimalField('Longitude (Deg, Dec. Min. W)', max_digits=9, decimal_places=5)
    gps_ok = models.BooleanField('Position is Valid')
    acoustic_range_cm = models.DecimalField('Acoustic Range (cm)', max_digits=5, decimal_places=2)
    optical_range_cm = models.DecimalField('Optical Range (cm)', max_digits=10, decimal_places=5)
    # sensor_height = models.DecimalField('Sensor Height at Installation', max_digits=5, decimal_places=2)
    ablation_ok = models.BooleanField('Ablation is Valid')
    # BUGFIX: stray ')' removed from the 'Irradiance' / 'Reflectance' labels.
    top_light = models.IntegerField('Irradiance')
    bottom_light = models.IntegerField('Reflectance')
    wind_m_s = models.DecimalField('Wind Speed (m/s)', max_digits=5, decimal_places=2)
    temp_C = models.DecimalField('Temperature (C)', max_digits=4, decimal_places=1, null=True)
    voltage = models.FloatField('Battery Voltage (V)')
    def __unicode__(self):
        return str(self.date) + ', ' + str(self.time)
class B6Ablation(models.Model):
    '''Ablation measurement at GASS B06; identical to B1Ablation model.'''
    satellites = models.IntegerField('Number of Satellites')
    hdop = models.FloatField('Dilution of Precision', null=True)
    time = models.TimeField('Time')
    date = models.DateField('Date')
    # datetime is the primary key: de-duplicates raw uploads and makes
    # re-imports idempotent (see B1Ablation for the full rationale).
    datetime = models.DateTimeField('Date and Time', primary_key=True)
    lat = models.DecimalField('Latitude (Deg, Dec. Min. N)', max_digits=8, decimal_places=5)
    lng = models.DecimalField('Longitude (Deg, Dec. Min. W)', max_digits=9, decimal_places=5)
    gps_ok = models.BooleanField('Position is Valid')
    acoustic_range_cm = models.DecimalField('Acoustic Range (cm)', max_digits=5, decimal_places=2)
    optical_range_cm = models.DecimalField('Optical Range (cm)', max_digits=10, decimal_places=5)
    # sensor_height = models.DecimalField('Sensor Height at Installation', max_digits=5, decimal_places=2)
    ablation_ok = models.BooleanField('Ablation is Valid')
    # BUGFIX: stray ')' removed from the 'Irradiance' / 'Reflectance' labels.
    top_light = models.IntegerField('Irradiance')
    bottom_light = models.IntegerField('Reflectance')
    wind_m_s = models.DecimalField('Wind Speed (m/s)', max_digits=5, decimal_places=2)
    temp_C = models.DecimalField('Temperature (C)', max_digits=4, decimal_places=1, null=True)
    voltage = models.FloatField('Battery Voltage (V)')
    def __unicode__(self):
        return str(self.date) + ', ' + str(self.time)
class T1Ablation(models.Model):
    '''Ablation measurement at GASS T01; identical to B1Ablation model.'''
    satellites = models.IntegerField('Number of Satellites')
    hdop = models.FloatField('Dilution of Precision', null=True)
    time = models.TimeField('Time')
    date = models.DateField('Date')
    # datetime is the primary key: de-duplicates raw uploads and makes
    # re-imports idempotent (see B1Ablation for the full rationale).
    datetime = models.DateTimeField('Date and Time', primary_key=True)
    lat = models.DecimalField('Latitude (Deg, Dec. Min. N)', max_digits=8, decimal_places=5)
    lng = models.DecimalField('Longitude (Deg, Dec. Min. W)', max_digits=9, decimal_places=5)
    gps_ok = models.BooleanField('Position is Valid')
    acoustic_range_cm = models.DecimalField('Acoustic Range (cm)', max_digits=5, decimal_places=2)
    optical_range_cm = models.DecimalField('Optical Range (cm)', max_digits=10, decimal_places=5)
    # sensor_height = models.DecimalField('Sensor Height at Installation', max_digits=5, decimal_places=2)
    ablation_ok = models.BooleanField('Ablation is Valid')
    # BUGFIX: stray ')' removed from the 'Irradiance' / 'Reflectance' labels.
    top_light = models.IntegerField('Irradiance')
    bottom_light = models.IntegerField('Reflectance')
    wind_m_s = models.DecimalField('Wind Speed (m/s)', max_digits=5, decimal_places=2)
    temp_C = models.DecimalField('Temperature (C)', max_digits=4, decimal_places=1, null=True)
    voltage = models.FloatField('Battery Voltage (V)')
    def __unicode__(self):
        return str(self.date) + ', ' + str(self.time)
|
arthur-e/gass
|
bering/models.py
|
Python
|
mit
| 17,849
|
[
"VisIt"
] |
c0737b5d2aa6985ff368be36c37dfe67b14f4d2219fb0d3196fe71c7c82a134a
|
import logging
import os
from pyjade import Compiler as _Compiler, Parser, register_filter
from pyjade.runtime import attrs
from pyjade.exceptions import CurrentlyNotSupported
from pyjade.utils import process
from django.conf import settings
class Compiler(_Compiler):
    """Pyjade compiler back-end that emits Django template language."""
    # Django tags that require a matching {% end<tag> %} terminator.
    autocloseCode = 'if,ifchanged,ifequal,ifnotequal,for,block,filter,autoescape,with,trans,blocktrans,spaceless,comment,cache,localize,compress,verbatim'.split(',')
    useRuntime = True
    def __init__(self, node, **options):
        # Allow project-level option overrides via the PYJADE Django setting.
        if settings.configured:
            options.update(getattr(settings,'PYJADE',{}))
        super(Compiler, self).__init__(node, **options)
    def visitCodeBlock(self,block):
        # Map a Jade block to {% block %}; append/prepend modes inject
        # {{block.super}} after/before the block body respectively.
        self.buffer('{%% block %s %%}'%block.name)
        if block.mode=='append': self.buffer('{{block.super}}')
        self.visitBlock(block)
        if block.mode=='prepend': self.buffer('{{block.super}}')
        self.buffer('{% endblock %}')
    def visitAssignment(self,assignment):
        # Variable assignment delegates to the __pyjade_set runtime tag.
        self.buffer('{%% __pyjade_set %s = %s %%}'%(assignment.name,assignment.val))
    def visitMixin(self,mixin):
        # Mixin definitions become custom "kwacro" macros; mixin calls expand
        # them. Passing a block to a mixin call is not supported.
        self.mixing += 1
        if not mixin.call:
            self.buffer('{%% __pyjade_kwacro %s %s %%}'%(mixin.name,mixin.args))
            self.visitBlock(mixin.block)
            self.buffer('{% end__pyjade_kwacro %}')
        elif mixin.block:
            raise CurrentlyNotSupported("The mixin blocks are not supported yet.")
        else:
            self.buffer('{%% __pyjade_usekwacro %s %s %%}'%(mixin.name,mixin.args))
        self.mixing -= 1
    def visitCode(self,code):
        if code.buffer:
            # Buffered code renders a value: {{ expr }}, force-escaped unless
            # the source marked it unescaped.
            val = code.val.lstrip()
            val = self.var_processor(val)
            self.buf.append('{{%s%s}}'%(val,'|force_escape' if code.escape else ''))
        else:
            # Unbuffered code becomes a template tag: {% stmt %}.
            self.buf.append('{%% %s %%}'%code.val)
        if code.block:
            self.visit(code.block)
            if not code.buffer:
                # Emit {% end<tag> %} for tags Django requires to be closed.
                codeTag = code.val.strip().split(' ',1)[0]
                if codeTag in self.autocloseCode:
                    self.buf.append('{%% end%s %%}'%codeTag)
    def attributes(self,attrs):
        # Attribute rendering is deferred to the __pyjade_attrs runtime tag.
        return "{%% __pyjade_attrs %s %%}"%attrs
    def visitVar(self, var, escape=False):
        # Render a variable reference, optionally with the |escape filter.
        var = self.var_processor(var)
        return ('%s%s%s%s' % (
            self.variable_start_string,
            var,
            '|escape' if escape else '',
            self.variable_end_string))
    def interpolate(self, text, escape=None):
        # Replace #{expr} / !{expr} interpolations with {{ expr }}; the "!"
        # form (group 2) suppresses escaping when escape is None.
        def repl(matchobj):
            if escape is None:
                if matchobj.group(2) == '!':
                    filter_string = ''
                else:
                    filter_string = '|escape'
            elif escape is True:
                filter_string = '|escape'
            elif escape is False:
                filter_string = ''
            return self.variable_start_string + matchobj.group(3) + \
                   filter_string + self.variable_end_string
        return self.RE_INTERPOLATE.sub(repl, text)
# Register the pyjade template tags as builtins across Django versions.
try:
    try:
        from django.template.base import add_to_builtins
    except ImportError: # Django < 1.8
        from django.template import add_to_builtins
    add_to_builtins('pyjade.ext.django.templatetags')
except ImportError:
    # Django 1.9 removed add_to_builtins and instead
    # provides a setting to specify builtins:
    # TEMPLATES['OPTIONS']['builtins'] = ['pyjade.ext.django.templatetags']
    pass
from django.utils.translation import trans_real
try:
from django.utils.encoding import force_text as to_text
except ImportError:
from django.utils.encoding import force_unicode as to_text
def decorate_templatize(func):
    """Wrap Django's templatize() so .jade sources are compiled to Django
    template syntax before translatable strings are extracted."""
    def templatize(src, origin=None):
        src = to_text(src, settings.FILE_CHARSET)
        # BUGFIX: origin defaults to None, so calling origin.endswith()
        # unconditionally raised AttributeError when no origin was given.
        if origin is not None and origin.endswith(".jade"):
            html = process(src,compiler=Compiler)
        else:
            html = src
        return func(html, origin)
    return templatize
# Install the wrapper over Django's makemessages extraction hook.
trans_real.templatize = decorate_templatize(trans_real.templatize)
# Optionally expose Django's markdown helper as a pyjade filter; silently
# skipped when django.contrib.markup is unavailable (removed in Django 1.6).
try:
    from django.contrib.markup.templatetags.markup import markdown
    @register_filter('markdown')
    def markdown_filter(x,y):
        # y carries filter arguments from pyjade; unused by markdown().
        return markdown(x)
except ImportError:
    pass
|
paradoxxxzero/pyjade
|
pyjade/ext/django/compiler.py
|
Python
|
mit
| 4,177
|
[
"VisIt"
] |
2f940498b336944200b1a93915d84565f19a64da0b460b6716a334c402c73ddc
|
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""The freesurfer module provides basic functions for interfacing with freesurfer tools.
Change directory to provide relative paths for doctests
>>> import os
>>> filepath = os.path.dirname( os.path.realpath( __file__ ) )
>>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data'))
>>> os.chdir(datadir)
"""
__docformat__ = 'restructuredtext'
import os
from nipype.utils.filemanip import fname_presuffix
from nipype.interfaces.freesurfer.base import FSCommand, FSTraitedSpec
from nipype.interfaces.base import (TraitedSpec, File,
traits, InputMultiPath)
from nipype.utils.misc import isdefined
class MRISPreprocInputSpec(FSTraitedSpec):
    """Input spec for FreeSurfer ``mris_preproc``; each trait's argstr maps it
    to the corresponding command-line flag."""
    out_file = File(argstr='--out %s', genfile=True,
                    desc='output filename')
    target = traits.Str(argstr='--target %s', mandatory=True,
                        desc='target subject name')
    hemi = traits.Enum('lh', 'rh', argstr='--hemi %s',
                       mandatory=True,
                       desc='hemisphere for source and target')
    # Exactly one of surf_measure / surf_measure_file / surf_area may be set.
    surf_measure = traits.Str(argstr='--meas %s',
                              xor = ('surf_measure', 'surf_measure_file', 'surf_area'),
                              desc='Use subject/surf/hemi.surf_measure as input')
    surf_area = traits.Str(argstr='--area %s',
                           xor = ('surf_measure', 'surf_measure_file', 'surf_area'),
                           desc='Extract vertex area from subject/surf/hemi.surfname to use as input.')
    # Exactly one of subjects / fsgd_file / subject_file may be set.
    subjects = traits.List(argstr='--s %s...',
                           xor = ('subjects', 'fsgd_file', 'subject_file'),
                           desc='subjects from who measures are calculated')
    fsgd_file = File(exists=True, argstr='--fsgd %s',
                     xor = ('subjects', 'fsgd_file', 'subject_file'),
                     desc='specify subjects using fsgd file')
    subject_file = File(exists=True, argstr='--f %s',
                        xor = ('subjects', 'fsgd_file', 'subject_file'),
                        desc='file specifying subjects separated by white space')
    surf_measure_file = InputMultiPath(File(exists=True), argstr='--is %s...',
                                       xor = ('surf_measure', 'surf_measure_file', 'surf_area'),
                                       desc='file alternative to surfmeas, still requires list of subjects')
    source_format = traits.Str(argstr='--srcfmt %s', desc='source format')
    surf_dir = traits.Str(argstr='--surfdir %s',
                          desc='alternative directory (instead of surf)')
    # Each tuple is (volume file, registration file), emitted as --iv vol reg.
    vol_measure_file = InputMultiPath(traits.Tuple(File(exists=True),File(exists=True)),
                                      argstr='--iv %s %s...',
                                      desc = 'list of volume measure and reg file tuples')
    proj_frac = traits.Float(argstr='--projfrac %s',
                             desc='projection fraction for vol2surf')
    # Smoothing on each surface: fwhm (mm) and num_iters are mutually exclusive.
    fwhm = traits.Float(argstr='--fwhm %f',
                        xor=['num_iters'],
                        desc='smooth by fwhm mm on the target surface')
    num_iters = traits.Int(argstr='--niters %d',
                           xor=['fwhm'],
                           desc='niters : smooth by niters on the target surface')
    fwhm_source = traits.Float(argstr='--fwhm-src %f',
                               xor=['num_iters_source'],
                               desc='smooth by fwhm mm on the source surface')
    num_iters_source = traits.Int(argstr='--niterssrc %d',
                                  xor=['fwhm_source'],
                                  desc='niters : smooth by niters on the source surface')
    smooth_cortex_only = traits.Bool(argstr='--smooth-cortex-only',
                                     desc='only smooth cortex (ie, exclude medial wall)')
class MRISPreprocOutputSpec(TraitedSpec):
    """Output spec for ``mris_preproc``: the concatenated output file."""
    out_file = File(exists=True, desc='preprocessed output file')
class MRISPreproc(FSCommand):
    """Use FreeSurfer mris_preproc to prepare a group of contrasts for
    a second level analysis
    Examples
    --------
    >>> preproc = MRISPreproc()
    >>> preproc.inputs.target = 'fsaverage'
    >>> preproc.inputs.hemi = 'lh'
    >>> preproc.inputs.vol_measure_file = [('cont1.nii', 'register.dat'), \
('cont1a.nii', 'register.dat')]
    >>> preproc.inputs.out_file = 'concatenated_file.mgz'
    >>> preproc.cmdline
    'mris_preproc --hemi lh --out concatenated_file.mgz --target fsaverage --iv cont1.nii register.dat --iv cont1a.nii register.dat'
    """
    _cmd = 'mris_preproc'
    input_spec = MRISPreprocInputSpec
    output_spec = MRISPreprocOutputSpec
    def _list_outputs(self):
        """Return the output filename; defaults to concat_<hemi>_<target>.mgz
        in the working directory when ``out_file`` is not set."""
        outputs = self.output_spec().get()
        outfile = self.inputs.out_file
        # BUGFIX: an explicitly-set out_file was never propagated to the
        # outputs dict; only the generated default path was assigned.
        outputs['out_file'] = outfile
        if not isdefined(outfile):
            outputs['out_file'] = os.path.join(os.getcwd(),
                                               'concat_%s_%s.mgz' % (self.inputs.hemi,
                                                                     self.inputs.target))
        return outputs
    def _gen_filename(self, name):
        # genfile hook: out_file is generated via _list_outputs above.
        if name == 'out_file':
            return self._list_outputs()[name]
        return None
class GLMFitInputSpec(FSTraitedSpec):
    """Input specification for the FreeSurfer ``mri_glmfit`` interface.

    Each trait maps to one command-line flag through its ``argstr``
    metadata; ``xor`` tuples mark mutually exclusive options.
    """
    glm_dir = traits.Str(argstr='--glmdir %s', desc='save outputs to dir',
                         genfile=True)
    in_file = File(desc='input 4D file', argstr='--y %s', mandatory=True,
                   copyfile=False)
    _design_xor = ('fsgd', 'design', 'one_sample')
    fsgd = traits.Tuple(File(exists=True), traits.Enum('doss', 'dods'),
                        argstr='--fsgd %s %s', xor=_design_xor,
                        desc='freesurfer descriptor file')
    design = File(exists=True, argstr='--X %s', xor=_design_xor,
                  desc='design matrix file')
    contrast = InputMultiPath(File(exists=True), argstr='--C %s...',
                              desc='contrast file')
    one_sample = traits.Bool(argstr='--osgm',
                             xor=('one_sample', 'fsgd', 'design', 'contrast'),
                             desc='construct X and C as a one-sample group mean')
    no_contrast_sok = traits.Bool(argstr='--no-contrasts-ok',
                                  desc='do not fail if no contrasts specified')
    per_voxel_reg = InputMultiPath(File(exists=True), argstr='--pvr %s...',
                                   desc='per-voxel regressors')
    self_reg = traits.Tuple(traits.Int, traits.Int, traits.Int,
                            argstr='--selfreg %d %d %d',
                            desc='self-regressor from index col row slice')
    weighted_ls = File(exists=True, argstr='--wls %s',
                       xor=('weight_file', 'weight_inv', 'weight_sqrt'),
                       desc='weighted least squares')
    fixed_fx_var = File(exists=True, argstr='--yffxvar %s',
                        desc='for fixed effects analysis')
    fixed_fx_dof = traits.Int(argstr='--ffxdof %d',
                              xor=['fixed_fx_dof_file'],
                              desc='dof for fixed effects analysis')
    fixed_fx_dof_file = File(argstr='--ffxdofdat %d',
                             xor=['fixed_fx_dof'],
                             desc='text file with dof for fixed effects analysis')
    weight_file = File(exists=True, xor=['weighted_ls'],
                       desc='weight for each input at each voxel')
    weight_inv = traits.Bool(argstr='--w-inv', desc='invert weights',
                             xor=['weighted_ls'])
    weight_sqrt = traits.Bool(argstr='--w-sqrt', desc='sqrt of weights',
                              xor=['weighted_ls'])
    fwhm = traits.Float(min=0, argstr='--fwhm %f',
                        desc='smooth input by fwhm')
    var_fwhm = traits.Float(min=0, argstr='--var-fwhm %f',
                            desc='smooth variance by fwhm')
    no_mask_smooth = traits.Bool(argstr='--no-mask-smooth',
                                 desc='do not mask when smoothing')
    no_est_fwhm = traits.Bool(argstr='--no-est-fwhm',
                              desc='turn off FWHM output estimation')
    mask_file = File(exists=True, argstr='--mask %s', desc='binary mask')
    label_file = File(exists=True, argstr='--label %s',
                      xor=['cortex'],
                      desc='use label as mask, surfaces only')
    cortex = traits.Bool(argstr='--cortex',
                         xor=['label_file'],
                         desc='use subjects ?h.cortex.label as label')
    invert_mask = traits.Bool(argstr='--mask-inv',
                              desc='invert mask')
    prune = traits.Bool(argstr='--prune',
                        desc='remove voxels that do not have a non-zero value at each frame (def)')
    # Bug fix: the xor entries below previously referenced non-existent
    # trait names ('prunethresh' / 'noprune'); use the real trait names so
    # the mutual exclusion is actually enforced.
    no_prune = traits.Bool(argstr='--no-prune',
                           xor=['prune_thresh'],
                           desc='do not prune')
    prune_thresh = traits.Float(argstr='--prune_thr %f',
                                xor=['no_prune'],
                                desc='prune threshold. Default is FLT_MIN')
    compute_log_y = traits.Bool(argstr='--logy',
                                desc='compute natural log of y prior to analysis')
    save_estimate = traits.Bool(argstr='--yhat-save',
                                desc='save signal estimate (yhat)')
    save_residual = traits.Bool(argstr='--eres-save',
                                desc='save residual error (eres)')
    save_res_corr_mtx = traits.Bool(argstr='--eres-scm',
                                    desc='save residual error spatial correlation matrix (eres.scm). Big!')
    surf = traits.Tuple(traits.Str, traits.Enum('lh', 'rh'),
                        traits.Enum('white', 'pial', 'smoothwm', 'inflated'),
                        argstr='--surf %s %s %s',
                        desc='needed for some flags (uses white by default)')
    simulation = traits.Tuple(traits.Enum('perm', 'mc-full', 'mc-z'),
                              traits.Int(min=1), traits.Float, traits.Str,
                              argstr='--sim %s %d %f %s',
                              desc='nulltype nsim thresh csdbasename')
    sim_sign = traits.Enum('abs', 'pos', 'neg', argstr='--sim-sign %s',
                           desc='abs, pos, or neg')
    uniform = traits.Tuple(traits.Float, traits.Float,
                           argstr='--uniform %f %f',
                           desc='use uniform distribution instead of gaussian')
    pca = traits.Bool(argstr='--pca',
                      desc='perform pca/svd analysis on residual')
    calc_AR1 = traits.Bool(argstr='--tar1',
                           desc='compute and save temporal AR1 of residual')
    save_cond = traits.Bool(argstr='--save-cond',
                            desc='flag to save design matrix condition at each voxel')
    vox_dump = traits.Tuple(traits.Int, traits.Int, traits.Int,
                            argstr='--voxdump %d %d %d',
                            desc='dump voxel GLM and exit')
    seed = traits.Int(argstr='--seed %d', desc='used for synthesizing noise')
    synth = traits.Bool(argstr='--synth', desc='replace input with gaussian')
    resynth_test = traits.Int(argstr='--resynthtest %d', desc='test GLM by resynthsis')
    profile = traits.Int(argstr='--profile %d', desc='niters : test speed')
    force_perm = traits.Bool(argstr='--perm-force',
                             desc='force perumtation test, even when design matrix is not orthog')
    # Bug fix: 'diag' was missing the argstr keyword, so the format string
    # was being passed positionally (as the trait's default value) and the
    # --diag flag was never rendered on the command line.
    diag = traits.Int(argstr='--diag %d', desc='Gdiag_no : set diagnositc level')
    diag_cluster = traits.Bool(argstr='--diag-cluster',
                               desc='save sig volume and exit from first sim loop')
    debug = traits.Bool(argstr='--debug', desc='turn on debugging')
    check_opts = traits.Bool(argstr='--checkopts',
                             desc="don't run anything, just check options and exit")
    allow_repeated_subjects = traits.Bool(argstr='--allowsubjrep',
                                          desc='allow subject names to repeat in the fsgd file (must appear before --fsgd')
    allow_ill_cond = traits.Bool(argstr='--illcond',
                                 desc='allow ill-conditioned design matrices')
    sim_done_file = File(argstr='--sim-done %s',
                         desc='create file when simulation finished')
class GLMFit(FSCommand):
    """Use FreeSurfer mri_glmfit to prepare a group of contrasts for
    a second level analysis

    Examples
    --------
    >>> glmfit = GLMFit()
    >>> glmfit.inputs.in_file = 'functional.nii'
    >>> glmfit.inputs.one_sample = True
    >>> glmfit.cmdline == 'mri_glmfit --glmdir %s --y functional.nii --osgm'%os.getcwd()
    True
    """
    _cmd = 'mri_glmfit'
    input_spec = GLMFitInputSpec

    def _gen_filename(self, name):
        # Only the GLM output directory is auto-generated; it defaults to
        # the current working directory.
        return os.getcwd() if name == 'glm_dir' else None
class OneSampleTTest(GLMFit):
    """Convenience :class:`GLMFit` subclass preconfigured for a
    one-sample group mean test (``--osgm``)."""

    def __init__(self, **kwargs):
        super(OneSampleTTest, self).__init__(**kwargs)
        # Always construct the design/contrast as a one-sample group mean.
        self.inputs.one_sample = True
class BinarizeInputSpec(FSTraitedSpec):
    """Input specification for the FreeSurfer ``mri_binarize`` interface."""
    in_file = File(exists=True, argstr='--i %s', mandatory=True,
                   copyfile=False, desc='input volume')
    min = traits.Float(argstr='--min %f',
                       desc='min thresh')
    max = traits.Float(argstr='--max %f',
                       desc='max thresh')
    rmin = traits.Float(argstr='--rmin %f',
                        desc='compute min based on rmin*globalmean')
    rmax = traits.Float(argstr='--rmax %f',
                        desc='compute max based on rmax*globalmean')
    match = traits.List(traits.Int, argstr='--match %d...',
                        desc='match instead of threshold')
    wm = traits.Bool(argstr='--wm',
                     desc='set match vals to 2 and 41 (aseg for cerebral WM)')
    ventricles = traits.Bool(argstr='--ventricles',
                             desc='set match vals those for aseg ventricles+choroid (not 4th)')
    wm_ven_csf = traits.Bool(argstr='--wm+vcsf',
                             desc='WM and ventricular CSF, including choroid (not 4th)')
    binary_file = File(argstr='--o %s', genfile=True,
                       desc='binary output volume')
    out_type = traits.Enum('nii', 'nii.gz', 'mgz', argstr='',
                           desc='output file type')
    count_file = traits.Either(traits.Bool, File,
                               argstr='--count %s',
                               desc='save number of hits in ascii file (hits,ntotvox,pct)')
    bin_val = traits.Int(argstr='--binval %d',
                         desc='set vox within thresh to val (default is 1)')
    bin_val_not = traits.Int(argstr='--binvalnot %d',
                             desc='set vox outside range to val (default is 0)')
    invert = traits.Bool(argstr='--inv',
                         desc='set binval=0, binvalnot=1')
    frame_no = traits.Int(argstr='--frame %s',
                          desc='use 0-based frame of input (default is 0)')
    merge_file = File(exists=True, argstr='--merge %s',
                      desc='merge with mergevol')
    # Bug fix: the argstr was the hard-coded string '--mask maskvol', so
    # the user's mask path was never substituted onto the command line.
    mask_file = File(exists=True, argstr='--mask %s',
                     desc='must be within mask')
    mask_thresh = traits.Float(argstr='--mask-thresh %f',
                               desc='set thresh for mask')
    abs = traits.Bool(argstr='--abs',
                      desc='take abs of invol first (ie, make unsigned)')
    bin_col_num = traits.Bool(argstr='--bincol',
                              desc='set binarized voxel value to its column number')
    zero_edges = traits.Bool(argstr='--zero-edges',
                             desc='zero the edge voxels')
    zero_slice_edge = traits.Bool(argstr='--zero-slice-edges',
                                  desc='zero the edge slice voxels')
    dilate = traits.Int(argstr='--dilate %d',
                        desc='niters: dilate binarization in 3D')
    erode = traits.Int(argstr='--erode %d',
                       desc='nerode: erode binarization in 3D (after any dilation)')
    erode2d = traits.Int(argstr='--erode2d %d',
                         desc='nerode2d: erode binarization in 2D (after any 3D erosion)')
class BinarizeOutputSpec(TraitedSpec):
    """Output specification for :class:`Binarize` (mri_binarize)."""
    binary_file = File(exists=True, desc='binarized output volume')
    count_file = File(desc='ascii file containing number of hits')
class Binarize(FSCommand):
    """Use FreeSurfer mri_binarize to threshold an input volume
    Examples
    --------
    >>> binvol = Binarize(in_file='structural.nii', min=10, binary_file='foo_out.nii')
    >>> binvol.cmdline
    'mri_binarize --o foo_out.nii --i structural.nii --min 10.000000'
    """
    _cmd = 'mri_binarize'
    input_spec = BinarizeInputSpec
    output_spec = BinarizeOutputSpec

    def _list_outputs(self):
        # Derive the binary output name from the input file when the user
        # did not supply one; honor out_type for the extension if set.
        outputs = self.output_spec().get()
        outfile = self.inputs.binary_file
        if not isdefined(outfile):
            if isdefined(self.inputs.out_type):
                outfile = fname_presuffix(self.inputs.in_file,
                                          newpath=os.getcwd(),
                                          suffix='.'.join(('_thresh',
                                                           self.inputs.out_type)),
                                          use_ext=False)
            else:
                outfile = fname_presuffix(self.inputs.in_file,
                                          newpath=os.getcwd(),
                                          suffix='_thresh')
        outputs['binary_file'] = outfile
        value = self.inputs.count_file
        if isdefined(value):
            if isinstance(value, bool):
                if value:
                    # count_file=True -> generate a default text-file name;
                    # count_file=False intentionally leaves the output unset.
                    outputs['count_file'] = fname_presuffix(self.inputs.in_file,
                                                            suffix='_count.txt',
                                                            newpath=os.getcwd(),
                                                            use_ext=False)
            else:
                # count_file was given as an explicit filename
                outputs['count_file'] = value
        return outputs

    def _format_arg(self, name, spec, value):
        # count_file may be a bool; substitute the generated filename then.
        if name == 'count_file':
            if isinstance(value, bool):
                fname = self._list_outputs()[name]
            else:
                fname = value
            return spec.argstr % fname
        # out_type only influences the generated output filename; it has no
        # command-line flag of its own.
        if name == 'out_type':
            return ''
        return super(Binarize, self)._format_arg(name, spec, value)

    def _gen_filename(self, name):
        if name == 'binary_file':
            return self._list_outputs()[name]
        return None
class ConcatenateInputSpec(FSTraitedSpec):
    """Input specification for the FreeSurfer ``mri_concat`` interface."""
    in_files = InputMultiPath(File(exists=True),
                              desc='Individual volumes to be concatenated',
                              argstr='--i %s...', mandatory=True)
    concatenated_file = File(desc='Output volume', argstr='--o %s',
                             genfile=True)
    sign = traits.Enum('abs', 'pos', 'neg', argstr='--%s',
                       desc='Take only pos or neg voxles from input, or take abs')
    stats = traits.Enum('sum', 'var', 'std', 'max', 'min', 'mean', argstr='--%s',
                        desc='Compute the sum, var, std, max, min or mean of the input volumes')
    paired_stats = traits.Enum('sum', 'avg', 'diff', 'diff-norm', 'diff-norm1',
                               'diff-norm2', argstr='--paired-%s',
                               desc='Compute paired sum, avg, or diff')
    gmean = traits.Int(argstr='--gmean %d',
                       desc='create matrix to average Ng groups, Nper=Ntot/Ng')
    mean_div_n = traits.Bool(argstr='--mean-div-n',
                             desc='compute mean/nframes (good for var)')
    multiply_by = traits.Float(argstr='--mul %f',
                               desc='Multiply input volume by some amount')
    add_val = traits.Float(argstr='--add %f',
                           desc='Add some amount to the input volume')
    multiply_matrix_file = File(exists=True, argstr='--mtx %s',
                                desc='Multiply input by an ascii matrix in file')
    combine = traits.Bool(argstr='--combine',
                          desc='Combine non-zero values into single frame volume')
    keep_dtype = traits.Bool(argstr='--keep-datatype',
                             desc='Keep voxelwise precision type (default is float')
    max_bonfcor = traits.Bool(argstr='--max-bonfcor',
                              desc='Compute max and bonferroni correct (assumes -log10(ps))')
    max_index = traits.Bool(argstr='--max-index',
                            desc='Compute the index of max voxel in concatenated volumes')
    mask_file = File(exists=True, argstr='--mask %s', desc='Mask input with a volume')
    vote = traits.Bool(argstr='--vote',
                       desc='Most frequent value at each voxel and fraction of occurances')
    sort = traits.Bool(argstr='--sort',
                       desc='Sort each voxel by ascending frame value')
class ConcatenateOutputSpec(TraitedSpec):
    """Output specification for :class:`Concatenate` (mri_concat)."""
    concatenated_file = File(exists=True,
                             desc='Path/name of the output volume')
class Concatenate(FSCommand):
    """Use Freesurfer mri_concat to combine several input volumes
    into one output volume. Can concatenate by frames, or compute
    a variety of statistics on the input volumes.
    Examples
    --------
    Combine two input volumes into one volume with two frames
    >>> concat = Concatenate()
    >>> concat.inputs.in_files = ['cont1.nii','cont2.nii']
    >>> concat.inputs.concatenated_file = 'bar.nii'
    >>> concat.cmdline
    'mri_concat --o bar.nii --i cont1.nii --i cont2.nii'
    """
    _cmd = 'mri_concat'
    input_spec = ConcatenateInputSpec
    output_spec = ConcatenateOutputSpec

    def _list_outputs(self):
        # Report the user-specified output when set; otherwise fall back to
        # a fixed default name in the working directory.
        outputs = self.output_spec().get()
        user_file = self.inputs.concatenated_file
        if isdefined(user_file):
            outputs['concatenated_file'] = user_file
        else:
            outputs['concatenated_file'] = os.path.join(os.getcwd(),
                                                        'concat_output.nii.gz')
        return outputs

    def _gen_filename(self, name):
        # Only 'concatenated_file' is auto-generated.
        if name != 'concatenated_file':
            return None
        return self._list_outputs()[name]
class SegStatsInputSpec(FSTraitedSpec):
    """Input specification for the FreeSurfer ``mri_segstats`` interface."""
    _xor_inputs = ('segmentation_file', 'annot', 'surf_label')
    segmentation_file = File(exists=True, argstr='--seg %s', xor=_xor_inputs,
                             mandatory=True, desc='segmentation volume path')
    annot = traits.Tuple(traits.Str, traits.Enum('lh', 'rh'), traits.Str,
                         argstr='--annot %s %s %s', xor=_xor_inputs,
                         mandatory=True,
                         desc='subject hemi parc : use surface parcellation')
    surf_label = traits.Tuple(traits.Str, traits.Enum('lh', 'rh'), traits.Str,
                              argstr='--slabel %s %s %s', xor=_xor_inputs,
                              mandatory=True,
                              desc='subject hemi label : use surface label')
    summary_file = File(argstr='--sum %s', genfile=True,
                        desc='Segmentation stats summary table file')
    partial_volume_file = File(exists=True, argstr='--pv %f',
                               desc='Compensate for partial voluming')
    in_file = File(exists=True, argstr='--i %s',
                   desc='Use the segmentation to report stats on this volume')
    frame = traits.Int(argstr='--frame %d',
                       desc='Report stats on nth frame of input volume')
    multiply = traits.Float(argstr='--mul %f', desc='multiply input by val')
    calc_snr = traits.Bool(argstr='--snr', desc='save mean/std as extra column in output table')
    calc_power = traits.Enum('sqr', 'sqrt', argstr='--%s',
                             desc='Compute either the sqr or the sqrt of the input')
    _ctab_inputs = ('color_table_file', 'default_color_table', 'gca_color_table')
    color_table_file = File(exists=True, argstr='--ctab %s', xor=_ctab_inputs,
                            desc='color table file with seg id names')
    default_color_table = traits.Bool(argstr='--ctab-default', xor=_ctab_inputs,
                                      desc='use $FREESURFER_HOME/FreeSurferColorLUT.txt')
    gca_color_table = File(exists=True, argstr='--ctab-gca %s', xor=_ctab_inputs,
                           desc='get color table from GCA (CMA)')
    segment_id = traits.List(argstr='--id %s...', desc='Manually specify segmentation ids')
    exclude_id = traits.Int(argstr='--excludeid %d', desc='Exclude seg id from report')
    exclude_ctx_gm_wm = traits.Bool(argstr='--excl-ctxgmwm',
                                    desc='exclude cortical gray and white matter')
    wm_vol_from_surf = traits.Bool(argstr='--surf-wm-vol', desc='Compute wm volume from surf')
    cortex_vol_from_surf = traits.Bool(argstr='--surf-ctx-vol', desc='Compute cortex volume from surf')
    non_empty_only = traits.Bool(argstr='--nonempty', desc='Only report nonempty segmentations')
    mask_file = File(exists=True, argstr='--mask %s',
                     desc='Mask volume (same size as seg')
    mask_thresh = traits.Float(argstr='--maskthresh %f',
                               desc='binarize mask with this threshold <0.5>')
    # Bug fix: mask_sign, mask_frame, brain_vol and etiv_only below passed
    # their command-line format string positionally (as an extra Enum value
    # or an Int default) instead of as argstr metadata, so the flags were
    # never rendered on the command line.
    mask_sign = traits.Enum('abs', 'pos', 'neg', argstr='--masksign %s',
                            desc='Sign for mask threshold: pos, neg, or abs')
    mask_frame = traits.Int(argstr='--maskframe %d',
                            requires=['mask_file'],
                            desc='Mask with this (0 based) frame of the mask volume')
    mask_invert = traits.Bool(argstr='--maskinvert', desc='Invert binarized mask volume')
    mask_erode = traits.Int(argstr='--maskerode %d', desc='Erode mask by some amount')
    brain_vol = traits.Enum('brain-vol-from-seg', 'brainmask', argstr='--%s',
                            desc='Compute brain volume either with ``brainmask`` or ``brain-vol-from-seg``')
    etiv = traits.Bool(argstr='--etiv', desc='Compute ICV from talairach transform')
    etiv_only = traits.Enum('etiv', 'old-etiv', argstr='--%s-only',
                            desc='Compute etiv and exit. Use ``etiv`` or ``old-etiv``')
    avgwf_txt_file = traits.Either(traits.Bool, File, argstr='--avgwf %s',
                                   desc='Save average waveform into file (bool or filename)')
    avgwf_file = traits.Either(traits.Bool, File, argstr='--avgwfvol %s',
                               desc='Save as binary volume (bool or filename)')
    sf_avg_file = traits.Either(traits.Bool, File, argstr='--sfavg %s',
                                desc='Save mean across space and time')
    vox = traits.List(traits.Int, argstr='--vox %s',
                      desc='Replace seg with all 0s except at C R S (three int inputs)')
class SegStatsOutputSpec(TraitedSpec):
    """Output specification for :class:`SegStats` (mri_segstats)."""
    summary_file = File(exists=True, desc='Segmentation summary statistics table')
    avgwf_txt_file = File(desc='Text file with functional statistics averaged over segs')
    avgwf_file = File(desc='Volume with functional statistics averaged over segs')
    # Typo fix in the user-facing description: 'framss' -> 'frames'.
    sf_avg_file = File(desc='Text file with func statistics averaged over segs and frames')
class SegStats(FSCommand):
    """Use FreeSurfer mri_segstats for ROI analysis
    Examples
    --------
    >>> import nipype.interfaces.freesurfer as fs
    >>> ss = fs.SegStats()
    >>> ss.inputs.annot = ('PWS04', 'lh', 'aparc')
    >>> ss.inputs.environ['SUBJECTS_DIR'] = '/somepath/FSDATA'
    >>> ss.inputs.avgwf_txt_file = './avgwf.txt'
    >>> ss.inputs.summary_file = './summary.stats'
    >>> ss.cmdline
    'mri_segstats --annot PWS04 lh aparc --avgwf ./avgwf.txt --sum ./summary.stats'
    """
    _cmd = 'mri_segstats'
    input_spec = SegStatsInputSpec
    output_spec = SegStatsOutputSpec

    def _list_outputs(self):
        outputs = self.output_spec().get()
        outputs['summary_file'] = self.inputs.summary_file
        if not isdefined(outputs['summary_file']):
            outputs['summary_file'] = os.path.join(os.getcwd(), 'summary.stats')
        # Suffixes used to derive default output names from the source input.
        suffices = dict(avgwf_txt_file='_avgwf.txt', avgwf_file='_avgwf.nii.gz',
                        sf_avg_file='sfavg.txt')
        # Derive the stem 'src' from whichever seg source was provided.
        # NOTE(review): if none of segmentation_file/annot/surf_label is set
        # and a boolean avgwf/sf option is, 'src' would be unbound here; the
        # mandatory/xor metadata on the input spec presumably prevents that
        # case — confirm.
        if isdefined(self.inputs.segmentation_file):
            _, src = os.path.split(self.inputs.segmentation_file)
        if isdefined(self.inputs.annot):
            src = '_'.join(self.inputs.annot)
        if isdefined(self.inputs.surf_label):
            src = '_'.join(self.inputs.surf_label)
        for name, suffix in suffices.items():
            value = getattr(self.inputs, name)
            if isdefined(value):
                if isinstance(value, bool):
                    # Boolean True -> generate a default filename from src.
                    outputs[name] = fname_presuffix(src, suffix=suffix,
                                                    newpath=os.getcwd(),
                                                    use_ext=False)
                else:
                    # Explicit filename supplied by the user.
                    outputs[name] = value
        return outputs

    def _format_arg(self, name, spec, value):
        # The avgwf/sfavg options accept a bool; substitute the generated
        # filename in that case.
        if name in ['avgwf_txt_file', 'avgwf_file', 'sf_avg_file']:
            if isinstance(value, bool):
                fname = self._list_outputs()[name]
            else:
                fname = value
            return spec.argstr % fname
        return super(SegStats, self)._format_arg(name, spec, value)

    def _gen_filename(self, name):
        if name == 'summary_file':
            return self._list_outputs()[name]
        return None
class Label2VolInputSpec(FSTraitedSpec):
    """Input specification for the FreeSurfer ``mri_label2vol`` interface."""
    label_file = InputMultiPath(File(exists=True), argstr='--label %s...',
                                xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'),
                                copyfile=False,
                                mandatory=True,
                                desc='list of label files')
    # Bug fix: 'requires' referenced a non-existent trait name 'subjectid';
    # the actual trait below is 'subject_id'.
    annot_file = File(exists=True, argstr='--annot %s',
                      xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'),
                      requires=('subject_id', 'hemi'),
                      mandatory=True,
                      copyfile=False,
                      desc='surface annotation file')
    seg_file = File(exists=True, argstr='--seg %s',
                    xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'),
                    mandatory=True,
                    copyfile=False,
                    desc='segmentation file')
    aparc_aseg = traits.Bool(argstr='--aparc+aseg',
                             xor=('label_file', 'annot_file', 'seg_file', 'aparc_aseg'),
                             mandatory=True,
                             desc='use aparc+aseg.mgz in subjectdir as seg')
    template_file = File(exists=True, argstr='--temp %s', mandatory=True,
                         desc='output template volume')
    reg_file = File(exists=True, argstr='--reg %s',
                    xor=('reg_file', 'reg_header', 'identity'),
                    desc='tkregister style matrix VolXYZ = R*LabelXYZ')
    reg_header = File(exists=True, argstr='--regheader %s',
                      xor=('reg_file', 'reg_header', 'identity'),
                      desc='label template volume')
    identity = traits.Bool(argstr='--identity',
                           xor=('reg_file', 'reg_header', 'identity'),
                           desc='set R=I')
    invert_mtx = traits.Bool(argstr='--invertmtx',
                             desc='Invert the registration matrix')
    fill_thresh = traits.Range(0., 1., argstr='--fillthresh %.f',
                               desc='thresh : between 0 and 1')
    label_voxel_volume = traits.Float(argstr='--labvoxvol %f',
                                      desc='volume of each label point (def 1mm3)')
    # Bug fix: 'requries' was a typo for 'requires' (and 'subjectid' for
    # 'subject_id'), so the metadata was silently ignored as an unknown
    # keyword instead of enforcing the dependency.
    proj = traits.Tuple(traits.Enum('abs', 'frac'), traits.Float,
                        traits.Float, traits.Float,
                        argstr='--proj %s %f %f %f',
                        requires=('subject_id', 'hemi'),
                        desc='project along surface normal')
    subject_id = traits.Str(argstr='--subject %s',
                            desc='subject id')
    hemi = traits.Enum('lh', 'rh', argstr='--hemi %s',
                       desc='hemisphere to use lh or rh')
    surface = traits.Str(argstr='--surf %s',
                         desc='use surface instead of white')
    vol_label_file = File(argstr='--o %s', genfile=True,
                          desc='output volume')
    label_hit_file = File(argstr='--hits %s',
                          desc='file with each frame is nhits for a label')
    map_label_stat = File(argstr='--label-stat %s',
                          desc='map the label stats field into the vol')
    native_vox2ras = traits.Bool(argstr='--native-vox2ras',
                                 desc='use native vox2ras xform instead of tkregister-style')
class Label2VolOutputSpec(TraitedSpec):
    """Output specification for :class:`Label2Vol` (mri_label2vol)."""
    vol_label_file = File(exists=True, desc='output volume')
class Label2Vol(FSCommand):
    """Make a binary volume from a Freesurfer label
    Examples
    --------
    >>> binvol = Label2Vol(label_file='cortex.label', template_file='structural.nii', reg_file='register.dat', fill_thresh=0.5, vol_label_file='foo_out.nii')
    >>> binvol.cmdline
    'mri_label2vol --fillthresh 0 --label cortex.label --reg register.dat --temp structural.nii --o foo_out.nii'
    """
    _cmd = 'mri_label2vol'
    input_spec = Label2VolInputSpec
    output_spec = Label2VolOutputSpec

    def _list_outputs(self):
        """Derive the output volume name from whichever label source is set."""
        outputs = self.output_spec().get()
        outfile = self.inputs.vol_label_file
        if not isdefined(outfile):
            for key in ['label_file', 'annot_file', 'seg_file']:
                # Bug fix: the loop previously tested 'label_file' on every
                # iteration instead of the current key, and never handled
                # label_file being a list (InputMultiPath).
                if isdefined(getattr(self.inputs, key)):
                    path = getattr(self.inputs, key)
                    if isinstance(path, list):
                        # Use the first label file as the naming stem.
                        path = path[0]
                    _, src = os.path.split(path)
            # Bug fix: this previously read 'self.inputs.aparcaaseg', a
            # non-existent trait (the spec defines 'aparc_aseg'), raising
            # AttributeError whenever no output name was supplied.
            if isdefined(self.inputs.aparc_aseg):
                src = 'aparc+aseg.mgz'
            outfile = fname_presuffix(src, suffix='_vol.nii.gz',
                                      newpath=os.getcwd(),
                                      use_ext=False)
        outputs['vol_label_file'] = outfile
        return outputs

    def _gen_filename(self, name):
        if name == 'vol_label_file':
            return self._list_outputs()[name]
        return None
|
satra/NiPypeold
|
nipype/interfaces/freesurfer/model.py
|
Python
|
bsd-3-clause
| 33,883
|
[
"Gaussian"
] |
e1f3e1167ac2a5184be5124702f82dd832370e2b4e63605eb9729a83a8a65e33
|
"""pdbquery RAPD plugin"""
"""
This file is part of RAPD

Copyright (C) 2017, Cornell University
All rights reserved.
RAPD is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, version 3.
RAPD is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
# Module metadata
__created__ = "2019-02-21"
__maintainer__ = "Jon Schuermann"
__email__ = "schuerjp@anl.gov"
__status__ = "Development"

# This is an active RAPD plugin
RAPD_PLUGIN = True

# This plugin's type
DATA_TYPE = "MX"
PLUGIN_TYPE = "MR"
PLUGIN_SUBTYPE = "EXPERIMENTAL"

# A unique UUID for this handler (uuid.uuid1().hex)
ID = "c33b"
VERSION = "2.0.0"
# Standard imports
from distutils.spawn import find_executable
import glob
import logging
from multiprocessing import cpu_count
from threading import Thread
import os
from pprint import pprint
import shutil
import time
import importlib
import random
# RAPD
from bson.objectid import ObjectId
from plugins.subcontractors.rapd_phaser import run_phaser
from plugins.subcontractors.rapd_cctbx import get_pdb_info, get_mtz_info, get_res
from plugins.get_cif.plugin import check_pdbq
from utils import archive
import utils.credits as rcredits
import utils.exceptions as exceptions
import utils.global_vars as rglobals
from utils.text import json
import utils.xutils as xutils
from utils.processes import local_subprocess, mp_pool, mp_manager
import info
# NE-CAT REST PDB server
#PDBQ_SERVER = rglobals.PDBQ_SERVER
# Software dependencies
VERSIONS = {
"gnuplot": (
"gnuplot 4.2",
"gnuplot 5.0",
)
}
class RapdPlugin(Thread):
    """
    RAPD plugin class

    Command format:
    {
        "command": "MR",
        "directories":
            {
                "work": ""          # Where to perform the work
            },
        "input_data": {
            'data_file': file,
            'struct_file': file },
        "preferences": {}           # Settings for calculations
    }
    """
    # Run rigid-body refinement after MR.
    #rigid = False
    # Parameters
    large_cell = False      # set True when est. residues in the AU exceed 5000
    laue = False            # Laue subgroup(s) derived from the input spacegroup
    dres = 0.0              # high-resolution limit of the input data
    # Holders for passed-in info
    command = None
    #preferences = {}
    # Holders for launched Phaser jobs
    jobs = {}
    # Holders for results
    phaser_results = {}
    # Results that are sent back
    results = {}
    # Initial status (percent progress)
    status = 1
    status_incr = 1
    redis = False           # redis connection, set by connect_to_redis()
    pool = False            # multiprocessing pool when no cluster is available
    batch_queue = False     # cluster queue name when a cluster is available
    manager = False         # multiprocessing manager for queues
    # Timers for processes
    phaser_timer = rglobals.PHASER_TIMEOUT
def __init__(self, command, site=False, processed_results=False, tprint=False, logger=False, verbosity=False):
    """Initialize the plugin.

    command -- dict describing the MR job (see class docstring)
    site -- site settings object (False when running standalone)
    processed_results -- results dict of the parent dataset, if any
    tprint -- terminal-printing callable (a no-op is installed if absent)
    logger -- logging instance (a default RAPDLogger is used if absent)
    verbosity -- verbosity flag stored on self.verbose
    """
    Thread.__init__(self)
    # If the logging instance is passed in...
    if logger:
        self.logger = logger
    else:
        # Otherwise get the logger Instance
        self.logger = logging.getLogger("RAPDLogger")
        self.logger.debug("__init__")
    # Keep track of start time
    self.start_time = time.time()
    # Store tprint for use throughout
    if tprint:
        self.tprint = tprint
    # Dead end if no tprint passed
    else:
        def func(arg=False, level=False, verbosity=False, color=False):
            """Dummy function"""
            pass
        self.tprint = func
    # Used for sending results back to DB referencing a dataset
    self.processed_results = processed_results
    # Some logging
    self.logger.info(command)
    self.verbose = verbosity
    # Store passed-in variables
    self.site = site
    self.command = command
    self.preferences = self.command.get("preferences", {})
    # Params
    self.working_dir = self.command["directories"].get("work", os.getcwd())
    # Input data MTZ file
    self.data_file = xutils.convert_unicode(self.command["input_data"].get("data_file"))
    # Input PDB/mmCIF file or PDB code.
    self.struct_file = xutils.convert_unicode(self.command["input_data"].get("struct_file"))
    # Save preferences
    self.clean = self.preferences.get("clean", True)
    # Calc ADF for each solution (creates a lot of big map files).
    self.adf = self.preferences.get("adf", False)
    # Check if there is a computer cluster and load adapter.
    self.computer_cluster = xutils.load_cluster_adapter(self)
    if self.computer_cluster:
        self.launcher = self.computer_cluster.process_cluster
        self.batch_queue = self.computer_cluster.check_queue(self.command.get('command'))
    else:
        # if NOT using a computer cluster setup a multiprocessing.pool and manager for queues.
        self.launcher = local_subprocess
        self.pool = mp_pool(self.preferences.get("nproc", cpu_count()-1))
        self.manager = mp_manager()
    # Set Python path for subcontractors.rapd_phaser
    self.rapd_python = "rapd.python"
    if self.site and hasattr(self.site, "RAPD_PYTHON_PATH"):
        self.rapd_python = self.site.RAPD_PYTHON_PATH
def run(self):
    """Execution path of the plugin"""
    # The three phases execute strictly in this order.
    for phase in (self.preprocess, self.process, self.postprocess):
        phase()
def preprocess(self):
    """Set up for plugin action.

    Reads cell/spacegroup/resolution information from the input MTZ,
    resolves the structure input (PDB/mmCIF file or 4-character PDB
    code), and prepares working files and connections.
    """
    self.logger.debug("preprocess")
    # Record a starting time
    self.start_time = time.time()
    # Register progress
    self.tprint(arg=0, level="progress")
    # Construct the results
    self.construct_results()
    # Let everyone know we are working on this
    # NOTE(review): send_results is defined without extra parameters later
    # in this module — confirm this call's argument is expected.
    self.send_results(self.results)
    # Get running instance of PDB server
    self.repository = check_pdbq(self.tprint, self.logger)
    # Change into working directory
    xutils.create_folder(self.working_dir)
    # Copy data file to working dir to minimize char length in Phaser.
    data_path = os.path.join(os.getcwd(), os.path.basename(self.data_file))
    if not os.path.exists(data_path):
        os.symlink(self.data_file, data_path)
    self.data_file = data_path
    # Glean some information on the input file
    input_spacegroup, cell, volume = get_mtz_info(self.data_file)
    # Get high resolution limt from MTZ
    self.dres = get_res(self.data_file)
    # Determine the Laue group from the MTZ
    input_spacegroup_num = xutils.convert_spacegroup(input_spacegroup)
    self.laue = xutils.get_sub_groups(input_spacegroup_num, "laue")
    # Throw some information into the terminal
    self.tprint("\nDataset information", color="blue", level=10)
    self.tprint(" Data file: %s" % self.data_file, level=10, color="white")
    self.tprint(" Spacegroup: %s (%s)" % (input_spacegroup, input_spacegroup_num),
                level=10,
                color="white")
    self.tprint(" Cell: %f.2 %f.2 %f.2 %f.2 %f.2 %f.2" % tuple(cell),
                level=10,
                color="white")
    self.tprint(" Volume: %f.1" % volume, level=10, color="white")
    self.tprint(" Resolution: %f.1" % self.dres, level=10, color="white")
    # self.tprint(" Subgroups: %s" % self.laue, level=10, color="white")
    # Set by number of residues in AU. Ribosome (70s) is 24k.
    est_res_number = xutils.calc_res_number(input_spacegroup,
                                            se=False,
                                            volume=volume,
                                            sample_type=self.preferences.get("type", "protein"),
                                            solvent_content=self.preferences.get("solvent_content", 0.55))
    if est_res_number > 5000:
        # Large cells get more generous Phaser timeouts.
        self.large_cell = True
        self.phaser_timer = self.phaser_timer * 1.5
    # Check if mmCIF, PDB, or PDB code and get file.
    if not self.struct_file:
        self.postprocess_invalid_input_file()
    # Check if mmCIF, PDB, or PDB code and get file.
    elif self.struct_file[-4:].upper() in ('.PDB', '.CIF'):
        # Copy to local directory because of character limit in Phaser.
        struc_path = os.path.join(os.getcwd(), os.path.basename(self.struct_file))
        if not os.path.exists(struc_path):
            os.symlink(self.struct_file, struc_path)
        self.struct_file = struc_path
        # Get PDB info
        self.pdb_info = get_pdb_info(self.struct_file, self.data_file, self.dres)
    # Use PDB code to download mmCIF file
    elif len(self.struct_file) == 4:
        # Download file from PDB code and get PDB info
        repository = check_pdbq(self.tprint, self.logger)
        self.struct_file = repository.download_cif(self.struct_file, os.path.join(os.getcwd(), self.struct_file.lower()+'.cif'))
        self.pdb_info = get_pdb_info(self.struct_file, self.data_file, self.dres)
    else:
        self.postprocess_invalid_input_file()
    # If user requests to search for more mols, then allow.
    if self.preferences.get('nmol', False):
        #if int(self.pdb_info['all'].get('NMol')) < int(self.nmol):
            #self.pdb_info['all']['NMol'] = self.nmol
        self.pdb_info['all']['NMol'] = self.preferences.get('nmol', False)
    # Check for dependency problems
    self.check_dependencies()
    # Connect to Redis (computer cluster sends results via Redis)
    if self.preferences.get("run_mode") == "server" or self.computer_cluster:
        self.connect_to_redis()
def update_status(self, stat=False):
    """Update the status of the run."""
    # An explicit stat overrides the running counter; otherwise advance
    # by the per-job increment, capped at 90%.
    if stat:
        progress = int(stat)
    else:
        self.status += self.status_incr
        if self.status > 90:
            self.status = 90
        progress = int(self.status)
    self.results["process"]["status"] = progress
    self.tprint(arg=progress, level="progress", color="orange")
def calculate_status_increment(self, njobs):
    """Calculate an int for incrementing job status.

    90% of the progress range is divided evenly among the launched jobs;
    the remainder is consumed by pre/post-processing.
    """
    self.status_incr = int(90/njobs)
def check_dependencies(self):
    """Make sure dependencies are all available"""
    # Any of these missing, dead in the water
    #TODO reduce external dependencies
    #for executable in ("bzip2", "gunzip", "phaser", "phenix.cif_as_pdb", "tar"):
    required = ("gunzip", "phaser")
    for program in required:
        if find_executable(program):
            continue
        # Report, record the failure in the results, and abort.
        self.tprint("Executable for %s is not present, exiting" % program,
                    level=30,
                    color="red")
        self.results["process"]["status"] = -1
        self.results["error"] = "Executable for %s is not present" % program
        self.write_json()
        raise exceptions.MissingExecutableException(program)
    # If no gnuplot turn off printing
    # if self.preferences.get("show_plots", True) and (not self.preferences.get("json", False)):
    #     if not find_executable("gnuplot"):
    #         self.tprint("\nExecutable for gnuplot is not present, turning off plotting",
    #                     level=30,
    #                     color="red")
    #         self.preferences["show_plots"] = False
    def construct_results(self):
        """Initialize the self.results dict skeleton for this run.

        Populates command/process/plugin metadata and empty result
        containers that later stages (Phaser post-processing, file
        transfer) fill in.
        """
        self.results["command"] = self.command
        # Copy over details of this run
        #self.results["command"] = self.command.get("command")
        #self.results["preferences"] = self.preferences
        # Describe the process
        self.results["process"] = self.command.get("process", {})
        # Move process_id
        #self.results["process"]["process_id"] = self.command.get("process_id")
        self.results["process"]["process_id"] = self.command.pop("process_id", None)
        # Status is now 1 (starting)
        self.results["process"]["status"] = self.status
        # Process type is plugin
        self.results["process"]["type"] = "plugin"
        # Give it a result_id
        self.results["process"]["result_id"] = str(ObjectId())
        # Add link to processed dataset
        if self.processed_results:
            #self.results["process"]["result_id"] = self.processed_results["process"]["result_id"]
            # This links to MongoDB results._id
            self.results["process"]["parent_id"] = self.processed_results.get("process", {}).get("result_id", False)
            # This links to a session
            self.results["process"]["session_id"] = self.processed_results.get("process", {}).get("session_id", False)
            # Identify parent type
            self.results["process"]["parent"] = self.processed_results.get("plugin", {})
            # The repr
            self.results["process"]["repr"] = self.processed_results.get("process", {}).get("repr", "Unknown")
        # Describe plugin (module-level constants defined elsewhere in this file)
        self.results["plugin"] = {
            "data_type": DATA_TYPE,
            "type": PLUGIN_TYPE,
            "subtype": PLUGIN_SUBTYPE,
            "id": ID,
            "version": VERSION
        }
        # Add fields to results -- containers filled during post-processing
        self.results["results"] = {
            "mr_results": {},
            "archive_files": [],
            "data_produced": [],
            "messages": [],
            "errors": [],
            "for_display": []
        }
    def connect_to_redis(self):
        """Open the Redis connection used to send results to the control.

        The adapter module is imported lazily via importlib -- presumably
        so Redis is only required when actually used (TODO confirm).
        """
        redis_database = importlib.import_module('database.redis_adapter')
        self.redis = redis_database.Database(settings=self.site.CONTROL_DATABASE_SETTINGS,
                                             logger=self.logger)
    def send_results(self):
        """Push the current self.results back to the control via Redis.

        Only acts in "server" run mode; both lpush and publish are used
        on the RAPD_RESULTS channel/list.
        """
        self.logger.debug("send_results")
        if self.preferences.get("run_mode") == "server":
            self.logger.debug("Sending back on redis")
            #pprint(self.results)
            # Transcribe results
            json_results = json.dumps(self.results)
            # Get redis instance (lazily connect on first use)
            if not self.redis:
                self.connect_to_redis()
            # Send results back
            self.redis.lpush("RAPD_RESULTS", json_results)
            self.redis.publish("RAPD_RESULTS", json_results)
def process(self):
"""Run plugin action"""
self.process_phaser()
self.jobs_monitor()
    def process_phaser(self, full=False):
        """Assemble and launch Phaser MR jobs for every chain/spacegroup.

        Quick runs are named "<sg>_<chain>_0", full runs "<sg>_<chain>_1".
        With full=False (default) quick runs are always launched and full
        runs are additionally launched when a computer cluster is
        available; with full=True only full runs are launched (local
        rerun after quick runs found no solution).
        """
        self.logger.debug("process_phaser")
        self.tprint("\nStarting molecular replacement", level=30, color="blue")
        self.tprint("  Assembling Phaser runs", level=10, color="white")
        def launch_job(inp):
            """Launch the Phaser job"""
            #self.logger.debug("process_phaser Launching %s"%inp['name'])
            tag = 'Phaser_%d' % random.randint(0, 10000)
            if self.computer_cluster:
                # Create a unique identifier for Phaser results
                inp['tag'] = tag
                # Send Redis settings so results can be sent thru redis
                inp['db_settings'] = self.site.CONTROL_DATABASE_SETTINGS
                # Don't need result queue since results will be sent via Redis
                queue = False
            else:
                inp['pool'] = self.pool
                # Add result queue
                queue = self.manager.Queue()
                inp['result_queue'] = queue
            # Launch the job
            job, pid = run_phaser(**inp)
            # Bookkeeping used by jobs_monitor/check_solution.
            self.jobs[job] = {'name': inp['name'],
                              'pid' : pid,
                              'tag' : tag,
                              'result_queue': queue,
                              'spacegroup': inp['spacegroup'] # Need for jobs that timeout.
                              }
        # Determine which SG's to run MR.
        run_sg = xutils.get_sub_groups(self.laue, "phaser")
        # Prune if only one chain present, b/c 'all' and 'A' will be the same.
        # NOTE(review): deleting keys while iterating .keys() is a Python 2
        # idiom (keys() is a list there); it would raise on Python 3.
        if len(self.pdb_info.keys()) == 2:
            for key in self.pdb_info.keys():
                if key != 'all':
                    del self.pdb_info[key]
        # Only launch is greater than 20% solvent content
        for chain in self.pdb_info.keys():
            if self.pdb_info[chain]['SC'] > 0.2:
                #if pdb_info[chain]["res"] != 0.0:
                # Set copy to minimum of 1
                copy = self.pdb_info[chain]["NMol"]
                if copy == 0:
                    copy = 1
                # Set recommended resolution for MR
                res = xutils.set_phaser_res(self.pdb_info[chain]["res"],
                                            self.large_cell,
                                            self.dres)
                for sg in run_sg:
                    # Convert SG number to name
                    sg = xutils.convert_spacegroup(sg, True)
                    # Setup MR job description
                    job_description = {
                        "data_file": self.data_file,
                        "struct_file": self.pdb_info[chain]["file"],
                        "spacegroup": sg,
                        "ncopy": copy,
                        "adf": self.adf,
                        #"test": self.preferences.get("test", False),
                        "resolution": res,
                        "launcher": self.launcher,
                        "tag": False,
                        "batch_queue": self.batch_queue,
                        "rapd_python": self.rapd_python}
                    # Launch quick MR (default)
                    if not full:
                        # Change folder
                        name = "%s_%s_0" % (sg, chain)
                        work_dir = os.path.abspath(os.path.join(self.working_dir, name))
                        xutils.create_folder(work_dir)
                        job_description.update({"work_dir": work_dir,
                                                "name": name})
                        launch_job(job_description)
                    # Launch full MR
                    if self.computer_cluster or full:
                        name = "%s_%s_1" % (sg, chain)
                        work_dir = os.path.abspath(os.path.join(self.working_dir, name))
                        xutils.create_folder(work_dir)
                        job_description.update({"work_dir": work_dir,
                                                "full": True,
                                                "name": name})
                        launch_job(job_description)
            else:
                # Too little solvent: record an immediate "no solution" result.
                self.postprocess_phaser(chain, {"solution": False,
                                                "message": "% Solvent < 20%"})
        # Save number of jobs launched for correct status reply
        if not full:
            if self.computer_cluster:
                self.calculate_status_increment(len(self.jobs.keys()))
            else:
                # Locally each quick job will be followed by a full job.
                self.calculate_status_increment(len(self.jobs.keys())*2)
    def postprocess_phaser(self, job_name, results):
        """Normalize one Phaser job's results and forward them.

        job_name looks like "<sg>_<chain>_<0|1>"; the trailing run-type
        suffix is stripped ([:-2]) when keying mr_results. Copies the
        result tarball into the working dir, updates progress, transfers
        files, and pushes results back to RAPD.
        """
        self.logger.debug("postprocess_phaser")
        # Copy tar to working dir
        if results.get("tar", False):
            orig = results.get("tar", {"path":False}).get("path")
            if orig:
                new = os.path.join(self.working_dir, os.path.basename(orig))
                # If old file in working dir, remove it and recopy.
                if os.path.exists(new):
                    os.unlink(new)
                shutil.copy(orig, new)
                results["tar"]["path"] = new
        # Send back results skipping whether quick or full run.
        #self.results['results']['mr_results'][job_name[:-2]].append(results)
        self.results['results']['mr_results'].update({job_name[:-2] : results})
        # Show results in log
        #self.logger.debug(results)
        # Save results for command line
        self.phaser_results[job_name] = {"results": results}
        # Update the status number
        self.update_status()
        # Move transferring files
        self.transfer_files(results)
        # Passback new results to RAPD
        self.send_results()
    def jobs_monitor(self, full=False):
        """Poll the launched Phaser jobs and finish each as it completes.

        full=False watches the quick runs (names ending '_0') and then
        triggers check_solution; full=True watches the full runs ('_1').
        Jobs exceeding self.phaser_timer seconds are killed and reported
        as timed out.  NOTE: this file is Python 2 (print statements).
        """
        self.logger.debug("jobs_monitor")
        def finish_job(job):
            """Finish the jobs and send to postprocess_phaser"""
            info = self.jobs.pop(job)
            #print 'Finished Phaser on %s with id: %s'%(info['name'], info['tag'])
            self.logger.debug('Finished Phaser on %s'%info['name'])
            if self.computer_cluster:
                # Cluster jobs publish their results to Redis under 'tag'.
                results_json = self.redis.get(info['tag'])
                results = json.loads(results_json)
                self.postprocess_phaser(info['name'], results)
                self.redis.delete(info['tag'])
                """
                try:
                    # This try/except is for when results aren't in Redis in time.
                    results = json.loads(results_json)
                    self.postprocess_phaser(info['name'], results)
                    self.redis.delete(info['tag'])
                except Exception as e:
                    self.logger.error('Error '+ str(e))
                    #self.logger.error('results_json: %s'%results_json)
                    #print 'PROBLEM: %s %s'%(info['name'], info['output_id'])
                    #print results_json
                """
            else:
                # Local jobs hand results back through a multiprocessing queue.
                results = info['result_queue'].get()
                self.postprocess_phaser(info['name'], json.loads(results.get('stdout')))
            jobs.remove(job)
        # Signal to the pool that no more processes will be added
        if self.pool and full:
            self.pool.close()
        timed_out = False
        timer = 0
        # Select only the jobs of this phase by the name suffix ('_0' quick, '_1' full).
        if full:
            jobs = [job for job in self.jobs.keys() if self.jobs[job]['name'][-1] == '1']
        else:
            jobs = [job for job in self.jobs.keys() if self.jobs[job]['name'][-1] == '0']
        # Run loop to see when jobs finish
        while len(jobs):
            for job in jobs:
                if self.pool:
                    if job.ready():
                        finish_job(job)
                elif job.is_alive() == False:
                    finish_job(job)
            time.sleep(1)
            timer += 1
            """
            if self.verbose:
                if round(timer%1,1) in (0.0,1.0):
                    print 'Waiting for AutoStat jobs to finish '+str(timer)+' seconds'
            """
            if self.phaser_timer:
                if timer >= self.phaser_timer:
                    timed_out = True
                    break
        if timed_out:
            if self.verbose:
                self.logger.debug('MR timed out.')
                print 'MR timed out.'
            for job in self.jobs.keys():
                if self.computer_cluster:
                    # Kill job on cluster:
                    self.computer_cluster.kill_job(self.jobs[job].get('pid'))
                else:
                    # terminate the job
                    job.terminate()
                # Get the job info
                info = self.jobs.pop(job)
                print 'Timeout Phaser on %s'%info['name']
                self.logger.debug('Timeout Phaser on %s'%info['name'])
                # Send timeout result to postprocess
                self.postprocess_phaser(info['name'], {"solution": False,
                                                       "spacegroup": info['spacegroup'],
                                                       "message": "Timed out"})
                # Delete the Redis key
                # NOTE(review): launch_job stores 'tag', not 'output_id' --
                # this lookup looks like it would KeyError on timeout; confirm.
                if self.redis:
                    self.redis.delete(info['output_id'])
        # Join the self.pool if used
        if self.pool and full:
            # Close the multiprocessing.manager
            self.manager.shutdown()
            self.pool.join()
        if self.verbose and self.logger:
            self.logger.debug('MR.jobs_monitor finished.')
        #Check if solution has been found.
        if not full:
            self.check_solution()
    def check_solution(self):
        """
        Check whether any quick run ('_0') produced a solution.

        If so: kill/discard the pending full runs ('_1'), close the pool,
        and report 90% done. If not: drop the quick results and (locally)
        launch + monitor the full runs.
        """
        self.logger.debug("check_solution")
        solution = False
        # Partition result keys by run-type suffix: '_0' quick, '_1' full.
        keys0 = [key for key in self.phaser_results.keys() if key[-1] == '0']
        keys1 = [key for key in self.phaser_results.keys() if key[-1] == '1']
        for key in keys0:
            sol = self.phaser_results[key].get('results').get('solution', False)
            # Anything outside the known failure sentinels counts as a solution.
            if sol not in ('No solution','Timed out','NA', False, None):
                solution = True
        if solution:
            #Kill the jobs and remove the full results since not needed.
            # NOTE(review): keys1 holds result-name strings while self.jobs is
            # keyed by job handles in launch_job -- self.jobs[job] here looks
            # inconsistent; confirm against how self.jobs is populated.
            for job in keys1:
                if self.computer_cluster:
                    # Kill job on cluster:
                    self.computer_cluster.kill_job(self.jobs[job].get('pid'))
                else:
                    # terminate the job
                    job.terminate()
                del self.phaser_results[job]
            # Close the pool to new jobs and join the thread.
            if self.pool:
                # Close the multiprocessing.manager
                self.manager.shutdown()
                self.pool.close()
                self.pool.join()
            # Update status
            self.update_status(90)
            # Send updated status
            self.send_results()
        else:
            # Remove results from quick run if no solution found.
            for k in keys0:
                del self.phaser_results[k]
            # Run the full Phaser jobs.
            if not self.computer_cluster:
                self.process_phaser(full=True)
            # Monitor the full jobs for when the finish
            self.jobs_monitor(full=True)
    def transfer_files(self, result):
        """
        Copy result files to the exchange directory the control can access.

        Copies each known artifact (maps, pdb, mtz, tar, adf, peak) into
        <exchange_dir>/<working-dir-name>, rewrites its 'path' in-place in
        *result*, and registers it in results.data_produced / for_display.
        The large triple-quoted block below is retained dead code from an
        earlier move-and-compress scheme.
        """
        self.logger.debug("transfer_files")
        #if self.preferences.get("exchange_dir", False):
        if self.command["directories"].get("exchange_dir", False):
            # Determine and validate the place to put the data
            target_dir = os.path.join(
                #self.preferences["exchange_dir"], os.path.split(self.working_dir)[1])
                self.command["directories"].get("exchange_dir" ), os.path.split(self.working_dir)[1])
            if not os.path.exists(target_dir):
                os.makedirs(target_dir)
            # Copy compressed results files to exchange dir and update path.
            l = ["map_1_1", "map_2_1", 'pdb', 'mtz', 'tar', 'adf', 'peak']
            for f in l:
                if result.get(f, False):
                    archive_dict = result.get(f, {})
                    archive_file = archive_dict.get("path", False)
                    if archive_file:
                        # Copy data
                        target = os.path.join(target_dir, os.path.basename(archive_file))
                        # Copy files for now to make sure they are produced
                        shutil.copyfile(archive_file, target)
                        """
                        if f in ("map_1_1", "map_2_1", 'tar'):
                            shutil.move(archive_file, target)
                        else:
                            # Once we know this works we can switch to moving files.
                            shutil.copyfile(archive_file, target)
                        """
                        # Store new path information
                        archive_dict["path"] = target
                        # Add to the results.data_produced array
                        if f in ('pdb', 'mtz', 'tar', 'adf', 'peak'):
                            self.results["results"]["data_produced"].append(archive_dict)
                        # Also put PDB path in 'for_display' results
                        if f in ('pdb', "map_1_1", "map_2_1", 'adf', 'peak'):
                            self.results["results"]["for_display"].append(archive_dict)
            """
            # If there is data produced (Used for files that could be passed to another Plugin later)
            files_to_move = ("pdb", "mtz", "adf", "peak")
            for key in files_to_move:
                if result.get(key, None):
                    file_to_move = result.pop(key)
                    if os.path.exists(file_to_move):
                        # Move data
                        target = os.path.join(
                            target_dir, os.path.basename(file_to_move))
                        shutil.move(file_to_move, target)
                        # Compress data
                        arch_prod_file, arch_prod_hash = archive.compress_file(target)
                        # Remove the file that was compressed
                        os.unlink(target)
                        # Store information
                        new_data_produced = {
                            "path": arch_prod_file,
                            "hash": arch_prod_hash,
                            "description": '%s_%s'%(result.get("spacegroup"), key)
                        }
                        # Add the file to results.data_produced array
                        self.results["results"]["data_produced"].append(
                            new_data_produced)
            # If there is an archive
            #self.logger.debug("result", result)
            archive_dict = result.get("tar", {})
            #self.logger.debug("archive_dict %s", archive_dict)
            archive_file = archive_dict.get("path", False)
            #self.logger.debug("archive_file %s", archive_file)
            if archive_file:
                # Move the file
                target = os.path.join(
                    target_dir, os.path.basename(archive_file))
                #self.logger.debug("target %s", target)
                shutil.move(archive_file, target)
                # Store information
                archive_dict["path"] = target
                # Add to the results.archive_files array
                self.results["results"]["archive_files"].append(
                    archive_dict)
            """
def postprocess_invalid_input_file(self):
"""Make a proper result for PDB that could not be downloaded"""
self.logger.debug("postprocess_invalid_input_file")
# Save message
self.results['results']['errors'].append('Invalid input structure file')
# Update the status number
self.update_status(100)
# Passback new results to RAPD
self.send_results()
# Kill the plugin early
os._exit(0)
def postprocess(self):
"""Clean up after plugin action"""
self.logger.debug("postprocess")
self.tprint(arg=90, level="progress")
# Cleanup my mess.
self.clean_up()
# Finished
self.update_status(100)
self.tprint(arg=100, level="progress")
#pprint(self.results)
self.write_json()
# Send Final results
self.send_results()
# print results if run from commandline
if self.tprint:
self.print_results()
# Print credits
self.print_credits()
# Message in logger
t = round(time.time()-self.start_time)
self.logger.debug('MR finished in %s seconds'%t)
def clean_up(self):
"""Clean up the working directory"""
self.logger.debug("clean_up")
self.tprint(" Cleaning up", level=30, color="white")
if self.command["preferences"].get("clean", False):
self.logger.debug("Cleaning up Phaser files and folders")
# Change to work dir
os.chdir(self.working_dir)
keep = glob.glob("*.*")
keep = [f for f in keep if not f.count(os.path.splitext(os.path.basename(self.struct_file))[0])]
keep = [f for f in keep if not f.count(os.path.splitext(os.path.basename(self.data_file))[0])]
dir_con = glob.glob("*")
for target in dir_con:
if target not in keep:
try:
if target.count('.'):
os.unlink(target)
else:
shutil.rmtree(target)
except:
self.logger.debug('Could not remove %s'%target)
    def print_results(self):
        """Print a per-spacegroup MR results table to the commandline."""
        self.logger.debug("print_results")
        self.tprint("\nResults", level=99, color="blue")
        def print_header_line():
            """Print the table header line"""
            self.tprint(("  {:^14} {:^14} {:^14} {:^14} {:^14} {:^14} {:^14} {}").format(
                "Space Group",
                "Search model",
                "# placed",
                "LL-Gain",
                "RF Z-score",
                "TF Z-score",
                "# Clashes",
                "Info",
                #width=str(longest_field)),
                ),
                        level=99,
                        color="red")
        def print_result_line(key, my_result):
            """Print the result line in the table"""
            # Split out chains -- key is "<spacegroup>_<chain>"
            sg = key.split('_')[0]
            c = key.split('_')[1]
            if c == 'all':
                chain = "all chains"
            else:
                chain = "chain %s"%c
            self.tprint("  {:^14} {:^14} {:^14} {:^14} {:^14} {:^14} {:^14} {}".format(
                sg,
                chain,
                my_result.get("nmol", "-"),
                my_result.get("gain", "-"),
                my_result.get("rfz", "-"),
                my_result.get("tfz", "-"),
                my_result.get("clash", "-"),
                my_result.get("message", ""),
                ),
                        level=99,
                        color="green")
        print_header_line()
        for sg in self.results['results']['mr_results'].keys():
            # Get the result in question
            my_result = self.results['results']['mr_results'][sg]
            # Print the result line
            print_result_line(sg, my_result)
def write_json(self):
"""Print out JSON-formatted result"""
json_string = json.dumps(self.results)
# If running in JSON mode, print to terminal
if self.preferences.get("run_mode") == "json":
print json_results
# Output to terminal?
#if self.preferences.get("json", False):
# print json_string
# Always write a file
os.chdir(self.working_dir)
with open("result.json", "w") as outfile:
outfile.writelines(json_string)
    def print_credits(self):
        """Print credits for programs utilized by this plugin."""
        # Banner header for the credits section.
        self.tprint(rcredits.HEADER,
                    level=99,
                    color="blue")
        # One credit entry per external program used by this plugin.
        programs = ["CCTBX", "PHENIX", "PHASER"]
        info_string = rcredits.get_credits_text(programs, "    ")
        self.tprint(info_string, level=99, color="black")
|
RAPD/RAPD
|
src/plugins/mr/plugin.py
|
Python
|
agpl-3.0
| 36,161
|
[
"ADF"
] |
fbc0b1a3c85da22f3443eafed279eb61cc17e33d4db34f5ab6fb3e604affcd52
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
################################################################################
#
# RMG - Reaction Mechanism Generator
#
# Copyright (c) 2002-2017 Prof. William H. Green (whgreen@mit.edu),
# Prof. Richard H. West (r.west@neu.edu) and the RMG Team (rmg_dev@mit.edu)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the 'Software'),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################
import unittest
import distutils.spawn
import itertools
import logging
import numpy as np
import os
import shutil
from rmgpy import getPath
from rmgpy.qm.main import QMCalculator
from rmgpy.molecule import Molecule
from rmgpy.qm.gaussian import Gaussian, GaussianMolPM3, GaussianMolPM6
executablePath = Gaussian.executablePath
# Guard against a missing Gaussian install: locating the binary can yield
# None, and os.path.exists(None) raises TypeError on Python 3.
# NOTE(review): assumes Gaussian.executablePath may be None -- confirm.
NO_GAUSSIAN = executablePath is None or not os.path.exists(executablePath)
# Naphthalene: shared test molecule for the PM3/PM6 thermo tests below.
mol1 = Molecule().fromSMILES('C1=CC=C2C=CC=CC2=C1')
class TestGaussianMolPM3(unittest.TestCase):
    """
    Contains unit tests for the GaussianMolPM3 class.
    """
    # skipIf on setUp: when Gaussian is absent, setUp raises SkipTest,
    # which causes every test in the class to be skipped.
    @unittest.skipIf(NO_GAUSSIAN, "Gaussian not found. Try resetting your environment variables if you want to use it.")
    def setUp(self):
        """
        A function run before each unit test in this class.
        """
        RMGpy_path = os.path.normpath(os.path.join(getPath(),'..'))
        qm = QMCalculator(software = 'gaussian',
                          method = 'pm3',
                          fileStore = os.path.join(RMGpy_path, 'testing', 'qm', 'QMfiles'),
                          scratchDirectory = os.path.join(RMGpy_path, 'testing', 'qm', 'QMscratch'),
                          )
        if not os.path.exists(qm.settings.fileStore):
            os.makedirs(qm.settings.fileStore)
        self.qmmol1 = GaussianMolPM3(mol1, qm.settings)
    def testGenerateThermoData(self):
        """
        Test that generateThermoData() works correctly on gaussian PM3.
        """
        # First ensure any old data are removed, or else they'll be reused!
        for directory in (self.qmmol1.settings.fileStore, self.qmmol1.settings.scratchDirectory):
            shutil.rmtree(directory, ignore_errors=True)
        self.qmmol1.generateThermoData()
        result = self.qmmol1.qmData
        self.assertTrue(self.qmmol1.thermo.comment.startswith('QM GaussianMolPM3 calculation'))
        # Naphthalene has 18 atoms (10 C + 8 H).
        self.assertEqual(result.numberOfAtoms, 18)
        self.assertIsInstance(result.atomicNumbers, np.ndarray)
        if result.molecularMass.units=='amu':
            self.assertAlmostEqual(result.molecularMass.value, 128.0626, 3)
    def testLoadThermoData(self):
        """
        Test that generateThermoData() can load thermo from the previous gaussian PM3 run.
        Check that it loaded, and the values are the same as above.
        """
        # No rmtree here: relies on the files written by the previous test run.
        self.qmmol1.generateThermoData()
        result = self.qmmol1.qmData
        self.assertTrue(self.qmmol1.thermo.comment.startswith('QM GaussianMolPM3 calculation'))
        self.assertEqual(result.numberOfAtoms, 18)
        self.assertIsInstance(result.atomicNumbers, np.ndarray)
        if result.molecularMass.units=='amu':
            self.assertAlmostEqual(result.molecularMass.value, 128.0626, 3)
class TestGaussianMolPM6(unittest.TestCase):
    """
    Contains unit tests for the GaussianMolPM6 class.
    """
    # skipIf on setUp: when Gaussian is absent, setUp raises SkipTest,
    # which causes every test in the class to be skipped.
    @unittest.skipIf(NO_GAUSSIAN, "Gaussian not found. Try resetting your environment variables if you want to use it.")
    def setUp(self):
        """
        A function run before each unit test in this class.
        """
        RMGpy_path = os.path.normpath(os.path.join(getPath(),'..'))
        qm = QMCalculator(software = 'gaussian',
                          method = 'pm6',
                          fileStore = os.path.join(RMGpy_path, 'testing', 'qm', 'QMfiles'),
                          scratchDirectory = os.path.join(RMGpy_path, 'testing', 'qm', 'QMscratch'),
                          )
        if not os.path.exists(qm.settings.fileStore):
            os.makedirs(qm.settings.fileStore)
        self.qmmol1 = GaussianMolPM6(mol1, qm.settings)
    # skipIf arguments are evaluated at class-definition time; guard with
    # (executablePath or '') so a None executablePath (Gaussian absent)
    # no longer raises TypeError on import.
    @unittest.skipIf('g03' in (executablePath or ''), "This test was shown not to work on g03.")
    def testGenerateThermoData(self):
        """
        Test that generateThermoData() works correctly for gaussian PM6.
        """
        # First ensure any old data are removed, or else they'll be reused!
        for directory in (self.qmmol1.settings.fileStore, self.qmmol1.settings.scratchDirectory):
            shutil.rmtree(directory, ignore_errors=True)
        self.qmmol1.generateThermoData()
        result = self.qmmol1.qmData
        self.assertTrue(self.qmmol1.thermo.comment.startswith('QM GaussianMolPM6 calculation'))
        # Naphthalene has 18 atoms (10 C + 8 H).
        self.assertEqual(result.numberOfAtoms, 18)
        self.assertIsInstance(result.atomicNumbers, np.ndarray)
        if result.molecularMass.units=='amu':
            self.assertAlmostEqual(result.molecularMass.value, 128.0626, 3)
    @unittest.skipIf('g03' in (executablePath or ''), "This test was shown not to work on g03.")
    def testLoadThermoData(self):
        """
        Test that generateThermoData() can load thermo from the previous gaussian PM6 run.
        Check that it loaded, and the values are the same as above.
        """
        # No rmtree here: relies on the files written by the previous test run.
        self.qmmol1.generateThermoData()
        result = self.qmmol1.qmData
        self.assertTrue(self.qmmol1.thermo.comment.startswith('QM GaussianMolPM6 calculation'))
        self.assertEqual(result.numberOfAtoms, 18)
        self.assertIsInstance(result.atomicNumbers, np.ndarray)
        if result.molecularMass.units=='amu':
            self.assertAlmostEqual(result.molecularMass.value, 128.0626, 3)
################################################################################
if __name__ == '__main__':
    # Verbose runner so each test name is printed as it executes.
    runner = unittest.TextTestRunner(verbosity=2)
    unittest.main(testRunner=runner)
|
Molecular-Image-Recognition/Molecular-Image-Recognition
|
code/rmgpy/qm/gaussianTest.py
|
Python
|
mit
| 6,890
|
[
"Gaussian"
] |
f4a0756af3b0df72d8046c2cf8bdd8ada481205f529c52c23ee417892cc765a5
|
# nerd font icon-name to unicode mapping
icons = {
"500px": "\uf26e",
"accessible-icon": "\uf368",
"accusoft": "\uf369",
"acquisitions-incorporated": "\uf6af",
"ad": "\uf641",
"address-book": "\uf2b9",
"address-card": "\uf2bb",
"adjust": "\uf042",
"adn": "\uf170",
"adversal": "\uf36a",
"affiliatetheme": "\uf36b",
"air-freshener": "\uf5d0",
"algolia": "\uf36c",
"align-center": "\uf037",
"align-justify": "\uf039",
"align-left": "\uf036",
"align-right": "\uf038",
"alipay": "\uf642",
"allergies": "\uf461",
"amazon": "\uf270",
"amazon-pay": "\uf42c",
"ambulance": "\uf0f9",
"american-sign-language-interpreting": "\uf2a3",
"amilia": "\uf36d",
"anchor": "\uf13d",
"android": "\uf17b",
"angellist": "\uf209",
"angle-double-down": "\uf103",
"angle-double-left": "\uf100",
"angle-double-right": "\uf101",
"angle-double-up": "\uf102",
"angle-down": "\uf107",
"angle-left": "\uf104",
"angle-right": "\uf105",
"angle-up": "\uf106",
"angry": "\uf556",
"angrycreative": "\uf36e",
"angular": "\uf420",
"ankh": "\uf644",
"app-store": "\uf36f",
"app-store-ios": "\uf370",
"apper": "\uf371",
"apple": "\uf179",
"apple-alt": "\uf5d1",
"apple-pay": "\uf415",
"archive": "\uf187",
"archway": "\uf557",
"arrow-alt-circle-down": "\uf358",
"arrow-alt-circle-left": "\uf359",
"arrow-alt-circle-right": "\uf35a",
"arrow-alt-circle-up": "\uf35b",
"arrow-circle-down": "\uf0ab",
"arrow-circle-left": "\uf0a8",
"arrow-circle-right": "\uf0a9",
"arrow-circle-up": "\uf0aa",
"arrow-down": "\uf063",
"arrow-left": "\uf060",
"arrow-right": "\uf061",
"arrow-up": "\uf062",
"arrows-alt": "\uf0b2",
"arrows-alt-h": "\uf337",
"arrows-alt-v": "\uf338",
"assistive-listening-systems": "\uf2a2",
"asterisk": "\uf069",
"asymmetrik": "\uf372",
"at": "\uf1fa",
"atlas": "\uf558",
"atom": "\uf5d2",
"audible": "\uf373",
"audio-description": "\uf29e",
"autoprefixer": "\uf41c",
"avianex": "\uf374",
"aviato": "\uf421",
"award": "\uf559",
"aws": "\uf375",
"backspace": "\uf55a",
"backward": "\uf04a",
"balance-scale": "\uf24e",
"ban": "\uf05e",
"band-aid": "\uf462",
"bandcamp": "\uf2d5",
"barcode": "\uf02a",
"bars": "\uf0c9",
"baseball-ball": "\uf433",
"basketball-ball": "\uf434",
"bath": "\uf2cd",
"battery-empty": "\uf244",
"battery-full": "\uf240",
"battery-half": "\uf242",
"battery-quarter": "\uf243",
"battery-three-quarters": "\uf241",
"bed": "\uf236",
"beer": "\uf0fc",
"behance": "\uf1b4",
"behance-square": "\uf1b5",
"bell": "\uf0f3",
"bell-slash": "\uf1f6",
"bezier-curve": "\uf55b",
"bible": "\uf647",
"bicycle": "\uf206",
"bimobject": "\uf378",
"binoculars": "\uf1e5",
"birthday-cake": "\uf1fd",
"bitbucket": "\uf171",
"bitcoin": "\uf379",
"bity": "\uf37a",
"black-tie": "\uf27e",
"blackberry": "\uf37b",
"blender": "\uf517",
"blender-phone": "\uf6b6",
"blind": "\uf29d",
"blogger": "\uf37c",
"blogger-b": "\uf37d",
"bluetooth": "\uf293",
"bluetooth-b": "\uf294",
"bold": "\uf032",
"bolt": "\uf0e7",
"bomb": "\uf1e2",
"bone": "\uf5d7",
"bong": "\uf55c",
"book": "\uf02d",
"book-dead": "\uf6b7",
"book-open": "\uf518",
"book-reader": "\uf5da",
"bookmark": "\uf02e",
"bowling-ball": "\uf436",
"box": "\uf466",
"box-open": "\uf49e",
"boxes": "\uf468",
"braille": "\uf2a1",
"brain": "\uf5dc",
"briefcase": "\uf0b1",
"briefcase-medical": "\uf469",
"broadcast-tower": "\uf519",
"broom": "\uf51a",
"brush": "\uf55d",
"btc": "\uf15a",
"bug": "\uf188",
"building": "\uf1ad",
"bullhorn": "\uf0a1",
"bullseye": "\uf140",
"burn": "\uf46a",
"buromobelexperte": "\uf37f",
"bus": "\uf207",
"bus-alt": "\uf55e",
"business-time": "\uf64a",
"buysellads": "\uf20d",
"calculator": "\uf1ec",
"calendar": "\uf133",
"calendar-alt": "\uf073",
"calendar-check": "\uf274",
"calendar-minus": "\uf272",
"calendar-plus": "\uf271",
"calendar-times": "\uf273",
"camera": "\uf030",
"camera-retro": "\uf083",
"campground": "\uf6bb",
"cannabis": "\uf55f",
"capsules": "\uf46b",
"car": "\uf1b9",
"car-alt": "\uf5de",
"car-battery": "\uf5df",
"car-crash": "\uf5e1",
"car-side": "\uf5e4",
"caret-down": "\uf0d7",
"caret-left": "\uf0d9",
"caret-right": "\uf0da",
"caret-square-down": "\uf150",
"caret-square-left": "\uf191",
"caret-square-right": "\uf152",
"caret-square-up": "\uf151",
"caret-up": "\uf0d8",
"cart-arrow-down": "\uf218",
"cart-plus": "\uf217",
"cat": "\uf6be",
"cc-amazon-pay": "\uf42d",
"cc-amex": "\uf1f3",
"cc-apple-pay": "\uf416",
"cc-diners-club": "\uf24c",
"cc-discover": "\uf1f2",
"cc-jcb": "\uf24b",
"cc-mastercard": "\uf1f1",
"cc-paypal": "\uf1f4",
"cc-stripe": "\uf1f5",
"cc-visa": "\uf1f0",
"centercode": "\uf380",
"certificate": "\uf0a3",
"chair": "\uf6c0",
"chalkboard": "\uf51b",
"chalkboard-teacher": "\uf51c",
"charging-station": "\uf5e7",
"chart-area": "\uf1fe",
"chart-bar": "\uf080",
"chart-line": "\uf201",
"chart-pie": "\uf200",
"check": "\uf00c",
"check-circle": "\uf058",
"check-double": "\uf560",
"check-square": "\uf14a",
"chess": "\uf439",
"chess-bishop": "\uf43a",
"chess-board": "\uf43c",
"chess-king": "\uf43f",
"chess-knight": "\uf441",
"chess-pawn": "\uf443",
"chess-queen": "\uf445",
"chess-rook": "\uf447",
"chevron-circle-down": "\uf13a",
"chevron-circle-left": "\uf137",
"chevron-circle-right": "\uf138",
"chevron-circle-up": "\uf139",
"chevron-down": "\uf078",
"chevron-left": "\uf053",
"chevron-right": "\uf054",
"chevron-up": "\uf077",
"child": "\uf1ae",
"chrome": "\uf268",
"church": "\uf51d",
"circle": "\uf111",
"circle-notch": "\uf1ce",
"city": "\uf64f",
"clipboard": "\uf328",
"clipboard-check": "\uf46c",
"clipboard-list": "\uf46d",
"clock": "\uf017",
"clone": "\uf24d",
"closed-captioning": "\uf20a",
"cloud": "\uf0c2",
"cloud-download-alt": "\uf381",
"cloud-meatball": "\uf73b",
"cloud-moon": "\uf6c3",
"cloud-moon-rain": "\uf73c",
"cloud-rain": "\uf73d",
"cloud-showers-heavy": "\uf740",
"cloud-sun": "\uf6c4",
"cloud-sun-rain": "\uf743",
"cloud-upload-alt": "\uf382",
"cloudscale": "\uf383",
"cloudsmith": "\uf384",
"cloudversify": "\uf385",
"cocktail": "\uf561",
"code": "\uf121",
"code-branch": "\uf126",
"codepen": "\uf1cb",
"codiepie": "\uf284",
"coffee": "\uf0f4",
"cog": "\uf013",
"cogs": "\uf085",
"coins": "\uf51e",
"columns": "\uf0db",
"comment": "\uf075",
"comment-alt": "\uf27a",
"comment-dollar": "\uf651",
"comment-dots": "\uf4ad",
"comment-slash": "\uf4b3",
"comments": "\uf086",
"comments-dollar": "\uf653",
"compact-disc": "\uf51f",
"compass": "\uf14e",
"compress": "\uf066",
"concierge-bell": "\uf562",
"connectdevelop": "\uf20e",
"contao": "\uf26d",
"cookie": "\uf563",
"cookie-bite": "\uf564",
"copy": "\uf0c5",
"copyright": "\uf1f9",
"couch": "\uf4b8",
"cpanel": "\uf388",
"creative-commons": "\uf25e",
"creative-commons-by": "\uf4e7",
"creative-commons-nc": "\uf4e8",
"creative-commons-nc-eu": "\uf4e9",
"creative-commons-nc-jp": "\uf4ea",
"creative-commons-nd": "\uf4eb",
"creative-commons-pd": "\uf4ec",
"creative-commons-pd-alt": "\uf4ed",
"creative-commons-remix": "\uf4ee",
"creative-commons-sa": "\uf4ef",
"creative-commons-sampling": "\uf4f0",
"creative-commons-sampling-plus": "\uf4f1",
"creative-commons-share": "\uf4f2",
"creative-commons-zero": "\uf4f3",
"credit-card": "\uf09d",
"critical-role": "\uf6c9",
"crop": "\uf125",
"crop-alt": "\uf565",
"cross": "\uf654",
"crosshairs": "\uf05b",
"crow": "\uf520",
"crown": "\uf521",
"css3": "\uf13c",
"css3-alt": "\uf38b",
"cube": "\uf1b2",
"cubes": "\uf1b3",
"cut": "\uf0c4",
"cuttlefish": "\uf38c",
"d-and-d": "\uf38d",
"d-and-d-beyond": "\uf6ca",
"dashcube": "\uf210",
"database": "\uf1c0",
"deaf": "\uf2a4",
"delicious": "\uf1a5",
"democrat": "\uf747",
"deploydog": "\uf38e",
"deskpro": "\uf38f",
"desktop": "\uf108",
"dev": "\uf6cc",
"deviantart": "\uf1bd",
"dharmachakra": "\uf655",
"diagnoses": "\uf470",
"dice": "\uf522",
"dice-d20": "\uf6cf",
"dice-d6": "\uf6d1",
"dice-five": "\uf523",
"dice-four": "\uf524",
"dice-one": "\uf525",
"dice-six": "\uf526",
"dice-three": "\uf527",
"dice-two": "\uf528",
"digg": "\uf1a6",
"digital-ocean": "\uf391",
"digital-tachograph": "\uf566",
"directions": "\uf5eb",
"discord": "\uf392",
"discourse": "\uf393",
"divide": "\uf529",
"dizzy": "\uf567",
"dna": "\uf471",
"dochub": "\uf394",
"docker": "\uf395",
"dog": "\uf6d3",
"dollar-sign": "\uf155",
"dolly": "\uf472",
"dolly-flatbed": "\uf474",
"donate": "\uf4b9",
"door-closed": "\uf52a",
"door-open": "\uf52b",
"dot-circle": "\uf192",
"dove": "\uf4ba",
"download": "\uf019",
"draft2digital": "\uf396",
"drafting-compass": "\uf568",
"dragon": "\uf6d5",
"draw-polygon": "\uf5ee",
"dribbble": "\uf17d",
"dribbble-square": "\uf397",
"dropbox": "\uf16b",
"drum": "\uf569",
"drum-steelpan": "\uf56a",
"drumstick-bite": "\uf6d7",
"drupal": "\uf1a9",
"dumbbell": "\uf44b",
"dungeon": "\uf6d9",
"dyalog": "\uf399",
"earlybirds": "\uf39a",
"ebay": "\uf4f4",
"edge": "\uf282",
"edit": "\uf044",
"eject": "\uf052",
"elementor": "\uf430",
"ellipsis-h": "\uf141",
"ellipsis-v": "\uf142",
"ello": "\uf5f1",
"ember": "\uf423",
"empire": "\uf1d1",
"envelope": "\uf0e0",
"envelope-open": "\uf2b6",
"envelope-open-text": "\uf658",
"envelope-square": "\uf199",
"envira": "\uf299",
"equals": "\uf52c",
"eraser": "\uf12d",
"erlang": "\uf39d",
"ethereum": "\uf42e",
"etsy": "\uf2d7",
"euro-sign": "\uf153",
"exchange-alt": "\uf362",
"exclamation": "\uf12a",
"exclamation-circle": "\uf06a",
"exclamation-triangle": "\uf071",
"expand": "\uf065",
"expand-arrows-alt": "\uf31e",
"expeditedssl": "\uf23e",
"external-link-alt": "\uf35d",
"external-link-square-alt": "\uf360",
"eye": "\uf06e",
"eye-dropper": "\uf1fb",
"eye-slash": "\uf070",
"facebook": "\uf09a",
"facebook-f": "\uf39e",
"facebook-messenger": "\uf39f",
"facebook-square": "\uf082",
"fantasy-flight-games": "\uf6dc",
"fast-backward": "\uf049",
"fast-forward": "\uf050",
"fax": "\uf1ac",
"feather": "\uf52d",
"feather-alt": "\uf56b",
"female": "\uf182",
"fighter-jet": "\uf0fb",
"file": "\uf15b",
"file-alt": "\uf15c",
"file-archive": "\uf1c6",
"file-audio": "\uf1c7",
"file-code": "\uf1c9",
"file-contract": "\uf56c",
"file-csv": "\uf6dd",
"file-download": "\uf56d",
"file-excel": "\uf1c3",
"file-export": "\uf56e",
"file-image": "\uf1c5",
"file-import": "\uf56f",
"file-invoice": "\uf570",
"file-invoice-dollar": "\uf571",
"file-medical": "\uf477",
"file-medical-alt": "\uf478",
"file-pdf": "\uf1c1",
"file-powerpoint": "\uf1c4",
"file-prescription": "\uf572",
"file-signature": "\uf573",
"file-upload": "\uf574",
"file-video": "\uf1c8",
"file-word": "\uf1c2",
"fill": "\uf575",
"fill-drip": "\uf576",
"film": "\uf008",
"filter": "\uf0b0",
"fingerprint": "\uf577",
"fire": "\uf06d",
"fire-extinguisher": "\uf134",
"firefox": "\uf269",
"first-aid": "\uf479",
"first-order": "\uf2b0",
"first-order-alt": "\uf50a",
"firstdraft": "\uf3a1",
"fish": "\uf578",
"fist-raised": "\uf6de",
"flag": "\uf024",
"flag-checkered": "\uf11e",
"flag-usa": "\uf74d",
"flask": "\uf0c3",
"flickr": "\uf16e",
"flipboard": "\uf44d",
"flushed": "\uf579",
"fly": "\uf417",
"folder": "\uf07b",
"folder-minus": "\uf65d",
"folder-open": "\uf07c",
"folder-plus": "\uf65e",
"font": "\uf031",
"font-awesome": "\uf2b4",
"font-awesome-alt": "\uf35c",
"font-awesome-flag": "\uf425",
"font-awesome-logo-full": "\uf4e6",
"fonticons": "\uf280",
"fonticons-fi": "\uf3a2",
"football-ball": "\uf44e",
"fort-awesome": "\uf286",
"fort-awesome-alt": "\uf3a3",
"forumbee": "\uf211",
"forward": "\uf04e",
"foursquare": "\uf180",
"free-code-camp": "\uf2c5",
"freebsd": "\uf3a4",
"frog": "\uf52e",
"frown": "\uf119",
"frown-open": "\uf57a",
"fulcrum": "\uf50b",
"funnel-dollar": "\uf662",
"futbol": "\uf1e3",
"galactic-republic": "\uf50c",
"galactic-senate": "\uf50d",
"gamepad": "\uf11b",
"gas-pump": "\uf52f",
"gavel": "\uf0e3",
"gem": "\uf3a5",
"genderless": "\uf22d",
"get-pocket": "\uf265",
"gg": "\uf260",
"gg-circle": "\uf261",
"ghost": "\uf6e2",
"gift": "\uf06b",
"git": "\uf1d3",
"git-square": "\uf1d2",
"github": "\uf09b",
"github-alt": "\uf113",
"github-square": "\uf092",
"gitkraken": "\uf3a6",
"gitlab": "\uf296",
"gitter": "\uf426",
"glass-martini": "\uf000",
"glass-martini-alt": "\uf57b",
"glasses": "\uf530",
"glide": "\uf2a5",
"glide-g": "\uf2a6",
"globe": "\uf0ac",
"globe-africa": "\uf57c",
"globe-americas": "\uf57d",
"globe-asia": "\uf57e",
"gofore": "\uf3a7",
"golf-ball": "\uf450",
"goodreads": "\uf3a8",
"goodreads-g": "\uf3a9",
"google": "\uf1a0",
"google-drive": "\uf3aa",
"google-play": "\uf3ab",
"google-plus": "\uf2b3",
"google-plus-g": "\uf0d5",
"google-plus-square": "\uf0d4",
"google-wallet": "\uf1ee",
"gopuram": "\uf664",
"graduation-cap": "\uf19d",
"gratipay": "\uf184",
"grav": "\uf2d6",
"greater-than": "\uf531",
"greater-than-equal": "\uf532",
"grimace": "\uf57f",
"grin": "\uf580",
"grin-alt": "\uf581",
"grin-beam": "\uf582",
"grin-beam-sweat": "\uf583",
"grin-hearts": "\uf584",
"grin-squint": "\uf585",
"grin-squint-tears": "\uf586",
"grin-stars": "\uf587",
"grin-tears": "\uf588",
"grin-tongue": "\uf589",
"grin-tongue-squint": "\uf58a",
"grin-tongue-wink": "\uf58b",
"grin-wink": "\uf58c",
"grip-horizontal": "\uf58d",
"grip-vertical": "\uf58e",
"gripfire": "\uf3ac",
"grunt": "\uf3ad",
"gulp": "\uf3ae",
"h-square": "\uf0fd",
"hacker-news": "\uf1d4",
"hacker-news-square": "\uf3af",
"hackerrank": "\uf5f7",
"hammer": "\uf6e3",
"hamsa": "\uf665",
"hand-holding": "\uf4bd",
"hand-holding-heart": "\uf4be",
"hand-holding-usd": "\uf4c0",
"hand-lizard": "\uf258",
"hand-paper": "\uf256",
"hand-peace": "\uf25b",
"hand-point-down": "\uf0a7",
"hand-point-left": "\uf0a5",
"hand-point-right": "\uf0a4",
"hand-point-up": "\uf0a6",
"hand-pointer": "\uf25a",
"hand-rock": "\uf255",
"hand-scissors": "\uf257",
"hand-spock": "\uf259",
"hands": "\uf4c2",
"hands-helping": "\uf4c4",
"handshake": "\uf2b5",
"hanukiah": "\uf6e6",
"hashtag": "\uf292",
"hat-wizard": "\uf6e8",
"haykal": "\uf666",
"hdd": "\uf0a0",
"heading": "\uf1dc",
"headphones": "\uf025",
"headphones-alt": "\uf58f",
"headset": "\uf590",
"heart": "\uf004",
"heartbeat": "\uf21e",
"helicopter": "\uf533",
"highlighter": "\uf591",
"hiking": "\uf6ec",
"hippo": "\uf6ed",
"hips": "\uf452",
"hire-a-helper": "\uf3b0",
"history": "\uf1da",
"hockey-puck": "\uf453",
"home": "\uf015",
"hooli": "\uf427",
"hornbill": "\uf592",
"horse": "\uf6f0",
"hospital": "\uf0f8",
"hospital-alt": "\uf47d",
"hospital-symbol": "\uf47e",
"hot-tub": "\uf593",
"hotel": "\uf594",
"hotjar": "\uf3b1",
"hourglass": "\uf254",
"hourglass-end": "\uf253",
"hourglass-half": "\uf252",
"hourglass-start": "\uf251",
"house-damage": "\uf6f1",
"houzz": "\uf27c",
"hryvnia": "\uf6f2",
"html5": "\uf13b",
"hubspot": "\uf3b2",
"i-cursor": "\uf246",
"id-badge": "\uf2c1",
"id-card": "\uf2c2",
"id-card-alt": "\uf47f",
"image": "\uf03e",
"images": "\uf302",
"imdb": "\uf2d8",
"inbox": "\uf01c",
"indent": "\uf03c",
"industry": "\uf275",
"infinity": "\uf534",
"info": "\uf129",
"info-circle": "\uf05a",
"instagram": "\uf16d",
"internet-explorer": "\uf26b",
"ioxhost": "\uf208",
"italic": "\uf033",
"itunes": "\uf3b4",
"itunes-note": "\uf3b5",
"java": "\uf4e4",
"jedi": "\uf669",
"jedi-order": "\uf50e",
"jenkins": "\uf3b6",
"joget": "\uf3b7",
"joint": "\uf595",
"joomla": "\uf1aa",
"journal-whills": "\uf66a",
"js": "\uf3b8",
"js-square": "\uf3b9",
"jsfiddle": "\uf1cc",
"kaaba": "\uf66b",
"kaggle": "\uf5fa",
"key": "\uf084",
"keybase": "\uf4f5",
"keyboard": "\uf11c",
"keycdn": "\uf3ba",
"khanda": "\uf66d",
"kickstarter": "\uf3bb",
"kickstarter-k": "\uf3bc",
"kiss": "\uf596",
"kiss-beam": "\uf597",
"kiss-wink-heart": "\uf598",
"kiwi-bird": "\uf535",
"korvue": "\uf42f",
"landmark": "\uf66f",
"language": "\uf1ab",
"laptop": "\uf109",
"laptop-code": "\uf5fc",
"laravel": "\uf3bd",
"lastfm": "\uf202",
"lastfm-square": "\uf203",
"laugh": "\uf599",
"laugh-beam": "\uf59a",
"laugh-squint": "\uf59b",
"laugh-wink": "\uf59c",
"layer-group": "\uf5fd",
"leaf": "\uf06c",
"leanpub": "\uf212",
"lemon": "\uf094",
"less": "\uf41d",
"less-than": "\uf536",
"less-than-equal": "\uf537",
"level-down-alt": "\uf3be",
"level-up-alt": "\uf3bf",
"life-ring": "\uf1cd",
"lightbulb": "\uf0eb",
"line": "\uf3c0",
"link": "\uf0c1",
"linkedin": "\uf08c",
"linkedin-in": "\uf0e1",
"linode": "\uf2b8",
"linux": "\uf17c",
"lira-sign": "\uf195",
"list": "\uf03a",
"list-alt": "\uf022",
"list-ol": "\uf0cb",
"list-ul": "\uf0ca",
"location-arrow": "\uf124",
"lock": "\uf023",
"lock-open": "\uf3c1",
"long-arrow-alt-down": "\uf309",
"long-arrow-alt-left": "\uf30a",
"long-arrow-alt-right": "\uf30b",
"long-arrow-alt-up": "\uf30c",
"low-vision": "\uf2a8",
"luggage-cart": "\uf59d",
"lyft": "\uf3c3",
"magento": "\uf3c4",
"magic": "\uf0d0",
"magnet": "\uf076",
"mail-bulk": "\uf674",
"mailchimp": "\uf59e",
"male": "\uf183",
"mandalorian": "\uf50f",
"map": "\uf279",
"map-marked": "\uf59f",
"map-marked-alt": "\uf5a0",
"map-marker": "\uf041",
"map-marker-alt": "\uf3c5",
"map-pin": "\uf276",
"map-signs": "\uf277",
"markdown": "\uf60f",
"marker": "\uf5a1",
"mars": "\uf222",
"mars-double": "\uf227",
"mars-stroke": "\uf229",
"mars-stroke-h": "\uf22b",
"mars-stroke-v": "\uf22a",
"mask": "\uf6fa",
"mastodon": "\uf4f6",
"maxcdn": "\uf136",
"medal": "\uf5a2",
"medapps": "\uf3c6",
"medium": "\uf23a",
"medium-m": "\uf3c7",
"medkit": "\uf0fa",
"medrt": "\uf3c8",
"meetup": "\uf2e0",
"megaport": "\uf5a3",
"meh": "\uf11a",
"meh-blank": "\uf5a4",
"meh-rolling-eyes": "\uf5a5",
"memory": "\uf538",
"menorah": "\uf676",
"mercury": "\uf223",
"meteor": "\uf753",
"microchip": "\uf2db",
"microphone": "\uf130",
"microphone-alt": "\uf3c9",
"microphone-alt-slash": "\uf539",
"microphone-slash": "\uf131",
"microscope": "\uf610",
"microsoft": "\uf3ca",
"minus": "\uf068",
"minus-circle": "\uf056",
"minus-square": "\uf146",
"mix": "\uf3cb",
"mixcloud": "\uf289",
"mizuni": "\uf3cc",
"mobile": "\uf10b",
"mobile-alt": "\uf3cd",
"modx": "\uf285",
"monero": "\uf3d0",
"money-bill": "\uf0d6",
"money-bill-alt": "\uf3d1",
"money-bill-wave": "\uf53a",
"money-bill-wave-alt": "\uf53b",
"money-check": "\uf53c",
"money-check-alt": "\uf53d",
"monument": "\uf5a6",
"moon": "\uf186",
"mortar-pestle": "\uf5a7",
"mosque": "\uf678",
"motorcycle": "\uf21c",
"mountain": "\uf6fc",
"mouse-pointer": "\uf245",
"music": "\uf001",
"napster": "\uf3d2",
"neos": "\uf612",
"network-wired": "\uf6ff",
"neuter": "\uf22c",
"newspaper": "\uf1ea",
"nimblr": "\uf5a8",
"nintendo-switch": "\uf418",
"node": "\uf419",
"node-js": "\uf3d3",
"not-equal": "\uf53e",
"notes-medical": "\uf481",
"npm": "\uf3d4",
"ns8": "\uf3d5",
"nutritionix": "\uf3d6",
"object-group": "\uf247",
"object-ungroup": "\uf248",
"odnoklassniki": "\uf263",
"odnoklassniki-square": "\uf264",
"oil-can": "\uf613",
"old-republic": "\uf510",
"om": "\uf679",
"opencart": "\uf23d",
"openid": "\uf19b",
"opera": "\uf26a",
"optin-monster": "\uf23c",
"osi": "\uf41a",
"otter": "\uf700",
"outdent": "\uf03b",
"page4": "\uf3d7",
"pagelines": "\uf18c",
"paint-brush": "\uf1fc",
"paint-roller": "\uf5aa",
"palette": "\uf53f",
"palfed": "\uf3d8",
"pallet": "\uf482",
"paper-plane": "\uf1d8",
"paperclip": "\uf0c6",
"parachute-box": "\uf4cd",
"paragraph": "\uf1dd",
"parking": "\uf540",
"passport": "\uf5ab",
"pastafarianism": "\uf67b",
"paste": "\uf0ea",
"patreon": "\uf3d9",
"pause": "\uf04c",
"pause-circle": "\uf28b",
"paw": "\uf1b0",
"paypal": "\uf1ed",
"peace": "\uf67c",
"pen": "\uf304",
"pen-alt": "\uf305",
"pen-fancy": "\uf5ac",
"pen-nib": "\uf5ad",
"pen-square": "\uf14b",
"pencil-alt": "\uf303",
"pencil-ruler": "\uf5ae",
"penny-arcade": "\uf704",
"people-carry": "\uf4ce",
"percent": "\uf295",
"percentage": "\uf541",
"periscope": "\uf3da",
"person-booth": "\uf756",
"phabricator": "\uf3db",
"phoenix-framework": "\uf3dc",
"phoenix-squadron": "\uf511",
"phone": "\uf095",
"phone-slash": "\uf3dd",
"phone-square": "\uf098",
"phone-volume": "\uf2a0",
"php": "\uf457",
"pied-piper": "\uf2ae",
"pied-piper-alt": "\uf1a8",
"pied-piper-hat": "\uf4e5",
"pied-piper-pp": "\uf1a7",
"piggy-bank": "\uf4d3",
"pills": "\uf484",
"pinterest": "\uf0d2",
"pinterest-p": "\uf231",
"pinterest-square": "\uf0d3",
"place-of-worship": "\uf67f",
"plane": "\uf072",
"plane-arrival": "\uf5af",
"plane-departure": "\uf5b0",
"play": "\uf04b",
"play-circle": "\uf144",
"playstation": "\uf3df",
"plug": "\uf1e6",
"plus": "\uf067",
"plus-circle": "\uf055",
"plus-square": "\uf0fe",
"podcast": "\uf2ce",
"poll": "\uf681",
"poll-h": "\uf682",
"poo": "\uf2fe",
"poo-storm": "\uf75a",
"poop": "\uf619",
"portrait": "\uf3e0",
"pound-sign": "\uf154",
"power-off": "\uf011",
"pray": "\uf683",
"praying-hands": "\uf684",
"prescription": "\uf5b1",
"prescription-bottle": "\uf485",
"prescription-bottle-alt": "\uf486",
"print": "\uf02f",
"procedures": "\uf487",
"product-hunt": "\uf288",
"project-diagram": "\uf542",
"pushed": "\uf3e1",
"puzzle-piece": "\uf12e",
"python": "\uf3e2",
"qq": "\uf1d6",
"qrcode": "\uf029",
"question": "\uf128",
"question-circle": "\uf059",
"quidditch": "\uf458",
"quinscape": "\uf459",
"quora": "\uf2c4",
"quote-left": "\uf10d",
"quote-right": "\uf10e",
"quran": "\uf687",
"r-project": "\uf4f7",
"rainbow": "\uf75b",
"random": "\uf074",
"ravelry": "\uf2d9",
"react": "\uf41b",
"reacteurope": "\uf75d",
"readme": "\uf4d5",
"rebel": "\uf1d0",
"receipt": "\uf543",
"recycle": "\uf1b8",
"red-river": "\uf3e3",
"reddit": "\uf1a1",
"reddit-alien": "\uf281",
"reddit-square": "\uf1a2",
"redo": "\uf01e",
"redo-alt": "\uf2f9",
"registered": "\uf25d",
"renren": "\uf18b",
"reply": "\uf3e5",
"reply-all": "\uf122",
"replyd": "\uf3e6",
"republican": "\uf75e",
"researchgate": "\uf4f8",
"resolving": "\uf3e7",
"retweet": "\uf079",
"rev": "\uf5b2",
"ribbon": "\uf4d6",
"ring": "\uf70b",
"road": "\uf018",
"robot": "\uf544",
"rocket": "\uf135",
"rocketchat": "\uf3e8",
"rockrms": "\uf3e9",
"route": "\uf4d7",
"rss": "\uf09e",
"rss-square": "\uf143",
"ruble-sign": "\uf158",
"ruler": "\uf545",
"ruler-combined": "\uf546",
"ruler-horizontal": "\uf547",
"ruler-vertical": "\uf548",
"running": "\uf70c",
"rupee-sign": "\uf156",
"sad-cry": "\uf5b3",
"sad-tear": "\uf5b4",
"safari": "\uf267",
"sass": "\uf41e",
"save": "\uf0c7",
"schlix": "\uf3ea",
"school": "\uf549",
"screwdriver": "\uf54a",
"scribd": "\uf28a",
"scroll": "\uf70e",
"search": "\uf002",
"search-dollar": "\uf688",
"search-location": "\uf689",
"search-minus": "\uf010",
"search-plus": "\uf00e",
"searchengin": "\uf3eb",
"seedling": "\uf4d8",
"sellcast": "\uf2da",
"sellsy": "\uf213",
"server": "\uf233",
"servicestack": "\uf3ec",
"shapes": "\uf61f",
"share": "\uf064",
"share-alt": "\uf1e0",
"share-alt-square": "\uf1e1",
"share-square": "\uf14d",
"shekel-sign": "\uf20b",
"shield-alt": "\uf3ed",
"ship": "\uf21a",
"shipping-fast": "\uf48b",
"shirtsinbulk": "\uf214",
"shoe-prints": "\uf54b",
"shopping-bag": "\uf290",
"shopping-basket": "\uf291",
"shopping-cart": "\uf07a",
"shopware": "\uf5b5",
"shower": "\uf2cc",
"shuttle-van": "\uf5b6",
"sign": "\uf4d9",
"sign-in-alt": "\uf2f6",
"sign-language": "\uf2a7",
"sign-out-alt": "\uf2f5",
"signal": "\uf012",
"signature": "\uf5b7",
"simplybuilt": "\uf215",
"sistrix": "\uf3ee",
"sitemap": "\uf0e8",
"sith": "\uf512",
"skull": "\uf54c",
"skull-crossbones": "\uf714",
"skyatlas": "\uf216",
"skype": "\uf17e",
"slack": "\uf198",
"slack-hash": "\uf3ef",
"slash": "\uf715",
"sliders-h": "\uf1de",
"slideshare": "\uf1e7",
"smile": "\uf118",
"smile-beam": "\uf5b8",
"smile-wink": "\uf4da",
"smog": "\uf75f",
"smoking": "\uf48d",
"smoking-ban": "\uf54d",
"snapchat": "\uf2ab",
"snapchat-ghost": "\uf2ac",
"snapchat-square": "\uf2ad",
"snowflake": "\uf2dc",
"socks": "\uf696",
"solar-panel": "\uf5ba",
"sort": "\uf0dc",
"sort-alpha-down": "\uf15d",
"sort-alpha-up": "\uf15e",
"sort-amount-down": "\uf160",
"sort-amount-up": "\uf161",
"sort-down": "\uf0dd",
"sort-numeric-down": "\uf162",
"sort-numeric-up": "\uf163",
"sort-up": "\uf0de",
"soundcloud": "\uf1be",
"spa": "\uf5bb",
"space-shuttle": "\uf197",
"speakap": "\uf3f3",
"spider": "\uf717",
"spinner": "\uf110",
"splotch": "\uf5bc",
"spotify": "\uf1bc",
"spray-can": "\uf5bd",
"square": "\uf0c8",
"square-full": "\uf45c",
"square-root-alt": "\uf698",
"squarespace": "\uf5be",
"stack-exchange": "\uf18d",
"stack-overflow": "\uf16c",
"stamp": "\uf5bf",
"star": "\uf005",
"star-and-crescent": "\uf699",
"star-half": "\uf089",
"star-half-alt": "\uf5c0",
"star-of-david": "\uf69a",
"star-of-life": "\uf621",
"staylinked": "\uf3f5",
"steam": "\uf1b6",
"steam-square": "\uf1b7",
"steam-symbol": "\uf3f6",
"step-backward": "\uf048",
"step-forward": "\uf051",
"stethoscope": "\uf0f1",
"sticker-mule": "\uf3f7",
"sticky-note": "\uf249",
"stop": "\uf04d",
"stop-circle": "\uf28d",
"stopwatch": "\uf2f2",
"store": "\uf54e",
"store-alt": "\uf54f",
"strava": "\uf428",
"stream": "\uf550",
"street-view": "\uf21d",
"strikethrough": "\uf0cc",
"stripe": "\uf429",
"stripe-s": "\uf42a",
"stroopwafel": "\uf551",
"studiovinari": "\uf3f8",
"stumbleupon": "\uf1a4",
"stumbleupon-circle": "\uf1a3",
"subscript": "\uf12c",
"subway": "\uf239",
"suitcase": "\uf0f2",
"suitcase-rolling": "\uf5c1",
"sun": "\uf185",
"superpowers": "\uf2dd",
"superscript": "\uf12b",
"supple": "\uf3f9",
"surprise": "\uf5c2",
"swatchbook": "\uf5c3",
"swimmer": "\uf5c4",
"swimming-pool": "\uf5c5",
"synagogue": "\uf69b",
"sync": "\uf021",
"sync-alt": "\uf2f1",
"syringe": "\uf48e",
"table": "\uf0ce",
"table-tennis": "\uf45d",
"tablet": "\uf10a",
"tablet-alt": "\uf3fa",
"tablets": "\uf490",
"tachometer-alt": "\uf3fd",
"tag": "\uf02b",
"tags": "\uf02c",
"tape": "\uf4db",
"tasks": "\uf0ae",
"taxi": "\uf1ba",
"teamspeak": "\uf4f9",
"teeth": "\uf62e",
"teeth-open": "\uf62f",
"telegram": "\uf2c6",
"telegram-plane": "\uf3fe",
"temperature-high": "\uf769",
"temperature-low": "\uf76b",
"tencent-weibo": "\uf1d5",
"terminal": "\uf120",
"text-height": "\uf034",
"text-width": "\uf035",
"th": "\uf00a",
"th-large": "\uf009",
"th-list": "\uf00b",
"the-red-yeti": "\uf69d",
"theater-masks": "\uf630",
"themeco": "\uf5c6",
"themeisle": "\uf2b2",
"thermometer": "\uf491",
"thermometer-empty": "\uf2cb",
"thermometer-full": "\uf2c7",
"thermometer-half": "\uf2c9",
"thermometer-quarter": "\uf2ca",
"thermometer-three-quarters": "\uf2c8",
"think-peaks": "\uf731",
"thumbs-down": "\uf165",
"thumbs-up": "\uf164",
"thumbtack": "\uf08d",
"ticket-alt": "\uf3ff",
"times": "\uf00d",
"times-circle": "\uf057",
"tint": "\uf043",
"tint-slash": "\uf5c7",
"tired": "\uf5c8",
"todo": "\ue29a",
"toggle-off": "\uf204",
"toggle-on": "\uf205",
"toilet-paper": "\uf71e",
"toolbox": "\uf552",
"tooth": "\uf5c9",
"torah": "\uf6a0",
"torii-gate": "\uf6a1",
"tractor": "\uf722",
"trade-federation": "\uf513",
"trademark": "\uf25c",
"traffic-light": "\uf637",
"train": "\uf238",
"transgender": "\uf224",
"transgender-alt": "\uf225",
"trash": "\uf1f8",
"trash-alt": "\uf2ed",
"tree": "\uf1bb",
"trello": "\uf181",
"tripadvisor": "\uf262",
"trophy": "\uf091",
"truck": "\uf0d1",
"truck-loading": "\uf4de",
"truck-monster": "\uf63b",
"truck-moving": "\uf4df",
"truck-pickup": "\uf63c",
"tshirt": "\uf553",
"tty": "\uf1e4",
"tumblr": "\uf173",
"tumblr-square": "\uf174",
"tv": "\uf26c",
"twitch": "\uf1e8",
"twitter": "\uf099",
"twitter-square": "\uf081",
"typo3": "\uf42b",
"uber": "\uf402",
"uikit": "\uf403",
"umbrella": "\uf0e9",
"umbrella-beach": "\uf5ca",
"underline": "\uf0cd",
"undo": "\uf0e2",
"undo-alt": "\uf2ea",
"uniregistry": "\uf404",
"universal-access": "\uf29a",
"university": "\uf19c",
"unlink": "\uf127",
"unlock": "\uf09c",
"unlock-alt": "\uf13e",
"untappd": "\uf405",
"upload": "\uf093",
"usb": "\uf287",
"user": "\uf007",
"user-alt": "\uf406",
"user-alt-slash": "\uf4fa",
"user-astronaut": "\uf4fb",
"user-check": "\uf4fc",
"user-circle": "\uf2bd",
"user-clock": "\uf4fd",
"user-cog": "\uf4fe",
"user-edit": "\uf4ff",
"user-friends": "\uf500",
"user-graduate": "\uf501",
"user-injured": "\uf728",
"user-lock": "\uf502",
"user-md": "\uf0f0",
"user-minus": "\uf503",
"user-ninja": "\uf504",
"user-plus": "\uf234",
"user-secret": "\uf21b",
"user-shield": "\uf505",
"user-slash": "\uf506",
"user-tag": "\uf507",
"user-tie": "\uf508",
"user-times": "\uf235",
"users": "\uf0c0",
"users-cog": "\uf509",
"ussunnah": "\uf407",
"utensil-spoon": "\uf2e5",
"utensils": "\uf2e7",
"vaadin": "\uf408",
"vector-square": "\uf5cb",
"venus": "\uf221",
"venus-double": "\uf226",
"venus-mars": "\uf228",
"viacoin": "\uf237",
"viadeo": "\uf2a9",
"viadeo-square": "\uf2aa",
"vial": "\uf492",
"vials": "\uf493",
"viber": "\uf409",
"video": "\uf03d",
"video-slash": "\uf4e2",
"vihara": "\uf6a7",
"vim": "\ue62b",
"vimeo": "\uf40a",
"vimeo-square": "\uf194",
"vimeo-v": "\uf27d",
"vine": "\uf1ca",
"vk": "\uf189",
"vnv": "\uf40b",
"volleyball-ball": "\uf45f",
"volume-down": "\uf027",
"volume-mute": "\uf6a9",
"volume-off": "\uf026",
"volume-up": "\uf028",
"vote-yea": "\uf772",
"vr-cardboard": "\uf729",
"vuejs": "\uf41f",
"walking": "\uf554",
"wallet": "\uf555",
"warehouse": "\uf494",
"water": "\uf773",
"weebly": "\uf5cc",
"weechat": "\uf1d7",
"weibo": "\uf18a",
"weight": "\uf496",
"weight-hanging": "\uf5cd",
"weixin": "\uf1d7",
"whatsapp": "\uf232",
"whatsapp-square": "\uf40c",
"wheelchair": "\uf193",
"whmcs": "\uf40d",
"wifi": "\uf1eb",
"wikipedia-w": "\uf266",
"wind": "\uf72e",
"window-close": "\uf410",
"window-maximize": "\uf2d0",
"window-minimize": "\uf2d1",
"window-restore": "\uf2d2",
"windows": "\uf17a",
"wine-bottle": "\uf72f",
"wine-glass": "\uf4e3",
"wine-glass-alt": "\uf5ce",
"wix": "\uf5cf",
"wizards-of-the-coast": "\uf730",
"wolf-pack-battalion": "\uf514",
"won-sign": "\uf159",
"wordpress": "\uf19a",
"wordpress-simple": "\uf411",
"wpbeginner": "\uf297",
"wpexplorer": "\uf2de",
"wpforms": "\uf298",
"wpressr": "\uf3e4",
"wrench": "\uf0ad",
"x-ray": "\uf497",
"xbox": "\uf412",
"xing": "\uf168",
"xing-square": "\uf169",
"y-combinator": "\uf23b",
"yahoo": "\uf19e",
"yandex": "\uf413",
"yandex-international": "\uf414",
"yelp": "\uf1e9",
"yen-sign": "\uf157",
"yin-yang": "\uf6ad",
"yoast": "\uf2b1",
"youtube": "\uf167",
"youtube-square": "\uf431",
}
|
adamatom/dotfiles
|
.bin/i3_icons.py
|
Python
|
apache-2.0
| 34,587
|
[
"GULP"
] |
0b6da26d9028a8f0f84118cf76fbe44aea3793afd46430de4328a5f12ed31438
|
"Demonstrates molecular dynamics with constant energy."
from ase import *
from ase.lattice.cubic import FaceCenteredCubic
from ase.md.velocitydistribution import *
from ase.md.verlet import *
# Use Asap for a huge performance increase if it is installed
useAsap = False
if useAsap:
from asap3 import EMT
size = 10
else:
size = 3
# Set up a crystal
atoms = FaceCenteredCubic(directions=[[1,0,0],[0,1,0],[0,0,1]], symbol="Cu",
size=(size,size,size), pbc=True)
# Describe the interatomic interactions with the Effective Medium Theory
atoms.set_calculator(EMT())
# Set the momenta corresponding to T=300K
MaxwellBoltzmannDistribution(atoms, 300*units.kB)
# We want to run MD with constant energy using the VelocityVerlet algorithm.
dyn = VelocityVerlet(atoms, 5*units.fs) # 5 fs time step.
#Function to print the potential, kinetic and total energy
def printenergy(a):
epot = a.get_potential_energy() / len(a)
ekin = a.get_kinetic_energy() / len(a)
print ("Energy per atom: Epot = %.3feV Ekin = %.3feV (T=%3.0fK) Etot = %.3feV" %
(epot, ekin, ekin/(1.5*units.kB), epot+ekin))
# Now run the dynamics: 20 reports, 10 Verlet steps (50 fs) between each.
printenergy(atoms)
for i in range(20):
    dyn.run(10)
    printenergy(atoms)
|
freephys/python_ase
|
doc/tutorials/md/moldyn1.py
|
Python
|
gpl-3.0
| 1,250
|
[
"ASE",
"CRYSTAL"
] |
6bcfbfc586d1c4dcc34d14fa038ec70e333e48e52ddb3554db2d0c83bb7f13be
|
"""
Container page in Studio
"""
from bok_choy.page_object import PageObject
from bok_choy.promise import Promise, EmptyPromise
from . import BASE_URL
from .utils import click_css, confirm_prompt, type_in_codemirror
class ContainerPage(PageObject):
    """
    Container page in Studio
    """
    # CSS selectors for the page header's title display/edit widgets.
    NAME_SELECTOR = '.page-header-title'
    NAME_INPUT_SELECTOR = '.page-header .xblock-field-input'
    NAME_FIELD_WRAPPER_SELECTOR = '.page-header .wrapper-xblock-field'
    # Notification button offered when content groups are missing.
    ADD_MISSING_GROUPS_SELECTOR = '.notification-action-button[data-notification-action="add-missing-groups"]'

    def __init__(self, browser, locator):
        """Create a page object for the container identified by *locator*."""
        super(ContainerPage, self).__init__(browser)
        self.locator = locator

    @property
    def url(self):
        """URL to the container page for an xblock."""
        return "{}/container/{}".format(BASE_URL, self.locator)

    @property
    def name(self):
        """Return the page header title text, or None if no title is present."""
        titles = self.q(css=self.NAME_SELECTOR).text
        if titles:
            return titles[0]
        else:
            return None

    def is_browser_on_page(self):
        """Return True once the container view is loaded and all xblocks have
        finished initializing (successfully or not)."""

        def _xblock_count(class_name, request_token):
            # Count xblock elements carrying both the given state class and request token.
            return len(self.q(css='{body_selector} .xblock.{class_name}[data-request-token="{request_token}"]'.format(
                body_selector=XBlockWrapper.BODY_SELECTOR, class_name=class_name, request_token=request_token
            )).results)

        def _is_finished_loading():
            is_done = False
            # Get the request token of the first xblock rendered on the page and assume it is correct.
            data_request_elements = self.q(css='[data-request-token]')
            if len(data_request_elements) > 0:
                request_token = data_request_elements.first.attrs('data-request-token')[0]
                # Then find the number of Studio xblock wrappers on the page with that request token.
                num_wrappers = len(self.q(css='{} [data-request-token="{}"]'.format(XBlockWrapper.BODY_SELECTOR, request_token)).results)
                # Wait until all components have been loaded and marked as either initialized or failed.
                # See:
                # - common/static/js/xblock/core.js which adds the class "xblock-initialized"
                #   at the end of initializeBlock.
                # - common/static/js/views/xblock.js which adds the class "xblock-initialization-failed"
                #   if the xblock threw an error while initializing.
                num_initialized_xblocks = _xblock_count('xblock-initialized', request_token)
                num_failed_xblocks = _xblock_count('xblock-initialization-failed', request_token)
                is_done = num_wrappers == (num_initialized_xblocks + num_failed_xblocks)
            # Promise check functions return (is_satisfied, result).
            return (is_done, is_done)

        # First make sure that an element with the view-container class is present on the page,
        # and then wait for the loading spinner to go away and all the xblocks to be initialized.
        return (
            self.q(css='body.view-container').present and
            self.q(css='div.ui-loading.is-hidden').present and
            Promise(_is_finished_loading, 'Finished rendering the xblock wrappers.').fulfill()
        )

    def wait_for_component_menu(self):
        """
        Waits until the menu bar of components is present on the page.
        """
        EmptyPromise(
            lambda: self.q(css='div.add-xblock-component').present,
            'Wait for the menu of components to be present'
        ).fulfill()

    @property
    def xblocks(self):
        """
        Return a list of xblocks loaded on the container page.
        """
        return self._get_xblocks()

    @property
    def inactive_xblocks(self):
        """
        Return a list of inactive xblocks loaded on the container page.
        """
        return self._get_xblocks(".is-inactive ")

    @property
    def active_xblocks(self):
        """
        Return a list of active xblocks loaded on the container page.
        """
        return self._get_xblocks(".is-active ")

    @property
    def publish_title(self):
        """
        Returns the title as displayed on the publishing sidebar component.
        """
        return self.q(css='.pub-status').first.text[0]

    @property
    def release_title(self):
        """
        Returns the title before the release date in the publishing sidebar component.
        """
        return self.q(css='.wrapper-release .title').first.text[0]

    @property
    def release_date(self):
        """
        Returns the release date of the unit (with ancestor inherited from), as displayed
        in the publishing sidebar component.
        """
        return self.q(css='.wrapper-release .copy').first.text[0]

    @property
    def last_saved_text(self):
        """
        Returns the last saved message as displayed in the publishing sidebar component.
        """
        return self.q(css='.wrapper-last-draft').first.text[0]

    @property
    def last_published_text(self):
        """
        Returns the last published message as displayed in the sidebar.
        """
        return self.q(css='.wrapper-last-publish').first.text[0]

    @property
    def currently_visible_to_students(self):
        """
        Returns True if the unit is marked as currently visible to students
        (meaning that a warning is being displayed).
        """
        warnings = self.q(css='.container-message .warning')
        if not warnings.is_present():
            return False
        # Matching the exact warning text distinguishes this warning from others.
        warning_text = warnings.first.text[0]
        return warning_text == "Caution: The last published version of this unit is live. By publishing changes you will change the student experience."

    def shows_inherited_staff_lock(self, parent_type=None, parent_name=None):
        """
        Returns True if the unit inherits staff lock from a section or subsection.
        """
        # NOTE(review): parent_type and parent_name are accepted but unused here;
        # presumably kept for call-site compatibility -- confirm against callers.
        return self.q(css='.bit-publishing .wrapper-visibility .copy .inherited-from').visible

    @property
    def sidebar_visibility_message(self):
        """
        Returns the text within the sidebar visibility section.
        """
        return self.q(css='.bit-publishing .wrapper-visibility').first.text[0]

    @property
    def publish_action(self):
        """
        Returns the link for publishing a unit.
        """
        return self.q(css='.action-publish').first

    def discard_changes(self):
        """
        Discards draft changes (which will then re-render the page).
        """
        click_css(self, 'a.action-discard', 0, require_notification=False)
        confirm_prompt(self)
        self.wait_for_ajax()

    @property
    def is_staff_locked(self):
        """ Returns True if staff lock is currently enabled, False otherwise """
        # The lock state is indicated by a checked-checkbox icon on the toggle link.
        for attr in self.q(css='a.action-staff-lock>i').attrs('class'):
            if 'fa-check-square-o' in attr:
                return True
        return False

    def toggle_staff_lock(self, inherits_staff_lock=False):
        """
        Toggles "hide from students" which enables or disables a staff-only lock.

        Returns True if the lock is now enabled, else False.
        """
        was_locked_initially = self.is_staff_locked
        if not was_locked_initially:
            # Enabling the lock needs no confirmation dialog.
            self.q(css='a.action-staff-lock').first.click()
        else:
            click_css(self, 'a.action-staff-lock', 0, require_notification=False)
            # Removing an explicit (non-inherited) lock pops a confirmation prompt.
            if not inherits_staff_lock:
                confirm_prompt(self)
        self.wait_for_ajax()
        return not was_locked_initially

    def view_published_version(self):
        """
        Clicks "View Live Version", which will open the published version of the unit page in the LMS.

        Switches the browser to the newly opened LMS window.
        """
        self.q(css='.button-view').first.click()
        self._switch_to_lms()

    def preview(self):
        """
        Clicks "Preview Changes", which will open the draft version of the unit page in the LMS.

        Switches the browser to the newly opened LMS window.
        """
        self.q(css='.button-preview').first.click()
        self._switch_to_lms()

    def _switch_to_lms(self):
        """
        Assumes LMS has opened-- switches to that window.
        """
        browser_window_handles = self.browser.window_handles
        # Switch to browser window that shows HTML Unit in LMS
        # The last handle represents the latest windows opened
        self.browser.switch_to_window(browser_window_handles[-1])

    def _get_xblocks(self, prefix=""):
        """Return XBlockWrapper objects for each xblock matching *prefix* + BODY_SELECTOR."""
        return self.q(css=prefix + XBlockWrapper.BODY_SELECTOR).map(
            lambda el: XBlockWrapper(self.browser, el.get_attribute('data-locator'))).results

    def duplicate(self, source_index):
        """
        Duplicate the item with index source_index (based on vertical placement in page).
        """
        click_css(self, 'a.duplicate-button', source_index)

    def delete(self, source_index):
        """
        Delete the item with index source_index (based on vertical placement in page).
        Only visible items are counted in the source_index.
        The index of the first item is 0.
        """
        # Click the delete button
        click_css(self, 'a.delete-button', source_index, require_notification=False)
        # Click the confirmation dialog button
        confirm_prompt(self)

    def edit(self):
        """
        Clicks the "edit" button for the first component on the page.
        """
        # NOTE(review): _click_edit is presumably a module-level helper defined
        # elsewhere in this file -- confirm it exists.
        return _click_edit(self, '.edit-button', '.xblock-studio_view')

    def add_missing_groups(self):
        """
        Click the "add missing groups" link.
        Note that this does an ajax call.
        """
        self.q(css=self.ADD_MISSING_GROUPS_SELECTOR).first.click()
        self.wait_for_ajax()
        # Wait until all xblocks rendered.
        self.wait_for_page()

    def missing_groups_button_present(self):
        """
        Returns True if the "add missing groups" button is present.
        """
        return self.q(css=self.ADD_MISSING_GROUPS_SELECTOR).present

    def get_xblock_information_message(self):
        """
        Returns an information message for the container page.
        """
        return self.q(css=".xblock-message.information").first.text[0]

    def is_inline_editing_display_name(self):
        """
        Return whether this container's display name is in its editable form.
        """
        return "is-editing" in self.q(css=self.NAME_FIELD_WRAPPER_SELECTOR).first.attrs("class")[0]
class XBlockWrapper(PageObject):
    """
    A PageObject representing a wrapper around an XBlock child shown on the Studio container page.
    """
    url = None
    BODY_SELECTOR = '.studio-xblock-wrapper'
    NAME_SELECTOR = '.xblock-display-name'
    VALIDATION_SELECTOR = '.xblock-message.validation'
    # CSS selectors for the tabs and buttons available in the component editor.
    COMPONENT_BUTTONS = {
        'basic_tab': '.editor-tabs li.inner_tab_wrap:nth-child(1) > a',
        'advanced_tab': '.editor-tabs li.inner_tab_wrap:nth-child(2) > a',
        'settings_tab': '.editor-modes .settings-button',
        'save_settings': '.action-save',
    }

    def __init__(self, browser, locator):
        """
        Arguments:
            browser: the Selenium browser instance shared by all page objects.
            locator (str): the xblock's usage locator (its data-locator attribute).
        """
        super(XBlockWrapper, self).__init__(browser)
        self.locator = locator

    def is_browser_on_page(self):
        """Return True when this xblock's wrapper element is present in the DOM."""
        return self.q(css='{}[data-locator="{}"]'.format(self.BODY_SELECTOR, self.locator)).present

    def _bounded_selector(self, selector):
        """
        Return `selector`, but limited to this particular `XBlockWrapper` context
        (scoped by this xblock's data-locator attribute).
        """
        return '{}[data-locator="{}"] {}'.format(
            self.BODY_SELECTOR,
            self.locator,
            selector
        )

    @property
    def student_content(self):
        """
        Returns the text content of the xblock as displayed on the container page.
        """
        return self.q(css=self._bounded_selector('.xblock-student_view'))[0].text

    @property
    def author_content(self):
        """
        Returns the text content of the xblock as displayed on the container page.
        (For blocks which implement a distinct author_view).
        """
        return self.q(css=self._bounded_selector('.xblock-author_view'))[0].text

    @property
    def name(self):
        """Return this xblock's display name, or None when no title element exists."""
        titles = self.q(css=self._bounded_selector(self.NAME_SELECTOR)).text
        if titles:
            return titles[0]
        else:
            return None

    @property
    def children(self):
        """
        Will return any first-generation descendant xblocks of this xblock.
        """
        descendants = self.q(css=self._bounded_selector(self.BODY_SELECTOR)).map(
            lambda el: XBlockWrapper(self.browser, el.get_attribute('data-locator'))).results
        # Now remove any non-direct descendants.
        grandkids = []
        for descendant in descendants:
            grandkids.extend(descendant.children)
        grand_locators = [grandkid.locator for grandkid in grandkids]
        return [descendant for descendant in descendants if descendant.locator not in grand_locators]

    @property
    def has_validation_message(self):
        """ Is a validation warning/error/message shown? """
        return self.q(css=self._bounded_selector(self.VALIDATION_SELECTOR)).present

    def _validation_paragraph(self, css_class):
        """ Helper method to return the <p> element of a validation warning """
        return self.q(css=self._bounded_selector('{} p.{}'.format(self.VALIDATION_SELECTOR, css_class)))

    @property
    def has_validation_warning(self):
        """ Is a validation warning shown? """
        return self._validation_paragraph('warning').present

    @property
    def has_validation_error(self):
        """ Is a validation error shown? """
        return self._validation_paragraph('error').present

    @property
    # pylint: disable=invalid-name
    def has_validation_not_configured_warning(self):
        """ Is a validation "not configured" message shown? """
        return self._validation_paragraph('not-configured').present

    @property
    def validation_warning_text(self):
        """ Get the text of the validation warning. """
        return self._validation_paragraph('warning').text[0]

    @property
    def validation_error_text(self):
        """ Get the text of the validation error. """
        return self._validation_paragraph('error').text[0]

    @property
    def validation_error_messages(self):
        """Return the text of each individual error item in the validation message."""
        return self.q(css=self._bounded_selector('{} .xblock-message-item.error'.format(self.VALIDATION_SELECTOR))).text

    @property
    # pylint: disable=invalid-name
    def validation_not_configured_warning_text(self):
        """ Get the text of the validation "not configured" message. """
        return self._validation_paragraph('not-configured').text[0]

    @property
    def preview_selector(self):
        """CSS selector matching this xblock's student or author preview area."""
        return self._bounded_selector('.xblock-student_view,.xblock-author_view')

    @property
    def has_group_visibility_set(self):
        """Return whether this xblock is marked as having group visibility configured."""
        return self.q(css=self._bounded_selector('.wrapper-xblock.has-group-visibility-set')).is_present()

    @property
    def has_edit_visibility_button(self):
        """
        Returns true if this xblock has an 'edit visibility' button
        :return:
        """
        return self.q(css=self._bounded_selector('.visibility-button')).is_present()

    def go_to_container(self):
        """
        Open the container page linked to by this xblock, and return
        an initialized :class:`.ContainerPage` for that xblock.
        """
        return ContainerPage(self.browser, self.locator).visit()

    def edit(self):
        """
        Clicks the "edit" button for this xblock.
        """
        return _click_edit(self, '.edit-button', '.xblock-studio_view', self._bounded_selector)

    def edit_visibility(self):
        """
        Clicks the edit visibility button for this xblock.
        """
        return _click_edit(self, '.visibility-button', '.xblock-visibility_view', self._bounded_selector)

    def open_advanced_tab(self):
        """
        Click on Advanced Tab.
        """
        self._click_button('advanced_tab')

    def open_basic_tab(self):
        """
        Click on Basic Tab.
        """
        self._click_button('basic_tab')

    def open_settings_tab(self):
        """
        If editing, click on the "Settings" tab
        """
        self._click_button('settings_tab')

    def set_field_val(self, field_display_name, field_value):
        """
        If editing, set the value of a field.
        """
        # Set the input value and fire 'change' via jQuery so the editor notices.
        selector = '{} li.field label:contains("{}") + input'.format(self.editor_selector, field_display_name)
        script = "$(arguments[0]).val(arguments[1]).change();"
        self.browser.execute_script(script, selector, field_value)

    def reset_field_val(self, field_display_name):
        """
        If editing, reset the value of a field to its default.
        """
        scope = '{} li.field label:contains("{}")'.format(self.editor_selector, field_display_name)
        script = "$(arguments[0]).siblings('.setting-clear').click();"
        self.browser.execute_script(script, scope)

    def set_codemirror_text(self, text, index=0):
        """
        Set the text of a CodeMirror editor that is part of this xblock's settings.
        """
        type_in_codemirror(self, index, text, find_prefix='$("{}").find'.format(self.editor_selector))

    def save_settings(self):
        """
        Click on settings Save button.
        """
        self._click_button('save_settings')

    @property
    def editor_selector(self):
        """CSS selector for the Studio editor view of this xblock."""
        return '.xblock-studio_view'

    def _click_button(self, button_name):
        """
        Click on a button as specified by `button_name`

        Arguments:
            button_name (str): button name
        """
        self.q(css=self.COMPONENT_BUTTONS[button_name]).first.click()
        self.wait_for_ajax()

    def go_to_group_configuration_page(self):
        """
        Go to the Group Configuration used by the component.
        """
        self.q(css=self._bounded_selector('span.message-text a')).first.click()

    def is_placeholder(self):
        """
        Checks to see if the XBlock is rendered as a placeholder without a preview.
        """
        return not self.q(css=self._bounded_selector('.wrapper-xblock article')).present

    @property
    def group_configuration_link_name(self):
        """
        Get Group Configuration name from link.
        """
        return self.q(css=self._bounded_selector('span.message-text a')).first.text[0]
def _click_edit(page_object, button_css, view_css, bounded_selector=lambda x: x):
    """
    Click on the first editing button found and wait for the Studio editor to be present.

    Arguments:
        page_object (PageObject): page object whose query interface is used.
        button_css (str): CSS selector of the edit button to click.
        view_css (str): CSS selector identifying the editor once it has loaded.
        bounded_selector (callable): optional function used to scope `button_css`
            to a particular element; defaults to the identity function.

    Returns:
        The `page_object` argument, to allow chaining.
    """
    # Fixed: the default used the Python-2-only parenthesized parameter form
    # ``lambda(x): x`` (tuple-unpacking syntax, a SyntaxError on Python 3);
    # ``lambda x: x`` is equivalent and portable.
    page_object.q(css=bounded_selector(button_css)).first.click()
    EmptyPromise(
        lambda: page_object.q(css=view_css).present,
        'Wait for the Studio editor to be present'
    ).fulfill()
    return page_object
|
valtech-mooc/edx-platform
|
common/test/acceptance/pages/studio/container.py
|
Python
|
agpl-3.0
| 18,991
|
[
"VisIt"
] |
14ca9fbcfc24efe3c3b20c61b5bad15f7c3756582ed466e75f11514179ab7a82
|
# htmlwindow-1.py
# Very simple demo of HTMLWindow.
from wax import *

# Static HTML fragment rendered inside the HTMLWindow at startup.
HTML = """\
<h3>HTMLWindow</h3>
<p>Hello friends! This is some simple <b>HTML</b>.</p>
<p>Flourish & Blotts</p>
<p>Also visit <a href="http://www.python.org/">www.python.org</a>!</p>
"""

class MainFrame(Frame):
    def Body(self):
        # Build the frame body: a single HTMLWindow filling the client area.
        self.htmlwindow = HTMLWindow(self)
        self.AddComponent(self.htmlwindow, expand=1, stretch=1)
        self.Pack()
        self.Size = (500, 400)
        # Load the demo HTML after layout so it renders immediately.
        self.htmlwindow.AppendToPage(HTML)

app = Application(MainFrame, title='htmlwindow-1.py')
app.Run()
|
MSMBA/msmba-workflow
|
msmba-workflow/srclib/wax/examples/htmlwindow-1.py
|
Python
|
gpl-2.0
| 579
|
[
"VisIt"
] |
d4dce3f5ae542dcdb656c382a3a143d3c158c8a9d6bbee7aedaf3ee07657cecf
|
#!/usr/bin/env python
"""Universal feed parser
Handles RSS 0.9x, RSS 1.0, RSS 2.0, CDF, Atom 0.3, and Atom 1.0 feeds
Visit http://feedparser.org/ for the latest version
Visit http://feedparser.org/docs/ for the latest documentation
Required: Python 2.4 or later
Recommended: CJKCodecs and iconv_codec <http://cjkpython.i18n.org/>
"""
__version__ = "5.0.1"
__license__ = """Copyright (c) 2002-2008, Mark Pilgrim, All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 'AS IS'
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE."""
__author__ = "Mark Pilgrim <http://diveintomark.org/>"
__contributors__ = ["Jason Diamond <http://injektilo.org/>",
"John Beimler <http://john.beimler.org/>",
"Fazal Majid <http://www.majid.info/mylos/weblog/>",
"Aaron Swartz <http://aaronsw.com/>",
"Kevin Marks <http://epeus.blogspot.com/>",
"Sam Ruby <http://intertwingly.net/>",
"Ade Oshineye <http://blog.oshineye.com/>",
"Martin Pool <http://sourcefrog.net/>",
"Kurt McKee <http://kurtmckee.org/>"]
_debug = 0
# HTTP "User-Agent" header to send to servers when downloading feeds.
# If you are embedding feedparser in a larger application, you should
# change this to your application name and URL.
USER_AGENT = "UniversalFeedParser/%s +http://feedparser.org/" % __version__
# HTTP "Accept" header to send to servers when downloading feeds. If you don't
# want to send an Accept header, set this to None.
ACCEPT_HEADER = "application/atom+xml,application/rdf+xml,application/rss+xml,application/x-netcdf,application/xml;q=0.9,text/xml;q=0.2,*/*;q=0.1"
# List of preferred XML parsers, by SAX driver name. These will be tried first,
# but if they're not installed, Python will keep searching through its own list
# of pre-installed parsers until it finds one that supports everything we need.
PREFERRED_XML_PARSERS = ["drv_libxml2"]
# If you want feedparser to automatically run HTML markup through HTML Tidy, set
# this to 1. Requires mxTidy <http://www.egenix.com/files/python/mxTidy.html>
# or utidylib <http://utidylib.berlios.de/>.
TIDY_MARKUP = 0
# List of Python interfaces for HTML Tidy, in order of preference. Only useful
# if TIDY_MARKUP = 1
PREFERRED_TIDY_INTERFACES = ["uTidy", "mxTidy"]
# If you want feedparser to automatically resolve all relative URIs, set this
# to 1.
RESOLVE_RELATIVE_URIS = 1
# If you want feedparser to automatically sanitize all potentially unsafe
# HTML content, set this to 1.
SANITIZE_HTML = 1
# ---------- Python 3 modules (make it work if possible) ----------
try:
import rfc822
except ImportError:
from email import _parseaddr as rfc822
try:
# Python 3.1 introduces bytes.maketrans and simultaneously
# deprecates string.maketrans; use bytes.maketrans if possible
_maketrans = bytes.maketrans
except (NameError, AttributeError):
import string
_maketrans = string.maketrans
# base64 support for Atom feeds that contain embedded binary data
try:
import base64, binascii
# Python 3.1 deprecates decodestring in favor of decodebytes
_base64decode = getattr(base64, 'decodebytes', base64.decodestring)
except:
base64 = binascii = None
def _s2bytes(s):
# Convert a UTF-8 str to bytes if the interpreter is Python 3
try:
return bytes(s, 'utf8')
except (NameError, TypeError):
# In Python 2.5 and below, bytes doesn't exist (NameError)
# In Python 2.6 and above, bytes and str are the same (TypeError)
return s
def _l2bytes(l):
# Convert a list of ints to bytes if the interpreter is Python 3
try:
if bytes is not str:
# In Python 2.6 and above, this call won't raise an exception
# but it will return bytes([65]) as '[65]' instead of 'A'
return bytes(l)
raise NameError
except NameError:
return ''.join(map(chr, l))
# If you want feedparser to allow all URL schemes, set this to ()
# List culled from Python's urlparse documentation at:
# http://docs.python.org/library/urlparse.html
# as well as from "URI scheme" at Wikipedia:
# https://secure.wikimedia.org/wikipedia/en/wiki/URI_scheme
# Many more will likely need to be added!
ACCEPTABLE_URI_SCHEMES = (
'file', 'ftp', 'gopher', 'h323', 'hdl', 'http', 'https', 'imap', 'mailto',
'mms', 'news', 'nntp', 'prospero', 'rsync', 'rtsp', 'rtspu', 'sftp',
'shttp', 'sip', 'sips', 'snews', 'svn', 'svn+ssh', 'telnet', 'wais',
# Additional common-but-unofficial schemes
'aim', 'callto', 'cvs', 'facetime', 'feed', 'git', 'gtalk', 'irc', 'ircs',
'irc6', 'itms', 'mms', 'msnim', 'skype', 'ssh', 'smb', 'svn', 'ymsg',
)
#ACCEPTABLE_URI_SCHEMES = ()
# ---------- required modules (should come with any Python distribution) ----------
import sgmllib, re, sys, copy, urlparse, time, types, cgi, urllib, urllib2, datetime
try:
from io import BytesIO as _StringIO
except ImportError:
try:
from cStringIO import StringIO as _StringIO
except:
from StringIO import StringIO as _StringIO
# ---------- optional modules (feedparser will work without these, but with reduced functionality) ----------
# gzip is included with most Python distributions, but may not be available if you compiled your own
try:
import gzip
except:
gzip = None
try:
import zlib
except:
zlib = None
# If a real XML parser is available, feedparser will attempt to use it. feedparser has
# been tested with the built-in SAX parser, PyXML, and libxml2. On platforms where the
# Python distribution does not come with an XML parser (such as Mac OS X 10.2 and some
# versions of FreeBSD), feedparser will quietly fall back on regex-based parsing.
try:
import xml.sax
xml.sax.make_parser(PREFERRED_XML_PARSERS) # test for valid parsers
from xml.sax.saxutils import escape as _xmlescape
_XML_AVAILABLE = 1
except:
_XML_AVAILABLE = 0
def _xmlescape(data,entities={}):
data = data.replace('&', '&')
data = data.replace('>', '>')
data = data.replace('<', '<')
for char, entity in entities:
data = data.replace(char, entity)
return data
# cjkcodecs and iconv_codec provide support for more character encodings.
# Both are available from http://cjkpython.i18n.org/
try:
import cjkcodecs.aliases
except:
pass
try:
import iconv_codec
except:
pass
# chardet library auto-detects character encodings
# Download from http://chardet.feedparser.org/
try:
import chardet
if _debug:
import chardet.constants
chardet.constants._debug = 1
except:
chardet = None
# reversable htmlentitydefs mappings for Python 2.2
try:
from htmlentitydefs import name2codepoint, codepoint2name
except:
import htmlentitydefs
name2codepoint={}
codepoint2name={}
for (name,codepoint) in htmlentitydefs.entitydefs.iteritems():
if codepoint.startswith('&#'): codepoint=unichr(int(codepoint[2:-1]))
name2codepoint[name]=ord(codepoint)
codepoint2name[ord(codepoint)]=name
# BeautifulSoup parser used for parsing microformats from embedded HTML content
# http://www.crummy.com/software/BeautifulSoup/
# feedparser is tested with BeautifulSoup 3.0.x, but it might work with the
# older 2.x series. If it doesn't, and you can figure out why, I'll accept a
# patch and modify the compatibility statement accordingly.
try:
import BeautifulSoup
except:
BeautifulSoup = None
# ---------- don't touch these ----------
# Internal exception hierarchy used to signal encoding/content-type issues
# discovered while fetching or sniffing a feed.
class ThingsNobodyCaresAboutButMe(Exception): pass
class CharacterEncodingOverride(ThingsNobodyCaresAboutButMe): pass
class CharacterEncodingUnknown(ThingsNobodyCaresAboutButMe): pass
class NonXMLContentType(ThingsNobodyCaresAboutButMe): pass
class UndeclaredNamespace(Exception): pass

# Patch sgmllib's module-level regexes so its tokenizer accepts the tag
# names, declarations, and character references found in real-world feeds.
sgmllib.tagfind = re.compile('[a-zA-Z][-_.:a-zA-Z0-9]*')
sgmllib.special = re.compile('<!')
sgmllib.charref = re.compile('&#(\d+|[xX][0-9a-fA-F]+);')
# Replace sgmllib's endbracket regex when the stock one mishandles a '<'
# preceded by whitespace (behavior varies across Python versions).
if sgmllib.endbracket.search(' <').start(0):
    class EndBracketRegEx:
        """Drop-in replacement for sgmllib.endbracket with attribute-aware matching."""
        def __init__(self):
            # Overriding the built-in sgmllib.endbracket regex allows the
            # parser to find angle brackets embedded in element attributes.
            self.endbracket = re.compile('''([^'"<>]|"[^"]*"(?=>|/|\s|\w+=)|'[^']*'(?=>|/|\s|\w+=))*(?=[<>])|.*?(?=[<>])''')
        def search(self, string, index=0):
            match = self.endbracket.match(string, index)
            if match is not None:
                # Returning a new object in the calling thread's context
                # resolves a thread-safety issue.
                return EndBracketMatch(match)
            return None
    class EndBracketMatch:
        """Minimal match wrapper exposing only the start() sgmllib relies on."""
        def __init__(self, match):
            self.match = match
        def start(self, n):
            # sgmllib asks for the start of the bracket; map it onto the
            # wrapped match's end, which is where our regex leaves the cursor.
            return self.match.end(n)
    sgmllib.endbracket = EndBracketRegEx()
SUPPORTED_VERSIONS = {'': 'unknown',
'rss090': 'RSS 0.90',
'rss091n': 'RSS 0.91 (Netscape)',
'rss091u': 'RSS 0.91 (Userland)',
'rss092': 'RSS 0.92',
'rss093': 'RSS 0.93',
'rss094': 'RSS 0.94',
'rss20': 'RSS 2.0',
'rss10': 'RSS 1.0',
'rss': 'RSS (unknown version)',
'atom01': 'Atom 0.1',
'atom02': 'Atom 0.2',
'atom03': 'Atom 0.3',
'atom10': 'Atom 1.0',
'atom': 'Atom (unknown version)',
'cdf': 'CDF',
'hotrss': 'Hot RSS'
}
try:
    UserDict = dict
except NameError:
    # Python 2.1 does not have dict
    from UserDict import UserDict
    def dict(aList):
        # Minimal dict() replacement: build a mapping from (key, value) pairs.
        rc = {}
        for k, v in aList:
            rc[k] = v
        return rc
class FeedParserDict(UserDict):
    """Dict subclass that normalizes legacy feed-element names onto canonical
    keys and exposes stored items as attributes (d.title == d['title'])."""
    # Maps legacy/alternate element names to their canonical key (or, as a
    # list, to several candidate keys tried in order).
    keymap = {'channel': 'feed',
              'items': 'entries',
              'guid': 'id',
              'date': 'updated',
              'date_parsed': 'updated_parsed',
              'description': ['summary', 'subtitle'],
              'url': ['href'],
              'modified': 'updated',
              'modified_parsed': 'updated_parsed',
              'issued': 'published',
              'issued_parsed': 'published_parsed',
              'copyright': 'rights',
              'copyright_detail': 'rights_detail',
              'tagline': 'subtitle',
              'tagline_detail': 'subtitle_detail'}
    def __getitem__(self, key):
        # 'category', 'enclosures', 'license' and 'categories' are virtual
        # keys synthesized from the underlying 'tags' / 'links' entries.
        if key == 'category':
            return UserDict.__getitem__(self, 'tags')[0]['term']
        if key == 'enclosures':
            norel = lambda link: FeedParserDict([(name,value) for (name,value) in link.items() if name!='rel'])
            return [norel(link) for link in UserDict.__getitem__(self, 'links') if link['rel']=='enclosure']
        if key == 'license':
            for link in UserDict.__getitem__(self, 'links'):
                if link['rel']=='license' and link.has_key('href'):
                    return link['href']
        if key == 'categories':
            return [(tag['scheme'], tag['term']) for tag in UserDict.__getitem__(self, 'tags')]
        realkey = self.keymap.get(key, key)
        # keymap values may be a list of candidate keys; first one present wins.
        if type(realkey) == types.ListType:
            for k in realkey:
                if UserDict.__contains__(self, k):
                    return UserDict.__getitem__(self, k)
        # Prefer an exact stored key over the remapped one.
        if UserDict.__contains__(self, key):
            return UserDict.__getitem__(self, key)
        return UserDict.__getitem__(self, realkey)
    def __setitem__(self, key, value):
        # Store under the canonical key rather than the legacy alias.
        for k in self.keymap.keys():
            if key == k:
                key = self.keymap[k]
                if type(key) == types.ListType:
                    key = key[0]
        return UserDict.__setitem__(self, key, value)
    def get(self, key, default=None):
        # Route through has_key/__getitem__ so key remapping still applies.
        if self.has_key(key):
            return self[key]
        else:
            return default
    def setdefault(self, key, value):
        if not self.has_key(key):
            self[key] = value
        return self[key]
    def has_key(self, key):
        # A key "exists" if it resolves either as an attribute or a dict entry.
        try:
            return hasattr(self, key) or UserDict.__contains__(self, key)
        except AttributeError:
            return False
    # This alias prevents the 2to3 tool from changing the semantics of the
    # __contains__ function below and exhausting the maximum recursion depth
    __has_key = has_key
    def __getattr__(self, key):
        # Fall back to dict lookup for unknown attributes (non-underscore only).
        try:
            return self.__dict__[key]
        except KeyError:
            pass
        try:
            assert not key.startswith('_')
            return self.__getitem__(key)
        except:
            raise AttributeError, "object has no attribute '%s'" % key
    def __setattr__(self, key, value):
        # Underscore names and the UserDict 'data' slot are real attributes;
        # everything else is stored as a dict entry.
        if key.startswith('_') or key == 'data':
            self.__dict__[key] = value
        else:
            return self.__setitem__(key, value)
    def __contains__(self, key):
        return self.__has_key(key)
def zopeCompatibilityHack():
    # Replace the FeedParserDict class with a factory returning plain dicts,
    # for environments (e.g. Zope) that cannot handle the custom mapping type.
    global FeedParserDict
    del FeedParserDict
    def FeedParserDict(aDict=None):
        rc = {}
        if aDict:
            rc.update(aDict)
        return rc
# Translation table for _ebcdic_to_ascii, built lazily on first use.
_ebcdic_to_ascii_map = None
def _ebcdic_to_ascii(s):
    """Translate an EBCDIC-encoded byte string *s* to its ASCII equivalent."""
    global _ebcdic_to_ascii_map
    if not _ebcdic_to_ascii_map:
        # EBCDIC -> ASCII code point mapping, indexed by EBCDIC byte value.
        emap = (
            0,1,2,3,156,9,134,127,151,141,142,11,12,13,14,15,
            16,17,18,19,157,133,8,135,24,25,146,143,28,29,30,31,
            128,129,130,131,132,10,23,27,136,137,138,139,140,5,6,7,
            144,145,22,147,148,149,150,4,152,153,154,155,20,21,158,26,
            32,160,161,162,163,164,165,166,167,168,91,46,60,40,43,33,
            38,169,170,171,172,173,174,175,176,177,93,36,42,41,59,94,
            45,47,178,179,180,181,182,183,184,185,124,44,37,95,62,63,
            186,187,188,189,190,191,192,193,194,96,58,35,64,39,61,34,
            195,97,98,99,100,101,102,103,104,105,196,197,198,199,200,201,
            202,106,107,108,109,110,111,112,113,114,203,204,205,206,207,208,
            209,126,115,116,117,118,119,120,121,122,210,211,212,213,214,215,
            216,217,218,219,220,221,222,223,224,225,226,227,228,229,230,231,
            123,65,66,67,68,69,70,71,72,73,232,233,234,235,236,237,
            125,74,75,76,77,78,79,80,81,82,238,239,240,241,242,243,
            92,159,83,84,85,86,87,88,89,90,244,245,246,247,248,249,
            48,49,50,51,52,53,54,55,56,57,250,251,252,253,254,255
            )
        _ebcdic_to_ascii_map = _maketrans( \
            _l2bytes(range(256)), _l2bytes(emap))
    return s.translate(_ebcdic_to_ascii_map)
_cp1252 = {
unichr(128): unichr(8364), # euro sign
unichr(130): unichr(8218), # single low-9 quotation mark
unichr(131): unichr( 402), # latin small letter f with hook
unichr(132): unichr(8222), # double low-9 quotation mark
unichr(133): unichr(8230), # horizontal ellipsis
unichr(134): unichr(8224), # dagger
unichr(135): unichr(8225), # double dagger
unichr(136): unichr( 710), # modifier letter circumflex accent
unichr(137): unichr(8240), # per mille sign
unichr(138): unichr( 352), # latin capital letter s with caron
unichr(139): unichr(8249), # single left-pointing angle quotation mark
unichr(140): unichr( 338), # latin capital ligature oe
unichr(142): unichr( 381), # latin capital letter z with caron
unichr(145): unichr(8216), # left single quotation mark
unichr(146): unichr(8217), # right single quotation mark
unichr(147): unichr(8220), # left double quotation mark
unichr(148): unichr(8221), # right double quotation mark
unichr(149): unichr(8226), # bullet
unichr(150): unichr(8211), # en dash
unichr(151): unichr(8212), # em dash
unichr(152): unichr( 732), # small tilde
unichr(153): unichr(8482), # trade mark sign
unichr(154): unichr( 353), # latin small letter s with caron
unichr(155): unichr(8250), # single right-pointing angle quotation mark
unichr(156): unichr( 339), # latin small ligature oe
unichr(158): unichr( 382), # latin small letter z with caron
unichr(159): unichr( 376)} # latin capital letter y with diaeresis
# Captures a URI scheme plus '://' and any spurious extra slashes after it.
_urifixer = re.compile('^([A-Za-z][A-Za-z0-9+-.]*://)(/*)(.*?)')
def _urljoin(base, uri):
    """Join *uri* to *base*, tolerating malformed inputs."""
    # Collapse redundant slashes right after the scheme (e.g. 'http:////x').
    uri = _urifixer.sub(r'\1\3', uri)
    try:
        return urlparse.urljoin(base, uri)
    except:
        # urljoin can choke on unquoted characters; quote each URL
        # component and retry.
        uri = urlparse.urlunparse([urllib.quote(part) for part in urlparse.urlparse(uri)])
        return urlparse.urljoin(base, uri)
class _FeedParserMixin:
namespaces = {'': '',
'http://backend.userland.com/rss': '',
'http://blogs.law.harvard.edu/tech/rss': '',
'http://purl.org/rss/1.0/': '',
'http://my.netscape.com/rdf/simple/0.9/': '',
'http://example.com/newformat#': '',
'http://example.com/necho': '',
'http://purl.org/echo/': '',
'uri/of/echo/namespace#': '',
'http://purl.org/pie/': '',
'http://purl.org/atom/ns#': '',
'http://www.w3.org/2005/Atom': '',
'http://purl.org/rss/1.0/modules/rss091#': '',
'http://webns.net/mvcb/': 'admin',
'http://purl.org/rss/1.0/modules/aggregation/': 'ag',
'http://purl.org/rss/1.0/modules/annotate/': 'annotate',
'http://media.tangent.org/rss/1.0/': 'audio',
'http://backend.userland.com/blogChannelModule': 'blogChannel',
'http://web.resource.org/cc/': 'cc',
'http://backend.userland.com/creativeCommonsRssModule': 'creativeCommons',
'http://purl.org/rss/1.0/modules/company': 'co',
'http://purl.org/rss/1.0/modules/content/': 'content',
'http://my.theinfo.org/changed/1.0/rss/': 'cp',
'http://purl.org/dc/elements/1.1/': 'dc',
'http://purl.org/dc/terms/': 'dcterms',
'http://purl.org/rss/1.0/modules/email/': 'email',
'http://purl.org/rss/1.0/modules/event/': 'ev',
'http://rssnamespace.org/feedburner/ext/1.0': 'feedburner',
'http://freshmeat.net/rss/fm/': 'fm',
'http://xmlns.com/foaf/0.1/': 'foaf',
'http://www.w3.org/2003/01/geo/wgs84_pos#': 'geo',
'http://postneo.com/icbm/': 'icbm',
'http://purl.org/rss/1.0/modules/image/': 'image',
'http://www.itunes.com/DTDs/PodCast-1.0.dtd': 'itunes',
'http://example.com/DTDs/PodCast-1.0.dtd': 'itunes',
'http://purl.org/rss/1.0/modules/link/': 'l',
'http://search.yahoo.com/mrss': 'media',
#Version 1.1.2 of the Media RSS spec added the trailing slash on the namespace
'http://search.yahoo.com/mrss/': 'media',
'http://madskills.com/public/xml/rss/module/pingback/': 'pingback',
'http://prismstandard.org/namespaces/1.2/basic/': 'prism',
'http://www.w3.org/1999/02/22-rdf-syntax-ns#': 'rdf',
'http://www.w3.org/2000/01/rdf-schema#': 'rdfs',
'http://purl.org/rss/1.0/modules/reference/': 'ref',
'http://purl.org/rss/1.0/modules/richequiv/': 'reqv',
'http://purl.org/rss/1.0/modules/search/': 'search',
'http://purl.org/rss/1.0/modules/slash/': 'slash',
'http://schemas.xmlsoap.org/soap/envelope/': 'soap',
'http://purl.org/rss/1.0/modules/servicestatus/': 'ss',
'http://hacks.benhammersley.com/rss/streaming/': 'str',
'http://purl.org/rss/1.0/modules/subscription/': 'sub',
'http://purl.org/rss/1.0/modules/syndication/': 'sy',
'http://schemas.pocketsoap.com/rss/myDescModule/': 'szf',
'http://purl.org/rss/1.0/modules/taxonomy/': 'taxo',
'http://purl.org/rss/1.0/modules/threading/': 'thr',
'http://purl.org/rss/1.0/modules/textinput/': 'ti',
'http://madskills.com/public/xml/rss/module/trackback/':'trackback',
'http://wellformedweb.org/commentAPI/': 'wfw',
'http://purl.org/rss/1.0/modules/wiki/': 'wiki',
'http://www.w3.org/1999/xhtml': 'xhtml',
'http://www.w3.org/1999/xlink': 'xlink',
'http://www.w3.org/XML/1998/namespace': 'xml'
}
_matchnamespaces = {}
can_be_relative_uri = ['link', 'id', 'wfw_comment', 'wfw_commentrss', 'docs', 'url', 'href', 'comments', 'icon', 'logo']
can_contain_relative_uris = ['content', 'title', 'summary', 'info', 'tagline', 'subtitle', 'copyright', 'rights', 'description']
can_contain_dangerous_markup = ['content', 'title', 'summary', 'info', 'tagline', 'subtitle', 'copyright', 'rights', 'description']
html_types = ['text/html', 'application/xhtml+xml']
    def __init__(self, baseuri=None, baselang=None, encoding='utf-8'):
        """Initialize parser state.

        Arguments:
            baseuri: base URI used to resolve relative links (or '' if None).
            baselang: default xml:lang inherited by the feed (optional).
            encoding: character encoding assumed for non-unicode input.
        """
        if _debug: sys.stderr.write('initializing FeedParser\n')
        # Build the lowercased namespace lookup table once, shared by all
        # instances via the class attribute.
        if not self._matchnamespaces:
            for k, v in self.namespaces.items():
                self._matchnamespaces[k.lower()] = v
        self.feeddata = FeedParserDict() # feed-level data
        self.encoding = encoding # character encoding
        self.entries = [] # list of entry-level data
        self.version = '' # feed type/version, see SUPPORTED_VERSIONS
        self.namespacesInUse = {} # dictionary of namespaces defined by the feed
        # the following are used internally to track state;
        # this is really out of control and should be refactored
        self.infeed = 0
        self.inentry = 0
        self.incontent = 0
        self.intextinput = 0
        self.inimage = 0
        self.inauthor = 0
        self.incontributor = 0
        self.inpublisher = 0
        self.insource = 0
        self.sourcedata = FeedParserDict()
        self.contentparams = FeedParserDict()
        self._summaryKey = None
        self.namespacemap = {}
        self.elementstack = []
        self.basestack = []
        self.langstack = []
        self.baseuri = baseuri or ''
        self.lang = baselang or None
        self.svgOK = 0
        self.hasTitle = 0
        if baselang:
            self.feeddata['language'] = baselang.replace('_','-')
def unknown_starttag(self, tag, attrs):
if _debug: sys.stderr.write('start %s with %s\n' % (tag, attrs))
# normalize attrs
attrs = [(k.lower(), v) for k, v in attrs]
attrs = [(k, k in ('rel', 'type') and v.lower() or v) for k, v in attrs]
# the sgml parser doesn't handle entities in attributes, but
# strict xml parsers do -- account for this difference
if isinstance(self, _LooseFeedParser):
attrs = [(k, v.replace('&', '&')) for k, v in attrs]
# track xml:base and xml:lang
attrsD = dict(attrs)
baseuri = attrsD.get('xml:base', attrsD.get('base')) or self.baseuri
if type(baseuri) != type(u''):
try:
baseuri = unicode(baseuri, self.encoding)
except:
baseuri = unicode(baseuri, 'iso-8859-1')
# ensure that self.baseuri is always an absolute URI that
# uses a whitelisted URI scheme (e.g. not `javscript:`)
if self.baseuri:
self.baseuri = _makeSafeAbsoluteURI(self.baseuri, baseuri) or self.baseuri
else:
self.baseuri = _urljoin(self.baseuri, baseuri)
lang = attrsD.get('xml:lang', attrsD.get('lang'))
if lang == '':
# xml:lang could be explicitly set to '', we need to capture that
lang = None
elif lang is None:
# if no xml:lang is specified, use parent lang
lang = self.lang
if lang:
if tag in ('feed', 'rss', 'rdf:RDF'):
self.feeddata['language'] = lang.replace('_','-')
self.lang = lang
self.basestack.append(self.baseuri)
self.langstack.append(lang)
# track namespaces
for prefix, uri in attrs:
if prefix.startswith('xmlns:'):
self.trackNamespace(prefix[6:], uri)
elif prefix == 'xmlns':
self.trackNamespace(None, uri)
# track inline content
if self.incontent and self.contentparams.has_key('type') and not self.contentparams.get('type', 'xml').endswith('xml'):
if tag in ['xhtml:div', 'div']: return # typepad does this 10/2007
# element declared itself as escaped markup, but it isn't really
self.contentparams['type'] = 'application/xhtml+xml'
if self.incontent and self.contentparams.get('type') == 'application/xhtml+xml':
if tag.find(':') <> -1:
prefix, tag = tag.split(':', 1)
namespace = self.namespacesInUse.get(prefix, '')
if tag=='math' and namespace=='http://www.w3.org/1998/Math/MathML':
attrs.append(('xmlns',namespace))
if tag=='svg' and namespace=='http://www.w3.org/2000/svg':
attrs.append(('xmlns',namespace))
if tag == 'svg': self.svgOK += 1
return self.handle_data('<%s%s>' % (tag, self.strattrs(attrs)), escape=0)
# match namespaces
if tag.find(':') <> -1:
prefix, suffix = tag.split(':', 1)
else:
prefix, suffix = '', tag
prefix = self.namespacemap.get(prefix, prefix)
if prefix:
prefix = prefix + '_'
# special hack for better tracking of empty textinput/image elements in illformed feeds
if (not prefix) and tag not in ('title', 'link', 'description', 'name'):
self.intextinput = 0
if (not prefix) and tag not in ('title', 'link', 'description', 'url', 'href', 'width', 'height'):
self.inimage = 0
# call special handler (if defined) or default handler
methodname = '_start_' + prefix + suffix
try:
method = getattr(self, methodname)
return method(attrsD)
except AttributeError:
# Since there's no handler or something has gone wrong we explicitly add the element and its attributes
unknown_tag = prefix + suffix
if len(attrsD) == 0:
# No attributes so merge it into the encosing dictionary
return self.push(unknown_tag, 1)
else:
# Has attributes so create it in its own dictionary
context = self._getContext()
context[unknown_tag] = attrsD
    def unknown_endtag(self, tag):
        """SGML/XML end-tag handler: dispatches to `_end_<element>` handlers
        and unwinds the xml:base / xml:lang stacks."""
        if _debug: sys.stderr.write('end %s\n' % tag)
        # match namespaces
        if tag.find(':') <> -1:
            prefix, suffix = tag.split(':', 1)
        else:
            prefix, suffix = '', tag
        prefix = self.namespacemap.get(prefix, prefix)
        if prefix:
            prefix = prefix + '_'
        if suffix == 'svg' and self.svgOK: self.svgOK -= 1
        # call special handler (if defined) or default handler
        methodname = '_end_' + prefix + suffix
        try:
            # While inside inline SVG, skip element handlers entirely.
            if self.svgOK: raise AttributeError()
            method = getattr(self, methodname)
            method()
        except AttributeError:
            self.pop(prefix + suffix)
        # track inline content
        if self.incontent and self.contentparams.has_key('type') and not self.contentparams.get('type', 'xml').endswith('xml'):
            # element declared itself as escaped markup, but it isn't really
            if tag in ['xhtml:div', 'div']: return # typepad does this 10/2007
            self.contentparams['type'] = 'application/xhtml+xml'
        if self.incontent and self.contentparams.get('type') == 'application/xhtml+xml':
            tag = tag.split(':')[-1]
            self.handle_data('</%s>' % tag, escape=0)
        # track xml:base and xml:lang going out of scope
        if self.basestack:
            self.basestack.pop()
            if self.basestack and self.basestack[-1]:
                self.baseuri = self.basestack[-1]
        if self.langstack:
            self.langstack.pop()
            if self.langstack: # and (self.langstack[-1] is not None):
                self.lang = self.langstack[-1]
    def handle_charref(self, ref):
        # called for each character reference, e.g. for '&#160;', ref will be '160'
        if not self.elementstack: return
        ref = ref.lower()
        # Leave references to XML-significant characters (quotes, ampersand,
        # angle brackets) escaped; decode everything else to UTF-8 text.
        if ref in ('34', '38', '39', '60', '62', 'x22', 'x26', 'x27', 'x3c', 'x3e'):
            text = '&#%s;' % ref
        else:
            if ref[0] == 'x':
                c = int(ref[1:], 16)
            else:
                c = int(ref)
            text = unichr(c).encode('utf-8')
        self.elementstack[-1][2].append(text)
    def handle_entityref(self, ref):
        # called for each entity reference, e.g. for '&copy;', ref will be 'copy'
        if not self.elementstack: return
        if _debug: sys.stderr.write('entering handle_entityref with %s\n' % ref)
        # XML-significant entities stay escaped; others are resolved to text.
        if ref in ('lt', 'gt', 'quot', 'amp', 'apos'):
            text = '&%s;' % ref
        elif ref in self.entities.keys():
            text = self.entities[ref]
            # Numeric entity definitions are re-dispatched for decoding.
            if text.startswith('&#') and text.endswith(';'):
                return self.handle_entityref(text)
        else:
            try: name2codepoint[ref]
            except KeyError: text = '&%s;' % ref
            else: text = unichr(name2codepoint[ref]).encode('utf-8')
        self.elementstack[-1][2].append(text)
def handle_data(self, text, escape=1):
# called for each block of plain text, i.e. outside of any tag and
# not containing any character or entity references
if not self.elementstack: return
if escape and self.contentparams.get('type') == 'application/xhtml+xml':
text = _xmlescape(text)
self.elementstack[-1][2].append(text)
    def handle_comment(self, text):
        # called for each comment, e.g. <!-- insert message here -->
        # comments are discarded
        pass
    def handle_pi(self, text):
        # called for each processing instruction, e.g. <?instruction>
        # processing instructions are discarded
        pass
    def handle_decl(self, text):
        # markup declarations are discarded
        pass
    def parse_declaration(self, i):
        # override internal declaration handler to handle CDATA blocks
        # returns the index at which parsing should resume
        if _debug: sys.stderr.write('entering parse_declaration\n')
        if self.rawdata[i:i+9] == '<![CDATA[':
            k = self.rawdata.find(']]>', i)
            if k == -1:
                # CDATA block began but didn't finish; consume the rest of
                # the buffer and wait for more data
                k = len(self.rawdata)
                return k
            # re-escape the CDATA payload and buffer it as character data
            # (escape=0: it is already escaped here)
            self.handle_data(_xmlescape(self.rawdata[i+9:k]), 0)
            return k+3
        else:
            # any other declaration: skip to the closing '>'
            k = self.rawdata.find('>', i)
            if k >= 0:
                return k+1
            else:
                # We have an incomplete CDATA block.
                return k
def mapContentType(self, contentType):
contentType = contentType.lower()
if contentType == 'text' or contentType == 'plain':
contentType = 'text/plain'
elif contentType == 'html':
contentType = 'text/html'
elif contentType == 'xhtml':
contentType = 'application/xhtml+xml'
return contentType
def trackNamespace(self, prefix, uri):
loweruri = uri.lower()
if (prefix, loweruri) == (None, 'http://my.netscape.com/rdf/simple/0.9/') and not self.version:
self.version = 'rss090'
if loweruri == 'http://purl.org/rss/1.0/' and not self.version:
self.version = 'rss10'
if loweruri == 'http://www.w3.org/2005/atom' and not self.version:
self.version = 'atom10'
if loweruri.find('backend.userland.com/rss') <> -1:
# match any backend.userland.com namespace
uri = 'http://backend.userland.com/rss'
loweruri = uri
if self._matchnamespaces.has_key(loweruri):
self.namespacemap[prefix] = self._matchnamespaces[loweruri]
self.namespacesInUse[self._matchnamespaces[loweruri]] = uri
else:
self.namespacesInUse[prefix or ''] = uri
    def resolveURI(self, uri):
        # resolve a possibly-relative URI against the current xml:base
        return _urljoin(self.baseuri or '', uri)
    def decodeEntities(self, element, data):
        # hook for subclasses to override; here data is returned unchanged
        return data
def strattrs(self, attrs):
return ''.join([' %s="%s"' % (t[0],_xmlescape(t[1],{'"':'"'})) for t in attrs])
    def push(self, element, expectingText):
        # open a text buffer for element: [name, expectingText, pieces]
        self.elementstack.append([element, expectingText, []])
    def pop(self, element, stripWhitespace=1):
        """Close the innermost open element and return its processed text.

        Joins the buffered pieces, then — depending on the element and the
        current contentparams — strips whitespace, unwraps the Atom XHTML
        <div>, decodes base64, resolves relative URIs, decodes entities,
        sniffs HTML, parses microformats, sanitizes markup, repairs common
        double-encoding mistakes, and finally stores the value on the
        current feed/entry context.  The order of these steps matters;
        do not reorder.
        """
        if not self.elementstack: return
        if self.elementstack[-1][0] != element: return
        element, expectingText, pieces = self.elementstack.pop()
        if self.version == 'atom10' and self.contentparams.get('type','text') == 'application/xhtml+xml':
            # remove enclosing child element, but only if it is a <div> and
            # only if all the remaining content is nested underneath it.
            # This means that the divs would be retained in the following:
            #    <div>foo</div><div>bar</div>
            while pieces and len(pieces)>1 and not pieces[-1].strip():
                del pieces[-1]
            while pieces and len(pieces)>1 and not pieces[0].strip():
                del pieces[0]
            if pieces and (pieces[0] == '<div>' or pieces[0].startswith('<div ')) and pieces[-1]=='</div>':
                depth = 0
                for piece in pieces[:-1]:
                    if piece.startswith('</'):
                        depth -= 1
                        if depth == 0: break
                    elif piece.startswith('<') and not piece.endswith('/>'):
                        depth += 1
                else:
                    pieces = pieces[1:-1]
        # Ensure each piece is a str for Python 3
        for (i, v) in enumerate(pieces):
            if not isinstance(v, basestring):
                pieces[i] = v.decode('utf-8')
        output = ''.join(pieces)
        if stripWhitespace:
            output = output.strip()
        if not expectingText: return output
        # decode base64 content
        if base64 and self.contentparams.get('base64', 0):
            try:
                output = _base64decode(output)
            except binascii.Error:
                pass
            except binascii.Incomplete:
                pass
            except TypeError:
                # In Python 3, base64 takes and outputs bytes, not str
                # This may not be the most correct way to accomplish this
                output = _base64decode(output.encode('utf-8')).decode('utf-8')
        # resolve relative URIs
        if (element in self.can_be_relative_uri) and output:
            output = self.resolveURI(output)
        # decode entities within embedded markup
        if not self.contentparams.get('base64', 0):
            output = self.decodeEntities(element, output)
            # upgrade nominally-plain content that clearly contains HTML
            if self.lookslikehtml(output):
                self.contentparams['type']='text/html'
        # remove temporary cruft from contentparams
        try:
            del self.contentparams['mode']
        except KeyError:
            pass
        try:
            del self.contentparams['base64']
        except KeyError:
            pass
        is_htmlish = self.mapContentType(self.contentparams.get('type', 'text/html')) in self.html_types
        # resolve relative URIs within embedded markup
        if is_htmlish and RESOLVE_RELATIVE_URIS:
            if element in self.can_contain_relative_uris:
                output = _resolveRelativeURIs(output, self.baseuri, self.encoding, self.contentparams.get('type', 'text/html'))
        # parse microformats
        # (must do this before sanitizing because some microformats
        # rely on elements that we sanitize)
        if is_htmlish and element in ['content', 'description', 'summary']:
            mfresults = _parseMicroformats(output, self.baseuri, self.encoding)
            if mfresults:
                for tag in mfresults.get('tags', []):
                    self._addTag(tag['term'], tag['scheme'], tag['label'])
                for enclosure in mfresults.get('enclosures', []):
                    self._start_enclosure(enclosure)
                for xfn in mfresults.get('xfn', []):
                    self._addXFN(xfn['relationships'], xfn['href'], xfn['name'])
                vcard = mfresults.get('vcard')
                if vcard:
                    self._getContext()['vcard'] = vcard
        # sanitize embedded markup
        if is_htmlish and SANITIZE_HTML:
            if element in self.can_contain_dangerous_markup:
                output = _sanitizeHTML(output, self.encoding, self.contentparams.get('type', 'text/html'))
        if self.encoding and type(output) != type(u''):
            try:
                output = unicode(output, self.encoding)
            except:
                pass
        # address common error where people take data that is already
        # utf-8, presume that it is iso-8859-1, and re-encode it.
        if self.encoding in ('utf-8', 'utf-8_INVALID_PYTHON_3') and type(output) == type(u''):
            try:
                output = unicode(output.encode('iso-8859-1'), 'utf-8')
            except:
                pass
        # map win-1252 extensions to the proper code points
        if type(output) == type(u''):
            output = u''.join([c in _cp1252.keys() and _cp1252[c] or c for c in output])
        # categories/tags/keywords/whatever are handled in _end_category
        if element == 'category':
            return output
        # keep only the first title seen in this scope
        if element == 'title' and self.hasTitle:
            return output
        # store output in appropriate place(s)
        if self.inentry and not self.insource:
            if element == 'content':
                self.entries[-1].setdefault(element, [])
                contentparams = copy.deepcopy(self.contentparams)
                contentparams['value'] = output
                self.entries[-1][element].append(contentparams)
            elif element == 'link':
                if not self.inimage:
                    # query variables in urls in link elements are improperly
                    # converted from `?a=1&b=2` to `?a=1&b;=2` as if they're
                    # unhandled character references. fix this special case.
                    output = re.sub("&([A-Za-z0-9_]+);", "&\g<1>", output)
                    self.entries[-1][element] = output
                    if output:
                        self.entries[-1]['links'][-1]['href'] = output
            else:
                if element == 'description':
                    element = 'summary'
                self.entries[-1][element] = output
                if self.incontent:
                    contentparams = copy.deepcopy(self.contentparams)
                    contentparams['value'] = output
                    self.entries[-1][element + '_detail'] = contentparams
        elif (self.infeed or self.insource):# and (not self.intextinput) and (not self.inimage):
            context = self._getContext()
            if element == 'description':
                element = 'subtitle'
            context[element] = output
            if element == 'link':
                # fix query variables; see above for the explanation
                output = re.sub("&([A-Za-z0-9_]+);", "&\g<1>", output)
                context[element] = output
                context['links'][-1]['href'] = output
            elif self.incontent:
                contentparams = copy.deepcopy(self.contentparams)
                contentparams['value'] = output
                context[element + '_detail'] = contentparams
        return output
    def pushContent(self, tag, attrsD, defaultContentType, expectingText):
        # enter a content-bearing element: record its type/language/base
        # in contentparams and open a text buffer for it
        self.incontent += 1
        if self.lang: self.lang=self.lang.replace('_','-')
        self.contentparams = FeedParserDict({
            'type': self.mapContentType(attrsD.get('type', defaultContentType)),
            'language': self.lang,
            'base': self.baseuri})
        self.contentparams['base64'] = self._isBase64(attrsD, self.contentparams)
        self.push(tag, expectingText)
    def popContent(self, tag):
        # leave a content-bearing element and return its processed text
        value = self.pop(tag)
        self.incontent -= 1
        self.contentparams.clear()
        return value
    # a number of elements in a number of RSS variants are nominally plain
    # text, but this is routinely ignored.  This is an attempt to detect
    # the most common cases.  As false positives often result in silent
    # data loss, this function errs on the conservative side.
    def lookslikehtml(self, s):
        # NOTE(review): relies on Python 2 semantics — filter() returning a
        # (possibly empty, falsy) list and the py2-only htmlentitydefs module
        if self.version.startswith('atom'): return
        if self.contentparams.get('type','text/html') != 'text/plain': return
        # must have a close tag or a entity reference to qualify
        if not (re.search(r'</(\w+)>',s) or re.search("&#?\w+;",s)): return
        # all tags must be in a restricted subset of valid HTML tags
        if filter(lambda t: t.lower() not in _HTMLSanitizer.acceptable_elements,
            re.findall(r'</?(\w+)',s)): return
        # all entities must have been defined as valid HTML entities
        from htmlentitydefs import entitydefs
        if filter(lambda e: e not in entitydefs.keys(),
            re.findall(r'&(\w+);',s)): return
        return 1
def _mapToStandardPrefix(self, name):
colonpos = name.find(':')
if colonpos <> -1:
prefix = name[:colonpos]
suffix = name[colonpos+1:]
prefix = self.namespacemap.get(prefix, prefix)
name = prefix + ':' + suffix
return name
    def _getAttribute(self, attrsD, name):
        # look up an attribute after normalizing its namespace prefix
        return attrsD.get(self._mapToStandardPrefix(name))
def _isBase64(self, attrsD, contentparams):
if attrsD.get('mode', '') == 'base64':
return 1
if self.contentparams['type'].startswith('text/'):
return 0
if self.contentparams['type'].endswith('+xml'):
return 0
if self.contentparams['type'].endswith('/xml'):
return 0
return 1
def _itsAnHrefDamnIt(self, attrsD):
href = attrsD.get('url', attrsD.get('uri', attrsD.get('href', None)))
if href:
try:
del attrsD['url']
except KeyError:
pass
try:
del attrsD['uri']
except KeyError:
pass
attrsD['href'] = href
return attrsD
    def _save(self, key, value, overwrite=False):
        # store a value on the current context; by default the first
        # value seen for a key wins
        context = self._getContext()
        if overwrite:
            context[key] = value
        else:
            context.setdefault(key, value)
def _start_rss(self, attrsD):
versionmap = {'0.91': 'rss091u',
'0.92': 'rss092',
'0.93': 'rss093',
'0.94': 'rss094'}
#If we're here then this is an RSS feed.
#If we don't have a version or have a version that starts with something
#other than RSS then there's been a mistake. Correct it.
if not self.version or not self.version.startswith('rss'):
attr_version = attrsD.get('version', '')
version = versionmap.get(attr_version)
if version:
self.version = version
elif attr_version.startswith('2.'):
self.version = 'rss20'
else:
self.version = 'rss'
    def _start_dlhottitles(self, attrsD):
        # Netscape 'hot titles' feed variant
        self.version = 'hotrss'
    def _start_channel(self, attrsD):
        # entering the feed-level metadata section
        self.infeed = 1
        self._cdf_common(attrsD)
    _start_feedinfo = _start_channel
def _cdf_common(self, attrsD):
if attrsD.has_key('lastmod'):
self._start_modified({})
self.elementstack[-1][-1] = attrsD['lastmod']
self._end_modified()
if attrsD.has_key('href'):
self._start_link({})
self.elementstack[-1][-1] = attrsD['href']
self._end_link()
def _start_feed(self, attrsD):
self.infeed = 1
versionmap = {'0.1': 'atom01',
'0.2': 'atom02',
'0.3': 'atom03'}
if not self.version:
attr_version = attrsD.get('version')
version = versionmap.get(attr_version)
if version:
self.version = version
else:
self.version = 'atom'
    def _end_channel(self):
        # leaving the feed-level metadata section
        self.infeed = 0
    _end_feed = _end_channel
    def _start_image(self, attrsD):
        # feed-level <image>; entry-level images don't get a feed slot
        context = self._getContext()
        if not self.inentry:
            context.setdefault('image', FeedParserDict())
        self.inimage = 1
        self.hasTitle = 0
        self.push('image', 0)
    def _end_image(self):
        self.pop('image')
        self.inimage = 0
    def _start_textinput(self, attrsD):
        # RSS <textInput> search-box element
        context = self._getContext()
        context.setdefault('textinput', FeedParserDict())
        self.intextinput = 1
        self.hasTitle = 0
        self.push('textinput', 0)
    _start_textInput = _start_textinput
    def _end_textinput(self):
        self.pop('textinput')
        self.intextinput = 0
    _end_textInput = _end_textinput
    def _start_author(self, attrsD):
        # begin an author element (Atom <author>, RSS managingEditor,
        # dc:author/creator, itunes:author)
        self.inauthor = 1
        self.push('author', 1)
        # Append a new FeedParserDict when expecting an author
        context = self._getContext()
        context.setdefault('authors', [])
        context['authors'].append(FeedParserDict())
    _start_managingeditor = _start_author
    _start_dc_author = _start_author
    _start_dc_creator = _start_author
    _start_itunes_author = _start_author
    def _end_author(self):
        self.pop('author')
        self.inauthor = 0
        # reconcile the display string with the structured detail dict
        self._sync_author_detail()
    _end_managingeditor = _end_author
    _end_dc_author = _end_author
    _end_dc_creator = _end_author
    _end_itunes_author = _end_author
    def _start_itunes_owner(self, attrsD):
        # itunes:owner maps onto the 'publisher' slots
        self.inpublisher = 1
        self.push('publisher', 0)
    def _end_itunes_owner(self):
        self.pop('publisher')
        self.inpublisher = 0
        self._sync_author_detail('publisher')
    def _start_contributor(self, attrsD):
        self.incontributor = 1
        context = self._getContext()
        context.setdefault('contributors', [])
        context['contributors'].append(FeedParserDict())
        self.push('contributor', 0)
    def _end_contributor(self):
        self.pop('contributor')
        self.incontributor = 0
    def _start_dc_contributor(self, attrsD):
        # dc:contributor holds a bare name rather than sub-elements
        self.incontributor = 1
        context = self._getContext()
        context.setdefault('contributors', [])
        context['contributors'].append(FeedParserDict())
        self.push('name', 0)
    def _end_dc_contributor(self):
        self._end_name()
        self.incontributor = 0
    def _start_name(self, attrsD):
        self.push('name', 0)
    _start_itunes_name = _start_name
    def _end_name(self):
        # route the collected name to whichever person element is open
        value = self.pop('name')
        if self.inpublisher:
            self._save_author('name', value, 'publisher')
        elif self.inauthor:
            self._save_author('name', value)
        elif self.incontributor:
            self._save_contributor('name', value)
        elif self.intextinput:
            context = self._getContext()
            context['name'] = value
    _end_itunes_name = _end_name
def _start_width(self, attrsD):
self.push('width', 0)
def _end_width(self):
value = self.pop('width')
try:
value = int(value)
except:
value = 0
if self.inimage:
context = self._getContext()
context['width'] = value
def _start_height(self, attrsD):
self.push('height', 0)
def _end_height(self):
value = self.pop('height')
try:
value = int(value)
except:
value = 0
if self.inimage:
context = self._getContext()
context['height'] = value
    def _start_url(self, attrsD):
        # url/homepage/uri all collect into an 'href' buffer
        self.push('href', 1)
    _start_homepage = _start_url
    _start_uri = _start_url
    def _end_url(self):
        # route the collected href to whichever person element is open
        value = self.pop('href')
        if self.inauthor:
            self._save_author('href', value)
        elif self.incontributor:
            self._save_contributor('href', value)
    _end_homepage = _end_url
    _end_uri = _end_url
    def _start_email(self, attrsD):
        self.push('email', 0)
    _start_itunes_email = _start_email
    def _end_email(self):
        # route the collected email address to the open person element
        value = self.pop('email')
        if self.inpublisher:
            self._save_author('email', value, 'publisher')
        elif self.inauthor:
            self._save_author('email', value)
        elif self.incontributor:
            self._save_contributor('email', value)
    _end_itunes_email = _end_email
def _getContext(self):
if self.insource:
context = self.sourcedata
elif self.inimage and self.feeddata.has_key('image'):
context = self.feeddata['image']
elif self.intextinput:
context = self.feeddata['textinput']
elif self.inentry:
context = self.entries[-1]
else:
context = self.feeddata
return context
    def _save_author(self, key, value, prefix='author'):
        # store one field (name/email/href) of the current author or
        # publisher, in both the detail dict and the authors list
        context = self._getContext()
        context.setdefault(prefix + '_detail', FeedParserDict())
        context[prefix + '_detail'][key] = value
        self._sync_author_detail()
        context.setdefault('authors', [FeedParserDict()])
        context['authors'][-1][key] = value
    def _save_contributor(self, key, value):
        # store one field of the most recently opened contributor
        context = self._getContext()
        context.setdefault('contributors', [FeedParserDict()])
        context['contributors'][-1][key] = value
def _sync_author_detail(self, key='author'):
context = self._getContext()
detail = context.get('%s_detail' % key)
if detail:
name = detail.get('name')
email = detail.get('email')
if name and email:
context[key] = '%s (%s)' % (name, email)
elif name:
context[key] = name
elif email:
context[key] = email
else:
author, email = context.get(key), None
if not author: return
emailmatch = re.search(r'''(([a-zA-Z0-9\_\-\.\+]+)@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.)|(([a-zA-Z0-9\-]+\.)+))([a-zA-Z]{2,4}|[0-9]{1,3})(\]?))(\?subject=\S+)?''', author)
if emailmatch:
email = emailmatch.group(0)
# probably a better way to do the following, but it passes all the tests
author = author.replace(email, '')
author = author.replace('()', '')
author = author.replace('<>', '')
author = author.replace('<>', '')
author = author.strip()
if author and (author[0] == '('):
author = author[1:]
if author and (author[-1] == ')'):
author = author[:-1]
author = author.strip()
if author or email:
context.setdefault('%s_detail' % key, FeedParserDict())
if author:
context['%s_detail' % key]['name'] = author
if email:
context['%s_detail' % key]['email'] = email
    def _start_subtitle(self, attrsD):
        # subtitle/tagline/itunes:subtitle: plain-text content element
        self.pushContent('subtitle', attrsD, 'text/plain', 1)
    _start_tagline = _start_subtitle
    _start_itunes_subtitle = _start_subtitle
    def _end_subtitle(self):
        self.popContent('subtitle')
    _end_tagline = _end_subtitle
    _end_itunes_subtitle = _end_subtitle
    def _start_rights(self, attrsD):
        # rights/dc:rights/copyright: plain-text content element
        self.pushContent('rights', attrsD, 'text/plain', 1)
    _start_dc_rights = _start_rights
    _start_copyright = _start_rights
    def _end_rights(self):
        self.popContent('rights')
    _end_dc_rights = _end_rights
    _end_copyright = _end_rights
    def _start_item(self, attrsD):
        # begin a new entry (RSS <item>, Atom <entry>, hotrss <product>)
        self.entries.append(FeedParserDict())
        self.push('item', 0)
        self.inentry = 1
        self.guidislink = 0
        self.hasTitle = 0
        # RDF-based feeds carry the entry id in rdf:about
        id = self._getAttribute(attrsD, 'rdf:about')
        if id:
            context = self._getContext()
            context['id'] = id
        self._cdf_common(attrsD)
    _start_entry = _start_item
    _start_product = _start_item
    def _end_item(self):
        self.pop('item')
        self.inentry = 0
    _end_entry = _end_item
    def _start_dc_language(self, attrsD):
        self.push('language', 1)
    _start_language = _start_dc_language
    def _end_dc_language(self):
        # the declared language becomes the current xml:lang fallback
        self.lang = self.pop('language')
    _end_language = _end_dc_language
    def _start_dc_publisher(self, attrsD):
        self.push('publisher', 1)
    _start_webmaster = _start_dc_publisher
    def _end_dc_publisher(self):
        self.pop('publisher')
        self._sync_author_detail('publisher')
    _end_webmaster = _end_dc_publisher
    def _start_published(self, attrsD):
        self.push('published', 1)
    _start_dcterms_issued = _start_published
    _start_issued = _start_published
    def _end_published(self):
        # store both the raw string (via pop) and the parsed struct_time
        value = self.pop('published')
        self._save('published_parsed', _parse_date(value), overwrite=True)
    _end_dcterms_issued = _end_published
    _end_issued = _end_published
    def _start_updated(self, attrsD):
        self.push('updated', 1)
    _start_modified = _start_updated
    _start_dcterms_modified = _start_updated
    _start_pubdate = _start_updated
    _start_dc_date = _start_updated
    _start_lastbuilddate = _start_updated
    def _end_updated(self):
        value = self.pop('updated')
        parsed_value = _parse_date(value)
        self._save('updated_parsed', parsed_value, overwrite=True)
    _end_modified = _end_updated
    _end_dcterms_modified = _end_updated
    _end_pubdate = _end_updated
    _end_dc_date = _end_updated
    _end_lastbuilddate = _end_updated
    def _start_created(self, attrsD):
        self.push('created', 1)
    _start_dcterms_created = _start_created
    def _end_created(self):
        value = self.pop('created')
        self._save('created_parsed', _parse_date(value), overwrite=True)
    _end_dcterms_created = _end_created
    def _start_expirationdate(self, attrsD):
        self.push('expired', 1)
    def _end_expirationdate(self):
        self._save('expired_parsed', _parse_date(self.pop('expired')), overwrite=True)
    def _start_cc_license(self, attrsD):
        # cc:license carries its URI in rdf:resource; record it as a
        # rel='license' link
        context = self._getContext()
        value = self._getAttribute(attrsD, 'rdf:resource')
        attrsD = FeedParserDict()
        attrsD['rel']='license'
        if value: attrsD['href']=value
        context.setdefault('links', []).append(attrsD)
    def _start_creativecommons_license(self, attrsD):
        self.push('license', 1)
    _start_creativeCommons_license = _start_creativecommons_license
    def _end_creativecommons_license(self):
        # creativeCommons:license carries its URI as text; convert it to
        # a rel='license' link and drop the raw 'license' key
        value = self.pop('license')
        context = self._getContext()
        attrsD = FeedParserDict()
        attrsD['rel']='license'
        if value: attrsD['href']=value
        context.setdefault('links', []).append(attrsD)
        del context['license']
    _end_creativeCommons_license = _end_creativecommons_license
    def _addXFN(self, relationships, href, name):
        # record a parsed XFN (XHTML Friends Network) relationship,
        # skipping exact duplicates
        context = self._getContext()
        xfn = context.setdefault('xfn', [])
        value = FeedParserDict({'relationships': relationships, 'href': href, 'name': name})
        if value not in xfn:
            xfn.append(value)
    def _addTag(self, term, scheme, label):
        # record a category/tag triple, skipping empties and duplicates
        context = self._getContext()
        tags = context.setdefault('tags', [])
        if (not term) and (not scheme) and (not label): return
        value = FeedParserDict({'term': term, 'scheme': scheme, 'label': label})
        if value not in tags:
            tags.append(value)
    def _start_category(self, attrsD):
        # category/dc:subject/keywords: record attributes now, text later
        if _debug: sys.stderr.write('entering _start_category with %s\n' % repr(attrsD))
        term = attrsD.get('term')
        scheme = attrsD.get('scheme', attrsD.get('domain'))
        label = attrsD.get('label')
        self._addTag(term, scheme, label)
        self.push('category', 1)
    _start_dc_subject = _start_category
    _start_keywords = _start_category
    def _start_media_category(self, attrsD):
        # media:category defaults to Yahoo's category scheme
        attrsD.setdefault('scheme', 'http://search.yahoo.com/mrss/category_schema')
        self._start_category(attrsD)
    def _end_itunes_keywords(self):
        # itunes:keywords is a whitespace-separated list of terms
        for term in self.pop('itunes_keywords').split():
            self._addTag(term, 'http://www.itunes.com/', None)
    def _start_itunes_category(self, attrsD):
        self._addTag(attrsD.get('text'), 'http://www.itunes.com/', None)
        self.push('category', 1)
    def _end_category(self):
        # attach the element's text to the tag started in _start_category
        # (if it had no term attribute), or record it as a new tag
        value = self.pop('category')
        if not value: return
        context = self._getContext()
        tags = context['tags']
        if value and len(tags) and not tags[-1]['term']:
            tags[-1]['term'] = value
        else:
            self._addTag(value, None, None)
    _end_dc_subject = _end_category
    _end_keywords = _end_category
    _end_itunes_category = _end_category
    _end_media_category = _end_category
    def _start_cloud(self, attrsD):
        # RSS <cloud> lightweight publish-subscribe endpoint
        self._getContext()['cloud'] = FeedParserDict(attrsD)
def _start_link(self, attrsD):
attrsD.setdefault('rel', 'alternate')
if attrsD['rel'] == 'self':
attrsD.setdefault('type', 'application/atom+xml')
else:
attrsD.setdefault('type', 'text/html')
context = self._getContext()
attrsD = self._itsAnHrefDamnIt(attrsD)
if attrsD.has_key('href'):
attrsD['href'] = self.resolveURI(attrsD['href'])
expectingText = self.infeed or self.inentry or self.insource
context.setdefault('links', [])
if not (self.inentry and self.inimage):
context['links'].append(FeedParserDict(attrsD))
if attrsD.has_key('href'):
expectingText = 0
if (attrsD.get('rel') == 'alternate') and (self.mapContentType(attrsD.get('type')) in self.html_types):
context['link'] = attrsD['href']
else:
self.push('link', expectingText)
_start_producturl = _start_link
def _end_link(self):
value = self.pop('link')
context = self._getContext()
_end_producturl = _end_link
def _start_guid(self, attrsD):
self.guidislink = (attrsD.get('ispermalink', 'true') == 'true')
self.push('id', 1)
def _end_guid(self):
value = self.pop('id')
self._save('guidislink', self.guidislink and not self._getContext().has_key('link'))
if self.guidislink:
# guid acts as link, but only if 'ispermalink' is not present or is 'true',
# and only if the item doesn't already have a link element
self._save('link', value)
    def _start_title(self, attrsD):
        # inside inline SVG a <title> is just markup, not feed metadata
        if self.svgOK: return self.unknown_starttag('title', attrsD.items())
        self.pushContent('title', attrsD, 'text/plain', self.infeed or self.inentry or self.insource)
    _start_dc_title = _start_title
    _start_media_title = _start_title
    def _end_title(self):
        if self.svgOK: return
        value = self.popContent('title')
        if not value: return
        context = self._getContext()
        # remember that a title was seen so later titles don't clobber it
        self.hasTitle = 1
    _end_dc_title = _end_title
    def _end_media_title(self):
        # media:title must not count as the entry's title
        hasTitle = self.hasTitle
        self._end_title()
        self.hasTitle = hasTitle
    def _start_description(self, attrsD):
        # if a summary already exists, treat this description as content
        context = self._getContext()
        if context.has_key('summary'):
            self._summaryKey = 'content'
            self._start_content(attrsD)
        else:
            self.pushContent('description', attrsD, 'text/html', self.infeed or self.inentry or self.insource)
    _start_dc_description = _start_description
    def _start_abstract(self, attrsD):
        # like description but nominally plain text
        self.pushContent('description', attrsD, 'text/plain', self.infeed or self.inentry or self.insource)
    def _end_description(self):
        if self._summaryKey == 'content':
            self._end_content()
        else:
            value = self.popContent('description')
        self._summaryKey = None
    _end_abstract = _end_description
    _end_dc_description = _end_description
    def _start_info(self, attrsD):
        self.pushContent('info', attrsD, 'text/plain', 1)
    _start_feedburner_browserfriendly = _start_info
    def _end_info(self):
        self.popContent('info')
    _end_feedburner_browserfriendly = _end_info
    def _start_generator(self, attrsD):
        # generator may carry a url/uri/href attribute with its homepage
        if attrsD:
            attrsD = self._itsAnHrefDamnIt(attrsD)
            if attrsD.has_key('href'):
                attrsD['href'] = self.resolveURI(attrsD['href'])
        self._getContext()['generator_detail'] = FeedParserDict(attrsD)
        self.push('generator', 1)
    def _end_generator(self):
        # the element's text is the generator's display name
        value = self.pop('generator')
        context = self._getContext()
        if context.has_key('generator_detail'):
            context['generator_detail']['name'] = value
    def _start_admin_generatoragent(self, attrsD):
        # admin:generatorAgent carries its URI in rdf:resource; inject it
        # as the element's text so pop() stores it normally
        self.push('generator', 1)
        value = self._getAttribute(attrsD, 'rdf:resource')
        if value:
            self.elementstack[-1][2].append(value)
        self.pop('generator')
        self._getContext()['generator_detail'] = FeedParserDict({'href': value})
    def _start_admin_errorreportsto(self, attrsD):
        # admin:errorReportsTo also carries its URI in rdf:resource
        self.push('errorreportsto', 1)
        value = self._getAttribute(attrsD, 'rdf:resource')
        if value:
            self.elementstack[-1][2].append(value)
        self.pop('errorreportsto')
    def _start_summary(self, attrsD):
        # if a summary already exists, treat this one as content instead
        context = self._getContext()
        if context.has_key('summary'):
            self._summaryKey = 'content'
            self._start_content(attrsD)
        else:
            self._summaryKey = 'summary'
            self.pushContent(self._summaryKey, attrsD, 'text/plain', 1)
    _start_itunes_summary = _start_summary
    def _end_summary(self):
        if self._summaryKey == 'content':
            self._end_content()
        else:
            self.popContent(self._summaryKey or 'summary')
        self._summaryKey = None
    _end_itunes_summary = _end_summary
    def _start_enclosure(self, attrsD):
        # enclosures are stored as rel='enclosure' links
        attrsD = self._itsAnHrefDamnIt(attrsD)
        context = self._getContext()
        attrsD['rel']='enclosure'
        context.setdefault('links', []).append(FeedParserDict(attrsD))
    def _start_source(self, attrsD):
        # begin collecting source-feed metadata into self.sourcedata
        if 'url' in attrsD:
            # This means that we're processing a source element from an RSS 2.0 feed
            self.sourcedata['href'] = attrsD[u'url']
        self.push('source', 1)
        self.insource = 1
        self.hasTitle = 0
    def _end_source(self):
        # attach the accumulated source metadata to the current entry
        self.insource = 0
        value = self.pop('source')
        if value:
            self.sourcedata['title'] = value
        self._getContext()['source'] = copy.deepcopy(self.sourcedata)
        self.sourcedata.clear()
    def _start_content(self, attrsD):
        self.pushContent('content', attrsD, 'text/plain', 1)
        # out-of-line content: remember the src URI
        src = attrsD.get('src')
        if src:
            self.contentparams['src'] = src
        self.push('content', 1)
    def _start_prodlink(self, attrsD):
        self.pushContent('content', attrsD, 'text/html', 1)
    def _start_body(self, attrsD):
        # xhtml:body is inline XHTML content
        self.pushContent('content', attrsD, 'application/xhtml+xml', 1)
    _start_xhtml_body = _start_body
    def _start_content_encoded(self, attrsD):
        # content:encoded is escaped HTML
        self.pushContent('content', attrsD, 'text/html', 1)
    _start_fullitem = _start_content_encoded
    def _end_content(self):
        # plain-text and HTML content also doubles as the summary
        copyToSummary = self.mapContentType(self.contentparams.get('type')) in (['text/plain'] + self.html_types)
        value = self.popContent('content')
        if copyToSummary:
            self._save('summary', value)
    _end_body = _end_content
    _end_xhtml_body = _end_content
    _end_content_encoded = _end_content
    _end_fullitem = _end_content
    _end_prodlink = _end_content
    def _start_itunes_image(self, attrsD):
        # itunes:image carries its URL in the href attribute
        self.push('itunes_image', 0)
        if attrsD.get('href'):
            self._getContext()['image'] = FeedParserDict({'href': attrsD.get('href')})
    _start_itunes_link = _start_itunes_image
    def _end_itunes_block(self):
        # normalize 'yes'/anything-else to 1/0
        value = self.pop('itunes_block', 0)
        self._getContext()['itunes_block'] = (value == 'yes') and 1 or 0
    def _end_itunes_explicit(self):
        value = self.pop('itunes_explicit', 0)
        # Convert 'yes' -> True, 'clean' to False, and any other value to None
        # False and None both evaluate as False, so the difference can be ignored
        # by applications that only need to know if the content is explicit.
        self._getContext()['itunes_explicit'] = (None, False, True)[(value == 'yes' and 2) or value == 'clean' or 0]
    def _start_media_content(self, attrsD):
        # media:content elements accumulate in a list; attributes only
        context = self._getContext()
        context.setdefault('media_content', [])
        context['media_content'].append(attrsD)
    def _start_media_thumbnail(self, attrsD):
        context = self._getContext()
        context.setdefault('media_thumbnail', [])
        self.push('url', 1) # new
        context['media_thumbnail'].append(attrsD)
    def _end_media_thumbnail(self):
        # some feeds put the thumbnail URL in the element text rather
        # than the url attribute; use it unless the attribute was set
        url = self.pop('url')
        context = self._getContext()
        if url != None and len(url.strip()) != 0:
            if not context['media_thumbnail'][-1].has_key('url'):
                context['media_thumbnail'][-1]['url'] = url
    def _start_media_player(self, attrsD):
        self.push('media_player', 0)
        self._getContext()['media_player'] = FeedParserDict(attrsD)
    def _end_media_player(self):
        # the element text becomes the player's 'content' field
        value = self.pop('media_player')
        context = self._getContext()
        context['media_player']['content'] = value
    def _start_newlocation(self, attrsD):
        self.push('newlocation', 1)
    def _end_newlocation(self):
        # RSS <newLocation>: the feed has permanently moved
        url = self.pop('newlocation')
        context = self._getContext()
        # don't set newlocation if the context isn't right
        if context is not self.feeddata:
            return
        context['newlocation'] = _makeSafeAbsoluteURI(self.baseuri, url.strip())
if _XML_AVAILABLE:
class _StrictFeedParser(_FeedParserMixin, xml.sax.handler.ContentHandler):
def __init__(self, baseuri, baselang, encoding):
if _debug: sys.stderr.write('trying StrictFeedParser\n')
xml.sax.handler.ContentHandler.__init__(self)
_FeedParserMixin.__init__(self, baseuri, baselang, encoding)
self.bozo = 0
self.exc = None
self.decls = {}
def startPrefixMapping(self, prefix, uri):
self.trackNamespace(prefix, uri)
if uri == 'http://www.w3.org/1999/xlink':
self.decls['xmlns:'+prefix] = uri
def startElementNS(self, name, qname, attrs):
namespace, localname = name
lowernamespace = str(namespace or '').lower()
if lowernamespace.find('backend.userland.com/rss') <> -1:
# match any backend.userland.com namespace
namespace = 'http://backend.userland.com/rss'
lowernamespace = namespace
if qname and qname.find(':') > 0:
givenprefix = qname.split(':')[0]
else:
givenprefix = None
prefix = self._matchnamespaces.get(lowernamespace, givenprefix)
if givenprefix and (prefix == None or (prefix == '' and lowernamespace == '')) and not self.namespacesInUse.has_key(givenprefix):
raise UndeclaredNamespace, "'%s' is not associated with a namespace" % givenprefix
localname = str(localname).lower()
# qname implementation is horribly broken in Python 2.1 (it
# doesn't report any), and slightly broken in Python 2.2 (it
# doesn't report the xml: namespace). So we match up namespaces
# with a known list first, and then possibly override them with
# the qnames the SAX parser gives us (if indeed it gives us any
# at all). Thanks to MatejC for helping me test this and
# tirelessly telling me that it didn't work yet.
attrsD, self.decls = self.decls, {}
if localname=='math' and namespace=='http://www.w3.org/1998/Math/MathML':
attrsD['xmlns']=namespace
if localname=='svg' and namespace=='http://www.w3.org/2000/svg':
attrsD['xmlns']=namespace
if prefix:
localname = prefix.lower() + ':' + localname
elif namespace and not qname: #Expat
for name,value in self.namespacesInUse.items():
if name and value == namespace:
localname = name + ':' + localname
break
if _debug: sys.stderr.write('startElementNS: qname = %s, namespace = %s, givenprefix = %s, prefix = %s, attrs = %s, localname = %s\n' % (qname, namespace, givenprefix, prefix, attrs.items(), localname))
for (namespace, attrlocalname), attrvalue in attrs._attrs.items():
lowernamespace = (namespace or '').lower()
prefix = self._matchnamespaces.get(lowernamespace, '')
if prefix:
attrlocalname = prefix + ':' + attrlocalname
attrsD[str(attrlocalname).lower()] = attrvalue
for qname in attrs.getQNames():
attrsD[str(qname).lower()] = attrs.getValueByQName(qname)
self.unknown_starttag(localname, attrsD.items())
def characters(self, text):
self.handle_data(text)
    def endElementNS(self, name, qname):
        """SAX callback for a namespaced end tag.

        Mirrors the prefix resolution performed in startElementNS so the
        emitted end tag matches the corresponding start tag: resolve the
        namespace to a canonical prefix, fall back to the parser-supplied
        prefix, and emit a lowercased 'prefix:localname'.
        """
        namespace, localname = name
        lowernamespace = str(namespace or '').lower()
        if qname and qname.find(':') > 0:
            givenprefix = qname.split(':')[0]
        else:
            givenprefix = ''
        # prefer the canonical prefix from the known-namespace map; fall
        # back to whatever prefix the SAX parser reported
        prefix = self._matchnamespaces.get(lowernamespace, givenprefix)
        if prefix:
            localname = prefix + ':' + localname
        elif namespace and not qname: #Expat
            # Expat supplies no qname: recover the in-use prefix by
            # reverse lookup of the namespace URI
            for name,value in self.namespacesInUse.items():
                if name and value == namespace:
                    localname = name + ':' + localname
                    break
        localname = str(localname).lower()
        self.unknown_endtag(localname)
    def error(self, exc):
        # SAX error handler: mark the feed as "bozo" (ill-formed) and
        # stash the exception so callers can inspect what went wrong.
        self.bozo = 1
        self.exc = exc
    def fatalError(self, exc):
        # Fatal XML error: record it like a recoverable error, then
        # re-raise to abort strict parsing (the caller falls back to the
        # loose parser).
        self.error(exc)
        raise exc
class _BaseHTMLProcessor(sgmllib.SGMLParser):
    """Pass-through HTML processor built on sgmllib.

    Parses markup and reconstructs it fragment by fragment into
    self.pieces; output() returns the re-serialized document.  Subclasses
    (the relative-URI resolver, the sanitizer) override the unknown_*
    handlers to filter or rewrite markup as it streams through.

    NOTE(review): several string/regex literals in this copy appear to be
    HTML entities that were decoded in transit (no-op replace() calls,
    suspicious regex replacements below) -- verify against upstream
    feedparser before modifying them.
    """
    # characters that need escaping when re-serializing markup
    special = re.compile('''[<>'"]''')
    # an '&' that does not begin a character or entity reference
    bare_ampersand = re.compile("&(?!#\d+;|#x[0-9a-fA-F]+;|\w+;)")
    # HTML "void" elements: never emit an end tag for these
    elements_no_end_tag = [
      'area', 'base', 'basefont', 'br', 'col', 'command', 'embed', 'frame',
      'hr', 'img', 'input', 'isindex', 'keygen', 'link', 'meta', 'param',
      'source', 'track', 'wbr'
    ]
    def __init__(self, encoding, _type):
        """Remember the output encoding and document MIME type, then
        initialize the underlying SGMLParser."""
        self.encoding = encoding
        self._type = _type
        if _debug: sys.stderr.write('entering BaseHTMLProcessor, encoding=%s\n' % self.encoding)
        sgmllib.SGMLParser.__init__(self)
    def reset(self):
        # accumulated output fragments, joined by output()
        self.pieces = []
        sgmllib.SGMLParser.reset(self)
    def _shorttag_replace(self, match):
        # regex callback: expand a self-closing <tag/> into either a void
        # element or an explicit open/close pair, depending on the tag
        tag = match.group(1)
        if tag in self.elements_no_end_tag:
            return '<' + tag + ' />'
        else:
            return '<' + tag + '></' + tag + '>'
    def parse_starttag(self,i):
        # For XHTML, a self-closing start tag must also trigger the
        # end-tag handler so element nesting stays balanced.
        j=sgmllib.SGMLParser.parse_starttag(self, i)
        if self._type == 'application/xhtml+xml':
            if j>2 and self.rawdata[j-2:j]=='/>':
                self.unknown_endtag(self.lasttag)
        return j
    def feed(self, data):
        """Pre-process raw markup, then feed it through the SGML parser
        and close it (one-shot parse)."""
        # NOTE(review): this substitution looks like a no-op in this copy;
        # presumably the replacement was an escaped '<' before entity
        # mangling -- confirm against upstream.
        data = re.compile(r'<!((?!DOCTYPE|--|\[))', re.IGNORECASE).sub(r'<!\1', data)
        #data = re.sub(r'<(\S+?)\s*?/>', self._shorttag_replace, data) # bug [ 1399464 ] Bad regexp for _shorttag_replace
        data = re.sub(r'<([^<>\s]+?)\s*/>', self._shorttag_replace, data)
        # NOTE(review): the two replace() calls below also look entity-
        # mangled (likely '&#39;' -> "'" and '&#34;' -> '"' originally).
        data = data.replace(''', "'")
        data = data.replace('"', '"')
        try:
            # Python 3 detection: 'bytes' exists and is distinct from 'str'
            bytes
            if bytes is str:
                raise NameError
            self.encoding = self.encoding + '_INVALID_PYTHON_3'
        except NameError:
            # Python 2: encode unicode input to the declared encoding
            if self.encoding and type(data) == type(u''):
                data = data.encode(self.encoding)
        sgmllib.SGMLParser.feed(self, data)
        sgmllib.SGMLParser.close(self)
    def normalize_attrs(self, attrs):
        if not attrs: return attrs
        # utility method to be called by descendants
        # lowercase keys (last duplicate wins); lowercase rel/type values;
        # return a sorted list of (key, value) pairs
        attrs = dict([(k.lower(), v) for k, v in attrs]).items()
        attrs = [(k, k in ('rel', 'type') and v.lower() or v) for k, v in attrs]
        attrs.sort()
        return attrs
    def unknown_starttag(self, tag, attrs):
        # called for each start tag
        # attrs is a list of (attr, value) tuples
        # e.g. for <pre class='screen'>, tag='pre', attrs=[('class', 'screen')]
        if _debug: sys.stderr.write('_BaseHTMLProcessor, unknown_starttag, tag=%s\n' % tag)
        uattrs = []
        strattrs=''
        if attrs:
            for key, value in attrs:
                # NOTE(review): these replace() targets look entity-mangled
                # (likely '&gt;'/'&lt;'/'&quot;' originally) -- verify.
                value=value.replace('>','>').replace('<','<').replace('"','"')
                value = self.bare_ampersand.sub("&", value)
                # thanks to Kevin Marks for this breathtaking hack to deal with (valid) high-bit attribute values in UTF-8 feeds
                if type(value) != type(u''):
                    try:
                        value = unicode(value, self.encoding)
                    except:
                        value = unicode(value, 'iso-8859-1')
                try:
                    # Currently, in Python 3 the key is already a str, and cannot be decoded again
                    uattrs.append((unicode(key, self.encoding), value))
                except TypeError:
                    uattrs.append((key, value))
            strattrs = u''.join([u' %s="%s"' % (key, value) for key, value in uattrs])
            if self.encoding:
                try:
                    strattrs=strattrs.encode(self.encoding)
                except:
                    pass
        if tag in self.elements_no_end_tag:
            self.pieces.append('<%(tag)s%(strattrs)s />' % locals())
        else:
            self.pieces.append('<%(tag)s%(strattrs)s>' % locals())
    def unknown_endtag(self, tag):
        # called for each end tag, e.g. for </pre>, tag will be 'pre'
        # Reconstruct the original end tag.
        if tag not in self.elements_no_end_tag:
            self.pieces.append("</%(tag)s>" % locals())
    def handle_charref(self, ref):
        # called for each character reference, e.g. for '&#160;', ref will be '160'
        # Reconstruct the original character reference.
        if ref.startswith('x'):
            # hexadecimal form: '&#xA0;'
            value = unichr(int(ref[1:],16))
        else:
            value = unichr(int(ref))
        # map cp1252 "smart" characters back to their numeric references
        if value in _cp1252.keys():
            self.pieces.append('&#%s;' % hex(ord(_cp1252[value]))[1:])
        else:
            self.pieces.append('&#%(ref)s;' % locals())
    def handle_entityref(self, ref):
        # called for each entity reference, e.g. for '&copy;', ref will be 'copy'
        # Reconstruct the original entity reference.
        if name2codepoint.has_key(ref):
            self.pieces.append('&%(ref)s;' % locals())
        else:
            # unknown entity: emit without the trailing semicolon
            self.pieces.append('&%(ref)s' % locals())
    def handle_data(self, text):
        # called for each block of plain text, i.e. outside of any tag and
        # not containing any character or entity references
        # Store the original text verbatim.
        if _debug: sys.stderr.write('_BaseHTMLProcessor, handle_data, text=%s\n' % text)
        self.pieces.append(text)
    def handle_comment(self, text):
        # called for each HTML comment, e.g. <!-- insert Javascript code here -->
        # Reconstruct the original comment.
        self.pieces.append('<!--%(text)s-->' % locals())
    def handle_pi(self, text):
        # called for each processing instruction, e.g. <?instruction>
        # Reconstruct original processing instruction.
        self.pieces.append('<?%(text)s>' % locals())
    def handle_decl(self, text):
        # called for the DOCTYPE, if present, e.g.
        # <!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
        #     "http://www.w3.org/TR/html4/loose.dtd">
        # Reconstruct original DOCTYPE
        self.pieces.append('<!%(text)s>' % locals())
    # looser declaration-name pattern than sgmllib's default (allows '.',
    # ':' etc. in names)
    _new_declname_match = re.compile(r'[a-zA-Z][-_.a-zA-Z0-9:]*\s*').match
    def _scan_name(self, i, declstartpos):
        # Override of sgmllib's name scanner using the looser pattern
        # above; returns (name, end-position) or (None, -1) at a buffer
        # boundary or on a non-matching name.
        rawdata = self.rawdata
        n = len(rawdata)
        if i == n:
            return None, -1
        m = self._new_declname_match(rawdata, i)
        if m:
            s = m.group()
            name = s.strip()
            if (i + len(s)) == n:
                return None, -1 # end of buffer
            return name.lower(), m.end()
        else:
            # unparseable declaration: pass the raw text through verbatim
            self.handle_data(rawdata)
#            self.updatepos(declstartpos, i)
            return None, -1
    def convert_charref(self, name):
        # keep character references unexpanded in the output
        return '&#%s;' % name
    def convert_entityref(self, name):
        # keep entity references unexpanded in the output
        return '&%s;' % name
    def output(self):
        '''Return processed HTML as a single string'''
        return ''.join([str(p) for p in self.pieces])
    def parse_declaration(self, i):
        try:
            return sgmllib.SGMLParser.parse_declaration(self, i)
        except sgmllib.SGMLParseError:
            # escape the doctype declaration and continue parsing
            self.handle_data('&lt;')
            return i+1
class _LooseFeedParser(_FeedParserMixin, _BaseHTMLProcessor):
    """Non-strict feed parser: sgmllib-based HTML parsing combined with
    the feed-handling mix-in.  Used as the fallback when strict XML
    parsing of a feed fails."""
    def __init__(self, baseuri, baselang, encoding, entities):
        sgmllib.SGMLParser.__init__(self)
        _FeedParserMixin.__init__(self, baseuri, baselang, encoding)
        _BaseHTMLProcessor.__init__(self, encoding, 'application/xhtml+xml')
        # entity name -> replacement text map, applied by the mix-in
        self.entities=entities
    def decodeEntities(self, element, data):
        # NOTE(review): every replacement below is a no-op in this copy.
        # These were almost certainly '&amp;lt;' -> '&lt;' style entity
        # translations (double-escaped entity normalization) before the
        # literals were decoded in transit -- verify against upstream
        # feedparser before relying on or changing this method.
        data = data.replace('<', '<')
        data = data.replace('<', '<')
        data = data.replace('<', '<')
        data = data.replace('>', '>')
        data = data.replace('>', '>')
        data = data.replace('>', '>')
        data = data.replace('&', '&')
        data = data.replace('&', '&')
        data = data.replace('"', '"')
        data = data.replace('"', '"')
        data = data.replace(''', ''')
        data = data.replace(''', ''')
        # non-XML content types get entities fully decoded to characters
        if self.contentparams.has_key('type') and not self.contentparams.get('type', 'xml').endswith('xml'):
            data = data.replace('<', '<')
            data = data.replace('>', '>')
            data = data.replace('&', '&')
            data = data.replace('"', '"')
            data = data.replace(''', "'")
        return data
    def strattrs(self, attrs):
        # re-serialize an attribute list as ' name="value"' pairs
        # (NOTE(review): the quote-escaping replace() also looks mangled)
        return ''.join([' %s="%s"' % (n,v.replace('"','"')) for n,v in attrs])
class _MicroformatsParser:
    """Extract microformat data (hCard, rel-tag, enclosures, XFN) from an
    HTML document using BeautifulSoup.

    Results accumulate on the instance: self.tags (rel-tag entries),
    self.enclosures (downloadable links), self.xfn (XFN relationships)
    and self.vcard (a serialized vCard 3.0 string).
    """
    # property-type codes understood by getPropertyValue()
    STRING = 1
    DATE = 2
    URI = 3
    NODE = 4
    EMAIL = 5
    # rel values defined by the XFN (XHTML Friends Network) spec
    known_xfn_relationships = ['contact', 'acquaintance', 'friend', 'met', 'co-worker', 'coworker', 'colleague', 'co-resident', 'coresident', 'neighbor', 'child', 'parent', 'sibling', 'brother', 'sister', 'spouse', 'wife', 'husband', 'kin', 'relative', 'muse', 'crush', 'date', 'sweetheart', 'me']
    # file extensions treated as probably-downloadable media/binaries
    known_binary_extensions = ['zip','rar','exe','gz','tar','tgz','tbz2','bz2','z','7z','dmg','img','sit','sitx','hqx','deb','rpm','bz2','jar','rar','iso','bin','msi','mp2','mp3','ogg','ogm','mp4','m4v','m4a','avi','wma','wmv']
    def __init__(self, data, baseuri, encoding):
        self.document = BeautifulSoup.BeautifulSoup(data)
        self.baseuri = baseuri
        self.encoding = encoding
        # NOTE(review): this encode happens after BeautifulSoup has already
        # consumed 'data', and the encoded value is never used again --
        # looks like dead code; confirm against upstream.
        if type(data) == type(u''):
            data = data.encode(encoding)
        self.tags = []
        self.enclosures = []
        self.xfn = []
        self.vcard = None
    def vcardEscape(self, s):
        # escape vCard-special characters (',' ';' newline) in a value
        if type(s) in (type(''), type(u'')):
            s = s.replace(',', '\\,').replace(';', '\\;').replace('\n', '\\n')
        return s
    def vcardFold(self, s):
        # fold long vCard lines per RFC 2426: first line at 75 chars,
        # continuation lines start with one space and fold at 74
        s = re.sub(';+$', '', s)
        sFolded = ''
        iMax = 75
        sPrefix = ''
        while len(s) > iMax:
            sFolded += sPrefix + s[:iMax] + '\n'
            s = s[iMax:]
            sPrefix = ' '
            iMax = 74
        sFolded += sPrefix + s
        return sFolded
    def normalize(self, s):
        # collapse runs of whitespace into single spaces and trim
        return re.sub(r'\s+', ' ', s).strip()
    def unique(self, aList):
        # order-preserving de-duplication
        results = []
        for element in aList:
            if element not in results:
                results.append(element)
        return results
    def toISO8601(self, dt):
        # dt is a time struct (as produced by the date parsers)
        return time.strftime('%Y-%m-%dT%H:%M:%SZ', dt)
    def getPropertyValue(self, elmRoot, sProperty, iPropertyType=4, bAllowMultiple=0, bAutoEscape=0):
        """Find microformat property *sProperty* under *elmRoot*.

        iPropertyType selects the extraction/coercion rule (STRING, DATE,
        URI, NODE or EMAIL).  Returns a list when bAllowMultiple is true,
        otherwise the first value found; empty-string/None sentinels are
        returned per type when nothing matches.
        """
        all = lambda x: 1
        sProperty = sProperty.lower()
        bFound = 0
        bNormalize = 1
        # match the property name as a whole word inside class=""
        propertyMatch = {'class': re.compile(r'\b%s\b' % sProperty)}
        if bAllowMultiple and (iPropertyType != self.NODE):
            # multiple values may be encoded as <li> items in a list
            snapResults = []
            containers = elmRoot(['ul', 'ol'], propertyMatch)
            for container in containers:
                snapResults.extend(container('li'))
            bFound = (len(snapResults) != 0)
        if not bFound:
            snapResults = elmRoot(all, propertyMatch)
            bFound = (len(snapResults) != 0)
        if (not bFound) and (sProperty == 'value'):
            # 'value' may fall back to a <pre> block (verbatim, unnormalized)
            snapResults = elmRoot('pre')
            bFound = (len(snapResults) != 0)
            bNormalize = not bFound
            if not bFound:
                snapResults = [elmRoot]
                bFound = (len(snapResults) != 0)
        arFilter = []
        if sProperty == 'vcard':
            # exclude vcards nested inside other vcards
            snapFilter = elmRoot(all, propertyMatch)
            for node in snapFilter:
                if node.findParent(all, propertyMatch):
                    arFilter.append(node)
        arResults = []
        for node in snapResults:
            if node not in arFilter:
                arResults.append(node)
        bFound = (len(arResults) != 0)
        if not bFound:
            # type-appropriate empty sentinel
            if bAllowMultiple: return []
            elif iPropertyType == self.STRING: return ''
            elif iPropertyType == self.DATE: return None
            elif iPropertyType == self.URI: return ''
            elif iPropertyType == self.NODE: return None
            else: return None
        arValues = []
        for elmResult in arResults:
            sValue = None
            if iPropertyType == self.NODE:
                if bAllowMultiple:
                    arValues.append(elmResult)
                    continue
                else:
                    return elmResult
            sNodeName = elmResult.name.lower()
            # EMAIL: pull the address out of a mailto: link
            if (iPropertyType == self.EMAIL) and (sNodeName == 'a'):
                sValue = (elmResult.get('href') or '').split('mailto:').pop().split('?')[0]
            if sValue:
                sValue = bNormalize and self.normalize(sValue) or sValue.strip()
            # <abbr title="..."> carries the machine-readable value
            if (not sValue) and (sNodeName == 'abbr'):
                sValue = elmResult.get('title')
            if sValue:
                sValue = bNormalize and self.normalize(sValue) or sValue.strip()
            # URI values live in the tag-appropriate attribute
            if (not sValue) and (iPropertyType == self.URI):
                if sNodeName == 'a': sValue = elmResult.get('href')
                elif sNodeName == 'img': sValue = elmResult.get('src')
                elif sNodeName == 'object': sValue = elmResult.get('data')
            if sValue:
                sValue = bNormalize and self.normalize(sValue) or sValue.strip()
            if (not sValue) and (sNodeName == 'img'):
                sValue = elmResult.get('alt')
            if sValue:
                sValue = bNormalize and self.normalize(sValue) or sValue.strip()
            if not sValue:
                # last resort: rendered text content, tags stripped
                sValue = elmResult.renderContents()
                sValue = re.sub(r'<\S[^>]*>', '', sValue)
                sValue = sValue.replace('\r\n', '\n')
                sValue = sValue.replace('\r', '\n')
            if sValue:
                sValue = bNormalize and self.normalize(sValue) or sValue.strip()
            if not sValue: continue
            if iPropertyType == self.DATE:
                sValue = _parse_date_iso8601(sValue)
            if bAllowMultiple:
                arValues.append(bAutoEscape and self.vcardEscape(sValue) or sValue)
            else:
                return bAutoEscape and self.vcardEscape(sValue) or sValue
        return arValues
    def findVCards(self, elmRoot, bAgentParsing=0):
        """Serialize every hCard under *elmRoot* into a vCard 3.0 string.

        With bAgentParsing set, elmRoot itself is treated as the card
        (used when recursing into AGENT sub-cards).
        """
        sVCards = ''
        if not bAgentParsing:
            arCards = self.getPropertyValue(elmRoot, 'vcard', bAllowMultiple=1)
        else:
            arCards = [elmRoot]
        for elmCard in arCards:
            arLines = []
            def processSingleString(sProperty):
                # emit "PROP:value" for a single STRING property; returns
                # the value so callers can reuse it (e.g. FN)
                sValue = self.getPropertyValue(elmCard, sProperty, self.STRING, bAutoEscape=1).decode(self.encoding)
                if sValue:
                    arLines.append(self.vcardFold(sProperty.upper() + ':' + sValue))
                return sValue or u''
            def processSingleURI(sProperty):
                # emit a URI property; data: URIs are inlined as base64
                sValue = self.getPropertyValue(elmCard, sProperty, self.URI)
                if sValue:
                    sContentType = ''
                    sEncoding = ''
                    sValueKey = ''
                    if sValue.startswith('data:'):
                        sEncoding = ';ENCODING=b'
                        sContentType = sValue.split(';')[0].split('/').pop()
                        sValue = sValue.split(',', 1).pop()
                    else:
                        elmValue = self.getPropertyValue(elmCard, sProperty)
                        if elmValue:
                            if sProperty != 'url':
                                sValueKey = ';VALUE=uri'
                            sContentType = elmValue.get('type', '').strip().split('/').pop().strip()
                    sContentType = sContentType.upper()
                    if sContentType == 'OCTET-STREAM':
                        sContentType = ''
                    if sContentType:
                        sContentType = ';TYPE=' + sContentType.upper()
                    arLines.append(self.vcardFold(sProperty.upper() + sEncoding + sContentType + sValueKey + ':' + sValue))
            def processTypeValue(sProperty, arDefaultType, arForceType=None):
                # emit "PROP;TYPE=a,b:value" for each instance of a typed
                # property (tel, email, label)
                arResults = self.getPropertyValue(elmCard, sProperty, bAllowMultiple=1)
                for elmResult in arResults:
                    arType = self.getPropertyValue(elmResult, 'type', self.STRING, 1, 1)
                    if arForceType:
                        arType = self.unique(arForceType + arType)
                    if not arType:
                        arType = arDefaultType
                    sValue = self.getPropertyValue(elmResult, 'value', self.EMAIL, 0)
                    if sValue:
                        arLines.append(self.vcardFold(sProperty.upper() + ';TYPE=' + ','.join(arType) + ':' + sValue))
            # AGENT
            # must do this before all other properties because it is destructive
            # (removes nested class="vcard" nodes so they don't interfere with
            # this vcard's other properties)
            arAgent = self.getPropertyValue(elmCard, 'agent', bAllowMultiple=1)
            for elmAgent in arAgent:
                # NOTE(review): elmAgent.get('class') may be None here,
                # which would make re.search raise -- confirm upstream
                if re.compile(r'\bvcard\b').search(elmAgent.get('class')):
                    sAgentValue = self.findVCards(elmAgent, 1) + '\n'
                    sAgentValue = sAgentValue.replace('\n', '\\n')
                    sAgentValue = sAgentValue.replace(';', '\\;')
                    if sAgentValue:
                        arLines.append(self.vcardFold('AGENT:' + sAgentValue))
                    # Completely remove the agent element from the parse tree
                    elmAgent.extract()
                else:
                    sAgentValue = self.getPropertyValue(elmAgent, 'value', self.URI, bAutoEscape=1);
                    if sAgentValue:
                        arLines.append(self.vcardFold('AGENT;VALUE=uri:' + sAgentValue))
            # FN (full name)
            sFN = processSingleString('fn')
            # N (name)
            elmName = self.getPropertyValue(elmCard, 'n')
            if elmName:
                sFamilyName = self.getPropertyValue(elmName, 'family-name', self.STRING, bAutoEscape=1)
                sGivenName = self.getPropertyValue(elmName, 'given-name', self.STRING, bAutoEscape=1)
                arAdditionalNames = self.getPropertyValue(elmName, 'additional-name', self.STRING, 1, 1) + self.getPropertyValue(elmName, 'additional-names', self.STRING, 1, 1)
                arHonorificPrefixes = self.getPropertyValue(elmName, 'honorific-prefix', self.STRING, 1, 1) + self.getPropertyValue(elmName, 'honorific-prefixes', self.STRING, 1, 1)
                arHonorificSuffixes = self.getPropertyValue(elmName, 'honorific-suffix', self.STRING, 1, 1) + self.getPropertyValue(elmName, 'honorific-suffixes', self.STRING, 1, 1)
                arLines.append(self.vcardFold('N:' + sFamilyName + ';' +
                                              sGivenName + ';' +
                                              ','.join(arAdditionalNames) + ';' +
                                              ','.join(arHonorificPrefixes) + ';' +
                                              ','.join(arHonorificSuffixes)))
            elif sFN:
                # implied "N" optimization
                # http://microformats.org/wiki/hcard#Implied_.22N.22_Optimization
                arNames = self.normalize(sFN).split()
                if len(arNames) == 2:
                    # "Last, First", single-initial, or "Xy." suggests the
                    # family name comes first
                    bFamilyNameFirst = (arNames[0].endswith(',') or
                                        len(arNames[1]) == 1 or
                                        ((len(arNames[1]) == 2) and (arNames[1].endswith('.'))))
                    if bFamilyNameFirst:
                        arLines.append(self.vcardFold('N:' + arNames[0] + ';' + arNames[1]))
                    else:
                        arLines.append(self.vcardFold('N:' + arNames[1] + ';' + arNames[0]))
            # SORT-STRING
            sSortString = self.getPropertyValue(elmCard, 'sort-string', self.STRING, bAutoEscape=1)
            if sSortString:
                arLines.append(self.vcardFold('SORT-STRING:' + sSortString))
            # NICKNAME
            arNickname = self.getPropertyValue(elmCard, 'nickname', self.STRING, 1, 1)
            if arNickname:
                arLines.append(self.vcardFold('NICKNAME:' + ','.join(arNickname)))
            # PHOTO
            processSingleURI('photo')
            # BDAY
            dtBday = self.getPropertyValue(elmCard, 'bday', self.DATE)
            if dtBday:
                arLines.append(self.vcardFold('BDAY:' + self.toISO8601(dtBday)))
            # ADR (address)
            arAdr = self.getPropertyValue(elmCard, 'adr', bAllowMultiple=1)
            for elmAdr in arAdr:
                arType = self.getPropertyValue(elmAdr, 'type', self.STRING, 1, 1)
                if not arType:
                    arType = ['intl','postal','parcel','work'] # default adr types, see RFC 2426 section 3.2.1
                sPostOfficeBox = self.getPropertyValue(elmAdr, 'post-office-box', self.STRING, 0, 1)
                sExtendedAddress = self.getPropertyValue(elmAdr, 'extended-address', self.STRING, 0, 1)
                sStreetAddress = self.getPropertyValue(elmAdr, 'street-address', self.STRING, 0, 1)
                sLocality = self.getPropertyValue(elmAdr, 'locality', self.STRING, 0, 1)
                sRegion = self.getPropertyValue(elmAdr, 'region', self.STRING, 0, 1)
                sPostalCode = self.getPropertyValue(elmAdr, 'postal-code', self.STRING, 0, 1)
                sCountryName = self.getPropertyValue(elmAdr, 'country-name', self.STRING, 0, 1)
                arLines.append(self.vcardFold('ADR;TYPE=' + ','.join(arType) + ':' +
                                              sPostOfficeBox + ';' +
                                              sExtendedAddress + ';' +
                                              sStreetAddress + ';' +
                                              sLocality + ';' +
                                              sRegion + ';' +
                                              sPostalCode + ';' +
                                              sCountryName))
            # LABEL
            processTypeValue('label', ['intl','postal','parcel','work'])
            # TEL (phone number)
            processTypeValue('tel', ['voice'])
            # EMAIL
            processTypeValue('email', ['internet'], ['internet'])
            # MAILER
            processSingleString('mailer')
            # TZ (timezone)
            processSingleString('tz')
            # GEO (geographical information)
            elmGeo = self.getPropertyValue(elmCard, 'geo')
            if elmGeo:
                sLatitude = self.getPropertyValue(elmGeo, 'latitude', self.STRING, 0, 1)
                sLongitude = self.getPropertyValue(elmGeo, 'longitude', self.STRING, 0, 1)
                arLines.append(self.vcardFold('GEO:' + sLatitude + ';' + sLongitude))
            # TITLE
            processSingleString('title')
            # ROLE
            processSingleString('role')
            # LOGO
            processSingleURI('logo')
            # ORG (organization)
            elmOrg = self.getPropertyValue(elmCard, 'org')
            if elmOrg:
                sOrganizationName = self.getPropertyValue(elmOrg, 'organization-name', self.STRING, 0, 1)
                if not sOrganizationName:
                    # implied "organization-name" optimization
                    # http://microformats.org/wiki/hcard#Implied_.22organization-name.22_Optimization
                    sOrganizationName = self.getPropertyValue(elmCard, 'org', self.STRING, 0, 1)
                    if sOrganizationName:
                        arLines.append(self.vcardFold('ORG:' + sOrganizationName))
                else:
                    arOrganizationUnit = self.getPropertyValue(elmOrg, 'organization-unit', self.STRING, 1, 1)
                    arLines.append(self.vcardFold('ORG:' + sOrganizationName + ';' + ';'.join(arOrganizationUnit)))
            # CATEGORY
            arCategory = self.getPropertyValue(elmCard, 'category', self.STRING, 1, 1) + self.getPropertyValue(elmCard, 'categories', self.STRING, 1, 1)
            if arCategory:
                arLines.append(self.vcardFold('CATEGORIES:' + ','.join(arCategory)))
            # NOTE
            processSingleString('note')
            # REV
            processSingleString('rev')
            # SOUND
            processSingleURI('sound')
            # UID
            processSingleString('uid')
            # URL
            processSingleURI('url')
            # CLASS
            processSingleString('class')
            # KEY
            processSingleURI('key')
            if arLines:
                arLines = [u'BEGIN:vCard',u'VERSION:3.0'] + arLines + [u'END:vCard']
                sVCards += u'\n'.join(arLines) + u'\n'
        return sVCards.strip()
    def isProbablyDownloadable(self, elm):
        # heuristics: a link is "downloadable" if it declares a media/
        # binary content type, or its path ends in a known binary extension
        attrsD = elm.attrMap
        if not attrsD.has_key('href'): return 0
        linktype = attrsD.get('type', '').strip()
        if linktype.startswith('audio/') or \
           linktype.startswith('video/') or \
           (linktype.startswith('application/') and not linktype.endswith('xml')):
            return 1
        path = urlparse.urlparse(attrsD['href'])[2]
        if path.find('.') == -1: return 0
        fileext = path.split('.').pop().lower()
        return fileext in self.known_binary_extensions
    def findTags(self):
        """Collect rel="tag" links into self.tags as FeedParserDicts with
        term / scheme / label keys."""
        all = lambda x: 1
        for elm in self.document(all, {'rel': re.compile(r'\btag\b')}):
            href = elm.get('href')
            if not href: continue
            urlscheme, domain, path, params, query, fragment = \
                       urlparse.urlparse(_urljoin(self.baseuri, href))
            segments = path.split('/')
            tag = segments.pop()
            if not tag:
                # trailing slash: the tag is the second-to-last segment
                tag = segments.pop()
            tagscheme = urlparse.urlunparse((urlscheme, domain, '/'.join(segments), '', '', ''))
            if not tagscheme.endswith('/'):
                tagscheme += '/'
            self.tags.append(FeedParserDict({"term": tag, "scheme": tagscheme, "label": elm.string or ''}))
    def findEnclosures(self):
        """Collect rel="enclosure" (or heuristically downloadable) links
        into self.enclosures."""
        all = lambda x: 1
        enclosure_match = re.compile(r'\benclosure\b')
        for elm in self.document(all, {'href': re.compile(r'.+')}):
            if not enclosure_match.search(elm.get('rel', '')) and not self.isProbablyDownloadable(elm): continue
            if elm.attrMap not in self.enclosures:
                self.enclosures.append(elm.attrMap)
                if elm.string and not elm.get('title'):
                    self.enclosures[-1]['title'] = elm.string
    def findXFN(self):
        """Collect XFN relationship links into self.xfn."""
        all = lambda x: 1
        for elm in self.document(all, {'rel': re.compile('.+'), 'href': re.compile('.+')}):
            rels = elm.get('rel', '').split()
            xfn_rels = []
            for rel in rels:
                if rel in self.known_xfn_relationships:
                    xfn_rels.append(rel)
            if xfn_rels:
                self.xfn.append({"relationships": xfn_rels, "href": elm.get('href', ''), "name": elm.string})
def _parseMicroformats(htmlSource, baseURI, encoding):
    """Parse hCard / rel-tag / enclosure / XFN microformats from HTML.

    Returns a dict with 'tags', 'enclosures', 'xfn' and 'vcard' keys, or
    None when BeautifulSoup is unavailable or the source cannot be parsed.
    """
    if not BeautifulSoup: return
    if _debug: sys.stderr.write('entering _parseMicroformats\n')
    try:
        p = _MicroformatsParser(htmlSource, baseURI, encoding)
    except UnicodeEncodeError:
        # sgmllib throws this exception when performing lookups of tags
        # with non-ASCII characters in them.
        return
    p.vcard = p.findVCards(p.document)
    p.findTags()
    p.findEnclosures()
    p.findXFN()
    return {"tags": p.tags, "enclosures": p.enclosures, "xfn": p.xfn, "vcard": p.vcard}
class _RelativeURIResolver(_BaseHTMLProcessor):
    """HTML pass-through processor that rewrites known URI-bearing
    attributes to safe absolute URIs resolved against a base URI."""
    # (tag, attribute) pairs whose values are URIs needing resolution
    relative_uris = [('a', 'href'),
                     ('applet', 'codebase'),
                     ('area', 'href'),
                     ('blockquote', 'cite'),
                     ('body', 'background'),
                     ('del', 'cite'),
                     ('form', 'action'),
                     ('frame', 'longdesc'),
                     ('frame', 'src'),
                     ('iframe', 'longdesc'),
                     ('iframe', 'src'),
                     ('head', 'profile'),
                     ('img', 'longdesc'),
                     ('img', 'src'),
                     ('img', 'usemap'),
                     ('input', 'src'),
                     ('input', 'usemap'),
                     ('ins', 'cite'),
                     ('link', 'href'),
                     ('object', 'classid'),
                     ('object', 'codebase'),
                     ('object', 'data'),
                     ('object', 'usemap'),
                     ('q', 'cite'),
                     ('script', 'src')]
    def __init__(self, baseuri, encoding, _type):
        _BaseHTMLProcessor.__init__(self, encoding, _type)
        self.baseuri = baseuri
    def resolveURI(self, uri):
        # resolve against the base URI and filter unacceptable schemes
        return _makeSafeAbsoluteURI(_urljoin(self.baseuri, uri.strip()))
    def unknown_starttag(self, tag, attrs):
        if _debug:
            sys.stderr.write('tag: [%s] with attributes: [%s]\n' % (tag, str(attrs)))
        attrs = self.normalize_attrs(attrs)
        # resolve only the attributes known to carry URIs for this tag
        attrs = [(key, ((tag, key) in self.relative_uris) and self.resolveURI(value) or value) for key, value in attrs]
        _BaseHTMLProcessor.unknown_starttag(self, tag, attrs)
def _resolveRelativeURIs(htmlSource, baseURI, encoding, _type):
    """Rewrite relative URIs in *htmlSource* as absolute URIs resolved
    against *baseURI*; return the re-serialized HTML."""
    if _debug:
        sys.stderr.write('entering _resolveRelativeURIs\n')
    p = _RelativeURIResolver(baseURI, encoding, _type)
    p.feed(htmlSource)
    return p.output()
def _makeSafeAbsoluteURI(base, rel=None):
    """Join *base* and *rel* into an absolute URI, admitting only
    whitelisted schemes.

    Returns the joined URI when its scheme is in ACCEPTABLE_URI_SCHEMES,
    and u'' otherwise.  With an empty whitelist, no scheme checking is
    performed at all.
    """
    # no whitelist configured: join unconditionally
    if not ACCEPTABLE_URI_SCHEMES:
        return _urljoin(base, rel or u'')
    if not base:
        return rel or u''
    if not rel:
        # nothing to join; vet the base URI's scheme on its own
        scheme = urlparse.urlparse(base)[0]
        if scheme and scheme not in ACCEPTABLE_URI_SCHEMES:
            return u''
        return base
    joined = _urljoin(base, rel)
    if joined.strip().split(':', 1)[0] in ACCEPTABLE_URI_SCHEMES:
        return joined
    return u''
class _HTMLSanitizer(_BaseHTMLProcessor):
    """Whitelist-based HTML sanitizer.

    Drops any element or attribute not on the acceptable_* lists, removes
    the content of script/applet/style entirely, sanitizes inline CSS
    through sanitize_style(), and admits MathML/SVG vocabularies only
    inside properly-namespaced math/svg elements.
    """
    acceptable_elements = ['a', 'abbr', 'acronym', 'address', 'area',
      'article', 'aside', 'audio', 'b', 'big', 'blockquote', 'br', 'button',
      'canvas', 'caption', 'center', 'cite', 'code', 'col', 'colgroup',
      'command', 'datagrid', 'datalist', 'dd', 'del', 'details', 'dfn',
      'dialog', 'dir', 'div', 'dl', 'dt', 'em', 'event-source', 'fieldset',
      'figcaption', 'figure', 'footer', 'font', 'form', 'header', 'h1',
      'h2', 'h3', 'h4', 'h5', 'h6', 'hr', 'i', 'img', 'input', 'ins',
      'keygen', 'kbd', 'label', 'legend', 'li', 'm', 'map', 'menu', 'meter',
      'multicol', 'nav', 'nextid', 'ol', 'output', 'optgroup', 'option',
      'p', 'pre', 'progress', 'q', 's', 'samp', 'section', 'select',
      'small', 'sound', 'source', 'spacer', 'span', 'strike', 'strong',
      'sub', 'sup', 'table', 'tbody', 'td', 'textarea', 'time', 'tfoot',
      'th', 'thead', 'tr', 'tt', 'u', 'ul', 'var', 'video', 'noscript']
    acceptable_attributes = ['abbr', 'accept', 'accept-charset', 'accesskey',
      'action', 'align', 'alt', 'autocomplete', 'autofocus', 'axis',
      'background', 'balance', 'bgcolor', 'bgproperties', 'border',
      'bordercolor', 'bordercolordark', 'bordercolorlight', 'bottompadding',
      'cellpadding', 'cellspacing', 'ch', 'challenge', 'char', 'charoff',
      'choff', 'charset', 'checked', 'cite', 'class', 'clear', 'color', 'cols',
      'colspan', 'compact', 'contenteditable', 'controls', 'coords', 'data',
      'datafld', 'datapagesize', 'datasrc', 'datetime', 'default', 'delay',
      'dir', 'disabled', 'draggable', 'dynsrc', 'enctype', 'end', 'face', 'for',
      'form', 'frame', 'galleryimg', 'gutter', 'headers', 'height', 'hidefocus',
      'hidden', 'high', 'href', 'hreflang', 'hspace', 'icon', 'id', 'inputmode',
      'ismap', 'keytype', 'label', 'leftspacing', 'lang', 'list', 'longdesc',
      'loop', 'loopcount', 'loopend', 'loopstart', 'low', 'lowsrc', 'max',
      'maxlength', 'media', 'method', 'min', 'multiple', 'name', 'nohref',
      'noshade', 'nowrap', 'open', 'optimum', 'pattern', 'ping', 'point-size',
      'prompt', 'pqg', 'radiogroup', 'readonly', 'rel', 'repeat-max',
      'repeat-min', 'replace', 'required', 'rev', 'rightspacing', 'rows',
      'rowspan', 'rules', 'scope', 'selected', 'shape', 'size', 'span', 'src',
      'start', 'step', 'summary', 'suppress', 'tabindex', 'target', 'template',
      'title', 'toppadding', 'type', 'unselectable', 'usemap', 'urn', 'valign',
      'value', 'variable', 'volume', 'vspace', 'vrml', 'width', 'wrap',
      'xml:lang']
    # elements whose entire content (not just the tags) must be removed
    unacceptable_elements_with_end_tag = ['script', 'applet', 'style']
    acceptable_css_properties = ['azimuth', 'background-color',
      'border-bottom-color', 'border-collapse', 'border-color',
      'border-left-color', 'border-right-color', 'border-top-color', 'clear',
      'color', 'cursor', 'direction', 'display', 'elevation', 'float', 'font',
      'font-family', 'font-size', 'font-style', 'font-variant', 'font-weight',
      'height', 'letter-spacing', 'line-height', 'overflow', 'pause',
      'pause-after', 'pause-before', 'pitch', 'pitch-range', 'richness',
      'speak', 'speak-header', 'speak-numeral', 'speak-punctuation',
      'speech-rate', 'stress', 'text-align', 'text-decoration', 'text-indent',
      'unicode-bidi', 'vertical-align', 'voice-family', 'volume',
      'white-space', 'width']
    # survey of common keywords found in feeds
    acceptable_css_keywords = ['auto', 'aqua', 'black', 'block', 'blue',
      'bold', 'both', 'bottom', 'brown', 'center', 'collapse', 'dashed',
      'dotted', 'fuchsia', 'gray', 'green', '!important', 'italic', 'left',
      'lime', 'maroon', 'medium', 'none', 'navy', 'normal', 'nowrap', 'olive',
      'pointer', 'purple', 'red', 'right', 'solid', 'silver', 'teal', 'top',
      'transparent', 'underline', 'white', 'yellow']
    # hex colors, rgb() triples, and dimension/percentage values
    valid_css_values = re.compile('^(#[0-9a-f]+|rgb\(\d+%?,\d*%?,?\d*%?\)?|' +
      '\d{0,2}\.?\d{0,2}(cm|em|ex|in|mm|pc|pt|px|%|,|\))?)$')
    mathml_elements = ['annotation', 'annotation-xml', 'maction', 'math',
      'merror', 'mfenced', 'mfrac', 'mi', 'mmultiscripts', 'mn', 'mo', 'mover', 'mpadded',
      'mphantom', 'mprescripts', 'mroot', 'mrow', 'mspace', 'msqrt', 'mstyle',
      'msub', 'msubsup', 'msup', 'mtable', 'mtd', 'mtext', 'mtr', 'munder',
      'munderover', 'none', 'semantics']
    mathml_attributes = ['actiontype', 'align', 'columnalign', 'columnalign',
      'columnalign', 'close', 'columnlines', 'columnspacing', 'columnspan', 'depth',
      'display', 'displaystyle', 'encoding', 'equalcolumns', 'equalrows',
      'fence', 'fontstyle', 'fontweight', 'frame', 'height', 'linethickness',
      'lspace', 'mathbackground', 'mathcolor', 'mathvariant', 'mathvariant',
      'maxsize', 'minsize', 'open', 'other', 'rowalign', 'rowalign', 'rowalign',
      'rowlines', 'rowspacing', 'rowspan', 'rspace', 'scriptlevel', 'selection',
      'separator', 'separators', 'stretchy', 'width', 'width', 'xlink:href',
      'xlink:show', 'xlink:type', 'xmlns', 'xmlns:xlink']
    # svgtiny - foreignObject + linearGradient + radialGradient + stop
    svg_elements = ['a', 'animate', 'animateColor', 'animateMotion',
      'animateTransform', 'circle', 'defs', 'desc', 'ellipse', 'foreignObject',
      'font-face', 'font-face-name', 'font-face-src', 'g', 'glyph', 'hkern',
      'linearGradient', 'line', 'marker', 'metadata', 'missing-glyph', 'mpath',
      'path', 'polygon', 'polyline', 'radialGradient', 'rect', 'set', 'stop',
      'svg', 'switch', 'text', 'title', 'tspan', 'use']
    # svgtiny + class + opacity + offset + xmlns + xmlns:xlink
    svg_attributes = ['accent-height', 'accumulate', 'additive', 'alphabetic',
      'arabic-form', 'ascent', 'attributeName', 'attributeType',
      'baseProfile', 'bbox', 'begin', 'by', 'calcMode', 'cap-height',
      'class', 'color', 'color-rendering', 'content', 'cx', 'cy', 'd', 'dx',
      'dy', 'descent', 'display', 'dur', 'end', 'fill', 'fill-opacity',
      'fill-rule', 'font-family', 'font-size', 'font-stretch', 'font-style',
      'font-variant', 'font-weight', 'from', 'fx', 'fy', 'g1', 'g2',
      'glyph-name', 'gradientUnits', 'hanging', 'height', 'horiz-adv-x',
      'horiz-origin-x', 'id', 'ideographic', 'k', 'keyPoints', 'keySplines',
      'keyTimes', 'lang', 'mathematical', 'marker-end', 'marker-mid',
      'marker-start', 'markerHeight', 'markerUnits', 'markerWidth', 'max',
      'min', 'name', 'offset', 'opacity', 'orient', 'origin',
      'overline-position', 'overline-thickness', 'panose-1', 'path',
      'pathLength', 'points', 'preserveAspectRatio', 'r', 'refX', 'refY',
      'repeatCount', 'repeatDur', 'requiredExtensions', 'requiredFeatures',
      'restart', 'rotate', 'rx', 'ry', 'slope', 'stemh', 'stemv',
      'stop-color', 'stop-opacity', 'strikethrough-position',
      'strikethrough-thickness', 'stroke', 'stroke-dasharray',
      'stroke-dashoffset', 'stroke-linecap', 'stroke-linejoin',
      'stroke-miterlimit', 'stroke-opacity', 'stroke-width', 'systemLanguage',
      'target', 'text-anchor', 'to', 'transform', 'type', 'u1', 'u2',
      'underline-position', 'underline-thickness', 'unicode', 'unicode-range',
      'units-per-em', 'values', 'version', 'viewBox', 'visibility', 'width',
      'widths', 'x', 'x-height', 'x1', 'x2', 'xlink:actuate', 'xlink:arcrole',
      'xlink:href', 'xlink:role', 'xlink:show', 'xlink:title', 'xlink:type',
      'xml:base', 'xml:lang', 'xml:space', 'xmlns', 'xmlns:xlink', 'y', 'y1',
      'y2', 'zoomAndPan']
    # lazily-built lowercase -> camelCase maps for SVG (see unknown_starttag)
    svg_attr_map = None
    svg_elem_map = None
    acceptable_svg_properties = [ 'fill', 'fill-opacity', 'fill-rule',
      'stroke', 'stroke-width', 'stroke-linecap', 'stroke-linejoin',
      'stroke-opacity']
    def reset(self):
        _BaseHTMLProcessor.reset(self)
        # depth counters: >0 means we are currently inside unacceptable
        # content / a MathML subtree / an SVG subtree respectively
        self.unacceptablestack = 0
        self.mathmlOK = 0
        self.svgOK = 0
    def unknown_starttag(self, tag, attrs):
        """Filter a start tag: drop it, admit it, or admit it under the
        MathML/SVG vocabulary, then whitelist its attributes."""
        acceptable_attributes = self.acceptable_attributes
        keymap = {}
        if not tag in self.acceptable_elements or self.svgOK:
            if tag in self.unacceptable_elements_with_end_tag:
                self.unacceptablestack += 1
            # add implicit namespaces to html5 inline svg/mathml
            if self._type.endswith('html'):
                if not dict(attrs).get('xmlns'):
                    if tag=='svg':
                        attrs.append( ('xmlns','http://www.w3.org/2000/svg') )
                    if tag=='math':
                        attrs.append( ('xmlns','http://www.w3.org/1998/Math/MathML') )
            # not otherwise acceptable, perhaps it is MathML or SVG?
            if tag=='math' and ('xmlns','http://www.w3.org/1998/Math/MathML') in attrs:
                self.mathmlOK += 1
            if tag=='svg' and ('xmlns','http://www.w3.org/2000/svg') in attrs:
                self.svgOK += 1
            # chose acceptable attributes based on tag class, else bail
            if self.mathmlOK and tag in self.mathml_elements:
                acceptable_attributes = self.mathml_attributes
            elif self.svgOK and tag in self.svg_elements:
                # for most vocabularies, lowercasing is a good idea. Many
                # svg elements, however, are camel case
                if not self.svg_attr_map:
                    # build the lowercase<->camelCase maps once, lazily
                    lower=[attr.lower() for attr in self.svg_attributes]
                    mix=[a for a in self.svg_attributes if a not in lower]
                    self.svg_attributes = lower
                    self.svg_attr_map = dict([(a.lower(),a) for a in mix])
                    lower=[attr.lower() for attr in self.svg_elements]
                    mix=[a for a in self.svg_elements if a not in lower]
                    self.svg_elements = lower
                    self.svg_elem_map = dict([(a.lower(),a) for a in mix])
                acceptable_attributes = self.svg_attributes
                tag = self.svg_elem_map.get(tag,tag)
                keymap = self.svg_attr_map
            elif not tag in self.acceptable_elements:
                # not acceptable HTML and not admitted MathML/SVG: drop it
                return
        # declare xlink namespace, if needed
        if self.mathmlOK or self.svgOK:
            # NOTE: Python 2-only tuple-unpacking lambda below
            if filter(lambda (n,v): n.startswith('xlink:'),attrs):
                if not ('xmlns:xlink','http://www.w3.org/1999/xlink') in attrs:
                    attrs.append(('xmlns:xlink','http://www.w3.org/1999/xlink'))
        clean_attrs = []
        for key, value in self.normalize_attrs(attrs):
            if key in acceptable_attributes:
                key=keymap.get(key,key)
                # make sure the uri uses an acceptable uri scheme
                if key == u'href':
                    value = _makeSafeAbsoluteURI(value)
                clean_attrs.append((key,value))
            elif key=='style':
                clean_value = self.sanitize_style(value)
                if clean_value: clean_attrs.append((key,clean_value))
        _BaseHTMLProcessor.unknown_starttag(self, tag, clean_attrs)
    def unknown_endtag(self, tag):
        if not tag in self.acceptable_elements:
            if tag in self.unacceptable_elements_with_end_tag:
                self.unacceptablestack -= 1
            if self.mathmlOK and tag in self.mathml_elements:
                # leaving a math subtree
                if tag == 'math' and self.mathmlOK: self.mathmlOK -= 1
            elif self.svgOK and tag in self.svg_elements:
                tag = self.svg_elem_map.get(tag,tag)
                # leaving an svg subtree
                if tag == 'svg' and self.svgOK: self.svgOK -= 1
            else:
                return
        _BaseHTMLProcessor.unknown_endtag(self, tag)
    def handle_pi(self, text):
        # processing instructions are dropped entirely
        pass
    def handle_decl(self, text):
        # declarations (DOCTYPE etc.) are dropped entirely
        pass
    def handle_data(self, text):
        # suppress text inside script/applet/style
        if not self.unacceptablestack:
            _BaseHTMLProcessor.handle_data(self, text)
    def sanitize_style(self, style):
        """Return a cleaned CSS declaration string containing only
        whitelisted properties/keywords, or '' if the style is unsafe."""
        # disallow urls
        style=re.compile('url\s*\(\s*[^\s)]+?\s*\)\s*').sub(' ',style)
        # gauntlet
        if not re.match("""^([:,;#%.\sa-zA-Z0-9!]|\w-\w|'[\s\w]+'|"[\s\w]+"|\([\d,\s]+\))*$""", style): return ''
        # This replaced a regexp that used re.match and was prone to pathological back-tracking.
        if re.sub("\s*[-\w]+\s*:\s*[^:;]*;?", '', style).strip(): return ''
        clean = []
        for prop,value in re.findall("([-\w]+)\s*:\s*([^:;]*)",style):
            if not value: continue
            if prop.lower() in self.acceptable_css_properties:
                clean.append(prop + ': ' + value + ';')
            elif prop.split('-')[0].lower() in ['background','border','margin','padding']:
                # shorthand properties: admit only if every keyword is safe
                for keyword in value.split():
                    if not keyword in self.acceptable_css_keywords and \
                       not self.valid_css_values.match(keyword):
                        break
                else:
                    clean.append(prop + ': ' + value + ';')
            elif self.svgOK and prop.lower() in self.acceptable_svg_properties:
                clean.append(prop + ': ' + value + ';')
        return ' '.join(clean)
    def parse_comment(self, i, report=1):
        ret = _BaseHTMLProcessor.parse_comment(self, i, report)
        if ret >= 0:
            return ret
        # if ret == -1, this may be a malicious attempt to circumvent
        # sanitization, or a page-destroying unclosed comment
        match = re.compile(r'--[^>]*>').search(self.rawdata, i+4)
        if match:
            return match.end()
        # unclosed comment; deliberately fail to handle_data()
        return len(self.rawdata)
def _sanitizeHTML(htmlSource, encoding, _type):
    """Sanitize an HTML fragment, stripping unacceptable elements,
    attributes, and styles.

    htmlSource -- markup to clean (str or unicode)
    encoding   -- character encoding hint passed to the sanitizer
    _type      -- content type string; '...html' enables HTML5 handling

    Returns the cleaned markup; if TIDY_MARKUP is set and a Tidy binding
    is installed, the result is additionally run through Tidy and reduced
    to its <body> content.
    """
    p = _HTMLSanitizer(encoding, _type)
    # Neutralize CDATA section openers so their contents are escaped as
    # character data rather than parsed as markup.  (The previous code
    # replaced the string with itself, which was a no-op.)
    htmlSource = htmlSource.replace('<![CDATA[', '&lt;![CDATA[')
    p.feed(htmlSource)
    data = p.output()
    if TIDY_MARKUP:
        # loop through list of preferred Tidy interfaces looking for one that's installed,
        # then set up a common _tidy function to wrap the interface-specific API.
        _tidy = None
        for tidy_interface in PREFERRED_TIDY_INTERFACES:
            try:
                if tidy_interface == "uTidy":
                    from tidy import parseString as _utidy
                    def _tidy(data, **kwargs):
                        return str(_utidy(data, **kwargs))
                    break
                elif tidy_interface == "mxTidy":
                    from mx.Tidy import Tidy as _mxtidy
                    def _tidy(data, **kwargs):
                        nerrors, nwarnings, data, errordata = _mxtidy.tidy(data, **kwargs)
                        return data
                    break
            except:
                # binding not installed; try the next preferred interface
                pass
        if _tidy:
            # Tidy operates on byte strings; remember whether to decode back.
            utf8 = type(data) == type(u'')
            if utf8:
                data = data.encode('utf-8')
            data = _tidy(data, output_xhtml=1, numeric_entities=1, wrap=0, char_encoding="utf8")
            if utf8:
                data = unicode(data, 'utf-8')
            # keep only the content between <body...> and </body>
            if data.count('<body'):
                data = data.split('<body', 1)[1]
                if data.count('>'):
                    data = data.split('>', 1)[1]
            if data.count('</body'):
                data = data.split('</body', 1)[0]
    data = data.strip().replace('\r\n', '\n')
    return data
class _FeedURLHandler(urllib2.HTTPDigestAuthHandler, urllib2.HTTPRedirectHandler, urllib2.HTTPDefaultErrorHandler):
    """urllib2 handler used by _open_resource.

    Instead of raising on HTTP errors it returns the response object with
    a ``.status`` attribute attached, follows redirects, and retries a
    failed basic-auth request with digest auth when the server demands it.
    """
    def http_error_default(self, req, fp, code, msg, headers):
        # Route any 3xx status (except 304 Not Modified) through the
        # redirect machinery; wrap everything else so the caller can still
        # read the body and inspect .status rather than catching an error.
        if ((code / 100) == 3) and (code != 304):
            return self.http_error_302(req, fp, code, msg, headers)
        infourl = urllib.addinfourl(fp, headers, req.get_full_url())
        infourl.status = code
        return infourl
    def http_error_302(self, req, fp, code, msg, headers):
        # Only delegate to urllib2's redirect handling when a Location
        # header is actually present; otherwise return the response as-is.
        if headers.dict.has_key('location'):
            infourl = urllib2.HTTPRedirectHandler.http_error_302(self, req, fp, code, msg, headers)
        else:
            infourl = urllib.addinfourl(fp, headers, req.get_full_url())
        # preserve the first status seen in a redirect chain
        if not hasattr(infourl, 'status'):
            infourl.status = code
        return infourl
    def http_error_301(self, req, fp, code, msg, headers):
        # Same strategy as http_error_302, for permanent redirects.
        if headers.dict.has_key('location'):
            infourl = urllib2.HTTPRedirectHandler.http_error_301(self, req, fp, code, msg, headers)
        else:
            infourl = urllib.addinfourl(fp, headers, req.get_full_url())
        if not hasattr(infourl, 'status'):
            infourl.status = code
        return infourl
    http_error_300 = http_error_302
    http_error_303 = http_error_302
    http_error_307 = http_error_302
    def http_error_401(self, req, fp, code, msg, headers):
        # Check if
        # - server requires digest auth, AND
        # - we tried (unsuccessfully) with basic auth, AND
        # - we're using Python 2.3.3 or later (digest auth is irreparably broken in earlier versions)
        # If all conditions hold, parse authentication information
        # out of the Authorization header we sent the first time
        # (for the username and password) and the WWW-Authenticate
        # header the server sent back (for the realm) and retry
        # the request with the appropriate digest auth headers instead.
        # This evil genius hack has been brought to you by Aaron Swartz.
        host = urlparse.urlparse(req.get_full_url())[1]
        try:
            assert sys.version.split()[0] >= '2.3.3'
            assert base64 != None
            user, passw = _base64decode(req.headers['Authorization'].split(' ')[1]).split(':')
            realm = re.findall('realm="([^"]*)"', headers['WWW-Authenticate'])[0]
            self.add_password(realm, host, user, passw)
            retry = self.http_error_auth_reqed('www-authenticate', host, req, headers)
            self.reset_retry_count()
            return retry
        except:
            # Missing/garbled credentials or an old Python: fall back to
            # the permissive default handler (returns the 401 response).
            return self.http_error_default(req, fp, code, msg, headers)
def _open_resource(url_file_stream_or_string, etag, modified, agent, referrer, handlers, request_headers):
    """URL, filename, or string --> stream

    This function lets you define parsers that take any input source
    (URL, pathname to local or network file, or actual data as a string)
    and deal with it in a uniform manner.  Returned object is guaranteed
    to have all the basic stdio read methods (read, readline, readlines).
    Just .close() the object when you're done with it.

    If the etag argument is supplied, it will be used as the value of an
    If-None-Match request header.

    If the modified argument is supplied, it can be a tuple of 9 integers
    (as returned by gmtime() in the standard Python time module) or a date
    string in any format supported by feedparser.  Regardless, it MUST
    be in GMT (Greenwich Mean Time).  It will be reformatted into an
    RFC 1123-compliant date and used as the value of an If-Modified-Since
    request header.

    If the agent argument is supplied, it will be used as the value of a
    User-Agent request header.

    If the referrer argument is supplied, it will be used as the value of a
    Referer[sic] request header.

    If handlers is supplied, it is a list of handlers used to build a
    urllib2 opener.

    If request_headers is supplied it is a dictionary of HTTP request headers
    that will override the values generated by FeedParser.
    """
    # already a file-like object: hand it straight back
    if hasattr(url_file_stream_or_string, 'read'):
        return url_file_stream_or_string
    if url_file_stream_or_string == '-':
        return sys.stdin
    if urlparse.urlparse(url_file_stream_or_string)[0] in ('http', 'https', 'ftp', 'file', 'feed'):
        # Deal with the feed URI scheme
        if url_file_stream_or_string.startswith('feed:http'):
            url_file_stream_or_string = url_file_stream_or_string[5:]
        elif url_file_stream_or_string.startswith('feed:'):
            url_file_stream_or_string = 'http:' + url_file_stream_or_string[5:]
        if not agent:
            agent = USER_AGENT
        # test for inline user:password for basic auth
        auth = None
        if base64:
            urltype, rest = urllib.splittype(url_file_stream_or_string)
            realhost, rest = urllib.splithost(rest)
            if realhost:
                user_passwd, realhost = urllib.splituser(realhost)
                if user_passwd:
                    # strip the credentials out of the URL and carry them
                    # as a pre-encoded Basic Authorization value instead
                    url_file_stream_or_string = '%s://%s%s' % (urltype, realhost, rest)
                    auth = base64.standard_b64encode(user_passwd).strip()
        # iri support: IDNA-encode the host portion of international URLs;
        # best-effort only, failures fall through with the original string
        try:
            if isinstance(url_file_stream_or_string,unicode):
                url_file_stream_or_string = url_file_stream_or_string.encode('idna').decode('utf-8')
            else:
                url_file_stream_or_string = url_file_stream_or_string.decode('utf-8').encode('idna').decode('utf-8')
        except:
            pass
        # try to open with urllib2 (to use optional headers)
        request = _build_urllib2_request(url_file_stream_or_string, agent, etag, modified, referrer, auth, request_headers)
        opener = apply(urllib2.build_opener, tuple(handlers + [_FeedURLHandler()]))
        opener.addheaders = [] # RMK - must clear so we only send our custom User-Agent
        try:
            return opener.open(request)
        finally:
            opener.close() # JohnD
    # try to open with native open function (if url_file_stream_or_string is a filename)
    try:
        return open(url_file_stream_or_string, 'rb')
    except:
        pass
    # treat url_file_stream_or_string as string
    return _StringIO(str(url_file_stream_or_string))
def _build_urllib2_request(url, agent, etag, modified, referrer, auth, request_headers):
    """Build a urllib2.Request for *url* with conditional-GET headers
    (If-None-Match / If-Modified-Since), User-Agent, Referer, compression,
    optional Basic auth, and caller-supplied request_headers applied.
    """
    request = urllib2.Request(url)
    request.add_header('User-Agent', agent)
    if etag:
        request.add_header('If-None-Match', etag)
    if type(modified) == type(''):
        modified = _parse_date(modified)
    elif isinstance(modified, datetime.datetime):
        modified = modified.utctimetuple()
    if modified:
        # format into an RFC 1123-compliant timestamp. We can't use
        # time.strftime() since the %a and %b directives can be affected
        # by the current locale, but RFC 2616 states that dates must be
        # in English.
        short_weekdays = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
        months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
        request.add_header('If-Modified-Since', '%s, %02d %s %04d %02d:%02d:%02d GMT' % (short_weekdays[modified[6]], modified[2], months[modified[1] - 1], modified[0], modified[3], modified[4], modified[5]))
    if referrer:
        request.add_header('Referer', referrer)
    # advertise whatever decompression support is actually importable
    if gzip and zlib:
        request.add_header('Accept-encoding', 'gzip, deflate')
    elif gzip:
        request.add_header('Accept-encoding', 'gzip')
    elif zlib:
        request.add_header('Accept-encoding', 'deflate')
    else:
        request.add_header('Accept-encoding', '')
    if auth:
        # auth is a pre-computed base64-encoded "user:password" string
        request.add_header('Authorization', 'Basic %s' % auth)
    if ACCEPT_HEADER:
        request.add_header('Accept', ACCEPT_HEADER)
    # use this for whatever -- cookies, special headers, etc
    # [('Cookie','Something'),('x-special-header','Another Value')]
    for header_name, header_value in request_headers.items():
        request.add_header(header_name, header_value)
    request.add_header('A-IM', 'feed') # RFC 3229 support
    return request
# Registry of date-parsing callbacks, consulted in order by _parse_date().
_date_handlers = []
def registerDateHandler(func):
    '''Register a date handler function (takes string, returns 9-tuple date in GMT)'''
    # insert(0, ...) so the most recently registered handler is tried first
    _date_handlers.insert(0, func)
# ISO-8601 date parsing routines written by Fazal Majid.
# The ISO 8601 standard is very convoluted and irregular - a full ISO 8601
# parser is beyond the scope of feedparser and would be a worthwhile addition
# to the Python library.
# A single regular expression cannot parse ISO 8601 date formats into groups
# as the standard is highly irregular (for instance is 030104 2003-01-04 or
# 0301-04-01), so we use templates instead.
# Please note the order in templates is significant because we need a
# greedy match.
# Templates for the ISO 8601 date variants we accept; each placeholder
# (YYYY, MM, DD, OOO=ordinal day, CC=century) is expanded into a named
# regex group below.  Order matters: longer templates must come first so
# the greedy match wins.
_iso8601_tmpl = ['YYYY-?MM-?DD', 'YYYY-0MM?-?DD', 'YYYY-MM', 'YYYY-?OOO',
                'YY-?MM-?DD', 'YY-?OOO', 'YYYY',
                '-YY-?MM', '-OOO', '-YY',
                '--MM-?DD', '--MM',
                '---DD',
                'CC', '']
# Expand each template into a full regex with an optional time/zone suffix.
_iso8601_re = [
    tmpl.replace(
    'YYYY', r'(?P<year>\d{4})').replace(
    'YY', r'(?P<year>\d\d)').replace(
    'MM', r'(?P<month>[01]\d)').replace(
    'DD', r'(?P<day>[0123]\d)').replace(
    'OOO', r'(?P<ordinal>[0123]\d\d)').replace(
    'CC', r'(?P<century>\d\d$)')
    + r'(T?(?P<hour>\d{2}):(?P<minute>\d{2})'
    + r'(:(?P<second>\d{2}))?'
    + r'(\.(?P<fracsecond>\d+))?'
    + r'(?P<tz>[+-](?P<tzhour>\d{2})(:(?P<tzmin>\d{2}))?|Z)?)?'
    for tmpl in _iso8601_tmpl]
# Python 2 list comprehensions leak their loop variable into the enclosing
# scope; clean it up (guarded, since Python 3 would not define it).
try:
    del tmpl
except NameError:
    pass
# Pre-bind the compiled .match methods for speed in _parse_date_iso8601.
_iso8601_matches = [re.compile(regex).match for regex in _iso8601_re]
try:
    del regex
except NameError:
    pass
def _parse_date_iso8601(dateString):
    '''Parse a variety of ISO-8601-compatible formats like 20040105'''
    # Try each precompiled template matcher in order; first hit wins.
    m = None
    for _iso8601_match in _iso8601_matches:
        m = _iso8601_match(dateString)
        if m: break
    if not m: return
    # the empty template ('') matches anything with a zero-width span;
    # treat that as "no match"
    if m.span() == (0, 0): return
    params = m.groupdict()
    ordinal = params.get('ordinal', 0)
    if ordinal:
        ordinal = int(ordinal)
    else:
        ordinal = 0
    # Missing year defaults to the current year; two-digit years are
    # placed in the current century.
    year = params.get('year', '--')
    if not year or year == '--':
        year = time.gmtime()[0]
    elif len(year) == 2:
        # ISO 8601 assumes current century, i.e. 93 -> 2093, NOT 1993
        year = 100 * int(time.gmtime()[0] / 100) + int(year)
    else:
        year = int(year)
    month = params.get('month', '-')
    if not month or month == '-':
        # ordinals are NOT normalized by mktime, we simulate them
        # by setting month=1, day=ordinal
        if ordinal:
            month = 1
        else:
            month = time.gmtime()[1]
    month = int(month)
    day = params.get('day', 0)
    if not day:
        # see above
        if ordinal:
            day = ordinal
        elif params.get('century', 0) or \
                 params.get('year', 0) or params.get('month', 0):
            day = 1
        else:
            day = time.gmtime()[2]
    else:
        day = int(day)
    # special case of the century - is the first year of the 21st century
    # 2000 or 2001 ? The debate goes on...
    if 'century' in params.keys():
        year = (int(params['century']) - 1) * 100 + 1
    # in ISO 8601 most fields are optional
    for field in ['hour', 'minute', 'second', 'tzhour', 'tzmin']:
        if not params.get(field, None):
            params[field] = 0
    hour = int(params.get('hour', 0))
    minute = int(params.get('minute', 0))
    # fractional seconds are truncated, not rounded
    second = int(float(params.get('second', 0)))
    # weekday is normalized by mktime(), we can ignore it
    weekday = 0
    daylight_savings_flag = -1
    # NOTE: ordinal is stored in the tm_yday slot; mktime() ignores it.
    tm = [year, month, day, hour, minute, second, weekday,
          ordinal, daylight_savings_flag]
    # ISO 8601 time zone adjustments: shift hours/minutes toward UTC
    # (a '+hh:mm' offset means the local time is AHEAD of UTC, so subtract)
    tz = params.get('tz')
    if tz and tz != 'Z':
        if tz[0] == '-':
            tm[3] += int(params.get('tzhour', 0))
            tm[4] += int(params.get('tzmin', 0))
        elif tz[0] == '+':
            tm[3] -= int(params.get('tzhour', 0))
            tm[4] -= int(params.get('tzmin', 0))
        else:
            return None
    # Python's time.mktime() is a wrapper around the ANSI C mktime(3c)
    # which is guaranteed to normalize d/m/y/h/m/s.
    # Many implementations have bugs, but we'll pretend they don't.
    return time.localtime(time.mktime(tuple(tm)))
registerDateHandler(_parse_date_iso8601)
# 8-bit date handling routines written by ytrewq1.
# Korean tokens (year/month/day markers, AM/PM) and the date regexes used
# by the OnBlog and Nate handlers below.
_korean_year  = u'\ub144' # b3e2 in euc-kr
_korean_month = u'\uc6d4' # bff9 in euc-kr
_korean_day   = u'\uc77c' # c0cf in euc-kr
_korean_am    = u'\uc624\uc804' # bfc0 c0fc in euc-kr
_korean_pm    = u'\uc624\ud6c4' # bfc0 c8c4 in euc-kr

# "YYYY[year] MM[month] DD[day] hh:mm:ss" as emitted by OnBlog
_korean_onblog_date_re = \
    re.compile('(\d{4})%s\s+(\d{2})%s\s+(\d{2})%s\s+(\d{2}):(\d{2}):(\d{2})' % \
               (_korean_year, _korean_month, _korean_day))
# "YYYY-MM-DD [AM|PM] h:m:s" (12-hour clock) as emitted by Nate
_korean_nate_date_re = \
    re.compile(u'(\d{4})-(\d{2})-(\d{2})\s+(%s|%s)\s+(\d{,2}):(\d{,2}):(\d{,2})' % \
               (_korean_am, _korean_pm))
def _parse_date_onblog(dateString):
    '''Parse a string according to the OnBlog 8-bit date format'''
    match = _korean_onblog_date_re.match(dateString)
    if not match:
        return
    # Rebuild the captured pieces as a W3DTF string with the fixed
    # Korean Standard Time offset (+09:00) and delegate to that parser.
    fields = dict(zip(('year', 'month', 'day', 'hour', 'minute', 'second'),
                      match.groups()))
    fields['zonediff'] = '+09:00'
    w3dtfdate = '%(year)s-%(month)s-%(day)sT%(hour)s:%(minute)s:%(second)s%(zonediff)s' % fields
    if _debug: sys.stderr.write('OnBlog date parsed as: %s\n' % w3dtfdate)
    return _parse_date_w3dtf(w3dtfdate)
registerDateHandler(_parse_date_onblog)
def _parse_date_nate(dateString):
    '''Parse a string according to the Nate 8-bit date format'''
    match = _korean_nate_date_re.match(dateString)
    if not match:
        return
    # Group 4 is the AM/PM marker; convert the 12-hour clock to 24-hour
    # and zero-pad the hour.
    hour = int(match.group(5))
    if match.group(4) == _korean_pm:
        hour += 12
    hour = str(hour)
    if len(hour) == 1:
        hour = '0' + hour
    w3dtfdate = '%(year)s-%(month)s-%(day)sT%(hour)s:%(minute)s:%(second)s%(zonediff)s' % \
                {'year': match.group(1), 'month': match.group(2), 'day': match.group(3),
                 'hour': hour, 'minute': match.group(6), 'second': match.group(7),
                 'zonediff': '+09:00'}
    if _debug: sys.stderr.write('Nate date parsed as: %s\n' % w3dtfdate)
    return _parse_date_w3dtf(w3dtfdate)
registerDateHandler(_parse_date_nate)
# "YYYY-MM-DD hh:mm:ss[.fff]" as emitted by MS SQL Server
_mssql_date_re = \
    re.compile('(\d{4})-(\d{2})-(\d{2})\s+(\d{2}):(\d{2}):(\d{2})(\.\d+)?')
def _parse_date_mssql(dateString):
    '''Parse a string according to the MS SQL date format'''
    match = _mssql_date_re.match(dateString)
    if not match:
        return
    # Discard the optional fractional-seconds group and rebuild the rest
    # as a W3DTF string (with the same +09:00 offset the sibling Korean
    # handlers use), then delegate.
    fields = dict(zip(('year', 'month', 'day', 'hour', 'minute', 'second'),
                      match.groups()))
    fields['zonediff'] = '+09:00'
    w3dtfdate = '%(year)s-%(month)s-%(day)sT%(hour)s:%(minute)s:%(second)s%(zonediff)s' % fields
    if _debug: sys.stderr.write('MS SQL date parsed as: %s\n' % w3dtfdate)
    return _parse_date_w3dtf(w3dtfdate)
registerDateHandler(_parse_date_mssql)
# Unicode strings for Greek date strings
# Greek month-abbreviation -> English month-abbreviation lookup, used to
# rewrite Greek dates into RFC 822 form.  Multiple spellings per month
# (accented/unaccented) are deliberate.
_greek_months = \
  { \
   u'\u0399\u03b1\u03bd': u'Jan',       # c9e1ed in iso-8859-7
   u'\u03a6\u03b5\u03b2': u'Feb',       # d6e5e2 in iso-8859-7
   u'\u039c\u03ac\u03ce': u'Mar',       # ccdcfe in iso-8859-7
   u'\u039c\u03b1\u03ce': u'Mar',       # cce1fe in iso-8859-7
   u'\u0391\u03c0\u03c1': u'Apr',       # c1f0f1 in iso-8859-7
   u'\u039c\u03ac\u03b9': u'May',       # ccdce9 in iso-8859-7
   u'\u039c\u03b1\u03ca': u'May',       # cce1fa in iso-8859-7
   u'\u039c\u03b1\u03b9': u'May',       # cce1e9 in iso-8859-7
   u'\u0399\u03bf\u03cd\u03bd': u'Jun', # c9effded in iso-8859-7
   u'\u0399\u03bf\u03bd': u'Jun',       # c9efed in iso-8859-7
   u'\u0399\u03bf\u03cd\u03bb': u'Jul', # c9effdeb in iso-8859-7
   u'\u0399\u03bf\u03bb': u'Jul',       # c9f9eb in iso-8859-7
   u'\u0391\u03cd\u03b3': u'Aug',       # c1fde3 in iso-8859-7
   u'\u0391\u03c5\u03b3': u'Aug',       # c1f5e3 in iso-8859-7
   u'\u03a3\u03b5\u03c0': u'Sep',       # d3e5f0 in iso-8859-7
   u'\u039f\u03ba\u03c4': u'Oct',       # cfeaf4 in iso-8859-7
   u'\u039d\u03bf\u03ad': u'Nov',       # cdefdd in iso-8859-7
   u'\u039d\u03bf\u03b5': u'Nov',       # cdefe5 in iso-8859-7
   u'\u0394\u03b5\u03ba': u'Dec',       # c4e5ea in iso-8859-7
  }

# Greek weekday abbreviation -> English weekday abbreviation.
_greek_wdays = \
  { \
   u'\u039a\u03c5\u03c1': u'Sun', # caf5f1 in iso-8859-7
   u'\u0394\u03b5\u03c5': u'Mon', # c4e5f5 in iso-8859-7
   u'\u03a4\u03c1\u03b9': u'Tue', # d4f1e9 in iso-8859-7
   u'\u03a4\u03b5\u03c4': u'Wed', # d4e5f4 in iso-8859-7
   u'\u03a0\u03b5\u03bc': u'Thu', # d0e5ec in iso-8859-7
   u'\u03a0\u03b1\u03c1': u'Fri', # d0e1f1 in iso-8859-7
   u'\u03a3\u03b1\u03b2': u'Sat', # d3e1e2 in iso-8859-7
  }

# "Wday, DD Month YYYY hh:mm:ss Zone" with Greek weekday/month tokens
_greek_date_format_re = \
    re.compile(u'([^,]+),\s+(\d{2})\s+([^\s]+)\s+(\d{4})\s+(\d{2}):(\d{2}):(\d{2})\s+([^\s]+)')
def _parse_date_greek(dateString):
    '''Parse a string according to a Greek 8-bit date format.'''
    match = _greek_date_format_re.match(dateString)
    if not match:
        return
    try:
        # Translate the Greek weekday and month abbreviations to English.
        wday = _greek_wdays[match.group(1)]
        month = _greek_months[match.group(3)]
    except:
        # unrecognized weekday/month token: not a Greek date after all
        return
    rfc822date = '%(wday)s, %(day)s %(month)s %(year)s %(hour)s:%(minute)s:%(second)s %(zonediff)s' % \
                 {'wday': wday, 'day': match.group(2), 'month': month, 'year': match.group(4),
                  'hour': match.group(5), 'minute': match.group(6), 'second': match.group(7),
                  'zonediff': match.group(8)}
    if _debug: sys.stderr.write('Greek date parsed as: %s\n' % rfc822date)
    return _parse_date_rfc822(rfc822date)
registerDateHandler(_parse_date_greek)
# Unicode strings for Hungarian date strings
# Maps Hungarian month names to two-digit month numbers.  The table
# historically contained two misspellings ('februári' instead of
# 'február', and the non-word 'máujus' instead of 'május'); the correctly
# spelled forms are added while the old keys are kept for compatibility.
_hungarian_months = \
  { \
    u'janu\u00e1r':   u'01', # e1 in iso-8859-2
    u'febru\u00e1r':  u'02', # e1 in iso-8859-2 (correct spelling)
    u'febru\u00e1ri': u'02', # e1 in iso-8859-2 (misspelling, kept for compatibility)
    u'm\u00e1rcius':  u'03', # e1 in iso-8859-2
    u'\u00e1prilis':  u'04', # e1 in iso-8859-2
    u'm\u00e1jus':    u'05', # e1 in iso-8859-2 (correct spelling)
    u'm\u00e1ujus':   u'05', # e1 in iso-8859-2 (misspelling, kept for compatibility)
    u'j\u00fanius':   u'06', # fa in iso-8859-2
    u'j\u00falius':   u'07', # fa in iso-8859-2
    u'augusztus':     u'08',
    u'szeptember':    u'09',
    u'okt\u00f3ber':  u'10', # f3 in iso-8859-2
    u'november':      u'11',
    u'december':      u'12',
  }
# "YYYY-monthname-D[D]Th[h]:mm(+|-)hh:mm" with a spelled-out month name
_hungarian_date_format_re = \
    re.compile(u'(\d{4})-([^-]+)-(\d{,2})T(\d{,2}):(\d{2})((\+|-)(\d{,2}:\d{2}))')
def _parse_date_hungarian(dateString):
    '''Parse a string according to a Hungarian 8-bit date format.'''
    match = _hungarian_date_format_re.match(dateString)
    if not match:
        return
    try:
        # Translate the month name; zero-pad one-digit day/hour fields.
        month = _hungarian_months[match.group(2)]
        day = match.group(3)
        if len(day) == 1:
            day = '0' + day
        hour = match.group(4)
        if len(hour) == 1:
            hour = '0' + hour
    except:
        # unrecognized month name: not a Hungarian date after all
        return
    w3dtfdate = '%(year)s-%(month)s-%(day)sT%(hour)s:%(minute)s%(zonediff)s' % \
                {'year': match.group(1), 'month': month, 'day': day,
                 'hour': hour, 'minute': match.group(5),
                 'zonediff': match.group(6)}
    if _debug: sys.stderr.write('Hungarian date parsed as: %s\n' % w3dtfdate)
    return _parse_date_w3dtf(w3dtfdate)
registerDateHandler(_parse_date_hungarian)
# W3DTF-style date parsing adapted from PyXML xml.utils.iso8601, written by
# Drake and licensed under the Python license. Removed all range checking
# for month, day, hour, minute, and second, since mktime will normalize
# these later
def _parse_date_w3dtf(dateString):
    # Parse a W3C Date-Time Format string (a profile of ISO 8601) into a
    # 9-tuple in GMT.  Range checking of month/day/hour/minute/second is
    # deliberately omitted; mktime() normalizes them later.
    def __extract_date(m):
        # Returns (year, month, day); (0, 0, 0) signals an unusable year.
        year = int(m.group('year'))
        if year < 100:
            # two-digit year: assume the current century
            year = 100 * int(time.gmtime()[0] / 100) + int(year)
        if year < 1000:
            return 0, 0, 0
        julian = m.group('julian')
        if julian:
            # Day-of-year form: start from a rough month/day estimate and
            # iteratively adjust until a mktime/gmtime round trip yields
            # the requested ordinal day.
            julian = int(julian)
            month = julian / 30 + 1
            day = julian % 30 + 1
            jday = None
            while jday != julian:
                t = time.mktime((year, month, day, 0, 0, 0, 0, 0, 0))
                jday = time.gmtime(t)[-2]
                diff = abs(jday - julian)
                if jday > julian:
                    if diff < day:
                        day = day - diff
                    else:
                        month = month - 1
                        day = 31
                elif jday < julian:
                    if day + diff < 28:
                        day = day + diff
                    else:
                        month = month + 1
            return year, month, day
        month = m.group('month')
        day = 1
        if month is None:
            month = 1
        else:
            month = int(month)
            day = m.group('day')
            if day:
                day = int(day)
            else:
                day = 1
        return year, month, day
    def __extract_time(m):
        # Returns (hours, minutes, seconds); all zero if no time was given.
        if not m:
            return 0, 0, 0
        hours = m.group('hours')
        if not hours:
            return 0, 0, 0
        hours = int(hours)
        minutes = int(m.group('minutes'))
        seconds = m.group('seconds')
        if seconds:
            seconds = int(seconds)
        else:
            seconds = 0
        return hours, minutes, seconds
    def __extract_tzd(m):
        '''Return the Time Zone Designator as an offset in seconds from UTC.'''
        if not m:
            return 0
        tzd = m.group('tzd')
        if not tzd:
            return 0
        if tzd == 'Z':
            return 0
        hours = int(m.group('tzdhours'))
        minutes = m.group('tzdminutes')
        if minutes:
            minutes = int(minutes)
        else:
            minutes = 0
        offset = (hours*60 + minutes) * 60
        # '+hh:mm' means local time is ahead of UTC, so the correction
        # applied to mktime's result is negative
        if tzd[0] == '+':
            return -offset
        return offset
    # date: YYYY[-MM[-DD]] or YYYY[-]DDD (julian/ordinal), separator
    # optional but consistent via the (?P=dsep) backreference
    __date_re = ('(?P<year>\d\d\d\d)'
                 '(?:(?P<dsep>-|)'
                 '(?:(?P<month>\d\d)(?:(?P=dsep)(?P<day>\d\d))?'
                 '|(?P<julian>\d\d\d)))?')
    __tzd_re = '(?P<tzd>[-+](?P<tzdhours>\d\d)(?::?(?P<tzdminutes>\d\d))|Z)'
    __tzd_rx = re.compile(__tzd_re)
    __time_re = ('(?P<hours>\d\d)(?P<tsep>:|)(?P<minutes>\d\d)'
                 '(?:(?P=tsep)(?P<seconds>\d\d)(?:[.,]\d+)?)?'
                 + __tzd_re)
    __datetime_re = '%s(?:T%s)?' % (__date_re, __time_re)
    __datetime_rx = re.compile(__datetime_re)
    m = __datetime_rx.match(dateString)
    # require the regex to consume the entire string, not just a prefix
    if (m is None) or (m.group() != dateString): return
    gmt = __extract_date(m) + __extract_time(m) + (0, 0, 0)
    if gmt[0] == 0: return
    # interpret the tuple as local time, then undo the local offset and
    # apply the parsed TZD to land on the correct GMT instant
    return time.gmtime(time.mktime(gmt) + __extract_tzd(m) - time.timezone)
registerDateHandler(_parse_date_w3dtf)
def _parse_date_rfc822(dateString):
    '''Parse an RFC822, RFC1123, RFC2822, or asctime-style date'''
    parts = dateString.split()
    # Drop a leading weekday token, whether or not it carries ',' or '.'.
    if parts[0][-1] in (',', '.') or parts[0].lower() in rfc822._daynames:
        del parts[0]
    if len(parts) == 4:
        # "DD Mon YYYY hh:mm:ss[+zone]": split a '+zone' suffix off the
        # time field, or pad with an empty zone token.
        last = parts[3]
        plus = last.find('+')
        if plus > 0:
            parts[3:] = [last[:plus], last[plus+1:]]
        else:
            parts.append('')
        dateString = " ".join(parts)
        # Account for the Etc/GMT timezone by stripping 'Etc/'
    elif len(parts) == 5 and parts[4].lower().startswith('etc/'):
        parts[4] = parts[4][4:]
        dateString = " ".join(parts)
    if len(parts) < 5:
        # date only, no time/zone: assume midnight GMT
        dateString += ' 00:00:00 GMT'
    tm = rfc822.parsedate_tz(dateString)
    if tm:
        return time.gmtime(rfc822.mktime_tz(tm))
# rfc822.py defines several time zones, but we define some extra ones.
# 'ET' is equivalent to 'EST', etc.
# Offsets use rfc822's hours*100 convention (e.g. -500 == UTC-5).
_additional_timezones = {'AT': -400, 'ET': -500, 'CT': -600, 'MT': -700, 'PT': -800}
rfc822._timezones.update(_additional_timezones)
registerDateHandler(_parse_date_rfc822)
def _parse_date_perforce(aDateString):
    """parse a date in yyyy/mm/dd hh:mm:ss TTT format"""
    # Example: Fri, 2006/09/15 08:19:53 EDT
    pattern = re.compile( \
        r'(\w{,3}), (\d{,4})/(\d{,2})/(\d{2}) (\d{,2}):(\d{2}):(\d{2}) (\w{,3})')
    dow, year, month, day, hour, minute, second, tz = \
        pattern.search(aDateString).groups()
    month_names = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
    # Reshape into an RFC 822 date string and reuse that parser.
    dateString = "%s, %s %s %s %s:%s:%s %s" % (dow, day, month_names[int(month) - 1], year, hour, minute, second, tz)
    tm = rfc822.parsedate_tz(dateString)
    if tm:
        return time.gmtime(rfc822.mktime_tz(tm))
registerDateHandler(_parse_date_perforce)
def _parse_date(dateString):
    '''Parses a variety of date formats into a 9-tuple in GMT'''
    # Handlers are tried most-recently-registered first; the first one
    # that returns a valid 9-tuple wins.  Any exception from a handler
    # simply moves on to the next one.
    for handler in _date_handlers:
        try:
            date9tuple = handler(dateString)
            if not date9tuple: continue
            if len(date9tuple) != 9:
                if _debug: sys.stderr.write('date handler function must return 9-tuple\n')
                raise ValueError
            # validation: every field must be int-coercible (Python 2's
            # eager map() raises here if not; the result is discarded)
            map(int, date9tuple)
            return date9tuple
        except Exception, e:
            if _debug: sys.stderr.write('%s raised %s\n' % (handler.__name__, repr(e)))
            pass
    return None
def _getCharacterEncoding(http_headers, xml_data):
    '''Get the character encoding of the XML document

    http_headers is a dictionary
    xml_data is a raw string (not Unicode)

    This is so much trickier than it sounds, it's not even funny.
    According to RFC 3023 ('XML Media Types'), if the HTTP Content-Type
    is application/xml, application/*+xml,
    application/xml-external-parsed-entity, or application/xml-dtd,
    the encoding given in the charset parameter of the HTTP Content-Type
    takes precedence over the encoding given in the XML prefix within the
    document, and defaults to 'utf-8' if neither are specified.  But, if
    the HTTP Content-Type is text/xml, text/*+xml, or
    text/xml-external-parsed-entity, the encoding given in the XML prefix
    within the document is ALWAYS IGNORED and only the encoding given in
    the charset parameter of the HTTP Content-Type header should be
    respected, and it defaults to 'us-ascii' if not specified.

    Furthermore, discussion on the atom-syntax mailing list with the
    author of RFC 3023 leads me to the conclusion that any document
    served with a Content-Type of text/* and no charset parameter
    must be treated as us-ascii.  (We now do this.)  And also that it
    must always be flagged as non-well-formed.  (We now do this too.)

    If Content-Type is unspecified (input was local file or non-HTTP source)
    or unrecognized (server just got it totally wrong), then go by the
    encoding given in the XML prefix of the document and default to
    'iso-8859-1' as per the HTTP specification (RFC 2616).

    Then, assuming we didn't find a character encoding in the HTTP headers
    (and the HTTP Content-type allowed us to look in the body), we need
    to sniff the first few bytes of the XML data and try to determine
    whether the encoding is ASCII-compatible.  Section F of the XML
    specification shows the way here:
    http://www.w3.org/TR/REC-xml/#sec-guessing-no-ext-info

    If the sniffed encoding is not ASCII-compatible, we need to make it
    ASCII compatible so that we can sniff further into the XML declaration
    to find the encoding attribute, which will tell us the true encoding.

    Of course, none of this guarantees that we will be able to parse the
    feed in the declared character encoding (assuming it was declared
    correctly, which many are not).  CJKCodecs and iconv_codec help a lot;
    you should definitely install them if you can.
    http://cjkpython.i18n.org/

    Returns (true_encoding, http_encoding, xml_encoding,
    sniffed_xml_encoding, acceptable_content_type).
    '''
    def _parseHTTPContentType(content_type):
        '''takes HTTP Content-Type header and returns (content type, charset)

        If no charset is specified, returns (content type, '')
        If no content type is specified, returns ('', '')
        Both return parameters are guaranteed to be lowercase strings
        '''
        content_type = content_type or ''
        content_type, params = cgi.parse_header(content_type)
        return content_type, params.get('charset', '').replace("'", '')
    sniffed_xml_encoding = ''
    xml_encoding = ''
    true_encoding = ''
    http_content_type, http_encoding = _parseHTTPContentType(http_headers.get('content-type', http_headers.get('Content-type')))
    # Must sniff for non-ASCII-compatible character encodings before
    # searching for XML declaration.  This heuristic is defined in
    # section F of the XML specification:
    # http://www.w3.org/TR/REC-xml/#sec-guessing-no-ext-info
    # Each branch below recognizes a BOM or declaration-start byte pattern
    # and transcodes the data to UTF-8 so the declaration can be read.
    try:
        if xml_data[:4] == _l2bytes([0x4c, 0x6f, 0xa7, 0x94]):
            # EBCDIC
            xml_data = _ebcdic_to_ascii(xml_data)
        elif xml_data[:4] == _l2bytes([0x00, 0x3c, 0x00, 0x3f]):
            # UTF-16BE
            sniffed_xml_encoding = 'utf-16be'
            xml_data = unicode(xml_data, 'utf-16be').encode('utf-8')
        elif (len(xml_data) >= 4) and (xml_data[:2] == _l2bytes([0xfe, 0xff])) and (xml_data[2:4] != _l2bytes([0x00, 0x00])):
            # UTF-16BE with BOM
            sniffed_xml_encoding = 'utf-16be'
            xml_data = unicode(xml_data[2:], 'utf-16be').encode('utf-8')
        elif xml_data[:4] == _l2bytes([0x3c, 0x00, 0x3f, 0x00]):
            # UTF-16LE
            sniffed_xml_encoding = 'utf-16le'
            xml_data = unicode(xml_data, 'utf-16le').encode('utf-8')
        elif (len(xml_data) >= 4) and (xml_data[:2] == _l2bytes([0xff, 0xfe])) and (xml_data[2:4] != _l2bytes([0x00, 0x00])):
            # UTF-16LE with BOM
            sniffed_xml_encoding = 'utf-16le'
            xml_data = unicode(xml_data[2:], 'utf-16le').encode('utf-8')
        elif xml_data[:4] == _l2bytes([0x00, 0x00, 0x00, 0x3c]):
            # UTF-32BE
            sniffed_xml_encoding = 'utf-32be'
            xml_data = unicode(xml_data, 'utf-32be').encode('utf-8')
        elif xml_data[:4] == _l2bytes([0x3c, 0x00, 0x00, 0x00]):
            # UTF-32LE
            sniffed_xml_encoding = 'utf-32le'
            xml_data = unicode(xml_data, 'utf-32le').encode('utf-8')
        elif xml_data[:4] == _l2bytes([0x00, 0x00, 0xfe, 0xff]):
            # UTF-32BE with BOM
            sniffed_xml_encoding = 'utf-32be'
            xml_data = unicode(xml_data[4:], 'utf-32be').encode('utf-8')
        elif xml_data[:4] == _l2bytes([0xff, 0xfe, 0x00, 0x00]):
            # UTF-32LE with BOM
            sniffed_xml_encoding = 'utf-32le'
            xml_data = unicode(xml_data[4:], 'utf-32le').encode('utf-8')
        elif xml_data[:3] == _l2bytes([0xef, 0xbb, 0xbf]):
            # UTF-8 with BOM
            sniffed_xml_encoding = 'utf-8'
            xml_data = unicode(xml_data[3:], 'utf-8').encode('utf-8')
        else:
            # ASCII-compatible
            pass
        xml_encoding_match = re.compile(_s2bytes('^<\?.*encoding=[\'"](.*?)[\'"].*\?>')).match(xml_data)
    except:
        xml_encoding_match = None
    if xml_encoding_match:
        xml_encoding = xml_encoding_match.groups()[0].decode('utf-8').lower()
        # a declared multi-byte encoding can't be trusted once we've
        # transcoded; prefer what the byte sniffing actually found
        if sniffed_xml_encoding and (xml_encoding in ('iso-10646-ucs-2', 'ucs-2', 'csunicode', 'iso-10646-ucs-4', 'ucs-4', 'csucs4', 'utf-16', 'utf-32', 'utf_16', 'utf_32', 'utf16', 'u16')):
            xml_encoding = sniffed_xml_encoding
    # Apply the RFC 3023 precedence rules described in the docstring.
    acceptable_content_type = 0
    application_content_types = ('application/xml', 'application/xml-dtd', 'application/xml-external-parsed-entity')
    text_content_types = ('text/xml', 'text/xml-external-parsed-entity')
    if (http_content_type in application_content_types) or \
       (http_content_type.startswith('application/') and http_content_type.endswith('+xml')):
        acceptable_content_type = 1
        true_encoding = http_encoding or xml_encoding or 'utf-8'
    elif (http_content_type in text_content_types) or \
         (http_content_type.startswith('text/')) and http_content_type.endswith('+xml'):
        acceptable_content_type = 1
        true_encoding = http_encoding or 'us-ascii'
    elif http_content_type.startswith('text/'):
        true_encoding = http_encoding or 'us-ascii'
    elif http_headers and (not (http_headers.has_key('content-type') or http_headers.has_key('Content-type'))):
        true_encoding = xml_encoding or 'iso-8859-1'
    else:
        true_encoding = xml_encoding or 'utf-8'
    # some feeds claim to be gb2312 but are actually gb18030.
    # apparently MSIE and Firefox both do the following switch:
    if true_encoding.lower() == 'gb2312':
        true_encoding = 'gb18030'
    return true_encoding, http_encoding, xml_encoding, sniffed_xml_encoding, acceptable_content_type
def _toUTF8(data, encoding):
    '''Changes an XML data stream on the fly to specify a new encoding

    data is a raw sequence of bytes (not Unicode) that is presumed to be in %encoding already
    encoding is a string recognized by encodings.aliases

    Returns the data re-encoded as UTF-8 bytes with the XML declaration
    rewritten (or prepended) to say encoding='utf-8'.
    '''
    if _debug: sys.stderr.write('entering _toUTF8, trying encoding %s\n' % encoding)
    # strip Byte Order Mark (if present); a BOM also overrides the
    # declared encoding, since the bytes don't lie
    if (len(data) >= 4) and (data[:2] == _l2bytes([0xfe, 0xff])) and (data[2:4] != _l2bytes([0x00, 0x00])):
        # UTF-16BE BOM
        if _debug:
            sys.stderr.write('stripping BOM\n')
            if encoding != 'utf-16be':
                sys.stderr.write('trying utf-16be instead\n')
        encoding = 'utf-16be'
        data = data[2:]
    elif (len(data) >= 4) and (data[:2] == _l2bytes([0xff, 0xfe])) and (data[2:4] != _l2bytes([0x00, 0x00])):
        # UTF-16LE BOM
        if _debug:
            sys.stderr.write('stripping BOM\n')
            if encoding != 'utf-16le':
                sys.stderr.write('trying utf-16le instead\n')
        encoding = 'utf-16le'
        data = data[2:]
    elif data[:3] == _l2bytes([0xef, 0xbb, 0xbf]):
        # UTF-8 BOM
        if _debug:
            sys.stderr.write('stripping BOM\n')
            if encoding != 'utf-8':
                sys.stderr.write('trying utf-8 instead\n')
        encoding = 'utf-8'
        data = data[3:]
    elif data[:4] == _l2bytes([0x00, 0x00, 0xfe, 0xff]):
        # UTF-32BE BOM
        if _debug:
            sys.stderr.write('stripping BOM\n')
            if encoding != 'utf-32be':
                sys.stderr.write('trying utf-32be instead\n')
        encoding = 'utf-32be'
        data = data[4:]
    elif data[:4] == _l2bytes([0xff, 0xfe, 0x00, 0x00]):
        # UTF-32LE BOM
        if _debug:
            sys.stderr.write('stripping BOM\n')
            if encoding != 'utf-32le':
                sys.stderr.write('trying utf-32le instead\n')
        encoding = 'utf-32le'
        data = data[4:]
    # decode with the (possibly corrected) encoding; raises on bad data
    newdata = unicode(data, encoding)
    if _debug: sys.stderr.write('successfully converted %s data to unicode\n' % encoding)
    # rewrite an existing XML declaration, or prepend one
    declmatch = re.compile('^<\?xml[^>]*?>')
    newdecl = '''<?xml version='1.0' encoding='utf-8'?>'''
    if declmatch.search(newdata):
        newdata = declmatch.sub(newdecl, newdata)
    else:
        newdata = newdecl + u'\n' + newdata
    return newdata.encode('utf-8')
def _stripDoctype(data):
    '''Strips DOCTYPE from XML document, returns (rss_version, stripped_data)

    rss_version may be 'rss091n' or None
    stripped_data is the same XML document, minus the DOCTYPE

    Also returns, as a third element, a dict of "safe" inline entity
    definitions (name -> value) found in the stripped DOCTYPE, for later
    expansion by the loose parser.
    '''
    # Split the prolog (everything before the first element) from the body.
    start = re.search(_s2bytes('<\w'), data)
    start = start and start.start() or -1
    head,data = data[:start+1], data[start+1:]
    # Collect and remove inline <!ENTITY ...> declarations from the prolog.
    entity_pattern = re.compile(_s2bytes(r'^\s*<!ENTITY([^>]*?)>'), re.MULTILINE)
    entity_results=entity_pattern.findall(head)
    head = entity_pattern.sub(_s2bytes(''), head)
    doctype_pattern = re.compile(_s2bytes(r'^\s*<!DOCTYPE([^>]*?)>'), re.MULTILINE)
    doctype_results = doctype_pattern.findall(head)
    doctype = doctype_results and doctype_results[0] or _s2bytes('')
    # Netscape's RSS 0.91 variant is detected purely from its DOCTYPE.
    if doctype.lower().count(_s2bytes('netscape')):
        version = 'rss091n'
    else:
        version = None
    # only allow in 'safe' inline entity definitions
    # (simple name="value" pairs with no embedded entity references other
    # than numeric character references) -- guards against XXE-style abuse.
    replacement=_s2bytes('')
    if len(doctype_results)==1 and entity_results:
        safe_pattern=re.compile(_s2bytes('\s+(\w+)\s+"(&#\w+;|[^&"]*)"'))
        safe_entities=filter(lambda e: safe_pattern.match(e),entity_results)
        if safe_entities:
            replacement=_s2bytes('<!DOCTYPE feed [\n <!ENTITY') + _s2bytes('>\n <!ENTITY ').join(safe_entities) + _s2bytes('>\n]>')
    data = doctype_pattern.sub(replacement, head) + data
    # NOTE: `safe_pattern` is only defined inside the branch above, but the
    # short-circuit on `replacement` (empty unless that branch ran) keeps
    # this from raising NameError.
    return version, data, dict(replacement and [(k.decode('utf-8'), v.decode('utf-8')) for k, v in safe_pattern.findall(replacement)])
def parse(url_file_stream_or_string, etag=None, modified=None, agent=None, referrer=None, handlers=[], request_headers={}, response_headers={}):
    '''Parse a feed from a URL, file, stream, or string.

    request_headers, if given, is a dict from http header name to value to add
    to the request; this overrides internally generated values.

    Returns a FeedParserDict with (at least) 'feed', 'entries', 'bozo',
    'encoding', 'version' and 'namespaces' keys.  Errors never raise; they
    set result['bozo'] = 1 and stash the exception in 'bozo_exception'.
    '''
    result = FeedParserDict()
    result['feed'] = FeedParserDict()
    result['entries'] = []
    if _XML_AVAILABLE:
        result['bozo'] = 0
    if not isinstance(handlers, list):
        handlers = [handlers]
    # Fetch the raw bytes; any failure is recorded rather than raised.
    try:
        f = _open_resource(url_file_stream_or_string, etag, modified, agent, referrer, handlers, request_headers)
        data = f.read()
    except Exception, e:
        result['bozo'] = 1
        result['bozo_exception'] = e
        data = None
        f = None

    if hasattr(f, 'headers'):
        result['headers'] = dict(f.headers)
    # overwrite existing headers using response_headers
    if 'headers' in result:
        result['headers'].update(response_headers)
    elif response_headers:
        result['headers'] = copy.deepcopy(response_headers)

    # if feed is gzip-compressed, decompress it
    if f and data and 'headers' in result:
        if gzip and result['headers'].get('content-encoding') == 'gzip':
            try:
                data = gzip.GzipFile(fileobj=_StringIO(data)).read()
            except Exception, e:
                # Some feeds claim to be gzipped but they're not, so
                # we get garbage. Ideally, we should re-request the
                # feed without the 'Accept-encoding: gzip' header,
                # but we don't.
                result['bozo'] = 1
                result['bozo_exception'] = e
                data = ''
        elif zlib and result['headers'].get('content-encoding') == 'deflate':
            try:
                data = zlib.decompress(data, -zlib.MAX_WBITS)
            except Exception, e:
                result['bozo'] = 1
                result['bozo_exception'] = e
                data = ''

    # save HTTP headers
    if 'headers' in result:
        if 'etag' in result['headers'] or 'ETag' in result['headers']:
            etag = result['headers'].get('etag', result['headers'].get('ETag'))
            if etag:
                result['etag'] = etag
        if 'last-modified' in result['headers'] or 'Last-Modified' in result['headers']:
            modified = result['headers'].get('last-modified', result['headers'].get('Last-Modified'))
            if modified:
                result['modified'] = _parse_date(modified)
    if hasattr(f, 'url'):
        # 'status' defaults to 200 and is refined below if the opener
        # recorded a real HTTP status.
        result['href'] = f.url
        result['status'] = 200
    if hasattr(f, 'status'):
        result['status'] = f.status
    if hasattr(f, 'close'):
        f.close()

    # there are four encodings to keep track of:
    # - http_encoding is the encoding declared in the Content-Type HTTP header
    # - xml_encoding is the encoding declared in the <?xml declaration
    # - sniffed_encoding is the encoding sniffed from the first 4 bytes of the XML data
    # - result['encoding'] is the actual encoding, as per RFC 3023 and a variety of other conflicting specifications
    http_headers = result.get('headers', {})
    result['encoding'], http_encoding, xml_encoding, sniffed_xml_encoding, acceptable_content_type = \
        _getCharacterEncoding(http_headers, data)
    if http_headers and (not acceptable_content_type):
        if http_headers.has_key('content-type') or http_headers.has_key('Content-type'):
            bozo_message = '%s is not an XML media type' % http_headers.get('content-type', http_headers.get('Content-type'))
        else:
            bozo_message = 'no Content-type specified'
        result['bozo'] = 1
        result['bozo_exception'] = NonXMLContentType(bozo_message)

    if data is not None:
        result['version'], data, entities = _stripDoctype(data)

    # ensure that baseuri is an absolute uri using an acceptable URI scheme
    contentloc = http_headers.get('content-location', http_headers.get('Content-Location', ''))
    href = result.get('href', '')
    baseuri = _makeSafeAbsoluteURI(href, contentloc) or _makeSafeAbsoluteURI(contentloc) or href

    baselang = http_headers.get('content-language', http_headers.get('Content-Language', None))

    # if server sent 304, we're done
    if result.get('status', 0) == 304:
        result['version'] = ''
        result['debug_message'] = 'The feed has not changed since you last checked, ' + \
            'so the server sent no data. This is a feature, not a bug!'
        return result

    # if there was a problem downloading, we're done
    if data is None:
        return result

    # determine character encoding
    use_strict_parser = 0
    known_encoding = 0
    tried_encodings = []
    # try: HTTP encoding, declared XML encoding, encoding sniffed from BOM
    for proposed_encoding in (result['encoding'], xml_encoding, sniffed_xml_encoding):
        if not proposed_encoding: continue
        if proposed_encoding in tried_encodings: continue
        tried_encodings.append(proposed_encoding)
        try:
            data = _toUTF8(data, proposed_encoding)
            known_encoding = use_strict_parser = 1
            break
        except:
            pass
    # if no luck and we have auto-detection library, try that
    if (not known_encoding) and chardet:
        try:
            proposed_encoding = chardet.detect(data)['encoding']
            if proposed_encoding and (proposed_encoding not in tried_encodings):
                tried_encodings.append(proposed_encoding)
                data = _toUTF8(data, proposed_encoding)
                known_encoding = use_strict_parser = 1
        except:
            pass
    # if still no luck and we haven't tried utf-8 yet, try that
    if (not known_encoding) and ('utf-8' not in tried_encodings):
        try:
            proposed_encoding = 'utf-8'
            tried_encodings.append(proposed_encoding)
            data = _toUTF8(data, proposed_encoding)
            known_encoding = use_strict_parser = 1
        except:
            pass
    # if still no luck and we haven't tried windows-1252 yet, try that
    if (not known_encoding) and ('windows-1252' not in tried_encodings):
        try:
            proposed_encoding = 'windows-1252'
            tried_encodings.append(proposed_encoding)
            data = _toUTF8(data, proposed_encoding)
            known_encoding = use_strict_parser = 1
        except:
            pass
    # if still no luck and we haven't tried iso-8859-2 yet, try that.
    if (not known_encoding) and ('iso-8859-2' not in tried_encodings):
        try:
            proposed_encoding = 'iso-8859-2'
            tried_encodings.append(proposed_encoding)
            data = _toUTF8(data, proposed_encoding)
            known_encoding = use_strict_parser = 1
        except:
            pass
    # if still no luck, give up
    if not known_encoding:
        result['bozo'] = 1
        result['bozo_exception'] = CharacterEncodingUnknown( \
            'document encoding unknown, I tried ' + \
            '%s, %s, utf-8, windows-1252, and iso-8859-2 but nothing worked' % \
            (result['encoding'], xml_encoding))
        result['encoding'] = ''
    elif proposed_encoding != result['encoding']:
        # Document parsed fine, but under a different encoding than declared:
        # record the override but keep the parsed data.
        result['bozo'] = 1
        result['bozo_exception'] = CharacterEncodingOverride( \
            'document declared as %s, but parsed as %s' % \
            (result['encoding'], proposed_encoding))
        result['encoding'] = proposed_encoding

    if not _XML_AVAILABLE:
        use_strict_parser = 0
    if use_strict_parser:
        # initialize the SAX parser
        feedparser = _StrictFeedParser(baseuri, baselang, 'utf-8')
        saxparser = xml.sax.make_parser(PREFERRED_XML_PARSERS)
        saxparser.setFeature(xml.sax.handler.feature_namespaces, 1)
        saxparser.setContentHandler(feedparser)
        saxparser.setErrorHandler(feedparser)
        source = xml.sax.xmlreader.InputSource()
        source.setByteStream(_StringIO(data))
        if hasattr(saxparser, '_ns_stack'):
            # work around bug in built-in SAX parser (doesn't recognize xml: namespace)
            # PyXML doesn't have this problem, and it doesn't have _ns_stack either
            saxparser._ns_stack.append({'http://www.w3.org/XML/1998/namespace':'xml'})
        try:
            saxparser.parse(source)
        except Exception, e:
            if _debug:
                import traceback
                traceback.print_stack()
                traceback.print_exc()
                sys.stderr.write('xml parsing failed\n')
            # Strict parsing failed; fall through to the loose parser below.
            result['bozo'] = 1
            result['bozo_exception'] = feedparser.exc or e
            use_strict_parser = 0
    if not use_strict_parser:
        feedparser = _LooseFeedParser(baseuri, baselang, 'utf-8', entities)
        feedparser.feed(data.decode('utf-8', 'replace'))
    result['feed'] = feedparser.feeddata
    result['entries'] = feedparser.entries
    result['version'] = result['version'] or feedparser.version
    result['namespaces'] = feedparser.namespacesInUse
    return result
class Serializer:
    # Base class for the command-line output serializers below; stores the
    # parsed feed results and leaves the actual writing to subclasses.
    def __init__(self, results):
        self.results = results
class TextSerializer(Serializer):
    # Flattens the parsed feed into "dotted.path=value" lines, one per leaf,
    # suitable for grepping from the command line.
    def write(self, stream=sys.stdout):
        self._writer(stream, self.results, '')

    def _writer(self, stream, node, prefix):
        # Recursive worker: `prefix` accumulates the dotted path (with a
        # trailing '.' that is trimmed when a leaf is printed).
        if not node: return
        if hasattr(node, 'keys'):
            keys = node.keys()
            keys.sort()
            for k in keys:
                # Skip noisy duplicates of richer *_detail / *_parsed keys.
                if k in ('description', 'link'): continue
                if node.has_key(k + '_detail'): continue
                if node.has_key(k + '_parsed'): continue
                self._writer(stream, node[k], prefix + k + '.')
        elif type(node) == types.ListType:
            # Lists render as parent[0]., parent[1]., ...
            index = 0
            for n in node:
                self._writer(stream, n, prefix[:-1] + '[' + str(index) + '].')
                index += 1
        else:
            # Leaf value: one line, with backslashes/newlines escaped so the
            # output stays one-record-per-line.
            try:
                s = str(node).encode('utf-8')
                s = s.replace('\\', '\\\\')
                s = s.replace('\r', '')
                s = s.replace('\n', r'\n')
                stream.write(prefix[:-1])
                stream.write('=')
                stream.write(s)
                stream.write('\n')
            except:
                pass
class PprintSerializer(Serializer):
    # Pretty-prints the whole result dict; the default output format.
    def write(self, stream=sys.stdout):
        if self.results.has_key('href'):
            stream.write(self.results['href'] + '\n\n')
        from pprint import pprint
        pprint(self.results, stream)
        stream.write('\n')
if __name__ == '__main__':
    # Command-line entry point: parse each URL/file argument and dump the
    # result with the selected serializer (text or pprint).
    try:
        from optparse import OptionParser
    except:
        OptionParser = None

    if OptionParser:
        optionParser = OptionParser(version=__version__, usage="%prog [options] url_or_filename_or_-")
        optionParser.set_defaults(format="pprint")
        optionParser.add_option("-A", "--user-agent", dest="agent", metavar="AGENT", help="User-Agent for HTTP URLs")
        optionParser.add_option("-e", "--referer", "--referrer", dest="referrer", metavar="URL", help="Referrer for HTTP URLs")
        optionParser.add_option("-t", "--etag", dest="etag", metavar="TAG", help="ETag/If-None-Match for HTTP URLs")
        optionParser.add_option("-m", "--last-modified", dest="modified", metavar="DATE", help="Last-modified/If-Modified-Since for HTTP URLs (any supported date format)")
        optionParser.add_option("-f", "--format", dest="format", metavar="FORMAT", help="output results in FORMAT (text, pprint)")
        optionParser.add_option("-v", "--verbose", action="store_true", dest="verbose", default=False, help="write debugging information to stderr")
        (options, urls) = optionParser.parse_args()
        if options.verbose:
            _debug = 1
        if not urls:
            optionParser.print_help()
            sys.exit(0)
    else:
        # optparse unavailable (very old Python): fall back to a bare-bones
        # options object with defaults and treat all argv as URLs.
        if not sys.argv[1:]:
            print __doc__
            sys.exit(0)
        class _Options:
            etag = modified = agent = referrer = None
            format = 'pprint'
        options = _Options()
        urls = sys.argv[1:]

    zopeCompatibilityHack()

    # Map e.g. format 'text' -> TextSerializer; unknown formats fall back to
    # the (do-nothing) Serializer base class.
    serializer = globals().get(options.format.capitalize() + 'Serializer', Serializer)
    for url in urls:
        results = parse(url, etag=options.etag, modified=options.modified, agent=options.agent, referrer=options.referrer)
        serializer(results).write(sys.stdout)
|
waseem18/oh-mainline
|
vendor/packages/feedparser/feedparser/feedparser.py
|
Python
|
agpl-3.0
| 168,065
|
[
"NetCDF",
"VisIt"
] |
0776da7067c22d625aab6e127eaf6cb1ef595acd9875a160f134480632e7e775
|
# lots to do:
# __ native drawLines
# __ add native drawCurve method
# __ native rectangle/round rect method
# __ native drawEllipse
# __ native drawArc
# __ drawImage support (work on Pyart side of things)
from __future__ import print_function
import pyart
from rdkit.sping.pid import *
from rdkit.sping.PDF import pdfmetrics
import Fontmapping # helps by mapping pid font classes to Pyart font names
# note for now I'm just going to do the standard PDF fonts & forget the rest
class PyartCanvas(Canvas):
    "note the default face is 'times' and is set in Fontmapping.py"
    # PID (piddle/sping) Canvas backend that renders through the `pyart`
    # library. Tracks the canvas default colors/width/font by mirroring them
    # into the underlying pyart graphics state via __setattr__.

    def __init__(self,size=(300,300),name='PyartCanvas.png'):
        self._pycan = pyart.Canvas(size[0], size[1], dpi=72)
        self.filename = name
        Canvas.__init__(self, size, name)
        # self.defaultFillColor = transparent
        # now we need to setup our tracking of the defaults vs the current state
        # see if the __setattr__ approach is any better than the _updateXX strategy

    def __setattr__(self, name, value):
        # Intercept assignments to the canvas defaults so the pyart gstate
        # stays in sync; everything else is a plain attribute store.
        if name == 'defaultLineColor':
            if value:
                # print('setting defaultLineColor to %s, 0x%x' % (value, value.toHexRGB()))
                if value != transparent:
                    self._pycan.gstate.stroke = value.toHexRGB()
                self.__dict__[name] = value
        elif name == 'defaultFillColor':
            if value:
                if value != transparent:
                    self._pycan.gstate.fill = value.toHexRGB()
                self.__dict__[name] = value
        elif name == 'defaultLineWidth' :
            if value:
                self._pycan.gstate.stroke_width = value
                self.__dict__[name] = value
        elif name == 'defaultFont':
            if value:
                self.__dict__[name] = value
                self._setPyartFont(value)
            else: # received None so set to default font face & size=12
                self.__dict__[name] = Font(face='times')
                self._setPyartFont(self.__dict__[name])
        else:
            self.__dict__[name] = value

    ## Private methods ##

    def _protectArtState(self, bool):
        # Push the pyart graphics state if `bool` is truthy; returns the flag
        # so callers can pair it with _restoreArtState.
        if bool:
            self._pycan.gsave()
        return bool

    def _restoreArtState(self, bool):
        if bool:
            self._pycan.grestore()

    def _setPyartFont(self, fontInstance):
        # accounts for "None" option
        # does not act on self.defaultFont at all
        fontsize = fontInstance.size
        self._pycan.gstate.font_size = fontsize
        # map pid name for font to Pyart name
        pyartname = Fontmapping.getPyartName(fontInstance)
        self._pycan.gstate.setfont(pyartname)

    # # # # #

    ### public PID Canvas methods ##

    def clear(self):
        pass

    def flush(self):
        pass

    def save(self, file=None, format=None):
        # Writes the canvas to `file` (defaults to the name given at
        # construction). Only string filenames are supported; `format` is
        # accepted for interface compatibility but ignored.
        # NOTE(review): StringType presumably comes in via the
        # `from rdkit.sping.pid import *` star-import -- confirm.
        # fileobj = getFileObject(file)
        if not file:
            file = self.filename
        if isinstance(file, StringType):
            self._pycan.save(file)
        else:
            raise NotImplementedError

    def _findExternalFontName(self, font): #copied from piddlePDF by cwl- hack away!
        """Attempts to return proper font name.
        PDF uses a standard 14 fonts referred to
        by name. Default to self.defaultFont('Helvetica').
        The dictionary allows a layer of indirection to
        support a standard set of PIDDLE font names."""
        piddle_font_map = {
            'Times':'Times',
            'times':'Times',
            'Courier':'Courier',
            'courier':'Courier',
            'helvetica':'Helvetica',
            'Helvetica':'Helvetica',
            'symbol':'Symbol',
            'Symbol':'Symbol',
            'monospaced':'Courier',
            'serif':'Times',
            'sansserif':'Helvetica',
            'ZapfDingbats':'ZapfDingbats',
            'zapfdingbats':'ZapfDingbats',
            'arial':'Helvetica'
        }
        try:
            face = piddle_font_map[string.lower(font.face)]
        except Exception:
            return 'Helvetica'
        name = face + '-'
        if font.bold and face in ['Courier','Helvetica','Times']:
            name = name + 'Bold'
        if font.italic and face in ['Courier', 'Helvetica']:
            name = name + 'Oblique'
        elif font.italic and face == 'Times':
            name = name + 'Italic'
        if name == 'Times-':
            name = name + 'Roman'
        # symbol and ZapfDingbats cannot be modified!
        #trim and return
        if name[-1] == '-':
            name = name[0:-1]
        return name

    def stringWidth(self, s, font=None):
        # Width of `s` in points, using the PDF font metrics tables.
        if not font:
            font = self.defaultFont
        fontname = Fontmapping.getPdfName(font)
        return pdfmetrics.stringwidth(s, fontname) * font.size * 0.001

    def fontAscent(self, font=None):
        if not font:
            font = self.defaultFont
        fontname = Fontmapping.getPdfName(font)
        return pdfmetrics.ascent_descent[fontname][0] * 0.001 * font.size

    def fontDescent(self, font=None):
        # Returned as a positive distance below the baseline (metrics table
        # stores descent as a negative number).
        if not font:
            font = self.defaultFont
        fontname = Fontmapping.getPdfName(font)
        return -pdfmetrics.ascent_descent[fontname][1] * 0.001 * font.size

    def drawLine(self, x1, y1, x2, y2, color=None, width=None):
        ## standard code ##
        color = color or self.defaultLineColor
        width = width or self.defaultLineWidth
        if color != transparent:
            # Save/restore the gstate only when we deviate from the defaults.
            changed = self._protectArtState( (color != self.defaultLineColor) or
                                             (width != self.defaultLineWidth) )
            if color != self.defaultLineColor:
                self._pycan.gstate.stroke = color.toHexRGB()
                # print("color is %s <-> %s" % (color, color.toHexStr()))
            if width != self.defaultLineWidth:
                self._pycan.gstate.stroke_width = width
            ###################

            # actual drawing
            p = pyart.VectorPath(3)
            p.moveto_open(x1,y1)
            p.lineto(x2,y2)
            self._pycan.stroke(p)

            ## standard code ##
            if changed:
                self._pycan.grestore()
            ###################

    # def drawLines(self, lineList, color=None, width=None):
    #     pass

    def drawString(self, s, x, y, font=None, color=None, angle=0):
        # start w/ the basics
        # NOTE(review): font/color/angle are currently ignored -- confirm
        # whether the pyart backend ever supported them here.
        self._pycan.drawString(x,y, s)

    def drawPolygon(self, pointlist,
                    edgeColor=None, edgeWidth=None, fillColor=None, closed=0):
        eColor = edgeColor or self.defaultLineColor
        fColor = fillColor or self.defaultFillColor
        eWidth = edgeWidth or self.defaultLineWidth

        changed = self._protectArtState( (eColor != self.defaultLineColor) or
                                         (eWidth != self.defaultLineWidth) or
                                         (fColor != self.defaultFillColor) )
        if eColor != self.defaultLineColor:
            self._pycan.gstate.stroke = eColor.toHexRGB()
        if fColor != self.defaultFillColor:
            self._pycan.gstate.fill = fColor.toHexRGB()
        if eWidth != self.defaultLineWidth:
            self._pycan.gstate.stroke_width = eWidth

        path = pyart.VectorPath(len(pointlist)+1)
        if closed:
            path.moveto_closed(pointlist[0][0], pointlist[0][1])
        else:
            path.moveto_open(pointlist[0][0], pointlist[0][1])
        for pt in pointlist[1:]:
            path.lineto(pt[0],pt[1])
        if closed:
            path.close()
        # Fill only makes sense for a closed path; stroke applies either way.
        if fColor != transparent and closed:
            self._pycan.fill(path)
        if eColor != transparent:
            self._pycan.stroke(path)
        self._restoreArtState(changed)

    #def drawCurve(self, x1, y1, x2, y2, x3, y3, x4, y4,
    #              edgeColor=None, edgeWidth=None, fillColor=None, closed=0):
    #    pass

    # def drawRoundRect(self, x1,y1, x2,y2, rx=8, ry=8,
    #                   edgeColor=None, edgeWidth=None, fillColor=None):
    #     pass

    # def drawEllipse(self, x1,y1, x2,y2, edgeColor=None, edgeWidth=None,
    #                 fillColor=None):
    #     pass

    # def drawArc(self, x1,y1, x2,y2, startAng=0, extent=360, edgeColor=None,
    #             edgeWidth=None, fillColor=None):
    #     pass

    # def drawFigure(self, partList,
    #                edgeColor=None, edgeWidth=None, fillColor=None, closed=0):
    #     pass

    # def drawImage(self, image, x1, y1, x2=None,y2=None):
    #     pass
## basic tests ##
if __name__=='__main__':
    # Smoke test: render the shared sping test drawings through this backend
    # and write them out as PNG files.
    import rdkit.sping.tests.pidtest
    can = PyartCanvas(size=(300,300), name='basictest.png')
    #can.defaultLineColor = Color(0.7, 0.7, 1.0)
    #can.drawLine(10,10, 290,290)
    #can.drawLine(10,10, 50, 10, color=green, width = 4.5)
    rdkit.sping.tests.pidtest.drawBasics(can)
    can.save(file='basicTest.png')
    print('saving basicTest.png')

    can = PyartCanvas(size=(400,400), name='test-strings.png')
    rdkit.sping.tests.pidtest.drawStrings(can)
    can.save()
|
adalke/rdkit
|
rdkit/sping/Pyart/pidPyart.py
|
Python
|
bsd-3-clause
| 9,324
|
[
"RDKit"
] |
bdc7a5cc861adacce97c8f774ca38757f18ed483b1bf5bf6ca50418b54fefb83
|
from ast import Attribute, Await, Dict, Expr, NodeTransformer, parse
from inspect import getsource
from textwrap import dedent
from typing import Any
from sanic.log import logger
from .base import BaseScheme
class OptionalDispatchEvent(BaseScheme):
    """Touchup scheme that strips ``dispatch()`` calls for unregistered events.

    The handler's source is re-parsed at startup and every awaited
    ``*.dispatch("<event>")`` expression whose (string-literal) event name
    was never registered on the app's signal router is removed from the AST
    before recompilation, so unused signal dispatches cost nothing at runtime.
    """

    ident = "ODE"

    def __init__(self, app) -> None:
        super().__init__(app)
        # Snapshot of every signal path registered on the app at setup time.
        self._registered_events = [
            signal.path for signal in app.signal_router.routes
        ]

    def run(self, method, module_globals):
        """Recompile *method* with unregistered dispatch calls removed.

        ``module_globals`` supplies the namespace the original function was
        defined in; returns the freshly compiled function object.
        """
        raw_source = getsource(method)
        src = dedent(raw_source)
        tree = parse(src)
        node = RemoveDispatch(
            self._registered_events, self.app.state.verbosity
        ).visit(tree)
        compiled_src = compile(node, method.__name__, "exec")
        # BUG FIX: the module imports `Dict` from `ast`, so the previous
        # annotation `Dict[str, Any]` named ast.Dict (the AST node), not
        # typing.Dict. Use the builtin `dict` annotation instead.
        exec_locals: dict = {}
        exec(compiled_src, module_globals, exec_locals)  # nosec
        return exec_locals[method.__name__]
class RemoveDispatch(NodeTransformer):
    """AST transformer that deletes dispatch calls for unknown events."""

    def __init__(self, registered_events, verbosity: int = 0) -> None:
        self._registered_events = registered_events
        self._verbosity = verbosity

    def visit_Expr(self, node: Expr) -> Any:
        """Drop ``[await] <obj>.dispatch("<literal>")`` statements whose
        event name is not registered; leave every other expression alone.
        """
        call = node.value
        if isinstance(call, Await):
            call = call.value
        func = getattr(call, "func", None)
        args = getattr(call, "args", None)
        if not func or not args:
            return node
        if isinstance(func, Attribute) and func.attr == "dispatch":
            event = args[0]
            # Only string-literal event names can be resolved statically.
            if hasattr(event, "s"):
                event_name = getattr(event, "value", event.s)
                if self._not_registered(event_name):
                    if self._verbosity >= 2:
                        logger.debug(f"Disabling event: {event_name}")
                    # Returning None removes the node from the tree.
                    return None
        return node

    def _not_registered(self, event_name):
        """Return True when *event_name* matches neither a registered event
        nor the namespace of a registered dynamic (``....<param>``) event.
        """
        dynamic = []
        for event in self._registered_events:
            if event.endswith(">"):
                namespace_concern, _ = event.rsplit(".", 1)
                dynamic.append(namespace_concern)
        # BUG FIX: a dotless event name used to raise ValueError because the
        # single-element rsplit result was unpacked into two names.
        if "." in event_name:
            namespace_concern, _ = event_name.rsplit(".", 1)
        else:
            namespace_concern = event_name
        return (
            event_name not in self._registered_events
            and namespace_concern not in dynamic
        )
|
ashleysommer/sanic
|
sanic/touchup/schemes/ode.py
|
Python
|
mit
| 2,262
|
[
"VisIt"
] |
8505fffae86fdda4ce990afc4abd835113bd10f56873b9c5ff5ada3fd2a0752a
|
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is Mozilla-specific Buildbot steps.
#
# The Initial Developer of the Original Code is
# Mozilla Foundation.
# Portions created by the Initial Developer are Copyright (C) 2009
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Brian Warner <warner@lothar.com>
# Chris AtLee <catlee@mozilla.com>
# Dustin Mitchell <dustin@zmanda.com>
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
import sys, os, cgi, re, time
from twisted.python import log, reflect
from twisted.internet import defer, reactor
from twisted.enterprise import adbapi
from buildbot.db.connector import DBConnector
from buildbot.db.exceptions import *
from buildbot import util
class ExpiringConnectionPool(adbapi.ConnectionPool):
    """
    A Connection pool that expires connections after a certain amount of idle
    time.
    """
    def __init__(self, dbapiName, max_idle=60, *args, **kwargs):
        """
        @param max_idle: reconnect connections that have been idle more than
        this number of seconds.
        """
        log.msg("Using expiring pool with max_idle=%i" % max_idle)
        adbapi.ConnectionPool.__init__(self, dbapiName, *args, **kwargs)
        self.max_idle = max_idle
        # Maps worker-thread id -> timestamp of that thread's last checkout.
        self.connection_lastused = {}

    def connect(self):
        # adbapi keeps one connection per worker thread; if this thread's
        # connection has sat idle past max_idle, drop it so the base class
        # creates a fresh one.
        tid = self.threadID()
        now = util.now()
        lastused = self.connection_lastused.get(tid)
        if lastused and lastused + self.max_idle < now:
            conn = self.connections.get(tid)
            if self.noisy:
                log.msg("expiring old connection")
            self.disconnect(conn)
        conn = adbapi.ConnectionPool.connect(self)
        self.connection_lastused[tid] = now
        return conn

    def disconnect(self, conn):
        adbapi.ConnectionPool.disconnect(self, conn)
        # Forget the idle timestamp for this thread along with its connection.
        tid = self.threadID()
        del self.connection_lastused[tid]
class TimeoutError(Exception):
    # Raised by RetryingCursor.execute when retries on a locked database
    # exceed max_retry_time.
    def __init__(self, msg):
        Exception.__init__(self, msg)
class RetryingCursor:
max_retry_time = 1800 # Half an hour
max_sleep_time = 1
def __init__(self, dbapi, cursor):
self.dbapi = dbapi
self.cursor = cursor
def sleep(self, s):
time.sleep(s)
def execute(self, *args, **kw):
start_time = util.now()
sleep_time = 0.1
while True:
try:
query_start_time = util.now()
result = self.cursor.execute(*args, **kw)
end_time = util.now()
if end_time - query_start_time > 2:
log.msg("Long query (%is): %s" % ((end_time - query_start_time), str((args, kw))))
return result
except self.dbapi.OperationalError, e:
if e.args[0] == 'database is locked':
# Retry
log.msg("Retrying query %s" % str((args, kw)))
now = util.now()
if start_time + self.max_retry_time < now:
raise TimeoutError("Exceeded timeout trying to do %s" % str((args, kw)))
self.sleep(sleep_time)
sleep_time = max(self.max_sleep_time, sleep_time * 2)
continue
raise
def __getattr__(self, name):
return getattr(self.cursor, name)
class RetryingConnection:
    """DB-API connection wrapper whose cursors transparently retry queries
    that fail because the (sqlite) database is locked.

    Every attribute other than cursor() is delegated to the wrapped
    connection object.
    """

    def __init__(self, dbapi, conn):
        self.dbapi = dbapi
        self.conn = conn

    def cursor(self):
        # Wrap the raw cursor so its execute() gains retry behaviour.
        raw_cursor = self.conn.cursor()
        return RetryingCursor(self.dbapi, raw_cursor)

    def __getattr__(self, name):
        # Fall through to the underlying connection for everything else
        # (commit, rollback, close, ...).
        return getattr(self.conn, name)
class RetryingConnectionPool(adbapi.ConnectionPool):
    # Same as the standard twisted pool, but each connection it hands out is
    # wrapped so its cursors retry on sqlite 'database is locked' errors.
    def connect(self):
        return RetryingConnection(self.dbapi, adbapi.ConnectionPool.connect(self))
class DBSpec(object):
    """
    A specification for the database type and other connection parameters.
    """

    # List of connkw arguments that are applicable to the connection pool only
    pool_args = ["max_idle"]

    def __init__(self, dbapiName, *connargs, **connkw):
        # special-case 'sqlite3', replacing it with the available implementation
        if dbapiName == 'sqlite3':
            dbapiName = self._get_sqlite_dbapi_name()
        self.dbapiName = dbapiName
        self.connargs = connargs
        self.connkw = connkw

    @classmethod
    def from_url(cls, url, basedir=None):
        """
        Parses a URL of the format

        driver://[username:password@]host:port/database[?args]

        and returns a DB object representing this URL. Percent-
        substitution will be performed, replacing %(basedir)s with
        the basedir argument.

        raises ValueError on an invalid URL.
        """
        # re.X: whitespace in the pattern below is insignificant.
        match = re.match(r"""
        ^(?P<driver>\w+)://
        (
        ((?P<user>\w+)(:(?P<passwd>\S+))?@)?
        ((?P<host>[-A-Za-z0-9.]+)(:(?P<port>\d+))?)?/
        (?P<database>\S+?)(\?(?P<args>.*))?
        )?$""", url, re.X)
        if not match:
            raise ValueError("Malformed url")
        d = match.groupdict()
        driver = d['driver']
        user = d['user']
        passwd = d['passwd']
        host = d['host']
        port = d['port']
        if port is not None:
            port = int(port)
        database = d['database']
        # Query-string arguments become extra connection keywords.
        args = {}
        if d['args']:
            for key, value in cgi.parse_qsl(d['args']):
                args[key] = value

        if driver == "sqlite":
            # user, passwd, host, and port must all be None
            if not user == passwd == host == port == None:
                raise ValueError("user, passwd, host, port must all be None")
            if not database:
                database = ":memory:"
            else:
                # Allow %(basedir)s substitution, then anchor relative paths
                # at basedir.
                database = database % dict(basedir=basedir)
                database = os.path.join(basedir, database)
            return cls("sqlite3", database, **args)
        elif driver == "mysql":
            args['host'] = host
            args['db'] = database
            if user:
                args['user'] = user
            if passwd:
                args['passwd'] = passwd
            if port:
                args['port'] = port
            if 'max_idle' in args:
                args['max_idle'] = int(args['max_idle'])
            return cls("MySQLdb", use_unicode=True, charset="utf8", **args)
        else:
            raise ValueError("Unsupported dbapi %s" % driver)

    def _get_sqlite_dbapi_name(self):
        # see which dbapi we can use and return that name; prefer
        # pysqlite2.dbapi2 if it is available.
        sqlite_dbapi_name = None
        try:
            from pysqlite2 import dbapi2 as sqlite3
            sqlite_dbapi_name = "pysqlite2.dbapi2"
        except ImportError:
            # don't use built-in sqlite3 on 2.5 -- it has *bad* bugs
            if sys.version_info >= (2,6):
                import sqlite3
                sqlite_dbapi_name = "sqlite3"
            else:
                raise
        return sqlite_dbapi_name

    def get_dbapi(self):
        """
        Get the dbapi module used for this connection (for things like
        exceptions and module-global attributes
        """
        return reflect.namedModule(self.dbapiName)

    def get_sync_connection(self):
        """
        Get a synchronous connection to the specified database. This returns
        a simple DBAPI connection object.
        """
        dbapi = self.get_dbapi()
        connkw = self.connkw.copy()
        # Pool-only keywords (e.g. max_idle) must not reach dbapi.connect().
        for arg in self.pool_args:
            if arg in connkw:
                del connkw[arg]
        conn = dbapi.connect(*self.connargs, **connkw)
        if 'sqlite' in self.dbapiName:
            conn = RetryingConnection(dbapi, conn)
        return conn

    def get_async_connection_pool(self):
        """
        Get an asynchronous (adbapi) connection pool for the specified
        database.
        """
        # add some connection keywords
        connkw = self.connkw.copy()
        connkw["cp_reconnect"] = True
        connkw["cp_noisy"] = True

        # This disables sqlite's obsessive checks that a given connection is
        # only used in one thread; this is justified by the Twisted ticket
        # regarding the errors you get on connection shutdown if you do *not*
        # add this parameter: http://twistedmatrix.com/trac/ticket/3629
        if 'sqlite' in self.dbapiName:
            connkw['check_same_thread'] = False
        log.msg("creating adbapi pool: %s %s %s" % \
            (self.dbapiName, self.connargs, connkw))

        # MySQL needs support for expiring idle connections
        if self.dbapiName == 'MySQLdb':
            return ExpiringConnectionPool(self.dbapiName, *self.connargs, **connkw)
        else:
            return RetryingConnectionPool(self.dbapiName, *self.connargs, **connkw)

    def get_maxidle(self):
        # Pool idle-expiry window; only MySQL gets a non-None default.
        default = None
        if self.dbapiName == "MySQLdb":
            default = 60
        return self.connkw.get("max_idle", default)
|
centrumholdings/buildbot
|
buildbot/db/dbspec.py
|
Python
|
gpl-2.0
| 10,328
|
[
"Brian"
] |
2b2cc22f06e4c6df5de4342376362738f6d4bcaa880d602dc21df5bcc466f429
|
################################
# Author : septicmk
# Date : 2015/07/23 19:20:57
# FileName : fusion.py
################################
import numpy as np
import math
from MEHI.utils.tool import exeTime
@exeTime
def content_fusion(rdd, sgm1=44, sgm2=81):
    '''
    Usage:
        - a fast implementation of content-based fusion
    Args:
        - rdd: the ziped L&R img stack (each element is a (left, right) frame pair)
        - sgm1/sgm2: gaussian smooth size
    Returns:
        - the fused image stack as a numpy array (singleton axes squeezed)
    '''
    from skimage.filters import gaussian_filter
    def func(dframe):
        frame1, frame2 = dframe[0], dframe[1]
        # Per-view saliency weights: high-pass each frame (subtract its
        # gaussian blur), then smooth the squared response.
        tmp1 = frame1 - gaussian_filter(frame1,sgm1)
        tmp1 = gaussian_filter(tmp1*tmp1,sgm2)
        tmp2 = frame2 - gaussian_filter(frame2,sgm1)
        tmp2 = gaussian_filter(tmp2*tmp2,sgm2)
        # BUG FIX: the numerator previously read (tmp1*frame1 + frame1*tmp1),
        # i.e. 2*tmp1*frame1 -- the second view never contributed and tmp2
        # was only used in the denominator. Content-based fusion is the
        # weighted average (w1*f1 + w2*f2) / (w1 + w2).
        ret = (tmp1*frame1 + tmp2*frame2)/(tmp1+tmp2)
        ret = ret.astype(frame1.dtype)
        return ret
    rdd = rdd.map(func)
    fused_img = np.squeeze(np.array(rdd.collect()))
    return fused_img
@exeTime
def wavelet_fusion(rdd, level=5):
    '''
    Usage:
        - a implementation of wavelet fusion (C based)
    Args:
        - rdd: the ziped L&R img stack (each element is a (left, right) frame pair)
        - level: wavelet level
    Returns:
        - the fused image stack as a numpy array (singleton axes squeezed)
    '''
    import pywt
    def fuse(A, C, S):
        # Merge two detail-coefficient pyramids element-wise, keeping the
        # coefficient with the larger magnitude; A is the (already merged)
        # approximation. NOTE: relies on Python 2 `map` returning lists --
        # under Python 3 these would be lazy iterators and pywt would break.
        T = [A]
        for c, s in zip(C, S):
            cH,cV,cD = c
            sH,sV,sD = s
            tH = map(lambda x,y: map(lambda a,b: a if abs(a) > abs(b) else b, x,y), cH, sH)
            tV = map(lambda x,y: map(lambda a,b: a if abs(a) > abs(b) else b, x,y), cV, sV)
            tD = map(lambda x,y: map(lambda a,b: a if abs(a) > abs(b) else b, x,y), cD, sD)
            T.append((tH,tV,tD))
        return T
    def func(dframe):
        frame1, frame2 = dframe[0], dframe[1]
        frame1 = np.array(frame1)
        frame2 = np.array(frame2)
        # Decompose both views, average the approximations, take the
        # max-magnitude detail coefficients, then reconstruct.
        C = pywt.wavedec2(frame1, 'db4', level=level)
        S = pywt.wavedec2(frame2, 'db4', level=level)
        tA2 = (C[0] + S[0])/2
        coeffs = fuse(tA2, C[1:], S[1:])
        fuse_img = pywt.waverec2(coeffs, 'db4')
        # Clip back into the source dtype's range before casting.
        if frame1.dtype == np.uint16:
            fuse_img = fuse_img.clip(0,65535).astype(np.uint16)
        elif frame1.dtype == np.uint8:
            fuse_img = fuse_img.clip(0,255).astype(np.uint8)
        return fuse_img
    rdd = rdd.map(func)
    fused_img = np.squeeze(np.array(rdd.collect()))
    return fused_img
if __name__ == '__main__':
    # Module is import-only; no standalone CLI behaviour.
    pass
|
septicmk/MEHI
|
MEHI/paralleled/fusion.py
|
Python
|
bsd-3-clause
| 2,378
|
[
"Gaussian"
] |
370d3b91f1ec06582c0e29419dbe0dd61da0cf57e25f98699681d7927b904fc6
|
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A utility script for checking out subdirectories of many GIT repositories
to specified locations, like is possible with SVN and gclient. This uses a
combination of GIT, sparse-checkout, shallow-clone and filesystem junctions.
For each dependency in a 'gitdeps' file this script will checkout one
subdirectory of one repository into a specified location. The input is as
follows:
- The user specifies a local destination for the checkout.
- The user specifies a source repository.
- The user specifies a subdirectory of the repository.
- The user specifies a revision.
The checkout works as follows:
- An empty git checkout is initialized in the cache directory. This will be
in a subfolder with an essentially random name.
- The specified repository is added as a remote to that repo.
- A sparse-checkout directive is added to select only the desired subdirectory.
- The repository is cloned using a depth of 1 (no history, only the actual
contents of the desired revision).
- The destination directory is created as a junction pointing to the
subdirectory of the checkout in the cache directory.
The script maintains its state in the root of the cache directory, allowing it
to reuse checkout directories when possible.
"""
import ast
import glob
import hashlib
import logging
import optparse
import os
import random
import re
import subprocess
import threading
# Module-level logger, named after this script's file name.
_LOGGER = logging.getLogger(os.path.basename(__file__))
# Matches a SHA1 hash used as a git revision.
_GIT_SHA1_RE = re.compile('^[A-Fa-f0-9]{40}$')
def _ParseCommandLine():
  """Parses the command-line and returns an options structure.

  Returns an (options, args) pair where |args| is the list of GITDEPS files
  to process, defaulting to ['GITDEPS'] in the current working directory
  when none are given. Exits via option_parser.error() on invalid input.
  """
  option_parser = optparse.OptionParser()
  option_parser.add_option('--cache-dir', type='string',
      default='.gitdeps-cache',
      help='The directory to be used for storing cache files. Defaults to '
      '.gitdeps-cache in the current working directory.')
  option_parser.add_option('--output-dir', type='string', default='.',
      help='The directory to be used as the root of all output. Defaults to '
      'the current working directory.')
  option_parser.add_option('--dry-run', action='store_true', default=False,
      help='If true then will simply list actions that would be performed.')
  option_parser.add_option('--force', action='store_true', default=False,
      help='If true then will force the checkout to be completely rebuilt.')
  # --verbose and --quiet share the same 'log_level' destination; whichever
  # appears later on the command line wins.
  option_parser.add_option('--verbose', dest='log_level', action='store_const',
      default=logging.INFO, const=logging.DEBUG,
      help='Enables verbose logging.')
  option_parser.add_option('--quiet', dest='log_level', action='store_const',
      default=logging.INFO, const=logging.ERROR,
      help='Disables all output except for errors.')
  options, args = option_parser.parse_args()

  # Configure logging.
  logging.basicConfig(level=options.log_level)

  # Set default values.
  if not args:
    # Default to checking for a file in the current working directory.
    _LOGGER.info('Defaulting to using GITDEPS in current working directory.')
    args = ['GITDEPS']

  # Validate arguments and options.
  if not os.path.isdir(options.output_dir):
    option_parser.error('Output directory does not exist: %s' %
        options.output_dir)
  for path in args:
    if not os.path.exists(path):
      option_parser.error('Missing dependency file: %s' % path)

  # Normalize local paths for prettier output.
  options.cache_dir = os.path.normpath(os.path.abspath(options.cache_dir))
  options.output_dir = os.path.normpath(os.path.abspath(options.output_dir))

  return options, args
class RepoOptions(object):
  """Value holder describing one GITDEPS dependency entry."""

  def __init__(self):
    # All fields start unset; they are populated by _ParseRepoOptions.
    self.repository = None
    self.revision = None
    self.output_dir = None
    self.remote_dir = None
    self.deps_file = None
    self.checkout_dir = None

  def __str__(self):
    """Returns a debug-friendly description of this object."""
    fields = (repr(self.repository), repr(self.revision),
              repr(self.output_dir), repr(self.remote_dir),
              repr(self.deps_file), repr(self.checkout_dir))
    return ('RepoOptions(repository=%s, revision=%s, output_dir=%s, '
            'remote_dir=%s, deps_file=%s, checkout_dir=%s)') % fields
def _ParseRepoOptions(cache_dir, root_output_dir, deps_file_path, key, value):
  """Builds a RepoOptions object from one entry of a GITDEPS file.

  |key| is the output path of the checkout relative to |root_output_dir|,
  and |value| is a (repository URL, remote directory, revision hash) tuple
  or list. |deps_file_path| is recorded for error reporting. Raises an
  Exception if |value| is malformed or if |key| escapes the output root.
  """
  # Use isinstance rather than exact type comparison; it also accepts
  # subclasses of list/tuple, which behave identically here.
  if not isinstance(value, (list, tuple)) or len(value) != 3:
    _LOGGER.error('Invalid dependency tuple: %s', value)
    raise Exception()

  # Always use lowercase SHA1 hashes for consistency.
  refspec = value[2]
  if _GIT_SHA1_RE.match(refspec):
    refspec = refspec.lower()

  repo_options = RepoOptions()
  repo_options.output_dir = os.path.normpath(os.path.abspath(os.path.join(
      root_output_dir, key)))
  repo_options.repository = value[0]
  repo_options.remote_dir = value[1]
  repo_options.revision = refspec
  repo_options.deps_file = deps_file_path

  # Create a unique name for the checkout in the cache directory. Make the
  # output directory relative to the cache directory so that they can be
  # moved around together.
  output_dir_rel = os.path.relpath(repo_options.output_dir,
      root_output_dir).lower()
  if output_dir_rel.startswith('..'):
    raise Exception('Invalid output directory: %s' % key)
  # NOTE(review): hashing a str works under Python 2; Python 3 would require
  # encoding to bytes first — confirm before porting.
  n = hashlib.md5(output_dir_rel).hexdigest()
  repo_options.checkout_dir = os.path.abspath(os.path.join(cache_dir, n))

  return repo_options
def _EnsureDirectoryExists(path, comment_name, dry_run):
  """Ensures that the given |path| exists.

  Only actually creates the directory if |dry_run| is False. |comment_name|
  is an optional adjective used in the log message, e.g. 'output' yields
  'Creating output directory: ...'.
  """
  # BUG FIX: the original test was inverted (`if not comment_name`), which
  # appended a lone space when no name was given and erased the name when
  # one was provided.
  if comment_name:
    comment_name += ' '
  else:
    comment_name = ''
  if not os.path.exists(path):
    _LOGGER.debug('Creating %sdirectory: %s', comment_name, path)
    if not dry_run:
      os.makedirs(path)
def _GetCasedFilename(filename):
"""Returns the full case-sensitive filename for the given |filename|. If the
path does not exist, returns the original |filename| as is.
"""
pattern = '%s[%s]' % (filename[:-1], filename[-1])
filenames = glob.glob(pattern)
if not filenames:
return filename
return filenames[0]
def _Shell(*cmd, **kw):
  """Runs |cmd| and returns the (stdout, stderr) from communicate().

  Additional keyword arguments are passed on to subprocess.Popen. If
  |stdout| and |stderr| are not specified they default to subprocess.PIPE.
  If |dry_run| is not specified it defaults to True; the command is only
  actually run when |dry_run| is False. Raises a RuntimeError on a nonzero
  exit code.
  """
  if 'cwd' in kw:
    _LOGGER.debug('Executing %s in "%s".', cmd, kw['cwd'])
  else:
    _LOGGER.debug('Executing %s.', cmd)
  # Nothing runs in dry-run mode; report empty output.
  if kw.get('dry_run', True):
    return ('', '')
  kw.pop('dry_run', None)
  dump_on_error = kw.pop('dump_on_error', False)

  kw['shell'] = True
  kw.setdefault('stdout', subprocess.PIPE)
  kw.setdefault('stderr', subprocess.PIPE)
  prog = subprocess.Popen(cmd, **kw)

  stdout, stderr = prog.communicate()
  if prog.returncode != 0:
    if dump_on_error:
      # BUG FIX: parenthesized so the statements parse under both Python 2
      # and Python 3 (the originals were Python-2-only print statements).
      print(stdout)
      print(stderr)
    raise RuntimeError('Command "%s" returned %d.' % (cmd, prog.returncode))
  return (stdout, stderr)
def _IsGitCheckoutRoot(path):
"""Return true if the given |path| is the root of a git checkout."""
return os.path.exists(os.path.join(path, '.git'))
# Matches a GIT config file section header, and grabs the name of the section
# in the first group. Used by _GetGitOrigin.
_GIT_CONFIG_SECTION_RE = re.compile(r'^\s*\[(.*?)\]\s*$')
# Matches the URL line from a 'remote' section of a GIT config. Used by
# _GetGitOrigin.
_GIT_CONFIG_REMOTE_URL_RE = re.compile(r'^\s*url\s*=\s*(.*?)\s*$')
def _GetGitOrigin(path):
"""Returns the URL of the 'origin' remote for the git repo in |path|. Returns
None if the 'origin' remote doesn't exist. Raises an IOError if |path| doesn't
exist or is not a git repo.
"""
section = None
for line in open(os.path.join(path, '.git', 'config'), 'rb'):
m = _GIT_CONFIG_SECTION_RE.match(line)
if m:
section = m.group(1)
continue
# We only care about the 'origin' configuration.
if section != 'remote "origin"':
continue
m = _GIT_CONFIG_REMOTE_URL_RE.match(line)
if m:
return m.group(1).strip()
return None
def _GetGitHead(path):
"""Returns the hash of the head of the git repo in |path|. Raises an IOError
if |path| doesn't exist or is not a git repo.
"""
return open(os.path.join(path, '.git', 'HEAD'), 'rb').read().strip()
def _NormalizeGitPath(path):
"""Given a |path| in a GIT repository (relative to its root), normalizes it so
it will match only that exact path in a sparse checkout.
"""
path = path.strip()
if not path.startswith('/'):
path = '/' + path
if not path.endswith('/'):
path += '/'
return path
def _RenameCheckout(path, dry_run):
  """Renames the checkout in |path| so that it can subsequently be deleted.

  Returns the path of the renamed checkout directory. Makes no filesystem
  changes when |dry_run| is True. Raises an Exception when no free target
  name is found after several attempts.
  """
  def _TryRename(path, dry_run):
    if dry_run:
      return path + '-old-dryrun'
    # Retry with random suffixes: a previously renamed (but not yet deleted)
    # checkout may still occupy a candidate name.
    for _ in range(10):
      candidate = '%s-old-%04d' % (path, random.randint(0, 999))
      try:
        os.rename(path, candidate)
        return candidate
      except WindowsError:
        pass
    raise Exception('Unable to rename checkout directory: %s' % path)

  newpath = _TryRename(path, dry_run)
  _LOGGER.debug('Renamed checkout directory: %s', newpath)
  return newpath
def _DeleteCheckout(path, dry_run):
  """Recursively deletes the checkout in |path|. No filesystem changes are
  made when |dry_run| is True."""
  _LOGGER.debug('Deleting checkout directory: %s', path)
  if not dry_run:
    _Shell('rmdir', '/S', '/Q', path, dry_run=False)
def _GenerateSparseCheckoutPathAndContents(repo):
"""Generates the path to the sparse checkout file, and the desired
contents. Returns a tuple of (path, contents). |repo| is a RepoOptions object.
"""
sparse_file = os.path.join(repo.checkout_dir, '.git', 'info',
'sparse-checkout')
if not repo.remote_dir:
contents = '*\n'
else:
contents = _NormalizeGitPath(repo.remote_dir) + '\n'
return (sparse_file, contents)
def _HasValidSparseCheckoutConfig(repo):
  """Determines whether the checkout described by RepoOptions |repo| has a
  sparse-checkout file with exactly the expected contents.

  Returns True or False; a missing or unreadable file yields False.
  """
  (sparse_file, contents) = _GenerateSparseCheckoutPathAndContents(repo)
  try:
    # BUG FIX: context manager so the file handle is closed even when the
    # comparison raises (the original leaked it).
    with open(sparse_file, 'rb') as f:
      return f.read() == contents
  except IOError:
    return False
def _CreateCheckout(path, repo, dry_run):
  """Creates a sparse git checkout in the provided |path|.

  The |path| must not already exist. Uses the repository configuration from
  the provided |repo| RepoOptions object. Only actually creates the checkout
  if |dry_run| is False.
  """
  # We expect the directory not to exist, as this is a fresh checkout we are
  # creating.
  if not dry_run:
    if os.path.exists(path):
      raise Exception('Checkout directory already exists: %s' % path)
  _LOGGER.debug('Creating checkout directory: %s', path)
  if not dry_run:
    os.makedirs(path)
  # Initialize an empty repo, point it at the remote, and enable
  # sparse-checkout mode before anything is fetched.
  _LOGGER.debug('Initializing the checkout.')
  _Shell('git', 'init', cwd=path, dry_run=dry_run)
  _Shell('git', 'remote', 'add', 'origin', repo.repository, cwd=path,
      dry_run=dry_run)
  _Shell('git', 'config', 'core.sparsecheckout', 'true', cwd=path,
      dry_run=dry_run)
  if not dry_run:
    _LOGGER.debug('Creating sparse checkout configuration file for '
        'directory: %s', repo.remote_dir)
  if not dry_run:
    # NOTE: |path| is rebound here to the sparse-checkout file path.
    (path, contents) = _GenerateSparseCheckoutPathAndContents(repo)
    with open(path, 'wb') as io:
      io.write(contents)
def _UpdateCheckout(path, repo, dry_run):
  """Updates the GIT checkout in |path| to the revision configured in
  RepoOptions |repo|.

  First attempts a plain 'git checkout' in case the revision is already
  present locally; on failure falls back to a shallow fetch of just that
  revision followed by a checkout. Only actually runs if |dry_run| is False.
  """
  try:
    # Try a checkout first. If this fails then we'll actually need to fetch
    # the revision.
    _LOGGER.debug('Trying to checkout revision %s.', repo.revision)
    _Shell('git', 'checkout', repo.revision, cwd=path,
        dry_run=dry_run)
    return
  except RuntimeError:
    pass
  # Fetch the revision and then check it out. Let output go to screen rather
  # than be buffered.
  _LOGGER.debug('Fetching and checking out revision %s.', repo.revision)
  _Shell('git', 'fetch', '--depth=1', 'origin', repo.revision,
      cwd=path, dry_run=dry_run, stdout=None, stderr=None)
  _Shell('git', 'checkout', repo.revision, cwd=path,
      dry_run=dry_run, stdout=None, stderr=None)
# Used by _GetJunctionInfo to extract information about junctions from the
# output of a 'dir' command. Group 1 is the junction name, group 2 its target.
_DIR_JUNCTION_RE = re.compile(r'^.*<JUNCTION>\s+(.+)\s+\[(.+)\]$')


# TODO(chrisha): This is ugly, and there has to be a better way!
def _GetJunctionInfo(junction):
  """Returns the target of a junction, if it exists, None otherwise."""
  dirname = os.path.dirname(junction)
  basename = os.path.basename(junction)
  # 'dir /AL' lists the reparse points in |dirname|; scan its output for an
  # entry whose name matches |basename|.
  stdout, dummy_stderr = _Shell('dir', '/AL', '/N', dirname, dry_run=False)
  for line in stdout.splitlines(False):
    m = _DIR_JUNCTION_RE.match(line)
    if not m:
      continue
    # Windows filenames are case-insensitive; compare lowercased.
    if m.group(1).lower() == basename.lower():
      return m.group(2)
  return None
def _EnsureJunction(cache_dir, options, repo):
  """Ensures that a junction exists from |repo|.output_dir to the checked-out
  subdirectory |repo|.remote_dir under |cache_dir|.

  Raises an Exception if the checkout lacks the expected subdirectory, or if
  the output path exists but is not a junction.
  """
  # Ensure that the target directory was created.
  target_dir = _GetCasedFilename(os.path.normpath(
      os.path.join(cache_dir, repo.remote_dir)))
  if not options.dry_run and not os.path.isdir(target_dir):
    raise Exception('Checkout does not contain the desired remote folder.')
  # Determine if the link needs to be created.
  create_link = True
  if os.path.exists(repo.output_dir):
    # |dest| is None when the path exists but is not a junction.
    dest = _GetJunctionInfo(repo.output_dir)
    if dest is None:
      raise Exception(
          'Target exists and is not a junction: %s' % repo.output_dir)
    # If the junction is valid nothing needs to be done. If it points to the
    # wrong place then delete the existing junction and let it be remade.
    if dest == target_dir:
      _LOGGER.debug('Junction is up to date.')
      create_link = False
    else:
      _LOGGER.debug('Erasing existing junction: %s', repo.output_dir)
      _Shell('rmdir', '/S', '/Q', repo.output_dir, dry_run=options.dry_run)
  if create_link:
    _LOGGER.debug('Creating output junction: %s', repo.output_dir)
    _Shell('mklink', '/J', repo.output_dir, target_dir,
        dry_run=options.dry_run)
def _InstallRepository(options, repo):
  """Installs a single dependency as described by RepoOptions |repo|.

  Reuses the cached checkout when its origin, sparse-checkout configuration
  and revision already match; otherwise recreates and/or updates it, then
  ensures the output junction exists. Assumes that the cache directory
  already exists.
  """
  _LOGGER.debug('Processing directory "%s" from repository "%s".',
      repo.remote_dir, repo.repository)
  # Ensure the output directory's *parent* exists.
  output_dirname = os.path.dirname(repo.output_dir)
  output_basename = os.path.basename(repo.output_dir)
  _EnsureDirectoryExists(output_dirname, 'output', options.dry_run)
  # Get the properly cased names for the output directories.
  output_dirname = _GetCasedFilename(output_dirname)
  repo.output_dir = os.path.join(output_dirname, output_basename)
  # These are the 3 basic steps that need to occur. Depending on the state of
  # the checkout we may not need to perform all of them. We assume initially
  # that everything needs to be done, unless proven otherwise.
  create_checkout = True
  update_checkout = True
  # If the cache directory exists then lookup the repo and the revision and see
  # what needs to be updated.
  threads = []
  if os.path.exists(repo.checkout_dir):
    keep_cache_dir = False
    # Only run these checks if we're not in 'force' mode. Otherwise, we
    # deliberately turf the cache directory and start from scratch.
    if not options.force and _IsGitCheckoutRoot(repo.checkout_dir):
      # Get the repo origin.
      repo_url = _GetGitOrigin(repo.checkout_dir)
      if (repo_url == repo.repository and
          _HasValidSparseCheckoutConfig(repo)):
        _LOGGER.debug('Checkout is for correct repository and subdirectory.')
        keep_cache_dir = True
        create_checkout = False
        # Get the checked out revision.
        revhash = _GetGitHead(repo.checkout_dir)
        if revhash == repo.revision:
          _LOGGER.debug('Checkout is already up to date.')
          update_checkout = False
    if not keep_cache_dir:
      # The old checkout directory is renamed and erased in a separate thread
      # so that the new checkout can start immediately.
      _LOGGER.debug('Erasing stale checkout directory: %s', repo.checkout_dir)
      newpath = _RenameCheckout(repo.checkout_dir, options.dry_run)
      body = lambda: _DeleteCheckout(newpath, options.dry_run)
      thread = threading.Thread(target=body)
      threads.append(thread)
      thread.start()
  # Create and update the checkout as necessary.
  if create_checkout:
    _CreateCheckout(repo.checkout_dir, repo, options.dry_run)
  else:
    _LOGGER.debug('Reusing checkout directory: %s', repo.checkout_dir)
  if update_checkout:
    _UpdateCheckout(repo.checkout_dir, repo, options.dry_run)
  # Ensure the junction exists.
  _EnsureJunction(repo.checkout_dir, options, repo)
  # Join any worker threads that are ongoing.
  for thread in threads:
    thread.join()
def _FindGlobalVariableInAstTree(tree, name, functions=None):
"""Finds and evaluates to global assignment of the variables |name| in the
AST |tree|. Will allow the evaluations of some functions as defined in
|functions|.
"""
if functions is None:
functions = {}
class FunctionEvaluator(ast.NodeTransformer):
"""A tree transformer that evaluates permitted functions."""
def visit_BinOp(self, binop_node):
"""Is called for BinOp nodes. We only support string additions."""
if type(binop_node.op) != ast.Add:
return binop_node
left = ast.literal_eval(self.visit(binop_node.left))
right = ast.literal_eval(self.visit(binop_node.right))
value = left + right
new_node = ast.Str(s=value)
new_node = ast.copy_location(new_node, binop_node)
return new_node
def visit_Call(self, call_node):
"""Evaluates function calls that return a single string as output."""
func_name = call_node.func.id
if func_name not in functions:
return call_node
func = functions[func_name]
# Evaluate the arguments. We don't care about starargs, keywords or
# kwargs.
args = [ast.literal_eval(self.visit(arg)) for arg in
call_node.args]
# Now evaluate the function.
value = func(*args)
new_node = ast.Str(s=value)
new_node = ast.copy_location(new_node, call_node)
return new_node
# Look for assignment nodes.
for node in tree.body:
if type(node) != ast.Assign:
continue
# Look for assignment in the 'store' context, to a variable with
# the given name.
for target in node.targets:
if type(target) != ast.Name:
continue
if type(target.ctx) != ast.Store:
continue
if target.id == name:
value = FunctionEvaluator().visit(node.value)
value = ast.fix_missing_locations(value)
value = ast.literal_eval(value)
return value
def _ParseDepsFile(path):
  """Parses a DEPS-like file at the given |path| and returns its 'deps'
  dictionary, with Var() references expanded from the 'vars' dictionary."""
  # Utility function for performing variable expansions; it closes over
  # |vars_dict|, which is rebound below once 'vars' has been evaluated.
  vars_dict = {}
  def _Var(s):
    return vars_dict[s]

  # BUG FIX: context manager so the file handle is closed after reading
  # (the original leaked it).
  with open(path, 'rb') as f:
    contents = f.read()
  tree = ast.parse(contents, path)
  vars_dict = _FindGlobalVariableInAstTree(tree, 'vars')
  deps_dict = _FindGlobalVariableInAstTree(
      tree, 'deps', functions={'Var': _Var})
  return deps_dict
def _RemoveOrphanedJunction(options, junction):
  """Removes an orphaned junction at the path |junction| (relative to
  options.output_dir), then prunes any parent directories left empty.

  Raises an Exception if the path doesn't exist or is not a junction.
  """
  _LOGGER.debug('Removing orphaned junction: %s', junction)
  absdir = os.path.join(options.output_dir, junction)
  if not _GetJunctionInfo(absdir):
    _LOGGER.error('Path is not a junction: %s', absdir)
    raise Exception()
  _Shell('rmdir', '/S', '/Q', absdir, dry_run=options.dry_run)

  # Walk up the directory tree, removing parents that are now empty.
  reldir = os.path.dirname(junction)
  while reldir:
    # BUG FIX: removed a stray 'print reldir' debug statement that was left
    # behind here (it also used Python-2-only print syntax).
    absdir = os.path.join(options.output_dir, reldir)
    if os.listdir(absdir):
      return
    _LOGGER.debug('Removing empty parent directory of junction: %s', absdir)
    _Shell('rmdir', '/S', '/Q', absdir, dry_run=options.dry_run)
    reldir = os.path.dirname(reldir)
def main():
  """Entry point: parses GITDEPS files and installs each dependency."""
  options, args = _ParseCommandLine()

  # Ensure the cache directory exists and get the full properly cased path to
  # it.
  _EnsureDirectoryExists(options.cache_dir, 'cache', options.dry_run)
  options.cache_dir = _GetCasedFilename(options.cache_dir)

  # Read junctions that have been written in previous runs.
  state_path = os.path.join(options.cache_dir, '.gitdeps_junctions')
  old_junctions = {}
  if os.path.exists(state_path):
    _LOGGER.debug('Loading list of existing junctions.')
    with open(state_path, 'rb') as state:
      for j in state:
        old_junctions[j.strip()] = True

  # Parse each deps file in order, and extract the dependencies, looking for
  # conflicts in the output directories.
  output_dirs = {}
  all_deps = []
  for deps_file in args:
    deps = _ParseDepsFile(deps_file)
    # .items() behaves identically to the Python-2-only .iteritems() here.
    for key, value in deps.items():
      repo_options = _ParseRepoOptions(
          options.cache_dir, options.output_dir, deps_file, key, value)
      if repo_options.output_dir in output_dirs:
        other_repo_options = output_dirs[repo_options.output_dir]
        _LOGGER.error('Conflicting output directory: %s',
                      repo_options.output_dir)
        _LOGGER.error('First specified in file: %s',
                      other_repo_options.deps_file)
        _LOGGER.error('And then specified in file: %s', repo_options.deps_file)
      output_dirs[repo_options.output_dir] = repo_options
      all_deps.append(repo_options)

  # Handle each dependency, in order of shortest output paths first. This
  # ensures that nested dependencies are handled properly.
  # BUG FIX: the original computed a sorted list but then iterated the
  # unsorted one, and sorted by the deps-file path length rather than the
  # output path that the comment describes.
  checkout_dirs = {}
  junctions = []
  for repo in sorted(all_deps, key=lambda x: len(x.output_dir)):
    _InstallRepository(options, repo)
    checkout_dirs[repo.checkout_dir] = True
    junction = os.path.relpath(repo.output_dir, options.output_dir)
    old_junctions.pop(junction, None)
    # Write each junction as we create it. This allows for recovery from
    # partial runs.
    if not options.dry_run:
      with open(state_path, 'ab') as state:
        state.write(junction + '\n')
    junctions.append(junction)

  # Clean up orphaned junctions if there are any.
  if old_junctions:
    _LOGGER.debug('Removing orphaned junctions.')
    for j in old_junctions:
      _RemoveOrphanedJunction(options, j)

  # Output the final list of junctions.
  _LOGGER.debug('Writing final list of junctions.')
  if not options.dry_run:
    with open(state_path, 'wb') as io:
      for j in sorted(junctions):
        io.write(j)
        io.write('\n')

  # Iterate all directories in the cache directory. Any that we didn't
  # specifically create or update should be cleaned up. Do this in parallel
  # so things are cleaned up as soon as possible.
  threads = []
  for path in glob.glob(os.path.join(options.cache_dir, '*')):
    if path not in checkout_dirs:
      _LOGGER.debug('Erasing orphaned checkout directory: %s', path)
      # BUG FIX: pass |path| explicitly; the original's late-binding lambda
      # would see whichever value |path| held when the thread actually ran,
      # not this iteration's value.
      thread = threading.Thread(target=_DeleteCheckout,
                                args=(path, options.dry_run))
      threads.append(thread)
      thread.start()
  for thread in threads:
    thread.join()

  return
# Script entry point.
if __name__ == '__main__':
  main()
|
pombreda/syzygy
|
syzygy/build/gitdeps.py
|
Python
|
apache-2.0
| 25,203
|
[
"VisIt"
] |
9f711511b98206a570a4faf6bd5bb033a7a88a4ad5e836b50bd5b8ab16ce7b09
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pylid
from collections import Counter
cs = pylid.PyLID(3)
cs.total_ngrams = 84202116
cs.lang = 'cs'
cs.ngrams = Counter({
u'n\xed#': 669406,
u'#po': 531349,
u'#pr': 516175,
u'ch#': 482809,
u'#a#': 479692,
u'pro': 378527,
u'#p\u0159': 374999,
u'#je': 335427,
u'#na': 329680,
u'to#': 329630,
u'ost': 324813,
u'#ne': 296387,
u'je#': 295588,
u'na#': 289415,
u'se#': 274673,
u'#v#': 268707,
u'ou#': 266895,
u'#se': 250066,
u'\u017ee#': 240080,
u'#do': 235352,
u'en\xed': 226532,
u'ho#': 218795,
u'#za': 216222,
u'#\u017ee': 203475,
u'#st': 202189,
u'em#': 200601,
u'p\u0159e': 198694,
u'e#p': 197656,
u'\xfdch': 194890,
u'ova': 191506,
u'\xed#p': 190747,
u'n\u011b#': 185743,
u'ter': 184249,
u'#by': 183741,
u'kte': 182697,
u'\u0159ed': 182384,
u'k\xe9#': 182373,
u'#to': 179086,
u'sti': 177991,
u'n\xe9#': 176572,
u'e#s': 174932,
u'ce#': 172621,
u'\xedm#': 171441,
u'me#': 167582,
u'#kt': 164535,
u'rop': 164320,
u'a#p': 164169,
u'#ro': 163480,
u'nos': 163336,
u'e#v': 161464,
u'ti#': 161307,
u'#ko': 160538,
u'pod': 158054,
u'o#p': 156695,
u'evr': 155405,
u'vro': 155003,
u'by#': 151579,
u'pr\xe1': 149073,
u'#pa': 148324,
u'at#': 146170,
u'\xe1n\xed': 146129,
u'#ev': 145027,
u'sk\xe9': 144678,
u'#ob': 144479,
u'#vy': 144061,
u'\xedch': 143803,
u'mi#': 137891,
u'it#': 135887,
u'la#': 135589,
u'spo': 134579,
u'sou': 132867,
u'e#n': 131856,
u'#ve': 131460,
u'pol': 130593,
u'ov\xe1': 130275,
u'li#': 130004,
u'#o#': 127222,
u'#sp': 126618,
u't\u0159e': 126323,
u'ci#': 124753,
u'edn': 123679,
u'sta': 122372,
u'v\xe1n': 121603,
u'pra': 121335,
u'r\xe1v': 120993,
u'ops': 120402,
u'#n\xe1': 120260,
u'psk': 119914,
u'\xed#s': 118143,
u'#js': 117968,
u'c\xed#': 116447,
u'ro#': 116150,
u'roz': 115294,
u'e#t': 113615,
u'e#z': 113176,
u'hod': 111889,
u'vat': 110973,
u'st#': 110961,
u'ny#': 110766,
u'u#p': 109662,
u'p\u0159i': 109418,
u'#z\xe1': 109354,
u'ent': 109302,
u'\xe9ho': 109226,
u'p\u0159\xed': 108666,
u'ky#': 107734,
u'e\u010dn': 105222,
u'#ja': 105207,
u'le#': 103600,
u'nov': 102262,
u'rav': 102083,
u'kon': 102022,
u'o#s': 100474,
u'#v\xfd': 100193,
u'st\xe1': 100124,
u'odn': 98874,
u'i#p': 98723,
u'ak#': 97917,
u'm#p': 97666,
u'rov': 97489,
u'#so': 97467,
u'a#s': 97332,
u'jak': 97201,
u'a#v': 96307,
u'aj\xed': 96099,
u'\xedc\xed': 95699,
u'#ta': 95281,
u'\xed#a': 94894,
u'j\xed#': 94726,
u'uje': 94216,
u'\xed#v': 94053,
u'oli': 93953,
u'cho': 93804,
u'pan': 93735,
u'j\xedc': 93227,
u'#ze': 93100,
u'e#j': 91664,
u'vn\xed': 90978,
u'n\xedm': 90925,
u'len': 90899,
u'ech': 90612,
u'dy#': 90554,
u'de#': 90515,
u'ick': 90483,
u'#s#': 90429,
u'bud': 89851,
u'#od': 88781,
u'tu#': 88631,
u'las': 87649,
u'mu#': 87621,
u'sk\xfd': 87383,
u'oto': 87132,
u'#ab': 86747,
u'o#n': 85451,
u'aby': 85443,
u'a#z': 84610,
u'n\xedc': 84501,
u'v\xe9#': 83728,
u'jed': 83719,
u'h#p': 83591,
u'str': 82966,
u'pos': 82781,
u'o#z': 82557,
u'#k#': 82005,
u'a#n': 81993,
u't\xed#': 81859,
u'dn\xed': 81650,
u'lit': 81241,
u'oho': 81208,
u'ych': 81033,
u'o#v': 80761,
u'ast': 80506,
u'#bu': 80469,
u'men': 80462,
u'byl': 80411,
u'tak': 80369,
u'omi': 80341,
u'ist': 80112,
u'ran': 79755,
u'\xed#n': 79653,
u'kom': 79460,
u'led': 79312,
u'e#m': 79253,
u'm#s': 79242,
u'\u017eit': 79135,
u'tn\xed': 79020,
u'nou': 78954,
u'\xe9#p': 77688,
u'ku#': 77223,
u'\u0161\xed#': 77119,
u'er\xe9': 76797,
u'#mo': 76758,
u'tra': 76230,
u'r\xe9#': 76227,
u'#sv': 75969,
u'mis': 75871,
u'\xe9#s': 75779,
u'rot': 75765,
u'u#a': 75250,
u'a#t': 75015,
u'y#p': 74652,
u'lo#': 74625,
u'val': 74480,
u'nsk': 74401,
u'u#s': 74319,
u'e#o': 73799,
u'nic': 73776,
u'i#v': 73412,
u'ov\xe9': 73350,
u'i#s': 72524,
u'ly#': 72383,
u'eds': 72140,
u'v\xed#': 71667,
u'sku': 71535,
u'ole': 71377,
u'\xed#z': 71096,
u'\xfdm#': 70872,
u't#p': 70798,
u'ty#': 70315,
u'y#a': 69688,
u'tv\xed': 69437,
u'byc': 69393,
u'ros': 69254,
u'#zp': 69238,
u'odp': 69235,
u'\xe1m#': 69226,
u'sed': 69079,
u'ovn': 68537,
u'y#s': 68371,
u'dpo': 68239,
u'e#d': 68142,
u'du#': 68073,
u'dno': 67863,
u't\xe1t': 67799,
u'k\xfdc': 67692,
u'stu': 67105,
u'u#v': 66934,
u'zem': 66896,
u'\xed#k': 66797,
u'a#j': 66667,
u'n\xe1#': 66622,
u'mus': 66584,
u'ie#': 66099,
u'#v\u0161': 66046,
u'\u017een': 65731,
u'ako': 65648,
u'ln\xed': 65338,
u'e#k': 64780,
u'pot': 64620,
u'sto': 64286,
u'zpr': 64248,
u'#re': 64187,
u'n\xfdc': 64142,
u'#si': 63731,
u'ko#': 63719,
u'h#s': 63710,
u'obl': 63601,
u'tel': 62807,
u'#in': 62787,
u've#': 62744,
u'tic': 62662,
u'lad': 62493,
u'o#j': 62247,
u'\xe1ln': 61658,
u'ame': 61348,
u'e#a': 61000,
u'\xed#o': 60980,
u'uj\xed': 60955,
u'i#a': 60892,
u'ali': 60656,
u'#z#': 60483,
u'i#n': 60393,
u'hla': 60369,
u'par': 60328,
u'y#v': 60292,
u'iti': 60041,
u'ens': 60035,
u'#d\u016f': 59822,
u'do#': 59722,
u'oje': 59661,
u'#mu': 59501,
u'o#o': 59452,
u'sem': 58931,
u'#li': 58873,
u'\xedho': 58529,
u'us\xed': 58409,
u'#zd': 58176,
u'pok': 58090,
u'daj': 57805,
u'\u010dn\xed': 57709,
u'a#k': 57407,
u'eme': 56958,
u'#m\u011b': 56946,
u'kla': 56772,
u'\u0159en': 56699,
u'por': 56671,
u'ne#': 56669,
u'en#': 56645,
u'#ho': 56582,
u'uni': 56580,
u'\xe1va': 56292,
u'dse': 56291,
u'\xe9m#': 56285,
u'n\xfd#': 55889,
u'o#d': 55683,
u'\xed#d': 55449,
u'kou': 55216,
u'ate': 55008,
u'v\u011bt': 54949,
u'#te': 54688,
u'u#n': 54621,
u'#ji': 54544,
u'tom': 54487,
u'o#t': 54387,
u't#s': 54335,
u'dem': 54178,
u'dob': 54144,
u'oru': 54069,
u'k\xe1#': 53967,
u'rac': 53947,
u'ale': 53915,
u'za#': 53777,
u'y#n': 53592,
u'a#o': 53569,
u'nit': 53545,
u'e\u017ei': 53516,
u'e#b': 53411,
u'ohl': 53355,
u'dou': 53044,
u'ude': 53039,
u'#me': 52682,
u'tat': 52626,
u'#sk': 52604,
u'i#z': 52564,
u'est': 52438,
u'nej': 52325,
u'#tr': 52247,
u'ze#': 52107,
u'ina': 52088,
u'ka#': 51763,
u't#v': 51759,
u'a#d': 51696,
u'\u010dle': 51539,
u'#ch': 51532,
u'dos': 51368,
u'le\u017e': 51275,
u'\u0159eb': 51261,
u'nes': 51044,
u'jso': 51035,
u'pov': 50781,
u'nu#': 50695,
u'#eu': 50683,
u'oku': 50677,
u'd\u011bl': 50660,
u'neb': 50369,
u'mez': 50179,
u'ich': 50080,
u'va#': 49971,
u'#un': 49902,
u'm\u011bl': 49796,
u'ace': 49697,
u'opa': 49585,
u'#\u010dl': 49569,
u'om#': 49489,
u'ani': 49431,
u'kol': 49152,
u'ede': 49098,
u'#v\u011b': 48689,
u'u#z': 48594,
u'\xedme': 48535,
u'jen': 48443,
u'lam': 48144,
u'vel': 47986,
u'm#z': 47717,
u'#t\xe9': 47681,
u'u#k': 47673,
u't#n': 47474,
u'a#m': 47341,
u'nem': 47153,
u'rob': 47128,
u'#al': 46860,
u'\xed#j': 46774,
u'\u010dno': 46641,
u'#ce': 46543,
u'kov': 46518,
u'ati': 46507,
u'tov': 46481,
u'ise': 46275,
u'st\u0159': 46236,
u'm#v': 46200,
u'n\xe1v': 46102,
u'vy#': 46097,
u'ane': 45939,
u'\u0161en': 45912,
u'ili': 45905,
u'lid': 45863,
u'\u011b#p': 45834,
u'ji#': 45826,
u'hu#': 45719,
u'ru#': 45606,
u'stn': 45515,
u'zna': 45508,
u'si#': 45295,
u'ste': 45061,
u'ezi': 44941,
u'y#k': 44864,
u'\xed#t': 44785,
u'lov': 44754,
u'#mi': 44682,
u'#n\u011b': 44659,
u'hle': 44560,
u'arl': 44456,
u'n\xedh': 44333,
u'rla': 44251,
u'\u010den': 44045,
u'nan': 43869,
u'le\u010d': 43808,
u'o#r': 43730,
u'rok': 43600,
u'rod': 43556,
u't\u011b#': 43423,
u'\xe9#z': 43374,
u'#hl': 43359,
u'tiv': 43320,
u'er\xfd': 43312,
u'ck\xe9': 43311,
u'ovo': 43264,
u'da#': 43054,
u'#kr': 42975,
u'i#j': 42932,
u'#i#': 42875,
u'i#k': 42855,
u'\xed#m': 42853,
u'y#m': 42846,
u'sle': 42829,
u'#pl': 42745,
u'ni#': 42708,
u'a#b': 42678,
u'v#p': 42673,
u'ak\xe9': 42462,
u'ov\xfd': 42421,
u'#sm': 42251,
u'cov': 41879,
u't#a': 41841,
u'jse': 41687,
u'#op': 41532,
u'tup': 41467,
u'vrh': 41393,
u'stv': 41343,
u'\u010din': 41226,
u'v\u0161e': 41205,
u'k\xe9h': 41111,
u'zen': 41062,
u'\xe1le': 40979,
u'm#n': 40770,
u'bla': 40718,
u'#co': 40594,
u'te\u010d': 40567,
u'jej': 40473,
u'm#k': 40358,
u'v\u011bd': 40228,
u'#le': 40141,
u'bez': 40103,
u'ved': 40081,
u'#ma': 40041,
u'o\u017ee': 39976,
u'y#z': 39861,
u'hra': 39786,
u'o#m': 39539,
u'sko': 39531,
u'v#t': 39477,
u'\u011bn\xed': 39471,
u'udo': 39469,
u'aci': 39394,
u'em\u011b': 39334,
u'n\xfdm': 39320,
u'\xed#b': 39197,
u'a#r': 39118,
u'#b\xfd': 39089,
u'\xfd#p': 39062,
u'rad': 39015,
u'\u010das': 39012,
u'a#a': 39001,
u'eda': 38941,
u'v\xe1#': 38903,
u'u#j': 38844,
u'h#z': 38844,
u'na\u0161': 38764,
u's\xed#': 38758,
u'o#b': 38716,
u'\u016fm#': 38680,
u'st\xed': 38675,
u'#m\xed': 38637,
u'pad': 38593,
u'ot\u0159': 38581,
u'tik': 38510,
u'#kd': 38493,
u'rat': 38389,
u'dov': 38221,
u'my#': 38092,
u'vu#': 38073,
u'v\xfdc': 38057,
u'roj': 38034,
u'ito': 37922,
u'eu#': 37901,
u'sob': 37842,
u'y#b': 37798,
u'n\u011bn': 37783,
u'tro': 37751,
u'vod': 37741,
u'och': 37729,
u'itu': 37712,
u'o#k': 37588,
u'lat': 37572,
u'er\xe1': 37536,
u'\xfdt#': 37492,
u'#sl': 37466,
u'poz': 37339,
u'#de': 37329,
u'\xe9#v': 37237,
u'b\xfdt': 37201,
u'\xed#r': 37198,
u'\xe9to': 37160,
u'lou': 37018,
u'odu': 36892,
u'#vz': 36802,
u'#ra': 36797,
u'nie': 36764,
u'cht': 36760,
u'co#': 36639,
u't\xe9t': 36436,
u'oci': 36309,
u'r\xe1#': 36104,
u'i#o': 35996,
u'toh': 35968,
u'\xfdmi': 35912,
u'osp': 35877,
u'#ot': 35867,
u'egi': 35824,
u'm#a': 35709,
u'v\u011b#': 35678,
u'jem': 35670,
u'i#d': 35659,
u'tav': 35656,
u'\xedst': 35639,
u'\xe1vn': 35602,
u'ek#': 35512,
u'ite': 35377,
u'k\xfd#': 35289,
u'ten': 35276,
u'ner': 35274,
u'slo': 35256,
u'mo\u017e': 35235,
u'u#o': 35232,
u'sme': 35228,
u'vn\u011b': 35183,
u'ano': 35149,
u'm\u011b#': 35061,
u'sv\u011b': 34928,
u'#be': 34914,
u'h#v': 34911,
u'sk\xe1': 34763,
u's\xedm': 34728,
u'#zn': 34654,
u'y#j': 34644,
u'tec': 34625,
u'n\xe9h': 34611,
u'ac\xed': 34608,
u'nik': 34519,
u'#r\xe1': 34501,
u'\xedle': 34491,
u'nam': 34487,
u'ice': 34446,
u'pom': 34352,
u'te#': 34214,
u'od\xe1': 34207,
u'd\u011b#': 34170,
u'fin': 34131,
u'ovi': 34073,
u'out': 34071,
u'okr': 34065,
u'jsm': 34060,
u'ud#': 34058,
u'\xe9#a': 34035,
u'\xe1#p': 34002,
u'j\u0161\xed': 33960,
u'\xe1vr': 33906,
u'kut': 33883,
u'm#\u017e': 33875,
u'eno': 33819,
u'#da': 33808,
u'ekt': 33713,
u'asn': 33700,
u'moh': 33553,
u'#no': 33549,
u'cel': 33518,
u'no#': 33508,
u'dni': 33501,
u'od#': 33449,
u't\u016f#': 33378,
u'den': 33331,
u'ebo': 33320,
u'#fi': 33266,
u'nut': 33237,
u'vol': 33117,
u'\xe9#u': 33113,
u'ck\xfd': 33000,
u'k\xfdm': 32984,
u't#z': 32952,
u'dal': 32949,
u'hom': 32940,
u'm\xe9n': 32868,
u'sov': 32803,
u'#\u0159e': 32701,
u'#ka': 32621,
u'i#t': 32572,
u'\u0159\xed#': 32487,
u'\u0159i#': 32481,
u'uto': 32470,
u'edo': 32356,
u'al#': 32313,
u'ven': 32302,
u'en\xfd': 32298,
u'o\u017en': 32278,
u'n\xe1s': 32240,
u't\xedm': 32230,
u'inn': 32080,
u'et#': 32060,
u'adn': 32053,
u'ala': 32035,
u'van': 32032,
u'#t\u011b': 32010,
u'oce': 31863,
u'r\xe1c': 31859,
u'kdy': 31813,
u'ejn': 31791,
u'\xe1me': 31788,
u'\u0161t\u011b': 31739,
u'ntu': 31647,
u'v\xe1m': 31630,
u'\u0159ej': 31589,
u'#ni': 31538,
u'\u011bj\u0161': 31429,
u'omo': 31379,
u'hov': 31350,
u'#v\xe1': 31300,
u'#v\xed': 31299,
u'm#j': 31287,
u'ivn': 31270,
u'ysl': 31258,
u'\u016fle': 31181,
u'edk': 31128,
u'k\u016f#': 31126,
u'n\u011bj': 30979,
u'anc': 30949,
u'pat': 30919,
u'r\xfd#': 30806,
u'chn': 30804,
u'#zv': 30772,
u'ejm': 30760,
u'isk': 30758,
u'e#\u017e': 30748,
u'v#r': 30664,
u'zi#': 30653,
u'ele': 30569,
u'h#o': 30456,
u'\xe1ro': 30441,
u'an\xed': 30403,
u'd\u016fl': 30400,
u'ute': 30398,
u'en\xe9': 30335,
u'avi': 30311,
u'i#m': 30241,
u'z\xe1k': 30167,
u'\u0159ij': 30128,
u'\u011b#v': 30101,
u'ila': 30067,
u'e#r': 29983,
u'o#a': 29983,
u'olu': 29930,
u'ot\xe1': 29925,
u'liv': 29852,
u'\xe9mu': 29789,
u'sil': 29706,
u'orm': 29673,
u'\xe1#s': 29643,
u'voj': 29627,
u'moc': 29546,
u'u#m': 29446,
u'il#': 29435,
u'\xed#e': 29406,
u'alo': 29352,
u'pou': 29299,
u'\xe1\u0159s': 29191,
u'zac': 29149,
u't\xe9#': 29060,
u'#vl': 28986,
u'u#d': 28934,
u'c\xedc': 28887,
u'ii#': 28885,
u't\xe1z': 28865,
u'\u0161\xedm': 28847,
u'aco': 28806,
u'ad\u011b': 28730,
u'#m\xe1': 28709,
u'hos': 28691,
u'\u0159es': 28677,
u'let': 28674,
u'ut#': 28655,
u't\xe9m': 28549,
u'pla': 28523,
u'\xe1v\xe1': 28450,
u'e##': 28423,
u'oda': 28421,
u'kra': 28395,
u'ad#': 28340,
u'dop': 28317,
u'kud': 28296,
u'ylo': 28258,
u'#us': 28256,
u'kro': 28253,
u'y#o': 28216,
u'eji': 28211,
u'#bo': 28120,
u'for': 28077,
u'y#t': 28024,
u'vin': 28004,
u'dn\u011b': 27981,
u'd\xed#': 27979,
u'nce': 27958,
u'\u0159\xeds': 27953,
u'eln': 27944,
u'\xe1kl': 27883,
u'#os': 27881,
u'ozp': 27869,
u'n\xe1m': 27864,
u'ry#': 27857,
u'ove': 27832,
u'mno': 27786,
u'm\u011bn': 27754,
u'n\u011bk': 27750,
u'h#a': 27713,
u'nez': 27706,
u'ezp': 27696,
u'tit': 27694,
u'tan': 27650,
u'm\u016f\u017e': 27642,
u'ta#': 27640,
u'zm\u011b': 27635,
u'#zm': 27629,
u'ej\xed': 27615,
u'odl': 27571,
u'edl': 27559,
u'\u016f\u017ee': 27546,
u't#j': 27521,
u'\u010dn\u011b': 27518,
u'\u0159sk': 27485,
u'vou': 27476,
u'#di': 27455,
u'ov\u011b': 27446,
u'n\xe1r': 27416,
u'\xe9#n': 27391,
u'otn': 27334,
u'aso': 27321,
u'avo': 27320,
u'adu': 27283,
u'ave': 27255,
u'mys': 27254,
u'\u016f#a': 27253,
u'lo\u017e': 27209,
u'ces': 27202,
u'\u011bla': 27196,
u'yst': 27171,
u'\xedt#': 27166,
u'e#e': 27158,
u'\u011b#s': 27096,
u'kri': 27063,
u'dst': 26996,
u'ene': 26936,
u'\u011b#n': 26900,
u'c\xedl': 26881,
u'vl\xe1': 26817,
u'oti': 26797,
u'po\u010d': 26786,
u'ion': 26768,
u'\u011bch': 26745,
u'e#u': 26656,
u'cen': 26643,
u'd\xe1\u0159': 26619,
u'm\u011br': 26576,
u'omu': 26556,
u'doh': 26540,
u'#tu': 26520,
u'\u0148ov': 26471,
u'k#p': 26470,
u'nep': 26468,
u'dne': 26454,
u'ika': 26451,
u'pe\u010d': 26438,
u'#sn': 26339,
u'ady': 26332,
u'sch': 26327,
u'jm\xe9': 26302,
u'aji': 26250,
u'\xe1st': 26245,
u'l\xe1d': 26243,
u'v\xfdz': 26231,
u'elm': 26227,
u'mat': 26208,
u'u#t': 26206,
u'#c\xed': 26160,
u'l\xe9m': 26128,
u'zho': 26075,
u'dis': 26062,
u'vni': 26010,
u'\xe9#d': 25956,
u't#o': 25926,
u'dr\u017e': 25903,
u'v\u0161a': 25871,
u'\u0161ak': 25864,
u'\u0161ec': 25864,
u'dn\xe1': 25845,
u'lmi': 25838,
u'v#o': 25838,
u'ouv': 25767,
u'm#t': 25708,
u'ini': 25662,
u'z\xe1v': 25655,
u'e\u017e#': 25630,
u'tou': 25581,
u'ena': 25559,
u'l#p': 25538,
u'dom': 25533,
u'm#o': 25518,
u'iky': 25515,
u'#t\u0159': 25506,
u'ato': 25501,
u't#k': 25407,
u'#mn': 25173,
u'#t\xfd': 25172,
u'sla': 25129,
u'olo': 25120,
u'o#\u017e': 25096,
u'tur': 25057,
u'osl': 25055,
u'ero': 25005,
u'emo': 24982,
u'eho': 24895,
u'#or': 24814,
u'zpe': 24795,
u'#\xfa\u010d': 24751,
u'chr': 24744,
u'at\u0159': 24665,
u'\xe9#o': 24657,
u'\u011b#z': 24542,
u'i#b': 24520,
u'jic': 24505,
u'h#n': 24486,
u'jis': 24478,
u'vis': 24475,
u'n\u016f#': 24419,
u'zam': 24342,
u'org': 24330,
u'\xedv\xe1': 24317,
u'eli': 24301,
u'es#': 24277,
u'zdr': 24269,
u'a#e': 24263,
u'ozv': 24205,
u't#d': 24191,
u'nst': 24177,
u'oko': 24121,
u'\xe9#j': 24094,
u'ou\u010d': 24078,
u'\u0159\xedp': 24062,
u'upi': 23994,
u'nto': 23889,
u'it\xe9': 23858,
u'poj': 23803,
u'not': 23755,
u'\u010dan': 23744,
u'ozh': 23739,
u'vo\u0159': 23715,
u't#\u017e': 23710,
u'vot': 23672,
u'akt': 23617,
u'zaj': 23603,
u'zor': 23590,
u'i\xe1l': 23586,
u'to\u017e': 23566,
u'ob\u010d': 23560,
u'n\u010dn': 23550,
u'tos': 23539,
u'b\u010da': 23533,
u'ivo': 23521,
u'ln\u011b': 23500,
u'nad': 23499,
u'v#s': 23492,
u'slu': 23482,
u'roc': 23474,
u'm#m': 23473,
u'#sa': 23420,
u'dlo': 23357,
u'a#\u017e': 23354,
u'kup': 23344,
u'rit': 23322,
u'isa': 23288,
u'v\xedc': 23262,
u'\xe1v\u011b': 23231,
u'p\u016fs': 23194,
u'\u016fso': 23189,
u'nil': 23167,
u'\xe9#k': 23144,
u'los': 23143,
u'm\xedn': 23110,
u'\xe1zk': 23108,
u'em\xed': 23108,
u'o#c': 23053,
u'u#b': 23023,
u'yto': 22989,
u'ctv': 22872,
u'#oc': 22847,
u'nal': 22829,
u'riz': 22793,
u'v#n': 22788,
u'odo': 22782,
u'lem': 22782,
u'ici': 22731,
u'#\u010di': 22681,
u'an\xe9': 22666,
u'k#t': 22659,
u'\u011bli': 22599,
u'i\u017e#': 22598,
u'\u010dn\xe9': 22592,
u'rom': 22584,
u'\u017e\xe1d': 22563,
u'lik': 22547,
u'sv\xe9': 22516,
u'\u011b#j': 22516,
u'obn': 22457,
u'am\u011b': 22399,
u'gra': 22361,
u'ada': 22360,
u'emi': 22359,
u'\u017eiv': 22269,
u'bo#': 22232,
u'bl\xe9': 22231,
u'tuj': 22208,
u'zov': 22204,
u'i#e': 22141,
u'bor': 22136,
u't\xe1l': 22111,
u'min': 22104,
u'sp\u011b': 22098,
u'ebu': 22043,
u'#vo': 21959,
u's#p': 21930,
u'ch\xe1': 21904,
u'\xe1ty': 21895,
u'st\xe9': 21866,
u'in\xe1': 21859,
u'el#': 21844,
u'\u0161e#': 21814,
u'\xe9#m': 21729,
u'iza': 21718,
u'k#n': 21715,
u'u\u017e\xed': 21712,
u'nen': 21710,
u'#\u017ei': 21695,
u'n\xe9m': 21694,
u'nte': 21694,
u'ba#': 21645,
u'uji': 21644,
u'an\u010d': 21641,
u'ci\xe1': 21626,
u'v#e': 21617,
u'noh': 21600,
u'vys': 21552,
u'sm\u011b': 21525,
u'nno': 21510,
u'nap': 21485,
u'm#d': 21463,
u'm\u011bs': 21432,
u'zvo': 21429,
u'jin': 21420,
u'raz': 21410,
u'ouh': 21356,
u'l\xe1n': 21355,
u'elk': 21340,
u'reg': 21336,
u'nav': 21326,
u'\xedmi': 21308,
u'l\u0161\xed': 21305,
u'ob\u011b': 21256,
u'hot': 21227,
u'zas': 21190,
u'ned': 21190,
u'h#k': 21110,
u'tor': 21099,
u'mto': 21093,
u'al\u0161': 21090,
u'#\u0159\xed': 21062,
u'hop': 21017,
u'lu#': 20988,
u'm\xe1#': 20970,
u'i#\u017e': 20967,
u'\xe1ch': 20946,
u'v\xe1\u017e': 20922,
u'trh': 20890,
u'zpo': 20889,
u'\xe1dn': 20871,
u'a#u': 20809,
u'#ba': 20739,
u'atn': 20731,
u't#t': 20714,
u'\xedze': 20703,
u'dn\xe9': 20697,
u'y#e': 20692,
u'pin': 20659,
u'rhu': 20635,
u'r\xe1m': 20622,
u'#dn': 20593,
u'soc': 20532,
u'isl': 20499,
u'#sc': 20459,
u'\u011bl#': 20451,
u'spr': 20451,
u'zej': 20424,
u'ren': 20417,
u'#en': 20374,
u'sit': 20371,
u'tn\u011b': 20358,
u'#up': 20323,
u'ody': 20316,
u'prv': 20315,
u'en\xe1': 20311,
u'teg': 20309,
u'#uv': 20278,
u'o#e': 20275,
u'lep': 20223,
u'tn\xe9': 20222,
u'k#v': 20190,
u'#sy': 20175,
u'\u0159e\u0161': 20174,
u'#d\xe1': 20131,
u'pln': 20105,
u'ouc': 20085,
u'enc': 20075,
u't\xfdk': 20013,
u'upr': 19977,
u'obe': 19951,
u'y#d': 19944,
u'\xe1da': 19935,
u'kti': 19904,
u'iny': 19874,
u'\u011bt\u0161': 19838,
u'z\xe1s': 19835,
u'zp\u016f': 19819,
u'nt#': 19805,
u'#kl': 19756,
u'o\u0159i': 19751,
u'erg': 19749,
u'yla': 19742,
u'opr': 19733,
u'#\xfas': 19723,
u'ori': 19717,
u'vyt': 19715,
u'r\xe1d': 19712,
u'a\u0161e': 19699,
u'm\xed#': 19678,
u'nci': 19663,
u'\u011bst': 19660,
u'ht\u011b': 19654,
u'eny': 19638,
u'ep\u0161': 19562,
u'ska': 19559,
u'zav': 19534,
u'ict': 19491,
u'#ty': 19474,
u'o#u': 19432,
u'\u0159\xedz': 19412,
u'emn': 19372,
u'lsk': 19363,
u'rh#': 19333,
u'any': 19314,
u'#ud': 19307,
u'bod': 19300,
u'leg': 19286,
u'ln\xe9': 19241,
u'u#r': 19235,
u'svo': 19208,
u'\u0159\xedk': 19126,
u'k\xe9m': 19120,
u'r\xe1n': 19111,
u'vid': 19065,
u'sys': 19004,
u'lan': 18993,
u'\u011b#a': 18984,
u'm\xeds': 18978,
u'ji\u017e': 18909,
u'ese': 18888,
u'tot': 18877,
u'obc': 18818,
u'vit': 18774,
u'\xedce': 18771,
u'#vn': 18694,
u't\u011bc': 18682,
u'\u017e#j': 18673,
u'obr': 18655,
u'bil': 18654,
u'ami': 18650,
u'h#d': 18611,
u'ilo': 18555,
u'sam': 18552,
u'###': 18537,
u'l\xe9#': 18520,
u'\xe1#v': 18494,
u'k#j': 18491,
u'sky': 18469,
u'dok': 18460,
u'n\xe1n': 18456,
u'\xedpa': 18426,
u'eti': 18388,
u'v\xfdm': 18375,
u'ode': 18372,
u'\u017eno': 18364,
u'#dr': 18362,
u'a\u010dn': 18358,
u'vac': 18349,
u'hy#': 18348,
u'ino': 18323,
u'rou': 18312,
u'eko': 18307,
u'tno': 18307,
u'u##': 18298,
u'\xe1t\u016f': 18290,
u'jde': 18285,
u'tua': 18254,
u'#m\u016f': 18248,
u'oj#': 18198,
u'\u0159it': 18198,
u'lis': 18191,
u'i#r': 18182,
u'tvo': 18147,
u'dle': 18083,
u'ytv': 18052,
u'#an': 18050,
u'oby': 18036,
u'mn\xed': 18034,
u'dru': 18021,
u'o\u017e#': 18017,
u'#t\xed': 18011,
u'te\u0159': 17969,
u'#\u010de': 17923,
u'dsk': 17901,
u'ed#': 17868,
u't\u011bl': 17841,
u'v\xedm': 17836,
u'din': 17782,
u'##p': 17766,
u'ont': 17760,
u'd\xe1l': 17756,
u'#oh': 17752,
u'\xed#c': 17742,
u'm#b': 17736,
u'sn\u011b': 17685,
u'pis': 17670,
u'ana': 17649,
u'\u011b#t': 17638,
u'eba': 17629,
u'zku': 17597,
u'uac': 17578,
u'ra#': 17570,
u'd\u016fv': 17539,
u'e#c': 17529,
u'a#c': 17521,
u'iko': 17507,
u'azn': 17484,
u'eck': 17464,
u'\xe1ny': 17463,
u'\u0159e#': 17457,
u'hno': 17446,
u'\xe9#b': 17429,
u'\xed#\u017e': 17423,
u'rni': 17405,
u'#d\u011b': 17405,
u'vaj': 17379,
u'b\u011b#': 17363,
u'\xe9#r': 17351,
u'inf': 17317,
u'u#e': 17290,
u'l\xed#': 17250,
u'e\u0159\xed': 17241,
u'kem': 17233,
u'edi': 17175,
u'ern': 17166,
u'\xe1no': 17095,
u'#dv': 17092,
u'#hr': 17080,
u'm\xedt': 17074,
u'tyt': 17056,
u'h\xe1z': 17053,
u'hto': 17041,
u'\u010dit': 17025,
u'\u011bd\u011b': 17010,
u'\u017e\xedv': 16976,
u'yl#': 16961,
u'lup': 16959,
u'e#i': 16955,
u'an\u011b': 16938,
u'v\xfdr': 16927,
u'l\xe1\u0161': 16927,
u'\u016fvo': 16924,
u'odv': 16921,
u'nec': 16874,
u'ne\u017e': 16867,
u'#\u017e\xe1': 16866,
u'po\u017e': 16857,
u'\u011bko': 16847,
u'\xe1mc': 16845,
u'\xe1ce': 16799,
u'\xe1t#': 16743,
u'm\xe1m': 16732,
u'ruh': 16726,
u'e\u0159e': 16704,
u'oso': 16697,
u'nat': 16691,
u'm#r': 16632,
u'\u011b#d': 16631,
u'\u016f#v': 16603,
u'osk': 16595,
u'po#': 16585,
u'\u011brn': 16585,
u'e\u010de': 16574,
u'\u011bt#': 16572,
u'obo': 16569,
u'm\u011b\u0159': 16545,
u'kyt': 16544,
u'log': 16490,
u'et\xed': 16483,
u'u#u': 16472,
u'\u0159ad': 16468,
u'd\u016f#': 16462,
u'sad': 16451,
u'ete': 16367,
u'oro': 16354,
u'exi': 16346,
u'tal': 16340,
u'tem': 16326,
u'ity': 16319,
u'mn\u011b': 16314,
u'\xed#\xfa': 16308,
u've\u0159': 16303,
u'\u016fst': 16292,
u'\u017ead': 16281,
u'\xe1\u017ee': 16259,
u'n\xedk': 16249,
u't#m': 16235,
u'avu': 16168,
u'ons': 16167,
u'a\u0161i': 16153,
u'ktu': 16148,
u'bch': 16081,
u'ouz': 16079,
u'#my': 16015,
u'gie': 15981,
u'#fo': 15976,
u'oji': 15972,
u'ide': 15962,
u'\u0159ek': 15954,
u'one': 15953,
u'zah': 15908,
u'\u010det': 15907,
u'dky': 15886,
u'kaj': 15883,
u'\u016f#p': 15865,
u'\u0159il': 15851,
u'omn': 15848,
u'ust': 15826,
u'jeh': 15706,
u'res': 15688,
u'ogr': 15685,
u'\xe1ze': 15661,
u'amo': 15641,
u'u\u010da': 15633,
u'orn': 15625,
u'\xe1sa': 15599,
u'ome': 15575,
u'nis': 15571,
u'\xe9na': 15559,
u'nyn': 15549,
u'po\u0159': 15543,
u'\xe1s#': 15533,
u'ids': 15505,
u'ram': 15499,
u'\u017e#s': 15490,
u'onk': 15469,
u'ut\xed': 15428,
u'h#r': 15419,
u'zni': 15413,
u'gen': 15410,
u'jas': 15398,
u'h#m': 15355,
u'dra': 15339,
u'zn\xe1': 15335,
u'h#t': 15331,
u'\xfaro': 15329,
u'yn\xed': 15321,
u'\u0148uj': 15313,
u'd\xe1v': 15313,
u's#n': 15295,
u'ade': 15289,
u'\u0159\xedm': 15288,
u'sml': 15288,
u'v\xfdb': 15283,
u'#\xfar': 15235,
u'v\xe1d': 15235,
u'aly': 15219,
u'ou\u017e': 15219,
u'vyu': 15215,
u'u\u017ei': 15209,
u'\u011b#k': 15207,
u'\u0159is': 15177,
u'#uk': 15155,
u'ans': 15154,
u'tut': 15138,
u'rem': 15113,
u'v#d': 15110,
u'yu\u017e': 15093,
u'ono': 15091,
u'liz': 15085,
u'v\xe9h': 15074,
u'd\xe1n': 15073,
u'#ak': 15072,
u'odm': 15071,
u'dro': 15061,
u'opo': 15061,
u'l\u016f#': 15023,
u'k#d': 14966,
u'oud': 14935,
u'\u011b#o': 14915,
u'\u011b\u017e#': 14910,
u'lav': 14893,
u'\xe1ci': 14869,
u'dv\u011b': 14857,
u'z\xe1j': 14856,
u'mok': 14855,
u'sen': 14853,
u'd#j': 14853,
u'niz': 14831,
u'a#\xfa': 14825,
u'\xed#l': 14818,
u's\xe1h': 14788,
u'\u0159\xedl': 14777,
u'inu': 14748,
u'spe': 14740,
u'#ke': 14737,
u'int': 14724,
u'dnu': 14706,
u'eur': 14691,
u'r\xe1t': 14688,
u'osa': 14676,
u'l#j': 14674,
u'k#s': 14667,
u'\xe1#o': 14661,
u'ytn': 14660,
u'zab': 14652,
u'o#h': 14648,
u'sa\u0159': 14622,
u'avn': 14620,
u'anu': 14614,
u'\xe1v#': 14610,
u'edu': 14598,
u'rog': 14589,
u'duj': 14582,
u'ija': 14571,
u'\u0159ip': 14562,
u'\xfd#s': 14548,
u'i\u010dn': 14532,
u'\xed#h': 14513,
u'a##': 14511,
u'\u017e#p': 14511,
u'#r\u016f': 14494,
u'o\u010dt': 14490,
u'c\xedm': 14479,
u'udr': 14468,
u'\xed#i': 14465,
u'e#h': 14444,
u'ula': 14421,
u'##a': 14420,
u'\u010dov': 14417,
u'ke#': 14411,
u'l#v': 14380,
u'rn\xed': 14350,
u'\xe1te': 14346,
u'edp': 14342,
u'\u017eov': 14321,
u'use': 14316,
u'gio': 14308,
u'v\xfd#': 14297,
u's#v': 14290,
u'zin': 14290,
u'ed\xe1': 14289,
u'#ur': 14286,
u'\u011bkt': 14284,
u'ods': 14269,
u'uze': 14264,
u'#kv': 14260,
u'y\u017e#': 14253,
u'dy\u017e': 14252,
u'an#': 14249,
u'\u010dn\xfd': 14230,
u'omt': 14203,
u'kaz': 14184,
u'd\xe1m': 14182,
u'mil': 14143,
u'\xe1d#': 14140,
u'ruk': 14138,
u'op\u011b': 14136,
u'\xed#u': 14133,
u'buj': 14120,
u'xis': 14103,
u'd#n': 14103,
u'v\u011br': 14091,
u'od\u011b': 14082,
u'a#h': 14050,
u'\xfdzn': 14046,
u'in#': 14045,
u'sna': 14040,
u'nom': 14039,
u'\u011b##': 14035,
u'mlo': 14020,
u'dm\xed': 14017,
u'a\u017ed': 14012,
u'ukt': 14007,
u'rsk': 14007,
u'\u0161ic': 13989,
u't\u0161\xed': 13980,
u'ok#': 13979,
u'yly': 13970,
u'#ry': 13954,
u'an\xfd': 13941,
u'\xfdbo': 13928,
u'nfo': 13925,
u'uvi': 13923,
u'\u017e\xed#': 13913,
u'poc': 13909,
u'cha': 13899,
u'v\xfd\u0161': 13847,
u'ahu': 13839,
u'chc': 13820,
u'tej': 13811,
u'k#z': 13808,
u'e\u0161e': 13803,
u'\u017en\xe9': 13792,
u'byt': 13782,
u'z\xed#': 13777,
u'kum': 13776,
u'\xfa\u010di': 13773,
u'\xe1#j': 13769,
u'rma': 13753,
u'st\u011b': 13746,
u'\xe1#n': 13725,
u'pu#': 13715,
u'\xfdro': 13711,
u'#p\xe1': 13688,
u'ik#': 13678,
u'zap': 13661,
u'tli': 13639,
u'ni\u010d': 13638,
u'g\xe1n': 13626,
u'epo': 13613,
u'u\u0161e': 13611,
u'rg\xe1': 13593,
u'\u010di#': 13587,
u'sn\xe9': 13584,
u'atu': 13577,
u'zal': 13567,
u'\u017e#b': 13562,
u'#zl': 13548,
u'ntr': 13525,
u'\xe1#u': 13496,
u'n\u011bm': 13484,
u'dod': 13471,
u'd#p': 13466,
u'v\u011bc': 13463,
u'maj': 13451,
u'ozn': 13447,
u'\u011blo': 13419,
u'or#': 13411,
u'rol': 13410,
u'\u016f#k': 13407,
u'avy': 13403,
u'n\u011b\u017e': 13381,
u'\xfdka': 13380,
u'pr\u016f': 13371,
u'\xedkl': 13360,
u'opn': 13345,
u'zde': 13341,
u'hem': 13328,
u'ins': 13325,
u'#um': 13315,
u'zuj': 13307,
u'z\u0159e': 13289,
u'za\u010d': 13286,
u'\xedli': 13275,
u'am#': 13249,
u'ote': 13249,
u'iln': 13186,
u'\u016f#n': 13184,
u'u#\u017e': 13163,
u'l#b': 13160,
u'n\xe1l': 13150,
u'mal': 13145,
u'ka\u017e': 13140,
u'#pe': 13121,
u'apo': 13109,
u'h#j': 13105,
u'ozo': 13101,
u'e\u0161t': 13083,
u'm\u011bd': 13081,
u'uhl': 13043,
u'c\u016f#': 13042,
u'el\xe9': 13035,
u'mci': 13030,
u'd\xedl': 13004,
u'cko': 12989,
u'\u010d\xe1s': 12987,
u'mov': 12979,
u'#ex': 12977,
u'\xe1#z': 12965,
u'izo': 12950,
u'it\xe1': 12943,
u'\xed#\u010d': 12941,
u'hyb': 12930,
u'py#': 12921,
u'tru': 12914,
u'\u0159\xe1d': 12903,
u'iku': 12899,
u'hou': 12891,
u'rea': 12888,
u'o#l': 12886,
u'na\u010d': 12885,
u'\u0161\xedc': 12881,
u'dat': 12856,
u'#jd': 12843,
u'ahr': 12835,
u'a#i': 12831,
u'#ru': 12794,
u'til': 12766,
u'vyj': 12757,
u'av\xed': 12739,
u'bu#': 12733,
u's\xedl': 12722,
u'a#l': 12721,
u'ily': 12704,
u'zce': 12682,
u'nev': 12681,
u'p\xeds': 12672,
u'boj': 12648,
u's#t': 12639,
u'ach': 12615,
u'\u016fra': 12612,
u'o\u017ei': 12603,
u'd\u016fr': 12594,
u'n#p': 12592,
u'z#n': 12591,
u'vzd': 12571,
u'\u0159et': 12559,
u'\xe1d\u011b': 12557,
u'\u011bls': 12553,
u'su#': 12544,
u'\xedse': 12543,
u'ozi': 12530,
u'd\u011bk': 12525,
u'on\xe1': 12503,
u'd\u011bt': 12502,
u'#ny': 12501,
u'm\xedr': 12488,
u'#\u010da': 12482,
u'usn': 12470,
u'\xe1ni': 12447,
u'cem': 12440,
u't\xe1v': 12408,
u'usk': 12402,
u'up#': 12398,
u'pen': 12384,
u'l\xedm': 12380,
u'e#\u010d': 12365,
u'sah': 12359,
u'i#u': 12316,
u'm\u016f#': 12309,
u'j\xedm': 12305,
u'gov': 12263,
u'ur\u010d': 12262,
u'va\u017e': 12220,
u'lno': 12212,
u't\u011bn': 12202,
u'ohu': 12185,
u'#m\xe9': 12172,
u'pak': 12148,
u'mec': 12146,
u'chl': 12135,
u'tna': 12126,
u'chy': 12125,
u'j\u016f#': 12123,
u'r\xe1l': 12119,
u'o\u010de': 12118,
u'nii': 12107,
u't#r': 12082,
u'je\u0161': 12067,
u'mo\u0159': 12060,
u'hny': 12052,
u'ro\u010d': 12049,
u'\xe9#\xfa': 12049,
u'ozm': 12020,
u'n\xe1z': 11994,
u'\xe1n#': 11981,
u'hro': 11960,
u'\xe1#k': 11945,
u'vla': 11941,
u'oln': 11937,
u'\xe9#e': 11919,
u'odr': 11890,
u'v#b': 11871,
u'i\xed#': 11870,
u'a\u0159\xed': 11849,
u'd#s': 11838,
u'o\u0159e': 11836,
u'umo': 11833,
u'imi': 11826,
u'v\xfds': 11820,
u'uc\xed': 11816,
u'chu': 11815,
u'zby': 11815,
u'ovu': 11813,
u'an\u016f': 11805,
u'nac': 11744,
u'#br': 11741,
u'u\u010di': 11736,
u'zd\u011b': 11735,
u'edy': 11735,
u'art': 11693,
u'bit': 11684,
u'd#b': 11673,
u'v\xedt': 11672,
u'co\u017e': 11666,
u'ogi': 11663,
u'os\xe1': 11661,
u'n\xedv': 11654,
u'##m': 11651,
u'n\xed\u017e': 11650,
u'd\u011bn': 11636,
u'#p\u016f': 11573,
u'dpi': 11543,
u'ism': 11515,
u'del': 11496,
u'ec#': 11495,
u'ize': 11485,
u'jev': 11478,
u'ru\u0161': 11455,
u'ntn': 11446,
u'z#t': 11436,
u'vuj': 11434,
u'ob\xed': 11418,
u'\u017eem': 11416,
u'zat': 11415,
u'dit': 11407,
u'al\xe9': 11394,
u'hli': 11390,
u'#ek': 11373,
u'ruj': 11367,
u'gan': 11356,
u'lu\u017e': 11351,
u'\xe1tn': 11350,
u'v#k': 11334,
u'sn\xed': 11312,
u'ory': 11304,
u'avr': 11295,
u'dna': 11286,
u's#o': 11284,
u'\u0161it': 11272,
u'\xe1d\xe1': 11267,
u'vob': 11262,
u'etn': 11254,
u'kli': 11251,
u'pl\xe1': 11242,
u'v#z': 11237,
u'\u0161em': 11234,
u'#nu': 11234,
u'zvl': 11229,
u'vno': 11209,
u'\xe1vy': 11203,
u'pek': 11190,
u'sl\xed': 11177,
u'jek': 11166,
u'ond': 11158,
u'oba': 11150,
u'\u017eil': 11143,
u'kat': 11117,
u'\u011b#m': 11115,
u'odi': 11101,
u'ava': 11096,
u'era': 11094,
u'nky': 11073,
u'il\xed': 11073,
u'\xe1#a': 11066,
u'd\xe1#': 11063,
u'z\xe1l': 11039,
u'o\u017ea': 11037,
u'vil': 11023,
u'\xe9#t': 11014,
u'\u011bdn': 10995,
u'hr\xe1': 10981,
u'ap\u0159': 10970,
u'ure': 10969,
u'ere': 10961,
u'#u\u017e': 10956,
u'\xe1vu': 10948,
u'ln\xfd': 10942,
u'ado': 10932,
u'rvn': 10925,
u'les': 10920,
u'k\xe1z': 10912,
u'l#n': 10897,
u'u\u017ee': 10892,
u'\xe1zo': 10887,
u'zle': 10869,
u'y#u': 10861,
u'\u011bno': 10852,
u'jov': 10850,
u'bno': 10840,
u'o#\xfa': 10820,
u'u#c': 10813,
u'a\u017ee': 10810,
u'\u011bd\u010d': 10802,
u'\xedmu': 10784,
u'p\u011b#': 10782,
u'ela': 10762,
u'v#m': 10756,
u'ed\xed': 10729,
u'eby': 10718,
u'\xe1za': 10706,
u'\u017e#v': 10687,
u'#vi': 10673,
u'h#c': 10673,
u'p\u011bt': 10648,
u'bal': 10630,
u'rga': 10621,
u'r\u016fm': 10608,
u'\xe1#b': 10599,
u'rge': 10587,
u'\xedci': 10579,
u'gic': 10552,
u'eze': 10545,
u't\u011b\u017e': 10535,
u'id\xed': 10518,
u'#uz': 10508,
u'upu': 10500,
u'nco': 10494,
u'e#l': 10489,
u'o#i': 10481,
u'o#\u010d': 10475,
u'\u017e#n': 10475,
u'izi': 10464,
u'y#r': 10457,
u'b\xed#': 10437,
u'man': 10436,
u'utn': 10435,
u'rus': 10423,
u'sne': 10419,
u'huj': 10404,
u'obi': 10396,
u'\xedva': 10390,
u'ase': 10390,
u'l#s': 10359,
u'v#j': 10344,
u'zd\u016f': 10342,
u'is\xed': 10342,
u'and': 10334,
u'oup': 10320,
u'#dl': 10314,
u'\xe1na': 10306,
u'vzt': 10291,
u'o#f': 10273,
u'dli': 10258,
u'vaz': 10256,
u'zta': 10227,
u'nku': 10225,
u'\u011btv': 10225,
u'ejv': 10224,
u'dn\xfd': 10222,
u'upl': 10220,
u'\u016f#s': 10218,
u'i\u0161t': 10202,
u'oni': 10198,
u'a#\u010d': 10197,
u'it\u011b': 10191,
u'onc': 10169,
u'\u011bl\xe1': 10164,
u'pre': 10159,
u'eni': 10156,
u'dvo': 10149,
u'vo#': 10141,
u'ma#': 10129,
u'vyp': 10125,
u'p\xe1n': 10119,
u'tin': 10113,
u'dla': 10112,
u'd\u016fs': 10110,
u'lim': 10104,
u'jet': 10101,
u'fon': 10093,
u'b\xfdv': 10092,
u'mac': 10084,
u'rno': 10080,
u'i#c': 10077,
u'lic': 10076,
u'#ml': 10063,
u'#va': 10048,
u'vy\u0161': 10048,
u'ji\u0161': 10045,
u'ank': 10045,
u'\u010dek': 10039,
u'uro': 10025,
u'm\u017e#': 10018,
u'\xfdsl': 10008,
u'lek': 9980,
u'tah': 9974,
u'kam': 9957,
u'pop': 9947,
u'ru\u010d': 9944,
u't#b': 9928,
u'kva': 9919,
u'en\u011b': 9919,
u'ozd': 9914,
u'tne': 9890,
u'ep\u0159': 9874,
u't#e': 9871,
u'\xfd#v': 9866,
u'v\xe9m': 9862,
u'dan': 9853,
u'jat': 9843,
u'kl\xe1': 9838,
u'i#l': 9825,
u'm#c': 9819,
u'h\xe9#': 9801,
u'get': 9789,
u'jim': 9778,
u'\u010del': 9776,
u'ted': 9764,
u'h#\u010d': 9755,
u'r\u016fs': 9748,
u'ekl': 9714,
u'm#u': 9709,
u'esp': 9708,
u'rgi': 9689,
u'zv\xfd': 9684,
u'hni': 9682,
u'ima': 9678,
u'os\xed': 9672,
u'sv\xfd': 9652,
u'k#a': 9650,
u'eje': 9641,
u'ne\u010d': 9641,
u'chv': 9633,
u'cia': 9629,
u'zne': 9627,
u'smu': 9626,
u'kto': 9625,
u'poh': 9604,
u'b\u011bh': 9591,
u'u#h': 9573,
u'in\xfd': 9553,
u'\u016f#z': 9552,
u'nve': 9551,
u'ves': 9547,
u'isp': 9540,
u'dev': 9539,
u'k#m': 9534,
u'zky': 9530,
u'\u017eel': 9530,
u'#su': 9524,
u'\xe9#\u017e': 9523,
u'\xfdva': 9522,
u'ifi': 9521,
u'kus': 9519,
u'otr': 9519,
u'roh': 9512,
u'\u011bny': 9504,
u'n#v': 9495,
u'\u0161in': 9489,
u'ona': 9488,
u'l\xe9h': 9480,
u'm#h': 9470,
u'avd': 9436,
u'#n\xed': 9415,
u'p\u0161\xed': 9406,
u'e\u0148#': 9389,
u'ref': 9387,
u'aze': 9385,
u'sa\u017e': 9381,
u'opy': 9379,
u'abo': 9375,
u'el\u016f': 9370,
u'#om': 9354,
u'ojo': 9352,
u'otl': 9350,
u'elo': 9344,
u'ez#': 9341,
u'sol': 9332,
u'\u016f#j': 9327,
u'mit': 9318,
u'jm\u011b': 9314,
u'\xe9st': 9310,
u'uho': 9308,
u'eri': 9303,
u'#ku': 9301,
u'\u016f#e': 9299,
u'luv': 9294,
u'ore': 9289,
u'mlu': 9288,
u'\xed#f': 9278,
u'\xe1\u0161t': 9273,
u'pri': 9268,
u'br\xe1': 9264,
u'it\xfd': 9262,
u'hl\xe1': 9260,
u'opu': 9248,
u'jn\xe9': 9245,
u'\xe1jm': 9239,
u'vyv': 9232,
u'esv': 9232,
u'ra\u010d': 9229,
u'k#o': 9228,
u'moz': 9227,
u'##s': 9221,
u'#v\u016f': 9215,
u'i#\u010d': 9207,
u'\u011bto': 9200,
u'bec': 9198,
u'tre': 9192,
u'\xfasp': 9192,
u'a\u0159i': 9190,
u'z#p': 9186,
u'jn\u011b': 9179,
u'vyz': 9178,
u'o\u0159\xed': 9171,
u'kdo': 9164,
u'rtn': 9160,
u'im#': 9147,
u'azu': 9137,
u'\xed\u017ee': 9137,
u'nek': 9125,
u'iv#': 9120,
u'ous': 9113,
u'aje': 9112,
u'\xfdv\xe1': 9112,
u'd#v': 9099,
u'a\u0161\xed': 9086,
u'zda': 9074,
u'd\xe9#': 9066,
u'ryc': 9064,
u'\u011ble': 9063,
u'o#\u0159': 9061,
u'k#b': 9058,
u'ari': 9052,
u'\u011b#b': 9047,
u'voz': 9047,
u'kr\xe1': 9043,
u'l\xed\u010d': 9043,
u'dav': 9032,
u'v\u0161\xed': 9031,
u'uh\xe9': 9030,
u'\xe9n\u011b': 9019,
u'm#e': 9016,
u'vn\xe9': 9014,
u'ita': 8997,
u'inv': 8982,
u'r\u016fz': 8981,
u'ucn': 8978,
u'\xedk\u016f': 8973,
u'n#a': 8971,
u'\u017euj': 8962,
u'r\xfdc': 8956,
u'set': 8946,
u'h#l': 8945,
u'\u011bly': 8944,
u'bon': 8940,
u'nul': 8936,
u'oz\u0159': 8931,
u'ezb': 8907,
u'h\u016f#': 8902,
u'\u010d\xed#': 8891,
u'ers': 8889,
u'\xe9my': 8879,
u've\u0148': 8870,
u'ebi': 8867,
u'\u010der': 8865,
u'\u0161t\xed': 8865,
u'pa#': 8865,
u'r\u016f#': 8864,
u'ud\u011b': 8853,
u'\xe1vi': 8849,
u'tr\xe1': 8847,
u'tsk': 8834,
u'rhy': 8800,
u'der': 8799,
u'toj': 8794,
u'l#z': 8793,
u'\xe1my': 8789,
u'hce': 8788,
u'omp': 8785,
u'\xed##': 8782,
u'z\xe1r': 8779,
u'neu': 8774,
u'#z\xed': 8771,
u'ult': 8769,
u'v\u0159e': 8768,
u'ata': 8749,
u'je\u017e': 8746,
u'ver': 8734,
u'mun': 8729,
u'lac': 8715,
u'tab': 8715,
u'fun': 8688,
u'\xfd#r': 8668,
u'nkr': 8660,
u'd\u010de': 8660,
u'ad\xe1': 8657,
u'ek\xe1': 8653,
u'ouf': 8648,
u'\u016fsl': 8645,
u'iv\xfd': 8641,
u'abi': 8634,
u'r\u010di': 8628,
u'isi': 8615,
u'r\xe9t': 8602,
u'\u017en\xed': 8599,
u'\xe1#d': 8590,
u'ic#': 8583,
u'mou': 8569,
u'tvr': 8564,
u'uve': 8560,
u'\u0161ko': 8560,
u'tek': 8559,
u'#v\u010d': 8554,
u'ev\u0161': 8547,
u'ejs': 8543,
u'nn\xe9': 8542,
u'#\u010d\xed': 8541,
u'jst': 8537,
u'#\u0159a': 8536,
u'li\u0161': 8522,
u'vst': 8520,
u'amn': 8514,
u'\xe1hn': 8511,
u'\u0159\xedc': 8499,
u'dk\u016f': 8490,
u'hor': 8488,
u'h#e': 8474,
u't\u011bj': 8467,
u'kr\xe9': 8463,
u'zko': 8462,
u'a#f': 8452,
u'osu': 8443,
u'sab': 8435,
u'u\u017e#': 8432,
u'oc#': 8431,
u'\xe9#i': 8423,
u'\xednk': 8414,
u'\xe9tn': 8411,
u'#tv': 8411,
u'rec': 8410,
u'#zo': 8393,
u'bra': 8384,
u'\xe1nu': 8384,
u'ovs': 8382,
u'sy#': 8359,
u'##v': 8357,
u'm\xe1l': 8356,
u'kde': 8348,
u'on\u010d': 8345,
u'lst': 8345,
u'\xe1lo': 8345,
u'\xe9#h': 8340,
u'b\u0159e': 8338,
u'jme': 8335,
u'#fr': 8324,
u'#zb': 8302,
u'ric': 8296,
u'\u016fzn': 8295,
u'\xe1je': 8292,
u'ine': 8287,
u'\xfd#n': 8285,
u'\xe9#c': 8284,
u'v\xe1l': 8280,
u'u#i': 8273,
u'har': 8256,
u'vyh': 8256,
u'#fu': 8247,
u'obs': 8244,
u'o##': 8243,
u'#lo': 8235,
u'm\u011b\u0148': 8233,
u't\xe1#': 8222,
u'zit': 8217,
u'e#f': 8210,
u'ebn': 8201,
u'nak': 8172,
u'sm\xed': 8165,
u'tn\xfd': 8154,
u'imo': 8147,
u'upe': 8136,
u'#b\u011b': 8128,
u'na\u0159': 8121,
u'pit': 8110,
u'r\u017ee': 8101,
u'kul': 8094,
u'\xe1sl': 8092,
u'ck\xe1': 8089,
u'\u0159n\xed': 8085,
u'#ti': 8081,
u'ur#': 8065,
u'\xe1#\u017e': 8063,
u'rmo': 8059,
u'n#n': 8052,
u'#as': 8029,
u'cie': 8007,
u'v\u011b\u0159': 8007,
u'ora': 8002,
u'ha#': 8000,
u'zm\xed': 7999,
u'lko': 7995,
u'\u011bdo': 7993,
u'#\u010d\xe1': 7992,
u'ozu': 7988,
u'\xedt\xe1': 7986,
u'at\xed': 7985,
u'bro': 7981,
u'om\xed': 7974,
u'i#h': 7954,
u'itr': 7954,
u'\xe1\u0161e': 7951,
u'\xedky': 7950,
u'i##': 7933,
u'n#k': 7903,
u'cno': 7899,
u'ezn': 7890,
u'od\u016f': 7876,
u'ryb': 7866,
u't#u': 7864,
u'els': 7864,
u'zn\u011b': 7861,
u'esm': 7861,
u'yso': 7857,
u'obd': 7855,
u'##n': 7852,
u'\u011b\u0159e': 7849,
u'dar': 7845,
u'rdi': 7844,
u'\xe1rn': 7843,
u'u\u010de': 7843,
u'\xed\u0159e': 7843,
u'evi': 7837,
u'in\xe9': 7835,
u'p\u011bc': 7835,
u'ext': 7834,
u'lob': 7828,
u'#au': 7822,
u'upo': 7822,
u'ury': 7821,
u'\u016fmy': 7815,
u'\u010dtu': 7812,
u'fik': 7799,
u'rak': 7795,
u'mic': 7784,
u'v#a': 7778,
u'nor': 7756,
u'rn\xe9': 7746,
u'v\u010de': 7743,
u'\u017e#z': 7740,
u'\xfd#z': 7725,
u'uch': 7721,
u'u#l': 7712,
u'iv\xe9': 7707,
u'vsk': 7699,
u'ard': 7697,
u'#u\u010d': 7689,
u'is#': 7684,
u'ije': 7680,
u'oha': 7673,
u'\u011b\u0148o': 7669,
u'kur': 7661,
u'j\xe1d': 7661,
u's\xedc': 7653,
u'#l\xe9': 7647,
u'ivi': 7645,
u'fra': 7637,
u'kuj': 7625,
u'\xe1tk': 7622,
u'tv\xe1': 7616,
u'bou': 7608,
u'mim': 7599,
u'kod': 7599,
u'r#p': 7589,
u'\xe9ne': 7587,
u'av\u011b': 7580,
u'\xe1#m': 7572,
u'ida': 7562,
u'pno': 7557,
u'zno': 7556,
u'ipo': 7543,
u'in\u011b': 7540,
u'aut': 7529,
u'rva': 7526,
u'\u017e#d': 7523,
u'h#b': 7519,
u'\u0161i#': 7518,
u'ota': 7515,
u'enk': 7510,
u'as#': 7508,
u'spa': 7507,
u'k##': 7506,
u'v\xe9s': 7503,
u'aho': 7502,
u'ban': 7487,
u'oj\u016f': 7486,
u'e#\xfa': 7478,
u'dec': 7470,
u'\u011b#u': 7467,
u'efo': 7465,
u't\u011bm': 7459,
u'##j': 7447,
u'ch\u017e': 7433,
u'h\u017e#': 7433,
u'bn\xed': 7432,
u'cit': 7431,
u'vi#': 7431,
u'u#\u010d': 7429,
u'per': 7428,
u't#i': 7420,
u'tuc': 7417,
u'lio': 7411,
u'o\u0165#': 7408,
u'uvy': 7405,
u'\u0161ov': 7402,
u'apr': 7401,
u'bo\u0165': 7400,
u'duk': 7397,
u'akc': 7391,
u'adi': 7387,
u'ant': 7385,
u'#ok': 7370,
u'n\xe1k': 7362,
u't#c': 7361,
u'ron': 7359,
u'#sd': 7348,
u'\xe1hl': 7336,
u'on\u016f': 7335,
u'u#\xfa': 7334,
u'\xfa\u010de': 7329,
u'l\xe1v': 7328,
u'r\xfdm': 7322,
u'nti': 7320,
u'bsa': 7319,
u'mon': 7311,
u'ov\xed': 7292,
u'\xed#\u0159': 7291,
u'tni': 7284,
u'\xe1ko': 7270,
u'om\u011b': 7263,
u'\u011bku': 7259,
u't\xe1m': 7250,
u'em\u016f': 7248,
u'ial': 7245,
u'mo#': 7244,
u'f\xe1m': 7238,
u'idl': 7232,
u'uce': 7229,
u'ber': 7228,
u'oky': 7226,
u'obu': 7216,
u'\xe1z\xed': 7211,
u'l#a': 7208,
u'a\u010do': 7200,
u'amu': 7191,
u'uf\xe1': 7191,
u'nc\u016f': 7187,
u'#ag': 7186,
u'i#i': 7172,
u'\xe9#\u010d': 7169,
u'lni': 7167,
u'\u011bmi': 7157,
u'v\xe1\u0159': 7153,
u'bn\u011b': 7146,
u'\xe1d\u0159': 7140,
u'yj\xe1': 7140,
u'are': 7133,
u'l#\u017e': 7124,
u'\xfa\u010da': 7119,
u'#\xfad': 7112,
u'as\xed': 7112,
u'age': 7101,
u'vky': 7099,
u'o\u0159\xe1': 7095,
u'\u0159ec': 7085,
u'al\xfd': 7065,
u'koj': 7064,
u's\u016f#': 7059,
u'\u011b#r': 7057,
u'#s\xed': 7051,
u'v\u0161i': 7049,
u'\u011bji': 7049,
u'y#\u010d': 7044,
u'eb#': 7041,
u'\xfd#j': 7029,
u'tex': 7029,
u'iat': 7026,
u'ok\xe1': 7019,
u'zka': 7003,
u'nsp': 7002,
u'n#s': 6991,
u'dku': 6987,
u't\xe1n': 6981,
u's#c': 6980,
u'tis': 6973,
u'#ha': 6971,
u'\xe1vo': 6965,
u'esu': 6964,
u'od\xed': 6960,
u'r\u017en': 6957,
u'\u0159in': 6954,
u'jn\xfd': 6949,
u'ni\u017e': 6949,
u'\xe1#r': 6948,
u'\u011bci': 6944,
u'um#': 6939,
u'eve': 6938,
u'm\xedc': 6930,
u'#zj': 6924,
u'kce': 6922,
u'nn\xfd': 6921,
u'\xedsl': 6918,
u'k\xe1v': 6914,
u'\xedsk': 6911,
u'tla': 6909,
u'bli': 6899,
u'#d\xed': 6892,
u'efe': 6892,
u'ris': 6887,
u'm#\u010d': 6886,
u'n#z': 6877,
u'k#k': 6867,
u'it\u0159': 6867,
u'\xfada': 6857,
u'epr': 6855,
u'nim': 6853,
u'nod': 6851,
u'ob#': 6841,
u'\xfdk\xe1': 6837,
u'asi': 6830,
u'\xfd\u0161e': 6828,
u'du\u0161': 6814,
u'#zk': 6808,
u'\xfd#a': 6805,
u'\xfd#b': 6803,
u'tev': 6790,
u'ob\xe1': 6786,
u'evn': 6783,
u'v\xfdh': 6780,
u'd#t': 6769,
u'hrn': 6765,
u'bem': 6764,
u'\xe1#t': 6761,
u'ukr': 6738,
u'rn\u011b': 6734,
u'z\xeds': 6730,
u'raj': 6726,
u'e#\u0159': 6714,
u'glo': 6710,
u's#j': 6709,
u'noc': 6702,
u'esn': 6700,
u'usi': 6700,
u't\u0159\xed': 6688,
u'uko': 6681,
u'nko': 6679,
u'ecn': 6676,
u'net': 6674,
u'j#p': 6667,
u'ak\xfd': 6667,
u'\xedc#': 6666,
u'umu': 6659,
u'z\xe1p': 6657,
u'dia': 6656,
u'tol': 6654,
u'd\u011bj': 6651,
u'ad\u016f': 6632,
u'at\u0148': 6632,
u'y##': 6627,
u'ben': 6626,
u'kty': 6625,
u'r\u017ei': 6617,
u'cky': 6616,
u'##l': 6615,
u'oma': 6613,
u'rio': 6609,
u'c\xedh': 6607,
u'hlo': 6603,
u'avk': 6600,
u'ebe': 6598,
u'onu': 6591,
u'lil': 6590,
u'av\xe1': 6588,
u'han': 6584,
u'\u017en\u011b': 6584,
u'j\xe1#': 6581,
u'rez': 6578,
u'ab\xfd': 6568,
u'ume': 6561,
u'bar': 6557,
u'ouk': 6553,
u'azy': 6552,
u'sno': 6541,
u'\xe1l#': 6525,
u'oj\xed': 6524,
u'\u017e#m': 6520,
u'y#i': 6519,
u'\xed\u017e#': 6518,
u'ann': 6513,
u'\xfasi': 6511,
u'##z': 6507,
u'a\u017e#': 6506,
u'trv': 6495,
u'\xfdzk': 6493,
u'bje': 6492,
u'bol': 6478,
u'oze': 6476,
u'om\xe1': 6475,
u'sok': 6474,
u'\u011bre': 6465,
u't\xed\u017e': 6463,
u'u\u017eb': 6461,
u'\xe9#l': 6447,
u'#gl': 6446,
u'zhl': 6442,
u's#d': 6441,
u'\xe1si': 6440,
u'y#c': 6437,
u'\u010de#': 6436,
u'fer': 6431,
u'o\u010di': 6425,
u'viz': 6419,
u'ejd': 6412,
u'#\u0161k': 6411,
u'm\xe9#': 6407,
u'bav': 6406,
u'l#o': 6405,
u'eza': 6398,
u'lib': 6398,
u's#k': 6392,
u'upn': 6389,
u'mod': 6385,
u'oul': 6382,
u'#am': 6379,
u't\u0161i': 6362,
u'##k': 6357,
u'dch': 6355,
u'vov': 6352,
u'kt#': 6350,
u'#fa': 6348,
u'loh': 6345,
u'aj\u016f': 6344,
u'\u0161\xed\u0159': 6340,
u'n\xe1\u0161': 6331,
u'#ci': 6326,
u'zn\xed': 6324,
u'ior': 6322,
u'\xe9m\u016f': 6321,
u'kt\u016f': 6321,
u'ral': 6316,
u'v\xfdv': 6313,
u'okl': 6306,
u'\xfdvo': 6306,
u'aru': 6305,
u'bdo': 6298,
u'hl\xed': 6294,
u'zn\xfd': 6294,
u'sel': 6288,
u'onf': 6285,
u'uzn': 6285,
u'zic': 6285,
u'vzh': 6282,
u'tar': 6282,
u'an\xe1': 6280,
u'\u010d\xedn': 6279,
u'h#h': 6278,
u'#\xfal': 6277,
u'us#': 6274,
u'lze': 6273,
u'hud': 6272,
u'\xfd#d': 6267,
u'mem': 6265,
u'\xfalo': 6265,
u'sud': 6264,
u'ipr': 6261,
u'h#\xfa': 6255,
u'k\u016fm': 6244,
u'ulo': 6234,
u'ras': 6228,
u'#em': 6221,
u'h#u': 6220,
u'yva': 6216,
u'rst': 6212,
u'jmo': 6210,
u't\u0159i': 6203,
u'ara': 6203,
u'm\xe9h': 6201,
u'kal': 6200,
u'#ov': 6193,
u'tn\xe1': 6184,
u'saz': 6178,
u'#jm': 6168,
u's#r': 6162,
u'ij\xed': 6161,
u'#sh': 6157,
u'#j\xe1': 6154,
u'uv\u011b': 6148,
u'#o\u010d': 6142,
u'ob\u0159': 6141,
u'nta': 6138,
u'va\u0161': 6131,
u'd#z': 6127,
u'mn\xe9': 6126,
u'op\u0159': 6125,
u'iro': 6123,
u'\u0159st': 6107,
u'en\u0161': 6101,
u'u\u0161n': 6099,
u'\u0159e\u010d': 6097,
u'vyk': 6082,
u'#zc': 6079,
u'kac': 6074,
u'#a\u017e': 6074,
u'oz\u0161': 6072,
u'eto': 6065,
u'gis': 6063,
u'evy': 6061,
u'arm': 6057,
u's#m': 6054,
u'ema': 6046,
u't\u0159n': 6045,
u't\u0148o': 6042,
u'\xe9ma': 6037,
u'boh': 6029,
u'ord': 6020,
u'neo': 6020,
u'ltu': 6017,
u'hat': 6014,
u'ars': 6010,
u'h#i': 6007,
u'\u011bti': 6005,
u'ung': 6004,
u'#bl': 6004,
u't#l': 6001,
u'tku': 6000,
u'\xedje': 5992,
u'v\xe1s': 5975,
u'\u010duj': 5974,
u'\u011bna': 5973,
u'\xedte': 5953,
u's#a': 5951,
u'opl': 5951,
u'#\xfa\u0159': 5939,
u'k#u': 5938,
u'a#\u0159': 5937,
u'udi': 5927,
u'ik\u016f': 5916,
u'\xe1li': 5910,
u'#\u0161p': 5903,
u'nuj': 5899,
u'l\xfdc': 5889,
u'byv': 5883,
u'#ir': 5878,
u'\u011b\u0159\xed': 5859,
u'nel': 5854,
u'ndu': 5850,
u'zid': 5848,
u'egu': 5847,
u'\u016fj#': 5842,
u'lyn': 5842,
u'ez\xe1': 5841,
u'inc': 5841,
u'\xe1\u017en': 5841,
u'pon': 5836,
u'#\u010dt': 5832,
u'y#\u017e': 5828,
u'#sr': 5827,
u'erv': 5819,
u'rve': 5817,
u'##d': 5816,
u'uka': 5812,
u'\xe1\u0159e': 5810,
u'r\u017eo': 5804,
u'#oz': 5802,
u't\u016fm': 5796,
u'l#k': 5788,
u'zv\xed': 5784,
u'it\xed': 5783,
u'pob': 5779,
u'luj': 5772,
u'jno': 5771,
u'z\xfdv': 5771,
u'l\xfd#': 5766,
u'lu\u0161': 5763,
u'jal': 5758,
u'#el': 5756,
u'uva': 5754,
u'#\u0159\xe1': 5750,
u'yli': 5750,
u'ply': 5740,
u'n\u016fm': 5739,
u'#z\u0159': 5738,
u'\u0161n\xed': 5738,
u'\xfast': 5736,
u'\xedl#': 5735,
u'uk\xe1': 5733,
u'ijm': 5733,
u'hv\xe1': 5727,
u'ngo': 5723,
u'gii': 5711,
u'#ri': 5707,
u'spl': 5707,
u'h#f': 5706,
u'ura': 5705,
u'l#t': 5704,
u'l\xe1#': 5702,
u'gul': 5693,
u't\xfd#': 5690,
u'\u010dil': 5687,
u'jad': 5681,
u'naj': 5650,
u'k#r': 5649,
u'#ge': 5646,
u'#zr': 5646,
u'rof': 5644,
u'h\xe1j': 5643,
u'id\xe9': 5637,
u'\u010dem': 5635,
u'nol': 5630,
u'na\u017e': 5628,
u'\u0161il': 5620,
u'sn\xfd': 5617,
u'vn\xfd': 5606,
u'\u011bhe': 5605,
u'm\xedm': 5605,
u'v\u016fl': 5597,
u'zu#': 5586,
u'\xe1n\u016f': 5580,
u'p\u0159\xe1': 5576,
u'\u0161le': 5573,
u'lk\xfd': 5561,
u'#u#': 5559,
u'ohr': 5557,
u'lte': 5553,
u'\xedr\xe1': 5550,
u'v#\u010d': 5544,
u'd\u0159i': 5532,
u'mot': 5530,
u't#h': 5528,
u'\xfd\u0161i': 5521,
u'kl\xed': 5502,
u'\xedl\u016f': 5497,
u'lec': 5495,
u'yby': 5484,
u'mer': 5478,
u'z#v': 5477,
u'oka': 5476,
u'di#': 5469,
u'#\xfaz': 5467,
u'vli': 5466,
u'lk\xe9': 5464,
u'a\u017eu': 5464,
u'aro': 5462,
u'nab': 5456,
u'm#\xfa': 5441,
u'j\xedt': 5438,
u'm#\u0159': 5432,
u'\xe1tu': 5424,
u'ne\u0161': 5412,
u'\u016f#o': 5411,
u'\u0161pa': 5408,
u'osi': 5406,
u'che': 5398,
u'dva': 5394,
u'n\u011bl': 5387,
u'nt\u016f': 5381,
u'nas': 5377,
u'\xfa\u0159a': 5371,
u'd#k': 5369,
u'rev': 5368,
u'\xe9#f': 5368,
u'idi': 5363,
u'odb': 5357,
u'\u0161tn': 5355,
u'zd\xe1': 5352,
u'ev\u0159': 5351,
u'oty': 5347,
u'\u011bt\xed': 5340,
u'kl#': 5329,
u'\xfd#m': 5328,
u'rh\u016f': 5327,
u'opi': 5319,
u'\u011bho': 5314,
u'\u0159\xedt': 5311,
u'#\xfam': 5309,
u'p\u0161e': 5308,
u'tud': 5308,
u'\u0159ez': 5300,
u'dil': 5300,
u'zva': 5297,
u'\u017en\xe1': 5290,
u'\xfd#k': 5287,
u'#af': 5276,
u'\u017ed\xe9': 5276,
u'mob': 5276,
u'isy': 5271,
u'sl#': 5269,
u'zvy': 5254,
u'\u011bj\xed': 5253,
u'e\u0161i': 5252,
u'tky': 5250,
u'v#c': 5250,
u'egr': 5250,
u'#\xfav': 5245,
u'ybo': 5245,
u'\u011bni': 5235,
u'kos': 5235,
u'zum': 5234,
u'#vs': 5232,
u'i\u010de': 5231,
u'#j\xed': 5228,
u'm#l': 5227,
u'd\xe1t': 5223,
u'\xfd#t': 5220,
u'n#j': 5218,
u'esc': 5216,
u'hl#': 5215,
u'koo': 5210,
u'm#i': 5205,
u'zar': 5204,
u'\u010dto': 5203,
u'vy\u017e': 5200,
u'dek': 5194,
u'iva': 5191,
u'v#l': 5188,
u'\u0161\u0161\xed': 5178,
u'\u011br#': 5170,
u'd\u0159\xed': 5164,
u't\xfdc': 5163,
u'k\xe1c': 5154,
u'\u017edy': 5151,
u'da\u0148': 5137,
u'\u0159id': 5135,
u'e#\u0161': 5131,
u'lia': 5130,
u'\u0159\xedj': 5125,
u'zik': 5124,
u'#vr': 5121,
u'hci': 5117,
u'azo': 5116,
u'kap': 5111,
u'yz\xfd': 5109,
u'ort': 5105,
u'dbo': 5102,
u'\xe1ve': 5102,
u'k\xe1\u017e': 5099,
u'dot': 5098,
u'rmy': 5095,
u'h##': 5092,
u'az#': 5091,
u'k\xe1m': 5090,
u'\xfdzv': 5086,
u'obj': 5085,
u'\u011bt\u011b': 5084,
u'#\xfap': 5072,
u'ola': 5066,
u'ile': 5063,
u'oor': 5062,
u'nah': 5059,
u'end': 5059,
u'o\u010dn': 5057,
u'cuj': 5056,
u'eal': 5055,
u'#du': 5053,
u'esk': 5053,
u'o\u010d\xed': 5049,
u'z\u0161\xed': 5044,
u'kan': 5042,
u'##b': 5041,
u'is\u016f': 5041,
u'\xedpr': 5039,
u'osv': 5034,
u'fic': 5032,
u'skr': 5031,
u'o\u010d#': 5027,
u'z\u016fs': 5025,
u'\u011bru': 5020,
u'\u016f#t': 5012,
u'vek': 5011,
u'uza': 5007,
u'v\u011bn': 5002,
u'tok': 4999,
u'ahy': 4999,
u'ic\xed': 4997,
u'k\xe1n': 4995,
u'eso': 4992,
u'\u017e#t': 4990,
u'cio': 4987,
u'mik': 4983,
u'rim': 4980,
u'rnu': 4959,
u'ah\xe1': 4954,
u'v\u017ed': 4954,
u'zd\xed': 4951,
u'atk': 4947,
u'i\u0161#': 4940,
u'tam': 4937,
u's#s': 4935,
u'#bi': 4934,
u'des': 4923,
u'mn\xfd': 4922,
u'rep': 4913,
u'ope': 4910,
u'or\u016f': 4906,
u'k#e': 4905,
u'oz\xed': 4904,
u'sp\xed': 4895,
u'z#d': 4892,
u'rv\xe9': 4884,
u't\u011b\u0161': 4881,
u'jmu': 4879,
u'zek': 4878,
u'shr': 4878,
u'ovy': 4875,
u'kor': 4872,
u'pal': 4863,
u'\u011bs\xed': 4862,
u'vom': 4860,
u'vy\u0159': 4860,
u'vyn': 4860,
u'\xed\u010do': 4857,
u'z#h': 4857,
u'\u0159\xedv': 4856,
u'\u0161lo': 4839,
u'\u0161uj': 4838,
u'oc\xed': 4832,
u'tac': 4832,
u's#e': 4828,
u'\u016fv\u011b': 4827,
u'\u011bn\xfd': 4821,
u'ab\xed': 4821,
u'\u017eet': 4820,
u'\xe9ha': 4815,
u't\xfdm': 4814,
u'zak': 4812,
u'\xe1dy': 4809,
u'l#d': 4803,
u'ozs': 4799,
u'vzn': 4799,
u'\u017en\xfd': 4799,
u'ia#': 4796,
u'\u016f#m': 4791,
u'zra': 4789,
u'v\xedd': 4789,
u'\u010dl\xe1': 4778,
u'ama': 4775,
u'ega': 4774,
u'\u016fli': 4763,
u'\u0159ov': 4760,
u'#ar': 4759,
u'd\xedk': 4756,
u'smy': 4754,
u'h\xe1#': 4751,
u'\u010dn\xe1': 4749,
u'zel': 4745,
u'\u0159iz': 4744,
u'y\u0161\u0161': 4738,
u'av\u0161': 4732,
u'y#l': 4727,
u'y\u0161l': 4720,
u'av\xe9': 4715,
u'osn': 4712,
u'ror': 4712,
u'\u0159ev': 4705,
u'\u011bry': 4703,
u'abe': 4703,
u'zod': 4699,
u'v#\xfa': 4696,
u'\xedma': 4695,
u'uma': 4693,
u'u\u010d\xe1': 4688,
u'ikt': 4682,
u'nn\u011b': 4682,
u't\u011bh': 4681,
u'bo\u017e': 4674,
u'oly': 4668,
u'\u0159\xed\u0161': 4665,
u'p#k': 4662,
u'c#p': 4661,
u'\u0161n\xe9': 4655,
u'vah': 4652,
u'\xe1do': 4642,
u'pil': 4637,
u'uv\xe1': 4636,
u'##o': 4633,
u'sni': 4632,
u'met': 4631,
u'#z\u016f': 4631,
u'y\u017ea': 4629,
u'ypr': 4618,
u'hav': 4616,
u't\xedc': 4611,
u'\u017ed\xfd': 4600,
u'zad': 4598,
u'on#': 4593,
u'ing': 4584,
u'zje': 4583,
u'\xe1mi': 4582,
u'\u017e\xedt': 4577,
u'mu\u017e': 4574,
u'mla': 4574,
u'\xfako': 4574,
u'nka': 4573,
u'\xedve': 4565,
u'y#h': 4564,
u'ktr': 4564,
u'\xe1zc': 4561,
u'h\xe1n': 4560,
u'z#o': 4559,
u'd\xfd#': 4558,
u'vyd': 4553,
u'gi\xed': 4551,
u'iar': 4545,
u'hly': 4541,
u'sek': 4531,
u'odh': 4527,
u'\u0161et': 4522,
u'pac': 4521,
u'bn\xe9': 4519,
u'\u0161eh': 4517,
u'ony': 4515,
u'edc': 4512,
u'm##': 4511,
u'er#': 4510,
u'\xed\u0161t': 4509,
u'dmi': 4504,
u'c#n': 4502,
u'sty': 4497,
u'#\xfak': 4490,
u'n#o': 4487,
u'mco': 4479,
u'ti\u017e': 4478,
u'n#\u017e': 4472,
u't#\xfa': 4471,
u'afr': 4470,
u'kci': 4468,
u'obk': 4463,
u'itn': 4462,
u'd#a': 4457,
u'o\u0161l': 4456,
u't##': 4447,
u'zy#': 4439,
u'a\u0148o': 4433,
u'fri': 4429,
u'vrd': 4422,
u'yko': 4418,
u'l#m': 4413,
u'\u017e##': 4412,
u'\u011b\u0161n': 4408,
u'u#f': 4408,
u'n\u011bc': 4406,
u'\u0159\xedd': 4404,
u'i#\xfa': 4401,
u'\xfdho': 4398,
u'\u011b#e': 4393,
u'n\xe1c': 4382,
u'esl': 4373,
u'o\u017e\xe1': 4372,
u'yho': 4365,
u'iv\u011b': 4364,
u'mut': 4364,
u'lej': 4363,
u'y\u0159e': 4361,
u'al\xed': 4360,
u'\u011bmo': 4358,
u'm\u011bt': 4354,
u'\xe1ne': 4322,
u'\xedmt': 4321,
u'\u017eeb': 4319,
u'vz\xe1': 4319,
u'av\u0159': 4319,
u'irs': 4317,
u'\u016f\u010di': 4315,
u'v\u016f\u010d': 4315,
u'\u011bn\xe9': 4312,
u'emu': 4311,
u'\xedna': 4308,
u'z\xedt': 4300,
u'#v\u017e': 4296,
u't\xfdd': 4291,
u'im\xe1': 4289,
u'da\u0159': 4289,
u'\xedni': 4287,
u'i#\u0159': 4287,
u'u\xe1l': 4286,
u'\u010d\xedm': 4285,
u'vho': 4283,
u'dl#': 4276,
u'p\u011b\u0161': 4271,
u'tri': 4265,
u'ego': 4264,
u'my\u0161': 4262,
u'i\u017ee': 4262,
u'ikd': 4262,
u'\u011bn\u011b': 4259,
u'n\u010de': 4258,
u'haj': 4257,
u'v\xedj': 4255,
u'seb': 4255,
u'em\xe1': 4253,
u'ohy': 4249,
u'##t': 4249,
u'ivu': 4248,
u'd#o': 4244,
u'\u011b\u0159i': 4241,
u'#ef': 4240,
u'fek': 4235,
u'ndi': 4234,
u'tle': 4233,
u'ria': 4227,
u'kle': 4224,
u'acu': 4221,
u'\xedro': 4217,
u'o\u017e\xed': 4214,
u'udu': 4214,
u'i\u0161\u0165': 4201,
u'vn\xe1': 4200,
u'ibe': 4197,
u'#it': 4190,
u'r\xe9m': 4181,
u'top': 4179,
u'vor': 4176,
u'\xfd#o': 4174,
u'a\u010d\xed': 4174,
u'ubl': 4158,
u'icm': 4158,
u'cm\xe9': 4157,
u'v\u016fj': 4157,
u'sv\u016f': 4155,
u'id\u011b': 4153,
u'ivy': 4151,
u'p\u016fv': 4150,
u'\u016f#b': 4150,
u'n\xedz': 4143,
u'tas': 4143,
u'rn\xfd': 4142,
u'#zt': 4142,
u'\xe1\u0161#': 4137,
u'pe#': 4136,
u'\xedne': 4121,
u'vna': 4121,
u'rch': 4119,
u'jit': 4115,
u'p\u011bl': 4107,
u'hal': 4105,
u'msk': 4093,
u'sli': 4091,
u'do\u0161': 4087,
u'b\xe1l': 4086,
u'otv': 4083,
u'ytu': 4080,
u'up\u0159': 4078,
u'z\xedc': 4077,
u'a\u017eo': 4073,
u'\u016f#d': 4071,
u'asp': 4071,
u'udn': 4069,
u'm#f': 4067,
u'\u011btl': 4064,
u'vra': 4060,
u'li\u017e': 4054,
u'\u011bn#': 4049,
u'\u011bco': 4045,
u'ko\u017e': 4043,
u'd#d': 4039,
u'mbi': 4036,
u'sd\u011b': 4034,
u'\u0161\xedh': 4025,
u'yn\u011b': 4024,
u'ak\xe1': 4023,
u's\xedt': 4021,
u'ehl': 4020,
u'\u011b#\u017e': 4015,
u'abs': 4015,
u'm\xe1n': 4011,
u'lah': 4011,
u'd#m': 4011,
u'ive': 4006,
u'\xedzk': 4003,
u'mpr': 4003,
u'lex': 3995,
u'i#f': 3993,
u'e\u0161n': 3989,
u'ztr': 3984,
u'\xedda': 3981,
u'nda': 3978,
u'\xedta': 3976,
u'dor': 3970,
u'sub': 3967,
u'\xfaze': 3964,
u't#\u010d': 3957,
u'ske': 3947,
u'r\u0161\xed': 3946,
u'l\u016fm': 3943,
u'\xedm\xe1': 3941,
u'onz': 3937,
u'\u011bnu': 3937,
u'ik\xe1': 3936,
u'\u017e#o': 3932,
u'\xedno': 3924,
u'\xe9#\u0159': 3922,
u'z#r': 3922,
u'n#d': 3914,
u'##e': 3907,
u'kyn': 3897,
u'rm\xe1': 3893,
u'lin': 3892,
u'\u010d\xe1t': 3891,
u'\xedn\u011b': 3890,
u'yro': 3879,
u'd#r': 3874,
u'upc': 3871,
u'dm\u011b': 3860,
u'\u0159uj': 3854,
u'obt': 3853,
u'tnu': 3850,
u'\u0161im': 3848,
u'\u017eij': 3844,
u'amb': 3843,
u'api': 3843,
u'amy': 3840,
u'dyb': 3839,
u'ind': 3833,
u'bl\xed': 3826,
u'ejl': 3822,
u'br\xe9': 3821,
u'enz': 3821,
u'\xe1pa': 3816,
u'hu\u017e': 3814,
u'onn': 3813,
u'ker': 3813,
u'y\u0161e': 3809,
u'lal': 3809,
u'\xed\u017en': 3807,
u'odk': 3805,
u'plo': 3801,
u'm\u011bm': 3799,
u'ezk': 3790,
u'vyb': 3790,
u'olt': 3788,
u'\xe1nk': 3786,
u'ot#': 3785,
u'enn': 3782,
u'doj': 3770,
u'e\u0161k': 3770,
u'\xedka': 3770,
u'egy': 3767,
u'ybu': 3764,
u'olb': 3763,
u'kv\u016f': 3760,
u'l\xed\u017e': 3757,
u'\xedra': 3756,
u'a\u010dl': 3756,
u'ot\u011b': 3754,
u'sa#': 3752,
u'n#t': 3750,
u'mos': 3748,
u'rne': 3741,
u'#es': 3736,
u'eru': 3735,
u'v\u011bz': 3734,
u'sej': 3733,
u'\u011bme': 3731,
u'asa': 3731,
u'#la': 3728,
u'rum': 3727,
u'i\u017en': 3726,
u'ho\u017e': 3726,
u'sho': 3717,
u'nt\xe1': 3717,
u'\xedrn': 3704,
u'a\u017e\xed': 3704,
u'\u017eni': 3700,
u't\xe9r': 3699,
u'zn\xe9': 3691,
u'jv\u011b': 3690,
u'mar': 3687,
u'\u011bte': 3685,
u'ple': 3683,
u'#\u0161e': 3683,
u'noz': 3677,
u'stl': 3677,
u'ok\xe9': 3666,
u'ol#': 3662,
u'ien': 3661,
u'\xe9ri': 3660,
u'#ad': 3660,
u'#hu': 3657,
u'duc': 3653,
u'bt\xed': 3645,
u'fak': 3641,
u'k\u010dn': 3638,
u'a\u0159k': 3626,
u'eak': 3625,
u'\xe1#e': 3624,
u'b\u011bt': 3620,
u'u\u0161t': 3619,
u'a\u017en': 3619,
u'c\u016fm': 3617,
u'#vh': 3616,
u'ol\xe1': 3614,
u'kc\xed': 3609,
u'zlo': 3607,
u'fr#': 3606,
u'lev': 3606,
u'etr': 3606,
u'lez': 3604,
u'\u011b#i': 3604,
u'azk': 3602,
u'pub': 3602,
u'alt': 3593,
u'\u0161ir': 3592,
u'tim': 3592,
u'#lz': 3591,
u'sk#': 3588,
u'#zh': 3586,
u'ohe': 3585,
u'nkc': 3574,
u'doc': 3572,
u'd#e': 3572,
u'n#m': 3570,
u'\xedt\u011b': 3566,
u'z#z': 3565,
u'vd\u011b': 3562,
u'rtu': 3560,
u'\u017eby': 3559,
u'vdu': 3556,
u'ybn': 3556,
u'\u0159em': 3547,
u'ut\u011b': 3547,
u'#\u0161i': 3546,
u'adr': 3544,
u'nty': 3539,
u'abr': 3529,
u'\xe1d\xed': 3521,
u'pus': 3519,
u'\u011b#c': 3517,
u'sly': 3516,
u'k#\u017e': 3515,
u'bys': 3509,
u'd\xedm': 3508,
u'\u0161n\xfd': 3502,
u'lak': 3498,
u'gu#': 3497,
u'cii': 3493,
u'ido': 3493,
u'o\u017e\u0148': 3490,
u'pl\u0148': 3489,
u'p\u0161i': 3484,
u'ti\u010d': 3484,
u'eob': 3477,
u'u#\u0159': 3475,
u'v\xe1z': 3474,
u'#gr': 3473,
u'jel': 3472,
u'\u010d\xedt': 3472,
u'lon': 3470,
u'#\xfat': 3465,
u'imu': 3465,
u'rin': 3464,
u'\u011b#h': 3462,
u'ov#': 3459,
u'\u0161ke': 3457,
u'nfl': 3455,
u'#p\u011b': 3455,
u'bov': 3450,
u'ago': 3449,
u'ln\xe1': 3448,
u'\xed\u017ei': 3447,
u'apl': 3446,
u'\u0159i\u010d': 3440,
u'rie': 3437,
u'ign': 3436,
u'e\u010di': 3434,
u'epu': 3434,
u'edm': 3432,
u'usp': 3428,
u've\u0161': 3426,
u'l\xe9k': 3424,
u'ro\u017e': 3421,
u'cn\u011b': 3419,
u'om\u016f': 3415,
u'lod': 3414,
u'vos': 3410,
u'hne': 3409,
u'nar': 3408,
u'bio': 3408,
u'n#b': 3403,
u'jmy': 3402,
u'eag': 3397,
u'sev': 3396,
u's#b': 3391,
u'ols': 3385,
u'rik': 3383,
u'nc\xed': 3379,
u'pev': 3377,
u'oum': 3375,
u'otu': 3375,
u'asu': 3372,
u'#p\xe9': 3371,
u'rti': 3364,
u'mrt': 3363,
u'\xedly': 3362,
u'za\u0159': 3360,
u'tif': 3360,
u'naz': 3359,
u'dkl': 3358,
u'als': 3357,
u'ect': 3356,
u'rho': 3355,
u'ysv': 3355,
u'ur\xe1': 3352,
u'#hi': 3352,
u'p\xe9\u010d': 3347,
u'ton': 3339,
u'z\xf3n': 3337,
u'eci': 3336,
u'sn\xe1': 3334,
u'vem': 3331,
u'im\u011b': 3330,
u'\u011blc': 3328,
u'urn': 3328,
u'neh': 3325,
u'nfe': 3325,
u'v\xfdd': 3324,
u'pl#': 3322,
u'vym': 3320,
u'#k\u0159': 3319,
u'r#a': 3318,
u'v#v': 3316,
u'j#v': 3313,
u'ejp': 3306,
u're\u017e': 3305,
u'vyl': 3296,
u'gal': 3287,
u'\u0165ov': 3287,
u'\xe1pi': 3283,
u'ru\u017e': 3282,
u'a\u010fa': 3278,
u'\u017eim': 3267,
u'\xedm\u017e': 3262,
u'g\xe1l': 3261,
u'ma\u010f': 3260,
u'etk': 3258,
u'\xfdra': 3257,
u'pec': 3256,
u'div': 3251,
u'\u010dty': 3248,
u'st\u016f': 3247,
u't\xe1r': 3245,
u'ubo': 3245,
u'v#i': 3245,
u'ah#': 3242,
u'bic': 3241,
u'oke': 3238,
u'ely': 3231,
u'\xedsn': 3231,
u'eg\xe1': 3228,
u'mpl': 3221,
u'\u011bje': 3219,
u'a#\u0161': 3218,
u'vec': 3218,
u's#u': 3217,
u'adm': 3215,
u'leh': 3214,
u'\xe1\u0159\xed': 3213,
u'\u011bm#': 3208,
u'hol': 3207,
u'b\xedd': 3206,
u'leb': 3204,
u'\u017e#k': 3201,
u'\u0159eh': 3201,
u'a#g': 3199,
u'\u010far': 3193,
u'\xe9me': 3191,
u'\xe9pe': 3188,
u'z#j': 3188,
u'l\xe9p': 3188,
u'zru': 3186,
u'\xed#g': 3178,
u'ou\u0161': 3175,
u'dke': 3173,
u'#ga': 3172,
u'a\u017ei': 3172,
u'vr\xe1': 3172,
u'nzu': 3168,
u'no\u017e': 3167,
u'\u011bc\xed': 3166,
u'nde': 3165,
u'ot\xe9': 3158,
u'\xe1du': 3153,
u'def': 3149,
u'u\u0161i': 3148,
u'\xe1ka': 3148,
u'ej\u0161': 3144,
u'\u017e\u0148u': 3139,
u'zk\xe1': 3138,
u'ezo': 3136,
u'nd#': 3135,
u'm\u017ei': 3123,
u'mel': 3122,
u'ni\xed': 3121,
u'\u017est': 3120,
u'am\u017e': 3120,
u'san': 3119,
u'hl\xe9': 3119,
u'ynu': 3118,
u'\u010dko': 3115,
u'\xe1bo': 3108,
u'sn#': 3106,
u'\xed\u0161e': 3103,
u'v\xe1h': 3103,
u'mig': 3102,
u'r#v': 3102,
u'\xfd\u010de': 3101,
u'ta\u010d': 3101,
u'v\xedz': 3099,
u'a\u010dk': 3099,
u'moj': 3097,
u'ku\u0161': 3094,
u'\u0159im': 3093,
u'\u0159el': 3091,
u'juj': 3089,
u'izp': 3088,
u'n\u0161i': 3077,
u'm\xe1h': 3075,
u'zbr': 3073,
u'rmu': 3073,
u'ngu': 3070,
u's#z': 3070,
u'ak\u017e': 3067,
u'v#u': 3066,
u'aku': 3066,
u'efi': 3065,
u'zv\xe1': 3065,
u'l\xe1t': 3062,
u'idu': 3060,
u'\u011bja': 3060,
u'av\xfd': 3057,
u'#d\u0159': 3054,
u'umn': 3053,
u'igr': 3050,
u'evo': 3048,
u'taj': 3047,
u'\xfapl': 3045,
u'et\u0159': 3044,
u'\xe1ti': 3040,
u'\xe1t\u011b': 3040,
u't\xfd\u010d': 3040,
u'##h': 3038,
u'la\u010d': 3035,
u'k\u017ee': 3033,
u'\u0161el': 3028,
u'o\u017es': 3026,
u'yni': 3026,
u'e#g': 3023,
u'k#i': 3021,
u'\xe1mo': 3020,
u'\xedk\xe1': 3019,
u'\u017e\xedm': 3018,
u'\xe1ns': 3018,
u'#tl': 3016,
u'puj': 3015,
u'vyr': 3014,
u'vk\u016f': 3011,
u'in\xed': 3004,
u'y#\xfa': 3004,
u'jle': 3004,
u'zdo': 2998,
u's#\u010d': 2997,
u'\xedru': 2995,
u'#on': 2993,
u'l\xe1m': 2993,
u'ud\xe1': 2991,
u'kar': 2988,
u'niv': 2987,
u'#pi': 2984,
u'ch\u016f': 2980,
u'p\xed\u0161': 2979,
u'r\u010de': 2979,
u'ypl': 2976,
u'o#\u0161': 2973,
u'sin': 2973,
u'ept': 2970,
u'a\u010du': 2969,
u'em\u017e': 2965,
u'##c': 2961,
u'evu': 2959,
u'd\u016fk': 2954,
u'fli': 2949,
u'ob\xe9': 2948,
u'dep': 2946,
u'isu': 2945,
u'\xe1#i': 2943,
u'\u017e\u0161\xed': 2942,
u'y#f': 2942,
u'uci': 2941,
u'\u011bkd': 2937,
u'av#': 2936,
u'tva': 2932,
u'zer': 2929,
u'v\xfdj': 2929,
u'ykl': 2922,
u'or\u0161': 2921,
u't\xe9h': 2920,
u'erz': 2918,
u'tka': 2918,
u'sum': 2918,
u'am\xed': 2916,
u'\u017e#u': 2914,
u'z#e': 2914,
u'k\u0159e': 2911,
u'l\xe1s': 2908,
u'r#n': 2908,
u'oju': 2907,
u'p\u011bv': 2906,
u'br\xfd': 2903,
u'\xedto': 2903,
u'z#k': 2902,
u'jaz': 2899,
u'\xfdji': 2899,
u'zbo': 2898,
u'b\xedh': 2897,
u'ofi': 2896,
u'ble': 2896,
u'##\u010d': 2892,
u'c#a': 2889,
u'l\xfdm': 2888,
u'uvo': 2879,
u'lab': 2879,
u'pul': 2879,
u'iz\xed': 2878,
u'v\u011bk': 2875,
u'lk\xe1': 2867,
u'oub': 2863,
u'dl\xe1': 2862,
u'tes': 2862,
u'asl': 2859,
u'hum': 2859,
u'\u011b#\u0159': 2857,
u'pt#': 2854,
u'di\u010d': 2849,
u'apa': 2843,
u'axi': 2839,
u'eb\xed': 2836,
u'nee': 2833,
u'elu': 2830,
u'ly\u0161': 2830,
u'nfr': 2830,
u'iz#': 2827,
u'v\u011bj': 2825,
u'vrz': 2822,
u'rd#': 2817,
u'nzi': 2814,
u'ev\xed': 2814,
u'#iz': 2813,
u'hlu': 2811,
u'l\xe9\u010d': 2810,
u'ej#': 2810,
u'mas': 2808,
u'eta': 2807,
u'aj#': 2806,
u'ajk': 2805,
u'\xfava': 2804,
u'trp': 2803,
u'l\xedk': 2803,
u'zve': 2801,
u'zoh': 2800,
u'zet': 2800,
u'v\xed\u0159': 2800,
u'his': 2800,
u'a\u010da': 2799,
u'zma': 2798,
u'z#m': 2798,
u'\xfaml': 2796,
u'k#l': 2796,
u'at\u016f': 2796,
u'zbu': 2788,
u'cn\xe9': 2787,
u'el\xed': 2784,
u'uru': 2781,
u'lo\u010d': 2776,
u'dlu': 2775,
u'v\xfdk': 2772,
u'lub': 2772,
u't#f': 2770,
u'\u0159\xe1t': 2770,
u'\xedsp': 2770,
u'r\u016fb': 2764,
u'\u010des': 2755,
u'\xe1vk': 2751,
u'az\u0148': 2749,
u'#at': 2749,
u'pn\xe9': 2746,
u'\xedl\xed': 2745,
u'\u011bc#': 2743,
u'jil': 2742,
u'dln': 2741,
u'\u017ed\u011b': 2739,
u'sd\xed': 2738,
u'm\xed\u0159': 2737,
u'a\u0159e': 2736,
u'lc\u016f': 2735,
u'yv\xe1': 2735,
u'zji': 2733,
u'sro': 2732,
u'c#v': 2731,
u'u\u0161o': 2729,
u'obv': 2728,
u'uli': 2726,
u'bn\xfd': 2720,
u'#zi': 2719,
u'uga': 2718,
u'a\u010de': 2717,
u'ilu': 2705,
u'\u011b#\u010d': 2705,
u'#uj': 2704,
u'mor': 2704,
u'\xe9m\u011b': 2703,
u'\xedmo': 2701,
u'\u0159\xed\u010d': 2698,
u'eus': 2695,
u'\u010dte': 2693,
u'nge': 2690,
u'hnu': 2688,
u'\xe1nc': 2687,
u'\u011b\u017en': 2686,
u'tmi': 2682,
u'umm': 2673,
u'k#\u010d': 2672,
u'skl': 2672,
u'\xe1#h': 2669,
u'mmi': 2665,
u'uh\xfd': 2665,
u'rna': 2663,
u'ojn': 2659,
u'\u011b\u0159#': 2656,
u'lor': 2654,
u'kv\u011b': 2650,
u'pni': 2648,
u'\xe9\u010di': 2646,
u'#l\xe1': 2643,
u'yje': 2637,
u'ime': 2635,
u'unk': 2632,
u'uns': 2631,
u'yb#': 2628,
u'mul': 2625,
u'\xedmn': 2622,
u's#l': 2619,
u'##r': 2618,
u'guj': 2616,
u'm\xe1c': 2615,
u'\u011b\u017ee': 2613,
u'e\u017e\xed': 2613,
u'cip': 2608,
u'xtu': 2605,
u'arr': 2605,
u'#ut': 2602,
u'ert': 2599,
u'n\u010di': 2599,
u'd\xe1r': 2594,
u'dly': 2593,
u'li\u010d': 2593,
u'\u0159ep': 2592,
u'#et': 2589,
u'enu': 2588,
u'etu': 2587,
u'k#h': 2586,
u'\xe1ji': 2586,
u'k#\xfa': 2583,
u'b\u011b\u017e': 2581,
u'upy': 2578,
u'ose': 2577,
u'\xfato': 2575,
u'r\xe1\u017e': 2573,
u'nn\xed': 2573,
u'ozb': 2572,
u'l#r': 2572,
u'\u011bta': 2572,
u'jm\u016f': 2571,
u'\xf3zn': 2563,
u'\u016fb\u011b': 2561,
u'\u0159at': 2561,
u'bso': 2559,
u'l##': 2559,
u'\u017e#c': 2559,
u'bri': 2558,
u'\xe1lu': 2554,
u'b\xe1c': 2552,
u'#id': 2551,
u'\u011b#l': 2550,
u'd\u0159e': 2548,
u'yna': 2548,
u'\xedmc': 2546,
u'ocn': 2546,
u'eex': 2545,
u'n\u0161\xed': 2545,
u'sal': 2543,
u'p\u016f#': 2539,
u'dpa': 2536,
u'ndy': 2535,
u'z\xfa\u010d': 2533,
u'\xe1ru': 2530,
u'v#h': 2527,
u'i\xf3z': 2524,
u'esh': 2522,
u'var': 2521,
u'#he': 2517,
u'h#\u017e': 2517,
u'jes': 2516,
u'#az': 2515,
u'mn\xe1': 2512,
u'v\xe1v': 2512,
u'luh': 2510,
u'jpr': 2509,
u'\u0165an': 2503,
u'lut': 2501,
u'\xed\u0159a': 2501,
u'i\u017eo': 2499,
u'\xfdda': 2498,
u'yja': 2498,
u'um\u011b': 2497,
u'avz': 2496,
u'\u0159\xedr': 2490,
u'nd\u016f': 2489,
u't\xe9\u017e': 2488,
u'n\xe1b': 2484,
u'ats': 2480,
u'z\xe1m': 2480,
u'\xedry': 2476,
u'\xe1mk': 2472,
u'ovl': 2471,
u'\xe1#c': 2470,
u'k#\u0159': 2469,
u'aty': 2469,
u'ark': 2467,
u'jd\u016f': 2462,
u'hoz': 2461,
u'eng': 2460,
u'#pt': 2459,
u'ang': 2458,
u'dol': 2457,
u'atr': 2457,
u'\xe9\u017e#': 2456,
u'\u011bmt': 2455,
u'#z\xfa': 2453,
u'uv\xed': 2452,
u'tk\xe1': 2452,
u'akl': 2452,
u'tod': 2449,
u'yte': 2449,
u'j#s': 2448,
u'b\u011bl': 2446,
u'b#j': 2444,
u'cou': 2439,
u'zaz': 2437,
u'sl\xe1': 2435,
u'v\u011b\u010f': 2434,
u'ah\u016f': 2433,
u'z\xedm': 2429,
u'tko': 2428,
u'rek': 2425,
u'rae': 2424,
u'ikl': 2424,
u'\xfdko': 2422,
u'usa': 2418,
u'ev#': 2417,
u'dve': 2417,
u'lha': 2417,
u'jn\xed': 2414,
u'c#s': 2414,
u'eka': 2414,
u'dej': 2411,
u'iov': 2411,
u'k#c': 2410,
u'cet': 2409,
u'd\u016fm': 2409,
u'obz': 2408,
u'trn': 2408,
u'zk\xe9': 2406,
u'ael': 2405,
u'ubj': 2402,
u'zul': 2401,
u'ety': 2400,
u'#ca': 2396,
u'alk': 2396,
u'ty\u0159': 2392,
u'\u011b\u010f#': 2391,
u'oz\xf3': 2388,
u'sez': 2386,
u'd##': 2385,
u'\u0159ko': 2384,
u'bat': 2384,
u'tiz': 2381,
u'h#\u0159': 2381,
u'p\xed#': 2378,
u'dha': 2378,
u'n\xe1\u0159': 2378,
u'mce': 2378,
u'sig': 2376,
u'tad': 2376,
u'\u011b\u0161\xed': 2374,
u'ukc': 2372,
u'\xfdm\u011b': 2372,
u'sun': 2370,
u'\xe1ha': 2365,
u'\u010div': 2357,
u'\xed#\u0161': 2356,
u'##f': 2355,
u'bzv': 2353,
u'uhu': 2353,
u'ad\xfd': 2352,
u'stm': 2350,
u'u\u010dn': 2348,
u'civ': 2347,
u'psa': 2347,
u'c#j': 2345,
u'#b\u0159': 2345,
u'bk\u016f': 2344,
u'elz': 2344,
u'ece': 2342,
u'rro': 2341,
u'#uh': 2340,
u'dik': 2340,
u't#\u0159': 2339,
u'\u017edo': 2336,
u'og#': 2336,
u'd\xedv': 2336,
u'bsk': 2335,
u'b\xedz': 2331,
u'\u011bm\u017e': 2330,
u'am\u016f': 2329,
u'#\u0161v': 2327,
u'\u011bpo': 2327,
u'\u016fbe': 2326,
u'\u016f#u': 2325,
u'\xfddn': 2325,
u'\u0159er': 2322,
u'uta': 2322,
u'yhn': 2318,
u'#im': 2314,
u'\u010dal': 2313,
u'odd': 2312,
u'yb\xe1': 2309,
u'\xeddk': 2305,
u'mne': 2299,
u'a\u010d\xe1': 2297,
u'jsk': 2294,
u'utu': 2294,
u'\u011bhu': 2291,
u'zis': 2291,
u'a\u0165#': 2290,
u'os#': 2289,
u'p\xedv': 2288,
u'izr': 2286,
u'suz': 2284,
u'ov\u0161': 2282,
u'\u010di\u0161': 2282,
u'ziv': 2281,
u'z\u0159\xed': 2279,
u'exu': 2279,
u'koz': 2279,
u'#av': 2277,
u'smr': 2274,
u'ot\xed': 2274,
u'd\xe9m': 2271,
u'z\u0148u': 2269,
u'iho': 2268,
u'bul': 2266,
u'lom': 2264,
u'\u0161es': 2261,
u'hv\xed': 2258,
u'dic': 2258,
u'\xed\u017e\xed': 2258,
u'd\u011bp': 2256,
u'typ': 2255,
u'esi': 2254,
u'ruz': 2250,
u'jvy': 2250,
u'opt': 2246,
u'\u011bde': 2245,
u'rag': 2244,
u'\xe1to': 2243,
u'cku': 2242,
u'ok\u016f': 2241,
u'v\xedl': 2237,
u'm\xfdc': 2236,
u'j#n': 2235,
u'jv\xed': 2235,
u's\u0165a': 2235,
u'es\u0165': 2235,
u'az\xed': 2234,
u's#h': 2233,
u'po\u0161': 2231,
u'ian': 2229,
u'nd\xe1': 2229,
u'i\u0161n': 2228,
u'l#u': 2225,
u'iv\xe1': 2224,
u'pet': 2220,
u'up\u016f': 2219,
u'\xed\u010dk': 2217,
u'gac': 2217,
u'\xedjm': 2217,
u'itl': 2217,
u'\xe1r\u016f': 2217,
u'\xfd#\xfa': 2216,
u'p\u016fd': 2212,
u'c#k': 2212,
u'um\xed': 2210,
u'e\xe1l': 2208,
u'tug': 2208,
u'\u0159iv': 2206,
u'r#s': 2206,
u'd\xfdc': 2205,
u'\xe1\u017e\xed': 2203,
u'deb': 2203,
u'\u011bma': 2203,
u'\u0159ib': 2202,
u'v#\u0159': 2202,
u'ad\xed': 2199,
u'ink': 2196,
u'o\u0159s': 2195,
u'ogu': 2195,
u'\xe1\u017ek': 2192,
u'a\u0159#': 2192,
u'el\xfd': 2191,
u'gy#': 2190,
u'at\xfd': 2187,
u'eps': 2186,
u'ezm': 2183,
u'ci\xf3': 2181,
u'vrt': 2181,
u'omy': 2180,
u'\xe9ko': 2179,
u'ra\u017e': 2177,
u'pem': 2176,
u'pru': 2176,
u'\u0161eo': 2175,
u'zyk': 2175,
u'v\xedr': 2173,
u'rn\xe1': 2172,
u'yvo': 2172,
u'h\xe1p': 2171,
u'n#r': 2171,
u'o\u0159n': 2170,
u'i\u017e\u0161': 2169,
u'b\xe1m': 2169,
u'klo': 2166,
u'ovz': 2163,
u'yzv': 2162,
u'ul\xfd': 2162,
u'\u0159e\u017e': 2161,
u'\u0161\u0165o': 2159,
u'hna': 2158,
u'zyl': 2158,
u'gru': 2158,
u'zsk': 2155,
u'\xedtn': 2150,
u'dso': 2146,
u'b\xe1\u0159': 2145,
u'l#e': 2144,
u'#fl': 2142,
u'ibl': 2142,
u'pam': 2138,
u'ruc': 2137,
u'\u016f##': 2136,
u'eku': 2135,
u'\u010dku': 2133,
u'en\u010d': 2130,
u'v\u016fb': 2129,
u'\xedk#': 2129,
u'#gu': 2128,
u'vyc': 2126,
u'\xed\u010di': 2124,
u's#i': 2122,
u'\xe1so': 2122,
u'cif': 2121,
u'bru': 2119,
u'\xedtr': 2119,
u'git': 2115,
u'dam': 2113,
u'rax': 2112,
u'\xe1n\u011b': 2111,
u'cep': 2110,
u'l\u0148u': 2110,
u'j#k': 2109,
u'\xfd#\u010d': 2109,
u'lif': 2109,
u'rz\xed': 2102,
u'jd\u0159': 2102,
u'mad': 2101,
u'dce': 2101,
u'd#c': 2101,
u'yd\xe1': 2100,
u'pl\xfd': 2100,
u'\u011bdi': 2095,
u'\u011b\u0159u': 2095,
u'\u0165#j': 2095,
u'her': 2091,
u'rvk': 2089,
u'raf': 2088,
u'pu\u0161': 2086,
u'at\xe9': 2083,
u'b#a': 2081,
u'#\xedr': 2080,
u'\xe9\u010de': 2080,
u'pci': 2079,
u'pce': 2076,
u'\u011btn': 2073,
u'oza': 2072,
u'bam': 2070,
u'dho': 2067,
u'\xedha': 2066,
u'\xe1#\u010d': 2064,
u'en\u017e': 2063,
u'yhl': 2061,
u'\u0165uj': 2059,
u'ak\u010d': 2057,
u'm\u016fm': 2056,
u'\u010dtv': 2056,
u'rze': 2055,
u'ekr': 2055,
u'\xeddi': 2053,
u'vy\u010d': 2053,
u'\xedn\xe1': 2051,
u'hen': 2051,
u'#v\xe9': 2050,
u'i\u010di': 2048,
u'adl': 2043,
u'zau': 2042,
u'y#\u0159': 2040,
u'ud\xed': 2039,
u'ml\xe9': 2036,
u'l\xedz': 2036,
u'\u0159\xedn': 2033,
u'amp': 2032,
u'eg\u016f': 2032,
u'\u0159ic': 2030,
u'auj': 2030,
u'z\xe1t': 2029,
u'vzb': 2029,
u'\u016fch': 2027,
u're#': 2026,
u'taz': 2025,
u'\xe1m\u011b': 2024,
u'#s\xe1': 2021,
u'cnu': 2021,
u'zsa': 2019,
u'\xed\u010de': 2018,
u'uzi': 2016,
u'u#g': 2013,
u'o\u0161k': 2012,
u'gro': 2012,
u'k\xe1l': 2011,
u'r\xe9h': 2011,
u'stk': 2010,
u'uzs': 2009,
u'l\xe1c': 2009,
u'\u011bn\xe1': 2006,
u'die': 2005,
u'aza': 2004,
u'eu\u017e': 2003,
u'\xfdde': 2003,
u'rv\xe1': 2003,
u'\xedt\xed': 2002,
u'\u011b\u0161e': 2001,
u'vir': 2001,
u'akr': 2000,
u'uti': 1997,
u'r#z': 1997,
u'o\u0148#': 1993,
u'\u016f#r': 1992,
u'po\u0148': 1992,
u'nuc': 1990,
u'ypo': 1989,
u'izu': 1989,
u'd\u016fc': 1988,
u'exn': 1988,
u'det': 1988,
u'n\u011bt': 1987,
u'\xedc\u016f': 1987,
u'#f\xe1': 1985,
u'\xfapr': 1983,
u'gn\xe1': 1980,
u'e\u010d\xed': 1976,
u'mpe': 1976,
u'f\xe1z': 1976,
u'ler': 1975,
u'rhl': 1975,
u'rhe': 1974,
u'\u017eky': 1971,
u'tih': 1971,
u'lie': 1968,
u'v\xe1c': 1968,
u'rdy': 1968,
u'ikr': 1967,
u'\u011b\u017ek': 1967,
u'nk\xe1': 1966,
u'ehr': 1965,
u'\u011bro': 1962,
u'kno': 1958,
u'\u017ei#': 1958,
u'doz': 1954,
u'be#': 1954,
u'd#u': 1954,
u'on\u011b': 1953,
u'um\xe1': 1953,
u'#hn': 1952,
u'\xe9ka': 1950,
u'j\u016fm': 1949,
u'i\u010dt': 1949,
u'aha': 1948,
u'pt\xe1': 1948,
u'eva': 1947,
u'afi': 1947,
u'pas': 1945,
u'lta': 1939,
u'\u010d\xeds': 1935,
u'ct#': 1930,
u'utr': 1930,
u'#a\u0165': 1928,
u'v\xe9d': 1927,
u'cn\xfd': 1927,
u'\u0161v\xe9': 1927,
u'muj': 1926,
u'tr\u017e': 1923,
u'uvu': 1923,
u'ery': 1920,
u'arg': 1919,
u'laj': 1918,
u'\u010dis': 1917,
u'alc': 1916,
u'\xe1lk': 1912,
u'n\xe1h': 1908,
u'neg': 1908,
u'gat': 1907,
u'ytl': 1905,
u'bur': 1902,
u'\xeddl': 1900,
u'ikn': 1896,
u'sur': 1894,
u'mpa': 1893,
u'id\xe1': 1892,
u'r#k': 1892,
u'xn\xed': 1890,
u'\xe1c\xed': 1889,
u'pn\xfd': 1888,
u'at\u011b': 1887,
u'zan': 1884,
u'cka': 1883,
u'sex': 1882,
u'evz': 1881,
u'\xedzn': 1881,
u'ekv': 1881,
u'#\u010d#': 1880,
u'byr': 1879,
u'brz': 1878,
u'ulh': 1878,
u'mak': 1873,
u'ibi': 1871,
u'tun': 1871,
u'lok': 1865,
u'zs\xe1': 1862,
u'pta': 1862,
u'idm': 1861,
u'uzo': 1860,
u'ojd': 1853,
u'nsa': 1852,
u'ne\xfa': 1850,
u'kop': 1848,
u'kis': 1847,
u'zp\u011b': 1846,
u'ok\xfd': 1846,
u'i#\u0161': 1845,
u'\xe1#l': 1844,
u't\u011bs': 1843,
u'teh': 1842,
u'e\u010f#': 1841,
u'vda': 1837,
u'v\u0161t': 1835,
u'ad\u0159': 1835,
u'ed\u011b': 1834,
u'aop': 1832,
u'otk': 1829,
u'om\xe9': 1828,
u'\u0148sk': 1828,
u'v\u016fr': 1827,
u'isn': 1826,
u'\u011bze': 1825,
u'b\xedr': 1823,
u'prc': 1821,
u'bel': 1820,
u'\u011b#f': 1820,
u'ma\u010d': 1817,
u'\xedko': 1816,
u'd\xedt': 1816,
u'\xe1#\xfa': 1811,
u'c#z': 1811,
u'j#a': 1809,
u'#ju': 1809,
u'\u010dni': 1808,
u'n\xe9s': 1808,
u'ihl': 1808,
u'd\xe1c': 1808,
u'fle': 1807,
u'#fe': 1806,
u'odc': 1803,
u'\u0161\u0165#': 1799,
u'r#b': 1799,
u'vr\u017e': 1797,
u'azb': 1794,
u'alu': 1793,
u'rce': 1788,
u'avc': 1785,
u'\xfamy': 1783,
u'xt#': 1783,
u'tep': 1783,
u'ake': 1781,
u'u\u0161\xed': 1779,
u'n\u011bz': 1778,
u'#\xfan': 1774,
u'lky': 1773,
u're\xe1': 1773,
u'sic': 1772,
u'r#j': 1772,
u'uri': 1771,
u'epl': 1769,
u'kre': 1768,
u'nse': 1768,
u'nao': 1768,
u'ir\xe1': 1766,
u'lce': 1766,
u'or\xe1': 1766,
u'bky': 1765,
u'nn\xe1': 1764,
u'\xf3ny': 1760,
u'b\u011br': 1756,
u'd#h': 1755,
u'dr\xe1': 1754,
u'\xe9ds': 1754,
u'adc': 1753,
u'j\xed\u017e': 1752,
u'yb\xed': 1752,
u'st\xfd': 1750,
u'm\xe1t': 1749,
u'u#\u0161': 1749,
u'te\u010f': 1747,
u'\xe1\u017ei': 1746,
u'lby': 1746,
u'yv\xed': 1744,
u'ri\xed': 1743,
u'zmi': 1742,
u'h\u016fz': 1741,
u'l\xfdz': 1740,
u'pn\u011b': 1739,
u'vic': 1739,
u'ash': 1736,
u't\xedn': 1735,
u'jn\xe1': 1735,
u'uku': 1731,
u'#is': 1727,
u'sje': 1725,
u'\u016fka': 1724,
u'kot': 1722,
u'\xfav\u011b': 1721,
u'lu\u010d': 1721,
u'don': 1720,
u'\u010d\xedv': 1719,
u'uvn': 1717,
u'o\u017ek': 1717,
u'rku': 1714,
u'p\xedr': 1713,
u'\u011bvk': 1710,
u'#\u0161t': 1709,
u'nl#': 1708,
u'\u011b\u0159o': 1707,
u'uzu': 1706,
u'ehd': 1703,
u'\u0161#p': 1701,
u'ol\xed': 1701,
u'xim': 1700,
u'u\u010du': 1699,
u'y\u0161o': 1698,
u'srb': 1697,
u'max': 1697,
u'n\u017e#': 1693,
u'ors': 1691,
u'i\u010d\xed': 1691,
u'b\xe9#': 1690,
u'o#g': 1688,
u'ob\xfd': 1686,
u'u\u0159e': 1686,
u'\xed\u0159i': 1685,
u'xu\xe1': 1685,
u'rzi': 1684,
u'ipl': 1684,
u'\u017e#r': 1683,
u'mi\u017e': 1678,
u'el\xe1': 1676,
u'yda': 1676,
u'm\u016fj': 1676,
u'e\u0161\xed': 1675,
u'o\u0159a': 1674,
u'koh': 1674,
u'zil': 1669,
u'\xedns': 1669,
u'\u0161\u0165u': 1665,
u'ot\xfd': 1661,
u'n\xe1t': 1659,
u'sat': 1657,
u'vrc': 1657,
u'#kn': 1656,
u'l\xe9z': 1654,
u'lot': 1653,
u'dka': 1653,
u'n\xe1p': 1652,
u'z#f': 1652,
u'hdy': 1652,
u'l\xfdv': 1651,
u'zvi': 1650,
u'erp': 1649,
u'\xe9di': 1649,
u'mky': 1649,
u'p\u011bj': 1647,
u'\u016f#c': 1646,
u'siv': 1646,
u'#ul': 1644,
u'kry': 1641,
u'\xfdhr': 1640,
u'rte': 1638,
u'rm\u011b': 1637,
u'p\u016fj': 1636,
u't\xedh': 1633,
u'\xedzo': 1633,
u'##i': 1633,
u'vz\xed': 1632,
u'\xe1\u0161\xed': 1630,
u'\xe1h\xe1': 1630,
u'imk': 1630,
u'rii': 1630,
u'orv': 1629,
u'y#g': 1628,
u'#sj': 1628,
u'o\u010d\xe1': 1628,
u'kru': 1627,
u'k\xe9k': 1627,
u'ri\xe1': 1627,
u'had': 1625,
u'ga#': 1623,
u'zba': 1622,
u'iol': 1618,
u'u\u017en': 1616,
u'blo': 1616,
u'\xe1ly': 1615,
u'nvi': 1610,
u'ajn': 1609,
u'ajo': 1609,
u'\u0165#s': 1608,
u'\xfd#e': 1607,
u'#m\xfd': 1607,
u'\xfd#f': 1604,
u'\u011br\u016f': 1604,
u'uty': 1602,
u'n#c': 1602,
u'\xe1zi': 1598,
u'\u011b#\xfa': 1596,
u'\u0159\xe1l': 1595,
u'\xe1ku': 1595,
u'env': 1594,
u'lel': 1592,
u'nme': 1590,
u'\u017e#e': 1585,
u'so#': 1583,
u'luc': 1583,
u'rub': 1582,
u'#eg': 1580,
u'suj': 1579,
u'r\xe1r': 1578,
u'ret': 1577,
u'd\xe9h': 1575,
u'ald': 1574,
u'b#p': 1573,
u'y\u0159i': 1572,
u'c\xedt': 1571,
u'vce': 1571,
u'z#c': 1569,
u'\xe9##': 1568,
u'all': 1566,
u'orb': 1565,
u'\u0159\u016f#': 1564,
u'mav': 1562,
u't\u011bz': 1561,
u'\u011bdk': 1560,
u'pli': 1560,
u'tu\xe1': 1560,
u'vza': 1560,
u'i#g': 1559,
u'v#\u017e': 1559,
u'lar': 1559,
u'lb\xe1': 1559,
u'\u010d##': 1557,
u'p#p': 1557,
u'onm': 1556,
u'z\xe1n': 1556,
u'j#z': 1555,
u'\xe1#\u0159': 1554,
u'ir\u0161': 1551,
u'rbs': 1549,
u'hva': 1546,
u'sln': 1545,
u'#p\xed': 1543,
u'v\u011bm': 1543,
u'old': 1541,
u'skv': 1540,
u'jiv': 1539,
u'edv': 1539,
u'oh\xe9': 1539,
u'en\u016f': 1539,
u'ige': 1536,
u'z\xe1c': 1535,
u'rej': 1535,
u'l\u0148o': 1531,
u'nih': 1531,
u'p\xe1c': 1529,
u'hru': 1528,
u'kv\xf3': 1528,
u'uh\xe1': 1526,
u'v\xf3t': 1526,
u'v\xe1t': 1524,
u't\u010de': 1524,
u'fro': 1523,
u'ofe': 1523,
u'l#h': 1523,
u'#sb': 1522,
u'\xe1de': 1522,
u'erm': 1520,
u'did': 1520,
u'\xfa\u0159e': 1517,
u'ejr': 1517,
u'#ky': 1517,
u'ppe': 1516,
u'fes': 1516,
u'am\xe1': 1515,
u'ka\u010d': 1515,
u'ev\u011b': 1513,
u'ape': 1513,
u'lku': 1512,
u'idn': 1512,
u'c#d': 1510,
u'im\u017e': 1508,
u'oml': 1506,
u'n##': 1506,
u'\xfd#c': 1504,
u'p#n': 1504,
u'ys\xed': 1503,
u'u\u017e\u016f': 1502,
u'#ol': 1499,
u'\u0161ti': 1499,
u'ivc': 1497,
u'hok': 1497,
u'gre': 1497,
u'ekc': 1496,
u'son': 1495,
u'\u017eb\xe1': 1495,
u'ed\u016f': 1494,
u'aka': 1494,
u'ujm': 1492,
u'ti\u0161': 1492,
u'\u011bz#': 1491,
u'v#g': 1491,
u'n\u010d\xed': 1490,
u'ems': 1488,
u'di\xed': 1488,
u'ezv': 1487,
u'mbo': 1485,
u'ryh': 1485,
u'ab\xe1': 1484,
u'z#a': 1483,
u'vku': 1482,
u'\u0159i\u0161': 1479,
u'b#v': 1479,
u'\xedku': 1477,
u'\u010deh': 1476,
u'#lu': 1475,
u'rto': 1474,
u'#\u0161\xed': 1474,
u'\u016f#l': 1473,
u'lig': 1473,
u'eod': 1472,
u'osm': 1471,
u'\u0159ir': 1471,
u'#of': 1470,
u'l#c': 1470,
u'do\u010d': 1467,
u'n#e': 1467,
u'd#\u017e': 1467,
u'a\u0148#': 1466,
u'gh\xe1': 1465,
u'o\u010da': 1465,
u'r\xe1z': 1465,
u'afg': 1464,
u'yvi': 1463,
u'ex#': 1462,
u'#uc': 1461,
u'\xe1di': 1461,
u'jte': 1461,
u'lci': 1460,
u'cid': 1452,
u'\u0148#p': 1450,
u'\xe1\u017ed': 1450,
u'mi\u0148': 1450,
u'jka': 1450,
u'd\u010di': 1450,
u'c#b': 1448,
u'b\xe1v': 1448,
u'p\xe1k': 1446,
u'fgh': 1445,
u'l#i': 1445,
u'v\u011bl': 1443,
u'its': 1442,
u'rup': 1440,
u'iby': 1439,
u'atl': 1439,
u'buz': 1439,
u'#pp': 1439,
u'izn': 1439,
u'r#m': 1438,
u'yno': 1438,
u'y\u0161u': 1437,
u'a\u010di': 1437,
u'ses': 1434,
u'\xe1la': 1431,
u'rzy': 1430,
u'rd\xed': 1430,
u'kni': 1429,
u'v##': 1429,
u'\u017eto': 1428,
u'sak': 1428,
u'\u0161li': 1427,
u'ezd': 1426,
u'ezu': 1425,
u'vez': 1424,
u'tk\u016f': 1422,
u'ngl': 1421,
u'#ps': 1420,
u'o\u017et': 1419,
u'c#m': 1418,
u'zt#': 1417,
u'\u0161no': 1417,
u'\u017e\u016f#': 1415,
u'\xfd#\u017e': 1415,
u'zdu': 1415,
u'h\xfdm': 1415,
u'zv\u011b': 1414,
u'\xe9zt': 1412,
u's\xe1m': 1411,
u'iv\xed': 1410,
u'\xedtk': 1403,
u'dny': 1400,
u'dac': 1398,
u'\u016fkl': 1396,
u'\xe1hu': 1396,
u'\u017e#h': 1394,
u'rib': 1394,
u'up\xed': 1391,
u'sym': 1390,
u'ozk': 1388,
u'vam': 1388,
u'arc': 1387,
u'pti': 1385,
u'h\xfd#': 1384,
u't\xe1c': 1381,
u'm\xfdm': 1379,
u'rgu': 1377,
u'\xedz\xed': 1377,
u'urk': 1375,
u'\u016fzk': 1375,
u's\xedd': 1375,
u'##u': 1375,
u'\u011bts': 1375,
u'#ep': 1368,
u'ibu': 1366,
u'\xedlu': 1366,
u'zep': 1366,
u'sis': 1364,
u'sl\xe9': 1362,
u's##': 1358,
u'dub': 1358,
u'd\u0161\xed': 1357,
u'l#\u010d': 1357,
u'\u0161n\u011b': 1356,
u'#a\u010d': 1355,
u'erc': 1354,
u'##\u017e': 1353,
u'd\xfdm': 1349,
u'\u0148#z': 1349,
u'p\u0161t': 1349,
u'\xedh\xe1': 1348,
u'tr\u016f': 1346,
u'lde': 1346,
u'eov': 1345,
u'ryt': 1345,
u'bdr': 1344,
u'dd\u011b': 1344,
u'\xe1rc': 1341,
u'yba': 1339,
u'zdv': 1338,
u'\u011bmu': 1338,
u'm\xfd\u0161': 1337,
u'#lh': 1337,
u'\xfd\u0161l': 1336,
u'\xedln': 1336,
u'op\u0161': 1336,
u'm\xe1\u017e': 1336,
u'ipu': 1333,
u'#ec': 1332,
u'uky': 1331,
u'\xe1l\u016f': 1331,
u'\xedti': 1328,
u'd\xed\u017e': 1328,
u'tke': 1325,
u'ehn': 1324,
u'\u011bzn': 1323,
u'edb': 1322,
u'\xedl\xe1': 1322,
u'ul\xe9': 1317,
u'dko': 1315,
u'nop': 1314,
u'\xe1z\xe1': 1313,
u'\u010d\u016f#': 1313,
u'hel': 1312,
u'aud': 1312,
u'rab': 1311,
u'bn\xe1': 1309,
u'k#f': 1306,
u'ypu': 1306,
u'ukl': 1304,
u'\xedjn': 1304,
u'xtr': 1304,
u'kr\xfd': 1302,
u'imp': 1302,
u'ev\xfd': 1299,
u'uh#': 1299,
u'uhy': 1299,
u'ar\u0161': 1299,
u'\u010dky': 1299,
u'cti': 1298,
u'gum': 1298,
u't\u0159o': 1298,
u'\xfano': 1294,
u'ihu': 1294,
u'dip': 1294,
u'zvu': 1291,
u'lka': 1289,
u'z\xe1\u0159': 1287,
u'\xe1ki': 1286,
u'rhn': 1286,
u'car': 1284,
u'cej': 1284,
u'vad': 1284,
u'chi': 1283,
u'rdn': 1283,
u'\u0159az': 1281,
u'ne\u0159': 1281,
u'teb': 1281,
u'\u017e#a': 1280,
u'\xfd#u': 1279,
u'sv\xe1': 1278,
u'\xeddn': 1278,
u'zol': 1276,
u'i\xe9r': 1275,
u'ped': 1275,
u'h#\u0161': 1275,
u'in\u016f': 1274,
u'\xe1\u0161\u0165': 1272,
u'\u010dlo': 1270,
u'jce': 1269,
u'\u017e#\u0159': 1269,
u'z#b': 1266,
u'l\xedb': 1266,
u'as\xe1': 1264,
u'mko': 1264,
u'b#s': 1260,
u'lii': 1260,
u'azi': 1257,
u'rel': 1257,
u'rp\xed': 1256,
u'l\xedt': 1254,
u'es\xe1': 1254,
u'\xe9#\u0161': 1252,
u'dsu': 1251,
u'sid': 1248,
u'xib': 1248,
u'l\xe9t': 1248,
u'v\u016f#': 1248,
u'\xe1##': 1246,
u'\u016f#i': 1245,
u'z##': 1245,
u'oz#': 1244,
u'nze': 1243,
u'#g#': 1241,
u'\u017e#i': 1240,
u'med': 1238,
u'\u011bj#': 1238,
u'geo': 1237,
u'c#o': 1235,
u'ad\xe9': 1235,
u'our': 1232,
u'vnu': 1232,
u'c#t': 1231,
u'es\xed': 1231,
u'dv\xe1': 1230,
u'om\xfd': 1230,
u'#\u0161a': 1230,
u'd#\u010d': 1230,
u'\u0161n\xe1': 1230,
u'sar': 1229,
u'sl\xfd': 1229,
u'bin': 1229,
u'm\xe9m': 1228,
u'\u0159\xe1n': 1227,
u'ar\u016f': 1227,
u'bvi': 1226,
u'm#\u0161': 1225,
u'rty': 1224,
u'\u017eik': 1224,
u'\u017eek': 1223,
u'ra\u0148': 1223,
u'uv#': 1222,
u'tib': 1222,
u'gno': 1221,
u'\u0161i\u0159': 1220,
u'zim': 1218,
u'zme': 1215,
u't\u0148u': 1215,
u'p#a': 1215,
u'z\u0161i': 1214,
u'yne': 1214,
u't#\u0161': 1212,
u'za\u017e': 1212,
u'c##': 1211,
u'i\u010d\u016f': 1211,
u'\u0148#s': 1209,
u'atb': 1208,
u'er\u016f': 1207,
u'ozl': 1206,
u'u\u0161u': 1206,
u'#vd': 1205,
u'evl': 1204,
u'uv\xe9': 1204,
u'nso': 1204,
u'y\u010de': 1203,
u'omb': 1203,
u'yme': 1202,
u'elh': 1202,
u'lur': 1202,
u'uh\u016f': 1201,
u'g\u016f#': 1199,
u'\u016f#\u010d': 1199,
u'uja': 1198,
u'op\xed': 1198,
u'pir': 1198,
u'h\xe1m': 1197,
u'a\u0159s': 1197,
u'z#\u010d': 1197,
u'sv#': 1196,
u'\xe1ct': 1195,
u'una': 1194,
u'jap': 1193,
u'fir': 1192,
u'ejc': 1192,
u'a\u0159a': 1191,
u'xi#': 1191,
u'\xedd\xe1': 1190,
u'\u0165#p': 1187,
u'ko\u0159': 1187,
u'r##': 1187,
u'esa': 1184,
u'lgi': 1183,
u'\u017eko': 1183,
u'epa': 1182,
u's\xe1n': 1181,
u'sht': 1180,
u'\xedny': 1176,
u'v\u010da': 1176,
u'\u016fj\u010d': 1175,
u'\u0148#v': 1175,
u'un#': 1175,
u'n\u017ee': 1174,
u'arn': 1174,
u'#vp': 1173,
u'\u010d#j': 1173,
u'\u011bd\xed': 1172,
u'tl#': 1172,
u'yhr': 1170,
u'elg': 1169,
u'dio': 1166,
u'ul#': 1165,
u'#ac': 1163,
u'asy': 1163,
u'y#\u0161': 1162,
u'vap': 1162,
u'e\u010dl': 1160,
u'iso': 1160,
u'\u016fr#': 1158,
u'pna': 1157,
u'dv\u016f': 1157,
u'rci': 1157,
u'y\u010dl': 1156,
u'cyk': 1156,
u'udk': 1155,
u'zk\u016f': 1153,
u'ka\u0159': 1153,
u'#ii': 1153,
u'e\u0161l': 1153,
u'oev': 1152,
u'zkr': 1152,
u'ng#': 1152,
u'ubs': 1152,
u'ylu': 1152,
u'va\u010d': 1152,
u'l\xedd': 1152,
u'\u0161la': 1152,
u'yn\u016f': 1152,
u'dum': 1152,
u'\xe1#f': 1151,
u'ot\u010d': 1151,
u'\u017e#\u010d': 1151,
u'\u017eba': 1150,
u'ut\xfd': 1150,
u'dif': 1150,
u'dez': 1150,
u'\xe1cn': 1147,
u'bvy': 1146,
u'r\xfdv': 1146,
u'srd': 1145,
u'\xe1\u0159\u016f': 1144,
u'\u0165#u': 1143,
u'n#h': 1143,
u'mol': 1143,
u'\xfdb\u011b': 1141,
u'\u016fm\u011b': 1141,
u'al\xe1': 1141,
u'p\xe1t': 1139,
u'obh': 1138,
u'#\u017ea': 1137,
u'j\xedh': 1136,
u'jih': 1134,
u'gor': 1134,
u'ubn': 1131,
u'ut\xe9': 1131,
u'iop': 1131,
u'zro': 1130,
u'ium': 1130,
u'd\u011b\u010d': 1130,
u'\xe1dk': 1129,
u'gyp': 1129,
u'g\u016fm': 1127,
u'r\xe1b': 1127,
u'v\u016fd': 1127,
u'zon': 1125,
u'\u0159ik': 1122,
u'vny': 1122,
u'ce\u0148': 1121,
u'tul': 1121,
u'b\xe1n': 1120,
u'fal': 1119,
u'\u0159ka': 1118,
u'\u010dli': 1117,
u've\u010d': 1115,
u'ire': 1112,
u'\u011bk#': 1112,
u'hl\xfd': 1112,
u'b#k': 1111,
u'v\u0159\xed': 1111,
u'lts': 1110,
u'o\u010dk': 1109,
u'pel': 1109,
u'vc\u016f': 1105,
u'#tz': 1104,
u'h\u016ft': 1101,
u'lh\u016f': 1101,
u'rt#': 1101,
u'\xedtl': 1101,
u'nef': 1101,
u'fil': 1100,
u'dp#': 1100,
u'bet': 1100,
u'\xedct': 1100,
u'nin': 1098,
u'\u0159ih': 1097,
u'il\xe1': 1097,
u'sec': 1094,
u'vka': 1093,
u'vke': 1093,
u'ty\u010d': 1093,
u'bus': 1093,
u'vci': 1093,
u'imn': 1092,
u'\xe1re': 1091,
u'l\xed\u0159': 1091,
u'ell': 1090,
u'fed': 1090,
u'#sz': 1088,
u'#go': 1087,
u'far': 1087,
u'ymb': 1086,
u'at\xe1': 1086,
u'ypt': 1084,
u'\u016fdy': 1083,
u'#hy': 1083,
u'#ap': 1082,
u'id#': 1082,
u'\xedd\xed': 1081,
u'et\xe9': 1081,
u'ule': 1081,
u'd\u0159u': 1079,
u'\xe1zn': 1078,
u'tr#': 1078,
u'dci': 1077,
u'vp\u0159': 1077,
u'dre': 1076,
u'oxi': 1076,
u'knu': 1075,
u'jv\xfd': 1073,
u'bok': 1073,
u'ezl': 1072,
u'\u011b\u010dn': 1071,
u'iv\u0148': 1070,
u'ekn': 1070,
u'zli': 1070,
u'\u0161#v': 1068,
u'\xe1hr': 1068,
u'et\u011b': 1066,
u'd\u0148u': 1065,
u'gar': 1064,
u'h\u016fm': 1063,
u'tst': 1063,
u'#h\xe1': 1063,
u'r#o': 1062,
u'#jo': 1061,
u'ol\u016f': 1060,
u'hrd': 1058,
u'\xe1\u0159#': 1058,
u'o\u017ed': 1057,
u'pap': 1057,
u'zhr': 1056,
u'#l\xed': 1056,
u'rdo': 1056,
u'eol': 1056,
u'\xfd#h': 1055,
u'rko': 1055,
u'nij': 1055,
u'me\u010d': 1054,
u'\xfd#i': 1053,
u'\u017eej': 1049,
u'zoz': 1048,
u'unu': 1048,
u'pag': 1048,
u'\u0161ev': 1047,
u'#mr': 1047,
u'sva': 1046,
u'm\xfd#': 1045,
u'd\xe9l': 1044,
u'ejt': 1044,
u'#o\u017e': 1043,
u'aga': 1043,
u'red': 1043,
u'abu': 1042,
u'oms': 1041,
u'\u011bve': 1041,
u'ar\xe1': 1041,
u'm#g': 1040,
u'z#s': 1040,
u'\xedr#': 1040,
u'p\u0159#': 1039,
u'p\u0161o': 1038,
u'zkl': 1036,
u's#\xfa': 1036,
u'vzr': 1036,
u'ez\u0159': 1035,
u'ev\xe1': 1035,
u'zb\xfd': 1035,
u'ejz': 1035,
u'nk\u010d': 1034,
u'gaz': 1034,
u'\xfdst': 1034,
u'ier': 1033,
u'i\u0161l': 1033,
u'edt': 1032,
u't\u011bv': 1032,
u'b\xe9h': 1031,
u'trz': 1030,
u'mpi': 1030,
u'n#u': 1029,
u'\u011b\u017ei': 1028,
u're\u010d': 1027,
u'\xe9nu': 1026,
u'ytk': 1026,
u'\xe1hy': 1025,
u'ktn': 1025,
u'p#v': 1024,
u'#d\xe9': 1022,
u'\u0161#d': 1021,
u'eut': 1021,
u'##\xfa': 1021,
u'alb': 1019,
u'o\u017eu': 1019,
u'm\xe9d': 1019,
u'zp\u0159': 1019,
u'h\xfdc': 1019,
u'hdp': 1018,
u'mba': 1016,
u'\u0165#v': 1016,
u'seh': 1016,
u'v\u016fm': 1016,
u'd#l': 1015,
u'r\u017e\xed': 1014,
u'z#l': 1014,
u'\xedm\xe9': 1013,
u'\u016fdc': 1013,
u'ra\u0161': 1012,
u'\u0161#n': 1011,
u'nga': 1010,
u'j#m': 1009,
u'yzn': 1009,
u'#er': 1009,
u'zev': 1007,
u'\u0148#j': 1006,
u'#wa': 1006,
u'vdo': 1006,
u'yle': 1006,
u'bu\u010f': 1006,
u'sbu': 1004,
u'\xedla': 1004,
u'\xfavo': 1003,
u'ask': 1002,
u'\xedm\xfd': 1001,
u'tu\u0159': 1001,
u'#ki': 1001,
u'ajc': 1000,
u'ou\u0159': 1000,
u'#t\xe1': 997,
u'tzv': 993,
u'v\xe1\u0161': 992,
u'dt\xed': 992,
u'#hd': 992,
u'rmi': 992,
u'\u0161#m': 991,
u'l#\u0159': 991,
u'roe': 991,
u'bsi': 990,
u'zv#': 989,
u'ovk': 988,
u'ube': 988,
u'tve': 987,
u'\u0165#t': 987,
u'avb': 986,
u't\u011bt': 985,
u'\xfd#l': 983,
u'lt#': 983,
u'r\u017e#': 982,
u'\u010dt\u011b': 982,
u'\u011btu': 982,
u'\u011b\u017eo': 979,
u'bja': 978,
u'lda': 977,
u'lo\u0148': 976,
u'\xe9#g': 976,
u'dii': 976,
u'mog': 976,
u'leo': 975,
u'plu': 975,
u'vak': 975,
u'ps\xe1': 974,
u'gyn': 974,
u'lli': 973,
u'#ub': 973,
u'o\u0148s': 972,
u'#pu': 972,
u'dn\u016f': 971,
u'rp\u011b': 971,
u'e\u0159s': 971,
u'et\xfd': 970,
u'r#t': 970,
u'c#e': 968,
u'cs#': 968,
u'e\u0148u': 968,
u'r#d': 967,
u'yr\xe1': 967,
u'ud\u0161': 967,
u'bha': 965,
u'pes': 965,
u'e\u0148o': 965,
u'rah': 964,
u'ual': 963,
u'\u010dka': 962,
u'j#o': 961,
u'jna': 961,
u'or\u0148': 961,
u'nau': 961,
u'\u010dt\xed': 960,
u'sts': 959,
u'bce': 957,
u'j##': 956,
u'il\u016f': 955,
u'ham': 953,
u'bos': 952,
u'p\u011bs': 951,
u'riu': 951,
u'rzu': 950,
u'xe#': 950,
u'cke': 949,
u'b#z': 949,
u'n#l': 949,
u'b#n': 947,
u'riv': 945,
u'j#d': 944,
u'osy': 944,
u'de\u0161': 944,
u'\u017e#\u017e': 943,
u'\xedlo': 942,
u'hon': 941,
u'uen': 941,
u'avs': 940,
u'loz': 939,
u'\u0148#n': 939,
u'kav': 939,
u'emy': 938,
u'ib#': 938,
u'pn\xed': 938,
u'wto': 938,
u'tlu': 938,
u'b#o': 937,
u'itk': 937,
u'i\u0148u': 935,
u'mka': 935,
u'\xe9ta': 934,
u'h\xe1v': 933,
u'\xe1\u0159i': 933,
u'l\xe9d': 933,
u'ill': 932,
u'fis': 931,
u'kta': 931,
u'zk\xfd': 930,
u'uba': 929,
u'mzd': 928,
u'##\u0159': 927,
u'jdo': 927,
u'ejh': 927,
u'ed\u0148': 925,
u'j#j': 923,
u'rut': 923,
u'ork': 923,
u'#b\xed': 922,
u'asb': 922,
u'ndo': 921,
u'ojm': 920,
u'#mz': 920,
u'\u0159#p': 919,
u'\xe1v\u0161': 919,
u'ock': 918,
u'z\xedl': 917,
u'\u010f#n': 917,
u's\xe1t': 916,
u'le\u0148': 916,
u'\u017e#l': 916,
u'de\u017e': 915,
u'b\xe9m': 914,
u'i\u017eu': 914,
u't\xedt': 913,
u'\u0159#v': 913,
u'z\u016f#': 913,
u'sm\u016f': 912,
u'agu': 912,
u'exp': 912,
u'es\u016f': 912,
u'kzv': 911,
u'akz': 911,
u'o\u0161e': 911,
u'\u010dl#': 911,
u'v\u0148u': 910,
u'#dy': 910,
u'hai': 909,
u'#wt': 909,
u'r\xe1k': 909,
u'tid': 909,
u'\xe1ts': 908,
u'\xfd\u0161\xed': 907,
u'kyp': 907,
u'op\xe1': 907,
u'p#s': 907,
u'r\u017eu': 906,
u'h\xe9m': 904,
u'sf\xe9': 904,
u'eop': 904,
u'buc': 904,
u'ico': 904,
u'\xfazk': 903,
u's#f': 902,
u'igi': 902,
u'zop': 900,
u'jni': 900,
u'oh\xe1': 900,
u'kne': 899,
u'ean': 899,
u'umi': 899,
u'd#i': 899,
u'ira': 898,
u'#ig': 898,
u'\u0161an': 897,
u'spi': 897,
u's\u0165o': 896,
u'fy#': 896,
u'zie': 895,
u't\xe1\u0159': 891,
u'dl\xe9': 891,
u'\u011bl\xfd': 890,
u'dyn': 888,
u'\u011b#\u0161': 887,
u'kvi': 885,
u'ser': 885,
u'err': 884,
u's#\u017e': 884,
u'td#': 883,
u'd\u0148o': 881,
u'itv': 881,
u'\xedke': 881,
u'zii': 877,
u'\u011bl\xe9': 876,
u'i\u0159o': 876,
u'u\u010d\xed': 876,
u'ydl': 876,
u'ced': 875,
u'zsu': 875,
u'ary': 875,
u'i\u010dk': 875,
u'p#j': 874,
u'ijs': 873,
u'is\u0165': 872,
u'\xe1b\u011b': 871,
u'r\xe1\u010d': 870,
u'c\xe9n': 869,
u'und': 869,
u'd\u011bd': 869,
u'epi': 869,
u't#g': 868,
u'sc\xe9': 867,
u'ufa': 866,
u'p\xe1r': 866,
u'tr\xe9': 865,
u'vc\xed': 865,
u'jry': 864,
u'atd': 864,
u'tby': 863,
u'zdn': 863,
u'axe': 863,
u'ait': 861,
u'gue': 861,
u'ein': 860,
u'krm': 860,
u'y\u010dn': 859,
u'xti': 859,
u'vzp': 859,
u'rsp': 858,
u'nk#': 857,
u'\xfa\u010dt': 857,
u'v\xedn': 857,
u'z#u': 857,
u'pn\xe1': 856,
u'nli': 856,
u'ohn': 856,
u'sdr': 856,
u'br\u017e': 855,
u'une': 855,
u'r#\u017e': 854,
u'ph#': 853,
u'zp#': 852,
u'ml\xe1': 852,
u'b\xedl': 852,
u'try': 851,
u'szp': 851,
u'i\u0148o': 851,
u'n\xfdb': 851,
u'\xfdbr': 850,
u'p\u016fl': 850,
u'ozy': 850,
u'#n\xfd': 850,
u'eor': 850,
u'\xe1pu': 849,
u'\u0161tr': 846,
u'dig': 846,
u'ofy': 845,
u'ro\u0161': 844,
u'p\xe1s': 844,
u'cil': 843,
u'd\u010d\xed': 843,
u'p\u016fr': 841,
u'tce': 841,
u'byd': 841,
u'pse': 840,
u'dp\u016f': 840,
u'akm': 838,
u'r#r': 837,
u'idr': 834,
u'pic': 834,
u'kad': 832,
u'ked': 832,
u'bc\u016f': 832,
u'ipa': 832,
u'r\u0148u': 832,
u'ass': 831,
u'#hm': 831,
u'zeb': 829,
u's\xfad': 829,
u'kmi': 828,
u'\xfact': 828,
u'bci': 827,
u'#dp': 827,
u'\xfad\xe1': 827,
u'db\u011b': 826,
u'\u011bhl': 826,
u'dph': 824,
u'e\u0159i': 823,
u'#th': 823,
u'ugo': 822,
u'f\xf3r': 822,
u'\u010fov': 822,
u'#ei': 821,
u'\xedj\xed': 820,
u'agi': 819,
u'ez\xed': 818,
u'ad\u0161': 818,
u'\xe9\u010db': 818,
u'reo': 817,
u'gma': 816,
u'rke': 816,
u'mpu': 816,
u'ess': 814,
u'emc': 813,
u'rco': 813,
u'zes': 813,
u'mku': 811,
u'#f\xf3': 810,
u'tsa': 810,
u'\xe1r#': 810,
u'mes': 805,
u'jko': 804,
u'\u0165#n': 804,
u'n#\u010d': 804,
u'\u017ek\xe9': 803,
u'\u011bce': 803,
u'ap\xed': 803,
u'ta\u017e': 801,
u'rm\xed': 801,
u'dv\xed': 800,
u'ket': 800,
u'tlo': 800,
u'mek': 799,
u'lti': 799,
u'cas': 797,
u'ong': 796,
u'i\xedc': 795,
u'deh': 795,
u'lke': 794,
u'\xe9dn': 794,
u't\u0159#': 794,
u'rny': 793,
u'urd': 793,
u'#z\xf3': 792,
u'la\u0161': 792,
u'#\xfac': 791,
u'ebr': 791,
u'ma\u017e': 789,
u'epc': 789,
u'\u011b\u017e\xed': 787,
u'klu': 787,
u'l\xe1r': 786,
u'\xfdze': 785,
u'#s\xfa': 785,
u'\xe1k\u016f': 785,
u'ol\xe9': 784,
u'b\u011b\u0165': 784,
u'ul\xe1': 784,
u'ain': 784,
u'ybr': 784,
u'\u016f#h': 783,
u'tus': 782,
u'ikv': 782,
u'rvo': 782,
u'nog': 781,
u'sma': 781,
u'jch': 781,
u'\u017ed\xe1': 780,
u'lum': 779,
u'n#i': 779,
u'\xe9ry': 778,
u'irm': 778,
u'bse': 778,
u'rde': 778,
u'\u017e\xe1r': 778,
u'n\u011b\u010d': 777,
u'euv': 777,
u'rmn': 777,
u'\u0148#b': 776,
u'n\u011bd': 776,
u'\xfazc': 776,
u'miz': 775,
u'deo': 775,
u'b\u016f#': 774,
u'uis': 774,
u'\u016fl#': 774,
u'vze': 774,
u'eb\u011b': 773,
u'ciz': 772,
u'v\xe1r': 772,
u'ijd': 771,
u'rkt': 771,
u'onv': 770,
u'\u011bl\xed': 770,
u'rp\xe1': 770,
u'a\u017e\u010f': 770,
u'\u010dt\u016f': 770,
u'dex': 770,
u'esy': 770,
u'to\u0161': 769,
u'iem': 769,
u'ike': 769,
u'ejb': 767,
u'as\u016f': 766,
u'jl\xe9': 766,
u'upa': 765,
u'p\xe1d': 765,
u'r#e': 765,
u'toc': 764,
u'l\xedc': 764,
u'p#z': 764,
u'j#t': 762,
u'obb': 761,
u'p#d': 761,
u'sio': 760,
u'mie': 759,
u'hys': 759,
u'ulu': 759,
u't\xedv': 758,
u'sse': 758,
u'e\xfas': 758,
u'iga': 758,
u'#fy': 757,
u'#il': 757,
u'rts': 756,
u'\u0148#o': 756,
u'act': 756,
u'fi#': 755,
u'lus': 755,
u'ut\xe1': 751,
u'gui': 750,
u'lms': 750,
u'\u010d\xedc': 749,
u'eud': 748,
u'tch': 748,
u'dvi': 747,
u'ga\u017e': 747,
u'dc\xed': 747,
u'jky': 747,
u'ip#': 747,
u'tly': 747,
u'f\xe9r': 747,
u'\u010d#s': 746,
u'j#e': 745,
u'qui': 745,
u'\u016f#\u017e': 743,
u'd\xedc': 743,
u'veb': 743,
u's\xed\u0165': 742,
u'aps': 742,
u'of#': 741,
u'\u010d#b': 741,
u'kub': 740,
u'isc': 740,
u'ypa': 740,
u'the': 739,
u'b\u0161\xed': 738,
u'b\xe1#': 738,
u'\xfdhl': 738,
u'ar#': 738,
u'\xe9h\xe1': 737,
u'\xe1t\xed': 736,
u'o\u0161n': 736,
u'sd#': 736,
u'ger': 735,
u'ehk': 735,
u's#\u0159': 735,
u'l#\xfa': 735,
u'z#\xfa': 735,
u'agn': 733,
u'dun': 733,
u'h\xe9h': 732,
u'#we': 732,
u'alm': 732,
u'\xfamr': 730,
u'nsu': 730,
u'rt\xed': 729,
u'ihn': 728,
u'sp\xe1': 727,
u'luk': 727,
u'asc': 726,
u'bab': 725,
u'au\u010d': 724,
u'ofo': 723,
u'n\u0161t': 723,
u'e#w': 722,
u'\xfdd\u011b': 722,
u'fla': 722,
u'ajs': 722,
u'ski': 722,
u'eum': 721,
u'\xe1l\xe9': 721,
u'an\u017e': 721,
u'g#a': 720,
u'b\xfd#': 720,
u'#nl': 719,
u'yt\xed': 719,
u'emb': 718,
u'lau': 718,
u'l#l': 718,
u'oek': 715,
u'r\u010d\xed': 715,
u'to\u0159': 714,
u'ap\u011b': 714,
u'\u017e\u010fo': 713,
u'lla': 713,
u'oct': 713,
u'rre': 713,
u'yzi': 711,
u'e\u017ee': 711,
u'\xf3t#': 711,
u'#cy': 710,
u'fyz': 710,
u'wal': 709,
u'dim': 709,
u'\u0159##': 708,
u'n\u011bh': 708,
u'#c#': 707,
u'am\xe9': 707,
u'ub\u011b': 707,
u'suv': 707,
u'vej': 707,
u'uz\xed': 706,
u'pc\u016f': 705,
u'\u0161kr': 705,
u'\xe1pr': 705,
u'dc\u016f': 704,
u'vie': 704,
u'tv\u016f': 703,
u'i\u0161\xed': 703,
u'ch\xe9': 703,
u'ump': 703,
u't\xedr': 702,
u'ail': 701,
u'nts': 701,
u'm\u011bj': 701,
u'ci\xed': 700,
u'kin': 700,
u'cqu': 699,
u'b\u011bj': 699,
u'v\xfdl': 699,
u'ymp': 698,
u'm\xe1d': 698,
u'acq': 698,
u'jan': 697,
u'to\u010d': 697,
u'b\xe1k': 697,
u'yka': 697,
u'ar\xe9': 697,
u'cer': 696,
u'haz': 695,
u'\u0161sk': 695,
u'uhr': 695,
u'\xed\u0165#': 695,
u'ndr': 694,
u'tai': 694,
u'l\xe1k': 693,
u'\xednu': 691,
u'lh\xe1': 691,
u'krt': 690,
u'tk\xe9': 690,
u'\xedvi': 689,
u'svr': 689,
u'z\xedh': 688,
u'odz': 688,
u'\xe1sm': 688,
u'n\xedt': 687,
u'tte': 686,
u'gos': 686,
u'v\xfdp': 685,
u'd#f': 684,
u'a\u0161o': 684,
u'usu': 684,
u'hma': 684,
u'#cu': 683,
u'pto': 683,
u'tj#': 682,
u'\u0161pi': 682,
u'rid': 682,
u'uov': 681,
u'hko': 681,
u'#tj': 681,
u't\xe1h': 680,
u'rka': 680,
u'olv': 680,
u'rys': 680,
u'nd\u011b': 679,
u'lba': 679,
u'lyz': 678,
u'xte': 677,
u'imb': 676,
u'b\u011bm': 676,
u'jug': 676,
u'k\xe1t': 675,
u'erk': 675,
u'n#f': 675,
u'ahe': 674,
u'gli': 674,
u'a\u0161l': 674,
u'eja': 673,
u'dus': 673,
u'sie': 672,
u'rd\u016f': 672,
u'\u0161#s': 671,
u'vih': 671,
u'zvr': 671,
u'ivl': 670,
u'eib': 670,
u'sso': 670,
u'hul': 670,
u'cec': 669,
u'oh\xfd': 669,
u's\u016fm': 669,
u'tfo': 668,
u'usm': 668,
u'ch\xfd': 665,
u'vi\u010d': 665,
u'\u016f#f': 664,
u'azv': 663,
u'\u0161ej': 663,
u'mi\xe9': 663,
u'miv': 663,
u'z#i': 663,
u'eb\xe1': 663,
u'usl': 662,
u'xik': 661,
u'\u0148\u016f#': 660,
u'#\u017e\xed': 660,
u'\xedbi': 659,
u'\u010dne': 657,
u'kt\xe9': 657,
u'\xedn#': 656,
u'ovc': 656,
u'aos': 656,
u'em\xfd': 655,
u'u\u010do': 655,
u'tti': 655,
u'o\u0161t': 654,
u'd#\u0159': 654,
u'ym\xe1': 653,
u'con': 653,
u'tio': 653,
u'wan': 652,
u'agr': 652,
u'\xed\u0159\xed': 652,
u'\xedz#': 651,
u'att': 651,
u'cna': 651,
u'uno': 650,
u'olm': 650,
u'cos': 650,
u'yzd': 649,
u'ep#': 649,
u'op#': 649,
u'bis': 649,
u'e\u010d\u0148': 648,
u'g#s': 648,
u'tkn': 648,
u'\u010d#n': 648,
u'ty\u0161': 648,
u'evs': 647,
u'uin': 646,
u'\xedt\u0159': 646,
u'mst': 646,
u'ozc': 645,
u'j#b': 644,
u'pny': 644,
u'du\xe1': 644,
u'\u017eku': 643,
u'rdc': 643,
u'jho': 643,
u'bna': 642,
u'lle': 642,
u'az\u016f': 640,
u'\u011b\u017eb': 640,
u'id\u016f': 640,
u'\u017e\xedc': 639,
u'yb\u0148': 639,
u'\u011b\u0161i': 638,
u'\u016frn': 638,
u't\xe1b': 636,
u'a\u0148s': 636,
u'gua': 635,
u'a\u0161u': 635,
u'g\xe9d': 635,
u'ag\xe9': 635,
u'l\u010d\xed': 634,
u'ar\xfd': 634,
u'tvu': 633,
u'oga': 632,
u'ysk': 632,
u'mom': 631,
u'am\xfd': 630,
u'\u0161\u0165a': 629,
u'ken': 628,
u'asm': 627,
u'li\xed': 626,
u'\u011blu': 625,
u'syn': 625,
u'\u0159#n': 624,
u'v\u0148o': 624,
u'nio': 624,
u'utk': 624,
u'mag': 624,
u'#ox': 623,
u'\u017ebu': 623,
u'd\u0159o': 623,
u'\xe1v\u016f': 623,
u'\u0165mi': 622,
u'mam': 622,
u'rba': 621,
u'\u011b\u0165m': 621,
u'tyk': 621,
u'i\u0161e': 620,
u'\xe1ho': 620,
u'vln': 619,
u'\u017e#\xfa': 619,
u'of\xe1': 618,
u'\u0165as': 618,
u'\xe1dr': 618,
u'#ct': 617,
u'e\u017en': 617,
u'voc': 617,
u'ytr': 617,
u'#\u0159i': 616,
u'\u017eka': 616,
u'aus': 616,
u'ri\xe9': 616,
u'r\u0161e': 615,
u'mu\u010d': 615,
u'r#c': 615,
u'erl': 613,
u'\u017eal': 613,
u'u\u010f#': 613,
u'ss#': 613,
u'\xf3n\u011b': 612,
u'yny': 612,
u'bni': 610,
u'rs#': 609,
u'ett': 609,
u'rap': 609,
u'vzo': 608,
u'#b#': 607,
u'xid': 607,
u'll#': 605,
u'#u\u0161': 605,
u'eir': 604,
u'loe': 604,
u'\u0165#z': 604,
u'ahl': 604,
u'ho\u0159': 604,
u'lap': 604,
u'p\xe1v': 604,
u'kt\u0159': 603,
u'#lt': 603,
u'usy': 603,
u'nke': 601,
u'sav': 600,
u'yk\u016f': 600,
u'arb': 600,
u'vi\u0161': 599,
u'ufe': 598,
u'pa\u010d': 598,
u'\xe1zv': 597,
u'mra': 597,
u'un\xe1': 597,
u'eom': 597,
u'\xe9mo': 596,
u'rta': 595,
u'az\xfd': 594,
u'er\u010d': 594,
u'\xe1po': 594,
u'j#r': 593,
u'ijn': 592,
u'udc': 592,
u'rt\xe9': 592,
u'\xfdzu': 591,
u'yty': 589,
u'udy': 589,
u'aur': 587,
u'cuk': 587,
u'st\u0148': 587,
u'\xedde': 586,
u'nne': 586,
u'\xe1t\xe9': 585,
u'luo': 584,
u'dur': 584,
u'ulz': 583,
u'etl': 582,
u'vet': 581,
u'v\u011bs': 581,
u'h#g': 579,
u'v\xfdn': 579,
u'esr': 579,
u'\xfdno': 578,
u'abw': 578,
u'\u0161ka': 577,
u'rpe': 577,
u'ye#': 575,
u'\u011bpi': 575,
u'c#u': 574,
u'he#': 573,
u'\xed\u010d#': 573,
u'y\u0161s': 572,
u'#cl': 572,
u'mrz': 572,
u'akk': 572,
u'\xfdca': 572,
u'\u0161v\xfd': 572,
u'lo\u010f': 571,
u'mpo': 571,
u'pa\u0148': 571,
u'ajd': 571,
u'it\u016f': 570,
u'rso': 569,
u'odt': 569,
u'fa#': 569,
u'yt#': 569,
u'bek': 569,
u'\u0161#z': 568,
u'\xe9mn': 568,
u'm\u011bp': 568,
u'#k\xe1': 568,
u'pid': 567,
u't\xe1\u017e': 566,
u'zmo': 566,
u'\xfdlu': 566,
u'fej': 566,
u'erb': 565,
u'yi#': 565,
u'\u0148#m': 565,
u'\u011bzc': 565,
u'teo': 565,
u'aja': 565,
u'r\u0161u': 564,
u'uel': 564,
u'gl#': 564,
u'ri\u010d': 564,
u'\u0159\xed\u017e': 563,
u'\u017ece': 563,
u'\xfani': 563,
u'a\u0159\u016f': 563,
u'el\u0161': 563,
u'ob\u016f': 561,
u'zou': 560,
u'rei': 560,
u'ud\xfd': 560,
u'tip': 560,
u'\u017esk': 559,
u'uk#': 558,
u'v\xe9t': 558,
u'idy': 558,
u'b\xfdc': 558,
u'de\u010d': 558,
u'sk\u016f': 558,
u'#vk': 557,
u'urz': 556,
u'nds': 555,
u'sep': 555,
u'g#p': 554,
u'at\u0161': 554,
u'uly': 554,
u'tob': 553,
u'l\xe1\u0159': 553,
u'\u0159#s': 553,
u'kko': 552,
u'jus': 552,
u'efu': 552,
u'e\xfa\u010d': 551,
u'iba': 551,
u'd\u0161k': 550,
u'rpa': 550,
u'vo\u010d': 550,
u'vkl': 549,
u'mbu': 548,
u'eml': 548,
u'air': 548,
u'non': 548,
u'#sw': 548,
u'b\xe1z': 548,
u'sp#': 548,
u'\u011b\u010de': 548,
u'\u0165#d': 547,
u'l\xedn': 547,
u'tmo': 546,
u'\u017eli': 546,
u'\xedpu': 546,
u'hyl': 546,
u'yn#': 546,
u'zdi': 545,
u'kho': 545,
u'mex': 544,
u'\xfd\u017ei': 544,
u'anz': 544,
u'apu': 543,
u'\u011b\u0148u': 543,
u'vzk': 543,
u'zif': 543,
u'xu#': 543,
u'rd\xe1': 542,
u'\xe1zd': 541,
u'som': 541,
u'\u016frc': 541,
u'\u0161ly': 541,
u'\xe1tc': 541,
u'boc': 540,
u'pl\xe9': 540,
u'\xe1ms': 539,
u'j\u017e#': 539,
u'ub\xed': 539,
u'\xe1l\xfd': 539,
u'osf': 538,
u'lym': 538,
u'srp': 537,
u'adk': 537,
u'\xe1j\xed': 537,
u'yk#': 536,
u'zai': 536,
u'#\u0161l': 536,
u'c#r': 535,
u'dr\xe9': 535,
u'iu#': 535,
u'l#f': 535,
u'\xe1pe': 535,
u'a\u0161t': 534,
u'uob': 533,
u'lef': 533,
u'b#m': 533,
u'\u010dk\xe1': 533,
u'yan': 532,
u'aky': 532,
u'em\u0159': 531,
u'smi': 530,
u'ov\u016f': 530,
u'cor': 530,
u'jvz': 529,
u'ceg': 529,
u'\u010d\u0148o': 529,
u'sbl': 528,
u'vlo': 528,
u'emp': 527,
u'dyk': 527,
u'\xedl\u010d': 526,
u'v\xfd\u017e': 526,
u'rud': 526,
u'\xe9ra': 525,
u'\xfd\u0161#': 525,
u'cco': 523,
u'rc\u016f': 523,
u'y\u0161\xed': 523,
u'wsk': 523,
u'roa': 521,
u'nk\u016f': 520,
u'boz': 520,
u'u\u0161k': 520,
u'eo#': 520,
u'zst': 519,
u'atf': 519,
u'kt\xe1': 519,
u'znu': 519,
u'#gi': 518,
u'orc': 517,
u'kly': 517,
u'ecy': 517,
u'hei': 516,
u'm\xe1s': 516,
u'ej\u0148': 516,
u'n#g': 516,
u'sii': 515,
u'ka\u0161': 515,
u'\xf3ty': 514,
u'luz': 514,
u'af#': 514,
u'llo': 513,
u'xic': 513,
u'\u011bhn': 512,
u'r\u0148o': 512,
u'de\xe1': 512,
u'm\u011b\u0161': 512,
u'k#\u0161': 511,
u'il\xfd': 511,
u'eki': 511,
u'hmo': 511,
u'zdy': 510,
u'pie': 510,
u'p#m': 510,
u'abl': 509,
u'\u011bz\u0148': 509,
u'\xe1tl': 508,
u'z\xe1z': 508,
u'agm': 508,
u'prs': 508,
u'atm': 507,
u'rd\u011b': 507,
u'#\u0161o': 507,
u'cb#': 507,
u'\u010dej': 506,
u'hin': 506,
u'j\u010dk': 506,
u'we#': 505,
u'\xe1ls': 505,
u'nea': 505,
u'r#h': 505,
u'yce': 503,
u'\u011bsn': 503,
u'jnu': 502,
u'atc': 502,
u'nsf': 501,
u'z\u0148o': 501,
u'ckh': 500,
u'd#\xfa': 500,
u'd\u011bc': 499,
u'\xfdkl': 499,
u'm\u0159e': 499,
u'\u0161ud': 498,
u'v\u0161u': 498,
u'cr#': 498,
u'ed\u010d': 498,
u'ej\xfa': 498,
u'sac': 497,
u'b\xedj': 497,
u'\u011bt\u0159': 497,
u'gna': 496,
u'sb\u011b': 496,
u'cla': 496,
u'z\u0148\u016f': 496,
u'chk': 495,
u'erh': 495,
u'\xe1#\u0161': 493,
u'ym\xfd': 493,
u'k\xfdk': 493,
u'b#d': 493,
u'upk': 493,
u'l\xe9n': 493,
u'v#\xed': 492,
u'sm#': 490,
u'ea#': 490,
u'iet': 490,
u'\u0159\xedb': 490,
u'tr\xf6': 489,
u'r\xf6m': 489,
u'fg#': 488,
u'utv': 488,
u'mo\u010d': 488,
u'ezt': 486,
u'\xed#w': 486,
u'efg': 486,
u'yb\u011b': 486,
u'oj\xe1': 485,
u'd\u0161e': 485,
u's#g': 485,
u'\u011b\u017eu': 484,
u'\xe1l\xed': 484,
u'zao': 484,
u'oco': 484,
u'zyc': 483,
u'otc': 483,
u'sor': 482,
u'kuz': 482,
u'rda': 482,
u'tie': 482,
u'jbl': 481,
u'pky': 481,
u'\u011bva': 481,
u'e\u0161\u0165': 481,
u'\u010f#p': 480,
u'\xfdtv': 479,
u'uan': 479,
u'ofa': 478,
u'kep': 478,
u'bwe': 477,
u'#mc': 475,
u'\xe1ri': 475,
u'cat': 474,
u'b\xedc': 474,
u'r\u010dn': 473,
u'yma': 472,
u'\u0165#b': 472,
u'y\u017e\xe1': 472,
u'\u0161ok': 472,
u'ypi': 472,
u'tha': 472,
u'\u0159ce': 472,
u'd\u010da': 471,
u'pij': 471,
u'c#c': 470,
u'#pn': 470,
u'yku': 470,
u'y\u0159#': 470,
u'i\u010da': 470,
u'hry': 469,
u'\xfd##': 469,
u'aba': 469,
u'go#': 469,
u's\u0148o': 468,
u'u\u0148\xe1': 468,
u'z#\u0159': 468,
u'aln': 468,
u'di\xe1': 468,
u'tu\u0148': 468,
u'\xe9\u010dn': 467,
u'y\u0165#': 467,
u'yd\u011b': 467,
u'\u016fd\u011b': 467,
u'cn\xe1': 467,
u'\u0148\xe1k': 467,
u'u\u017e\u0161': 466,
u'j\u0148o': 465,
u'\xe1pl': 465,
u'can': 464,
u'i\xe1t': 464,
u'oak': 464,
u'od\u0161': 464,
u'ot\u016f': 464,
u'io#': 464,
u'vok': 462,
u'adb': 462,
u'jez': 461,
u'##g': 461,
u'\u016fjd': 461,
u'ejo': 461,
u'bbi': 461,
u'iml': 460,
u'\u010fme': 460,
u'p#b': 460,
u'\xfdzy': 459,
u'g#v': 459,
u'nd\xfd': 459,
u'cis': 458,
u'vy\xfa': 458,
u'ecb': 458,
u'a#w': 457,
u'ubi': 457,
u'd\u011bv': 457,
u'ml\u010d': 457,
u'b\xfdm': 457,
u'by\u010d': 457,
u'd\xfdn': 456,
u'dby': 456,
u'rb\xe1': 456,
u'ipy': 456,
u'ack': 456,
u'\u011b#g': 456,
u'eco': 455,
u'r\u0161i': 454,
u'yky': 454,
u'uso': 454,
u'rbu': 453,
u'bun': 453,
u'db\xe1': 452,
u'zmr': 452,
u'\u0148#t': 451,
u'ozt': 451,
u'n\u011bv': 451,
u'\xe9le': 451,
u'dba': 450,
u'\xf6mo': 450,
u'y\u0159a': 450,
u'\xf3nu': 450,
u'ki#': 450,
u'xpe': 450,
u'\xe1ta': 450,
u'ysp': 450,
u'es\u0148': 450,
u'od\u0148': 449,
u'ecr': 449,
u'rtv': 448,
u'vig': 448,
u'cni': 448,
u'\xe1m\xe9': 448,
u'\u010d#p': 447,
u'\xe1\u010de': 446,
u'ihy': 446,
u'#oe': 445,
u'k\xe1k': 445,
u'ub\xe9': 445,
u'tkl': 445,
u'v#\u0161': 445,
u'l\xfdt': 445,
u'et\u016f': 444,
u'kve': 443,
u'e\u010dt': 443,
u'ovr': 443,
u'ufi': 442,
u'imy': 442,
u'pt\u011b': 442,
u'a\u0159c': 442,
u'gol': 442,
u'\xedm\u011b': 441,
u'bsu': 441,
u'h\u016fd': 441,
u'\u011bra': 440,
u'ds#': 440,
u'laf': 440,
u'up\u0148': 440,
u'ogo': 440,
u'u\u0161\u0161': 439,
u'kl\xe9': 439,
u'\u010d#t': 439,
u'z#g': 439,
u'rnn': 438,
u'bg#': 438,
u'spu': 438,
u'\xe1k#': 437,
u'#sf': 436,
u'kil': 436,
u'#eh': 436,
u'iom': 436,
u'luf': 436,
u'reh': 436,
u'byi': 436,
u'adv': 435,
u'\u011bka': 435,
u'jzr': 435,
u'cok': 434,
u'uha': 434,
u'psy': 433,
u'loo': 433,
u'\xed\u010dn': 433,
u'ts#': 433,
u'\u011bty': 433,
u'uco': 432,
u'\u017eou': 431,
u'zla': 431,
u'm\xedj': 431,
u'\u0159ky': 431,
u'\xe1vs': 431,
u'\xe9r#': 430,
u'#cs': 430,
u'k\u0159\xed': 430,
u'r#u': 430,
u'uhe': 429,
u'mni': 428,
u'\u0148#d': 428,
u'dd\xed': 427,
u'nob': 426,
u'kel': 426,
u'cvi': 425,
u'sif': 425,
u'shi': 425,
u'row': 424,
u'd\u017e\xe1': 424,
u'smo': 424,
u'os\u016f': 423,
u'iot': 423,
u'i\xedm': 423,
u'mf#': 423,
u'yzo': 422,
u'\u0148#a': 422,
u's\xe1z': 422,
u'rby': 421,
u'y\xfas': 421,
u'ge#': 421,
u'\xe9\u0159e': 421,
u'b#b': 421,
u'mc\u016f': 421,
u'\xe1vc': 421,
u'g#m': 420,
u'\xfdpo': 420,
u'uls': 420,
u'fot': 419,
u'\xedza': 418,
u'od\u0159': 418,
u'dtr': 418,
u'nna': 418,
u'ci\u0165': 417,
u'\u0159#d': 416,
u'r\u016fh': 416,
u'\u016fhl': 416,
u'dm#': 416,
u'aju': 416,
u'sci': 415,
u'jar': 415,
u'eog': 414,
u'bas': 414,
u'ejk': 414,
u'jun': 414,
u'oja': 413,
u'evh': 413,
u'\xedv\u011b': 412,
u'jz\xe1': 412,
u'\u011bln': 412,
u'zpl': 412,
u'dda': 412,
u'lmu': 412,
u'hyt': 412,
u'\xf3ru': 411,
u'urs': 411,
u'ad\u0148': 411,
u'fos': 411,
u'\u0161k\xe1': 411,
u'jpo': 411,
u'dei': 411,
u'uk\u010d': 410,
u'kn\u011b': 410,
u'lho': 410,
u'r\u010do': 410,
u'ubu': 409,
u'bom': 409,
u'm\xe1r': 409,
u'\xfatv': 408,
u'fob': 408,
u'lto': 408,
u'us\u016f': 408,
u'j#h': 407,
u'\u011bdc': 407,
u'v\xfdt': 407,
u'b\xedm': 407,
u'h\xfdb': 407,
u'rz\xe1': 406,
u'r\u0161t': 406,
u'\u010dad': 406,
u'myl': 406,
u'urg': 405,
u'p\u0161u': 405,
u'alz': 404,
u'ns#': 403,
u'k\u0159i': 403,
u'\u0159\u016fm': 403,
u'krv': 402,
u'bc\xed': 402,
u'mmf': 402,
u'#ue': 402,
u'ots': 402,
u'd#\u0161': 402,
u'rzn': 401,
u'\u017ek\xe1': 401,
u'syc': 401,
u'f#a': 401,
u'bst': 400,
u'm\xed\u0161': 400,
u'ath': 400,
u'e\xe1n': 399,
u'cta': 399,
u'eoc': 398,
u'bre': 397,
u'\u0165#m': 397,
u'\xfd#\u0159': 397,
u'ca#': 396,
u'#\xfah': 396,
u'gem': 396,
u'sfo': 396,
u'\xe9ru': 395,
u'web': 395,
u'e\u017e\xe1': 395,
u'kme': 395,
u'j\xe1k': 395,
u'zd#': 394,
u'ce\xe1': 394,
u'epe': 394,
u'\u0161l\xed': 393,
u'n#\xfa': 393,
u'\u011bzi': 392,
u'oel': 392,
u'diu': 392,
u'i\u0159u': 391,
u'rpn': 391,
u'\u011btm': 391,
u'#mm': 391,
u'eim': 390,
u'\u0148#k': 390,
u'dd\xe1': 390,
u'rd\xfd': 390,
u'ko\u0161': 390,
u'hes': 388,
u'\xe1#g': 388,
u'j#i': 388,
u'j\u0161k': 388,
u'nza': 387,
u'ev\u016f': 387,
u'ucc': 387,
u'\u011bh#': 387,
u'pja': 387,
u'puy': 387,
u'c#l': 387,
u'gel': 387,
u'lun': 387,
u'r\xe9n': 387,
u'zci': 385,
u'\u010f#j': 385,
u'fie': 384,
u'olk': 384,
u'itt': 384,
u'f\xe1l': 384,
u'jr\u016f': 383,
u'\u011brk': 383,
u'et\xe1': 383,
u'bed': 383,
u'tyl': 383,
u'sbo': 382,
u'iag': 382,
u'hte': 382,
u'ceu': 381,
u'tub': 381,
u'\u011bz\xed': 381,
u'bvo': 380,
u'ozr': 380,
u'ree': 380,
u'oge': 380,
u'\u016fty': 379,
u'\u0159#z': 379,
u'\u017eb\u011b': 378,
u'ias': 378,
u'r\u010du': 378,
u'job': 378,
u'ht\xed': 378,
u'\xe9n\xe1': 377,
u'dzi': 377,
u'eup': 377,
u'bye': 377,
u'j\xfa\u010d': 377,
u'sco': 376,
u'\u0161#k': 376,
u'z\xedn': 376,
u'\u011bzs': 376,
u'il\xe9': 376,
u'run': 376,
u'hie': 376,
u'cin': 375,
u'opc': 375,
u'yse': 375,
u'#km': 375,
u'p#o': 375,
u'\u0161#\u010d': 374,
u'ets': 374,
u'zr\u016f': 373,
u'#n\xe9': 373,
u'at\u010d': 373,
u'tap': 373,
u'ott': 372,
u'r\xe1j': 372,
u'\u010d\u016fm': 371,
u'oaf': 371,
u'\u0159#t': 370,
u's\xed\u010d': 370,
u'l\u0148k': 370,
u'aki': 370,
u'rbo': 369,
u'puk': 369,
u'\u017ek\xfd': 369,
u'#wi': 369,
u'yp#': 369,
u'fat': 369,
u'ls#': 368,
u'zec': 368,
u'b#\u017e': 368,
u'vk\xe1': 367,
u'tox': 367,
u'eou': 367,
u'ype': 367,
u'fan': 367,
u'chm': 366,
u'elc': 366,
u'\xe1lc': 366,
u'\u016fdu': 366,
u'bie': 366,
u'lmo': 366,
u'j\u010de': 366,
u'bot': 365,
u'\u016f#\xfa': 365,
u'sus': 365,
u'l\u0161t': 365,
u'tv\xfd': 364,
u's\xfdr': 364,
u'e\u010d#': 364,
u'\u016fta': 364,
u'j#u': 363,
u'zpa': 363,
u'ioa': 363,
u'\xedrk': 362,
u'vdi': 362,
u'ies': 362,
u'u\u017eo': 362,
u'p\u011bn': 361,
u'jwa': 361,
u'hob': 361,
u'\xe9lk': 361,
u'ajw': 361,
u'kj\xf3': 359,
u'aul': 359,
u'j\xf3t': 359,
u'gab': 359,
u'o\u017eo': 359,
u'le\u0161': 359,
u'a\u0148u': 358,
u'zi\u010d': 358,
u'fen': 358,
u'mau': 357,
u'oll': 357,
u'koa': 357,
u'jer': 356,
u'n\xf3z': 355,
u'dri': 355,
u'#b\xe1': 355,
u'eld': 355,
u'\u010f#v': 355,
u'#s\xfd': 354,
u'd\u017ei': 354,
u'tta': 354,
u'\xfdsk': 354,
u'ssi': 354,
u'#ai': 353,
u'iac': 353,
u'nip': 353,
u'\u017e\u0148o': 352,
u'\u0148me': 352,
u'eot': 352,
u'n#\u0159': 352,
u'fre': 351,
u'ps#': 351,
u'\u0159#k': 351,
u'uby': 351,
u'sim': 351,
u'loc': 350,
u'dde': 350,
u'i\u0161i': 349,
u'\u0161to': 349,
u'rms': 349,
u'mum': 348,
u'ab\u0161': 348,
u'\u011bnn': 347,
u'sr\xed': 347,
u'tba': 347,
u'#kj': 347,
u'ri\xf3': 347,
u'gn\xf3': 346,
u'eas': 346,
u'hs#': 346,
u'yh\xfd': 346,
u'uus': 346,
u'cto': 346,
u'yt\xe9': 346,
u'gur': 346,
u'\xe1t\xfd': 346,
u'tv\u011b': 345,
u'b\u0148u': 345,
u'\u0159#m': 345,
u'b\xe1j': 345,
u'cre': 344,
u'rbe': 344,
u'i\u0161o': 344,
u'\xfdsa': 344,
u'\u011b\u010d\xed': 344,
u'xn\u011b': 344,
u'zb\u011b': 344,
u'rm\xe9': 344,
u'\xe1ky': 343,
u'ssa': 343,
u'ows': 343,
u'\xf3n#': 342,
u'ld#': 342,
u'n\xe1d': 342,
u'diz': 342,
u'r\u016fk': 341,
u'aid': 341,
u'#tc': 341,
u'\u017e\xe1n': 341,
u'lvi': 341,
u'\xe1m\xfd': 340,
u'eh\u010d': 340,
u'ybe': 340,
u'ncu': 339,
u'jd\u017e': 339,
u'\u011btr': 339,
u'has': 338,
u'uge': 338,
u'a\u0161n': 338,
u'zue': 338,
u'nni': 338,
u'rme': 338,
u'\xe1ry': 338,
u'bow': 337,
u'\xe1\u017eo': 337,
u'ub\u0161': 337,
u'uda': 337,
u'\xe1jd': 337,
u'hez': 336,
u'yvs': 336,
u'hyc': 336,
u'sup': 335,
u'yjm': 335,
u'eam': 334,
u'\u0165#k': 334,
u'pc\xed': 334,
u'ptu': 334,
u'\u0159#o': 334,
u'lve': 334,
u'oss': 333,
u'ym\u011b': 333,
u'g#n': 333,
u'\u016fd\u010d': 333,
u'\xfdli': 333,
u'oob': 333,
u'iki': 333,
u'pea': 333,
u'\xedbe': 332,
u'mbe': 332,
u'hev': 332,
u'\u011bj\u017e': 332,
u'nz\xed': 331,
u'gin': 331,
u'ndd': 331,
u'flo': 331,
u'rif': 331,
u'e\u010do': 330,
u'eu\u010d': 330,
u'oam': 330,
u'dm\xe9': 330,
u'zre': 329,
u'o\u010fs': 329,
u'\u010fst': 329,
u'\u0159#j': 329,
u'im\u016f': 329,
u'rt\xe1': 329,
u'vby': 329,
u'ba\u017e': 329,
u'hut': 329,
u'hil': 329,
u'nzo': 328,
u'ims': 328,
u'lbu': 328,
u'rri': 327,
u'usc': 327,
u'sop': 326,
u'\u016fr\u010d': 326,
u'oun': 326,
u'g#j': 326,
u'k\xe1r': 325,
u'kas': 325,
u'm\xfdt': 325,
u'kok': 325,
u'usd': 325,
u'\u0159#e': 324,
u'l\u010de': 324,
u'\xfdl\xed': 324,
u'pa\u0159': 324,
u'\u011bsi': 323,
u'auz': 323,
u'zto': 323,
u'suc': 322,
u'\u0148ko': 322,
u'ujt': 322,
u'put': 321,
u'dox': 321,
u'ku\u0159': 321,
u'elv': 321,
u'ft#': 321,
u'hiv': 321,
u'zhe': 320,
u'tto': 320,
u'zp\xe1': 320,
u'p#e': 320,
u'rvy': 320,
u'ojs': 319,
u'lzh': 319,
u'\u010d#v': 319,
u'nbe': 319,
u'rer': 319,
u'j#c': 318,
u'\xe9vo': 318,
u'gf#': 318,
u'f\u011b#': 318,
u'hyp': 318,
u'of\u011b': 317,
u'mii': 317,
u'g#o': 317,
u'uss': 317,
u'\u017e#f': 316,
u'sot': 316,
u'ct\xed': 316,
u'rdu': 316,
u'iha': 316,
u'v\xe9v': 316,
u'nai': 316,
u'j\xedl': 316,
u'iru': 315,
u'onl': 315,
u'vte': 315,
u'#ic': 315,
u'zt\u0159': 315,
u'aov': 315,
u'\u0161#r': 314,
u'c#\u017e': 314,
u'euz': 314,
u'\u0161te': 314,
u'akv': 314,
u'gri': 314,
u'bnu': 313,
u'ud\u016f': 313,
u'jp\u0159': 313,
u'\u016fze': 313,
u'map': 313,
u'vav': 313,
u'new': 313,
u'rzd': 312,
u'c#\u010d': 312,
u'hoj': 312,
u'a\u017el': 312,
u'zot': 311,
u'\xedzd': 311,
u'ads': 311,
u'dl\u016f': 311,
u'ap\xe1': 311,
u'enl': 311,
u'p\xedm': 310,
u'ibo': 310,
u'\xe1sn': 310,
u'dys': 310,
u'hn#': 309,
u'\u0165#o': 309,
u'ga\u010d': 309,
u'vii': 309,
u'dto': 309,
u'\u011bt\u016f': 309,
u'eac': 308,
u'eug': 308,
u'\xe1ck': 308,
u'b\u0148o': 308,
u'r\xe1s': 308,
u'h\u0159e': 308,
u'fu#': 308,
u'\xe1rf': 308,
u'lvy': 308,
u'\u0161#o': 307,
u't\xe1d': 307,
u'dk\xe1': 307,
u'vik': 307,
u'com': 307,
u'pi\u010d': 307,
u'zry': 306,
u'heu': 306,
u'#wo': 306,
u'yp\u016f': 306,
u'het': 305,
u'\u0159\xedh': 305,
u'\u0148at': 305,
u'c\xfan': 305,
u'nc\xfa': 305,
u'#\xe1z': 305,
u'fou': 305,
u'ipe': 305,
u'vi\u0148': 305,
u'i\xf3n': 305,
u'\xe9k\u016f': 305,
u'#vt': 305,
u'r#l': 305,
u'ezc': 304,
u'\xfdza': 304,
u'jea': 304,
u'rb\u011b': 304,
u'vdy': 304,
u'sha': 304,
u'cty': 304,
u'\u0161ek': 303,
u'dvr': 303,
u'iam': 303,
u'jsi': 303,
u'\xedlk': 303,
u'ryn': 303,
u'eoh': 302,
u'\xfdce': 302,
u'rf\xfa': 301,
u'egf': 301,
u'f\xfar': 301,
u'#ms': 301,
u'bku': 300,
u'\xedl\xe9': 300,
u'rky': 300,
u'sea': 300,
u'lul': 300,
u'zu\xe1': 300,
u'\xfdri': 300,
u'omm': 299,
u'z\u0159i': 299,
u'ges': 299,
u'\u016f#\u0159': 299,
u'von': 299,
u'ak\u016f': 299,
u'erd': 299,
u'hac': 298,
u'ngr': 298,
u'hn\u011b': 298,
u'mug': 298,
u'\u011bdy': 298,
u'm\xfdl': 298,
u'yti': 298,
u'pa\u0161': 298,
u'h\xfdl': 298,
u'c\xedr': 297,
u'\xedn\xed': 297,
u'n\u011bp': 297,
u'ege': 297,
u'i\u010do': 297,
u'\xe1dl': 296,
u'\xe9no': 296,
u'add': 296,
u'ygi': 296,
u'hyg': 296,
u'swi': 296,
u'tv\xe9': 295,
u'kab': 295,
u'mli': 295,
u'eos': 295,
u'av\u016f': 295,
u'\u010f#z': 294,
u'zbe': 294,
u'\xf3ts': 293,
u'dr\xfd': 293,
u'l\xf3z': 293,
u'jci': 293,
u'hyn': 293,
u'gou': 293,
u'lz#': 292,
u'c#i': 292,
u'\u016fzc': 292,
u'yt\xe1': 292,
u'uts': 292,
u'\xe1nn': 292,
u'kv\xe1': 292,
u'umy': 291,
u'j\xedz': 291,
u'vko': 290,
u'\xfanu': 290,
u'ehs': 290,
u'ul\xf3': 290,
u'p#t': 290,
u'dl\xed': 290,
u'nof': 289,
u'gam': 289,
u'ubk': 289,
u'kun': 289,
u'rpr': 289,
u'od\xe9': 289,
u'ct\u011b': 289,
u'nig': 289,
u'#h\u0159': 289,
u'hr\u016f': 289,
u'#cr': 288,
u'\u0159#b': 288,
u'deg': 288,
u'\u011bvy': 288,
u'g#k': 288,
u'msp': 288,
u'wat': 287,
u'orl': 287,
u'muk': 287,
u'dhl': 287,
u'nia': 287,
u'naf': 287,
u'ud\xe9': 287,
u'nid': 287,
u'by\u0165': 287,
u'bp#': 287,
u'p\u0148o': 286,
u'lop': 286,
u'ew#': 286,
u'u\u0159i': 286,
u'p##': 286,
u't\u0161t': 285,
u'mid': 285,
u'\u010d#m': 285,
u'vas': 285,
u'ctu': 285,
u'ypy': 285,
u'gle': 285,
u'igu': 285,
u'nr#': 284,
u'eat': 284,
u'oh\u0159': 284,
u'cot': 284,
u'h\u0159i': 284,
u'rig': 284,
u'pnr': 283,
u'on\xed': 283,
u'f\xe9\u0159': 283,
u'p\xe1l': 283,
u'\xed\u0161#': 282,
u'rt\u016f': 282,
u'uer': 282,
u'zij': 282,
u'on\u017e': 281,
u'pog': 281,
u'zt\u011b': 281,
u'bh\xe1': 281,
u'b\xe1t': 281,
u'r\xfdt': 281,
u'\u016f\u0159e': 281,
u'\xfapa': 280,
u'was': 280,
u'i\u0165u': 280,
u'wif': 280,
u'adt': 280,
u'rd\xe9': 280,
u'lai': 280,
u'\xe9k\xe1': 280,
u'ojk': 279,
u'zkv': 279,
u'ife': 279,
u'ift': 279,
u'n\u017es': 279,
u'\u017edi': 279,
u'ils': 279,
u'\xfate': 278,
u'\u0159mi': 278,
u'\u011b\u0161o': 278,
u'i#w': 278,
u'jos': 278,
u'xty': 278,
u'vbu': 278,
u'cob': 278,
u'\xe9re': 277,
u'\u0148#u': 277,
u'd\u011bs': 277,
u'iss': 277,
u'dap': 277,
u'\xf3na': 276,
u'uye': 276,
u'z\xe1b': 276,
u'\u017eu#': 276,
u'p#u': 276,
u'ica': 276,
u'tvy': 275,
u'edr': 275,
u'xpo': 275,
u'bi\u010d': 275,
u'piv': 275,
u'tag': 275,
u'\xfdba': 274,
u'me\u0148': 272,
u'\u0159#a': 272,
u'ipi': 272,
u'saj': 272,
u'kl\xfd': 272,
u'\u016fdn': 272,
u'ab\xe9': 272,
u'#o\u0161': 271,
u'\u011bvn': 271,
u'#v\u0159': 271,
u'km#': 271,
u'arv': 271,
u'\xedb\xed': 270,
u'iv\u016f': 270,
u'ub\xe1': 270,
u'rk#': 270,
u'xt\u016f': 270,
u'#\xfa\u017e': 269,
u'ams': 269,
u'\u010d\xedh': 269,
u'nss': 269,
u'\xe9mi': 269,
u'sij': 269,
u'#yo': 269,
u'yke': 269,
u'yco': 269,
u'mcc': 269,
u'fuk': 269,
u'j\xedd': 269,
u'unt': 268,
u'iii': 268,
u'yln': 268,
u'ze\u0161': 268,
u'yov': 268,
u'\xe9ky': 268,
u'rra': 268,
u'\u011b\u017ec': 267,
u'\xfdmk': 267,
u'#\u0161\u0165': 267,
u'd#g': 267,
u'vrs': 267,
u'avl': 267,
u'hao': 266,
u'no\u010d': 266,
u'\xe1\u017e#': 266,
u's\xe1#': 266,
u'reb': 266,
u'hit': 266,
u'\u010dbu': 266,
u'j#\xfa': 265,
u'umb': 265,
u'gi\u010d': 265,
u'bej': 265,
u'\xe1p\xe1': 264,
u'\u010d#k': 264,
u'tc\xed': 264,
u'xen': 264,
u'th#': 264,
u'mop': 264,
u'h\u016f\u0159': 263,
u'alg': 263,
u'bdi': 263,
u'\u010f#k': 263,
u'ull': 263,
u'ogy': 263,
u'\xe9ni': 262,
u'#gs': 261,
u'owa': 261,
u'rgo': 261,
u'que': 261,
u'ze\u0148': 261,
u'vne': 261,
u'tum': 261,
u'dyc': 261,
u'sca': 260,
u'eev': 260,
u'#r\xe9': 260,
u'laz': 260,
u'ua#': 260,
u'zef': 260,
u'zy\u010d': 260,
u'#xe': 260,
u'dr\u016f': 259,
u's\u0148u': 258,
u'ngt': 258,
u'mai': 258,
u'pl\xed': 258,
u'fac': 258,
u'ir#': 257,
u'tia': 257,
u'z\xedr': 257,
u'#py': 257,
u'\u010fte': 257,
u'\xe9r\u016f': 256,
u'\xedr\u0148': 256,
u'cap': 256,
u'ncl': 256,
u'tho': 256,
u'tet': 256,
u'nch': 255,
u'i\u0159e': 255,
u'g#b': 255,
u'vuu': 255,
u'pe\u0148': 255,
u'lyh': 255,
u'tig': 255,
u'mna': 254,
u'orp': 254,
u'ldn': 254,
u'tso': 254,
u'oal': 254,
u'se\u0161': 254,
u'b\u011bs': 254,
u'rl\xed': 254,
u'l#g': 254,
u'\u0148te': 254,
u'abc': 254,
u'uit': 253,
u'n#w': 253,
u'oom': 253,
u'#db': 253,
u'gto': 252,
u'aas': 252,
u'alv': 252,
u'r\xe1h': 252,
u'\u017eas': 252,
u'lva': 252,
u'\xfam\u011b': 251,
u'uny': 251,
u'b\u011bn': 251,
u'ih#': 251,
u'vm\u011b': 251,
u'b\xe1d': 251,
u'sof': 250,
u'ub\xfd': 249,
u'tco': 249,
u'anb': 249,
u'lea': 249,
u'b#t': 249,
u'rui': 249,
u'bev': 249,
u'bak': 249,
u'f#v': 249,
u'\u010dke': 249,
u'eb\xfd': 249,
u'urt': 248,
u'maa': 248,
u'ieb': 248,
u'ioe': 248,
u'thi': 247,
u'beh': 247,
u'esd': 247,
u'd\xedr': 247,
u'ez\u016f': 246,
u'oz\u016f': 246,
u'gau': 246,
u'#ih': 246,
u'hoa': 246,
u'\xfaru': 246,
u'yor': 246,
u'lip': 246,
u'\u017eic': 246,
u'anm': 246,
u'dv\xfd': 245,
u'gas': 245,
u'rar': 245,
u'l\xeds': 245,
u'p#r': 245,
u'h\xe1c': 244,
u'\xfdn\u011b': 244,
u'\xe1ke': 244,
u'rvi': 244,
u'rza': 243,
u'\u0161ed': 243,
u'ro\xfa': 243,
u'\u0161ky': 243,
u'gsp': 243,
u'tma': 243,
u'ej\u010d': 243,
u'ukn': 242,
u'lo\u0161': 242,
u'eo\u010d': 242,
u'col': 242,
u'za\u0161': 242,
u'ofu': 241,
u'\u011bcn': 241,
u'zio': 241,
u'gik': 240,
u'lei': 240,
u'zi\xed': 240,
u'\u010f#a': 240,
u'bah': 240,
u's\xe1l': 239,
u'rzk': 239,
u'\u017e#\u0161': 239,
u'l#\u0161': 239,
u'yj\xed': 239,
u'r\u0161o': 238,
u'y\u010di': 238,
u'c#h': 238,
u'acc': 238,
u'\u010f#m': 238,
u'nma': 238,
u'baj': 238,
u'r#i': 238,
u'p\u016fm': 238,
u'pts': 237,
u'jne': 237,
u'\xe1s\xed': 237,
u'jvh': 237,
u'\xfatl': 236,
u'\u010dar': 236,
u'otb': 236,
u'\xedcn': 236,
u'fig': 235,
u'ev\u0148': 235,
u'kie': 235,
u'ch\u0159': 235,
u'm\xe1v': 235,
u'\u011bpr': 235,
u'\u0161#b': 234,
u'v\u010d\xed': 234,
u'e\u010dk': 234,
u'\u016fv#': 234,
u'ey#': 234,
u'sf#': 234,
u'ad\u017e': 234,
u'\xeddy': 234,
u'o\u0161i': 234,
u'\u010f#s': 234,
u'ecd': 234,
u'r\xed#': 234,
u'dau': 234,
u'zb\xe1': 234,
u'\u011bda': 233,
u'\xedd\u011b': 233,
u'iou': 233,
u'hme': 233,
u'#zu': 232,
u'\u011bv\u011b': 232,
u'jor': 232,
u'joh': 232,
u'\xfa\u017ea': 232,
u'okt': 232,
u'pr\u0161': 232,
u'hk\xfd': 231,
u'nt\u011b': 231,
u'ghe': 231,
u'#ly': 231,
u'\u010dti': 231,
u'dog': 230,
u'kip': 230,
u'lso': 230,
u'dk\xe9': 230,
u'rkm': 230,
u'ip\u016f': 230,
u'kim': 230,
u'\u0159ku': 230,
u'utl': 230,
u'ilm': 230,
u'\xe1rs': 230,
u'j\u010da': 230,
u'rli': 229,
u'bes': 229,
u'c\xedn': 228,
u'#vm': 228,
u'ml#': 228,
u'li\xf3': 228,
u'ri#': 228,
u'\xf3zy': 227,
u'ob\u017e': 227,
u'unc': 227,
u'eha': 227,
u'exa': 227,
u'la\u0159': 227,
u'hab': 226,
u'yt\u011b': 226,
u'jac': 226,
u'viv': 226,
u'#qu': 226,
u'ec\xed': 226,
u'ajl': 226,
u'cn\xed': 226,
u'on\xe9': 225,
u'yd\xed': 225,
u'coe': 225,
u'oxn': 225,
u'\u011bv#': 224,
u'\xe9t\u011b': 224,
u'ley': 224,
u'ck#': 224,
u'ysi': 224,
u'maz': 223,
u'iel': 223,
u'lna': 223,
u'zig': 223,
u'b#u': 223,
u'\u0148#r': 223,
u'bii': 223,
u'hic': 223,
u'zch': 222,
u'cam': 222,
u'orf': 222,
u'\u016f#g': 222,
u'ahm': 222,
u'mya': 222,
u'tei': 222,
u'spj': 222,
u'rau': 222,
u'nus': 222,
u'u\u010fm': 222,
u'\xedb\u011b': 221,
u'\u0165#a': 221,
u'e#\xed': 221,
u'rkv': 221,
u'jsl': 221,
u'igo': 221,
u'hts': 221,
u'x#a': 221,
u'\u010fuj': 221,
u'lih': 221,
u'ish': 221,
u'p\u010dn': 221,
u'i\u010du': 221,
u'byj': 221,
u'ehu': 220,
u'\u017eid': 220,
u'oop': 220,
u'hus': 220,
u'efd': 220,
u'c\xe1t': 219,
u'\xedms': 219,
u'beu': 219,
u'\u011bvu': 219,
u'\u0161ku': 219,
u'b##': 219,
u'yck': 219,
u'ebl': 219,
u'oet': 218,
u'cd#': 218,
u'\u010d#e': 218,
u'utt': 218,
u'#dc': 218,
u'nny': 218,
u'tir': 218,
u'd\xe1f': 218,
u'\xe9nn': 217,
u'gne': 217,
u'app': 217,
u'ma\u0159': 217,
u'ol\u0148': 217,
u'seg': 217,
u'cof': 217,
u'lma': 217,
u'wob': 217,
u'swo': 217,
u'\xe1f\xed': 217,
u'haa': 216,
u'\xfdbu': 216,
u'hri': 216,
u'em\xe9': 216,
u'etv': 216,
u'uth': 216,
u'j#\u010d': 215,
u'bve': 215,
u'or\u010d': 215,
u'dk\xfd': 215,
u'ubv': 215,
u'\u010dat': 215,
u'\xfdmu': 215,
u'ehe': 215,
u'v\xfdu': 215,
u'r\u016f\u017e': 215,
u'a\u010d#': 215,
u'ulk': 215,
u'ej\u017e': 215,
u'fii': 214,
u'sny': 214,
u'v\xedk': 214,
u'ao#': 214,
u'yra': 214,
u'az\u0161': 213,
u'o\xfav': 213,
u'#s\xe9': 213,
u'oec': 213,
u'how': 213,
u'fd#': 213,
u'#d\u017e': 213,
u'pr#': 213,
u'hir': 213,
u'ril': 213,
u'pne': 212,
u'e\u010du': 212,
u'nag': 212,
u'bac': 212,
u'iri': 211,
u'pnu': 211,
u'\u011b\u0165#': 211,
u'nei': 211,
u'dag': 211,
u'ogn': 210,
u'ymu': 210,
u'gia': 210,
u'vep': 210,
u'z\xe1\u0161': 210,
u'coh': 210,
u'shl': 210,
u'esf': 210,
u'f#j': 210,
u'arz': 210,
u'jm\xed': 209,
u'zof': 209,
u'au#': 209,
u'mah': 209,
u'#\xe9r': 209,
u'z\xedv': 209,
u'paj': 209,
u'rai': 209,
u'oz\xe1': 208,
u'\xe1t\xe1': 208,
u'rds': 208,
u'yc\xed': 208,
u'l\xe9\u0159': 208,
u'f#p': 208,
u'y\u0148#': 208,
u'eul': 207,
u'\xf3ra': 207,
u'jns': 207,
u'#eb': 207,
u'giu': 207,
u'\xfdml': 207,
u'rpo': 207,
u'p\u011bk': 207,
u'u\u017eu': 207,
u'\u010dby': 207,
u'fol': 206,
u'\u011bkn': 206,
u'\u011bke': 206,
u'mde': 206,
u'gla': 206,
u'\xe9vr': 205,
u'gil': 205,
u'ho\u0161': 205,
u'n\xe9v': 205,
u'rle': 205,
u'vuz': 205,
u'#ug': 205,
u'pau': 205,
u'mk\xe1': 205,
u'uz\xe1': 204,
u'aue': 204,
u'chs': 204,
u'bs\xe1': 204,
u'elt': 204,
u'rp#': 204,
u'koe': 204,
u'lv#': 204,
u'p\xe9#': 203,
u'ev\xe9': 203,
u'war': 203,
u'yvr': 203,
u'y\u010d\xed': 203,
u'dt#': 203,
u'dte': 203,
u'r#f': 203,
u'e\u0148m': 203,
u'hau': 202,
u'ouo': 202,
u'it\u0161': 202,
u'an\u0161': 202,
u'\xed\u0159\u016f': 202,
u'dzb': 201,
u'd\u0161t': 201,
u'\u0148#i': 201,
u'#ia': 201,
u'hk\xe9': 201,
u'xan': 201,
u'be\u017e': 201,
u'biv': 201,
u'p\xedc': 200,
u'\xe1m\xe1': 200,
u'\xfduk': 200,
u'urc': 200,
u'bt\u011b': 200,
u'a\u010dt': 200,
u'eju': 200,
u'\xedbu': 199,
u'o#\xed': 199,
u'\xe9si': 199,
u'aiv': 199,
u'\xe1v\xed': 199,
u'hn\xe1': 198,
u'kya': 198,
u'rtm': 198,
u'ltr': 198,
u'ep\xed': 198,
u'cca': 198,
u'gsk': 198,
u'\xfdcv': 198,
u'j#\u017e': 197,
u'doa': 197,
u'jkr': 197,
u'saa': 197,
u'b#r': 197,
u'efl': 197,
u'k\xe1d': 196,
u'\xe1ma': 196,
u'\u0148##': 196,
u'vle': 196,
u'u#w': 196,
u'zur': 196,
u'nym': 196,
u'enr': 196,
u'i\u010d#': 196,
u'\xe9ro': 195,
u'\u0148an': 195,
u'enb': 195,
u'lbe': 195,
u'fam': 195,
u'f\xfaz': 195,
u'hmi': 195,
u'zke': 194,
u'nva': 194,
u'j#l': 194,
u'z#\u017e': 194,
u'#zf': 194,
u'eon': 194,
u'\xedv#': 194,
u'yjs': 194,
u'rca': 193,
u'ois': 193,
u'mi\xed': 193,
u'uy#': 193,
u'exe': 193,
u'doe': 193,
u'hti': 193,
u'lud': 193,
u'ak\u0159': 193,
u'#\u0165i': 193,
u'lny': 193,
u'e\xfan': 193,
u'igm': 193,
u'zob': 192,
u'\xedvn': 192,
u'krz': 192,
u'tkr': 192,
u'who': 192,
u'b\u0159\xed': 192,
u'sau': 192,
u'zpt': 192,
u'ep\xe9': 192,
u'eky': 192,
u'\xfale': 192,
u'bij': 192,
u'ijt': 191,
u'uv\u010d': 191,
u'gme': 191,
u'aag': 191,
u'rlo': 191,
u'ocu': 191,
u'r#\u010d': 191,
u'ja\u0159': 190,
u'cal': 190,
u'#f\xfa': 190,
u'ky\u0148': 190,
u'mp\xe1': 190,
u'\xe1b\xed': 190,
u'vr\u0161': 190,
u'do\u017e': 189,
u'nsi': 189,
u'#z\u010d': 189,
u'roo': 189,
u'd\u017e#': 189,
u'jou': 189,
u'\u011bjn': 189,
u'\u016f\u017eo': 189,
u'hek': 188,
u'cyn': 188,
u'r\u017eb': 188,
u's\xe9r': 188,
u'\xe9mk': 188,
u'\xe8so': 188,
u'zym': 188,
u'a\u010d\u016f': 188,
u'\u016fzi': 187,
u'og\u016f': 187,
u't\xf3n': 187,
u'ipk': 187,
u'agh': 187,
u'bik': 187,
u'\xe1j\u016f': 187,
u'\xedsm': 187,
u'ibr': 186,
u'een': 186,
u'm\u011bk': 186,
u'p\xe1#': 186,
u'yn\xe1': 186,
u'\u0161#t': 185,
u'w#y': 185,
u'inz': 185,
u'ibs': 185,
u'\xe1p\u011b': 185,
u'ylk': 185,
u'n\xe9r': 185,
u'fov': 185,
u'r\u016fl': 185,
u'ccr': 185,
u'u\u017es': 185,
u'ysy': 185,
u'ush': 185,
u'ez\xfa': 184,
u'ifo': 184,
u'kau': 184,
u'urb': 184,
u'aun': 184,
u'mme': 184,
u'rth': 184,
u'ed\u0159': 184,
u'g\xe9r': 184,
u'eth': 184,
u'rh\xe1': 184,
u'iev': 184,
u'e\u0148s': 184,
u'osc': 183,
u'tb\xe1': 183,
u'kev': 183,
u'\u011b\u017e\u0161': 183,
u'z\u010d\xe1': 183,
u'\xfahe': 183,
u'irk': 182,
u'#cv': 182,
u'\xf3re': 182,
u'\u0165#e': 182,
u'\u0159#r': 182,
u'#r\xfc': 182,
u'\xe9\u0159i': 182,
u'\u011bcu': 182,
u'but': 182,
u'\xed\u0159#': 182,
u'os\xe9': 181,
u'p\xedn': 181,
u'\u0161is': 181,
u'ymy': 181,
u'cl\xe9': 181,
u'zyh': 181,
u'esb': 181,
u'\u017eer': 181,
u'u\u010ft': 181,
u'\u010dce': 181,
u'jud': 181,
u'fa\u0161': 181,
u'erf': 180,
u'ldo': 180,
u'zdl': 180,
u'\u010deb': 180,
u'rki': 180,
u'hof': 180,
u'pte': 180,
u'b#e': 180,
u'\xfdle': 180,
u'aar': 179,
u'jro': 179,
u'urm': 179,
u'sfe': 179,
u'ogm': 179,
u'u#\u0165': 179,
u'a#\xed': 179,
u'bau': 179,
u'\xfadr': 179,
u'dy\u0165': 179,
u'hnb': 178,
u'\u0165#r': 178,
u'#b\xf6': 178,
u'm\xe1\u0159': 178,
u'e\u0165a': 178,
u'exc': 178,
u'\u017e\xe1k': 178,
u'cc#': 178,
u'vba': 178,
u'be\u0165': 178,
u'up\xe1': 178,
u'dup': 178,
u'euk': 177,
u'\u010dnu': 177,
u'rse': 177,
u'fst': 177,
u'wle': 177,
u'#ee': 177,
u'ko\u010d': 177,
u'aft': 177,
u'rv#': 177,
u'ybl': 177,
u'hed': 176,
u't\xe1p': 176,
u'ofs': 176,
u'\xedre': 176,
u'doy': 176,
u'\xe1\u017ec': 176,
u'i\u0161u': 176,
u'lls': 176,
u's\xe1d': 176,
u'ipc': 176,
u'n#\u0161': 176,
u'\u0159\u010de': 176,
u'apy': 176,
u'\xfd#\u0161': 176,
u'ys#': 176,
u'#gb': 175,
u'ij#': 175,
u'zc\u016f': 175,
u'snu': 175,
u'id\u0148': 175,
u'lds': 175,
u'ldt': 175,
u'#rt': 175,
u'\xfdto': 175,
u'#ah': 175,
u'x#p': 175,
u'pe\u0159': 175,
u'\xe1\u017el': 175,
u'\u016fko': 175,
u'kak': 174,
u'maf': 174,
u'oyl': 174,
u'ov\u017e': 174,
u'itc': 174,
u'\xedsa': 173,
u'sm\xfd': 173,
u'rlu': 173,
u'kob': 173,
u'lyt': 173,
u't\xfdl': 173,
u'\u0161ni': 173,
u'ig\xe9': 173,
u'\u017ebo': 172,
u'\u0161ar': 172,
u'ez\xf3': 172,
u'drt': 172,
u'\xe1\u010di': 172,
u'\u010d\u0148u': 172,
u'odj': 172,
u'\u016fda': 172,
u'xce': 171,
u'j\u0161i': 171,
u'eye': 171,
u'ahn': 171,
u'g#d': 171,
u'sey': 171,
u'b#c': 171,
u'\u017eeh': 171,
u'ow#': 171,
u'rgy': 170,
u'roi': 170,
u'ni\xe1': 170,
u'iry': 169,
u'f#s': 169,
u'ibn': 169,
u'aug': 169,
u'orr': 169,
u'\xfana': 169,
u'mio': 169,
u'kyr': 169,
u'o\u017ec': 169,
u'si\u010d': 169,
u'g#z': 169,
u'jku': 169,
u'y\xed#': 169,
u'lme': 169,
u't\u0159u': 169,
u'bby': 169,
u'h\xe1d': 168,
u'\u0148#c': 168,
u'\xe1jc': 168,
u'sv\xed': 168,
u'bkl': 168,
u'rc\xed': 168,
u'yt\xfd': 168,
u'dm\xfd': 168,
u'd\xe9\u0161': 168,
u'fid': 167,
u'\u011blk': 167,
u'\xe1\u010d\u016f': 167,
u'mir': 167,
u'\u016f#\u0161': 167,
u'r\xe8s': 167,
u'#h\u016f': 167,
u'ik\xe9': 167,
u'zos': 166,
u'aak': 166,
u'ead': 166,
u'may': 166,
u'lbo': 166,
u'tog': 166,
u'ah\xe9': 166,
u'lln': 166,
u'ed\xe9': 166,
u'apn': 166,
u'hoc': 166,
u'cop': 166,
u'\xedvk': 166,
u'jdu': 166,
u'dea': 166,
u'ogl': 166,
u'nn#': 166,
u'dj\xed': 165,
u'\xed\u0148a': 165,
u'uct': 165,
u'\xe1mu': 165,
u'eit': 165,
u'\xedzu': 165,
u'sb\xed': 165,
u'oui': 165,
u'oth': 165,
u'iod': 165,
u'\u0165#c': 164,
u'lag': 164,
u'rtl': 164,
u'dul': 164,
u'\u010d\xed\u0148': 164,
u'#ts': 164,
u'owl': 164,
u'cig': 163,
u'lgs': 163,
u'boe': 163,
u'k\xfdt': 163,
u'hst': 163,
u'tt#': 163,
u'\u010d#a': 163,
u'\xe1su': 163,
u'wor': 163,
u'er\xe8': 163,
u'f#k': 163,
u'a#x': 162,
u'\xf3n\u016f': 162,
u'yet': 162,
u'\u0159e\u010f': 162,
u's\xe9#': 162,
u'thy': 162,
u'\u010f#b': 162,
u'as\u0148': 162,
u'd\u010do': 162,
u'jla': 162,
u'ikc': 162,
u't\xeds': 161,
u'fru': 161,
u'iji': 161,
u'\u010dna': 161,
u'eea': 161,
u'ngy': 161,
u'ppa': 161,
u'\xf3za': 161,
u'ac\xe1': 161,
u't\u011bd': 161,
u'p#l': 161,
u'nui': 161,
u'afo': 160,
u'y#w': 160,
u'z\xe1d': 160,
u'nue': 160,
u'usz': 160,
u'ayo': 159,
u'#f\xfc': 159,
u'mi\u010d': 159,
u'pt\xfd': 159,
u'\xf6ge': 159,
u'zpu': 159,
u'bag': 159,
u'jm\xfd': 158,
u'eil': 158,
u'z\u016fm': 158,
u'eys': 158,
u'tk\xfd': 158,
u'bd\u011b': 158,
u'shm': 158,
u'ney': 158,
u'l\xe1h': 158,
u'lb\u011b': 158,
u'tii': 158,
u'\xed\u017ed': 158,
u'owi': 158,
u'zfo': 157,
u'me\u0161': 157,
u'bko': 157,
u'ouy': 157,
u'dl\u0148': 157,
u'a\u0161k': 157,
u'tl\xed': 157,
u'\xe9ke': 157,
u'fel': 157,
u'gom': 157,
u'\u0161\xedl': 156,
u'r\xf3n': 156,
u'eis': 156,
u'kyv': 156,
u'sia': 156,
u'wol': 156,
u'r\xedn': 156,
u'\xe1bi': 156,
u'f#n': 156,
u'ynt': 156,
u'#oo': 155,
u's\u0148a': 155,
u'i\u0165o': 155,
u'rt\xfd': 155,
u'v#f': 155,
u'tp#': 155,
u'sh#': 155,
u'zus': 155,
u'hde': 155,
u'\u0161\xedr': 154,
u'rf#': 154,
u'reu': 154,
u'\xedr\u016f': 154,
u'ymn': 154,
u'\u011bvo': 154,
u'vai': 154,
u'ac#': 154,
u'zir': 154,
u'\xe1sk': 154,
u'p#c': 154,
u't\xe9k': 153,
u'scu': 153,
u'#gh': 153,
u'\u010dba': 153,
u'uzp': 153,
u'amm': 153,
u'\xfcle': 153,
u'kma': 153,
u'y\u0161k': 153,
u'f\xfcl': 153,
u'ues': 153,
u'\xfahr': 153,
u'z\u0161t': 153,
u'mbr': 152,
u'eid': 152,
u'rgh': 152,
u'uij': 152,
u'rt\u011b': 152,
u'\u010d#z': 152,
u'ydr': 152,
u'\xf3ta': 152,
u'vb\u011b': 152,
u'ab\u011b': 152,
u'apt': 152,
u'gon': 152,
u'oj\u010f': 151,
u'#gm': 151,
u'y\u0161n': 151,
u'z\xe1h': 151,
u'\u017ed\xed': 151,
u'\u0161ik': 151,
u'ib\u016f': 151,
u'rip': 151,
u'azp': 150,
u'jef': 150,
u'nkl': 150,
u'y\u0159m': 150,
u'dr#': 150,
u'll\xe1': 150,
u'\u017ed#': 150,
u'rtf': 150,
u'pr\xe9': 150,
u'#oj': 149,
u'k#g': 149,
u'ei#': 149,
u'o\u010f#': 149,
u'doo': 149,
u'jsp': 149,
u'#gy': 149,
u'ui#': 149,
u'nif': 149,
u'il\u0148': 149,
u'nje': 149,
u'efr': 149,
u'#t\xf3': 149,
u'sc\xed': 148,
u'\u0148#e': 148,
u'ius': 148,
u'apj': 148,
u'\u010fa\u0159': 148,
u'aks': 148,
u'szb': 148,
u'\xe1se': 148,
u'\u0159te': 148,
u'yrg': 148,
u'\xe9k#': 148,
u'zbp': 148,
u'e\xfam': 148,
u'j\u010di': 148,
u'#ou': 147,
u'rzo': 147,
u'in\u017e': 147,
u'noj': 147,
u'amt': 147,
u'loa': 147,
u'bu\u010d': 147,
u'un\u016f': 147,
u'tvi': 147,
u'\xe1d\u017e': 147,
u'\u011bkk': 147,
u'\xf6tt': 147,
u'jur': 147,
u'hia': 147,
u'#gd': 146,
u'\xe9n#': 146,
u'\u010db\u011b': 146,
u'now': 146,
u'hv\u011b': 146,
u'\xe9\u010d\xed': 146,
u'gyz': 146,
u'aya': 146,
u'yzu': 146,
u'##\u0161': 146,
u'nbu': 146,
u'xua': 146,
u'st\u0161': 146,
u'f\xe1#': 146,
u'd\u0159a': 146,
u'tau': 146,
u'lvo': 146,
u'yzs': 145,
u'aes': 145,
u'wes': 145,
u'#s\xf3': 145,
u'rwa': 145,
u'dta': 145,
u'\xfaby': 145,
u'#vc': 145,
u'vy\u0148': 145,
u'dpr': 145,
u'j\u010fm': 145,
u'va\u0159': 145,
u'y\u0148a': 145,
u'x\xed#': 144,
u'p\xf6t': 144,
u'chw': 144,
u'hre': 144,
u'#ay': 144,
u'eef': 144,
u'sve': 144,
u'n\u011bs': 144,
u'tsu': 144,
u'pka': 144,
u'ia\u010d': 144,
u'edj': 144,
u'ax\xed': 144,
u'ue#': 144,
u'pez': 144,
u'\u017eed': 144,
u'pio': 144,
u'#p\xf6': 144,
u'fem': 144,
u'\u0161#a': 143,
u'uzk': 143,
u'dvs': 143,
u'\xe1\u017eu': 143,
u'oen': 143,
u'pk\u016f': 143,
u's#\xed': 143,
u'\u016ftu': 143,
u'ykr': 143,
u'd\u010du': 143,
u'#l\xf3': 143,
u'fr\xe1': 142,
u'#cc': 142,
u'aer': 142,
u'p\xed\u0159': 142,
u'\u011bmc': 142,
u'#ea': 142,
u'\u017ete': 142,
u'zmu': 142,
u'od\xfd': 142,
u'she': 142,
u'tyr': 142,
u'e\u0159m': 142,
u'fas': 142,
u'm#w': 141,
u'#\xfab': 141,
u'uj#': 141,
u'l\xf3p': 141,
u'\xf3pe': 141,
u'b\xf6g': 141,
u'own': 141,
u'\u0161#j': 140,
u'uz\xe8': 140,
u'#s\u0148': 140,
u'z\xe8s': 140,
u'dc#': 140,
u'rsi': 140,
u'rg#': 140,
u'\u010d#o': 140,
u'oh#': 140,
u'#l\u017e': 140,
u'ool': 140,
u'goe': 140,
u'ms#': 140,
u'r#\xfa': 140,
u'yz\xe1': 139,
u'\xe9ny': 139,
u'uzb': 139,
u'#iv': 139,
u'i#\xed': 139,
u'\xfdti': 139,
u'tc\u016f': 139,
u'#ed': 139,
u'rdt': 139,
u'bop': 139,
u'rey': 139,
u'u\u0159\xed': 139,
u'rao': 139,
u'#k\xfd': 139,
u'hej': 138,
u'\u0161#c': 138,
u'emk': 138,
u'\xedzl': 138,
u'#\xfaj': 138,
u'ay#': 138,
u'u\u0165#': 138,
u'\xfajm': 138,
u'\u016flo': 138,
u'\u0161#h': 138,
u'gda': 138,
u'lao': 138,
u'x#c': 138,
u'\u016ft\u011b': 138,
u'u\u0159\xe1': 138,
u'\xfdky': 138,
u'j#f': 137,
u'h\xe1t': 137,
u'ah\xfd': 137,
u'\u017eme': 137,
u'\u010dtr': 137,
u'faj': 137,
u'avt': 137,
u'rn#': 136,
u'lly': 136,
u'hwa': 136,
u'#r\xfd': 136,
u'iec': 136,
u'#ip': 136,
u'ilk': 136,
u'\xe9dl': 136,
u'e\xfap': 136,
u'aio': 135,
u'guc': 135,
u'\u0117ov': 135,
u'ied': 135,
u'atz': 135,
u'n\xfdr': 135,
u'tuk': 135,
u'goo': 135,
u'\xeds\xe1': 135,
u'icc': 135,
u'\u0161#\xfa': 134,
u'oz\u010d': 134,
u'kej': 134,
u'dow': 134,
u'mmu': 134,
u'yus': 134,
u'\xe9za': 134,
u'\xe9lc': 134,
u'nba': 134,
u'ewa': 134,
u'pe\u0161': 134,
u'pi\xe1': 134,
u'r#\u0159': 134,
u'by\xed': 134,
u'\xe1zr': 133,
u'hae': 133,
u'bry': 133,
u'#wh': 133,
u'do\u0159': 133,
u'ha\u0159': 133,
u'dij': 133,
u'gog': 133,
u'ryl': 133,
u'rij': 133,
u'smn': 132,
u'\xe1m\xed': 132,
u'eau': 132,
u'\xf3nn': 132,
u'oaj': 132,
u'iao': 132,
u'euf': 132,
u'kys': 132,
u'zte': 132,
u'ep\xe1': 132,
u'\xe9d\u0161': 132,
u'mcg': 132,
u'k\u0159k': 132,
u'es\u010d': 132,
u'izm': 132,
u'gr\xf3': 132,
u'rz#': 131,
u'uki': 131,
u'ais': 131,
u'nok': 131,
u'op\u016f': 131,
u'lk\u016f': 131,
u'ngs': 131,
u'sir': 131,
u'oua': 131,
u'ihe': 131,
u'asv': 131,
u'uvr': 131,
u'bo\u0159': 131,
u'iz\xe1': 131,
u'e\u0148\u016f': 131,
u'pso': 130,
u'wit': 130,
u'o#w': 130,
u'sei': 130,
u'eof': 130,
u'ek\u010d': 130,
u't\u0159\xe1': 130,
u'\xe9l#': 130,
u'bad': 130,
u'h\u0159b': 130,
u'#k\xf3': 130,
u'bbo': 130,
u'o\u0159\u010d': 129,
u'ap\u016f': 129,
u'aou': 129,
u'ood': 129,
u'bne': 128,
u'sos': 128,
u'\u0161al': 128,
u'mnu': 128,
u'jam': 128,
u'gnu': 128,
u'v\u010d#': 128,
u's#\u0161': 128,
u'cp#': 128,
u'od\u017e': 128,
u'lux': 128,
u'oog': 128,
u'ik\xfd': 128,
u'l\xe9v': 128,
u'j\u010do': 128,
u'\u0159i\u017e': 127,
u'kv\xed': 127,
u'yve': 127,
u'nck': 127,
u'oin': 127,
u'ujg': 127,
u'\xedmk': 127,
u'\u0159#u': 127,
u'eyc': 127,
u'jbo': 127,
u'ubd': 127,
u's\u010d\xed': 127,
u'sut': 127,
u'lty': 127,
u'ips': 127,
u'jgu': 127,
u'ze\u010d': 127,
u'cgu': 127,
u'a\u017e\u0161': 127,
u'dad': 127,
u'ejj': 127,
u'uo#': 126,
u'lro': 126,
u'zo#': 126,
u'dze': 126,
u'\u0159#h': 126,
u'mma': 126,
u'r\u016fv': 126,
u'a\xfad': 126,
u'u\u010dk': 126,
u'g#c': 126,
u'\xfara': 126,
u'e\u0161o': 126,
u'um\u016f': 126,
u'sa\xfa': 126,
u'xem': 126,
u'\u010f#t': 126,
u'r\xe1p': 126,
u'rr#': 126,
u'\u0159\xe1s': 125,
u'uz\u0161': 125,
u'jv\xe1': 125,
u'\xe1ml': 125,
u'\u0159ou': 125,
u'hn\xed': 125,
u'jc\u016f': 125,
u'ryu': 125,
u'\u0159n\u011b': 125,
u'r\xe9k': 125,
u'\xfads': 125,
u'osb': 124,
u'nru': 124,
u'\u0148#\u010d': 124,
u'\xf3ns': 124,
u'kuc': 124,
u't\u016fl': 124,
u'n\u010du': 124,
u'igy': 124,
u'oat': 124,
u'j\xe1c': 124,
u'sr\xe1': 123,
u'rz\u016f': 123,
u'kir': 123,
u'\u0159#i': 123,
u'zl\xe9': 123,
u'l\u017e\xed': 123,
u'\u010de\u010d': 123,
u'k\xe1\u0159': 123,
u'zt\xed': 123,
u'vme': 123,
u'\xe9\u0159k': 123,
u'\xfd\u017e#': 123,
u't\xfd\u017e': 123,
u'nuk': 123,
u'\xeds\u0165': 123,
u'mso': 123,
u'x#n': 122,
u'\xfdb\xe1': 122,
u'zc\xed': 122,
u'oes': 122,
u'eua': 122,
u'yu\u010d': 122,
u'b\u017ea': 122,
u'dma': 122,
u'isd': 122,
u'bi#': 122,
u'sdi': 122,
u'\xe1nl': 122,
u'nfi': 122,
u'av\u0148': 122,
u'lc\xed': 121,
u'ke\u0148': 121,
u'dpu': 121,
u'su\u017e': 121,
u'ohd': 121,
u'uil': 121,
u'b\xe1s': 121,
u'ht#': 121,
u'egn': 121,
u'tyd': 121,
u'tu\u0161': 121,
u'zbi': 121,
u'teu': 121,
u'ta\u0148': 121,
u'kr#': 120,
u'cm#': 120,
u'y\u010dk': 120,
u'#s\u010d': 120,
u'c#f': 120,
u'aj\xe1': 120,
u'#ik': 120,
u'\u010dic': 120,
u'\u016ft#': 120,
u'yzk': 119,
u'tr\xfd': 119,
u'\u016f\u017eu': 119,
u'\u0165#i': 119,
u'eok': 119,
u'x#s': 119,
u'sl\u016f': 119,
u'ews': 119,
u'\xe1n\u0161': 119,
u'an\xe7': 119,
u'izv': 119,
u'guv': 119,
u'\u0161eb': 118,
u'ojv': 118,
u'euu': 118,
u'ymi': 118,
u'dv\xe9': 118,
u'\u011b\u0161u': 118,
u'\xfd#g': 118,
u'xin': 118,
u'\u017eda': 118,
u'lew': 118,
u'nt\xfd': 118,
u'ilv': 118,
u'yk\xe1': 118,
u'lm#': 118,
u'l\xe1\u017e': 118,
u'mof': 118,
u'mk\u016f': 118,
u'lyo': 118,
u'\xfahl': 118,
u'\xfase': 118,
u'fe#': 118,
u't\xe9z': 117,
u'sce': 117,
u'dvl': 117,
u'omk': 117,
u'\u0159#f': 117,
u'sba': 117,
u'f#z': 117,
u'cu#': 117,
u'n\u010do': 117,
u'h\u0159\xed': 117,
u'l\u0161o': 117,
u'\xe9ze': 117,
u'\u010f#o': 117,
u'fi\xed': 116,
u'ym#': 116,
u'via': 116,
u'd\u017ea': 116,
u'yl\xe9': 116,
u'jon': 116,
u'ndt': 116,
u'mt#': 116,
u'mps': 116,
u'ai#': 116,
u'\xedn\u016f': 115,
u'kr\u016f': 115,
u'cek': 115,
u'ld\xe1': 115,
u'\xe1dc': 115,
u'qua': 115,
u'u\u0161l': 115,
u'apk': 115,
u'\xfdte': 115,
u'\u0161k\u016f': 115,
u'd\u011b\u0161': 115,
u'a\u017em': 115,
u'ep\u016f': 115,
u'p\xedt': 114,
u'sre': 114,
u'sv\u010d': 114,
u'geb': 114,
u's\u0165u': 114,
u'pko': 114,
u'\u0144sk': 114,
u'epp': 114,
u'pe\u017e': 114,
u'fia': 113,
u'xov': 113,
u'\u011b\u0159t': 113,
u'\xb0c#': 113,
u'key': 113,
u'aum': 113,
u'iin': 113,
u'quo': 113,
u'ub#': 113,
u'kha': 113,
u'niu': 113,
u'\u017ei\u0161': 113,
u'da\u010d': 113,
u'lnu': 113,
u'efa': 113,
u'#tk': 113,
u't\xe9\u0159': 112,
u'cra': 112,
u'\xf3ze': 112,
u'lzi': 112,
u'p\u0159o': 112,
u'#\xb0c': 112,
u'c#\u0159': 112,
u'boo': 112,
u'bo\u010d': 112,
u'jh\u016f': 112,
u'poo': 112,
u'uml': 112,
u'egg': 112,
u'sbi': 112,
u'jhl': 112,
u'ud\u0159': 112,
u'bu\u0161': 112,
u'#kh': 112,
u'jm\xe1': 111,
u'wab': 111,
u'wie': 111,
u'ear': 111,
u'yer': 111,
u'edz': 111,
u'hk\xe1': 111,
u'deu': 111,
u'big': 111,
u'yjd': 111,
u'hi#': 111,
u'rih': 111,
u'irt': 110,
u'k\xe9s': 110,
u'lsi': 110,
u'dhe': 110,
u'tts': 110,
u'n\xe1j': 110,
u'ags': 110,
u'v\xfd\u010d': 110,
u'ook': 110,
u'i\xe8r': 110,
u'ild': 110,
u'owh': 110,
u'jec': 109,
u'lco': 109,
u'\u0159me': 109,
u't\u0161u': 109,
u'c\u0103u': 109,
u'\u0159#c': 109,
u'\u017e\u010fu': 109,
u'xpl': 109,
u'\xfdt\xe1': 109,
u'rd\u0161': 109,
u'ks#': 109,
u'ra\u0165': 109,
u'\xeds\u0148': 109,
u'ic\u0103': 109,
u'ynn': 109,
u'\xe9rn': 108,
u'\u0163ic': 108,
u'#bd': 108,
u'#\u0163i': 108,
u'wis': 108,
u'ieu': 108,
u'm\xedv': 108,
u'n\xedl': 108,
u'zn#': 108,
u'tuh': 108,
u'f#b': 108,
u'f#m': 108,
u'uds': 108,
u'\u0161\xe1l': 108,
u'aml': 107,
u'cir': 107,
u'sze': 107,
u'ja#': 107,
u'\xfdns': 107,
u'\u0148#h': 107,
u'ymo': 107,
u'wil': 107,
u'yal': 107,
u'unn': 107,
u'\xeda#': 107,
u'#bh': 107,
u'v\xe1k': 107,
u'\xe1hi': 107,
u'n\u0161u': 107,
u'gbp': 107,
u'yta': 107,
u'vuo': 107,
u'dp\xed': 107,
u'zub': 107,
u'y\u0159\xed': 107,
u'oca': 107,
u'hub': 107,
u'fao': 107,
u'tze': 106,
u'ezr': 106,
u'\xe9ns': 106,
u'y\u010d#': 106,
u'ng\u016f': 106,
u'\u016fz\xed': 106,
u'mix': 106,
u'rc#': 106,
u'kuk': 106,
u'b\u017ei': 106,
u'ehy': 106,
u'ov\u0148': 106,
u'\u017eis': 106,
u'mo\u0161': 106,
u'hym': 106,
u't\u011bp': 106,
u'hur': 106,
u'huz': 106,
u'#k\u016f': 106,
u'czo': 105,
u'tzn': 105,
u'\u0165me': 105,
u'gns': 105,
u'i\xe1n': 105,
u'\u0159#l': 105,
u'#bg': 105,
u'\xfdmy': 105,
u'eht': 105,
u'c#\xfa': 105,
u'xil': 105,
u'\xfdpl': 105,
u'ith': 105,
u'pty': 105,
u'py\u0161': 105,
u'bea': 105,
u'tyg': 105,
u'ba\u0161': 105,
u'hu\u0165': 105,
u'e\xfar': 105,
u'\xed\u017es': 105,
u'\u0161\xe9f': 105,
u'smd': 104,
u'kna': 104,
u'uc#': 104,
u'tny': 104,
u'\u0165ij': 104,
u'mrh': 104,
u'rsu': 104,
u'rck': 104,
u'\xe1h#': 104,
u'eh\u016f': 104,
u'ieg': 104,
u'\u010d#c': 104,
u'gha': 104,
u'rha': 104,
u'#d#': 104,
u'pia': 104,
u'j\xeds': 104,
u'p#h': 104,
u'anj': 104,
u'\xe1ra': 104,
u'\u010dk\u016f': 104,
u'ebh': 104,
u'#gn': 103,
u'cmi': 103,
u'\xe1dh': 103,
u'm\xe9s': 103,
u'\u0161ce': 103,
u'ol\u0161': 103,
u'ip\xed': 103,
u'\xedhr': 103,
u'n\xe7o': 103,
u'\xf3tu': 103,
u'#lv': 103,
u'l\xe1z': 103,
u'oos': 103,
u'r\u016ft': 102,
u'ugu': 102,
u'y#\xed': 102,
u'uzd': 102,
u't\xfdr': 102,
u'no\u0159': 102,
u'mei': 102,
u'\xe1\u0161n': 102,
u'su\u0161': 102,
u'#if': 102,
u'\u010d#d': 102,
u'zmy': 102,
u'#e#': 102,
u'uaf': 102,
u'kof': 102,
u'\u0159#\u017e': 102,
u'fit': 101,
u'ir\u016f': 101,
u'nk\xe9': 101,
u'sp+': 101,
u'\xeddr': 101,
u'uxu': 101,
u'mpt': 101,
u'ryj': 101,
u'x#j': 101,
u'rvu': 101,
u'm\u017ee': 101,
u'p\u0148u': 100,
u'tsb': 100,
u'oft': 100,
u'ioc': 100,
u'win': 100,
u'aup': 100,
u'ays': 100,
u'n\u011be': 100,
u'vti': 100,
u'#rw': 100,
u'\u0159li': 100,
u's\xe1\u017e': 100,
u'\u017e\u016fm': 100,
u'zip': 100,
u'\xe1k\xe1': 100,
u'\u0103uo': 100,
u're\u0161': 100,
u'iox': 100,
u'ar\xed': 100,
u'p+#': 100,
u't\xedk': 99,
u'hr#': 99,
u'ifr': 99,
u'amk': 99,
u'ufo': 99,
u'nha': 99,
u'mur': 99,
u'h\u010di': 99,
u'\xe1p\xed': 99,
u'ed\u0161': 99,
u'r\xedl': 99,
u'\xfat\u011b': 98,
u'kn\xed': 98,
u'l\u010di': 98,
u'eap': 98,
u'kih': 98,
u'rk\u016f': 98,
u'jbe': 98,
u'bsl': 98,
u'elr': 98,
u'jk\xe1': 98,
u'bl\xe1': 98,
u'unh': 98,
u'up\u010d': 98,
u'#\u0161\xe9': 98,
u'vv#': 98,
u'f\xedh': 98,
u'#tb': 98,
u'cro': 97,
u'cad': 97,
u'cy#': 97,
u'dof': 97,
u'aye': 97,
u'tsc': 97,
u'gmo': 97,
u'alp': 97,
u'\u011brs': 97,
u'seo': 97,
u'\u010f#d': 97,
u'eke': 97,
u'b\xedt': 97,
u'b#i': 97,
u'pur': 97,
u'ar\u017e': 97,
u'dih': 97,
u'g\u0159#': 97,
u'\u011bek': 97,
u'ilt': 97,
u'buk': 97,
u'#t#': 97,
u'\u0161\xedt': 96,
u'\u0159\xe1k': 96,
u'geh': 96,
u'u\u0161a': 96,
u'imc': 96,
u'cle': 96,
u'#+#': 96,
u'dfo': 96,
u'\xeddc': 96,
u'exo': 96,
u'lay': 96,
u'yp\u0159': 96,
u'ghu': 96,
u'ul\u016f': 96,
u'dmd': 96,
u't\xe9n': 95,
u'\u011bdu': 95,
u'inj': 95,
u'##\xb0': 95,
u'ubr': 95,
u'v\xedv': 95,
u'u#\xed': 95,
u'cki': 95,
u'nth': 95,
u'ze\u010f': 95,
u'\u017e\xedr': 95,
u'idt': 95,
u'\u0161vi': 95,
u'kk\xe9': 95,
u'tuz': 95,
u'pi#': 95,
u'mse': 95,
u'hug': 95,
u'sm\xe9': 95,
u'av\u017e': 95,
u'nz#': 94,
u'yzb': 94,
u'ea\u010d': 94,
u'iqu': 94,
u'oep': 94,
u'adp': 94,
u'ce\xfa': 94,
u'kyb': 94,
u'g#t': 94,
u'\xe1cs': 94,
u'\xe1cp': 94,
u'ckx': 94,
u'\u011bjm': 94,
u'jt\u011b': 94,
u'ry\u010d': 94,
u'pry': 94,
u'\xfdrs': 94,
u'oj\xfa': 93,
u'jav': 93,
u'wei': 93,
u'k\xe9\u0159': 93,
u'b\u016fm': 93,
u'\xfdmt': 93,
u'\u0161ef': 93,
u'b\u0159i': 93,
u'\xe1l\xe1': 93,
u'\u016fdo': 93,
u'\xe1\u0159k': 93,
u'bia': 93,
u'bi\xe1': 93,
u'ios': 93,
u'\u0159#\u0161': 93,
u'fai': 93,
u'j\xfah': 93,
u'cza': 92,
u'dje': 92,
u'\u010dsk': 92,
u'scr': 92,
u'uca': 92,
u'\xe1tr': 92,
u'kai': 92,
u'gy\u0151': 92,
u'un\u011b': 92,
u't\xeat': 92,
u'zlu': 92,
u'syt': 92,
u'\u016fl\xed': 92,
u'e\u010fu': 92,
u'ppi': 92,
u'iad': 92,
u'a\u0161v': 92,
u'\u0151ri': 92,
u'agl': 92,
u'et\xea': 92,
u'yon': 92,
u'\xe9la': 92,
u'\xfd\u017ee': 92,
u'og\xe1': 92,
u'bub': 92,
u'u\u0159a': 92,
u'y\u0151r': 92,
u'#pm': 92,
u'kvr': 91,
u'\xed#\u0163': 91,
u'lse': 91,
u'oi#': 91,
u'bws': 91,
u'clo': 91,
u'z\xe9l': 91,
u'od\u010d': 91,
u'x#v': 91,
u'b#\u010d': 91,
u'utm': 91,
u'gba': 91,
u'ggl': 91,
u'\u017eec': 91,
u'udf': 91,
u'buo': 91,
u'p#i': 91,
u'\xfade': 91,
u'j\xe1l': 91,
u'jeg': 90,
u'ei\u0159': 90,
u'c#\u0161': 90,
u'twa': 90,
u'\u0161l\xfd': 90,
u'\u0161l\xe9': 90,
u'eh#': 90,
u'\xe9#\xe9': 90,
u'jzn': 90,
u'\xfdpa': 90,
u'k\u016f\u017e': 90,
u'nt\xe9': 90,
u'xus': 90,
u'oo#': 90,
u'ggi': 90,
u'bi\xed': 90,
u'#cd': 90,
u'#p\u0161': 90,
u'hiz': 90,
u'#g\u0159': 89,
u'e\xe1r': 89,
u'\xed#\xed': 89,
u'\u010d\xe1r': 89,
u'kuu': 89,
u'ehc': 89,
u'\xfdta': 89,
u'lt\u011b': 89,
u'\xe1gn': 89,
u'\u017edu': 89,
u'b#f': 89,
u'k\xf3d': 89,
u'tyh': 89,
u'\xe1ba': 89,
u'pav': 89,
u't\u011br': 89,
u'j\xe1n': 89,
u'fav': 89,
u'i\u010dc': 89,
u'tzi': 88,
u'uke': 88,
u'b#l': 88,
u'yr\u016f': 88,
u'#aj': 88,
u'y\u017ei': 88,
u'ehm': 88,
u'uez': 88,
u'phi': 88,
u'i\u0148t': 88,
u'uad': 88,
u'\xfdlo': 88,
u'\xfdsi': 88,
u'v\u011b\u010d': 88,
u'\u0161er': 88,
u'zu\u0159': 88,
u'fur': 88,
u'vrn': 88,
u'anl': 88,
u'lne': 88,
u'#pv': 88,
u'efs': 88,
u'arp': 88,
u'gr\xe4': 88,
u'ezh': 87,
u'hea': 87,
u'au\u0161': 87,
u'zly': 87,
u'k\xfds': 87,
u'\xe1\u0161l': 87,
u'\xe1\u0161k': 87,
u'l\u017ei': 87,
u'mub': 87,
u'ih\xe1': 87,
u'zez': 87,
u'egm': 87,
u'rug': 87,
u'ygr': 87,
u'akh': 87,
u'\xe7oi': 87,
u'\xfad\u011b': 87,
u'#ph': 87,
u't#w': 86,
u'udb': 86,
u'ffe': 86,
u'm\u010di': 86,
u'rss': 86,
u'kic': 86,
u'ma\u0161': 86,
u'oit': 86,
u'mi\u0159': 86,
u'#z\xe9': 86,
u'si\xed': 86,
u'sas': 86,
u'\u0159an': 86,
u'#eo': 86,
u'b\xedn': 86,
u'ok\u0159': 86,
u'afe': 86,
u'ef\u010d': 86,
u'f\u010do': 86,
u'j\xfas': 86,
u'skn': 86,
u'\xe1zu': 85,
u'm\u0161t': 85,
u'mny': 85,
u'\u011bdr': 85,
u't\xe1k': 85,
u'eer': 85,
u'\xedml': 85,
u'sb#': 85,
u'gaa': 85,
u'pyr': 85,
u'n\u0161o': 85,
u'uyi': 85,
u'#a\u0161': 85,
u'uiz': 85,
u'uei': 85,
u'eh\xe1': 85,
u'yot': 85,
u'hlc': 85,
u'#dh': 85,
u'fei': 85,
u'kio': 85,
u'hun': 85,
u'irr': 84,
u'hah': 84,
u'\xedv\xed': 84,
u'yvy': 84,
u'lol': 84,
u'\xfcbi': 84,
u'tb#': 84,
u'syr': 84,
u'oan': 84,
u'r\xfcb': 84,
u's\u010de': 84,
u'apc': 84,
u'\u0165#l': 84,
u'uid': 84,
u'zm\xe1': 84,
u'\xe9z\xe1': 84,
u'#h\xfc': 84,
u'uat': 84,
u'okd': 84,
u'sdo': 84,
u'ti\u0148': 84,
u'az\xe1': 83,
u'zr\xe1': 83,
u'hec': 83,
u'psu': 83,
u'dry': 83,
u'\xe1\u010d\xed': 83,
u'\u010drt': 83,
u'\u011bcm': 83,
u'u\u0161\xe1': 83,
u's\xedr': 83,
u'pp#': 83,
u'a\u0148t': 83,
u'edh': 83,
u'\u010f#u': 83,
u'pyl': 83,
u't\u0159k': 83,
u'opk': 83,
u'dir': 83,
u'feu': 83,
u'u\u0159l': 83,
u'j\u010du': 83,
u'jdy': 83,
u'\u0165ia': 83,
u'#kg': 83,
u'azc': 82,
u'm\xf3d': 82,
u'\xfdz#': 82,
u'\u0161#e': 82,
u'\xf3zu': 82,
u'kv\xe9': 82,
u'\xfcbn': 82,
u'b\u016fh': 82,
u'euo': 82,
u'cli': 82,
u'a\u0148m': 82,
u'al\u010d': 82,
u'\xfdtn': 82,
u'olf': 82,
u'uar': 82,
u'vuh': 82,
u'pud': 82,
u'\xfa\u017ee': 82,
u'h\xfcb': 82,
u'eca': 82,
u'n\u0161e': 82,
u'a\u017eb': 82,
u'\u011bm\u010d': 82,
u'guy': 82,
u's#w': 82,
u'frs': 81,
u'ukv': 81,
u'lzo': 81,
u'dbu': 81,
u'eic': 81,
u'o\u010fa': 81,
u'kes': 81,
u'omr': 81,
u'\u0159#\u010d': 81,
u'#f\xe9': 81,
u'\u010de\u0161': 81,
u'f#e': 81,
u'e\u0161u': 81,
u'\u0148ky': 81,
u'ox#': 81,
u'ef#': 81,
u'nys': 81,
u'a\u017et': 81,
u'bui': 81,
u'him': 81,
u'#tf': 81,
u'\xednc': 80,
u'aic': 80,
u'jje': 80,
u'g##': 80,
u'h\u010d\xed': 80,
u'df#': 80,
u'ap#': 80,
u'sip': 80,
u'seu': 80,
u'se\u010d': 80,
u'#ya': 80,
u'r\xfdk': 80,
u'x#k': 80,
u'ccc': 80,
u'utc': 80,
u'el\u017e': 80,
u'l\xe9c': 80,
u'ys\u016f': 80,
u'tkv': 80,
u'fix': 79,
u'ezs': 79,
u'rff': 79,
u'vop': 79,
u'sr#': 79,
u'rue': 79,
u'zha': 79,
u'\u0165te': 79,
u'ct\xe9': 79,
u'r\u016f\u0159': 79,
u'pyt': 79,
u'dpl': 79,
u'\xfdzo': 78,
u'ucu': 78,
u'rbi': 78,
u'emr': 78,
u'jja': 78,
u'pcp': 78,
u'tse': 78,
u'dco': 78,
u'toi': 78,
u'tci': 78,
u'lp#': 78,
u'\xe1cl': 78,
u'#g\xe1': 78,
u'upp': 78,
u'mc\xed': 78,
u's\xf3j': 78,
u'tae': 78,
u'f#t': 78,
u'p#\u010d': 78,
u'num': 78,
u'us\u0165': 78,
u'je\u010d': 77,
u'ojc': 77,
u'tr\xed': 77,
u'\xfdna': 77,
u'eio': 77,
u'tft': 77,
u'\xe9\u017ee': 77,
u'zl#': 77,
u'v\xe1g': 77,
u'muz': 77,
u'elb': 77,
u'\xfdm\u016f': 77,
u'gdo': 77,
u'ftp': 77,
u'ftw': 77,
u'cct': 77,
u'r\xe1\u0159': 77,
u'f\xe1c': 77,
u'#d\xfd': 77,
u'#b\xe9': 77,
u'\u010dim': 77,
u'ig#': 77,
u'r#g': 77,
u'tao': 77,
u'\xe9va': 76,
u'hvi': 76,
u'ff#': 76,
u'tbu': 76,
u'nsy': 76,
u'eun': 76,
u'oa#': 76,
u'kyj': 76,
u'fty': 76,
u'ctb': 76,
u'stc': 76,
u'id\u017e': 76,
u'\xe1bu': 76,
u'jd\u011b': 76,
u'l\xe9b': 76,
u'si\xe8': 76,
u'pii': 76,
u'd\xedn': 76,
u'goj': 76,
u'ynd': 76,
u'dua': 76,
u'du\xed': 76,
u'rry': 76,
u'\xe1z#': 75,
u'mbs': 75,
u'\u0161#u': 75,
u'e\xe1t': 75,
u'inh': 75,
u'\xf3no': 75,
u'fje': 75,
u'yha': 75,
u'\u011bvc': 75,
u'\u010di\u0148': 75,
u'ih\u016f': 75,
u'ki\u0161': 75,
u'ntv': 75,
u'ghr': 75,
u'r\u016f\u010d': 75,
u'b#h': 75,
u'wej': 75,
u'tym': 75,
u'oki': 75,
u'\u017e#g': 75,
u'\xe1rm': 75,
u'uzl': 75,
u'zzo': 75,
u'off': 74,
u'gn\xed': 74,
u'mrc': 74,
u'nca': 74,
u'eet': 74,
u'wik': 74,
u'ngd': 74,
u'kiv': 74,
u'\xe1\u0161i': 74,
u'#h#': 74,
u'gay': 74,
u'#fj': 74,
u'rcz': 74,
u'pku': 74,
u'bhu': 74,
u'eoz': 74,
u'alf': 74,
u'\u011bjt': 74,
u'b#\u0159': 74,
u'uld': 74,
u'a\u010dr': 74,
u'rym': 74,
u'gcc': 74,
u'\xfaho': 74,
u'aj\u010d': 74,
u'icz': 74,
u'f\xed#': 74,
u'\u011bl\u0161': 73,
u'ngh': 73,
u'f\u016f#': 73,
u'bsd': 73,
u'o\u0159m': 73,
u'b\u011bd': 73,
u'al\u017e': 73,
u'yls': 73,
u'see': 73,
u'agb': 73,
u'zeo': 73,
u'xy#': 73,
u'u\xed#': 73,
u'\u011bnk': 73,
u'\xf3t\xe1': 73,
u'o\u010do': 73,
u'v\u0159o': 73,
u'awi': 73,
u'lui': 73,
u'ixu': 73,
u'pao': 73,
u'moe': 73,
u'la\u010f': 73,
u'fet': 73,
u'bu\u0159': 73,
u'nud': 73,
u'igh': 73,
u'avm': 73,
u'\xed\u0161i': 72,
u'osr': 72,
u'\u0161av': 72,
u'k\xe1h': 72,
u'#gc': 72,
u'\xf3zo': 72,
u'lo\u015f': 72,
u'#aa': 72,
u'\u0165#h': 72,
u'\u0165#\u010d': 72,
u'ch\u0165': 72,
u'euc': 72,
u'yhy': 72,
u'iew': 72,
u'#io': 72,
u'\u016fln': 72,
u'\xe1kr': 72,
u'lij': 72,
u'hig': 72,
u'en\u0117': 72,
u'thu': 72,
u'ssy': 72,
u'irb': 71,
u'dsm': 71,
u'\u011bzd': 71,
u'#gp': 71,
u'gni': 71,
u'ya#': 71,
u's\xedk': 71,
u'fsa': 71,
u'a\u0159o': 71,
u'l\u017ee': 71,
u'zmn': 71,
u'rk\xe1': 71,
u'gps': 71,
u'sib': 71,
u's#q': 71,
u'sag': 71,
u'ntk': 71,
u'gs#': 71,
u'uln': 71,
u'rya': 71,
u'ej\u016f': 71,
u'e\u0148t': 71,
u'jma': 70,
u'frr': 70,
u'\xe4\xdfl': 70,
u'wel': 70,
u'r\u0161a': 70,
u'mr\u0161': 70,
u'urj': 70,
u'\xedn\u0161': 70,
u'e\u010d\u016f': 70,
u'c\xeda': 70,
u'pcc': 70,
u'u\u0161c': 70,
u'sul': 70,
u'r\xe4\xdf': 70,
u'#vf': 70,
u'#mj': 70,
u'#gf': 70,
u'#p\xf3': 70,
u'eph': 70,
u'zib': 70,
u'ckm': 70,
u'hp#': 70,
u'ntl': 70,
u'xyn': 70,
u'r\xfdn': 70,
u'sp\u0159': 70,
u'\xe9\u0161i': 70,
u'jza': 70,
u'mcd': 70,
u'h\u0165#': 70,
u'abd': 70,
u'#hv': 70,
u't\u011bk': 70,
u'dak': 70,
u'#xy': 70,
u'm\u011bc': 70,
u'swa': 70,
u'kfu': 69,
u'fi\u010d': 69,
u'heg': 69,
u'mj#': 69,
u'ifa': 69,
u'ce\u0159': 69,
u'eez': 69,
u'\u016fru': 69,
u'ubb': 69,
u'ngi': 69,
u'n\xeds': 69,
u'nib': 69,
u'cch': 69,
u't\u0117o': 69,
u'\xe9nd': 69,
u'ti\u0159': 69,
u'i\u010ds': 69,
u'nzy': 68,
u'gfc': 68,
u'\xfchl': 68,
u'ijo': 68,
u'jab': 68,
u'nkf': 68,
u'\xedrs': 68,
u'amr': 68,
u'auv': 68,
u'\xf3n\xe1': 68,
u'\xe1\u010d#': 68,
u'syl': 68,
u'llm': 68,
u'd\u011bm': 68,
u'fcm': 68,
u'ni\u0161': 68,
u'x#m': 68,
u'vuk': 68,
u'yrs': 68,
u'xpa': 68,
u'\xfasu': 68,
u'a\u0161r': 68,
u'okn': 68,
u'f#d': 68,
u'gry': 68,
u'ght': 68,
u'zog': 67,
u't#\xed': 67,
u'\u011bd\u0159': 67,
u'\u011bd#': 67,
u'\u0161#l': 67,
u'cao': 67,
u'\u0148#f': 67,
u'wic': 67,
u'rsc': 67,
u'#h\xf6': 67,
u'ldi': 67,
u'oas': 67,
u'ix#': 67,
u'ddi': 67,
u'#ez': 67,
u'dtu': 67,
u'nay': 67,
u'd\xeda': 67,
u'wn#': 67,
u'h#w': 67,
u'x#z': 66,
u'trs': 66,
u'sge': 66,
u'sz\xe1': 66,
u'hrs': 66,
u'yv\xe9': 66,
u'cef': 66,
u'rj\xe1': 66,
u'amf': 66,
u'meg': 66,
u'ln#': 66,
u'\u0165#\u017e': 66,
u'#b\u016f': 66,
u'#\xe9\u0159': 66,
u'n\u0117o': 66,
u'lu\xfa': 66,
u'nnu': 66,
u'jsr': 66,
u'\xfaz\xed': 66,
u'qim': 66,
u'n\xed\u010d': 66,
u'hta': 66,
u'\xe1s\xe1': 66,
u'z#\u0161': 66,
u'v\u011b\u017e': 66,
u'u\xfa\u010d': 66,
u'ksi': 66,
u'ta\u0159': 66,
u'uff': 65,
u'kn\xe9': 65,
u'lza': 65,
u'cau': 65,
u'ib\xe1': 65,
u'ruo': 65,
u'\u0159i\u0159': 65,
u'bob': 65,
u'lsa': 65,
u'ut\u016f': 65,
u'\u011bk\xfd': 65,
u'g#r': 65,
u'\xe9s#': 65,
u'#m\xf3': 65,
u'\xe9t\xe1': 65,
u'\xe1kn': 65,
u'gus': 65,
u'la\u017e': 65,
u'za\xfa': 65,
u'zfa': 65,
u'vru': 65,
u'msi': 65,
u'hii': 65,
u'\xfdr\xe1': 65,
u'o\u0159k': 64,
u'\u0161am': 64,
u'ciu': 64,
u'lfa': 64,
u'ib\xfd': 64,
u'rsh': 64,
u'auk': 64,
u'\xedaz': 64,
u'vd#': 64,
u'ged': 64,
u'sbe': 64,
u'\u016flr': 64,
u'iab': 64,
u'rtz': 64,
u'm\xedl': 64,
u'g\xe1c': 64,
u'ylv': 64,
u'\u010fan': 64,
u'lax': 64,
u'x#b': 64,
u'vfr': 64,
u'op\u010d': 64,
u'di\u0161': 64,
u'l\xe9r': 64,
u'bib': 64,
u'u\u0159u': 64,
u'ltt': 64,
u'\u017era': 64,
u'\u016f\u0159#': 64,
u'igg': 64,
u'fau': 64,
u'p\xe9r': 63,
u'fma': 63,
u'k#w': 63,
u'\u011blb': 63,
u'wa#': 63,
u'\xfdn#': 63,
u'sra': 63,
u'ma\u0148': 63,
u'i\xe1d': 63,
u'i\u017e\xe1': 63,
u'syh': 63,
u'mu\u0161': 63,
u'#ib': 63,
u'lt\xed': 63,
u'jod': 63,
u'\xeddu': 63,
u'khl': 63,
u'\xf3to': 63,
u'tuo': 63,
u'udd': 63,
u'zb\xed': 63,
u'ob\u0161': 63,
u'mfa': 63,
u'\xe9v\xe1': 62,
u'ezf': 62,
u'\xed\u0161\xed': 62,
u'cri': 62,
u'\u0161#\u017e': 62,
u'ee#': 62,
u'woo': 62,
u'h\xf6k': 62,
u'gap': 62,
u'kua': 62,
u'gig': 62,
u'ehp': 62,
u'#r\u010d': 62,
u'\xf6km': 62,
u'xek': 62,
u'h\u010de': 62,
u'vun': 62,
u'j\xe1r': 62,
u'oot': 62,
u'ab#': 62,
u'jdi': 62,
u'usv': 62,
u'\u017eev': 62,
u'tms': 62,
u'ocy': 62,
u'a\xfat': 62,
u'nnk': 62,
u'anf': 62,
u'jul': 62,
u'sk\u0159': 62,
u'lv\xe1': 62,
u'j#\u0159': 61,
u'gut': 61,
u'oz\xfd': 61,
u'drz': 61,
u'\xe1\u010dk': 61,
u'r\xfch': 61,
u'd\u017eu': 61,
u'uu#': 61,
u'\u010d#r': 61,
u'dhi': 61,
u'\xfddo': 61,
u'yp\xe1': 61,
u'\u017e\xedh': 61,
u'ep\u011b': 61,
u'aot': 61,
u'ek\u016f': 61,
u'\u011bnl': 61,
u'#qi': 61,
u'dtp': 61,
u'\xfdso': 61,
u'dmo': 61,
u'ooi': 61,
u'moo': 61,
u'#dt': 61,
u'eb\u0159': 61,
u'ha\u010d': 60,
u'r\xf3k': 60,
u'in\u0161': 60,
u'aia': 60,
u'if#': 60,
u'\u0148#\u017e': 60,
u'i\u0159i': 60,
u'tbi': 60,
u'vde': 60,
u'oij': 60,
u'zdm': 60,
u'\u011br\u0148': 60,
u'\xfdma': 60,
u'\u011bv\u010d': 60,
u'aca': 60,
u'\xe1r\xf3': 60,
u'v\u011b\u0161': 60,
u'\u010f#e': 60,
u'#\u017er': 60,
u'wog': 60,
u'lua': 60,
u'jl\xe1': 60,
u'flu': 60,
u'\xedsc': 60,
u'\xe1zl': 59,
u'mby': 59,
u'tez': 59,
u'zrc': 59,
u't\xe1\u010d': 59,
u'dbl': 59,
u'kvy': 59,
u'psi': 59,
u'auf': 59,
u'\u0159eo': 59,
u'\u010d#u': 59,
u'saf': 59,
u'itz': 59,
u'\u016fh#': 59,
u'o\u0161\xed': 59,
u'r\xfd\u017e': 59,
u'\u011bnc': 59,
u'yt\u0159': 59,
u'vyo': 59,
u'cdd': 59,
u'e\u010fm': 59,
u'd#w': 59,
u'tu\u010d': 59,
u'z\xfa\u017e': 59,
u'hid': 59,
u'duf': 59,
u'hmu': 59,
u'k\xe1i': 58,
u'\xe1id': 58,
u'\xfd\u0161k': 58,
u'yv\u016f': 58,
u'eia': 58,
u'rwe': 58,
u'y\u010du': 58,
u'\xeate': 58,
u'c#g': 58,
u'aub': 58,
u'n\u017eo': 58,
u'gah': 58,
u'oee': 58,
u'mud': 58,
u'ppo': 58,
u's\xe1c': 58,
u'si\u0159': 58,
u'joe': 58,
u'ddh': 58,
u'vi\u017e': 58,
u'k\u016fd': 58,
u'\xedl\xfd': 58,
u'fta': 58,
u'woj': 58,
u'dmn': 58,
u'cse': 58,
u'z\xfah': 58,
u'piz': 58,
u'lbr': 58,
u'\xe9rs': 57,
u'czy': 57,
u'evc': 57,
u'szw': 57,
u'aef': 57,
u'ofr': 57,
u'a#\xe1': 57,
u'h\xe1\u0159': 57,
u'tme': 57,
u'a\u010fo': 57,
u'bc#': 57,
u'zwo': 57,
u'zl\xfd': 57,
u'r\xfcm': 57,
u'yh\xe1': 57,
u'v\xe9z': 57,
u'lch': 57,
u'rk\xe9': 57,
u'aa#': 57,
u'm\xed\u010d': 57,
u'#vu': 57,
u'hcr': 57,
u'zp\xed': 57,
u'acz': 57,
u'rb\u016f': 57,
u'jpa': 57,
u'\u017eih': 57,
u'r\xeda': 57,
u'a\u0161m': 57,
u'pex': 57,
u'lb\xed': 57,
u'thr': 57,
u'izb': 57,
u'ip\u011b': 57,
u'bbe': 57,
u'aeu': 56,
u'nki': 56,
u'aik': 56,
u'umf': 56,
u'jvo': 56,
u'azs': 56,
u'pum': 56,
u'\u0148#l': 56,
u'ncy': 56,
u'm#\xed': 56,
u'yei': 56,
u'u\u0148k': 56,
u'e#x': 56,
u'cur': 56,
u'oex': 56,
u'nhc': 56,
u'elf': 56,
u'gib': 56,
u'pr\xfc': 56,
u'\xe8se': 56,
u'p\u0165a': 56,
u'yp\u0165': 56,
u'aob': 56,
u'z\u010da': 56,
u'yt\u016f': 56,
u'egt': 56,
u'aw#': 56,
u'paa': 56,
u'de\u0148': 56,
u'abb': 56,
u'pei': 56,
u'enh': 56,
u'hyd': 56,
u'epy': 56,
u'\u011bpe': 56,
u'\xed\u017eo': 56,
u'\u011bzo': 56,
u'j#g': 55,
u'\u010dst': 55,
u'tv#': 55,
u'ugh': 55,
u'w#a': 55,
u'ffa': 55,
u'aqu': 55,
u'\u0159#\xfa': 55,
u'p\xf3l': 55,
u'k\xfd\u017e': 55,
u'\u010dec': 55,
u'm\xe1\u0161': 55,
u'u\u010d\u0148': 55,
u'sue': 55,
u'\u011bv\xe1': 55,
u'jo#': 55,
u'\u0161ra': 55,
u'olc': 55,
u'blu': 55,
u'zia': 55,
u'tee': 55,
u'epk': 55,
u'mti': 55,
u'ghs': 55,
u'you': 55,
u'l\xedv': 55,
u'af\xe9': 55,
u'afa': 55,
u'afu': 55,
u'bu\u0148': 55,
u'p#\u017e': 55,
u'\xedsu': 55,
u'pm#': 55,
u'\xfdku': 55,
u'ivd': 54,
u'tr\u010d': 54,
u'\u0161m\xed': 54,
u'mr#': 54,
u'bzo': 54,
u'ymk': 54,
u'rsn': 54,
u'gej': 54,
u'goz': 54,
u'rcu': 54,
u'bs#': 54,
u'\u010dme': 54,
u'lt\xe1': 54,
u'#m#': 54,
u'\u0159\u0161t': 54,
u'#ae': 54,
u'sai': 54,
u'exy': 54,
u'\xe3o#': 54,
u'al\u016f': 54,
u'aol': 54,
u'#\u017eh': 54,
u'gse': 54,
u'hlt': 54,
u'\u017ein': 54,
u'r\xe1\u0161': 54,
u'bid': 54,
u'dai': 54,
u'kso': 54,
u'jup': 54,
u'\xe1rk': 54,
u'jml': 53,
u'p\xedl': 53,
u'onb': 53,
u'gn#': 53,
u'\u0161pe': 53,
u'eie': 53,
u'p\u0159a': 53,
u'meo': 53,
u'\u0165##': 53,
u'un\u0161': 53,
u'ge\u013e': 53,
u'oei': 53,
u'nsc': 53,
u'adh': 53,
u'nhe': 53,
u'giz': 53,
u'icy': 53,
u'a\u0148k': 53,
u'rks': 53,
u'p\u0148\u016f': 53,
u'\u011bv\u016f': 53,
u'uum': 53,
u'it\u0117': 53,
u'rdf': 53,
u'ctk': 53,
u'cks': 53,
u'\xedtc': 53,
u'ut\u0159': 53,
u'tu\u017e': 53,
u'jd\xe9': 53,
u'fyr': 53,
u'goi': 53,
u'taf': 53,
u'tz#': 52,
u'#g\xf6': 52,
u'\u011bs#': 52,
u'ceb': 52,
u'ur\u017e': 52,
u'oaz': 52,
u'drs': 52,
u'omc': 52,
u'got': 52,
u'gop': 52,
u'cul': 52,
u'oed': 52,
u'mih': 52,
u'ur\xed': 52,
u'gi#': 52,
u'\xfdmo': 52,
u'h\u010du': 52,
u'm\xe1k': 52,
u'mev': 52,
u'\u016fzy': 52,
u'kl\u016f': 52,
u'#e\xfa': 52,
u'ep\u010d': 52,
u'pup': 52,
u'pvv': 52,
u'l\xe1\u010d': 52,
u'zny': 52,
u'a\u017e\xe1': 52,
u'esz': 52,
u'enw': 52,
u'pr\xfd': 52,
u'\xf3dn': 52,
u'uzz': 52,
u'xto': 51,
u'rfi': 51,
u'rfo': 51,
u'#gk': 51,
u'wer': 51,
u'kay': 51,
u'e\u010dc': 51,
u'\u017eoa': 51,
u'j\u0148u': 51,
u'adz': 51,
u'im\xed': 51,
u'a\u0161g': 51,
u'o\u017e\u010f': 51,
u'g\xe1t': 51,
u'dwa': 51,
u'\u0161ro': 51,
u'ydo': 51,
u'tth': 51,
u'u#q': 51,
u'ag#': 51,
u'le\xe1': 51,
u'ux#': 51,
u'\u011b#w': 51,
u'tu\u0165': 51,
u'\u0142aw': 51,
u'f#o': 51,
u'f##': 51,
u'pi\xed': 51,
u'buv': 51,
u'\xe9\u0161#': 51,
u'owe': 51,
u'uot': 50,
u'#cz': 50,
u'hee': 50,
u'vsa': 50,
u'cut': 50,
u'j#w': 50,
u'dzv': 50,
u'v\u010dl': 50,
u'ym\xed': 50,
u'e\u017el': 50,
u'dkr': 50,
u'mif': 50,
u'yll': 50,
u'\xf8rg': 50,
u'j\xf8r': 50,
u'i\u0107#': 50,
u'ipp': 50,
u'\u0161ga': 50,
u'zew': 50,
u'ypn': 50,
u'#\u010dr': 50,
u'#\xe0#': 50,
u'ghi': 50,
u'r\u016fj': 50,
u'jtr': 50,
u'ba\u0159': 50,
u'l\xedh': 50,
u'\xe1br': 50,
u'\xe1by': 50,
u'a\u017es': 50,
u'moi': 50,
u'kog': 50,
u'ewi': 50,
u'bi\u0159': 50,
u'a\u0165m': 50,
u'\xe9ku': 50,
u'ajm': 50,
u'pmo': 50,
u'yvl': 50,
u'cav': 50,
u'taw': 50,
u'myk': 50,
u'cik': 49,
u'tr\xfc': 49,
u'iff': 49,
u'jra': 49,
u'tfr': 49,
u'dr\xed': 49,
u'\u010dr#': 49,
u'eyn': 49,
u'#fd': 49,
u'ub\u016f': 49,
u'nwe': 49,
u'r\xfcp': 49,
u'ahi': 49,
u'g#w': 49,
u'd\u011br': 49,
u'lhe': 49,
u'uk\u016f': 49,
u'e\u0161#': 49,
u'v#\xe1': 49,
u'cpy': 49,
u'zi\u0161': 49,
u'uau': 49,
u'kij': 49,
u'h\u010do': 49,
u'z\u010di': 49,
u'\u011brc': 49,
u'yre': 49,
u'slz': 49,
u'#\u0161\u0148': 49,
u'\u010d#h': 49,
u'r\xedg': 49,
u'\xe1bn': 49,
u't\xe4l': 49,
u'hyo': 49,
u'okm': 49,
u'ewo': 49,
u'biz': 49,
u'\u017ee\u0161': 49,
u'das': 49,
u'm\u011b\u0165': 49,
u'ud\u017e': 49,
u'\xfado': 49,
u'\xe8s#': 49,
u'dyp': 49,
u'tea': 49,
u'x#o': 48,
u'ha\u0161': 48,
u'mb#': 48,
u'oer': 48,
u'\xe9ve': 48,
u'dja': 48,
u'\xf3cr': 48,
u'trk': 48,
u'lga': 48,
u'nkt': 48,
u'aie': 48,
u'\u0148#\xfa': 48,
u'if\u016f': 48,
u'\xedgu': 48,
u'ozz': 48,
u'r\xe3o': 48,
u'eyh': 48,
u'twi': 48,
u'eyr': 48,
u'\u011bk\xe9': 48,
u'gir': 48,
u'g#u': 48,
u'l\u016fv': 48,
u'\u010fmi': 48,
u'sik': 48,
u'r\xfan': 48,
u'\u010di\u010d': 48,
u'jot': 48,
u'\xedd#': 48,
u'hnd': 48,
u'anh': 48,
u'm\u016fr': 48,
u'agd': 48,
u'htf': 48,
u'#\u017el': 48,
u'\xfdsm': 48,
u'kg#': 48,
u'#lp': 48,
u's\xf3c': 48,
u'de\u0159': 48,
u'r\xe9v': 48,
u'ysa': 48,
u'\xfcpe': 48,
u'ryz': 48,
u'kvo': 48,
u'jut': 48,
u'rvc': 48,
u'\u011btk': 48,
u'\u016f\u017ei': 48,
u'gr\xe1': 48,
u'#cn': 47,
u'rfe': 47,
u'krs': 47,
u'\xe9bk': 47,
u'\xf3r#': 47,
u'\xed#\xe1': 47,
u'ifu': 47,
u'kae': 47,
u'eed': 47,
u'i\xe9\u0159': 47,
u'e#q': 47,
u'ufm': 47,
u'kuo': 47,
u'r\u016fd': 47,
u'aph': 47,
u'#ey': 47,
u'it\xe4': 47,
u'\xe9zn': 47,
u'phe': 47,
u'la\xdf': 47,
u'\xdfov': 47,
u'\u010dlu': 47,
u'#tw': 47,
u'\xe1\u0159n': 47,
u'l\xe9s': 47,
u'lfo': 47,
u'ksh': 47,
u'nja': 47,
u'nu\xe1': 47,
u'e\xfac': 47,
u'\xed\u0165o': 47,
u'lmy': 46,
u'\xe4l\xe4': 46,
u'ezj': 46,
u'sz#': 46,
u'eiz': 46,
u'srr': 46,
u'ozj': 46,
u'i\u0159a': 46,
u'eaf': 46,
u'e#\xe1': 46,
u'\u011b\u010fm': 46,
u'#bb': 46,
u'gae': 46,
u'j\u0161t': 46,
u'\xe9\u0159\u016f': 46,
u'+##': 46,
u'e\u010fo': 46,
u'su\xe1': 46,
u'eub': 46,
u'rpu': 46,
u'hoo': 46,
u'l\xf3r': 46,
u'nle': 46,
u'#ej': 46,
u'ued': 46,
u'leu': 46,
u'mp#': 46,
u'eg#': 46,
u'\u017eur': 46,
u'be\u0161': 46,
u'bai': 46,
u'opp': 46,
u'vri': 46,
u'p#f': 46,
u'msa': 46,
u'd\xe9s': 46,
u'rm#': 46,
u'#pc': 46,
u'hih': 46,
u'a\xdfo': 46,
u'\u016f\u017ek': 46,
u'uzv': 46,
u'cuo': 45,
u't\xe9v': 45,
u'vus': 45,
u'zki': 45,
u'\u0161#\u0161': 45,
u'cim': 45,
u'afc': 45,
u'\u0161iv': 45,
u'\u010dbo': 45,
u'#ss': 45,
u'oxe': 45,
u't\u0161o': 45,
u'yar': 45,
u'#j\xf8': 45,
u'meh': 45,
u'ayl': 45,
u'\xe1d\u016f': 45,
u'tsi': 45,
u'tof': 45,
u'ngk': 45,
u'u\u010dt': 45,
u'v\xe9r': 45,
u'\xe9#w': 45,
u'\xfdtu': 45,
u'whi': 45,
u'\u011bz\u016f': 45,
u'kn\xfd': 45,
u'fo#': 45,
u've\u010f': 45,
u'dyh': 45,
u'uay': 45,
u'\u016fdk': 45,
u'mt\xe9': 45,
u'r#\u0161': 45,
u'ae#': 45,
u'yos': 45,
u'lge': 45,
u'n\xfd\u0159': 45,
u'b\xe9n': 45,
u'awa': 45,
u'mkl': 45,
u'j\xedr': 45,
u'ic\xe1': 45,
u'f\xedm': 45,
u'igl': 45,
u'tef': 45,
u'\xe9r\xe1': 44,
u'\xe1zy': 44,
u'\xfatr': 44,
u'rzl': 44,
u'yzr': 44,
u'ezz': 44,
u'\xfdzb': 44,
u'nri': 44,
u'ijk': 44,
u'evm': 44,
u'daw': 44,
u'hrc': 44,
u'eik': 44,
u'dzk': 44,
u'low': 44,
u'#wr': 44,
u'#wu': 44,
u'lwa': 44,
u'gai': 44,
u's\xedn': 44,
u'roy': 44,
u'\xfdms': 44,
u'rkh': 44,
u'v\u0103l': 44,
u'sef': 44,
u'pha': 44,
u'lae': 44,
u'niq': 44,
u'r\xfds': 44,
u'aws': 44,
u'asr': 44,
u'\u010f\xe1b': 44,
u'beo': 44,
u'l\xe1b': 44,
u'\xe1be': 44,
u'bir': 44,
u'usj': 44,
u'\u0103le': 44,
u'xno': 44,
u'day': 44,
u't\xfds': 44,
u'e\u0159#': 44,
u'#v\u0103': 44,
u'm\u0159o': 44,
u'zv\u016f': 44,
u'fio': 43,
u'byz': 43,
u'\xf3z#': 43,
u'weg': 43,
u'wen': 43,
u'inb': 43,
u'\u0159\xe1c': 43,
u'k\xe9\u017e': 43,
u'azl': 43,
u'ohi': 43,
u'eeb': 43,
u'un\xe9': 43,
u'\xf3ne': 43,
u'or\xfd': 43,
u'\u0159#\u0159': 43,
u'aen': 43,
u'clu': 43,
u'\u010de\u0159': 43,
u'gt#': 43,
u'aht': 43,
u'g#e': 43,
u'fc#': 43,
u'alh': 43,
u'\u0161t\xe1': 43,
u'\u0161ta': 43,
u'sm\xe1': 43,
u'\xfdpr': 43,
u'i\u0107e': 43,
u'\xfar#': 43,
u'\u0148ke': 43,
u'z#w': 43,
u'#h\xe4': 43,
u'bee': 43,
u'kk#': 43,
u'gge': 43,
u'pik': 43,
u'gko': 43,
u'dym': 43,
u'ssc': 43,
u'hi\u0159': 43,
u'dji': 42,
u'\xfdfu': 42,
u'szy': 42,
u'yru': 42,
u'\xe9by': 42,
u'obm': 42,
u'ib\u011b': 42,
u'e\u017et': 42,
u'\xfan#': 42,
u's\u0165m': 42,
u'syp': 42,
u'\xe1he': 42,
u's\xe1k': 42,
u'poi': 42,
u'ieh': 42,
u'iek': 42,
u'\u017eh\xe1': 42,
u'rpc': 42,
u'uck': 42,
u'u#x': 42,
u'ftu': 42,
u'mlh': 42,
u'v\xfdf': 42,
u'akp': 42,
u'\xdfle': 42,
u'\u017eun': 42,
u'l#w': 42,
u'mcz': 42,
u'ul\xed': 42,
u'#\u0161r': 42,
u'#\u0161\xe1': 42,
u'd\u017eo': 42,
u'\xedkr': 42,
u'mkn': 42,
u'uhn': 42,
u'\xe9zy': 42,
u'nbo': 42,
u'n\u0165i': 42,
u'#\u010f\xe1': 42,
u'\xfdr\u016f': 42,
u'\u0159be': 41,
u'tzu': 41,
u'hew': 41,
u'k\xe1b': 41,
u'ojt': 41,
u'o\xfa\u010d': 41,
u'vco': 41,
u'guw': 41,
u'eij': 41,
u'cea': 41,
u'kao': 41,
u'l\u010do': 41,
u'\u011fan': 41,
u'drk': 41,
u'do\u011f': 41,
u'zl\xe1': 41,
u'equ': 41,
u'vbo': 41,
u'o\u011fa': 41,
u'eui': 41,
u'oar': 41,
u's\u010da': 41,
u'llu': 41,
u'g#h': 41,
u'\u0161l\xe1': 41,
u'#rp': 41,
u'cdc': 41,
u'jol': 41,
u'\xeddo': 41,
u'#tm': 41,
u'm\u016fs': 41,
u'wur': 41,
u'ihr': 41,
u'cow': 41,
u'vzl': 41,
u'\u010f#h': 41,
u'mp\xe9': 41,
u'uwu': 41,
u'i\u0144s': 41,
u'rp\u010d': 41,
u'ws#': 41,
u'yss': 41,
u'ics': 41,
u'eyb': 41,
u'ti\xed': 41,
u'rv\xed': 41,
u'\u010f#c': 41,
u'i\u0161k': 41,
u'\u0159ni': 41,
u'i\u010dl': 41,
u'e\u0148a': 41,
u'bbc': 41,
u'\u0159bi': 40,
u'fi\xe1': 40,
u'\xfati': 40,
u'uod': 40,
u'ojz': 40,
u'r\xf3z': 40,
u'db\xed': 40,
u'\xedr\u010d': 40,
u'\u011bl\u016f': 40,
u'way': 40,
u'n\u010fa': 40,
u'tf#': 40,
u'drc': 40,
u'\u0159#g': 40,
u'\xfcms': 40,
u'eyo': 40,
u'nhr': 40,
u'\u011b#\xed': 40,
u'\xfdtk': 40,
u'tc#': 40,
u'jkv': 40,
u'\u0159\xed\u0148': 40,
u'tty': 40,
u'blk': 40,
u'n\xe1\u017e': 40,
u'glu': 40,
u'nby': 40,
u'ep\xfd': 40,
u'b\u011b\u0161': 40,
u'lm\xe1': 40,
u'rl#': 40,
u'#\u017eo': 40,
u'ccs': 40,
u'\xe1sy': 40,
u'mpc': 40,
u'in\u0165': 40,
u't\u0159a': 40,
u'up\xe9': 40,
u'\u0107e#': 40,
u'oph': 40,
u'opj': 40,
u'\xe1bl': 40,
u'f#c': 40,
u'enm': 40,
u'\xe1nd': 40,
u'an\u010f': 40,
u'huc': 40,
u'o\u0161\u0165': 40,
u'\u0161\xedi': 39,
u'o\u0159u': 39,
u'jia': 39,
u'jah': 39,
u'brd': 39,
u'h\xe1l': 39,
u'h\xe1r': 39,
u'jv\u016f': 39,
u'\u016f\u010de': 39,
u'svv': 39,
u'keh': 39,
u'h\u016fr': 39,
u'\u0165#\xfa': 39,
u'\u0165#\u0159': 39,
u'\xf3ko': 39,
u'\xe1\u010da': 39,
u'\u0103se': 39,
u'oir': 39,
u'\xedit': 39,
u's\xe9h': 39,
u'a\u0144s': 39,
u'pkk': 39,
u'r\xfcf': 39,
u'jok': 39,
u'ssn': 39,
u'cp\xe1': 39,
u'#eq': 39,
u'vi\xe1': 39,
u'fto': 39,
u'bho': 39,
u'odf': 39,
u'kp#': 39,
u'#y#': 39,
u'i\u0148m': 39,
u'coc': 39,
u'\xe8ro': 39,
u'r\u016fc': 39,
u'ccp': 39,
u'dts': 39,
u'ne\xe1': 39,
u'up\u011b': 39,
u'a\u010ds': 39,
u'lqu': 39,
u'\xfask': 39,
u'x\u016f#': 39,
u'ly\u017e': 39,
u'ixn': 39,
u'\xe9ce': 39,
u'nur': 39,
u'zbl': 39,
u'pme': 39,
u'ar\xf3': 39,
u'arf': 39,
u'wna': 39,
u'yv\u011b': 39,
u'\xfcff': 39,
u'j#\u0161': 38,
u'zh\u016f': 38,
u'lry': 38,
u't\xe9c': 38,
u'fiu': 38,
u'fif': 38,
u'ivk': 38,
u'\xed\u0148k': 38,
u'heo': 38,
u'hz#': 38,
u'cib': 38,
u'szk': 38,
u'zco': 38,
u'y\xfa\u010d': 38,
u'v\xe1b': 38,
u'urp': 38,
u'auh': 38,
u'eep': 38,
u'hbe': 38,
u'o#\xe9': 38,
u'oef': 38,
u'vl\xe9': 38,
u'icu': 38,
u'g#i': 38,
u'g#l': 38,
u'dfi': 38,
u'\u0159le': 38,
u'#md': 38,
u'olz': 38,
u'\u017e\xe9r': 38,
u'gbo': 38,
u'x##': 38,
u'\u017e\xe1c': 38,
u'i\u017ei': 38,
u'dmy': 38,
u'\u017ea\u0159': 38,
u'\xfasl': 38,
u'koi': 38,
u'dge': 38,
u'y\u0159l': 38,
u'ysz': 38,
u'thb': 38,
u'csi': 38,
u'ssu': 38,
u'zzu': 38,
u'rru': 38,
u'cf#': 38,
u'd\xe1k': 38,
u'uo\u017e': 37,
u'yl\u016f': 37,
u'cij': 37,
u'cih': 37,
u'zty': 37,
u'er\u0148': 37,
u'br\xfa': 37,
u'rns': 37,
u't\xfad': 37,
u'no\u0161': 37,
u'dzn': 37,
u'ffo': 37,
u'svi': 37,
u'\xf3ji': 37,
u'eel': 37,
u'i\xe1r': 37,
u'fdp': 37,
u'oe#': 37,
u'muh': 37,
u'vta': 37,
u'jwe': 37,
u'o\u0159#': 37,
u'o\u015fi': 37,
u'\u0159u#': 37,
u'd\u017ee': 37,
u'i#q': 37,
u'\u0161ty': 37,
u'ol\u010d': 37,
u'yde': 37,
u'ssr': 37,
u'm\u016fc': 37,
u'ml\xfd': 37,
u'l\xfdn': 37,
u'law': 37,
u'yp\u011b': 37,
u'\xe8re': 37,
u'\xe8ra': 37,
u'\u017e\xe1b': 37,
u'b\xe9z': 37,
u'a\u010dc': 37,
u'zu\u017e': 37,
u'nya': 37,
u'\u016fd#': 37,
u'#\u0161u': 37,
u'jdr': 37,
u'ab\u0159': 37,
u'cso': 37,
u'peu': 37,
u'god': 37,
u'raw': 37,
u'nu\u017e': 37,
u'ti\xe9': 37,
u'ykn': 37,
u'#pk': 37,
u'ri\u0161': 37,
u'gr\u016f': 37,
u'#oi': 36,
u'\u0151k\xe9': 36,
u'lgr': 36,
u'aim': 36,
u'\xed#y': 36,
u'\xeato': 36,
u'veh': 36,
u'ify': 36,
u'b\u0103s': 36,
u'urv': 36,
u'\u016fzo': 36,
u'gei': 36,
u'#b\u0103': 36,
u'pcb': 36,
u'\u0161\u016f#': 36,
u'eux': 36,
u'k\u016fv': 36,
u'c\xeds': 36,
u'elj': 36,
u'vio': 36,
u'llh': 36,
u'\u010dch': 36,
u'l\xf3#': 36,
u'wro': 36,
u'ogg': 36,
u'di\u017e': 36,
u'gd\xe1': 36,
u'ueg': 36,
u'ctn': 36,
u'etf': 36,
u'alq': 36,
u'la\u0165': 36,
u'm\xe4k': 36,
u'ek\xe9': 36,
u'aoi': 36,
u'ecu': 36,
u'ecs': 36,
u'ot\xfa': 36,
u'#t\u0151': 36,
u'nyi': 36,
u'\xedov': 36,
u't\u0151k': 36,
u'oks': 36,
u'ogh': 36,
u'#bs': 36,
u'ra\xe7': 36,
u'ti\xfa': 36,
u'n\u016fv': 36,
u'ejw': 36,
u'ju\u017e': 36,
u'#mh': 36,
u'd\xe1d': 36,
u'ebb': 36,
u'rlg': 35,
u'cze': 35,
u'frv': 35,
u'tza': 35,
u'yze': 35,
u'nm\xe4': 35,
u'gb#': 35,
u'd\u0161i': 35,
u'ffi': 35,
u'\u016f\u010dk': 35,
u'\u0148#\u0159': 35,
u'#nd': 35,
u'lsu': 35,
u'\u0159ef': 35,
u'iun': 35,
u's\u0165t': 35,
u'gad': 35,
u'oeb': 35,
u'g\xe1m': 35,
u'kuh': 35,
u'#rh': 35,
u'yl\xfd': 35,
u'#vv': 35,
u'tt\xed': 35,
u'vi\u0107': 35,
u'uef': 35,
u'th\xe9': 35,
u'plp': 35,
u'\u0148a#': 35,
u'liu': 35,
u'#h\xfd': 35,
u'fl\xf3': 35,
u'\xe9se': 35,
u'kgb': 35,
u'oon': 35,
u'a\u017e\xe9': 35,
u'\u017ee\u0159': 35,
u'lfi': 35,
u'ys\xe9': 35,
u'\xfadi': 35,
u'oc\u0148': 35,
u'\xe4ki': 35,
u'fae': 35,
u'm\u0159\xed': 35,
u'\u0159b\xed': 34,
u'uos': 34,
u'\xe9rr': 34,
u'hag': 34,
u'lm\u016f': 34,
u's\xe9m': 34,
u'\u017ev\xfd': 34,
u'\xe1sz': 34,
u'h\xe9n': 34,
u'\xe9n\u016f': 34,
u'euj': 34,
u'y\u0159k': 34,
u'erw': 34,
u't\xfam': 34,
u'ib\xe9': 34,
u'e\u0159n': 34,
u'uvm': 34,
u'\xed\u010d\u016f': 34,
u'#j\xe4': 34,
u'yem': 34,
u'orw': 34,
u'l\u017eo': 34,
u'\u0148\u016fr': 34,
u'nsm': 34,
u'dcm': 34,
u'aan': 34,
u'j\u0161e': 34,
u's\xe1v': 34,
u'ylw': 34,
u'\xe4tt': 34,
u'hoh': 34,
u'e\u0161v': 34,
u'krk': 34,
u'u#\xe9': 34,
u'\xe9te': 34,
u'ju#': 34,
u'mte': 34,
u'szl': 34,
u'x#d': 34,
u'x#t': 34,
u'j\xe4\xe4': 34,
u'yri': 34,
u'pyk': 34,
u'e\u010ft': 34,
u'er\u0161': 34,
u'nyh': 34,
u'\xf6sc': 34,
u'pej': 34,
u'zfr': 34,
u'ilc': 34,
u'ys\xe1': 34,
u'\xe9ci': 34,
u'p\xfdc': 34,
u'\u0161\u0148\u016f': 34,
u'anp': 34,
u'faz': 34,
u'\xe4\xe4t': 34,
u'dui': 34,
u'rt\xfa': 34,
u'b\xf6s': 34,
u'bb\xe1': 34,
u'mfo': 34,
u'\u015foi': 33,
u'hak': 33,
u'lr#': 33,
u'jtv': 33,
u'iv\u0161': 33,
u'#cm': 33,
u'ezy': 33,
u'jew': 33,
u'ojj': 33,
u'b#\xfa': 33,
u'\xfchr': 33,
u'\u011bhy': 33,
u'rew': 33,
u'pje': 33,
u'uz\u016f': 33,
u'\u011b\u0159m': 33,
u'azz': 33,
u'\xed#x': 33,
u'#s\xf8': 33,
u'v\xe1w': 33,
u'ncz': 33,
u'eem': 33,
u'wir': 33,
u'kib': 33,
u'vd\xe1': 33,
u'\xe1wa': 33,
u'zl\xed': 33,
u'u\u015fo': 33,
u'nsh': 33,
u'tsv': 33,
u'\u010ded': 33,
u'poe': 33,
u'\u0161tu': 33,
u'vag': 33,
u'#mt': 33,
u'z\xe1\u017e': 33,
u'sa\xef': 33,
u'zpi': 33,
u'#pf': 33,
u'se\u0148': 33,
u'uev': 33,
u'npi': 33,
u'akd': 33,
u'zed': 33,
u'vul': 33,
u'pu\u010d': 33,
u'ma\u0144': 33,
u'v\u0159u': 33,
u'tga': 33,
u'\xedco': 33,
u'isg': 33,
u'o\u0159t': 33,
u'\u017ea#': 33,
u'jd\xe1': 33,
u'#d\xfc': 33,
u'f#u': 33,
u'y\u0159\u010d': 33,
u'ysm': 33,
u'bu\u015f': 33,
u'\xeds\xed': 33,
u'h\xe4u': 33,
u'p\xe1m': 33,
u'gsi': 33,
u'eff': 33,
u'hio': 33,
u'dut': 33,
u'tax': 33,
u'#mb': 33,
u'ry\xed': 32,
u'uor': 32,
u'ird': 32,
u'scf': 32,
u'k\xe1s': 32,
u'jep': 32,
u'nz\xe1': 32,
u'brn': 32,
u'hrk': 32,
u'lcu': 32,
u'rn\u016f': 32,
u'k#\xed': 32,
u'uvt': 32,
u'nc#': 32,
u'#ng': 32,
u'\xe7a#': 32,
u'he\u0148': 32,
u'adw': 32,
u'u\u0161\u0165': 32,
u'\xfdme': 32,
u'ex\u016f': 32,
u'rk\xfd': 32,
u'oya': 32,
u'ieo': 32,
u'ief': 32,
u'y\u0144s': 32,
u'\u010di\u016b': 32,
u'ndl': 32,
u'hoe': 32,
u'sss': 32,
u'\u0148ku': 32,
u'cpa': 32,
u'zm\xfd': 32,
u'vi\xed': 32,
u'i\u016bt': 32,
u'ag\xf3': 32,
u'np#': 32,
u'\u017e\xed\u0161': 32,
u'\u010f##': 32,
u'\u016ft\xe1': 32,
u'kxe': 32,
u'ecc': 32,
u'zy\u0144': 32,
u'di\u0107': 32,
u'kk\xfd': 32,
u'\xf6de': 32,
u'koc': 32,
u'enf': 32,
u'\xe9gu': 32,
u'gkm': 32,
u'ljo': 32,
u'ybi': 32,
u'jmi': 31,
u'cz#': 31,
u'#o\u0159': 31,
u'kx#': 31,
u'onh': 31,
u'g\xf6n': 31,
u'\xf6nc': 31,
u'aal': 31,
u'\u0161#i': 31,
u'zti': 31,
u'w#f': 31,
u'\u016fje': 31,
u'\xedvr': 31,
u'ai\u0165': 31,
u'uz#': 31,
u'ffl': 31,
u'c\u0148u': 31,
u'\xedzr': 31,
u'ur\u016f': 31,
u'aux': 31,
u'mej': 31,
u'\u0165#f': 31,
u'gyi': 31,
u'kia': 31,
u'kig': 31,
u'zl\xf3': 31,
u'goc': 31,
u'two': 31,
u'pc#': 31,
u'\xf8nd': 31,
u'ldu': 31,
u'uim': 31,
u'ubc': 31,
u'\xe9\u0159#': 31,
u'sru': 31,
u'rox': 31,
u'vt\u011b': 31,
u'\xe9#\xed': 31,
u's\xf8n': 31,
u'i\u0165a': 31,
u'\u010d#\u010d': 31,
u'fog': 31,
u'acy': 31,
u'\u010f#r': 31,
u'x#e': 31,
u'b\xe9r': 31,
u'\xfa\u017ei': 31,
u'rur': 31,
u'duh': 31,
u'#ln': 31,
u'd\xfch': 31,
u'bm\u011b': 31,
u'\u017ean': 31,
u'g\xf3n': 31,
u'dah': 31,
u'#x#': 31,
u'\xfd\u010dt': 31,
u'iu\u010d': 31,
u'tiu': 31,
u'd\xe9b': 31,
u'd\xe9r': 31,
u'hua': 31,
u'rv\xf3': 31,
u'\u010fto': 31,
u'te\u0161': 31,
u'gun': 31,
u'y\u016f#': 31,
u'\xfdve': 31,
u'\u0159n\xe1': 31,
u'v\xf3z': 31,
u'on\u0161': 30,
u'onr': 30,
u'y\u0159n': 30,
u'pfl': 30,
u'lzy': 30,
u'rb#': 30,
u'jve': 30,
u'gu\xe9': 30,
u'\u011bs\u016f': 30,
u'\xfdje': 30,
u'keo': 30,
u'yad': 30,
u'nfa': 30,
u'fn\xed': 30,
u'oia': 30,
u'o#y': 30,
u'ldr': 30,
u'uft': 30,
u'\u0161\u0165\xe1': 30,
u'mia': 30,
u'im\xe9': 30,
u'ahv': 30,
u'\u0161lu': 30,
u'uyo': 30,
u'yhu': 30,
u'\u0161pl': 30,
u'iep': 30,
u'edd': 30,
u'joa': 30,
u'y\u0161t': 30,
u'veg': 30,
u'v#\xe9': 30,
u'ax#': 30,
u'axn': 30,
u'\xedhl': 30,
u'ckr': 30,
u'mhz': 30,
u'\xedtu': 30,
u's\u0142a': 30,
u'\xedpo': 30,
u'\u0161\xedj': 30,
u'k\xf3s': 30,
u'bey': 30,
u'\xfdse': 30,
u'tyn': 30,
u'mo\xfa': 30,
u'o\u0161p': 30,
u'dee': 30,
u'\xf3dy': 30,
u'\xfalp': 30,
u'lyc': 30,
u'a\u0165t': 30,
u'\u0161tv': 30,
u'dl\xfd': 30,
u'e\xfal': 30,
u'\u010dou': 30,
u'arx': 30,
u'arh': 30,
u'\u0159n\xfd': 30,
u'bb#': 30,
u'lvu': 30,
u'uon': 29,
u'r\u016fp': 29,
u'\u0161ac': 29,
u'#cf': 29,
u'nra': 29,
u'\u0161#f': 29,
u'tr\xe3': 29,
u'euh': 29,
u'rbr': 29,
u'aig': 29,
u'wap': 29,
u'jvi': 29,
u'jre': 29,
u's\xe2r': 29,
u'#s\xe2': 29,
u'#j\u0119': 29,
u'eaz': 29,
u'auc': 29,
u'\u016fz#': 29,
u'\u017eci': 29,
u'gee': 29,
u'\u010d\xedr': 29,
u'\u0148\u016fm': 29,
u'eu\u0161': 29,
u'\xe9m\u017e': 29,
u'll\xed': 29,
u'pt\xe9': 29,
u'sui': 29,
u'#ie': 29,
u'yl\xed': 29,
u'\u010d#i': 29,
u'edg': 29,
u'lth': 29,
u'\u016flk': 29,
u'it\u0148': 29,
u'lee': 29,
u'ckd': 29,
u'etm': 29,
u'\xe2rb': 29,
u'\u016fto': 29,
u'plh': 29,
u'\u016bt\u0117': 29,
u'b\xe9g': 29,
u'z#\xed': 29,
u'ecv': 29,
u'jli': 29,
u'j\u016fv': 29,
u'isr': 29,
u'tyc': 29,
u'opm': 29,
u'b\u010de': 29,
u'iwo': 29,
u'\xf3d\u016f': 29,
u'f#r': 29,
u'ysu': 29,
u'j\u0119d': 29,
u'enp': 29,
u'tl\xe9': 29,
u'p\xfd#': 29,
u'yjc': 29,
u'\u0119dr': 29,
u'ync': 29,
u'eft': 29,
u'\xfdk\u0159': 29,
u'ar\u010d': 29,
u'ndh': 29,
u'vc#': 29,
u'hmy': 29,
u'\xe9rk': 28,
u'hap': 28,
u'mbl': 28,
u'\xe9vy': 28,
u'uou': 28,
u'#g\xe9': 28,
u'\xedja': 28,
u'w#d': 28,
u'\u0161id': 28,
u'b\u0159a': 28,
u'\xfdn\xed': 28,
u'y\u010do': 28,
u'\xed\u010dc': 28,
u'ym\u016f': 28,
u'\xedzy': 28,
u'urr': 28,
u'e\u017ev': 28,
u'ayi': 28,
u'\xe9un': 28,
u'zhm': 28,
u'rfu': 28,
u'nho': 28,
u'\u010de\u017e': 28,
u'rtk': 28,
u'si\xf3': 28,
u'rps': 28,
u'atv': 28,
u'lps': 28,
u'zt\xe1': 28,
u'ol\xfd': 28,
u'z\xe1#': 28,
u'ttp': 28,
u'itb': 28,
u'exk': 28,
u'etb': 28,
u'zeu': 28,
u'ilb': 28,
u'hth': 28,
u'zaa': 28,
u'\u017e\xe1h': 28,
u'n+#': 28,
u'#u\u0159': 28,
u'gso': 28,
u'rtr': 28,
u'beb': 28,
u'dpy': 28,
u'pai': 28,
u'r\xe9u': 28,
u'\xdflo': 28,
u'mke': 28,
u'bi\u0161': 28,
u'a\u0165k': 28,
u'vvd': 28,
u'icn': 28,
u'd\xe9n': 28,
u'ju\u0161': 28,
u'\u0165ie': 28,
u'rv\u011b': 28,
u'zzl': 28,
u'zza': 28,
u'y\u0148s': 28,
u'\xedba': 27,
u'\xedbr': 27,
u'irl': 27,
u'#g\xfc': 27,
u'zk\u0159': 27,
u'hr\xf6': 27,
u'wag': 27,
u'yih': 27,
u'vei': 27,
u'uvl': 27,
u'ifn': 27,
u'\xe1ur': 27,
u'#j\xf3': 27,
u'ovt': 27,
u'sn\u016f': 27,
u'bk\xe1': 27,
u'toe': 27,
u'#f#': 27,
u'iim': 27,
u'y\u017e\xed': 27,
u'c\u0148o': 27,
u'#zw': 27,
u'vp\xe1': 27,
u'poa': 27,
u'jsn': 27,
u'uuk': 27,
u'l\xe4#': 27,
u'bdu': 27,
u'k\u017ei': 27,
u'\xedd\u0159': 27,
u'y\u0161p': 27,
u'\xe1l\u010d': 27,
u'\xfakl': 27,
u'\xedhu': 27,
u'phn': 27,
u'et\u0161': 27,
u'hwo': 27,
u'pt\u016f': 27,
u'vum': 27,
u'#r#': 27,
u'k\xf3t': 27,
u'hl\u016f': 27,
u'www': 27,
u'r\xf6d': 27,
u'iob': 27,
u'esj': 27,
u'm\u011bv': 27,
u'j\xe1u': 27,
u'\xe9c\xed': 27,
u'\u016fck': 27,
u'hik': 27,
u'lja': 27,
u'rlh': 26,
u'\xedpl': 26,
u'jiz': 26,
u'zoo': 26,
u'#c\xf3': 26,
u'\u0161\u0165\u016f': 26,
u'rfa': 26,
u'w#s': 26,
u'aek': 26,
u'rex': 26,
u'em\u0161': 26,
u'\xf3rd': 26,
u'yis': 26,
u'#xv': 26,
u'\u0159ia': 26,
u'eey': 26,
u'\u0148#\u0161': 26,
u'lkl': 26,
u'tb\u011b': 26,
u'\u0165#\u0161': 26,
u'fne': 26,
u'zsc': 26,
u'jfu': 26,
u'\xe9in': 26,
u'gao': 26,
u'aea': 26,
u'cua': 26,
u'nsg': 26,
u'\xe9mt': 26,
u'd\u017es': 26,
u'a\u0148d': 26,
u'rtt': 26,
u'a\xe7a': 26,
u'i#\xe1': 26,
u'edf': 26,
u'\u010dip': 26,
u'#m\u0161': 26,
u'myz': 26,
u'hog': 26,
u'\u017elu': 26,
u'fp#': 26,
u'u#y': 26,
u'trm': 26,
u'agy': 26,
u'\xedh#': 26,
u'bt#': 26,
u'zeh': 26,
u'x#r': 26,
u'l\xfcg': 26,
u'u\xe9l': 26,
u'jpe': 26,
u'\u016fvk': 26,
u'\xfcge': 26,
u'mca': 26,
u'fl\xfc': 26,
u'pay': 26,
u'i#x': 26,
u'kmu': 26,
u'f\xe9i': 26,
u'p#\u0159': 26,
u'ajt': 26,
u'\u010dc\xed': 26,
u'c\xf3r': 26,
u'hip': 26,
u'mb\xf3': 25,
u'xkl': 25,
u'#c\xe1': 25,
u'yza': 25,
u'yz\xed': 25,
u'ij\xe1': 25,
u'cic': 25,
u'\xe1#w': 25,
u'xvi': 25,
u'a#y': 25,
u'ps\u016f': 25,
u'z\xe1\xed': 25,
u'wi#': 25,
u'rsm': 25,
u'rsa': 25,
u'mey': 25,
u'\u0159as': 25,
u'\xedam': 25,
u'#n+': 25,
u'#nk': 25,
u'fsf': 25,
u'pvc': 25,
u'ajf': 25,
u'mm#': 25,
u'zhn': 25,
u'vl\xed': 25,
u'\xe1ds': 25,
u'miu': 25,
u'v\xe1\u010d': 25,
u'adj': 25,
u'u\u0161\u010d': 25,
u'+#n': 25,
u'hsv': 25,
u'll\u016f': 25,
u'kyo': 25,
u'tbv': 25,
u'shw': 25,
u'oye': 25,
u'o\u017em': 25,
u'#rc': 25,
u'uuj': 25,
u'bda': 25,
u'ed\u017e': 25,
u'\xfazi': 25,
u'\u010dir': 25,
u'sms': 25,
u'lp\xed': 25,
u'\u015fi#': 25,
u'v\xed\u010d': 25,
u'#l#': 25,
u'\xfcnt': 25,
u'ml\u017e': 25,
u'g\xfcn': 25,
u'mhl': 25,
u'ntg': 25,
u'coo': 25,
u'epn': 25,
u'sps': 25,
u'zaf': 25,
u'csa': 25,
u'l#\xed': 25,
u'#ht': 25,
u'\u017eib': 25,
u'rqu': 25,
u'upt': 25,
u'isv': 25,
u'\u0161ou': 25,
u'zun': 25,
u'a\u017ek': 25,
u'd#\xed': 25,
u'\u017ear': 25,
u'aiz': 25,
u'\u0148d\u017e': 25,
u'\xe4us': 25,
u'juk': 25,
u'r#w': 25,
u'htt': 25,
u'efn': 25,
u'ta\u0161': 25,
u'bba': 25,
u'\xfdr#': 25,
u'czm': 24,
u'\xed\u0161o': 24,
u'a\u0142o': 24,
u'so\u010d': 24,
u'#\xf3#': 24,
u'xod': 24,
u'\xeffi': 24,
u'\xe1#\xed': 24,
u'w#v': 24,
u'cmr': 24,
u'\xfd\u0161\u016f': 24,
u'\u0161me': 24,
u'h\xe1\u0161': 24,
u'umr': 24,
u'eig': 24,
u'ceo': 24,
u'\xe9fa': 24,
u'lo\u0159': 24,
u'\xed#\xe9': 24,
u'dtv': 24,
u'tmu': 24,
u'dvy': 24,
u'ur\xe3': 24,
u'#j#': 24,
u'eab': 24,
u'hnp': 24,
u'ovd': 24,
u'wua': 24,
u'gyu': 24,
u'lfe': 24,
u'#bp': 24,
u'a\u0159\u010d': 24,
u'bke': 24,
u'bka': 24,
u'\u016fro': 24,
u'\u016f#\xed': 24,
u'vpu': 24,
u'\u011bk\u010d': 24,
u'toa': 24,
u'+#a': 24,
u'ahb': 24,
u'g#\u010d': 24,
u'apv': 24,
u'crd': 24,
u'\xf3#n': 24,
u'lp\u011b': 24,
u'cdu': 24,
u'jkm': 24,
u'a\xeff': 24,
u'gdy': 24,
u'mle': 24,
u'#\xe8#': 24,
u'\u0165ku': 24,
u'\u010f#\u010d': 24,
u'\xe9\u0161e': 24,
u'tl\xe1': 24,
u'\xedpk': 24,
u'\u011bne': 24,
u'z\u010d\xed': 24,
u'#\u017eu': 24,
u'\xe9do': 24,
u'stf': 24,
u'idh': 24,
u'utz': 24,
u'\xfdsu': 24,
u'nex': 24,
u'zuz': 24,
u'oov': 24,
u'kku': 24,
u'l\u0148#': 24,
u'\xfasm': 24,
u'fuj': 24,
u'bi\u0144': 24,
u'\xfahy': 24,
u'vre': 24,
u'u\u017ek': 24,
u'cod': 24,
u'yb\u016f': 24,
u'p#g': 24,
u'p#\xfa': 24,
u'l\u0161s': 24,
u'\u0161\xe1t': 24,
u'\u0161\u0148u': 24,
u'ryo': 24,
u'lng': 24,
u'p\u010de': 24,
u'juv': 24,
u'nb\u011b': 24,
u'pve': 24,
u'riq': 24,
u'\u0161\xe9#': 24,
u'ebs': 24,
u'\xed\u0161n': 23,
u'ir\xf3': 23,
u'azm': 23,
u'oib': 23,
u'\u0161ah': 23,
u'#cp': 23,
u'\xe1#\xe9': 23,
u'cve': 23,
u'i\xf1a': 23,
u'\xed\u010d\xed': 23,
u'yml': 23,
u'pwa': 23,
u'\xedzc': 23,
u'jn#': 23,
u'rs\xed': 23,
u'\u017eos': 23,
u'drh': 23,
u'\u0161\u010de': 23,
u'ovg': 23,
u'v#w': 23,
u'zsv': 23,
u'\u017ec\u016f': 23,
u'oim': 23,
u'oiz': 23,
u'ujs': 23,
u'eym': 23,
u'eyt': 23,
u'\u010d\xedl': 23,
u'\u010d\xedk': 23,
u'\u0165\u016f#': 23,
u'\u016fry': 23,
u'ldm': 23,
u'\u0161\u0165s': 23,
u'\xfd\u0159o': 23,
u'\u0103ne': 23,
u'm\xfad': 23,
u'imm': 23,
u'iae': 23,
u'uya': 23,
u'dp\xe1': 23,
u'v\xe9b': 23,
u'\xe9e#': 23,
u'rkr': 23,
u'y\u0161i': 23,
u'xie': 23,
u'up\xfd': 23,
u'dhr': 23,
u'\u016fzu': 23,
u'\xe8ne': 23,
u'etz': 23,
u'\xf1as': 23,
u'r\u016fn': 23,
u'yts': 23,
u'cci': 23,
u'ucp': 23,
u'oxf': 23,
u'ycv': 23,
u'ec\u0148': 23,
u'lue': 23,
u'ups': 23,
u'\xf3ly': 23,
u'a\u0161\xe1': 23,
u'i\xfat': 23,
u'epw': 23,
u'ryv': 23,
u'\xf3do': 23,
u'ikk': 23,
u'ikh': 23,
u'\u0165sk': 23,
u'kst': 23,
u'rm\xe0': 23,
u'efk': 23,
u'hm\xfa': 23,
u'\xfdry': 23,
u'zvn': 23,
u'lv\xed': 23,
u'ir\xe9': 22,
u'irw': 22,
u'\xed\u0148#': 22,
u'\xfdz\xe1': 22,
u'xot': 22,
u'aaa': 22,
u'\xfamu': 22,
u'vgo': 22,
u'o\xfar': 22,
u'w#n': 22,
u'dgr': 22,
u'emm': 22,
u'wa\u0142': 22,
u'g\xfan': 22,
u'kah': 22,
u'iy#': 22,
u'\xf3jo': 22,
u'#\xb0#': 22,
u'ng\xfa': 22,
u's\xfd#': 22,
u'e\u010dm': 22,
u'wu#': 22,
u'zwi': 22,
u'yeu': 22,
u'#nt': 22,
u'ay\u016f': 22,
u'hb\xe1': 22,
u'bog': 22,
u'mao': 22,
u'b\xedk': 22,
u'id\u0161': 22,
u'jb\u011b': 22,
u'\xfd\u0159i': 22,
u'#\xf6z': 22,
u'\u016f#w': 22,
u'bei': 22,
u'+#j': 22,
u'gid': 22,
u'h\xe9l': 22,
u'ro\u0159': 22,
u's\xe1g': 22,
u'iav': 22,
u'g#\u0159': 22,
u'\xf6zd': 22,
u'ke\u0159': 22,
u'o\xe1r': 22,
u'si\xe1': 22,
u'#\u010do': 22,
u'oue': 22,
u'#m\u0103': 22,
u'ndp': 22,
u'\xfdp\u016f': 22,
u'lpe': 22,
u'dh\xe1': 22,
u'fop': 22,
u'tt\xe0': 22,
u'tt\u011b': 22,
u'sae': 22,
u'\u015fe#': 22,
u'zm\u016f': 22,
u'kl\xf3': 22,
u'l\xe8n': 22,
u'yai': 22,
u'\u016fhv': 22,
u'laq': 22,
u'ntd': 22,
u'yps': 22,
u'm\u0103n': 22,
u'\xe1s\u016f': 22,
u'pun': 22,
u'pua': 22,
u'oxa': 22,
u'cdo': 22,
u'sh\xe1': 22,
u'\xedck': 22,
u'zse': 22,
u'ba\u010d': 22,
u'\u017emo': 22,
u'jhm': 22,
u'zn\u016f': 22,
u'ulm': 22,
u'nyo': 22,
u'\u010d#l': 22,
u'r\xeds': 22,
u'kk\u0159': 22,
u'iwi': 22,
u'kow': 22,
u'z\xfar': 22,
u'e\u0165#': 22,
u'lys': 22,
u'\u0161t#': 22,
u'e\u0159\u0161': 22,
u'ioz': 22,
u'ys\xfd': 22,
u'\xe9ct': 22,
u'ocl': 22,
u'\xedsi': 22,
u'cnd': 22,
u'tij': 22,
u'#p\xfd': 22,
u'jua': 22,
u'ri\xf1': 22,
u'zrn': 21,
u'onw': 21,
u'dft': 21,
u'ev\u010d': 21,
u'sza': 21,
u'lgo': 21,
u'jz\u0159': 21,
u'\xe0#l': 21,
u'\xedr\xed': 21,
u'jvl': 21,
u'\xe1m\u0161': 21,
u'cab': 21,
u'mr\xe1': 21,
u'e\u0159a': 21,
u'\xf6ld': 21,
u'\xe9f#': 21,
u'puo': 21,
u'ffs': 21,
u'lkn': 21,
u'aua': 21,
u'c\xedo': 21,
u'gy\xf6': 21,
u'#n\u016f': 21,
u'hbi': 21,
u'\xe1\xed#': 21,
u'#b\xfc': 21,
u'o#\xe1': 21,
u'#\xe9t': 21,
u'rgr': 21,
u'\xedi#': 21,
u'\xf6rg': 21,
u'tf\xe1': 21,
u'\xfcrt': 21,
u'n\u016f\u017e': 21,
u'kye': 21,
u'\xe1nv': 21,
u'\u017epa': 21,
u'o\u017ep': 21,
u'#iu': 21,
u'#ix': 21,
u'rpk': 21,
u'ndg': 21,
u'dsb': 21,
u'n\xedd': 21,
u'b\xfct': 21,
u'\u0148ka': 21,
u'itm': 21,
u'\u0161kl': 21,
u'ct\xfd': 21,
u'pho': 21,
u'zea': 21,
u'\xed\u0159k': 21,
u'ntc': 21,
u'\u017e\xedz': 21,
u'szo': 21,
u'kxo': 21,
u'#h\xe9': 21,
u'ulv': 21,
u'\u0161v\xe1': 21,
u'baz': 21,
u'ua\u010d': 21,
u'fka': 21,
u'hdi': 21,
u'r\xe9d': 21,
u'bih': 21,
u'f#f': 21,
u'nqu': 21,
u'io\u0159': 21,
u'fyt': 21,
u'pi\u0161': 21,
u'icl': 21,
u'o\u015fe': 21,
u'\u0161\xe1k': 21,
u'y#x': 21,
u'cfc': 21,
u'\xf3ka': 21,
u'zv\xe9': 21,
u't\xedp': 20,
u'uk\u0159': 20,
u'zok': 20,
u'gsa': 20,
u'byh': 20,
u'uih': 20,
u'amv': 20,
u'w#b': 20,
u'\u0148al': 20,
u'er\xf3': 20,
u'\xfd\u0161c': 20,
u'mva': 20,
u'rnd': 20,
u'\xe1m\u016f': 20,
u'noo': 20,
u'\xf3ry': 20,
u'b\u0161t': 20,
u'e\u0159k': 20,
u'loj': 20,
u'loi': 20,
u'pui': 20,
u'asz': 20,
u'l\u010du': 20,
u'eek': 20,
u'\u0161u#': 20,
u'ngv': 20,
u'\xfa#a': 20,
u'#jp': 20,
u'h\u016fl': 20,
u'xho': 20,
u'm\xf6l': 20,
u'#np': 20,
u'aym': 20,
u'ayr': 20,
u'\u010d\xe1k': 20,
u'e#\xe0': 20,
u'oic': 20,
u'gaf': 20,
u'\xe1h\u0159': 20,
u'oem': 20,
u'iia': 20,
u'vpr': 20,
u'yqu': 20,
u'hsh': 20,
u'el\u0148': 20,
u'g#g': 20,
u'l\xf3n': 20,
u'uyq': 20,
u'\u011bpn': 20,
u'alj': 20,
u'rkl': 20,
u'w\xfcr': 20,
u'm\xed\u0148': 20,
u'#vj': 20,
u'\u011bzt': 20,
u'#m\xf6': 20,
u'mdc': 20,
u'lpo': 20,
u'khs': 20,
u'sst': 20,
u'ssl': 20,
u'sap': 20,
u'ui\xe8': 20,
u'\xe9zu': 20,
u'\xedhn': 20,
u'eoi': 20,
u'\u010f#i': 20,
u'\u010f#\u017e': 20,
u'gh#': 20,
u'x#u': 20,
u'z\u010dl': 20,
u'puz': 20,
u'box': 20,
u'idg': 20,
u'in\u010d': 20,
u'tyz': 20,
u'bao': 20,
u'mcm': 20,
u'ooh': 20,
u'a\u0161s': 20,
u'b\xe1r': 20,
u'#dm': 20,
u'\xf6lz': 20,
u'p\xfdm': 20,
u'cne': 20,
u'o\u015f#': 20,
u'csu': 20,
u'uem': 20,
u'\xe1r\xe1': 20,
u'icr': 20,
u'#td': 20,
u'd\xfdl': 19,
u'\xedpi': 19,
u'osh': 19,
u'\u0161a#': 19,
u'\u0161at': 19,
u'hex': 19,
u'bj\xed': 19,
u'd\u0148a': 19,
u'aev': 19,
u'\u016fni': 19,
u'wad': 19,
u'z\xf3#': 19,
u'noa': 19,
u'e+#': 19,
u'#w#': 19,
u'a\u010ft': 19,
u'c\xedi': 19,
u'b\xf3#': 19,
u'y\xf6r': 19,
u'oid': 19,
u'vhl': 19,
u'j\xf3z': 19,
u'sby': 19,
u'mmo': 19,
u'dg#': 19,
u'jbr': 19,
u'rgn': 19,
u'gm#': 19,
u'kuv': 19,
u'muf': 19,
u'gi\xe1': 19,
u'gtc': 19,
u'shu': 19,
u'\xe1lv': 19,
u'\u0142\u0119s': 19,
u'z\xedz': 19,
u'\xfdm\u017e': 19,
u'jsc': 19,
u'ixe': 19,
u'bd\xed': 19,
u'ugr': 19,
u'yg\u0159': 19,
u'anq': 19,
u'olg': 19,
u'um\u0159': 19,
u'\u011b\u0161\u0148': 19,
u'ttg': 19,
u'\u016fza': 19,
u'dd#': 19,
u'rxi': 19,
u'zml': 19,
u'xac': 19,
u'n\xe1\u010d': 19,
u'ag\xe1': 19,
u'zei': 19,
u'\xed\u0159s': 19,
u'\xed\u0159o': 19,
u'\u016flm': 19,
u'u\xedn': 19,
u'\u010f#\u0159': 19,
u'\u0159kl': 19,
u'ytt': 19,
u'sth': 19,
u'boa': 19,
u'\u0161va': 19,
u'c\u0142a': 19,
u'\u0159e\u0159': 19,
u'ulg': 19,
u'nyl': 19,
u'r\xedj': 19,
u'v\xfa#': 19,
u'v\u016fz': 19,
u'#df': 19,
u'iks': 19,
u'\xfans': 19,
u'peh': 19,
u'\xfcti': 19,
u'mkr': 19,
u'a\u0142\u0119': 19,
u'f#l': 19,
u'lf#': 19,
u'vry': 19,
u'\xfd\u010di': 19,
u'ocr': 19,
u'oc\u0142': 19,
u'r\u0161k': 19,
u'gkl': 19,
u'unf': 19,
u'pv\xfa': 19,
u'wne': 19,
u'us\xe1': 19,
u'bbu': 19,
u'e\xed#': 18,
u'nzn': 18,
u'\u0161\xedk': 18,
u'\xfapo': 18,
u'k\xe1j': 18,
u'je\u010f': 18,
u'aae': 18,
u'\u0165\xe1l': 18,
u'\u011bd\xe9': 18,
u'evg': 18,
u'pf#': 18,
u'w#k': 18,
u'szi': 18,
u'db\xfd': 18,
u'\xf6m#': 18,
u'hrl': 18,
u'k\xe9r': 18,
u'wai': 18,
u'cai': 18,
u'cag': 18,
u'pn#': 18,
u'dz#': 18,
u'iya': 18,
u'os\u0148': 18,
u't\u0161e': 18,
u'\xe1\u010ds': 18,
u'om\u0161': 18,
u'gvi': 18,
u'axh': 18,
u'vpl': 18,
u'\u0142o#': 18,
u'pco': 18,
u'cus': 18,
u'oeu': 18,
u'chb': 18,
u'\xe9ms': 18,
u'oaq': 18,
u'ku\u010d': 18,
u'\xfcrk': 18,
u'muc': 18,
u'+#p': 18,
u'#\xe1l': 18,
u'hsk': 18,
u'vtr': 18,
u's\xe1\u0159': 18,
u'z\xedk': 18,
u'suy': 18,
u'iea': 18,
u'ugg': 18,
u'siw': 18,
u'oug': 18,
u'#mf': 18,
u'nd\u0161': 18,
u'uyt': 18,
u'ttw': 18,
u'viw': 18,
u'le\u0165': 18,
u'o\u0161#': 18,
u'\xf6st': 18,
u'il\u0161': 18,
u'm\xe0#': 18,
u'uxe': 18,
u'\u0159ti': 18,
u'rul': 18,
u'z#\xe1': 18,
u'ycu': 18,
u'l\xe1p': 18,
u'bay': 18,
u'\u011b\u010di': 18,
u'nyr': 18,
u'f\xe1h': 18,
u'\u017eah': 18,
u'b\xfdk': 18,
u'\xe1\u0159\u0161': 18,
u'jda': 18,
u'ip\xe1': 18,
u'ko\u0148': 18,
u'\xf3du': 18,
u'h\u0159\xe1': 18,
u'pee': 18,
u'af\xe1': 18,
u'aff': 18,
u'afy': 18,
u'f#i': 18,
u'\xfd#\xed': 18,
u'ysf': 18,
u'u\u0159k': 18,
u'lbi': 18,
u'ti\u0165': 18,
u'\u0161r\xe1': 18,
u'ig\xe1': 18,
u'ef\xe9': 18,
u'hib': 18,
u'\xfdvk': 18,
u'ta\u0142': 18,
u'tay': 18,
u'vch': 18,
u'vao': 18,
u'h#\xed': 18,
u'av\u010d': 18,
u't\xedl': 17,
u'\u015fov': 17,
u'x#f': 17,
u'rld': 17,
u'\xed\u0161k': 17,
u'ir\xfd': 17,
u'zr#': 17,
u'hep': 17,
u'vlh': 17,
u'rfm': 17,
u'nry': 17,
u'vge': 17,
u'evk': 17,
u'w#p': 17,
u'szt': 17,
u'sz\xe9': 17,
u'a#\xe8': 17,
u'da\u017e': 17,
u'udl': 17,
u'\xe9bo': 17,
u'noi': 17,
u'nox': 17,
u'\xe9f\u016f': 17,
u'psd': 17,
u'tm\xe1': 17,
u'\u0161un': 17,
u'pww': 17,
u'lki': 17,
u'wij': 17,
u'#ww': 17,
u'i\u0159\xed': 17,
u'#jh': 17,
u'\u0161ab': 17,
u'hns': 17,
u'auo': 17,
u'mea': 17,
u'r\u017em': 17,
u'\xf3ni': 17,
u'yed': 17,
u'k\xfd\u0161': 17,
u'iur': 17,
u'\u010d\xedp': 17,
u'zh\xfd': 17,
u'\u0148k\u016f': 17,
u'#fp': 17,
u'iis': 17,
u'qu\xed': 17,
u'#\xf6r': 17,
u'mib': 17,
u'sfu': 17,
u'c\u016fv': 17,
u'too': 17,
u'bsb': 17,
u'ppl': 17,
u'kyi': 17,
u'u\u010d#': 17,
u'eh\u0159': 17,
u'suk': 17,
u'suu': 17,
u'iei': 17,
u'aat': 17,
u'\xfdty': 17,
u'dlr': 17,
u'vax': 17,
u'ds\xe1': 17,
u'dsa': 17,
u'ztl': 17,
u'ssb': 17,
u'u#\xf6': 17,
u'\u016fls': 17,
u'tpw': 17,
u'xae': 17,
u'agg': 17,
u'unr': 17,
u'phy': 17,
u'\u0161o#': 17,
u'zeg': 17,
u'\xed\u0159l': 17,
u'\xedty': 17,
u'\u017e\xedl': 17,
u'dsh': 17,
u'r\xfdl': 17,
u'\u010f#l': 17,
u'htu': 17,
u'#l\u016f': 17,
u'\xe9\u0161s': 17,
u'\xfach': 17,
u'\u0159kn': 17,
u'ytc': 17,
u'#\u017ed': 17,
u'\xe1sc': 17,
u'nmo': 17,
u'egl': 17,
u'fdl': 17,
u'ecl': 17,
u'be\xfa': 17,
u'fga': 17,
u'\u017ei\u0107': 17,
u'lu\u0148': 17,
u't\u0159t': 17,
u'upm': 17,
u'\xf3l#': 17,
u'isf': 17,
u'baa': 17,
u'\xe9l\xe8': 17,
u'pae': 17,
u'g\u0159i': 17,
u'sde': 17,
u'lyk': 17,
u'\xe9ov': 17,
u'na\u010f': 17,
u'thm': 17,
u'nup': 17,
u'\xf3s\xe1': 17,
u'\xe1n\xe9': 17,
u'i\xf6l': 17,
u'ssk': 17,
u'e\xfat': 17,
u'\xe9b#': 17,
u'igf': 17,
u'fab': 17,
u'tlm': 17,
u'hi\xf6': 17,
u'\u016f\u017e#': 17,
u'gr#': 17,
u'ta\u015f': 17,
u'ndz': 17,
u'yb\u0159': 17,
u'\xe1rl': 17,
u'\u0148up': 16,
u'tzo': 16,
u'jib': 16,
u'vkr': 16,
u'heb': 16,
u'sc#': 16,
u's\xe9v': 16,
u'ug#': 16,
u'#g\xf3': 16,
u'kn\xe1': 16,
u'\xf3z\xe1': 16,
u'o\xfas': 16,
u'lzu': 16,
u'w#z': 16,
u'aet': 16,
u'wee': 16,
u'inm': 16,
u'erj': 16,
u'k\xe9b': 16,
u'i\u0161\xe9': 16,
u'uzr': 16,
u'\xfdne': 16,
u'eif': 16,
u'ym\u0159': 16,
u'hji': 16,
u'dvd': 16,
u'lks': 16,
u'#j\xf6': 16,
u'gy\u0148': 16,
u'u\u0148s': 16,
u'ayt': 16,
u'snc': 16,
u'e#\u010f': 16,
u'gec': 16,
u'oie': 16,
u'nae': 16,
u'\u0159i\u010f': 16,
u'nsr': 16,
u'#ft': 16,
u'iit': 16,
u'mu\xf1': 16,
u'\xe9an': 16,
u'm\xe1\u010d': 16,
u'u\u010dm': 16,
u'g#f': 16,
u'#r\xf3': 16,
u'cda': 16,
u'\xe1pn': 16,
u'r\u010da': 16,
u'\xf3zs': 16,
u'ndb': 16,
u'myj': 16,
u'dsv': 16,
u'fox': 16,
u'say': 16,
u'n\xedp': 16,
u'mld': 16,
u'm\xfdv': 16,
u'uet': 16,
u'ihj': 16,
u'gly': 16,
u'laa': 16,
u'\u011bjs': 16,
u'nir': 16,
u'r\xfd\u0161': 16,
u't\u016fv': 16,
u'r\xfdz': 16,
u'hty': 16,
u'ek\u0159': 16,
u'\u017e\xe1\u0159': 16,
u'yt\u010d': 16,
u'yn\xe9': 16,
u'#dz': 16,
u'oxu': 16,
u'bep': 16,
u'j\u010d\xe1': 16,
u'orz': 16,
u'r\xe1f': 16,
u'vje': 16,
u'r\xedh': 16,
u'r\xe4s': 16,
u'abn': 16,
u'ded': 16,
u'fut': 16,
u'ewy': 16,
u'dab': 16,
u'j\xe1s': 16,
u'u\u0159#': 16,
u'ray': 16,
u'l\u0161e': 16,
u'd\xe9j': 16,
u'\xe8si': 16,
u'e\xfav': 16,
u'yns': 16,
u'izy': 16,
u'izz': 16,
u'rir': 16,
u'my\u010d': 16,
u'vaa': 16,
u'\u011b\u0161k': 16,
u'p\xe9h': 15,
u'dgo': 15,
u'\xe1zs': 15,
u'\xe4ss': 15,
u'lri': 15,
u't\xe9s': 15,
u'zr\u0148': 15,
u'ivt': 15,
u'g\xf6t': 15,
u'nrw': 15,
u'\u0161##': 15,
u'r\xf3m': 15,
u'e\u013ea': 15,
u'aey': 15,
u'kr\u0161': 15,
u'\u0159\xe1b': 15,
u'\u0165in': 15,
u'gny': 15,
u'lca': 15,
u'h\xe1\u010d': 15,
u'cac': 15,
u'ffr': 15,
u'hve': 15,
u'yvu': 15,
u'\u013ea#': 15,
u'psc': 15,
u'\u0148#g': 15,
u'kaf': 15,
u'\xfduc': 15,
u'i\u0159k': 15,
u'\xe7y#': 15,
u'a\u010fu': 15,
u'sja': 15,
u'gsm': 15,
u'om\u017e': 15,
u'#n#': 15,
u'fsk': 15,
u'\u011b\u0161t': 15,
u'gea': 15,
u'aj\xfa': 15,
u'oig': 15,
u'kmh': 15,
u'pch': 15,
u'\xe4ne': 15,
u's\xe9a': 15,
u'#f\xe4': 15,
u'y\u017ee': 15,
u'u\u0161s': 15,
u'\u011bky': 15,
u'vp#': 15,
u'cly': 15,
u'\xe1h\u016f': 15,
u'pp\xe4': 15,
u'squ': 15,
u'ro\u015f': 15,
u'\u010daj': 15,
u'fco': 15,
u'\u011br\xe1': 15,
u'lhk': 15,
u'#rv': 15,
u'l\u016f\u017e': 15,
u'cdm': 15,
u'ixo': 15,
u'bde': 15,
u'\u0161t\xfd': 15,
u'#\u010dc': 15,
u'dwi': 15,
u'\xfdtr': 15,
u'ltz': 15,
u'jop': 15,
u'nd\xe9': 15,
u's#+': 15,
u'ho\u010f': 15,
u'dsp': 15,
u'\xf1iz': 15,
u'\u015fia': 15,
u'g\xe9n': 15,
u'zpy': 15,
u'\xe9ty': 15,
u'ft\xed': 15,
u'n#\xed': 15,
u'\u011b\u0148m': 15,
u'f\xe4r': 15,
u'rdj': 15,
u'npe': 15,
u'u\xf1i': 15,
u'mtu': 15,
u'\xe9\u0161\u0165': 15,
u'spp': 15,
u'aor': 15,
u'\xedp#': 15,
u'jt\xed': 15,
u'jhe': 15,
u'\xe9de': 15,
u'wov': 15,
u'\u010dlr': 15,
u'p\xe4n': 15,
u'hlm': 15,
u'\xe9sk': 15,
u'tge': 15,
u'dmu': 15,
u'upf': 15,
u'\xe4rm': 15,
u'jhr': 15,
u'ulc': 15,
u'kke': 15,
u'\u017eam': 15,
u'\u016fpl': 15,
u'de\xdf': 15,
u'fum': 15,
u'peo': 15,
u'g\xf3l': 15,
u'lyp': 15,
u'e\u013e#': 15,
u'e\u0159\xe1': 15,
u'vr#': 15,
u'#xi': 15,
u'bym': 15,
u'ysc': 15,
u'iog': 15,
u'\xe1j#': 15,
u'p\xf3z': 15,
u'xfa': 15,
u'nun': 15,
u'nua': 15,
u'jyl': 15,
u'n\u016fs': 15,
u'mss': 15,
u'an\u0103': 15,
u'#p#': 15,
u'jub': 15,
u'rrs': 15,
u'fe+': 15,
u'#mp': 15,
u'j\xfap': 15,
u'fr\xe9': 14,
u'm\u0161i': 14,
u'#c\xe9': 14,
u't#x': 14,
u'\xf8me': 14,
u'#fn': 14,
u'cry': 14,
u'\u011bdl': 14,
u'\u011bd\xe1': 14,
u'iju': 14,
u'tr\xf3': 14,
u'tdt': 14,
u'\u0103o#': 14,
u'w#j': 14,
u'dbe': 14,
u'aee': 14,
u'gby': 14,
u'\u016fjc': 14,
u'rb\xed': 14,
u'\xf6me': 14,
u'a#q': 14,
u'nks': 14,
u'er\xf6': 14,
u'\u011bl\u0148': 14,
u'ob\u0148': 14,
u'uzy': 14,
u'mrk': 14,
u'ps\xed': 14,
u'am\xfa': 14,
u'dzu': 14,
u'loy': 14,
u'rji': 14,
u'\xed#q': 14,
u'uv\xfd': 14,
u'ncb': 14,
u'o\u010fm': 14,
u'ees': 14,
u'l\u010dk': 14,
u'n\u0103#': 14,
u'jn\u016f': 14,
u'djy': 14,
u'yas': 14,
u'ov\u010d': 14,
u'r\u017ek': 14,
u'ki\u010d': 14,
u'omf': 14,
u'fn\u011b': 14,
u'#nx': 14,
u'\xf6ry': 14,
u'e#y': 14,
u'\xfd\u010d\xed': 14,
u'\xfcmu': 14,
u'gob': 14,
u'#hb': 14,
u'uf#': 14,
u'vlk': 14,
u'sfa': 14,
u't\xe2n': 14,
u'kuf': 14,
u'cl\u016f': 14,
u'elp': 14,
u'viu': 14,
u'iak': 14,
u'eh\xe9': 14,
u'#r\xf8': 14,
u'g\xe1r': 14,
u'\u017eha': 14,
u'\xfdt\u011b': 14,
u'\xf3#a': 14,
u's#\xe1': 14,
u'xii': 14,
u'lph': 14,
u'jk\u0159': 14,
u'\xf6te': 14,
u'\xfav\u010d': 14,
u'cds': 14,
u'ttr': 14,
u'sa\u010f': 14,
u'm\u016fl': 14,
u'\xfari': 14,
u'acl': 14,
u'acp': 14,
u'pd#': 14,
u'r\xf8m': 14,
u'\xe9hl': 14,
u'jc\xed': 14,
u'\xe4fn': 14,
u't\xfcr': 14,
u'etw': 14,
u'niy': 14,
u'#nn': 14,
u'yk\u0159': 14,
u'ytm': 14,
u'#\u017ev': 14,
u'vu\u010d': 14,
u'cce': 14,
u'asf': 14,
u'st\xfa': 14,
u'oxy': 14,
u'vb\xe1': 14,
u'zyn': 14,
u'dm\xe1': 14,
u'\xf3lu': 14,
u'isz': 14,
u'n#y': 14,
u'\u010d#\u017e': 14,
u'zuh': 14,
u'jha': 14,
u'jhu': 14,
u'ulr': 14,
u'b\xfd\u010d': 14,
u'\xe1b\u016f': 14,
u'\u016fpr': 14,
u'i\xfas': 14,
u'csr': 14,
u'\xe1f#': 14,
u'ewt': 14,
u'lyg': 14,
u'a\u0159u': 14,
u'afk': 14,
u'u\u0159o': 14,
u'd\xed\u010d': 14,
u'a\u0161c': 14,
u'p#\u0161': 14,
u'ajj': 14,
u'zb\u0159': 14,
u'##+': 14,
u't\xfar': 14,
u'##w': 14,
u'ejf': 14,
u'gv#': 14,
u'h\xe4f': 14,
u'j\u010d\xed': 14,
u'j\xfa\u017e': 14,
u'j\xfan': 14,
u'\u017eny': 14,
u'\u0159bu': 13,
u'x#i': 13,
u'irp': 13,
u'm\u0161e': 13,
u'os\xfa': 13,
u'\u0161aj': 13,
u'\xbac#': 13,
u'gwi': 13,
u'#\xf3d': 13,
u'je\u0159': 13,
u'\xe9j\xe0': 13,
u'aam': 13,
u'nro': 13,
u'evp': 13,
u'jai': 13,
u'jaf': 13,
u'wez': 13,
u'nvo': 13,
u'\xedvy': 13,
u'\u011bh\u016f': 13,
u'\u0159\xe1v': 13,
u'rnm': 13,
u'h\xe1\xed': 13,
u'kvu': 13,
u'f#\u010d': 13,
u'sod': 13,
u'noe': 13,
u'yio': 13,
u'd\xeat': 13,
u'\xe9fo': 13,
u'\u0159iu': 13,
u'puv': 13,
u'\xeatr': 13,
u'cev': 13,
u'o\u010ft': 13,
u'urf': 13,
u'urq': 13,
u'\u016fz\xe1': 13,
u'ovm': 13,
u'om\xf3': 13,
u'\u010d\xe1\u0159': 13,
u'lsy': 13,
u'c\xe1r': 13,
u'uju': 13,
u'o#q': 13,
u'j\xe0#': 13,
u'\xfd\u0159e': 13,
u'vly': 13,
u'#f\xed': 13,
u'iid': 13,
u'#m\u0159': 13,
u'\u016fld': 13,
u'rc\xe1': 13,
u'\u011bkr': 13,
u's\xedv': 13,
u'vpo': 13,
u'imr': 13,
u'tow': 13,
u'\u010de\u0165': 13,
u'+#z': 13,
u'ppc': 13,
u's\xe1r': 13,
u'g#+': 13,
u'ulf': 13,
u'yur': 13,
u'fca': 13,
u'g#\u017e': 13,
u'ehi': 13,
u'v\xe9w': 13,
u'suo': 13,
u'pt\u0161': 13,
u'oy#': 13,
u'#\u200b\u200b': 13,
u'aab': 13,
u'n\u0161s': 13,
u'\u010d#f': 13,
u'yo#': 13,
u'joo': 13,
u'rpy': 13,
u'vab': 13,
u'atg': 13,
u'khu': 13,
u'#aq': 13,
u'ttu': 13,
u'o\u0148o': 13,
u'\u015fes': 13,
u'ddy': 13,
u'acn': 13,
u'acs': 13,
u'\u010fat': 13,
u'rdw': 13,
u'ziz': 13,
u'#ye': 13,
u'ihi': 13,
u'mh#': 13,
u'o\u0161\xe1': 13,
u'x#h': 13,
u'\xe9wa': 13,
u'#\xbac': 13,
u'o\u010du': 13,
u'as\xe9': 13,
u'pue': 13,
u'lir': 13,
u'na\u0148': 13,
u'\u016bna': 13,
u'kr\u010d': 13,
u'dmk': 13,
u'\xe1\u010fa': 13,
u'yga': 13,
u'\u013eov': 13,
u'#lg': 13,
u'l\xe1\u010f': 13,
u'\u016ftk': 13,
u'di\xf3': 13,
u'l\xedp': 13,
u'\xe1b\xe1': 13,
u'f\xe1t': 13,
u'cs\xe1': 13,
u'hy\u0148': 13,
u'\xf3da': 13,
u'zja': 13,
u'yp\xed': 13,
u'#dd': 13,
u'peg': 13,
u'sdp': 13,
u'lya': 13,
u'sih': 13,
u'iof': 13,
u'vr\u016f': 13,
u'\u011bso': 13,
u'\xe7ov': 13,
u'pi\xf3': 13,
u'bum': 13,
u'mw#': 13,
u'l\u0161u': 13,
u'\xeds#': 13,
u'\xe0#v': 13,
u'prz': 13,
u'toy': 13,
u'#pd': 13,
u'fah': 13,
u'#d\xea': 13,
u'\u016f\u017e\xed': 13,
u'\u010d\xed\u0161': 13,
u'#k\xf6': 13,
u'fib': 12,
u'irg': 12,
u'b\xedv': 12,
u'sow': 12,
u'osz': 12,
u'onj': 12,
u'jtu': 12,
u'hzo': 12,
u'dza': 12,
u'r\xf3b': 12,
u'gbt': 12,
u'lgb': 12,
u'lg\xe1': 12,
u'h\xedv': 12,
u'vs#': 12,
u'\xf4ne': 12,
u'nkv': 12,
u'\xe0#p': 12,
u'rnk': 12,
u'f\xf3b': 12,
u'gu\u0161': 12,
u'j\u010ft': 12,
u'mr\u016f': 12,
u'yin': 12,
u'\xfdnu': 12,
u'ffn': 12,
u'eix': 12,
u'dzl': 12,
u'jri': 12,
u'ce\u010d': 12,
u'nct': 12,
u'yms': 12,
u'ymf': 12,
u'\u0165at': 12,
u'st\xe2': 12,
u'lkr': 12,
u'm#x': 12,
u'wid': 12,
u'jny': 12,
u'ur\u0103': 12,
u'#w\xf6': 12,
u'm\xfam': 12,
u'tbo': 12,
u'yam': 12,
u'yah': 12,
u's\xfdc': 12,
u'eah': 12,
u'a\u010fm': 12,
u'\xe2nc': 12,
u'\u017eol': 12,
u'au\u015f': 12,
u'r\u017ea': 12,
u'\xe1\u010dn': 12,
u'fsh': 12,
u'lsh': 12,
u'ge\u0159': 12,
u'iuj': 12,
u'twe': 12,
u'u\u015fe': 12,
u'ra\xfa': 12,
u'im\u0161': 12,
u'ga\u0161': 12,
u'sy\u0159': 12,
u'ch\xed': 12,
u'mi\xe1': 12,
u'miq': 12,
u'#\xedl': 12,
u'm\xfa#': 12,
u'rcs': 12,
u'ad\u010d': 12,
u'ubp': 12,
u'nwa': 12,
u'nhl': 12,
u'a\u0144#': 12,
u'to\xe1': 12,
u'i+#': 12,
u'awl': 12,
u'j\xe1m': 12,
u'kym': 12,
u'\xe1ng': 12,
u'pps': 12,
u't\u010d\xed': 12,
u'm\xe1b': 12,
u'o\u0159o': 12,
u'pk\xe9': 12,
u'\u0161e\u0148': 12,
u'v\xe9p': 12,
u'hwe': 12,
u'ptn': 12,
u'rkn': 12,
u'#rf': 12,
u'a\xfcl': 12,
u'\u0161ez': 12,
u'ie\u0159': 12,
u'xp#': 12,
u'z\xe9k': 12,
u'v\xedh': 12,
u'hk#': 12,
u'lt\xe9': 12,
u'ouw': 12,
u'\u011bd\u016f': 12,
u'\xf3#k': 12,
u'\xf3#t': 12,
u'\xf3#s': 12,
u'jog': 12,
u'\u0161ve': 12,
u'ndj': 12,
u'\xe8#t': 12,
u'\u011bzu': 12,
u'vau': 12,
u'y\u0161#': 12,
u'voi': 12,
u'at\xf3': 12,
u'jk#': 12,
u'nxp': 12,
u'nla': 12,
u'\xe1ca': 12,
u'sao': 12,
u'u#\xe1': 12,
u'\u0148k\xe1': 12,
u'#pb': 12,
u'uig': 12,
u'ohs': 12,
u'tpe': 12,
u'se\u0159': 12,
u'rdl': 12,
u'npa': 12,
u'i\u016f#': 12,
u'jsv': 12,
u'unm': 12,
u'mho': 12,
u'ze\xed': 12,
u'o\u0161s': 12,
u'ze\u0159': 12,
u'ypj': 12,
u'yen': 12,
u'u\xeds': 12,
u'rh\xf4': 12,
u'b\xed\u010d': 12,
u'rls': 12,
u'\u0159k\xe9': 12,
u'vup': 12,
u'tdi': 12,
u'tdo': 12,
u'mpy': 12,
u'li\xe1': 12,
u'dt\xf3': 12,
u'gst': 12,
u'mab': 12,
u'yry': 12,
u'\xe9\u0159c': 12,
u'pyc': 12,
u'lu\xed': 12,
u'lug': 12,
u'#ll': 12,
u'ls\u016f': 12,
u'ty\u010f': 12,
u'#\u03b1#': 12,
u'n#q': 12,
u's\u016fv': 12,
u'op\xfd': 12,
u'diy': 12,
u'h\xf4n': 12,
u'\xe1b#': 12,
u'\u0103u#': 12,
u'bme': 12,
u'd#y': 12,
u'\u017eap': 12,
u'\xe1\u0159o': 12,
u'v\u016fn': 12,
u'r\xe9s': 12,
u'af\u0159': 12,
u'e\u013eo': 12,
u'dae': 12,
u'lfu': 12,
u'w\u011b#': 12,
u'vr\u010f': 12,
u'iok': 12,
u'ra\xfc': 12,
u'a\xfal': 12,
u'cny': 12,
u'w\xf6g': 12,
u'\u0161\u0148o': 12,
u'\u016fce': 12,
u'hi\xed': 12,
u'nd\xfa': 12,
u'wno': 12,
u'#mg': 12,
u'#mv': 12,
u'mfe': 12,
u'aad': 12,
u'owo': 12,
u't\xedz': 11,
u'fiz': 11,
u'soi': 11,
u'\xe9vu': 11,
u'jik': 11,
u'#rm': 11,
u'yzp': 11,
u'hey': 11,
u't#\xe1': 11,
u'xon': 11,
u'jeb': 11,
u'\u0161#g': 11,
u'ij\u0161': 11,
u'tsw': 11,
u'a#\xf3': 11,
u'\xe1il': 11,
u'nkn': 11,
u'udg': 11,
u'\xe9be': 11,
u'k\xe9\u0161': 11,
u'\xe0#h': 11,
u'\u011blt': 11,
u'foo': 11,
u'no\xfa': 11,
u'#xx': 11,
u'\u010dny': 11,
u'uvk': 11,
u'uv\u016f': 11,
u'kag': 11,
u'\u0161up': 11,
u'nnn': 11,
u'\u0159m\u011b': 11,
u'lk#': 11,
u'wig': 11,
u'i\u0159#': 11,
u'oxo': 11,
u'rct': 11,
u'url': 11,
u'ng\xe1': 11,
u'c#w': 11,
u'eai': 11,
u'sji': 11,
u'dr\u010d': 11,
u'r\u017et': 11,
u'na\u0144': 11,
u'i\xe1\u0161': 11,
u'ayu': 11,
u'y\xf6s': 11,
u'orh': 11,
u'\xe1\xedh': 11,
u'#bm': 11,
u'vhi': 11,
u'iud': 11,
u'k\xfdv': 11,
u'mm\xfa': 11,
u'i\xe1\u0159': 11,
u'o#x': 11,
u'u\u0165o': 11,
u'sye': 11,
u'cup': 11,
u'nsb': 11,
u'#q#': 11,
u'chz': 11,
u'mi\u0144': 11,
u'bwa': 11,
u'pge': 11,
u'mip': 11,
u'i\u0159s': 11,
u'nwo': 11,
u'zdc': 11,
u'kuw': 11,
u'\xe1hv': 11,
u'nh\xe1': 11,
u'+#k': 11,
u'+#s': 11,
u'vim': 11,
u'#ew': 11,
u'#\xe1g': 11,
u't\u010di': 11,
u'iaa': 11,
u'a\u0161a': 11,
u'i\u010ft': 11,
u'\u010dmi': 11,
u'\u010d#\xfa': 11,
u'ojr': 11,
u'r\u010ds': 11,
u'#\u010d\u0161': 11,
u'bd#': 11,
u'ouj': 11,
u'fko': 11,
u'\xeddm': 11,
u'ho\u010d': 11,
u'yul': 11,
u'mdg': 11,
u'\u017el\xe1': 11,
u'um\xfd': 11,
u'olj': 11,
u'\xf3a#': 11,
u'nlo': 11,
u'\xe1c\xe1': 11,
u'axo': 11,
u'u#+': 11,
u'\xfary': 11,
u'n\xedo': 11,
u'\xe9tl': 11,
u'pow': 11,
u'dds': 11,
u'\u011b\u0165o': 11,
u'\xe9he': 11,
u'odg': 11,
u'npo': 11,
u'etc': 11,
u'fnl': 11,
u'xec': 11,
u'\u010dyt': 11,
u'zee': 11,
u'p\u011br': 11,
u'kif': 11,
u'tey': 11,
u'ilf': 11,
u'ilh': 11,
u'bpr': 11,
u'#nh': 11,
u'g\u0142o': 11,
u'spd': 11,
u'aok': 11,
u'\u0161if': 11,
u'jty': 11,
u'vuv': 11,
u'\u010d\u0161e': 11,
u'b#g': 11,
u'nmi': 11,
u'yob': 11,
u'hha': 11,
u'li\u0107': 11,
u'asd': 11,
u'\xe4ov': 11,
u'stg': 11,
u'wea': 11,
u'ruu': 11,
u'idz': 11,
u'be\u0148': 11,
u'k\xf3r': 11,
u'dp\u0159': 11,
u'\u017ei\u010d': 11,
u'swe': 11,
u'n#\xf6': 11,
u'\xfd\u017ed': 11,
u'\u017em\xe1': 11,
u'wwe': 11,
u'bm#': 11,
u'paz': 11,
u'#tg': 11,
u'wso': 11,
u'edw': 11,
u'\u0142os': 11,
u'ik\u0159': 11,
u'bip': 11,
u'kiu': 11,
u't\xe0#': 11,
u'udm': 11,
u'ksw': 11,
u'ysn': 11,
u'cox': 11,
u'u\u017e\xe1': 11,
u'coa': 11,
u'j\xed\u010d': 11,
u'\u0107#a': 11,
u'nuo': 11,
u'p\xfdt': 11,
u'ti\xe1': 11,
u'hu\u0161': 11,
u'ylc': 11,
u'\xed\u017ek': 11,
u'\xe9zs': 11,
u'\xe9z#': 11,
u'vzc': 11,
u'\u010dok': 11,
u'uw\xfc': 11,
u'y#y': 11,
u'\xfdkr': 11,
u'arw': 11,
u'mf\xe1': 11,
u'sk\xf3': 11,
u'p\xe9m': 10,
u'fih': 10,
u'#oa': 10,
u'\xfdnm': 10,
u'osw': 10,
u'#cb': 10,
u'a\xeds': 10,
u'\u016f\u0148#': 10,
u't#y': 10,
u'ez\u010d': 10,
u'\u0119te': 10,
u'\u017e\u0161t': 10,
u'wri': 10,
u'\u017elo': 10,
u'\xe9fe': 10,
u'\xf3go': 10,
u'\xfama': 10,
u'\u0165\xe1v': 10,
u'oj\u010d': 10,
u'ijc': 10,
u'o\xfa#': 10,
u'\u0161#\u0159': 10,
u'\u0107ov': 10,
u'lzn': 10,
u'n\xe7y': 10,
u't\xe1j': 10,
u'w#c': 10,
u'szc': 10,
u'jag': 10,
u'jae': 10,
u'\xedvo': 10,
u'lfr': 10,
u'inl': 10,
u'a#\xe9': 10,
u'brk': 10,
u'nfp': 10,
u'\xfdbe': 10,
u'\xe3a#': 10,
u'bny': 10,
u'aib': 10,
u'zcz': 10,
u'y#\xe1': 10,
u'jv\u010d': 10,
u'ibb': 10,
u'cae': 10,
u'\xf3ro': 10,
u'no\u015b': 10,
u'psh': 10,
u'k\u010do': 10,
u'#sg': 10,
u'ce\u0161': 10,
u'tfe': 10,
u'l\u010dy': 10,
u'\u0161um': 10,
u'wim': 10,
u'rs\xfd': 10,
u'ngw': 10,
u'ngp': 10,
u'yag': 10,
u'rb\u0161': 10,
u'c\xedp': 10,
u'\u011b\u017ea': 10,
u'\xf6ff': 10,
u'fse': 10,
u'ayy': 10,
u'bo\u0161': 10,
u'or\xe9': 10,
u'c\xe1c': 10,
u'dkd': 10,
u'oio': 10,
u'\xf3ba': 10,
u'p\xf3d': 10,
u'iul': 10,
u'syd': 10,
u'cum': 10,
u'ufl': 10,
u'nsj': 10,
u'nsd': 10,
u'zhu': 10,
u'#fs': 10,
u'eue': 10,
u'sf\xe1': 10,
u'u\u0161m': 10,
u'oad': 10,
u'oac': 10,
u'oap': 10,
u'ubh': 10,
u'bsc': 10,
u'#\xe1d': 10,
u'gif': 10,
u'gi\xf3': 10,
u'ky\xf6': 10,
u'vt\xe1': 10,
u'yu#': 10,
u'z\xed\u010d': 10,
u'rk\u016b': 10,
u'\u0161p\xed': 10,
u'su\u010f': 10,
u'k\u016f\u0148': 10,
u'jss': 10,
u'fg\xe1': 10,
u'n\u0161a': 10,
u'\u0161\xe1h': 10,
u'ltd': 10,
u'#g\u0142': 10,
u'jo\xe3': 10,
u'\u010die': 10,
u'\u010da\u0159': 10,
u'fku': 10,
u'myt': 10,
u'myc': 10,
u'at\xfc': 10,
u'alw': 10,
u'\u015b\u0107#': 10,
u'kny': 10,
u'lpy': 10,
u'm\xe1j': 10,
u'ztv': 10,
u'n\xe9o': 10,
u'\xe9po': 10,
u'dh\xed': 10,
u'i\u0107o': 10,
u'k\u016bn': 10,
u'#a+': 10,
u'iph': 10,
u'ipn': 10,
u'fpa': 10,
u'vey': 10,
u'ssm': 10,
u'\u016flu': 10,
u'kld': 10,
u'\xe1ga': 10,
u'\xe1gu': 10,
u'tpa': 10,
u'ctr': 10,
u'se\u017e': 10,
u'cte': 10,
u'm\xfd\u017e': 10,
u'\xfaka': 10,
u'phu': 10,
u'tl\u016f': 10,
u'\u0159k\xe1': 10,
u'nbr': 10,
u'\u011bj\u016f': 10,
u'\xb0#c': 10,
u'epg': 10,
u'g\u0142a': 10,
u'gho': 10,
u'y\u010ft': 10,
u'x#l': 10,
u'ip\u0159': 10,
u'wca': 10,
u'\u017eu\u0161': 10,
u'\xe9d\xe9': 10,
u'dt\xe9': 10,
u'hbo': 10,
u'ru\xe1': 10,
u'rua': 10,
u'z#y': 10,
u'm\xedz': 10,
u'tgr': 10,
u'jlo': 10,
u'\xf3le': 10,
u'#l\xf6': 10,
u'ty\u0148': 10,
u'u\xe1n': 10,
u'v\u011bh': 10,
u'ooa': 10,
u'\u0142as': 10,
u'\u011b\u010d\u016f': 10,
u's\xf3g': 10,
u'\xfcck': 10,
u'\u0159no': 10,
u'l\xedl': 10,
u'pab': 10,
u'r\xf6f': 10,
u'\xe9kn': 10,
u'wse': 10,
u'ab\u016f': 10,
u'#d\u0103': 10,
u'ok\xf3': 10,
u'\xf3di': 10,
u'i\u010d\xe1': 10,
u'ewe': 10,
u'feh': 10,
u'xbr': 10,
u'ufr': 10,
u'af\u016f': 10,
u'og\u0142': 10,
u'f#g': 10,
u'kse': 10,
u't\xfdn': 10,
u'io\xfa': 10,
u'\xe1jk': 10,
u'njo': 10,
u'tmy': 10,
u'tm#': 10,
u'buu': 10,
u'piu': 10,
u'\xe9ch': 10,
u'f\u0159e': 10,
u'ry\u0161': 10,
u'ocv': 10,
u'\xfad#': 10,
u'j\xe1\u0159': 10,
u'd\xe9e': 10,
u'r\u0103o': 10,
u'hue': 10,
u'rm\xfd': 10,
u'p\xe1j': 10,
u'o\u015b\u0107': 10,
u'uwa': 10,
u'i\u0161s': 10,
u'arq': 10,
u'jv\u0159': 10,
u'us\u0142': 10,
u'fec': 10,
u'p\xe9z': 9,
u'\u0148u#': 9,
u'\xed\u0161\u0165': 9,
u'j\xf6r': 9,
u'azd': 9,
u'lra': 9,
u'djo': 9,
u'm\xf3n': 9,
u'ji\u010d': 9,
u'a\xed#': 9,
u'#cg': 9,
u't#+': 9,
u'hef': 9,
u't#q': 9,
u'pbe': 9,
u'ch\u0161': 9,
u'#fb': 9,
u'\xfcla': 9,
u'psb': 9,
u'h\xe9r': 9,
u'amd': 9,
u'r\xf3#': 9,
u'ch\xe4': 9,
u'w#t': 9,
u'jaa': 9,
u'of\xf3': 9,
u'weu': 9,
u'\xf6ma': 9,
u'krb': 9,
u'krc': 9,
u'lfg': 9,
u'a#\xf6': 9,
u'\u0159\xe1p': 9,
u'brl': 9,
u'mvo': 9,
u'\u0161ma': 9,
u'\u011bl\u010d': 9,
u'y#\xe9': 9,
u'\u0117mi': 9,
u'p\u016ft': 9,
u'yim': 9,
u'\xfdny': 9,
u'mre': 9,
u'#xb': 9,
u'\xe9fr': 9,
u'a\xefd': 9,
u'wy#': 9,
u'cei': 9,
u'#wz': 9,
u'\xf3ja': 9,
u'kek': 9,
u'#jk': 9,
u'#j\xfc': 9,
u'o\xe1z': 9,
u'\u0119sa': 9,
u'xha': 9,
u'l#\xe1': 9,
u'p\u011b\u0165': 9,
u'kit': 9,
u'uk\xe9': 9,
u'mae': 9,
u'oil': 9,
u'\u0142ow': 9,
u'kmo': 9,
u'yya': 9,
u'd\xfa#': 9,
u'l\u017eu': 9,
u'gaw': 9,
u'jby': 9,
u'iir': 9,
u'\u03b5\u03b4\u03c1': 9,
u'\u011bmy': 9,
u'\u016flh': 9,
u'wyo': 9,
u'dct': 9,
u'ub\u0159': 9,
u'nwi': 9,
u'\u011bk\xe1': 9,
u'nhs': 9,
u'toz': 9,
u'jzo': 9,
u'pke': 9,
u'r\xfcc': 9,
u'+#v': 9,
u'kyc': 9,
u'ahh': 9,
u'vt\xed': 9,
u'iau': 9,
u'\xe1ll': 9,
u'al\xf3': 9,
u'suf': 9,
u'\xf1a#': 9,
u'fge': 9,
u'\u010d#\u0159': 9,
u'yl\xe1': 9,
u'ixy': 9,
u'l\xe4o': 9,
u'ed\xfd': 9,
u'\xf3#r': 9,
u'\xfdt\u016f': 9,
u'##\xba': 9,
u'##\xed': 9,
u'm\xe9r': 9,
u'\u0148st': 9,
u'\xe1th': 9,
u'mym': 9,
u'#m\xfc': 9,
u'nd\u017e': 9,
u'xi\u010d': 9,
u'\xf1ig': 9,
u'lpa': 9,
u'c\xed\u0159': 9,
u'n\xe9\u0159': 9,
u'ttl': 9,
u'sa\u010d': 9,
u'\u0103ia': 9,
u'n\xede': 9,
u'ohm': 9,
u'oht': 9,
u'zm#': 9,
u'rm\u0161': 9,
u'\u0117ni': 9,
u'xas': 9,
u'jj\xe1': 9,
u'ziu': 9,
u'm\xfdd': 9,
u'ct\xe1': 9,
u'ctp': 9,
u'zih': 9,
u'le\u010f': 9,
u'#yu': 9,
u'v\xfd\u0159': 9,
u'xej': 9,
u'ak\xf3': 9,
u'ak\xed': 9,
u'alr': 9,
u'uak': 9,
u'nt\u0161': 9,
u'kt\xf3': 9,
u'\u010f#f': 9,
u'spy': 9,
u'\u0161st': 9,
u'\u0161iz': 9,
u'tl\xfd': 9,
u'zag': 9,
u'x#\u017e': 9,
u'ucs': 9,
u'stt': 9,
u'naw': 9,
u'sns': 9,
u'awe': 9,
u'otm': 9,
u'otw': 9,
u'\xfd\u0148a': 9,
u'is\u0142': 9,
u'i\u0119t': 9,
u'\u0159\u010di': 9,
u'\u010d#\u0161': 9,
u'ul\u0117': 9,
u's\xf3l': 9,
u'r\xedr': 9,
u'#\u0161m': 9,
u'l\u0117n': 9,
u'hd#': 9,
u'l\xed\u0161': 9,
u'f\xe1r': 9,
u'\xfas\u0165': 9,
u'oj\u017e': 9,
u'\xfas#': 9,
u'hya': 9,
u'abm': 9,
u'ryk': 9,
u'\xf3de': 9,
u'\u010dkn': 9,
u'l\xe9g': 9,
u'z\xfat': 9,
u'ewc': 9,
u'\u017ee\u010d': 9,
u'f#h': 9,
u'ksa': 9,
u't\u0117m': 9,
u'e\u0159\u016f': 9,
u'svl': 9,
u'fyl': 9,
u'esg': 9,
u'pim': 9,
u'pip': 9,
u'xfo': 9,
u'enj': 9,
u'\xe9ca': 9,
u'\xfd\u010dk': 9,
u'ra\u010f': 9,
u'aj\u0161': 9,
u'l\u0161a': 9,
u'nu\u0165': 9,
u'o\u015fo': 9,
u'f\xedk': 9,
u'an\u0163': 9,
u'e\xdf#': 9,
u'\xe0ov': 9,
u'vz\xfd': 9,
u'#k\xe9': 9,
u'ynk': 9,
u'du\u017e': 9,
u'duo': 9,
u'ar\u0148': 9,
u'dps': 9,
u'h#x': 9,
u'#kf': 9,
u'e\u0148k': 9,
u'nf\xe1': 9,
u'#mw': 9,
u'bly': 9,
u'uaj': 9,
u'\u016fvt': 9,
u'\xf8re': 8,
u'czi': 8,
u'uop': 8,
u'fim': 8,
u'\xed\u0161u': 8,
u'\xedby': 8,
u'yfo': 8,
u'irc': 8,
u'fry': 8,
u'tzm': 8,
u'j\xf6n': 8,
u't\xe9g': 8,
u'\u011brb': 8,
u'kja': 8,
u'#\xf3s': 8,
u'pbt': 8,
u'\xfap\u011b': 8,
u'\u0161ep': 8,
u'crc': 8,
u'\xb0#\xb0': 8,
u'\xedji': 8,
u'\u016fn\u011b': 8,
u'\xedjs': 8,
u'\xedb\u0159': 8,
u'evd': 8,
u'w##': 8,
u'\u0161i\u010d': 8,
u'ofl': 8,
u'r\u0165#': 8,
u'b#\u0161': 8,
u'\xedvu': 8,
u'\xedvl': 8,
u'vsu': 8,
u'a#+': 8,
u'br\xfc': 8,
u'nk\xfd': 8,
u'wzn': 8,
u'emf': 8,
u'hr\xed': 8,
u'aiw': 8,
u'i\xf1e': 8,
u'\xe0##': 8,
u'ca\u0159': 8,
u'i\u0161\u0161': 8,
u'gu\u0159': 8,
u'azr': 8,
u'\xe9fk': 8,
u'hld': 8,
u'am\xf3': 8,
u'cee': 8,
u'rwi': 8,
u'cew': 8,
u'kaa': 8,
u'\xfatu': 8,
u'm#\xe1': 8,
u'es\xfa': 8,
u'#\u0151r': 8,
u'dca': 8,
u'rs\u016f': 8,
u'fpr': 8,
u'pi\xf1': 8,
u'l\xfa\u010d': 8,
u'vpa': 8,
u'rs\xe9': 8,
u'\xf3je': 8,
u'keg': 8,
u'kea': 8,
u'amh': 8,
u'tm\u011b': 8,
u'gy\u016f': 8,
u'zwa': 8,
u'ji\u0159': 8,
u'gya': 8,
u'\xed\xf1i': 8,
u'e\u017eo': 8,
u'bo\u015f': 8,
u'\u010d\xe1n': 8,
u'e#\xf3': 8,
u'\u010d\u0148\u016f': 8,
u'\u0161\u0161t': 8,
u'\u010dep': 8,
u'e\u015f#': 8,
u'vhp': 8,
u'iuo': 8,
u'o#\u010f': 8,
u'gag': 8,
u'ufn': 8,
u'oea': 8,
u'zh\xe1': 8,
u'd\u0103i': 8,
u'\xdfun': 8,
u'#\xed\xf1': 8,
u'v\xe1p': 8,
u'nhu': 8,
u'pk\xfd': 8,
u'vij': 8,
u'ppp': 8,
u'\u0111je': 8,
u'mg#': 8,
u'llg': 8,
u'd\u017e\xed': 8,
u'iaz': 8,
u'nlu': 8,
u'o\u015ft': 8,
u'o\u0159\u0161': 8,
u'al\xfa': 8,
u'oyo': 8,
u'a\u015fo': 8,
u'#rs': 8,
u'\xf6ri': 8,
u'ie\xdf': 8,
u'\xe1pk': 8,
u'ylm': 8,
u'++#': 8,
u'l\xe4m': 8,
u'i#y': 8,
u'hku': 8,
u'siu': 8,
u'\xe4ub': 8,
u'\xf1er': 8,
u'\xfdt\xed': 8,
u'\xf3#p': 8,
u'\xe1t\u010d': 8,
u'nd\u0159': 8,
u'ho\u0165': 8,
u'mly': 8,
u'y\u0161\u016f': 8,
u'voo': 8,
u'e\u0161r': 8,
u'mdl': 8,
u'xio': 8,
u'\u0161ch': 8,
u'\u010dud': 8,
u'#ax': 8,
u'tki': 8,
u'ydd': 8,
u'ip\xf3': 8,
u'\xe1cv': 8,
u'v#y': 8,
u'gdf': 8,
u'rx#': 8,
u'fth': 8,
u'jsa': 8,
u'xak': 8,
u'\xfakr': 8,
u'\xe4mu': 8,
u'ziw': 8,
u'npp': 8,
u'le\u0159': 8,
u'#yv': 8,
u'llb': 8,
u'\xfddl': 8,
u'n\xe7a': 8,
u'la\u0148': 8,
u'uaz': 8,
u'nt\xf3': 8,
u'ni\u0119': 8,
u't\u016fj': 8,
u'#\u0111j': 8,
u'ek\xfd': 8,
u'\u010f#\xfa': 8,
u'eks': 8,
u'aoc': 8,
u'gbe': 8,
u'b\xed\u0159': 8,
u'\u015fti': 8,
u'z\u010dt': 8,
u'kxi': 8,
u'uxh': 8,
u'\u016fjm': 8,
u'\xf3t\u011b': 8,
u'mpb': 8,
u'li\xe9': 8,
u'stp': 8,
u'gp#': 8,
u'r\u010ft': 8,
u'naa': 8,
u'd\u010dt': 8,
u'aw\u011b': 8,
u'jzj': 8,
u'zys': 8,
u'xle': 8,
u'tgi': 8,
u'dml': 8,
u'u\u016f#': 8,
u'yge': 8,
u'#lj': 8,
u'#ls': 8,
u'is\xe9': 8,
u'isj': 8,
u'isb': 8,
u'u\xe1r': 8,
u'h\u0161s': 8,
u'mc#': 8,
u'wwf': 8,
u'zut': 8,
u'zui': 8,
u'kk\xe1': 8,
u'ixa': 8,
u'iwa': 8,
u'pa\xed': 8,
u'bmw': 8,
u'pax': 8,
u'tu\u016f': 8,
u'f\xe1n': 8,
u'\xfasv': 8,
u'rr\xe1': 8,
u'hda': 8,
u'a\u0161\xe9': 8,
u'csd': 8,
u'ryd': 8,
u'\xf3d#': 8,
u'fus': 8,
u'ik\u010d': 8,
u'bif': 8,
u'ufu': 8,
u'kid': 8,
u'gc#': 8,
u'\u017e#\xed': 8,
u'm\xfch': 8,
u'udw': 8,
u're\u015f': 8,
u'f#\u017e': 8,
u'f#\u0159': 8,
u't\u0117#': 8,
u'ioi': 8,
u'nju': 8,
u'buf': 8,
u'bue': 8,
u'j\xedn': 8,
u'd\xe1\u010d': 8,
u'f\xe9m': 8,
u'nuv': 8,
u'\xf3sc': 8,
u'\u0161\xe1r': 8,
u'e\xdfu': 8,
u'dyr': 8,
u'dy\u0148': 8,
u'\xe9zo': 8,
u'ju\xe1': 8,
u'fay': 8,
u'\xf3w#': 8,
u'guo': 8,
u'due': 8,
u'lje': 8,
u'lju': 8,
u'#ds': 8,
u'#tn': 8,
u'\u0159n\xe9': 8,
u'cfp': 8,
u's\xfas': 8,
u'#kk': 8,
u'bbl': 8,
u'#\u010fa': 8,
u'mfy': 8,
u'\u017enu': 8,
u'lvs': 8,
u'p\xe9d': 7,
u'czw': 7,
u'\xed\u0161s': 7,
u'#og': 7,
u'hay': 7,
u'mb\u011b': 7,
u'\u0144#a': 7,
u'sog': 7,
u'ufg': 7,
u'\xe9vk': 7,
u'os\u0165': 7,
u'uks': 7,
u'\u0161a\u0161': 7,
u'yz#': 7,
u'jt#': 7,
u'heh': 7,
u'sc\u016f': 7,
u'\u0161e\u010d': 7,
u'gfl': 7,
u'k\xe1\u010d': 7,
u'\u011bzm': 7,
u'rfs': 7,
u'rfl': 7,
u'rf\xe9': 7,
u'a\xf1a': 7,
u'chj': 7,
u'ij\u010f': 7,
u'\xe8ge': 7,
u'sgp': 7,
u'w#m': 7,
u'w#o': 7,
u'aeg': 7,
u'aei': 7,
u'wef': 7,
u'yr\u0161': 7,
u'vsi': 7,
u'h\xed#': 7,
u'yr\xe9': 7,
u'a#\xe0': 7,
u'br\u016f': 7,
u'\u0163i#': 7,
u'hrb': 7,
u'haf': 7,
u'\xe0#j': 7,
u'rnc': 7,
u'rnw': 7,
u'fbi': 7,
u'\xe1mh': 7,
u'soh': 7,
u'\u011bse': 7,
u'\xf3rn': 7,
u'sox': 7,
u'mrs': 7,
u'lfs': 7,
u'k\u010d\xed': 7,
u'sri': 7,
u'\u0159io': 7,
u'v\u010d\xe1': 7,
u'vea': 7,
u'a++': 7,
u'cey': 7,
u'uv\u0161': 7,
u'm#\xe9': 7,
u'n\xf3t': 7,
u'hjo': 7,
u'os\u0142': 7,
u'\u017eke': 7,
u'\u03c3\u03c4\u03b9': 7,
u'ur\xe9': 7,
u'\xedev': 7,
u'tbc': 7,
u'jji': 7,
u'gyj': 7,
u'un\xed': 7,
u'\xedah': 7,
u'\xe1\u010dr': 7,
u'yes': 7,
u'k\xf3z': 7,
u'#n\xf3': 7,
u'\u03c2##': 7,
u'ajb': 7,
u'boi': 7,
u'ayn': 7,
u'lsr': 7,
u'u\xfar': 7,
u'jfl': 7,
u'#bc': 7,
u'iua': 7,
u'eyk': 7,
u'sbp': 7,
u'o#\xb0': 7,
u'i\u017ea': 7,
u'gak': 7,
u'syf': 7,
u'jbu': 7,
u'jba': 7,
u'ldc': 7,
u'\xedim': 7,
u's\xe9g': 7,
u'\xf6ns': 7,
u'mi\u015f': 7,
u'\u015fbo': 7,
u'ubt': 7,
u'\xedu#': 7,
u'\xe5la': 7,
u'i\u0159\u010d': 7,
u'mua': 7,
u'xxo': 7,
u'\u011blm': 7,
u'ah\u0159': 7,
u'hsi': 7,
u'soj': 7,
u'gij': 7,
u'll\xf3': 7,
u'ulp': 7,
u'\u03b4\u03c1\u03af': 7,
u'vtl': 7,
u'gtp': 7,
u'u\u010dc': 7,
u'iap': 7,
u'\u016f\u0161k': 7,
u's\xe1\u010d': 7,
u'\xe1lp': 7,
u'uyh': 7,
u'o\u0159c': 7,
u'l\u0148m': 7,
u'\u011br\u010d': 7,
u'eh\u0148': 7,
u'v\xe9c': 7,
u'v\xe9e': 7,
u'#\xe5l': 7,
u'oyd': 7,
u'\u03b1\u03c3\u03b7': 7,
u'gpo': 7,
u'#rd': 7,
u'\xe9#x': 7,
u'js#': 7,
u'm\xedo': 7,
u'aap': 7,
u'tgv': 7,
u'\u03bd\u03b5\u03b4': 7,
u'#i+': 7,
u'bh#': 7,
u'g\xe1d': 7,
u'cts': 7,
u'ugl': 7,
u'hka': 7,
u'#\u010dm': 7,
u'#\u010du': 7,
u'i\xe8g': 7,
u'ou\xfa': 7,
u'ltn': 7,
u'wha': 7,
u't\xf3#': 7,
u'lt\u016f': 7,
u'\xfams': 7,
u'##\xe8': 7,
u'rp\xe9': 7,
u'nd\xed': 7,
u'\xe8#r': 7,
u's#\xe0': 7,
u'myh': 7,
u's#x': 7,
u'yun': 7,
u'dsz': 7,
u'smk': 7,
u'atw': 7,
u'atx': 7,
u'lp\xe1': 7,
u'\u03af\u03b1\u03c3': 7,
u'prk': 7,
u'ol\u017e': 7,
u'vev': 7,
u'#\u03b2#': 7,
u'\xf8kk': 7,
u'\xfars': 7,
u'agf': 7,
u'uie': 7,
u'fte': 7,
u'\u010fas': 7,
u'\u0148ou': 7,
u'eyl': 7,
u'exh': 7,
u'exx': 7,
u'\xefd#': 7,
u'pdl': 7,
u'f\u016fm': 7,
u'ckb': 7,
u'phd': 7,
u'xei': 7,
u'xer': 7,
u'\xfddr': 7,
u'la\u0163': 7,
u'uam': 7,
u'uag': 7,
u'kiz': 7,
u'ntz': 7,
u'ntw': 7,
u'b\xe1\u0161': 7,
u'coz': 7,
u'thl': 7,
u'bps': 7,
u'ykj': 7,
u'ekd': 7,
u'aog': 7,
u'\xedp\u0159': 7,
u'x#\u0159': 7,
u'uxo': 7,
u'uxi': 7,
u'o\u010dm': 7,
u'yol': 7,
u'\xfdlk': 7,
u'o\xebl': 7,
u'\xe9dy': 7,
u'\u017eup': 7,
u'li\xe8': 7,
u'rhi': 7,
u'wom': 7,
u'cgt': 7,
u'h\u0148a': 7,
u'l\xf8k': 7,
u'fdi': 7,
u'i\u017em': 7,
u'ycy': 7,
u'be\u015f': 7,
u'jzd': 7,
u'\xe9\u0159s': 7,
u'ot\u0161': 7,
u'shy': 7,
u'uuv': 7,
u'\xe9su': 7,
u'\xe9sc': 7,
u'jly': 7,
u'#l\xe4': 7,
u'#l\xf8': 7,
u'tya': 7,
u'op\xe9': 7,
u'a\u0163i': 7,
u'fl#': 7,
u'kkr': 7,
u'xpr': 7,
u'iwe': 7,
u'd#q': 7,
u'\u017eab': 7,
u'\u03c5\u03bd\u03b5': 7,
u'abh': 7,
u'\u03c1\u03af\u03b1': 7,
u'ggc': 7,
u't\u011b\u010d': 7,
u'#dj': 7,
u'fug': 7,
u'\xf3bn': 7,
u'pep': 7,
u'lyb': 7,
u'lyi': 7,
u'afj': 7,
u'e\u015fb': 7,
u'byo': 7,
u'\xe7o#': 7,
u'\u011bsk': 7,
u'nji': 7,
u'pig': 7,
u'd\xe1\u0161': 7,
u'aif': 7,
u'd\xedj': 7,
u'd\xed\u0159': 7,
u'thn': 7,
u'\xf3#j': 7,
u'ocz': 7,
u'nuz': 7,
u'a\xfa\u010d': 7,
u'\u011bmk': 7,
u'wf#': 7,
u'lcb': 7,
u'n\u016f\u0161': 7,
u'ywa': 7,
u'\u03c4\u03b9\u03c2': 7,
u'eyj': 7,
u'nns': 7,
u'\xf3sa': 7,
u'yj\u0161': 7,
u'f\xedn': 7,
u'd\xe9c': 7,
u'o\u0148u': 7,
u'p\xe1\u017e': 7,
u'juc': 7,
u'juz': 7,
u'rvh': 7,
u'cbu': 7,
u'igs': 7,
u'fax': 7,
u'k\u0161i': 7,
u'\xfala': 7,
u'ckf': 7,
u'fky': 7,
u'y#q': 7,
u'izj': 7,
u'ar\xfa': 7,
u'hij': 7,
u'riy': 7,
u'lji': 7,
u'#t\xfc': 7,
u'nfu': 7,
u'\u03c3\u03c5\u03bd': 7,
u'mfl': 7,
u'\xfdru': 7,
u'\u016fva': 7,
u'fs#': 7,
u'#\u03c3\u03c5': 7,
u'#\u03c3\u03c4': 7,
u'\u03b9\u03c2#': 7,
u'rly': 6,
u'u\u0165m': 6,
u'p\xe9e': 6,
u'r\u016fr': 6,
u'#o\xe1': 6,
u'kfw': 6,
u'ir\xed': 6,
u'yfu': 6,
u'haw': 6,
u'k\xf6s': 6,
u'soe': 6,
u'mbp': 6,
u'j#\xed': 6,
u't\xe9\u010d': 6,
u'oeg': 6,
u't\xe9l': 6,
u't\u0103l': 6,
u'zow': 6,
u'\xfcl#': 6,
u'#qa': 6,
u'#c\u0103': 6,
u'\u0103t\u0103': 6,
u'#=#': 6,
u'dfr': 6,
u'xit': 6,
u'ugm': 6,
u'ugb': 6,
u'cru': 6,
u'\u0151ry': 6,
u'\xf3z\u0148': 6,
u'xch': 6,
u'r\xf3a': 6,
u'ch\xf3': 6,
u'lof': 6,
u'w#e': 6,
u'w#i': 6,
u'w#l': 6,
u'\u013ei#': 6,
u'jaw': 6,
u'jau': 6,
u'ucl': 6,
u'rbl': 6,
u'a#\u0163': 6,
u'\u011bha': 6,
u'p\xfa#': 6,
u'\xedv\u016f': 6,
u'inx': 6,
u'er\u017e': 6,
u'l\xf6v': 6,
u'nkm': 6,
u'br#': 6,
u'\xe9ba': 6,
u'aii': 6,
u'ai\xe9': 6,
u'gn\xe9': 6,
u'\xe0#k': 6,
u'lcz': 6,
u'wah': 6,
u'waz': 6,
u'\u011bll': 6,
u'r\u0161\u0165': 6,
u'kv#': 6,
u'h\xe1k': 6,
u'gud': 6,
u'l\xf2#': 6,
u'soa': 6,
u'yia': 6,
u'mri': 6,
u'k\xedn': 6,
u'eiw': 6,
u'eiy': 6,
u'\xe9fy': 6,
u'dz\xe1': 6,
u'\u016fbo': 6,
u'jru': 6,
u'p\u0159\u016f': 6,
u'uvz': 6,
u'ceh': 6,
u'cez': 6,
u'\xe0a#': 6,
u'vr\u0165': 6,
u'o\u010fo': 6,
u'm#y': 6,
u'm#q': 6,
u'dti': 6,
u'\u03b5\u03b9#': 6,
u'svs': 6,
u'ur\u0161': 6,
u'yaf': 6,
u's\xfdp': 6,
u'mza': 6,
u'h\u016fv': 6,
u'e\u010da': 6,
u'#z\u012b': 6,
u'\u017eon': 6,
u'mee': 6,
u'me\u0159': 6,
u'zwe': 6,
u'gym': 6,
u'vyf': 6,
u'c\u0103t': 6,
u'doi': 6,
u'omh': 6,
u'\u011b\u017et': 6,
u'kik': 6,
u'yeh': 6,
u'yev': 6,
u'#nr': 6,
u'axa': 6,
u'e\u017e\xe9': 6,
u'rnt': 6,
u'boy': 6,
u'bo\u010f': 6,
u'ay\xe1': 6,
u'snp': 6,
u'\u0159e\u0148': 6,
u'\u016fve': 6,
u'e#\xe9': 6,
u'\xe4in': 6,
u'oik': 6,
u'ujo': 6,
u'\xedmh': 6,
u'\xedge': 6,
u'fw#': 6,
u'hf#': 6,
u'\xe4#s': 6,
u'\xedz\u016f': 6,
u'pca': 6,
u'j\xf3n': 6,
u'u\u0165n': 6,
u'rgs': 6,
u'nsn': 6,
u'ld\u0159': 6,
u'#fm': 6,
u'\u03c3\u03b7#': 6,
u'iiz': 6,
u'd\u0103#': 6,
u'j\u017e\xed': 6,
u'ch\xf6': 6,
u'#\xf6v': 6,
u'sfr': 6,
u'k\xfak': 6,
u'k\xe4i': 6,
u'\u016flc': 6,
u'y\xe1#': 6,
u'oau': 6,
u'xxx': 6,
u'pk\xe1': 6,
u'xx#': 6,
u'+#d': 6,
u'+#m': 6,
u'n\u0117#': 6,
u'vib': 6,
u'hsa': 6,
u'hsl': 6,
u'fja': 6,
u'\u03b9#\u03c3': 6,
u'gtu': 6,
u'ppm': 6,
u'\xf6vi': 6,
u'\u010dac': 6,
u'fcc': 6,
u'uym': 6,
u'g#\u0165': 6,
u'o\u0159l': 6,
u'v\xe9k': 6,
u'eh\xfd': 6,
u'\xe9en': 6,
u'#jt': 6,
u'rk\xfa': 6,
u'imh': 6,
u'rtp': 6,
u'\xf2#r': 6,
u'a\xe7o': 6,
u'g\xe1z': 6,
u'ylb': 6,
u'jlu': 6,
u'\u010d#g': 6,
u'l\xe4n': 6,
u'tph': 6,
u'\xf1ez': 6,
u'\xfazn': 6,
u'\xf3#z': 6,
u't\xf3r': 6,
u'\u0159la': 6,
u'#vb': 6,
u'joi': 6,
u'rph': 6,
u'\xf6vp': 6,
u'\xf3ms': 6,
u'\xe1tm': 6,
u'\xe1t\u0148': 6,
u'l\xe0#': 6,
u'txe': 6,
u'#gg': 6,
u'\xfdpi': 6,
u'lpr': 6,
u'n\xe9z': 6,
u'nr\xe1': 6,
u'\xe4nd': 6,
u'ol\xf2': 6,
u'\u010dua': 6,
u'd\u016ft': 6,
u'khm': 6,
u'h\xf3z': 6,
u'\xed\u017e\u016f': 6,
u'\xe1c#': 6,
u'n\xedr': 6,
u'ddo': 6,
u'\u016fll': 6,
u'dd\u016f': 6,
u'\u011b\u0148t': 6,
u'\u011b\u0148#': 6,
u'\xe1g#': 6,
u'\xe1gy': 6,
u'hgv': 6,
u'tpr': 6,
u'pdc': 6,
u'ueb': 6,
u'zi\xe1': 6,
u'zi\xe8': 6,
u'vms': 6,
u'ihk': 6,
u'\xfamo': 6,
u'vma': 6,
u'etd': 6,
u'b\u011bz': 6,
u'\xe9li': 6,
u'l\xfds': 6,
u'\xedt\xfd': 6,
u'ntt': 6,
u'\u017e\xedn': 6,
u'ilj': 6,
u'thw': 6,
u'm\u017e\xed': 6,
u'\u010f#\u0161': 6,
u'#\xe0s': 6,
u'ghz': 6,
u'x#\xfa': 6,
u'lm\u011b': 6,
u'jti': 6,
u'jta': 6,
u'#\u017en': 6,
u'ccu': 6,
u'ccm': 6,
u'\xf3t\xf3': 6,
u'tdc': 6,
u'tde': 6,
u'yom': 6,
u'\xfdla': 6,
u'\xe9d#': 6,
u'\xe9dk': 6,
u'e\u017es': 6,
u'std': 6,
u'\xe9d\u016f': 6,
u'jp\u016f': 6,
u'woe': 6,
u'v\u017ei': 6,
u'n\u010d\u016f': 6,
u'jpi': 6,
u'\u012ble': 6,
u'l#x': 6,
u'z#\xe9': 6,
u'fly': 6,
u'#hf': 6,
u'ecz': 6,
u'beg': 6,
u'awr': 6,
u'shn': 6,
u'\xfds\u016f': 6,
u'ot\xf3': 6,
u'\xe9so': 6,
u'\xf3lo': 6,
u'c\xe1n': 6,
u'#ld': 6,
u'\xf3l\u016f': 6,
u'l\xe1l': 6,
u'\xe9u#': 6,
u'mcl': 6,
u'tnt': 6,
u'nyk': 6,
u'kka': 6,
u'r\xedu': 6,
u'tui': 6,
u'a\u017ea': 6,
u'al\xe0': 6,
u'\xe1\u0159c': 6,
u'gg#': 6,
u'ho\xfa': 6,
u'\xf3d\u011b': 6,
u'ik\xe4': 6,
u'ikm': 6,
u'ful': 6,
u'ly\u0148': 6,
u'sda': 6,
u'lyl': 6,
u'\xedt\u016f': 6,
u'\u0165na': 6,
u'\u0103li': 6,
u'\xe9\u010dk': 6,
u't\xe0a': 6,
u't\xe0o': 6,
u'afs': 6,
u'e\u013ei': 6,
u'm\xfcn': 6,
u'm\xfcl': 6,
u'\xe0s#': 6,
u'udt': 6,
u'ksu': 6,
u'ksk': 6,
u'#xa': 6,
u'ioo': 6,
u'nfc': 6,
u'a\u0159\u0161': 6,
u'#kw': 6,
u'\u0107#k': 6,
u'\u0107#s': 6,
u'd\xed\u0161': 6,
u'\xe9kl': 6,
u'\u011bm\xfd': 6,
u'ocd': 6,
u'gss': 6,
u'kwa': 6,
u'cn#': 6,
u'n\u016fj': 6,
u'zcu': 6,
u'\u011bpa': 6,
u'ic\u016f': 6,
u'z\u012bl': 6,
u'pr\xed': 6,
u'\xe9gl': 6,
u'hui': 6,
u'huk': 6,
u'od\u0103': 6,
u'bl\xfd': 6,
u'\u017eva': 6,
u'ss\xfa': 6,
u'e\xfa#': 6,
u's\xe3o': 6,
u'vzm': 6,
u'jum': 6,
u'cbr': 6,
u'izc': 6,
u'dud': 6,
u'lj#': 6,
u'zz\xe1': 6,
u'zzy': 6,
u'gr\xe9': 6,
u'#tt': 6,
u'i\u010dm': 6,
u'y\u0148o': 6,
u'caj': 6,
u'kbe': 6,
u'h#q': 6,
u'h\xfd\u010d': 6,
u'd\xe1i': 6,
u'h\xfd\u0159': 6,
u'\u011bsa': 6,
u'avf': 6,
u'mff': 6,
u'mfi': 6,
u'\xe9rv': 5,
u'\u015foa': 5,
u't\xed\xe1': 5,
u't\xed\u0159': 5,
u'p\xe9c': 5,
u'xta': 5,
u'\xedbo': 5,
u'\xf3ov': 5,
u'won': 5,
u'dgi': 5,
u'\u0161\xeda': 5,
u'irn': 5,
u'rgw': 5,
u'frd': 5,
u'cub': 5,
u'j\xf6k': 5,
u'wsp': 5,
u't\xe9o': 5,
u't\xe9b': 5,
u'p\xedd': 5,
u'p\xedj': 5,
u'\xfcls': 5,
u'p\xed\u010f': 5,
u'fm#': 5,
u'zyp': 5,
u'\u016fse': 5,
u'\u0119tr': 5,
u'vlc': 5,
u'wra': 5,
u'b\xfa#': 5,
u'gfa': 5,
u'm\u0148u': 5,
u'jex': 5,
u'aaf': 5,
u'vo\xe1': 5,
u'vog': 5,
u'amz': 5,
u's\xf6d': 5,
u'ijh': 5,
u'sgo': 5,
u'gch': 5,
u'\xe1#\xe1': 5,
u'\xe9nk': 5,
u'\u015fcu': 5,
u'w#w': 5,
u'\xe9n\xe9': 5,
u'szu': 5,
u'aem': 5,
u'\u0161i\u0107': 5,
u'ja\u0161': 5,
u'dao': 5,
u'nv#': 5,
u'wet': 5,
u'vse': 5,
u'nv\xed': 5,
u'in\xf3': 5,
u'yr#': 5,
u'jz#': 5,
u'inr': 5,
u'a#\xf8': 5,
u'mi\u0161': 5,
u'cmp': 5,
u'\u017e#w': 5,
u'pji': 5,
u'\xed\xe1n': 5,
u'emh': 5,
u'emj': 5,
u'\xe9bl': 5,
u'em\u0148': 5,
u'so\u0159': 5,
u'aip': 5,
u'lck': 5,
u'jvr': 5,
u'a+#': 5,
u'mbh': 5,
u'\xf3ri': 5,
u'no\xeb': 5,
u'\u016f\u010dc': 5,
u'mrv': 5,
u'mro': 5,
u'\xe8te': 5,
u'hvd': 5,
u'am\xfc': 5,
u'srs': 5,
u'\xed#+': 5,
u'#aw': 5,
u'puu': 5,
u'#s\xe8': 5,
u'ffu': 5,
u'\u0163a#': 5,
u'ifs': 5,
u'\xe1qu': 5,
u'if\xe1': 5,
u'kaw': 5,
u'\xe1ja': 5,
u'iye': 5,
u'\u0165al': 5,
u'\u017ek\u016f': 5,
u'oz\xe9': 5,
u'c\xe9v': 5,
u'c\xe9u': 5,
u'#wy': 5,
u'ur\xfd': 5,
u'ngc': 5,
u'ngj': 5,
u'\xfa#m': 5,
u'\xfa#n': 5,
u'tbt': 5,
u'yao': 5,
u's\xfdm': 5,
u'uk\u0161': 5,
u'r\xe3a': 5,
u'hn\u016f': 5,
u'\xe8le': 5,
u'eav': 5,
u'\u0119sy': 5,
u'ovb': 5,
u'\u0165#g': 5,
u'b\xf3n': 5,
u'\u0107#b': 5,
u'gyo': 5,
u'gyt': 5,
u'\u0159a\u010f': 5,
u'#uf': 5,
u'gy\xfc': 5,
u'\u03c4\u03bf#': 5,
u'fn\xe9': 5,
u'r\u017el': 5,
u'\xfcan': 5,
u'on\xf3': 5,
u'\xe1\u010dt': 5,
u'kii': 5,
u'yek': 5,
u'yea': 5,
u'#zz': 5,
u'zd\u0148': 5,
u'ayd': 5,
u'c\xe1v': 5,
u'zs\xe9': 5,
u'ge=': 5,
u'\u015bni': 5,
u'aj\xf6': 5,
u'oiv': 5,
u'\u011bck': 5,
u'#bj': 5,
u'\xe1\u017e\u010f': 5,
u'goy': 5,
u'iuk': 5,
u'\xe7ai': 5,
u'eyi': 5,
u'hfc': 5,
u'l\u017e\u016f': 5,
u'ldw': 5,
u'ldy': 5,
u'bsy': 5,
u'nsv': 5,
u'#fv': 5,
u'y\u017eo': 5,
u'\xe1dv': 5,
u'j\u017e\xe1': 5,
u'ch\xe8': 5,
u'pg#': 5,
u'uio': 5,
u'\u017e\u010fa': 5,
u'v\xe1q': 5,
u'b\u010d\xed': 5,
u'wyn': 5,
u's\xedh': 5,
u'#zs': 5,
u'imf': 5,
u'\u010dez': 5,
u'nh#': 5,
u'\xfcr#': 5,
u'\xfcrg': 5,
u'mue': 5,
u'+#c': 5,
u'#\xe1n': 5,
u'hse': 5,
u'ahs': 5,
u'd\u0119t': 5,
u'ky\u010d': 5,
u'g#\xfa': 5,
u'\xe1l\u0161': 5,
u'iai': 5,
u'l\xf3s': 5,
u'\xfd#w': 5,
u'nld': 5,
u'l\u0148c': 5,
u'l\u0148t': 5,
u'yh\u0159': 5,
u'lcs': 5,
u'\xf6ne': 5,
u'z\xedd': 5,
u'#\xe5s': 5,
u'vyg': 5,
u'sug': 5,
u'sua': 5,
u'i\u010fm': 5,
u'jsy': 5,
u'iex': 5,
u'uun': 5,
u'\u011bvs': 5,
u'qat': 5,
u'\xe9sz': 5,
u'\xfbte': 5,
u'a\xe7y': 5,
u'g\xe1b': 5,
u'z\xe9r': 5,
u'r\u010d#': 5,
u'j\xfcr': 5,
u'e\u0165t': 5,
u'e\u0165m': 5,
u'ouu': 5,
u't\xf3z': 5,
u't\xf3w': 5,
u'##x': 5,
u'joy': 5,
u'\xe2yo': 5,
u'\xe8#a': 5,
u'ndv': 5,
u'bco': 5,
u's#y': 5,
u'mdo': 5,
u'dsl': 5,
u'f\xfcr': 5,
u'oqu': 5,
u'huo': 5,
u'um\xe9': 5,
u'foc': 5,
u'uyn': 5,
u'um\u010d': 5,
u'kh#': 5,
u'ydi': 5,
u'ttf': 5,
u'\xe1co': 5,
u'tt\u016f': 5,
u'vfo': 5,
u'\xebll': 5,
u'\xfare': 5,
u'\u015fem': 5,
u'\xe9t#': 5,
u'cp\u011b': 5,
u'ddt': 5,
u'dfu': 5,
u'\xf6ku': 5,
u'klm': 5,
u'n\u0163a': 5,
u'itf': 5,
u'itg': 5,
u'n#\xf3': 5,
u'fts': 5,
u'\u011b\u0148a': 5,
u'ex\xed': 5,
u'se\xe1': 5,
u'mlk': 5,
u'ueo': 5,
u'ctl': 5,
u'\xfaku': 5,
u'npr': 5,
u'ct\u016f': 5,
u'vm\xfc': 5,
u'bae': 5,
u's\xe8t': 5,
u'rd\u017e': 5,
u'ckl': 5,
u'unb': 5,
u'eoe': 5,
u'\u0161ot': 5,
u'gls': 5,
u'\u0148c\xed': 5,
u'la\xe2': 5,
u'uap': 5,
u'o\u0161a': 5,
u'\u011btc': 5,
u'ni\u016f': 5,
u'cog': 5,
u'ep\xfa': 5,
u'epj': 5,
u'\xfcss': 5,
u'bpt': 5,
u'vdc': 5,
u'xys': 5,
u'pl\u017e': 5,
u'ekk': 5,
u'aon': 5,
u'\u0159k\u016f': 5,
u'\u011bn\u010d': 5,
u'\u015f#s': 5,
u'v#\u200b': 5,
u'x#\u010d': 5,
u'wc#': 5,
u'vut': 5,
u'uxy': 5,
u'\xed\u010f#': 5,
u'ccl': 5,
u'a\xe2y': 5,
u'cvr': 5,
u'eg\xf3': 5,
u'\xe9da': 5,
u'dib': 5,
u'asg': 5,
u'asj': 5,
u'liq': 5,
u'st\xf3': 5,
u'oxl': 5,
u'b\xe9l': 5,
u'jpl': 5,
u'ma\u0142': 5,
u'ruf': 5,
u'idd': 5,
u'na\xed': 5,
u'utb': 5,
u'z#q': 5,
u'asq': 5,
u'du\u0159': 5,
u'i\u017et': 5,
u'#hc': 5,
u'#hg': 5,
u'vbr': 5,
u'yc#': 5,
u'be\u010d': 5,
u'hl\xf3': 5,
u'#\xf8r': 5,
u'yrr': 5,
u'y\xfcr': 5,
u'\u011b#\u0163': 5,
u'\u011b#\xe1': 5,
u'i\u015fo': 5,
u'tgo': 5,
u'#ao': 5,
u'gl\xed': 5,
u'is\xe1': 5,
u'o\xe3o': 5,
u'o\xe3a': 5,
u'n#\u0151': 5,
u'tye': 5,
u'a\u010dm': 5,
u'ba\xf1': 5,
u'#iw': 5,
u'mcs': 5,
u'sj\xf8': 5,
u'v\u011b\u0148': 5,
u'k\u017e#': 5,
u'rrh': 5,
u'ooc': 5,
u'nye': 5,
u'on\xe7': 5,
u'vja': 5,
u'r\xedi': 5,
u'on\xfd': 5,
u'k\u0151#': 5,
u'bmi': 5,
u'paw': 5,
u'dju': 5,
u'r\xf6s': 5,
u'\u0142a#': 5,
u'f\xe1i': 5,
u'f\xe1g': 5,
u'f\xe1d': 5,
u'\u017eak': 5,
u'ab\xfa': 5,
u'a\u0161p': 5,
u'okv': 5,
u'okc': 5,
u'okk': 5,
u'\xfal#': 5,
u'b\xe6k': 5,
u'de\u03b2': 5,
u'#d\u0119': 5,
u't\u011b\u0148': 5,
u'a\u015f#': 5,
u'h\xe8l': 5,
u'a\u015fc': 5,
u'ko\xfa': 5,
u'#dg': 5,
u'o\xfbt': 5,
u'z\xfas': 5,
u'ik\u0151': 5,
u'usb': 5,
u'pe=': 5,
u'jzk': 5,
u'jzi': 5,
u'lyv': 5,
u'lyw': 5,
u'lyr': 5,
u'lye': 5,
u'\xedm\xed': 5,
u'e\u013ee': 5,
u'lf\xed': 5,
u'\u0148\u0161t': 5,
u'co\xfb': 5,
u'f#w': 5,
u'ogd': 5,
u'zf#': 5,
u'ks\xe1': 5,
u'\xf6ll': 5,
u'ilg': 5,
u't\xfd\u0159': 5,
u'\xe1jo': 5,
u'esq': 5,
u'buy': 5,
u'j\xe1t': 5,
u'\u0107#n': 5,
u'aij': 5,
u'gg\xe1': 5,
u'\xe9cl': 5,
u'gow': 5,
u'raa': 5,
u'lbl': 5,
u'lb#': 5,
u'occ': 5,
u'ywo': 5,
u'icp': 5,
u'\u010dci': 5,
u'\xe1nb': 5,
u'ti\xe8': 5,
u'r\u0161#': 5,
u'msc': 5,
u'f\xedg': 5,
u'd\xe9p': 5,
u'css': 5,
u'k#y': 5,
u'\xe9g#': 5,
u'\u0161re': 5,
u'e\xfa\u0159': 5,
u'dy\u0161': 5,
u'p\u010du': 5,
u'p\xe1\u010d': 5,
u'igt': 5,
u'\u010don': 5,
u'nb\xf3': 5,
u'uwe': 5,
u'\xe1r\u0148': 5,
u'yn\u010d': 5,
u'hiu': 5,
u'duy': 5,
u'rix': 5,
u'\xfdvn': 5,
u'\xe0em': 5,
u'\u0159\u016fv': 5,
u'grz': 5,
u'\u010dk\xfd': 5,
u'fee': 5,
u'ndf': 5,
u'byp': 5,
u's\xfan': 5,
u'#kb': 5,
u'wsi': 5,
u'h#\xe1': 5,
u'\u0148\xe1c': 5,
u'j\u010dn': 5,
u'eb\xe6': 5,
u'hm\xe1': 5,
u'mfu': 5,
u'\u011bsy': 5,
u'zae': 4,
u'\xe6k#': 4,
u'\u0159k\xfd': 4,
u'kfi': 4,
u'jo\u010d': 4,
u'#ow': 4,
u'\u0161\xedn': 4,
u'yfa': 4,
u'\u011bs\u0148': 4,
u'\u015f#v': 4,
u'tzs': 4,
u'd\xfd\u0148': 4,
u'\u03bd\u03b9#': 4,
u'ha\xe7': 4,
u'j\xf6l': 4,
u'f\xe2n': 4,
u'az\xe9': 4,
u't\xe9p': 4,
u'\xe9v#': 4,
u't\u0103n': 4,
u'p\xedk': 4,
u'\xe4ho': 4,
u'jid': 4,
u'tew': 4,
u'jiu': 4,
u'zo\u0159': 4,
u'\u03c1\u03c7\u03af': 4,
u'\xedn\xe9': 4,
u'zom': 4,
u'ivp': 4,
u'ivs': 4,
u'fvo': 4,
u'tvz': 4,
u't#\xe9': 4,
u'mnn': 4,
u'scv': 4,
u'#\u017e#': 4,
u'x\xedk': 4,
u'\xe9ja': 4,
u'jeu': 4,
u'jeo': 4,
u'ojl': 4,
u'\xfame': 4,
u'#gt': 4,
u'pss': 4,
u'ci+': 4,
u'fza': 4,
u'ci\xf2': 4,
u'y\u017ed': 4,
u'\u013e#a': 4,
u'o\u010dl': 4,
u'r\xf3j': 4,
u'mjd': 4,
u'\xe1#x': 4,
u'lz\xe1': 4,
u'dbr': 4,
u'ucf': 4,
u'lgy': 4,
u'ofk': 4,
u'wep': 4,
u'a#\u010f': 4,
u'h\xedh': 4,
u'a#\xe5': 4,
u'y\u0159t': 4,
u'a#\xb0': 4,
u'\u0159\xe1h': 4,
u'l\xf6s': 4,
u'er\xed': 4,
u'\u0117#p': 4,
u'mv#': 4,
u'mv\u010d': 4,
u'gn\u011b': 4,
u'k\xe9p': 4,
u'emt': 4,
u'emw': 4,
u'\xe9bu': 4,
u'lc#': 4,
u'\xe0#d': 4,
u'wak': 4,
u'waf': 4,
u'k#\xe1': 4,
u'h\xe1i': 4,
u'h\xe1b': 4,
u'i\u0161a': 4,
u'fom': 4,
u'trd': 4,
u'y#\u0111': 4,
u'\u03b6\u03b5\u03b9': 4,
u'ib\xed': 4,
u'ha\u0142': 4,
u'\xfdn\u016f': 4,
u'no\xee': 4,
u's\u012bt': 4,
u'no\u0165': 4,
u'\xf3r\u016f': 4,
u'\xfdni': 4,
u'\xed#\u03b2': 4,
u'\xed\xed#': 4,
u'azt': 4,
u'eii': 4,
u'j\u0161\u0165': 4,
u'psp': 4,
u'gja': 4,
u'ps\u012b': 4,
u'am\u010d': 4,
u'rje': 4,
u'rja': 4,
u'#s\u016f': 4,
u'#s\u017e': 4,
u'k\u010de': 4,
u'\xfa\u010ds': 4,
u'#s\xf6': 4,
u'ce=': 4,
u'\xed\u010dl': 4,
u'tfi': 4,
u'o\u010fu': 4,
u'm#\xf6': 4,
u'y\xe1\xf1': 4,
u'm\u011b\u010f': 4,
u'yv#': 4,
u'i\xe9h': 4,
u'c\xe9s': 4,
u'wiz': 4,
u'\xedzv': 4,
u'izt': 4,
u'rs\xe1': 4,
u'\xede#': 4,
u'rz\u0105': 4,
u'\xfa#g': 4,
u'\xfa#z': 4,
u'\xf3ju': 4,
u'kee': 4,
u'b\xe9c': 4,
u'keu': 4,
u'eao': 4,
u'bcg': 4,
u'\xe2nt': 4,
u'\u0119so': 4,
u'mwa': 4,
u'bl#': 4,
u'\u0159av': 4,
u'b\xf3a': 4,
u'gyh': 4,
u'gys': 4,
u'nwr': 4,
u'm\xf6#': 4,
u'fn\xe1': 4,
u'fnu': 4,
u'unl': 4,
u'\xe1\u010dc': 4,
u'\u03af\u03b6\u03b5': 4,
u'#nv': 4,
u'#nc': 4,
u'cg#': 4,
u'axu': 4,
u'we\u0142': 4,
u'e\u017ed': 4,
u'\xf3\u0142y': 4,
u'\u03b7#\u03b1': 4,
u'ay\xed': 4,
u'\u010d\xe1c': 4,
u'lsm': 4,
u'or\xf3': 4,
u'\xe1\xedo': 4,
u'e#+': 4,
u'zso': 4,
u'iv\u0159': 4,
u'\u011b\u0161c': 4,
u'\u200b\u200bp': 4,
u'dv#': 4,
u'\xf3bi': 4,
u'\xfane': 4,
u'z\u016fv': 4,
u'#bn': 4,
u'#bt': 4,
u'vha': 4,
u'\xe1\u0161c': 4,
u'\xe1\u0161o': 4,
u'\xe4#o': 4,
u'u#\u010f': 4,
u'sb\xe1': 4,
u'mm\xe1': 4,
u'sbr': 4,
u'mmy': 4,
u'o#\xe0': 4,
u'v\u0148a': 4,
u'l\u017eb': 4,
u'\xe1\xf1e': 4,
u'\xe9ir': 4,
u'#\xe9#': 4,
u'i\xf2#': 4,
u'u\u0165t': 4,
u'dga': 4,
u'cuz': 4,
u'cud': 4,
u'nsl': 4,
u'j\u017ei': 4,
u'iiy': 4,
u'tsh': 4,
u'qu#': 4,
u'ts\xe1': 4,
u'\u200b#p': 4,
u'tja': 4,
u'\u016fla': 4,
u'uic': 4,
u'\xe9m\xe1': 4,
u'gmb': 4,
u'ub\u010d': 4,
u'v\u0159a': 4,
u'oai': 4,
u'ubm': 4,
u'q#s': 4,
u'\u010dev': 4,
u'bsp': 4,
u'+#b': 4,
u'+#o': 4,
u'xxi': 4,
u'vip': 4,
u'ppy': 4,
u'gih': 4,
u'cns': 4,
u'llp': 4,
u'd\u017el': 4,
u'cnn': 4,
u'ky\xf3': 4,
u'\xfa#o': 4,
u'ppr': 4,
u'iaj': 4,
u'nlh': 4,
u'\u0161e\u0159': 4,
u'#\xe1\u0161': 4,
u'k\u0142o': 4,
u'oys': 4,
u'oyc': 4,
u'l\u016fn': 4,
u'jir': 4,
u'zyd': 4,
u'\u011b#x': 4,
u'iey': 4,
u'm\xedk': 4,
u'\xf2#c': 4,
u'tgy': 4,
u'\u011bvz': 4,
u'\u0144cz': 4,
u'roq': 4,
u'a\xe7i': 4,
u'i#\u010f': 4,
u'i#\xe9': 4,
u'l\xe4h': 4,
u'tpi': 4,
u'z\xe9n': 4,
u'exs': 4,
u'\u011bt\xe9': 4,
u'\u016fnu': 4,
u'j\xfca': 4,
u'#\u010dn': 4,
u'\xf3#o': 4,
u'\xfazu': 4,
u'wh#': 4,
u'lt\u0161': 4,
u'##y': 4,
u'rpi': 4,
u'tcs': 4,
u'tcl': 4,
u'\u011bzk': 4,
u's#\xe9': 4,
u'myr': 4,
u'#m\xfa': 4,
u'ho\u0142': 4,
u'hoi': 4,
u'hox': 4,
u'e\u0161m': 4,
u'dsi': 4,
u'\xfdpk': 4,
u'xi\xed': 4,
u'lpi': 4,
u'n\xe9n': 4,
u'\xe9ph': 4,
u'\xe9pr': 4,
u'\xe4ns': 4,
u'dhu': 4,
u'jkl': 4,
u'\u0161ci': 4,
u'\u011b\u0161\xe1': 4,
u'olh': 4,
u'umd': 4,
u'kh\xe1': 4,
u'ipt': 4,
u'yd\u0159': 4,
u'sa\u0161': 4,
u'lnn': 4,
u'axm': 4,
u'xma': 4,
u'u#\xf3': 4,
u'u#\xe0': 4,
u'jgl': 4,
u'ddl': 4,
u'acr': 4,
u'cpn': 4,
u'cpo': 4,
u'cpe': 4,
u'k\u016fr': 4,
u'#e+': 4,
u'\u0107em': 4,
u'evt': 4,
u'\u010fau': 4,
u'\xe1go': 4,
u'ft\u011b': 4,
u'xal': 4,
u'jco': 4,
u'zi\u0144': 4,
u'le\xf3': 4,
u'ueu': 4,
u'rdp': 4,
u'ctc': 4,
u'zi\xfa': 4,
u'\xedhe': 4,
u'\xedhy': 4,
u'v\xed\u0161': 4,
u'ckt': 4,
u'hyu': 4,
u'v\xfd\u0148': 4,
u'qtr': 4,
u'bte': 4,
u'bti': 4,
u'g\xfcl': 4,
u'xel': 4,
u'mhf': 4,
u'#y\xe1': 4,
u'hpe': 4,
u'\xe3ov': 4,
u'\u0161oa': 4,
u'\u0161os': 4,
u'i\u0148#': 4,
u'ak\u0161': 4,
u'o\u0161o': 4,
u'ntm': 4,
u'al\xe8': 4,
u'\xe1r\xed': 4,
u'nix': 4,
u'\u03b9\u03bd\u03b9': 4,
u'pls': 4,
u'xye': 4,
u't\u016f\u010d': 4,
u'mtt': 4,
u'ek\xf3': 4,
u'\u03c7\u03af\u03b6': 4,
u'sph': 4,
u'spk': 4,
u'lm\xf6': 4,
u'\xedp\u011b': 4,
u'\xedp\xe1': 4,
u'\u015f#k': 4,
u'wcz': 4,
u'll\xe9': 4,
u'yth': 4,
u'#qt': 4,
u'o\u010dc': 4,
u'yog': 4,
u'y\u016fm': 4,
u'\u0159ta': 4,
u'egh': 4,
u'as\xfd': 4,
u'#\u03b9\u03bd': 4,
u'pug': 4,
u'\u017eub': 4,
u'wed': 4,
u'\xf3\u0142#': 4,
u'l#\xe9': 4,
u'c\u010de': 4,
u'l#y': 4,
u'\xedvc': 4,
u'cgr': 4,
u'idv': 4,
u'na\xfa': 4,
u'h\xfcs': 4,
u'#hp': 4,
u'ec\xe9': 4,
u'nyb': 4,
u'\xedv\u010d': 4,
u'aw\xfc': 4,
u'k\xf3\u0142': 4,
u'\xfdv\u011b': 4,
u'ec\u010d': 4,
u'awu': 4,
u'pyw': 4,
u'\u017eii': 4,
u'luu': 4,
u'e\xf3n': 4,
u'zyb': 4,
u'z\u0105d': 4,
u'\u0165ka': 4,
u'jl\u016f': 4,
u'\u0161ib': 4,
u'ne\xed': 4,
u'#\u03b1\u03c1': 4,
u'\xf3li': 4,
u'p\u0119k': 4,
u'ygd': 4,
u'vf#': 4,
u'#lc': 4,
u'z\xe9#': 4,
u'ofp': 4,
u'tys': 4,
u's\u016fl': 4,
u'sl\u0148': 4,
u'bap': 4,
u'\u0159\u010dl': 4,
u'\xfd\u017eo': 4,
u'mck': 4,
u'\xe9l\xe9': 4,
u'akb': 4,
u'zuk': 4,
u'o\u0142o': 4,
u'zu\u0161': 4,
u'\u012bti': 4,
u'\u011b\u010ds': 4,
u'\xf4le': 4,
u'\u011b\u010d#': 4,
u'n\u017em': 4,
u'\xedop': 4,
u'\xedou': 4,
u'fl\xed': 4,
u's\u0159e': 4,
u'iwc': 4,
u'l\xedr': 4,
u'pa\u017e': 4,
u'\xfa\u0159#': 4,
u'd#\xb0': 4,
u'pah': 4,
u'r\xf6n': 4,
u'moa': 4,
u'\u017eav': 4,
u'xne': 4,
u'o\u0161r': 4,
u'\xe1\u0159a': 4,
u'ntp': 4,
u'\u011bjo': 4,
u'r\u010dk': 4,
u'csp': 4,
u'ko\u0144': 4,
u'okh': 4,
u'zj\xed': 4,
u'v\u016fv': 4,
u'\xfalu': 4,
u'\u0142od': 4,
u'a\u015fe': 4,
u'uh\u0159': 4,
u'#dk': 4,
u'r\xe9e': 4,
u'\u0142#k': 4,
u'h\u0159m': 4,
u'fuz': 4,
u'hcf': 4,
u'fuc': 4,
u'a\u0142#': 4,
u'bim': 4,
u'g\xf3\u0142': 4,
u'bi\xf3': 4,
u'mki': 4,
u'lyu': 4,
u'lyd': 4,
u'af\xed': 4,
u'daf': 4,
u're\u0163': 4,
u'ksy': 4,
u'\u0105dk': 4,
u'vrk': 4,
u'u\u017ea': 4,
u'cja': 4,
u'ioj': 4,
u'sk\u200b': 4,
u'mwo': 4,
u'\u0107#j': 4,
u'\u0107#p': 4,
u'p#w': 4,
u'\xedy#': 4,
u'kwh': 4,
u'oc\u016f': 4,
u'nu\u0161': 4,
u'\xf6pf': 4,
u'ibp': 4,
u'nu\u010d': 4,
u'cnt': 4,
u'ti\u0107': 4,
u'\xf3s#': 4,
u'msz': 4,
u'o\xeet': 4,
u'\xe9gy': 4,
u'\xed\u017e\u0161': 4,
u'ssp': 4,
u'dyg': 4,
u'lns': 4,
u'ss\xe3': 4,
u'unw': 4,
u'ko\u010f': 4,
u'uht': 4,
u'rv\u016f': 4,
u'p\u010d\xed': 4,
u'h\xe4n': 4,
u'#p\u0119': 4,
u'nbi': 4,
u'k\u0161e': 4,
u'nb\u016f': 4,
u'te\u017e': 4,
u'efy': 4,
u'r\u010fm': 4,
u'dug': 4,
u'sw#': 4,
u'o#+': 4,
u'\u016f\u017en': 4,
u'\u03b1\u03c1\u03c7': 4,
u'#t\u0103': 4,
u'zze': 4,
u'#t\xe2': 4,
u'\u010dk\xe9': 4,
u'r\xe9b': 4,
u'caa': 4,
u'voy': 4,
u'few': 4,
u'us\xfd': 4,
u'kba': 4,
u'h#y': 4,
u'#kc': 4,
u'\xf3m\u016f': 4,
u'vay': 4,
u'#k\u0161': 4,
u'e\u0148\u0161': 4,
u'hm\u011b': 4,
u'\u0142y#': 4,
u'ebk': 4,
u'hms': 4,
u'\u011bsc': 4,
u'j\xfat': 4,
u'blj': 4,
u'e\u0163u': 4,
u'skt': 4,
u'skj': 4,
u'g\xf3#': 4,
u'\u017ena': 4,
u'y\xf3t': 4,
u'owc': 4,
u'sk\u0142': 4,
u'p\xe9\u0161': 3,
u'\xe9rg': 3,
u'rlb': 3,
u'p\xe9t': 3,
u'\xe4lj': 3,
u't\xed\u010d': 3,
u'dn#': 3,
u'ajg': 3,
u'czn': 3,
u'\xedb\xe1': 3,
u'\xed\u0161l': 3,
u'e\xedm': 3,
u'kfa': 3,
u'kfo': 3,
u'\u0161\xedz': 3,
u'\u0161\xedp': 3,
u'\u0161\xedv': 3,
u'fr\u016f': 3,
u'\u011bzy': 3,
u'irh': 3,
u'cun': 3,
u'cui': 3,
u'fr\xed': 3,
u'ir\u010d': 3,
u'k\xf6p': 3,
u'b\xedo': 3,
u'soo': 3,
u'xku': 3,
u'osd': 3,
u'zri': 3,
u'\xe9vn': 3,
u'\xe9vi': 3,
u'\u011bga': 3,
u'jio': 3,
u'\u0161ad': 3,
u'ukm': 3,
u'zoe': 3,
u'uk\xf3': 3,
u'rz\xfd': 3,
u'jto': 3,
u'kj\xf6': 3,
u'#ck': 3,
u'ld\u011b': 3,
u'uoc': 3,
u'c\u01ceu': 3,
u'k\xfa\u0161': 3,
u'\xe1a#': 3,
u'ez\u0148': 3,
u't#\xf3': 3,
u'scn': 3,
u'g\xf6r': 3,
u'dsc': 3,
u'pbc': 3,
u'e\xedc': 3,
u'aa\u010d': 3,
u'wre': 3,
u'y\u0159d': 3,
u'jei': 3,
u'aac': 3,
u'aai': 3,
u'\u0161ei': 3,
u'\u016fna': 3,
u'\xfam#': 3,
u'vo\u010f': 3,
u'nre': 3,
u'#gw': 3,
u'h\xe9s': 3,
u'a\xf1i': 3,
u'vl\u016f': 3,
u'tr\xf4': 3,
u'sgr': 3,
u'sgi': 3,
u'sga': 3,
u'pfe': 3,
u'tr\u0159': 3,
u'\xfavp': 3,
u'srh': 3,
u'r\xf3o': 3,
u'r\xf3w': 3,
u'lzv': 3,
u'w#r': 3,
u'w#g': 3,
u'sz\xf6': 3,
u'of\u016f': 3,
u'a#\u03b1': 3,
u'ucy': 3,
u'lgu': 3,
u'of\xe9': 3,
u'cva': 3,
u'weo': 3,
u'a#\xba': 3,
u'req': 3,
u'\xfa\u010d\xe1': 3,
u'y\u0159c': 3,
u'br\u010d': 3,
u'\xe1i#': 3,
u'\xe1in': 3,
u'nkh': 3,
u'x\u010d\xed': 3,
u'udv': 3,
u'udz': 3,
u'\u0159rg': 3,
u'k\xe9n': 3,
u'emd': 3,
u'emq': 3,
u'bnp': 3,
u'#\u0159v': 3,
u'\xe0#a': 3,
u'\xe0#t': 3,
u'obp': 3,
u'wam': 3,
u'wax': 3,
u'rn\u0161': 3,
u'\xedrc': 3,
u'h\xe1g': 3,
u'ums': 3,
u'guu': 3,
u'y#\xf6': 3,
u'\u011bsu': 3,
u'mry': 3,
u'#xt': 3,
u'eih': 3,
u'psl': 3,
u'\xe9du': 3,
u'\xed#\u0165': 3,
u'y\u010da': 3,
u'ifl': 3,
u'\xed#\u010f': 3,
u'am\u0159': 3,
u'lo\xe1': 3,
u'az\xf3': 3,
u'wmo': 3,
u'v\u010dk': 3,
u'#sq': 3,
u'jr\xe1': 3,
u'v\xe1j': 3,
u'uvc': 3,
u'ifm': 3,
u'ifk': 3,
u'k\u010du': 3,
u'\u010d\u016fz': 3,
u'ka\u0144': 3,
u'if\xe9': 3,
u'uv\u0148': 3,
u'ncf': 3,
u'\xf3vi': 3,
u'nc\xe9': 3,
u'vr\u0148': 3,
u'qba': 3,
u'm#\xf3': 3,
u'\xed\xe9#': 3,
u'eeh': 3,
u'eee': 3,
u'eec': 3,
u'm\u010de': 3,
u'\u013ee#': 3,
u'dt\xe1': 3,
u'i\xe9#': 3,
u'i\xe9f': 3,
u'n\u0103s': 3,
u'c\xe9z': 3,
u'\u0159mo': 3,
u'\xedz\xe1': 3,
u'i\u0159n': 3,
u'#wm': 3,
u'+re': 3,
u'rsd': 3,
u'rsb': 3,
u'cyb': 3,
u'rsy': 3,
u'ur\xf3': 3,
u'urh': 3,
u'ngb': 3,
u'\xfa#b': 3,
u'\xfa#k': 3,
u'ng\xf3': 3,
u'kei': 3,
u'kef': 3,
u'yak': 3,
u'amj': 3,
u'piq': 3,
u'ke\u010d': 3,
u'xst': 3,
u'hnn': 3,
u'm#\u010f': 3,
u'sj\xed': 3,
u'au\xed': 3,
u'meu': 3,
u'drm': 3,
u'sjo': 3,
u'\u0161\u010do': 3,
u'xhi': 3,
u'xhu': 3,
u'\u03c0\u03bc#': 3,
u'c\xede': 3,
u'wuo': 3,
u'\u03b1\u03c4\u03bf': 3,
u'\xe9qu': 3,
u'n\u017ea': 3,
u'gye': 3,
u'#ua': 3,
u'fn\xfd': 3,
u'r\u017ec': 3,
u'r\u017ed': 3,
u'fna': 3,
u'fno': 3,
u'\xe1\u010do': 3,
u'#ns': 3,
u'u\u0148m': 3,
u'\xf6ra': 3,
u'e#\u03b5': 3,
u'ayp': 3,
u'\u010d\xe1l': 3,
u'e#\u0165': 3,
u'lsb': 3,
u'xl#': 3,
u'xlo': 3,
u'or\u017e': 3,
u'e#\xb0': 3,
u'\u0159ei': 3,
u'\u0159ea': 3,
u'gep': 3,
u'gev': 3,
u'uj\u016f': 3,
u'jfr': 3,
u'dkk': 3,
u'uj\xfa': 3,
u'd\xe1s': 3,
u'\u016fmu': 3,
u'\u200b\u200be': 3,
u'ujn': 3,
u'ujd': 3,
u'ujc': 3,
u'\xf3be': 3,
u'h\xf6n': 3,
u'h\xf6g': 3,
u'h\xf6p': 3,
u'#bz': 3,
u'\xfd#\xe1': 3,
u'zl\u016f': 3,
u'vhr': 3,
u'e\u015fe': 3,
u'\u0142op': 3,
u'ajp': 3,
u'vh\xe1': 3,
u'iut': 3,
u's\u0165#': 3,
u'\xe1\u0161s': 3,
u'j\xf3d': 3,
u'r\xf4l': 3,
u'mmt': 3,
u'\u012b\u0137e': 3,
u'eqa': 3,
u'\u016frk': 3,
u'#\xe9c': 3,
u'gav': 3,
u'gaj': 3,
u'n\u0103u': 3,
u'syj': 3,
u'dgh': 3,
u'jbi': 3,
u'ldp': 3,
u'\u0161\u0165k': 3,
u'ld\xf6': 3,
u'\u0142ka': 3,
u'oeo': 3,
u'vl#': 3,
u's\xe9k': 3,
u'vlu': 3,
u'\xfd\u0159#': 3,
u'chd': 3,
u'chp': 3,
u'iic': 3,
u'iik': 3,
u'd\xf6r': 3,
u'dd+': 3,
u'ch\xfc': 3,
u'#\xf6g': 3,
u'mi\xf3': 3,
u'sf\xe2': 3,
u'\u0103n\u0103': 3,
u'\u017e\u010fk': 3,
u'wyb': 3,
u'gml': 3,
u'j\u0161p': 3,
u'rcl': 3,
u'm\xfas': 3,
u'\u0163u#': 3,
u'##\u03b1': 3,
u'ecf': 3,
u'kuy': 3,
u'k\u016fl': 3,
u'q#j': 3,
u'#zy': 3,
u'ku\u017a': 3,
u'vpe': 3,
u'nhy': 3,
u'cyp': 3,
u'pks': 3,
u'cyl': 3,
u'\u043d\u0430#': 3,
u'+#h': 3,
u'+#t': 3,
u'\u0151#g': 3,
u'#\xe1\xe1': 3,
u'#\xe1j': 3,
u's\u010dc': 3,
u'ppu': 3,
u'el\u010d': 3,
u'ybk': 3,
u'sqa': 3,
u'ro\xe1': 3,
u'ngf': 3,
u's\u010di': 3,
u'\xfaba': 3,
u'kyl': 3,
u'\u011bp\xe1': 3,
u'ia\u015f': 3,
u'ic\u01ce': 3,
u'd\u011bg': 3,
u'iaf': 3,
u'\u010dau': 3,
u'l\xf3v': 3,
u'g#\u0161': 3,
u'mqu': 3,
u'nnt': 3,
u's+#': 3,
u'\u01ceuo': 3,
u'v\xe9q': 3,
u'\xe9es': 3,
u'rk\u0161': 3,
u'suh': 3,
u'j\u0159r': 3,
u'al\u0165': 3,
u'su\u0148': 3,
u'd\u017a#': 3,
u'\xf6rf': 3,
u'js\xed': 3,
u'#r\xf6': 3,
u'jsu': 3,
u'm\xedh': 3,
u'rt\xe4': 3,
u'aav': 3,
u'ahd': 3,
u'\u010d#\xed': 3,
u'ojp': 3,
u'c#\xe9': 3,
u'ugs': 3,
u'si\u016f': 3,
u'\u0161t\u016f': 3,
u'apm': 3,
u'e\u0165o': 3,
u'ap\xe9': 3,
u'#\u010ds': 3,
u'siz': 3,
u'oux': 3,
u'k\xfat': 3,
u'\xf3#c': 3,
u'\xfazo': 3,
u'whe': 3,
u'inw': 3,
u'joz': 3,
u'jom': 3,
u'##\xe1': 3,
u'vto': 3,
u'rpl': 3,
u'#v\u012b': 3,
u'jo\u0103': 3,
u'v\xeds': 3,
u'#m\u017e': 3,
u'##\u03bc': 3,
u'##\u03b2': 3,
u'\xdfe#': 3,
u'y\u0161m': 3,
u'y\u0161a': 3,
u'ho\u0148': 3,
u'xt\xe1': 3,
u'xt\xe9': 3,
u'\xf6v\xe9': 3,
u'\xf1in': 3,
u'xiv': 3,
u'lpg': 3,
u'zts': 3,
u'n\xe9e': 3,
u'\xe9pa': 3,
u'\u015fin': 3,
u'i\u0107i': 3,
u'\xeame': 3,
u'\xe9p\u011b': 3,
u'ol\xf3': 3,
u'olp': 3,
u'um=': 3,
u'\u017ami': 3,
u'khe': 3,
u'nxl': 3,
u'nxo': 3,
u'\u011bri': 3,
u'tkj': 3,
u'ydn': 3,
u'uex': 3,
u'yds': 3,
u'fpl': 3,
u'\u0142\xf3d': 3,
u'tt\xe9': 3,
u'm\u016fv': 3,
u'vfl': 3,
u'c\xedy': 3,
u'\u0159ah': 3,
u'\xe9t\xe9': 3,
u'n\xedb': 3,
u'n\xeda': 3,
u'n\xedj': 3,
u'n\xedi': 3,
u't\xebo': 3,
u'\xe9tr': 3,
u'\xe9ts': 3,
u'\u0159de': 3,
u'acd': 3,
u'acv': 3,
u'uia': 3,
u'uib': 3,
u'cpc': 3,
u'cpr': 3,
u'\u0117#v': 3,
u'\xfcnc': 3,
u'xup': 3,
u'trl': 3,
u'it\xeb': 3,
u'ex\u010d': 3,
u'sew': 3,
u'pdo': 3,
u'xa#': 3,
u'\u0137ef': 3,
u'pfa': 3,
u'rd\xf3': 3,
u'x#g': 3,
u'uec': 3,
u'\u03bc\u03bc#': 3,
u'npd': 3,
u'npc': 3,
u'o\u0165m': 3,
u'npt': 3,
u'#yp': 3,
u'#yi': 3,
u'kpa': 3,
u'kpi': 3,
u'ihv': 3,
u'\u017e\xe9\u0159': 3,
u'\u0159\u010du': 3,
u'ckp': 3,
u'btr': 3,
u'omd': 3,
u'etp': 3,
u'xeq': 3,
u'eo\u017e': 3,
u'u\xf1a': 3,
u'mch': 3,
u'hpo': 3,
u'gl\xe9': 3,
u'\xe3oa': 3,
u'\u016fd\xe1': 3,
u'uai': 3,
u'\xe1rb': 3,
u'fad': 3,
u'nt\xed': 3,
u'\u03b1#p': 3,
u'\xedts': 3,
u'\xf6sz': 3,
u'\xf6si': 3,
u'ktp': 3,
u'il\xe4': 3,
u'd\xf3#': 3,
u'coi': 3,
u'ni\xf3': 3,
u'r#\xed': 3,
u'bpe': 3,
u'plc': 3,
u'mtf': 3,
u'ht\xe1': 3,
u'u\xedm': 3,
u'htm': 3,
u'ao\u010d': 3,
u'ghb': 3,
u'ghl': 3,
u'#\u015bw': 3,
u'za\u010f': 3,
u'za\xed': 3,
u'\u015f#n': 3,
u'jts': 3,
u'jtm': 3,
u'gwa': 3,
u'm\xe0o': 3,
u'\u017e\xe1v': 3,
u'yt\u0117': 3,
u'ccw': 3,
u'o\u010d\u016f': 3,
u'mpf': 3,
u'u\xe9#': 3,
u'awf': 3,
u'\xe8ve': 3,
u'hho': 3,
u'egb': 3,
u'pu\xe1': 3,
u'puc': 3,
u'#rz': 3,
u'rh\xf3': 3,
u'lix': 3,
u'as\u0159': 3,
u'r\u016bn': 3,
u'v\u017e\xed': 3,
u'woh': 3,
u'wod': 3,
u'wo#': 3,
u'n\u010d\u0148': 3,
u'ox\u016f': 3,
u'hba': 3,
u'jp#': 3,
u'\xfa\u017e\xed': 3,
u'vya': 3,
u'id\xf3': 3,
u'ruy': 3,
u'idp': 3,
u'idc': 3,
u'nln': 3,
u'id\u010d': 3,
u'z#x': 3,
u'cde': 3,
u'#h\xed': 3,
u'\xfdhy': 3,
u'v\u012b\u0137': 3,
u'#\u03c0\u03bc': 3,
u'in\u0103': 3,
u'd+c': 3,
u'awo': 3,
u'pyg': 3,
u'd+#': 3,
u'\u017eiz': 3,
u'\u017eir': 3,
u'mgm': 3,
u'mgl': 3,
u'\xfds#': 3,
u'shd': 3,
u'shf': 3,
u'shk': 3,
u'\u03b1\u03b9#': 3,
u'\u011b#\xb0': 3,
u'or\xfa': 3,
u'\u011b#+': 3,
u'jl\xed': 3,
u'foi': 3,
u'i\u015fi': 3,
u'i\u015f#': 3,
u'jl#': 3,
u'dmr': 3,
u'tg#': 3,
u'\u0142es': 3,
u'ar\u016b': 3,
u'#lb': 3,
u'n#\u0165': 3,
u'xul': 3,
u'n#\xe9': 3,
u'l\xe1j': 3,
u'tyj': 3,
u'tyf': 3,
u'baw': 3,
u'ix\u016f': 3,
u'mcj': 3,
u'sld': 3,
u'opd': 3,
u'wws': 3,
u'gbu': 3,
u'oo\u010d': 3,
u'ooe': 3,
u'ooo': 3,
u'\u011b\u010d\u0148': 3,
u'nyv': 3,
u'nyj': 3,
u'\xedom': 3,
u'fl\xe1': 3,
u'vj\xed': 3,
u'r\xedm': 3,
u'pa\u015f': 3,
u'r\u0159e': 3,
u'kt\u011b': 3,
u'tuy': 3,
u'tuv': 3,
u'tue': 3,
u'pa\xf1': 3,
u'bma': 3,
u'a\u017er': 3,
u'paf': 3,
u'z\xe9c': 3,
u'\xebov': 3,
u'\u017eau': 3,
u'\xfas\xe1': 3,
u'a\u0161#': 3,
u'ggs': 3,
u'ggo': 3,
u'i\xfad': 3,
u'\xfa\u0159\xed': 3,
u'dew': 3,
u'rye': 3,
u'csy': 3,
u'csw': 3,
u'ryp': 3,
u'ab\u010d': 3,
u'jd\xed': 3,
u'de\xed': 3,
u'\xfali': 3,
u'cfe': 3,
u'#d+': 3,
u'\u015bwi': 3,
u'ik\xf3': 3,
u'+c#': 3,
u'fu\u0161': 3,
u'ikw': 3,
u'h\u0159a': 3,
u'z\xfa#': 3,
u'\xe1fi': 3,
u'l\xe9o': 3,
u'fuo': 3,
u'ewl': 3,
u'\xedmp': 3,
u'\u0159#\xed': 3,
u'ly\u016f': 3,
u'peb': 3,
u'\u017ee\u0148': 3,
u'usq': 3,
u'sdm': 3,
u'mk#': 3,
u'#mk': 3,
u'sd\xe1': 3,
u'\u017eez': 3,
u't\xe0e': 3,
u'a\u0165o': 3,
u'afp': 3,
u'#uu': 3,
u'daa': 3,
u'\xe9b\xe1': 3,
u'cw#': 3,
u'emz': 3,
u'\u03bd\u03b1\u03c4': 3,
u'udh': 3,
u'da\u0144': 3,
u'l\u0165a': 3,
u'zfi': 3,
u'\xfd#\xe9': 3,
u'\u0107i#': 3,
u'cjo': 3,
u'ilq': 3,
u'tm\xe9': 3,
u'#\xb1#': 3,
u'hr\u0148': 3,
u'pi\u0148': 3,
u'j\xed\u0161': 3,
u'k\u0159t': 3,
u'bug': 3,
u'es\u0142': 3,
u'\u0107#o': 3,
u'd\xedh': 3,
u'\xe9co': 3,
u'aj\xf3': 3,
u'hyh': 3,
u's\u017e\xed': 3,
u'thd': 3,
u'p#\xed': 3,
u'\xedso': 3,
u'wfo': 3,
u'wfp': 3,
u'rnh': 3,
u'\xeds\u010d': 3,
u'vve': 3,
u'\u011bm\u011b': 3,
u'lcd': 3,
u'u\u017am': 3,
u'icb': 3,
u'eyf': 3,
u'\u011bp\xed': 3,
u'p\xfd\u0161': 3,
u'\xe1n\xfd': 3,
u'tiy': 3,
u'tiq': 3,
u'pms': 3,
u'r\xfa\u0159': 3,
u'yj#': 3,
u'msu': 3,
u'msh': 3,
u'yji': 3,
u'csh': 3,
u'a\u0142e': 3,
u'e\xdfe': 3,
u'\xe9gi': 3,
u'huy': 3,
u'anv': 3,
u'\xed\u017e\u010f': 3,
u'dyl': 3,
u'rml': 3,
u'ss\xe1': 3,
u'\xed\u017eb': 3,
u'ss\u016f': 3,
u'uhv': 3,
u'#p\xfa': 3,
u'\u010do\u010d': 3,
u'vz#': 3,
u'p\xe1p': 3,
u'juh': 3,
u'\u0103#a': 3,
u'igd': 3,
u'\u010dor': 3,
u'\xe1rd': 3,
u'faa': 3,
u'r#y': 3,
u'k\u0161a': 3,
u'jvs': 3,
u'm\u017et': 3,
u'iz\xe9': 3,
u'izs': 3,
u'\xfdk#': 3,
u'a\u015fi': 3,
u'hiq': 3,
u'hiw': 3,
u'arj': 3,
u'm\u0159u': 3,
u't\xe1g': 3,
u'qar': 3,
u'hi\u015f': 3,
u'zz\xfa': 3,
u'\xf3d\u017a': 3,
u'zzf': 3,
u'gr\xf6': 3,
u'wni': 3,
u'\u0165\u016fm': 3,
u'rr\xe9': 3,
u'sk\xfa': 3,
u'\xe1v\u0148': 3,
u'ibm': 3,
u'umt': 3,
u'\u010dk#': 3,
u'\xe1vl': 3,
u'tpo': 3,
u'kby': 3,
u'#k\xfa': 3,
u'#ks': 3,
u'e\u0148b': 3,
u'j\u010dc': 3,
u'bbs': 3,
u'bb\xe9': 3,
u'j\xfac': 3,
u'\u011bsm': 3,
u'j\xfa#': 3,
u'\xfdrk': 3,
u'##\u03c0': 3,
u'#\u03bc\u03bc': 3,
u'\u016fvi': 2,
u'\xe9r\xe8': 2,
u'cz\u0105': 2,
u'p\xe9n': 2,
u'\u0159b\u011b': 2,
u't\xed\u0161': 2,
u'\xe9rc': 2,
u'\u03bf\u03c2#': 2,
u'#\u043d\u0430': 2,
u'nzk': 2,
u'e\u0144o': 2,
u'e\u0144#': 2,
u'f=e': 2,
u'xv#': 2,
u'tz\xe1': 2,
u'\u015f#z': 2,
u'rgm': 2,
u'tzt': 2,
u'a\u0142g': 2,
u'\u010fky': 2,
u'\u010fka': 2,
u'mb\u016f': 2,
u'mb\u0159': 2,
u'k\xf6v': 2,
u'k\xf6y': 2,
u'\xedpn': 2,
u'\u0144#v': 2,
u'\u0144#p': 2,
u'ar\u0165': 2,
u'azg': 2,
u'xkr': 2,
u'osg': 2,
u'\xfat\xe1': 2,
u'oez': 2,
u'\u0161ag': 2,
u'\u0161af': 2,
u'\u0161as': 2,
u'ld\u017e': 2,
u'\u010dse': 2,
u'ukh': 2,
u'\xednp': 2,
u'rz\xe9': 2,
u'o\u016f#': 2,
u'\u0430\u044f#': 2,
u'kje': 2,
u'uoe': 2,
u'yzy': 2,
u'yzm': 2,
u'#c\u0153': 2,
u'ivz': 2,
u'vuy': 2,
u't#\u010f': 2,
u'bjp': 2,
u'k\xfar': 2,
u'he\xe1': 2,
u'\u0165v\xed': 2,
u'g\xf6d': 2,
u'gsb': 2,
u'g\xf6s': 2,
u'pbr': 2,
u'\xf6#k': 2,
u'\xf6#z': 2,
u'\u0430\u0441\u043d': 2,
u'xog': 2,
u'\u016fsm': 2,
u'vlj': 2,
u'wrz': 2,
u'k\xe1a': 2,
u'\xe9j#': 2,
u'\u013eub': 2,
u'\u03bfc#': 2,
u'e=r': 2,
u'k\xe1\u0148': 2,
u'aau': 2,
u'\xfcll': 2,
u'\u016fne': 2,
u'ugy': 2,
u'\u016fn#': 2,
u'crv': 2,
u'crt': 2,
u'\u016fn\xed': 2,
u'ug\xe1': 2,
u'\u0151rs': 2,
u'vo\xe9': 2,
u'nrg': 2,
u'nrr': 2,
u'a\xf1e': 2,
u'h\xe9i': 2,
u'\u011bd\xfd': 2,
u'vgr': 2,
u'o\xfa\u0159': 2,
u'vl\u010d': 2,
u'j\u017eh': 2,
u'o\xfaz': 2,
u'ciw': 2,
u'\xed\xfat': 2,
u'bvz': 2,
u'iil': 2,
u'\u013e##': 2,
u'r\xf3\u017c': 2,
u'\u013e#m': 2,
u'\u013e#v': 2,
u'pfo': 2,
u'mj\xe1': 2,
u'r\xf3e': 2,
u'r\xf3v': 2,
u'r\xf3p': 2,
u'mjo': 2,
u'\xe9n\u0161': 2,
u'\xe1#q': 2,
u'\xe1#y': 2,
u'ib\u0159': 2,
u'lz\xed': 2,
u'lz\u016f': 2,
u'w#u': 2,
u'w#h': 2,
u'jaj': 2,
u'd\u0161\u0165': 2,
u'db#': 2,
u'tsf': 2,
u'#\u013eu': 2,
u're\xf3': 2,
u'\u016fjs': 2,
u'cv#': 2,
u'lgh': 2,
u'\u011flu': 2,
u'r\u0165a': 2,
u'e\xe1m': 2,
u'ofg': 2,
u'a#\u0142': 2,
u'a#\u0151': 2,
u'a#\u015f': 2,
u'weh': 2,
u'wem': 2,
u'wev': 2,
u'\u011bhi': 2,
u'vsp': 2,
u'\u011bh\xe1': 2,
u'h\xedr': 2,
u'h\xedl': 2,
u'krh': 2,
u'jzp': 2,
u'lft': 2,
u'\u0105cy': 2,
u'inp': 2,
u'y\u0159r': 2,
u'l\xf6n': 2,
u'nkw': 2,
u'br\xf3': 2,
u'nk\xfa': 2,
u'dhy': 2,
u'o\xedr': 2,
u'\xe7u#': 2,
u'ai\u010d': 2,
u'ai\u0159': 2,
u'k\xe9o': 2,
u'emv': 2,
u'\u0165il': 2,
u'hr\xf3': 2,
u'\xe9bc': 2,
u'em\u010d': 2,
u'r\u0161\xe1': 2,
u'\u0148ej': 2,
u'\xe0#o': 2,
u'\xe0#m': 2,
u'\xe0#z': 2,
u'rnl': 2,
u'zca': 2,
u'h\xe1\u010f': 2,
u'k#q': 2,
u'k#x': 2,
u'wac': 2,
u'\xeetu': 2,
u'\xedri': 2,
u'r\u0161\u016f': 2,
u'k#\xe9': 2,
u'kvd': 2,
u'i\u0161c': 2,
u'y#+': 2,
u'yvn': 2,
u'yvk': 2,
u'gug': 2,
u'guz': 2,
u'yv\xfd': 2,
u'gu\xea': 2,
u'fbs': 2,
u'i\u0161\u010d': 2,
u'ibk': 2,
u'yie': 2,
u'yii': 2,
u'yip': 2,
u'\xfdnk': 2,
u'mrl': 2,
u'mrd': 2,
u'mrf': 2,
u'o\xe9#': 2,
u'c+x': 2,
u'\u0159vo': 2,
u'k\xedj': 2,
u'k\xedw': 2,
u'eip': 2,
u'k\xed#': 2,
u'eiv': 2,
u'\u0153ur': 2,
u'p\u012bk': 2,
u'y\u0159p': 2,
u'amc': 2,
u'psr': 2,
u'y\u010dt': 2,
u'dzd': 2,
u'srn': 2,
u'sry': 2,
u'\xed#\xe0': 2,
u'am\u0163': 2,
u'rjo': 2,
u'\xed#\xb0': 2,
u'\u0103nc': 2,
u'\xe1\xe1#': 2,
u'p\u0159p': 2,
u'p\u0159u': 2,
u'wma': 2,
u'#s\u0142': 2,
u's\u0148\xe1': 2,
u'\xe1\xe1l': 2,
u'\xfcbe': 2,
u'v\u010di': 2,
u'\u03b2ov': 2,
u'jr\xfa': 2,
u'#\xedn': 2,
u'#s\xe3': 2,
u'uvs': 2,
u'ifp': 2,
u'\xed\u010dt': 2,
u'ifz': 2,
u'h\xfa\u0159': 2,
u'ip\u0117': 2,
u'\u0148#w': 2,
u'k\u010di': 2,
u'ka\u0148': 2,
u'\u010d\u016fv': 2,
u'rw\u0119': 2,
u'ncr': 2,
u'ncs': 2,
u'\xf3ve': 2,
u'o\u010fk': 2,
u'h\xfat': 2,
u'bzp': 2,
u'\xf3v\xe1': 2,
u'm#\xe0': 2,
u'ym\xe9': 2,
u'\xdfna': 2,
u'stz': 2,
u'\xe1jn': 2,
u'iyi': 2,
u'iyv': 2,
u'iys': 2,
u'hja': 2,
u'i\u0146\u0161': 2,
u'#\u03bb\u03ae': 2,
u'\u0165av': 2,
u'eeo': 2,
u'dv\u010d': 2,
u'\u013eem': 2,
u'\xe4te': 2,
u'pu\u017c': 2,
u'wi\u016f': 2,
u'lkh': 2,
u'lkp': 2,
u'tmn': 2,
u'\u0146\u0161#': 2,
u'c\xe9#': 2,
u'lk\xf3': 2,
u'wia': 2,
u'\u0159ma': 2,
u'wix': 2,
u'i\u0159m': 2,
u'\xf6yk': 2,
u'#wf': 2,
u'rsl': 2,
u'cyc': 2,
u'rsz': 2,
u'm\xeam': 2,
u'\u0142go': 2,
u's\xfd\u010d': 2,
u'v\u0161o': 2,
u'ngm': 2,
u'tbr': 2,
u'\xfa#u': 2,
u'\xfa#v': 2,
u'\xfa#p': 2,
u'\xedex': 2,
u'\xf3js': 2,
u'ng=': 2,
u'b\xe9k': 2,
u'p\u0117d': 2,
u'nj\u0161': 2,
u'yaz': 2,
u'yau': 2,
u'yaa': 2,
u'yac': 2,
u'yab': 2,
u'#jy': 2,
u'ke\u0161': 2,
u'#j\xfa': 2,
u'xsi': 2,
u'n\u0159\xed': 2,
u'#\u03bfc': 2,
u'#\u03bf#': 2,
u'\u0148br': 2,
u'eae': 2,
u'hn\xe9': 2,
u't\u017ee': 2,
u'bca': 2,
u'auu': 2,
u'auw': 2,
u'e\u010ds': 2,
u'blb': 2,
u'\u017eor': 2,
u'\u017eoj': 2,
u'au\xdf': 2,
u'i\xedl': 2,
u'i\xedr': 2,
u'sj#': 2,
u'meb': 2,
u'mew': 2,
u'drn': 2,
u'au\u0159': 2,
u'\u0161\u010da': 2,
u'lwe': 2,
u's\xed\u0161': 2,
u'n\xe8v': 2,
u'e\xf1o': 2,
u'e\xf1a': 2,
u'me\u0165': 2,
u'b\xf3o': 2,
u'\u0165#\xed': 2,
u'do\u0148': 2,
u'\u0159a\u017e': 2,
u'fmo': 2,
u'gyl': 2,
u'\u0159a\u010d': 2,
u'+vc': 2,
u'c\u0103#': 2,
u'gsd': 2,
u'\u03c2#\u03c3': 2,
u'\xf3n\u0161': 2,
u'rzc': 2,
u'f\xe9n': 2,
u'fn#': 2,
u'omv': 2,
u'\u011b\u017es': 2,
u'\xe1\u010d\xe1': 2,
u'\xf3nk': 2,
u'p\u011b\xfa': 2,
u'un\u010d': 2,
u'uk\xfa': 2,
u'bd\u017e': 2,
u'yee': 2,
u'#nz': 2,
u'#nm': 2,
u'#n\xfa': 2,
u'o\xfdc': 2,
u'iq#': 2,
u'\xe7e#': 2,
u'fsb': 2,
u'fsj': 2,
u'fsw': 2,
u'\xf6re': 2,
u'e\u017er': 2,
u's\u0161v': 2,
u'\xf3\u0142k': 2,
u'ayk': 2,
u'ayg': 2,
u'ayf': 2,
u'ma\xf1': 2,
u'e\u017e\u0161': 2,
u'maq': 2,
u'lsn': 2,
u'lsv': 2,
u'lss': 2,
u'e#\u0142': 2,
u'e#\u0123': 2,
u'ls\xed': 2,
u'c\xe1#': 2,
u'p\u013a\u0148': 2,
u'c\xe1d': 2,
u'zs#': 2,
u'\xe1\xedm': 2,
u'e#=': 2,
u'zsy': 2,
u'zsi': 2,
u'\xf3\u0159e': 2,
u'or\u0119': 2,
u'aqb': 2,
u'geu': 2,
u'a\xedh': 2,
u'\u010dre': 2,
u'oi\u010d': 2,
u'uj\u015b': 2,
u'dkv': 2,
u'jfa': 2,
u'e\u03b2e': 2,
u'e\u03b2o': 2,
u'\u200b\u200b#': 2,
u'\u016fme': 2,
u'oif': 2,
u'oix': 2,
u'#\u015fe': 2,
u'km\xe9': 2,
u'o\u0103o': 2,
u'\xedmm': 2,
u'h\xf6r': 2,
u'z\u016fd': 2,
u'yyi': 2,
u'\xf3b\u011b': 2,
u'\xfcm#': 2,
u'#bf': 2,
u'#bk': 2,
u'#bq': 2,
u't\u0119t': 2,
u'#b\xe2': 2,
u'p\xf3n': 2,
u'+a+': 2,
u'iuc': 2,
u'k\xfdl': 2,
u'eya': 2,
u'eyg': 2,
u'ivm': 2,
u'k\u200b#': 2,
u'ivj': 2,
u'wi\u015b': 2,
u'bk#': 2,
u'g+#': 2,
u'hfi': 2,
u'hfl': 2,
u'\xe4#k': 2,
u'\xe4#a': 2,
u'\xe4#v': 2,
u'\xe4#t': 2,
u'sb\xfc': 2,
u'j\xf3v': 2,
u'\xe9#\u200b': 2,
u'tfa': 2,
u'#\xe9d': 2,
u'#\xe9i': 2,
u'\xedng': 2,
u'z\u011bm': 2,
u'y\xedc': 2,
u'sya': 2,
u'syo': 2,
u'ns\u016f': 2,
u'ldh': 2,
u'rgp': 2,
u'\u0161\u0165v': 2,
u'a\u0144c': 2,
u'\xedin': 2,
u'tvt': 2,
u'o\u011fl': 2,
u'\u017adz': 2,
u'zhi': 2,
u'i#\u200b': 2,
u'\xf6n#': 2,
u'w\u0119#': 2,
u's\xe9t': 2,
u's\xe9c': 2,
u'\xfd\u0159u': 2,
u'ddf': 2,
u'e\u015bn': 2,
u'\xf6n\xf6': 2,
u's\xe9\u0159': 2,
u'd\u0103n': 2,
u'chh': 2,
u'iiv': 2,
u'iie': 2,
u'iib': 2,
u'iio': 2,
u'd\xf6l': 2,
u'acm': 2,
u'ch\xfa': 2,
u'qu\xe9': 2,
u'ut\u0148': 2,
u'bw#': 2,
u'c#\xed': 2,
u'wip': 2,
u'pgr': 2,
u'\xfcv#': 2,
u'r\xfcn': 2,
u'nuh': 2,
u'nuf': 2,
u'mij': 2,
u'sff': 2,
u'k\xfan': 2,
u'\u011b\xfa\u0159': 2,
u'k\xe4o': 2,
u'r\u0119b': 2,
u't\xe2r': 2,
u'wyh': 2,
u'adf': 2,
u'rcy': 2,
u'\u0163uo': 2,
u'oae': 2,
u'\u011bkl': 2,
u'\u011bkc': 2,
u'nw\xfc': 2,
u'q#t': 2,
u'wdo': 2,
u'\u011bk\u016f': 2,
u'\xedu\u0161': 2,
u'u\u017ca': 2,
u'ku\u017e': 2,
u'to\u015f': 2,
u'to\u011f': 2,
u'\u013aou': 2,
u'\xf6#p': 2,
u'vvo': 2,
u'\xe7i#': 2,
u'\xfap\u0159': 2,
u'nht': 2,
u'\xf3\u017e\xed': 2,
u'bs\xed': 2,
u'cyz': 2,
u'kk\xe4': 2,
u'+#f': 2,
u'+#l': 2,
u'+#i': 2,
u'tvl': 2,
u'+#\xb0': 2,
u'#\xe1r': 2,
u'\xe9at': 2,
u'hsc': 2,
u'hsy': 2,
u'#\xe1#': 2,
u'giv': 2,
u'gim': 2,
u'ah\xed': 2,
u'\u03c0\u03c1\u03cc': 2,
u'ti\u016f': 2,
u'\u016fa#': 2,
u'ro\u0165': 2,
u'\xfa#d': 2,
u'\xfa#t': 2,
u'\u03b9#\u03b1': 2,
u'gti': 2,
u'ia\u0161': 2,
u'#++': 2,
u'vtu': 2,
u'l\xf3\u017e': 2,
u'vt\xe9': 2,
u'#+\xb0': 2,
u'l\xf3\u0159': 2,
u'u\u010ds': 2,
u'uy\u017e': 2,
u'shh': 2,
u'yua': 2,
u'iay': 2,
u'm\xe1z': 2,
u'm\xe1g': 2,
u'\u010dap': 2,
u'\u010dag': 2,
u'tk#': 2,
u'nl\xe1': 2,
u'\u011br\u011b': 2,
u'lcy': 2,
u'yat': 2,
u'u\xeas': 2,
u'v\xe9o': 2,
u'\u0161pr': 2,
u'ptv': 2,
u'ftl': 2,
u'lh\u010d': 2,
u'k\u200b\u200b': 2,
u'al\u0148': 2,
u'\xf1al': 2,
u'\xf1an': 2,
u'oyi': 2,
u'lhu': 2,
u'ya\xf1': 2,
u'l\u016fc': 2,
u'l\u016fp': 2,
u'bh\xfa': 2,
u'\xfa\u0161i': 2,
u'o#\u200b': 2,
u'd\u01d0m': 2,
u'\xe9#\xe1': 2,
u'i\u010f#': 2,
u'rtd': 2,
u'iej': 2,
u'n\u0161p': 2,
u'uut': 2,
u'uur': 2,
u'aaz': 2,
u'ie\u0142': 2,
u'trb': 2,
u'\u011bv\xfd': 2,
u'qad': 2,
u'qa#': 2,
u's\xf8r': 2,
u'#ij': 2,
u'#iq': 2,
u'\u016fd\u016f': 2,
u'ylt': 2,
u'ylp': 2,
u'ixi': 2,
u'xpu': 2,
u'++d': 2,
u'i#\xe5': 2,
u'i#\xf3': 2,
u'i#\xf6': 2,
u'l\xe4c': 2,
u'tpl': 2,
u'l\xe4r': 2,
u'#\u015fa': 2,
u'i#+': 2,
u'z\xfd\u010d': 2,
u'v\xedo': 2,
u'hki': 2,
u'si\u0107': 2,
u'apb': 2,
u'h#\xe9': 2,
u'dwy': 2,
u'\u0161tc': 2,
u'\xf3#\u017e': 2,
u'\xfdtc': 2,
u'yy#': 2,
u'o\u0148m': 2,
u'ltj': 2,
u'\xf3#b': 2,
u'\xf3#v': 2,
u't\xf3l': 2,
u'#g\u0151': 2,
u'jo\xeb': 2,
u'#v\xf6': 2,
u'yub': 2,
u'jof': 2,
u'joj': 2,
u'ie\u0144': 2,
u'\u010dik': 2,
u'fki': 2,
u'uqu': 2,
u'\u010f\u016f#': 2,
u'tck': 2,
u'\xedds': 2,
u'\xf3my': 2,
u'\xf3me': 2,
u'##\u02c7': 2,
u'ndm': 2,
u'xat': 2,
u'vae': 2,
u'\u0123ir': 2,
u'\u043e\u0439#': 2,
u'=re': 2,
u's#=': 2,
u'myo': 2,
u'#m\xea': 2,
u'#m\xe4': 2,
u'#m\xe3': 2,
u'y\u0161r': 2,
u'xt+': 2,
u'\u011bdv': 2,
u'u\u010dl': 2,
u'ho\xfd': 2,
u'm\u0163#': 2,
u'e\u0161s': 2,
u'mdr': 2,
u'mds': 2,
u'mdu': 2,
u'smp': 2,
u'h\xe9z': 2,
u'md#': 2,
u'atj': 2,
u'f\xfch': 2,
u'xij': 2,
u'xix': 2,
u'\u016fte': 2,
u'uy\xe8': 2,
u'n\xe9d': 2,
u'n\xe9p': 2,
u'\xe9pl': 2,
u'\xe9pu': 2,
u'wth': 2,
u'\u01d0ma': 2,
u'cdt': 2,
u'dhd': 2,
u'=al': 2,
u'dhp': 2,
u'y\u0142a': 2,
u'\u010duk': 2,
u'kh\u016f': 2,
u'umg': 2,
u'umc': 2,
u'\u011b\u0161m': 2,
u'fok': 2,
u'#w\xfc': 2,
u'khi': 2,
u'\u017ce#': 2,
u'h\xf3r': 2,
u'e\u0142#': 2,
u've\u017e': 2,
u'fpi': 2,
u'ttn': 2,
u'ttm': 2,
u'sj\u0159': 2,
u'\xe2ne': 2,
u'o\u0148a': 2,
u'\u016fzr': 2,
u'sax': 2,
u'saw': 2,
u'bl\u016f': 2,
u'axl': 2,
u'd\xfcs': 2,
u'\u0119#w': 2,
u'\u03cc\u03b5\u03b4': 2,
u'xml': 2,
u'n\xed\u0159': 2,
u'\xe9t\u016f': 2,
u'wuj': 2,
u'\u0159\xedu': 2,
u'n\xedf': 2,
u'\xe9th': 2,
u'\xe9ti': 2,
u'\xe9tu': 2,
u'wp#': 2,
u'\u015fen': 2,
u'u#\u0163': 2,
u'u#\u015b': 2,
u'gde': 2,
u'gdi': 2,
u'oh\u016f': 2,
u'u#\u03bf': 2,
u'y\xe8r': 2,
u'uip': 2,
u'ohz': 2,
u'cpd': 2,
u'cpp': 2,
u'\xf3em': 2,
u'k\u016fp': 2,
u'\xedlm': 2,
u'\xfcne': 2,
u'\xfcns': 2,
u'kll': 2,
u'\xfcn#': 2,
u'=#o': 2,
u'xud': 2,
u'dfa': 2,
u'n\u0163o': 2,
u'=##': 2,
u'itp': 2,
u'it\xe0': 2,
u'z\xfdm': 2,
u'tp\u0159': 2,
u'trt': 2,
u'uee': 2,
u'\u010fal': 2,
u'\xe1gs': 2,
u'o\u0144s': 2,
u'o\u0144c': 2,
u'\xe9zi': 2,
u'pdb': 2,
u'xap': 2,
u'ml+': 2,
u'\u017ede': 2,
u'mls': 2,
u'dr\u0165': 2,
u'n\xe1a': 2,
u'\xe9hu': 2,
u'\u015fan': 2,
u'\u015fah': 2,
u'ag\u016f': 2,
u'\u0165ou': 2,
u'jcy': 2,
u'jca': 2,
u'=in': 2,
u'\u0161ki': 2,
u'leq': 2,
u'rdr': 2,
u'rdh': 2,
u't\xfcv': 2,
u'o\u0165t': 2,
u'o\u0165a': 2,
u'\xb1##': 2,
u'ih\u0159': 2,
u'\u012bks': 2,
u'a\u017ad': 2,
u'vm\xe1': 2,
u'rv\xfd': 2,
u'i\u016fm': 2,
u'ihh': 2,
u'+do': 2,
u'kp\u0159': 2,
u'ckn': 2,
u'et\xfa': 2,
u'bty': 2,
u'bto': 2,
u'cbi': 2,
u'\u011b\u017em': 2,
u'et\u0119': 2,
u'\u03b7\u03c2#': 2,
u'et\u0151': 2,
u'phr': 2,
u'\xdf#j': 2,
u'\xdf#m': 2,
u'mhe': 2,
u'mha': 2,
u'mhu': 2,
u'\xed#\xb1': 2,
u'w#\u017e': 2,
u'hwi': 2,
u'eo\u0161': 2,
u'zya': 2,
u'hps': 2,
u'n\xe7u': 2,
u'ptc': 2,
u'\xe3os': 2,
u'\xe9lu': 2,
u'\u0161of': 2,
u'la\u015f': 2,
u'i\xf3#': 2,
u'uas': 2,
u'uwi': 2,
u'zey': 2,
u't\xf8e': 2,
u'o\u0161c': 2,
u'o\u0161u': 2,
u'ntj': 2,
u'ntb': 2,
u'ze\u017e': 2,
u'ze\u015b': 2,
u'nbw': 2,
u'ktv': 2,
u'o\u0161\u016f': 2,
u'\u03b2e#': 2,
u'\u03bf##': 2,
u'b\xe1\u0148': 2,
u'cl#': 2,
u'\u200bpo': 2,
u'd\xf3b': 2,
u'ilw': 2,
u'\xf3po': 2,
u'\u200bp\u0159': 2,
u'\u017e\xed\u0159': 2,
u'\u010d\u0165o': 2,
u'th\xe1': 2,
u'il\u0103': 2,
u'\xe2rg': 2,
u'\u0435#c': 2,
u'#nf': 2,
u'\u03b5\u03c4\u03b1': 2,
u'ykk': 2,
u'vdl': 2,
u'mta': 2,
u'htr': 2,
u'sp\xf3': 2,
u'\xe9\u0161\xe1': 2,
u'\xe1nf': 2,
u'spb': 2,
u'spc': 2,
u'\xe9\u0161a': 2,
u'aoe': 2,
u'\u0159p\u011b': 2,
u'za\u0165': 2,
u'rl\xe9': 2,
u'lm\xed': 2,
u'zay': 2,
u'\u015f#j': 2,
u'\u015f#a': 2,
u'\u015f#b': 2,
u'\xedpe': 2,
u'\u03b9#\u03c4': 2,
u'z\u010de': 2,
u'jt\xfd': 2,
u'kxa': 2,
u'\u200bev': 2,
u'vur': 2,
u'gwp': 2,
u'm\xe0e': 2,
u'uxl': 2,
u'\u0163oc': 2,
u'\u016fjt': 2,
u'r\u0148m': 2,
u'b#\xf6': 2,
u'l\xfcb': 2,
u'nm\xe1': 2,
u'yng': 2,
u'b#w': 2,
u'dwe': 2,
u'yok': 2,
u'yoa': 2,
u'mp\xf3': 2,
u'u\xe9n': 2,
u'mp\u0159': 2,
u'wli': 2,
u'l\u0103#': 2,
u'\u0165ch': 2,
u'hh#': 2,
u'il\xf6': 2,
u'#\u015f\u0131': 2,
u'\u013a\u0148u': 2,
u'\u0161we': 2,
u'\u017eul': 2,
u'gpg': 2,
u'\u017eus': 2,
u'rh\xe9': 2,
u'stb': 2,
u'liy': 2,
u'pu\u017e': 2,
u'st\xe4': 2,
u'pu\u0142': 2,
u'st\xf8': 2,
u'oxt': 2,
u'n\u010da': 2,
u'v\u017ee': 2,
u'b\xe9a': 2,
u'\u010f\xe1t': 2,
u'\u0103#j': 2,
u'\u0103#d': 2,
u'\u0103#b': 2,
u'\u03b7#\u03bb': 2,
u'a#\u0111': 2,
u'l#\xf3': 2,
u'l#q': 2,
u'id=': 2,
u'idk': 2,
u'fde': 2,
u'utp': 2,
u'kc#': 2,
u'kch': 2,
u'kcj': 2,
u'ycl': 2,
u'yca': 2,
u'l+v': 2,
u'flf': 2,
u'iee': 2,
u'\xfdh#': 2,
u'\u0151fi': 2,
u'#\u03bd\u03b1': 2,
u'#\u03c0\u03c1': 2,
u'i\u010dr': 2,
u'hln': 2,
u'v\u0159i': 2,
u'k\xf3w': 2,
u'k\xf3n': 2,
u'yrh': 2,
u'ecm': 2,
u'e\u010f\u016f': 2,
u'ecp': 2,
u'snt': 2,
u'jze': 2,
u'\xe9\u0159o': 2,
u'e\u010fa': 2,
u'awy': 2,
u'pya': 2,
u'lu\xf1': 2,
u'mga': 2,
u'ot\u0148': 2,
u'shv': 2,
u'dpp': 2,
u'r\xed\u0161': 2,
u'e\xf3z': 2,
u'otz': 2,
u'sh\u016f': 2,
u'wki': 2,
u'lu\u0159': 2,
u'zyo': 2,
u'\u011b#y': 2,
u'\xe9sa': 2,
u'=do': 2,
u'=de': 2,
u'z\u0105c': 2,
u'#\u0123i': 2,
u'\u0165ky': 2,
u'\u015f\u0131k': 2,
u'#\u03c4\u03b7': 2,
u'#\u03c4\u03bf': 2,
u'upg': 2,
u'\u016fvr': 2,
u'\xedcm': 2,
u'up\xf3': 2,
u'\u017cel': 2,
u'#l\xfc': 2,
u'ygm': 2,
u'ygh': 2,
u'\xe1\u010f\xe1': 2,
u'c\xe1l': 2,
u'cmo': 2,
u'#\u02c7#': 2,
u'n#\xe1': 2,
u'l\xe1f': 2,
u'tyo': 2,
u'tyi': 2,
u'n#+': 2,
u'baq': 2,
u'baf': 2,
u'mcy': 2,
u'akj': 2,
u'v\u011b\u0165': 2,
u'wwd': 2,
u'wwb': 2,
u'wwk': 2,
u'op\u013a': 2,
u'b\u010d\xe1': 2,
u's\u0142u': 2,
u'v\u011bp': 2,
u'v\u011bv': 2,
u'+xm': 2,
u'jh#': 2,
u'g\u0117#': 2,
u'\u010dt\xe1': 2,
u'e\u0142a': 2,
u'k\u017et': 2,
u'zzp': 2,
u'tnk': 2,
u'di\xf1': 2,
u'ny\xed': 2,
u'oof': 2,
u'\xedo#': 2,
u'uab': 2,
u'nyp': 2,
u'nyc': 2,
u'#\u043e\u0442': 2,
u'\xedoc': 2,
u'\xedod': 2,
u'zns': 2,
u'fl\xe4': 2,
u'\u017cak': 2,
u'kky': 2,
u'#\u0161\u010d': 2,
u'\xf6d\xf6': 2,
u'xpi': 2,
u'ixl': 2,
u'r\xedc': 2,
u'#\u0161w': 2,
u'#\u0161n': 2,
u'\xdfi#': 2,
u'h\u0165o': 2,
u'l\xedj': 2,
u'l\xed\xfa': 2,
u'pa\u0142': 2,
u'pa\u017a': 2,
u'tuu': 2,
u'd#\xe1': 2,
u'\xe1b\u0159': 2,
u'bmu': 2,
u'd#\xe9': 2,
u'g\u0159\xed': 2,
u'\u016fp\u0159': 2,
u'mo\xe1': 2,
u'\u017eaz': 2,
u'\u016fpo': 2,
u'#\u03b5\u03c5': 2,
u't\u0151f': 2,
u'rr\xed': 2,
u'o\u0161v': 2,
u'i\xfa\u010d': 2,
u't\xe4t': 2,
u'hyz': 2,
u'wsz': 2,
u'ggy': 2,
u'hdo': 2,
u'gga': 2,
u'jd#': 2,
u'jdn': 2,
u'hy\u0165': 2,
u'b\xe1b': 2,
u'de\xf1': 2,
u'uhf': 2,
u'okw': 2,
u'k\u011b#': 2,
u'#d\u01d0': 2,
u'\xedkh': 2,
u'\u011ben': 2,
u'vn\u0161': 2,
u't\u011b\u0159': 2,
u'rrl': 2,
u'yp\xfd': 2,
u'ko\xed': 2,
u'\xfal\u016f': 2,
u'\xf6hl': 2,
u'#\u0165c': 2,
u'r\xe9o': 2,
u'u\u0142a': 2,
u'ik\xed': 2,
u'\u0142#z': 2,
u'l\xe9e': 2,
u'\xe1fa': 2,
u'fud': 2,
u'fua': 2,
u'ypp': 2,
u'ik\u0161': 2,
u'biu': 2,
u'pey': 2,
u'pew': 2,
u'\xfcte': 2,
u'\xfctt': 2,
u'usr': 2,
u'c\u0153u': 2,
u'kiw': 2,
u'jzb': 2,
u'e=p': 2,
u'e=s': 2,
u'e=d': 2,
u'e=a': 2,
u'e=o': 2,
u'\xe9o#': 2,
u'\xe9od': 2,
u'a\u0165p': 2,
u'a\u0165u': 2,
u'a\u0165a': 2,
u'p\xf3\u0142': 2,
u'afz': 2,
u'siy': 2,
u'six': 2,
u'f#\xfa': 2,
u'\xe5sy': 2,
u'\xe5sa': 2,
u'#\u0142\xf3': 2,
u'f#y': 2,
u'lfp': 2,
u'\u017e#x': 2,
u'\u017e#y': 2,
u'y\u0159h': 2,
u'da\xed': 2,
u'\u0165io': 2,
u'#\u0142u': 2,
u'\xedwi': 2,
u'kss': 2,
u'ksd': 2,
u'ksb': 2,
u'ksm': 2,
u'kbu': 2,
u'ks\xe9': 2,
u'#xk': 2,
u'\xfcmt': 2,
u'u\u017et': 2,
u'=sl': 2,
u'ilr': 2,
u'z\xf6n': 2,
u'tm\xfd': 2,
u'tm\xed': 2,
u'pi\u0144': 2,
u'j\xed\u0159': 2,
u'k\u0159a': 2,
u'r\xeb#': 2,
u'p\xf3r': 2,
u'nyz': 2,
u'es\u0161': 2,
u'u\u0159c': 2,
u'vcs': 2,
u'\xfdc#': 2,
u'\u0107#d': 2,
u'\u0107#m': 2,
u'\u0107#z': 2,
u'en\u0165': 2,
u'f\xe9d': 2,
u'en\xe8': 2,
u'en\xf6': 2,
u'\u03c1\u03cc\u03b5': 2,
u'z\u0148e': 2,
u's\u017ei': 2,
u'\xe9cs': 2,
u'\xe9cr': 2,
u'\u011bt\xfd': 2,
u'\u0161nu': 2,
u'\u0161ny': 2,
u'wst': 2,
u'=ep': 2,
u'ra\xed': 2,
u'\xe4ck': 2,
u'\xe4ch': 2,
u'\u0161\u016fm': 2,
u'f\u0159i': 2,
u'j\xfar': 2,
u'\u0142ub': 2,
u'oc+': 2,
u'nuu': 2,
u'oc\xe9': 2,
u'kwe': 2,
u'yk\xf3': 2,
u'ma\xdf': 2,
u'#g+': 2,
u'vvz': 2,
u'l\u0161\u0161': 2,
u'\xf6pe': 2,
u'p\xfdv': 2,
u'jy#': 2,
u'm=i': 2,
u'u\u010fa': 2,
u'qi#': 2,
u'\xe1n\u010d': 2,
u'nn\u016f': 2,
u'm\xe3o': 2,
u'ths': 2,
u'\xe1np': 2,
u'\xe1nr': 2,
u'nnh': 2,
u'o\u015fa': 2,
u'\xf3si': 2,
u'\xf3se': 2,
u'ti+': 2,
u'bq#': 2,
u'a\u0161\u016f': 2,
u'pma': 2,
u'\u0161\xe1v': 2,
u'msy': 2,
u'msf': 2,
u'r\xfat': 2,
u'd\xe9t': 2,
u'n\xec#': 2,
u'ej\xf3': 2,
u't\xfab': 2,
u'ejy': 2,
u'r\u0103e': 2,
u'waj': 2,
u'e\xdfi': 2,
u'pr\xfa': 2,
u'huw': 2,
u'hux': 2,
u'ebp': 2,
u'gki': 2,
u'\u0165p\xe1': 2,
u'an+': 2,
u'kv\xfd': 2,
u'\u017ev\xe1': 2,
u'ssz': 2,
u'dyv': 2,
u'dyo': 2,
u'ssg': 2,
u'dya': 2,
u'j\u015bc': 2,
u'rmd': 2,
u'jow': 2,
u'rt+': 2,
u'ss\xe9': 2,
u'e\xfad': 2,
u'\u0103#v': 2,
u'okb': 2,
u'\xe1\u0148#': 2,
u'n\xed\u0161': 2,
u'ig\u0117': 2,
u'p\xe1h': 2,
u'\u015bci': 2,
u'cbe': 2,
u'rvl': 2,
u'vso': 2,
u'\u010do#': 2,
u'#p\u012b': 2,
u'\u010dop': 2,
u'\u010doj': 2,
u'\u010doc': 2,
u'\u011bt\xe1': 2,
u'fap': 2,
u'fag': 2,
u'r#x': 2,
u'nbs': 2,
u'\xf3wu': 2,
u'i\u0161m': 2,
u'k\u0161t': 2,
u'\xed\u0159\xe1': 2,
u'r#\u015f': 2,
u'ni\u0159': 2,
u'go\u0159': 2,
u'izk': 2,
u'\xfdke': 2,
u'ef\xe1': 2,
u't\xfcm': 2,
u'efp': 2,
u'ef=': 2,
u'gvn': 2,
u'bft': 2,
u'\u03c4\u03b1\u03b9': 2,
u'pvd': 2,
u'ri\u0146': 2,
u'pv#': 2,
u'a\xdfn': 2,
u'\u0103em': 2,
u'ac\u0103': 2,
u'ljs': 2,
u'zz#': 2,
u'b\xf6l': 2,
u'ow\u011b': 2,
u'zzi': 2,
u'#t\xf6': 2,
u'wnb': 2,
u'#tp': 2,
u'\xeas#': 2,
u'rr\xfc': 2,
u'r\xe9z': 2,
u'cay': 2,
u'ibc': 2,
u'cfi': 2,
u'r\xe9c': 2,
u'ta\u0144': 2,
u'\u010dkl': 2,
u'y#\u0163': 2,
u'caf': 2,
u'vox': 2,
u'i\u010d\u0165': 2,
u'spz': 2,
u'\xe1vh': 2,
u'\xedfa': 2,
u'us\xe9': 2,
u'byn': 2,
u'kb#': 2,
u'owb': 2,
u's\xfal': 2,
u'h#\xe5': 2,
u'h\xfdr': 2,
u'ybd': 2,
u'#k\u0148': 2,
u'j\u010dt': 2,
u'd\xe1j': 2,
u'ebc': 2,
u'ebm': 2,
u'n\xf6#': 2,
u'n\xf6m': 2,
u'k\u0148o': 2,
u'v\xf3r': 2,
u't+#': 2,
u'bbb': 2,
u't+a': 2,
u't+r': 2,
u'myi': 2,
u'avv': 2,
u'tjo': 2,
u'pz#': 2,
u'eb\u010d': 2,
u'mfn': 2,
u'mfs': 2,
u'\xfdre': 2,
u'\xfdrc': 2,
u'owm': 2,
u'mf\u016f': 2,
u'owt': 2,
u'kd\u011b': 2,
})
|
dmort27/pylid
|
pylid/langs/cs.py
|
Python
|
mit
| 327,925
|
[
"ADF",
"ASE",
"BWA",
"EPW",
"Elk",
"MOE"
] |
6572aab9cd95b7219026333b54db8644f8efff46c4561263861030d1c3161f65
|
#-------------------------------------------------------------------------------
# . File : QMCallerGAMESS.py
# . Program : MolarisTools
# . Copyright : USC, Mikolaj Feliks (2015-2018)
# . License : GNU GPL v3.0 (http://www.gnu.org/licenses/gpl-3.0.en.html)
#-------------------------------------------------------------------------------
import subprocess, os.path, exceptions
from MolarisTools.Utilities import WriteData
from MolarisTools.Units import symbolToAtomicNumber
from MolarisTools.Parser import GAMESSOutputFile, GAMESSDatFile
from MolarisTools.QMMM import QMCaller
class QMCallerGAMESS (QMCaller):
    """A class to provide communication between Molaris and GAMESS-US.

    On construction, a GAMESS input file is written so the job is ready;
    Run () launches the rungms script, parses the log and collects the
    energy, forces and (optionally) Mulliken charges."""
    # . Options specific to GAMESS
    # . By default, basis set is 6-31G* (gbasis=n31, ngauss=6, ndfunc=1)
    defaultAttributes = {
        "ncpu"                  :   1         ,
        "version"               :   "01"      ,
        "memory"                :   10        ,
        "restart"               :   False     ,
        "gbasis"                :   "n31"     ,
        "ngauss"                :   6         ,
        "ndfunc"                :   1         ,
        "fileGAMESSError"       :   "job.err" ,
        "fileGAMESSInput"       :   "job.inp" ,
        "fileGAMESSOutput"      :   "job.log" ,
        "fileGAMESSCheckpoint"  :   "job.dat" ,
        # . NOTE(review): evaluated at class-definition time; assumes $HOME is set
        "pathGAMESS"            :   os.path.join (os.environ["HOME"], "local", "opt", "gamess", "rungms") ,
    }
    defaultAttributes.update (QMCaller.defaultAttributes)

    def __init__ (self, **keywordArguments):
        """Constructor.

        Keyword arguments are forwarded to QMCaller; afterwards a GAMESS
        input file is written."""
        super (QMCallerGAMESS, self).__init__ (**keywordArguments)
        # . Prepare a GAMESS input file
        self._WriteInput ()

    def _WriteInput (self):
        """Write a GAMESS input file (job.inp by default)."""
        lines = []
        # . System section (memory is in MW, i.e. megawords)
        lines.append (" $system mwords=%d $end\n" % self.memory)
        # . Control section: DFT functional, charge and multiplicity
        lines.append (" $contrl scftyp=rhf runtyp=gradient dfttyp=%s\n" % self.method)
        lines.append (" maxit=100 mult=%d icharg=%d $end\n" % (self.multiplicity, self.charge))
        # . Basis set
        lines.append (" $basis gbasis=%s ngauss=%d ndfunc=%d $end\n" % (self.gbasis, self.ngauss, self.ndfunc))
        # . Initial guess
        # . Reuse the wavefunction if the checkpoint file exists
        guess = " $guess guess=huckel $end\n"
        if self.restart:
            if os.path.exists (self.fileGAMESSCheckpoint):
                guess = " $guess guess=moread $end\n"
            else:
                # . No checkpoint to read from - fall back to a fresh Huckel guess
                self.restart = False
        lines.append (guess)
        # . SCF options
        lines.append (" $scf dirscf=.true. $end\n")
        # . Geometry: QM atoms followed by link atoms
        lines.append (" $data\n")
        lines.append ("Comment line\n")
        lines.append ("C1\n")
        atoms = self.molaris.qatoms + self.molaris.latoms
        for atom in atoms:
            lines.append ("%2s %4.1f %9.4f %9.4f %9.4f\n" % (atom.label, symbolToAtomicNumber[atom.label], atom.x, atom.y, atom.z))
        lines.append ("$end\n")
        # . Initial orbitals taken from the previous checkpoint file ($VEC group)
        if self.restart:
            lines.append (" $vec\n")
            checkpoint = GAMESSDatFile (self.fileGAMESSCheckpoint)
            lines.extend (checkpoint.vec)
            lines.append ("$end\n")
        # . Finish up
        WriteData (lines, self.fileGAMESSInput)
        # . TODO: Cosmo and QM/MM (point charges)

    def Run (self):
        """Run the GAMESS calculation, parse the output and collect results."""
        stem, extension = os.path.splitext (self.fileGAMESSInput)
        filename = os.path.basename (stem)
        # . Open log files via context managers so they are always closed, even
        # . if the subprocess call raises (the original leaked them in that case)
        with open (self.fileGAMESSError, "w") as fileError:
            with open (self.fileGAMESSOutput, "w") as fileOutput:
                # . Example: ~/local/opt/gamess/rungms scf-ginkgo 01 8 > scf-ginkgo.out &
                subprocess.check_call ([self.pathGAMESS, filename, self.version, "%d" % self.ncpu], stdout=fileOutput, stderr=fileError)
        # . Parse the output file
        gamess = GAMESSOutputFile (filename=self.fileGAMESSOutput)
        self.Efinal = gamess.Efinal
        self.forces = gamess.forces
        # . Assign charges
        if self.chargeScheme == "Mulliken":
            self.charges = gamess.charges
        # . Finish up
        self._Finalize ()
#===============================================================================
# . Main program
#===============================================================================
# . Library module: nothing to execute when run directly.
if __name__ == "__main__": pass
|
mfx9/MolarisTools
|
MolarisTools/QMMM/QMCallerGAMESS.py
|
Python
|
gpl-3.0
| 4,684
|
[
"GAMESS"
] |
db4c6775ee1daf8d3003c24f5d0692292d80553892547d4c8a7118b5aa7420cf
|
#!/usr/bin/env python
"""
author Brian O'Connor
broconno@ucsc.edu
This module first queries a given Redwood instance for metadata document,
downloads them, minimally transforms them, and, finally, loads them into
Elasticsearch ready for querying.
"""
import semver
import logging
import os
import os.path
import platform
import argparse
import json
import jsonschema
import datetime
import re
import dateutil
import ssl
import dateutil.parser
import ast
#from urllib import urlopen
from urllib2 import urlopen, Request
from subprocess import Popen, PIPE
class QueryAndLoad:
    """Query a Redwood instance for metadata documents, download them via the
    core-client docker image, and build a newline-delimited JSON file ready
    for Elasticsearch bulk loading."""

    def __init__(self):
        """Parse command-line arguments and immediately run the pipeline."""
        parser = argparse.ArgumentParser(description='Queries Redwood, downloads metadata, and prepares Elasticsearch index.')
        parser.add_argument('--redwood-domain', default='ops-dev.ucsc-cgl.org', required=True)
        parser.add_argument('--redwood-token', default='token_path', required=True)
        parser.add_argument('--working-dir', default='working-dir', required=True)
        # get args
        args = parser.parse_args()
        self.redwood_domain = args.redwood_domain
        self.redwood_token = args.redwood_token
        self.working_dir = args.working_dir
        # run
        self.run()

    def run(self):
        """Find assay.json/provenance.json files in Redwood, download each
        bundle, then write elasticsearch_index.jsonl for a bulk load."""
        # Export credentials so the core-client container can pick them up
        # (ACCESS_TOKEN = redwood token, REDWOOD_ENDPOINT = redwood domain).
        os.environ["ACCESS_TOKEN"] = self.redwood_token
        os.environ["REDWOOD_ENDPOINT"] = self.redwood_domain
        print ("** FINDING FILES **")
        # Disable certificate verification - the endpoint may use a
        # self-signed certificate.
        ctx = ssl.create_default_context()
        ctx.check_hostname = False
        ctx.verify_mode = ssl.CERT_NONE
        json_str = urlopen(str("https://"+self.redwood_domain+"/api/v1/repository/files?include=facets&from=1&size=200&filters=%7B%22file%22:%7B%22fileFormat%22:%7B%22is%22:%5B%22json%22%5D%7D%7D%7D"), context=ctx).read()
        metadata_struct = json.loads(json_str)
        for hit in metadata_struct['hits']:
            if hit['fileCopies'][0]['fileName'] == "assay.json" or hit['fileCopies'][0]['fileName'] == "provenance.json":
                object_id = hit['objectID']
                print("INFO: "+hit['fileCopies'][0]['repoDataBundleId']+" "+hit['objectID'])
                print("** DOWNLOAD **")
                # Run the core-client container; equivalent to:
                #   icgc-storage-client download --output-dir /outputs \
                #       --object-id <id> --output-layout bundle --force
                command = ["docker", "run", "--rm",
                           "-e", "ACCESS_TOKEN="+self.redwood_token,
                           "-e", "REDWOOD_ENDPOINT="+self.redwood_domain,
                           "-v", "$(pwd)/samples:/samples",
                           "-v", "$(pwd)/outputs:/outputs",
                           "-v", "$(pwd):/dcc/data",
                           "quay.io/ucsc_cgl/core-client:1.1.0-alpha"]
                command.append("icgc-storage-client")
                command.append("download")
                command.append("--output-dir")
                command.append("/outputs")
                command.append("--object-id")
                command.append(str(object_id))
                command.append("--output-layout")
                command.append("bundle")
                command.append("--force")
                print (" ".join(command))
                try:
                    # Go through bash so the $(pwd) volume mounts are expanded.
                    c_data = Popen(["/bin/bash", "-c", " ".join(command)], stdout=PIPE, stderr=PIPE)
                    stdout, stderr = c_data.communicate()
                    print (stdout)
                    print (stderr)
                except Exception as e:
                    print ('Error while downloading file with content ID: %s Error: %s' % (object_id, e))
        print ("** BUILDING INDEX **")
        index_index = 1
        # walk directory structure, parse JSONs, put in single json, write ES index file
        with open("elasticsearch_index.jsonl", "w") as outfile:
            for root, dirs, files in os.walk("outputs"):
                for currdir in dirs:
                    with open("outputs/"+currdir+"/assay.json", "r") as assay_file:
                        assay_str = json.dumps(json.loads(assay_file.read()))
                    with open("outputs/"+currdir+"/provenance.json", "r") as provenance_file:
                        provenance_str = json.dumps(json.loads(provenance_file.read()))
                    outfile.write('{"index":{"_id":"' + str(index_index) + '","_type":"meta"}}\n')
                    # BUGFIX: the Elasticsearch bulk format requires every line,
                    # including document lines, to end with a newline - without
                    # it the next action line was fused onto this one.
                    outfile.write('{"assay_json": '+assay_str+', "provenance_json": '+provenance_str+'}\n')
                    index_index += 1
# run the class
# . Entry point: the constructor parses the CLI arguments and runs the pipeline.
if __name__ == '__main__':
    runner = QueryAndLoad()
|
briandoconnor/hca-indexing-sandbox
|
query_and_load.py
|
Python
|
apache-2.0
| 5,071
|
[
"Brian"
] |
0163bab55f859c726cd52c689a886b89894b3e10b8d8dc3f7a7a9b72397488db
|
from ..nodes import CtreeNode
from ..util import flatten
class TemplateNode(CtreeNode):
    """Base class for all template nodes.

    Pairs a ``string.Template`` (built from the dedented template text) with
    a dict of child subtrees.  Template keys are exposed as attributes: the
    ``__setattr__`` / ``__getattr__`` overrides below route reads and writes
    of those keys through the ``_children`` dict and keep each child's
    ``parent`` pointer up to date.
    """

    def __init__(self, template_txt, child_dict):
        """
        Create a new template node.

        :param template_txt: The template as a string.
        :param child_dict: A mapping between template keys \
        and subtrees (vhdl_ctreeNodes).
        """
        from textwrap import dedent
        from string import Template
        from collections import namedtuple  # NOTE(review): unused import
        dedented_txt = dedent(template_txt)
        self._template = Template(dedented_txt)
        # Stored by reference (no copy); this assignment also sets the
        # children's parent pointers via __setattr__ below.
        self._children = child_dict
        # NOTE(review): on Python 3 this is a live dict view, on Python 2 a
        # list snapshot — confirm which behavior callers expect.
        self._fields = child_dict.keys()
        super(TemplateNode, self).__init__()

    def codegen(self, indent=0):
        # Render this subtree to source text via the template code generator.
        from ..templates.codegen import TemplateCodeGen
        return TemplateCodeGen(indent).visit(self)

    def label(self):
        # Label used when rendering the tree as a dot graph.
        from ..templates.dotgen import TemplateDotLabeller
        return TemplateDotLabeller().visit(self)

    def _requires_semicolon(self):
        # Template output is emitted verbatim; never append a semicolon.
        return False

    def __setattr__(self, name, val):
        from ..nodes import CtreeNode
        if name == "_children":
            # set parent pointers in child_dict
            assert isinstance(val, dict)
            super(TemplateNode, self).__setattr__(name, val)
            # NOTE(review): the loop variable shadows the ``name`` parameter;
            # harmless only because ``name`` is not used after this point.
            for name, value in val.items():
                for child in flatten(value):
                    child.parent = self
        elif hasattr(self, "_children") and name in self._children:
            # insert into _children dictionary and set parent pointers
            self._children[name] = val
            if isinstance(val, CtreeNode):
                val.parent = self
        else:
            # do standard attribute resolution
            super(TemplateNode, self).__setattr__(name, val)

    def __getattr__(self, name):
        # Only called when normal attribute lookup fails: resolve template
        # keys out of the _children dict.  The ``name != "_children"`` guard
        # avoids re-entering this method before _children has been assigned.
        if name != "_children" and name in self._children:
            child = self._children[name]
            assert child.parent == self, "Encountered bad parent pointer to %s." % repr(child.parent)
            return self._children[name]
        raise AttributeError("'%s' has no attribute '%s'" % (type(self).__name__, name))
class StringTemplate(TemplateNode):
    """
    A template node that wraps Python's string.Template.
    """

    def __init__(self, template_txt="", child_dict=None):
        """
        Create a new template node.

        :param template_txt: The template as a string.
        :param child_dict: A mapping between template keys \
        and subtrees (vhdl_ctreeNodes).  Defaults to an empty dict.
        """
        # Bug fix: the default used to be a mutable ``{}``.  TemplateNode
        # stores the mapping it is given by reference, so every instance
        # created with the default shared (and mutated) one dict.  Using
        # None and creating a fresh dict per call is backward-compatible.
        if child_dict is None:
            child_dict = {}
        super(StringTemplate, self).__init__(template_txt, child_dict)
class FileTemplate(TemplateNode):
    """
    A template node whose template text is read from a file on disk.
    """

    def __init__(self, template_path=".", child_dict=None):
        """
        Create a new template node.

        :param template_path: The path to the template file.
        :param child_dict: A mapping between template keys \
        and subtrees (vhdl_ctreeNodes).  Defaults to an empty dict.
        """
        # Bug fix: ``child_dict={}`` was a shared mutable default, aliased
        # into TemplateNode._children across every instance created with the
        # default.  None + fresh dict per call is backward-compatible.
        if child_dict is None:
            child_dict = {}
        self._template_path = template_path
        # Context manager guarantees the file handle is closed even if
        # read() raises.
        with open(template_path, "r") as template_file:
            contents = template_file.read()
        super(FileTemplate, self).__init__(contents, child_dict)
|
alphaFred/Sejits4Fpgas
|
sejits4fpgas/src/vhdl_ctree/templates/nodes.py
|
Python
|
gpl-3.0
| 3,245
|
[
"VisIt"
] |
a965f38ced61a3d8969e62cbd91c86a49d0e03b71865477d3b6011441bfb18c9
|
import data
import pycuda.driver as cuda
import numpy as np
from pycuda._driver import MemoryError
import logging
import kernels
from pycuda.compiler import SourceModule
def run(exp, gm, sm, nm, sample_block_size, npairs_block_size, nets_block_size, rms_only=True):
    """Allocate shared result buffers, copy all inputs to the GPU and run the
    three Dirac kernels in order (sample rank template -> rank template ->
    rank matching scores).

    :param exp: expression data; provides ``buffer_nsamples`` and ``toGPU``
    :param gm: gene-pair map; provides ``buffer_npairs``
    :param sm: sample map; provides ``buffer_kneighbors``
    :param nm: network map; provides ``buffer_nnets``
    :param sample_block_size: CUDA block dimension along the samples axis
    :param npairs_block_size: CUDA block dimension along the gene-pairs axis
    :param nets_block_size: CUDA block dimension along the networks axis
    :param rms_only: unused in this function body — presumably a
        caller-facing switch; TODO confirm before removing
    :returns: tuple ``(srt, rt, rms)`` of the populated device-backed buffers
    :raises MemoryError: re-raised after logging and freeing partial
        allocations if the device runs out of memory
    """
    srt = data.SharedSampleRankTemplate(exp.buffer_nsamples, gm.buffer_npairs)
    rt = data.SharedRankTemplate(exp.buffer_nsamples, gm.buffer_npairs)
    rms = data.SharedRankMatchingScores(nm.buffer_nnets, exp.buffer_nsamples)
    try:
        # Push every buffer to the device; any of these can raise
        # MemoryError if the card cannot satisfy the allocation.
        exp.toGPU(sample_block_size)
        rms.toGPU(sample_block_size, nets_block_size)
        nm.toGPU(nets_block_size)
        rt.toGPU(sample_block_size, npairs_block_size)
        sm.toGPU(sample_block_size)
        srt.toGPU(sample_block_size, npairs_block_size)
        gm.toGPU(npairs_block_size)
    except MemoryError:
        # we ran out of memory, maybe dev memory changed, in any case,
        logging.error("*************MemoryERROR*********************")
        # NOTE(review): the numpy module ``np`` is passed through here but
        # reqMemory never uses it — kept for call compatibility.
        req_mem = reqMemory(exp, rms, np, rt, sm, srt, gm, nm, sample_block_size, nets_block_size, npairs_block_size)
        logging.error("Shared Dirac")
        # 1073741824.0 = 1 GiB; report both figures in GiB.
        logging.error("Req. Mem[%f], Avail. Mem[%f]" % (float(req_mem) / 1073741824.0, float(cuda.mem_get_info()[0]) / 1073741824.0))
        # Free whatever did make it onto the device before re-raising.
        for d in [exp, rms, nm, rt, sm, srt, gm]:
            if d.gpu_data is not None:
                d.gpu_data.free()
        raise
    sampleRankTemplate(exp.gpu_data, gm.gpu_data, srt.gpu_data, exp.buffer_nsamples, gm.buffer_npairs, npairs_block_size, sample_block_size)
    rankTemplate(srt.gpu_data, sm.gpu_data, rt.gpu_data, srt.buffer_nsamples, sm.buffer_kneighbors, gm.buffer_npairs, npairs_block_size, sample_block_size)
    rankMatchingScores(srt.gpu_data, rt.gpu_data, rms.gpu_data, nm.gpu_data, srt.buffer_nsamples, nm.buffer_nnets, sample_block_size, nets_block_size)
    return (srt, rt, rms)
def reqMemory(exp, rms, np, rt, sm, srt, gm, nm, sample_block_size,
              nets_block_size, npairs_block_size):
    """Return the total device memory (bytes) the buffers in run() request.

    Each argument except ``np`` exposes a ``gpu_mem(...)`` method that
    reports its allocation size for the given block dimensions.

    NOTE(review): ``np`` is accepted but never used; it is kept only so the
    call site in run() stays compatible.
    """
    return (exp.gpu_mem(sample_block_size) +
            rms.gpu_mem(sample_block_size, nets_block_size) +
            nm.gpu_mem(nets_block_size) +
            rt.gpu_mem(sample_block_size, npairs_block_size) +
            sm.gpu_mem(sample_block_size) +
            srt.gpu_mem(sample_block_size, npairs_block_size) +
            gm.gpu_mem(npairs_block_size))
def sampleRankTemplate(exp_gpu, gmap_gpu, srt_gpu, nsamp, npairs,
                       pairs_block_size, sample_block_size):
    """Launch the srtKernel over the expression data.

    nsamp is the columns dim (shape[1]) of exp_gpu
    npairs is the length of gmap_gpu (shape[0])
    """
    block = (pairs_block_size, sample_block_size, 1)
    # Floor division keeps grid dimensions as ints on Python 3 (plain '/'
    # would produce floats, which a CUDA kernel launch rejects); for the
    # int inputs used here it is identical to '/' on Python 2.
    grid = (npairs // pairs_block_size, nsamp // sample_block_size)
    kernel_source = kernels.srt(nsamp)
    mod = SourceModule(kernel_source)
    func = mod.get_function('srtKernel')
    func(exp_gpu, gmap_gpu, srt_gpu, block=block, grid=grid)
def rankTemplate(srt_gpu, sample_map_gpu, rt_gpu, nsamples, neighbors,
                 npairs, pairs_block_size, sample_block_size):
    """Launch the rtKernel.

    srt_gpu is (npairs, nsamples)
    sample_map_gpu is (neighbors, nsamples)
    """
    block = (pairs_block_size, sample_block_size, 1)
    # Floor division keeps grid dimensions as ints on Python 3 (plain '/'
    # would produce floats, which a CUDA kernel launch rejects); identical
    # to '/' for these int inputs on Python 2.
    grid = (npairs // pairs_block_size, nsamples // sample_block_size)
    kernel_source = kernels.rt(neighbors, nsamples)
    mod = SourceModule(kernel_source)
    func = mod.get_function('rtKernel')
    func(srt_gpu, sample_map_gpu, rt_gpu, block=block, grid=grid)
def rankMatchingScores(srt_gpu, rt_gpu, rms_gpu, nmap_gpu, nsamples, nnets,
                       sample_block_size, nets_block_size):
    """Launch the rmsKernel to compute rank matching scores per network.

    NOTE(review): the kernel receives (rt_gpu, srt_gpu, ...) while the
    function signature lists (srt_gpu, rt_gpu, ...) — the swap matches the
    original code and is presumably what rmsKernel expects; confirm against
    kernels.rms before touching the order.
    """
    block = (nets_block_size, sample_block_size, 1)
    # Floor division keeps grid dimensions as ints on Python 3 (plain '/'
    # would produce floats, which a CUDA kernel launch rejects); identical
    # to '/' for these int inputs on Python 2.
    grid = (nnets // nets_block_size, nsamples // sample_block_size)
    kernel_source = kernels.rms(nsamples, nnets)
    mod = SourceModule(kernel_source)
    func = mod.get_function('rmsKernel')
    func(rt_gpu, srt_gpu, nmap_gpu, rms_gpu, block=block, grid=grid)
|
JohnCEarls/GPUDirac
|
gpudirac/device/dirac.py
|
Python
|
agpl-3.0
| 3,857
|
[
"DIRAC"
] |
50c704061c224cbe91c9c3e798b86419fc9117485593f6d6814e63d50c867379
|
"""Driver for gradient calculations."""
from __future__ import print_function
import six.moves.builtins as builtins
import logging
import time
import traceback
import warnings
import numpy # for numeric_grad
from six import itervalues
from six.moves import StringIO
import theano
from theano import gof
from theano.gof import Variable
from theano.compat import OrderedDict, izip
from six.moves import xrange, reduce
from theano.gof.null_type import NullType, null_type
from theano.gof.op import get_debug_values
from theano.compile import ViewOp
np = numpy
__authors__ = "James Bergstra, Razvan Pascanu, Arnaud Bergeron, Ian Goodfellow"
__copyright__ = "(c) 2011, Universite de Montreal"
__license__ = "3-clause BSD License"
__contact__ = "theano-dev <theano-dev@googlegroups.com>"
__docformat__ = "restructuredtext en"
_logger = logging.getLogger('theano.gradient')
# we can't do "import theano.tensor"
# tensor depends on theano.compile
# theano.compile depends on theano.gradient (this file)
# the reason theano.compile depends on theano.gradient
# is that theano.compile.builders contains the op from graph
# functionality and it uses theano.gradient to implement
# the new op's grad method
tensor = None
_msg_retType = 'op.grad(...) returned a non-list'
grad_time = 0
def format_as(use_list, use_tuple, outputs):
    """
    Format ``outputs`` according to the flags ``use_list`` and ``use_tuple``.

    If ``use_list`` is True the result is a list (a non-sequence value is
    wrapped in a one-element list); if ``use_tuple`` is True the result is a
    tuple (wrapped likewise).  When both flags are False, a one-element
    list/tuple is unwrapped to its sole element and any other value is
    returned unchanged.
    """
    assert not (use_list and use_tuple), \
        "Both flags cannot be simultaneously True"
    is_sequence = isinstance(outputs, (list, tuple))
    if use_list:
        return list(outputs) if is_sequence else [outputs]
    if use_tuple:
        return tuple(outputs) if is_sequence else (outputs,)
    if is_sequence:
        assert len(outputs) == 1, \
            "Wrong arguments. Expected a one element list"
        return outputs[0]
    return outputs
def grad_not_implemented(op, x_pos, x, comment=""):
    """
    Return an un-computable symbolic variable of type `x.type`.

    Any expression containing this variable will make tensor.grad (or
    theano.function) raise NotImplementedError, signalling that the
    gradient on the `x_pos`'th input of `op` has not been implemented.
    The optional `comment` is appended to the explanation.
    """
    why = ("This variable is Null because the grad method for "
           "input %s (%s) of the %s op is not implemented. %s"
           ) % (x_pos, x, op, comment)
    null_type = NullType(why)
    return null_type()
def grad_undefined(op, x_pos, x, comment=""):
    """
    Return an un-computable symbolic variable of type `x.type`.

    Any expression containing this variable will make tensor.grad (or
    theano.function) raise an error, signalling that the gradient on the
    `x_pos`'th input of `op` is mathematically undefined.  The optional
    `comment` is appended to the explanation.
    """
    why = ("This variable is Null because the grad method for "
           "input %s (%s) of the %s op is mathematically undefined. %s"
           ) % (x_pos, x, op, comment)
    null_type = NullType(why)
    return null_type()
class DisconnectedType(theano.gof.type.Type):
    """ A type indicating that a variable is a result
    of taking the gradient of c with respect to x
    when c is not a function of x.
    A symbolic placeholder for 0, but to convey
    the extra information that this gradient is 0
    because it is disconnected.
    """

    def filter(self, data, strict=False, allow_downcast=None):
        # A disconnected placeholder must never carry a concrete value.
        raise AssertionError(
            (
                "If you're assigning to a DisconnectedType you're"
                " doing something wrong. It should only be used as"
                " a symbolic placeholder."
            ))

    # NOTE(review): the name looks like a typo of ``filter_variable`` —
    # confirm against the theano.gof.type.Type interface before renaming
    # (callers may rely on the misspelled name).
    def fiter_variable(self, other):
        raise AssertionError(
            (
                "If you're assigning to a DisconnectedType you're"
                " doing something wrong. It should only be used as"
                " a symbolic placeholder."
            ))

    # NOTE(review): defined without an explicit ``self``; when called on an
    # instance the instance binds to ``a``.  Possibly intended to be a
    # staticmethod — confirm before changing.
    def may_share_memory(a, b):
        return False

    # NOTE(review): same missing-``self`` pattern as may_share_memory.
    def value_eq(a, b, force_same_dtype=True):
        raise AssertionError(
            (
                "If you're assigning to a DisconnectedType you're"
                " doing something wrong. It should only be used as"
                " a symbolic placeholder."
            ))

    def __str__(self):
        return 'DisconnectedType'
# Shared module-level singleton; grad code below calls ``disconnected_type()``
# to create fresh disconnected placeholder variables.
disconnected_type = DisconnectedType()
########################
# R Operator
########################
def Rop(f, wrt, eval_points):
    """
    Computes the R operation on `f` wrt to `wrt` evaluated at points given
    in `eval_points`. Mathematically this stands for the jacobian of `f` wrt
    to `wrt` right muliplied by the eval points.

    :type f: Variable or list of Variables
        `f` stands for the output of the computational graph to which you
        want to apply the R operator
    :type wrt: Variable or list of `Variables`s
        variables for which you compute the R operator of the expression
        described by `f`
    :type eval_points: Variable or list of Variables
        evalutation points for each of the variables in `wrt`
    :rtype: Variable or list/tuple of Variables depending on type of f
    :return: symbolic expression such that
        R_op[i] = sum_j ( d f[i] / d wrt[j]) eval_point[j]
        where the indices in that expression are magic multidimensional
        indices that specify both the position within a list and all
        coordinates of the tensor element in the last.
        If `wrt` is a list/tuple, then return a list/tuple with the results.
    """
    from theano.tensor import as_tensor_variable
    # Remember the caller's container type so the result can be formatted
    # the same way at the end.
    using_list = isinstance(f, list)
    using_tuple = isinstance(f, tuple)
    if not isinstance(wrt, (list, tuple)):
        wrt = [wrt]
    if not isinstance(eval_points, (list, tuple)):
        eval_points = [eval_points]
    if not isinstance(f, (list, tuple)):
        f = [f]
    assert len(wrt) == len(eval_points)
    # Check that each element of wrt corresponds to an element
    # of eval_points with the same dimensionality.
    for pack in enumerate(zip(wrt, eval_points)):
        i = pack[0]
        wrt_elem, eval_point = pack[1]
        if not isinstance(wrt_elem, gof.Variable):
            wrt_elem = as_tensor_variable(wrt_elem)
        if not isinstance(eval_point, gof.Variable):
            eval_point = as_tensor_variable(eval_point)
        try:
            if wrt_elem.type.ndim != eval_point.type.ndim:
                raise ValueError('Element ' +
                                 str(i) +
                                 ' of wrt/eval_point have mismatched ' +
                                 'dimensionality: ' +
                                 str(wrt_elem.type.ndim) +
                                 ' versus ' +
                                 str(eval_point.type.ndim))
        except AttributeError:
            # wrt_elem and eval_point don't always have ndim like random type
            # Tensor, Sparse and CudaNdArray have the ndim attribute
            pass
    # seen_nodes memoizes, for each visited apply node, the list of R_op
    # results for its outputs (parallel to node.outputs).
    seen_nodes = OrderedDict()

    def _traverse(node):
        """Recursively compute the R_op results for `node`, memoizing them
        in seen_nodes."""
        if node is None:
            return
        op = node.op
        inputs = node.inputs
        # Compute the evaluation points corresponding to each of the
        # inputs of the node
        local_eval_points = []
        for inp in inputs:
            if inp in wrt:
                local_eval_points.append(eval_points[wrt.index(inp)])
            elif inp.owner is None:
                try:
                    local_eval_points.append(inp.zeros_like())
                except:
                    # None should be used for non-differentiable
                    # arguments, like for example random states
                    local_eval_points.append(None)
            elif inp.owner in seen_nodes:
                local_eval_points.append(
                    seen_nodes[inp.owner][inp.owner.outputs.index(inp)])
            else:
                # We actually need to compute the R_op for this node
                _traverse(inp.owner)
                local_eval_points.append(
                    seen_nodes[inp.owner][inp.owner.outputs.index(inp)])
        # Coerce every non-None eval point to the type of its input before
        # handing them to the op's R_op implementation.
        same_type_eval_points = []
        for x, y in zip(inputs, local_eval_points):
            if y is not None:
                if not isinstance(x, gof.Variable):
                    x = as_tensor_variable(x)
                if not isinstance(y, gof.Variable):
                    y = as_tensor_variable(y)
                try:
                    y = x.type.filter_variable(y)
                except TypeError:
                    # This is a hack
                    # Originally both grad and Rop were written
                    # with the assumption that a variable and the
                    # gradient wrt that variable would have the same
                    # dtype. This was a bad assumption because the
                    # gradient wrt an integer can take on non-integer
                    # values.
                    # grad is now fixed, but Rop is not, so when grad
                    # does the right thing and violates this assumption
                    # we have to make it be wrong for Rop to keep working
                    # Rop should eventually be upgraded to handle integers
                    # correctly, the same as grad
                    y = theano.tensor.cast(y, x.type.dtype)
                    y = x.type.filter_variable(y)
                assert x.type == y.type
                same_type_eval_points.append(y)
            else:
                same_type_eval_points.append(y)

        seen_nodes[node] = op.R_op(node.inputs, same_type_eval_points)
    # end _traverse

    # Populate the dictionary
    for out in f:
        _traverse(out.owner)

    rval = []
    for out in f:
        if out in wrt:
            # d out / d out is the identity, so the eval point passes through.
            rval.append(eval_points[wrt.index(out)])
        elif seen_nodes[out.owner][out.owner.outputs.index(out)] is None:
            raise ValueError(('The function is not differentiable with '
                              'respect to the provided inputs !'))
        else:
            rval.append(seen_nodes[out.owner][out.owner.outputs.index(out)])

    return format_as(using_list, using_tuple, rval)
def Lop(f, wrt, eval_points, consider_constant=None,
        disconnected_inputs='raise'):
    """
    Compute the L operation on `f` with respect to `wrt` at `eval_points`,
    i.e. the Jacobian of `f` w.r.t. `wrt` left-multiplied by the evaluation
    points.

    :type f: Variable or list of Variables
        output(s) of the computational graph to apply the L operator to
    :type wrt: Variable or list of Variables
        variable(s) the L operator is computed with respect to
    :type eval_points: Variable or list of Variables
        evaluation points, one per element of `f`
    :rtype: Variable or list/tuple of Variables matching the type of `wrt`
    :return: symbolic expression such that
        L_op[i] = sum_i (d f[i] / d wrt[j]) eval_point[i]
        If `wrt` is a list/tuple, a list/tuple of results is returned.
    """
    # Exact type check (not isinstance) kept deliberately to preserve the
    # original behavior for sequence subclasses.
    if type(eval_points) not in (list, tuple):
        eval_points = [eval_points]

    using_list = isinstance(wrt, list)
    using_tuple = isinstance(wrt, tuple)

    if not isinstance(f, (list, tuple)):
        f = [f]

    # Work on copies so the caller's sequences are never mutated.
    outputs = list(f)
    output_grads = list(eval_points)

    if not isinstance(wrt, (list, tuple)):
        wrt = [wrt]

    assert len(outputs) == len(output_grads)

    # The L operator is exactly grad() seeded with the eval points as the
    # known gradients on the outputs.
    known = dict(izip(outputs, output_grads))
    ret = grad(cost=None, known_grads=known,
               consider_constant=consider_constant, wrt=wrt,
               disconnected_inputs=disconnected_inputs)

    return format_as(using_list, using_tuple, ret)
#########################
# Gradient
#########################
def grad(cost, wrt, consider_constant=None,
         disconnected_inputs='raise', add_names=True,
         known_grads=None, return_disconnected='zero',
         null_gradients='raise'):
    """
    Return symbolic gradients for one or more variables with respect to some
    cost.

    For more information about how automatic differentiation works in Theano,
    see :mod:`gradient`. For information on how to implement the gradient of
    a certain Op, see :func:`grad`.

    Parameters
    ----------
    cost : scalar (0-dimensional) tensor variable or None
        Value with respect to which we are differentiating. May be
        `None` if known_grads is provided.
    wrt : variable or list of variables
        term[s] for which we want gradients
    consider_constant : list of variables
        expressions not to backpropagate through
    disconnected_inputs : {'ignore', 'warn', 'raise'}
        Defines the behaviour if some of the variables in `wrt` are
        not part of the computational graph computing `cost` (or if
        all links are non-differentiable). The possible values are:

        - 'ignore': considers that the gradient on these parameters is zero.
        - 'warn': consider the gradient zero, and print a warning.
        - 'raise': raise DisconnectedInputError.
    add_names : bool
        If True, variables generated by grad will be named
        (d<cost.name>/d<wrt.name>) provided that both cost and wrt
        have names
    known_grads : dict, optional
        A dictionary mapping variables to their gradients. This is
        useful in the case where you know the gradient on some
        variables but do not know the original cost.
    return_disconnected : {'zero', 'None', 'Disconnected'}
        - 'zero' : If wrt[i] is disconnected, return value i will be
          wrt[i].zeros_like()
        - 'None' : If wrt[i] is disconnected, return value i will be
          None
        - 'Disconnected' : returns variables of type DisconnectedType
    null_gradients : {'raise', 'return'}
        Defines the behaviour if some of the variables in `wrt` have a
        null gradient. The possibles values are:

        - 'raise' : raise a NullTypeGradError exception
        - 'return' : return the null gradients

    Returns
    -------
    variable or list/tuple of variables (matches `wrt`)
        symbolic expression of gradient of `cost` with respect to each
        of the `wrt` terms. If an element of `wrt` is not
        differentiable with respect to the output, then a zero
        variable is returned.
    """
    t0 = time.time()
    global tensor
    if tensor is None:
        # Imported lazily to break the circular dependency between
        # theano.tensor, theano.compile and this module (see module header).
        from theano import tensor

    # ----- argument validation -----
    if cost is None:
        if known_grads is None:
            raise AssertionError("cost and known_grads can't both be None.")

    if cost is not None and isinstance(cost.type, NullType):
        raise ValueError("Can't differentiate a NaN cost."
                         "cost is NaN because " +
                         cost.type.why_null)

    if cost is not None and cost.ndim != 0:
        raise TypeError("cost must be a scalar.")

    if isinstance(wrt, set):
        raise TypeError("wrt must not be a set. sets have no defined "
                        "iteration order, so we can't return gradients in a"
                        " matching order.")

    # Remember the caller's container type; mirrored when returning.
    using_list = isinstance(wrt, list)
    using_tuple = isinstance(wrt, tuple)
    if not using_list and not using_tuple:
        wrt = [wrt]

    for elem in wrt:
        if not isinstance(elem, Variable):
            raise TypeError("Expected Variable, got " + str(elem) +
                            " of type " + str(type(elem)))

    # All graph roots we differentiate from: the cost plus any variables
    # whose gradients were supplied via known_grads.
    outputs = []
    if cost is not None:
        outputs.append(cost)
    if known_grads is not None:
        outputs.extend(list(known_grads.keys()))

    var_to_app_to_idx = _populate_var_to_app_to_idx(
        outputs, wrt, consider_constant)

    # build a dict mapping var to the gradient of cost with respect to var
    grad_dict = OrderedDict()

    if known_grads is None:
        known_grads = OrderedDict()

    # The gradient of the cost is 1 unless specified otherwise by known_grads.
    if cost is not None:
        if cost in known_grads:
            g_cost = known_grads[cost]
        else:
            g_cost = _float_ones_like(cost)
        # g_cost may be Disconnected or NullType. A creative use of the
        # function, sure, but nonetheless one we can and should support.
        # So before we try to cast it make sure it even has a dtype
        if (hasattr(g_cost.type, 'dtype') and
                cost.type.dtype not in tensor.discrete_dtypes):
            # Here we enforce the constraint that floating point variables
            # have the same dtype as their gradient.
            g_cost = g_cost.astype(cost.type.dtype)
        # DO NOT enforce g_cost to be 0 if cost is an integer.
        # This is to be enforced by the Op.grad method for the
        # Op that outputs cost.
        if hasattr(g_cost.type, 'dtype'):
            assert g_cost.type.dtype not in tensor.discrete_dtypes

        grad_dict[cost] = g_cost

    # Validate and install the user-supplied gradients.
    for var in known_grads:
        g_var = known_grads[var]

        if not hasattr(g_var, 'type'):
            raise TypeError('output grads must be theano variables.'
                            'Ambiguous whether %s should be made into tensor'
                            ' or sparse theano variable' % str(type(g_var)))

        if (not isinstance(g_var.type, (NullType, DisconnectedType)) and
                'float' not in str(g_var.type.dtype)):
            raise TypeError("Gradients must always be NullType, "
                            "DisconnectedType, or continuous, but grad was "
                            "given a known_grad of type " + str(g_var.type))

        # DO NOT check that these gradients are equal to 0 if var is int
        # The gradient is allowed to be non-zero on var in that case
        # Ops outputing var should not backpropagate its gradient further
        # but that is enforced elsewhere (grep for only_connected_to_int)
        grad_dict[var] = g_var

    def handle_disconnected(var):
        # Apply the `disconnected_inputs` policy to one disconnected variable.
        message = ("grad method was asked to compute the gradient "
                   "with respect to a variable that is not part of "
                   "the computational graph of the cost, or is used "
                   "only by a non-differentiable operator: %s" % var)
        if disconnected_inputs == 'ignore':
            pass
        elif disconnected_inputs == 'warn':
            warnings.warn(message, stacklevel=2)
        elif disconnected_inputs == 'raise':
            # Add the var trace
            tr = getattr(var.tag, 'trace', [])
            if len(tr) > 0:
                message += "\nBacktrace when the node is created:\n"
                # Print separate message for each element in the list of batcktraces
                sio = StringIO()
                for subtr in tr:
                    traceback.print_list(subtr, sio)
                message += str(sio.getvalue())
            raise DisconnectedInputError(message)
        else:
            raise ValueError("Invalid value for keyword "
                             "'disconnected_inputs', valid values are "
                             "'ignore', 'warn' and 'raise'.")

    # variables that do not influence the cost have zero gradient.
    # if wrt is such a variable, populate the grad_dict with this info
    # so that wrt not being in var_to_app_to_idx won't cause an error below
    # according to the flag, possibly raise an error if wrt is disconnected
    for elem in wrt:
        if elem not in var_to_app_to_idx and elem is not cost \
                and elem not in grad_dict:
            handle_disconnected(elem)
            grad_dict[elem] = disconnected_type()

    cost_name = None
    if add_names and cost is not None:
        cost_name = cost.name

    # Make sure we didn't initialize the grad_dict with any ints
    # The gradient may NEVER be an int, even if the variable is an int.
    # Read the Op contract and talk to Ian Goodfellow before changing this!
    for var in grad_dict:
        g = grad_dict[var]
        if hasattr(g.type, 'dtype'):
            assert g.type.dtype in tensor.float_dtypes

    # Backpropagate through the graph; rval is parallel to wrt.
    rval = _populate_grad_dict(var_to_app_to_idx,
                               grad_dict, wrt, cost_name)

    for i in xrange(len(rval)):
        if isinstance(rval[i].type, NullType):
            if null_gradients == 'raise':
                raise NullTypeGradError("tensor.grad encountered a NaN. " +
                                        rval[i].type.why_null)
            else:
                assert null_gradients == 'return'
        if isinstance(rval[i].type, DisconnectedType):
            handle_disconnected(rval[i])
            if return_disconnected == 'zero':
                rval[i] = _float_zeros_like(wrt[i])
            elif return_disconnected == 'None':
                rval[i] = None
            else:
                assert return_disconnected == 'Disconnected'

    if using_tuple:
        rval = tuple(rval)
    elif not using_list:
        # wrt was a single variable: unwrap the one-element result list.
        rval, = rval
    t1 = time.time()
    global grad_time
    grad_time += t1 - t0
    return rval
def subgraph_grad(wrt, end, start=None, cost=None, details=False):
    '''
    With respect to `wrt`, computes gradients of cost and/or from
    existing `start` gradients, up to the `end` variables of a
    symbolic digraph.  In other words, computes gradients for a
    subgraph of the symbolic theano function. Ignores all disconnected
    inputs.

    This can be useful when one needs to perform the gradient descent
    iteratively (e.g. one layer at a time in an MLP), or when a
    particular operation is not differentiable in theano
    (e.g. stochastic sampling from a multinomial). In the latter case,
    the gradient of the non-differentiable process could be
    approximated by user-defined formula, which could be calculated
    using the gradients of a cost with respect to samples (0s and
    1s). These gradients are obtained by performing a subgraph_grad
    from the `cost` or previously known gradients (`start`) up to the
    outputs of the stochastic process (`end`).  A dictionary mapping
    gradients obtained from the user-defined differentiation of the
    process, to variables, could then be fed into another
    subgraph_grad as `start` with any other `cost` (e.g. weight
    decay).

    In an MLP, we could use subgraph_grad to iteratively backpropagate:

    .. code-block:: python

        x, t = theano.tensor.fvector('x'), theano.tensor.fvector('t')
        w1 = theano.shared(np.random.randn(3,4))
        w2 = theano.shared(np.random.randn(4,2))
        a1 = theano.tensor.tanh(theano.tensor.dot(x,w1))
        a2 = theano.tensor.tanh(theano.tensor.dot(a1,w2))
        cost2 = theano.tensor.sqr(a2 - t).sum()
        cost2 += theano.tensor.sqr(w2.sum())
        cost1 = theano.tensor.sqr(w1.sum())

        params = [[w2],[w1]]
        costs = [cost2,cost1]
        grad_ends = [[a1], [x]]

        next_grad = None
        param_grads = []
        for i in xrange(2):
            param_grad, next_grad = theano.subgraph_grad(
                wrt=params[i], end=grad_ends[i],
                start=next_grad, cost=costs[i]
            )
            next_grad = dict(zip(grad_ends[i], next_grad))
            param_grads.extend(param_grad)

    :type wrt: list of variables
    :param wrt:
        Gradients are computed with respect to `wrt`.

    :type end: list of variables
    :param end:
        Theano variables at which to end gradient descent (they are
        considered constant in theano.grad).  For convenience, the
        gradients with respect to these variables are also returned.

    :type start: dictionary of variables
    :param start:
        If not None, a dictionary mapping variables to their
        gradients. This is useful when the gradient on some variables
        are known. These are used to compute the gradients backwards up
        to the variables in `end` (they are used as known_grad in
        theano.grad).

    :type cost: scalar (0-dimensional) variable
    :param cost:
        Additional costs for which to compute the gradients.  For
        example, these could be weight decay, an l1 constraint, MSE,
        NLL, etc. May optionally be None if start is provided.  Warning
        : If the gradients of `cost` with respect to any of the `start`
        variables is already part of the `start` dictionary, then it may
        be counted twice with respect to `wrt` and `end`.

        .. warning::

            If the gradients of `cost` with respect to any of the `start`
            variables is already part of the `start` dictionary, then it
            may be counted twice with respect to `wrt` and `end`.

    :type details: bool
    :param details:
        When True, additionally returns the list of gradients from
        `start` and of `cost`, respectively, with respect to `wrt` (not
        `end`).

    :rtype: Tuple of 2 or 4 Lists of Variables

    :return: Returns lists of gradients with respect to `wrt` and `end`,
            respectively.

    .. versionadded:: 0.7
    '''
    assert ((cost is not None) or (start is not None))
    assert isinstance(end, list)
    assert isinstance(wrt, list)
    if start is not None:
        assert isinstance(start, dict)

    # Differentiate w.r.t. the union of wrt and end so both groups can be
    # read out of a single result list below.
    params = list(set(wrt + end))

    start_grads = None
    cost_grads = None
    if start is not None:
        start_grads = list(
            theano.grad(
                cost=None, wrt=params, known_grads=start,
                consider_constant=end,
                disconnected_inputs='ignore'
            )
        )

    if cost is not None:
        cost_grads = list(
            theano.grad(
                cost=cost, wrt=params,
                consider_constant=end,
                disconnected_inputs='ignore'
            )
        )

    grads = None
    if start is None:
        grads = cost_grads
    else:
        grads = start_grads
        if cost_grads is not None:
            for i in range(len(grads)):
                # NOTE(review): this rebinds elements of ``start_grads`` in
                # place (``grads`` aliases it), so under details=True the
                # returned ``start_grads`` already includes the cost
                # contribution — confirm that is intended before restructuring.
                grads[i] += cost_grads[i]

    pgrads = OrderedDict(izip(params, grads))
    # separate wrt from end grads:
    wrt_grads = list(pgrads[k] for k in wrt)
    end_grads = list(pgrads[k] for k in end)

    if details:
        return wrt_grads, end_grads, start_grads, cost_grads

    return wrt_grads, end_grads
def _node_to_pattern(node):
""" given an apply node, obtain its connection pattern
this is just a wrapper around Op.connection_pattern
that does type checking and supplies the default value
if the method is not implemented
"""
if hasattr(node.op, 'connection_pattern'):
connection_pattern = node.op.connection_pattern(node)
if not isinstance(connection_pattern, list):
raise TypeError(
"Op.connection_pattern should return " +
("list of list of bool, but for Op=%s" % node.op) +
"got %s with type %s." % (connection_pattern,
type(connection_pattern)))
if len(connection_pattern) != len(node.inputs):
raise ValueError(
'%s.connection_pattern should have %d' %
(node.op, len(node.inputs)) + ' rows but has %d.' %
len(connection_pattern))
for ii, output_pattern in enumerate(connection_pattern):
if not isinstance(output_pattern, list):
raise TypeError(
'%s.connection_pattern should return' %
node.op + ' a list of lists, but element %d' % ii +
'is %s of type %s.' % (output_pattern,
type(output_pattern)))
else:
connection_pattern = [[True for output in node.outputs]
for ipt in node.inputs]
assert isinstance(connection_pattern, list)
assert len(connection_pattern) == len(node.inputs)
for ii in xrange(len(node.inputs)):
assert isinstance(connection_pattern[ii], list)
assert len(connection_pattern[ii]) == len(node.outputs)
return connection_pattern
def _populate_var_to_app_to_idx(outputs, wrt, consider_constant):
    """
    Helper function for grad function.

    outputs: a list of variables we want to take gradients of

    wrt: a list of variables we want to take the gradient with
        respect to.

    consider_constant: a list of variables not to backpropagate
        through.

    returns:

     var_to_app_to_idx:

      A dictionary mapping a variable to a second dictionary.
      The second dictionary maps apply nodes acting on this
      variable to the variable's index in the apply node's
      input list.

      This dictionary will only contain variables that
      meet two criteria:

       1) The elements of at least one output are a
          function of the elements of the variable

       2) The elements of the variable are a function of the
          elements of at least one member of wrt.

      This set is exactly the set of variables that connect
      the variables in wrt to the cost being differentiated.

      (A variable in consider_constant is not a function of
      anything)
    """
    # Validate and format consider_constant
    if consider_constant is None:
        consider_constant = []
    else:
        # error checking on consider_constant: verify that it is a collection
        # of theano variables
        # this is important, if someone accidentally passes a nested data
        # structure with theano variables at the leaves, only the root will
        # be properly considered constant
        try:
            iter(consider_constant)
        except TypeError:
            raise TypeError('consider_constant must be an iterable collection,'
                            ' got ' + str(type(consider_constant)))
        for elem in consider_constant:
            if not isinstance(elem, gof.Variable):
                raise TypeError('Elements of consider_constant must be '
                                'variables, but got ' + str(type(elem)))

    # var_to_app_to_idx[var][node] = [i,j] means node has
    # var as input at positions i and j
    var_to_app_to_idx = OrderedDict()

    # Set of variables that have been added to their true parents
    # ('true' here means that the elements of the variable are a function
    #  of the elements of the parent, according to the op's
    #  connection_pattern)
    # Note: we need to revisit the apply nodes repeatedly, because
    #       different outputs of the apply node are connected to
    #       different subsets of the inputs.
    accounted_for = set([])

    def account_for(var):
        # Downward pass: record, for every true ancestor of `var`, which
        # apply nodes consume it and at which input positions.
        # Don't visit the same variable twice
        if var in accounted_for:
            return
        accounted_for.add(var)

        # Constants are not a function of anything
        if var in consider_constant:
            return

        # Recursively add the variables that this variable is
        # a function of.
        if var.owner is not None:
            app = var.owner

            connection_pattern = _node_to_pattern(app)

            var_idx = app.outputs.index(var)

            for i, ipt in enumerate(app.inputs):

                # don't process ipt if it is not a true
                # parent of var
                if not connection_pattern[i][var_idx]:
                    continue

                if ipt not in var_to_app_to_idx:
                    # This object here *must* be an OrderedDict, because
                    # we iterate over its keys when adding up the terms of the
                    # gradient on ipt. If it is a regular dict, the grad method
                    # will return something that is analytically correct, but
                    # whose order of doing additions depends on the memory
                    # location of the apply nodes.
                    var_to_app_to_idx[ipt] = OrderedDict()
                app_to_idx = var_to_app_to_idx[ipt]
                if app not in app_to_idx:
                    app_to_idx[app] = []
                idx = app_to_idx[app]
                if i not in idx:
                    idx.append(i)

                account_for(ipt)

    # add all variables that are true ancestors of the cost
    for output in outputs:
        account_for(output)

    # determine which variables have elements of wrt as a true
    # ancestor. Do this with an upward pass starting from wrt,
    # following only true connections
    visited = set([])

    def visit(var):
        # Upward pass: mark every variable reachable from wrt through
        # true (connected) edges of the patterns recorded above.
        if var in visited:
            return
        if var not in var_to_app_to_idx:
            return
        visited.add(var)
        nodes = var_to_app_to_idx[var]
        for node in nodes:
            connection_pattern = _node_to_pattern(node)
            for idx in nodes[node]:
                for ii, output in enumerate(node.outputs):
                    if connection_pattern[idx][ii]:
                        visit(output)

    for elem in wrt:
        visit(elem)

    # Remove variables that don't have wrt as a true ancestor
    orig_vars = list(var_to_app_to_idx.keys())
    for var in orig_vars:
        if var not in visited:
            del var_to_app_to_idx[var]

    return var_to_app_to_idx
class NullTypeGradError(TypeError):
    """Error raised when the gradient computation encounters a NullType.

    A NullType gradient indicates an undefined or unimplemented gradient,
    such as those produced by the grad_undefined or grad_unimplemented
    helpers.
    """
class DisconnectedInputError(ValueError):
    """Error raised when ``grad`` is asked for the gradient with respect to
    a disconnected input while ``disconnected_inputs='raise'`` is in
    effect.
    """
def _populate_grad_dict(var_to_app_to_idx,
                        grad_dict, wrt, cost_name=None):
    """
    Helper function for grad function.

    var_to_app_to_idx: a dictionary mapping a variable to
        a second dictionary.
        The second dictionary maps apply nodes acting on
        this variable to the variable's index in the apply
        node's input list.

    grad_dict: A dictionary mapping variables to their gradients.
        Should be populated by grad function, which should:
        -Set the gradient with respect to the cost to 1
        -Load all gradients from known_grads, possibly
         overriding the cost
        -Set the gradient for disconnected
         inputs to a variable with type DisconnectedType()

    wrt: the minimal set of variables that must be included in grad_dict.

    cost_name: The name of the cost being differentiated, optional.
        Used to name the grad with respect to x as
        (d<cost_name>/dx)

    returns: a list of gradients corresponding to wrt
    """
    # build a dict mapping node to the terms node contributes to each of
    # its inputs' gradients
    term_dict = OrderedDict()

    def access_term_cache(node):
        """ Populates term_dict[node] and returns it """
        if node not in term_dict:
            inputs = node.inputs

            output_grads = [access_grad_cache(var) for var in node.outputs]

            # list of bools indicating if each output is connected to the cost
            outputs_connected = [not isinstance(g.type, DisconnectedType)
                                 for g in output_grads]

            connection_pattern = _node_to_pattern(node)

            # list of bools indicating if each input is connected to the cost
            inputs_connected = [
                (True in [input_to_output and output_to_cost for
                          input_to_output, output_to_cost in
                          zip(input_to_outputs, outputs_connected)]) for
                input_to_outputs in connection_pattern
            ]

            # List of bools indicating if each output is an integer dtype
            output_is_int = [hasattr(output.type, 'dtype') and
                             output.type.dtype in theano.tensor.discrete_dtypes
                             for output in node.outputs]

            # List of bools indicating if each output is NullType
            ograd_is_nan = [isinstance(output.type, NullType)
                            for output in output_grads]

            # List of bools indicating if each input only has NullType outputs
            only_connected_to_nan = [
                (True not in
                 [in_to_out and out_to_cost and not out_nan
                  for in_to_out, out_to_cost, out_nan in
                  zip(in_to_outs, outputs_connected, ograd_is_nan)])
                for in_to_outs in connection_pattern]

            if True not in inputs_connected:
                # All outputs of this op are disconnected so we can skip
                # calling the op's grad method and report that the inputs
                # are disconnected
                # (The op's grad method could do this too, but this saves the
                # implementer the trouble of worrying about this case)
                input_grads = [disconnected_type() for ipt in inputs]
            elif False not in only_connected_to_nan:
                # All inputs are only connected to nan gradients, so we don't
                # need to bother calling the grad method. We know the gradient
                # with respect to all connected inputs is nan.
                input_grads = []
                for connected in inputs_connected:
                    if connected:
                        input_grads.append(null_type())
                    else:
                        input_grads.append(disconnected_type())
            else:
                # At least one input of this op is connected to the cost so and
                # not all output gradients are undefined so we must
                # call the op's grad method

                # Each Op's grad function requires inputs and output_grads
                # If the Op destroys any input, but the grad expression uses
                # it, then chances are the resulting graph will have a
                # dependency cycle. We avoid this cycle by passing (symbolic)
                # copies of each destroyed input.
                try:
                    dinputs = [node.inputs[x[0]] for x in
                               itervalues(node.op.destroy_map)]
                except AttributeError:
                    dinputs = []

                def try_to_copy_if_needed(var):
                    # Only destroyed inputs need a protective copy.
                    if var in dinputs and hasattr(var, 'copy'):
                        return var.copy()
                    return var

                inputs = [try_to_copy_if_needed(ipt) for ipt in inputs]

                # Build a list of output gradients with the same dtype as
                # the corresponding output variable.
                # If an output is of a float dtype, we want to cast the
                # output gradient into the same dtype, to avoid having a
                # gradient graph with double precision (taking more memory,
                # and more computation).
                # If an output is of an integer dtype, then we just leave it
                # alone.
                # DO NOT force integer variables to have zero grad. This causes
                # bugs where we fail to detect disconnected or undefined
                # gradients.
                # DO NOT force integer variables to have integer dtype.
                # This is a violation of the op contract.
                new_output_grads = []
                for o, og in zip(node.outputs, output_grads):
                    o_dt = getattr(o.type, 'dtype', None)
                    og_dt = getattr(og.type, 'dtype', None)
                    if (o_dt not in theano.tensor.discrete_dtypes and
                            og_dt and o_dt != og_dt):
                        new_output_grads.append(og.astype(o_dt))
                    else:
                        new_output_grads.append(og)

                # Make sure that, if new_output_grads[i] has a floating point
                # dtype, it is the same dtype as outputs[i]
                for o, ng in zip(node.outputs, new_output_grads):
                    o_dt = getattr(o.type, 'dtype', None)
                    ng_dt = getattr(ng.type, 'dtype', None)
                    if (ng_dt is not None and
                            o_dt not in theano.tensor.discrete_dtypes):
                        assert ng_dt == o_dt

                # Someone who had obviously not read the Op contract tried
                # to modify this part of the function.
                # If you ever think it is a good idea to make an integer
                # valued gradient, please
                # 1) Read the Op contract again
                # 2) Talk to Ian Goodfellow
                # (Both of these sources will tell you not to do it)
                for ng in new_output_grads:
                    assert (getattr(ng.type, 'dtype', None)
                            not in theano.tensor.discrete_dtypes)

                # If config.compute_test_value is turned on, check that the
                # gradients on the outputs of this node have the right shape.
                # We also check the gradient on the inputs later--both checks
                # are needed, because some gradients are only ever specified
                # by the user, not computed by Op.grad, and some gradients are
                # only computed and returned, but never passed as another
                # node's output grads.
                for idx, packed in enumerate(izip(node.outputs,
                                                  new_output_grads)):
                    orig_output, new_output_grad = packed
                    if not hasattr(orig_output, 'shape'):
                        continue
                    if isinstance(new_output_grad.type, DisconnectedType):
                        continue
                    for orig_output_v, new_output_grad_v in get_debug_values(
                            *packed):
                        o_shape = orig_output_v.shape
                        g_shape = new_output_grad_v.shape
                        if o_shape != g_shape:
                            # BUG FIX: the two shapes were previously swapped
                            # in this message ("gradient of shape <output
                            # shape>"); report the gradient's shape as the
                            # gradient shape and the output's as the output.
                            raise ValueError(
                                "Got a gradient of shape " +
                                str(g_shape) + " on an output of shape " +
                                str(o_shape))

                input_grads = node.op.grad(inputs, new_output_grads)

                if input_grads is None:
                    raise TypeError("%s.grad returned NoneType, "
                                    "expected iterable." % str(node.op))

                if len(input_grads) != len(inputs):
                    raise ValueError(("%s returned the wrong number of" +
                                      " gradient terms.") % str(node.op))
                # We can not enforce this, as AdvancedSubtensor1 has an option
                # to return the sparse grad for optimization reason.
                # for ig, i in zip(input_grads, inputs):
                #     if (not isinstance(ig.type, (DisconnectedType, NullType))
                #             and type(ig.type) != type(i.type)):
                #         raise ValueError(
                #             "%s returned the wrong type for gradient terms."
                #             " Sparse inputs must have sparse grads and dense"
                #             " inputs must have dense grad. Got %s, expected"
                #             " %s" % (str(node.op), ig.type, i.type))

            # must convert to list in case the op returns a tuple
            # we won't be able to post-process out the Nones if it does that
            input_grads = list(input_grads)

            # Need to propagate the NullType gradients; if an input grad is
            # not disconnected and the corresponding input is connected
            # to at least one output whose gradient is NullType then the input
            # grad should be NullType.
            for inp_idx in range(len(input_grads)):
                for out_idx in range(len(ograd_is_nan)):
                    if (ograd_is_nan[out_idx] and
                            connection_pattern[inp_idx][out_idx] and
                            not isinstance(input_grads[inp_idx].type,
                                           DisconnectedType)):
                        input_grads[inp_idx] = output_grads[out_idx]

            # Do type checking on the result

            # List of bools indicating if each input only has integer outputs
            only_connected_to_int = [
                (True not in
                 [in_to_out and out_to_cost and not out_int
                  for in_to_out, out_to_cost, out_int in
                  zip(in_to_outs, outputs_connected, output_is_int)])
                for in_to_outs in connection_pattern]

            for i, term in enumerate(input_grads):

                # Disallow Nones
                if term is None:
                    # We don't know what None means. in the past it has been
                    # used to mean undefined, zero, or disconnected.
                    # We therefore don't allow it because its usage has become
                    # so muddied.
                    raise TypeError(
                        ('%s.grad returned None for' +
                         ' a gradient term, '
                         'this is prohibited. Instead of None,'
                         'return zeros_like(input), disconnected_type(),'
                         ' or a NullType variable such as those made with '
                         'the grad_undefined or grad_unimplemented helper '
                         'functions.') % node.op)

                # Check that the gradient term for this input
                # has the right shape
                if hasattr(term, 'shape'):
                    orig_ipt = inputs[i]
                    for orig_ipt_v, term_v in get_debug_values(orig_ipt,
                                                               term):
                        i_shape = orig_ipt_v.shape
                        t_shape = term_v.shape
                        if i_shape != t_shape:
                            raise ValueError(
                                "%s.grad returned object of "
                                "shape %s as gradient term on input %d "
                                "of shape %s" % (node.op, t_shape, i,
                                                 i_shape))

                if not isinstance(term.type,
                                  (NullType, DisconnectedType)):
                    if term.type.dtype not in theano.tensor.float_dtypes:
                        raise TypeError(str(node.op) + '.grad illegally '
                                        ' returned an integer-valued variable.'
                                        ' (Input index %d, dtype %s)' % (
                                            i, term.type.dtype))

                    if only_connected_to_nan[i]:
                        # NullType propagation above should have made this
                        # term NullType already; defensive check.
                        assert isinstance(term.type, NullType)

                    if only_connected_to_int[i]:
                        # This term has only integer outputs and we know
                        # it's not undefined or disconnected
                        # The only other valid thing it can be is 0
                        is_zero = _is_zero(term)
                        assert is_zero in ['yes', 'no', 'maybe']
                        if is_zero == 'maybe':
                            msg = "%s.grad returned %s of type %s for input"
                            msg += " %d. This input's only connections to "
                            msg += "the cost through this op are via "
                            msg += "integer-valued outputs so it should be "
                            msg += "NullType, DisconnectedType, or some form "
                            msg += "of zeros. It is not NullType or "
                            msg += "DisconnectedType and theano can't "
                            msg += "simplify it to a constant, so it's not "
                            msg += "verifiably zeros."
                            msg = msg % (str(node.op), str(term),
                                         str(type(term)), i)
                            raise ValueError(msg)
                        if is_zero == 'no':
                            msg = "%s.grad returned %s of type %s for input"
                            msg += " %d. Since this input is only connected "
                            msg += "to integer-valued outputs, it should "
                            msg += "evaluate to zeros, but it evaluates to"
                            msg += "%s."
                            # BUG FIX: the formatted message was previously
                            # discarded (``msg % (...)`` without assignment),
                            # so the unformatted template was raised.
                            msg = msg % (node.op, term, type(term), i,
                                         theano.get_scalar_constant_value(
                                             term))
                            raise ValueError(msg)

            # Check that op.connection_pattern matches the connectivity
            # logic driving the op.grad method
            for i, packed in enumerate(zip(inputs, input_grads,
                                           inputs_connected)):
                ipt, ig, connected = packed
                actually_connected = \
                    not isinstance(ig.type, DisconnectedType)

                if actually_connected and not connected:
                    msg = "%s.grad returned %s of type %s for input %d."
                    msg += " Expected DisconnectedType instance based on "
                    msg += " the output of the op's connection_pattern "
                    msg += "method."
                    msg = msg % (str(node.op), str(ig), str(ig.type), i)
                    raise TypeError(msg)

                if connected and not actually_connected:
                    msg = "%s.grad returned DisconnectedType for input"
                    msg += " %d."
                    msg = msg % (str(node.op), i)
                    if hasattr(node.op, 'connection_pattern'):
                        msg += ' Its connection_pattern method does not'
                        msg += ' allow this.'
                        raise TypeError(msg)
                    else:
                        msg += ' You may want to implement a '
                        msg += 'connection_pattern method for it.'
                        warnings.warn(msg)

            # cache the result
            term_dict[node] = input_grads

        return term_dict[node]

    # populate grad_dict[var] and return it
    def access_grad_cache(var):
        if var not in grad_dict:
            # If var is not in grad_dict already, we must compute it
            if var in var_to_app_to_idx:
                null_terms = []
                terms = []
                node_to_idx = var_to_app_to_idx[var]
                for node in node_to_idx:
                    for idx in node_to_idx[node]:

                        term = access_term_cache(node)[idx]

                        if not isinstance(term, gof.Variable):
                            raise TypeError(
                                "%s.grad returned %s, expected"
                                " Variable instance." % (str(node.op),
                                                         type(term)))

                        if isinstance(term.type, NullType):
                            null_terms.append(term)
                            continue

                        # Don't try to sum up DisconnectedType placeholders
                        if isinstance(term.type, DisconnectedType):
                            continue

                        if hasattr(var, 'ndim') and term.ndim != var.ndim:
                            raise ValueError(
                                ("%s.grad returned a term with"
                                 " %d dimensions, but %d are required.") % (
                                     str(node.op), term.ndim, var.ndim))

                        terms.append(term)

                # Add up the terms to get the total gradient on this variable
                if len(null_terms) > 0:
                    # At least one term is a NullType : the total gradient
                    # will also be a NullType
                    grad_dict[var] = null_terms[0]
                elif len(terms) > 0:
                    # the next line is like sum(terms) but doesn't add an
                    # extraneous TensorConstant(0)
                    grad_dict[var] = reduce(lambda x, y: x + y, terms)
                else:
                    grad_dict[var] = disconnected_type()

                if cost_name is not None and var.name is not None:
                    grad_dict[var].name = '(d%s/d%s)' % (cost_name, var.name)
            else:
                # this variable isn't connected to the cost in the
                # computational graph
                grad_dict[var] = disconnected_type()

        # end if cache miss
        return grad_dict[var]

    rval = [access_grad_cache(elem) for elem in wrt]

    return rval
def _float_zeros_like(x):
""" Like zeros_like, but forces the object to have a
a floating point dtype """
rval = x.zeros_like()
if rval.type.dtype.find('float') != -1:
return rval
return rval.astype(theano.config.floatX)
def _float_ones_like(x):
    """Return ``tensor.ones_like(x)``, cast to a floating point dtype if
    it is not already one (the cast target is ``theano.config.floatX``)."""
    ones = tensor.ones_like(x)
    if 'float' in ones.type.dtype:
        return ones
    return ones.astype(theano.config.floatX)
class numeric_grad(object):
    """
    Compute the numeric derivative of a scalar-valued function at a particular
    point.

    After construction, ``self.gf`` holds the one-sided finite-difference
    gradient estimate: one ndarray per element of ``pt`` (or a single
    ndarray if ``pt`` was not a list/tuple).
    """

    # Note on step sizes and tolerances:
    #
    # There is a relationship between the step size and the function value and
    # the measurement error that is incurred due to rounding. The finite
    # difference we measure is
    # delta = f(x0) - f(x0+eps)
    #
    # For maximum precision, f should be close to zero.
    # For every power of 2 that f departs from zero, we lose a bit of precision
    # in delta.
    #
    # Even in this case of maximum accuracy, there is a tradeoff between
    # stepsize and measurement error.
    # Taking small steps allows us to measure large derivatives accurately,
    # but longer steps are required to measure small derivatives accurately.
    # However longer steps introduce bias into our measurement in general
    # for non-linear functions.
    #
    # It would be interesting to have a version of numeric grad that used an
    # adaptive stepsize.
    #
    # For now, we use a heuristic that catches very bad gradients, but is not
    # perfectly accurate.

    # Default finite-difference step size, keyed by input dtype (both the
    # string name and the numpy dtype object are accepted as keys).
    type_eps = {'float64': 1e-7,
                'float32': 3e-4,
                numpy.dtype('float64'): 1e-7,
                numpy.dtype('float32'): 3e-4}

    def __init__(self, f, pt, eps=None, out_type=None):
        """Return the gradient of f at pt.

        :param f: a differentiable function such that f(*pt) is a scalar
        :param pt: an ndarray, a list of ndarrays or tuple of ndarrays
        :param out_type: dtype of output, if complex (i.e. 'complex32' or
        'complex64')

        This function computes the gradient by a one-sided finite
        differences of a fixed step size (eps).

        It is assumed that f(...) will return a scalar.
        It is assumed that all f's inputs are numpy.ndarray objects.

        :param eps: the stepsize for the finite differencing. None means
        input dtype-dependent. See `type_eps`.
        """

        def prod(inputs):
            # Product of the elements of `inputs` (used for element counts).
            rval = 1
            for i in inputs:
                rval *= i
            return rval

        packed_pt = False
        if not isinstance(pt, (list, tuple)):
            pt = [pt]
            packed_pt = True

        apt = [numpy.array(p) for p in pt]

        shapes = [p.shape for p in apt]
        dtypes = [str(p.dtype) for p in apt]

        # TODO: remove this eventually (why was this here in the first place ?)
        # In the case of CSM, the arguments are a mixture of floats and
        # integers...
        # if not dtypes == [dtypes[0]] * len(apt):
        #     raise TypeError('All function arguments must have same dtype')

        total_size = builtins.sum(prod(sh) for sh in shapes)

        # Work in whichever input dtype has the smallest eps (i.e. the most
        # precise dtype present among the inputs).
        working_dtype = builtins.min(
            (self.type_eps[dt], dt) for dt in dtypes)[1]

        # create un-initialized memory
        x = numpy.ndarray((total_size,), dtype=working_dtype)
        # (not out_type is None) --> (out_type is not None) ???
        if (out_type is not None) and (out_type.startswith('complex')):
            gx = numpy.ndarray((total_size,), dtype=out_type)
        else:
            gx = numpy.ndarray((total_size,), dtype=working_dtype)
        if eps is None:
            # Use the loosest (largest) eps among the input dtypes.
            eps = builtins.max(self.type_eps[dt] for dt in dtypes)

        # set up aliases so that apt[i] is backed by memory in x
        # and self.gf is backed by memory in gx
        cur_pos = 0
        self.gf = []
        for i, p in enumerate(apt):
            p_size = prod(p.shape)
            # set up alias
            apt[i] = x[cur_pos: cur_pos + p_size].reshape(p.shape)
            self.gf.append(gx[cur_pos: cur_pos + p_size].reshape(p.shape))
            # initialize with p's value
            apt[i][...] = p
            cur_pos += p_size

        # Baseline function value at the unperturbed point.
        f_x = f(*[p.copy() for p in apt])

        # now iterate over the elements of x, and call f on apt.
        x_copy = x.copy()
        for i in xrange(total_size):
            x[:] = x_copy

            # Perturb one flattened coordinate; the apt views see the change.
            x[i] += eps
            f_eps = f(*apt)

            # TODO: remove this when it is clear that the next
            # replacemement does not pose problems of its own.  It was replaced
            # for its inability to handle complex variables.
            # gx[i] = numpy.asarray((f_eps - f_x) / eps)

            gx[i] = ((f_eps - f_x) / eps)

        if packed_pt:
            self.gf = self.gf[0]

    @staticmethod
    def abs_rel_err(a, b):
        """Return absolute and relative error between a and b.

        The relative error is a small number when a and b are close, relative
        to how big they are.

        Formulas used:
            abs_err = abs(a - b)
            rel_err = abs_err / max(abs(a) + abs(b), 1e-8)

        The denominator is clipped at 1e-8 to avoid dividing by 0 when a and b
        are both close to 0.

        The tuple (abs_err, rel_err) is returned
        """
        abs_err = abs(a - b)
        rel_err = abs_err / numpy.maximum(abs(a) + abs(b), 1e-8)
        # The numpy.asarray are needed as if a or b is a sparse matrix
        # this would result in a numpy.matrix and not a numpy.ndarray
        # and the behave differently causing problem later.
        # In particular a_npy_matrix.flatten().shape == (1, n_element)
        abs_err = numpy.asarray(abs_err)
        rel_err = numpy.asarray(rel_err)
        return (abs_err, rel_err)

    def abs_rel_errors(self, g_pt):
        """Return the abs and rel error of gradient estimate `g_pt`

        `g_pt` must be a list of ndarrays of the same length as self.gf,
        otherwise a ValueError is raised.

        Corresponding ndarrays in `g_pt` and `self.gf` must have the same
        shape or ValueError is raised.
        """
        if len(g_pt) != len(self.gf):
            raise ValueError('argument has wrong number of elements',
                             len(g_pt))
        errs = []
        # Compare each analytic gradient against the stored numeric one.
        for i, (a, b) in enumerate(zip(g_pt, self.gf)):
            if a.shape != b.shape:
                raise ValueError('argument element %i has wrong shape %s' % (
                    i, str((a.shape, b.shape))))
            errs.append(numeric_grad.abs_rel_err(a, b))
        return errs

    def max_err(self, g_pt, abs_tol, rel_tol):
        """Find the biggest error between g_pt and self.gf.

        What is measured is the violation of relative and absolute errors,
        wrt the provided tolerances (abs_tol, rel_tol).
        A value > 1 means both tolerances are exceeded.

        Return the argmax of min(abs_err / abs_tol, rel_err / rel_tol) over
        g_pt, as well as abs_err and rel_err at this point.
        """
        pos = []
        errs = []
        abs_errs = []
        rel_errs = []

        abs_rel_errs = self.abs_rel_errors(g_pt)
        for abs_err, rel_err in abs_rel_errs:
            if not numpy.all(numpy.isfinite(abs_err)):
                raise ValueError('abs_err not finite', repr(abs_err))
            if not numpy.all(numpy.isfinite(rel_err)):
                raise ValueError('rel_err not finite', repr(rel_err))
            # Scale each error by its tolerance; a gradient is bad only if
            # it exceeds BOTH tolerances, hence the minimum.
            scaled_err = numpy.minimum(abs_err / abs_tol, rel_err / rel_tol)
            max_i = scaled_err.argmax()

            pos.append(max_i)
            errs.append(scaled_err.flatten()[max_i])
            abs_errs.append(abs_err.flatten()[max_i])
            rel_errs.append(rel_err.flatten()[max_i])

        # max over the arrays in g_pt
        max_arg = numpy.argmax(errs)
        max_pos = pos[max_arg]
        return (max_arg, max_pos, abs_errs[max_arg], rel_errs[max_arg])
def verify_grad(fun, pt, n_tests=2, rng=None, eps=None,
                out_type=None, abs_tol=None,
                rel_tol=None, mode=None, cast_to_output_type=False):
    """Test a gradient by Finite Difference Method. Raise error on failure.

    Example:
        >>> verify_grad(theano.tensor.tanh,
        ...             (numpy.asarray([[2,3,4], [-1, 3.3, 9.9]]),),
        ...             rng=numpy.random)

    Raises an Exception if the difference between the analytic gradient and
    numerical gradient (computed through the Finite Difference Method) of a
    random projection of the fun's output to a scalar exceeds the given
    tolerance.

    :param fun: a Python function that takes Theano variables as inputs,
        and returns a Theano variable. For instance, an Op instance with
        a single output.
    :param pt: the list of numpy.ndarrays to use as input values.
        These arrays must be either float32 or float64 arrays.
    :param n_tests: number of times to run the test
    :param rng: random number generator used to sample u, we test gradient
        of sum(u * fun) at pt
    :param eps: stepsize used in the Finite Difference Method (Default
        None is type-dependent)
        Raising the value of eps can raise or lower the absolute and
        relative errors of the verification depending on the
        Op. Raising eps does not lower the verification quality
        for linear operations. It
        is better to raise eps than raising abs_tol or rel_tol.
    :param out_type: dtype of output, if complex (i.e. 'complex32' or
        'complex64')
    :param abs_tol: absolute tolerance used as threshold for gradient
        comparison
    :param rel_tol: relative tolerance used as threshold for gradient
        comparison
    :param mode: compilation mode forwarded to ``compile.function`` for all
        functions built here
    :param cast_to_output_type: if the output is float32 and
        cast_to_output_type is True, cast the random projection to
        float32. Otherwise it is float64.

    :note: WARNING to unit-test writers: if `op` is a function that builds
        a graph, try to make it a SMALL graph.  Often verify grad is run
        in debug mode, which can be very slow if it has to verify a lot of
        intermediate computations.

    :note: This function does not support multiple outputs. In
        tests/test_scan.py there is an experimental verify_grad that
        covers that case as well by using random projections.
    """
    # The import is here to prevent circular import.
    from theano import compile, shared
    import theano.tensor
    from theano.tensor import as_tensor_variable, TensorType

    assert isinstance(pt, (list, tuple))
    pt = [numpy.array(p) for p in pt]

    for i, p in enumerate(pt):
        if p.dtype not in ('float32', 'float64'):
            raise TypeError(
                ('verify_grad can work only with floating point '
                 'inputs, but input %i has dtype "%s".') % (i, p.dtype))

    _type_tol = dict(  # relative error tolerances for different types
        float32=1e-2,
        float64=1e-4)

    # Default tolerances: the loosest one among the input dtypes present.
    if abs_tol is None:
        abs_tol = builtins.max(_type_tol[str(p.dtype)] for p in pt)
    if rel_tol is None:
        rel_tol = builtins.max(_type_tol[str(p.dtype)] for p in pt)

    if rng is None:
        raise TypeError(('rng should be a valid instance of '
                         'numpy.random.RandomState. You may '
                         'want to use theano.tests.unittest'
                         '_tools.verify_grad instead of '
                         'theano.gradient.verify_grad.'))

    # We allow input downcast in function, because numeric_grad works in the
    # most precise dtype used among the inputs, so we may need to cast some.
    def function(inputs, output, name):
        f = compile.function(inputs, output, accept_inplace=True,
                             allow_input_downcast=True, mode=mode,
                             on_unused_input='ignore', name=name)
        return f

    # Symbolic placeholders matching each concrete input's dtype/shape.
    tensor_pt = [
        TensorType(
            as_tensor_variable(p).dtype,
            as_tensor_variable(p).broadcastable)(name='input %i' % i)
        for i, p in enumerate(pt)]

    # fun can be either a function or an actual Op instance
    o_output = fun(*tensor_pt)

    if isinstance(o_output, list):
        raise NotImplementedError(('cant (yet) autotest gradient of fun '
                                   'with multiple outputs'))
    # we could make loop over outputs making random projections R for each,
    # but this doesn't handle the case where not all the outputs are
    # differentiable... so I leave this as TODO for now -JB.

    o_fn = function(tensor_pt, o_output, name='gradient.py fwd')
    o_fn_out = o_fn(*[p.copy() for p in pt])

    if isinstance(o_fn_out, tuple) or isinstance(o_fn_out, list):
        raise TypeError(
            'It seems like you are trying to use verify_grad '
            'on an op or a function which outputs a list: there should'
            ' be a single (array-like) output instead')

    # random_projection should not have elements too small,
    # otherwise too much precision is lost in numerical gradient
    def random_projection():
        plain = rng.rand(*o_fn_out.shape) + 0.5
        if cast_to_output_type and o_output.dtype == "float32":
            return numpy.array(plain, o_output.dtype)
        return plain

    t_r = shared(random_projection())
    t_r.name = 'random_projection'

    # random projection of o onto t_r
    # This sum() is defined above, it's not the builtin sum.
    cost = theano.tensor.sum(t_r * o_output)

    cost_fn = function(tensor_pt, cost, name='gradient.py cost')

    symbolic_grad = grad(cost, tensor_pt,
                         disconnected_inputs='ignore')

    grad_fn = function(tensor_pt, symbolic_grad,
                       name='gradient.py symbolic grad')

    # Repeat the comparison n_tests times, each with a fresh random
    # projection, to reduce the chance of a false pass.
    for test_num in xrange(n_tests):
        try:
            num_grad = numeric_grad(cost_fn, [p.copy() for p in pt],
                                    eps, out_type)

            analytic_grad = grad_fn(*[p.copy() for p in pt])

            # Since `tensor_pt` is a list, `analytic_grad` should be one too.
            assert isinstance(analytic_grad, list)

            max_arg, max_err_pos, max_abs_err, max_rel_err = num_grad.max_err(
                analytic_grad, abs_tol, rel_tol)

            # A gradient fails only when it violates BOTH tolerances.
            if max_abs_err > abs_tol and max_rel_err > rel_tol:
                raise verify_grad.E_grad(max_arg, max_err_pos,
                                         max_abs_err, max_rel_err,
                                         abs_tol, rel_tol)

            # get new random projection for next test
            if test_num < n_tests - 1:
                t_r.set_value(random_projection(), borrow=True)
        except Exception as e:
            # Attach the offending inputs/settings to any error for easier
            # debugging before re-raising.
            e.args += ("\nThe error happened with the following inputs:", pt,
                       "\nThe value of eps is:", eps,
                       "\nThe out_type is:", out_type)
            raise
class GradientError(Exception):
    """Raised when a computed gradient is numerically incorrect.

    Records where the worst violation occurred (argument index and flat
    position within it) together with the measured absolute/relative
    errors and the tolerances they were compared against.
    """

    def __init__(self, arg, err_pos, abs_err, rel_err, abs_tol, rel_tol):
        Exception.__init__(self)  # to be compatible with python2.4
        self.arg = arg          # index of the offending argument
        self.err_pos = err_pos  # flat position within that argument
        self.abs_err = abs_err
        self.rel_err = rel_err
        self.abs_tol = abs_tol
        self.rel_tol = rel_tol

    def __str__(self):
        # args may have been inserted by e.g. makeTester
        extra = ", ".join(str(a) for a in self.args)
        return """\
GradientError: numeric gradient and analytic gradient exceed tolerance:
        At position %i of argument %i,
        abs. error = %f, abs. tolerance = %f
        rel. error = %f, rel. tolerance = %f
Exception args: %s""" % (self.err_pos, self.arg,
                         self.abs_err, self.abs_tol,
                         self.rel_err, self.rel_tol,
                         extra)
# Expose the error class as an attribute of verify_grad so callers can
# reference (and catch) it as ``verify_grad.E_grad``.
verify_grad.E_grad = GradientError
def jacobian(expression, wrt, consider_constant=None,
             disconnected_inputs='raise'):
    """Compute the Jacobian of a vector expression with respect to `wrt`.

    :type expression: Vector (1-dimensional) Variable
    :type wrt: Variable or list of Variables

    :param consider_constant: a list of expressions not to backpropagate
        through

    :type disconnected_inputs: string
    :param disconnected_inputs: Defines the behaviour if some of the variables
        in ``wrt`` are not part of the computational graph computing ``cost``
        (or if all links are non-differentiable). The possible values are:
        - 'ignore': considers that the gradient on these parameters is zero.
        - 'warn': consider the gradient zero, and print a warning.
        - 'raise': raise an exception.

    :return: either a instance of Variable or list/tuple of Variables
        (depending upon `wrt`) representing the jacobian of `expression`
        with respect to (elements of) `wrt`. If an element of `wrt` is not
        differentiable with respect to the output, then a zero variable is
        returned. The return value is of same type as `wrt`: a list/tuple
        or TensorVariable in all cases.
    """
    from theano.tensor import arange

    # Validate the expression argument.
    assert isinstance(expression, Variable), \
        "tensor.jacobian expects a Variable as `expression`"
    assert expression.ndim < 2, \
        ("tensor.jacobian expects a 1 dimensional variable as "
         "`expression`. If not use flatten to make it a vector")

    # Remember the caller's container type so it can be restored on return.
    using_list = isinstance(wrt, list)
    using_tuple = isinstance(wrt, tuple)
    wrt = list(wrt) if isinstance(wrt, (list, tuple)) else [wrt]

    if expression.ndim == 0:
        # A scalar expression: the Jacobian degenerates to an ordinary grad.
        return format_as(using_list, using_tuple,
                         grad(expression, wrt,
                              consider_constant=consider_constant,
                              disconnected_inputs=disconnected_inputs))

    def row_gradients(*scan_args):
        # scan passes: the row index, the expression, then each wrt variable.
        row_idx, expr = scan_args[0], scan_args[1]
        return [grad(expr[row_idx], inp,
                     consider_constant=consider_constant,
                     disconnected_inputs=disconnected_inputs)
                for inp in scan_args[2:]]

    # Computing the gradients does not affect the random seeds on any random
    # generator used in expression (because during computing gradients we are
    # just backtracking over old values. (rp Jan 2012 - if anyone has a
    # counter example please show me)
    jacobs, updates = theano.scan(row_gradients,
                                  sequences=arange(expression.shape[0]),
                                  non_sequences=[expression] + wrt)
    assert not updates, \
        ("Scan has returned a list of updates. This should not "
         "happen! Report this to theano-users (also include the "
         "script that generated the error)")
    return format_as(using_list, using_tuple, jacobs)
def hessian(cost, wrt, consider_constant=None,
            disconnected_inputs='raise'):
    """Compute the Hessian of a scalar cost with respect to `wrt`.

    :type cost: Scalar (0-dimensional) Variable.
    :type wrt: Vector (1-dimensional tensor) 'Variable' or list of
        vectors (1-dimensional tensors) Variables

    :param consider_constant: a list of expressions not to backpropagate
        through

    :type disconnected_inputs: string
    :param disconnected_inputs: Defines the behaviour if some of the variables
        in ``wrt`` are not part of the computational graph computing ``cost``
        (or if all links are non-differentiable). The possible values are:
        - 'ignore': considers that the gradient on these parameters is zero.
        - 'warn': consider the gradient zero, and print a warning.
        - 'raise': raise an exception.

    :return: either a instance of Variable or list/tuple of Variables
        (depending upon `wrt`) representing the Hessian of the `cost`
        with respect to (elements of) `wrt`. If an element of `wrt` is not
        differentiable with respect to the output, then a zero variable is
        returned. The return value is of same type as `wrt`: a list/tuple
        or TensorVariable in all cases.
    """
    from theano.tensor import arange

    # Validate the cost argument.
    assert isinstance(cost, Variable), \
        "tensor.hessian expects a Variable as `cost`"
    assert cost.ndim == 0, \
        "tensor.hessian expects a 0 dimensional variable as `cost`"

    # Remember the caller's container type so it can be restored on return.
    using_list = isinstance(wrt, list)
    using_tuple = isinstance(wrt, tuple)
    wrt = list(wrt) if isinstance(wrt, (list, tuple)) else [wrt]

    hessians = []
    for inp in wrt:
        assert isinstance(inp, Variable), \
            "tensor.hessian expects a (list of) Variable as `wrt`"
        assert inp.ndim == 1, \
            "tensor.hessian expects a (list of) 1 dimensional variable "\
            "as `wrt`"

        # First derivative of the cost with respect to this input vector.
        expr = grad(cost, inp, consider_constant=consider_constant,
                    disconnected_inputs=disconnected_inputs)

        # It is possible that the inputs are disconnected from expr,
        # even if they are connected to cost.
        # This should not be an error.
        def _row_grad(i, y, x):
            return grad(y[i], x,
                        consider_constant=consider_constant,
                        disconnected_inputs='ignore')

        hess, updates = theano.scan(_row_grad,
                                    sequences=arange(expr.shape[0]),
                                    non_sequences=[expr, inp])
        assert not updates, \
            ("Scan has returned a list of updates. This should not "
             "happen! Report this to theano-users (also include the "
             "script that generated the error)")
        hessians.append(hess)
    return format_as(using_list, using_tuple, hessians)
def _is_zero(x):
    """
    Classify whether `x` is always zero.

    Returns 'yes', 'no', or 'maybe'; 'maybe' means that `x` is an
    expression that is complicated enough that we can't tell that it
    simplifies to 0.  For raw (non-Variable) inputs a boolean is returned
    instead, preserving the historical behaviour of this helper.
    """
    # Plain numpy/python data: compare numerically.
    if not hasattr(x, 'type'):
        return np.all(x == 0.)
    # Special gradient types carry their own answer.
    if isinstance(x.type, NullType):
        return 'no'
    if isinstance(x.type, DisconnectedType):
        return 'yes'
    # Try to extract a scalar constant; failure means "can't tell".
    try:
        value = theano.get_scalar_constant_value(x)
    except theano.tensor.basic.NotScalarConstantError:
        return 'maybe'
    return 'yes' if value == 0. else 'no'
class ConsiderConstant(ViewOp):
    # Identity in the forward pass (inherited from ViewOp); the gradient
    # is forced to zero so backpropagation stops at this node.
    def grad(self, args, g_outs):
        # Return a zero tensor shaped like each incoming output gradient.
        return [g_out.zeros_like(g_out) for g_out in g_outs]
# Module-level singleton applied by the consider_constant() wrapper below.
consider_constant_ = ConsiderConstant()
# I create a function only to have the doc show well.
def consider_constant(x):
    """
    DEPRECATED: use zero_grad() or disconnected_grad() instead.

    Consider an expression constant when computing gradients.

    The expression itself is unaffected, but when its gradient is
    computed, or the gradient of another expression that this
    expression is a subexpression of, it will not be backpropagated
    through. In other words, the gradient of the expression is
    truncated to 0.

    :param x: A Theano expression whose gradient should be truncated.
    :return: The expression is returned unmodified, but its gradient
        is now truncated to 0.

    .. versionadded:: 0.7
    """
    # Runtime deprecation notice; stacklevel=3 points the warning at the
    # user's call site rather than at this wrapper.
    warnings.warn((
        "consider_constant() is deprecated, use zero_grad() or "
        "disconnected_grad() instead."), stacklevel=3)

    return consider_constant_(x)
class ZeroGrad(ViewOp):
    # Identity view in the forward pass; backpropagates an explicit zero
    # gradient (the zero still flows through the computational graph,
    # unlike a disconnected gradient which cuts the link entirely).
    def grad(self, args, g_outs):
        return [g_out.zeros_like(g_out) for g_out in g_outs]
# Singleton instance used by the zero_grad() helper below.
zero_grad_ = ZeroGrad()
def zero_grad(x):
    """
    Consider an expression constant when computing gradients.

    The expression itself is unaffected, but when its gradient is
    computed, or the gradient of another expression that this
    expression is a subexpression of, it will be backpropagated
    through with a value of zero. In other words, the gradient of
    the expression is truncated to 0.

    :param x: A Theano expression whose gradient should be truncated.
    :return: The expression is returned unmodified, but its gradient
        is now truncated to 0.
    """
    # Delegate to the module-level ZeroGrad op instance.
    return zero_grad_(x)
class DisconnectedGrad(ViewOp):
    # Identity view in the forward pass; reports the input as disconnected
    # from the cost so grad() does not traverse it at all.
    def grad(self, args, g_outs):
        return [disconnected_type() for g_out in g_outs]

    def connection_pattern(self, node):
        # Single input, single output: declare them unconnected.
        return [[False]]
# Singleton instance used by the disconnected_grad() helper below.
disconnected_grad_ = DisconnectedGrad()
def disconnected_grad(x):
    """
    Consider an expression constant when computing gradients,
    while effectively not backpropagating through it.

    The expression itself is unaffected, but when its gradient is
    computed, or the gradient of another expression that this
    expression is a subexpression of, it will not be backpropagated
    through. This is effectively equivalent to truncating the gradient
    expression to 0, but is executed faster than zero_grad(), which still
    has to go through the underlying computational graph related to the
    expression.

    :param x: A Theano expression whose gradient should not be
        backpropagated through.
    :return: The expression is returned unmodified, but its gradient
        is now effectively truncated to 0.
    """
    # Delegate to the module-level DisconnectedGrad op instance.
    return disconnected_grad_(x)
class GradClip(ViewOp):
    # See doc in user fct grad_clip
    __props__ = ()

    def __init__(self, clip_lower_bound, clip_upper_bound):
        # We do not put those member in __eq__ or __hash__
        # as they do not influence the perform of this op.
        self.clip_lower_bound = clip_lower_bound
        self.clip_upper_bound = clip_upper_bound
        assert(self.clip_upper_bound >= self.clip_lower_bound)

    def grad(self, args, g_outs):
        # Forward pass is the identity (ViewOp); only the gradient is
        # clipped, elementwise, to [clip_lower_bound, clip_upper_bound].
        return [theano.tensor.clip(g_out, self.clip_lower_bound,
                                   self.clip_upper_bound)
                for g_out in g_outs]
def grad_clip(x, lower_bound, upper_bound):
    """
    This op does a view in the forward pass, but clips the gradient.
    It is an elemwise operation.

    :param x: the variable whose gradient inputs should be clipped
    :param lower_bound: The lower bound of the gradient value
    :param upper_bound: The upper bound of the gradient value.

    :examples:

        x = theano.tensor.scalar()
        z = theano.tensor.grad(grad_clip(x, -1, 1)**2, x)
        z2 = theano.tensor.grad(x**2, x)
        f = theano.function([x], outputs = [z, z2])
        print(f(2.0))  # output (1.0, 4.0)

    :note: We register an opt in tensor/opt.py that removes the GradClip.
        So it has 0 cost in the forward pass and only does work in the grad.
    """
    return GradClip(lower_bound, upper_bound)(x)
|
valexandersaulys/airbnb_kaggle_contest
|
venv/lib/python3.4/site-packages/Theano-0.7.0-py3.4.egg/theano/gradient.py
|
Python
|
gpl-2.0
| 81,192
|
[
"VisIt"
] |
c2992bf2af7b96682e94f3b82a1e3676b1b194949e27d643135be7d624284703
|
# GridCal
# Copyright (C) 2022 Santiago Peñate Vera
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from typing import List
import numpy as np
import numba as nb
from scipy.sparse import csc_matrix, diags
def find_islands(adj: csc_matrix, active: np.ndarray):
    """
    Method to get the islands of a graph
    This is the non-recursive version
    :param adj: square bus-bus adjacency matrix in CSC format
    :param active: boolean (or 0/1) array of bus availability; inactive
                   buses are never visited and never traversed
    :return: list of islands, where each element is a sorted list of the
             node indices of the island
    """
    node_number = adj.shape[0]

    # Mark all the vertices as not visited
    visited = np.zeros(node_number, dtype=bool)

    # storage structure for the islands (list of lists)
    islands = list()  # type: List[List[int]]

    # set the island index
    island_idx = 0

    # go though all the vertices...
    for node in range(node_number):

        # if the node has not been visited...
        if not visited[node] and active[node]:

            # every unvisited active node starts a new island
            islands.append(list())

            # ----------------------------------------------------------
            # DFS: store in the island all the reachable vertices
            # from the current vertex "node"
            #
            # declare a stack with the initial node to visit (node)
            stack = list()  # type: List[int]
            stack.append(node)

            while len(stack) > 0:

                # pick the last element of the stack (fix: pop() is O(1)
                # and yields a true DFS; the previous pop(0) shifted the
                # whole list on every iteration, O(n) per pop)
                v = stack.pop()

                # if v has not been visited...
                if not visited[v]:

                    # mark as visited
                    visited[v] = True

                    # add element to the island
                    islands[island_idx].append(v)

                    # Add the neighbours of v to the stack
                    start = adj.indptr[v]
                    end = adj.indptr[v + 1]
                    for i in range(start, end):
                        k = adj.indices[i]  # row index in the CSC scheme
                        if not visited[k] and active[k]:
                            stack.append(k)

            # ----------------------------------------------------------
            # increase the islands index, because all the other connected
            # vertices have been visited
            island_idx += 1

    # sort each of the islands so the result does not depend on the
    # traversal order
    for island in islands:
        island.sort()  # the sorting is done in-place

    return islands
def get_elements_of_the_island(C_element_bus, island):
    """
    Get the indices of the elements connected to any bus of an island.
    :param C_element_bus: CSC elements-buses connectivity matrix with the
                          dimensions: elements x buses
    :param island: array of bus indices of the island
    :return: array of indices of the elements that match that island
    """
    # make sure the CSC index arrays can be used directly
    if not isinstance(C_element_bus, csc_matrix):
        C_element_bus = C_element_bus.tocsc()

    n_elm = C_element_bus.shape[0]
    seen = np.zeros(n_elm, dtype=bool)
    found = np.zeros(n_elm, dtype=int)
    count = 0

    # walk the island's columns (buses) and collect their rows (elements)
    # in first-visit order
    for bus_idx in island:
        a = C_element_bus.indptr[bus_idx]
        b = C_element_bus.indptr[bus_idx + 1]
        for p in range(a, b):
            elm = C_element_bus.indices[p]
            if not seen[elm]:
                seen[elm] = True
                found[count] = elm
                count += 1

    # keep only the filled portion of the buffer
    return found[:count]
def get_adjacency_matrix(C_branch_bus_f, C_branch_bus_t, branch_active, bus_active):
    """
    Compute the bus-bus adjacency matrix implied by the branch connectivity.
    :param C_branch_bus_f: Branch-bus_from connectivity matrix
    :param C_branch_bus_t: Branch-bus_to connectivity matrix
    :param branch_active: array of branches availability
    :param bus_active: array of buses availability
    :return: Adjacency matrix
    """
    # zero the rows of the out-of-service branches
    active_rows = diags(branch_active)
    branch_bus = active_rows * C_branch_bus_f + active_rows * C_branch_bus_t

    # bus-bus connectivity: two buses are adjacent when an active branch
    # joins them; the rows of out-of-service buses are zeroed as well
    return diags(bus_active) * (branch_bus.T * branch_bus)
class Graph:

    def __init__(self, C_bus_bus, C_branch_bus, bus_states):
        """
        Graph adapted to work with CSC sparse matrices
        see: http://www.scipy-lectures.org/advanced/scipy_sparse/csc_matrix.html
        :param C_bus_bus: Adjacency matrix in lil format
        :param C_branch_bus: Connectivity of the branches and the buses
        :param bus_states: active states of the buses
        """
        self.node_number = C_bus_bus.shape[0]
        self.adj = C_bus_bus
        self.C_branch_bus = C_branch_bus
        self.bus_states = bus_states

    def find_islands(self):
        """
        Method to get the islands of a graph
        This is the non-recursive version
        :return: List of islands where each element is a list of the node
                 indices of the island
        """
        # Bug fix: the module-level find_islands requires the active-bus
        # mask as its second argument; calling it with the adjacency
        # matrix alone raised TypeError.
        return find_islands(self.adj, self.bus_states)

    def get_branches_of_the_island(self, island):
        """
        Get the branch indices of the island
        :param island: array of bus indices of the island
        :return: array of indices of the branches that belong to the island
        """
        return get_elements_of_the_island(self.C_branch_bus, island)
|
SanPen/GridCal
|
src/GridCal/Engine/Core/topology.py
|
Python
|
lgpl-3.0
| 6,221
|
[
"VisIt"
] |
68a3f26f5a023f935af20ec0937566539f51b7c7e3d579273bcbb05efce37935
|
# c: 14.12.2007, r: 03.11.2008
import os
import numpy as nm
from sfepy.base.base import output, default_printer, pause, debug, Struct
from sfepy.fem import MeshIO
from gen_mesh import gen_concentric
# --- mesh / geometry selection -------------------------------------------
# is_3D switches between the 3D cube meshes and the 2D circular mesh;
# generate_2D additionally regenerates the concentric 2D mesh on the fly.
is_3D = False
generate_2D = False
fig_suffix = '.pdf'

if is_3D:
    filename_mesh = 'database/phono/cube_sphere.mesh'
    ## filename_mesh = 'database/phono/cube_cylinder.mesh'
    out_groups = [1]
    in_groups = [2]
    diameters_g = None
    # thresholds / diameters scanned by the parametric hooks below
    tepss_g = nm.logspace( -3, -0.5, 11 )
    default_y3_diameter = 0.1
    diameters_g = nm.linspace( 0.075, 0.26, 11 )
else:
    #filename_mesh = 'database/phono/mesh_circ21.mesh'
    #filename_mesh = 'database/phono/mesh_circ21_small.mesh'
    filename_mesh = 'database/phono/mesh_circ.vtk'
    out_groups = [1]
    if generate_2D:
        # generate the concentric-circles mesh and keep the inner groups
        in_groups, diameters_g = gen_concentric( 'tmp/mesh.geo',
                                                 1., 0.2, 0.08, 0.1, 0.6, 7 )
        diameters_g = nm.array( diameters_g[:-2] ) + 0.001
    else:
        os.system("cp database/phono/mesh_circ.geo tmp/mesh.geo")
        in_groups = [2]
        diameters_g = None
    tepss_g = nm.logspace( -3, -1, 11 )
    default_y3_diameter = 0.25
    # rebuild the VTK mesh from the geo description (requires gmsh)
    os.system("gmsh -2 tmp/mesh.geo -format mesh")
    os.system("script/mesh_to_vtk.py tmp/mesh.mesh database/phono/mesh_circ.vtk")
#pause()
# directory of this input file; used to anchor the output directory
cwd = os.path.split( os.path.join( os.getcwd(), __file__ ) )[0]

# Band-gap application options (consumed by the phononic solver).
options = {
    'save_eig_vectors' : (10, 0),
    # Either:
    'eig_range' : (0, 10), # -> freq_range = eigs[slice(*eig_range)][[0, -1]]
    # Or (this has precedence if not None):
    'fixed_eig_range' : (0., 50.),
    'freq_margins' : (10, 10), # % of freq_range
    'feps' : 1e-4, # frequency
    'zeps' : 1e-10, # zero finding
    'teps' : 1e-1, # eigenmomentum threshold
    'teps_rel' : True, # eigenmomentum threshold is relative w.r.t. largest one
    'freq_step' : 0.02, # in percent of freq_range
    # 'eig_vector_transform' : ('select_in_plane', 'z', 1e-1),
    # 'plot_transform' : ('clip', (-20, 20)),
    'plot_transform' : ('normalize', (-2, 2)),
    # 'plot_transform' : None,

    #############################################
    # 'parametric_hook' : 'vary_y3_size',
    # 'parametric_hook' : 'vary_teps',
    'post_process_hook' : 'post_process',

    'output_dir' : os.path.join( cwd, 'output/' ),
    #############################################
    # eigenmomentum term evaluated in the inclusion regions
    'eigenmomentum' : {'var' : 'up',
                       'regions' : ['Y2', 'Y3'],
                       'term' : '%.12e * di_volume_integrate.i1.%s( %s )'},
    # Used to compute average density.
    'region_to_material' : {'Y1' : 'matrix',
                            'Y2' : 'inclusion',
                            'Y3' : 'rigid',},
    'tensor_names' : {'elastic' : 'lame',},
    'volume' : 'd_volume.i1.%s( uy )',
    'eig_problem' : 'simple',

    'fig_name' : 'band_gaps_sym_025' + fig_suffix,
    'plot_options' : {
        'show' : True, # Show figure.
        'legend' : False, # Show legend.
    },
    'plot_rsc' : { # Resources for all plots.
        'resonance' : {'linewidth' : 0.5, 'color' : 'k', 'linestyle' : '-' },
        'masked' : {'linewidth' : 0.2, 'color' : 'k', 'linestyle' : ':' },
        'x_axis' : {'linewidth' : 1, 'color' : 'k', 'linestyle' : '-' },
        'eig_min' : {'linewidth' : 1, 'color' : 'k', 'linestyle' : '--' },
        'eig_max' : {'linewidth' : 1, 'color' : 'k', 'linestyle' : '-' },
        'strong_gap' : {'linewidth' : 0, 'facecolor' : (1, 1, 0.5) },
        'weak_gap' : {'linewidth' : 0, 'facecolor' : (1, 1, 1) },
        'propagation' : {'linewidth' : 0, 'facecolor' : (0.5, 1, 0.5) },
        ## 'strong_gap' : {'linewidth' : 0, 'facecolor' : (0.6, 0.6, 0.6) },
        ## 'weak_gap' : {'linewidth' : 0, 'facecolor' : (0.8, 0.8, 0.8) },
        ## 'propagation' : {'linewidth' : 0, 'facecolor' : (1, 1, 1) },
        'params' : {'axes.labelsize': 'x-large',
                    'text.fontsize': 'large',
                    'legend.fontsize': 'large',
                    'xtick.labelsize': 'large',
                    'ytick.labelsize': 'large',
                    'text.usetex': False},
    },
}
regions = {
'Y' : ('all', {}),
'Y1' : (' +e '.join( ('elements of group %d' % ig)
for ig in out_groups ), {}),
'Y23' : (' +e '.join( ('elements of group %d' % ig)
for ig in in_groups ), {}),
'Y3' : ('nodes by select_y3_circ( x, y, z, %f )' % default_y3_diameter, {}),
'Y2' : ('r.Y23 -e r.Y3', {}),
'Y23_Surface': ('r.Y1 *n r.Y23', {'can_cells' : False}),
}
material_1 = {
'name' : 'matrix',
'mode' : 'here',
'region' : 'Y1',
# aluminium
'lame' : {'lambda' : 5.898, 'mu' : 2.681}, # in 1e+10 Pa
'density' : 0.2799, # in 1e4 kg/m3
}
material_2 = {
'name' : 'inclusion',
'mode' : 'here',
'region' : 'Y2',
# epoxy
'lame' : {'lambda' : 0.1798, 'mu' : 0.148}, # in 1e+10 Pa
'density' : 0.1142, # in 1e4 kg/m3
}
material_3 = {
'name' : 'rigid',
'mode' : 'here',
'region' : 'Y3',
# lead
# 'lame' : {'lambda' : 0.1798, 'mu' : 0.148}, # in 1e+10 Pa
'lame' : {'lambda' : 4.074 , 'mu' : 0.5556}, # in 1e+10 Pa, does not matter
# 'density' : 0.1142, # in 1e4 kg/m3
'density' : 1.1340, # in 1e4 kg/m3
}
dim = MeshIO.any_from_filename( filename_mesh ).read_dimension()
geom = {3 : '3_4', 2 : '2_3'}[dim]
field_0 = {
'name' : 'displacement_Y',
'dim' : (dim,1),
'domain' : 'Y',
'bases' : {'Y' : '%s_P1' % geom}
}
field_1 = {
'name' : 'displacement_Y23',
'dim' : (dim,1),
'domain' : 'Y23',
'bases' : {'Y23' : '%s_P1' % geom}
}
variables = {
'u' : ('unknown field', 'displacement_Y23', 0),
'v' : ('test field', 'displacement_Y23', 'u'),
'uy' : ('parameter field', 'displacement_Y', None),
'up' : ('parameter field', 'displacement_Y23', 'u'),
}
ebc_1 = {
'name' : 'ZeroSurface',
'region' : 'Y23_Surface',
'dofs' : {'u.all' : 0.0},
}
lcbc_1 = {
'name' : 'RigidBody',
'region' : 'Y3',
'dofs' : {'u.all' : 'rigid'},
}
integral_1 = {
'name' : 'i1',
'kind' : 'v',
'quadrature' : 'gauss_o2_d%d' % dim,
}
##
# Eigenproblem equations.
# dw_lin_elastic_iso.i1.Y3( rigid.lame, v, u ) should have no effect!
equations = {
'lhs' : """dw_lin_elastic_iso.i1.Y2( inclusion.lame, v, u )
+ dw_lin_elastic_iso.i1.Y3( rigid.lame, v, u )""",
'rhs' : """dw_mass_vector.i1.Y2( inclusion.density, v, u )
+ dw_mass_vector.i1.Y3( rigid.density, v, u )""",
}
##
# FE assembling parameters.
fe = {
'chunk_size' : 100000
}
def clip( data, plot_range ):
    """Clip `data` elementwise to the (min, max) pair `plot_range`."""
    lo, hi = plot_range
    return nm.clip( data, lo, hi )
def normalize( data, plot_range ):
    """Squash `data` through arctan (into (-pi/2, pi/2)), then clip the
    result to the (min, max) pair `plot_range`."""
    return nm.clip( nm.arctan( data ), *plot_range )
##
# 02.10.2007, c
def select_in_plane( vec, shape, normal_direction, eps ):
    """
    Zero out `vec` unless it lies (within `eps`) in the plane orthogonal
    to `normal_direction`.

    :param vec: flat DOF vector, reshaped to (n_nod, dim)
    :param shape: tuple (n_nod, dim)
    :param normal_direction: 'x', 'y' or 'z'
    :param eps: tolerance on the normal component
    :return: (vector, flag) - the original vector and False when it is
        in-plane, a zero vector and True otherwise
    """
    n_nod, dim = shape
    # map the direction letter to a coordinate axis; in 2D, 'z' aliases 'y'
    axis_of = {2 : {'x' : 0, 'y' : 1, 'z' : 1},
               3 : {'x' : 0, 'y' : 1, 'z' : 2}}
    axis = axis_of[dim][normal_direction]
    normal = nm.eye( dim, dtype = nm.float64 )[:, axis]

    # per-node component of vec along the normal direction
    comp = nm.reshape( vec, (n_nod, dim) ).dot( normal )

    if nm.any( nm.abs( comp ) > eps ):
        return nm.zeros_like( vec ), True
    return vec, False
def select_rigid( x, y, z ):
    """
    Region selector marking the nodes of the rigid inclusion; the box
    bounds depend on which mesh is in use (module-level `filename_mesh`).
    Returns a 0/1 array over the nodes.
    """
    if filename_mesh.find( 'cube_' ) >= 0:
        # 3D cube meshes: 0.2-wide box centred on the origin
        inside = (x > -0.1) & (x < 0.1) \
                 & (y > -0.1) & (y < 0.1) \
                 & (z > -0.1) & (z < 0.1)
    elif filename_mesh.find( 'mesh_circ.vtk' ) >= 0:
        # 2D circular mesh: 0.4-wide box centred on the origin
        inside = (x > -0.2) & (x < 0.2) \
                 & (y > -0.2) & (y < 0.2)
    else:
        # legacy meshes with the inclusion centred at (0.5, 0.5)
        inside = (x > 0.4) & (x < 0.6) & (y > 0.4) & (y < 0.6)
    return nm.where( inside, 1, 0 )
def select_y3_circ( x, y, z, diameter ):
    """
    Select the nodes within `diameter` of the origin (a ball in 3D, a
    disc in 2D; the module-level `dim` decides whether z participates).
    Returns a 0/1 array; raises ValueError when 3 or fewer nodes match.
    """
    r2 = x**2 + y**2
    if dim == 3:
        r2 = r2 + z**2
    out = nm.where( nm.sqrt( r2 ) < diameter, 1, 0 )

    n_selected = nm.where( out == 1 )[0].shape[0]
    if n_selected <= 3:
        raise ValueError( 'too few nodes selected! (%d)' % n_selected )
    return out
def extend_cell_data( data, pb, rname, val = None ):
    """
    Extend `data`, defined only on region `rname`, to all elements of the
    domain, filling the remaining cells with `val`.

    :param data: per-cell array on the region; assumes data.shape[2] > 1
        means vector data -- TODO confirm against callers
    :param pb: the problem instance (provides the domain and its regions)
    :param rname: name of the region `data` lives on
    :param val: fill value; when None, the minimum of `data` (of its
        absolute values for vector data) is used
    :return: array covering all pb.domain.shape.n_el cells
    """
    n_el = pb.domain.shape.n_el
    # nothing to do when the data already covers the whole domain
    if data.shape[0] == n_el: return data

    if val is None:
        if data.shape[2] > 1: # Vector.
            val = nm.amin( nm.abs( data ) )
        else: # Scalar.
            val = nm.amin( data )

    edata = nm.empty( (n_el,) + data.shape[1:], dtype = nm.float64 )
    edata.fill( val )

    region = pb.domain.regions[rname]
    offs = region.get_cell_offsets()
    eoffs = pb.domain.get_cell_offsets()
##     print offs
##     print eoffs
##     print pb.domain.mat_ids_to_i_gs
##     pause()

    # copy the region data group by group, mapping the region-local cell
    # numbering onto the global element numbering
    for group in pb.domain.iter_groups():
        ig = group.ig
        ii = eoffs[ig]
        if ig in region.igs:
            n_cell = region.shape[ig].n_cell
            ir = offs[ig]
            edata[ii+region.cells[ig]] = data[ir:ir+n_cell]
    return edata
def post_process( out, problem, mtx_phi ):
    """
    Add Cauchy strain fields of the eigenvectors to the output.

    Keys of `out` are assumed to end in the eigenvector index (key[1:] is
    parsed as int -- TODO confirm the key format against the caller); for
    each one, the strain of column `ii` of `mtx_phi` is evaluated on Y23,
    extended to the whole domain and stored under 'strain%03d'.
    """
    from sfepy.fem import eval_term_op

    # NOTE(review): inserting keys while iterating out.keys() is only safe
    # on Python 2, where keys() returns a list snapshot.
    for key in out.keys():
        ii = int( key[1:] )
        vec = mtx_phi[:,ii].copy()

        strain = eval_term_op( vec, 'de_cauchy_strain.i1.Y23( u )', problem )
        strain = extend_cell_data( strain, problem, 'Y23' )
        out['strain%03d' % ii] = Struct( name = 'output_data',
                                         mode = 'cell', data = strain,
                                         dof_types = None )
    return out
def save_log( filename, bg, log_item ):
    """Save band gaps, valid flags and eigenfrequencies of `bg` to
    `filename`; `log_item` is written verbatim as the header line.
    (Python 2 code: uses xrange.)"""
    fd = open( filename, 'w' )
    freq_range = bg.freq_range_margins
    fd.write( log_item )
    fd.write( 'squared: %s\n' % False )
    fd.write( 'n_zeroed: %d\n' % bg.n_zeroed )
    fd.write( 'n_eigs: %d\n' % bg.n_eigs )
    # one gap record per interval between consecutive margin frequencies
    fd.write( 'f0 f1 flag_min f_min v_min flag_max f_max v_max'
              ' kind\ndesc\n' )
    format = "%f %f %d %f %f %d %f %f %s\n%s\n"
    n_row = len( freq_range ) - 1
    fd.write( '%d\n' % n_row )
    for ir in xrange( n_row ):
        f0, f1 = freq_range[[ir, ir+1]]
        gmin, gmax = bg.gaps[ir]
        fd.write( format % ((f0, f1) + tuple( gmin ) + tuple( gmax )
                            + bg.kinds[ir]) )
    # resonance validity table for the initial frequency range
    fd.write( 'valid resonance\n' )
    freq_range = bg.freq_range_initial
    n_row = len( freq_range )
    fd.write( '%d\n' % n_row )
    valid_in_range = bg.valid[bg.eig_range]
    for ir in xrange( n_row ):
        fd.write( '%d %f\n' % (valid_in_range[ir], freq_range[ir] ) )
    fd.close()
def vary_teps( problem ):
    """Vary eigenmomentum threshold.

    Parametric hook (a generator): for every threshold in `tepss_g` (or a
    default logspace when that is None) it yields the problem with updated
    options, receives the solver results back through `out`, and saves a
    band-gap log per iteration.  Yields None to finish.
    """
    from sfepy.solvers.ts import get_print_info

    default_printer.prefix = 'vary_teps:'

    if tepss_g is None:
        tepss = nm.logspace( -3, -1, 11 )
    else:
        tepss = tepss_g

    ofn_trunk, output_dir = problem.ofn_trunk, problem.output_dir
    join = os.path.join
    n_digit, aux, d_format = get_print_info( len( tepss ) + 1 )
    for ii, teps in enumerate( tepss ):
        output( 'iteration %d: teps %.2e' % (ii, teps) )

        opts = problem.conf.options
        opts.teps = teps
        # write the figures to files instead of showing them
        opts.plot_options['show'] = False
        opts.fig_name = join( output_dir,
                              (('band_gaps_%s' % d_format)
                               + '_teps_%3.2e' + fig_suffix) % (ii, teps) )
        problem.ofn_trunk = ofn_trunk + '_' + (d_format % ii)

        out = []
        yield problem, out

        # the driver appends (evp, bg) to `out` before resuming us
        evp, bg = out[-1]

        filename = join( output_dir,
                         ('band_gaps_%s.txt' % d_format) % ii )
        log_item = '$10^q$: %f\n' % teps
        save_log( filename, bg, log_item )

    yield None
def vary_y3_size( problem ):
    """Vary size of Y3 inclusion.

    Parametric hook (a generator): redefines the Y3 region for each
    diameter in `diameters_g` (or a default linspace when that is None),
    rebuilds the problem, yields it to the solver and saves a band-gap
    log per iteration.  Yields None to finish.
    """
    from sfepy.fem import ProblemDefinition
    from sfepy.solvers.ts import get_print_info

    default_printer.prefix = 'vary_y3_size:'

    # fix: removed a dead `y3_diameters = [0.2, 0.25, ...]` assignment
    # that was unconditionally overwritten by both branches below
    if diameters_g is None:
        y3_diameters = nm.linspace( 0.15, 0.45, 16 )
    else:
        y3_diameters = diameters_g
    # y3_diameters = [0.45]

    ofn_trunk, output_dir = problem.ofn_trunk, problem.output_dir
    join = os.path.join
    conf = problem.conf
    cr = conf.get_raw( 'regions' )
    n_digit, aux, d_format = get_print_info( len( y3_diameters ) + 1 )
    for ii, diameter in enumerate( y3_diameters ):
        output( 'iteration %d: diameter %3.2f' % (ii, diameter) )

        opts = problem.conf.options
        # redefine the Y3 region with the current diameter and rebuild
        # the problem from the edited configuration
        cr['Y3'] = ('nodes by select_y3_circ( x, y, z, %.5f )' % diameter, {})
        conf.edit( 'regions', cr )
        problem = ProblemDefinition.from_conf( conf )

        problem.save_regions( join( output_dir, ('regions_' + d_format) % ii ),
                              ['Y2', 'Y3'] )
        for region in problem.domain.regions:
            if not region.has_cells_if_can():
                raise ValueError( 'region %s has no cells!' % region.name )

        opts.plot_options['show'] = False
        opts.fig_name = join( output_dir,
                              (('band_gaps_%s' % d_format)
                               + '_y3_%03.2f' + fig_suffix) % (ii, diameter) )
        problem.ofn_trunk = ofn_trunk + '_' + (d_format % ii)

        out = []
        yield problem, out

        # the driver appends (evp, bg) to `out` before resuming us
        evp, bg = out[-1]

        filename = join( output_dir,
                         ('band_gaps_%s.txt' % d_format) % ii )
        log_item = '$r(Y_3)$: %f\n' % diameter
        save_log( filename, bg, log_item )

    yield None
|
certik/sfepy
|
input/phono/band_gaps_rigid.py
|
Python
|
bsd-3-clause
| 13,456
|
[
"VTK"
] |
ffba37c7e2784e06f1ed10104be35f57a630576f59acdcabde700720f2e4aee9
|
"""
Module simplifying manipulation of XML described at
http://libvirt.org/formatdomain.html
"""
import logging
from autotest.client.shared import error
from virttest import xml_utils
from virttest.libvirt_xml import base, accessors, xcepts
from virttest.libvirt_xml.devices import librarian
class VMXMLDevices(list):

    """
    List of device instances from classes handed out by librarian.get()
    """

    @staticmethod
    def __type_check__(other):
        try:
            # The item must behave like a dict with a 'device_tag' key...
            device_tag = other['device_tag']
            # ...and the tag must be one librarian knows how to build.
            librarian.get(device_tag)
        except (AttributeError, TypeError, xcepts.LibvirtXMLError):
            # Required to always raise TypeError for list API in VMXML class
            raise TypeError("Unsupported item type: %s" % str(type(other)))

    def __setitem__(self, key, value):
        # Validate before delegating to the plain-list implementation.
        self.__type_check__(value)
        list.__setitem__(self, key, value)
        return self

    def append(self, value):
        # Validate before delegating to the plain-list implementation.
        self.__type_check__(value)
        list.append(self, value)
        return self

    def extend(self, iterable):
        # Route through append() so __type_check__ runs for every item.
        for item in iterable:
            self.append(item)
        return self

    def by_device_tag(self, tag):
        # Collect every contained device whose device_tag matches.
        matches = VMXMLDevices()
        for device in self:
            if device.device_tag == tag:
                matches.append(device)
        return matches
class VMXMLBase(base.LibvirtXMLBase):

    """
    Accessor methods for VMXML class properties (items in __slots__)

    Properties:
        hypervisor_type: string, hypervisor type name
            get: return domain's type attribute value
            set: change domain type attribute value
            del: raise xcepts.LibvirtXMLError
        vm_name: string, name of the vm
            get: return text value of name tag
            set: set text value of name tag
            del: raise xcepts.LibvirtXMLError
        uuid: string, uuid string for vm
            get: return text value of uuid tag
            set: set text value for (new) uuid tag (unvalidated)
            del: remove uuid tag
        vcpu, max_mem, current_mem: integers
            get: returns integer
            set: set integer
            del: removes tag
        numa: dictionary
            get: return dictionary of numatune/memory attributes
            set: set numatune/memory attributes from dictionary
            del: remove numatune/memory tag
        devices: VMXMLDevices (list-like)
            get: returns VMXMLDevices instance for all devices
            set: Define all devices from VMXMLDevices instance
            del: remove all devices
        cputune: VMCPUTune
            get: return VMCPUTune instance for the domain.
            set: Define cputune tag from a VMCPUTune instance.
            del: remove cputune tag
        current_vcpu: string, 'current' attribute of vcpu tag
            get: return a string for 'current' attribute of vcpu
            set: change 'current' attribute of vcpu
            del: remove 'current' attribute of vcpu
        placement: string, 'placement' attribute of vcpu tag
            get: return a string for 'placement' attribute of vcpu
            set: change 'placement' attribute of vcpu
            del: remove 'placement' attribute of vcpu
        emulatorpin: string, cpuset value (see man virsh: cpulist)
            get: return text value of cputune/emulatorpin attributes
            set: set cputune/emulatorpin attributes from string
            del: remove cputune/emulatorpin tag
    """

    # Additional names of attributes and dictionary-keys instances may contain
    __slots__ = ('hypervisor_type', 'vm_name', 'uuid', 'vcpu', 'max_mem',
                 'current_mem', 'numa', 'devices', 'seclabel',
                 'cputune', 'placement', 'current_vcpu', 'os', 'os_type',
                 'os_arch', 'os_init', 'os_boot', 'os_loader', 'os_bios',
                 'pm')

    __uncompareable__ = base.LibvirtXMLBase.__uncompareable__

    __schema_name__ = "domain"

    def __init__(self, virsh_instance=base.virsh):
        # Wire every property in __slots__ to its location in the domain
        # XML: each accessor maps a property name onto a tag/attribute
        # path in the XML tree.
        accessors.XMLAttribute(property_name="hypervisor_type",
                               libvirtxml=self,
                               forbidden=None,
                               parent_xpath='/',
                               tag_name='domain',
                               attribute='type')
        accessors.XMLElementText(property_name="vm_name",
                                 libvirtxml=self,
                                 forbidden=None,
                                 parent_xpath='/',
                                 tag_name='name')
        accessors.XMLElementText(property_name="uuid",
                                 libvirtxml=self,
                                 forbidden=None,
                                 parent_xpath='/',
                                 tag_name='uuid')
        accessors.XMLElementInt(property_name="vcpu",
                                libvirtxml=self,
                                forbidden=None,
                                parent_xpath='/',
                                tag_name='vcpu')
        accessors.XMLAttribute(property_name="current_vcpu",
                               libvirtxml=self,
                               forbidden=None,
                               parent_xpath='/',
                               tag_name='vcpu',
                               attribute='current')
        accessors.XMLAttribute(property_name="placement",
                               libvirtxml=self,
                               forbidden=None,
                               parent_xpath='/',
                               tag_name='vcpu',
                               attribute='placement')
        accessors.XMLElementInt(property_name="max_mem",
                                libvirtxml=self,
                                forbidden=None,
                                parent_xpath='/',
                                tag_name='memory')
        accessors.XMLElementInt(property_name="current_mem",
                                libvirtxml=self,
                                forbidden=None,
                                parent_xpath='/',
                                tag_name='currentMemory')
        # <os> subtree: type/arch/init/boot/loader/bios
        accessors.XMLElementText(property_name="os",
                                 libvirtxml=self,
                                 forbidden=None,
                                 parent_xpath='/',
                                 tag_name='os')
        accessors.XMLElementText(property_name="os_type",
                                 libvirtxml=self,
                                 forbidden=None,
                                 parent_xpath='/os',
                                 tag_name='type')
        accessors.XMLAttribute(property_name="os_arch",
                               libvirtxml=self,
                               forbidden=None,
                               parent_xpath='/os',
                               tag_name='type',
                               attribute='arch')
        accessors.XMLElementDict(property_name="os_boot",
                                 libvirtxml=self,
                                 forbidden=None,
                                 parent_xpath='/os',
                                 tag_name='boot')
        accessors.XMLElementText(property_name="os_init",
                                 libvirtxml=self,
                                 forbidden=None,
                                 parent_xpath='/os',
                                 tag_name='init')
        accessors.XMLElementText(property_name="os_loader",
                                 libvirtxml=self,
                                 forbidden=None,
                                 parent_xpath='/os',
                                 tag_name='loader')
        accessors.XMLElementDict(property_name="os_bios",
                                 libvirtxml=self,
                                 forbidden=None,
                                 parent_xpath='/os',
                                 tag_name='bios')
        # NOTE(review): parent_xpath here is 'numatune' (no leading '/'),
        # unlike the other accessors - kept as-is.
        accessors.XMLElementDict(property_name="numa",
                                 libvirtxml=self,
                                 forbidden=None,
                                 parent_xpath='numatune',
                                 tag_name='memory')
        # Nested sub-documents handled by dedicated XML classes.
        accessors.XMLElementNest(property_name='cputune',
                                 libvirtxml=self,
                                 parent_xpath='/',
                                 tag_name='cputune',
                                 subclass=VMCPUTune,
                                 subclass_dargs={
                                     'virsh_instance': virsh_instance})
        accessors.XMLElementNest(property_name='pm',
                                 libvirtxml=self,
                                 parent_xpath='/',
                                 tag_name='pm',
                                 subclass=VMPM,
                                 subclass_dargs={
                                     'virsh_instance': virsh_instance})
        super(VMXMLBase, self).__init__(virsh_instance=virsh_instance)

    def get_devices(self, device_type=None):
        """
        Put all nodes of devices into a VMXMLDevices instance.

        :param device_type: optional tag name filter; None returns all
        :return: VMXMLDevices of device instances built via librarian
        """
        devices = VMXMLDevices()
        all_devices = self.xmltreefile.find('devices')
        if device_type is not None:
            device_nodes = all_devices.findall(device_type)
        else:
            device_nodes = all_devices
        for node in device_nodes:
            device_tag = node.tag
            device_class = librarian.get(device_tag)
            new_one = device_class.new_from_element(node,
                                                   virsh_instance=self.virsh)
            devices.append(new_one)
        return devices

    def set_devices(self, value):
        """
        Define devices based on contents of VMXMLDevices instance

        :param value: VMXMLDevices instance (or subclass)
        :raise: LibvirtXMLError when value is not a VMXMLDevices
        """
        value_type = type(value)
        if not issubclass(value_type, VMXMLDevices):
            raise xcepts.LibvirtXMLError("Value %s Must be a VMXMLDevices or "
                                         "subclass not a %s"
                                         % (str(value), str(value_type)))
        # Start with clean slate
        exist_dev = self.xmltreefile.find('devices')
        if exist_dev is not None:
            self.del_devices()
        if len(value) > 0:
            devices_element = xml_utils.ElementTree.SubElement(
                self.xmltreefile.getroot(), 'devices')
            for device in value:
                # Separate the element from the tree
                device_element = device.xmltreefile.getroot()
                devices_element.append(device_element)
        self.xmltreefile.write()

    def del_devices(self):
        """
        Remove all devices
        """
        self.xmltreefile.remove_by_xpath('/devices')
        self.xmltreefile.write()

    def get_seclabel(self):
        """
        Return seclabel + child attribute dict or raise LibvirtXML error

        :return: dict of seclabel's attributes and children
        :raise: LibvirtXMLError when no seclabel tag exists (NOTE(review):
            despite the original docstring, a missing seclabel raises
            rather than returning None)
        """
        __children_list__ = ['label', 'baselabel', 'imagelabel']

        seclabel_node = self.xmltreefile.find("seclabel")
        # no seclabel tag found in xml.
        if seclabel_node is None:
            raise xcepts.LibvirtXMLError("Seclabel for this domain does not "
                                         "exist")
        seclabel = dict(seclabel_node.items())

        for child_name in __children_list__:
            child_node = seclabel_node.find(child_name)
            if child_node is not None:
                seclabel[child_name] = child_node.text

        return seclabel

    def set_seclabel(self, seclabel_dict):
        """
        Set seclabel of vm. Modify the attributes and children if seclabel
        exists and create a new seclabel if seclabel is not found in
        xmltreefile.

        :param seclabel_dict: mapping of seclabel attributes
            ('type'/'model'/'relabel') and children
            ('label'/'baselabel'/'imagelabel') to their values; unknown
            keys are silently ignored
        """
        __attributs_list__ = ['type', 'model', 'relabel']
        __children_list__ = ['label', 'baselabel', 'imagelabel']

        # check the type of seclabel_dict.
        if not isinstance(seclabel_dict, dict):
            raise xcepts.LibvirtXMLError("seclabel_dict should be a instance of"
                                         "dict, but not a %s.\n"
                                         % type(seclabel_dict))

        seclabel_node = self.xmltreefile.find("seclabel")
        if seclabel_node is None:
            # create the tag on demand
            seclabel_node = xml_utils.ElementTree.SubElement(
                self.xmltreefile.getroot(),
                "seclabel")

        for key, value in seclabel_dict.items():
            if key in __children_list__:
                child_node = seclabel_node.find(key)
                if child_node is None:
                    child_node = xml_utils.ElementTree.SubElement(
                        seclabel_node,
                        key)
                child_node.text = value
            elif key in __attributs_list__:
                seclabel_node.set(key, value)
            else:
                continue
        self.xmltreefile.write()

    def del_seclabel(self):
        """
        Remove the seclabel tag from a domain
        """
        try:
            self.xmltreefile.remove_by_xpath("/seclabel")
        except (AttributeError, TypeError):
            pass  # Element already doesn't exist
        self.xmltreefile.write()
class VMXML(VMXMLBase):

    """
    Higher-level manipulations related to VM's XML or guest/host state
    """

    # Must copy these here or there will be descriptor problems
    __slots__ = []

    def __init__(self, hypervisor_type='kvm', virsh_instance=base.virsh):
        """
        Create new VM XML instance

        :param hypervisor_type: value for the domain 'type' attribute
        :param virsh_instance: virsh module or instance to use
        """
        super(VMXML, self).__init__(virsh_instance=virsh_instance)
        # Setup some bare-bones XML to build upon
        self.xml = u"<domain type='%s'></domain>" % hypervisor_type
    @staticmethod  # static method (no self) needed b/c calls VMXML.__new__
    def new_from_dumpxml(vm_name, options="", virsh_instance=base.virsh):
        """
        Return new VMXML instance from virsh dumpxml command

        :param vm_name: Name of VM to dumpxml
        :param options: extra options passed to virsh dumpxml
        :param virsh_instance: virsh module or instance to use
        :return: New initialized VMXML instance
        """
        # TODO: Look up hypervisor_type on incoming XML
        vmxml = VMXML(virsh_instance=virsh_instance)
        vmxml['xml'] = virsh_instance.dumpxml(vm_name,
                                              extra=options).stdout.strip()
        return vmxml
@staticmethod
def new_from_inactive_dumpxml(vm_name, options="", virsh_instance=base.virsh):
"""
Return new VMXML instance of inactive domain from virsh dumpxml command
:param vm_name: Name of VM to dumpxml
:param options: virsh dumpxml command's options
:param virsh_instance: virsh module or instance to use
:return: New initialized VMXML instance
"""
if options.find("--inactive") == -1:
options += " --inactive"
return VMXML.new_from_dumpxml(vm_name, options, virsh_instance)
    @staticmethod
    def get_device_class(type_name):
        """
        Return class that handles type_name devices, or raise exception.

        :param type_name: device tag name (e.g. 'disk', 'interface')
        :return: the device class registered with librarian
        """
        return librarian.get(type_name)
    def undefine(self, options=None):
        """Undefine this VM with libvirt retaining XML in instance

        :param options: extra options forwarded to the domain removal
        :return: result of virsh.remove_domain (presumably truthy on
            success -- callers test it with `if not ...`)
        """
        return self.virsh.remove_domain(self.vm_name, options)
def define(self):
"""Define VM with virsh from this instance"""
result = self.virsh.define(self.xml)
if result.exit_status:
logging.debug("Define %s failed.\n"
"Detail: %s.", self.vm_name, result.stderr)
return False
return True
def sync(self, options=None):
"""Rebuild VM with the config file."""
# If target vm no longer exist, this will raise an exception.
try:
backup = self.new_from_dumpxml(self.vm_name)
except IOError:
logging.debug("Failed to backup %s.", self.vm_name)
backup = None
if not self.undefine(options):
raise xcepts.LibvirtXMLError("Failed to undefine %s."
% self.vm_name)
if not self.define():
if backup:
backup.define()
raise xcepts.LibvirtXMLError("Failed to define %s, from %s."
% (self.vm_name, self.xml))
@staticmethod
def vm_rename(vm, new_name, uuid=None, virsh_instance=base.virsh):
"""
Rename a vm from its XML.
:param vm: VM class type instance
:param new_name: new name of vm
:param uuid: new_vm's uuid, if None libvirt will generate.
:return: a new VM instance
"""
if vm.is_alive():
vm.destroy(gracefully=True)
vmxml = VMXML.new_from_dumpxml(vm_name=vm.name,
virsh_instance=virsh_instance)
backup = vmxml.copy()
# can't do in-place rename, must operate on XML
if not vmxml.undefine():
del vmxml # clean up temporary files
raise xcepts.LibvirtXMLError("Error reported while undefining VM")
# Alter the XML
vmxml.vm_name = new_name
if uuid is None:
# invalidate uuid so libvirt will regenerate
del vmxml.uuid
vm.uuid = None
else:
vmxml.uuid = uuid
vm.uuid = uuid
# Re-define XML to libvirt
logging.debug("Rename %s to %s.", vm.name, new_name)
# error message for failed define
error_msg = "Error reported while defining VM:\n"
try:
if not vmxml.define():
raise xcepts.LibvirtXMLError(error_msg + "%s"
% vmxml.get('xml'))
except error.CmdError, detail:
del vmxml # clean up temporary files
# Allow exceptions thrown here since state will be undefined
backup.define()
raise xcepts.LibvirtXMLError(error_msg + "%s" % detail)
# Keep names uniform
vm.name = new_name
return vm
@staticmethod
def set_pm_suspend(vm_name, mem="yes", disk="yes", virsh_instance=base.virsh):
"""
Add/set pm suspend Support
:params vm_name: Name of defined vm
:params mem: Enable suspend to memory
:params disk: Enable suspend to disk
"""
# Build a instance of class VMPM.
pm = VMPM()
pm.mem_enabled = mem
pm.disk_enabled = disk
# Set pm to the new instance.
vmxml = VMXML.new_from_dumpxml(vm_name, virsh_instance=virsh_instance)
vmxml.pm = pm
vmxml.sync()
@staticmethod
def set_vm_vcpus(vm_name, value, current=None, virsh_instance=base.virsh):
"""
Convenience method for updating 'vcpu' and 'current' attribute property
of a defined VM
:param vm_name: Name of defined vm to change vcpu elemnet data
:param value: New data value, None to delete.
:param current: New current value, None will not change current value
"""
vmxml = VMXML.new_from_dumpxml(vm_name, virsh_instance=virsh_instance)
if value is not None:
if current is not None:
if current > value:
raise xcepts.LibvirtXMLError(
"The cpu current value %s is larger than max number %s"
% (current, value))
else:
vmxml['vcpu'] = value # call accessor method to change XML
vmxml['current_vcpu'] = current
else: # value is None
del vmxml.vcpu
vmxml.undefine()
vmxml.define()
# Temporary files for vmxml cleaned up automatically
# when it goes out of scope here.
@staticmethod
def check_cpu_mode(mode):
"""
Check input cpu mode invalid or not.
:param mode: the mode of cpu:'host-model'...
"""
# Possible values for the mode attribute are:
# "custom", "host-model", "host-passthrough"
cpu_mode = ["custom", "host-model", "host-passthrough"]
if mode.strip() not in cpu_mode:
raise xcepts.LibvirtXMLError(
"The cpu mode '%s' is invalid!" % mode)
def get_disk_all(self):
"""
Return VM's disk from XML definition, None if not set
"""
disk_nodes = self.xmltreefile.find('devices').findall('disk')
disks = {}
for node in disk_nodes:
dev = node.find('target').get('dev')
disks[dev] = node
return disks
@staticmethod
def get_disk_source(vm_name, option="", virsh_instance=base.virsh):
"""
Get block device of a defined VM's disks.
:param vm_name: Name of defined vm.
:param option: extra option.
"""
vmxml = VMXML.new_from_dumpxml(vm_name, option,
virsh_instance=virsh_instance)
disks = vmxml.get_disk_all()
return disks.values()
@staticmethod
def get_disk_blk(vm_name, virsh_instance=base.virsh):
"""
Get block device of a defined VM's disks.
:param vm_name: Name of defined vm.
"""
vmxml = VMXML.new_from_dumpxml(vm_name, virsh_instance=virsh_instance)
disks = vmxml.get_disk_all()
return disks.keys()
@staticmethod
def get_disk_count(vm_name, virsh_instance=base.virsh):
"""
Get count of VM's disks.
:param vm_name: Name of defined vm.
"""
vmxml = VMXML.new_from_dumpxml(vm_name, virsh_instance=virsh_instance)
disks = vmxml.get_disk_all()
if disks is not None:
return len(disks)
return 0
@staticmethod
def check_disk_exist(vm_name, disk_src, virsh_instance=base.virsh):
"""
Check if given disk exist in VM.
:param vm_name: Domain name.
:param disk_src: Domian disk source path or darget dev.
:return: True/False
"""
found = False
vmxml = VMXML.new_from_dumpxml(vm_name, virsh_instance=virsh_instance)
if not vmxml.get_disk_count(vm_name, virsh_instance=virsh_instance):
raise xcepts.LibvirtXMLError("No disk in domain %s." % vm_name)
blk_list = vmxml.get_disk_blk(vm_name, virsh_instance=virsh_instance)
disk_list = vmxml.get_disk_source(vm_name, virsh_instance=virsh_instance)
try:
file_list = []
for disk in disk_list:
file_list.append(disk.find('source').get('file'))
except AttributeError:
logging.debug("No 'file' type disk.")
if disk_src in file_list + blk_list:
found = True
return found
@staticmethod
def check_disk_type(vm_name, disk_src, disk_type, virsh_instance=base.virsh):
"""
Check if disk type is correct in VM
:param vm_name: Domain name.
:param disk_src: Domain disk source path
:param disk_type: Domain disk type
:return: True/False
"""
vmxml = VMXML.new_from_dumpxml(vm_name, virsh_instance=virsh_instance)
if not vmxml.get_disk_count(vm_name, virsh_instance=virsh_instance):
raise xcepts.LibvirtXMLError("No disk in domain %s." % vm_name)
disks = vmxml.get_disk_source(vm_name, virsh_instance=virsh_instance)
found = False
try:
for disk in disks:
disk_dev = ""
if disk_type == "file":
disk_dev = disk.find('source').get('file')
elif disk_type == "block":
disk_dev = disk.find('source').get('dev')
if disk_src == disk_dev:
found = True
except AttributeError:
logging.debug("No '%s' type disk." % disk_type)
return found
@staticmethod
def get_disk_serial(vm_name, disk_target, virsh_instance=base.virsh):
"""
Get disk serial in VM
:param vm_name: Domain name.
:param disk_target: Domain disk target
:return: disk serial
"""
vmxml = VMXML.new_from_dumpxml(vm_name, virsh_instance=virsh_instance)
if not vmxml.get_disk_count(vm_name, virsh_instance=virsh_instance):
raise xcepts.LibvirtXMLError("No disk in domain %s." % vm_name)
try:
disk = vmxml.get_disk_all()[disk_target]
except KeyError:
raise xcepts.LibvirtXMLError("Wrong disk target:%s." % disk_target)
serial = ""
try:
serial = disk.find("serial").text
except AttributeError:
logging.debug("No serial assigned.")
return serial
@staticmethod
def get_disk_address(vm_name, disk_target, virsh_instance=base.virsh):
"""
Get disk address in VM
:param vm_name: Domain name.
:param disk_target: Domain disk target
:return: disk address
"""
vmxml = VMXML.new_from_dumpxml(vm_name, virsh_instance=virsh_instance)
if not vmxml.get_disk_count(vm_name, virsh_instance=virsh_instance):
raise xcepts.LibvirtXMLError("No disk in domain %s." % vm_name)
try:
disk = vmxml.get_disk_all()[disk_target]
except KeyError:
raise xcepts.LibvirtXMLError("Wrong disk target:%s." % disk_target)
address_str = ""
try:
address = disk.find("address")
add_type = address.get("type")
add_domain = address.get("domain")
add_bus = address.get("bus")
add_slot = address.get("slot")
add_func = address.get("function")
address_str = ("%s:%s.%s.%s.%s"
% (add_type, add_domain, add_bus,
add_slot, add_func))
except AttributeError, e:
raise xcepts.LibvirtXMLError("Get wrong attribute: %s" % str(e))
return address_str
@staticmethod
def get_numa_params(vm_name, virsh_instance=base.virsh):
"""
Return VM's numa setting from XML definition
"""
vmxml = VMXML.new_from_dumpxml(vm_name, virsh_instance=virsh_instance)
return vmxml.numa
def get_primary_serial(self):
"""
Get a dict with primary serial features.
"""
xmltreefile = self.__dict_get__('xml')
primary_serial = xmltreefile.find('devices').find('serial')
serial_features = {}
serial_type = primary_serial.get('type')
serial_port = primary_serial.find('target').get('port')
# Support node here for more features
serial_features['serial'] = primary_serial
# Necessary features
serial_features['type'] = serial_type
serial_features['port'] = serial_port
return serial_features
@staticmethod
def set_primary_serial(vm_name, dev_type, port, path=None,
virsh_instance=base.virsh):
"""
Set primary serial's features of vm_name.
:param vm_name: Name of defined vm to set primary serial.
:param dev_type: the type of ``serial:pty,file...``
:param port: the port of serial
:param path: the path of serial, it is not necessary for pty
"""
vmxml = VMXML.new_from_dumpxml(vm_name, virsh_instance=virsh_instance)
xmltreefile = vmxml.__dict_get__('xml')
try:
serial = vmxml.get_primary_serial()['serial']
except AttributeError:
logging.debug("Can not find any serial, now create one.")
# Create serial tree, default is pty
serial = xml_utils.ElementTree.SubElement(
xmltreefile.find('devices'),
'serial', {'type': 'pty'})
# Create elements of serial target, default port is 0
xml_utils.ElementTree.SubElement(serial, 'target', {'port': '0'})
serial.set('type', dev_type)
serial.find('target').set('port', port)
# path may not be exist.
if path is not None:
serial.find('source').set('path', path)
else:
try:
source = serial.find('source')
serial.remove(source)
except AssertionError:
pass # Element not found, already removed.
xmltreefile.write()
vmxml.set_xml(xmltreefile.name)
vmxml.undefine()
vmxml.define()
@staticmethod
def set_agent_channel(vm_name):
"""
Add channel for guest agent running
:param vm_name: Name of defined vm to set agent channel
"""
vmxml = VMXML.new_from_dumpxml(vm_name)
try:
exist = vmxml.__dict_get__('xml').find('devices').findall('channel')
findc = 0
for ec in exist:
if ec.find('target').get('name') == "org.qemu.guest_agent.0":
findc = 1
break
if findc == 0:
raise AttributeError("Cannot find guest agent channel")
except AttributeError:
channel = vmxml.get_device_class('channel')(type_name='unix')
channel.add_source(mode='bind',
path='/var/lib/libvirt/qemu/guest.agent')
channel.add_target(type='virtio',
name='org.qemu.guest_agent.0')
vmxml.devices = vmxml.devices.append(channel)
vmxml.define()
@staticmethod
def remove_agent_channel(vm_name):
"""
Delete channel for guest agent
:param vm_name: Name of defined vm to remove agent channel
"""
vmxml = VMXML.new_from_dumpxml(vm_name)
try:
exist = vmxml.__dict_get__('xml').find('devices').findall('channel')
for ec in exist:
if ec.find('target').get('name') == "org.qemu.guest_agent.0":
channel = vmxml.get_device_class('channel')(type_name='unix')
channel.add_source(mode='bind',
path=ec.find('source').get('path'))
channel.add_target(type='virtio',
name=ec.find('target').get('name'))
vmxml.del_device(channel)
vmxml.define()
except AttributeError:
raise xcepts.LibvirtXMLError("Fail to remove agent channel!")
def get_iface_all(self):
"""
Get a dict with interface's mac and node.
"""
iface_nodes = self.xmltreefile.find('devices').findall('interface')
interfaces = {}
for node in iface_nodes:
mac_addr = node.find('mac').get('address')
interfaces[mac_addr] = node
return interfaces
@staticmethod
def get_iface_by_mac(vm_name, mac, virsh_instance=base.virsh):
"""
Get the interface if mac is matched.
:param vm_name: Name of defined vm.
:param mac: a mac address.
:return: return a dict include main interface's features
"""
vmxml = VMXML.new_from_dumpxml(vm_name, virsh_instance=virsh_instance)
interfaces = vmxml.get_iface_all()
try:
interface = interfaces[mac]
except KeyError:
interface = None
if interface is not None: # matched mac exists.
iface_type = interface.get('type')
source = interface.find('source').get(iface_type)
features = {}
features['type'] = iface_type
features['mac'] = mac
features['source'] = source
return features
else:
return None
@staticmethod
def get_iface_dev(vm_name, virsh_instance=base.virsh):
"""
Return VM's interface device from XML definition, None if not set
"""
vmxml = VMXML.new_from_dumpxml(vm_name, virsh_instance=virsh_instance)
ifaces = vmxml.get_iface_all()
if ifaces:
return ifaces.keys()
return None
@staticmethod
def get_first_mac_by_name(vm_name, virsh_instance=base.virsh):
"""
Convenience method for getting first mac of a defined VM
:param: vm_name: Name of defined vm to get mac
"""
vmxml = VMXML.new_from_dumpxml(vm_name, virsh_instance=virsh_instance)
xmltreefile = vmxml.__dict_get__('xml')
try:
iface = xmltreefile.find('devices').find('interface')
return iface.find('mac').get('address')
except AttributeError:
return None
@staticmethod
def get_iftune_params(vm_name, options="", virsh_instance=base.virsh):
"""
Return VM's interface tuning setting from XML definition
"""
vmxml = VMXML.new_from_dumpxml(vm_name, options=options,
virsh_instance=virsh_instance)
xmltreefile = vmxml.__dict_get__('xml')
iftune_params = {}
bandwidth = None
try:
bandwidth = xmltreefile.find('devices/interface/bandwidth')
try:
iftune_params['inbound'] = bandwidth.find(
'inbound').get('average')
iftune_params['outbound'] = bandwidth.find(
'outbound').get('average')
except AttributeError:
logging.error("Can't find <inbound> or <outbound> element")
except AttributeError:
logging.error("Can't find <bandwidth> element")
return iftune_params
def get_net_all(self):
"""
Return VM's net from XML definition, None if not set
"""
xmltreefile = self.__dict_get__('xml')
net_nodes = xmltreefile.find('devices').findall('interface')
nets = {}
for node in net_nodes:
dev = node.find('target').get('dev')
nets[dev] = node
return nets
# TODO re-visit this method after the libvirt_xml.devices.interface module
# is implemented
@staticmethod
def get_net_dev(vm_name):
"""
Get net device of a defined VM's nets.
:param vm_name: Name of defined vm.
"""
vmxml = VMXML.new_from_dumpxml(vm_name)
nets = vmxml.get_net_all()
if nets is not None:
return nets.keys()
return None
@staticmethod
def set_cpu_mode(vm_name, mode='host-model'):
"""
Set cpu's mode of VM.
:param vm_name: Name of defined vm to set cpu mode.
:param mode: the mode of cpu:'host-model'...
"""
vmxml = VMXML.new_from_dumpxml(vm_name)
vmxml.check_cpu_mode(mode)
xmltreefile = vmxml.__dict_get__('xml')
try:
cpu = xmltreefile.find('/cpu')
logging.debug("Current cpu mode is '%s'!", cpu.get('mode'))
cpu.set('mode', mode)
except AttributeError:
logging.debug("Can not find any cpu, now create one.")
cpu = xml_utils.ElementTree.SubElement(xmltreefile.getroot(),
'cpu', {'mode': mode})
xmltreefile.write()
vmxml.undefine()
vmxml.define()
def add_device(self, value):
"""
Add a device into VMXML.
:param value: instance of device in libvirt_xml/devices/
"""
devices = self.get_devices()
for device in devices:
if device == value:
logging.debug("Device %s is already in VM %s.", value, self)
return
devices.append(value)
self.set_devices(devices)
def del_device(self, value):
"""
Remove a device from VMXML
:param value: instance of device in libvirt_xml/devices/
"""
devices = self.get_devices()
not_found = True
for device in devices:
if device == value:
not_found = False
devices.remove(device)
break
if not_found:
logging.debug("Device %s does not exist in VM %s.", value, self)
return
self.set_devices(devices)
@staticmethod
def add_security_info(vmxml, passwd):
"""
Add passwd for graphic
:param vmxml: instance of VMXML
:param passwd: Password you want to set
"""
devices = vmxml.devices
graphics_index = devices.index(devices.by_device_tag('graphics')[0])
graphics = devices[graphics_index]
graphics.passwd = passwd
vmxml.devices = devices
vmxml.define()
def add_hostdev(self, source_address, mode='subsystem',
type='pci',
managed='yes'):
"""
Add a hostdev device to guest.
"""
dev = self.get_device_class('hostdev')()
dev.mode = mode
dev.type = type
dev.managed = managed
dev.source_address = source_address
self.add_device(dev)
@staticmethod
def get_blkio_params(vm_name, options="", virsh_instance=base.virsh):
"""
Return VM's block I/O setting from XML definition
"""
vmxml = VMXML.new_from_dumpxml(vm_name, options=options,
virsh_instance=virsh_instance)
xmltreefile = vmxml.__dict_get__('xml')
blkio_params = {}
try:
blkio = xmltreefile.find('blkiotune')
try:
blkio_params['weight'] = blkio.find('weight').text
except AttributeError:
logging.error("Can't find <weight> element")
except AttributeError:
logging.error("Can't find <blkiotune> element")
if blkio and blkio.find('device'):
blkio_params['device_weights_path'] = \
blkio.find('device').find('path').text
blkio_params['device_weights_weight'] = \
blkio.find('device').find('weight').text
return blkio_params
class VMCPUXML(VMXML):
    """
    Higher-level manipulations related to VM's XML(CPU)
    """
    # Must copy these here or there will be descriptor problems
    __slots__ = ('model', 'vendor', 'feature_list',)
    def __init__(self, virsh_instance=base.virsh, vm_name='', mode='host-model'):
        """
        Create new VMCPU XML instance

        :param virsh_instance: virsh module or instance to use
        :param vm_name: name of the domain whose CPU XML is loaded
        :param mode: cpu mode set on the domain before dumping
        """
        # The set action is for test.
        accessors.XMLElementText(property_name="model",
                                 libvirtxml=self,
                                 forbidden=['del'],
                                 parent_xpath='/cpu',
                                 tag_name='model')
        accessors.XMLElementText(property_name="vendor",
                                 libvirtxml=self,
                                 forbidden=['del'],
                                 parent_xpath='/cpu',
                                 tag_name='vendor')
        # This will skip self.get_feature_list() defined below
        accessors.AllForbidden(property_name="feature_list",
                               libvirtxml=self)
        super(VMCPUXML, self).__init__(virsh_instance=virsh_instance)
        # Setup some bare-bones XML to build upon
        self.set_cpu_mode(vm_name, mode)
        self['xml'] = self.__dict_get__('virsh').dumpxml(vm_name,
                                                         extra="--update-cpu").stdout.strip()
    def get_feature_list(self):
        """
        Accessor method for feature_list property (in __slots__)
        """
        feature_list = []
        xmltreefile = self.__dict_get__('xml')
        for feature_node in xmltreefile.findall('/cpu/feature'):
            feature_list.append(feature_node)
        return feature_list
    def get_feature_name(self, num):
        """
        Get assigned feature name

        :param num: Assigned feature number
        :return: Assigned feature name
        """
        count = len(self.feature_list)
        if num >= count:
            raise xcepts.LibvirtXMLError("Get %d from %d features"
                                         % (num, count))
        feature_name = self.feature_list[num].get('name')
        return feature_name
    def remove_feature(self, num):
        """
        Remove an assigned feature from xml

        :param num: Assigned feature number
        """
        xmltreefile = self.__dict_get__('xml')
        count = len(self.feature_list)
        if num >= count:
            raise xcepts.LibvirtXMLError("Remove %d from %d features"
                                         % (num, count))
        feature_remove_node = self.feature_list[num]
        cpu_node = xmltreefile.find('/cpu')
        cpu_node.remove(feature_remove_node)
    @staticmethod
    def check_feature_name(value):
        """
        Check feature name valid or not.

        :param value: The feature name
        :return: True if check pass
        """
        sys_feature = []
        cpu_info_file = open('/proc/cpuinfo', 'r')
        try:
            # Iterate the *lines* of the file.  The original code looped over
            # readline(), which walks the characters of the first line only,
            # so the 'flags' line was never found and the file never closed.
            for line in cpu_info_file:
                if line.find('flags') != -1:
                    feature_names = line.split(':')[1].strip()
                    sys_sub_feature = feature_names.split(' ')
                    sys_feature = list(set(sys_feature + sys_sub_feature))
        finally:
            cpu_info_file.close()
        return (value in sys_feature)
    def set_feature(self, num, value):
        """
        Set an assigned feature value to xml

        :param num: Assigned feature number
        :param value: The feature name modified to
        """
        count = len(self.feature_list)
        if num >= count:
            raise xcepts.LibvirtXMLError("Set %d from %d features"
                                         % (num, count))
        feature_set_node = self.feature_list[num]
        feature_set_node.set('name', value)
    def add_feature(self, value):
        """
        Add a feature Element to xml

        :param value: name attribute of the new feature element
        """
        xmltreefile = self.__dict_get__('xml')
        cpu_node = xmltreefile.find('/cpu')
        xml_utils.ElementTree.SubElement(cpu_node, 'feature', {'name': value})
class VMClockXML(VMXML):
    """
    Higher-level manipulations related to VM's XML(Clock)
    """
    # Must copy these here or there will be descriptor problems
    __slots__ = ('offset', 'timezone', 'adjustment', 'timers')
    def __init__(self, virsh_instance=base.virsh, offset="utc"):
        """
        Create new VMClock XML instance
        """
        # The set action is for test.
        # offset/timezone/adjustment all map to attributes of the single
        # <clock> element directly under the domain root.
        accessors.XMLAttribute(property_name="offset",
                               libvirtxml=self,
                               forbidden=[],
                               parent_xpath='/',
                               tag_name='clock',
                               attribute='offset')
        accessors.XMLAttribute(property_name="timezone",
                               libvirtxml=self,
                               forbidden=[],
                               parent_xpath='/',
                               tag_name='clock',
                               attribute='timezone')
        accessors.XMLAttribute(property_name="adjustment",
                               libvirtxml=self,
                               forbidden=[],
                               parent_xpath='/',
                               tag_name='clock',
                               attribute='adjustment')
        # timers marshals between <clock><timer .../></clock> children and
        # Timer instances via the two static methods below.
        accessors.XMLElementList(property_name="timers",
                                 libvirtxml=self,
                                 forbidden=[],
                                 parent_xpath="/clock",
                                 marshal_from=self.marshal_from_timer,
                                 marshal_to=self.marshal_to_timer)
        super(VMClockXML, self).__init__(virsh_instance=virsh_instance)
        # Set default offset for clock
        self.offset = offset
    def from_dumpxml(self, vm_name, virsh_instance=base.virsh):
        """Helper to load xml from domain."""
        self.xml = VMXML.new_from_dumpxml(vm_name,
                                          virsh_instance=virsh_instance).xml
    # Sub-element of clock
    class Timer(VMXML):
        """Timer element of clock"""
        __slots__ = ('name', 'present')
        def __init__(self, virsh_instance=base.virsh, timer_name="tsc"):
            """
            Create new Timer XML instance
            """
            # The set action is for test.
            accessors.XMLAttribute(property_name="name",
                                   libvirtxml=self,
                                   forbidden=[],
                                   parent_xpath='/clock',
                                   tag_name='timer',
                                   attribute='name')
            accessors.XMLAttribute(property_name="present",
                                   libvirtxml=self,
                                   forbidden=[],
                                   parent_xpath='/clock',
                                   tag_name='timer',
                                   attribute='present')
            super(VMClockXML.Timer, self).__init__(virsh_instance=virsh_instance)
            # name is mandatory for timer
            self.name = timer_name
        def update(self, attr_dict):
            # Bulk-assign attributes (routed through the accessors above).
            for attr, value in attr_dict.items():
                setattr(self, attr, value)
    @staticmethod
    def marshal_from_timer(item, index, libvirtxml):
        """Convert a Timer instance into tag + attributes"""
        del index
        del libvirtxml
        timer = item.xmltreefile.find("clock/timer")
        try:
            return (timer.tag, dict(timer.items()))
        except AttributeError:  # Didn't find timer
            raise xcepts.LibvirtXMLError("Expected a list of timer "
                                         "instances, not a %s" % str(item))
    @staticmethod
    def marshal_to_timer(tag, attr_dict, index, libvirtxml):
        """Convert a tag + attributes to a Timer instance"""
        del index
        # Non-timer children are ignored (None tells the accessor to skip).
        if tag == 'timer':
            newone = VMClockXML.Timer(virsh_instance=libvirtxml.virsh)
            newone.update(attr_dict)
            return newone
        else:
            return None
class VMCPUTune(base.LibvirtXMLBase):
    """
    CPU tuning tag XML class

    Elements:
        vcpupins: list of dict - vcpu, cpuset
        emulatorpin: attribute - cpuset
        shares: int
        period: int
        quota: int
        emulator_period: int
        emulator_quota: int
    """
    __slots__ = ('vcpupins', 'emulatorpin', 'shares', 'period', 'quota',
                 'emulator_period', 'emulator_quota')
    def __init__(self, virsh_instance=base.virsh):
        accessors.XMLElementList('vcpupins', self, parent_xpath='/',
                                 marshal_from=self.marshal_from_vcpupins,
                                 marshal_to=self.marshal_to_vcpupins)
        accessors.XMLAttribute('emulatorpin', self, parent_xpath='/',
                               tag_name='emulatorpin', attribute='cpuset')
        # All remaining slots are simple integer sub-elements of <cputune>.
        for slot in self.__all_slots__:
            if slot in ('shares', 'period', 'quota', 'emulator_period',
                        'emulator_quota'):
                accessors.XMLElementInt(slot, self, parent_xpath='/',
                                        tag_name=slot)
        # Name the class explicitly: super(self.__class__, self) recurses
        # forever if this class is ever subclassed.
        super(VMCPUTune, self).__init__(virsh_instance=virsh_instance)
        self.xml = '<cputune/>'
    @staticmethod
    def marshal_from_vcpupins(item, index, libvirtxml):
        """
        Convert a dict to vcpupin tag and attributes.
        """
        del index
        del libvirtxml
        if not isinstance(item, dict):
            raise xcepts.LibvirtXMLError("Expected a dictionary of host "
                                         "attributes, not a %s"
                                         % str(item))
        return ('vcpupin', dict(item))
    @staticmethod
    def marshal_to_vcpupins(tag, attr_dict, index, libvirtxml):
        """
        Convert a vcpupin tag and attributes to a dict.
        """
        del index
        del libvirtxml
        if tag != 'vcpupin':
            return None
        return dict(attr_dict)
class VMPM(base.LibvirtXMLBase):
    """
    VM power management tag XML class

    Elements:
        suspend-to-disk: attribute - enabled
        suspend-to-mem: attribute - enabled
    """
    __slots__ = ('disk_enabled', 'mem_enabled')
    def __init__(self, virsh_instance=base.virsh):
        accessors.XMLAttribute('disk_enabled', self, parent_xpath='/',
                               tag_name='suspend-to-disk', attribute='enabled')
        accessors.XMLAttribute('mem_enabled', self, parent_xpath='/',
                               tag_name='suspend-to-mem', attribute='enabled')
        # Name the class explicitly: super(self.__class__, self) recurses
        # forever if this class is ever subclassed.
        super(VMPM, self).__init__(virsh_instance=virsh_instance)
        self.xml = '<pm/>'
|
waynesun09/virt-test
|
virttest/libvirt_xml/vm_xml.py
|
Python
|
gpl-2.0
| 49,655
|
[
"VisIt"
] |
3368701003a5fedec6f1b5a4a97126d715842841aca9395edd3f41fa67143e9b
|
# Copyright (c) 2010 Howard Hughes Medical Institute.
# All rights reserved.
# Use is subject to Janelia Farm Research Campus Software Copyright 1.1 license terms.
# http://license.janelia.org/license/jfrc_copyright_1_1.html
import osg, osgDB, osgText
from shape import Shape, UnitShape, PathShape
import neuroptikon
from network.object import Object
from network.region import Region
from network.neuron import Neuron
from network.arborization import Arborization
from network.neurite import Neurite # pylint: disable=E0611,F0401
from network.stimulus import Stimulus
from network.attribute import Attribute
from library.texture import Texture
from gettext import gettext
from pydispatch import dispatcher
import os.path, random, sys
from math import atan2, pi, sqrt
try:
import xml.etree.cElementTree as ElementTree
except ImportError:
import xml.etree.ElementTree as ElementTree
class Visible(object):
"""
Instances of this class map a network object (neurion, region, etc.) to a specific display. They capture all of the attributes needed to render the object.
You should never create an instance of this class directly. Instead use the value returned by calling :meth:`visualizeObject() <Display.Display.Display.visualizeObject>` on a display. If you want to have a purely visual object that does not represent any object in the biological network then pass None to visualizeObject().
"""
    # Class-level setup: probe for the first label font OSG can load,
    # falling back to None (no label font) when none is available.
    try:
        if osgText.readFontFile(str("Arial Bold.ttf")):
            labelFont = str('Arial Bold.ttf')
        elif osgText.readFontFile(str("ArialBD.ttf")):
            labelFont = str('ArialBD.ttf')
        elif osgText.readFontFile(str("Arial.ttf")):
            labelFont = str('Arial.ttf')
        elif osgText.readFontFile(str("Georgia.ttf")):
            labelFont = str('Georgia.ttf')
        else:
            labelFont = None
    except:
        # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit
        # during class creation -- presumably intentional best-effort here.
        (exceptionType, exceptionValue, exceptionTraceback) = sys.exc_info()
        print 'Could not load Arial font (' + str(exceptionValue) + ' (' + exceptionType.__name__ + ')' + ')'
        labelFont = None
    osgDB.Registry.instance().getReaderWriterForExtension('osg') # Make sure the osg plug-in can be found before the cwd gets changed for a script run.
    def __init__(self, display, client):
        """
        Initialize a Visible that renders *client* in *display*.

        :param display: the owning display; supplies unique ids and settings
        :param client: the network object being visualized, or None for a
                       purely visual (anonymous) proxy
        """
        self.display = display
        self.displayId = display._generateUniqueId()    # a unique identifier within the display
        self.client = client
        self._orphanClass = None    # Object subclass if client used to be non-None but display-only script couldn't find the client
        self._glowColor = None
        self._glowNode = None
        self._glowNodeMaterial = None
        self._glowShape = None
        # Geometry attributes
        # Random initial position keeps newly added visibles from stacking.
        self._position = (random.random() - 0.5, random.random() - 0.5, 0)
        self._positionIsFixed = False
        self._size = (.001, .001, .001)
        self._sizeIsFixed = True
        self._sizeIsAbsolute = False
        self._rotation = (0, 0, 1, 0)
        # Appearance attributes
        self._weight = 1.0
        self._originalWeight = None
        self._label = None
        self._labelNode = None
        self._labelPosition = (0.0, 0.0, 0.0)   # in local coordinates
        self._labelColor = (0.0, 0.0, 0.0)
        self._shape = None
        self._color = (0.5, 0.5, 0.5)
        self._opacity = 1.0
        self._currentOpacity = 1.0
        self._dependencies = set()
        self.dependentVisibles = set()
        # Path attributes
        self._pathMidPoints = []
        self._pathStart = None
        self._pathEnd = None
        self._pathIsFixed = False
        self.connectedPaths = []
        # Flow attributes
        self._animateFlow = False
        self._flowTo = False
        self._flowToColor = None
        self._flowToSpacing = None
        self._flowToSpeed = None
        self._flowToSpread = None
        self._flowFrom = False
        self._flowFromColor = None
        self._flowFromSpacing = None
        self._flowFromSpeed = None
        self._flowFromSpread = None
        # Scene-graph nodes: a transform with a geode named by displayId.
        self.sgNode = osg.MatrixTransform()
        self._shapeGeode = osg.Geode()
        self._shapeGeode.setName(str(self.displayId))
        self.sgNode.addChild(self._shapeGeode)
        self._shapeGeode2 = None
        self._textGeode = None
        self._textDrawable = None
        self._staticTexture = None
        self._staticTextureScale = 1.0
        # Parent and children
        self.parent = None
        self.children = []
        self.childGroup = None  # will get created when children are added
        # Arrangement attributes
        self.arrangedAxis = 'largest'
        self.arrangedSpacing = 0.02
        self.arrangedWeight = 1.0
        self._updateLabel()
        # Re-render the label whenever a display setting that affects it
        # changes; which settings matter depends on the client's type.
        if isinstance(self.client, Region):
            dispatcher.connect(self._displayChangedShowName, ('set', 'showRegionNames'), self.display)
        if isinstance(self.client, Neuron):
            dispatcher.connect(self._displayChangedShowName, ('set', 'showNeuronNames'), self.display)
            dispatcher.connect(self._displayChangedShowName, ('set', 'showNeuronNamesOnSelection'), self.display)
            dispatcher.connect(self._displayChangedShowName, ('set', 'viewDimensions'), self.display)
            dispatcher.connect(self._displayChangedShowName, ('set', 'orthoViewPlane'), self.display)
        dispatcher.connect(self._displayChangedShowName, ('set', 'labelsFloatOnTop'), self.display)
        self._updateOpacity()
        dispatcher.connect(self._displayChangedGhosting, ('set', 'useGhosts'), self.display)
        dispatcher.connect(self._displayChangedGhosting, ('set', 'ghostingOpacity'), self.display)
        if not hasattr(Visible, 'cullFrontFacesAttr'):
            # This is a bit of a hack.  osgswig does not expose the osg::CullFace class which is needed to get proper transparency.
            # To get around this we load an osg file which contains nodes with both front and back cull face state attributes and extract them.
            if neuroptikon.runningFromSource:
                cullFacesPath = os.path.join(neuroptikon.rootDir, 'display', 'cull_faces.osg')
            else:
                cullFacesPath = os.path.join(neuroptikon.rootDir, 'cull_faces.osg')
            # TODO - installed version fails to load cull_faces.osg on Windows
            try:
                cullFacesNode = osgDB.readNodeFile(cullFacesPath)
                cullFacesGroup = cullFacesNode.asGroup()
                Visible.cullFrontFacesAttr = cullFacesGroup.getChild(0).getStateSet().getAttribute(osg.StateAttribute.CULLFACE)
                Visible.cullBackFacesAttr = cullFacesGroup.getChild(1).getStateSet().getAttribute(osg.StateAttribute.CULLFACE)
                # Make sure the node with our attributes doesn't get garbage collected.
                cullFacesNode.ref()
            except IOError:
                print "cullFacesPath = " + cullFacesPath
                pass
def __repr__(self):
if self.client is None:
return gettext('anonymous proxy')
else:
return gettext('proxy of %s') % (self.client.name or self.client.defaultName() or gettext('<unnamed %s>') % (self.client.__class__.displayName()))
@classmethod
def _fromXMLElement(cls, xmlElement, display):
orphanClass = xmlElement.get('orphanClass', None)
if orphanClass:
client = None
else:
client = display.network.objectWithId(xmlElement.get('objectId'))
visible = Visible(display, client)
visible._orphanClass = orphanClass
visible.displayId = int(xmlElement.get('id'))
visible._shapeGeode.setName(str(visible.displayId))
if visible._textGeode:
visible._textGeode.setName(str(visible.displayId))
trueStrings = ['true', 't', 'T', 'yes', 'y', 'Y']
falseStrings = ['false', 'f', 'F', 'no', 'n', 'N']
# Set any geometry
geometryElement = xmlElement.find('Geometry')
if geometryElement is None:
geometryElement = xmlElement.find('geometry')
if geometryElement is not None:
positionElement = geometryElement.find('Position')
if positionElement is None:
positionElement = geometryElement.find('position')
if positionElement is not None:
x = float(positionElement.get('x'))
y = float(positionElement.get('y'))
z = float(positionElement.get('z'))
visible.setPosition((x, y, z))
if positionElement.get('fixed') in trueStrings:
visible.setPositionIsFixed(True)
sizeElement = geometryElement.find('Size')
if sizeElement is None:
sizeElement = geometryElement.find('size')
if sizeElement is not None:
width = float(sizeElement.get('x'))
height = float(sizeElement.get('y'))
depth = float(sizeElement.get('z'))
visible.setSize((width, height, depth))
if sizeElement.get('fixed') == 'false':
visible.setSizeIsFixed(False)
if sizeElement.get('absolute') in trueStrings:
visible.setSizeIsAbsolute(True)
rotationElement = geometryElement.find('Rotation')
if rotationElement is None:
rotationElement = geometryElement.find('rotation')
if rotationElement is not None:
x = float(rotationElement.get('x'))
y = float(rotationElement.get('y'))
z = float(rotationElement.get('z'))
angle = float(rotationElement.get('angle'))
visible.setRotation((x, y, z, angle))
# Set any appearance
appearanceElement = xmlElement.find('Appearance')
if appearanceElement is None:
appearanceElement = xmlElement.find('appearance')
if appearanceElement is not None:
labelElement = appearanceElement.find('Label')
if labelElement is None:
labelElement = appearanceElement.find('label')
if labelElement != None:
textElement = labelElement.find('Text')
colorElement = labelElement.find('Color')
positionElement = labelElement.find('Position')
if textElement != None or colorElement != None or positionElement != None:
if textElement != None:
visible.setLabel(textElement.text or '')
if colorElement != None:
visible.setLabelColor((float(colorElement.get('r')), float(colorElement.get('g')), float(colorElement.get('b'))))
if positionElement != None:
visible.setLabelPosition((float(positionElement.get('x')), float(positionElement.get('y')), float(positionElement.get('z'))))
else:
visible.setLabel(labelElement.text or '') # previous XML format
shapeElement = appearanceElement.find('Shape')
shapeClassName = None if shapeElement == None else shapeElement.get('class')
shapeAttrs = {}
if shapeClassName == None:
shapeName = appearanceElement.findtext('Shape') or appearanceElement.findtext('shape')
if shapeName == 'ball':
shapeClassName = 'Ball'
elif shapeName == 'capsule':
shapeClassName = 'Capsule'
elif shapeName == 'cone':
shapeClassName = 'Cone'
elif shapeName in ['Line', 'tube']:
shapeClassName = 'Line'
elif shapeName != None:
shapeClassName = 'Box' # the default
else:
# Get any attributes
for element in shapeElement.findall('Attribute'):
attribute = Attribute._fromXMLElement(object, element)
if attribute is not None:
shapeAttrs[attribute.name()] = attribute.value()
if shapeClassName == None:
visible.setShape(None)
else:
shape = neuroptikon.shapeClass(shapeClassName)(**shapeAttrs)
visible.setShape(shape)
colorElement = appearanceElement.find('Color')
if colorElement is None:
colorElement = appearanceElement.find('color')
if colorElement is not None:
red = float(colorElement.get('r'))
green = float(colorElement.get('g'))
blue = float(colorElement.get('b'))
visible.setColor((red, green, blue))
opacityText = appearanceElement.findtext('Opacity') or appearanceElement.findtext('opacity')
if opacityText is not None:
visible.setOpacity(float(opacityText))
weightText = appearanceElement.findtext('Weight') or appearanceElement.findtext('weight')
if weightText is not None:
visible.setWeight(float(weightText))
textureElement = appearanceElement.find('Texture')
if textureElement is None:
textureElement = appearanceElement.find('texture')
if textureElement is not None:
textureId = textureElement.get('identifier')
textureScale = textureElement.get('scale')
if textureId is None:
textureId = textureElement.text
textureScale = "10.0"
if textureId is not None:
visible.setTexture(neuroptikon.library.texture(textureId))
if textureScale is not None:
visible.setTextureScale(float(textureScale))
# Set up any arrangement
arrangementElement = xmlElement.find('Arrangement')
if arrangementElement is None:
arrangementElement = xmlElement.find('arrangement')
if arrangementElement is not None:
axis = arrangementElement.get('axis')
visible.setArrangedAxis(None if axis == 'None' else axis)
spacing = arrangementElement.get('spacing')
if spacing is not None:
visible.setArrangedSpacing(float(spacing))
weight = arrangementElement.get('weight')
if weight is not None:
visible.setArrangedWeight(float(weight))
# Set up any path
pathElement = xmlElement.find('Path')
if pathElement is None:
pathElement = xmlElement.find('path')
if pathElement is not None:
pathStart = display.visibleWithId(int(pathElement.get('startVisibleId')))
pathEnd = display.visibleWithId(int(pathElement.get('endVisibleId')))
if pathStart is None or pathEnd is None:
raise ValueError, gettext('Could not create path')
visible.setPathEndPoints(pathStart, pathEnd)
if pathElement.get('fixed') in trueStrings:
visible.setPathIsFixed(True)
flowTo = pathElement.get('flowTo')
if flowTo in trueStrings:
visible.setFlowTo(True)
elif flowTo in falseStrings:
visible.setFlowTo(False)
flowFrom = pathElement.get('flowFrom')
if flowFrom in trueStrings:
visible.setFlowFrom(True)
elif flowFrom == falseStrings:
visible.setFlowFrom(False)
midPoints = []
for midPointElement in pathElement.findall('MidPoint'):
x = float(midPointElement.get('x'))
y = float(midPointElement.get('y'))
z = float(midPointElement.get('z'))
midPoints.append((x, y, z))
visible.setPathMidPoints(midPoints)
flowToElement = pathElement.find('FlowToAppearance')
if flowToElement is None:
flowToElement = pathElement.find('flowToAppearance')
if flowToElement is not None:
colorElement = flowToElement.find('Color')
if colorElement is None:
colorElement = flowToElement.find('color')
if colorElement is not None:
red = float(colorElement.get('r'))
green = float(colorElement.get('g'))
blue = float(colorElement.get('b'))
alpha = float(colorElement.get('a'))
visible.setFlowToColor((red, green, blue, alpha))
if flowToElement.get('spacing') is not None:
visible.setFlowToSpacing(float(flowToElement.get('spacing')))
if flowToElement.get('speed') is not None:
visible.setFlowToSpeed(float(flowToElement.get('speed')))
if flowToElement.get('spread') is not None:
visible.setFlowToSpread(float(flowToElement.get('spread')))
flowFromElement = pathElement.find('FlowFromAppearance')
if flowFromElement is None:
flowFromElement = pathElement.find('flowFromAppearance')
if flowFromElement is not None:
colorElement = flowFromElement.find('Color')
if colorElement is None:
colorElement = flowFromElement.find('color')
if colorElement is not None:
red = float(colorElement.get('r'))
green = float(colorElement.get('g'))
blue = float(colorElement.get('b'))
alpha = float(colorElement.get('a'))
visible.setFlowFromColor((red, green, blue, alpha))
if flowFromElement.get('spacing') is not None:
visible.setFlowFromSpacing(float(flowFromElement.get('spacing')))
if flowFromElement.get('speed') is not None:
visible.setFlowFromSpeed(float(flowFromElement.get('speed')))
if flowFromElement.get('spread') is not None:
visible.setFlowFromSpread(float(flowFromElement.get('spread')))
# Create any child visibles
for visibleElement in xmlElement.findall('Visible'):
childVisible = Visible._fromXMLElement(visibleElement, display)
if childVisible is None:
raise ValueError, gettext('Could not create visualized item')
display.addVisible(childVisible, visible)
return visible
    def _toXMLElement(self, parentElement):
        """
        Serialize this visible (and, recursively, its children) to a new 'Visible' XML sub-element of parentElement.
        
        The inverse of _fromXMLElement.  Returns the new element, or raises a ValueError if a child visible cannot be saved.
        """
        visibleElement = ElementTree.SubElement(parentElement, 'Visible')
        visibleElement.set('id', str(self.displayId))
        if self._orphanClass:
            # The original network object no longer exists; record its class so an orphan can be re-created on load.
            visibleElement.set('orphanClass', self._orphanClass.__name__)
        elif self.client is not None:
            visibleElement.set('objectId', str(self.client.networkId))
        # Add a comment to the XML to make it easier to figure out the client of the visible.
        if self.client:
            visibleElement.append(ElementTree.Comment(self.client.__class__.displayName() + ': ' + (self.client.name or self.client.abbreviation or gettext('(unnamed)'))))
        # Add the geometry.  Position, size and rotation are only written when this
        # visible is not being auto-arranged by its parent (an absolute size is
        # written regardless so it survives a round trip).
        geometryElement = ElementTree.SubElement(visibleElement, 'Geometry')
        if self.parent == None or self.parent.arrangedAxis == None:
            positionElement = ElementTree.SubElement(geometryElement, 'Position')
            positionElement.set('x', str(self._position[0]))
            positionElement.set('y', str(self._position[1]))
            positionElement.set('z', str(self._position[2]))
            positionElement.set('fixed', 'true' if self._positionIsFixed else 'false')
        if self.parent == None or self.parent.arrangedAxis == None or self._sizeIsAbsolute:
            sizeElement = ElementTree.SubElement(geometryElement, 'Size')
            if self.parent == None or self.parent.arrangedAxis == None:
                sizeElement.set('x', str(self._size[0]))
                sizeElement.set('y', str(self._size[1]))
                sizeElement.set('z', str(self._size[2]))
            # NOTE(review): sizeIsFixed is referenced without calling it; elsewhere it
            # is invoked as self.sizeIsFixed().  If it is a method the bound method is
            # always truthy, so 'fixed' would always be written as 'true' — verify.
            sizeElement.set('fixed', 'true' if self.sizeIsFixed else 'false')
            sizeElement.set('absolute', 'true' if self._sizeIsAbsolute else 'false')
        if self.parent == None or self.parent.arrangedAxis == None:
            rotationElement = ElementTree.SubElement(geometryElement, 'Rotation')
            rotationElement.set('x', str(self._rotation[0]))
            rotationElement.set('y', str(self._rotation[1]))
            rotationElement.set('z', str(self._rotation[2]))
            rotationElement.set('angle', str(self._rotation[3]))
        # Add the appearance
        appearanceElement = ElementTree.SubElement(visibleElement, 'Appearance')
        # Only write a Label element when some label state differs from the defaults.
        if self._label is not None or self._labelColor != (0.0, 0.0, 0.0) or self._labelPosition != (0.0, 0.0, 0.0):
            labelElement = ElementTree.SubElement(appearanceElement, 'Label')
            if self._label is not None:
                textElement = ElementTree.SubElement(labelElement, 'Text')
                textElement.text = self._label
            if self._labelColor != (0.0, 0.0, 0.0):
                colorElement = ElementTree.SubElement(labelElement, 'Color')
                colorElement.set('r', str(self._labelColor[0]))
                colorElement.set('g', str(self._labelColor[1]))
                colorElement.set('b', str(self._labelColor[2]))
                # An optional fourth component is treated as alpha.
                if len(self._labelColor) > 3:
                    colorElement.set('a', str(self._labelColor[3]))
            if self._labelPosition != (0.0, 0.0, 0.0):
                positionElement = ElementTree.SubElement(labelElement, 'Position')
                positionElement.set('x', str(self._labelPosition[0]))
                positionElement.set('y', str(self._labelPosition[1]))
                positionElement.set('z', str(self._labelPosition[2]))
        if self._shape is not None:
            shapeElement = ElementTree.SubElement(appearanceElement, 'Shape')
            shapeElement.set('class', self._shape.__class__.__name__)
            # Save any custom attributes of the shape.
            for attributeName, attributeValue in self._shape.persistentAttributes().iteritems():
                attribute = None
                if isinstance(attributeValue, str):
                    attribute = Attribute(self._shape, attributeName, Attribute.STRING_TYPE, attributeValue)
                elif isinstance(attributeValue, bool): # bool is a sub-class of int so must be tested before it
                    attribute = Attribute(self._shape, attributeName, Attribute.BOOLEAN_TYPE, attributeValue)
                elif isinstance(attributeValue, int):
                    attribute = Attribute(self._shape, attributeName, Attribute.INTEGER_TYPE, attributeValue)
                elif isinstance(attributeValue, float):
                    attribute = Attribute(self._shape, attributeName, Attribute.DECIMAL_TYPE, attributeValue)
                # Unsupported value types are silently skipped.
                if attribute != None:
                    attribute._toXMLElement(shapeElement)
        colorElement = ElementTree.SubElement(appearanceElement, 'Color')
        colorElement.set('r', str(self._color[0]))
        colorElement.set('g', str(self._color[1]))
        colorElement.set('b', str(self._color[2]))
        ElementTree.SubElement(appearanceElement, 'Opacity').text = str(self._opacity)
        ElementTree.SubElement(appearanceElement, 'Weight').text = str(self._weight)
        if self._staticTexture is not None:
            textureElement = ElementTree.SubElement(appearanceElement, 'Texture')
            textureElement.set('identifier', self._staticTexture.identifier)
            textureElement.set('scale', str(self._staticTextureScale))
        # Add the arrangement
        arrangementElement = ElementTree.SubElement(visibleElement, 'Arrangement')
        arrangementElement.set('axis', str(self.arrangedAxis))
        arrangementElement.set('spacing', str(self.arrangedSpacing))
        arrangementElement.set('weight', str(self.arrangedWeight))
        # Add any path
        if self.isPath():
            pathElement = ElementTree.SubElement(visibleElement, 'Path')
            pathElement.set('startVisibleId', str(self._pathStart.displayId))
            pathElement.set('endVisibleId', str(self._pathEnd.displayId))
            pathElement.set('fixed', 'true' if self._pathIsFixed else 'false')
            pathElement.set('flowTo', 'true' if self._flowTo else 'false')
            pathElement.set('flowFrom', 'true' if self._flowFrom else 'false')
            # NOTE(review): spacing/speed are only written when color or spread is
            # set, since the FlowToAppearance element is only created then — verify
            # that a spacing/speed-only customization is not meant to be saved.
            if self._flowToColor is not None or self._flowToSpread is not None:
                flowToElement = ElementTree.SubElement(pathElement, 'FlowToAppearance')
                if self._flowToColor is not None:
                    colorElement = ElementTree.SubElement(flowToElement, 'Color')
                    colorElement.set('r', str(self._flowToColor[0]))
                    colorElement.set('g', str(self._flowToColor[1]))
                    colorElement.set('b', str(self._flowToColor[2]))
                    colorElement.set('a', str(self._flowToColor[3]))
                if self._flowToSpacing is not None:
                    flowToElement.set('spacing', str(self._flowToSpacing))
                if self._flowToSpeed is not None:
                    flowToElement.set('speed', str(self._flowToSpeed))
                if self._flowToSpread is not None:
                    flowToElement.set('spread', str(self._flowToSpread))
            if self._flowFromColor is not None or self._flowFromSpread is not None:
                flowFromElement = ElementTree.SubElement(pathElement, 'FlowFromAppearance')
                if self._flowFromColor is not None:
                    colorElement = ElementTree.SubElement(flowFromElement, 'Color')
                    colorElement.set('r', str(self._flowFromColor[0]))
                    colorElement.set('g', str(self._flowFromColor[1]))
                    colorElement.set('b', str(self._flowFromColor[2]))
                    colorElement.set('a', str(self._flowFromColor[3]))
                if self._flowFromSpacing is not None:
                    flowFromElement.set('spacing', str(self._flowFromSpacing))
                if self._flowFromSpeed is not None:
                    flowFromElement.set('speed', str(self._flowFromSpeed))
                if self._flowFromSpread is not None:
                    flowFromElement.set('spread', str(self._flowFromSpread))
            for midPoint in self._pathMidPoints:
                midPointElement = ElementTree.SubElement(pathElement, 'MidPoint')
                midPointElement.set('x', str(midPoint[0]))
                midPointElement.set('y', str(midPoint[1]))
                # 2-D mid-points are padded with a zero z coordinate.
                midPointElement.set('z', '0.0' if len(midPoint) == 2 else str(midPoint[2]))
        # Add any child visibles
        for childVisible in self.children:
            childElement = childVisible._toXMLElement(visibleElement)
            if childElement is None:
                raise ValueError, gettext('Could not save visualized item')
        return visibleElement
def _scriptParamsToKeywordArgs(self, params, scriptRefs):
keywordArgs = []
for key, value in params.iteritems():
if key == 'pathEndPoints':
pathStart, pathEnd = value
if isinstance(pathStart, Object):
startRef = scriptRefs[pathStart.networkId]
else:
startRef = ('visible' + str(pathStart.displayId)) if not pathStart.client else scriptRefs[pathStart.client.networkId]
if isinstance(pathEnd, Object):
endRef = scriptRefs[pathEnd.networkId]
else:
endRef = ('visible' + str(pathEnd.displayId)) if not pathEnd.client else scriptRefs[pathEnd.client.networkId]
valueText = '(' + startRef + ', ' + endRef + ')'
elif isinstance(value, Object):
valueText = scriptRefs[value.networkId]
elif isinstance(value, Visible):
valueText = ('visible' + str(value.displayId)) if not value.client else scriptRefs[value.client.networkId]
elif isinstance(value, Texture):
valueText = 'library.texture(%s)' % (repr(value.identifier))
else:
valueText = repr(value)
keywordArgs += ['%s = %s' % (key, valueText)]
return ', '.join(keywordArgs)
def _toScriptFile(self, scriptFile, scriptRefs, displayRef, savingNetwork):
# The stimulus visibles make this complicated because there are two visibles per stimulus object (a node and an path) and some attributes come from one visible and some from the other.
# This is worked around by tweaking the value of self as the attributes are queried. The attributes are grouped as follows to simplify the switching:
# Attribute Stimulus Non-Stimulus
# ========= ======== ============
# size node
# rotation node
# arr. axis node
# arr. spacing node
# arr. weight node
#
# label node node
# position node node
#
# weight path path
# flow color path path
# flow spread path path
#
# shape path node or path
# color path node or path
# opacity path node or path
# texture path node or path
defaultParams = self.client.defaultVisualizationParams() if self.client else Object._defaultVisualizationParams
if 'shape' in defaultParams:
# Create a default shape instance if a string or class is the default value.
if isinstance(defaultParams['shape'], str):
defaultParams['shape'] = neuroptikon.shapeClass(defaultParams['shape'])()
elif isinstance(defaultParams['shape'], type(self.__class__)):
defaultParams['shape'] = defaultParams['shape']()
params = {}
if isinstance(self.client, Stimulus):
visibles = list(self.display.visiblesForObject(self.client))
nodeVisible = visibles[0 if visibles[1].isPath() else 1]
pathVisible = visibles[0 if visibles[0].isPath() else 1]
else:
# Size, rotation and arrangement are never applied to stimuli.
if not self.isPath():
if self.parent == None or self.parent.arrangedAxis == None:
params['size'] = self.size()
if not self.sizeIsFixed():
params['sizeIsFixed'] = False
if self.rotation() != (0, 0, 1, 0):
params['rotation'] = self.rotation()
if self.parent is not None and self.sizeIsAbsolute():
params['sizeIsAbsolute'] = self.sizeIsAbsolute()
if len(self.children) > 0:
if self.arrangedAxis is not None:
params['arrangedAxis'] = self.arrangedAxis
if self.arrangedSpacing is not None:
params['arrangedSpacing'] = self.arrangedSpacing
if self.parent is not None:
params['parent'] = self.parent.client or self.parent
if self.arrangedWeight != 1.0:
params['arrangedWeight'] = self.arrangedWeight
# Stimuli label and position are always taken from the node visible.
if isinstance(self.client, Stimulus):
self = nodeVisible
if self._label is None:
params['label'] = None
else:
params['label'] = self._label
if self._labelColor != (0.0, 0.0, 0.0):
params['labelColor'] = self._labelColor
if self._labelPosition != (0.0, 0.0, 0.0):
params['labelPosition'] = self._labelPosition
if not self.isPath() and (self.parent == None or self.parent.arrangedAxis == None):
params['position'] = self.position()
if self.positionIsFixed():
params['positionIsFixed'] = True
# All other stimuli attributes are taken from the path visible.
if isinstance(self.client, Stimulus):
self = pathVisible
if self.isPath():
params['weight'] = self.weight()
params['pathIsFixed'] = self.pathIsFixed()
if self._flowTo:
params['flowTo'] = True
if self.flowToColor() != None:
params['flowToColor'] = self.flowToColor()
if self.flowToSpacing() != None:
params['flowToSpacing'] = self.flowToSpacing()
if self.flowToSpeed() != None:
params['flowToSpeed'] = self.flowToSpeed()
if self.flowToSpread() != None:
params['flowToSpread'] = self.flowToSpread()
if self._flowFrom:
params['flowFrom'] = True
if self.flowFromColor() != None:
params['flowFromColor'] = self.flowFromColor()
if self.flowFromSpacing() != None:
params['flowFromSpacing'] = self.flowFromSpacing()
if self.flowFromSpeed() != None:
params['flowFromSpeed'] = self.flowFromSpeed()
if self.flowFromSpread() != None:
params['flowFromSpread'] = self.flowFromSpread()
if isinstance(self.client, Stimulus):
params['target'] = self._pathEnd.client or self._pathEnd
else:
params['pathEndPoints'] = (self._pathStart.client or self._pathStart, self._pathEnd.client or self._pathEnd)
if self._pathMidPoints != []:
params['pathMidPoints'] = self._pathMidPoints
params['shape'] = self.shape()
params['color'] = self.color()
params['opacity'] = self.opacity()
params['texture'] = self._staticTexture
if not savingNetwork:
orphanDefaults = self.client.__class__._defaultVisualizationParams() if self.client else Object._defaultVisualizationParams
if 'shape' in orphanDefaults:
# Create a default shape instance if a string or class is the default value.
if isinstance(orphanDefaults['shape'], str):
orphanDefaults['shape'] = neuroptikon.shapeClass(orphanDefaults['shape'])()
elif isinstance(orphanDefaults['shape'], type(self.__class__)):
orphanDefaults['shape'] = orphanDefaults['shape']()
orphanParams = params.copy()
# Strip out values that are the same as the default.
for key in params.keys():
if key in defaultParams and params[key] == defaultParams[key]:
del params[key]
if not savingNetwork:
# Get the default params that are different than a generic instance of the client's class.
for key in orphanParams.keys():
if key != 'target' and ((key in orphanDefaults and orphanParams[key] == orphanDefaults[key]) or (key in params and orphanParams[key] == params[key])):
del orphanParams[key]
if self.client:
scriptRef = scriptRefs[self.client.networkId]
else:
scriptRef = 'visible' + str(self.displayId)
if self.client and not isinstance(self.client, Neurite) and self.display.autoVisualize:
# Change the existing visualization of the object.
if '(' in scriptRef:
# Create a local variable in the script for the object, e.g. "region1" instead of "network.createRegion(...)".
newScriptRef = self.client._createScriptRef(scriptRefs) # also updates scriptRefs
scriptFragment = '\n' + newScriptRef + ' = ' + scriptRef
if not savingNetwork:
# Add an "or ..." clause to the creation command in case it returns None that calls display.visualizeObject() to create an "orphaned" object.
# Orphan class, label and path end points parameters will be passed to allow a reasonable approximation of the original visible to be created.
scriptFragment += ' or ' + displayRef + '.visualizeObject(orphanClass = ' + self.client.__class__.__name__
# If a label is not going to be set then set one for the orphan.
if 'label' not in params:
label = self._label
if label is None and ((isinstance(self.client, Region) and self.display.showRegionNames()) or (isinstance(self.client, Neuron) and self.display.showNeuronNames())):
label = self.client.abbreviation or self.client.name
if label is not None:
orphanParams['label'] = label
if any(orphanParams):
scriptFragment += ', ' + self._scriptParamsToKeywordArgs(orphanParams, scriptRefs)
scriptFragment += ')\n'
scriptFile.write(scriptFragment)
scriptRef = newScriptRef
if 'position' in params:
scriptFile.write('%s.setVisiblePosition(%s, (%s)' % (displayRef, scriptRef, ', '.join([str(dim) for dim in params['position']])))
if 'positionIsFixed' in params:
scriptFile.write(', fixed = ' + str(params['positionIsFixed']))
scriptFile.write(')\n')
if 'size' in params or 'sizeIsFixed' in params or 'sizeIsAbsolute' in params:
scriptFile.write('%s.setVisibleSize(%s' % (displayRef, scriptRef))
if 'size' in params:
scriptFile.write(', (' + ', '.join([str(dim) for dim in params['size']]) + ')')
if 'sizeIsFixed' in params:
scriptFile.write(', fixed = ' + str(self.sizeIsFixed()))
if 'sizeIsAbsolute' in params:
scriptFile.write(', absolute = ' + str(self.sizeIsAbsolute()))
scriptFile.write(')\n')
if 'rotation' in params:
scriptFile.write('%s.setVisibleRotation(%s, (%s))\n' % (displayRef, scriptRef, ', '.join([str(dim) for dim in params['rotation']])))
if 'label' in params:
scriptFile.write('%s.setLabel(%s, \'%s\')\n' % (displayRef, scriptRef, params['label'].replace('\\', '\\\\').replace('\'', '\\\'')))
if 'labelColor' in params:
scriptFile.write('%s.setLabelColor(%s, (%s))\n' % (displayRef, scriptRef, ', '.join([str(component) for component in params['labelColor']])))
if 'labelPosition' in params:
scriptFile.write('%s.setLabelPosition(%s, (%s))\n' % (displayRef, scriptRef, ', '.join([str(dim) for dim in params['labelPosition']])))
if 'shape' in params:
if params['shape'] == None:
scriptFile.write('%s.setVisibleShape(%s, None)\n' % (displayRef, scriptRef))
else:
scriptFile.write('%s.setVisibleShape(%s, shapes[\'%s\'](%s))\n' % (displayRef, scriptRef, self.shape().__class__.__name__, self._scriptParamsToKeywordArgs(self._shape.persistentAttributes(), scriptRefs)))
if 'color' in params:
scriptFile.write('%s.setVisibleColor(%s, (%s))\n' % (displayRef, scriptRef, ', '.join([str(component) for component in params['color']])))
if 'opacity' in params:
scriptFile.write('%s.setVisibleOpacity(%s, %s)\n' % (displayRef, scriptRef, str(self.opacity())))
if 'weight' in params:
scriptFile.write('%s.setVisibleWeight(%s, %s)\n' % (displayRef, scriptRef, str(self.weight())))
if 'texture' in params:
if self._staticTexture == None:
scriptFile.write('%s.setVisibleTexture(%s, None)\n' % (displayRef, scriptRef))
else:
scriptFile.write('%s.setVisibleTexture(%s, library.texture(\'%s\'), scale = %s)\n' % (displayRef, scriptRef, self._staticTexture.identifier.replace('\\', '\\\\').replace('\'', '\\\''), str(self._staticTextureScale)))
if 'arrangedAxis' in params:
scriptFile.write('%s.setArrangedAxis(%s, \'%s\')\n' % (displayRef, scriptRef, self.arrangedAxis))
if 'arrangedSpacing' in params:
scriptFile.write('%s.setArrangedSpacing(%s, %s)\n' % (displayRef, scriptRef, str(self.arrangedSpacing)))
if 'arrangedWeight' in params:
scriptFile.write('%s.setArrangedWeight(%s, %s)\n' % (displayRef, scriptRef, str(self.arrangedWeight)))
if 'pathEndPoints' in params or 'pathMidPoints' in params or 'pathIsFixed' in params:
startObject, endObject = params.get('pathEndPoints', (self._pathStart.client or self._pathStart, self._pathEnd.client or self._pathEnd))
if isinstance(startObject, Object):
startRef = scriptRefs[startObject.networkId]
elif startObject.client:
startRef = scriptRefs[startObject.client.networkId]
else:
startRef = 'visible' + str(startObject.displayId)
if isinstance(endObject, Object):
endRef = scriptRefs[endObject.networkId]
elif endObject.client:
endRef = scriptRefs[endObject.client.networkId]
else:
endRef = 'visible' + str(endObject.displayId)
scriptFile.write('%s.setVisiblePath(%s, %s, %s' % (displayRef, scriptRef, startRef, endRef))
if 'pathMidPoints' in params:
scriptFile.write(', midPoints = ' + str(params['pathMidPoints']))
if 'pathIsFixed' in params:
scriptFile.write(', fixed = ' + str(params['pathIsFixed']))
scriptFile.write(')\n')
if 'flowToColor' in params or 'flowToSpacing' in params or 'flowToSpeed' in params or 'flowToSpread' in params:
scriptFile.write('%s.setVisibleFlowTo(%s, True' % (displayRef, scriptRef))
if 'flowToColor' in params:
scriptFile.write(', color = (' + ', '.join([str(component) for component in params['flowToColor']]) + ')')
if 'flowToSpacing' in params:
scriptFile.write(', spacing = ' + str(self.flowToSpacing))
if 'flowToSpeed' in params:
scriptFile.write(', speed = ' + str(self.flowToSpeed))
if 'flowToSpread' in params:
scriptFile.write(', spread = ' + str(self.flowToSpread))
scriptFile.write(')\n')
if 'flowFromColor' in params or 'flowFromSpacing' in params or 'flowFromSpeed' in params or 'flowFromSpread' in params:
scriptFile.write('%s.setVisibleFlowFrom(%s, True' % (displayRef, scriptRef))
if 'flowFromColor' in params:
scriptFile.write(', color = (' + ', '.join([str(component) for component in params['flowFromColor']]) + ')')
if 'flowFromSpacing' in params:
scriptFile.write(', spacing = ' + str(self.flowFromSpacing))
if 'flowFromSpeed' in params:
scriptFile.write(', speed = ' + str(self.flowFromSpeed))
if 'flowFromSpread' in params:
scriptFile.write(', spread = ' + str(self.flowFromSpread))
scriptFile.write(')\n')
else:
# Manually visualize the object.
if self.client:
newScriptRef = self.client._createScriptRef(scriptRefs) # also updates scriptRefs
scriptFile.write('\n%s = %s\n' % (newScriptRef, scriptRef))
scriptFile.write('%s.visualizeObject(%s, orphan = (%s is None)' % (displayRef, scriptRef, scriptRef))
else:
scriptFile.write('%s = %s.visualizeObject(None' % (scriptRef, displayRef))
scriptFile.write(', ' + self._scriptParamsToKeywordArgs(params, scriptRefs))
scriptFile.write(')\n')
for childVisible in self.children:
childVisible._toScriptFile(scriptFile, scriptRefs, displayRef)
def shape(self):
"""
Return the shape of this visualized :class:`object <Network.Object.Object>`, a Shape sub-class instance or None.
"""
return self._shape
    def setShape(self, shape):
        """
        Set the :class:`shape <Display.Shape.Shape>` of this visualized :class:`object <Network.Object.Object>`.
        >>> visible.setShape(shapes['Ball'])
        >>> visible.setShape(shapes['Ring'](startAngle = 0.0, endAngle = pi))
        The shape must one of the classes in shapes, an instance of one of the classes or None.
        """
        # Reject anything that is neither None, a Shape instance, nor a Shape
        # sub-class.  type(self.__class__) is the metaclass, so the middle test
        # detects "shape is itself a class" before issubclass is attempted.
        if shape != None and not isinstance(shape, Shape) and (not type(shape) == type(self.__class__) or not issubclass(shape, Shape)):
            raise TypeError, 'The argument passed to setShape() must be a Shape class, an instance of one of the classes or None'
        # If shape is a class then create a default instance of the class.
        if isinstance(shape, type(self.__class__)):
            shape = shape()
        if self._shape != shape:
            # Clean up any existing shape
            if self._shape:
                self._shapeGeode.removeDrawable(self._shape.geometry())
            self._shape = shape
            if self._shape:
                # Path shapes track the visible's weight (line thickness).
                if isinstance(shape, PathShape):
                    shape.setWeight(self._weight)
                # Color carries the opacity as its alpha component.
                self._shape.setColor(list(self._color) + [self._opacity])
                self._shapeGeode.addDrawable(self._shape.geometry())
                # Scale/translate the child group to fit the new shape's interior
                # so arranged children stay inside the shape.
                if self.childGroup and self._shape.interiorBounds() != None:
                    minBound, maxBound = self._shape.interiorBounds()
                    minBound = osg.Vec3(minBound[0], minBound[1], minBound[2])
                    maxBound = osg.Vec3(maxBound[0], maxBound[1], maxBound[2])
                    self.childGroup.setMatrix(osg.Matrixd.scale(maxBound - minBound) * osg.Matrixd.translate((minBound + maxBound) / 2.0))
                    # Nudge the children so they re-compute their positions in the new interior.
                    for child in self.children:
                        dispatcher.send(('set', 'position'), child)
            # Notify observers and refresh the scene graph for the new shape.
            dispatcher.send(('set', 'shape'), self)
            if self.isPath():
                self._updatePath()
            else:
                self._updateTransform()
            self._updateOpacity()
def _displayChangedShowName(self):
if self.display:
self._updateLabel()
self._updateOpacity()
    def _updateLabel(self, opacity = 1.0, isselected = False):
        """
        Create, update or tear down the osgText drawable that renders this visible's label.
        
        opacity is multiplied with the visible's own opacity for the text and its backdrop.  (isselected is currently unused by this method; selection is detected via display.selectedVisibles below.)
        """
        label = self._label
        # With no explicit label, regions and neurons may auto-label themselves
        # when the relevant display options (or selection) call for it.
        if label is None and ((isinstance(self.client, Region) and self.display.showRegionNames()) or (isinstance(self.client, Neuron) and (self.display.showNeuronNames())
            or (self in self.display.selectedVisibles and self.display.showNeuronNamesOnSelection()))):
            label = self.client.abbreviation or self.client.name
        if label is None:
            # No label to show: dispose of the text drawable and its geode.
            if self._textDrawable is not None:
                self._textGeode.removeDrawable(self._textDrawable)
                self._textDrawable = None
                self.sgNode.removeChild(self._textGeode)
                self._textGeode = None
        else:
            if self._textDrawable is None:
                # Create the text geode and drawable.
                self._textGeode = osg.Geode()
                self._textGeode.setName(str(self.displayId))
                self._textGeode.getOrCreateStateSet().setMode(osg.GL_BLEND, osg.StateAttribute.ON)
                # Render in a late bin so the text draws after the shapes.
                self._textGeode.getOrCreateStateSet().setRenderBinDetails(51, 'RenderBin')
                self._textGeode.setDataVariance(osg.Object.DYNAMIC)
                self.sgNode.addChild(self._textGeode)
                self._textDrawable = osgText.Text()
                self._textDrawable.setDataVariance(osg.Object.DYNAMIC)
                # Size the text in screen pixels so it is legible at any zoom.
                self._textDrawable.setCharacterSizeMode(osgText.Text.SCREEN_COORDS)
                if Visible.labelFont is None:
                    self._textDrawable.setCharacterSize(18.0)
                else:
                    self._textDrawable.setFont(Visible.labelFont)
                    self._textDrawable.setCharacterSize(18.0)
                self._textDrawable.setAxisAlignment(osgText.Text.SCREEN)
                self._textDrawable.setAlignment(osgText.Text.CENTER_CENTER)
                self._textDrawable.setBackdropType(osgText.Text.OUTLINE)
                self._textGeode.addDrawable(self._textDrawable)
            self._textDrawable.setColor(osg.Vec4(self._labelColor[0], self._labelColor[1], self._labelColor[2], self._opacity * opacity))
            # Outline the text in white for dark label colors, black for light ones.
            backdropColor = 1.0 if self._labelColor[0] + self._labelColor[1] + self._labelColor[2] <= .75 * 3.0 else 0.0
            self._textDrawable.setBackdropColor(osg.Vec4(backdropColor, backdropColor, backdropColor, self._opacity * opacity * 0.25))
            if self.display.viewDimensions == 3 or self.display.labelsFloatOnTop():
                # Always draw the label on top of the geometry.
                self._textDrawable.setPosition(osg.Vec3(*self._labelPosition))
                self._textGeode.getOrCreateStateSet().setAttribute(osg.Depth(osg.Depth.ALWAYS))
            else:
                # In a 2-D ortho view, offset the label towards the camera along
                # the axis perpendicular to the view plane.
                if self.display.orthoViewPlane == 'xy':
                    self._textDrawable.setPosition(osg.Vec3(self._labelPosition[0], self._labelPosition[1], self._labelPosition[2] + 1.0))
                elif self.display.orthoViewPlane == 'xz':
                    self._textDrawable.setPosition(osg.Vec3(self._labelPosition[0], self._labelPosition[1] - 1.0, self._labelPosition[2]))
                else:
                    self._textDrawable.setPosition(osg.Vec3(self._labelPosition[0] - 1.0, self._labelPosition[1], self._labelPosition[2]))
                self._textGeode.getOrCreateStateSet().removeAttribute(osg.StateAttribute.DEPTH)
            self._textDrawable.setText(str(label))
def setLabel(self, label):
    """
    Set the label that adorns this visualized :class:`object <Network.Object.Object>`.

    Passing None lets neurons and regions fall back to automatic labeling with their abbreviation or name (unless those options are disabled).  Pass '' (an empty string) to show no label at all.
    """
    if label == self._label:
        return
    self._label = label
    self._updateLabel()
    dispatcher.send(('set', 'label'), self)
def label(self):
    """
    Return the label that has been explicitly set on this visualized :class:`object <Network.Object.Object>`.

    Note: when a region or neuron is auto-displaying its abbreviation or name this returns None, not the text being shown.
    """
    return self._label
def setLabelColor(self, color):
    """
    Set the color of the label that adorns this visualized :class:`object <Network.Object.Object>`.

    The color argument should be a tuple or list of three red/green/blue values, each between 0.0 and 1.0, e.g. (1.0, 0.0, 0.0) for red or (1.0, 1.0, 1.0) for white.  Alpha is controlled separately via :meth:`setOpacity <Display.Visible.Visible.setOpacity>`.
    """
    if color == self._labelColor:
        return
    self._labelColor = color
    self._updateLabel()
    dispatcher.send(('set', 'labelColor'), self)
def labelColor(self):
    """
    Return the (red, green, blue) color of the label that adorns this visualized :class:`object <Network.Object.Object>`.
    """
    return self._labelColor
def setLabelPosition(self, position):
    """
    Set the position of the label that adorns this visualized :class:`object <Network.Object.Object>`.

    The position argument is a tuple or list in the object's local coordinates, usually a unit square centered at (0.0, 0.0).  For example (0.0, 0.0) centers the label, (-0.5, -0.5) puts it at the lower left corner and (0.0, 0.5) centers it at the top.
    """
    # Promote a 2D position to 3D by appending a zero Z coordinate.
    if len(position) == 2:
        position = (position[0], position[1], 0)
    if position == self._labelPosition:
        return
    self._labelPosition = position
    self._updateLabel()
    dispatcher.send(('set', 'labelPosition'), self)
def labelPosition(self):
    """
    Return the local-coordinate position of the label that adorns this visualized :class:`object <Network.Object.Object>`.
    """
    return self._labelPosition
def setColor(self, color):
    """
    Set the color of this visualized :class:`object <Network.Object.Object>`.

    The color argument should be a tuple or list of three values between 0.0 and 1.0 indicating the red, green and blue values of the color.  For example:

    * (0.0, 0.0, 0.0) -> black
    * (1.0, 0.0, 0.0) -> red
    * (0.0, 1.0, 0.0) -> green
    * (0.0, 0.0, 1.0) -> blue
    * (1.0, 1.0, 1.0) -> white

    Any alpha value should be set independently using :meth:`setOpacity <Display.Visible.Visible.setOpacity>`.

    Raises ValueError if the argument is not a three-component sequence of numbers in [0.0, 1.0].
    """
    # Validate: exactly three numeric components, each within [0.0, 1.0].
    # Fix: use the call form of raise (the old `raise ValueError, '...'`
    # statement form is a syntax error under Python 3).
    if (not isinstance(color, (tuple, list)) or len(color) != 3 or
            any(not isinstance(component, (int, float)) or component < 0.0 or component > 1.0
                for component in color)):
        raise ValueError('The color argument should be a tuple or list of three integer or floating point values between 0.0 and 1.0, inclusively.')
    if color != self._color:
        if self._shape:
            # Re-color the shape immediately, preserving the current opacity.
            self._shape.setColor(list(color) + [self._opacity])
        self._color = color
        # _updateOpacity() re-applies the color with ghosting taken into account.
        self._updateOpacity()
        dispatcher.send(('set', 'color'), self)
def color(self):
    """
    Return the (red, green, blue) color of this visualized :class:`object <Network.Object.Object>`.
    """
    return self._color
def _displayChangedGhosting(self):
    # The display's ghosting settings changed; recompute our rendered opacity.
    if not self.display:
        return
    self._updateOpacity()
def _updateOpacity(self):
# Figure out the regular opacity.
if any(self.children) and self._shape != None:
# Visible containing other visibles are always transparent.
opacity = 0.5
else:
# Otherwise use the user-specified opacity.
opacity = self._opacity
# Check if this visible should be ghosted.
if self.display.useGhosts() and any(self.display.selection()) and self not in self.display.highlightedVisibles and self not in self.display.animatedVisibles:
# Check if any of the visible's ancestors or descendants are selected or highlighted and if so then only partially ghost.
partialGhost = False
for ancestorOrDescendant in self.ancestors() + self.descendants():
if ancestorOrDescendant in self.display.selectedVisibles or ancestorOrDescendant in self.display.highlightedVisibles:
partialGhost = True
break
ghosting = self.display.ghostingOpacity()
if self.display.hideUnselectedNeurons() and (isinstance(self.client, Neuron) or isinstance(self.client, Arborization)):
opacity = 0
elif partialGhost:
opacity = opacity * (ghosting + (1.0 - ghosting) / 2.0)
else:
opacity = opacity * ghosting
if self._shape:
self._currentOpacity = opacity
self._shape.setColor(list(self._color) + [opacity])
stateSet1 = self._shapeGeode.getOrCreateStateSet()
if opacity == 1.0:
if self._shapeGeode2:
self.sgNode.removeChild(self._shapeGeode2)
self._shapeGeode2 = None
stateSet1.setRenderingHint(osg.StateSet.OPAQUE_BIN)
stateSet1.setMode(osg.GL_BLEND, osg.StateAttribute.OFF)
stateSet1.removeAttribute(osg.StateAttribute.CULLFACE)
else:
if not self._shapeGeode2:
# Technique that may correctly render nested, transparent geometries, from <http://www.mail-archive.com/osg-users@lists.openscenegraph.org/msg06863.html>
self._shapeGeode2 = osg.Geode()
self._shapeGeode2.addDrawable(self._shape.geometry())
stateSet2 = self._shapeGeode2.getOrCreateStateSet()
# TODO - installed version on Mac does not load cull_faces
if hasattr(Visible, 'cullFrontFacesAttr'):
stateSet2.setAttributeAndModes(Visible.cullFrontFacesAttr, osg.StateAttribute.ON)
stateSet1.setAttributeAndModes(Visible.cullBackFacesAttr, osg.StateAttribute.ON)
else:
stateSet2 = self._shapeGeode2.getOrCreateStateSet()
stateSet1.setMode(osg.GL_BLEND, osg.StateAttribute.ON)
stateSet2.setMode(osg.GL_BLEND, osg.StateAttribute.ON)
# Place more deeply nested regions in lower render bins so they are rendered before the containing visible.
# Each nesting depth needs four render bins: two for the front and back face of the shape and one for the glow shape.
# This assumes a maximum nesting depth of 10.
# TODO: The glow node needs to be double rendered as well to ensure it's always visible.
sceneDepth = len(self.ancestors())
stateSet1.setRenderBinDetails(40 - sceneDepth * 3 - 1, 'DepthSortedBin')
stateSet2.setRenderBinDetails(40 - sceneDepth * 3 - 2, 'DepthSortedBin')
if self._textDrawable is not None:
self._updateLabel(opacity)
def setOpacity(self, opacity):
    """
    Set the opacity of the visualized :class:`object's <Network.Object.Object>` shape and label.

    The opacity parameter should be a number from 0.0 (fully transparent) to 1.0 (fully opaque); values outside that range are clamped.
    """
    # Clamp to [0.0, 1.0].
    if opacity < 0.0:
        opacity = 0.0
    elif opacity > 1.0:
        opacity = 1.0
    # Bug fix: this previously compared against self.opacity — the bound
    # method object, never equal to a number — so the "unchanged" short-cut
    # never triggered and every call redid the update and notification.
    if opacity != self._opacity:
        self._opacity = opacity
        self._updateOpacity()
        # Connected paths blend with our opacity, so they must be redrawn too.
        for pathVisible in self.connectedPaths:
            pathVisible._updatePath()
        dispatcher.send(('set', 'opacity'), self)
def opacity(self):
    """
    Return the user-set opacity of the visualized :class:`object <Network.Object.Object>`, from 0.0 (transparent) to 1.0 (opaque).
    """
    return self._opacity
def getCurrentOpacity(self):
    # Return the opacity most recently pushed to the scene graph, which can
    # differ from the user-set opacity when the visible is ghosted or
    # contains children (see _updateOpacity()).
    return self._currentOpacity
def _updateTransform(self):
    # Rebuild this visible's scene-graph matrix from its size, rotation and
    # position.  Shapes other than UnitShape manage their own coordinates and
    # get an identity matrix instead.
    if isinstance(self._shape, (UnitShape, type(None))):
        # update the transform unless we're under an osgGA.Selection node, i.e. being dragged
        # TODO fix this when we add dragging back in
        draggingDoesntWorkAnyways = True
        if len(self.sgNode.getParents()) == 0 or self.display.dragSelection is None or self.sgNode.getParent(0).__repr__() != self.display.dragSelection.asGroup().__repr__() or draggingDoesntWorkAnyways:
            if self.parent is None or not self.sizeIsAbsolute():
                scale = self._size
            else:
                # Absolute size: divide out the parent's world size so the
                # unit-cube child space yields world-space dimensions.
                parentScale = self.parent.worldSize()
                scale = (self._size[0] / parentScale[0], self._size[1] / parentScale[1], self._size[2] / parentScale[2])
            # Compose scale, then rotate (angle/axis), then translate.
            self.sgNode.setMatrix(osg.Matrixd.scale(osg.Vec3d(scale[0], scale[1], scale[2])) *
                                  osg.Matrixd.rotate(self.rotation()[3], osg.Vec3d(self.rotation()[0], self.rotation()[1], self.rotation()[2])) *
                                  osg.Matrixd.translate(osg.Vec3d(self.position()[0], self.position()[1], self.position()[2])))
    else:
        self.sgNode.setMatrix(osg.Matrixd.identity())
    self._updateGlow()
def position(self):
    """
    Return the position of this visualized :class:`object <Network.Object.Object>` (world-space when uncontained, otherwise in the parent's unit-cube space).
    """
    return self._position
def setPosition(self, position):
    """
    Set the position of this visualized :class:`object <Network.Object.Object>`.

    For objects without containers this value is in world-space coordinates.  For objects within containers the coordinate space is a unit cube centered at (0.0, 0.0, 0.0); values between -0.5 and 0.5 lie within the container but values beyond that are allowed.
    """
    if position == self._position:
        return
    self._position = position
    self._updateTransform()
    dispatcher.send(('set', 'position'), self)
def offsetPosition(self, offset):
    """
    Shift the position of this visualized :class:`object <Network.Object.Object>` by the given (dx, dy, dz) amounts.
    """
    if offset == (0, 0, 0):
        return
    x, y, z = self._position
    self._position = (x + offset[0], y + offset[1], z + offset[2])
    self._updateTransform()
    dispatcher.send(('set', 'position'), self)
def worldPosition(self):
    """
    Return the position of the visualized :class:`object <Network.Object.Object>` in world-space coordinates.
    """
    # TODO: if a parent is rotated does this screw up?
    # TODO: will OSG do this for us?
    # TODO: merge this with position() and add a coordinate-space argument?
    if self.parent is None:
        # Uncontained visibles already store world-space positions.
        worldPosition = self._position
    else:
        parentSize = self.parent.worldSize()
        parentPosition = self.parent.worldPosition()
        # Decompose the parent's childGroup matrix to account for the
        # interior-bounds offset/scale applied to children.
        trans = osg.Vec3d()
        rot = osg.Quat()
        scale = osg.Vec3d()
        so = osg.Quat()
        self.parent.childGroup.getMatrix().decompose(trans, rot, scale, so)
        parentPosition = (parentPosition[0] + trans.x() * parentSize[0], parentPosition[1] + trans.y() * parentSize[1], parentPosition[2] + trans.z() * parentSize[2])
        parentSize = (parentSize[0] * scale.x(), parentSize[1] * scale.y(), parentSize[2] * scale.z())
        # Map our unit-cube position into the (adjusted) parent's world box.
        worldPosition = (parentPosition[0] + self._position[0] * parentSize[0], parentPosition[1] + self._position[1] * parentSize[1], parentPosition[2] + self._position[2] * parentSize[2])
    return worldPosition
def positionIsFixed(self):
    """
    Return whether the position of this visualized :class:`object <Network.Object.Object>` is locked against change.
    """
    return self._positionIsFixed
def setPositionIsFixed(self, isFixed):
    """
    Set whether the position of this visualized :class:`object <Network.Object.Object>` should be allowed to change.

    Note that :meth:`setPosition <Display.Visible.Visible.setPosition>` ignores this setting.
    """
    if isFixed == self._positionIsFixed:
        return
    self._positionIsFixed = isFixed
    dispatcher.send(('set', 'positionIsFixed'), self)
def size(self):
    """
    Return the size of this visualized :class:`object <Network.Object.Object>`.
    """
    # TODO: if isinstance(self._shape, UnitShape)...
    return self._size
def setSize(self, size):
    """
    Set the size of this visualized :class:`object <Network.Object.Object>`.

    For objects without containers or those that are :meth:`absolutely sized <Display.Visible.Visible.setSizeIsAbsolute>` this value is in world-space coordinates.  For relatively sized objects within containers the coordinate space is a unit cube centered at (0.0, 0.0, 0.0).

    Raises ValueError if any dimension is not strictly positive.
    """
    # Fix: use the call form of raise (the old `raise ValueError, '...'`
    # statement form is a syntax error under Python 3).
    for sizeDim in size:
        if sizeDim <= 0.0:
            raise ValueError('The visible size of an object must be greater than zero in all dimensions.')
    if self._size != size:
        self._size = size
        # TODO: if not isinstance(self._shape, UnitShape): # then rebuild geometry with new size
        self._updateTransform()
        dispatcher.send(('set', 'size'), self)
        # Children are laid out relative to our size, so re-arrange them.
        self._arrangeChildren()
def sizeIsFixed(self):
    """
    Return whether the size of this visualized :class:`object <Network.Object.Object>` is locked against change.
    """
    return self._sizeIsFixed
def setSizeIsFixed(self, isFixed):
    """
    Set whether the size of this visualized :class:`object <Network.Object.Object>` should be allowed to change.

    Note that :meth:`setSize <Display.Visible.Visible.setSize>` ignores this setting.
    """
    if isFixed == self._sizeIsFixed:
        return
    self._sizeIsFixed = isFixed
    dispatcher.send(('set', 'sizeIsFixed'), self)
def sizeIsAbsolute(self):
    """
    Return whether this visualized :class:`object's <Network.Object.Object>` size is interpreted in world-space coordinates (True) or relative to its container (False).
    """
    return self._sizeIsAbsolute
def setSizeIsAbsolute(self, sizeIsAbsolute = True):
    """
    Set whether the size set for this visualized :class:`object <Network.Object.Object>` should be in world-space coordinates or relative to the enclosing container.
    """
    if self._sizeIsAbsolute != sizeIsAbsolute:
        self._sizeIsAbsolute = sizeIsAbsolute
        # TODO: convert absolute to relative size or vice versa
        self._updateTransform()
        dispatcher.send(('set', 'sizeIsAbsolute'), self)
        self._arrangeChildren()
        # An absolutely sized visible must recompute its transform whenever
        # any ancestor moves, resizes or rotates; (dis)connect those hooks.
        for ancestor in self.ancestors():
            if self._sizeIsAbsolute:
                dispatcher.connect(self._maintainAbsoluteSize, ('set', 'position'), ancestor)
                dispatcher.connect(self._maintainAbsoluteSize, ('set', 'size'), ancestor)
                dispatcher.connect(self._maintainAbsoluteSize, ('set', 'rotation'), ancestor)
            else:
                dispatcher.disconnect(self._maintainAbsoluteSize, ('set', 'position'), ancestor)
                dispatcher.disconnect(self._maintainAbsoluteSize, ('set', 'size'), ancestor)
                dispatcher.disconnect(self._maintainAbsoluteSize, ('set', 'rotation'), ancestor)
def _maintainAbsoluteSize(self):
    # An ancestor's position, size or rotation changed; rebuild our transform
    # so our absolute size is preserved, then re-layout our children.
    self._updateTransform()
    self._arrangeChildren()
def worldSize(self):
    """
    Return the size of the visualized :class:`object <Network.Object.Object>` in world-space coordinates.
    """
    # TODO: if a parent is rotated does this screw up?
    # TODO: will OSG do this for us?
    # TODO: merge this with size() and add a coordinate-space argument?
    if self.parent is None or self.sizeIsAbsolute():
        # Uncontained or absolutely sized: _size is already world-space.
        worldSize = self._size
    else:
        parentSize = self.parent.worldSize()
        # Decompose the parent's childGroup matrix to pick up the
        # interior-bounds scale applied to children.
        trans = osg.Vec3d()
        rot = osg.Quat()
        scale = osg.Vec3d()
        so = osg.Quat()
        self.parent.childGroup.getMatrix().decompose(trans, rot, scale, so)
        worldSize = (self._size[0] * parentSize[0] * scale.x(), self._size[1] * parentSize[1] * scale.y(), self._size[2] * parentSize[2] * scale.z())
    return worldSize
def rotation(self):
    # Return the rotation of this visible as an (x, y, z, angle) axis/angle
    # tuple, as consumed by _updateTransform().
    return self._rotation
def setRotation(self, rotation):
    # Set the rotation as an (x, y, z, angle) axis/angle tuple and rebuild
    # the transform if it actually changed.
    if rotation == self._rotation:
        return
    self._rotation = rotation
    self._updateTransform()
    dispatcher.send(('set', 'rotation'), self)
def weight(self):
    """
    Return the weight of the visualized :class:`object <Network.Object.Object>`.
    """
    return self._weight
def setWeight(self, weight):
    """
    Set the weight of the visualized :class:`object <Network.Object.Object>`.

    The weight parameter should be a float, with 1.0 being neutral.  Currently this only applies to visualized connections.
    """
    if weight == self._weight:
        return
    self._weight = weight
    if isinstance(self._shape, PathShape):
        # Path shapes render their own weight directly.
        self._shape.setWeight(weight)
        self._updateGlow()
    elif self.isPath():
        # Other path representations must be fully re-routed.
        self._updatePath()
    dispatcher.send(('set', 'weight'), self)
def originalWeight(self):
    """
    Return the weight the visualized :class:`object <Network.Object.Object>` had before any :meth:`boldWeight` emphasis was applied.
    """
    return self._originalWeight
def boldWeight(self, factor):
    # Emphasize a path by scaling its weight; only applies to paths not
    # drawn with a PathShape (those handle weight themselves).
    if not self.isPath() or isinstance(self._shape, PathShape):
        return
    if not self._originalWeight:
        # Remember the pre-emphasis weight so it can be restored later.
        self._originalWeight = self.weight()
    self.setWeight(factor * self._originalWeight)
def addChildVisible(self, childVisible):
    """
    Make the indicated visible a child of this visualized :class:`object <Network.Object.Object>`.

    A child visible is drawn inside of the parent visible.  If the child already has a parent it is re-parented here.

    Raises TypeError if childVisible is not a Visible.
    """
    # Fix: use the call form of raise (the old `raise TypeError, '...'`
    # statement form is a syntax error under Python 3).
    if not isinstance(childVisible, Visible):
        raise TypeError('The argument passed to addChildVisible() must be a visible in the same display.')
    if not any(self.children):
        # First child: lazily create the group that holds children, scaled
        # and offset to fit inside the shape's interior bounds.
        self.childGroup = osg.MatrixTransform(osg.Matrixd.identity())
        minBound, maxBound = ((-0.5, -0.5, -0.5), (0.5, 0.5, 0.5)) if not self._shape else self._shape.interiorBounds()
        minBound = osg.Vec3(minBound[0], minBound[1], minBound[2])
        maxBound = osg.Vec3(maxBound[0], maxBound[1], maxBound[2])
        self.childGroup.setMatrix(osg.Matrixd.scale(maxBound - minBound) * osg.Matrixd.translate((minBound + maxBound) / 2.0))
        self.sgNode.addChild(self.childGroup)
    if childVisible not in self.children:
        if childVisible.parent:
            # Re-parent: detach from the previous container first.
            childVisible.parent.removeChildVisible(childVisible)
        self.children.append(childVisible)
        childVisible.parent = self
        self.childGroup.addChild(childVisible.sgNode)
        dispatcher.connect(self._childArrangedWeightChanged, ('set', 'arrangedWeight'), childVisible)
        if childVisible.sizeIsAbsolute():
            # Absolutely sized children must track every ancestor's changes.
            for ancestor in childVisible.ancestors():
                dispatcher.connect(childVisible._maintainAbsoluteSize, ('set', 'position'), ancestor)
                dispatcher.connect(childVisible._maintainAbsoluteSize, ('set', 'size'), ancestor)
                dispatcher.connect(childVisible._maintainAbsoluteSize, ('set', 'rotation'), ancestor)
        # Containers render semi-transparent; recompute our opacity.
        self._updateOpacity()
        if self.arrangedAxis is None:
            childVisible._updateTransform()
        else:
            self._arrangeChildren()
        dispatcher.send(('set', 'children'), self)
def removeChildVisible(self, childVisible):
    """
    Remove the indicated visible from this container visible.

    After this call the indicated visible will have no parent.
    """
    if childVisible in self.children:
        if childVisible.sizeIsAbsolute():
            # Drop the ancestor-tracking hooks installed by addChildVisible()
            # before the parent link is severed.
            for ancestor in childVisible.ancestors():
                dispatcher.disconnect(childVisible._maintainAbsoluteSize, ('set', 'position'), ancestor)
                dispatcher.disconnect(childVisible._maintainAbsoluteSize, ('set', 'size'), ancestor)
                dispatcher.disconnect(childVisible._maintainAbsoluteSize, ('set', 'rotation'), ancestor)
        dispatcher.disconnect(self._childArrangedWeightChanged, ('set', 'arrangedWeight'), childVisible)
        self.childGroup.removeChild(childVisible.sgNode)
        childVisible.parent = None
        self.children.remove(childVisible)
        if not any(self.children):
            # Last child removed: tear down the (lazily created) child group.
            self.sgNode.removeChild(self.childGroup)
            self.childGroup = None
        self._updateOpacity()
        # Skip re-layout while the display is being torn down.
        if not self.display._closing:
            if self.arrangedAxis is None:
                childVisible._updateTransform()
            else:
                self._arrangeChildren()
        dispatcher.send(('set', 'children'), self)
def rootVisible(self):
    """ Return the outermost container of this visible.

    If this visible is not contained within any other visible then this visible itself will be returned.
    """
    # Walk up the parent chain iteratively instead of recursing.
    visible = self
    while visible.parent is not None:
        visible = visible.parent
    return visible
def ancestors(self):
    """
    Return the containers of this visible as a list, outermost container last.

    Returns an empty list when this visible has no container.
    """
    if self.parent is None:
        return []
    return [self.parent] + self.parent.ancestors()
def descendants(self):
    """
    Return every visible contained by this visible, directly or indirectly.

    Returns an empty list when this visible contains no others.
    """
    found = []
    for child in self.children:
        found.append(child)
        found.extend(child.descendants())
    return found
def _arrangeChildren(self, recurse = True):
    # Lay out this visible's children along the arranged axis, dividing the
    # available space by each child's arrangedWeight and separating them by
    # arrangedSpacing (a fraction of the parent's size).  No-op when no axis
    # is set or there are no children.
    if self.arrangedAxis is None or len(self.children) == 0:
        return
    worldSize = self.worldSize()
    if self.arrangedAxis == 'largest':
        # Pick the axis in which our size is largest.
        if worldSize[0] >= worldSize[1] and worldSize[0] >= worldSize[2]:
            axisToUse = 'X'
        elif worldSize[1] >= worldSize[0] and worldSize[1] >= worldSize[2]:
            axisToUse = 'Y'
        else:
            axisToUse = 'Z'
    else:
        axisToUse = self.arrangedAxis
    childCount = len(self.children)
    # Total weight determines each child's share of the arranged axis.
    weightedChildCount = 0.0
    for child in self.children:
        weightedChildCount += child.arrangedWeight
    if axisToUse == 'X':
        worldSpacing = worldSize[0] * self.arrangedSpacing
        # Cross-axis sizes shrink by twice the spacing (a margin on each side).
        ySize = (worldSize[1] - 2.0 * worldSpacing) / worldSize[1]
        zSize = (worldSize[2] - 2.0 * worldSpacing) / worldSize[2]
        curX = -0.5 + self.arrangedSpacing
        for index in range(0, childCount):
            child = self.children[index]
            childWidth = (1.0 - self.arrangedSpacing * (childCount + 1.0)) / weightedChildCount * child.arrangedWeight
            child.setPosition((curX + childWidth / 2.0, 0.0, 0.0))
            if not child.sizeIsAbsolute():
                child.setSize((childWidth, max(ySize, 0.5), max(zSize, 0.5)))
            child.setPositionIsFixed(True)
            curX += childWidth + self.arrangedSpacing
    elif axisToUse == 'Y':
        worldSpacing = worldSize[1] * self.arrangedSpacing
        xSize = (worldSize[0] - 2.0 * worldSpacing) / worldSize[0]
        zSize = (worldSize[2] - 2.0 * worldSpacing) / worldSize[2]
        # Y is laid out top-down, so start at the top and walk downwards.
        curY = 0.5 - self.arrangedSpacing
        for index in range(0, childCount):
            child = self.children[index]
            childHeight = (1.0 - self.arrangedSpacing * (childCount + 1.0)) / weightedChildCount * child.arrangedWeight
            child.setPosition((0.0, curY - childHeight / 2.0, 0.0))
            if not child.sizeIsAbsolute():
                child.setSize((max(xSize, 0.5), childHeight, max(zSize, 0.5)))
            child.setPositionIsFixed(True)
            curY -= childHeight + self.arrangedSpacing
    else: # axisToUse == 'Z'
        worldSpacing = worldSize[2] * self.arrangedSpacing
        xSize = (worldSize[0] - 2.0 * worldSpacing) / worldSize[0]
        ySize = (worldSize[1] - 2.0 * worldSpacing) / worldSize[1]
        curZ = -0.5 + self.arrangedSpacing
        for index in range(0, childCount):
            child = self.children[index]
            childDepth = (1.0 - self.arrangedSpacing * (childCount + 1.0)) / weightedChildCount * child.arrangedWeight
            child.setPosition((0.0, 0.0, curZ + childDepth / 2.0))
            if not child.sizeIsAbsolute():
                child.setSize((max(xSize, 0.5), max(ySize, 0.5), childDepth))
            child.setPositionIsFixed(True)
            curZ += childDepth + self.arrangedSpacing
    if recurse:
        for child in self.children:
            child._arrangeChildren(recurse = True)
def setArrangedAxis(self, axis = 'largest', recurse = False):
    """
    Automatically arrange the children of this visualized :class:`object <Network.Object.Object>` along the specified axis.

    The axis value should be one of 'largest', 'X', 'Y', 'Z' or None.  When 'largest' is indicated the children will be arranged along whichever axis is longest at any given time; resizing the parent object can therefore change which axis is used.

    If recurse is True then all descendants will have their axes set as well.

    Raises ValueError for any other axis value.
    """
    # Fix: use the call form of raise (the old `raise ValueError, '...'`
    # statement form is a syntax error under Python 3).
    if axis not in [None, 'largest', 'X', 'Y', 'Z']:
        raise ValueError('The axis argument passed to setArrangedAxis() must be one of \'largest\', \'X\', \'Y\', \'Z\' or None.')
    if axis != self.arrangedAxis:
        self.arrangedAxis = axis
        if axis is None:
            # No automatic arrangement: let the children move freely again.
            for child in self.children:
                child.setPositionIsFixed(False)
        else:
            self._arrangeChildren(False)
        dispatcher.send(('set', 'arrangedAxis'), self)
    if recurse:
        for child in self.children:
            child.setArrangedAxis(axis = axis, recurse = True)
def setArrangedSpacing(self, spacing = .02, recurse = False):
    """
    Set the visible spacing between the children of the visualized :class:`object <Network.Object.Object>`.

    The spacing is measured as a fraction of the whole; a value of .02 uses 2% of the parent's size for the spacing between each object.

    If recurse is True then all descendants will have their spacing set as well.

    Raises TypeError if spacing is not a number.
    """
    # Fix: use the call form of raise (the old `raise TypeError, '...'`
    # statement form is a syntax error under Python 3).
    if not isinstance(spacing, (int, float)):
        raise TypeError('The spacing argument passed to setArrangedSpacing() must be an integer or floating point value.')
    if spacing != self.arrangedSpacing:
        self.arrangedSpacing = float(spacing)
        self._arrangeChildren(False)
        dispatcher.send(('set', 'arrangedSpacing'), self)
    if recurse:
        for child in self.children:
            child.setArrangedSpacing(spacing = spacing, recurse = True)
def setArrangedWeight(self, weight = 1.0):
    """
    Set the amount of its parent's space the visualized :class:`object <Network.Object.Object>` should use compared to its siblings.

    Larger weight values result in more of the parent's space being used; 1.0 is the neutral default.

    Raises TypeError if weight is not a number.
    """
    # Fix 1: the default was `weight = weight`, which at class-definition
    # time bound the *weight() method object* as the default value, so any
    # call without an argument always raised TypeError.  A neutral 1.0
    # default is now usable.
    # Fix 2: use the call form of raise (the old `raise TypeError, '...'`
    # statement form is a syntax error under Python 3).
    if not isinstance(weight, (int, float)):
        raise TypeError('The weight argument passed to setArrangedWeight() must be an integer or floating point value.')
    if weight != self.arrangedWeight:
        self.arrangedWeight = weight
        self._arrangeChildren(False)
        dispatcher.send(('set', 'arrangedWeight'), self)
def _childArrangedWeightChanged(self):
    # A child's arrangedWeight changed, so the space division is stale;
    # re-layout all children.
    self._arrangeChildren()
def _addDependency(self, otherVisible, attribute):
    # Record that this visible depends on otherVisible's attribute and listen
    # for changes to it on the visible itself and on every container above it
    # (a moving container moves its contents).
    self._dependencies.add(otherVisible)
    visibleOrAncestor = otherVisible
    while visibleOrAncestor is not None:
        dispatcher.connect(self._dependentVisibleChanged, ('set', attribute), visibleOrAncestor)
        visibleOrAncestor = visibleOrAncestor.parent
def _dependentVisibleChanged(self):
    # A visible we depend on (a path end point or one of its containers)
    # changed; re-route the path and announce that our position moved.
    if self.isPath():
        self._updatePath()
        dispatcher.send(('set', 'position'), self)
def _positionSizeRotation(self, startPoint, endPoint):
    # Compute the (position, size, rotation) needed to stretch a unit shape
    # between the two given points, e.g. for a straight path segment.
    # Promote 2D points to 3D with a zero Z coordinate.
    if len(startPoint) == 2:
        startPoint = list(startPoint) + [0.0]
    if len(endPoint) == 2:
        endPoint = list(endPoint) + [0.0]
    # Center the shape at the midpoint of the segment.
    position = ((startPoint[0] + endPoint[0]) / 2.0,
                (startPoint[1] + endPoint[1]) / 2.0,
                (startPoint[2] + endPoint[2]) / 2.0)
    dx = endPoint[0] - startPoint[0]
    dy = endPoint[1] - startPoint[1]
    dz = endPoint[2] - startPoint[2]
    # Length along Y (the shape's long axis); thickness scales with weight.
    dsize = (self._weight / 500.0, sqrt(dx * dx + dy * dy + dz * dz), self._weight / 500.0)
    dxz = sqrt(dx**2.0 + dz**2.0)
    dAngle = atan2(dxz, dy)
    # Axis/angle rotation taking the +Y axis onto the segment direction.
    cross = osg.Vec3f(0, 1, 0) ^ osg.Vec3f(dx, dy, dz)
    cross.normalize()
    rotation = (cross.x(), cross.y(), cross.z(), dAngle)
    return (position, dsize, rotation)
def _isDraggable(self):
    # Only non-path visibles whose position is not fixed can be dragged.
    if self._pathStart is not None:
        return False
    return self.positionIsFixed() == False
def setTexture(self, texture):
    """
    Set the texture used to paint the surface of the visualized :class:`object <Network.Object.Object>`.

    >>> display.setVisibleTexture(region1, library.texture('Stripes'))

    The texture parameter should be a :class:`texture <library.texture.Texture>` instance or None (which removes any texture).

    Raises TypeError for any other argument.
    """
    # Fix: use the call form of raise (the old `raise TypeError, '...'`
    # statement form is a syntax error under Python 3).
    if not isinstance(texture, (Texture, type(None))):
        raise TypeError('The texture argument passed to setTexture() must be a texture from the library.')
    if self._staticTexture != texture:
        if texture is None:
            self._shapeGeode.getOrCreateStateSet().removeTextureAttribute(0, osg.StateAttribute.TEXTURE)
        else:
            self._shapeGeode.getOrCreateStateSet().setTextureAttributeAndModes(0, texture.textureData(), osg.StateAttribute.ON)
            if texture.isCube():
                # Cube maps need generated texture coordinates.
                self._shapeGeode.getStateSet().setTextureAttributeAndModes(0, osg.TexGen(), osg.StateAttribute.ON)
        self._staticTexture = texture
        dispatcher.send(('set', 'texture'), self)
def texture(self):
    """
    Return the :class:`texture <library.texture.Texture>` used to paint the surface of the visualized :class:`object <Network.Object.Object>`, or None.
    """
    return self._staticTexture
def setTextureScale(self, scale):
    """
    Set the scale of the :class:`texture <library.texture.Texture>` used to paint the surface of the visualized :class:`object <Network.Object.Object>`.

    The scale parameter can be used to reduce or enlarge the texture relative to the visualized object; 1.0 removes any scaling.

    Raises TypeError if scale is not a number.
    """
    # Fix: use the call form of raise (the old `raise TypeError, '...'`
    # statement form is a syntax error under Python 3).
    if not isinstance(scale, (float, int)):
        raise TypeError('The scale argument passed to setTextureScale() must be a number.')
    if self._staticTextureScale != scale:
        if scale == 1.0:
            # Unit scale: drop the texture matrix entirely.
            self._shapeGeode.getOrCreateStateSet().removeTextureAttribute(0, osg.StateAttribute.TEXMAT)
        else:
            textureMatrix = osg.TexMat()
            textureMatrix.setMatrix(osg.Matrixd.scale(scale, scale, scale))
            self._shapeGeode.getOrCreateStateSet().setTextureAttributeAndModes(0, textureMatrix, osg.StateAttribute.ON)
        self._staticTextureScale = scale
        # The flow animation's texture coordinates depend on the scale.
        self._updateFlowAnimation()
        dispatcher.send(('set', 'textureScale'), self)
def textureScale(self):
    """
    Return the scale of the :class:`texture <library.texture.Texture>` used to paint the surface of the visualized :class:`object <Network.Object.Object>`.
    """
    return self._staticTextureScale
def setFlowTo(self, showFlow = True):
    """
    Set whether the flow of information from the start of the path towards the end should be shown.
    """
    # Normalize any truthy/falsy argument to a real bool.
    showFlow = bool(showFlow)
    if showFlow == self._flowTo:
        return
    self._flowTo = showFlow
    dispatcher.send(('set', 'flowTo'), self)
    self._updateFlowAnimation()
def flowTo(self):
    """
    Return whether the flow of information from the start of the path towards the end should be shown.
    """
    return self._flowTo
def setFlowToColor(self, color):
    """
    Set the color of the pulse used to show the flow of information from the start of the path towards the end.

    The color argument should be None or a tuple or list of three (or four, including alpha) values between 0.0 and 1.0 indicating the red, green and blue values of the color.  For example:

    * (0.0, 0.0, 0.0) -> black
    * (1.0, 0.0, 0.0) -> red
    * (0.0, 1.0, 0.0) -> green
    * (0.0, 0.0, 1.0) -> blue
    * (1.0, 1.0, 1.0) -> white

    If None is passed then the default flow color will be used.

    Raises ValueError for any other argument.
    """
    # Fix 1: use the call form of raise (the old `raise ValueError, '...'`
    # statement form is a syntax error under Python 3).
    if not isinstance(color, (list, tuple, type(None))) or (color != None and len(color) not in [3, 4]):
        raise ValueError('The color passed to setFlowToColor() must be None or a tuple or list of three or four numbers.')
    # Fix 2: the component check previously iterated `color` even when it was
    # None, raising TypeError although None is a documented, valid argument.
    if color is not None:
        for colorComponent in color:
            if not isinstance(colorComponent, (int, float)) or colorComponent < 0.0 or colorComponent > 1.0:
                raise ValueError('The components of the color passed to setFlowToColor() must all be numbers between 0.0 and 1.0, inclusive.')
        if len(color) == 3:
            # Default alpha to fully opaque.
            color = (color[0], color[1], color[2], 1.0)
    if color != self._flowToColor:
        self._flowToColor = color
        if self._flowToColor is None:
            self._shapeGeode.getOrCreateStateSet().removeUniform('flowToColor')
        else:
            self._shapeGeode.getOrCreateStateSet().addUniform(osg.Uniform('flowToColor', osg.Vec4f(*self._flowToColor)))
        self._updateFlowAnimation()
        dispatcher.send(('set', 'flowToColor'), self)
def flowToColor(self):
    """
    Return the color of the pulse used to show the flow of information from the start of the path towards the end, or None when the default is used.
    """
    return self._flowToColor
def setFlowToSpacing(self, spacing):
    """
    Set the spacing between pulses used to show the flow of information from the start of the path towards the end.

    The spacing argument should be a number value in world-space coordinates.

    Raises TypeError if spacing is not a number.
    """
    # Fix: use the call form of raise (the old `raise TypeError, '...'`
    # statement form is a syntax error under Python 3).
    if not isinstance(spacing, (int, float)):
        raise TypeError('The spacing passed to setFlowToSpacing() must be a number.')
    if spacing != self._flowToSpacing:
        self._flowToSpacing = float(spacing)
        if self._flowToSpacing is None:
            # NOTE(review): float() never returns None so this branch is dead
            # as written; kept for parity with the other setFlowTo* setters.
            self._shapeGeode.getOrCreateStateSet().removeUniform('flowToSpacing')
        else:
            self._shapeGeode.getOrCreateStateSet().addUniform(osg.Uniform('flowToSpacing', self._flowToSpacing))
        self._updateFlowAnimation()
        dispatcher.send(('set', 'flowToSpacing'), self)
def flowToSpacing(self):
    """
    Return the spacing between pulses used to show the flow of information from the start of the path towards the end.
    """
    return self._flowToSpacing
def setFlowToSpeed(self, speed):
    """
    Set the speed of the pulses used to show the flow of information from the start of the path towards the end.

    The speed argument should be a number value in world-space coordinates per second.

    Raises TypeError if speed is not a number.
    """
    # Fix: use the call form of raise (the old `raise TypeError, '...'`
    # statement form is a syntax error under Python 3).
    if not isinstance(speed, (int, float)):
        raise TypeError('The speed passed to setFlowToSpeed() must be a number.')
    if speed != self._flowToSpeed:
        self._flowToSpeed = float(speed)
        if self._flowToSpeed is None:
            # NOTE(review): float() never returns None so this branch is dead
            # as written; kept for parity with the other setFlowTo* setters.
            self._shapeGeode.getOrCreateStateSet().removeUniform('flowToSpeed')
        else:
            self._shapeGeode.getOrCreateStateSet().addUniform(osg.Uniform('flowToSpeed', self._flowToSpeed))
        self._updateFlowAnimation()
        dispatcher.send(('set', 'flowToSpeed'), self)
def flowToSpeed(self):
    """
    Return the speed of the pulses used to show the flow of information from the start of the path towards the end.
    """
    return self._flowToSpeed
def setFlowToSpread(self, spread):
    """
    Set the length of the pulse tails used to show the flow of information from the start of the path towards the end.

    The spread argument should be a number value from 0.0 (no tail) to 1.0 (tail extends all the way to the next pulse).

    Raises TypeError if spread is not a number.
    """
    # Fix: use the call form of raise (the old `raise TypeError, '...'`
    # statement form is a syntax error under Python 3).
    if not isinstance(spread, (int, float)):
        raise TypeError('The spread passed to setFlowToSpread() must be a number.')
    if spread != self._flowToSpread:
        self._flowToSpread = float(spread)
        if self._flowToSpread is None:
            # NOTE(review): float() never returns None so this branch is dead
            # as written; kept for parity with the other setFlowTo* setters.
            self._shapeGeode.getOrCreateStateSet().removeUniform('flowToSpread')
        else:
            self._shapeGeode.getOrCreateStateSet().addUniform(osg.Uniform('flowToSpread', self._flowToSpread))
        self._updateFlowAnimation()
        dispatcher.send(('set', 'flowToSpread'), self)
def flowToSpread(self):
    """
    Return the length of the pulse tails used to show the flow of information from the start of the path towards the end.
    """
    return self._flowToSpread
def setFlowFrom(self, showFlow = True):
    """
    Set whether the flow of information from the end of the path towards the start should be shown.
    """
    # Normalize any truthy/falsy argument to a real bool before comparing.
    showFlow = bool(showFlow)
    if showFlow == self._flowFrom:
        return
    self._flowFrom = showFlow
    dispatcher.send(('set', 'flowFrom'), self)
    self._updateFlowAnimation()
def flowFrom(self):
    """
    Return whether the flow of information from the end of the path towards the start should be shown.
    """
    return self._flowFrom
def setFlowFromColor(self, color):
    """
    Set the color of the pulse used to show the flow of information from the end of the path towards the start.

    The color argument should be None or a tuple or list of three or four values between 0.0 and 1.0
    indicating the red, green, blue (and optionally alpha) components of the color. For example:
        * (0.0, 0.0, 0.0) -> black
        * (1.0, 0.0, 0.0) -> red
        * (0.0, 1.0, 0.0) -> green
        * (0.0, 0.0, 1.0) -> blue
        * (1.0, 1.0, 1.0) -> white
    If None is passed then the default flow color will be used.
    Raises a ValueError on malformed colors.
    """
    if not isinstance(color, (list, tuple, type(None))) or (color is not None and len(color) not in [3, 4]):
        raise ValueError('The color passed to setFlowFromColor() must be a tuple or list of three or four numbers.')
    if color is not None:
        # Bug fix: the original iterated the color unconditionally, which raised
        # a TypeError when the documented None value was passed.
        for colorComponent in color:
            if not isinstance(colorComponent, (int, float)) or colorComponent < 0.0 or colorComponent > 1.0:
                raise ValueError('The components of the color passed to setFlowFromColor() must all be numbers between 0.0 and 1.0, inclusive.')
        if len(color) == 3:
            # Fill in a fully opaque alpha when only RGB was supplied.
            color = (color[0], color[1], color[2], 1.0)
    if color != self._flowFromColor:
        self._flowFromColor = color
        if self._flowFromColor is None:
            self._shapeGeode.getOrCreateStateSet().removeUniform('flowFromColor')
        else:
            self._shapeGeode.getOrCreateStateSet().addUniform(osg.Uniform('flowFromColor', osg.Vec4f(*self._flowFromColor)))
        self._updateFlowAnimation()
        dispatcher.send(('set', 'flowFromColor'), self)
def flowFromColor(self):
    """
    Return the color of the pulse used to show the flow of information from the end of the path towards the start.

    Returns an RGBA tuple or None if the default flow color is in use.
    """
    return self._flowFromColor
def setFlowFromSpacing(self, spacing):
    """
    Set the spacing between pulses used to show the flow of information from the end of the path towards the start.

    The spacing argument should be a number value in world-space coordinates.
    Raises a TypeError if spacing is not a number.
    """
    if not isinstance(spacing, (int, float)):
        # Fixed: parenthesized raise works under both Python 2 and 3.
        raise TypeError('The spacing passed to setFlowFromSpacing() must be a number.')
    if spacing != self._flowFromSpacing:
        self._flowFromSpacing = float(spacing)
        if self._flowFromSpacing is None:
            # NOTE(review): unreachable -- float(spacing) can never be None.
            # Kept for parity with the original.
            self._shapeGeode.getOrCreateStateSet().removeUniform('flowFromSpacing')
        else:
            self._shapeGeode.getOrCreateStateSet().addUniform(osg.Uniform('flowFromSpacing', self._flowFromSpacing))
        self._updateFlowAnimation()
        dispatcher.send(('set', 'flowFromSpacing'), self)
def flowFromSpacing(self):
    """
    Return the spacing between pulses used to show the flow of information from the end of the path towards the start.

    The value is a number in world-space coordinates, as set by setFlowFromSpacing().
    """
    return self._flowFromSpacing
def setFlowFromSpeed(self, speed):
    """
    Set the speed of the pulses used to show the flow of information from the end of the path towards the start.

    The speed argument should be a number value in world-space coordinates per second.
    Raises a TypeError if speed is not a number.
    """
    if not isinstance(speed, (int, float)):
        # Fixed: parenthesized raise works under both Python 2 and 3.
        raise TypeError('The speed passed to setFlowFromSpeed() must be a number.')
    if speed != self._flowFromSpeed:
        self._flowFromSpeed = float(speed)
        if self._flowFromSpeed is None:
            # NOTE(review): unreachable -- float(speed) can never be None.
            # Kept for parity with the original.
            self._shapeGeode.getOrCreateStateSet().removeUniform('flowFromSpeed')
        else:
            self._shapeGeode.getOrCreateStateSet().addUniform(osg.Uniform('flowFromSpeed', self._flowFromSpeed))
        self._updateFlowAnimation()
        dispatcher.send(('set', 'flowFromSpeed'), self)
def flowFromSpeed(self):
    """
    Return the speed of the pulses used to show the flow of information from the end of the path towards the start.

    The value is a number in world-space coordinates per second, as set by setFlowFromSpeed().
    """
    return self._flowFromSpeed
def setFlowFromSpread(self, spread):
    """
    Set the length of the pulse tails used to show the flow of information from the end of the path towards the start.

    The spread argument should be a number value from 0.0 (no tail) to 1.0 (tail extends all the way to the next pulse).
    Raises a TypeError if spread is not a number.
    """
    if not isinstance(spread, (int, float)):
        # Fixed: parenthesized raise works under both Python 2 and 3.
        raise TypeError('The spread passed to setFlowFromSpread() must be a number.')
    if spread != self._flowFromSpread:
        self._flowFromSpread = float(spread)
        if self._flowFromSpread is None:
            # NOTE(review): unreachable -- float(spread) can never be None.
            # Kept for parity with the original.
            self._shapeGeode.getOrCreateStateSet().removeUniform('flowFromSpread')
        else:
            self._shapeGeode.getOrCreateStateSet().addUniform(osg.Uniform('flowFromSpread', self._flowFromSpread))
        self._updateFlowAnimation()
        dispatcher.send(('set', 'flowFromSpread'), self)
def flowFromSpread(self):
    """
    Return the length of the pulse tails used to show the flow of information from the end of the path towards the start.

    The value is a number from 0.0 (no tail) to 1.0, as set by setFlowFromSpread().
    """
    return self._flowFromSpread
def _updateFlowAnimation(self):
    """
    Attach or detach the flow-animation shader program on this visible's geode.

    When flow animation is enabled and at least one direction of flow is shown,
    the shader program and its supporting uniforms are installed; otherwise any
    previously installed program and uniforms are removed.
    """
    if self._animateFlow and (self._flowTo or self._flowFrom):
        self._shapeGeode.getOrCreateStateSet().addUniform(osg.Uniform('flowTo', self._flowTo))
        self._shapeGeode.getOrCreateStateSet().addUniform(osg.Uniform('flowFrom', self._flowFrom))
        # Scale the texture by the visible's height for unit shapes so the pulses keep a constant world-space size.
        self._shapeGeode.getOrCreateStateSet().addUniform(osg.Uniform('textureScale', (self._size[1] if isinstance(self._shape, UnitShape) else 1.0) / self._staticTextureScale))
        self._shapeGeode.getOrCreateStateSet().addUniform(osg.Uniform('hasTexture', self._staticTexture != None))
        self._shapeGeode.getOrCreateStateSet().addUniform(osg.Uniform('opacity', self._opacity))
        self._shapeGeode.getOrCreateStateSet().setAttributeAndModes(self.display.flowProgram, osg.StateAttribute.ON)
    elif self._shapeGeode.getOrCreateStateSet().getAttribute(osg.StateAttribute.PROGRAM) is not None:
        self._shapeGeode.getOrCreateStateSet().removeAttribute(osg.StateAttribute.PROGRAM)
        self._shapeGeode.getOrCreateStateSet().removeUniform('flowTo')
        self._shapeGeode.getOrCreateStateSet().removeUniform('flowFrom')
        self._shapeGeode.getOrCreateStateSet().removeUniform('textureScale')
        self._shapeGeode.getOrCreateStateSet().removeUniform('hasTexture')
        # NOTE(review): the 'opacity' uniform added in the branch above is not
        # removed here -- confirm whether that is intentional.
def animateFlow(self, animate=True):
    """
    Turn flow animation for this visible on or off.
    """
    if self._animateFlow == animate:
        return
    self._animateFlow = animate
    self._updateFlowAnimation()
def parallelPaths(self):
    """
    Return the other paths that directly connect the same pair of end points as this path.

    Only paths without any mid-points are considered parallel.
    """
    endPoints = set(self.pathEndPoints())
    return [other for other in self._pathStart.connectedPaths
            if other is not self
            and not any(other.pathMidPoints())
            and set(other.pathEndPoints()) == endPoints]
def _updatePath(self):
    """
    Recompute this path's geometry from its end points and mid-points.

    Handles three special layouts: self-loops (same start and end point),
    parallel paths between the same pair of visibles (offset so they are not
    drawn on top of each other), and trimming of the path's ends to the
    surface of the start/end shapes.  Finally updates the visible's
    position/size/rotation (for unit shapes) or the PathShape geometry.
    """
    path = list(self._pathMidPoints)
    path.insert(0, self._pathStart.worldPosition())
    parallelPaths = self.parallelPaths()
    if len(path) == 1 and self._pathStart == self._pathEnd:
        # Special case for paths with the same start and end point. Create a loop via temporary mid-points so the path is visible (and not zero-length).
        # Once mid-points become editable in the GUI then the temporary mid-points should become permanent once modified by the user.
        # TODO: handle multiple self-connections.
        center = path[0]
        size = self._pathStart.worldSize()
        halfSize = (size[0] / 2.0, size[1] / 2.0, size[2] / 2.0)
        minHalfSize = min(halfSize)
        pad1 = minHalfSize * 0.1
        pad2 = minHalfSize * 0.5
        path += [(center[0] + halfSize[0] + pad1, center[1], center[2]), (center[0] + halfSize[0] + pad2, center[1], center[2]), (center[0] + halfSize[0] + pad2, center[1] + halfSize[1] + pad2, center[2]), (center[0], center[1] + halfSize[1] + pad2, center[2]), (center[0], center[1] + halfSize[1] + pad1, center[2]), center]
    else:
        path += [self._pathEnd.worldPosition()]
        # Make sure parallel paths (with no mid-points) are not drawn right on top of each other.
        if len(path) == 2 and any(parallelPaths):
            # Since each parallel path renders itself we need to have a reliable order to the paths. For now just sorting by each path's id().
            pathIds = [id(parallelPath) for parallelPath in parallelPaths] + [id(self)]
            pathIds.sort()
            pathIndex = pathIds.index(id(self))
            # Calculate the path vector going from the end with the lowest id() to the highest.
            flip = id(self._pathStart) > id(self._pathEnd)
            if flip:
                startVec = osg.Vec3d(*self._pathEnd.worldPosition())
                endVec = osg.Vec3d(*self._pathStart.worldPosition())
            else:
                startVec = osg.Vec3d(*self._pathStart.worldPosition())
                endVec = osg.Vec3d(*self._pathEnd.worldPosition())
            pathVec = endVec - startVec
            # Calculate an offset vector that can be used to visually separate the parallel connections.
            if self.display.viewDimensions == 2:
                # In 2D use an offset vector that evenly spaces the paths in the current view plane. For example with three paths:
                #
                #    \-------------/
                #     |   |   |
                #     |   |   |
                #     0   1   2    <- path index
                #     |   |   |
                #     |   |   |
                #    /---------\
                numPaths = len(parallelPaths) + 1
                size1 = self._pathStart.worldSize()
                size2 = self._pathEnd.worldSize()
                # Pick a normal perpendicular to the path within the active ortho view plane.
                if self.display.orthoViewPlane == 'xy':
                    minSize = min(size1[0], size1[1], size2[0], size2[1])
                    normal = pathVec ^ (osg.Vec3d(0.0, 0.0, 1.0) if pathVec.x() != 0 or pathVec.y() != 0 else osg.Vec3d(1.0, 0.0, 0.0))
                elif self.display.orthoViewPlane == 'xz':
                    minSize = min(size1[0], size1[2], size2[0], size2[2])
                    normal = pathVec ^ (osg.Vec3d(0.0, 1.0, 0.0) if pathVec.x() != 0 or pathVec.z() != 0 else osg.Vec3d(1.0, 0.0, 0.0))
                else: #'zy'
                    minSize = min(size1[1], size1[2], size2[1], size2[2])
                    normal = pathVec ^ (osg.Vec3d(1.0, 0.0, 0.0) if pathVec.z() != 0 or pathVec.y() != 0 else osg.Vec3d(0.0, 1.0, 0.0))
                # Set the spacing based on the smaller of the objects at the ends of the path.
                unitSpacing = minSize / (numPaths + 1)
                # Determine the offset magnitude from the path index.
                offsetSize = unitSpacing * (pathIndex - (numPaths - 1) / 2.0)
                normal.normalize()
                offsetVec = normal * offsetSize
            else:
                # In 3D use an offset vector that evenly spaces the paths around a cylinder centered on where a single path connection would be.
                # Ideally the paths would be placed as in 2D but the paths would have to be recalculated every time the camera moved which is impractical.
                # TBD: Could a vector shader do that?
                normal = pathVec ^ (osg.Vec3d(1.0, 1.0, 0.0) if pathVec.x() == 0.0 and pathVec.y() == 0.0 else osg.Vec3d(0.0, 0.0, 1.0))
                normal.normalize()
                # Rotate the normal based on this path's order in the path list.
                quat = osg.Quat(2.0 * pi / (len(parallelPaths) + 1) * pathIndex, pathVec)
                # Base the cylinder's radius on the size of the smaller of the objects at the ends of the path.
                spacing = min([min(self._pathStart.worldSize()), min(self._pathEnd.worldSize())]) * 0.3
                offsetVec = quat * (normal * spacing)
            # Add two points along the path so the lines will be parallel visually.
            nearPoint = startVec * 0.4 + endVec * 0.6 + offsetVec
            farPoint = startVec * 0.6 + endVec * 0.4 + offsetVec
            if flip:
                path = [path[0], (nearPoint.x(), nearPoint.y(), nearPoint.z()), (farPoint.x(), farPoint.y(), farPoint.z()), path[1]]
            else:
                path = [path[0], (farPoint.x(), farPoint.y(), farPoint.z()), (nearPoint.x(), nearPoint.y(), nearPoint.z()), path[1]]
        if self._pathStart.shape() and self._pathStart.opacity() > 0.0:
            # Try to find the point where the path intersects the shape.
            # TODO: use OSG's line segment intersector?  Then shapes don't have to write intersectionPoint() methods.
            rayOrigin = path[1]
            if len(path) > 2:
                rayDirection = (path[1][0] - path[2][0], path[1][1] - path[2][1], path[1][2] - path[2][2])
            else:
                rayDirection = (path[0][0] - path[1][0], path[0][1] - path[1][1], path[0][2] - path[1][2])
            if isinstance(self._pathStart.shape(), UnitShape):
                # Translate the ray into the shape's coordinate system.
                size = self._pathStart.worldSize()
                rayOrigin = ((rayOrigin[0] - path[0][0]) / size[0], (rayOrigin[1] - path[0][1]) / size[1], (rayOrigin[2] - path[0][2]) / size[2])
                rayDirection = (rayDirection[0] / size[0], rayDirection[1] / size[1], rayDirection[2] / size[2])
            intersectionPoint = self._pathStart.shape().intersectionPoint(rayOrigin, rayDirection)
            if intersectionPoint:
                if isinstance(self._pathStart.shape(), UnitShape):
                    # Translate back into world space coordinates.
                    intersectionPoint = (intersectionPoint[0] * size[0] + path[0][0], intersectionPoint[1] * size[1] + path[0][1], intersectionPoint[2] * size[2] + path[0][2])
                path[0:1] = [intersectionPoint]
        if self._pathEnd.shape() and self._pathEnd.opacity() > 0.0:
            # Try to find the point where the path intersects the shape.
            # TODO: use OSG's line segment intersector?  Then shapes don't have to write intersectionPoint() methods.
            rayOrigin = path[-2]
            if len(path) > 2:
                rayDirection = (path[-2][0] - path[-3][0], path[-2][1] - path[-3][1], path[-2][2] - path[-3][2])
            else:
                rayDirection = (path[-1][0] - path[-2][0], path[-1][1] - path[-2][1], path[-1][2] - path[-2][2])
            if isinstance(self._pathEnd.shape(), UnitShape):
                # Translate the ray into the shape's coordinate system.
                size = self._pathEnd.worldSize()
                rayOrigin = ((rayOrigin[0] - path[-1][0]) / size[0], (rayOrigin[1] - path[-1][1]) / size[1], (rayOrigin[2] - path[-1][2]) / size[2])
                rayDirection = (rayDirection[0] / size[0], rayDirection[1] / size[1], rayDirection[2] / size[2])
            intersectionPoint = self._pathEnd.shape().intersectionPoint(rayOrigin, rayDirection)
            if intersectionPoint:
                if isinstance(self._pathEnd.shape(), UnitShape):
                    # Translate back into world space coordinates.
                    intersectionPoint = (intersectionPoint[0] * size[0] + path[-1][0], intersectionPoint[1] * size[1] + path[-1][1], intersectionPoint[2] * size[2] + path[-1][2])
                path[-1:] = [intersectionPoint]
    if isinstance(self._shape, UnitShape):
        # Create a straight connection from start to end
        # TODO: Will this object ever have a parent?  If so then we'll have to translate world to local coordinates here.
        position, size, rotation = self._positionSizeRotation(path[0], path[-1])
        self.setPosition(position)
        self.setSize(size)
        self.setRotation(rotation)
        self._updateGlow()
    else:
        # Compute this visible's bounding box from the path points.
        minBound = (1e300, 1e300, 1e300)
        maxBound = (-1e300, -1e300, -1e300)
        for point in path:
            minBound = (min(minBound[0], point[0]), min(minBound[1], point[1]), min(minBound[2], point[2]))
            maxBound = (max(maxBound[0], point[0]), max(maxBound[1], point[1]), max(maxBound[2], point[2]))
        self._position = ((maxBound[0] + minBound[0]) / 2.0, (maxBound[1] + minBound[1]) / 2.0, (maxBound[2] + minBound[2]) / 2.0)
        self._size = (maxBound[0] - minBound[0], maxBound[1] - minBound[1], maxBound[2] - minBound[2])
        self._rotation = (0, 1, 0, 0)
        if isinstance(self._shape, PathShape):
            # Remove any glow.
            glowColor = self._glowColor
            self.setGlowColor(None)
            # Remove the previous geometry.
            self._shapeGeode.removeDrawable(self._shape.geometry())
            # Update the geometry.
            self._shape.setPoints(path)
            # Add the new geometry.
            self._shapeGeode.addDrawable(self._shape.geometry())
            # Restore any glow.
            self.setGlowColor(glowColor)
    self._updateTransform()
    self._updateFlowAnimation()
def setPathEndPoints(self, startVisible, endVisible):
    """
    Set the start and end points of this path.

    The startVisible and endVisible arguments should be other visibles in the same display
    as this visible or None.  Dependency bookkeeping on the old and new end points is
    updated and the path geometry is recomputed (along with any parallel paths).
    Raises a TypeError or ValueError on invalid arguments.
    """
    if not isinstance(startVisible, (Visible, type(None))) or not isinstance(endVisible, (Visible, type(None))):
        # Fixed: parenthesized raise works under both Python 2 and 3; the
        # original comma form is a syntax error under Python 3.
        raise TypeError('The arguments passed to setPathEndPoints() must be Visible instances or None.')
    if (startVisible and (startVisible.display != self.display or startVisible == self)) or (endVisible and (endVisible.display != self.display or endVisible == self)):
        raise ValueError('The arguments passed to setPathEndPoints() must be other visibles in the same display as this visible.')
    if startVisible != self._pathStart or endVisible != self._pathEnd:
        if startVisible != self._pathStart:
            if self._pathStart:
                # Detach from the old start point.
                self._pathStart.dependentVisibles.discard(self)
                self._pathStart.connectedPaths.remove(self)
                self._dependencies.discard(self._pathStart)
            self._pathStart = startVisible
            if self._pathStart:
                # Track the new start point's position, size and shape so the path follows it.
                self._addDependency(startVisible, 'position')
                self._addDependency(startVisible, 'size')
                self._addDependency(startVisible, 'shape')
                startVisible.connectedPaths.append(self)
                self._pathStart.dependentVisibles.add(self)
        if endVisible != self._pathEnd:
            if self._pathEnd:
                # Detach from the old end point.
                self._pathEnd.dependentVisibles.discard(self)
                self._pathEnd.connectedPaths.remove(self)
                self._dependencies.discard(self._pathEnd)
            self._pathEnd = endVisible
            if self._pathEnd:
                # Track the new end point's position, size and shape so the path follows it.
                self._addDependency(endVisible, 'position')
                self._addDependency(endVisible, 'size')
                self._addDependency(endVisible, 'shape')
                endVisible.connectedPaths.append(self)
                self._pathEnd.dependentVisibles.add(self)
        if self._pathStart and self._pathEnd:
            self._updatePath()
            # Parallel paths may need to re-space themselves now that this path exists.
            for parallelPath in self.parallelPaths():
                parallelPath._updatePath()
        else:
            self._updateTransform()
        dispatcher.send(('set', 'path'), self)
def pathEndPoints(self):
    """
    Return the start and end points of this path as a (start, end) tuple of visibles (either may be None).
    """
    return (self._pathStart, self._pathEnd)
def _pathCounterpart(self, visible):
    """
    Return the visible at the opposite end of this path from the given visible.

    Raises a ValueError if the given visible is not one of this path's end points.
    """
    if self._pathStart == visible:
        return self._pathEnd
    elif self._pathEnd == visible:
        return self._pathStart
    else:
        # Fixed: parenthesized raise works under both Python 2 and 3.
        raise ValueError('The visible passed to _pathCounterpart is not connected to the path.')
def setPathMidPoints(self, midPoints):
    """
    Set any additional mid-points that should be used to render the path.

    The mid-points should be a list of world-space coordinates, e.g. [(0.1, 0.3), (0.1, 0.5), (0.2, 0.5)] or None.
    Raises a TypeError or ValueError on malformed mid-points.
    """
    if midPoints == None:
        midPoints = []
    if not isinstance(midPoints, (list, tuple)):
        # Fixed: parenthesized raise works under both Python 2 and 3; the
        # original comma form is a syntax error under Python 3.
        raise TypeError('The argument passed to setPathMidPoints() must be a list, a tuple or None.')
    for midPoint in midPoints:
        # TODO: figure out a way to skip this part if we're called from Display.setVisiblePath() since it already validated the mid-points.
        if not isinstance(midPoint, (list, tuple)) or len(midPoint) not in (2, 3):
            raise ValueError('The mid-points passed to setPathMidPoints() must be a list or tuple of two or three numbers.')
        for midPointDim in midPoint:
            if not isinstance(midPointDim, (int, float)):
                raise ValueError('Each list or tuple mid-point passed to setPathMidPoints() must contain only numbers.')
    if midPoints != self._pathMidPoints:
        self._pathMidPoints = midPoints
        self._updatePath()
        dispatcher.send(('set', 'pathMidPoints'), self)
def pathMidPoints(self):
    """
    Return any additional mid-points that should be used to render the path (a list of world-space coordinates).
    """
    return self._pathMidPoints
def isPath(self):
    """
    Return whether this visible is a path (i.e. has a path start point set).
    """
    return self._pathStart is not None
def setPathIsFixed(self, isFixed):
    """
    Set whether this path is fixed in place, notifying listeners on change.
    """
    if isFixed == self._pathIsFixed:
        return
    self._pathIsFixed = isFixed
    dispatcher.send(('set', 'pathIsFixed'), self)
def pathIsFixed(self):
    # Return whether this path is fixed in place (see setPathIsFixed()).
    return self._pathIsFixed
def _updateGlow(self):
    """
    Rebuild the glow highlight geometry for this visible.

    Any existing glow node is removed first; a new one is added when a glow
    color is set or the visible is an orphan, and the shape is non-empty.
    Unit shapes get a uniformly scaled copy of their geometry; path shapes get
    a thicker copy of the path.
    """
    # Remove any previous glow.
    if self._glowNode is not None:
        self.sgNode.removeChild(self._glowNode)
        self._glowNode = None
        self._glowNodeMaterial = None
        self._glowShape = None
    # Add a new glow if we have a glow color and a non-empty shape.
    shouldGlow = self._glowColor is not None or self.isOrphan()
    w, h, d = self.size()
    haveNonEmptyShape = self._shape is not None and (not isinstance(self._shape, UnitShape) or (w > 0.0 and h > 0.0 and d > 0.0))
    if shouldGlow and haveNonEmptyShape:
        # TODO: use a shader effect to produce the glow rather than additional geometry
        glowGeode = osg.Geode()
        glowGeode.setName(str(self.displayId))
        # Fall back to a translucent dark red when glowing only because the visible is an orphan.
        glowColor = self._glowColor or (0.5, 0.0, 0.0 ,0.2)
        if isinstance(self._shape, UnitShape):
            # Add a copy of the shape using the same geometry scaled up so that it is a fixed amount larger in each dimension.
            # It would be nice to use a fixed screen pixel size but as a best guess we use 0.5% of the average width/height/depth of the whole display.
            w, h, d = self.worldSize()
            glowSize = (self.display.visiblesSize[0] + self.display.visiblesSize[1] + self.display.visiblesSize[2]) / 3.0 * 0.005
            self._glowNode = osg.MatrixTransform(osg.Matrixd.scale(osg.Vec3((w + glowSize) / w, (h + glowSize) / h, (d + glowSize) / d)))
            glowGeode.addDrawable(self._shape.geometry())
            stateSet1 = self._glowNode.getOrCreateStateSet()
            stateSet1.clear()
            self._glowNodeMaterial = osg.Material()
            colorVec = osg.Vec4(*glowColor)
            self._glowNodeMaterial.setDiffuse(osg.Material.FRONT_AND_BACK, colorVec)
            self._glowNodeMaterial.setEmission(osg.Material.FRONT_AND_BACK, colorVec)
            self._glowNodeMaterial.setAlpha(osg.Material.FRONT_AND_BACK, glowColor[3])
            stateSet1.setAttribute(self._glowNodeMaterial)
        elif isinstance(self._shape, PathShape):
            # Add a copy of the shape with a larger width.
            self._glowNode = osg.MatrixTransform(osg.Matrixd.identity())
            self._glowShape = self._shape.__class__(**self._shape.persistentAttributes())
            self._glowShape.setPoints(self._shape.points())
            self._glowShape.setWeight(self._shape.weight() + 8.0)
            glowGeode.addDrawable(self._glowShape.geometry())
            stateSet1 = self._glowNode.getOrCreateStateSet()
            stateSet1.clear()
            self._glowShape.setColor(glowColor)
        self._glowNode.addChild(glowGeode)
        self.sgNode.addChild(self._glowNode)
        if glowColor[3] == 1:
            # Opaque glow.
            stateSet1.setRenderingHint(osg.StateSet.TRANSPARENT_BIN)
            stateSet1.setMode(osg.GL_BLEND, osg.StateAttribute.OFF)
        else:
            # Transparent glow.
            stateSet1.setMode(osg.GL_BLEND, osg.StateAttribute.ON)
        # Place more deeply nested regions in lower render bins so they are rendered before the containing visible.
        # Each nesting depth needs four render bins: two for the front and back face of the shape and one for the glow shape.
        # This assumes a maximum nesting depth of 10.
        # TODO: this code is causing problems where glows are hidden when there are regions on top of them.  Removing for now.
        # sceneDepth = len(self.ancestors())
        # stateSet1.setRenderBinDetails(40 - sceneDepth * 2, 'DepthSortedBin')
def setGlowColor(self, color):
    """
    Set the glow highlight color for this visible (None removes the glow), notifying listeners on change.
    """
    if color == self._glowColor:
        return
    self._glowColor = color
    self._updateGlow()
    dispatcher.send(('set', 'glowColor'), self)
def setOrphanClass(self, orphanClass):
    """
    Set the orphan class of this visible, refreshing the glow and notifying listeners on change.
    """
    if orphanClass == self._orphanClass:
        return
    self._orphanClass = orphanClass
    self._updateGlow()
    dispatcher.send(('set', 'orphanClass'), self)
    dispatcher.send(('set', 'isOrphan'), self)
def orphanClass(self):
    # Return this visible's orphan class, or None if it is not an orphan.
    return self._orphanClass
def isOrphan(self):
    # A visible is an orphan when it has an orphan class set.
    return self._orphanClass is not None
|
JaneliaSciComp/Neuroptikon
|
Source/display/visible.py
|
Python
|
bsd-3-clause
| 120,493
|
[
"NEURON"
] |
729ac57610a3fab61bcbbde96111635385fde22b50fd628bffa5ac0d8a8e80a1
|
import asyncio
import pickle
from datetime import datetime, timedelta
import json
from os import path
from astral import Astral, GoogleGeocoder
from Firefly import logging, scheduler
from Firefly.const import DAY_EVENTS, EVENT_TYPE_BROADCAST, SOURCE_LOCATION, TIME, LOCATION_FILE
from Firefly.helpers.events import Event, Command
class Location(object):
    """Tracks the physical location, mode and sun events for a Firefly install.

    Geocodes the configured address via astral, broadcasts day events
    (sunrise, sunset, ...) and a once-a-minute time event on the Firefly
    event bus, and persists its configuration to a JSON file plus a pickle
    of the geocoded location.
    """

    def __init__(self, firefly, location_file=LOCATION_FILE):
        logging.info('Setup Location')
        self.firefly = firefly
        self.modes = ['home']
        self._mode = 'home'
        self.geolocation = None
        # NOTE(review): 'San Fransisco' is misspelled, but it is also the
        # default persisted in existing config files -- confirm before fixing.
        self.address = 'San Fransisco'
        self.old_address = None
        self.location_dump = None
        self.read_config(location_file)
        if self.old_address == self.address and self.geolocation:
            # Address unchanged: reuse the geocoded location loaded from the pickle.
            logging.info('Importing location from pickle file')
        else:
            self.update_location(self.address)
        self.status_messages = {}
        self.setupScheduler()

    def read_config(self, location_file, **kwargs):
        ''' Read config from file.

        Loads modes, mode, last_mode and address from the JSON config and,
        if present, the previously geocoded location from the sibling
        .pickle file.

        Args:
          location_file: path to location config file
          **kwargs:

        Returns:
        '''
        with open(location_file) as config_file:
            config = json.load(config_file)
        self.location_file = location_file
        self.modes = config.get('modes', ['home', 'away', 'night', 'vacation', 'alarm'])
        self._mode = config.get('mode', 'home')
        self._last_mode = config.get('last_mode', 'home')
        self.address = config.get('address', 'San Fransisco')
        self.old_address = config.get('old_address', '')
        pickle_file = location_file.replace('.json', '.pickle')
        if path.isfile(pickle_file):
            # NOTE(review): unpickling is only safe because this file is written
            # by export_to_file() below -- never point it at untrusted data.
            self.geolocation = pickle.load(open(pickle_file, 'rb'))

    def update_location(self, address:str, **kwargs):
        ''' Update location to new address.

        Geocodes the address with astral/Google, normalizes it to
        "<name> <region>", persists the result and reschedules day events.

        Args:
          address: New address for location
          **kwargs:

        Returns:
        '''
        a = Astral(GoogleGeocoder)
        a.solar_depression = 'civil'
        self.geolocation = a[address]
        address = '%s %s' % (self.geolocation.name, self.geolocation.region)
        self.old_address = address
        self.address = address
        self.export_to_file()
        self.setupScheduler()

    @property
    def latitude(self):
        # Latitude of the geocoded location.
        return self.geolocation.latitude

    @property
    def longitude(self):
        # Longitude of the geocoded location.
        return self.geolocation.longitude

    def add_mode(self, mode, **kwargs):
        # Add a new (lower-cased) mode if not already known.
        mode = mode.lower()
        if mode not in self.modes:
            self.modes.append(mode)

    def remove_mode(self, mode, **kwargs):
        # Remove a (lower-cased) mode if present.
        mode = mode.lower()
        if mode in self.modes:
            self.modes.remove(mode)

    def export_to_file(self, **kwargs):
        ''' Save the config to the config file.

        Writes the JSON config and pickles the geocoded location alongside it.

        Args:
          **kwargs:

        Returns:
        '''
        with open(self.location_file, 'w') as config:
            json.dump(self.export(), config, indent=4, sort_keys=True)
        pickle_file = self.location_file.replace('.json', '.pickle')
        with open(pickle_file, 'wb') as pf:
            pickle.dump(self.geolocation, pf)

    def export(self, **kwargs) -> dict:
        # Serialize the persistable state as a plain dict.
        export_data = {
            'modes': self.modes,
            'mode': self.mode,
            'last_mode': self.lastMode,
            'address': self.address,
            'old_address': self.old_address
        }
        return export_data

    # TODO: Add handling for realtime adding/deleting modes, changing zipcode etc.

    def setupScheduler(self) -> None:
        # Schedule the next occurrence of each day event (sunrise, sunset, ...),
        # announce startup, then arrange for the minutely time broadcast to
        # start on a minute boundary.
        for e in DAY_EVENTS:
            day_event_time = self.getNextDayEvent(e)
            logging.info('Day Event: {} Time: {}'.format(e, str(day_event_time)))
            scheduler.runAt(day_event_time, self.DayEventHandler, day_event=e, job_id=e)
        event = Event(SOURCE_LOCATION, EVENT_TYPE_BROADCAST, event_action={
            SOURCE_LOCATION: 'STARTUP'
        })
        self.firefly.send_event(event)
        # Setup Time Broadcast to start at the next minute
        now = self.now
        # NOTE(review): 'strat_at' is a typo for 'start_at' (local only).
        if now.second < 50:
            strat_at = now + timedelta(minutes=1) - timedelta(seconds=now.second)
        else:
            # Too close to the minute boundary to schedule reliably; skip ahead one more minute.
            strat_at = now + timedelta(minutes=2) - timedelta(seconds=now.second)
        scheduler.runAt(strat_at, self.setup_time_broadcast)

    def setup_time_broadcast(self) -> None:
        # Setup Time Broadcast: fire once now, then every minute.
        scheduler.runEveryM(1, self.broadcast_time)
        self.broadcast_time()

    # NOTE(review): @asyncio.coroutine is deprecated and removed in Python
    # 3.11 -- migrating to 'async def' needs verification against scheduler.
    @asyncio.coroutine
    def DayEventHandler(self, day_event):
        # Broadcast the day event (plus a '<event>_system' variant) and
        # reschedule its next occurrence.
        logging.info('day event handler - event: {}'.format(day_event))
        event = Event(SOURCE_LOCATION, EVENT_TYPE_BROADCAST, event_action={
            SOURCE_LOCATION: day_event
        })
        self.firefly.send_event(event)
        event = Event(SOURCE_LOCATION, EVENT_TYPE_BROADCAST, event_action={
            SOURCE_LOCATION: '%s_system' % day_event
        })
        self.firefly.send_event(event)
        next_day_event_time = self.getNextDayEvent(day_event)
        scheduler.runAt(next_day_event_time, self.DayEventHandler, day_event=day_event, job_id=day_event)

    def getNextDayEvent(self, day_event):
        # Return the next local datetime for the given sun event (today if
        # still ahead, otherwise tomorrow).
        # NOTE(review): returns False (not None) when astral has no such
        # event -- callers pass that straight to scheduler.runAt; confirm
        # that the scheduler tolerates it.
        now = self.now
        day_event_time = self.geolocation.sun(date=now, local=True).get(day_event)
        if day_event_time is None:
            return False
        if day_event_time < now:
            day_event_time = self.geolocation.sun(date=now + timedelta(days=1), local=True).get(day_event)
        return day_event_time

    @property
    def mode(self):
        # Current mode (e.g. 'home', 'away').
        return self._mode

    @mode.setter
    def mode(self, mode):
        # Switch modes (only to a known mode), broadcast the change and
        # persist the new state.
        mode = str(mode)
        if mode in self.modes:
            self._last_mode = self._mode
            self._mode = mode
            event = Event(SOURCE_LOCATION, EVENT_TYPE_BROADCAST, {
                # NOTE(review): 'EVENT_ACTION_MODE' is a literal string here,
                # not the constant of the same name -- confirm intent.
                'EVENT_ACTION_MODE': mode,
                'mode': mode,
                'last_mode': self.lastMode,
                'is_dark': self.isDark
            })
            self.firefly.send_event(event)
            # Export location to file on change. This will be good for unexpected restarts.
            self.firefly.export_location()

    @property
    def lastMode(self):
        # The mode that was active before the current one.
        return self._last_mode

    @property
    def isDark(self):
        # True when the current local time is after sunset or before sunrise.
        now = self.now
        sun = self.geolocation.sun(date=now, local=True)
        if now >= sun['sunset'] or now <= sun['sunrise']:
            return True
        return False

    @property
    def isLight(self):
        # Inverse of isDark.
        return not self.isDark

    def isLightOffset(self, sunrise_offset=None):
        '''isLightOffset lets you know if the sun is up at the current time.

        If sunset_offset (INT Hours) is passed then it will tell you if the
        sun will be up in the next x hours from the current time.

        i.e: if you want to know if the sun will be up in the next three hours,
        you would pass sunrise_offset=-3

        [sunset_offset is yet to be added]
        '''
        if self.isDark:
            if sunrise_offset is not None:
                offset_time = self.now - timedelta(hours=sunrise_offset)
                sun = self.geolocation.sun(date=self.now, local=True)
                if offset_time >= sun['sunrise'] and offset_time <= sun['sunset']:
                    return True
            return False
        return not self.isDark

    def broadcast_time(self) -> None:
        # Broadcast the current local time, broken into fields, on the event bus.
        now = self.now
        event = Event(TIME, EVENT_TYPE_BROADCAST, {
            'epoch': now.timestamp(),
            'day': now.day,
            'month': now.month,
            'year': now.year,
            'hour': now.hour,
            'minute': now.minute,
            'weekday': now.isoweekday()
        })
        self.firefly.send_event(event)

    def add_status_message(self, message_id, message):
        # Store/replace a status message and broadcast that messages changed.
        self.status_messages[message_id] = message
        event = Event(SOURCE_LOCATION, EVENT_TYPE_BROADCAST, event_action={
            'status_message': 'updated'
        })
        self.firefly.send_event(event)

    def remove_status_message(self, message_id):
        # Drop a status message (if present) and broadcast that messages changed.
        if message_id in self.status_messages:
            self.status_messages.pop(message_id)
            event = Event(SOURCE_LOCATION, EVENT_TYPE_BROADCAST, event_action={
                'status_message': 'updated'
            })
            self.firefly.send_event(event)

    def process_command(self, command:Command, **kwargs):
        # Dispatch location-related commands (mode add/remove, address change).
        logging.info('[LOCATION] command: %s' % str(command))
        logging.info('[LOCATION] command: %s args: %s' % (str(command.command), str(command.args)))
        if command.command == 'remove_mode' and command.args.get('mode'):
            self.remove_mode(command.args.get('mode'))
        if command.command == 'add_mode' and command.args.get('mode'):
            self.add_mode(command.args.get('mode'))
        if command.command == 'update_address' and command.args.get('address'):
            self.update_location(command.args.get('address'))
            self.firefly.refresh_firebase()

    @property
    def now(self) -> datetime:
        # Current time in the location's timezone.
        return datetime.now(self.geolocation.tz)
|
Firefly-Automation/Firefly
|
Firefly/helpers/location.py
|
Python
|
apache-2.0
| 8,331
|
[
"Firefly"
] |
bbfe340f052dde613403d01cb44b54a56b290b15a69a3015ae47ebc84d7c248d
|
# -*- coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
##
## Copyright (C) 2005-2007 Async Open Source <http://www.async.com.br>
## All rights reserved
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU Lesser General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., or visit: http://www.gnu.org/.
##
## Author(s): Stoq Team <stoq-devel@async.com.br>
##
""" Search dialogs for fiscal objects """
import datetime
import gtk
from kiwi.currency import currency
from kiwi.python import enum
from stoqlib.api import api
from stoqlib.domain.fiscal import CfopData, IcmsIpiView, IssView
from stoqlib.enums import SearchFilterPosition
from stoqlib.gui.base.dialogs import run_dialog
from stoqlib.gui.editors.fiscaleditor import (CfopEditor,
FiscalBookEntryEditor)
from stoqlib.gui.search.searchcolumns import SearchColumn
from stoqlib.gui.search.searchdialog import SearchDialog
from stoqlib.gui.search.searcheditor import SearchEditor
from stoqlib.gui.search.searchfilters import ComboSearchFilter
from stoqlib.lib.translation import stoqlib_gettext
_ = stoqlib_gettext
class FiscalBookEntryType(enum):
    """Enumeration of the fiscal book entry types handled by the searches below."""
    (ICMS,
     IPI,
     ISS) = range(3)

# Translated labels for each fiscal book entry type, used by the type filter combo.
fiscal_book_entries = {FiscalBookEntryType.ICMS: _("ICMS"),
                       FiscalBookEntryType.IPI: _("IPI"),
                       FiscalBookEntryType.ISS: _("ISS")}
class CfopSearch(SearchEditor):
    """Search/edit dialog for C.F.O.P. records (Brazilian fiscal operation codes)."""
    title = _("C.F.O.P. Search")
    search_spec = CfopData
    editor_class = CfopEditor
    size = (-1, 390)

    #
    # SearchDialog Hooks
    #

    def create_filters(self):
        # Free-text search matches the code and description columns.
        self.set_text_field_columns(['description', 'code'])

    def get_columns(self):
        return [SearchColumn('code', _('C.F.O.P.'), data_type=str, sorted=True,
                             width=90),
                SearchColumn('description', _('Description'), data_type=str,
                             searchable=True, expand=True)]
class FiscalBookEntrySearch(SearchDialog):
title = _("Search for fiscal entries")
size = (-1, 450)
search_spec = IcmsIpiView
text_field_columns = []
branch_filter_column = IcmsIpiView.branch_id
def _setup_columns(self, column, table, col_name, summary_label_text):
    """Append the given value column to the base columns and switch the search spec.

    NOTE(review): col_name and summary_label_text are currently unused here --
    presumably left over from an earlier summary-label feature; confirm before
    removing them from the signature.
    """
    columns = self.get_columns() + [column]
    self.results.set_columns(columns)
    self.search.set_search_spec(table)
def _setup_icms_columns(self):
col = SearchColumn('icms_value',
title=_('ICMS Total'),
justify=gtk.JUSTIFY_RIGHT,
data_type=currency, width=100)
self._setup_columns(col, IcmsIpiView, 'icms_value',
_("ICMS Total:"))
def _setup_ipi_columns(self):
col = SearchColumn('ipi_value',
title=_('IPI Total'),
justify=gtk.JUSTIFY_RIGHT,
data_type=currency, width=100)
self._setup_columns(col, IcmsIpiView, 'ipi_value',
_("IPI Total:"))
def _setup_iss_columns(self):
col = SearchColumn('iss_value',
title=_('ISS Total'),
justify=gtk.JUSTIFY_RIGHT,
data_type=currency, width=100)
self._setup_columns(col, IssView, 'iss_value',
_("ISS Total:"))
#
# SearchBar Hooks
#
def get_columns(self):
return [SearchColumn('date', title=_('Date'), width=80,
data_type=datetime.date, justify=gtk.JUSTIFY_RIGHT),
SearchColumn('invoice_number', title=_('Invoice'),
data_type=int, width=100, sorted=True),
SearchColumn('cfop_code', title=_('C.F.O.P.'),
data_type=str, width=75),
SearchColumn('drawee_name', title=_('Drawee'),
data_type=str, expand=True)]
def _get_entry_type_query(self, state):
entry_type = state.value
if entry_type == FiscalBookEntryType.ICMS:
self._setup_icms_columns()
elif entry_type == FiscalBookEntryType.ISS:
self._setup_iss_columns()
elif entry_type == FiscalBookEntryType.IPI:
self._setup_ipi_columns()
else:
raise ValueError("Invalid fical book entry type, got %s"
% entry_type)
#
# SearchDialog Hooks
#
def setup_widgets(self):
self.edit_button = self.add_button(_('Edit'))
self.edit_button.connect('clicked', self._on_edit_button__clicked)
self.edit_button.show()
self.edit_button.set_sensitive(False)
self.add_csv_button(_('Fiscal book'), _('fiscal-book'))
def update_widgets(self):
can_edit = bool(self.results.get_selected())
self.edit_button.set_sensitive(can_edit)
def create_filters(self):
items = [(v, k)
for k, v in fiscal_book_entries.items()]
self.entry_type = ComboSearchFilter(_('Show entries of type'), items)
self.add_filter(self.entry_type, callback=self._get_entry_type_query,
position=SearchFilterPosition.TOP)
#
# Callbacks
#
def _on_edit_button__clicked(self, widget):
entry = self.results.get_selected()
assert entry is not None
store = api.new_store()
retval = run_dialog(FiscalBookEntryEditor, self, store,
store.fetch(entry.book_entry))
store.confirm(retval)
store.close()
|
andrebellafronte/stoq
|
stoqlib/gui/search/fiscalsearch.py
|
Python
|
gpl-2.0
| 6,159
|
[
"VisIt"
] |
c65ed0bee31724e5a1e19976077f223d907288618d7f111f81c17c5f38f5df23
|
"""Test of ScaLAPACK diagonalize and inverse cholesky.
The test generates a random symmetric matrix H0 and
positive definite matrix S0 on a 1-by-1 BLACS grid. They
are redistributed to a mprocs-by-nprocs BLACS grid,
diagonalized in parallel, and eigenvalues are compared
against LAPACK. Eigenvectors are not compared.
"""
import sys
import numpy as np
from gpaw.mpi import world, rank
from gpaw.blacs import BlacsGrid, Redistributor, parallelprint
from gpaw.utilities import compiled_with_sl
from gpaw.utilities.lapack import diagonalize, general_diagonalize, \
inverse_cholesky
from gpaw.utilities.blas import rk, gemm
from gpaw.utilities.scalapack import scalapack_general_diagonalize_dc, \
scalapack_general_diagonalize_ex, \
scalapack_diagonalize_dc, scalapack_diagonalize_ex, \
scalapack_inverse_cholesky ## , \
## scalapack_diagonalize_mr3, scalapack_general_diagonalize_mr3
tol = 1.0e-8
def main(N=73, seed=42, mprocs=2, nprocs=2, dtype=float):
    """Build random H/S matrices on rank 0, diagonalize in parallel with
    ScaLAPACK on a mprocs-by-nprocs BLACS grid, and compare eigenvalues
    (and the inverse Cholesky factor) against serial LAPACK.

    :param N: matrix dimension.
    :param seed: RNG seed, so every rank generates identical data.
    :param mprocs: rows of the BLACS process grid.
    :param nprocs: columns of the BLACS process grid.
    :param dtype: float for real-symmetric, complex for Hermitian input.
    """
    gen = np.random.RandomState(seed)
    grid = BlacsGrid(world, mprocs, nprocs)
    # epsilon scales the imaginary part added below; zero for real input.
    if (dtype==complex):
        epsilon = 1.0j
    else:
        epsilon = 0.0
    # Create descriptors for matrices on master:
    # N-by-N block size means the whole matrix lives on rank 0.
    glob = grid.new_descriptor(N, N, N, N)
    # print globA.asarray()
    # Populate matrices local to master:
    H0 = glob.zeros(dtype=dtype) + gen.rand(*glob.shape)
    S0 = glob.zeros(dtype=dtype) + gen.rand(*glob.shape)
    C0 = glob.empty(dtype=dtype)
    if rank == 0:
        # Complex case must have real numbers on the diagonal.
        # We make a simple complex Hermitian matrix below.
        H0 = H0 + epsilon * (0.1*np.tri(N, N, k= -N // nprocs) + 0.3*np.tri(N, N, k=-1))
        S0 = S0 + epsilon * (0.2*np.tri(N, N, k= -N // nprocs) + 0.4*np.tri(N, N, k=-1))
        # Make matrices symmetric (rk computes A*A^H into the output)
        rk(1.0, H0.copy(), 0.0, H0)
        rk(1.0, S0.copy(), 0.0, S0)
        # Overlap matrix must be semi-positive definite
        S0 = S0 + 50.0*np.eye(N, N, 0)
        # Hamiltonian is usually diagonally dominant
        H0 = H0 + 75.0*np.eye(N, N, 0)
        C0 = S0.copy()
    # Local result matrices (reference eigenvalues from serial LAPACK)
    W0 = np.empty((N),dtype=float)
    W0_g = np.empty((N),dtype=float)
    # Calculate eigenvalues with serial LAPACK on rank 0 only
    if rank == 0:
        diagonalize(H0.copy(), W0)
        general_diagonalize(H0.copy(), W0_g, S0.copy())
        inverse_cholesky(C0) # result returned in lower triangle
        # tri2full(C0) # symmetrize
    assert glob.check(H0) and glob.check(S0) and glob.check(C0)
    # Create distributed destriptors with various block sizes:
    dist = grid.new_descriptor(N, N, 8, 8)
    # Distributed matrices:
    # We can use empty here, but end up with garbage on
    # on the other half of the triangle when we redistribute.
    # This is fine because ScaLAPACK does not care.
    H = dist.empty(dtype=dtype)
    S = dist.empty(dtype=dtype)
    Z = dist.empty(dtype=dtype)
    C = dist.empty(dtype=dtype)
    # Eigenvalues are non-BLACS matrices
    W = np.empty((N), dtype=float)
    W_dc = np.empty((N), dtype=float)
    W_mr3 = np.empty((N), dtype=float)
    W_g = np.empty((N), dtype=float)
    W_g_dc = np.empty((N), dtype=float)
    W_g_mr3 = np.empty((N), dtype=float)
    # Scatter the rank-0 matrices onto the 8x8-blocked distributed layout.
    Glob2dist = Redistributor(world, glob, dist)
    Glob2dist.redistribute(H0, H, uplo='L')
    Glob2dist.redistribute(S0, S, uplo='L')
    Glob2dist.redistribute(S0, C, uplo='L') # C0 was previously overwritten
    # we don't test the expert drivers anymore since there
    # might be a buffer overflow error
    ## scalapack_diagonalize_ex(dist, H.copy(), Z, W, 'L')
    scalapack_diagonalize_dc(dist, H.copy(), Z, W_dc, 'L')
    ## scalapack_diagonalize_mr3(dist, H.copy(), Z, W_mr3, 'L')
    ## scalapack_general_diagonalize_ex(dist, H.copy(), S.copy(), Z, W_g, 'L')
    scalapack_general_diagonalize_dc(dist, H.copy(), S.copy(), Z, W_g_dc, 'L')
    ## scalapack_general_diagonalize_mr3(dist, H.copy(), S.copy(), Z, W_g_mr3, 'L')
    scalapack_inverse_cholesky(dist, C, 'L')
    # Undo redistribute: gather C back to rank 0 for comparison with C0
    C_test = glob.empty(dtype=dtype)
    Dist2glob = Redistributor(world, dist, glob)
    Dist2glob.redistribute(C, C_test)
    if rank == 0:
        ## diag_ex_err = abs(W - W0).max()
        diag_dc_err = abs(W_dc - W0).max()
        ## diag_mr3_err = abs(W_mr3 - W0).max()
        ## general_diag_ex_err = abs(W_g - W0_g).max()
        general_diag_dc_err = abs(W_g_dc - W0_g).max()
        ## general_diag_mr3_err = abs(W_g_mr3 - W0_g).max()
        inverse_chol_err = abs(C_test-C0).max()
        ## print 'diagonalize ex err', diag_ex_err
        print 'diagonalize dc err', diag_dc_err
        ## print 'diagonalize mr3 err', diag_mr3_err
        ## print 'general diagonalize ex err', general_diag_ex_err
        print 'general diagonalize dc err', general_diag_dc_err
        ## print 'general diagonalize mr3 err', general_diag_mr3_err
        print 'inverse chol err', inverse_chol_err
    else:
        # Non-root ranks contribute zero so the world.sum below works.
        ## diag_ex_err = 0.0
        diag_dc_err = 0.0
        ## diag_mr3_err = 0.0
        ## general_diag_ex_err = 0.0
        general_diag_dc_err = 0.0
        ## general_diag_mr3_err = 0.0
        inverse_chol_err = 0.0
    # We don't like exceptions on only one cpu: broadcast errors via sum
    # so every rank asserts (and fails) together.
    ## diag_ex_err = world.sum(diag_ex_err)
    diag_dc_err = world.sum(diag_dc_err)
    ## diag_mr3_err = world.sum(diag_mr3_err)
    ## general_diag_ex_err = world.sum(general_diag_ex_err)
    general_diag_dc_err = world.sum(general_diag_dc_err)
    ## general_diag_mr3_err = world.sum(general_diag_mr3_err)
    inverse_chol_err = world.sum(inverse_chol_err)
    ## assert diag_ex_err < tol
    assert diag_dc_err < tol
    ## assert diag_mr3_err < tol
    ## assert general_diag_ex_err < tol
    assert general_diag_dc_err < tol
    ## assert general_diag_mr3_err < tol
    assert inverse_chol_err < tol
# Run the test when executed directly (or under gpaw-python's __builtin__).
if __name__ in ('__main__', '__builtin__'):
    if compiled_with_sl():
        # Exercise both the real-symmetric and the Hermitian code paths.
        for _dtype in (float, complex):
            main(dtype=_dtype)
    else:
        print('Not built with ScaLAPACK. Test does not apply.')
|
robwarm/gpaw-symm
|
gpaw/test/parallel/scalapack.py
|
Python
|
gpl-3.0
| 6,034
|
[
"GPAW"
] |
e7a5267ff96d1e628440b394a91024a65b4b51d76def2c15839a94f2085a0992
|
# sql/elements.py
# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Core SQL expression elements, including :class:`_expression.ClauseElement`,
:class:`_expression.ColumnElement`, and derived classes.
"""
from __future__ import unicode_literals
import itertools
import numbers
import operator
import re
from . import operators
from . import type_api
from .annotation import Annotated
from .base import _generative
from .base import Executable
from .base import Immutable
from .base import NO_ARG
from .base import PARSE_AUTOCOMMIT
from .visitors import cloned_traverse
from .visitors import traverse
from .visitors import Visitable
from .. import exc
from .. import inspection
from .. import util
def _clone(element, **kw):
    """Shallow-copy *element* via its own ``_clone()`` hook.

    ``**kw`` is accepted for call-site compatibility (traversal helpers
    pass extra flags) but intentionally ignored here.
    """
    return element._clone()
def _document_text_coercion(paramname, meth_rst, param_rst):
    """Build a docstring decorator that appends a trusted-SQL-text warning
    for *paramname* to the documented method."""
    warning = (
        ".. warning:: "
        "The %s argument to %s can be passed as a Python string argument, "
        "which will be treated "
        "as **trusted SQL text** and rendered as given. **DO NOT PASS "
        "UNTRUSTED INPUT TO THIS PARAMETER**."
    ) % (param_rst, meth_rst)
    return util.add_parameter_text(paramname, warning)
def collate(expression, collation):
    """Return the clause ``expression COLLATE collation``.

    e.g.::

        collate(mycolumn, 'utf8_bin')

    produces::

        mycolumn COLLATE utf8_bin

    The collation expression is also quoted if it is a case sensitive
    identifier, e.g. contains uppercase characters.

    .. versionchanged:: 1.2 quoting is automatically applied to COLLATE
       expressions if they are case sensitive.

    """
    # Coerce plain Python values on the left side into a bound expression;
    # the result keeps that expression's type.
    coerced = _literal_as_binds(expression)
    collation_clause = CollationClause(collation)
    return BinaryExpression(
        coerced, collation_clause, operators.collate, type_=coerced.type
    )
def between(expr, lower_bound, upper_bound, symmetric=False):
    """Produce a ``BETWEEN`` predicate clause.
    E.g.::
        from sqlalchemy import between
        stmt = select([users_table]).where(between(users_table.c.id, 5, 7))
    Would produce SQL resembling::
        SELECT id, name FROM user WHERE id BETWEEN :id_1 AND :id_2
    The :func:`.between` function is a standalone version of the
    :meth:`_expression.ColumnElement.between` method available on all
    SQL expressions, as in::
        stmt = select([users_table]).where(users_table.c.id.between(5, 7))
    All arguments passed to :func:`.between`, including the left side
    column expression, are coerced from Python scalar values if a
    the value is not a :class:`_expression.ColumnElement` subclass.
    For example,
    three fixed values can be compared as in::
        print(between(5, 3, 7))
    Which would produce::
        :param_1 BETWEEN :param_2 AND :param_3
    :param expr: a column expression, typically a
     :class:`_expression.ColumnElement`
     instance or alternatively a Python scalar expression to be coerced
     into a column expression, serving as the left side of the ``BETWEEN``
     expression.
    :param lower_bound: a column or Python scalar expression serving as the
     lower bound of the right side of the ``BETWEEN`` expression.
    :param upper_bound: a column or Python scalar expression serving as the
     upper bound of the right side of the ``BETWEEN`` expression.
    :param symmetric: if True, will render " BETWEEN SYMMETRIC ". Note
     that not all databases support this syntax.
    .. versionadded:: 0.9.5
    .. seealso::
        :meth:`_expression.ColumnElement.between`
    """
    # Coerce the left side only; the bounds are coerced inside
    # ColumnElement.between() against the left side's type.
    expr = _literal_as_binds(expr)
    return expr.between(lower_bound, upper_bound, symmetric=symmetric)
def literal(value, type_=None):
    r"""Return a literal clause, bound to a bind parameter.
    Literal clauses are created automatically when non-
    :class:`_expression.ClauseElement` objects (such as strings, ints, dates,
    etc.) are
    used in a comparison operation with a :class:`_expression.ColumnElement`
    subclass,
    such as a :class:`~sqlalchemy.schema.Column` object. Use this function
    to force the generation of a literal clause, which will be created as a
    :class:`BindParameter` with a bound value.
    :param value: the value to be bound. Can be any Python object supported by
    the underlying DB-API, or is translatable via the given type argument.
    :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine` which
    will provide bind-parameter translation for this literal.
    """
    # unique=True gives each literal() an anonymous, collision-free
    # bind-parameter name in the compiled statement.
    return BindParameter(None, value, type_=type_, unique=True)
def outparam(key, type_=None):
    """Create an 'OUT' parameter for usage in functions (stored procedures),
    for databases which support them.
    The ``outparam`` can be used like a regular function parameter.
    The "output" value will be available from the
    :class:`~sqlalchemy.engine.ResultProxy` object via its ``out_parameters``
    attribute, which returns a dictionary containing the values.
    """
    # isoutparam marks the bind for OUT-parameter handling by the dialect;
    # unique=False keeps the caller-supplied key as the parameter name.
    return BindParameter(key, None, type_=type_, unique=False, isoutparam=True)
def not_(clause):
    """Return a negation of the given clause, i.e. ``NOT(clause)``.
    The ``~`` operator is also overloaded on all
    :class:`_expression.ColumnElement` subclasses to produce the
    same result.
    """
    # Coerce plain Python values to bind parameters, then apply the
    # inversion operator (dispatches to the element's _negate()).
    return operators.inv(_literal_as_binds(clause))
@inspection._self_inspects
class ClauseElement(Visitable):
    """Base class for elements of a programmatically constructed SQL
    expression.
    """
    __visit_name__ = "clause"
    # Shared default annotation mapping; annotated copies get their own.
    _annotations = {}
    # True on executable constructs; gates _execute_on_connection below.
    supports_execution = False
    _from_objects = []
    bind = None
    # Back-pointer to the element this one was cloned from (see _clone).
    _is_clone_of = None
    is_selectable = False
    is_clause_element = True
    # Friendly description used by __repr__; None falls back to object repr.
    description = None
    _order_by_label_element = None
    _is_from_container = False
    def _clone(self):
        """Create a shallow copy of this ClauseElement.
        This method may be used by a generative API. Its also used as
        part of the "deep" copy afforded by a traversal that combines
        the _copy_internals() method.
        """
        c = self.__class__.__new__(self.__class__)
        c.__dict__ = self.__dict__.copy()
        # Reset memoized properties so the clone recomputes them lazily.
        ClauseElement._cloned_set._reset(c)
        ColumnElement.comparator._reset(c)
        # this is a marker that helps to "equate" clauses to each other
        # when a Select returns its list of FROM clauses. the cloning
        # process leaves around a lot of remnants of the previous clause
        # typically in the form of column expressions still attached to the
        # old table.
        c._is_clone_of = self
        return c
    @property
    def _constructor(self):
        """return the 'constructor' for this ClauseElement.
        This is for the purposes for creating a new object of
        this type. Usually, its just the element's __class__.
        However, the "Annotated" version of the object overrides
        to return the class of its proxied element.
        """
        return self.__class__
    @util.memoized_property
    def _cloned_set(self):
        """Return the set consisting all cloned ancestors of this
        ClauseElement.
        Includes this ClauseElement. This accessor tends to be used for
        FromClause objects to identify 'equivalent' FROM clauses, regardless
        of transformative operations.
        """
        s = util.column_set()
        f = self
        # note this creates a cycle, asserted in test_memusage. however,
        # turning this into a plain @property adds tends of thousands of method
        # calls to Core / ORM performance tests, so the small overhead
        # introduced by the relatively small amount of short term cycles
        # produced here is preferable
        while f is not None:
            s.add(f)
            f = f._is_clone_of
        return s
    def __getstate__(self):
        # Drop the clone back-pointer when pickling; it is cache-like
        # state that would otherwise pickle whole clone chains.
        d = self.__dict__.copy()
        d.pop("_is_clone_of", None)
        return d
    def _annotate(self, values):
        """Return a copy of this ClauseElement with annotations
        updated by the given dictionary.
        """
        return Annotated(self, values)
    def _with_annotations(self, values):
        """Return a copy of this ClauseElement with annotations
        replaced by the given dictionary.
        """
        return Annotated(self, values)
    def _deannotate(self, values=None, clone=False):
        """Return a copy of this :class:`_expression.ClauseElement`
        with annotations
        removed.
        :param values: optional tuple of individual values
        to remove.
        """
        if clone:
            # clone is used when we are also copying
            # the expression for a deep deannotation
            return self._clone()
        else:
            # if no clone, since we have no annotations we return
            # self
            return self
    def _execute_on_connection(self, connection, multiparams, params):
        # Only executable constructs (supports_execution=True) may be run.
        if self.supports_execution:
            return connection._execute_clauseelement(self, multiparams, params)
        else:
            raise exc.ObjectNotExecutableError(self)
    def unique_params(self, *optionaldict, **kwargs):
        """Return a copy with :func:`_expression.bindparam` elements
        replaced.
        Same functionality as :meth:`_expression.ClauseElement.params`,
        except adds `unique=True`
        to affected bind parameters so that multiple statements can be
        used.
        """
        return self._params(True, optionaldict, kwargs)
    def params(self, *optionaldict, **kwargs):
        """Return a copy with :func:`_expression.bindparam` elements
        replaced.
        Returns a copy of this ClauseElement with
        :func:`_expression.bindparam`
        elements replaced with values taken from the given dictionary::
        >>> clause = column('x') + bindparam('foo')
        >>> print(clause.compile().params)
        {'foo':None}
        >>> print(clause.params({'foo':7}).compile().params)
        {'foo':7}
        """
        return self._params(False, optionaldict, kwargs)
    def _params(self, unique, optionaldict, kwargs):
        # Shared implementation behind params() / unique_params().
        if len(optionaldict) == 1:
            kwargs.update(optionaldict[0])
        elif len(optionaldict) > 1:
            raise exc.ArgumentError(
                "params() takes zero or one positional dictionary argument"
            )
        def visit_bindparam(bind):
            if bind.key in kwargs:
                bind.value = kwargs[bind.key]
                # a concrete value was supplied, so the parameter is no
                # longer "required" at execution time
                bind.required = False
            if unique:
                bind._convert_to_unique()
        return cloned_traverse(self, {}, {"bindparam": visit_bindparam})
    def compare(self, other, **kw):
        r"""Compare this :class:`_expression.ClauseElement` to
        the given :class:`_expression.ClauseElement`.
        Subclasses should override the default behavior, which is a
        straight identity comparison.
        \**kw are arguments consumed by subclass ``compare()`` methods and
        may be used to modify the criteria for comparison
        (see :class:`_expression.ColumnElement`).
        """
        return self is other
    def _copy_internals(self, clone=_clone, **kw):
        """Reassign internal elements to be clones of themselves.
        Called during a copy-and-traverse operation on newly
        shallow-copied elements to create a deep copy.
        The given clone function should be used, which may be applying
        additional transformations to the element (i.e. replacement
        traversal, cloned traversal, annotations).
        """
        pass
    def get_children(self, **kwargs):
        r"""Return immediate child elements of this
        :class:`_expression.ClauseElement`.
        This is used for visit traversal.
        \**kwargs may contain flags that change the collection that is
        returned, for example to return a subset of items in order to
        cut down on larger traversals, or to return child items from a
        different context (such as schema-level collections instead of
        clause-level).
        """
        return []
    def self_group(self, against=None):
        """Apply a 'grouping' to this :class:`_expression.ClauseElement`.
        This method is overridden by subclasses to return a "grouping"
        construct, i.e. parenthesis.   In particular it's used by "binary"
        expressions to provide a grouping around themselves when placed into a
        larger expression, as well as by :func:`_expression.select`
        constructs when placed into the FROM clause of another
        :func:`_expression.select`.  (Note that subqueries should be
        normally created using the :meth:`_expression.Select.alias` method,
        as many
        platforms require nested SELECT statements to be named).
        As expressions are composed together, the application of
        :meth:`self_group` is automatic - end-user code should never
        need to use this method directly.  Note that SQLAlchemy's
        clause constructs take operator precedence into account -
        so parenthesis might not be needed, for example, in
        an expression like ``x OR (y AND z)`` - AND takes precedence
        over OR.
        The base :meth:`self_group` method of
        :class:`_expression.ClauseElement`
        just returns self.
        """
        return self
    @util.dependencies("sqlalchemy.engine.default")
    def compile(self, default, bind=None, dialect=None, **kw):
        """Compile this SQL expression.
        The return value is a :class:`~.Compiled` object.
        Calling ``str()`` or ``unicode()`` on the returned value will yield a
        string representation of the result. The
        :class:`~.Compiled` object also can return a
        dictionary of bind parameter names and values
        using the ``params`` accessor.
        :param bind: An ``Engine`` or ``Connection`` from which a
        ``Compiled`` will be acquired. This argument takes precedence over
        this :class:`_expression.ClauseElement`'s bound engine, if any.
        :param column_keys: Used for INSERT and UPDATE statements, a list of
        column names which should be present in the VALUES clause of the
        compiled statement. If ``None``, all columns from the target table
        object are rendered.
        :param dialect: A ``Dialect`` instance from which a ``Compiled``
        will be acquired. This argument takes precedence over the `bind`
        argument as well as this :class:`_expression.ClauseElement`
        's bound engine,
        if any.
        :param inline: Used for INSERT statements, for a dialect which does
        not support inline retrieval of newly generated primary key
        columns, will force the expression used to create the new primary
        key value to be rendered inline within the INSERT statement's
        VALUES clause. This typically refers to Sequence execution but may
        also refer to any server-side default generation function
        associated with a primary key `Column`.
        :param compile_kwargs: optional dictionary of additional parameters
        that will be passed through to the compiler within all "visit"
        methods. This allows any custom flag to be passed through to
        a custom compilation construct, for example. It is also used
        for the case of passing the ``literal_binds`` flag through::
            from sqlalchemy.sql import table, column, select
            t = table('t', column('x'))
            s = select([t]).where(t.c.x == 5)
            print(s.compile(compile_kwargs={"literal_binds": True}))
        .. versionadded:: 0.9.0
        .. seealso::
            :ref:`faq_sql_expression_string`
        """
        if not dialect:
            if bind:
                dialect = bind.dialect
            elif self.bind:
                dialect = self.bind.dialect
                bind = self.bind
            else:
                # no engine/dialect anywhere: fall back to the generic
                # string-compilation dialect
                dialect = default.StrCompileDialect()
        return self._compiler(dialect, bind=bind, **kw)
    def _compiler(self, dialect, **kw):
        """Return a compiler appropriate for this ClauseElement, given a
        Dialect."""
        return dialect.statement_compiler(dialect, self, **kw)
    def __str__(self):
        # Python 3 returns str directly; Python 2 must encode the unicode
        # compile result to a bytestring.
        if util.py3k:
            return str(self.compile())
        else:
            return unicode(self.compile()).encode(  # noqa
                "ascii", "backslashreplace"
            )  # noqa
    @util.deprecated(
        "0.9",
        "The :meth:`_expression.ClauseElement.__and__` "
        "method is deprecated and will "
        "be removed in a future release. Conjunctions should only be "
        "used from a :class:`_expression.ColumnElement` subclass, e.g. "
        ":meth:`_expression.ColumnElement.__and__`.",
    )
    def __and__(self, other):
        """'and' at the ClauseElement level."""
        return and_(self, other)
    @util.deprecated(
        "0.9",
        "The :meth:`_expression.ClauseElement.__or__` "
        "method is deprecated and will "
        "be removed in a future release. Conjunctions should only be "
        "used from a :class:`_expression.ColumnElement` subclass, e.g. "
        ":meth:`_expression.ColumnElement.__or__`.",
    )
    def __or__(self, other):
        """'or' at the ClauseElement level."""
        return or_(self, other)
    def __invert__(self):
        # Prefer a pre-built negation if the construct supplies one
        # (e.g. a paired operator); otherwise build a NOT expression.
        if hasattr(self, "negation_clause"):
            return self.negation_clause
        else:
            return self._negate()
    def _negate(self):
        return UnaryExpression(
            self.self_group(against=operators.inv),
            operator=operators.inv,
            negate=None,
        )
    def __bool__(self):
        # SQL expressions have no Python truth value; comparisons build
        # new expression objects instead of evaluating.
        raise TypeError("Boolean value of this clause is not defined")
    # Python 2 spelling of __bool__.
    __nonzero__ = __bool__
    def __repr__(self):
        friendly = self.description
        if friendly is None:
            return object.__repr__(self)
        else:
            return "<%s.%s at 0x%x; %s>" % (
                self.__module__,
                self.__class__.__name__,
                id(self),
                friendly,
            )
class ColumnElement(operators.ColumnOperators, ClauseElement):
"""Represent a column-oriented SQL expression suitable for usage in the
"columns" clause, WHERE clause etc. of a statement.
While the most familiar kind of :class:`_expression.ColumnElement` is the
:class:`_schema.Column` object, :class:`_expression.ColumnElement`
serves as the basis
for any unit that may be present in a SQL expression, including
the expressions themselves, SQL functions, bound parameters,
literal expressions, keywords such as ``NULL``, etc.
:class:`_expression.ColumnElement`
is the ultimate base class for all such elements.
A wide variety of SQLAlchemy Core functions work at the SQL expression
level, and are intended to accept instances of
:class:`_expression.ColumnElement` as
arguments. These functions will typically document that they accept a
"SQL expression" as an argument. What this means in terms of SQLAlchemy
usually refers to an input which is either already in the form of a
:class:`_expression.ColumnElement` object,
or a value which can be **coerced** into
one. The coercion rules followed by most, but not all, SQLAlchemy Core
functions with regards to SQL expressions are as follows:
* a literal Python value, such as a string, integer or floating
point value, boolean, datetime, ``Decimal`` object, or virtually
any other Python object, will be coerced into a "literal bound
value". This generally means that a :func:`.bindparam` will be
produced featuring the given value embedded into the construct; the
resulting :class:`.BindParameter` object is an instance of
:class:`_expression.ColumnElement`.
The Python value will ultimately be sent
to the DBAPI at execution time as a parameterized argument to the
``execute()`` or ``executemany()`` methods, after SQLAlchemy
type-specific converters (e.g. those provided by any associated
:class:`.TypeEngine` objects) are applied to the value.
* any special object value, typically ORM-level constructs, which
feature a method called ``__clause_element__()``. The Core
expression system looks for this method when an object of otherwise
unknown type is passed to a function that is looking to coerce the
argument into a :class:`_expression.ColumnElement` expression. The
``__clause_element__()`` method, if present, should return a
:class:`_expression.ColumnElement` instance. The primary use of
``__clause_element__()`` within SQLAlchemy is that of class-bound
attributes on ORM-mapped classes; a ``User`` class which contains a
mapped attribute named ``.name`` will have a method
``User.name.__clause_element__()`` which when invoked returns the
:class:`_schema.Column`
called ``name`` associated with the mapped table.
* The Python ``None`` value is typically interpreted as ``NULL``,
which in SQLAlchemy Core produces an instance of :func:`.null`.
A :class:`_expression.ColumnElement` provides the ability to generate new
:class:`_expression.ColumnElement`
objects using Python expressions. This means that Python operators
such as ``==``, ``!=`` and ``<`` are overloaded to mimic SQL operations,
and allow the instantiation of further :class:`_expression.ColumnElement`
instances
which are composed from other, more fundamental
:class:`_expression.ColumnElement`
objects. For example, two :class:`.ColumnClause` objects can be added
together with the addition operator ``+`` to produce
a :class:`.BinaryExpression`.
Both :class:`.ColumnClause` and :class:`.BinaryExpression` are subclasses
of :class:`_expression.ColumnElement`::
>>> from sqlalchemy.sql import column
>>> column('a') + column('b')
<sqlalchemy.sql.expression.BinaryExpression object at 0x101029dd0>
>>> print(column('a') + column('b'))
a + b
.. seealso::
:class:`_schema.Column`
:func:`_expression.column`
"""
__visit_name__ = "column_element"
primary_key = False
foreign_keys = []
_proxies = ()
_label = None
"""The named label that can be used to target
this column in a result set.
This label is almost always the label used when
rendering <expr> AS <label> in a SELECT statement. It also
refers to a name that this column expression can be located from
in a result set.
For a regular Column bound to a Table, this is typically the label
<tablename>_<columnname>. For other constructs, different rules
may apply, such as anonymized labels and others.
"""
key = None
"""The 'key' that in some circumstances refers to this object in a
Python namespace.
This typically refers to the "key" of the column as present in the
``.c`` collection of a selectable, e.g. ``sometable.c["somekey"]`` would
return a :class:`_schema.Column` with a ``.key`` of "somekey".
"""
_key_label = None
"""A label-based version of 'key' that in some circumstances refers
to this object in a Python namespace.
_key_label comes into play when a select() statement is constructed with
apply_labels(); in this case, all Column objects in the ``.c`` collection
are rendered as <tablename>_<columnname> in SQL; this is essentially the
value of ._label. But to locate those columns in the ``.c`` collection,
the name is along the lines of <tablename>_<key>; that's the typical
value of .key_label.
"""
_render_label_in_columns_clause = True
"""A flag used by select._columns_plus_names that helps to determine
we are actually going to render in terms of "SELECT <col> AS <label>".
This flag can be returned as False for some Column objects that want
to be rendered as simple "SELECT <col>"; typically columns that don't have
any parent table and are named the same as what the label would be
in any case.
"""
_resolve_label = None
"""The name that should be used to identify this ColumnElement in a
select() object when "label resolution" logic is used; this refers
to using a string name in an expression like order_by() or group_by()
that wishes to target a labeled expression in the columns clause.
The name is distinct from that of .name or ._label to account for the case
where anonymizing logic may be used to change the name that's actually
rendered at compile time; this attribute should hold onto the original
name that was user-assigned when producing a .label() construct.
"""
_allow_label_resolve = True
"""A flag that can be flipped to prevent a column from being resolvable
by string label name."""
_is_implicitly_boolean = False
_alt_names = ()
def self_group(self, against=None):
if (
against in (operators.and_, operators.or_, operators._asbool)
and self.type._type_affinity is type_api.BOOLEANTYPE._type_affinity
):
return AsBoolean(self, operators.istrue, operators.isfalse)
elif against in (operators.any_op, operators.all_op):
return Grouping(self)
else:
return self
def _negate(self):
if self.type._type_affinity is type_api.BOOLEANTYPE._type_affinity:
return AsBoolean(self, operators.isfalse, operators.istrue)
else:
return super(ColumnElement, self)._negate()
@util.memoized_property
def type(self):
return type_api.NULLTYPE
@util.memoized_property
def comparator(self):
try:
comparator_factory = self.type.comparator_factory
except AttributeError as err:
util.raise_(
TypeError(
"Object %r associated with '.type' attribute "
"is not a TypeEngine class or object" % self.type
),
replace_context=err,
)
else:
return comparator_factory(self)
def __getattr__(self, key):
try:
return getattr(self.comparator, key)
except AttributeError as err:
util.raise_(
AttributeError(
"Neither %r object nor %r object has an attribute %r"
% (
type(self).__name__,
type(self.comparator).__name__,
key,
)
),
replace_context=err,
)
def operate(self, op, *other, **kwargs):
return op(self.comparator, *other, **kwargs)
def reverse_operate(self, op, other, **kwargs):
return op(other, self.comparator, **kwargs)
def _bind_param(self, operator, obj, type_=None):
return BindParameter(
None,
obj,
_compared_to_operator=operator,
type_=type_,
_compared_to_type=self.type,
unique=True,
)
@property
def expression(self):
"""Return a column expression.
Part of the inspection interface; returns self.
"""
return self
@property
def _select_iterable(self):
return (self,)
@util.memoized_property
def base_columns(self):
return util.column_set(c for c in self.proxy_set if not c._proxies)
@util.memoized_property
def proxy_set(self):
s = util.column_set([self])
for c in self._proxies:
s.update(c.proxy_set)
return s
def _uncached_proxy_set(self):
"""An 'uncached' version of proxy set.
This is so that we can read annotations from the list of columns
without breaking the caching of the above proxy_set.
"""
s = util.column_set([self])
for c in self._proxies:
s.update(c._uncached_proxy_set())
return s
def shares_lineage(self, othercolumn):
"""Return True if the given :class:`_expression.ColumnElement`
has a common ancestor to this :class:`_expression.ColumnElement`."""
return bool(self.proxy_set.intersection(othercolumn.proxy_set))
def _compare_name_for_result(self, other):
    """Return True if the given column element compares to this one
    by name when targeting within a result row."""
    # Both sides must actually carry a .name for a name match to be
    # meaningful.
    if not hasattr(other, "name"):
        return False
    if not hasattr(self, "name"):
        return False
    return other.name == self.name
def _make_proxy(
    self, selectable, name=None, name_is_truncatable=False, **kw
):
    """Create a new :class:`_expression.ColumnElement` representing this
    :class:`_expression.ColumnElement` as it appears in the select list
    of a descending selectable.
    """
    if name is None:
        # No explicit name given: label anonymously, and choose a
        # collection key from .key, or the element's string form,
        # falling back to the anonymous label when the element has no
        # standalone compiled form.
        name = self.anon_label
        if self.key:
            key = self.key
        else:
            try:
                key = str(self)
            except exc.UnsupportedCompilationError:
                key = self.anon_label
    else:
        key = name
    # The proxy is a plain ColumnClause bound to the target selectable,
    # carrying this element's type if it has one.
    co = ColumnClause(
        _as_truncated(name) if name_is_truncatable else name,
        type_=getattr(self, "type", None),
        _selectable=selectable,
    )
    co._proxies = [self]
    if selectable._is_clone_of is not None:
        # Link the proxy back to the corresponding column on the
        # original (pre-clone) selectable.
        co._is_clone_of = selectable._is_clone_of.columns.get(key)
    selectable._columns[key] = co
    return co
def compare(self, other, use_proxies=False, equivalents=None, **kw):
    """Compare this ColumnElement to another.

    Special arguments understood:

    :param use_proxies: when True, consider two columns that
      share a common base column as equivalent (i.e. shares_lineage())

    :param equivalents: a dictionary of columns as keys mapped to sets
      of columns. If the given "other" column is present in this
      dictionary, if any of the columns in the corresponding set() pass
      the comparison test, the result is True. This is used to expand the
      comparison to other columns that may be known to be equivalent to
      this one via foreign key or other criterion.

    """
    to_compare = (other,)
    if equivalents and other in equivalents:
        # widen the candidate set with known-equivalent columns
        to_compare = equivalents[other].union(to_compare)
    # for/else: succeed if ANY candidate matches; False only after all
    # candidates have been examined without a match.
    for oth in to_compare:
        if use_proxies and self.shares_lineage(oth):
            return True
        elif hash(oth) == hash(self):
            return True
    else:
        return False
def cast(self, type_):
    """Produce a type cast, i.e. ``CAST(<expression> AS <type>)``.

    Shorthand for applying :func:`_expression.cast` to this element.

    .. versionadded:: 1.0.7

    .. seealso::

        :ref:`coretutorial_casts`

        :func:`_expression.cast`

        :func:`_expression.type_coerce`

    """
    return Cast(self, type_)
def label(self, name):
    """Produce a column label, i.e. ``<columnname> AS <name>``.

    Shorthand for applying :func:`_expression.label` to this element;
    passing ``None`` for *name* produces an anonymous label.
    """
    return Label(name, self, self.type)
@util.memoized_property
def anon_label(self):
    """Provides a constant 'anonymous label' for this ColumnElement.

    This is a label() expression which will be named at compile time.
    The same label() is returned each time ``anon_label`` is called so
    that expressions can reference ``anon_label`` multiple times,
    producing the same label name at compile time.

    The compiler uses this function automatically at compile time
    for expressions that are known to be 'unnamed' like binary
    expressions and function calls.

    """
    # Walk up to the originating (pre-clone) element so that all
    # clones of an expression share one anonymous-label identity.
    while self._is_clone_of is not None:
        self = self._is_clone_of
    # The "%(<id> <name>)s" format is the library-wide convention for
    # anonymous labels, resolved to a real name at compile time.
    return _anonymous_label(
        "%%(%d %s)s" % (id(self), getattr(self, "name", "anon"))
    )
class BindParameter(ColumnElement):
    r"""Represent a "bound expression".

    :class:`.BindParameter` is invoked explicitly using the
    :func:`.bindparam` function, as in::

        from sqlalchemy import bindparam

        stmt = select([users_table]).\
                    where(users_table.c.name == bindparam('username'))

    Detailed discussion of how :class:`.BindParameter` is used is
    at :func:`.bindparam`.

    .. seealso::

        :func:`.bindparam`

    """

    __visit_name__ = "bindparam"

    # True when this bind is generated by the INSERT/UPDATE "values"
    # machinery rather than by user code.
    _is_crud = False

    # populated by _with_expanding_in_types() when this bind expands
    # against a tuple inside an IN clause.
    _expanding_in_types = ()

    def __init__(
        self,
        key,
        value=NO_ARG,
        type_=None,
        unique=False,
        required=NO_ARG,
        quote=None,
        callable_=None,
        expanding=False,
        isoutparam=False,
        _compared_to_operator=None,
        _compared_to_type=None,
    ):
        r"""Produce a "bound expression".

        The return value is an instance of :class:`.BindParameter`; this
        is a :class:`_expression.ColumnElement`
        subclass which represents a so-called
        "placeholder" value in a SQL expression, the value of which is
        supplied at the point at which the statement is executed against a
        database connection.

        In SQLAlchemy, the :func:`.bindparam` construct has
        the ability to carry along the actual value that will be ultimately
        used at expression time.  In this way, it serves not just as
        a "placeholder" for eventual population, but also as a means of
        representing so-called "unsafe" values which should not be rendered
        directly in a SQL statement, but rather should be passed along
        to the :term:`DBAPI` as values which need to be correctly escaped
        and potentially handled for type-safety.

        When using :func:`.bindparam` explicitly, the use case is typically
        one of traditional deferment of parameters; the :func:`.bindparam`
        construct accepts a name which can then be referred to at execution
        time::

            from sqlalchemy import bindparam

            stmt = select([users_table]).\
                        where(users_table.c.name == bindparam('username'))

        The above statement, when rendered, will produce SQL similar to::

            SELECT id, name FROM user WHERE name = :username

        In order to populate the value of ``:username`` above, the value
        would typically be applied at execution time to a method
        like :meth:`_engine.Connection.execute`::

            result = connection.execute(stmt, username='wendy')

        Explicit use of :func:`.bindparam` is also common when producing
        UPDATE or DELETE statements that are to be invoked multiple times,
        where the WHERE criterion of the statement is to change on each
        invocation, such as::

            stmt = (users_table.update().
                    where(user_table.c.name == bindparam('username')).
                    values(fullname=bindparam('fullname'))
                    )

            connection.execute(
                stmt, [{"username": "wendy", "fullname": "Wendy Smith"},
                       {"username": "jack", "fullname": "Jack Jones"},
                       ]
            )

        SQLAlchemy's Core expression system makes wide use of
        :func:`.bindparam` in an implicit sense.   It is typical that Python
        literal values passed to virtually all SQL expression functions are
        coerced into fixed :func:`.bindparam` constructs.  For example, given
        a comparison operation such as::

            expr = users_table.c.name == 'Wendy'

        The above expression will produce a :class:`.BinaryExpression`
        construct, where the left side is the :class:`_schema.Column` object
        representing the ``name`` column, and the right side is a
        :class:`.BindParameter` representing the literal value::

            print(repr(expr.right))
            BindParameter('%(4327771088 name)s', 'Wendy', type_=String())

        The expression above will render SQL such as::

            user.name = :name_1

        Where the ``:name_1`` parameter name is an anonymous name.  The
        actual string ``Wendy`` is not in the rendered string, but is carried
        along where it is later used within statement execution.  If we
        invoke a statement like the following::

            stmt = select([users_table]).where(users_table.c.name == 'Wendy')
            result = connection.execute(stmt)

        We would see SQL logging output as::

            SELECT "user".id, "user".name
            FROM "user"
            WHERE "user".name = %(name_1)s
            {'name_1': 'Wendy'}

        Above, we see that ``Wendy`` is passed as a parameter to the database,
        while the placeholder ``:name_1`` is rendered in the appropriate form
        for the target database, in this case the PostgreSQL database.

        Similarly, :func:`.bindparam` is invoked automatically when working
        with :term:`CRUD` statements as far as the "VALUES" portion is
        concerned.   The :func:`_expression.insert` construct produces an
        ``INSERT`` expression which will, at statement execution time, generate
        bound placeholders based on the arguments passed, as in::

            stmt = users_table.insert()
            result = connection.execute(stmt, name='Wendy')

        The above will produce SQL output as::

            INSERT INTO "user" (name) VALUES (%(name)s)
            {'name': 'Wendy'}

        The :class:`_expression.Insert` construct, at
        compilation/execution time, rendered a single :func:`.bindparam`
        mirroring the column name ``name`` as a result of the single ``name``
        parameter we passed to the :meth:`_engine.Connection.execute` method.

        :param key:
          the key (e.g. the name) for this bind param.
          Will be used in the generated
          SQL statement for dialects that use named parameters.  This
          value may be modified when part of a compilation operation,
          if other :class:`BindParameter` objects exist with the same
          key, or if its length is too long and truncation is
          required.

        :param value:
          Initial value for this bind param.  Will be used at statement
          execution time as the value for this parameter passed to the
          DBAPI, if no other value is indicated to the statement execution
          method for this particular parameter name.  Defaults to ``None``.

        :param callable\_:
          A callable function that takes the place of "value".  The function
          will be called at statement execution time to determine the
          ultimate value.   Used for scenarios where the actual bind
          value cannot be determined at the point at which the clause
          construct is created, but embedded bind values are still desirable.

        :param type\_:
          A :class:`.TypeEngine` class or instance representing an optional
          datatype for this :func:`.bindparam`.  If not passed, a type
          may be determined automatically for the bind, based on the given
          value; for example, trivial Python types such as ``str``,
          ``int``, ``bool``
          may result in the :class:`.String`, :class:`.Integer` or
          :class:`.Boolean` types being automatically selected.

          The type of a :func:`.bindparam` is significant especially in that
          the type will apply pre-processing to the value before it is
          passed to the database.  For example, a :func:`.bindparam` which
          refers to a datetime value, and is specified as holding the
          :class:`.DateTime` type, may apply conversion needed to the
          value (such as stringification on SQLite) before passing the value
          to the database.

        :param unique:
          if True, the key name of this :class:`.BindParameter` will be
          modified if another :class:`.BindParameter` of the same name
          already has been located within the containing
          expression.  This flag is used generally by the internals
          when producing so-called "anonymous" bound expressions, it
          isn't generally applicable to explicitly-named :func:`.bindparam`
          constructs.

        :param required:
          If ``True``, a value is required at execution time.  If not passed,
          it defaults to ``True`` if neither :paramref:`.bindparam.value`
          or :paramref:`.bindparam.callable` were passed.  If either of these
          parameters are present, then :paramref:`.bindparam.required`
          defaults to ``False``.

        :param quote:
          True if this parameter name requires quoting and is not
          currently known as a SQLAlchemy reserved word; this currently
          only applies to the Oracle backend, where bound names must
          sometimes be quoted.

        :param isoutparam:
          if True, the parameter should be treated like a stored procedure
          "OUT" parameter.  This applies to backends such as Oracle which
          support OUT parameters.

        :param expanding:
          if True, this parameter will be treated as an "expanding" parameter
          at execution time; the parameter value is expected to be a sequence,
          rather than a scalar value, and the string SQL statement will
          be transformed on a per-execution basis to accommodate the sequence
          with a variable number of parameter slots passed to the DBAPI.
          This is to allow statement caching to be used in conjunction with
          an IN clause.

          .. seealso::

            :meth:`.ColumnOperators.in_`

            :ref:`baked_in` - with baked queries

          .. note:: The "expanding" feature does not support "executemany"-
             style parameter sets.

          .. versionadded:: 1.2

          .. versionchanged:: 1.3 the "expanding" bound parameter feature now
             supports empty lists.

        .. seealso::

            :ref:`coretutorial_bind_param`

            :ref:`coretutorial_insert_expressions`

            :func:`.outparam`

        """
        # A ColumnClause may be passed in place of a string key; adopt
        # its key and type.
        if isinstance(key, ColumnClause):
            type_ = key.type
            key = key.key
        # "required" defaults to True only when neither a value nor a
        # callable was supplied.
        if required is NO_ARG:
            required = value is NO_ARG and callable_ is None
        if value is NO_ARG:
            value = None
        if quote is not None:
            key = quoted_name(key, quote)
        if unique:
            # Anonymous-label format "%(<id> <name>)s"; the given key is
            # sanitized so it can safely participate in the label.
            self.key = _anonymous_label(
                "%%(%d %s)s"
                % (
                    id(self),
                    re.sub(r"[%\(\) \$]+", "_", key).strip("_")
                    if key is not None
                    else "param",
                )
            )
        else:
            self.key = key or _anonymous_label("%%(%d param)s" % id(self))
        # identifying key that won't change across
        # clones, used to identify the bind's logical
        # identity
        self._identifying_key = self.key
        # key that was passed in the first place, used to
        # generate new keys
        self._orig_key = key or "param"
        self.unique = unique
        self.value = value
        self.callable = callable_
        self.isoutparam = isoutparam
        self.required = required
        self.expanding = expanding
        # Resolve the bind's type: explicit type wins; otherwise coerce
        # against the compared-to type, or infer from the value.
        if type_ is None:
            if _compared_to_type is not None:
                self.type = _compared_to_type.coerce_compared_value(
                    _compared_to_operator, value
                )
            else:
                self.type = type_api._resolve_value_to_type(value)
        elif isinstance(type_, type):
            self.type = type_()
        else:
            self.type = type_

    def _with_expanding_in_types(self, types):
        """Return a copy of this :class:`.BindParameter` in
        the context of an expanding IN against a tuple.

        """
        cloned = self._clone()
        cloned._expanding_in_types = types
        return cloned

    def _with_value(self, value):
        """Return a copy of this :class:`.BindParameter` with the given value
        set.
        """
        cloned = self._clone()
        cloned.value = value
        cloned.callable = None
        cloned.required = False
        # If no type had been determined, infer one from the new value.
        if cloned.type is type_api.NULLTYPE:
            cloned.type = type_api._resolve_value_to_type(value)
        return cloned

    @property
    def effective_value(self):
        """Return the value of this bound parameter,
        taking into account if the ``callable`` parameter
        was set.

        The ``callable`` value will be evaluated
        and returned if present, else ``value``.

        """
        if self.callable:
            return self.callable()
        else:
            return self.value

    def _clone(self):
        c = ClauseElement._clone(self)
        if self.unique:
            # a unique bind gets a fresh anonymous key per clone,
            # derived from the originally-passed key
            c.key = _anonymous_label(
                "%%(%d %s)s" % (id(c), c._orig_key or "param")
            )
        return c

    def _convert_to_unique(self):
        # Promote a non-unique bind to unique, re-keying it anonymously.
        if not self.unique:
            self.unique = True
            self.key = _anonymous_label(
                "%%(%d %s)s" % (id(self), self._orig_key or "param")
            )

    def compare(self, other, **kw):
        """Compare this :class:`BindParameter` to the given
        clause."""

        return (
            isinstance(other, BindParameter)
            and self.type._compare_type_affinity(other.type)
            and self.value == other.value
            and self.callable == other.callable
        )

    def __getstate__(self):
        """Execute a deferred value for serialization purposes."""
        # Callables may not be picklable; resolve them to a concrete
        # value before serializing.
        d = self.__dict__.copy()
        v = self.value
        if self.callable:
            v = self.callable()
            d["callable"] = None
        d["value"] = v
        return d

    def __setstate__(self, state):
        # Unique binds regain a fresh anonymous key on unpickle, since
        # id(self) has changed.
        if state.get("unique", False):
            state["key"] = _anonymous_label(
                "%%(%d %s)s" % (id(self), state.get("_orig_key", "param"))
            )
        self.__dict__.update(state)

    def __repr__(self):
        return "BindParameter(%r, %r, type_=%r)" % (
            self.key,
            self.value,
            self.type,
        )
class TypeClause(ClauseElement):
    """Represent a bare datatype keyword inside a SQL statement.

    Used by the ``Case`` construct to render a type name.
    """

    __visit_name__ = "typeclause"

    def __init__(self, type_):
        # the TypeEngine to be rendered by the compiler
        self.type = type_
class TextClause(Executable, ClauseElement):
    """Represent a literal SQL text fragment.

    E.g.::

        from sqlalchemy import text

        t = text("SELECT * FROM users")
        result = connection.execute(t)

    The :class:`_expression.TextClause` construct is produced using the
    :func:`_expression.text`
    function; see that function for full documentation.

    .. seealso::

        :func:`_expression.text`

    """

    __visit_name__ = "textclause"

    # Matches ":name" bind markers, skipping "::" casts, escaped "\:"
    # and markers immediately preceded by a word character.
    _bind_params_regex = re.compile(r"(?<![:\w\x5c]):(\w+)(?!:)", re.UNICODE)
    _execution_options = Executable._execution_options.union(
        {"autocommit": PARSE_AUTOCOMMIT}
    )
    _is_implicitly_boolean = False

    def __and__(self, other):
        # support use in select.where(), query.filter()
        return and_(self, other)

    @property
    def _select_iterable(self):
        return (self,)

    @property
    def selectable(self):
        # allows text() to be considered by
        # _interpret_as_from
        return self

    _hide_froms = []

    # help in those cases where text() is
    # interpreted in a column expression situation
    key = _label = _resolve_label = None

    _allow_label_resolve = False

    def __init__(self, text, bind=None):
        self._bind = bind
        self._bindparams = {}

        def repl(m):
            # record a BindParameter for each ":name" found, leaving
            # the marker itself in place in the text
            self._bindparams[m.group(1)] = BindParameter(m.group(1))
            return ":%s" % m.group(1)

        # scan the string and search for bind parameter names, add them
        # to the list of bindparams
        self.text = self._bind_params_regex.sub(repl, text)

    @classmethod
    @util.deprecated_params(
        autocommit=(
            "0.6",
            "The :paramref:`_expression.text.autocommit` "
            "parameter is deprecated and "
            "will be removed in a future release. Please use the "
            ":paramref:`.Connection.execution_options.autocommit` parameter "
            "in conjunction with the :meth:`.Executable.execution_options` "
            "method.",
        ),
        bindparams=(
            "0.9",
            "The :paramref:`_expression.text.bindparams` parameter "
            "is deprecated and will be removed in a future release. Please "
            "refer to the :meth:`_expression.TextClause.bindparams` method.",
        ),
        typemap=(
            "0.9",
            "The :paramref:`_expression.text.typemap` parameter is "
            "deprecated and will be removed in a future release. Please "
            "refer to the :meth:`_expression.TextClause.columns` method.",
        ),
    )
    @_document_text_coercion("text", ":func:`.text`", ":paramref:`.text.text`")
    def _create_text(
        self, text, bind=None, bindparams=None, typemap=None, autocommit=None
    ):
        r"""Construct a new :class:`_expression.TextClause` clause,
        representing
        a textual SQL string directly.

        E.g.::

            from sqlalchemy import text

            t = text("SELECT * FROM users")
            result = connection.execute(t)

        The advantages :func:`_expression.text`
        provides over a plain string are
        backend-neutral support for bind parameters, per-statement
        execution options, as well as
        bind parameter and result-column typing behavior, allowing
        SQLAlchemy type constructs to play a role when executing
        a statement that is specified literally.  The construct can also
        be provided with a ``.c`` collection of column elements, allowing
        it to be embedded in other SQL expression constructs as a subquery.

        Bind parameters are specified by name, using the format ``:name``.
        E.g.::

            t = text("SELECT * FROM users WHERE id=:user_id")
            result = connection.execute(t, user_id=12)

        For SQL statements where a colon is required verbatim, as within
        an inline string, use a backslash to escape::

            t = text("SELECT * FROM users WHERE name='\:username'")

        The :class:`_expression.TextClause`
        construct includes methods which can
        provide information about the bound parameters as well as the column
        values which would be returned from the textual statement, assuming
        it's an executable SELECT type of statement.  The
        :meth:`_expression.TextClause.bindparams`
        method is used to provide bound
        parameter detail, and :meth:`_expression.TextClause.columns`
        method allows
        specification of return columns including names and types::

            t = text("SELECT * FROM users WHERE id=:user_id").\
                    bindparams(user_id=7).\
                    columns(id=Integer, name=String)

            for id, name in connection.execute(t):
                print(id, name)

        The :func:`_expression.text` construct is used in cases when
        a literal string SQL fragment is specified as part of a larger query,
        such as for the WHERE clause of a SELECT statement::

            s = select([users.c.id, users.c.name]).where(text("id=:user_id"))
            result = connection.execute(s, user_id=12)

        :func:`_expression.text` is also used for the construction
        of a full, standalone statement using plain text.
        As such, SQLAlchemy refers
        to it as an :class:`.Executable` object, and it supports
        the :meth:`Executable.execution_options` method.  For example,
        a :func:`_expression.text`
        construct that should be subject to "autocommit"
        can be set explicitly so using the
        :paramref:`.Connection.execution_options.autocommit` option::

            t = text("EXEC my_procedural_thing()").\
                    execution_options(autocommit=True)

        Note that SQLAlchemy's usual "autocommit" behavior applies to
        :func:`_expression.text` constructs implicitly - that is,
        statements which begin
        with a phrase such as ``INSERT``, ``UPDATE``, ``DELETE``,
        or a variety of other phrases specific to certain backends, will
        be eligible for autocommit if no transaction is in progress.

        :param text:
          the text of the SQL statement to be created.  Use ``:<param>``
          to specify bind parameters; they will be compiled to their
          engine-specific format.

        :param autocommit: whether or not to set the "autocommit" execution
          option for this :class:`_expression.TextClause` object.

        :param bind:
          an optional connection or engine to be used for this text query.

        :param bindparams:
          A list of :func:`.bindparam` instances used to
          provide information about parameters embedded in the statement.
          E.g.::

              stmt = text("SELECT * FROM table WHERE id=:id",
                        bindparams=[bindparam('id', value=5, type_=Integer)])

        :param typemap:
          A dictionary mapping the names of columns represented in the columns
          clause of a ``SELECT`` statement  to type objects.
          E.g.::

              stmt = text("SELECT * FROM table",
                            typemap={'id': Integer, 'name': String},
                        )

        .. seealso::

            :ref:`sqlexpression_text` - in the Core tutorial

            :ref:`orm_tutorial_literal_sql` - in the ORM tutorial

        """
        stmt = TextClause(text, bind=bind)
        # apply the deprecated constructor-style arguments through the
        # modern generative methods
        if bindparams:
            stmt = stmt.bindparams(*bindparams)
        if typemap:
            stmt = stmt.columns(**typemap)
        if autocommit is not None:
            stmt = stmt.execution_options(autocommit=autocommit)

        return stmt

    @_generative
    def bindparams(self, *binds, **names_to_values):
        """Establish the values and/or types of bound parameters within
        this :class:`_expression.TextClause` construct.

        Given a text construct such as::

            from sqlalchemy import text
            stmt = text("SELECT id, name FROM user WHERE name=:name "
                        "AND timestamp=:timestamp")

        the :meth:`_expression.TextClause.bindparams`
        method can be used to establish
        the initial value of ``:name`` and ``:timestamp``,
        using simple keyword arguments::

            stmt = stmt.bindparams(name='jack',
                        timestamp=datetime.datetime(2012, 10, 8, 15, 12, 5))

        Where above, new :class:`.BindParameter` objects
        will be generated with the names ``name`` and ``timestamp``, and
        values of ``jack`` and ``datetime.datetime(2012, 10, 8, 15, 12, 5)``,
        respectively.  The types will be
        inferred from the values given, in this case :class:`.String` and
        :class:`.DateTime`.

        When specific typing behavior is needed, the positional ``*binds``
        argument can be used in which to specify :func:`.bindparam` constructs
        directly.  These constructs must include at least the ``key``
        argument, then an optional value and type::

            from sqlalchemy import bindparam
            stmt = stmt.bindparams(
                            bindparam('name', value='jack', type_=String),
                            bindparam('timestamp', type_=DateTime)
                        )

        Above, we specified the type of :class:`.DateTime` for the
        ``timestamp`` bind, and the type of :class:`.String` for the ``name``
        bind.  In the case of ``name`` we also set the default value of
        ``"jack"``.

        Additional bound parameters can be supplied at statement execution
        time, e.g.::

            result = connection.execute(stmt,
                        timestamp=datetime.datetime(2012, 10, 8, 15, 12, 5))

        The :meth:`_expression.TextClause.bindparams`
        method can be called repeatedly,
        where it will re-use existing :class:`.BindParameter` objects to add
        new information.  For example, we can call
        :meth:`_expression.TextClause.bindparams`
        first with typing information, and a
        second time with value information, and it will be combined::

            stmt = text("SELECT id, name FROM user WHERE name=:name "
                        "AND timestamp=:timestamp")
            stmt = stmt.bindparams(
                bindparam('name', type_=String),
                bindparam('timestamp', type_=DateTime)
            )
            stmt = stmt.bindparams(
                name='jack',
                timestamp=datetime.datetime(2012, 10, 8, 15, 12, 5)
            )

        The :meth:`_expression.TextClause.bindparams`
        method also supports the concept of
        **unique** bound parameters.  These are parameters that are
        "uniquified" on name at statement compilation time, so that  multiple
        :func:`_expression.text`
        constructs may be combined together without the names
        conflicting.  To use this feature, specify the
        :paramref:`.BindParameter.unique` flag on each :func:`.bindparam`
        object::

            stmt1 = text("select id from table where name=:name").bindparams(
                bindparam("name", value='name1', unique=True)
            )
            stmt2 = text("select id from table where name=:name").bindparams(
                bindparam("name", value='name2', unique=True)
            )

            union = union_all(
                stmt1.columns(column("id")),
                stmt2.columns(column("id"))
            )

        The above statement will render as::

            select id from table where name=:name_1
            UNION ALL select id from table where name=:name_2

        .. versionadded:: 1.3.11  Added support for the
           :paramref:`.BindParameter.unique` flag to work with
           :func:`_expression.text`
           constructs.

        """
        # generative: mutate a copied _bindparams dict in place on the
        # cloned construct (no return value; @_generative handles that)
        self._bindparams = new_params = self._bindparams.copy()

        for bind in binds:
            try:
                # the regex used for text() currently will not match
                # a unique/anonymous key in any case, so use the _orig_key
                # so that a text() construct can support unique parameters
                existing = new_params[bind._orig_key]
            except KeyError as err:
                util.raise_(
                    exc.ArgumentError(
                        "This text() construct doesn't define a "
                        "bound parameter named %r" % bind._orig_key
                    ),
                    replace_context=err,
                )
            else:
                new_params[existing._orig_key] = bind

        for key, value in names_to_values.items():
            try:
                existing = new_params[key]
            except KeyError as err:
                util.raise_(
                    exc.ArgumentError(
                        "This text() construct doesn't define a "
                        "bound parameter named %r" % key
                    ),
                    replace_context=err,
                )
            else:
                # keyword form: keep the existing bind, replacing only
                # its value
                new_params[key] = existing._with_value(value)

    @util.dependencies("sqlalchemy.sql.selectable")
    def columns(self, selectable, *cols, **types):
        r"""Turn this :class:`_expression.TextClause` object into a
        :class:`.TextAsFrom`
        object that can be embedded into another statement.

        This function essentially bridges the gap between an entirely
        textual SELECT statement and the SQL expression language concept
        of a "selectable"::

            from sqlalchemy.sql import column, text

            stmt = text("SELECT id, name FROM some_table")
            stmt = stmt.columns(column('id'), column('name')).alias('st')

            stmt = select([mytable]).\
                    select_from(
                        mytable.join(stmt, mytable.c.name == stmt.c.name)
                    ).where(stmt.c.id > 5)

        Above, we pass a series of :func:`_expression.column` elements to the
        :meth:`_expression.TextClause.columns` method positionally.  These
        :func:`_expression.column`
        elements now become first class elements upon the :attr:`.TextAsFrom.c`
        column collection, just like any other selectable.

        The column expressions we pass to
        :meth:`_expression.TextClause.columns` may
        also be typed; when we do so, these :class:`.TypeEngine` objects become
        the effective return type of the column, so that SQLAlchemy's
        result-set-processing systems may be used on the return values.
        This is often needed for types such as date or boolean types, as well
        as for unicode processing on some dialect configurations::

            stmt = text("SELECT id, name, timestamp FROM some_table")
            stmt = stmt.columns(
                        column('id', Integer),
                        column('name', Unicode),
                        column('timestamp', DateTime)
                    )

            for id, name, timestamp in connection.execute(stmt):
                print(id, name, timestamp)

        As a shortcut to the above syntax, keyword arguments referring to
        types alone may be used, if only type conversion is needed::

            stmt = text("SELECT id, name, timestamp FROM some_table")
            stmt = stmt.columns(
                        id=Integer,
                        name=Unicode,
                        timestamp=DateTime
                    )

            for id, name, timestamp in connection.execute(stmt):
                print(id, name, timestamp)

        The positional form of :meth:`_expression.TextClause.columns`
        also provides the
        unique feature of **positional column targeting**, which is
        particularly useful when using the ORM with complex textual queries. If
        we specify the columns from our model to
        :meth:`_expression.TextClause.columns`,
        the result set will match to those columns positionally, meaning the
        name or origin of the column in the textual SQL doesn't matter::

            stmt = text("SELECT users.id, addresses.id, users.id, "
                 "users.name, addresses.email_address AS email "
                 "FROM users JOIN addresses ON users.id=addresses.user_id "
                 "WHERE users.id = 1").columns(
                    User.id,
                    Address.id,
                    Address.user_id,
                    User.name,
                    Address.email_address
                 )

            query = session.query(User).from_statement(stmt).options(
                contains_eager(User.addresses))

        .. versionadded:: 1.1 the :meth:`_expression.TextClause.columns`
           method now
           offers positional column targeting in the result set when
           the column expressions are passed purely positionally.

        The :meth:`_expression.TextClause.columns` method provides a direct
        route to calling :meth:`_expression.FromClause.alias` as well as
        :meth:`_expression.SelectBase.cte`
        against a textual SELECT statement::

            stmt = stmt.columns(id=Integer, name=String).cte('st')

            stmt = select([sometable]).where(sometable.c.id == stmt.c.id)

        .. versionadded:: 0.9.0 :func:`_expression.text`
           can now be converted into a
           fully featured "selectable" construct using the
           :meth:`_expression.TextClause.columns` method.

        """
        # positional columns may still be given a type via a matching
        # keyword argument, which is popped so it is not doubly applied
        positional_input_cols = [
            ColumnClause(col.key, types.pop(col.key))
            if col.key in types
            else col
            for col in cols
        ]
        keyed_input_cols = [
            ColumnClause(key, type_) for key, type_ in types.items()
        ]

        return selectable.TextAsFrom(
            self,
            positional_input_cols + keyed_input_cols,
            # positional targeting only applies when ALL columns were
            # passed positionally
            positional=bool(positional_input_cols) and not keyed_input_cols,
        )

    @property
    def type(self):
        # a text fragment carries no datatype of its own
        return type_api.NULLTYPE

    @property
    def comparator(self):
        return self.type.comparator_factory(self)

    def self_group(self, against=None):
        # parenthesize only when used as the right side of an IN
        if against is operators.in_op:
            return Grouping(self)
        else:
            return self

    def _copy_internals(self, clone=_clone, **kw):
        self._bindparams = dict(
            (b.key, clone(b, **kw)) for b in self._bindparams.values()
        )

    def get_children(self, **kwargs):
        return list(self._bindparams.values())

    def compare(self, other):
        # two text() constructs are equivalent when their SQL text matches
        return isinstance(other, TextClause) and other.text == self.text
class Null(ColumnElement):
    """Represent the SQL ``NULL`` keyword.

    Public access to a constant :class:`.Null` construct is via the
    :func:`.null` function.
    """

    __visit_name__ = "null"

    @util.memoized_property
    def type(self):
        # NULL carries no datatype of its own
        return type_api.NULLTYPE

    @classmethod
    def _instance(cls):
        """Return a constant :class:`.Null` construct."""
        return Null()

    def compare(self, other):
        # any two NULL constructs are interchangeable
        return isinstance(other, Null)
class False_(ColumnElement):
    """Represent the ``false`` keyword, or equivalent, in a SQL statement.

    Public access to a constant :class:`.False_` construct is via the
    :func:`.false` function.
    """

    __visit_name__ = "false"

    @util.memoized_property
    def type(self):
        # a boolean constant renders with the BOOLEAN type
        return type_api.BOOLEANTYPE

    def _negate(self):
        # NOT false is true
        return True_()

    @classmethod
    def _instance(cls):
        """Return a :class:`.False_` construct.

        E.g.::

            >>> from sqlalchemy import false
            >>> print(select([t.c.x]).where(false()))
            SELECT x FROM t WHERE false

        A backend which does not support true/false constants will render as
        an expression against 1 or 0::

            >>> print(select([t.c.x]).where(false()))
            SELECT x FROM t WHERE 0 = 1

        The :func:`.true` and :func:`.false` constants also feature
        "short circuit" operation within an :func:`.and_` or :func:`.or_`
        conjunction::

            >>> print(select([t.c.x]).where(or_(t.c.x > 5, true())))
            SELECT x FROM t WHERE true

            >>> print(select([t.c.x]).where(and_(t.c.x > 5, false())))
            SELECT x FROM t WHERE false

        .. versionchanged:: 0.9 :func:`.true` and :func:`.false` feature
           better integrated behavior within conjunctions and on dialects
           that don't support true/false constants.

        .. seealso::

            :func:`.true`

        """
        return False_()

    def compare(self, other):
        # any two "false" constructs are interchangeable
        return isinstance(other, False_)
class True_(ColumnElement):
    """Represent the ``true`` keyword, or equivalent, in a SQL statement.

    Public access to a constant :class:`.True_` construct is via the
    :func:`.true` function.
    """

    __visit_name__ = "true"

    @util.memoized_property
    def type(self):
        # a boolean constant renders with the BOOLEAN type
        return type_api.BOOLEANTYPE

    def _negate(self):
        # NOT true is false
        return False_()

    @classmethod
    def _ifnone(cls, other):
        """Return *other*, substituting a constant ``true`` for ``None``."""
        return cls._instance() if other is None else other

    @classmethod
    def _instance(cls):
        """Return a constant :class:`.True_` construct.

        E.g.::

            >>> from sqlalchemy import true
            >>> print(select([t.c.x]).where(true()))
            SELECT x FROM t WHERE true

        A backend which does not support true/false constants will render as
        an expression against 1 or 0::

            >>> print(select([t.c.x]).where(true()))
            SELECT x FROM t WHERE 1 = 1

        The :func:`.true` and :func:`.false` constants also feature
        "short circuit" operation within an :func:`.and_` or :func:`.or_`
        conjunction::

            >>> print(select([t.c.x]).where(or_(t.c.x > 5, true())))
            SELECT x FROM t WHERE true

            >>> print(select([t.c.x]).where(and_(t.c.x > 5, false())))
            SELECT x FROM t WHERE false

        .. versionchanged:: 0.9 :func:`.true` and :func:`.false` feature
           better integrated behavior within conjunctions and on dialects
           that don't support true/false constants.

        .. seealso::

            :func:`.false`

        """
        return True_()

    def compare(self, other):
        # any two "true" constructs are interchangeable
        return isinstance(other, True_)
class ClauseList(ClauseElement):
    """Describe a list of clauses, separated by an operator.

    By default, is comma-separated, such as a column listing.

    """

    __visit_name__ = "clauselist"

    def __init__(self, *clauses, **kwargs):
        self.operator = kwargs.pop("operator", operators.comma_op)
        self.group = kwargs.pop("group", True)
        self.group_contents = kwargs.pop("group_contents", True)
        self._tuple_values = kwargs.pop("_tuple_values", False)
        coerce_text = kwargs.pop(
            "_literal_as_text", _expression_literal_as_text
        )
        if self.group_contents:
            # parenthesize members according to operator precedence
            self.clauses = [
                coerce_text(c).self_group(against=self.operator)
                for c in clauses
            ]
        else:
            self.clauses = [coerce_text(c) for c in clauses]
        self._is_implicitly_boolean = operators.is_boolean(self.operator)

    def __iter__(self):
        return iter(self.clauses)

    def __len__(self):
        return len(self.clauses)

    @property
    def _select_iterable(self):
        return iter(self)

    def append(self, clause):
        # coerce then optionally group, mirroring __init__'s handling
        converted = _literal_as_text(clause)
        if self.group_contents:
            converted = converted.self_group(against=self.operator)
        self.clauses.append(converted)

    def _copy_internals(self, clone=_clone, **kw):
        self.clauses = [clone(c, **kw) for c in self.clauses]

    def get_children(self, **kwargs):
        return self.clauses

    @property
    def _from_objects(self):
        return list(
            itertools.chain.from_iterable(
                c._from_objects for c in self.clauses
            )
        )

    def self_group(self, against=None):
        if self.group and operators.is_precedent(self.operator, against):
            return Grouping(self)
        return self

    def compare(self, other, **kw):
        """Compare this :class:`.ClauseList` to the given :class:`.ClauseList`,
        including a comparison of all the clause items.

        """
        if not isinstance(other, ClauseList) and len(self.clauses) == 1:
            # a single-element list compares directly against a scalar
            return self.clauses[0].compare(other, **kw)
        if (
            not isinstance(other, ClauseList)
            or len(self.clauses) != len(other.clauses)
            or self.operator is not other.operator
        ):
            return False
        if self.operator in (operators.and_, operators.or_):
            # conjunctions compare without regard to ordering; greedily
            # match each of our clauses to a distinct clause of *other*
            matched = set()
            for ours in self.clauses:
                for theirs in set(other.clauses).difference(matched):
                    if ours.compare(theirs, **kw):
                        matched.add(theirs)
                        break
            return len(matched) == len(other.clauses)
        # positional comparison for all other operators
        return all(
            ours.compare(theirs, **kw)
            for ours, theirs in zip(self.clauses, other.clauses)
        )
class BooleanClauseList(ClauseList, ColumnElement):
    __visit_name__ = "clauselist"

    _tuple_values = False

    def __init__(self, *arg, **kw):
        # instances are created via the and_() / or_() factories only
        raise NotImplementedError(
            "BooleanClauseList has a private constructor"
        )

    @classmethod
    def _construct(cls, operator, continue_on, skip_on, *clauses, **kw):
        # normalize all incoming clauses to expression constructs
        clauses = [
            _expression_literal_as_text(c)
            for c in util.coerce_generator_arg(clauses)
        ]
        kept = []
        for clause in clauses:
            if isinstance(clause, continue_on):
                # neutral element (e.g. true() within AND); drop it
                continue
            if isinstance(clause, skip_on):
                # absorbing element (e.g. false() within AND); short circuit
                return clause.self_group(against=operators._asbool)
            kept.append(clause)
        if len(kept) == 1:
            # a single remaining clause renders bare
            return kept[0].self_group(against=operators._asbool)
        if not kept and clauses:
            # every clause was neutral; collapse to a single constant
            return clauses[0].self_group(against=operators._asbool)
        self = cls.__new__(cls)
        self.clauses = [c.self_group(against=operator) for c in kept]
        self.group = True
        self.operator = operator
        self.group_contents = True
        self.type = type_api.BOOLEANTYPE
        self._is_implicitly_boolean = True
        return self

    @classmethod
    def and_(cls, *clauses):
        r"""Produce a conjunction of expressions joined by ``AND``.

        E.g.::

            from sqlalchemy import and_

            stmt = select([users_table]).where(
                and_(
                    users_table.c.name == 'wendy',
                    users_table.c.enrolled == True
                )
            )

        The :func:`.and_` conjunction is also available using the
        Python ``&`` operator (though note that compound expressions
        need to be parenthesized in order to function with Python
        operator precedence behavior)::

            stmt = select([users_table]).where(
                (users_table.c.name == 'wendy') &
                (users_table.c.enrolled == True)
            )

        The :func:`.and_` operation is also implicit in some cases;
        the :meth:`_expression.Select.where`
        method for example can be invoked multiple
        times against a statement, which will have the effect of each
        clause being combined using :func:`.and_`::

            stmt = select([users_table]).\
                where(users_table.c.name == 'wendy').\
                where(users_table.c.enrolled == True)

        .. seealso::

            :func:`.or_`

        """
        return cls._construct(operators.and_, True_, False_, *clauses)

    @classmethod
    def or_(cls, *clauses):
        """Produce a conjunction of expressions joined by ``OR``.

        E.g.::

            from sqlalchemy import or_

            stmt = select([users_table]).where(
                or_(
                    users_table.c.name == 'wendy',
                    users_table.c.name == 'jack'
                )
            )

        The :func:`.or_` conjunction is also available using the
        Python ``|`` operator (though note that compound expressions
        need to be parenthesized in order to function with Python
        operator precedence behavior)::

            stmt = select([users_table]).where(
                (users_table.c.name == 'wendy') |
                (users_table.c.name == 'jack')
            )

        .. seealso::

            :func:`.and_`

        """
        return cls._construct(operators.or_, False_, True_, *clauses)

    @property
    def _select_iterable(self):
        return (self,)

    def self_group(self, against=None):
        # an empty conjunction renders as-is; otherwise defer to ClauseList
        if not self.clauses:
            return self
        return super(BooleanClauseList, self).self_group(against=against)

    def _negate(self):
        return ClauseList._negate(self)
# Public module-level aliases for the boolean conjunction factories.
and_ = BooleanClauseList.and_
or_ = BooleanClauseList.or_
class Tuple(ClauseList, ColumnElement):
    """Represent a SQL tuple."""

    def __init__(self, *clauses, **kw):
        """Return a :class:`.Tuple`.

        Main usage is to produce a composite IN construct using
        :meth:`.ColumnOperators.in_` ::

            from sqlalchemy import tuple_

            tuple_(table.c.col1, table.c.col2).in_(
                [(1, 2), (5, 12), (10, 19)]
            )

        .. versionchanged:: 1.3.6 Added support for SQLite IN tuples.

        .. warning::

            The composite IN construct is not supported by all backends, and is
            currently known to work on PostgreSQL, MySQL, and SQLite.
            Unsupported backends will raise a subclass of
            :class:`~sqlalchemy.exc.DBAPIError` when such an expression is
            invoked.

        """
        clauses = [_literal_as_binds(c) for c in clauses]
        self._type_tuple = [c.type for c in clauses]
        if self._type_tuple:
            default_type = self._type_tuple[0]
        else:
            default_type = type_api.NULLTYPE
        # an explicit type_ keyword overrides the inferred element type
        self.type = kw.pop("type_", default_type)
        super(Tuple, self).__init__(*clauses, **kw)

    @property
    def _select_iterable(self):
        return (self,)

    def _bind_param(self, operator, obj, type_=None):
        # Expand the composite value into one bound parameter per element,
        # pairing each value with the type captured at construction time.
        params = [
            BindParameter(
                None,
                value,
                _compared_to_operator=operator,
                _compared_to_type=element_type,
                unique=True,
                type_=type_,
            )
            for value, element_type in zip(obj, self._type_tuple)
        ]
        return Tuple(*params).self_group()
class Case(ColumnElement):
    """Represent a ``CASE`` expression.

    :class:`.Case` is produced using the :func:`.case` factory function,
    as in::

        from sqlalchemy import case

        stmt = select([users_table]).where(
            case(
                [
                    (users_table.c.name == 'wendy', 'W'),
                    (users_table.c.name == 'jack', 'J')
                ],
                else_='E'
            )
        )

    Details on :class:`.Case` usage is at :func:`.case`.

    .. seealso::

        :func:`.case`

    """

    __visit_name__ = "case"

    def __init__(self, whens, value=None, else_=None):
        r"""Produce a ``CASE`` expression.

        The ``CASE`` construct in SQL is a conditional object that
        acts somewhat analogously to an "if/then" construct in other
        languages.  It returns an instance of :class:`.Case`.

        The resulting SQL resembles::

            SELECT id, name FROM user
            WHERE CASE
                WHEN (name = :name_1) THEN :param_1
                WHEN (name = :name_2) THEN :param_2
                ELSE :param_3
            END

        :param whens: the criteria to be compared against.  In the plain
         form, a list of 2-tuples ``(<sql expression>, <value>)``, where
         the SQL expression is a boolean condition and "value" is the
         resulting value, e.g.::

            case([
                (users_table.c.name == 'wendy', 'W'),
                (users_table.c.name == 'jack', 'J')
            ])

         In the "shorthand" form, a dictionary of comparison values
         mapped to result values; this form requires
         :paramref:`.case.value` to be present and compares using the
         ``==`` operator, e.g.::

            case(
                {"wendy": "W", "jack": "J"},
                value=users_table.c.name
            )

        :param value: an optional SQL expression which will be used as a
         fixed "comparison point" for candidate values within a dictionary
         passed to :paramref:`.case.whens`.

        :param else\_: an optional SQL expression which will be the
         evaluated result of the ``CASE`` construct if all expressions
         within :paramref:`.case.whens` evaluate to false.  When omitted,
         most databases will produce a result of NULL if none of the
         "when" expressions evaluate to true.

        Literal result values are coerced into :func:`.bindparam`
        constructs; SQL expressions are accepted as well.  To render a
        constant string inline, use :func:`_expression.literal_column`.

        """
        try:
            # accept the dictionary (shorthand) form as well as a list
            whens = util.dictlike_iteritems(whens)
        except TypeError:
            pass

        # Without a "value", each condition must be a full boolean
        # expression; with one, plain literals are permitted as keys.
        coerce_cond = _literal_as_binds if value is not None else _no_literals
        whenlist = [
            (coerce_cond(cond).self_group(), _literal_as_binds(result))
            for cond, result in whens
        ]

        # the expression's type is taken from the last result expression
        self.type = whenlist[-1][-1].type if whenlist else None
        self.value = _literal_as_binds(value) if value is not None else None
        self.whens = whenlist
        self.else_ = _literal_as_binds(else_) if else_ is not None else None

    def _copy_internals(self, clone=_clone, **kw):
        if self.value is not None:
            self.value = clone(self.value, **kw)
        self.whens = [
            (clone(cond, **kw), clone(result, **kw))
            for cond, result in self.whens
        ]
        if self.else_ is not None:
            self.else_ = clone(self.else_, **kw)

    def get_children(self, **kwargs):
        if self.value is not None:
            yield self.value
        for cond, result in self.whens:
            yield cond
            yield result
        if self.else_ is not None:
            yield self.else_

    @property
    def _from_objects(self):
        return list(
            itertools.chain.from_iterable(
                child._from_objects for child in self.get_children()
            )
        )
def literal_column(text, type_=None):
    r"""Produce a :class:`.ColumnClause` object that has the
    :paramref:`_expression.column.is_literal` flag set to True.

    :func:`_expression.literal_column` is similar to
    :func:`_expression.column`, except that
    it is more often used as a "standalone" column expression that renders
    exactly as stated; while :func:`_expression.column`
    stores a string name that
    will be assumed to be part of a table and may be quoted as such,
    :func:`_expression.literal_column` can be that,
    or any other arbitrary column-oriented
    expression.

    :param text: the text of the expression; can be any SQL expression.
      Quoting rules will not be applied. To specify a column-name expression
      which should be subject to quoting rules, use the :func:`column`
      function.

    :param type\_: an optional :class:`~sqlalchemy.types.TypeEngine`
      object which will
      provide result-set translation and additional expression semantics for
      this column. If left as ``None`` the type will be :class:`.NullType`.

    .. seealso::

        :func:`_expression.column`

        :func:`_expression.text`

        :ref:`sqlexpression_literal_column`

    """
    # is_literal=True suppresses identifier quoting; text renders verbatim
    return ColumnClause(text, is_literal=True, type_=type_)
class Cast(ColumnElement):
    """Represent a ``CAST`` expression.

    :class:`.Cast` is produced using the :func:`.cast` factory function,
    as in::

        from sqlalchemy import cast, Numeric

        stmt = select([
                    cast(product_table.c.unit_price, Numeric(10, 4))
                ])

    Details on :class:`.Cast` usage is at :func:`.cast`.

    .. seealso::

        :ref:`coretutorial_casts`

        :func:`.cast`

        :func:`.type_coerce` - an alternative to CAST that coerces the type
        on the Python side only, which is often sufficient to generate the
        correct SQL and data coercion.

    """

    __visit_name__ = "cast"

    def __init__(self, expression, type_):
        r"""Produce a ``CAST`` expression.

        :func:`.cast` returns an instance of :class:`.Cast`.

        E.g.::

            from sqlalchemy import cast, Numeric

            stmt = select([
                        cast(product_table.c.unit_price, Numeric(10, 4))
                    ])

        The above statement will produce SQL resembling::

            SELECT CAST(unit_price AS NUMERIC(10, 4)) FROM product

        The :func:`.cast` function performs two distinct functions when
        used.  The first is that it renders the ``CAST`` expression within
        the resulting SQL string.  The second is that it associates the given
        type (e.g. :class:`.TypeEngine` class or instance) with the column
        expression on the Python side, which means the expression will take
        on the expression operator behavior associated with that type,
        as well as the bound-value handling and result-row-handling behavior
        of the type.

        .. versionchanged:: 0.9.0 :func:`.cast` now applies the given type
           to the expression such that it takes effect on the bound-value,
           e.g. the Python-to-database direction, in addition to the
           result handling, e.g. database-to-Python, direction.

        An alternative to :func:`.cast` is the :func:`.type_coerce` function.
        This function performs the second task of associating an expression
        with a specific type, but does not render the ``CAST`` expression
        in SQL.

        :param expression: A SQL expression, such as a
         :class:`_expression.ColumnElement`
         expression or a Python string which will be coerced into a bound
         literal value.

        :param type\_: A :class:`.TypeEngine` class or instance indicating
         the type to which the ``CAST`` should apply.

        .. seealso::

            :ref:`coretutorial_casts`

            :func:`.type_coerce` - an alternative to CAST that coerces the type
            on the Python side only, which is often sufficient to generate the
            correct SQL and data coercion.

        """
        # Resolve the target type first; it is consulted when coercing the
        # expression into a bound literal immediately below.
        self.type = type_api.to_instance(type_)
        self.clause = _literal_as_binds(expression, type_=self.type)
        # TypeClause renders the type portion of "CAST(expr AS type)".
        self.typeclause = TypeClause(self.type)

    def _copy_internals(self, clone=_clone, **kw):
        # replace child elements with cloned copies (visitor support)
        self.clause = clone(self.clause, **kw)
        self.typeclause = clone(self.typeclause, **kw)

    def get_children(self, **kwargs):
        return self.clause, self.typeclause

    @property
    def _from_objects(self):
        # FROM objects are those of the casted expression only
        return self.clause._from_objects
class TypeCoerce(ColumnElement):
    """Represent a Python-side type-coercion wrapper.

    :class:`.TypeCoerce` supplies the :func:`_expression.type_coerce`
    function; see that function for usage details.

    .. versionchanged:: 1.1 The :func:`.type_coerce` function now produces
       a persistent :class:`.TypeCoerce` wrapper object rather than
       translating the given object in place.

    .. seealso::

        :func:`_expression.type_coerce`

        :func:`.cast`

    """

    __visit_name__ = "type_coerce"

    def __init__(self, expression, type_):
        r"""Associate a SQL expression with a particular type, without rendering
        ``CAST``.

        E.g.::

            from sqlalchemy import type_coerce

            stmt = select([
                type_coerce(log_table.date_string, StringDateTime())
            ])

        The above construct does not modify the rendered SQL (aside from a
        possible generated label in a columns clause context); instead, the
        given type's result-row handling is applied to fetched rows, and its
        bound-value handling (:meth:`.TypeEngine.bind_expression`,
        :meth:`.TypeEngine.bind_processor` or equivalent) takes effect when
        literal values or :func:`.bindparam` constructs are passed.

        .. note:: the :func:`.type_coerce` construct does not render any
           SQL syntax of its own, including that it does not imply
           parenthesization.   Please use :meth:`.TypeCoerce.self_group`
           if explicit parenthesization is required.

        In order to provide a named label for the expression, use
        :meth:`_expression.ColumnElement.label`.

        :param expression: A SQL expression, such as a
         :class:`_expression.ColumnElement`
         expression or a Python string which will be coerced into a bound
         literal value.

        :param type\_: A :class:`.TypeEngine` class or instance indicating
         the type to which the expression is coerced.

        .. seealso::

            :ref:`coretutorial_casts`

            :func:`.cast`

        """
        self.type = type_api.to_instance(type_)
        self.clause = _literal_as_binds(expression, type_=self.type)

    def _copy_internals(self, clone=_clone, **kw):
        self.clause = clone(self.clause, **kw)
        # drop the memoized typed_expression; it may refer to the old clause
        self.__dict__.pop("typed_expression", None)

    def get_children(self, **kwargs):
        return (self.clause,)

    @property
    def _from_objects(self):
        return self.clause._from_objects

    @util.memoized_property
    def typed_expression(self):
        # For a bound parameter, re-type a copy so that this wrapper's
        # type drives bound-value handling; otherwise pass through.
        if not isinstance(self.clause, BindParameter):
            return self.clause
        bound = self.clause._clone()
        bound.type = self.type
        return bound

    def self_group(self, against=None):
        grouped = self.clause.self_group(against=against)
        if grouped is self.clause:
            return self
        return TypeCoerce(grouped, self.type)
class Extract(ColumnElement):
    """Represent a SQL EXTRACT clause, ``extract(field FROM expr)``."""

    __visit_name__ = "extract"

    def __init__(self, field, expr, **kwargs):
        """Return a :class:`.Extract` construct.

        This is typically available as :func:`.extract`
        as well as ``func.extract`` from the
        :data:`.func` namespace.

        """
        # EXTRACT always yields an integer-typed result.
        self.type = type_api.INTEGERTYPE
        # the field name (e.g. "year") is stored as given, uncoerced
        self.field = field
        self.expr = _literal_as_binds(expr, None)
        # NOTE(review): **kwargs is accepted but ignored here, presumably
        # for signature compatibility with generic invocation — confirm
        # before relying on any keyword having an effect.

    def _copy_internals(self, clone=_clone, **kw):
        self.expr = clone(self.expr, **kw)

    def get_children(self, **kwargs):
        return (self.expr,)

    @property
    def _from_objects(self):
        return self.expr._from_objects
class _label_reference(ColumnElement):
    """Wrap a column expression as it appears in a 'reference' context.

    This expression is any that includes an _order_by_label_element,
    which is a Label, or a DESC / ASC construct wrapping a Label.

    The production of _label_reference() should occur when an expression
    is added to this context; this includes the ORDER BY or GROUP BY of a
    SELECT statement, as well as a few other places, such as the ORDER BY
    within an OVER clause.

    """

    __visit_name__ = "label_reference"

    def __init__(self, element):
        # the labeled column expression being referenced
        self.element = element

    def _copy_internals(self, clone=_clone, **kw):
        self.element = clone(self.element, **kw)

    @property
    def _from_objects(self):
        # a label reference contributes no FROM objects of its own
        return ()
class _textual_label_reference(ColumnElement):
    # Wraps a plain string label name referenced in ORDER BY / GROUP BY.
    __visit_name__ = "textual_label_reference"

    def __init__(self, element):
        # the string name of the label being referenced
        self.element = element

    @util.memoized_property
    def _text_clause(self):
        # lazily build (and cache) a TextClause for the raw string
        return TextClause._create_text(self.element)
class UnaryExpression(ColumnElement):
    """Define a 'unary' expression.

    A unary expression has a single column expression
    and an operator.  The operator can be placed on the left
    (where it is called the 'operator') or right (where it is called the
    'modifier') of the column expression.

    :class:`.UnaryExpression` is the basis for several unary operators
    including those used by :func:`.desc`, :func:`.asc`, :func:`.distinct`,
    :func:`.nullsfirst` and :func:`.nullslast`.

    """

    __visit_name__ = "unary"

    def __init__(
        self,
        element,
        operator=None,
        modifier=None,
        type_=None,
        negate=None,
        wraps_column_expression=False,
    ):
        self.operator = operator
        self.modifier = modifier
        # group the element against whichever of operator / modifier is set
        self.element = element.self_group(
            against=self.operator or self.modifier
        )
        self.type = type_api.to_instance(type_)
        self.negate = negate
        self.wraps_column_expression = wraps_column_expression

    @classmethod
    def _create_nullsfirst(cls, column):
        """Produce the ``NULLS FIRST`` modifier for an ``ORDER BY`` expression.

        :func:`.nullsfirst` is intended to modify the expression produced
        by :func:`.asc` or :func:`.desc`, and indicates how NULL values
        should be handled when they are encountered during ordering::

            from sqlalchemy import desc, nullsfirst

            stmt = select([users_table]).order_by(
                nullsfirst(desc(users_table.c.name)))

        Like :func:`.asc` and :func:`.desc`, it is typically invoked from
        the column expression itself using
        :meth:`_expression.ColumnElement.nullsfirst`, rather than as its
        standalone function version.

        .. seealso::

            :func:`.asc`

            :func:`.desc`

            :func:`.nullslast`

            :meth:`_expression.Select.order_by`

        """
        return UnaryExpression(
            _literal_as_label_reference(column),
            modifier=operators.nullsfirst_op,
            wraps_column_expression=False,
        )

    @classmethod
    def _create_nullslast(cls, column):
        """Produce the ``NULLS LAST`` modifier for an ``ORDER BY`` expression.

        :func:`.nullslast` is intended to modify the expression produced
        by :func:`.asc` or :func:`.desc`, and indicates how NULL values
        should be handled when they are encountered during ordering::

            from sqlalchemy import desc, nullslast

            stmt = select([users_table]).order_by(
                nullslast(desc(users_table.c.name)))

        Like :func:`.asc` and :func:`.desc`, it is typically invoked from
        the column expression itself using
        :meth:`_expression.ColumnElement.nullslast`, rather than as its
        standalone function version.

        .. seealso::

            :func:`.asc`

            :func:`.desc`

            :func:`.nullsfirst`

            :meth:`_expression.Select.order_by`

        """
        return UnaryExpression(
            _literal_as_label_reference(column),
            modifier=operators.nullslast_op,
            wraps_column_expression=False,
        )

    @classmethod
    def _create_desc(cls, column):
        """Produce a descending ``ORDER BY`` clause element.

        e.g.::

            from sqlalchemy import desc

            stmt = select([users_table]).order_by(desc(users_table.c.name))

        will produce SQL as::

            SELECT id, name FROM user ORDER BY name DESC

        The :func:`.desc` function is a standalone version of the
        :meth:`_expression.ColumnElement.desc` method available on all
        SQL expressions.

        :param column: A :class:`_expression.ColumnElement` (e.g.
         scalar SQL expression) with which to apply the
         :func:`.desc` operation.

        .. seealso::

            :func:`.asc`

            :func:`.nullsfirst`

            :func:`.nullslast`

            :meth:`_expression.Select.order_by`

        """
        return UnaryExpression(
            _literal_as_label_reference(column),
            modifier=operators.desc_op,
            wraps_column_expression=False,
        )

    @classmethod
    def _create_asc(cls, column):
        """Produce an ascending ``ORDER BY`` clause element.

        e.g.::

            from sqlalchemy import asc

            stmt = select([users_table]).order_by(asc(users_table.c.name))

        will produce SQL as::

            SELECT id, name FROM user ORDER BY name ASC

        The :func:`.asc` function is a standalone version of the
        :meth:`_expression.ColumnElement.asc` method available on all
        SQL expressions.

        :param column: A :class:`_expression.ColumnElement` (e.g.
         scalar SQL expression) with which to apply the
         :func:`.asc` operation.

        .. seealso::

            :func:`.desc`

            :func:`.nullsfirst`

            :func:`.nullslast`

            :meth:`_expression.Select.order_by`

        """
        return UnaryExpression(
            _literal_as_label_reference(column),
            modifier=operators.asc_op,
            wraps_column_expression=False,
        )

    @classmethod
    def _create_distinct(cls, expr):
        """Produce an column-expression-level unary ``DISTINCT`` clause.

        This applies the ``DISTINCT`` keyword to an individual column
        expression, and is typically contained within an aggregate function,
        as in::

            from sqlalchemy import distinct, func
            stmt = select([func.count(distinct(users_table.c.name))])

        The above would produce an expression resembling::

            SELECT COUNT(DISTINCT name) FROM user

        Also available as the column-level method
        :meth:`_expression.ColumnElement.distinct`.  Note this is distinct
        from :meth:`_expression.Select.distinct`, which produces a
        ``SELECT DISTINCT`` applying to the result set as a whole.

        .. seealso::

            :meth:`_expression.ColumnElement.distinct`

            :meth:`_expression.Select.distinct`

            :data:`.func`

        """
        expr = _literal_as_binds(expr)
        return UnaryExpression(
            expr,
            operator=operators.distinct_op,
            type_=expr.type,
            wraps_column_expression=False,
        )

    @property
    def _order_by_label_element(self):
        # only ASC/DESC wrappers are transparent for label references
        if self.modifier in (operators.desc_op, operators.asc_op):
            return self.element._order_by_label_element
        return None

    @property
    def _from_objects(self):
        return self.element._from_objects

    def _copy_internals(self, clone=_clone, **kw):
        self.element = clone(self.element, **kw)

    def get_children(self, **kwargs):
        return (self.element,)

    def compare(self, other, **kw):
        """Compare this :class:`UnaryExpression` against the given
        :class:`_expression.ClauseElement`."""
        if not isinstance(other, UnaryExpression):
            return False
        return (
            self.operator == other.operator
            and self.modifier == other.modifier
            and self.element.compare(other.element, **kw)
        )

    def _negate(self):
        if self.negate is not None:
            # a dedicated negation operator exists; swap operator/negate
            return UnaryExpression(
                self.element,
                operator=self.negate,
                negate=self.operator,
                modifier=self.modifier,
                type_=self.type,
                wraps_column_expression=self.wraps_column_expression,
            )
        if self.type._type_affinity is type_api.BOOLEANTYPE._type_affinity:
            # boolean expressions negate by wrapping with NOT
            return UnaryExpression(
                self.self_group(against=operators.inv),
                operator=operators.inv,
                type_=type_api.BOOLEANTYPE,
                wraps_column_expression=self.wraps_column_expression,
                negate=None,
            )
        return ClauseElement._negate(self)

    def self_group(self, against=None):
        if self.operator and operators.is_precedent(self.operator, against):
            return Grouping(self)
        return self
class CollectionAggregate(UnaryExpression):
    """Forms the basis for right-hand collection operator modifiers
    ANY and ALL.

    The ANY and ALL keywords are available in different ways on different
    backends.  On PostgreSQL, they only work for an ARRAY type.  On
    MySQL, they only work for subqueries.

    """

    @classmethod
    def _coerce_and_wrap(cls, expr, operator):
        # Shared coercion pipeline for ANY / ALL: bind plain literals,
        # collapse a selectable to a scalar subquery when possible, then
        # group and wrap with the given collection operator.
        expr = _literal_as_binds(expr)
        if expr.is_selectable and hasattr(expr, "as_scalar"):
            expr = expr.as_scalar()
        expr = expr.self_group()
        return CollectionAggregate(
            expr,
            operator=operator,
            type_=type_api.NULLTYPE,
            wraps_column_expression=False,
        )

    @classmethod
    def _create_any(cls, expr):
        """Produce an ANY expression.

        This may apply to an array type for some dialects (e.g. postgresql),
        or to a subquery for others (e.g. mysql).  e.g.::

            # postgresql '5 = ANY (somearray)'
            expr = 5 == any_(mytable.c.somearray)

            # mysql '5 = ANY (SELECT value FROM table)'
            expr = 5 == any_(select([table.c.value]))

        .. versionadded:: 1.1

        .. seealso::

            :func:`_expression.all_`

        """
        return cls._coerce_and_wrap(expr, operators.any_op)

    @classmethod
    def _create_all(cls, expr):
        """Produce an ALL expression.

        This may apply to an array type for some dialects (e.g. postgresql),
        or to a subquery for others (e.g. mysql).  e.g.::

            # postgresql '5 = ALL (somearray)'
            expr = 5 == all_(mytable.c.somearray)

            # mysql '5 = ALL (SELECT value FROM table)'
            expr = 5 == all_(select([table.c.value]))

        .. versionadded:: 1.1

        .. seealso::

            :func:`_expression.any_`

        """
        return cls._coerce_and_wrap(expr, operators.all_op)

    # operate and reverse_operate are hardwired to
    # dispatch onto the type comparator directly, so that we can
    # ensure "reversed" behavior.
    def operate(self, op, *other, **kwargs):
        if not operators.is_comparison(op):
            raise exc.ArgumentError(
                "Only comparison operators may be used with ANY/ALL"
            )
        kwargs["reverse"] = True
        return self.comparator.operate(operators.mirror(op), *other, **kwargs)

    def reverse_operate(self, op, other, **kwargs):
        # comparison operators should never call reverse_operate
        assert not operators.is_comparison(op)
        raise exc.ArgumentError(
            "Only comparison operators may be used with ANY/ALL"
        )
class AsBoolean(UnaryExpression):
    """Wrap an element in a boolean-coercing unary expression."""

    def __init__(self, element, operator, negate):
        self.element = element
        self.type = type_api.BOOLEANTYPE
        self.operator = operator
        self.negate = negate
        self.modifier = None
        self.wraps_column_expression = True
        self._is_implicitly_boolean = element._is_implicitly_boolean

    def self_group(self, against=None):
        # never parenthesized
        return self

    def _negate(self):
        # constant true/false elements negate directly; otherwise invert
        # by swapping the operator and its negation
        if isinstance(self.element, (True_, False_)):
            return self.element._negate()
        return AsBoolean(self.element, self.negate, self.operator)
class BinaryExpression(ColumnElement):
    """Represent an expression that is ``LEFT <operator> RIGHT``.

    A :class:`.BinaryExpression` is generated automatically
    whenever two column expressions are used in a Python binary expression::

        >>> from sqlalchemy.sql import column
        >>> column('a') + column('b')
        <sqlalchemy.sql.expression.BinaryExpression object at 0x101029dd0>
        >>> print(column('a') + column('b'))
        a + b

    """

    __visit_name__ = "binary"

    _is_implicitly_boolean = True
    """Indicates that any database will know this is a boolean expression
    even if the database does not have an explicit boolean datatype.

    """

    def __init__(
        self, left, right, operator, type_=None, negate=None, modifiers=None
    ):
        # allow compatibility with libraries that
        # refer to BinaryExpression directly and pass strings
        if isinstance(operator, util.string_types):
            operator = operators.custom_op(operator)
        self._orig = (left, right)
        self.left = left.self_group(against=operator)
        self.right = right.self_group(against=operator)
        self.operator = operator
        self.type = type_api.to_instance(type_)
        self.negate = negate
        self._is_implicitly_boolean = operators.is_boolean(operator)
        self.modifiers = modifiers if modifiers is not None else {}

    def __bool__(self):
        # only == / != have a defined truth value, based on identity
        # of the original operands
        if self.operator not in (operator.eq, operator.ne):
            raise TypeError("Boolean value of this clause is not defined")
        return self.operator(hash(self._orig[0]), hash(self._orig[1]))

    __nonzero__ = __bool__

    @property
    def is_comparison(self):
        return operators.is_comparison(self.operator)

    @property
    def _from_objects(self):
        return self.left._from_objects + self.right._from_objects

    def _copy_internals(self, clone=_clone, **kw):
        self.left = clone(self.left, **kw)
        self.right = clone(self.right, **kw)

    def get_children(self, **kwargs):
        return self.left, self.right

    def compare(self, other, **kw):
        """Compare this :class:`BinaryExpression` against the
        given :class:`BinaryExpression`."""
        if not isinstance(other, BinaryExpression):
            return False
        if self.operator != other.operator:
            return False
        if self.left.compare(other.left, **kw) and self.right.compare(
            other.right, **kw
        ):
            return True
        # commutative operators also compare with operands swapped
        return (
            operators.is_commutative(self.operator)
            and self.left.compare(other.right, **kw)
            and self.right.compare(other.left, **kw)
        )

    def self_group(self, against=None):
        if operators.is_precedent(self.operator, against):
            return Grouping(self)
        return self

    def _negate(self):
        if self.negate is None:
            return super(BinaryExpression, self)._negate()
        # a dedicated negation operator exists; swap operator/negate
        return BinaryExpression(
            self.left,
            self.right,
            self.negate,
            negate=self.operator,
            type_=self.type,
            modifiers=self.modifiers,
        )
class Slice(ColumnElement):
    """Represent SQL for a Python array-slice object.

    This is not a specific SQL construct at this level, but
    may be interpreted by specific dialects, e.g. PostgreSQL.

    """

    __visit_name__ = "slice"

    def __init__(self, start, stop, step):
        # slice bounds; how they render is dialect-specific
        self.start = start
        self.stop = stop
        self.step = step
        self.type = type_api.NULLTYPE

    def self_group(self, against=None):
        # a Slice may only appear as the index of an item-access
        # expression, i.e. ``col[1:3]``
        assert against is operator.getitem
        return self
class IndexExpression(BinaryExpression):
    """Represent the class of expressions that are like an "index"
    operation."""

    # marker subclass only; all behavior is inherited from
    # BinaryExpression
    pass
class Grouping(ColumnElement):
    """Represent a grouping within a column expression"""

    __visit_name__ = "grouping"

    def __init__(self, element):
        # the expression being parenthesized
        self.element = element
        self.type = getattr(element, "type", type_api.NULLTYPE)

    def self_group(self, against=None):
        # already parenthesized; no further grouping ever needed
        return self

    @util.memoized_property
    def _is_implicitly_boolean(self):
        return self.element._is_implicitly_boolean

    @property
    def _key_label(self):
        return self._label

    @property
    def _label(self):
        return getattr(self.element, "_label", None) or self.anon_label

    def _copy_internals(self, clone=_clone, **kw):
        self.element = clone(self.element, **kw)

    def get_children(self, **kwargs):
        return (self.element,)

    @property
    def _proxies(self):
        if isinstance(self.element, ColumnElement):
            return [self.element]
        else:
            return []

    @property
    def _from_objects(self):
        return self.element._from_objects

    def __getattr__(self, attr):
        # proxy any other attribute access through to the wrapped element
        return getattr(self.element, attr)

    def __getstate__(self):
        # explicit pickle state is required because __getattr__ proxying
        # would otherwise interfere with unpickling
        return {"element": self.element, "type": self.type}

    def __setstate__(self, state):
        self.element = state["element"]
        self.type = state["type"]

    def compare(self, other, **kw):
        return isinstance(other, Grouping) and self.element.compare(
            other.element
        )
# Sentinel symbols used by Over._interpret_range to mark the
# "UNBOUNDED PRECEDING/FOLLOWING" and "CURRENT ROW" window frame bounds.
RANGE_UNBOUNDED = util.symbol("RANGE_UNBOUNDED")
RANGE_CURRENT = util.symbol("RANGE_CURRENT")
class Over(ColumnElement):
    """Represent an OVER clause.

    This is a special operator against a so-called
    "window" function, as well as any aggregate function,
    which produces results relative to the result set
    itself. It's supported only by certain database
    backends.

    """

    __visit_name__ = "over"

    # ClauseList of ORDER BY / PARTITION BY expressions, or None
    order_by = None
    partition_by = None

    element = None
    """The underlying expression object to which this :class:`.Over`
    object refers towards."""

    def __init__(
        self, element, partition_by=None, order_by=None, range_=None, rows=None
    ):
        r"""Produce an :class:`.Over` object against a function.

        Used against aggregate or so-called "window" functions,
        for database backends that support window functions.

        :func:`_expression.over` is usually called using
        the :meth:`.FunctionElement.over` method, e.g.::

            func.row_number().over(order_by=mytable.c.some_column)

        Would produce::

            ROW_NUMBER() OVER(ORDER BY some_column)

        Ranges are also possible using the :paramref:`.expression.over.range_`
        and :paramref:`.expression.over.rows` parameters.  These
        mutually-exclusive parameters each accept a 2-tuple, which contains
        a combination of integers and None::

            func.row_number().over(
                order_by=my_table.c.some_column, range_=(None, 0))

        The above would produce::

            ROW_NUMBER() OVER(ORDER BY some_column
            RANGE BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW)

        A value of ``None`` indicates "unbounded", a
        value of zero indicates "current row", and negative / positive
        integers indicate "preceding" and "following":

        * RANGE BETWEEN 5 PRECEDING AND 10 FOLLOWING::

            func.row_number().over(order_by='x', range_=(-5, 10))

        * ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW::

            func.row_number().over(order_by='x', rows=(None, 0))

        * RANGE BETWEEN 2 PRECEDING AND UNBOUNDED FOLLOWING::

            func.row_number().over(order_by='x', range_=(-2, None))

        * RANGE BETWEEN 1 FOLLOWING AND 3 FOLLOWING::

            func.row_number().over(order_by='x', range_=(1, 3))

        .. versionadded:: 1.1 support for RANGE / ROWS within a window

        :param element: a :class:`.FunctionElement`, :class:`.WithinGroup`,
         or other compatible construct.
        :param partition_by: a column element or string, or a list
         of such, that will be used as the PARTITION BY clause
         of the OVER construct.
        :param order_by: a column element or string, or a list
         of such, that will be used as the ORDER BY clause
         of the OVER construct.
        :param range\_: optional range clause for the window.  This is a
         tuple value which can contain integer values or ``None``,
         and will render a RANGE BETWEEN PRECEDING / FOLLOWING clause.

         .. versionadded:: 1.1

        :param rows: optional rows clause for the window.  This is a tuple
         value which can contain integer values or None, and will render
         a ROWS BETWEEN PRECEDING / FOLLOWING clause.

         .. versionadded:: 1.1

        This function is also available from the :data:`~.expression.func`
        construct itself via the :meth:`.FunctionElement.over` method.

        .. seealso::

            :data:`.expression.func`

            :func:`_expression.within_group`

        """
        self.element = element
        if order_by is not None:
            self.order_by = ClauseList(
                *util.to_list(order_by),
                _literal_as_text=_literal_as_label_reference
            )
        if partition_by is not None:
            self.partition_by = ClauseList(
                *util.to_list(partition_by),
                _literal_as_text=_literal_as_label_reference
            )

        # range_ and rows are mutually exclusive; exactly one (or
        # neither) may be given.
        if range_:
            self.range_ = self._interpret_range(range_)
            if rows:
                raise exc.ArgumentError(
                    "'range_' and 'rows' are mutually exclusive"
                )
            else:
                self.rows = None
        elif rows:
            self.rows = self._interpret_range(rows)
            self.range_ = None
        else:
            self.rows = self.range_ = None

    def __reduce__(self):
        return self.__class__, (
            self.element,
            self.partition_by,
            self.order_by,
            self.range_,
            self.rows,
        )

    def _interpret_range(self, range_):
        # Normalize a user-supplied 2-tuple into (lower, upper) frame
        # bounds: None -> RANGE_UNBOUNDED, 0 -> RANGE_CURRENT, any other
        # integer passes through (negative = PRECEDING, positive =
        # FOLLOWING).
        if not isinstance(range_, tuple) or len(range_) != 2:
            raise exc.ArgumentError("2-tuple expected for range/rows")

        if range_[0] is None:
            lower = RANGE_UNBOUNDED
        else:
            try:
                lower = int(range_[0])
            except ValueError as err:
                util.raise_(
                    exc.ArgumentError(
                        "Integer or None expected for range value"
                    ),
                    replace_context=err,
                )
            else:
                if lower == 0:
                    lower = RANGE_CURRENT

        if range_[1] is None:
            upper = RANGE_UNBOUNDED
        else:
            try:
                upper = int(range_[1])
            except ValueError as err:
                util.raise_(
                    exc.ArgumentError(
                        "Integer or None expected for range value"
                    ),
                    replace_context=err,
                )
            else:
                if upper == 0:
                    upper = RANGE_CURRENT

        return lower, upper

    @property
    @util.deprecated(
        "1.1",
        "the :attr:`.Over.func` member of the :class:`.Over` "
        "class is deprecated and will be removed in a future release.  "
        "Please refer to the :attr:`.Over.element` attribute.",
    )
    def func(self):
        """the element referred to by this :class:`.Over`
        clause.

        """
        return self.element

    @util.memoized_property
    def type(self):
        # the OVER construct takes on the type of its wrapped function
        return self.element.type

    def get_children(self, **kwargs):
        return [
            c
            for c in (self.element, self.partition_by, self.order_by)
            if c is not None
        ]

    def _copy_internals(self, clone=_clone, **kw):
        self.element = clone(self.element, **kw)
        if self.partition_by is not None:
            self.partition_by = clone(self.partition_by, **kw)
        if self.order_by is not None:
            self.order_by = clone(self.order_by, **kw)

    @property
    def _from_objects(self):
        return list(
            itertools.chain(
                *[
                    c._from_objects
                    for c in (self.element, self.partition_by, self.order_by)
                    if c is not None
                ]
            )
        )
class WithinGroup(ColumnElement):
    """Represent a WITHIN GROUP (ORDER BY) clause.

    This is a special operator against so-called
    "ordered set aggregate" and "hypothetical
    set aggregate" functions, including ``percentile_cont()``,
    ``rank()``, ``dense_rank()``, etc.

    It's supported only by certain database backends, such as PostgreSQL,
    Oracle and MS SQL Server.

    The :class:`.WithinGroup` construct extracts its type from the
    method :meth:`.FunctionElement.within_group_type`.  If this returns
    ``None``, the function's ``.type`` is used.

    """

    __visit_name__ = "withingroup"

    # ClauseList of ORDER BY expressions, or None
    order_by = None

    def __init__(self, element, *order_by):
        r"""Produce a :class:`.WithinGroup` object against a function.

        Used against so-called "ordered set aggregate" and "hypothetical
        set aggregate" functions, including :class:`.percentile_cont`,
        :class:`.rank`, :class:`.dense_rank`, etc.

        :func:`_expression.within_group` is usually called using
        the :meth:`.FunctionElement.within_group` method, e.g.::

            from sqlalchemy import within_group
            stmt = select([
                department.c.id,
                func.percentile_cont(0.5).within_group(
                    department.c.salary.desc()
                )
            ])

        The above statement would produce SQL similar to
        ``SELECT department.id, percentile_cont(0.5)
        WITHIN GROUP (ORDER BY department.salary DESC)``.

        :param element: a :class:`.FunctionElement` construct, typically
         generated by :data:`~.expression.func`.
        :param \*order_by: one or more column elements that will be used
         as the ORDER BY clause of the WITHIN GROUP construct.

        .. versionadded:: 1.1

        .. seealso::

            :data:`.expression.func`

            :func:`_expression.over`

        """
        self.element = element
        if order_by is not None:
            self.order_by = ClauseList(
                *util.to_list(order_by),
                _literal_as_text=_literal_as_label_reference
            )

    def over(self, partition_by=None, order_by=None, range_=None, rows=None):
        """Produce an OVER clause against this :class:`.WithinGroup`
        construct.

        This function has the same signature as that of
        :meth:`.FunctionElement.over`.

        """
        return Over(
            self,
            partition_by=partition_by,
            order_by=order_by,
            range_=range_,
            rows=rows,
        )

    @util.memoized_property
    def type(self):
        # the wrapped function may declare a dedicated WITHIN GROUP
        # return type; fall back to the function's own type otherwise
        wgt = self.element.within_group_type(self)
        if wgt is not None:
            return wgt
        else:
            return self.element.type

    def get_children(self, **kwargs):
        return [c for c in (self.element, self.order_by) if c is not None]

    def _copy_internals(self, clone=_clone, **kw):
        self.element = clone(self.element, **kw)
        if self.order_by is not None:
            self.order_by = clone(self.order_by, **kw)

    @property
    def _from_objects(self):
        return list(
            itertools.chain(
                *[
                    c._from_objects
                    for c in (self.element, self.order_by)
                    if c is not None
                ]
            )
        )
class FunctionFilter(ColumnElement):
    """Represent a function FILTER clause.

    This is a special operator against aggregate and window functions,
    which controls which rows are passed to it.
    It's supported only by certain database backends.

    Invocation of :class:`.FunctionFilter` is via
    :meth:`.FunctionElement.filter`::

        func.count(1).filter(True)

    .. versionadded:: 1.0.0

    .. seealso::

        :meth:`.FunctionElement.filter`

    """

    __visit_name__ = "funcfilter"

    # accumulated FILTER criterion, or None if none applied yet
    criterion = None

    def __init__(self, func, *criterion):
        """Produce a :class:`.FunctionFilter` object against a function.

        Used against aggregate and window functions,
        for database backends that support the "FILTER" clause.

        E.g.::

            from sqlalchemy import funcfilter
            funcfilter(func.count(1), MyClass.name == 'some name')

        Would produce "COUNT(1) FILTER (WHERE myclass.name = 'some name')".

        This function is also available from the :data:`~.expression.func`
        construct itself via the :meth:`.FunctionElement.filter` method.

        .. versionadded:: 1.0.0

        .. seealso::

            :meth:`.FunctionElement.filter`

        """
        self.func = func
        self.filter(*criterion)

    def filter(self, *criterion):
        """Produce an additional FILTER against the function.

        This method adds additional criteria to the initial criteria
        set up by :meth:`.FunctionElement.filter`.

        Multiple criteria are joined together at SQL render time
        via ``AND``.

        """
        # fold each new criterion into self.criterion, AND-ed together
        for criterion in list(criterion):
            criterion = _expression_literal_as_text(criterion)

            if self.criterion is not None:
                self.criterion = self.criterion & criterion
            else:
                self.criterion = criterion

        return self

    def over(self, partition_by=None, order_by=None, range_=None, rows=None):
        """Produce an OVER clause against this filtered function.

        Used against aggregate or so-called "window" functions,
        for database backends that support window functions.

        The expression::

            func.rank().filter(MyClass.y > 5).over(order_by='x')

        is shorthand for::

            from sqlalchemy import over, funcfilter
            over(funcfilter(func.rank(), MyClass.y > 5), order_by='x')

        See :func:`_expression.over` for a full description.

        """
        return Over(
            self,
            partition_by=partition_by,
            order_by=order_by,
            range_=range_,
            rows=rows,
        )

    def self_group(self, against=None):
        # parenthesize when the enclosing operator binds more tightly
        # than FILTER
        if operators.is_precedent(operators.filter_op, against):
            return Grouping(self)
        else:
            return self

    @util.memoized_property
    def type(self):
        # the FILTER construct takes on the type of the wrapped function
        return self.func.type

    def get_children(self, **kwargs):
        return [c for c in (self.func, self.criterion) if c is not None]

    def _copy_internals(self, clone=_clone, **kw):
        self.func = clone(self.func, **kw)
        if self.criterion is not None:
            self.criterion = clone(self.criterion, **kw)

    @property
    def _from_objects(self):
        return list(
            itertools.chain(
                *[
                    c._from_objects
                    for c in (self.func, self.criterion)
                    if c is not None
                ]
            )
        )
class Label(ColumnElement):
    """Represents a column label (AS).

    Represent a label, as typically applied to any column-level
    element using the ``AS`` sql keyword.

    """

    __visit_name__ = "label"

    def __init__(self, name, element, type_=None):
        """Return a :class:`Label` object for the
        given :class:`_expression.ColumnElement`.

        A label changes the name of an element in the columns clause of a
        ``SELECT`` statement, typically via the ``AS`` SQL keyword.

        This functionality is more conveniently available via the
        :meth:`_expression.ColumnElement.label` method on
        :class:`_expression.ColumnElement`.

        :param name: label name

        :param obj: a :class:`_expression.ColumnElement`.

        """
        # labels of labels: resolve against the innermost element, keeping
        # the original label name available for ORDER BY label resolution
        if isinstance(element, Label):
            self._resolve_label = element._label
            while isinstance(element, Label):
                element = element.element

        if name:
            self.name = name
            self._resolve_label = self.name
        else:
            # no name given; generate an anonymous label unique to this
            # object's id
            self.name = _anonymous_label(
                "%%(%d %s)s" % (id(self), getattr(element, "name", "anon"))
            )

        self.key = self._label = self._key_label = self.name
        self._element = element
        self._type = type_
        self._proxies = [element]

    def __reduce__(self):
        return self.__class__, (self.name, self._element, self._type)

    @util.memoized_property
    def _is_implicitly_boolean(self):
        return self.element._is_implicitly_boolean

    @util.memoized_property
    def _allow_label_resolve(self):
        return self.element._allow_label_resolve

    @property
    def _order_by_label_element(self):
        return self

    @util.memoized_property
    def type(self):
        # explicit type_ wins; otherwise inherit the element's type
        return type_api.to_instance(
            self._type or getattr(self._element, "type", None)
        )

    @util.memoized_property
    def element(self):
        # group the inner element as the operand of "AS"
        return self._element.self_group(against=operators.as_)

    def self_group(self, against=None):
        return self._apply_to_inner(self._element.self_group, against=against)

    def _negate(self):
        return self._apply_to_inner(self._element._negate)

    def _apply_to_inner(self, fn, *arg, **kw):
        # re-wrap in a new Label only when the inner operation produced a
        # new element; otherwise this Label is returned unchanged
        sub_element = fn(*arg, **kw)
        if sub_element is not self._element:
            return Label(self.name, sub_element, type_=self._type)
        else:
            return self

    @property
    def primary_key(self):
        return self.element.primary_key

    @property
    def foreign_keys(self):
        return self.element.foreign_keys

    def get_children(self, **kwargs):
        return (self.element,)

    def _copy_internals(self, clone=_clone, anonymize_labels=False, **kw):
        self._element = clone(self._element, **kw)
        # expire memoized attributes derived from the old element
        self.__dict__.pop("element", None)
        self.__dict__.pop("_allow_label_resolve", None)
        if anonymize_labels:
            self.name = self._resolve_label = _anonymous_label(
                "%%(%d %s)s"
                % (id(self), getattr(self.element, "name", "anon"))
            )
            self.key = self._label = self._key_label = self.name

    @property
    def _from_objects(self):
        return self.element._from_objects

    def _make_proxy(self, selectable, name=None, **kw):
        e = self.element._make_proxy(
            selectable,
            name=name if name else self.name,
            disallow_is_literal=True,
        )
        e._proxies.append(self)
        if self._type is not None:
            e.type = self._type

        return e
class ColumnClause(Immutable, ColumnElement):
    """Represents a column expression from any textual string.

    The :class:`.ColumnClause`, a lightweight analogue to the
    :class:`_schema.Column` class, is typically invoked using the
    :func:`_expression.column` function, as in::

        from sqlalchemy import column

        id, name = column("id"), column("name")

        stmt = select([id, name]).select_from("user")

    The above statement would produce SQL like::

        SELECT id, name FROM user

    :class:`.ColumnClause` is the immediate superclass of the schema-specific
    :class:`_schema.Column` object.  While the :class:`_schema.Column`
    class has all the
    same capabilities as :class:`.ColumnClause`, the :class:`.ColumnClause`
    class is usable by itself in those cases where behavioral requirements
    are limited to simple SQL expression generation.  The object has none of
    the associations with schema-level metadata or with execution-time
    behavior that :class:`_schema.Column` does,
    so in that sense is a "lightweight"
    version of :class:`_schema.Column`.

    Full details on :class:`.ColumnClause` usage is at
    :func:`_expression.column`.

    .. seealso::

        :func:`_expression.column`

        :class:`_schema.Column`

    """

    __visit_name__ = "column"

    onupdate = default = server_default = server_onupdate = None

    _is_multiparam_column = False

    # grouped memoized-property registry; expired as a group when the
    # .table attribute is reassigned (see _set_table)
    _memoized_property = util.group_expirable_memoized_property()

    def __init__(self, text, type_=None, is_literal=False, _selectable=None):
        """Produce a :class:`.ColumnClause` object.

        The :class:`.ColumnClause` is a lightweight analogue to the
        :class:`_schema.Column` class.  The :func:`_expression.column`
        function can
        be invoked with just a name alone, as in::

            from sqlalchemy import column

            id, name = column("id"), column("name")

            stmt = select([id, name]).select_from("user")

        The above statement would produce SQL like::

            SELECT id, name FROM user

        Once constructed, :func:`_expression.column`
        may be used like any other SQL
        expression element such as within :func:`_expression.select`
        constructs::

            from sqlalchemy.sql import column

            id, name = column("id"), column("name")

            stmt = select([id, name]).select_from("user")

        The text handled by :func:`_expression.column`
        is assumed to be handled
        like the name of a database column; if the string contains mixed case,
        special characters, or matches a known reserved word on the target
        backend, the column expression will render using the quoting
        behavior determined by the backend.  To produce a textual SQL
        expression that is rendered exactly without any quoting,
        use :func:`_expression.literal_column` instead,
        or pass ``True`` as the
        value of :paramref:`_expression.column.is_literal`.  Additionally,
        full SQL
        statements are best handled using the :func:`_expression.text`
        construct.

        :func:`_expression.column` can be used in a table-like
        fashion by combining it with the :func:`.table` function
        (which is the lightweight analogue to :class:`_schema.Table`
        ) to produce
        a working table construct with minimal boilerplate::

            from sqlalchemy import table, column, select

            user = table("user",
                    column("id"),
                    column("name"),
                    column("description"),
            )

            stmt = select([user.c.description]).where(user.c.name == 'wendy')

        A :func:`_expression.column` / :func:`.table`
        construct like that illustrated
        above can be created in an
        ad-hoc fashion and is not associated with any
        :class:`_schema.MetaData`, DDL, or events, unlike its
        :class:`_schema.Table` counterpart.

        .. versionchanged:: 1.0.0 :func:`_expression.column` can now
           be imported from the plain ``sqlalchemy`` namespace like any
           other SQL element.

        :param text: the text of the element.

        :param type: :class:`_types.TypeEngine` object which can associate
          this :class:`.ColumnClause` with a type.

        :param is_literal: if True, the :class:`.ColumnClause` is assumed to
          be an exact expression that will be delivered to the output with no
          quoting rules applied regardless of case sensitive settings. the
          :func:`_expression.literal_column()` function essentially invokes
          :func:`_expression.column` while passing ``is_literal=True``.

        .. seealso::

            :class:`_schema.Column`

            :func:`_expression.literal_column`

            :func:`.table`

            :func:`_expression.text`

            :ref:`sqlexpression_literal_column`

        """
        self.key = self.name = text
        self.table = _selectable
        self.type = type_api.to_instance(type_)
        self.is_literal = is_literal

    def _compare_name_for_result(self, other):
        # when either side is textual/literal or table-less, compare by
        # name or label; otherwise compare by proxy-set lineage
        if (
            self.is_literal
            or self.table is None
            or self.table._textual
            or not hasattr(other, "proxy_set")
            or (
                isinstance(other, ColumnClause)
                and (
                    other.is_literal
                    or other.table is None
                    or other.table._textual
                )
            )
        ):
            return (hasattr(other, "name") and self.name == other.name) or (
                hasattr(other, "_label") and self._label == other._label
            )
        else:
            return other.proxy_set.intersection(self.proxy_set)

    def _get_table(self):
        return self.__dict__["table"]

    def _set_table(self, table):
        # reassigning .table invalidates all table-derived memoizations
        # (_from_objects, _label, etc.)
        self._memoized_property.expire_instance(self)
        self.__dict__["table"] = table

    table = property(_get_table, _set_table)

    @_memoized_property
    def _from_objects(self):
        t = self.table
        if t is not None:
            return [t]
        else:
            return []

    @util.memoized_property
    def description(self):
        if util.py3k:
            return self.name
        else:
            return self.name.encode("ascii", "backslashreplace")

    @_memoized_property
    def _key_label(self):
        if self.key != self.name:
            return self._gen_label(self.key)
        else:
            return self._label

    @_memoized_property
    def _label(self):
        return self._gen_label(self.name)

    @_memoized_property
    def _render_label_in_columns_clause(self):
        return self.table is not None

    @property
    def _ddl_label(self):
        return self._gen_label(self.name, dedupe_on_key=False)

    def _gen_label(self, name, dedupe_on_key=True):
        # Build the "<table>_<column>" label used in the columns clause,
        # propagating quoting preferences and de-duplicating against
        # sibling column labels when requested.
        t = self.table

        if self.is_literal:
            return None

        elif t is not None and t.named_with_column:
            if getattr(t, "schema", None):
                label = t.schema.replace(".", "_") + "_" + t.name + "_" + name
            else:
                label = t.name + "_" + name

            # propagate name quoting rules for labels.
            if getattr(name, "quote", None) is not None:
                if isinstance(label, quoted_name):
                    label.quote = name.quote
                else:
                    label = quoted_name(label, name.quote)
            elif getattr(t.name, "quote", None) is not None:
                # can't get this situation to occur, so let's
                # assert false on it for now
                assert not isinstance(label, quoted_name)
                label = quoted_name(label, t.name.quote)

            if dedupe_on_key:
                # ensure the label name doesn't conflict with that of an
                # existing column.   note that this implies that any Column
                # must **not** set up its _label before its parent table has
                # all of its other Column objects set up.  There are several
                # tables in the test suite which will fail otherwise; example:
                # table "owner" has columns "name" and "owner_name".  Therefore
                # column owner.name cannot use the label "owner_name", it has
                # to be "owner_name_1".
                if label in t.c:
                    _label = label
                    counter = 1
                    while _label in t.c:
                        _label = label + "_" + str(counter)
                        counter += 1
                    label = _label

            return _as_truncated(label)

        else:
            return name

    def _bind_param(self, operator, obj, type_=None):
        return BindParameter(
            self.key,
            obj,
            _compared_to_operator=operator,
            _compared_to_type=self.type,
            type_=type_,
            unique=True,
        )

    def _make_proxy(
        self,
        selectable,
        name=None,
        attach=True,
        name_is_truncatable=False,
        disallow_is_literal=False,
        **kw
    ):
        # the "is_literal" flag normally should never be propagated; a proxied
        # column is always a SQL identifier and never the actual expression
        # being evaluated. however, there is a case where the "is_literal" flag
        # might be used to allow the given identifier to have a fixed quoting
        # pattern already, so maintain the flag for the proxy unless a
        # :class:`.Label` object is creating the proxy.  See [ticket:4730].
        is_literal = (
            not disallow_is_literal
            and self.is_literal
            and (
                # note this does not accommodate for quoted_name differences
                # right now
                name is None
                or name == self.name
            )
        )
        c = self._constructor(
            _as_truncated(name or self.name)
            if name_is_truncatable
            else (name or self.name),
            type_=self.type,
            _selectable=selectable,
            is_literal=is_literal,
        )
        if name is None:
            c.key = self.key
        c._proxies = [self]
        if selectable._is_clone_of is not None:
            c._is_clone_of = selectable._is_clone_of.columns.get(c.key)

        if attach:
            selectable._columns[c.key] = c
        return c
class CollationClause(ColumnElement):
    """Represent a ``COLLATE <collation>`` expression element."""

    __visit_name__ = "collation"

    def __init__(self, collation):
        # name of the collation, rendered by the compiler after COLLATE
        self.collation = collation
class _IdentifiedClause(Executable, ClauseElement):
    """Base for executable clauses identified by a single name, used by
    the savepoint statement constructs below."""

    __visit_name__ = "identified"

    # savepoint-related statements must not trigger autocommit
    _execution_options = Executable._execution_options.union(
        {"autocommit": False}
    )

    def __init__(self, ident):
        # the identifier (e.g. savepoint name)
        self.ident = ident
class SavepointClause(_IdentifiedClause):
    """Represent a SAVEPOINT statement."""

    __visit_name__ = "savepoint"
class RollbackToSavepointClause(_IdentifiedClause):
    """Represent a ROLLBACK TO SAVEPOINT statement."""

    __visit_name__ = "rollback_to_savepoint"
class ReleaseSavepointClause(_IdentifiedClause):
    """Represent a RELEASE SAVEPOINT statement."""

    __visit_name__ = "release_savepoint"
class quoted_name(util.MemoizedSlots, util.text_type):
    """Represent a SQL identifier combined with quoting preferences.

    :class:`.quoted_name` is a Python unicode/str subclass which
    represents a particular identifier name along with a
    ``quote`` flag.  This ``quote`` flag, when set to
    ``True`` or ``False``, overrides automatic quoting behavior
    for this identifier in order to either unconditionally quote
    or to not quote the name.  If left at its default of ``None``,
    quoting behavior is applied to the identifier on a per-backend basis
    based on an examination of the token itself.

    A :class:`.quoted_name` object with ``quote=True`` is also
    prevented from being modified in the case of a so-called
    "name normalize" option.  Certain database backends, such as
    Oracle, Firebird, and DB2 "normalize" case-insensitive names
    as uppercase.  The SQLAlchemy dialects for these backends
    convert from SQLAlchemy's lower-case-means-insensitive convention
    to the upper-case-means-insensitive conventions of those backends.
    The ``quote=True`` flag here will prevent this conversion from occurring
    to support an identifier that's quoted as all lower case against
    such a backend.

    The :class:`.quoted_name` object is normally created automatically
    when specifying the name for key schema constructs such as
    :class:`_schema.Table`, :class:`_schema.Column`, and others.
    The class can also be
    passed explicitly as the name to any function that receives a name which
    can be quoted.  Such as to use the :meth:`_engine.Engine.has_table`
    method with
    an unconditionally quoted name::

        from sqlalchemy import create_engine
        from sqlalchemy.sql import quoted_name

        engine = create_engine("oracle+cx_oracle://some_dsn")
        engine.has_table(quoted_name("some_table", True))

    The above logic will run the "has table" logic against the Oracle backend,
    passing the name exactly as ``"some_table"`` without converting to
    upper case.

    .. versionadded:: 0.9.0

    .. versionchanged:: 1.2 The :class:`.quoted_name` construct is now
       importable from ``sqlalchemy.sql``, in addition to the previous
       location of ``sqlalchemy.sql.elements``.

    """

    __slots__ = "quote", "lower", "upper"

    def __new__(cls, value, quote):
        if value is None:
            return None
        # experimental - don't bother with quoted_name
        # if quote flag is None.  doesn't seem to make any dent
        # in performance however
        # elif not sprcls and quote is None:
        #   return value
        elif isinstance(value, cls) and (
            quote is None or value.quote == quote
        ):
            # already a quoted_name with a compatible quote flag; reuse
            return value
        self = super(quoted_name, cls).__new__(cls, value)

        self.quote = quote
        return self

    def __reduce__(self):
        return quoted_name, (util.text_type(self), self.quote)

    def _memoized_method_lower(self):
        # quoted identifiers are case-exact; never change their case
        if self.quote:
            return self
        else:
            return util.text_type(self).lower()

    def _memoized_method_upper(self):
        # quoted identifiers are case-exact; never change their case
        if self.quote:
            return self
        else:
            return util.text_type(self).upper()

    def __repr__(self):
        if util.py2k:
            # Python 2: encode to a bytes str for safe interpolation.
            # (A previous version nested an unreachable
            # ``if not util.py2k: backslashed.decode(...)`` step inside
            # this branch; that dead code has been removed.)
            backslashed = self.encode("ascii", "backslashreplace")
            return "'%s'" % backslashed
        else:
            return str.__repr__(self)
class _truncated_label(quoted_name):
    """A unicode subclass used to identify symbolic "
    "names that may require truncation."""

    __slots__ = ()

    def __new__(cls, value, quote=None):
        # inherit the quote flag from the incoming value when present
        quote = getattr(value, "quote", quote)
        # return super(_truncated_label, cls).__new__(cls, value, quote, True)
        return super(_truncated_label, cls).__new__(cls, value, quote)

    def __reduce__(self):
        return self.__class__, (util.text_type(self), self.quote)

    def apply_map(self, map_):
        # no-op here; overridden by _anonymous_label to interpolate
        # anonymous-name tokens
        return self
class conv(_truncated_label):
    """Mark a string indicating that a name has already been converted
    by a naming convention.

    This is a string subclass that indicates a name that should not be
    subject to any further naming conventions.

    E.g. when we create a :class:`.Constraint` using a naming convention
    as follows::

        m = MetaData(naming_convention={
            "ck": "ck_%(table_name)s_%(constraint_name)s"
        })
        t = Table('t', m, Column('x', Integer),
                        CheckConstraint('x > 5', name='x5'))

    The name of the above constraint will be rendered as ``"ck_t_x5"``.
    That is, the existing name ``x5`` is used in the naming convention as the
    ``constraint_name`` token.

    In some situations, such as in migration scripts, we may be rendering
    the above :class:`.CheckConstraint` with a name that's already been
    converted.  In order to make sure the name isn't double-modified, the
    new name is applied using the :func:`_schema.conv` marker.  We can
    use this explicitly as follows::

        m = MetaData(naming_convention={
            "ck": "ck_%(table_name)s_%(constraint_name)s"
        })
        t = Table('t', m, Column('x', Integer),
                        CheckConstraint('x > 5', name=conv('ck_t_x5')))

    Where above, the :func:`_schema.conv` marker indicates that the constraint
    name here is final, and the name will render as ``"ck_t_x5"`` and not
    ``"ck_t_ck_t_x5"``

    .. versionadded:: 0.9.4

    .. seealso::

        :ref:`constraint_naming_conventions`

    """

    __slots__ = ()  # pure marker subclass; no additional state
class _defer_name(_truncated_label):
    """Mark a name as 'deferred' for the purposes of automated name
    generation.

    """

    __slots__ = ()

    def __new__(cls, value):
        if value is None:
            # no name was given; use the shared "unnamed" sentinel
            return _NONE_NAME
        elif isinstance(value, conv):
            # names already processed by a naming convention pass
            # through unchanged
            return value
        else:
            return super(_defer_name, cls).__new__(cls, value)

    def __reduce__(self):
        return self.__class__, (util.text_type(self),)
class _defer_none_name(_defer_name):
    """Indicate a 'deferred' name that was ultimately the value None."""

    __slots__ = ()


# singleton sentinel returned by _defer_name(None)
_NONE_NAME = _defer_none_name("_unnamed_")

# for backwards compatibility in case
# someone is re-implementing the
# _truncated_identifier() sequence in a custom
# compiler
_generated_label = _truncated_label
class _anonymous_label(_truncated_label):
    """A unicode subclass used to identify anonymously
    generated names."""

    __slots__ = ()

    def __add__(self, other):
        # concatenation preserves the anonymous marker and quote flag
        return _anonymous_label(
            quoted_name(
                util.text_type.__add__(self, util.text_type(other)), self.quote
            )
        )

    def __radd__(self, other):
        return _anonymous_label(
            quoted_name(
                util.text_type.__add__(util.text_type(other), self), self.quote
            )
        )

    def apply_map(self, map_):
        # interpolate the "%(<id> <name>)s" token against the given map
        if self.quote is not None:
            # preserve quoting only if necessary
            return quoted_name(self % map_, self.quote)
        else:
            # else skip the constructor call
            return self % map_
def _as_truncated(value):
    """Coerce the given value to :class:`._truncated_label`.

    Existing :class:`._truncated_label` and
    :class:`._anonymous_label` objects are passed
    unchanged.

    """
    if not isinstance(value, _truncated_label):
        value = _truncated_label(value)
    return value
def _string_or_unprintable(element):
    """Return *element* if it is already a string, otherwise its str()
    form, falling back to a placeholder when stringification raises."""
    if not isinstance(element, util.string_types):
        try:
            element = str(element)
        except Exception:
            element = "unprintable element %r" % element
    return element
def _expand_cloned(elements):
"""Expand the given set of ClauseElements to be the set of all 'cloned'
predecessors.
"""
return itertools.chain(*[x._cloned_set for x in elements])
def _select_iterables(elements):
"""Expand tables into individual columns in the
given list of column expressions.
"""
return itertools.chain(*[c._select_iterable for c in elements])
def _cloned_intersection(a, b):
    """Return the intersection of sets a and b, counting
    any overlap between 'cloned' predecessors.

    The returned set is in terms of the entities present within 'a'.

    """
    overlap = set(_expand_cloned(a)).intersection(_expand_cloned(b))
    return {
        element
        for element in a
        if overlap.intersection(element._cloned_set)
    }
def _cloned_difference(a, b):
    """Return the elements of set 'a' having no overlap -- including via
    'cloned' predecessors -- with any element of set 'b'."""
    overlap = set(_expand_cloned(a)).intersection(_expand_cloned(b))
    return {
        element
        for element in a
        if not overlap.intersection(element._cloned_set)
    }
@util.dependencies("sqlalchemy.sql.functions")
def _labeled(functions, element):
    """Ensure *element* carries a label; nameless expressions and
    function calls receive an anonymous label."""
    if not hasattr(element, "name") or isinstance(
        element, functions.FunctionElement
    ):
        # label(None) generates an anonymous label
        return element.label(None)
    else:
        return element


def _is_column(col):
    """True if ``col`` is an instance of
    :class:`_expression.ColumnElement`."""
    return isinstance(col, ColumnElement)
def _find_columns(clause):
    """Locate Column objects within the given expression."""
    cols = util.column_set()
    # visitor callback collects every 'column' node into the set
    traverse(clause, {}, {"column": cols.add})
    return cols
# there is some inconsistency here between the usage of
# inspect() vs. checking for Visitable and __clause_element__.
# Ideally all functions here would derive from inspect(),
# however the inspect() versions add significant callcount
# overhead for critical functions like _interpret_as_column_or_from().
# Generally, the column-based functions are more performance critical
# and are fine just checking for __clause_element__(). It is only
# _interpret_as_from() where we'd like to be able to receive ORM entities
# that have no defined namespace, hence inspect() is needed there.
def _column_as_key(element):
    """Return a string key for *element*: strings pass through,
    ``__clause_element__`` wrappers are unwrapped, otherwise ``.key``
    is used (None when unavailable)."""
    if isinstance(element, util.string_types):
        return element
    if hasattr(element, "__clause_element__"):
        element = element.__clause_element__()
    try:
        return element.key
    except AttributeError:
        return None
def _clause_element_as_expr(element):
if hasattr(element, "__clause_element__"):
return element.__clause_element__()
else:
return element
def _literal_as_label_reference(element):
    """Coerce a string into a _textual_label_reference marker; other
    objects are unwrapped and passed through _literal_as_text."""
    if isinstance(element, util.string_types):
        return _textual_label_reference(element)
    elif hasattr(element, "__clause_element__"):
        element = element.__clause_element__()
    return _literal_as_text(element)


def _literal_and_labels_as_label_reference(element):
    """As _literal_as_label_reference, but additionally wrap a
    ColumnElement that carries an _order_by_label_element in a
    _label_reference marker."""
    if isinstance(element, util.string_types):
        return _textual_label_reference(element)
    elif hasattr(element, "__clause_element__"):
        element = element.__clause_element__()
    if (
        isinstance(element, ColumnElement)
        and element._order_by_label_element is not None
    ):
        return _label_reference(element)
    else:
        return _literal_as_text(element)


def _expression_literal_as_text(element):
    # thin alias for _literal_as_text with its default (no coercion)
    return _literal_as_text(element)
def _literal_as(element, text_fallback):
    """Coerce *element* to a ClauseElement: Visitables and
    ``__clause_element__`` objects pass through, strings are delegated
    to *text_fallback*, None/bool become SQL constants; anything else
    raises ArgumentError."""
    if isinstance(element, Visitable):
        return element
    elif hasattr(element, "__clause_element__"):
        return element.__clause_element__()
    elif isinstance(element, util.string_types):
        return text_fallback(element)
    elif isinstance(element, (util.NoneType, bool)):
        return _const_expr(element)
    else:
        raise exc.ArgumentError(
            "SQL expression object expected, got object of type %r "
            "instead" % type(element)
        )


def _literal_as_text(element, allow_coercion_to_text=False):
    # strings become TextClause only when explicitly allowed; otherwise
    # _no_text_coercion raises an instructive error
    if allow_coercion_to_text:
        return _literal_as(element, TextClause)
    else:
        return _literal_as(element, _no_text_coercion)


def _literal_as_column(element):
    # strings are interpreted as column names (ColumnClause)
    return _literal_as(element, ColumnClause)
def _no_column_coercion(element):
    """Raise an instructive error for a raw string used where a column
    expression is required, suggesting text(), literal_column() or
    column() as appropriate."""
    element = str(element)
    # if the string doesn't look like a plain identifier it is probably
    # a literal expression; tailor the suggestion accordingly
    guess_is_literal = not _guess_straight_column.match(element)
    raise exc.ArgumentError(
        "Textual column expression %(column)r should be "
        "explicitly declared with text(%(column)r), "
        "or use %(literal_column)s(%(column)r) "
        "for more specificity"
        % {
            "column": util.ellipses_string(element),
            "literal_column": "literal_column"
            if guess_is_literal
            else "column",
        }
    )


def _no_text_coercion(
    element, exc_cls=exc.ArgumentError, extra=None, err=None
):
    """Raise *exc_cls* (chaining the optional original error *err*) for a
    raw string used where a SQL expression is required."""
    util.raise_(
        exc_cls(
            "%(extra)sTextual SQL expression %(expr)r should be "
            "explicitly declared as text(%(expr)r)"
            % {
                "expr": util.ellipses_string(element),
                "extra": "%s " % extra if extra else "",
            }
        ),
        replace_context=err,
    )
def _no_literals(element):
    """Unwrap ``__clause_element__`` objects; reject plain Python
    literals with an error directing the user to text() or literal()."""
    if hasattr(element, "__clause_element__"):
        return element.__clause_element__()
    elif not isinstance(element, Visitable):
        raise exc.ArgumentError(
            "Ambiguous literal: %r. Use the 'text()' "
            "function to indicate a SQL expression "
            "literal, or 'literal()' to indicate a "
            "bound value." % (element,)
        )
    else:
        return element


def _is_literal(element):
    """True if *element* is a plain Python value, i.e. neither a SQL
    element nor adaptable to one via ``__clause_element__``."""
    return not isinstance(element, Visitable) and not hasattr(
        element, "__clause_element__"
    )
def _only_column_elements_or_none(element, name):
    # None passes through; anything else must resolve to a ColumnElement
    if element is None:
        return None
    else:
        return _only_column_elements(element, name)


def _only_column_elements(element, name):
    """Resolve *element* to a ColumnElement or raise ArgumentError,
    naming the offending argument *name* in the message."""
    if hasattr(element, "__clause_element__"):
        element = element.__clause_element__()
    if not isinstance(element, ColumnElement):
        raise exc.ArgumentError(
            "Column-based expression object expected for argument "
            "'%s'; got: '%s', type %s" % (name, element, type(element))
        )
    return element
def _literal_as_binds(element, name=None, type_=None):
    """Coerce a plain Python value into a unique BindParameter (None
    becomes the SQL NULL construct); SQL elements and
    ``__clause_element__`` objects pass through."""
    if hasattr(element, "__clause_element__"):
        return element.__clause_element__()
    elif not isinstance(element, Visitable):
        if element is None:
            return Null()
        else:
            return BindParameter(name, element, type_=type_, unique=True)
    else:
        return element


# matches strings that look like a simple identifier: a leading word
# character followed by no whitespace
_guess_straight_column = re.compile(r"^\w\S*$", re.I)
def _interpret_as_column_or_from(element):
    """Coerce *element* for use in the columns clause of a SELECT.

    Accepts Visitables, ``__clause_element__`` objects, inspectable
    selectables, None/bool constants, the special "*" string, and
    numbers; any other string raises via _no_column_coercion.
    """
    if isinstance(element, Visitable):
        return element
    elif hasattr(element, "__clause_element__"):
        return element.__clause_element__()

    insp = inspection.inspect(element, raiseerr=False)
    if insp is None:
        if isinstance(element, (util.NoneType, bool)):
            return _const_expr(element)
    elif hasattr(insp, "selectable"):
        return insp.selectable

    # be forgiving as this is an extremely common
    # and known expression
    if element == "*":
        guess_is_literal = True
    elif isinstance(element, (numbers.Number)):
        return ColumnClause(str(element), is_literal=True)
    else:
        # raises; guess_is_literal is therefore only ever read for "*"
        _no_column_coercion(element)
    return ColumnClause(element, is_literal=guess_is_literal)
def _const_expr(element):
    """Map the Python constants None/False/True (or already-constructed
    constant nodes) to their SQL expression forms."""
    if isinstance(element, (Null, False_, True_)):
        return element
    elif element is None:
        return Null()
    elif element is False:
        return False_()
    elif element is True:
        return True_()
    else:
        raise exc.ArgumentError("Expected None, False, or True")
def _type_from_args(args):
for a in args:
if not a.type._isnull:
return a.type
else:
return type_api.NULLTYPE
def _corresponding_column_or_error(fromclause, column, require_embedded=False):
    """Return ``fromclause.corresponding_column(column)``, raising
    InvalidRequestError (naming both sides) when no match is found."""
    c = fromclause.corresponding_column(
        column, require_embedded=require_embedded
    )
    if c is None:
        raise exc.InvalidRequestError(
            "Given column '%s', attached to table '%s', "
            "failed to locate a corresponding column from table '%s'"
            % (column, getattr(column, "table", None), fromclause.description)
        )
    return c
class AnnotatedColumnElement(Annotated):
    """Annotated form of ColumnElement: resets the memoized comparator
    and proxies name/key/table/info from the wrapped element."""

    def __init__(self, element, values):
        Annotated.__init__(self, element, values)
        # the annotated copy needs a comparator bound to itself
        ColumnElement.comparator._reset(self)
        # discard memoized None values so the memoized properties below
        # can re-pull them from the wrapped element
        for attr in ("name", "key", "table"):
            if self.__dict__.get(attr, False) is None:
                self.__dict__.pop(attr)

    def _with_annotations(self, values):
        clone = super(AnnotatedColumnElement, self)._with_annotations(values)
        ColumnElement.comparator._reset(clone)
        return clone

    @util.memoized_property
    def name(self):
        """Pull 'name' from parent, if not present"""
        return self._Annotated__element.name

    @util.memoized_property
    def table(self):
        """Pull 'table' from parent, if not present"""
        return self._Annotated__element.table

    @util.memoized_property
    def key(self):
        """Pull 'key' from parent, if not present"""
        return self._Annotated__element.key

    @util.memoized_property
    def info(self):
        # always proxied from the wrapped element
        return self._Annotated__element.info

    @util.memoized_property
    def anon_label(self):
        return self._Annotated__element.anon_label
|
gltn/stdm
|
stdm/third_party/sqlalchemy/sql/elements.py
|
Python
|
gpl-2.0
| 161,275
|
[
"VisIt"
] |
63bad5cc7d36b54d40851d05830041ce328d320e54fa3c897f7f6a6c99d9b30d
|
#pylint: disable=missing-docstring
####################################################################################################
# DO NOT MODIFY THIS HEADER #
# MOOSE - Multiphysics Object Oriented Simulation Environment #
# #
# (c) 2010 Battelle Energy Alliance, LLC #
# ALL RIGHTS RESERVED #
# #
# Prepared by Battelle Energy Alliance, LLC #
# Under Contract No. DE-AC07-05ID14517 #
# With the U. S. Department of Energy #
# #
# See COPYRIGHT for full restrictions #
####################################################################################################
#pylint: enable=missing-docstring
import os
import re
import collections
import logging
import anytree
import mooseutils
import MooseDocs
LOG = logging.getLogger(__name__)
class NodeCore(anytree.NodeMixin):
    """
    A general NodeMixin that provides color printing and the full_name property. This serves as
    the base class for all node used throughout MooseDocs.

    Inputs:
        name[str]: The name of the node.
        parent[NodeCore]: The parent node, use None to create a root node.
        display[str]: (optional) The display name; defaults to 'name'.
        root_directory[str]: (optional) The root directory; defaults to MooseDocs.ROOT_DIR.
    """
    COLOR = 'RESET'

    def __init__(self, name, parent=None, display=None, root_directory=None):
        super(NodeCore, self).__init__()
        self.parent = parent
        self.name = name
        self.status = collections.defaultdict(int)
        self._root_directory = root_directory if root_directory else MooseDocs.ROOT_DIR
        self.__cache = dict()  # caches default name-based findall() results
        self._display = display
        if self._display is None:
            self._display = name

    @property
    def root_directory(self):
        """
        Return the root directory of the file (default is MooseDocs.ROOT_DIR)
        """
        return self._root_directory

    @property
    def full_name(self):
        """
        Return the full name of the node, recursively call the name of parents up to the root.
        """
        if self.parent is not None:
            return self.separator.join([self.parent.full_name, self.name]).replace('//', '/')
        return self.name

    def findall(self, name='', filter_=None):
        """
        Locate nodes based on a filter.

        Args:
            name[str]: (optional) When the 'filter_' option is not supplied, this name is used
                       in the default filter (default: '').
            filter_[function]: A filter function, if it returns true keep the node. If not supplied
                               the default is to search for all nodes that have a "full_name" that
                               ends with the name provided in the 'name' argument. If the 'name'
                               argument is not set the default will return all nodes. The supplied
                               function should accept the node as an argument.
        """
        # BUGFIX: only use the cache for the default name-based search. The previous
        # conditions tested 'filter_ is not None', which meant (a) a lookup with a
        # *custom* filter could return cached results from an unrelated name-based
        # query, and (b) the default search populated the cache but never read it.
        cacheable = bool(name) and (filter_ is None)
        if cacheable and (name in self.__cache):
            return self.__cache[name]

        if filter_ is None:
            filter_ = lambda n: n.full_name.endswith(name)
        nodes = [node for node in anytree.iterators.PreOrderIter(self.root, filter_=filter_)]

        if cacheable:
            self.__cache[name] = nodes
        return nodes

    def replace(self, node):
        """
        Replace the current node with the node provided.

        Args:
            node[anytree.NodeMixin]: The node that is replacing the current node.
        """
        # NOTE(review): the findall cache is not invalidated here, so cached
        # results may be stale after a replacement -- confirm intended.
        node.parent = self.parent
        for child in self.children:
            child.parent = node
        self.parent = None
        return node

    def reset(self):
        """
        Called before a page is re-created.
        """
        self.status.clear()

    @property
    def display(self):
        """
        Return the display name.
        """
        return self._display

    def __repr__(self):
        """
        Print the node name.
        """
        oname = self.__class__.__name__[:-4]  # strip the 4-character class suffix (e.g. 'Core')
        msg = '{}: {}'.format(oname, self.full_name)
        return mooseutils.colorText(msg, self.COLOR)

    def __str__(self):
        """
        Calling print on this object will print the tree nice and pretty.
        """
        return str(anytree.RenderTree(self))
class MarkdownNode(NodeCore):
    """
    A simple node that accepts markdown content as raw text.

    Inputs:
        name[str]: The name of the node.
        content[str]: (optional) The raw markdown text for this node.
    """
    def __init__(self, name, content=None, **kwargs):
        super(MarkdownNode, self).__init__(name, **kwargs)
        self._content = content

    @property
    def filename(self):
        """
        Provide a 'filename' for when MooseMarkdown object is used with a raw string.
        """
        return 'supplied string.'

    @property
    def content(self):
        """
        Return the supplied markdown content.
        """
        return self._content
class FileTreeNodeBase(NodeCore):
    """
    Base node type for the markdown file tree.

    Inputs:
        name[str]: The name of the node.
        base[str]: (optional) The base directory; inherited from the parent
                   when not given (empty string if there is no parent).
    """
    COLOR = 'YELLOW'

    def __init__(self, name, base=None, **kwargs):
        super(FileTreeNodeBase, self).__init__(name, **kwargs)
        if base is None:
            # inherit the base location from the parent, when one exists
            if self.parent:
                base = self.parent.base
            else:
                base = ''
        self._base = base

    @property
    def base(self):
        """
        Return the 'base' location.
        """
        return self._base
class DirectoryNode(FileTreeNodeBase):
    """
    This node is used when for directories that do not contain 'index.md' files. This is a separate
    class for color output as to have a distinct class for comparing against.
    """
    @property
    def destination(self):
        """
        Return an invalid location (None), this is needed for breadcrumbs.
        """
        return None
class FileNodeBase(FileTreeNodeBase):
    """
    Base class for building file tree for converting markdown and copying files (e.g., png, js).
    """
    @property
    def basename(self):
        """
        Return the absolute "base name" of the node (root directory + base + full_name).
        """
        return os.path.join(self.root_directory, self._base, self.full_name.strip('/'))

    @property
    def filename(self):
        """
        Return the absolute path to the file.

        Raises:
            NotImplementedError: must be implemented by child classes.
        """
        raise NotImplementedError("The 'filename' property must be defined.")

    @property
    def destination(self):
        """
        Return the local path to the html file to be created, i.e. the name the full_name.
        """
        return os.path.join(self.full_name.strip('/'), 'index.html')
class MarkdownFileNodeBase(FileNodeBase): #pylint: disable=abstract-method
    """
    Base class for node that is part of the markdown file tree and has an associated markdown file.

    As with all paths in MooseDocs it is assumed that all supplied paths are given relative to the
    repository root directory (i.e., ROOT_DIR).

    Inputs:
        name[str]: The name of the node.
        base[str]: The base directory name that is inserted into the filename before the 'full_name'.
        parent[NodeCore]: The parent node, use None to create a root node.
    """
    @property
    def content(self):
        """
        Return the raw markdown content.
        """
        # NOTE(review): str.decode implies Python 2 file I/O; under Python 3
        # this would need open(..., encoding='utf-8') instead -- confirm.
        with open(self.filename, 'r') as fid:
            return fid.read().decode('utf-8')
class CopyFileNode(FileNodeBase):
    """
    File tree node for general files that need be copied and linked.
    """
    COLOR = 'BLUE'

    @property
    def filename(self):
        """
        Return the absolute path to the file; 'basename' is already complete.
        """
        # os.path.join() with a single argument was a no-op; return directly
        return self.basename

    @property
    def destination(self):
        """
        Return the relative path where the copied file is linked.
        """
        return self.full_name.lstrip('/')
class MarkdownFileIndexNode(MarkdownFileNodeBase):
    """
    Node for directories that contain a index.md file within the build file tree.
    """
    COLOR = 'MAGENTA'

    @property
    def filename(self):
        # the markdown for a directory node is its contained index.md
        return os.path.join(self.basename, 'index.md')

    @property
    def display(self):
        # the unnamed root node is displayed as 'home'
        if self.name == '':
            return 'home'
        else:
            return self._display
class MarkdownFilePageNode(MarkdownFileNodeBase):
    """
    Node for markdown files (e.g., Diffusion.md) within the build file tree.
    """
    COLOR = 'CYAN'

    @property
    def filename(self):
        # page nodes map directly to '<basename>.md'
        return self.basename + '.md'
class SyntaxNodeBase(NodeCore):
    """
    Node for MOOSE syntax that serves as the parent for actions/objects.
    """
    # Marker written as the first line of generated stub pages so they can be
    # recognized later as "no real content added yet".
    STUB_HEADER = '<!-- MOOSE Documentation Stub: Remove this when content is added. -->\n'

    def __init__(self, name, **kwargs):
        super(SyntaxNodeBase, self).__init__(name, **kwargs)
        self.__hidden = False
        self.__check_status = None  # NOTE(review): assigned but never read in this class

    @property
    def hidden(self):
        """
        Return the hidden status of the node.
        """
        return self.__hidden

    @hidden.setter
    def hidden(self, value):
        """
        Set the hidden status for the node.
        """
        if isinstance(value, bool):
            self.__hidden = value
        else:
            raise TypeError("The supplied value must be a boolean.")

    @property
    def groups(self):
        """
        Return groups associated with this node or entire tree (i.e., where the syntax is defined).
        """
        out = dict()
        # only ActionNode descendants carry group information
        for node in self.descendants:
            if isinstance(node, ActionNode):
                out.update(node.groups)
        return out

    def hasGroups(self, groups):
        """
        Return True if ANY of the supplied groups exist in this object or children of this object.
        """
        all_groups = set()
        for node in self.descendants:
            all_groups.update(node.groups.keys())
        return len(all_groups.intersection(groups)) > 0

    def syntax(self, *args, **kwargs):
        """
        Return SyntaxNode nodes (see __nodeFinder).
        """
        return self.__nodeFinder(SyntaxNode, *args, **kwargs)

    def objects(self, *args, **kwargs):
        """
        Return MooseObjectNode nodes (see __nodeFinder).
        """
        return self.__nodeFinder(MooseObjectNode, *args, **kwargs)

    def actions(self, *args, **kwargs):
        """
        Return ActionNode nodes (see __nodeFinder).
        """
        return self.__nodeFinder(ActionNode, *args, **kwargs)

    def markdown(self, install, absolute=True):
        """
        Return the expected markdown file name.
        """
        raise NotImplementedError("The 'markdown' method must return the expected markdown "
                                  "filename.")

    def check(self, install, generate=False, groups=None, update=None):
        """
        Check that the expected documentation exists.

        Inputs:
            install[str]: The install directory used to compute the markdown filename.
            generate[bool]: When True, create a stub page for missing documentation.
            groups: (optional) Restrict the check to nodes within the supplied groups.
            update: When True, re-write existing stub pages with current stub content.

        Return:
            True, False, or None, where True indicates that the page exists, False indicates the
            page does not exist or doesn't contain content, and None indicates that the page is
            hidden.
        """
        # NOTE(review): 'out' is never actually set to True below; an existing
        # page with real content yields None -- confirm against callers.
        out = None # not checked because it was hidden
        if self.hidden:
            LOG.debug("Skipping documentation check for %s, it is hidden.", self.full_name)
        elif groups and not set(self.groups).intersection(groups):
            LOG.debug("Skipping documentation check for %s (%s), it is not listed in the provided "
                      "groups: %s.", self.full_name, self.groups.keys(), groups)
        else:
            filename = self.markdown(install)
            if not os.path.isfile(filename):
                # page missing entirely; optionally generate a stub
                out = False
                LOG.error("No documentation for %s, documentation for this object should be "
                          "created in: %s", self.full_name, filename)
                if generate:
                    if not os.path.exists(os.path.dirname(filename)):
                        os.makedirs(os.path.dirname(filename))
                    LOG.info('Creating stub page for %s %s', self.full_name, filename)
                    with open(filename, 'w') as fid:
                        content = self._defaultContent()
                        if not isinstance(content, str):
                            raise TypeError("The _defaultContent method must return a str.")
                        fid.write(content)
            else:
                # page exists; a stub header on the first line means no real content
                with open(filename, 'r') as fid:
                    lines = fid.readlines()
                if lines and self.STUB_HEADER in lines[0]:
                    out = False
                    LOG.error("A MOOSE generated stub page for %s exists, but no content was "
                              "added. Add documentation content to %s.", self.name, filename)
                    if update:
                        LOG.info("Updating stub page for %s in file %s.", self.name, filename)
                        with open(filename, 'w') as fid:
                            content = self._defaultContent()
                            if not isinstance(content, str):
                                raise TypeError("The _defaultContent method must return a str.")
                            fid.write(content)
        return out

    def _defaultContent(self):
        """
        Markdown stub content.
        """
        raise NotImplementedError("The _defaultContent method must be defined in child classes "
                                  "and return a string.")

    def __nodeFinder(self, node_type, syntax='', group=None, recursive=False):
        """
        A helper method for finding nodes of a given type, syntax, and group.

        Inputs:
            node_type[NodeCore]: The type of node to consider.
            syntax: (optional) The syntax that must be within the object 'full_name' property.
            group: (optional) The group to limit the search.
            recursive: When True the search will look through all nodes in the entire tree, when
                       False only the children of the node are considered.
        """
        if recursive:
            filter_ = lambda node: (syntax in node.full_name) and \
                                   isinstance(node, node_type) and \
                                   (group is None or group in node.groups)
            return self.findall(filter_=filter_)
        else:
            return [node for node in self.children if (syntax in node.full_name) and \
                    isinstance(node, node_type) and \
                    (group is None or group in node.groups)]

    def __repr__(self):
        """
        Print the node name.
        """
        oname = self.__class__.__name__[:-4]  # strip the 4-character class suffix (e.g. 'Base')
        msg = '{}: {} hidden={} groups={}'.format(oname,
                                                  str(self.full_name),
                                                  self.hidden,
                                                  self.groups.keys())
        return mooseutils.colorText(msg, self.COLOR)
class SyntaxNode(SyntaxNodeBase):
    """
    Defines a class for syntax only (i.e., a node not attached to a C++ class).

    This needs to be a separate class for type checking.
    """
    COLOR = 'GREEN'

    def markdown(self, install, absolute=True):
        """
        Return the expected markdown file name (an 'index.md' within the syntax folder).
        """
        path = os.path.join(install, self.full_name.strip('/')).split('/')
        path += ['index.md']
        if absolute:
            return os.path.join(self.root_directory, *path)
        else:
            return os.path.join(*path)

    def _defaultContent(self):
        """
        Markdown stub content.
        """
        stub = self.STUB_HEADER
        stub += '\n# {} System\n'.format(self.name)
        stub += '!syntax objects {}\n\n'.format(self.full_name)
        stub += '!syntax subsystems {}\n\n'.format(self.full_name)
        stub += '!syntax actions {}\n'.format(self.full_name)
        return stub
class ObjectNode(SyntaxNodeBase): #pylint: disable=abstract-method
    """
    Base class for nodes associated with C++ objects (Action, MooseObjectAction, or MooseObject).

    Inputs:
        name[str]: The name of the node.
        item[dict]: JSON dump entry for the object; must provide 'description' and
                    'parameters', and may provide 'tasks' and 'file_info'.
    """
    def __init__(self, name, item, **kwargs):
        super(ObjectNode, self).__init__(name, **kwargs)
        self.__description = item['description']
        self.__parameters = item['parameters']
        self.__groups = dict()
        self._locateGroupNames(item)
        if 'tasks' in item:
            # NOTE(review): dict.itervalues() is Python 2 only.
            for values in item['tasks'].itervalues():
                self._locateGroupNames(values)

    @property
    def description(self):
        """
        Return the object description.
        """
        return self.__description

    @property
    def parameters(self):
        """
        Return the object parameters.
        """
        return self.__parameters

    def markdown(self, install, absolute=True):
        """
        The expected markdown file.
        """
        # NOTE(review): keys()[0] relies on Python 2 list-returning keys(); the
        # chosen folder is dictionary-order dependent when multiple groups exist.
        folder = self.__groups.keys()[0]
        path = os.path.join(install, self.full_name.strip('/')).split('/')
        path.insert(-1, folder)
        if absolute:
            return os.path.join(self.root_directory, '/'.join(path) + '.md')
        else:
            return os.path.join(*path) + '.md'

    @property
    def groups(self):
        """
        Return groups associated with this node or entire tree (i.e., where the syntax is defined).
        """
        return self.__groups

    def _locateGroupNames(self, item):
        """
        Creates a list of groups (i.e., Apps).
        """
        if 'file_info' in item:
            # NOTE(review): dict.iterkeys() is Python 2 only.
            for info in item['file_info'].iterkeys():
                match = re.search(r'/(?P<group>\w+)(?:App|Syntax)\.C', info)
                if match:
                    # split CamelCase into words, e.g. 'NavierStokes' -> 'Navier Stokes'
                    heading = re.sub(r'(?<=[a-z])([A-Z])', r' \1', match.group('group'))
                    folder = heading.replace(' ', '_').lower()
                    self.__groups[folder] = heading
        else:
            self.__groups['framework'] = 'Framework'
class MooseObjectNode(ObjectNode):
    """
    MooseObject nodes.

    Inputs:
        key[str]: The input-file name of the object.
        item[dict]: JSON dump entry (see ObjectNode); the optional 'class' entry
                    provides the C++ class name when it differs from 'key'.
    """
    COLOR = 'YELLOW'

    def __init__(self, key, item, **kwargs):
        super(MooseObjectNode, self).__init__(key, item, **kwargs)
        self.__class_name = item['class'] if 'class' in item else key

    @property
    def class_name(self):
        """
        Return the name of the C++ class, which can be different than the input file name.
        """
        return self.__class_name

    def _defaultContent(self):
        """
        Markdown stub content.
        """
        stub = self.STUB_HEADER
        stub += '\n# {}\n'.format(self.name)
        stub += '!syntax description {}\n\n'.format(self.full_name)
        stub += '!syntax parameters {}\n\n'.format(self.full_name)
        stub += '!syntax inputs {}\n\n'.format(self.full_name)
        stub += '!syntax children {}\n'.format(self.full_name)
        return stub
class ActionNode(ObjectNode):
    """
    Action nodes.
    """
    COLOR = 'MAGENTA'

    @property
    def class_name(self):
        """
        Return the name of the C++ class for the action.
        """
        return self.name

    def _defaultContent(self):
        """
        Markdown stub content.
        """
        stub = self.STUB_HEADER
        stub += '\n# {}\n'.format(self.name)
        stub += '!syntax description {}\n\n'.format(self.full_name)
        stub += '!syntax parameters {}\n'.format(self.full_name)
        return stub
class MooseObjectActionNode(ActionNode):
    """
    MooseObjectAction nodes.
    """
    # distinct type retained for isinstance-based filtering and color output
    COLOR = 'CYAN'
|
yipenggao/moose
|
python/MooseDocs/common/nodes.py
|
Python
|
lgpl-2.1
| 20,082
|
[
"MOOSE"
] |
12a6e376cdc816d66aa57363b7bd7723193863be53490bd4110be35665a87980
|
# Copyright 2016-2017 Nitor Creations Oy
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Main module for nitor-deploy-tools
"""
# Scripts installed onto PATH verbatim (not wrapped by the 'ndt' entrypoint).
PATH_COMMANDS = [
    'bin/create-shell-archive.sh',
    'bin/ensure-letsencrypt-certs.sh',
    'bin/lastpass-fetch-notes.sh',
    'bin/lpssh',
    'bin/encrypt-and-mount.sh',
    'bin/setup-fetch-secrets.sh',
    'bin/ssh-hostkeys-collect.sh'
]

# Entrypoints ('name=module:function') exposed both as standalone console
# commands and as 'ndt' subcommands.
NDT_AND_CONSOLE = [
    'n-include=n_utils.cli:resolve_include',
    'n-include-all=n_utils.cli:resolve_all_includes',
    'cf-logs-to-cloudwatch=n_utils.cli:logs_to_cloudwatch',
    'logs-to-cloudwatch=n_utils.cli:logs_to_cloudwatch',
    'associate-eip=n_utils.cli:associate_eip',
    'signal-cf-status=n_utils.cli:signal_cf_status',
    'ec2-associate-eip=n_utils.cli:associate_eip'
]

# Entrypoints available only as 'ndt' subcommands.
NDT_ONLY = [
    'assume-role=n_utils.cli:assume_role',
    'list-file-to-json=n_utils.cli:list_file_to_json',
    'add-deployer-server=n_utils.cli:add_deployer_server',
    'yaml-to-json=n_utils.cli:yaml_to_json',
    'yaml-to-yaml=n_utils.cli:yaml_to_yaml',
    'json-to-yaml=n_utils.cli:json_to_yaml',
    'pytail=n_utils.cli:read_and_follow',
    'account-id=n_utils.cli:get_account_id',
    'cf-follow-logs=n_utils.cli:tail_stack_logs',
    'logs=n_utils.cli:get_logs',
    'cf-logical-id=n_utils.cli:logical_id',
    'cf-region=n_utils.cli:cf_region',
    'cf-get-parameter=n_utils.cli:get_parameter',
    'cf-signal-status=n_utils.cli:signal_cf_status',
    'cf-stack-name=n_utils.cli:stack_name',
    'cf-stack-id=n_utils.cli:stack_id',
    'ec2-clean-snapshots=n_utils.cli:clean_snapshots',
    'ec2-instance-id=n_utils.cli:instance_id',
    'ec2-region=n_utils.cli:ec2_region',
    'ec2-wait-for-metadata=n_utils.cli:wait_for_metadata',
    'region=n_utils.cli:ec2_region',
    'ec2-get-tag=n_utils.cli:tag',
    'ec2-get-userdata=n_utils.cli:get_userdata',
    'detach-volume=n_utils.cli:detach_volume',
    'mfa-add-token=n_utils.cli:cli_mfa_add_token',
    'mfa-delete-token=n_utils.cli:cli_mfa_delete_token',
    'mfa-code=n_utils.cli:cli_mfa_code',
    'mfa-backup=n_utils.cli:cli_mfa_backup_tokens',
    'mfa-qrcode=n_utils.cli:cli_mfa_to_qrcode',
    'cf-delete-stack=n_utils.cli:delete_stack',
    'setup-cli=n_utils.cli:setup_cli',
    'volume-from-snapshot=n_utils.cli:volume_from_snapshot',
    'snapshot-from-volume=n_utils.cli:snapshot_from_volume',
    'show-stack-params-and-outputs=n_utils.cli:show_stack_params_and_outputs',
    'get-images=n_utils.cli:cli_get_images',
    'promote-image=n_utils.cli:cli_promote_image',
    'share-to-another-region=n_utils.cli:cli_share_to_another_region',
    'register-private-dns=n_utils.cli:cli_register_private_dns',
    'interpolate-file=n_utils.cli:cli_interpolate_file',
    'ecr-ensure-repo=n_utils.cli:cli_ecr_ensure_repo',
    'ecr-repo-uri=n_utils.cli:cli_ecr_repo_uri',
    'upsert-cloudfront-records=n_utils.cli:cli_upsert_cloudfront_records',
    'create-stack=n_utils.cf_bootstrap:create_stack',
    'latest-snapshot=n_utils.volumes:latest_snapshot',
    'create-account=n_utils.cli:cli_create_account',
    'load-parameters=n_utils.cli:cli_load_parameters',
    'read-profile-expiry=n_utils.profile_util:cli_read_profile_expiry',
    'assumed-role-name=n_utils.cli:cli_assumed_role_name',
    'profile-to-env=n_utils.profile_util:profile_to_env',
    'profile-expiry-to-env=n_utils.profile_util:profile_expiry_to_env',
    'enable-profile=n_utils.profile_util:cli_enable_profile',
    'list-jobs=n_utils.cli:cli_list_jobs',
    'list-components=n_utils.cli:cli_list_components'
]

# Shell scripts available only as 'ndt' subcommands. Each deploy-X script is
# paired with an undeploy-X counterpart.
NDT_ONLY_SCRIPT = [
    'bake-docker.sh',
    'bake-image.sh',
    'deploy-stack.sh',
    'undeploy-stack.sh',
    'deploy-serverless.sh',
    'undeploy-serverless.sh',
    'deploy-cdk.sh',
    'undeploy-cdk.sh',  # BUGFIX: was 'undeploy-undeploy.sh', breaking the deploy/undeploy pairing
    'deploy-terraform.sh',
    'undeploy-terraform.sh',
    'print-create-instructions.sh'
]

# Entrypoints exposed only as standalone console commands (not 'ndt' subcommands).
CONSOLE_ONLY = [
    'cf-update-stack=n_utils.cli:update_stack',
    'ndt=n_utils.ndt:ndt',
    'nitor-dt-register-complete=n_utils.project_util:ndt_register_complete',
    'nitor-dt-load-project-env=n_utils.project_util:load_project_env',
    'nitor-dt-enable-profile=n_utils.profile_util:cli_enable_profile'
]
CONSOLESCRIPTS = CONSOLE_ONLY + NDT_AND_CONSOLE


def _script_entry(script):
    """Return the (command name, handler kind) pair for a bundled script,
    stripping a '.sh' suffix and classifying shell vs. plain scripts."""
    if script.endswith(".sh"):
        return script[:-3], "ndtshell"
    return script, "ndtscript"


# Map every 'ndt' subcommand name to its handler: either a script kind
# ('ndtshell'/'ndtscript') or a 'module:function' reference.
COMMAND_MAPPINGS = dict(_script_entry(script) for script in NDT_ONLY_SCRIPT)
for _entry in NDT_AND_CONSOLE + NDT_ONLY:
    _name, _value = _entry.split("=", 1)
    COMMAND_MAPPINGS[_name] = _value
|
NitorCreations/nitor-deploy-tools
|
n_utils/__init__.py
|
Python
|
apache-2.0
| 5,020
|
[
"CDK"
] |
c07e1bef07ab747b160131eda1f8d7b517960d47346c90f323df6b73ba7a08dd
|
#* This file is part of the MOOSE framework
#* https://www.mooseframework.org
#*
#* All rights reserved, see COPYRIGHT for full restrictions
#* https://github.com/idaholab/moose/blob/master/COPYRIGHT
#*
#* Licensed under LGPL 2.1, please see LICENSE for details
#* https://www.gnu.org/licenses/lgpl-2.1.html
import matplotlib.pyplot as plt
import tools

# Create Figure and Axes
figure = plt.figure(facecolor='white')
axes0 = figure.add_subplot(111)

# Level set alone
data = tools.PostprocessorReader('vortex_out.csv')
axes0.plot(data('time'), data('area'), linewidth=1, color='blue', linestyle='-', label='Level set')

# Level set SUPG
data = tools.PostprocessorReader('vortex_supg_out.csv')
axes0.plot(data('time'), data('area'), linewidth=1, color='red', linestyle='-', label='Level set w/ SUPG')

# Level set reinitialization
data = tools.PostprocessorReader('vortex_reinit_out.csv')
axes0.plot(data('time'), data('area'), linewidth=1, color='green', linestyle='-', label='Level set w/ Reinitialization')

# Exact: the initial interface area, drawn as a constant reference line
# (0.0706858347 ~= pi * 0.15**2; presumably the initial circle radius is 0.15 -- confirm)
axes0.plot([0, 2], [0.0706858347,0.0706858347], linewidth=1, color='black', linestyle='-', label='Initial')

# x0-axis Settings
axes0.legend(loc='best')
axes0.set_xlabel('Time, t')
#axes0.set_xlim([0.0, 0.5])

# y0-axis Settings
axes0.set_ylabel('Area')
#axes0.set_ylim([0.068, 0.074])

# Show figure interactively, then write a PNG (not a pdf as previously stated).
# NOTE(review): saving after show() relies on the figure surviving window close;
# consider calling savefig() before show().
plt.show()
figure.savefig("example_vortex_area.png")
|
nuclear-wizard/moose
|
modules/level_set/examples/vortex/area.py
|
Python
|
lgpl-2.1
| 1,388
|
[
"MOOSE"
] |
f4cd2b2866d1b7e0495cc2ef3ff02ecb30b6833fab3baf81cbf8e31637f6498a
|
"""
Results for test_glm.py.
Hard-coded from R or Stata. Note that some of the remaining discrepancy vs.
Stata may be because Stata uses ML by default unless you specifically ask for
IRLS.
"""
import numpy as np
from scikits.statsmodels.compatnp.py3k import asbytes
import glm_test_resids
import os
from scikits.statsmodels.api import add_constant, categorical
# Test Precisions: decimal-place tolerances used by the comparisons in
# test_glm.py (larger number = stricter comparison).
DECIMAL_4 = 4
DECIMAL_3 = 3
DECIMAL_2 = 2
DECIMAL_1 = 1
DECIMAL_0 = 0
class Longley(object):
    """
    Longley used for TestGlmGaussian
    Results are from Stata and R.
    """
    def __init__(self):
        # Each residual row repeats one value five times (five residual
        # columns carrying identical values for this model).
        self.resids = np.array([
            [267.34002976, 267.34002976, 267.34002976, 267.34002976, 267.34002976],
            [-94.0139424, -94.0139424, -94.0139424, -94.0139424, -94.0139424],
            [46.28716776, 46.28716776, 46.28716776, 46.28716776, 46.28716776],
            [-410.11462193, -410.11462193, -410.11462193, -410.11462193, -410.11462193],
            [309.71459076, 309.71459076, 309.71459076, 309.71459076, 309.71459076],
            [-249.31121533, -249.31121533, -249.31121533, -249.31121533, -249.31121533],
            [-164.0489564, -164.0489564, -164.0489564, -164.0489564, -164.0489564],
            [-13.18035687, -13.18035687, -13.18035687, -13.18035687, -13.18035687],
            [14.3047726, 14.3047726, 14.3047726, 14.3047726, 14.3047726],
            [455.39409455, 455.39409455, 455.39409455, 455.39409455, 455.39409455],
            [-17.26892711, -17.26892711, -17.26892711, -17.26892711, -17.26892711],
            [-39.05504252, -39.05504252, -39.05504252, -39.05504252, -39.05504252],
            [-155.5499736, -155.5499736, -155.5499736, -155.5499736, -155.5499736],
            [-85.67130804, -85.67130804, -85.67130804, -85.67130804, -85.67130804],
            [341.93151396, 341.93151396, 341.93151396, 341.93151396, 341.93151396],
            [-206.75782519, -206.75782519, -206.75782519, -206.75782519, -206.75782519]])
        self.null_deviance = 185008826  # taken from R.
        self.params = np.array([
            1.50618723e+01, -3.58191793e-02, -2.02022980e+00, -1.03322687e+00,
            -5.11041057e-02, 1.82915146e+03, -3.48225863e+06])
        self.bse = np.array([
            8.49149258e+01, 3.34910078e-02, 4.88399682e-01, 2.14274163e-01,
            2.26073200e-01, 4.55478499e+02, 8.90420384e+05])
        self.aic_R = 235.23486961695903  # R adds 2 for dof to AIC
        self.aic_Stata = 14.57717943930524  # stata divides by nobs
        self.deviance = 836424.0555058046  # from R
        self.scale = 92936.006167311629
        self.llf = -109.61743480847952
        # NOTE(review): duplicate assignment -- null_deviance was already set above.
        self.null_deviance = 185008826  # taken from R. Rpy bug
        self.bic_Stata = 836399.1760177979  # no bic in R?
        self.df_model = 6
        self.df_resid = 9
        self.chi2 = 1981.711859508729  # TODO: taken from Stata not available
                                       # in sm yet
        # self.pearson_chi2 = 836424.1293162981 # from Stata (?)
        self.fittedvalues = np.array([
            60055.659970240202, 61216.013942398131, 60124.71283224225,
            61597.114621930756, 62911.285409240052, 63888.31121532945,
            65153.048956395127, 63774.180356866214, 66004.695227399934,
            67401.605905447621, 68186.268927114084, 66552.055042522494,
            68810.549973595422, 69649.67130804155, 68989.068486039061,
            70757.757825193927])
class GaussianLog(object):
    """
    Benchmark results for a GLM with Gaussian family and log link.

    Uses generated data. These results are from R and Stata.
    """
    def __init__(self):
        # Residuals were originally loaded from a CSV dump (line kept below
        # for provenance); the values are now inlined.
        # self.resids = np.genfromtxt('./glm_gaussian_log_resid.csv', ',')
        # NOTE(review): each row holds five entries -- presumably the five
        # GLM residual types (response, Pearson, working, Anscombe,
        # deviance); TODO confirm against the consuming test's ordering.
        self.resids = np.array([[3.20800000e-04, 3.20800000e-04,
            8.72100000e-04, 3.20800000e-04, 3.20800000e-04],
            [ 8.12100000e-04, 8.12100000e-04, 2.16350000e-03,
            8.12100000e-04, 8.12100000e-04],
            [ -2.94800000e-04, -2.94800000e-04, -7.69700000e-04,
            -2.94800000e-04, -2.94800000e-04],
            [ 1.40190000e-03, 1.40190000e-03, 3.58560000e-03,
            1.40190000e-03, 1.40190000e-03],
            [ -2.30910000e-03, -2.30910000e-03, -5.78490000e-03,
            -2.30910000e-03, -2.30910000e-03],
            [ 1.10380000e-03, 1.10380000e-03, 2.70820000e-03,
            1.10380000e-03, 1.10380000e-03],
            [ -5.14000000e-06, -5.14000000e-06, -1.23000000e-05,
            -5.14000000e-06, -5.14000000e-06],
            [ -1.65500000e-04, -1.65500000e-04, -3.89200000e-04,
            -1.65500000e-04, -1.65500000e-04],
            [ -7.55400000e-04, -7.55400000e-04, -1.73870000e-03,
            -7.55400000e-04, -7.55400000e-04],
            [ -1.39800000e-04, -1.39800000e-04, -3.14800000e-04,
            -1.39800000e-04, -1.39800000e-04],
            [ -7.17000000e-04, -7.17000000e-04, -1.58000000e-03,
            -7.17000000e-04, -7.17000000e-04],
            [ -1.12200000e-04, -1.12200000e-04, -2.41900000e-04,
            -1.12200000e-04, -1.12200000e-04],
            [ 3.22100000e-04, 3.22100000e-04, 6.79000000e-04,
            3.22100000e-04, 3.22100000e-04],
            [ -3.78000000e-05, -3.78000000e-05, -7.79000000e-05,
            -3.78000000e-05, -3.78000000e-05],
            [ 5.54500000e-04, 5.54500000e-04, 1.11730000e-03,
            5.54500000e-04, 5.54500000e-04],
            [ 3.38400000e-04, 3.38400000e-04, 6.66300000e-04,
            3.38400000e-04, 3.38400000e-04],
            [ 9.72000000e-05, 9.72000000e-05, 1.87000000e-04,
            9.72000000e-05, 9.72000000e-05],
            [ -7.92900000e-04, -7.92900000e-04, -1.49070000e-03,
            -7.92900000e-04, -7.92900000e-04],
            [ 3.33000000e-04, 3.33000000e-04, 6.11500000e-04,
            3.33000000e-04, 3.33000000e-04],
            [ -8.35300000e-04, -8.35300000e-04, -1.49790000e-03,
            -8.35300000e-04, -8.35300000e-04],
            [ -3.99700000e-04, -3.99700000e-04, -6.99800000e-04,
            -3.99700000e-04, -3.99700000e-04],
            [ 1.41300000e-04, 1.41300000e-04, 2.41500000e-04,
            1.41300000e-04, 1.41300000e-04],
            [ -8.50700000e-04, -8.50700000e-04, -1.41920000e-03,
            -8.50700000e-04, -8.50700000e-04],
            [ 1.43000000e-06, 1.43000000e-06, 2.33000000e-06,
            1.43000000e-06, 1.43000000e-06],
            [ -9.12000000e-05, -9.12000000e-05, -1.44900000e-04,
            -9.12000000e-05, -9.12000000e-05],
            [ 6.75500000e-04, 6.75500000e-04, 1.04650000e-03,
            6.75500000e-04, 6.75500000e-04],
            [ 3.97900000e-04, 3.97900000e-04, 6.01100000e-04,
            3.97900000e-04, 3.97900000e-04],
            [ 1.07000000e-05, 1.07000000e-05, 1.57000000e-05,
            1.07000000e-05, 1.07000000e-05],
            [ -8.15200000e-04, -8.15200000e-04, -1.17060000e-03,
            -8.15200000e-04, -8.15200000e-04],
            [ -8.46400000e-04, -8.46400000e-04, -1.18460000e-03,
            -8.46400000e-04, -8.46400000e-04],
            [ 9.91200000e-04, 9.91200000e-04, 1.35180000e-03,
            9.91200000e-04, 9.91200000e-04],
            [ -5.07400000e-04, -5.07400000e-04, -6.74200000e-04,
            -5.07400000e-04, -5.07400000e-04],
            [ 1.08520000e-03, 1.08520000e-03, 1.40450000e-03,
            1.08520000e-03, 1.08520000e-03],
            [ 9.56100000e-04, 9.56100000e-04, 1.20500000e-03,
            9.56100000e-04, 9.56100000e-04],
            [ 1.87500000e-03, 1.87500000e-03, 2.30090000e-03,
            1.87500000e-03, 1.87500000e-03],
            [ -1.93920000e-03, -1.93920000e-03, -2.31650000e-03,
            -1.93920000e-03, -1.93920000e-03],
            [ 8.16000000e-04, 8.16000000e-04, 9.48700000e-04,
            8.16000000e-04, 8.16000000e-04],
            [ 1.01520000e-03, 1.01520000e-03, 1.14860000e-03,
            1.01520000e-03, 1.01520000e-03],
            [ 1.04150000e-03, 1.04150000e-03, 1.14640000e-03,
            1.04150000e-03, 1.04150000e-03],
            [ -3.88200000e-04, -3.88200000e-04, -4.15600000e-04,
            -3.88200000e-04, -3.88200000e-04],
            [ 9.95900000e-04, 9.95900000e-04, 1.03690000e-03,
            9.95900000e-04, 9.95900000e-04],
            [ -6.82800000e-04, -6.82800000e-04, -6.91200000e-04,
            -6.82800000e-04, -6.82800000e-04],
            [ -8.11400000e-04, -8.11400000e-04, -7.98500000e-04,
            -8.11400000e-04, -8.11400000e-04],
            [ -1.79050000e-03, -1.79050000e-03, -1.71250000e-03,
            -1.79050000e-03, -1.79050000e-03],
            [ 6.10000000e-04, 6.10000000e-04, 5.66900000e-04,
            6.10000000e-04, 6.10000000e-04],
            [ 2.52600000e-04, 2.52600000e-04, 2.28100000e-04,
            2.52600000e-04, 2.52600000e-04],
            [ -8.62500000e-04, -8.62500000e-04, -7.56400000e-04,
            -8.62500000e-04, -8.62500000e-04],
            [ -3.47300000e-04, -3.47300000e-04, -2.95800000e-04,
            -3.47300000e-04, -3.47300000e-04],
            [ -7.79000000e-05, -7.79000000e-05, -6.44000000e-05,
            -7.79000000e-05, -7.79000000e-05],
            [ 6.72000000e-04, 6.72000000e-04, 5.39400000e-04,
            6.72000000e-04, 6.72000000e-04],
            [ -3.72100000e-04, -3.72100000e-04, -2.89900000e-04,
            -3.72100000e-04, -3.72100000e-04],
            [ -1.22900000e-04, -1.22900000e-04, -9.29000000e-05,
            -1.22900000e-04, -1.22900000e-04],
            [ -1.63470000e-03, -1.63470000e-03, -1.19900000e-03,
            -1.63470000e-03, -1.63470000e-03],
            [ 2.64400000e-04, 2.64400000e-04, 1.88100000e-04,
            2.64400000e-04, 2.64400000e-04],
            [ 1.79230000e-03, 1.79230000e-03, 1.23650000e-03,
            1.79230000e-03, 1.79230000e-03],
            [ -1.40500000e-04, -1.40500000e-04, -9.40000000e-05,
            -1.40500000e-04, -1.40500000e-04],
            [ -2.98500000e-04, -2.98500000e-04, -1.93600000e-04,
            -2.98500000e-04, -2.98500000e-04],
            [ -9.33100000e-04, -9.33100000e-04, -5.86400000e-04,
            -9.33100000e-04, -9.33100000e-04],
            [ 9.11200000e-04, 9.11200000e-04, 5.54900000e-04,
            9.11200000e-04, 9.11200000e-04],
            [ -1.31840000e-03, -1.31840000e-03, -7.77900000e-04,
            -1.31840000e-03, -1.31840000e-03],
            [ -1.30200000e-04, -1.30200000e-04, -7.44000000e-05,
            -1.30200000e-04, -1.30200000e-04],
            [ 9.09300000e-04, 9.09300000e-04, 5.03200000e-04,
            9.09300000e-04, 9.09300000e-04],
            [ -2.39500000e-04, -2.39500000e-04, -1.28300000e-04,
            -2.39500000e-04, -2.39500000e-04],
            [ 7.15300000e-04, 7.15300000e-04, 3.71000000e-04,
            7.15300000e-04, 7.15300000e-04],
            [ 5.45000000e-05, 5.45000000e-05, 2.73000000e-05,
            5.45000000e-05, 5.45000000e-05],
            [ 2.85310000e-03, 2.85310000e-03, 1.38600000e-03,
            2.85310000e-03, 2.85310000e-03],
            [ 4.63400000e-04, 4.63400000e-04, 2.17800000e-04,
            4.63400000e-04, 4.63400000e-04],
            [ 2.80900000e-04, 2.80900000e-04, 1.27700000e-04,
            2.80900000e-04, 2.80900000e-04],
            [ 5.42000000e-05, 5.42000000e-05, 2.38000000e-05,
            5.42000000e-05, 5.42000000e-05],
            [ -3.62300000e-04, -3.62300000e-04, -1.54000000e-04,
            -3.62300000e-04, -3.62300000e-04],
            [ -1.11900000e-03, -1.11900000e-03, -4.59800000e-04,
            -1.11900000e-03, -1.11900000e-03],
            [ 1.28900000e-03, 1.28900000e-03, 5.11900000e-04,
            1.28900000e-03, 1.28900000e-03],
            [ -1.40820000e-03, -1.40820000e-03, -5.40400000e-04,
            -1.40820000e-03, -1.40820000e-03],
            [ -1.69300000e-04, -1.69300000e-04, -6.28000000e-05,
            -1.69300000e-04, -1.69300000e-04],
            [ -1.03620000e-03, -1.03620000e-03, -3.71000000e-04,
            -1.03620000e-03, -1.03620000e-03],
            [ 1.49150000e-03, 1.49150000e-03, 5.15800000e-04,
            1.49150000e-03, 1.49150000e-03],
            [ -7.22000000e-05, -7.22000000e-05, -2.41000000e-05,
            -7.22000000e-05, -7.22000000e-05],
            [ 5.49000000e-04, 5.49000000e-04, 1.76900000e-04,
            5.49000000e-04, 5.49000000e-04],
            [ -2.12320000e-03, -2.12320000e-03, -6.60400000e-04,
            -2.12320000e-03, -2.12320000e-03],
            [ 7.84000000e-06, 7.84000000e-06, 2.35000000e-06,
            7.84000000e-06, 7.84000000e-06],
            [ 1.15580000e-03, 1.15580000e-03, 3.34700000e-04,
            1.15580000e-03, 1.15580000e-03],
            [ 4.83400000e-04, 4.83400000e-04, 1.35000000e-04,
            4.83400000e-04, 4.83400000e-04],
            [ -5.26100000e-04, -5.26100000e-04, -1.41700000e-04,
            -5.26100000e-04, -5.26100000e-04],
            [ -1.75100000e-04, -1.75100000e-04, -4.55000000e-05,
            -1.75100000e-04, -1.75100000e-04],
            [ -1.84600000e-03, -1.84600000e-03, -4.62100000e-04,
            -1.84600000e-03, -1.84600000e-03],
            [ 2.07200000e-04, 2.07200000e-04, 5.00000000e-05,
            2.07200000e-04, 2.07200000e-04],
            [ -8.54700000e-04, -8.54700000e-04, -1.98700000e-04,
            -8.54700000e-04, -8.54700000e-04],
            [ -9.20000000e-05, -9.20000000e-05, -2.06000000e-05,
            -9.20000000e-05, -9.20000000e-05],
            [ 5.35700000e-04, 5.35700000e-04, 1.15600000e-04,
            5.35700000e-04, 5.35700000e-04],
            [ -7.67300000e-04, -7.67300000e-04, -1.59400000e-04,
            -7.67300000e-04, -7.67300000e-04],
            [ -1.79710000e-03, -1.79710000e-03, -3.59500000e-04,
            -1.79710000e-03, -1.79710000e-03],
            [ 1.10910000e-03, 1.10910000e-03, 2.13500000e-04,
            1.10910000e-03, 1.10910000e-03],
            [ -5.53800000e-04, -5.53800000e-04, -1.02600000e-04,
            -5.53800000e-04, -5.53800000e-04],
            [ 7.48000000e-04, 7.48000000e-04, 1.33400000e-04,
            7.48000000e-04, 7.48000000e-04],
            [ 4.23000000e-04, 4.23000000e-04, 7.26000000e-05,
            4.23000000e-04, 4.23000000e-04],
            [ -3.16400000e-04, -3.16400000e-04, -5.22000000e-05,
            -3.16400000e-04, -3.16400000e-04],
            [ -6.63200000e-04, -6.63200000e-04, -1.05200000e-04,
            -6.63200000e-04, -6.63200000e-04],
            [ 1.33540000e-03, 1.33540000e-03, 2.03700000e-04,
            1.33540000e-03, 1.33540000e-03],
            [ -7.81200000e-04, -7.81200000e-04, -1.14600000e-04,
            -7.81200000e-04, -7.81200000e-04],
            [ 1.67880000e-03, 1.67880000e-03, 2.36600000e-04,
            1.67880000e-03, 1.67880000e-03]])
        self.null_deviance = 56.691617808182208
        self.params = np.array([9.99964386e-01,-1.99896965e-02,
            -1.00027232e-04])
        self.bse = np.array([1.42119293e-04, 1.20276468e-05, 1.87347682e-07])
        self.aic_R = -1103.8187213072656  # adds 2 for dof for scale
        self.aic_Stata = -11.05818072104212  # divides by nobs for e(aic)
        self.deviance = 8.68876986288542e-05
        self.scale = 8.9574946938163984e-07  # from R but e(phi) in Stata
        self.llf = 555.9093606536328
        self.bic_Stata = -446.7014211525822
        self.df_model = 2
        self.df_resid = 97
        self.chi2 = 33207648.86501769  # from Stata not in sm
        self.fittedvalues = np.array([2.7181850213327747, 2.664122305869506,
            2.6106125414084405, 2.5576658143523567, 2.5052916730829535,
            2.4534991313100165, 2.4022966718815781, 2.3516922510411282,
            2.3016933031175575, 2.2523067456332542, 2.2035389848154616,
            2.1553959214958001, 2.107882957382607, 2.0610050016905817,
            2.0147664781120667, 1.969171332114154, 1.9242230385457144,
            1.8799246095383746, 1.8362786026854092, 1.7932871294825108,
            1.7509518640143886, 1.7092740518711942, 1.6682545192788105,
            1.6278936824271399, 1.5881915569806042, 1.5491477677552221,
            1.5107615585467538, 1.4730318020945796, 1.4359570101661721,
            1.3995353437472129, 1.3637646233226499, 1.3286423392342188,
            1.2941656621002184, 1.2603314532836074, 1.2271362753947765,
            1.1945764028156565, 1.162647832232141, 1.1313462931621328,
            1.1006672584668622, 1.0706059548334832, 1.0411573732173065,
            1.0123162792324054, 0.98407722347970683, 0.95643455180206194,
            0.92938241545618494, 0.90291478119174029, 0.87702544122826565,
            0.85170802312101246, 0.82695599950720078, 0.80276269772458597,
            0.77912130929465073, 0.75602489926313921, 0.73346641539106316,
            0.71143869718971686, 0.68993448479364294, 0.66894642766589496,
            0.64846709313034534, 0.62848897472617915, 0.60900450038011367,
            0.5900060403922629, 0.57148591523195513, 0.55343640314018494,
            0.5358497475357491, 0.51871816422248385, 0.50203384839536769,
            0.48578898144361343, 0.46997573754920047, 0.45458629007964013,
            0.4396128177740814, 0.42504751072218311, 0.41088257613548018,
            0.39711024391126759, 0.38372277198930843, 0.37071245150195081,
            0.35807161171849949, 0.34579262478494655, 0.33386791026040569,
            0.32228993945183393, 0.31105123954884056, 0.30014439756060574,
            0.28956206405712448, 0.27929695671718968, 0.26934186368570684,
            0.25968964674310463, 0.25033324428976694, 0.24126567414856051,
            0.23248003618867552, 0.22396951477412205, 0.21572738104035141,
            0.20774699500257574, 0.20002180749946474, 0.19254536197598673,
            0.18531129610924435, 0.17831334328122878, 0.17154533390247831,
            0.16500119659068577, 0.15867495920834204, 0.15256074976354628,
            0.14665279717814039, 0.14094543192735109])
class GaussianInverse(object):
    """
    Benchmark results for a GLM with Gaussian family and inverse link.

    This test uses generated data. Results are from R and Stata.
    """
    def __init__(self):
        # NOTE(review): each row holds five entries -- presumably the five
        # GLM residual types (response, Pearson, working, Anscombe,
        # deviance); TODO confirm against the consuming test's ordering.
        self.resids = np.array([[-5.15300000e-04, -5.15300000e-04,
            5.14800000e-04, -5.15300000e-04, -5.15300000e-04],
            [ -2.12500000e-04, -2.12500000e-04, 2.03700000e-04,
            -2.12500000e-04, -2.12500000e-04],
            [ -1.71400000e-04, -1.71400000e-04, 1.57200000e-04,
            -1.71400000e-04, -1.71400000e-04],
            [ 1.94020000e-03, 1.94020000e-03, -1.69710000e-03,
            1.94020000e-03, 1.94020000e-03],
            [ -6.81100000e-04, -6.81100000e-04, 5.66900000e-04,
            -6.81100000e-04, -6.81100000e-04],
            [ 1.21370000e-03, 1.21370000e-03, -9.58800000e-04,
            1.21370000e-03, 1.21370000e-03],
            [ -1.51090000e-03, -1.51090000e-03, 1.13070000e-03,
            -1.51090000e-03, -1.51090000e-03],
            [ 3.21500000e-04, 3.21500000e-04, -2.27400000e-04,
            3.21500000e-04, 3.21500000e-04],
            [ -3.18500000e-04, -3.18500000e-04, 2.12600000e-04,
            -3.18500000e-04, -3.18500000e-04],
            [ 3.75600000e-04, 3.75600000e-04, -2.36300000e-04,
            3.75600000e-04, 3.75600000e-04],
            [ 4.82300000e-04, 4.82300000e-04, -2.85500000e-04,
            4.82300000e-04, 4.82300000e-04],
            [ -1.41870000e-03, -1.41870000e-03, 7.89300000e-04,
            -1.41870000e-03, -1.41870000e-03],
            [ 6.75000000e-05, 6.75000000e-05, -3.52000000e-05,
            6.75000000e-05, 6.75000000e-05],
            [ 4.06300000e-04, 4.06300000e-04, -1.99100000e-04,
            4.06300000e-04, 4.06300000e-04],
            [ -3.61500000e-04, -3.61500000e-04, 1.66000000e-04,
            -3.61500000e-04, -3.61500000e-04],
            [ -2.97400000e-04, -2.97400000e-04, 1.28000000e-04,
            -2.97400000e-04, -2.97400000e-04],
            [ -9.32700000e-04, -9.32700000e-04, 3.75800000e-04,
            -9.32700000e-04, -9.32700000e-04],
            [ 1.16270000e-03, 1.16270000e-03, -4.38500000e-04,
            1.16270000e-03, 1.16270000e-03],
            [ 6.77900000e-04, 6.77900000e-04, -2.39200000e-04,
            6.77900000e-04, 6.77900000e-04],
            [ -1.29330000e-03, -1.29330000e-03, 4.27000000e-04,
            -1.29330000e-03, -1.29330000e-03],
            [ 2.24500000e-04, 2.24500000e-04, -6.94000000e-05,
            2.24500000e-04, 2.24500000e-04],
            [ 1.05510000e-03, 1.05510000e-03, -3.04900000e-04,
            1.05510000e-03, 1.05510000e-03],
            [ 2.50400000e-04, 2.50400000e-04, -6.77000000e-05,
            2.50400000e-04, 2.50400000e-04],
            [ 4.08600000e-04, 4.08600000e-04, -1.03400000e-04,
            4.08600000e-04, 4.08600000e-04],
            [ -1.67610000e-03, -1.67610000e-03, 3.96800000e-04,
            -1.67610000e-03, -1.67610000e-03],
            [ 7.47600000e-04, 7.47600000e-04, -1.65700000e-04,
            7.47600000e-04, 7.47600000e-04],
            [ 2.08200000e-04, 2.08200000e-04, -4.32000000e-05,
            2.08200000e-04, 2.08200000e-04],
            [ -8.00800000e-04, -8.00800000e-04, 1.55700000e-04,
            -8.00800000e-04, -8.00800000e-04],
            [ 5.81200000e-04, 5.81200000e-04, -1.05900000e-04,
            5.81200000e-04, 5.81200000e-04],
            [ 1.00980000e-03, 1.00980000e-03, -1.72400000e-04,
            1.00980000e-03, 1.00980000e-03],
            [ 2.77400000e-04, 2.77400000e-04, -4.44000000e-05,
            2.77400000e-04, 2.77400000e-04],
            [ -5.02800000e-04, -5.02800000e-04, 7.55000000e-05,
            -5.02800000e-04, -5.02800000e-04],
            [ 2.69800000e-04, 2.69800000e-04, -3.80000000e-05,
            2.69800000e-04, 2.69800000e-04],
            [ 2.01300000e-04, 2.01300000e-04, -2.67000000e-05,
            2.01300000e-04, 2.01300000e-04],
            [ -1.19690000e-03, -1.19690000e-03, 1.48900000e-04,
            -1.19690000e-03, -1.19690000e-03],
            [ -6.94200000e-04, -6.94200000e-04, 8.12000000e-05,
            -6.94200000e-04, -6.94200000e-04],
            [ 5.65500000e-04, 5.65500000e-04, -6.22000000e-05,
            5.65500000e-04, 5.65500000e-04],
            [ 4.93100000e-04, 4.93100000e-04, -5.10000000e-05,
            4.93100000e-04, 4.93100000e-04],
            [ 3.25000000e-04, 3.25000000e-04, -3.17000000e-05,
            3.25000000e-04, 3.25000000e-04],
            [ -7.70200000e-04, -7.70200000e-04, 7.07000000e-05,
            -7.70200000e-04, -7.70200000e-04],
            [ 2.58000000e-05, 2.58000000e-05, -2.23000000e-06,
            2.58000000e-05, 2.58000000e-05],
            [ -1.52800000e-04, -1.52800000e-04, 1.25000000e-05,
            -1.52800000e-04, -1.52800000e-04],
            [ 4.52000000e-05, 4.52000000e-05, -3.48000000e-06,
            4.52000000e-05, 4.52000000e-05],
            [ -6.83900000e-04, -6.83900000e-04, 4.97000000e-05,
            -6.83900000e-04, -6.83900000e-04],
            [ -7.77600000e-04, -7.77600000e-04, 5.34000000e-05,
            -7.77600000e-04, -7.77600000e-04],
            [ 1.03170000e-03, 1.03170000e-03, -6.70000000e-05,
            1.03170000e-03, 1.03170000e-03],
            [ 1.20000000e-03, 1.20000000e-03, -7.37000000e-05,
            1.20000000e-03, 1.20000000e-03],
            [ -7.71600000e-04, -7.71600000e-04, 4.48000000e-05,
            -7.71600000e-04, -7.71600000e-04],
            [ -3.37000000e-04, -3.37000000e-04, 1.85000000e-05,
            -3.37000000e-04, -3.37000000e-04],
            [ 1.19880000e-03, 1.19880000e-03, -6.25000000e-05,
            1.19880000e-03, 1.19880000e-03],
            [ -1.54610000e-03, -1.54610000e-03, 7.64000000e-05,
            -1.54610000e-03, -1.54610000e-03],
            [ 9.11600000e-04, 9.11600000e-04, -4.27000000e-05,
            9.11600000e-04, 9.11600000e-04],
            [ -4.70800000e-04, -4.70800000e-04, 2.09000000e-05,
            -4.70800000e-04, -4.70800000e-04],
            [ -1.21550000e-03, -1.21550000e-03, 5.13000000e-05,
            -1.21550000e-03, -1.21550000e-03],
            [ 1.09160000e-03, 1.09160000e-03, -4.37000000e-05,
            1.09160000e-03, 1.09160000e-03],
            [ -2.72000000e-04, -2.72000000e-04, 1.04000000e-05,
            -2.72000000e-04, -2.72000000e-04],
            [ -7.84500000e-04, -7.84500000e-04, 2.84000000e-05,
            -7.84500000e-04, -7.84500000e-04],
            [ 1.53330000e-03, 1.53330000e-03, -5.28000000e-05,
            1.53330000e-03, 1.53330000e-03],
            [ -1.84450000e-03, -1.84450000e-03, 6.05000000e-05,
            -1.84450000e-03, -1.84450000e-03],
            [ 1.68550000e-03, 1.68550000e-03, -5.26000000e-05,
            1.68550000e-03, 1.68550000e-03],
            [ -3.06100000e-04, -3.06100000e-04, 9.10000000e-06,
            -3.06100000e-04, -3.06100000e-04],
            [ 1.00950000e-03, 1.00950000e-03, -2.86000000e-05,
            1.00950000e-03, 1.00950000e-03],
            [ 5.22000000e-04, 5.22000000e-04, -1.41000000e-05,
            5.22000000e-04, 5.22000000e-04],
            [ -2.18000000e-05, -2.18000000e-05, 5.62000000e-07,
            -2.18000000e-05, -2.18000000e-05],
            [ -7.80600000e-04, -7.80600000e-04, 1.92000000e-05,
            -7.80600000e-04, -7.80600000e-04],
            [ 6.81400000e-04, 6.81400000e-04, -1.60000000e-05,
            6.81400000e-04, 6.81400000e-04],
            [ -1.43800000e-04, -1.43800000e-04, 3.23000000e-06,
            -1.43800000e-04, -1.43800000e-04],
            [ 7.76000000e-04, 7.76000000e-04, -1.66000000e-05,
            7.76000000e-04, 7.76000000e-04],
            [ 2.54900000e-04, 2.54900000e-04, -5.22000000e-06,
            2.54900000e-04, 2.54900000e-04],
            [ 5.77500000e-04, 5.77500000e-04, -1.13000000e-05,
            5.77500000e-04, 5.77500000e-04],
            [ 7.58100000e-04, 7.58100000e-04, -1.42000000e-05,
            7.58100000e-04, 7.58100000e-04],
            [ -8.31000000e-04, -8.31000000e-04, 1.49000000e-05,
            -8.31000000e-04, -8.31000000e-04],
            [ -2.10340000e-03, -2.10340000e-03, 3.62000000e-05,
            -2.10340000e-03, -2.10340000e-03],
            [ -8.89900000e-04, -8.89900000e-04, 1.47000000e-05,
            -8.89900000e-04, -8.89900000e-04],
            [ 1.08570000e-03, 1.08570000e-03, -1.71000000e-05,
            1.08570000e-03, 1.08570000e-03],
            [ -1.88600000e-04, -1.88600000e-04, 2.86000000e-06,
            -1.88600000e-04, -1.88600000e-04],
            [ 9.10000000e-05, 9.10000000e-05, -1.32000000e-06,
            9.10000000e-05, 9.10000000e-05],
            [ 1.07700000e-03, 1.07700000e-03, -1.50000000e-05,
            1.07700000e-03, 1.07700000e-03],
            [ 9.04100000e-04, 9.04100000e-04, -1.21000000e-05,
            9.04100000e-04, 9.04100000e-04],
            [ -2.20000000e-04, -2.20000000e-04, 2.83000000e-06,
            -2.20000000e-04, -2.20000000e-04],
            [ -1.64030000e-03, -1.64030000e-03, 2.02000000e-05,
            -1.64030000e-03, -1.64030000e-03],
            [ 2.20600000e-04, 2.20600000e-04, -2.62000000e-06,
            2.20600000e-04, 2.20600000e-04],
            [ -2.78300000e-04, -2.78300000e-04, 3.17000000e-06,
            -2.78300000e-04, -2.78300000e-04],
            [ -4.93000000e-04, -4.93000000e-04, 5.40000000e-06,
            -4.93000000e-04, -4.93000000e-04],
            [ -1.85000000e-04, -1.85000000e-04, 1.95000000e-06,
            -1.85000000e-04, -1.85000000e-04],
            [ -7.64000000e-04, -7.64000000e-04, 7.75000000e-06,
            -7.64000000e-04, -7.64000000e-04],
            [ 7.79600000e-04, 7.79600000e-04, -7.61000000e-06,
            7.79600000e-04, 7.79600000e-04],
            [ 2.88400000e-04, 2.88400000e-04, -2.71000000e-06,
            2.88400000e-04, 2.88400000e-04],
            [ 1.09370000e-03, 1.09370000e-03, -9.91000000e-06,
            1.09370000e-03, 1.09370000e-03],
            [ 3.07000000e-04, 3.07000000e-04, -2.68000000e-06,
            3.07000000e-04, 3.07000000e-04],
            [ -8.76000000e-04, -8.76000000e-04, 7.37000000e-06,
            -8.76000000e-04, -8.76000000e-04],
            [ -1.85300000e-04, -1.85300000e-04, 1.50000000e-06,
            -1.85300000e-04, -1.85300000e-04],
            [ 3.24700000e-04, 3.24700000e-04, -2.54000000e-06,
            3.24700000e-04, 3.24700000e-04],
            [ 4.59600000e-04, 4.59600000e-04, -3.47000000e-06,
            4.59600000e-04, 4.59600000e-04],
            [ -2.73300000e-04, -2.73300000e-04, 1.99000000e-06,
            -2.73300000e-04, -2.73300000e-04],
            [ 1.32180000e-03, 1.32180000e-03, -9.29000000e-06,
            1.32180000e-03, 1.32180000e-03],
            [ -1.32620000e-03, -1.32620000e-03, 9.00000000e-06,
            -1.32620000e-03, -1.32620000e-03],
            [ 9.62000000e-05, 9.62000000e-05, -6.31000000e-07,
            9.62000000e-05, 9.62000000e-05],
            [ -6.04400000e-04, -6.04400000e-04, 3.83000000e-06,
            -6.04400000e-04, -6.04400000e-04],
            [ -6.66300000e-04, -6.66300000e-04, 4.08000000e-06,
            -6.66300000e-04, -6.66300000e-04]])
        self.null_deviance = 6.8088354977561  # from R, Rpy bug
        self.params = np.array([ 1.00045997, 0.01991666, 0.00100126])
        self.bse = np.array([ 4.55214070e-04, 7.00529313e-05, 1.84478509e-06])
        self.aic_R = -1123.1528237643774
        self.aic_Stata = -11.25152876811373
        self.deviance = 7.1612915365488368e-05
        self.scale = 7.3827747608449547e-07
        self.llf = 565.57641188218872
        self.bic_Stata = -446.7014364279675
        self.df_model = 2
        self.df_resid = 97
        self.chi2 = 2704006.698904491
        self.fittedvalues = np.array([ 0.99954024, 0.97906956, 0.95758077,
            0.93526008, 0.91228657,
            0.88882978, 0.8650479 , 0.84108646, 0.81707757, 0.79313958,
            0.76937709, 0.74588129, 0.72273051, 0.69999099, 0.67771773,
            0.65595543, 0.63473944, 0.61409675, 0.59404691, 0.57460297,
            0.55577231, 0.53755742, 0.51995663, 0.50296478, 0.48657379,
            0.47077316, 0.4555505 , 0.44089187, 0.42678213, 0.41320529,
            0.40014475, 0.38758348, 0.37550428, 0.36388987, 0.35272306,
            0.34198684, 0.33166446, 0.32173953, 0.31219604, 0.30301842,
            0.29419156, 0.28570085, 0.27753216, 0.26967189, 0.26210695,
            0.25482476, 0.24781324, 0.2410608 , 0.23455636, 0.22828931,
            0.22224947, 0.21642715, 0.21081306, 0.20539835, 0.20017455,
            0.19513359, 0.19026777, 0.18556972, 0.18103243, 0.17664922,
            0.1724137 , 0.16831977, 0.16436164, 0.16053377, 0.15683086,
            0.15324789, 0.14978003, 0.1464227 , 0.14317153, 0.14002232,
            0.13697109, 0.13401403, 0.1311475 , 0.12836802, 0.12567228,
            0.1230571 , 0.12051944, 0.11805642, 0.11566526, 0.1133433 ,
            0.11108802, 0.10889699, 0.10676788, 0.10469847, 0.10268664,
            0.10073034, 0.09882763, 0.09697663, 0.09517555, 0.09342267,
            0.09171634, 0.09005498, 0.08843707, 0.08686116, 0.08532585,
            0.08382979, 0.0823717 , 0.08095035, 0.07956453, 0.07821311])
class Star98(object):
    """
    Benchmark results for the Star98 binomial example.

    Star98 class used with TestGlmBinomial.
    """
    def __init__(self):
        # Coefficients and standard errors for the 20 regressors plus the
        # intercept (last entry) -- presumably matching the Star98 exog
        # column order; TODO confirm against the dataset loader.
        self.params = (-0.0168150366, 0.0099254766, -0.0187242148,
            -0.0142385609, 0.2544871730, 0.2406936644, 0.0804086739,
            -1.9521605027, -0.3340864748, -0.1690221685, 0.0049167021,
            -0.0035799644, -0.0140765648, -0.0040049918, -0.0039063958,
            0.0917143006, 0.0489898381, 0.0080407389, 0.0002220095,
            -0.0022492486, 2.9588779262)
        self.bse = (4.339467e-04, 6.013714e-04, 7.435499e-04, 4.338655e-04,
            2.994576e-02, 5.713824e-02, 1.392359e-02, 3.168109e-01,
            6.126411e-02, 3.270139e-02, 1.253877e-03, 2.254633e-04,
            1.904573e-03, 4.739838e-04, 9.623650e-04, 1.450923e-02,
            7.451666e-03, 1.499497e-03, 2.988794e-05, 3.489838e-04,
            1.546712e+00)
        self.null_deviance = 34345.3688931
        self.df_null = 302
        self.deviance = 4078.76541772
        self.df_resid = 282
        self.df_model = 20
        self.aic_R = 6039.22511799
        self.aic_Stata = 19.93143846737438
        self.bic_Stata = 2467.493504191302
        self.llf = -2998.61255899391  # from R
        self.llf_Stata = -2998.612927807218
        self.scale = 1.
        self.pearson_chi2 = 4051.921614
        # Residuals are large and live in the shared glm_test_resids module.
        self.resids = glm_test_resids.star98_resids
        self.fittedvalues = np.array([ 0.5833118 , 0.75144661, 0.50058272,
            0.68534524, 0.32251021,
            0.68693601, 0.33299827, 0.65624766, 0.49851481, 0.506736,
            0.23954874, 0.86631452, 0.46432936, 0.44171873, 0.66797935,
            0.73988491, 0.51966014, 0.42442446, 0.5649369 , 0.59251634,
            0.34798337, 0.56415024, 0.49974355, 0.3565539 , 0.20752309,
            0.18269097, 0.44932642, 0.48025128, 0.59965277, 0.58848671,
            0.36264203, 0.33333196, 0.74253352, 0.5081886 , 0.53421878,
            0.56291445, 0.60205239, 0.29174423, 0.2954348 , 0.32220414,
            0.47977903, 0.23687535, 0.11776464, 0.1557423 , 0.27854799,
            0.22699533, 0.1819439 , 0.32554433, 0.22681989, 0.15785389,
            0.15268609, 0.61094772, 0.20743222, 0.51649059, 0.46502006,
            0.41031788, 0.59523288, 0.65733285, 0.27835336, 0.2371213 ,
            0.25137045, 0.23953942, 0.27854519, 0.39652413, 0.27023163,
            0.61411863, 0.2212025 , 0.42005842, 0.55940397, 0.35413774,
            0.45724563, 0.57399437, 0.2168918 , 0.58308738, 0.17181104,
            0.49873249, 0.22832683, 0.14846056, 0.5028073 , 0.24513863,
            0.48202096, 0.52823155, 0.5086262 , 0.46295993, 0.57869402,
            0.78363217, 0.21144435, 0.2298366 , 0.17954825, 0.32232586,
            0.8343015 , 0.56217006, 0.47367315, 0.52535649, 0.60350746,
            0.43210701, 0.44712008, 0.35858239, 0.2521347 , 0.19787004,
            0.63256553, 0.51386532, 0.64997027, 0.13402072, 0.81756174,
            0.74543642, 0.30825852, 0.23988707, 0.17273125, 0.27880599,
            0.17395893, 0.32052828, 0.80467697, 0.18726218, 0.23842081,
            0.19020381, 0.85835388, 0.58703615, 0.72415106, 0.64433695,
            0.68766653, 0.32923663, 0.16352185, 0.38868816, 0.44980444,
            0.74810044, 0.42973792, 0.53762581, 0.72714996, 0.61229484,
            0.30267667, 0.24713253, 0.65086008, 0.48957265, 0.54955545,
            0.5697156 , 0.36406211, 0.48906545, 0.45919413, 0.4930565 ,
            0.39785555, 0.5078719 , 0.30159626, 0.28524393, 0.34687707,
            0.22522042, 0.52947159, 0.29277287, 0.8585002 , 0.60800389,
            0.75830521, 0.35648175, 0.69508796, 0.45518355, 0.21567675,
            0.39682985, 0.49042948, 0.47615798, 0.60588234, 0.62910299,
            0.46005639, 0.71755165, 0.48852156, 0.47940661, 0.60128813,
            0.16589699, 0.68512861, 0.46305199, 0.68832227, 0.7006721 ,
            0.56564937, 0.51753941, 0.54261733, 0.56072214, 0.34545715,
            0.30226104, 0.3572956 , 0.40996287, 0.33517519, 0.36248407,
            0.33937041, 0.34140691, 0.2627528 , 0.29955161, 0.38581683,
            0.24840026, 0.15414272, 0.40415991, 0.53936252, 0.52111887,
            0.28060168, 0.45600958, 0.51110589, 0.43757523, 0.46891953,
            0.39425249, 0.5834369 , 0.55817308, 0.32051259, 0.43567448,
            0.34134195, 0.43016545, 0.4885413 , 0.28478325, 0.2650776 ,
            0.46784606, 0.46265983, 0.42655938, 0.18972234, 0.60448491,
            0.211896 , 0.37886032, 0.50727577, 0.39782309, 0.50427121,
            0.35882898, 0.39596807, 0.49160806, 0.35618002, 0.6819922 ,
            0.36871093, 0.43079679, 0.67985516, 0.41270595, 0.68952767,
            0.52587734, 0.32042126, 0.39120123, 0.56870985, 0.32962349,
            0.32168989, 0.54076251, 0.4592907 , 0.48480182, 0.4408386 ,
            0.431178 , 0.47078232, 0.55911605, 0.30331618, 0.50310393,
            0.65036038, 0.45078895, 0.62354291, 0.56435463, 0.50034281,
            0.52693538, 0.57217285, 0.49221472, 0.40707122, 0.44226533,
            0.3475959 , 0.54746396, 0.86385832, 0.48402233, 0.54313657,
            0.61586824, 0.27097185, 0.69717808, 0.52156974, 0.50401189,
            0.56724181, 0.6577178 , 0.42732047, 0.44808396, 0.65435634,
            0.54766225, 0.38160648, 0.49890847, 0.50879037, 0.5875452 ,
            0.45101593, 0.5709704 , 0.3175516 , 0.39813159, 0.28305688,
            0.40521062, 0.30120578, 0.26400428, 0.44205496, 0.40545798,
            0.39366599, 0.55288196, 0.14104184, 0.17550155, 0.1949095 ,
            0.40255144, 0.21016822, 0.09712017, 0.63151487, 0.25885514,
            0.57323748, 0.61836898, 0.43268601, 0.67008878, 0.75801989,
            0.50353406, 0.64222315, 0.29925757, 0.32592036, 0.39634977,
            0.39582747, 0.41037006, 0.34174944])
class Lbw(object):
    '''
    Benchmark results for the low-birth-weight (LBW) logistic regression.

    The LBW data can be found here

    http://www.stata-press.com/data/r9/rmain.html
    '''
    def __init__(self):
        # data set up for data not in datasets
        filename = os.path.join(os.path.dirname(os.path.abspath(__file__)),
            "stata_lbw_glm.csv")
        # NOTE(review): the file handle passed to np.recfromcsv is never
        # closed explicitly -- consider a `with open(...)` block.  The
        # converter strips surrounding double quotes from column 4.
        data=np.recfromcsv(open(filename, 'rb'), converters={4: lambda s: s.strip(asbytes("\""))})
        # Expand the 'race' column into dummy indicator columns.
        data = categorical(data, col='race', drop=True)
        self.endog = data.low
        design = np.column_stack((data['age'], data['lwt'],
            data['race_black'], data['race_other'], data['smoke'],
            data['ptl'], data['ht'], data['ui']))
        self.exog = add_constant(design)
        # Results for Canonical Logit Link
        self.params = (-.02710031, -.01515082, 1.26264728,
            .86207916, .92334482, .54183656, 1.83251780,
            .75851348, .46122388)
        self.bse = (0.036449917, 0.006925765, 0.526405169,
            0.439146744, 0.400820976, 0.346246857, 0.691623875,
            0.459373871, 1.204574885)
        self.aic_R = 219.447991133
        self.aic_Stata = 1.161100482182551
        self.deviance = 201.4479911325021
        self.scale = 1
        self.llf = -100.7239955662511
        self.chi2 = 25.65329337867037  # from Stata not used by sm
        self.null_deviance = 234.671996193219
        self.bic_Stata = -742.0664715782335
        self.df_resid = 180
        self.df_model = 8
        self.df_null = 188
        self.pearson_chi2 = 182.023342493558
        self.resids = glm_test_resids.lbw_resids
        self.fittedvalues = np.array([ 0.31217507, 0.12793027, 0.32119762,
            0.48442686, 0.50853393,
            0.24517662, 0.12755193, 0.33226988, 0.22013309, 0.26268069,
            0.34729955, 0.18782188, 0.75404181, 0.54723527, 0.35016393,
            0.35016393, 0.45824406, 0.25336683, 0.43087357, 0.23284101,
            0.20146616, 0.24315597, 0.02725586, 0.22207692, 0.39800383,
            0.05584178, 0.28403447, 0.06931188, 0.35371946, 0.3896279 ,
            0.3896279 , 0.47812002, 0.60043853, 0.07144772, 0.29995988,
            0.17910031, 0.22773411, 0.22691015, 0.06221253, 0.2384528 ,
            0.32633864, 0.05131047, 0.2954536 , 0.07364416, 0.57241299,
            0.57241299, 0.08272435, 0.23298882, 0.12658158, 0.58967487,
            0.46989562, 0.22455631, 0.2348285 , 0.29571887, 0.28212464,
            0.31499013, 0.68340511, 0.14090647, 0.31448425, 0.28082972,
            0.28082972, 0.24918728, 0.27018297, 0.08175784, 0.64808999,
            0.38252574, 0.25550797, 0.09113411, 0.40736693, 0.32644055,
            0.54367425, 0.29606968, 0.47028421, 0.39972155, 0.25079125,
            0.09678472, 0.08807264, 0.27467837, 0.5675742 , 0.045619 ,
            0.10719293, 0.04826292, 0.23934092, 0.24179618, 0.23802197,
            0.49196179, 0.31379451, 0.10605469, 0.04047396, 0.11620849,
            0.09937016, 0.21822964, 0.29770265, 0.83912829, 0.25079125,
            0.08548557, 0.06550308, 0.2046457 , 0.2046457 , 0.08110349,
            0.13519643, 0.47862055, 0.38891913, 0.1383964 , 0.26176764,
            0.31594589, 0.11418612, 0.06324112, 0.28468594, 0.21663702,
            0.03827107, 0.27237604, 0.20246694, 0.19042999, 0.15019447,
            0.18759474, 0.12308435, 0.19700616, 0.11564002, 0.36595033,
            0.07765727, 0.14119063, 0.13584627, 0.11012759, 0.10102472,
            0.10002166, 0.07439288, 0.27919958, 0.12491598, 0.06774594,
            0.72513764, 0.17714986, 0.67373352, 0.80679436, 0.52908941,
            0.15695938, 0.49722003, 0.41970014, 0.62375224, 0.53695622,
            0.25474238, 0.79135707, 0.2503871 , 0.25352337, 0.33474211,
            0.19308929, 0.24658944, 0.25495092, 0.30867144, 0.41240259,
            0.59412526, 0.16811226, 0.48282791, 0.36566756, 0.09279325,
            0.75337353, 0.57128885, 0.52974123, 0.44548504, 0.77748843,
            0.3224082 , 0.40054277, 0.29522468, 0.19673553, 0.73781774,
            0.57680312, 0.44545573, 0.30242355, 0.38720223, 0.16632904,
            0.30804092, 0.56385194, 0.60012179, 0.48324821, 0.24636345,
            0.26153216, 0.2348285 , 0.29023669, 0.41011454, 0.36472083,
            0.65922069, 0.30476903, 0.09986775, 0.70658332, 0.30713075,
            0.36096386, 0.54962701, 0.71996086, 0.6633756 ])
class Scotvote(object):
    """
    Benchmark results for the Scottish vote dataset.

    Scotvot class is used with TestGlmGamma.
    """
    def __init__(self):
        self.params = (4.961768e-05, 2.034423e-03, -7.181429e-05, 1.118520e-04,
            -1.467515e-07, -5.186831e-04, -2.42717498e-06, -1.776527e-02)
        self.bse = (1.621577e-05, 5.320802e-04, 2.711664e-05, 4.057691e-05,
            1.236569e-07, 2.402534e-04, 7.460253e-07, 1.147922e-02)
        self.null_deviance = 0.536072
        self.df_null = 31
        self.deviance = 0.087388516417
        self.df_resid = 24
        self.df_model = 7
        self.aic_R = 182.947045954721
        self.aic_Stata = 10.72212
        self.bic_Stata = -83.09027
        self.llf = -163.5539382  # from Stata, same as ours with scale = 1
        # self.llf = -82.47352 # Very close to ours as is
        self.scale = 0.003584283
        self.pearson_chi2 = .0860228056
        self.resids = glm_test_resids.scotvote_resids
        self.fittedvalues = np.array([57.80431482, 53.2733447, 50.56347993,
            58.33003783,
            70.46562169, 56.88801284, 66.81878401, 66.03410393,
            57.92937473, 63.23216907, 53.9914785 , 61.28993391,
            64.81036393, 63.47546816, 60.69696114, 74.83508176,
            56.56991106, 72.01804172, 64.35676519, 52.02445881,
            64.24933079, 71.15070332, 45.73479688, 54.93318588,
            66.98031261, 52.02479973, 56.18413736, 58.12267471,
            67.37947398, 60.49162862, 73.82609217, 69.61515621])
class Cancer(object):
    '''
    Base data setup for the Stata cancer dataset.

    Loads ``stata_cancer_glm.csv`` (shipped next to this module), sets
    ``self.endog`` to the study time and builds ``self.exog`` from age plus
    drug-indicator dummies (first level dropped as the reference category).

    The Cancer data can be found here

    http://www.stata-press.com/data/r10/rmain.html
    '''
    def __init__(self):
        filename = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                "stata_cancer_glm.csv")
        # Use a context manager so the file handle is closed deterministically
        # (the original passed an anonymous open() that was never closed).
        with open(filename, 'rb') as fd:
            data = np.recfromcsv(fd)
        self.endog = data.studytime
        design = np.column_stack((data.age, data.drug))
        design = categorical(design, col=1, drop=True)
        design = np.delete(design, 1, axis=1)  # drop first dummy (reference)
        self.exog = add_constant(design)
class CancerLog(Cancer):
    """
    Expected results for a Gamma GLM with a log link fit to the Stata
    cancer data.  Used with TestGlmGammaLog.

    Reference values come from R and Stata; comments note the origin
    of each figure where they disagree.
    """
    def __init__(self):
        super(CancerLog, self).__init__()
        # 48 x 5 matrix of residuals, one row per observation.
        # NOTE(review): column semantics (deviance/pearson/working/
        # anscombe/response order is presumed) are set by the test
        # harness -- confirm against the consuming test.
        self.resids = np.array([[-8.52598100e-01,-1.45739100e+00,
            -3.92408100e+01,
            -1.41526900e+00,  -5.78417200e+00],
          [ -8.23683800e-01,  -1.35040200e+00,  -2.64957500e+01,
            -1.31777000e+00,  -4.67162900e+00],
          [ -7.30450400e-01,  -1.07754600e+00,  -4.02136400e+01,
            -1.06208800e+00,  -5.41978500e+00],
          [ -7.04471600e-01,  -1.01441500e+00,  -7.25951500e+01,
            -1.00172900e+00,  -7.15130900e+00],
          [ -5.28668000e-01,  -6.68617300e-01,  -3.80758100e+01,
            -6.65304600e-01,  -4.48658700e+00],
          [ -2.28658500e-01,  -2.48859700e-01,  -6.14913600e+00,
            -2.48707200e-01,  -1.18577100e+00],
          [ -1.93939400e-01,  -2.08119900e-01,  -7.46226500e+00,
            -2.08031700e-01,  -1.20300800e+00],
          [ -3.55635700e-01,  -4.09525000e-01,  -2.14132500e+01,
            -4.08815100e-01,  -2.75958600e+00],
          [ -5.73360000e-02,  -5.84700000e-02,  -4.12946200e+00,
            -5.84681000e-02,  -4.86586900e-01],
          [  3.09828000e-02,   3.06685000e-02,   1.86551100e+00,
             3.06682000e-02,   2.40413800e-01],
          [ -2.11924300e-01,  -2.29071300e-01,  -2.18386100e+01,
            -2.28953000e-01,  -2.15130900e+00],
          [ -3.10989000e-01,  -3.50739300e-01,  -4.19249500e+01,
            -3.50300400e-01,  -3.61084500e+00],
          [ -9.22250000e-03,  -9.25100000e-03,  -1.13679700e+00,
            -9.25100000e-03,  -1.02392100e-01],
          [  2.39402500e-01,   2.22589700e-01,   1.88577300e+01,
             2.22493500e-01,   2.12475600e+00],
          [  3.35166000e-02,   3.31493000e-02,   4.51842400e+00,
             3.31489000e-02,   3.89155400e-01],
          [  8.49829400e-01,   6.85180200e-01,   3.57627500e+01,
             6.82689900e-01,   5.51291500e+00],
          [  4.12934200e-01,   3.66785200e-01,   4.65392600e+01,
             3.66370400e-01,   4.38379500e+00],
          [  4.64148400e-01,   4.07123200e-01,   6.25726500e+01,
             4.06561900e-01,   5.38915500e+00],
          [  1.71104600e+00,   1.19474800e+00,   1.12676500e+02,
             1.18311900e+00,   1.38850500e+01],
          [  1.26571800e+00,   9.46389000e-01,   1.30431000e+02,
             9.40244600e-01,   1.28486900e+01],
          [ -3.48532600e-01,  -3.99988300e-01,  -2.95638100e+01,
            -3.99328600e-01,  -3.20997700e+00],
          [ -4.04340300e-01,  -4.76960100e-01,  -4.10254300e+01,
            -4.75818000e-01,  -4.07286500e+00],
          [ -4.92057900e-01,  -6.08818300e-01,  -9.34509600e+01,
            -6.06357200e-01,  -6.78109700e+00],
          [ -4.02876400e-01,  -4.74878400e-01,  -9.15226200e+01,
            -4.73751900e-01,  -6.07225700e+00],
          [ -5.15056700e-01,  -6.46013300e-01,  -2.19014600e+02,
            -6.43043500e-01,  -1.06209700e+01],
          [ -8.70423000e-02,  -8.97043000e-02,  -1.26361400e+01,
            -8.96975000e-02,  -1.04875100e+00],
          [  1.28362300e-01,   1.23247800e-01,   1.70383300e+01,
             1.23231000e-01,   1.47887800e+00],
          [ -2.39271900e-01,  -2.61562100e-01,  -9.30283300e+01,
            -2.61384400e-01,  -4.71795100e+00],
          [  7.37246500e-01,   6.08186000e-01,   6.25359600e+01,
             6.06409700e-01,   6.79002300e+00],
          [ -3.64110000e-02,  -3.68626000e-02,  -1.41565300e+01,
            -3.68621000e-02,  -7.17951200e-01],
          [  2.68833000e-01,   2.47933100e-01,   6.67934100e+01,
             2.47801000e-01,   4.23748400e+00],
          [  5.96389600e-01,   5.07237700e-01,   1.13265500e+02,
             5.06180100e-01,   8.21890300e+00],
          [  1.98218000e-02,   1.96923000e-02,   1.00820900e+01,
             1.96923000e-02,   4.47040700e-01],
          [  7.74936000e-01,   6.34305300e-01,   2.51883900e+02,
             6.32303700e-01,   1.39711800e+01],
          [ -7.63925100e-01,  -1.16591700e+00,  -4.93461700e+02,
            -1.14588000e+00,  -1.94156600e+01],
          [ -6.23771700e-01,  -8.41174800e-01,  -4.40679600e+02,
            -8.34266300e-01,  -1.65796100e+01],
          [ -1.63272900e-01,  -1.73115100e-01,  -6.73975900e+01,
            -1.73064800e-01,  -3.31725800e+00],
          [ -4.28562500e-01,  -5.11932900e-01,  -4.73787800e+02,
            -5.10507400e-01,  -1.42494800e+01],
          [  8.00693000e-02,   7.80269000e-02,   3.95353400e+01,
             7.80226000e-02,   1.77920500e+00],
          [ -2.13674400e-01,  -2.31127400e-01,  -2.15987000e+02,
            -2.31005700e-01,  -6.79344600e+00],
          [ -1.63544000e-02,  -1.64444000e-02,  -1.05642100e+01,
            -1.64444000e-02,  -4.15657600e-01],
          [  2.04900500e-01,   1.92372100e-01,   1.10651300e+02,
             1.92309400e-01,   4.76156600e+00],
          [ -1.94758900e-01,  -2.09067700e-01,  -2.35484100e+02,
            -2.08978200e-01,  -6.77219400e+00],
          [  3.16727400e-01,   2.88367800e-01,   1.87065600e+02,
             2.88162100e-01,   7.69732400e+00],
          [  6.24234900e-01,   5.27632500e-01,   2.57678500e+02,
             5.26448400e-01,   1.26827400e+01],
          [  8.30241100e-01,   6.72002100e-01,   2.86513700e+02,
             6.69644800e-01,   1.54232100e+01],
          [  6.55140000e-03,   6.53710000e-03,   7.92130700e+00,
             6.53710000e-03,   2.27805800e-01],
          [  3.41595200e-01,   3.08985000e-01,   2.88667600e+02,
             3.08733300e-01,   9.93012900e+00]])
        self.null_deviance = 27.92207137420696  # From R (bug in rpy)
        # Coefficient estimates and standard errors.
        self.params = np.array([-0.04477778, 0.57437126, 1.05210726,
                4.64604002])
        self.bse = np.array([ 0.0147328 ,  0.19694727,  0.19772507,
            0.83534671])
        # R and Stata use different AIC conventions; keep both.
        self.aic_R = 331.89022395372069
        self.aic_Stata = 7.403608467857651
        self.deviance = 16.174635536991005
        self.scale = 0.31805268736385695
        # self.llf = -160.94511197686035 # From R
        self.llf = -173.6866032285836  # from Stata
        self.bic_Stata = -154.1582089453923  # from Stata
        self.df_model = 3
        self.df_resid = 44
        self.chi2 = 36.77821448266359  # from Stata not in sm
        # Fitted (predicted) mean values, one per observation.
        self.fittedvalues = np.array([  6.78419193,   5.67167253,   7.41979002,
            10.15123371,
             8.48656317,   5.18582263,   6.20304079,   7.75958258,
             8.48656317,   7.75958258,  10.15123371,  11.61071755,
            11.10228357,   8.87520908,  11.61071755,   6.48711178,
            10.61611394,  11.61071755,   8.11493609,  10.15123371,
             9.21009116,  10.07296716,  13.78112366,  15.07225103,
            20.62079147,  12.04881666,  11.5211983 ,  19.71780584,
             9.21009116,  19.71780584,  15.76249142,  13.78112366,
            22.55271436,  18.02872842,  25.41575239,  26.579678  ,
            20.31745227,  33.24937131,  22.22095589,  31.79337946,
            25.41575239,  23.23857437,  34.77204095,  24.30279515,
            20.31745227,  18.57700761,  34.77204095,  29.06987768])
class CancerIdentity(Cancer):
    """
    Expected results for a Gamma GLM with an identity link fit to the
    Stata cancer data.  Used with TestGlmGammaIdentity.

    Reference values come from R and Stata; comments note the origin
    of each figure where they disagree.
    """
    def __init__(self):
        super(CancerIdentity, self).__init__()
        # 48 x 5 matrix of residuals, one row per observation.
        # NOTE(review): this array is numerically identical to
        # CancerLog.resids -- presumably response-scale residuals that
        # do not depend on the link; confirm against the test harness.
        self.resids = np.array([[ -8.52598100e-01,  -1.45739100e+00,
            -3.92408100e+01,
            -1.41526900e+00,  -5.78417200e+00],
          [ -8.23683800e-01,  -1.35040200e+00,  -2.64957500e+01,
            -1.31777000e+00,  -4.67162900e+00],
          [ -7.30450400e-01,  -1.07754600e+00,  -4.02136400e+01,
            -1.06208800e+00,  -5.41978500e+00],
          [ -7.04471600e-01,  -1.01441500e+00,  -7.25951500e+01,
            -1.00172900e+00,  -7.15130900e+00],
          [ -5.28668000e-01,  -6.68617300e-01,  -3.80758100e+01,
            -6.65304600e-01,  -4.48658700e+00],
          [ -2.28658500e-01,  -2.48859700e-01,  -6.14913600e+00,
            -2.48707200e-01,  -1.18577100e+00],
          [ -1.93939400e-01,  -2.08119900e-01,  -7.46226500e+00,
            -2.08031700e-01,  -1.20300800e+00],
          [ -3.55635700e-01,  -4.09525000e-01,  -2.14132500e+01,
            -4.08815100e-01,  -2.75958600e+00],
          [ -5.73360000e-02,  -5.84700000e-02,  -4.12946200e+00,
            -5.84681000e-02,  -4.86586900e-01],
          [  3.09828000e-02,   3.06685000e-02,   1.86551100e+00,
             3.06682000e-02,   2.40413800e-01],
          [ -2.11924300e-01,  -2.29071300e-01,  -2.18386100e+01,
            -2.28953000e-01,  -2.15130900e+00],
          [ -3.10989000e-01,  -3.50739300e-01,  -4.19249500e+01,
            -3.50300400e-01,  -3.61084500e+00],
          [ -9.22250000e-03,  -9.25100000e-03,  -1.13679700e+00,
            -9.25100000e-03,  -1.02392100e-01],
          [  2.39402500e-01,   2.22589700e-01,   1.88577300e+01,
             2.22493500e-01,   2.12475600e+00],
          [  3.35166000e-02,   3.31493000e-02,   4.51842400e+00,
             3.31489000e-02,   3.89155400e-01],
          [  8.49829400e-01,   6.85180200e-01,   3.57627500e+01,
             6.82689900e-01,   5.51291500e+00],
          [  4.12934200e-01,   3.66785200e-01,   4.65392600e+01,
             3.66370400e-01,   4.38379500e+00],
          [  4.64148400e-01,   4.07123200e-01,   6.25726500e+01,
             4.06561900e-01,   5.38915500e+00],
          [  1.71104600e+00,   1.19474800e+00,   1.12676500e+02,
             1.18311900e+00,   1.38850500e+01],
          [  1.26571800e+00,   9.46389000e-01,   1.30431000e+02,
             9.40244600e-01,   1.28486900e+01],
          [ -3.48532600e-01,  -3.99988300e-01,  -2.95638100e+01,
            -3.99328600e-01,  -3.20997700e+00],
          [ -4.04340300e-01,  -4.76960100e-01,  -4.10254300e+01,
            -4.75818000e-01,  -4.07286500e+00],
          [ -4.92057900e-01,  -6.08818300e-01,  -9.34509600e+01,
            -6.06357200e-01,  -6.78109700e+00],
          [ -4.02876400e-01,  -4.74878400e-01,  -9.15226200e+01,
            -4.73751900e-01,  -6.07225700e+00],
          [ -5.15056700e-01,  -6.46013300e-01,  -2.19014600e+02,
            -6.43043500e-01,  -1.06209700e+01],
          [ -8.70423000e-02,  -8.97043000e-02,  -1.26361400e+01,
            -8.96975000e-02,  -1.04875100e+00],
          [  1.28362300e-01,   1.23247800e-01,   1.70383300e+01,
             1.23231000e-01,   1.47887800e+00],
          [ -2.39271900e-01,  -2.61562100e-01,  -9.30283300e+01,
            -2.61384400e-01,  -4.71795100e+00],
          [  7.37246500e-01,   6.08186000e-01,   6.25359600e+01,
             6.06409700e-01,   6.79002300e+00],
          [ -3.64110000e-02,  -3.68626000e-02,  -1.41565300e+01,
            -3.68621000e-02,  -7.17951200e-01],
          [  2.68833000e-01,   2.47933100e-01,   6.67934100e+01,
             2.47801000e-01,   4.23748400e+00],
          [  5.96389600e-01,   5.07237700e-01,   1.13265500e+02,
             5.06180100e-01,   8.21890300e+00],
          [  1.98218000e-02,   1.96923000e-02,   1.00820900e+01,
             1.96923000e-02,   4.47040700e-01],
          [  7.74936000e-01,   6.34305300e-01,   2.51883900e+02,
             6.32303700e-01,   1.39711800e+01],
          [ -7.63925100e-01,  -1.16591700e+00,  -4.93461700e+02,
            -1.14588000e+00,  -1.94156600e+01],
          [ -6.23771700e-01,  -8.41174800e-01,  -4.40679600e+02,
            -8.34266300e-01,  -1.65796100e+01],
          [ -1.63272900e-01,  -1.73115100e-01,  -6.73975900e+01,
            -1.73064800e-01,  -3.31725800e+00],
          [ -4.28562500e-01,  -5.11932900e-01,  -4.73787800e+02,
            -5.10507400e-01,  -1.42494800e+01],
          [  8.00693000e-02,   7.80269000e-02,   3.95353400e+01,
             7.80226000e-02,   1.77920500e+00],
          [ -2.13674400e-01,  -2.31127400e-01,  -2.15987000e+02,
            -2.31005700e-01,  -6.79344600e+00],
          [ -1.63544000e-02,  -1.64444000e-02,  -1.05642100e+01,
            -1.64444000e-02,  -4.15657600e-01],
          [  2.04900500e-01,   1.92372100e-01,   1.10651300e+02,
             1.92309400e-01,   4.76156600e+00],
          [ -1.94758900e-01,  -2.09067700e-01,  -2.35484100e+02,
            -2.08978200e-01,  -6.77219400e+00],
          [  3.16727400e-01,   2.88367800e-01,   1.87065600e+02,
             2.88162100e-01,   7.69732400e+00],
          [  6.24234900e-01,   5.27632500e-01,   2.57678500e+02,
             5.26448400e-01,   1.26827400e+01],
          [  8.30241100e-01,   6.72002100e-01,   2.86513700e+02,
             6.69644800e-01,   1.54232100e+01],
          [  6.55140000e-03,   6.53710000e-03,   7.92130700e+00,
             6.53710000e-03,   2.27805800e-01],
          [  3.41595200e-01,   3.08985000e-01,   2.88667600e+02,
             3.08733300e-01,   9.93012900e+00]])
        # Coefficient estimates and standard errors.
        self.params = np.array([ -0.5369833,   6.47296332,  16.20336802,
            38.96617431])
        self.bse = np.array([ 0.13341238,  2.1349966 ,  3.87411875,  8.19235553])
        self.aic_R = 328.39209118952965
        #TODO: the below will fail
        self.aic_Stata = 7.381090276021671
        self.deviance = 15.093762327607557
        self.scale = 0.29512089119443752
        self.null_deviance = 27.92207137420696  # from R bug in RPy
        #NOTE: our scale is Stata's dispers_p (pearson?)
        #NOTE: if scale is analagous to Stata's dispersion, then this might be
        #where the discrepancies come from?
        # self.llf = -159.19604559476483 # From R
        self.llf = -173.1461666245201  # From Stata
        self.bic_Stata = -155.2390821535193
        self.df_model = 3
        self.df_resid = 44
        self.chi2 = 51.56632068622578
        # Fitted (predicted) mean values, one per observation.
        self.fittedvalues = np.array([  6.21019277,   4.06225956,
            7.28415938,  11.04304251,
             8.89510929,   2.98829295,   5.13622616,   7.82114268,
             8.89510929,   7.82114268,  11.04304251,  12.65399242,
            12.11700911,   9.43209259,  12.65399242,   5.67320947,
            11.58002581,  12.65399242,   8.35812599,  11.04304251,
             9.46125627,  10.53522287,  14.294106  ,  15.36807261,
            19.12695574,  12.68315609,  12.14617279,  18.58997243,
             9.46125627,  18.58997243,  15.90505591,  14.294106  ,
            20.20092234,  17.51600582,  25.63546061,  26.17244391,
            22.95054409,  28.85736043,  24.0245107 ,  28.32037713,
            25.63546061,  24.561494  ,  29.39434374,  25.09847731,
            22.95054409,  21.87657748,  29.39434374,  27.24641052])
class Cpunish(object):
    '''
    Expected results for a Poisson GLM on the capital-punishment data.

    The following are from the R script in models.datasets.cpunish
    Slightly different than published results, but should be correct
    Probably due to rounding in cleaning?
    '''
    def __init__(self):
        # Coefficient estimates.
        self.params = (2.611017e-04, 7.781801e-02, -9.493111e-02, 2.969349e-01,
                2.301183e+00, -1.872207e+01, -6.801480e+00)
        # Standard errors of the coefficients.
        self.bse = (5.187132e-05, 7.940193e-02, 2.291926e-02, 4.375164e-01,
                4.283826e-01, 4.283961e+00, 4.146850e+00)
        # Null-model deviance and degrees of freedom.
        self.null_deviance = 136.57281747225
        self.df_null = 16
        # Residual deviance and model/residual df.
        self.deviance = 18.591641759528944
        self.df_resid = 10
        self.df_model = 6
        # R and Stata AIC conventions differ; keep both references.
        self.aic_R = 77.8546573896503  # same as Stata
        self.aic_Stata = 4.579685683305706
        self.bic_Stata = -9.740492454486446
        self.chi2 = 128.8021169250578  # from Stata not in sm
        self.llf = -31.92732869482515
        # Poisson family: dispersion fixed at 1.
        self.scale = 1
        self.pearson_chi2 = 24.75374835
        # Residuals are stored in a companion fixture module.
        self.resids = glm_test_resids.cpunish_resids
        # Fitted (predicted) mean values, one per observation.
        self.fittedvalues = np.array([35.2263655, 8.1965744, 1.3118966,
            3.6862982, 2.0823003, 1.0650316, 1.9260424, 2.4171405,
            1.8473219, 2.8643241, 3.1211989, 3.3382067, 2.5269969,
            0.8972542, 0.9793332, 0.5346209, 1.9790936])
class InvGauss(object):
'''
Usef
Data was generated by Hardin and Hilbe using Stata.
Note only the first 5000 observations are used because
the models code currently uses np.eye.
'''
# np.random.seed(54321)
# x1 = np.abs(stats.norm.ppf((np.random.random(5000))))
# x2 = np.abs(stats.norm.ppf((np.random.random(5000))))
# X = np.column_stack((x1,x2))
# X = add_constant(X)
# params = np.array([.5, -.25, 1])
# eta = np.dot(X, params)
# mu = 1/np.sqrt(eta)
# sigma = .5
# This isn't correct. Errors need to be normally distributed
# But Y needs to be Inverse Gaussian, so we could build it up
# by throwing out data?
# Refs: Lai (2009) Generating inverse Gaussian random variates by
# approximation
# Atkinson (1982) The simulation of generalized inverse gaussian and
# hyperbolic random variables seems to be the canonical ref
# Y = np.dot(X,params) + np.random.wald(mu, sigma, 1000)
# model = GLM(Y, X, family=models.family.InverseGaussian(link=\
# models.family.links.identity))
def __init__(self):
# set up data #
filename = os.path.join(os.path.dirname(os.path.abspath(__file__)),
"inv_gaussian.csv")
data=np.genfromtxt(open(filename, 'rb'), delimiter=",", dtype=float)[1:]
self.endog = data[:5000,0]
self.exog = data[:5000,1:]
self.exog = add_constant(self.exog)
#class InvGaussDefault(InvGauss)
# def __init__(self):
# super(InvGaussDefault, self).__init__()
# Results
#NOTE: loglikelihood difference in R vs. Stata vs. Models
# is the same situation as gamma
self.params = (0.4519770, -0.2508288, 1.0359574)
self.bse = (0.03148291, 0.02237211, 0.03429943)
self.null_deviance = 1520.673165475461
self.df_null = 4999
self.deviance = 1423.943980407997
self.df_resid = 4997
self.df_model = 2
self.aic_R = 5059.41911646446
self.aic_Stata = 1.552280060977946
self.bic_Stata = -41136.47039418921
self.llf = -3877.700354 # Stata is same as ours with scale set to 1
# self.llf = -2525.70955823223 # from R, close to ours
self.scale = 0.2867266359127567
self.pearson_chi2 = 1432.771536
self.resids = glm_test_resids.invgauss_resids
self.fittedvalues = np.array([ 1.0404339 , 0.96831526,
0.81265833, 0.9958362 , 1.05433442,
1.09866137, 0.95548191, 1.38082105, 0.98942888, 0.96521958,
1.02684056, 0.91412576, 0.91492102, 0.92639676, 0.96763425,
0.80250852, 0.85281816, 0.90962261, 0.95550299, 0.86386815,
0.94760134, 0.94269533, 0.98960509, 0.84787252, 0.78949111,
0.76873582, 0.98933453, 0.95105574, 0.8489395 , 0.88962971,
0.84856357, 0.88567313, 0.84505405, 0.84626147, 0.77250421,
0.90175601, 1.15436378, 0.98375558, 0.83539542, 0.82845381,
0.90703971, 0.85546165, 0.96707286, 0.84127197, 0.82096543,
1.1311227 , 0.87617029, 0.91194419, 1.05125511, 0.95330314,
0.75556148, 0.82573228, 0.80424982, 0.83800144, 0.8203644 ,
0.84423807, 0.98348433, 0.93165089, 0.83968706, 0.79256287,
1.0302839 , 0.90982028, 0.99471562, 0.70931825, 0.85471721,
1.02668021, 1.11308301, 0.80497105, 1.02708486, 1.07671424,
0.821108 , 0.86373486, 0.99104964, 1.06840593, 0.94947784,
0.80982122, 0.95778065, 1.0254212 , 1.03480946, 0.83942363,
1.17194944, 0.91772559, 0.92368795, 1.10410916, 1.12558875,
1.11290791, 0.87816503, 1.04299294, 0.89631173, 1.02093004,
0.86331723, 1.13134858, 1.01807861, 0.98441692, 0.72567667,
1.42760495, 0.78987436, 0.72734482, 0.81750166, 0.86451854,
0.90564264, 0.81022323, 0.98720325, 0.98263709, 0.99364823,
0.7264445 , 0.81632452, 0.7627845 , 1.10726938, 0.79195664,
0.86836774, 1.01558149, 0.82673675, 0.99529548, 0.97155636,
0.980696 , 0.85460503, 1.00460782, 0.77395244, 0.81229831,
0.94078297, 1.05910564, 0.95921954, 0.97841172, 0.93093166,
0.93009865, 0.89888111, 1.18714408, 0.98964763, 1.03388898,
1.67554215, 0.82998876, 1.34100687, 0.86766346, 0.96392316,
0.91371033, 0.76589296, 0.92329051, 0.82560326, 0.96758148,
0.8412995 , 1.02550678, 0.74911108, 0.8751611 , 1.01389312,
0.87865556, 1.24095868, 0.90678261, 0.85973204, 1.05617845,
0.94163038, 0.88087351, 0.95699844, 0.86083491, 0.89669384,
0.78646825, 1.0014202 , 0.82399199, 1.05313139, 1.06458324,
0.88501766, 1.19043294, 0.8458026 , 1.00231535, 0.72464305,
0.94790753, 0.7829744 , 1.1953009 , 0.85574035, 0.95433052,
0.96341484, 0.91362908, 0.94097713, 0.87273804, 0.81126399,
0.72715262, 0.85526116, 0.76015834, 0.8403826 , 0.9831501 ,
1.17104665, 0.78862494, 1.01054909, 0.91511601, 1.0990797 ,
0.91352124, 1.13671162, 0.98793866, 1.0300545 , 1.04490115,
0.85778231, 0.94824343, 1.14510618, 0.81305136, 0.88085051,
0.94743792, 0.94875465, 0.96206997, 0.94493612, 0.93547218,
1.09212018, 0.86934651, 0.90532353, 1.07066001, 1.26197714,
0.93858662, 0.9685039 , 0.7946546 , 1.03052031, 0.75395899,
0.87527062, 0.82156476, 0.949774 , 1.01000235, 0.82613526,
1.0224591 , 0.91529149, 0.91608832, 1.09418385, 0.8228272 ,
1.06337472, 1.05533176, 0.93513063, 1.00055806, 0.95474743,
0.91329368, 0.88711836, 0.95584926, 0.9825458 , 0.74954073,
0.96964967, 0.88779583, 0.95321846, 0.95390055, 0.95369029,
0.94326714, 1.31881201, 0.71512263, 0.84526602, 0.92323824,
1.01993108, 0.85155992, 0.81416851, 0.98749128, 1.00034192,
0.98763473, 1.05974138, 1.05912658, 0.89772172, 0.97905626,
1.1534306 , 0.92304181, 1.16450278, 0.7142307 , 0.99846981,
0.79861247, 0.73939835, 0.93776385, 1.0072242 , 0.89159707,
1.05514263, 1.05254569, 0.81005146, 0.95179784, 1.00278795,
1.04910398, 0.88427798, 0.74394266, 0.92941178, 0.83622845,
0.84064958, 0.93426956, 1.03619314, 1.22439347, 0.73510451,
0.82997071, 0.90828036, 0.80866989, 1.34078212, 0.85079169,
0.88346039, 0.76871666, 0.96763454, 0.66936914, 0.94175741,
0.97127617, 1.00844382, 0.83449557, 0.88095564, 1.17711652,
1.0547188 , 1.04525593, 0.93817487, 0.77978294, 1.36143199,
1.16127997, 1.03792952, 1.03151637, 0.83837387, 0.94326066,
1.0054787 , 0.99656841, 1.05575689, 0.97641643, 0.85108163,
0.82631589, 0.77407305, 0.90566132, 0.91308164, 0.95560906,
1.04523011, 1.03773723, 0.97378685, 0.83999133, 1.06926871,
1.01073982, 0.9804959 , 1.06473061, 1.25315673, 0.969175 ,
0.63443508, 0.84574684, 1.06031239, 0.93834605, 1.01784925,
0.93488249, 0.80240225, 0.88757274, 0.9224097 , 0.99158962,
0.87412592, 0.76418199, 0.78044069, 1.03117412, 0.82042521,
1.10272129, 1.09673757, 0.89626935, 1.01678612, 0.84911824,
0.95821431, 0.99169558, 0.86853864, 0.92172772, 0.94046199,
0.89750517, 1.09599258, 0.92387291, 1.07770118, 0.98831383,
0.86352396, 0.83079533, 0.94431185, 1.12424626, 1.02553104,
0.8357513 , 0.97019669, 0.76816092, 1.34011343, 0.86489527,
0.82156358, 1.25529129, 0.86820218, 0.96970237, 0.85850546,
0.97429559, 0.84826078, 1.02498396, 0.72478517, 0.993497 ,
0.76918521, 0.91079198, 0.80988325, 0.75431095, 1.02918073,
0.88884197, 0.82625507, 0.78564563, 0.91505355, 0.88896863,
0.85882361, 0.81538316, 0.67656235, 0.8564822 , 0.82473022,
0.92928331, 0.98068415, 0.82605685, 1.0150412 , 1.00631678,
0.92405101, 0.88909552, 0.94873568, 0.87657342, 0.8280683 ,
0.77596382, 0.96598811, 0.78922426, 0.87637606, 0.98698735,
0.92207026, 0.71487846, 1.03845478, 0.70749745, 1.08603388,
0.92697779, 0.86470448, 0.70119494, 1.00596847, 0.91426549,
1.05318838, 0.79621712, 0.96169742, 0.88053405, 0.98963934,
0.94152997, 0.88413591, 0.75035344, 0.86007123, 0.83713514,
0.91234911, 0.79562744, 0.84099675, 1.0334279 , 1.00272243,
0.95359383, 0.84292969, 0.94234155, 0.90190899, 0.97302022,
1.1009829 , 1.0148975 , 0.99082987, 0.75916515, 0.9204784 ,
0.94477378, 1.01108683, 1.00038149, 0.9259798 , 1.19400436,
0.80191877, 0.79565851, 0.81865924, 0.79003506, 0.8995508 ,
0.73137983, 0.88336018, 0.7855268 , 1.04478073, 0.90857981,
1.16076951, 0.76096486, 0.90004113, 0.83819665, 0.95295365,
1.09911441, 0.78498197, 0.95094991, 0.94333419, 0.95131688,
0.82961049, 1.08001761, 1.06426458, 0.94291798, 1.04381938,
0.90380364, 0.74060138, 0.98701862, 0.72250236, 0.86125293,
0.76488061, 0.9858051 , 0.98099677, 0.96849209, 0.90053351,
0.88469597, 0.80688516, 1.06396217, 1.02446023, 0.911863 ,
0.98837746, 0.91102987, 0.92810392, 1.13526335, 1.00419541,
1.00866175, 0.74352261, 0.91051641, 0.81868428, 0.93538014,
0.87822651, 0.93278572, 1.0356074 , 1.25158731, 0.98372647,
0.81335741, 1.06441863, 0.80305786, 0.95201148, 0.90283451,
1.17319519, 0.8984894 , 0.88911288, 0.91474736, 0.94512294,
0.92956283, 0.86682085, 1.08937227, 0.94825713, 0.9787145 ,
1.16747163, 0.80863682, 0.98314119, 0.91052823, 0.80913225,
0.78503169, 0.78751737, 1.08932193, 0.86859845, 0.96847458,
0.93468839, 1.10769915, 1.1769249 , 0.84916138, 1.00556408,
0.84508585, 0.92617942, 0.93985886, 1.17303268, 0.81172495,
0.93482682, 1.04082486, 1.03209348, 0.97220394, 0.90274672,
0.93686291, 0.91116431, 1.14814563, 0.83279158, 0.95853283,
1.0261179 , 0.95779432, 0.86995883, 0.78164915, 0.89946906,
0.9194465 , 0.97919367, 0.92719039, 0.89063569, 0.80847805,
0.81192101, 0.75044535, 0.86819023, 1.03420014, 0.8899434 ,
0.94899544, 0.9860773 , 1.10047297, 1.00243849, 0.82153972,
1.14289945, 0.8604684 , 0.87187524, 1.00415032, 0.78460709,
0.86319884, 0.92818335, 1.08892111, 1.06841003, 1.00735918,
1.20775251, 0.72613554, 1.25768191, 1.08573511, 0.89671127,
0.91259535, 1.01414208, 0.87422903, 0.82720677, 0.9568079 ,
1.00450416, 0.91043845, 0.84095709, 1.08010574, 0.69848293,
0.90769214, 0.94713501, 1.14808251, 1.0605676 , 1.21734482,
0.78578521, 1.01516235, 0.94330326, 0.98363817, 0.99650084,
0.74280796, 0.96227123, 0.95741454, 1.00980406, 0.93468092,
1.10098591, 1.18175828, 0.8553791 , 0.81713219, 0.82912143,
0.87599518, 1.15006511, 1.03151163, 0.8751847 , 1.15701331,
0.73394166, 0.91426368, 0.96953458, 1.13901709, 0.83028721,
1.15742641, 0.9395442 , 0.98118552, 0.89585426, 0.74147117,
0.8902096 , 1.00212097, 0.97665858, 0.92624514, 0.98006601,
0.9507215 , 1.00889825, 1.2406772 , 0.88768719, 0.76587533,
1.0081044 , 0.89608494, 1.00083526, 0.85594415, 0.76425576,
1.0286636 , 1.13570272, 0.82020405, 0.81961271, 1.04586579,
1.26560245, 0.89721521, 1.19324037, 0.948205 , 0.79414261,
0.85157002, 0.95155101, 0.91969239, 0.87699126, 1.03452982,
0.97093572, 1.14355781, 0.85088592, 0.79032079, 0.84521733,
0.99547581, 0.87593455, 0.8776799 , 1.05531013, 0.94557017,
0.91538439, 0.79679863, 1.03398557, 0.88379021, 0.98850319,
1.05833423, 0.90055078, 0.92267584, 0.76273738, 0.98222632,
0.86392524, 0.78242646, 1.19417739, 0.89159895, 0.97565002,
0.85818308, 0.85334266, 1.85008011, 0.87199282, 0.77873231,
0.78036174, 0.96023918, 0.91574121, 0.89217979, 1.16421151,
1.29817786, 1.18683283, 0.96096225, 0.89964569, 1.00401442,
0.80758845, 0.89458758, 0.7994919 , 0.85889356, 0.73147252,
0.7777221 , 0.9148438 , 0.72388117, 0.91134001, 1.0892724 ,
1.01736424, 0.86503014, 0.77344917, 1.04515616, 1.06677211,
0.93421936, 0.8821777 , 0.91860774, 0.96381507, 0.70913689,
0.82354748, 1.12416046, 0.85989778, 0.90588737, 1.22832895,
0.65955579, 0.93828405, 0.88946418, 0.92152859, 0.83168025,
0.93346887, 0.96456078, 0.9039245 , 1.03598695, 0.78405559,
1.21739525, 0.79019383, 0.84034646, 1.00273203, 0.96356393,
0.948103 , 0.90279217, 1.0187839 , 0.91630508, 1.15965854,
0.84203423, 0.98803156, 0.91604459, 0.90986512, 0.93384826,
0.76687038, 0.96251902, 0.80648134, 0.77336547, 0.85720164,
0.9351947 , 0.88004728, 0.91083961, 1.06225829, 0.90230812,
0.72383932, 0.8343425 , 0.8850996 , 1.19037918, 0.93595522,
0.85061223, 0.84330949, 0.82397482, 0.92075047, 0.86129584,
0.99296756, 0.84912251, 0.8569699 , 0.75252201, 0.80591772,
1.03902954, 1.04379139, 0.87360195, 0.97452318, 0.93240609,
0.85406409, 1.11717394, 0.95758536, 0.82772817, 0.67947416,
0.85957788, 0.93731268, 0.90349227, 0.79464185, 0.99148637,
0.8461071 , 0.95399991, 1.04320664, 0.87290871, 0.96780849,
0.99467159, 0.96421545, 0.80174643, 0.86475812, 0.74421362,
0.85230296, 0.89891758, 0.77589592, 0.98331957, 0.87387233,
0.92023388, 1.03037742, 0.83796515, 1.0296667 , 0.85891747,
1.02239978, 0.90958406, 1.09731875, 0.8032638 , 0.84482057,
0.8233118 , 0.86184709, 0.93105929, 0.99443502, 0.77442109,
0.98367982, 0.95786272, 0.81183444, 1.0526009 , 0.86993018,
0.985886 , 0.92016756, 1.00847155, 1.2309469 , 0.97732206,
0.83074957, 0.87406987, 0.95268492, 0.94189139, 0.87056443,
1.0135018 , 0.93051004, 1.5170931 , 0.80948763, 0.83737473,
1.05461331, 0.97501633, 1.01449333, 0.79760056, 1.05756482,
0.97300884, 0.92674035, 0.8933763 , 0.91624084, 1.13127607,
0.88115305, 0.9351562 , 0.91430431, 1.11668229, 1.10000526,
0.88171963, 0.74914744, 0.94610698, 1.13841497, 0.90551414,
0.89773592, 1.01696097, 0.85096063, 0.80935471, 0.68458106,
1.2718979 , 0.93550219, 0.96071403, 0.75434294, 0.95112257,
1.16233368, 0.73664915, 1.02195777, 1.07487625, 0.8937445 ,
0.78006023, 0.89588994, 1.16354892, 1.02629448, 0.89208642,
1.02088244, 0.85385355, 0.88586061, 0.94571704, 0.89710576,
0.95191525, 0.99819848, 0.97117841, 1.13899808, 0.88414949,
0.90938883, 1.02937917, 0.92936684, 0.87323594, 0.8384819 ,
0.87766945, 1.05869911, 0.91028734, 0.969953 , 1.11036647,
0.94996802, 1.01305483, 1.03697568, 0.9750155 , 1.04537837,
0.9314676 , 0.86589798, 1.17446667, 1.02564533, 0.82088708,
0.96481845, 0.86148642, 0.79174298, 1.18029919, 0.82132544,
0.92193776, 1.03669516, 0.96637464, 0.83725933, 0.88776321,
1.08395861, 0.91255709, 0.96884738, 0.89840008, 0.91168146,
0.99652569, 0.95693101, 0.83144932, 0.99886503, 1.02819927,
0.95273533, 0.95959945, 1.08515986, 0.70269432, 0.79529303,
0.93355669, 0.92597539, 1.0745695 , 0.87949758, 0.86133964,
0.95653873, 1.09161425, 0.91402143, 1.13895454, 0.89384443,
1.16281703, 0.8427015 , 0.7657266 , 0.92724079, 0.95383649,
0.86820891, 0.78942366, 1.11752711, 0.97902686, 0.87425286,
0.83944794, 1.12576718, 0.9196059 , 0.89844835, 1.10874172,
1.00396783, 0.9072041 , 1.63580253, 0.98327489, 0.68564426,
1.01007087, 0.92746473, 1.01328833, 0.99584546, 0.86381679,
1.0082541 , 0.85414132, 0.87620981, 1.22461203, 1.03935516,
0.86457326, 0.95165828, 0.84762138, 0.83080254, 0.84715241,
0.80323344, 1.09282941, 1.00902453, 1.02834261, 1.09810743,
0.86560231, 1.31568763, 1.03754782, 0.81298745, 1.14500629,
0.87364384, 0.89928367, 0.96118471, 0.83321743, 0.90590461,
0.98739499, 0.79408399, 1.18513754, 1.05619307, 0.99920088,
1.04347259, 1.07689022, 1.24916765, 0.74246274, 0.90949597,
0.87077335, 0.81233276, 1.05403934, 0.98333063, 0.77689527,
0.93181907, 0.98853585, 0.80700332, 0.89570662, 0.97102475,
0.69178123, 0.72950409, 0.89661719, 0.84821737, 0.8724469 ,
0.96453177, 0.9690018 , 0.87132764, 0.91711564, 1.79521288,
0.75894855, 0.90733112, 0.86565687, 0.90433268, 0.83412618,
1.26779628, 1.06999114, 0.73181364, 0.90334838, 0.86634581,
0.76999285, 1.55403008, 0.74712547, 0.84702579, 0.72396203,
0.82292773, 0.73633208, 0.90524618, 0.9954355 , 0.85076517,
0.96097585, 1.21655611, 0.77658146, 0.81026686, 1.07540173,
0.94219623, 0.97472554, 0.72422803, 0.85055855, 0.85905477,
1.17391419, 0.87644114, 1.03573284, 1.16647944, 0.87810532,
0.89134419, 0.83531593, 0.93448128, 1.04967869, 1.00110843,
0.936784 , 1.00143426, 0.79714807, 0.82656251, 0.95057309,
0.93821813, 0.93469098, 0.99825205, 0.95384714, 1.07063008,
0.97603699, 0.816668 , 0.98286184, 0.86061483, 0.88166732,
0.93730982, 0.77633837, 0.87671549, 0.99192439, 0.86452825,
0.95880282, 0.7098419 , 1.12717149, 1.16707939, 0.84854333,
0.87486963, 0.9255293 , 1.06534197, 0.9888494 , 1.09931069,
1.21859221, 0.97489537, 0.82508579, 1.14868922, 0.98076133,
0.85524084, 0.69042079, 0.93012936, 0.96908499, 0.94284892,
0.80114327, 0.919846 , 0.95753354, 1.04536666, 0.77109284,
0.99942571, 0.79004323, 0.91820045, 0.97665489, 0.64689716,
0.89444405, 0.96106598, 0.74196857, 0.92905294, 0.70500318,
0.95074586, 0.98518665, 1.0794044 , 1.00364488, 0.96710486,
0.92429638, 0.94383006, 1.12554253, 0.95199191, 0.87380738,
0.72183594, 0.94453761, 0.98663804, 0.68247366, 1.02761427,
0.93255355, 0.85264705, 1.00341417, 1.07765999, 0.97396039,
0.90770805, 0.82750901, 0.73824542, 1.24491161, 0.83152629,
0.78656996, 0.99062838, 0.98276905, 0.98291014, 1.12795903,
0.98742704, 0.9579893 , 0.80451701, 0.87198344, 1.24746127,
0.95839155, 1.11708725, 0.97113877, 0.7721646 , 0.95781621,
0.67069168, 1.05509376, 0.96071852, 0.99768666, 0.83008521,
0.9156695 , 0.86314088, 1.23081412, 1.14723685, 0.8007289 ,
0.81590842, 1.31857558, 0.7753396 , 1.11091566, 1.03560198,
1.01837739, 0.94882818, 0.82551111, 0.93188019, 0.99532255,
0.93848495, 0.77764975, 0.85192319, 0.79913938, 0.99495229,
0.96122733, 1.13845155, 0.95846389, 0.8891543 , 0.97979531,
0.87167192, 0.88119611, 0.79655111, 0.9298217 , 0.96399321,
1.02005428, 1.06936503, 0.86948022, 1.02560548, 0.9149464 ,
0.83797207, 0.86175383, 0.92455994, 0.89218435, 0.81546463,
0.98488771, 0.92784833, 0.87895608, 0.93366386, 1.17487238,
0.79088952, 0.9237694 , 0.76389869, 0.931953 , 0.76272078,
1.00304977, 0.86612561, 0.87870143, 0.93808276, 1.12489343,
1.00668791, 0.88027101, 0.88845209, 0.88574216, 0.84284514,
0.96594357, 0.94363002, 0.78245367, 0.92941326, 0.99622557,
0.83812683, 0.77901691, 0.9588432 , 0.82057415, 0.95178868,
1.01904651, 0.97598844, 0.99369336, 1.12041918, 1.19432836,
0.91709572, 0.94645855, 0.93656587, 0.68754669, 0.80869784,
0.86704186, 0.83033797, 0.71892193, 0.97549489, 1.12150683,
0.76214802, 1.08564181, 0.84677802, 0.68080207, 1.03577057,
1.07937239, 0.6773357 , 1.0279076 , 0.89945816, 0.97765439,
0.91322633, 0.92490964, 0.92693575, 1.12297137, 0.81825246,
0.87598377, 1.11873032, 0.83472799, 1.21424495, 1.02318444,
1.01563195, 1.05663193, 0.82533918, 0.88766496, 0.95906474,
0.90738779, 0.93509534, 1.06658145, 1.00231797, 1.3131534 ,
0.88839464, 1.081006 , 0.866936 , 0.89030904, 0.91197562,
0.73449761, 0.95767806, 1.03407868, 0.79812826, 1.10555445,
0.85610722, 0.87420881, 1.04251375, 1.14286242, 1.00025972,
0.83742693, 1.11116502, 0.97424809, 0.92059325, 0.93958773,
0.80386755, 0.6881267 , 0.88620708, 1.01715536, 1.12403581,
0.91078992, 0.81101399, 1.17271429, 1.09980447, 0.86063042,
0.80805811, 0.87988444, 0.97398188, 0.91808966, 0.90676805,
0.80042891, 0.84060789, 0.9710147 , 1.00012669, 1.04805667,
0.66912164, 0.96111694, 0.86948596, 0.9056999 , 1.01489333,
1.27876763, 0.873881 , 0.98276702, 0.95553234, 0.82877996,
0.79697623, 0.77015376, 0.8234212 , 1.13394959, 0.96244655,
1.06516156, 0.82743856, 1.02931842, 0.78093489, 1.01322256,
1.00348929, 0.9408142 , 1.06495299, 0.8599522 , 0.81640723,
0.81505589, 1.02506487, 0.91148383, 1.11134309, 0.83992234,
0.82982074, 0.9721429 , 0.98897262, 1.01815004, 0.87838456,
0.80573592, 1.103707 , 0.97326218, 1.08921236, 1.2638062 ,
0.83142563, 1.16028769, 0.86701564, 1.15610014, 0.98303722,
0.87138463, 0.75281511, 1.07715535, 0.91526065, 1.08769832,
0.83598308, 1.03580956, 0.9390066 , 0.78544378, 1.03635836,
0.7974467 , 0.99273331, 0.89639711, 0.9250066 , 1.14323824,
0.9783478 , 1.15460639, 0.94265587, 1.09317654, 0.78585439,
0.99523323, 0.95104776, 0.85582572, 0.96100168, 0.9131529 ,
0.86496966, 0.72414589, 1.05142704, 0.85570039, 0.98217968,
0.99031168, 1.01867086, 0.96781667, 0.98581487, 1.00415938,
1.0339337 , 1.13987579, 1.14205543, 0.83393745, 0.96348647,
0.91895164, 0.77055293, 1.0053723 , 0.93168993, 1.00332386,
1.04195993, 1.11933891, 0.87439883, 0.87156457, 0.96050419,
0.72718399, 1.13546762, 0.89614816, 0.85081037, 0.8831463 ,
0.76370482, 0.99582951, 1.01844155, 1.08611311, 1.15832217,
1.17551069, 0.97057262, 0.95163548, 0.98310701, 0.65874788,
0.9655409 , 0.85675853, 1.34637286, 0.93779619, 1.0005791 ,
0.88104966, 1.14530829, 0.93687034, 1.01472112, 1.62464726,
0.84652357, 0.84639676, 0.87513324, 0.94837881, 0.85425129,
0.89820401, 0.94906277, 0.97796792, 0.98969445, 0.8036801 ,
1.03936478, 0.95898918, 0.82919938, 1.29609354, 0.97833841,
0.86862799, 0.88040491, 0.8741178 , 0.80617278, 0.95983882,
0.9752235 , 0.84292828, 0.9327284 , 0.93297136, 1.06255543,
0.88756716, 1.13601403, 0.72311518, 0.95250034, 0.95369843,
1.02562728, 0.74354691, 0.78463923, 0.88720818, 1.07763289,
0.94502062, 0.81170329, 0.96516347, 0.76884811, 0.84169312,
0.83752837, 1.1487847 , 1.04311868, 0.78128663, 0.74604211,
0.96488513, 1.1722513 , 0.91661948, 1.06642815, 0.92185781,
0.93289001, 0.65208625, 0.75734648, 0.99580571, 1.21871511,
0.96316283, 1.06093093, 0.7914337 , 0.90494572, 0.79235327,
0.90771769, 0.91355145, 0.98754767, 0.88938619, 0.89503537,
0.82764566, 0.77267065, 0.81520031, 0.90423926, 0.94289609,
0.88678376, 1.03209085, 0.81319963, 0.91600997, 0.81608666,
0.72429125, 0.95585073, 1.14039309, 1.00326452, 0.99629944,
0.95647901, 0.8927127 , 0.96558599, 0.86305195, 1.0366906 ,
0.90494731, 0.95148458, 1.11229696, 1.17059748, 0.74867876,
0.99621909, 0.94246499, 0.82403515, 0.92144961, 0.93209989,
0.9705427 , 0.97915309, 0.92431525, 0.7589944 , 0.75208652,
0.89375154, 0.78820016, 1.24061454, 1.08031776, 0.88364539,
0.86909794, 0.98635253, 0.97620372, 1.24278282, 1.01146474,
0.93726261, 0.94411536, 1.08344492, 0.75389972, 1.09979822,
0.84271329, 1.16616317, 0.88177625, 0.8451345 , 0.91355741,
0.99833789, 0.86172172, 0.87076203, 0.83743078, 0.99771528,
1.0469295 , 0.87952668, 1.04362453, 0.96350831, 0.95744466,
0.84284283, 0.8773066 , 0.85984544, 1.00589365, 0.88069101,
1.02331332, 1.06616241, 0.78475212, 1.02296979, 0.81480926,
1.09008244, 0.71435844, 0.79655626, 1.09824162, 0.87785428,
1.18020492, 0.99852432, 0.79028362, 0.80081103, 1.10940685,
1.08752313, 0.90673214, 0.84978348, 0.69466992, 0.77497046,
0.83074014, 0.87865947, 0.78890395, 0.7925195 , 0.99749611,
0.91430636, 0.87863864, 0.95392862, 0.91430684, 0.97358575,
0.87999755, 0.88234274, 0.71682337, 1.09723693, 0.71907671,
0.97487202, 0.71792963, 0.88374828, 0.73386811, 0.9315647 ,
1.05020628, 0.99128682, 0.71831173, 1.07119604, 1.02028122,
1.04696848, 0.93335813, 1.04275931, 0.72181913, 0.8837163 ,
0.90283411, 0.96642474, 0.89851984, 0.8397063 , 0.91185676,
1.00573193, 0.88430729, 0.7738957 , 1.07361285, 0.92617819,
0.64251751, 1.05229257, 0.73378537, 1.08270418, 0.99490809,
1.13634433, 1.11979997, 1.03383516, 1.00661234, 1.05778729,
1.05977357, 1.13779694, 0.91237075, 1.04866775, 0.9163203 ,
0.93152436, 0.83607634, 1.13426049, 1.26438419, 0.93515536,
0.92181847, 0.86558905, 1.01985742, 1.44095931, 0.92256398,
0.83369288, 0.93369164, 0.8243758 , 0.98278708, 0.80512458,
1.02092014, 0.73575074, 1.2214659 , 0.85391033, 0.97617313,
0.82054292, 1.04792993, 0.93961791, 1.01145014, 0.89301558,
0.93167504, 0.88221321, 1.23543354, 0.97023998, 1.00197517,
0.85394662, 0.89426495, 0.81344186, 1.08242456, 0.76253284,
1.00642867, 0.76685541, 1.01487961, 0.84028343, 0.87979545,
0.92796937, 0.99796437, 1.28844084, 1.02827514, 1.03663144,
0.83164521, 0.95644234, 0.77797914, 0.96748275, 1.09139879,
0.84329253, 0.9539873 , 0.80094065, 1.13771172, 0.91557533,
0.93370323, 0.79977904, 1.02721929, 1.16292026, 0.92976802,
0.85806865, 0.97824974, 1.02721582, 0.82773004, 0.9297126 ,
0.93769842, 1.14995068, 1.02895292, 0.90307101, 0.85918303,
1.14903979, 1.0344768 , 0.7502627 , 1.27452448, 1.12150928,
0.87274005, 1.09807041, 0.98634666, 1.03086907, 0.94743667,
0.91145542, 1.04395791, 0.83396016, 0.94783374, 0.96693806,
0.88864359, 0.93400675, 1.08563936, 0.78599906, 0.92142347,
1.15487344, 1.19946426, 0.92729226, 0.83333347, 0.90837637,
0.89191831, 1.0581614 , 0.85162688, 1.10081699, 0.98295351,
0.86684217, 1.00867408, 0.95966205, 0.73170785, 1.3207658 ,
0.87988622, 0.82869937, 0.9620586 , 0.71668579, 1.04105616,
0.71415591, 1.30198958, 0.81934393, 0.86731955, 0.99773712,
0.99943609, 0.87678188, 1.01650692, 0.73917494, 0.92077402,
0.98322263, 0.90623212, 0.88261034, 1.12798871, 0.84698889,
0.85312827, 0.91214965, 0.8778361 , 0.99621569, 0.94155734,
0.66441342, 0.85925635, 0.98064691, 0.97107172, 0.96438785,
0.95670408, 0.87601389, 0.9388234 , 0.91165254, 1.14769638,
0.99856344, 0.84391431, 0.94850194, 0.93754548, 0.86398937,
0.95090327, 1.07959765, 1.16684297, 0.82354834, 0.93165852,
0.91422292, 1.14872038, 0.87050113, 0.92322683, 1.04111597,
0.87780005, 0.94602618, 1.10071675, 0.88412438, 0.91286998,
0.9045216 , 0.91750005, 0.98647095, 1.10986959, 0.98912028,
1.01565645, 0.93891294, 0.97696431, 0.91186476, 0.77363533,
1.00075969, 0.89608139, 0.99828964, 0.87239569, 0.87540604,
0.76152791, 0.82501538, 0.91656546, 0.74389243, 1.07923575,
1.00241137, 1.05628365, 1.04407879, 0.90048788, 1.1134027 ,
0.89745966, 0.96534 , 0.71151925, 0.91798511, 0.7337992 ,
0.83636115, 0.75279928, 0.95570185, 0.89073922, 0.90307955,
0.8030445 , 0.84374939, 0.89769981, 0.99002578, 1.01849373,
0.92436541, 0.79675699, 1.03910383, 1.07487895, 0.8906169 ,
0.97729004, 0.97284392, 0.76338988, 0.82756432, 1.12289431,
0.9582901 , 0.97160038, 0.90141331, 0.83271234, 1.16065947,
0.90605662, 1.13389282, 0.8557889 , 0.77149889, 0.9462268 ,
0.95908887, 1.03399986, 0.92795031, 0.73529029, 0.93630494,
0.96730298, 1.05490026, 0.93313995, 0.96980639, 0.9177592 ,
0.95483326, 0.85262905, 0.95170479, 0.9601628 , 0.94878173,
0.87627934, 1.00561764, 0.83441231, 0.90890643, 0.97177858,
1.26394809, 0.80773622, 0.72205262, 0.87692143, 1.01842034,
0.98128171, 1.10776014, 0.94400422, 0.92697961, 0.79523284,
0.8609763 , 0.96303262, 1.17190075, 1.01259271, 1.04973619,
0.94837034, 0.86592734, 0.85908444, 1.14914962, 0.98113587,
1.03070712, 0.89916573, 0.90618114, 0.93223156, 0.96031901,
0.94162334, 0.98908438, 0.95170104, 0.95056422, 0.81782932,
0.81770133, 1.32039255, 1.28822384, 0.82916292, 1.01626284,
0.97537737, 0.83235746, 0.78645733, 0.77916206, 0.93591612,
0.8469273 , 0.74309279, 0.91331015, 1.11240033, 1.41018987,
0.95320314, 0.95807535, 0.89382722, 0.9259679 , 0.92570222,
0.84567759, 0.82332966, 0.98371126, 1.00248628, 0.72107053,
1.09687436, 0.78399705, 0.85224803, 0.92151262, 0.85618586,
0.88485527, 0.954487 , 0.86659146, 1.12800711, 0.93019359,
0.91388385, 0.95298992, 0.96834137, 0.90256791, 1.01222062,
0.84883116, 1.01234642, 0.91135106, 0.83362478, 0.94928359,
0.82247066, 0.7671973 , 0.85663382, 0.88838144, 0.92491567,
0.88698604, 0.87485584, 1.08494606, 0.96431031, 1.06243095,
1.14062212, 1.02081623, 0.72229471, 0.82390737, 0.86599633,
0.95284398, 0.87238315, 1.02818071, 0.98462575, 0.81992808,
1.01207538, 1.0081178 , 0.88458825, 1.01726135, 0.97708359,
0.79820777, 1.06081843, 0.97028599, 0.95203124, 1.00482088,
0.71764193, 0.88115767, 0.90628038, 0.97304174, 0.77015983,
1.06109546, 0.89575454, 0.94824633, 0.93822134, 0.98048549,
0.812265 , 0.95744328, 0.79087999, 1.0222571 , 0.89100453,
1.03590214, 0.92699983, 0.86840126, 0.99455198, 0.87912973,
0.93506231, 0.80706147, 0.89931563, 0.7861299 , 0.89253527,
0.90052785, 0.82420191, 0.97042004, 1.03249619, 0.92354267,
0.80482118, 0.9007601 , 0.80123508, 0.82285143, 0.88105118,
1.03519622, 0.8620259 , 0.96447485, 0.80399664, 1.00324939,
0.96317193, 0.83260244, 0.98561657, 0.88445103, 0.70777743,
0.81608832, 0.98073402, 1.1206105 , 0.69903403, 0.84353026,
0.9064964 , 0.97055276, 0.82747966, 0.85400205, 1.01205886,
0.85324973, 0.90899616, 0.92797575, 0.94646632, 0.89358892,
0.7981183 , 0.96559671, 0.88352248, 1.09804477, 0.79152196,
1.1054838 , 0.93272283, 0.96165854, 0.8899703 , 0.8792494 ,
0.74563326, 0.85371604, 0.87760912, 0.87184716, 0.92049887,
0.99459292, 0.93699011, 0.90492494, 1.12981885, 1.10621082,
0.91391466, 1.05207781, 1.13395097, 0.87022945, 0.93165871,
0.89083332, 0.99584874, 0.98626911, 1.13885184, 1.17350384,
0.93294232, 0.79602714, 0.93670114, 1.09726582, 1.05378961,
0.9457279 , 1.03257053, 1.11349021, 0.80111296, 0.96415105,
0.99447221, 0.75745769, 0.77537636, 0.83860967, 0.90122484,
0.78850128, 1.19877642, 0.91190085, 0.80851919, 0.79484738,
0.93093657, 0.87619908, 1.22781715, 0.89734952, 0.8678127 ,
0.76177975, 0.82089769, 0.89288915, 1.01603179, 0.95279916,
0.84037366, 0.99962719, 0.84298093, 0.77234882, 0.99876963,
1.01856707, 1.2133211 , 0.73822878, 0.83465671, 1.08879938,
0.8878534 , 1.24133317, 0.89264527, 0.83938655, 1.03853109,
0.9842176 , 0.94257497, 0.98282054, 0.90632313, 0.75810741,
1.02540204, 0.86648513, 0.98430307, 0.84561701, 1.13483974,
1.12446434, 1.00220923, 1.23248603, 0.98999724, 0.81980761,
0.91334393, 0.92831557, 1.16798373, 0.8888053 , 0.9319632 ,
0.89206108, 0.86764558, 0.69337981, 0.9021983 , 1.09931186,
1.15290804, 0.62304114, 1.1205393 , 1.27030677, 1.12718725,
0.93002501, 0.83367301, 0.96589068, 0.86578968, 0.79204086,
0.85124905, 0.89121046, 0.96406141, 0.99249204, 0.93363878,
1.11258502, 0.92020983, 1.16020824, 0.99075915, 0.73994574,
0.9335638 , 0.97410789, 1.00029038, 1.43611904, 0.93089581,
0.94758878, 0.84808364, 0.92192819, 1.0249259 , 0.69529827,
0.94629021, 0.7330735 , 1.07902207, 0.93022729, 0.77375973,
0.95019291, 0.92333668, 0.81483081, 0.78044978, 0.85101115,
0.88859716, 0.88720344, 0.89291167, 1.10372601, 0.91132273,
1.04156844, 0.94867703, 0.83546241, 0.84227545, 0.97043199,
0.73281541, 0.74512501, 0.9128489 , 0.99223543, 0.7319106 ,
0.93065507, 1.07907995, 0.86895295, 0.84344015, 0.89394039,
0.88802964, 1.00580322, 1.04286883, 0.82233574, 1.0279258 ,
0.97550628, 1.03867605, 1.10231813, 0.9642628 , 0.91684874,
1.11066089, 0.99439688, 0.88595489, 0.88725073, 0.78921585,
0.80397616, 0.71088468, 0.98316478, 0.72820659, 0.96964036,
1.03825415, 1.01438989, 1.02763769, 1.29949298, 1.06450406,
0.86198627, 0.85588074, 0.90445183, 1.01268187, 0.87927487,
0.9263951 , 0.93582126, 0.88738294, 1.20707424, 0.92887657,
0.97891062, 0.92893689, 0.84846424, 0.96287008, 0.99565057,
0.93483385, 1.21357183, 0.82369562, 0.65144728, 1.11249654,
0.7785981 , 0.88248898, 0.8953217 , 0.95884666, 0.77538093,
0.82272417, 0.91073072, 1.17185169, 0.99645708, 0.88693463,
0.90293325, 0.93368474, 0.87575633, 1.01924242, 0.80011545,
0.99762674, 0.75834671, 0.91952152, 0.86754419, 0.81073894,
0.8880299 , 0.74868718, 0.99979109, 0.90652154, 0.92463566,
0.93894041, 0.92370595, 0.88766357, 1.04614978, 1.77193759,
0.85480724, 0.85208602, 0.96154559, 0.95832935, 0.84210613,
0.9604567 , 0.88597666, 1.0010723 , 0.91890105, 1.10529207,
0.91123688, 0.88466788, 1.09759195, 0.8946647 , 0.78066485,
1.04376296, 1.02951755, 0.88455241, 0.99284282, 0.82423576,
0.80612213, 0.80915541, 0.9482253 , 0.8887192 , 0.86163309,
0.891385 , 0.84850622, 1.03353375, 1.09248204, 1.05337218,
0.85927317, 0.89167858, 1.04868715, 0.92933249, 1.1177299 ,
0.99846776, 0.82418972, 0.86041965, 0.88015748, 0.89785813,
0.85997945, 0.97102367, 0.86679181, 1.00848475, 0.9091588 ,
0.92565039, 0.84019067, 0.86978485, 1.21977681, 1.14920817,
1.05177219, 0.84202905, 0.85356083, 1.01379321, 0.93364219,
1.01999942, 0.85906744, 0.98178266, 0.87218886, 0.93983742,
0.79713053, 1.01123331, 0.86551625, 0.81983929, 0.86782985,
0.86735664, 1.43316935, 0.8490094 , 0.99909103, 0.85715326,
0.89452366, 1.08380518, 0.74686847, 1.62233058, 0.81046611,
0.83563461, 0.96925792, 0.82863186, 0.87147202, 0.92609558,
0.8879082 , 0.93933353, 0.90043906, 0.81677055, 0.78016427,
0.68871014, 0.83329967, 0.81570171, 0.89780443, 0.81337668,
1.00772749, 0.96220158, 0.90035459, 1.06031906, 0.85832752,
0.93636203, 0.96336629, 0.94686138, 0.98499419, 0.87223701,
0.96079992, 0.81302793, 0.99287479, 0.99369685, 1.21897038,
0.94547481, 0.80785132, 1.02033902, 0.93270741, 0.90386512,
1.05290969, 1.08873223, 0.81226537, 0.87185463, 0.96283379,
0.95065022, 1.07603824, 1.22279786, 0.83749284, 0.93504869,
0.93554565, 0.95255889, 0.96665227, 0.92370811, 0.76627742,
1.14267254, 0.98268052, 1.10017739, 0.79569048, 0.86494449,
1.17939799, 0.80655859, 0.76799971, 1.0018905 , 0.83051793,
1.37419036, 1.10424623, 0.93729691, 0.99655914, 0.94900303,
1.157402 , 0.93397459, 0.8133195 , 0.8592273 , 1.024661 ,
0.83708977, 1.06537435, 0.93561942, 1.00402051, 0.68981047,
0.92807172, 0.72192097, 1.232419 , 0.97080757, 0.90350598,
0.95122672, 1.04663207, 0.79080723, 0.8421381 , 1.01956925,
0.93307897, 0.88011784, 0.78674974, 0.97537097, 0.7582792 ,
0.85704507, 0.97683858, 0.7739793 , 0.96245444, 0.99506991,
0.76853035, 0.90875698, 0.97951121, 0.93350388, 1.16380858,
0.8154485 , 1.16902243, 0.98644779, 0.969998 , 0.73120517,
1.19059456, 0.85953661, 0.99193867, 0.88144929, 0.99254885,
1.02956121, 0.90689455, 0.89494433, 0.85625065, 0.86227273,
0.99830845, 0.97635222, 0.83420327, 1.02359646, 0.93694813,
0.88462353, 0.97040788, 1.02543309, 0.91904348, 1.2527365 ,
0.82235812, 0.92026753, 0.93935859, 0.88919482, 1.00405208,
1.06835782, 1.34738363, 0.97831176, 0.92053317, 1.09692339,
0.86156677, 1.02455351, 1.25572326, 0.89721167, 0.95787106,
0.85059479, 0.92044416, 0.99210399, 0.94334232, 0.76604642,
0.8239008 , 0.70790815, 1.06013034, 1.12729012, 0.88584074,
0.91995677, 0.82002708, 0.91612106, 0.86556894, 0.88014564,
0.95764757, 0.96559535, 0.97882426, 0.70725389, 0.9273384 ,
0.86511581, 0.85436928, 1.26804081, 1.02018914, 0.95359667,
0.89336753, 0.91851577, 0.78166458, 1.02673106, 1.01340992,
1.34916703, 0.77389899, 1.12009884, 0.94523179, 0.87991868,
0.82919239, 0.98198121, 0.83653977, 0.91748611, 1.0642761 ,
0.86964263, 0.86304793, 1.11500797, 0.7234409 , 1.00464282,
1.01835251, 0.73389264, 0.88471293, 0.85754755, 1.05383962,
0.73121546, 0.85445808, 0.768308 , 0.81396206, 1.01261272,
0.76696225, 1.01770784, 0.76742866, 0.98390583, 0.96277488,
0.87998292, 0.85264282, 1.12704234, 0.79612317, 0.92206712,
1.09846877, 0.99874997, 0.87707457, 1.03404785, 1.00726392,
0.91613763, 0.74242708, 0.80247702, 0.90702146, 0.81638055,
0.78507729, 1.00066404, 0.84687328, 0.76488847, 0.89697089,
0.82524207, 0.84940145, 1.022041 , 0.75856559, 1.15434195,
1.09781849, 0.93256477, 0.96021119, 1.00796782, 0.88193493,
0.87902107, 0.82245196, 1.04739362, 1.133521 , 0.82969043,
1.01007529, 1.07135903, 0.981338 , 0.86178089, 0.77930618,
0.82512349, 1.2017057 , 1.30452154, 1.12652148, 1.03670177,
0.90631643, 0.74222362, 0.84452965, 0.86366363, 0.79192948,
1.10288297, 0.9554774 , 1.00912465, 0.95545229, 0.93584303,
0.91604017, 0.91681165, 0.76792072, 1.66615421, 0.99044246,
1.05068209, 0.88197497, 0.91153792, 0.82702508, 0.95182748,
1.05320356, 0.8466656 , 1.01676717, 0.65881123, 1.02589358,
1.03902555, 1.00199915, 1.03022137, 0.93427176, 0.94600332,
0.94594696, 0.86465228, 0.91241272, 0.72232997, 0.93380167,
1.1960032 , 0.87463367, 0.78428202, 0.88088 , 0.97202961,
0.99425528, 0.89567214, 0.84908979, 0.81004889, 0.85484368,
0.68478631, 0.96563032, 0.78298607, 0.71894276, 0.88632131,
0.8885966 , 0.99235811, 0.84002222, 0.91265424, 0.91999157,
0.89786651, 1.18062511, 0.92378385, 0.82501238, 1.09009807,
0.96787582, 1.12456979, 0.86339677, 0.8786218 , 0.89865768,
1.02943564, 0.98886502, 0.97135566, 0.95914954, 1.05080931,
0.76554446, 0.80142172, 0.99661393, 1.14749469, 0.93695459,
0.95769957, 1.00811373, 1.00352699, 0.98747546, 0.99436785,
1.10256609, 0.84366101, 0.85931876, 0.90745126, 1.04928733,
0.84499693, 1.14018589, 1.2337188 , 0.90516077, 0.84991869,
0.72984467, 0.9729476 , 0.97483938, 0.88626286, 1.02838695,
0.89750089, 0.80324802, 1.40726294, 0.91149383, 0.86837826,
1.21798148, 0.96459285, 0.71897535, 0.76230781, 0.88042964,
0.8205186 , 1.0517869 , 0.74269565, 0.98278109, 1.1454159 ,
1.03806052, 0.75238659, 0.94224089, 0.94931526, 1.24018529,
0.99048689, 0.88108251, 0.81008694, 0.95443294, 0.99975781,
0.83336879, 0.74422074, 0.87934792, 0.81994499, 0.98684546,
0.82176924, 0.91652824, 0.77571479, 0.77039071, 0.9951089 ,
0.92896121, 0.96234268, 1.00295341, 1.01455466, 0.75014075,
0.95568202, 0.80995874, 1.24671334, 0.89480962, 0.81300194,
0.76967074, 0.92514927, 0.89610963, 0.97441759, 1.19354494,
0.87041262, 0.97344039, 0.88983828, 0.91614149, 0.85782814,
0.78403196, 0.96665254, 0.91000054, 0.78641804, 0.96920714,
0.89670528, 0.79247817, 1.04189638, 0.86777037, 1.18686087,
0.79506403, 0.92389297, 0.76211023, 0.93617759, 0.91879446,
0.8207635 , 0.78984486, 0.93005953, 0.78743101, 0.9814347 ,
0.94882561, 0.9577075 , 0.81121566, 1.01025446, 0.90587214,
0.94842798, 0.8811194 , 1.01942816, 0.94698308, 0.92603676,
0.86119014, 0.97543551, 0.84730649, 0.77552262, 0.97536054,
0.96944817, 0.8736804 , 0.86809673, 0.98134953, 1.16303105,
0.81534447, 1.35930512, 0.83221293, 0.94136243, 0.76926289,
1.05844282, 0.87783288, 0.78921971, 0.84360428, 0.78722128,
1.00022607, 0.96779519, 0.95891975, 0.91900001, 1.07307813,
1.03713093, 0.96257742, 0.90363152, 0.88729834, 0.91929215,
1.00508255, 0.80838454, 0.92165553, 0.94513005, 0.95429071,
0.80829571, 0.79531708, 1.01317347, 0.75337253, 0.85965134,
0.77014567, 0.77680991, 0.77158741, 0.88882588, 0.91466414,
0.82815897, 0.80251251, 1.04901425, 1.03386161, 1.3267075 ,
1.12457236, 0.8267327 , 0.89313417, 0.85992512, 0.93482733,
0.83456348, 0.87991138, 0.8110149 , 0.77913188, 0.89391799,
0.73646974, 0.87038816, 0.99533506, 0.90744083, 0.98175496,
1.17458551, 0.86718975, 0.93125366, 0.76131575, 0.90419708,
0.95122171, 0.97531776, 1.05955142, 0.94714906, 0.79360281,
1.02765349, 0.85192628, 0.84680852, 0.85470655, 0.94950982,
0.75868699, 0.89731933, 1.00736877, 1.05171121, 0.73336848,
0.97323586, 0.9848978 , 1.27418684, 0.83954394, 0.73979357,
1.06785996, 0.97832832, 0.7903268 , 0.76600605, 0.94906446,
0.81383465, 0.83620612, 1.00573379, 0.86359645, 0.9962139 ,
0.98779432, 1.13793814, 1.02764992, 0.9070168 , 0.81340349,
0.94807089, 0.90499083, 0.83805736, 0.99623054, 0.91875275,
0.95603557, 0.93156095, 0.83858677, 1.03667466, 1.01436655,
0.85551979, 0.76227045, 0.84743986, 0.88487423, 0.93800365,
0.8984666 , 0.92600404, 0.89230381, 1.34625848, 1.10026015,
0.9314026 , 0.82450724, 1.0299575 , 0.98494286, 1.07564492,
0.96565301, 0.89677015, 1.15236174, 0.85476951, 1.00169288,
0.90520725, 1.06235248, 1.04267637, 0.8311949 , 0.82017897,
0.81635968, 0.97246582, 0.84554172, 0.85409644, 1.18006461,
0.96488389, 0.69228637, 0.97812108, 0.91764623, 0.86250551,
0.91067775, 1.04692847, 0.94594707, 1.04351374, 0.9861303 ,
0.92192581, 0.835444 , 0.84362223, 1.13770705, 0.8075574 ,
1.02260109, 1.13786456, 0.80862839, 0.89291687, 0.90278047,
1.11613951, 1.29900454, 1.5622857 , 0.70999772, 0.99692653,
0.89109939, 0.77506441, 0.86054356, 0.99498141, 0.84222293,
0.95213508, 0.91438286, 0.89305591, 0.9716793 , 0.88609491,
1.00275797, 0.90086022, 0.75336995, 1.1572679 , 0.75952094,
0.89203313, 0.82115965, 0.81459913, 1.02943406, 0.67063452,
1.08707079, 0.92139483, 0.89855103, 0.89910955, 1.07169531,
0.93684641, 0.84893365, 1.08659966, 1.43385982, 0.94788914,
0.95277539, 0.94709274, 1.08412066, 0.90274516, 0.85147284,
0.89327944, 0.92176174, 0.83820774, 0.90981839, 0.82303984,
0.95189716, 0.95154905, 0.73628819, 1.18956148, 1.20224654,
0.97666968, 1.08057375, 0.90369444, 0.98589538, 0.81426873,
0.75127684, 0.93200745, 0.833666 , 0.79532088, 0.91965037,
0.99540522, 0.75449668, 0.85698312, 0.79328453, 0.94667443,
0.7637764 , 0.77203985, 0.73841377, 0.98587851, 1.34642268,
0.78002774, 1.04356217, 1.02266882, 1.08936378, 0.9794388 ,
1.07623423, 0.78069571, 1.12194495, 0.8072132 , 0.91672662,
1.36102062, 0.86933509, 1.15282756, 1.06219505, 0.80295502,
1.00999033, 0.69418333, 0.93678452, 1.13002256, 0.91465628,
0.73558316, 1.1302073 , 0.85856238, 0.89450543, 1.11812369,
0.75891878, 0.66859534, 0.97445338, 0.82210227, 0.76292085,
0.79289499, 1.04380135, 0.95586226, 0.87480096, 0.81244036,
0.86097575, 0.84111811, 0.85369732, 0.99160655, 0.90911501,
0.81315845, 0.74037745, 1.04369233, 1.03535223, 1.18886682,
0.87092491, 0.93562683, 0.92555142, 0.95268616, 0.9653025 ,
0.93447525, 0.9043932 , 1.25701034, 1.10354218, 0.96588129,
0.94717991, 0.97010307, 0.78264501, 0.80991731, 0.98540974,
0.83174886, 0.66966351, 1.01747376, 1.21553117, 0.80527296,
1.06556826, 1.00870321, 1.03316522, 0.88994006, 0.89092714,
0.94119254, 0.83930854, 1.01500087, 1.03581272, 0.97608081,
1.11919255, 1.16586474, 0.85064102, 1.06070274, 1.00679658,
0.75848826, 0.97969353, 0.94834777, 1.64970724, 0.82448941,
1.02236919, 0.95252025, 0.98638842, 0.89094895, 0.95522527,
0.91533774, 0.83716951, 0.92612154, 0.8662328 , 0.9675949 ,
0.96758398, 0.84309291, 0.95071171, 1.0165785 , 0.96628063,
1.00096151, 0.83175371, 0.79063043, 0.97371271, 0.76009001,
1.02409279, 0.97232166, 0.8480577 , 0.8982739 , 0.9959743 ,
0.96604729, 0.8681602 , 0.99850841, 0.96162481, 1.01259965,
0.98580061, 0.82751273, 0.90469122, 0.98254028, 0.78012425,
0.87023012, 0.96830515, 0.9415831 , 0.8591063 , 0.82961507,
0.89166083, 0.88509907, 0.95987837, 1.12356244, 0.71406404,
0.99047619, 0.93735587, 0.80540831, 1.0024624 , 0.95179491,
0.83602101, 0.90343297, 0.90510417, 0.96477126, 0.79995299,
0.93123762, 0.73763362, 1.0619498 , 0.80929865, 0.86110233,
0.84552556, 0.9943 , 0.97085623, 0.75751174, 0.9201456 ,
1.02268858, 0.9642899 , 0.79078558, 1.03160502, 0.85200219,
1.02246639, 1.08771483, 0.81997868, 0.82499763, 0.92767703,
1.06700018, 0.7882174 , 0.7789828 , 0.89096139, 0.73155973,
1.01717651, 0.91889525, 0.93256065, 0.84716063, 1.00965969,
0.74505112, 0.80104245, 0.76003901, 0.96662605, 0.96594583,
1.04571121, 0.97700878, 0.85461917, 0.9150222 , 0.89110471,
1.11183096, 0.98143747, 1.02346975, 0.9059266 , 1.00771483,
0.96336096, 0.93783898, 0.90545613, 1.10404183, 0.75297691,
0.92548654, 0.79889783, 0.88177552, 0.93896814, 0.87309811,
0.80691061, 0.89725699, 1.16586955, 0.98948281, 0.94524894,
0.86085608, 0.76716851, 0.85362573, 1.09936882, 0.9328761 ,
0.74819673, 0.94331186, 0.81077304, 0.88610499, 1.01452015,
0.91513953, 0.92846128, 0.93539081, 0.8946682 , 0.9270336 ,
0.96673629, 0.9897488 , 1.11891899, 0.87551585, 0.85854576,
1.13458763, 1.11450768, 0.79887951, 1.091154 , 1.04180374,
0.79252573, 0.90484245, 0.94221016, 0.95721137, 0.86776103,
0.97167404, 0.83404166, 0.94634038, 0.98907413, 0.92321459,
1.03547804, 0.79660212, 0.94870239, 0.70027204, 0.79841059,
0.92563393, 1.4385341 , 0.8331731 , 0.844816 , 0.97851389,
1.24048695, 0.83765698, 0.83600835, 1.13901283, 1.05994936,
0.84292427, 0.86759056, 0.9272156 , 0.77375499, 0.99972839,
0.95570976, 0.97879539, 0.95528351, 0.84555495, 0.95296134,
0.87469056, 0.78862024, 0.793795 , 0.8516853 , 0.92816818,
1.02492208, 0.8037345 , 0.95481283, 0.75138828, 0.72110948,
1.36815666, 0.9661646 , 0.81651816, 0.87764538, 0.97397297,
0.99845266, 0.77433798, 0.9266279 , 1.92493013, 1.07588789,
0.90412593, 1.03165475, 1.00826548, 0.75500744, 0.87198881,
0.86871262, 0.97854606, 0.80954477, 0.84130266, 0.89674826,
1.43926644, 0.74873088, 1.01894282, 0.93606154, 1.08241489,
0.76626357, 0.97434747, 0.82824599, 1.00267494, 0.97168761,
1.06433173, 1.22741978, 1.46998419, 0.9521923 , 0.98276685,
0.92422781, 1.14241216, 1.13339577, 1.05586816, 1.04923068,
0.83364505, 0.98007268, 0.94322393, 0.84310173, 1.03481955,
1.18281181, 0.79807678, 0.840274 , 1.00344058, 1.09442855,
0.88033836, 0.86189964, 1.1395012 , 1.18808865, 0.78667714,
1.09323293, 0.81511099, 0.95830848, 0.99637275, 0.9146258 ,
0.96358155, 0.79048719, 0.80395604, 1.00828722, 0.92872342,
0.98789363, 0.96720252, 0.80541021, 0.73697557, 0.86692999,
0.86795696, 1.1516694 , 0.95911714, 1.13981603, 1.02002866,
0.90808456, 0.94208296, 0.93691739, 0.87653118, 0.72824225,
0.78177906, 1.2139146 , 0.83405505, 0.91764545, 0.83318595,
0.77930256, 0.86499397, 0.95599882, 0.73850016, 0.9630604 ,
0.97913407, 1.1790714 , 0.94994057, 1.04379512, 0.80815459,
1.16560205, 0.97486893, 1.02780804, 1.10633754, 0.78679252,
0.94643528, 1.19999119, 0.98621069, 0.8899674 , 0.89235261,
0.8728921 , 0.77089094, 0.8492628 , 0.86905159, 0.90741875,
0.81065291, 0.91208596, 1.04616696, 1.24291958, 0.98628605,
0.99751975, 0.83249612, 0.96343385, 0.77862866, 0.72381238,
1.17384381, 1.06013687, 0.73460652, 1.09554763, 0.82015886,
0.90862905, 0.89037104, 0.7866143 , 0.8570287 , 0.75061334,
0.94950855, 0.8091383 , 1.04055212, 0.96679573, 0.78338675,
0.75968533, 1.00495071, 0.6491633 , 1.02802735, 1.00725883,
0.89333988, 0.87539291, 0.99374251, 1.10241119, 1.14935785,
0.9369769 , 0.84772646, 1.05024743, 0.97411124, 0.76972352,
0.92161017, 0.88689841, 0.78598549, 0.93400036, 1.14699647,
0.98636563, 0.93051079, 1.00131515, 0.82749213, 0.96665447,
0.84457933, 0.95172036, 0.86372572, 0.97034285, 0.99877807,
0.8724721 , 0.86281118, 0.96253742, 1.13485439, 1.03410559,
0.83113167, 1.02644607, 1.0669284 , 0.947969 , 1.13373538,
0.85495039, 1.15829218, 0.72662405, 0.81755747, 0.78381403,
0.84360371, 1.10945791, 0.80215303, 0.8861351 , 0.97484684,
1.02996282, 0.86219328, 0.95675062, 1.10753315, 0.92496918,
0.79323289, 0.76891191, 0.93106762, 0.94523682, 0.9534338 ,
0.8954424 , 0.81732651, 1.00443776, 0.96178195, 0.89727229,
0.88917552, 0.88660003, 0.941933 , 1.03900381, 0.75262915,
0.94265862, 0.84472046, 1.09834757, 0.81516259, 0.90865634,
0.9582531 , 0.99819053, 0.8815072 , 0.92425525, 0.79085083,
0.98173446, 0.95199169, 0.71653726, 1.11863725, 0.97855807,
0.87873181, 1.37925403, 0.8085008 , 1.40027689, 0.79367826,
0.82070449, 0.87039383, 0.95896081, 0.75617612, 1.3196712 ,
0.9335008 , 0.9461447 , 1.0838461 , 0.83347962, 0.69558254,
0.92358528, 0.99423247, 0.94884494, 0.75094955, 0.90429063,
1.13740548, 0.89354463, 1.13094104, 1.7373979 , 0.87808028,
0.72820621, 1.02995089, 0.80134468, 0.97511989, 0.93823103,
0.98097787, 0.73179813, 0.93764192, 1.04399599, 0.95644709,
0.80476939, 0.87463727, 0.83220517, 0.76978546, 0.97056432,
1.1693819 , 1.0368387 , 0.98606478, 1.03538075, 0.88253058,
0.91105775, 0.93745618, 0.80272442, 0.77045021, 0.8482449 ,
1.04505306, 0.90427753, 0.706451 , 1.02687396, 0.82931474,
1.24255717, 0.91343217, 0.8692726 , 0.98422894, 0.82142068,
0.86854354, 0.77715916, 0.94490329, 0.97686366, 1.05198512,
0.888989 , 1.09252847, 0.8034292 , 1.04727187, 0.87246831,
0.89474556, 1.06031526, 0.93056174, 0.7747956 , 0.87772054,
1.1183045 , 0.78938083, 0.82019511, 0.82553273, 1.04324276,
0.7676436 , 0.68914756, 0.88400598, 0.79611901, 0.77011016,
0.76727015, 0.84523666, 1.09972447, 1.03942974, 1.07322466,
1.01079248, 1.03469338, 0.90450148, 0.87367007, 0.88432601,
0.85312482, 0.7328442 , 1.12256832, 0.8837547 , 0.81023384,
0.87068285, 0.94466637, 1.13236695, 0.95958423, 0.8099625 ,
1.07509372, 1.03306035, 0.99385633, 1.06433672, 1.07385915,
0.92709455, 1.03502217, 0.88961476, 0.8307198 , 0.98819038,
1.09916368, 0.8919766 , 0.90349117, 0.97554616, 0.98376763,
0.89285893, 0.99941071, 1.16078972, 0.66336693, 1.16389515,
1.10395069, 1.20381952, 0.98928899, 1.17155389, 0.81707565,
0.82903836, 0.95892646, 0.8437454 , 0.79017432, 0.81562954,
0.65169124, 0.87950793, 0.9017879 , 0.82160564, 0.87079127,
0.88100146, 1.00783979, 0.84102603, 1.16817499, 0.97697533,
0.89115235, 0.77254376, 0.7679024 , 0.97093775, 1.13881665,
0.90348632, 1.14654277, 1.08625707, 0.98787902, 1.49057495,
0.99639001, 0.97623973, 0.74807856, 0.76656108, 0.79095998,
1.04583503, 0.95124469, 0.90228738, 1.03129265, 1.02663212,
0.67704952, 0.95335397, 1.01726294, 0.78765385, 0.91140255,
1.04097119, 0.71881619, 1.14572601, 0.79708798, 1.07104057,
0.95925248, 0.72556831, 0.92256392, 1.08702165, 0.95977251,
0.99670254, 0.95276505, 1.15268752, 0.68215678, 1.05573208,
0.89672437, 0.89396611, 1.01814905, 0.81969778, 0.74390457,
1.20909881, 0.82388701, 1.00574083, 1.01348114, 1.01492015,
0.94759788, 0.99758684, 1.19912008, 0.92749943, 1.16660441,
0.97646538, 0.8189475 , 0.97464158, 1.01050799, 0.94368665,
0.70995047, 0.94469581, 1.02534612, 1.3513094 , 0.88081968,
1.00576693, 0.9695495 , 1.0549135 , 1.29993316, 0.91050559,
0.95543198, 1.02161725, 0.76895773, 1.03685293, 0.88201449,
0.90345561, 1.02793048, 1.00267831, 0.84653161, 0.9217411 ,
0.94666576, 0.94946561, 0.77482488, 0.94358305, 0.89779666,
1.01462131, 1.05829923, 1.13217729, 1.12260175, 0.89810828,
0.96305689, 0.90466377, 0.8091617 , 0.93070824, 1.03997521,
1.04076373, 0.95858477, 0.94382748, 0.7585222 , 1.22890096,
0.97300529, 0.87424719, 0.90435141, 0.91894865, 0.97819677,
0.80300175, 1.03729016, 1.19305569, 0.81633791, 0.7930351 ,
0.8141721 , 0.86764479, 0.89207142, 0.89691482, 0.86243171,
0.91184679, 0.94284352, 1.01357831, 1.03806277, 0.92000143,
0.91018767, 0.90555137, 0.89089532, 1.3530331 , 0.96933587,
0.82350429, 0.71549154, 1.13399156, 0.87838533, 0.99177078,
0.93296992, 1.43078263, 0.90278792, 0.85789581, 0.93531789,
0.84948314, 0.95778101, 0.80962713, 0.88865859, 1.15297165,
0.85695093, 0.88601982, 0.96665296, 0.9320964 , 1.04193558,
1.006005 , 0.78939639, 0.79344784, 0.87012624, 0.8532022 ,
0.93351167, 0.91705323, 0.74384626, 0.84219843, 0.78265573,
1.07759963, 1.0236098 , 1.00202257, 1.18687122, 1.00869294,
0.8809502 , 0.76397598, 0.81845324, 0.97439912, 1.10466318,
1.10678275, 0.96692316, 0.84120323, 1.13151276, 0.72574077,
0.82457571, 0.8179266 , 1.01118196, 0.84303742, 0.86255339,
1.03927791, 0.82302701, 1.03586066, 0.75785864, 0.9186558 ,
0.97139449, 0.92424514, 1.00415659, 1.08544681, 0.80940032,
0.9073428 , 0.83621672, 1.04027879, 0.79447936, 0.94829305,
1.16176292, 1.11185195, 0.88652664, 0.98676451, 0.89310091,
0.72272527, 0.79963233, 0.94651986, 0.91540761, 1.0498236 ,
0.84938647, 1.15539602, 1.03118991, 0.86565049, 0.77764016,
0.77866522, 0.78008955, 0.89062575, 0.81285464, 0.92554114,
1.08747324, 0.84338687, 0.76746516, 0.99205474, 0.86649541,
0.97586166, 0.9721711 , 1.14895298, 1.04659345, 1.0605085 ,
1.06392238, 1.08286448, 0.93612266, 0.82545354, 0.84305431,
0.83650404, 1.11073704, 0.91760695, 0.83281572, 0.84244131,
1.05843708, 0.94695861, 0.95469608, 0.96038612, 0.81373042,
0.94943303, 1.00824522, 0.86416102, 0.87121008, 1.04208739,
0.81171276, 1.12798927, 0.99122576, 0.80626996, 1.07103151,
0.99809277, 1.08490135, 0.9441509 , 0.98766371, 1.33205139,
0.92145678, 0.88112784, 0.9297591 , 1.17549838, 0.8481953 ,
0.96359948, 0.98478935, 0.77028684, 0.86408555, 0.92863805,
0.94593549, 0.78705212, 1.1923026 , 0.9983487 , 0.99152533,
0.95313678, 1.01847515, 1.05728959, 0.88009142, 1.00351951,
1.00549552, 0.81671365, 0.90545602, 0.77895202, 0.82217088,
0.94838645, 0.85928327, 0.90729044, 0.92975916, 0.91946285,
0.80537364, 1.11885357, 0.84691232, 0.85356231, 0.85102988,
1.06499659, 1.0242127 , 0.91245632, 0.83131215, 0.72151085,
0.9295769 , 0.89549018, 0.87914839, 0.93541175, 0.97319188,
0.791944 , 1.08008186, 0.79549907, 0.90967683, 0.80506028,
1.1206821 , 0.91258859, 1.24855319, 0.96112955, 1.14305514,
0.79327927, 0.84209204, 0.94494251, 0.89573237, 1.0571304 ,
0.94504292, 0.84446547, 0.92060829, 0.82347072, 0.86280426,
0.85516098, 0.78649432, 0.89522516, 0.94529795, 0.90322825,
0.9616288 , 0.77439126, 1.0130917 , 0.84021262, 0.97337238,
0.93206526, 0.93809914, 0.87626441, 0.92706652, 0.86819358,
0.74060652, 0.84046045, 0.94130171, 0.92537388, 0.80485074,
0.81633347, 0.76401825, 0.81300784, 0.8052467 , 1.27234895,
0.92674704, 1.12106762, 0.91743016, 0.94694287, 0.87309918,
0.99163895, 0.83777703, 0.89713459, 0.88208343, 0.90205904,
0.9708827 , 0.94965009, 0.81446019, 0.89512677, 0.97025135,
1.02314481, 0.88399736, 1.01059963, 0.86193889, 0.94621507,
0.97334837, 0.90122433, 0.71015398, 1.17491792, 1.13869784,
1.03908735, 0.85480742, 0.98971408, 1.04147459, 0.85170846,
0.94861439, 0.7778831 , 0.73445723, 0.89587488, 0.88627975,
0.98253057, 0.86159356, 1.06559385, 0.90852704, 0.86562284,
0.92122779, 0.98233847, 0.94989946, 0.97171474, 0.92428639,
1.03712828, 0.88170861, 0.86802004, 0.79670394, 0.85606075,
1.09636421, 0.85048902, 0.99393971, 1.10510884, 0.80515088,
0.95559246, 0.96803475, 0.98115871, 0.94603995, 0.8654312 ,
0.90759845, 0.9010954 , 0.77979965, 0.83322032, 0.8485444 ,
0.89217626, 0.78817966, 1.03815705, 0.84076982, 0.93362471,
1.06173045, 0.82612852, 0.8336989 , 0.93943901, 0.91775212,
1.00501856, 1.04269442, 0.93195426, 0.78377288, 1.03372915,
0.8415154 , 1.02888978, 0.93202174, 0.78683383, 0.85106996,
0.9724203 , 0.93409182, 0.97876305, 1.17153649, 0.9434591 ,
0.81361398, 1.09554602, 1.48193137, 0.96349931, 0.93586569,
1.0210303 , 0.88980694, 0.88890459, 1.05330284, 1.09511186,
0.91202441, 0.78753378, 0.98074421, 1.04268892, 1.14265114,
0.86482628, 0.87233851, 1.18915875, 0.82556032, 0.87461473,
1.08396187, 0.69206719, 0.88113605, 0.96951674, 0.89248729,
0.909926 , 0.82966779, 0.8261611 , 0.9551228 , 0.79879533,
1.09416042, 1.01020839, 1.04133795, 1.09654304, 0.84060693,
1.02612223, 1.00177693, 0.90510435, 1.2091018 , 1.03290288,
0.80529305, 0.74332311, 1.04728164, 1.04647891, 0.83707027,
0.81648396, 1.07180239, 0.7926372 , 0.99855278, 1.16851397,
0.94566149, 0.75612408, 0.94975744, 0.92924923, 1.03215206,
0.82394984, 0.84142091, 0.88028348, 1.11036047, 0.82451341,
0.83694112, 0.84207459, 0.94095384, 1.00173733, 1.10241786,
0.86609134, 0.86859604, 1.1211537 , 0.84188088, 0.89023025,
0.99062899, 0.96828743, 0.80106184, 0.86745454, 0.99013196,
0.91838615, 0.86400837, 0.95679525, 0.78893711, 1.03753175,
0.97177648, 0.88685941, 0.9441012 , 0.69289996, 0.84219432,
1.01050959, 0.83578317, 0.79907595, 1.21281139, 0.91613925,
1.00202544, 0.95293036, 0.84583258, 0.84574886, 0.76470341,
1.23606485, 1.10063291, 0.93852084, 0.97201415, 0.68523403,
0.94560108, 0.81903039, 1.14332074, 0.80914367, 1.46398921,
0.85155227, 1.41106313, 0.85740937, 0.91107708, 0.9003576 ,
0.94132363, 0.85710825, 0.74805485, 1.2521402 , 0.95307547,
0.94274593, 0.86732331, 0.83850172, 0.96835288, 1.09443821,
0.68532627, 0.84736457, 1.06989165, 0.81424504, 1.02942437,
0.80255995, 0.89258275, 0.93560962, 1.04192911, 1.13498644,
1.24409985, 0.93295415, 1.08360355, 1.16468059, 0.81482388,
0.92387137, 1.07508578, 0.86564567, 1.0142773 , 0.86143907,
0.91214944, 0.9757589 , 0.90588817, 0.74168224, 0.91222552,
0.96119617, 0.95431519, 0.78080736, 1.0327991 , 1.05112022,
0.92761155, 1.0183631 , 0.73188757, 0.85617225, 0.93341155,
0.95106173, 0.9481304 , 0.92996766, 1.08092599, 0.96485228,
0.97964284, 0.94224551, 1.00654477, 1.01367565, 0.89785325,
0.80725703, 0.7495798 , 0.78240339, 1.04479122, 0.88200252,
1.0664992 , 1.05951775, 0.82508097, 0.81201381, 0.81860218,
1.07561763, 1.02830358, 0.87348993, 1.0081337 , 0.87470565,
1.45597242, 0.77540871, 0.8036279 , 0.80514427, 0.92688461,
0.88152328, 1.56288788, 0.87251203, 0.92808414, 1.03548911,
0.65226699, 0.81243827, 1.03103554, 1.11995602, 0.78956176,
0.96734427, 0.91600861, 0.8246106 , 1.09390498, 0.98187349,
0.8919928 , 0.98746862, 0.96298125, 0.93854424, 0.83060031,
0.74692856, 0.99757209, 0.78888849, 1.17517182, 1.06657933,
1.1244446 , 0.93608433, 0.88898472, 0.96823218, 0.87496056,
0.81776683, 0.98863687, 0.82962648, 1.02395766, 0.99622674,
1.07138771, 0.86669915, 0.98172208, 0.8787271 , 0.86125353,
0.79554881, 0.93382729, 1.00706175, 1.08386454, 0.69664542,
0.77316657, 0.79978147, 0.80764736, 0.9969375 , 0.83554928,
0.91017317, 0.95323454, 1.29872357, 1.08851275, 1.01673108,
0.79536208, 0.84878371, 0.95165619, 0.87733936, 0.86319684,
0.96758495, 0.87763237, 0.95094713, 1.00143077, 1.0596993 ,
1.27278299, 0.82281481, 0.89765404, 0.94538181, 0.88161857,
0.77679456, 0.84274277, 0.89864342, 0.98705162, 0.95456512,
0.92712401, 0.77427128, 1.03292269, 0.87034158, 1.24316113,
0.98278702, 1.17325118, 1.18863971, 0.88678137, 0.90389731,
1.01740421, 0.80228624, 0.97742223, 0.82741518, 0.8359407 ,
0.7177401 , 1.02297899, 0.81896048, 0.77127181, 0.83328601,
0.96939523, 0.94073198, 0.90356023, 1.12355064, 1.12811114,
0.92403138, 1.05423548, 0.70827734, 0.95891358, 0.89898027,
1.02318421, 0.93775375, 0.8245529 , 0.80604304, 0.77555283,
0.92112699, 0.85662169, 0.92725859, 0.93599147, 0.78971931,
0.8337306 , 0.93775212, 0.91025099, 0.75308822, 0.95391173,
0.96840576, 0.8394416 , 0.89087015, 0.73703219, 0.97812386,
0.8787356 , 0.93985266, 0.96406021, 0.88666152, 0.89242745,
0.97900374, 0.85697634, 0.8795755 , 0.78581812, 0.87138735,
0.74602994, 0.96158936, 0.84529806, 0.85333232, 1.06116542,
1.05929382, 1.09720986, 1.28959453, 0.91541148, 0.87657407,
1.06514793, 0.8668096 , 1.07325125, 0.85009534, 0.95542191,
0.86977409, 0.96249874, 0.97715908, 0.89360331, 0.98859647,
0.67560717, 0.90213348, 1.12051182, 0.99684949, 0.9863559 ,
1.32246221, 0.84632664, 0.89707447, 1.00486846, 0.90843649,
1.02399424, 0.97899017, 0.95693977, 0.8384806 , 0.93927435,
0.79153251, 1.08694094, 1.01785553, 0.99674552, 0.898566 ,
0.94116882, 0.95224977, 0.99859129, 0.81125029, 0.85985586,
1.14418875, 0.96306241, 1.31398561, 0.77961419, 1.01958366,
0.9575668 , 0.771084 , 1.04473363, 1.01569517, 1.04560744,
0.9648178 , 0.93466398, 1.09313672, 0.90349389, 1.00193114,
0.79991514, 0.91102351, 0.9795356 , 0.89285193, 1.04898573,
0.93031782, 0.95087069, 1.15644699, 0.91155375, 0.93005986,
0.70098757, 0.82751625, 0.85462106, 1.34969332, 0.93382692,
1.05558387, 1.25417819, 1.0546501 , 1.05217032, 0.86031346,
1.00864463, 0.73592482, 1.01899722, 1.00462831, 0.96882832,
0.81334751, 1.05102745, 0.82288113, 1.05798623, 0.77971966,
1.38584414, 1.0248193 , 0.78951056, 0.76171823, 0.78407227,
1.14808104, 0.97890501, 0.99870905, 0.96006489, 0.78442704,
0.99315422, 0.83653213, 0.95210661, 0.97233777, 0.78140495,
0.95996216, 0.76318841, 0.82333311, 0.87123204, 0.79531258,
0.82681452, 1.00492217, 0.93549261, 1.00240153, 1.02086339,
1.00424549, 0.87437775, 0.84675564, 0.98014462, 0.77262117,
1.02620976, 0.91162462, 1.0275041 , 1.1475431 , 0.78167746,
0.86273856, 0.84499552, 0.99712362, 0.9694771 , 0.94523806,
0.8450763 , 0.93068519, 1.29362523, 1.0249628 , 1.05522183,
1.13433408, 1.06981137, 0.85666419, 0.98203234, 0.75867592,
0.8844762 , 0.89708521, 0.75482121, 0.80137918, 0.90412883,
0.88815714, 1.11497471, 0.77441965, 0.93853353, 0.8962444 ,
0.83055142, 0.99776183, 0.92581583, 0.78783745, 0.90934299,
0.81136457, 0.99000726, 0.9669203 , 1.2890399 , 1.01923088,
1.11076459, 1.01331706, 1.02470946, 0.92950448, 1.10298478,
1.03723287, 1.09129035, 0.95138186, 0.85764624, 0.86606803,
0.8141785 , 1.0129293 , 0.93267714, 0.95663734, 1.01940702,
0.8072268 , 1.0707215 , 0.90482063, 1.01546955, 0.84018308,
0.95938216, 0.96454054, 0.93114659, 1.09705112, 0.88720628,
0.81067916, 0.82667413, 0.89494027, 0.9173495 , 0.73326273,
1.00209461, 0.9560545 , 1.09126364, 0.95709908, 0.81314274,
0.8274943 , 1.37605062, 0.99097917, 1.02221806, 0.90277482,
1.01611791, 0.79663017, 1.16686882, 1.19669266, 0.88366356,
0.77661102, 0.73467145, 1.15438391, 0.91439204, 0.78280849,
1.07238853, 1.03588797, 1.0438292 , 0.75935005, 0.76200114,
0.81603429, 0.74402367, 1.1171573 , 0.90227791, 0.94762351,
0.92462278, 0.8847803 , 1.1343863 , 0.8662186 , 1.00410699,
1.05008842, 0.94783969, 0.89555844, 0.98278045, 0.80396855,
1.00483139, 0.82540491, 0.83284354, 0.93132265, 0.91191039,
0.95753995, 1.18260689, 0.84124197, 0.87429189, 0.67617592,
0.89495946, 0.92898357, 1.10528183, 1.06994417, 0.82259834,
0.74746328, 0.99070832, 1.07386274, 0.84007203, 0.89720099,
0.9670094 , 1.02728082, 0.78001838, 0.97709347, 0.90602469,
1.49985196, 0.80256976, 1.05905677, 0.98298874, 0.94679703,
0.94305923, 0.98720786, 0.82091251, 0.91644161, 0.79576881,
0.98942172, 0.92974761, 0.99307545, 0.86959859, 0.88549807,
1.09246144, 0.87265047, 1.01449921, 0.74353851, 0.95029192,
0.94385304, 0.84779449, 1.00690543, 0.79727923, 0.92285822,
0.83164749, 1.06508941, 1.09757529, 0.9059649 , 0.9146043 ,
0.74474669, 0.71306438, 0.77989422, 0.84965464, 0.9424323 ,
0.82492634, 0.85076686, 1.01110574, 1.01445751, 0.87929754,
0.8773275 , 0.72314196, 0.92285502, 1.18173931, 0.86460799,
0.91795108, 1.16580482, 0.79880497, 0.72734786, 0.97579653,
0.76967834, 0.97543732, 1.04996964, 1.16439594, 1.08656546,
1.15644902, 0.98333436, 1.24374723, 0.95810117, 0.8488915 ,
1.06288523, 0.99055893, 0.75517736, 0.95856183, 0.85574796,
1.00426506, 1.25275675, 0.92735225, 0.83351314, 0.90216604,
0.87996386, 1.13312875, 1.00891523, 0.76513657, 0.85659621,
0.91142459, 1.05893495, 0.92253051, 0.87153684, 1.03190013,
0.92160845, 1.01768282, 0.80590054, 1.05172907, 0.92758177,
0.86902046, 0.93927127, 0.80389584, 0.96016014, 0.9720314 ,
0.93255573, 0.85792534, 0.97826842, 0.80506149, 0.97170364,
1.08397772, 1.01866333, 1.18898045, 1.02855427, 0.94848891,
0.94336541, 0.93119013, 0.92907817, 1.11806635, 0.88409637,
0.88809707, 1.06735612, 0.98447974, 0.88816438, 1.00099784,
0.92443453, 1.00325146, 0.86977836, 0.84621801, 0.92361073,
0.85573903, 0.77309241, 0.86717528, 1.19892035, 1.07497019,
1.02178857, 0.8718756 , 0.90646803, 0.92912096, 1.04538692,
0.95245707, 0.99698525, 0.94583199, 0.92537599, 0.86720487,
0.89927054, 0.86111792, 0.94401208, 1.01130191, 1.03759681,
0.8177749 , 1.07784373, 0.79823294, 1.00839713, 1.39409602,
0.87146241, 1.21218822, 0.84895926, 1.01742432, 0.8044077 ,
0.78632084, 1.07751744, 1.13147508, 0.90268302, 0.90024653,
0.92072578, 0.87763264, 1.00736787, 0.90978808, 0.90895492,
0.90766826, 0.98956566, 0.92075658, 0.77613105, 0.93815569,
0.95455546, 1.00607757, 0.82187828, 0.94197599, 0.867015 ,
0.90709762, 0.75604815, 0.91312261, 0.9286002 , 0.74623204,
0.87368702, 0.83879278, 0.92224793, 0.81676402, 0.90355168,
0.92762955, 0.91784037, 0.82273304, 0.75947806, 0.92687078,
0.87971276, 1.15037445, 0.86707445, 0.8611453 , 0.91921763,
1.07088129, 1.05150864, 1.02162325, 0.90305964, 0.99912687,
0.87693204, 0.6186911 , 0.95526533, 1.15975655, 1.00061222,
0.74608861, 0.954568 , 0.84965574, 0.79177899, 0.9741051 ,
1.0119514 , 0.79147502, 0.81367071, 0.87757421, 1.01270813,
0.86044808, 0.9689615 , 0.9577413 , 0.79480242, 0.76073002,
0.83131288, 0.96379259, 0.84679732, 0.82508685, 0.89977283,
0.86766439, 1.12231836, 0.93058445, 1.04584181, 0.88838751,
0.96615893, 0.98731619, 1.05517799, 1.02860493, 0.98881473,
0.85210319, 0.91497438, 0.9275787 , 0.97456134, 0.9011687 ,
0.69417417, 0.89661214, 0.79038577, 1.08118303, 1.0509366 ,
0.97813138, 0.85714945, 0.97330329, 0.83611871, 0.99772489,
0.83591193, 0.75592677, 0.85392601, 1.02734573, 0.72404609,
0.83534547, 0.91630472, 0.88463459, 1.12044562, 1.10991104,
0.96047701, 1.12342573, 0.72046647, 0.96852239, 0.89605698,
0.98310243, 0.92300659, 0.87794646, 0.83109321, 1.43297752,
0.80609029, 0.8692251 , 0.90254649, 0.81647796, 1.07521371,
1.03942973, 0.96156488, 1.25225334, 1.0265727 , 0.9518054 ,
0.87765718, 1.15552582, 0.79577766, 0.66849239, 0.87236017,
1.03437641, 0.98567811, 0.78463682, 1.09573491, 0.89858959,
0.94056747, 1.16075317, 1.06296054, 0.85844006, 0.95475376,
0.67038747, 0.7924646 , 0.94009167, 0.88282093, 0.97711174,
0.9209607 , 1.03230176, 0.99981312, 1.12345314, 1.11705968,
1.02453864, 0.91724212, 0.98337942, 0.89195196, 0.83800177,
0.95044243, 0.76543521, 0.8613025 , 0.83907753, 0.69333275,
0.84411739, 0.68621941, 0.9847701 , 1.13328481, 1.1432074 ,
0.97156328, 0.86464461, 0.74258211, 0.97319505, 1.11453917,
0.87344741, 0.91382664, 1.01635943, 1.38708812, 0.81377942,
1.3828856 , 0.74476285, 0.86657537, 1.1216954 , 0.91008346,
0.800862 , 0.98356936, 0.92409916, 1.13970543, 0.97547004,
0.99385865, 1.16476579, 0.78678084, 1.003947 , 0.81491463,
1.19724322, 0.9173622 , 0.93274116, 0.80047839, 0.86798029,
0.9433708 , 0.82376832, 1.01726905, 0.81914971, 0.73290844])
class Medpar1(object):
    '''
    Results container for the medpar1 data.

    The medpar1 data can be found here.
    http://www.stata-press.com/data/hh2/medpar1

    Attributes
    ----------
    endog : ndarray
        Length of stay (``los`` column).
    exog : ndarray
        Design matrix: admitype dummies (first dummy dropped) stacked
        with ``codes``, plus a constant column.
    '''
    def __init__(self):
        filename = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                "stata_medpar1_glm.csv")
        # Use a context manager so the file handle is closed deterministically
        # instead of leaking until garbage collection. The converter strips
        # the surrounding double quotes from the second (string) column.
        with open(filename, 'rb') as fh:
            data = np.recfromcsv(
                fh, converters={1: lambda s: s.strip(asbytes("\""))})
        self.endog = data.los
        design = np.column_stack((data.admitype, data.codes))
        # Expand admitype into dummy variables, then drop one dummy to
        # avoid perfect collinearity with the constant term.
        design = categorical(design, col=0, drop=True)
        design = np.delete(design, 1, axis=1)  # drop first dummy
        self.exog = add_constant(design)
class InvGaussLog(Medpar1):
"""
InvGaussLog is used with TestGlmInvgaussLog
"""
def __init__(self):
super(InvGaussLog, self).__init__()
filename = os.path.join(os.path.dirname(os.path.abspath(__file__)),
"medparlogresids.csv")
self.resids = np.genfromtxt(open(filename, 'rb'), delimiter=",")
self.null_deviance = 335.1539777981053 # from R, Rpy bug
self.params = np.array([ 0.09927544, -0.19161722, 1.05712336])
self.bse = np.array([ 0.00600728, 0.02632126, 0.04915765])
self.aic_R = 18545.836421595981
self.aic_Stata = 6.619000588187141
self.deviance = 304.27188306012789
self.scale = 0.10240599519220173
# self.llf = -9268.9182107979905 # from R
self.llf = -12162.72308108797 # from Stata, big rounding diff with R
self.bic_Stata = -29849.51723280784
self.chi2 = 398.5465213008323 # from Stata not in sm
self.df_model = 2
self.df_resid = 3673
self.fittedvalues = np.array([ 7.03292237, 7.03292237, 7.03292237,
7.03292237, 5.76642001,
7.03292237, 7.03292237, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 5.76642001, 7.03292237,
5.22145448, 7.03292237, 5.22145448, 4.72799187, 4.72799187,
7.03292237, 7.03292237, 6.36826384, 7.03292237, 5.76642001,
7.03292237, 4.28116479, 7.03292237, 7.03292237, 7.03292237,
5.76642001, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 3.87656588, 7.03292237, 7.03292237, 4.28116479,
7.03292237, 7.03292237, 4.72799187, 7.03292237, 7.03292237,
7.03292237, 5.22145448, 6.36826384, 6.36826384, 4.28116479,
4.72799187, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
5.22145448, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 5.76642001, 6.36826384,
6.36826384, 5.22145448, 7.03292237, 7.03292237, 7.03292237,
5.76642001, 7.03292237, 7.03292237, 3.87656588, 5.76642001,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
5.22145448, 5.22145448, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 4.72799187, 7.03292237, 6.36826384,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 5.22145448, 6.36826384, 5.22145448,
7.03292237, 7.03292237, 4.72799187, 5.76642001, 7.03292237,
4.72799187, 6.36826384, 3.87656588, 7.03292237, 7.03292237,
5.22145448, 5.22145448, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 4.28116479,
7.03292237, 6.36826384, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
6.36826384, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 5.76642001,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 5.76642001,
7.03292237, 7.03292237, 6.36826384, 3.87656588, 7.03292237,
7.03292237, 5.22145448, 7.03292237, 5.76642001, 4.28116479,
5.76642001, 6.36826384, 6.36826384, 7.03292237, 7.03292237,
5.76642001, 7.03292237, 7.03292237, 4.28116479, 7.03292237,
6.36826384, 7.03292237, 6.36826384, 7.03292237, 5.22145448,
7.03292237, 4.28116479, 4.72799187, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 6.36826384,
7.03292237, 4.28116479, 5.22145448, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 4.28116479, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 5.76642001, 7.03292237, 7.03292237,
7.03292237, 4.72799187, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
6.36826384, 7.03292237, 6.36826384, 4.28116479, 5.76642001,
5.22145448, 6.36826384, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 6.36826384,
5.76642001, 7.03292237, 5.22145448, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
4.28116479, 7.03292237, 5.22145448, 7.03292237, 6.36826384,
5.76642001, 4.28116479, 4.28116479, 7.03292237, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 4.28116479,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 5.22145448,
7.03292237, 7.03292237, 7.03292237, 5.22145448, 7.03292237,
5.76642001, 7.03292237, 4.72799187, 4.28116479, 6.36826384,
5.76642001, 7.03292237, 7.03292237, 7.03292237, 5.22145448,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
3.87656588, 4.72799187, 7.03292237, 7.03292237, 7.03292237,
4.72799187, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 6.36826384, 3.87656588, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 4.28116479, 7.03292237, 6.36826384,
7.03292237, 5.22145448, 5.22145448, 6.36826384, 7.03292237,
6.36826384, 6.36826384, 7.03292237, 4.28116479, 7.03292237,
7.03292237, 7.03292237, 5.22145448, 6.36826384, 7.03292237,
3.87656588, 6.36826384, 5.22145448, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 4.28116479, 7.03292237,
5.22145448, 7.03292237, 6.36826384, 5.22145448, 4.72799187,
7.03292237, 7.03292237, 7.03292237, 4.72799187, 6.36826384,
7.03292237, 6.36826384, 5.76642001, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 5.22145448, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 3.87656588, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 4.72799187, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 6.36826384, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 6.36826384, 7.03292237, 5.22145448,
6.36826384, 7.03292237, 6.36826384, 7.03292237, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 3.87656588,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 6.36826384,
5.76642001, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
5.76642001, 7.03292237, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 3.87656588, 7.03292237, 6.36826384, 6.36826384,
4.72799187, 5.76642001, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 3.87656588, 5.22145448, 4.72799187,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 4.72799187, 6.36826384,
7.03292237, 7.03292237, 5.76642001, 7.03292237, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 6.36826384,
7.03292237, 5.22145448, 5.76642001, 7.03292237, 5.76642001,
6.36826384, 5.76642001, 5.76642001, 7.03292237, 5.76642001,
7.03292237, 7.03292237, 7.03292237, 4.72799187, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 6.36826384, 7.03292237, 4.72799187,
7.03292237, 7.03292237, 4.28116479, 6.36826384, 3.87656588,
7.03292237, 3.5102043 , 7.03292237, 7.03292237, 5.76642001,
5.22145448, 7.03292237, 5.76642001, 4.28116479, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 5.76642001, 4.72799187,
7.03292237, 6.36826384, 7.03292237, 5.22145448, 7.03292237,
4.72799187, 7.03292237, 7.03292237, 7.03292237, 5.22145448,
5.22145448, 4.72799187, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
4.28116479, 7.03292237, 7.03292237, 7.03292237, 5.76642001,
6.36826384, 7.03292237, 7.03292237, 5.76642001, 7.03292237,
7.03292237, 6.36826384, 4.72799187, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 4.72799187, 5.76642001, 7.03292237, 5.76642001,
6.36826384, 7.03292237, 7.03292237, 7.03292237, 4.72799187,
7.03292237, 7.03292237, 7.03292237, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 7.03292237, 5.76642001, 6.36826384,
4.72799187, 7.03292237, 7.03292237, 7.03292237, 5.76642001,
7.03292237, 6.36826384, 5.22145448, 5.76642001, 4.72799187,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 5.22145448, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 7.03292237, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 6.36826384, 7.03292237, 5.22145448,
7.03292237, 7.03292237, 7.03292237, 5.76642001, 7.03292237,
6.36826384, 6.36826384, 7.03292237, 5.76642001, 7.03292237,
6.36826384, 7.03292237, 7.03292237, 7.03292237, 4.72799187,
5.22145448, 7.03292237, 3.87656588, 5.76642001, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 6.36826384, 7.03292237, 7.03292237,
4.72799187, 7.03292237, 6.36826384, 7.03292237, 4.28116479,
7.03292237, 7.03292237, 5.76642001, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
4.72799187, 6.36826384, 3.87656588, 7.03292237, 7.03292237,
6.36826384, 4.72799187, 4.28116479, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 3.87656588, 7.03292237, 7.03292237, 7.03292237,
3.87656588, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 3.87656588,
7.03292237, 4.72799187, 5.22145448, 5.22145448, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 5.22145448, 5.22145448, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 4.72799187, 6.36826384, 5.76642001,
5.76642001, 6.36826384, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 4.72799187, 7.03292237, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 4.72799187, 4.28116479, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 5.76642001,
7.03292237, 5.76642001, 7.03292237, 7.03292237, 7.03292237,
5.22145448, 7.03292237, 7.03292237, 7.03292237, 5.22145448,
6.36826384, 7.03292237, 7.03292237, 6.36826384, 6.36826384,
7.03292237, 7.03292237, 5.76642001, 7.03292237, 5.22145448,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 4.72799187, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 4.28116479,
7.03292237, 6.36826384, 7.03292237, 5.76642001, 4.28116479,
5.76642001, 7.03292237, 3.87656588, 7.03292237, 7.03292237,
7.03292237, 3.5102043 , 7.03292237, 7.03292237, 7.03292237,
7.03292237, 5.76642001, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 5.76642001, 5.76642001, 5.76642001, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 5.76642001, 7.03292237, 4.28116479, 6.36826384,
5.76642001, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 5.22145448,
7.03292237, 7.03292237, 7.03292237, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 5.76642001, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 6.36826384, 7.03292237, 7.03292237, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 5.22145448, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 3.5102043 , 7.03292237, 7.03292237,
7.03292237, 3.87656588, 6.36826384, 5.76642001, 7.03292237,
7.03292237, 6.36826384, 4.72799187, 7.03292237, 7.03292237,
5.76642001, 7.03292237, 3.87656588, 5.22145448, 6.36826384,
4.28116479, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 6.36826384,
7.03292237, 5.22145448, 6.36826384, 6.36826384, 7.03292237,
6.36826384, 7.03292237, 7.03292237, 7.03292237, 5.22145448,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 4.72799187,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 3.5102043 , 7.03292237, 5.22145448,
5.22145448, 7.03292237, 6.36826384, 7.03292237, 4.72799187,
7.03292237, 7.03292237, 7.03292237, 4.72799187, 7.03292237,
5.76642001, 7.03292237, 3.87656588, 7.03292237, 5.22145448,
3.87656588, 4.72799187, 6.36826384, 5.76642001, 7.03292237,
6.36826384, 7.03292237, 4.28116479, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 5.22145448, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 4.28116479, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 5.76642001, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
3.5102043 , 4.72799187, 7.03292237, 4.28116479, 7.03292237,
4.72799187, 7.03292237, 5.22145448, 5.76642001, 5.76642001,
3.87656588, 5.76642001, 5.22145448, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 5.22145448, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 5.22145448, 7.03292237,
7.03292237, 7.03292237, 5.22145448, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 4.28116479,
4.72799187, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 6.36826384,
6.36826384, 5.76642001, 7.03292237, 5.76642001, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
5.22145448, 7.03292237, 7.03292237, 5.76642001, 6.36826384,
5.76642001, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
4.72799187, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 5.76642001, 6.36826384, 4.72799187,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 4.72799187,
7.03292237, 6.36826384, 7.03292237, 5.22145448, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 5.76642001, 6.36826384,
5.76642001, 7.03292237, 7.03292237, 7.03292237, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
4.72799187, 7.03292237, 5.22145448, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 4.72799187, 6.36826384, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 5.76642001, 5.22145448,
7.03292237, 7.03292237, 7.03292237, 5.22145448, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 4.28116479,
5.76642001, 7.03292237, 4.28116479, 7.03292237, 6.36826384,
7.03292237, 7.03292237, 4.28116479, 7.03292237, 7.03292237,
6.36826384, 3.87656588, 3.5102043 , 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 5.76642001,
7.03292237, 4.72799187, 5.76642001, 7.03292237, 7.03292237,
3.87656588, 7.03292237, 7.03292237, 7.03292237, 4.28116479,
7.03292237, 7.03292237, 7.03292237, 5.76642001, 5.76642001,
7.03292237, 6.36826384, 5.76642001, 7.03292237, 6.36826384,
5.76642001, 7.03292237, 5.76642001, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
4.28116479, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 4.72799187, 5.76642001, 6.36826384, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 5.76642001, 4.28116479,
7.03292237, 5.76642001, 4.72799187, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 5.76642001,
6.36826384, 6.36826384, 7.03292237, 7.03292237, 6.36826384,
3.87656588, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
3.5102043 , 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 4.72799187, 7.03292237, 6.36826384, 4.72799187,
4.72799187, 7.03292237, 5.76642001, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 4.28116479, 7.03292237, 7.03292237,
7.03292237, 5.76642001, 7.03292237, 7.03292237, 7.03292237,
4.72799187, 7.03292237, 7.03292237, 6.36826384, 5.22145448,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 5.22145448, 7.03292237, 7.03292237, 6.36826384,
7.03292237, 7.03292237, 5.22145448, 7.03292237, 6.36826384,
6.36826384, 7.03292237, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 6.36826384, 7.03292237, 4.72799187,
4.28116479, 4.72799187, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 4.28116479,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 4.28116479, 4.28116479, 7.03292237, 5.22145448,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 7.03292237,
5.76642001, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 4.72799187, 7.03292237,
3.87656588, 7.03292237, 4.72799187, 7.03292237, 7.03292237,
7.03292237, 5.22145448, 7.03292237, 4.28116479, 7.03292237,
7.03292237, 4.72799187, 5.22145448, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 5.76642001, 5.22145448,
7.03292237, 7.03292237, 3.87656588, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 4.72799187, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
4.28116479, 7.03292237, 7.03292237, 7.03292237, 5.76642001,
7.03292237, 5.22145448, 4.72799187, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 5.76642001, 7.03292237, 5.76642001,
7.03292237, 4.28116479, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 5.76642001, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 5.76642001,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 5.76642001, 7.03292237, 3.87656588,
6.36826384, 5.76642001, 7.03292237, 4.28116479, 7.03292237,
5.76642001, 5.22145448, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
5.76642001, 7.03292237, 7.03292237, 7.03292237, 3.5102043 ,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 4.28116479, 4.72799187, 6.36826384, 7.03292237,
7.03292237, 4.28116479, 5.76642001, 7.03292237, 7.03292237,
7.03292237, 4.28116479, 7.03292237, 7.03292237, 5.22145448,
6.36826384, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 4.72799187,
7.03292237, 5.22145448, 6.36826384, 7.03292237, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 5.22145448,
7.03292237, 7.03292237, 5.22145448, 7.03292237, 6.36826384,
7.03292237, 7.03292237, 5.76642001, 7.03292237, 7.03292237,
3.5102043 , 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 5.76642001, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 6.36826384,
4.72799187, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 6.36826384, 4.72799187, 5.22145448,
5.76642001, 7.03292237, 6.36826384, 6.36826384, 7.03292237,
6.36826384, 7.03292237, 5.22145448, 4.72799187, 5.76642001,
6.36826384, 7.03292237, 7.03292237, 5.76642001, 5.22145448,
7.03292237, 6.36826384, 3.87656588, 6.36826384, 7.03292237,
5.76642001, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 3.5102043 , 7.03292237, 7.03292237, 7.03292237,
5.22145448, 7.03292237, 6.36826384, 7.03292237, 6.36826384,
7.03292237, 6.36826384, 5.22145448, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 6.36826384, 7.03292237, 7.03292237,
6.36826384, 4.72799187, 7.03292237, 5.22145448, 7.03292237,
4.72799187, 7.03292237, 4.28116479, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
4.28116479, 6.36826384, 7.03292237, 3.87656588, 7.03292237,
7.03292237, 7.03292237, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 5.22145448, 7.03292237,
7.03292237, 5.76642001, 6.36826384, 7.03292237, 4.72799187,
7.03292237, 7.03292237, 5.22145448, 7.03292237, 3.5102043 ,
6.36826384, 6.36826384, 7.03292237, 6.36826384, 7.03292237,
5.22145448, 6.36826384, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 5.76642001, 4.28116479, 7.03292237, 7.03292237,
4.72799187, 4.72799187, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 5.76642001, 7.03292237, 5.76642001,
4.28116479, 7.03292237, 4.28116479, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 3.5102043 , 7.03292237, 5.22145448,
7.03292237, 6.36826384, 7.03292237, 6.36826384, 7.03292237,
4.72799187, 7.03292237, 7.03292237, 4.72799187, 3.5102043 ,
3.17846635, 3.87656588, 5.22145448, 6.36826384, 7.03292237,
4.28116479, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
6.36826384, 7.03292237, 7.03292237, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 4.72799187, 7.03292237,
7.03292237, 7.03292237, 5.76642001, 7.03292237, 3.5102043 ,
7.03292237, 7.03292237, 5.22145448, 6.36826384, 3.87656588,
4.72799187, 7.03292237, 7.03292237, 3.87656588, 7.03292237,
6.36826384, 7.03292237, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 4.72799187, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 5.76642001, 7.03292237, 4.28116479, 7.03292237,
7.03292237, 7.03292237, 4.72799187, 6.36826384, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 4.72799187, 6.36826384, 7.03292237, 7.03292237,
5.22145448, 7.03292237, 5.76642001, 7.03292237, 7.03292237,
7.03292237, 5.76642001, 7.03292237, 6.36826384, 6.36826384,
7.03292237, 7.03292237, 6.36826384, 7.03292237, 5.22145448,
7.03292237, 5.22145448, 5.22145448, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
4.72799187, 4.28116479, 7.03292237, 6.36826384, 7.03292237,
5.76642001, 7.03292237, 7.03292237, 7.03292237, 4.72799187,
7.03292237, 5.76642001, 7.03292237, 4.72799187, 7.03292237,
7.03292237, 4.72799187, 5.76642001, 6.36826384, 7.03292237,
4.28116479, 6.36826384, 7.03292237, 6.36826384, 5.76642001,
7.03292237, 4.28116479, 5.22145448, 4.72799187, 7.03292237,
7.03292237, 6.36826384, 5.22145448, 7.03292237, 5.76642001,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 4.28116479, 7.03292237,
6.36826384, 5.22145448, 5.76642001, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 6.36826384, 7.03292237, 7.03292237,
5.22145448, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 4.28116479,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 4.72799187, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 5.22145448, 6.36826384, 7.03292237,
5.76642001, 5.76642001, 7.03292237, 7.03292237, 7.03292237,
4.28116479, 7.03292237, 5.76642001, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 3.87656588, 6.36826384, 6.36826384,
5.22145448, 7.03292237, 5.22145448, 7.03292237, 7.03292237,
7.03292237, 4.28116479, 7.03292237, 3.87656588, 7.03292237,
7.03292237, 5.22145448, 6.36826384, 4.72799187, 7.03292237,
7.03292237, 7.03292237, 6.36826384, 7.03292237, 5.76642001,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 5.22145448,
4.28116479, 7.03292237, 6.36826384, 7.03292237, 7.03292237,
5.76642001, 5.22145448, 5.76642001, 7.03292237, 4.28116479,
7.03292237, 7.03292237, 4.72799187, 6.36826384, 7.03292237,
4.72799187, 5.76642001, 7.03292237, 7.03292237, 6.36826384,
6.36826384, 5.76642001, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 4.72799187, 7.03292237, 6.36826384,
7.03292237, 4.72799187, 4.72799187, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 4.72799187,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
5.76642001, 7.03292237, 4.72799187, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
3.5102043 , 6.36826384, 5.22145448, 7.03292237, 5.22145448,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
4.72799187, 7.03292237, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 4.72799187, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 5.22145448, 4.72799187,
7.03292237, 7.03292237, 7.03292237, 4.28116479, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 3.87656588, 7.03292237,
5.22145448, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
3.5102043 , 7.03292237, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 4.72799187, 7.03292237, 7.03292237, 4.28116479,
6.36826384, 7.03292237, 5.22145448, 7.03292237, 7.03292237,
5.76642001, 7.03292237, 7.03292237, 7.03292237, 5.76642001,
4.72799187, 7.03292237, 4.72799187, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 3.87656588, 5.22145448, 7.03292237, 7.03292237,
6.36826384, 4.28116479, 7.03292237, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 3.87656588, 6.36826384, 7.03292237,
7.03292237, 5.76642001, 7.03292237, 5.22145448, 7.03292237,
5.76642001, 4.72799187, 7.03292237, 7.03292237, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 5.76642001,
5.22145448, 7.03292237, 5.76642001, 6.36826384, 4.28116479,
7.03292237, 4.72799187, 3.87656588, 5.22145448, 7.03292237,
6.36826384, 5.76642001, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 6.36826384, 5.76642001, 6.36826384, 7.03292237,
5.76642001, 7.03292237, 5.76642001, 5.22145448, 3.87656588,
5.76642001, 6.36826384, 7.03292237, 5.22145448, 6.36826384,
5.22145448, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 5.76642001, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 4.72799187, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 4.72799187,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 5.22145448, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 3.5102043 ,
3.87656588, 7.03292237, 4.72799187, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 3.87656588,
5.22145448, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 5.76642001, 7.03292237, 7.03292237, 7.03292237,
4.28116479, 7.03292237, 4.72799187, 4.72799187, 7.03292237,
6.36826384, 5.76642001, 7.03292237, 4.28116479, 7.03292237,
7.03292237, 7.03292237, 5.76642001, 7.03292237, 7.03292237,
5.76642001, 5.22145448, 7.03292237, 4.72799187, 7.03292237,
4.28116479, 5.76642001, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 4.28116479, 7.03292237,
7.03292237, 7.03292237, 5.22145448, 5.22145448, 7.03292237,
7.03292237, 7.03292237, 5.76642001, 6.36826384, 7.03292237,
7.03292237, 5.22145448, 7.03292237, 7.03292237, 5.76642001,
5.22145448, 7.03292237, 7.03292237, 7.03292237, 3.87656588,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 5.76642001, 7.03292237, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 4.28116479, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 3.5102043 ,
7.03292237, 7.03292237, 7.03292237, 5.76642001, 4.28116479,
5.22145448, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 5.76642001, 6.36826384, 7.03292237,
5.22145448, 5.76642001, 5.76642001, 7.03292237, 7.03292237,
5.22145448, 7.03292237, 7.03292237, 5.22145448, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 5.22145448,
6.36826384, 5.22145448, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 5.22145448, 7.03292237, 5.76642001, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 4.72799187, 7.03292237,
7.03292237, 7.03292237, 6.36826384, 4.72799187, 5.22145448,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 7.03292237, 5.76642001, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 7.03292237,
4.72799187, 3.87656588, 7.03292237, 7.03292237, 4.72799187,
7.03292237, 7.03292237, 6.36826384, 7.03292237, 5.22145448,
7.03292237, 7.03292237, 7.03292237, 3.87656588, 5.76642001,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 5.76642001,
5.22145448, 7.03292237, 6.36826384, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 5.76642001,
5.76642001, 7.03292237, 5.76642001, 3.87656588, 6.36826384,
7.03292237, 7.03292237, 7.03292237, 6.36826384, 5.76642001,
5.22145448, 7.03292237, 5.22145448, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 5.22145448, 4.72799187,
7.03292237, 6.36826384, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 7.03292237, 7.03292237, 5.22145448, 6.36826384,
7.03292237, 7.03292237, 3.17846635, 5.76642001, 7.03292237,
3.5102043 , 7.03292237, 7.03292237, 7.03292237, 3.87656588,
7.03292237, 6.36826384, 6.36826384, 7.03292237, 5.22145448,
7.03292237, 7.03292237, 7.03292237, 7.03292237, 7.03292237,
7.03292237, 4.28116479, 6.36826384, 7.03292237, 6.36826384,
4.72799187, 7.03292237, 7.03292237, 5.22145448, 4.28116479,
7.03292237, 6.36826384, 7.03292237, 4.72799187, 5.76642001,
6.36826384, 5.22145448, 7.03292237, 7.03292237, 7.03292237,
6.36826384, 7.03292237, 7.03292237, 3.87656588, 7.03292237,
4.72799187, 7.03292237, 3.53462742, 4.76088805, 5.25778406,
4.31095206, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.25778406, 5.25778406, 5.80654132, 5.80654132,
3.90353806, 5.25778406, 4.31095206, 5.80654132, 5.25778406,
3.53462742, 2.89810483, 5.80654132, 5.25778406, 5.80654132,
2.89810483, 5.80654132, 5.25778406, 3.53462742, 4.76088805,
5.80654132, 3.20058132, 5.80654132, 5.80654132, 4.76088805,
5.80654132, 3.53462742, 3.53462742, 5.80654132, 5.80654132,
5.80654132, 4.76088805, 5.80654132, 4.76088805, 3.90353806,
5.80654132, 3.53462742, 5.80654132, 2.6242144 , 3.20058132,
5.80654132, 5.80654132, 3.90353806, 3.20058132, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
2.89810483, 5.80654132, 5.80654132, 3.90353806, 3.53462742,
4.31095206, 5.80654132, 5.80654132, 4.76088805, 5.80654132,
3.53462742, 5.80654132, 4.76088805, 2.89810483, 5.25778406,
4.31095206, 5.80654132, 4.31095206, 5.80654132, 5.80654132,
4.76088805, 4.31095206, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 4.76088805, 5.80654132, 5.25778406,
5.25778406, 5.80654132, 5.80654132, 3.53462742, 5.80654132,
3.53462742, 5.80654132, 4.31095206, 5.80654132, 5.80654132,
5.25778406, 5.80654132, 3.20058132, 5.80654132, 5.80654132,
3.20058132, 3.90353806, 5.80654132, 5.80654132, 5.25778406,
3.53462742, 3.20058132, 5.80654132, 4.31095206, 5.80654132,
5.80654132, 5.80654132, 3.20058132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 4.31095206, 5.80654132, 3.90353806,
5.80654132, 4.31095206, 4.31095206, 5.80654132, 4.76088805,
3.90353806, 3.90353806, 4.76088805, 3.90353806, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.25778406, 3.53462742, 5.80654132, 3.53462742,
5.80654132, 5.80654132, 5.80654132, 2.89810483, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 4.76088805, 4.76088805,
5.80654132, 2.89810483, 5.80654132, 4.76088805, 5.80654132,
5.80654132, 4.31095206, 3.20058132, 5.80654132, 4.76088805,
5.80654132, 2.89810483, 2.89810483, 5.25778406, 3.90353806,
5.80654132, 5.80654132, 5.25778406, 5.80654132, 5.80654132,
3.90353806, 5.80654132, 5.25778406, 4.76088805, 5.80654132,
2.89810483, 5.25778406, 5.80654132, 5.80654132, 4.31095206,
5.25778406, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
2.89810483, 5.80654132, 3.53462742, 3.90353806, 5.25778406,
5.80654132, 3.20058132, 2.89810483, 5.80654132, 4.31095206,
5.80654132, 3.53462742, 5.25778406, 4.76088805, 5.80654132,
3.53462742, 3.90353806, 5.80654132, 3.20058132, 5.80654132,
5.80654132, 3.53462742, 5.25778406, 4.76088805, 4.76088805,
5.80654132, 5.80654132, 2.89810483, 3.20058132, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.25778406, 5.25778406,
5.80654132, 5.80654132, 4.76088805, 5.80654132, 4.31095206,
5.25778406, 5.80654132, 4.31095206, 4.31095206, 5.80654132,
5.80654132, 3.53462742, 4.76088805, 3.53462742, 4.76088805,
4.31095206, 5.80654132, 3.90353806, 5.80654132, 4.76088805,
5.80654132, 5.80654132, 5.80654132, 4.31095206, 3.90353806,
5.80654132, 4.76088805, 4.76088805, 3.53462742, 5.80654132,
5.80654132, 5.25778406, 3.53462742, 3.20058132, 3.53462742,
3.90353806, 5.80654132, 4.31095206, 4.76088805, 5.80654132,
5.80654132, 5.80654132, 3.90353806, 4.76088805, 2.89810483,
5.80654132, 5.80654132, 5.80654132, 4.76088805, 5.25778406,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 3.90353806, 5.25778406, 4.76088805,
5.80654132, 4.76088805, 3.90353806, 5.80654132, 5.80654132,
4.76088805, 5.80654132, 5.25778406, 5.80654132, 2.89810483,
5.80654132, 5.25778406, 3.90353806, 3.90353806, 5.80654132,
5.25778406, 3.53462742, 5.80654132, 4.76088805, 5.25778406,
5.80654132, 3.90353806, 4.31095206, 5.80654132, 5.25778406,
3.90353806, 3.53462742, 5.25778406, 2.89810483, 5.80654132,
3.53462742, 4.76088805, 4.31095206, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 3.90353806, 5.80654132,
4.31095206, 5.80654132, 5.80654132, 5.25778406, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.25778406, 5.25778406,
5.80654132, 5.25778406, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.25778406, 4.31095206, 5.80654132, 5.25778406,
5.80654132, 5.25778406, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 4.31095206, 5.25778406, 3.53462742, 2.89810483,
5.80654132, 5.80654132, 3.20058132, 5.80654132, 4.31095206,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 3.90353806,
3.90353806, 3.90353806, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 4.76088805, 3.20058132, 4.31095206, 5.80654132,
3.90353806, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 3.90353806, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 3.90353806, 5.80654132, 3.90353806, 3.53462742,
5.80654132, 4.76088805, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
4.76088805, 5.25778406, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.25778406,
3.53462742, 5.25778406, 5.80654132, 3.53462742, 5.80654132,
3.90353806, 5.80654132, 5.80654132, 5.80654132, 3.90353806,
3.20058132, 5.80654132, 5.80654132, 3.90353806, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 3.53462742, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 3.53462742, 5.25778406, 3.90353806,
5.80654132, 4.76088805, 4.76088805, 3.90353806, 5.80654132,
5.80654132, 4.31095206, 2.89810483, 5.80654132, 5.80654132,
3.90353806, 5.80654132, 3.53462742, 3.90353806, 5.80654132,
5.80654132, 4.76088805, 5.80654132, 4.31095206, 5.25778406,
5.25778406, 3.20058132, 3.53462742, 5.80654132, 4.31095206,
5.80654132, 4.76088805, 3.90353806, 4.76088805, 4.76088805,
5.80654132, 5.80654132, 5.25778406, 3.90353806, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 3.53462742, 4.31095206, 3.90353806, 4.76088805,
4.31095206, 3.53462742, 3.90353806, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 3.20058132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 3.90353806, 4.76088805,
5.25778406, 3.53462742, 3.20058132, 5.80654132, 3.90353806,
5.80654132, 3.53462742, 5.80654132, 5.80654132, 3.90353806,
5.80654132, 3.90353806, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 4.76088805, 3.90353806, 4.76088805, 5.25778406,
2.89810483, 5.80654132, 4.31095206, 5.80654132, 4.76088805,
5.80654132, 5.25778406, 5.80654132, 5.80654132, 5.80654132,
3.53462742, 2.89810483, 5.80654132, 5.80654132, 5.80654132,
3.90353806, 4.76088805, 5.80654132, 5.25778406, 4.76088805,
5.25778406, 5.80654132, 5.80654132, 5.25778406, 5.80654132,
5.80654132, 5.80654132, 2.89810483, 5.25778406, 5.80654132,
5.80654132, 4.76088805, 4.76088805, 5.25778406, 5.80654132,
5.80654132, 4.31095206, 3.20058132, 3.53462742, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.25778406,
5.80654132, 5.80654132, 3.90353806, 4.76088805, 5.80654132,
3.53462742, 5.80654132, 5.25778406, 2.89810483, 5.80654132,
5.25778406, 5.80654132, 5.80654132, 5.80654132, 5.25778406,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 4.31095206, 5.80654132, 3.20058132, 5.80654132,
5.25778406, 4.76088805, 5.25778406, 5.80654132, 4.76088805,
5.80654132, 3.90353806, 4.31095206, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.25778406, 5.80654132, 3.90353806,
4.76088805, 3.90353806, 5.80654132, 3.53462742, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 3.53462742, 5.80654132,
4.76088805, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 3.90353806,
2.6242144 , 5.80654132, 5.80654132, 5.80654132, 5.80654132,
4.76088805, 5.80654132, 3.53462742, 5.80654132, 5.80654132,
3.90353806, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 3.20058132, 3.20058132, 5.80654132,
5.80654132, 5.80654132, 3.90353806, 5.80654132, 5.25778406,
4.31095206, 5.25778406, 4.31095206, 4.31095206, 4.76088805,
5.80654132, 4.76088805, 5.80654132, 3.53462742, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 3.20058132,
5.80654132, 3.90353806, 5.80654132, 4.76088805, 5.80654132,
3.90353806, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.25778406, 5.80654132, 4.31095206, 5.25778406,
4.31095206, 5.80654132, 3.90353806, 5.80654132, 3.53462742,
5.25778406, 5.80654132, 5.80654132, 4.31095206, 3.90353806,
3.53462742, 5.80654132, 5.80654132, 5.80654132, 4.31095206,
5.80654132, 5.80654132, 5.25778406, 4.76088805, 4.31095206,
3.20058132, 5.80654132, 3.53462742, 3.20058132, 5.80654132,
5.80654132, 3.20058132, 3.20058132, 5.80654132, 4.31095206,
4.31095206, 5.80654132, 5.80654132, 3.90353806, 3.90353806,
3.53462742, 5.80654132, 3.90353806, 3.53462742, 5.80654132,
3.90353806, 5.25778406, 5.80654132, 3.53462742, 5.80654132,
5.25778406, 5.80654132, 4.31095206, 3.90353806, 5.80654132,
5.80654132, 4.31095206, 5.25778406, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.25778406,
3.20058132, 5.25778406, 2.89810483, 3.90353806, 5.80654132,
3.53462742, 5.80654132, 5.25778406, 5.80654132, 2.89810483,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 3.20058132,
5.80654132, 5.25778406, 3.53462742, 4.31095206, 4.76088805,
3.90353806, 5.80654132, 5.80654132, 5.25778406, 3.90353806,
4.76088805, 4.31095206, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 3.90353806, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.25778406,
3.53462742, 5.80654132, 5.80654132, 5.25778406, 5.80654132,
3.20058132, 5.80654132, 4.76088805, 5.80654132, 4.76088805,
5.80654132, 5.25778406, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.25778406, 2.89810483, 5.80654132, 5.80654132,
2.89810483, 3.53462742, 5.80654132, 5.80654132, 2.89810483,
4.31095206, 3.53462742, 4.31095206, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 4.31095206,
4.76088805, 5.25778406, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.25778406, 3.90353806, 5.80654132, 5.25778406,
5.80654132, 2.89810483, 2.89810483, 5.80654132, 3.53462742,
5.80654132, 3.53462742, 5.80654132, 4.31095206, 2.89810483,
5.80654132, 5.80654132, 2.89810483, 4.76088805, 5.80654132,
5.80654132, 3.20058132, 5.80654132, 3.90353806, 5.80654132,
5.80654132, 3.20058132, 3.90353806, 4.76088805, 4.76088805,
5.80654132, 3.90353806, 4.31095206, 5.80654132, 4.31095206,
5.80654132, 3.20058132, 4.31095206, 4.76088805, 3.53462742,
5.80654132, 5.80654132, 3.53462742, 3.53462742, 3.53462742,
5.80654132, 5.80654132, 3.90353806, 3.90353806, 3.20058132,
5.80654132, 5.80654132, 2.89810483, 3.90353806, 5.80654132,
2.89810483, 3.53462742, 3.53462742, 4.31095206, 5.80654132,
3.53462742, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.25778406, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 4.76088805, 5.80654132, 5.80654132, 4.76088805,
5.80654132, 5.80654132, 4.76088805, 4.76088805, 5.80654132,
5.25778406, 4.31095206, 5.80654132, 4.76088805, 3.90353806,
4.31095206, 5.80654132, 2.89810483, 4.31095206, 5.25778406,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 3.20058132,
5.25778406, 5.80654132, 4.76088805, 5.80654132, 4.31095206,
5.80654132, 5.80654132, 4.76088805, 4.31095206, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 4.31095206,
4.31095206, 3.20058132, 4.76088805, 5.80654132, 3.20058132,
3.20058132, 5.80654132, 3.90353806, 5.25778406, 3.20058132,
4.76088805, 3.20058132, 3.53462742, 4.76088805, 5.80654132,
5.80654132, 4.31095206, 4.76088805, 5.80654132, 4.31095206,
5.80654132, 4.76088805, 4.31095206, 2.89810483, 5.80654132,
5.80654132, 5.80654132, 4.76088805, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 4.76088805, 5.25778406, 4.31095206,
5.80654132, 3.90353806, 3.53462742, 4.76088805, 5.80654132,
4.31095206, 5.80654132, 5.80654132, 3.20058132, 5.80654132,
5.25778406, 5.80654132, 5.80654132, 5.80654132, 3.53462742,
2.6242144 , 5.80654132, 5.80654132, 3.53462742, 5.25778406,
3.90353806, 5.80654132, 2.89810483, 5.80654132, 3.90353806,
5.80654132, 5.80654132, 3.90353806, 2.89810483, 5.80654132,
4.76088805, 4.31095206, 5.80654132, 5.25778406, 5.80654132,
5.80654132, 4.31095206, 5.80654132, 5.80654132, 5.80654132,
3.90353806, 4.76088805, 5.80654132, 4.76088805, 5.80654132,
4.76088805, 3.53462742, 3.90353806, 5.80654132, 5.80654132,
5.80654132, 5.25778406, 5.80654132, 5.80654132, 5.25778406,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
3.53462742, 3.53462742, 3.90353806, 5.80654132, 4.31095206,
3.53462742, 5.80654132, 4.76088805, 4.76088805, 3.20058132,
3.90353806, 5.80654132, 5.25778406, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 4.31095206, 5.25778406, 4.31095206,
5.80654132, 3.20058132, 5.80654132, 4.31095206, 4.31095206,
4.76088805, 5.80654132, 4.76088805, 4.31095206, 5.80654132,
5.25778406, 3.53462742, 3.53462742, 5.25778406, 5.80654132,
3.90353806, 5.25778406, 4.31095206, 4.31095206, 3.53462742,
5.80654132, 3.90353806, 5.80654132, 5.80654132, 4.76088805,
5.25778406, 3.20058132, 3.90353806, 5.80654132, 5.25778406,
5.80654132, 5.80654132, 5.25778406, 5.80654132, 4.31095206,
5.25778406, 4.76088805, 5.80654132, 5.80654132, 5.25778406,
3.53462742, 5.80654132, 5.80654132, 5.80654132, 5.25778406,
5.25778406, 5.80654132, 3.20058132, 5.80654132, 5.80654132,
3.53462742, 5.80654132, 5.80654132, 5.80654132, 4.31095206,
5.80654132, 4.76088805, 5.80654132, 5.80654132, 5.80654132,
3.90353806, 4.31095206, 5.25778406, 5.80654132, 3.53462742,
3.90353806, 5.25778406, 4.31095206, 5.80654132, 5.25778406,
5.25778406, 2.89810483, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 5.25778406, 5.80654132, 4.76088805,
5.80654132, 5.80654132, 5.80654132, 4.31095206, 5.80654132,
3.20058132, 3.90353806, 5.80654132, 5.80654132, 5.25778406,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 5.80654132, 2.6242144 , 5.80654132, 3.90353806,
5.25778406, 4.76088805, 5.80654132, 5.80654132, 3.90353806,
5.80654132, 3.53462742, 2.89810483, 5.80654132, 3.53462742,
2.89810483, 4.76088805, 5.80654132, 5.80654132, 5.80654132,
4.31095206, 5.80654132, 4.76088805, 3.90353806, 2.89810483,
4.76088805, 5.80654132, 2.6242144 , 3.53462742, 4.31095206,
5.25778406, 5.25778406, 3.20058132, 4.31095206, 4.31095206,
3.20058132, 4.31095206, 5.25778406, 4.31095206, 5.25778406,
3.90353806, 4.31095206, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 3.90353806, 5.80654132, 5.80654132, 5.80654132,
4.31095206, 5.80654132, 5.80654132, 5.80654132, 3.90353806,
5.25778406, 3.90353806, 4.31095206, 4.76088805, 3.90353806,
5.80654132, 5.80654132, 5.80654132, 2.89810483, 5.80654132,
5.80654132, 5.80654132, 5.80654132, 5.80654132, 5.80654132,
5.80654132, 3.90353806, 3.20058132, 5.25778406, 4.76088805,
5.25778406])
class InvGaussIdentity(Medpar1):
"""
Accuracy is different for R vs Stata ML vs Stata IRLS, we are close.
"""
def __init__(self):
super(InvGaussIdentity, self).__init__()
self.params = np.array([ 0.44538838, -1.05872706, 2.83947966])
self.bse = np.array([ 0.02586783, 0.13830023, 0.20834864])
filename = os.path.join(os.path.dirname(os.path.abspath(__file__)),
"igaussident_resids.csv")
self.resids = np.genfromtxt(open(filename, 'rb'), delimiter=",")
self.null_deviance = 335.1539777981053 # from R, Rpy bug
self.df_null = 3675
self.deviance = 305.33661191013988
self.df_resid = 3673
self.df_model = 2
self.aic_R = 18558.677276882016
self.aic_Stata = 6.619290231464371
self.bic_Stata = -29848.45250412075
self.llf_stata = -12163.25544543151
self.chi2 = 567.1229375785638 # in Stata not sm
# self.llf = -9275.3386384410078 # from R
self.llf = -12163.25545 # from Stata, big diff with R
self.scale = 0.10115387793455666
self.pearson_chi2 = 371.5346609292967 # deviance_p in Stata
self.fittedvalues = np.array([ 6.84797506, 6.84797506, 6.84797506,
6.84797506, 5.9571983 ,
6.84797506, 6.84797506, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.9571983 , 6.84797506,
5.51180993, 6.84797506, 5.51180993, 5.06642155, 5.06642155,
6.84797506, 6.84797506, 6.40258668, 6.84797506, 5.9571983 ,
6.84797506, 4.62103317, 6.84797506, 6.84797506, 6.84797506,
5.9571983 , 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 4.17564479, 6.84797506, 6.84797506, 4.62103317,
6.84797506, 6.84797506, 5.06642155, 6.84797506, 6.84797506,
6.84797506, 5.51180993, 6.40258668, 6.40258668, 4.62103317,
5.06642155, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
5.51180993, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.9571983 , 6.40258668,
6.40258668, 5.51180993, 6.84797506, 6.84797506, 6.84797506,
5.9571983 , 6.84797506, 6.84797506, 4.17564479, 5.9571983 ,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
5.51180993, 5.51180993, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 5.06642155, 6.84797506, 6.40258668,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.51180993, 6.40258668, 5.51180993,
6.84797506, 6.84797506, 5.06642155, 5.9571983 , 6.84797506,
5.06642155, 6.40258668, 4.17564479, 6.84797506, 6.84797506,
5.51180993, 5.51180993, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 4.62103317,
6.84797506, 6.40258668, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.40258668, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.9571983 ,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.9571983 ,
6.84797506, 6.84797506, 6.40258668, 4.17564479, 6.84797506,
6.84797506, 5.51180993, 6.84797506, 5.9571983 , 4.62103317,
5.9571983 , 6.40258668, 6.40258668, 6.84797506, 6.84797506,
5.9571983 , 6.84797506, 6.84797506, 4.62103317, 6.84797506,
6.40258668, 6.84797506, 6.40258668, 6.84797506, 5.51180993,
6.84797506, 4.62103317, 5.06642155, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.40258668,
6.84797506, 4.62103317, 5.51180993, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 4.62103317, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 5.9571983 , 6.84797506, 6.84797506,
6.84797506, 5.06642155, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.40258668, 6.84797506, 6.40258668, 4.62103317, 5.9571983 ,
5.51180993, 6.40258668, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 6.40258668,
5.9571983 , 6.84797506, 5.51180993, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
4.62103317, 6.84797506, 5.51180993, 6.84797506, 6.40258668,
5.9571983 , 4.62103317, 4.62103317, 6.84797506, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 4.62103317,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.51180993,
6.84797506, 6.84797506, 6.84797506, 5.51180993, 6.84797506,
5.9571983 , 6.84797506, 5.06642155, 4.62103317, 6.40258668,
5.9571983 , 6.84797506, 6.84797506, 6.84797506, 5.51180993,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
4.17564479, 5.06642155, 6.84797506, 6.84797506, 6.84797506,
5.06642155, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.40258668, 4.17564479, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 4.62103317, 6.84797506, 6.40258668,
6.84797506, 5.51180993, 5.51180993, 6.40258668, 6.84797506,
6.40258668, 6.40258668, 6.84797506, 4.62103317, 6.84797506,
6.84797506, 6.84797506, 5.51180993, 6.40258668, 6.84797506,
4.17564479, 6.40258668, 5.51180993, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 6.84797506, 4.62103317, 6.84797506,
5.51180993, 6.84797506, 6.40258668, 5.51180993, 5.06642155,
6.84797506, 6.84797506, 6.84797506, 5.06642155, 6.40258668,
6.84797506, 6.40258668, 5.9571983 , 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.51180993, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 4.17564479, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.06642155, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.40258668, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 6.40258668, 6.84797506, 5.51180993,
6.40258668, 6.84797506, 6.40258668, 6.84797506, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 4.17564479,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 6.40258668,
5.9571983 , 6.84797506, 6.84797506, 6.84797506, 6.84797506,
5.9571983 , 6.84797506, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 4.17564479, 6.84797506, 6.40258668, 6.40258668,
5.06642155, 5.9571983 , 6.40258668, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 4.17564479, 5.51180993, 5.06642155,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.06642155, 6.40258668,
6.84797506, 6.84797506, 5.9571983 , 6.84797506, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.40258668,
6.84797506, 5.51180993, 5.9571983 , 6.84797506, 5.9571983 ,
6.40258668, 5.9571983 , 5.9571983 , 6.84797506, 5.9571983 ,
6.84797506, 6.84797506, 6.84797506, 5.06642155, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 6.40258668, 6.84797506, 5.06642155,
6.84797506, 6.84797506, 4.62103317, 6.40258668, 4.17564479,
6.84797506, 3.73025641, 6.84797506, 6.84797506, 5.9571983 ,
5.51180993, 6.84797506, 5.9571983 , 4.62103317, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.9571983 , 5.06642155,
6.84797506, 6.40258668, 6.84797506, 5.51180993, 6.84797506,
5.06642155, 6.84797506, 6.84797506, 6.84797506, 5.51180993,
5.51180993, 5.06642155, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
4.62103317, 6.84797506, 6.84797506, 6.84797506, 5.9571983 ,
6.40258668, 6.84797506, 6.84797506, 5.9571983 , 6.84797506,
6.84797506, 6.40258668, 5.06642155, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 5.06642155, 5.9571983 , 6.84797506, 5.9571983 ,
6.40258668, 6.84797506, 6.84797506, 6.84797506, 5.06642155,
6.84797506, 6.84797506, 6.84797506, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 6.84797506, 5.9571983 , 6.40258668,
5.06642155, 6.84797506, 6.84797506, 6.84797506, 5.9571983 ,
6.84797506, 6.40258668, 5.51180993, 5.9571983 , 5.06642155,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.51180993, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 6.84797506, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 6.40258668, 6.84797506, 5.51180993,
6.84797506, 6.84797506, 6.84797506, 5.9571983 , 6.84797506,
6.40258668, 6.40258668, 6.84797506, 5.9571983 , 6.84797506,
6.40258668, 6.84797506, 6.84797506, 6.84797506, 5.06642155,
5.51180993, 6.84797506, 4.17564479, 5.9571983 , 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.40258668, 6.84797506, 6.84797506,
5.06642155, 6.84797506, 6.40258668, 6.84797506, 4.62103317,
6.84797506, 6.84797506, 5.9571983 , 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
5.06642155, 6.40258668, 4.17564479, 6.84797506, 6.84797506,
6.40258668, 5.06642155, 4.62103317, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 4.17564479, 6.84797506, 6.84797506, 6.84797506,
4.17564479, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 4.17564479,
6.84797506, 5.06642155, 5.51180993, 5.51180993, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 5.51180993, 5.51180993, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.06642155, 6.40258668, 5.9571983 ,
5.9571983 , 6.40258668, 6.84797506, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 5.06642155, 6.84797506, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 5.06642155, 4.62103317, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.9571983 ,
6.84797506, 5.9571983 , 6.84797506, 6.84797506, 6.84797506,
5.51180993, 6.84797506, 6.84797506, 6.84797506, 5.51180993,
6.40258668, 6.84797506, 6.84797506, 6.40258668, 6.40258668,
6.84797506, 6.84797506, 5.9571983 , 6.84797506, 5.51180993,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.06642155, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 4.62103317,
6.84797506, 6.40258668, 6.84797506, 5.9571983 , 4.62103317,
5.9571983 , 6.84797506, 4.17564479, 6.84797506, 6.84797506,
6.84797506, 3.73025641, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 5.9571983 , 6.84797506, 6.84797506, 6.84797506,
6.84797506, 5.9571983 , 5.9571983 , 5.9571983 , 6.84797506,
6.84797506, 6.40258668, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 5.9571983 , 6.84797506, 4.62103317, 6.40258668,
5.9571983 , 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 5.51180993,
6.84797506, 6.84797506, 6.84797506, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 5.9571983 , 6.84797506, 6.84797506, 6.84797506,
6.40258668, 6.40258668, 6.84797506, 6.84797506, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 5.51180993, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 3.73025641, 6.84797506, 6.84797506,
6.84797506, 4.17564479, 6.40258668, 5.9571983 , 6.84797506,
6.84797506, 6.40258668, 5.06642155, 6.84797506, 6.84797506,
5.9571983 , 6.84797506, 4.17564479, 5.51180993, 6.40258668,
4.62103317, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.40258668,
6.84797506, 5.51180993, 6.40258668, 6.40258668, 6.84797506,
6.40258668, 6.84797506, 6.84797506, 6.84797506, 5.51180993,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 5.06642155,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 3.73025641, 6.84797506, 5.51180993,
5.51180993, 6.84797506, 6.40258668, 6.84797506, 5.06642155,
6.84797506, 6.84797506, 6.84797506, 5.06642155, 6.84797506,
5.9571983 , 6.84797506, 4.17564479, 6.84797506, 5.51180993,
4.17564479, 5.06642155, 6.40258668, 5.9571983 , 6.84797506,
6.40258668, 6.84797506, 4.62103317, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 5.51180993, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 4.62103317, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 5.9571983 , 6.40258668, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
3.73025641, 5.06642155, 6.84797506, 4.62103317, 6.84797506,
5.06642155, 6.84797506, 5.51180993, 5.9571983 , 5.9571983 ,
4.17564479, 5.9571983 , 5.51180993, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.51180993, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 5.51180993, 6.84797506,
6.84797506, 6.84797506, 5.51180993, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 4.62103317,
5.06642155, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 6.40258668,
6.40258668, 5.9571983 , 6.84797506, 5.9571983 , 6.40258668,
6.84797506, 6.84797506, 6.84797506, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
5.51180993, 6.84797506, 6.84797506, 5.9571983 , 6.40258668,
5.9571983 , 6.84797506, 6.84797506, 6.40258668, 6.84797506,
5.06642155, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 5.9571983 , 6.40258668, 5.06642155,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.06642155,
6.84797506, 6.40258668, 6.84797506, 5.51180993, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 5.9571983 , 6.40258668,
5.9571983 , 6.84797506, 6.84797506, 6.84797506, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
5.06642155, 6.84797506, 5.51180993, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.06642155, 6.40258668, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 5.9571983 , 5.51180993,
6.84797506, 6.84797506, 6.84797506, 5.51180993, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 4.62103317,
5.9571983 , 6.84797506, 4.62103317, 6.84797506, 6.40258668,
6.84797506, 6.84797506, 4.62103317, 6.84797506, 6.84797506,
6.40258668, 4.17564479, 3.73025641, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.9571983 ,
6.84797506, 5.06642155, 5.9571983 , 6.84797506, 6.84797506,
4.17564479, 6.84797506, 6.84797506, 6.84797506, 4.62103317,
6.84797506, 6.84797506, 6.84797506, 5.9571983 , 5.9571983 ,
6.84797506, 6.40258668, 5.9571983 , 6.84797506, 6.40258668,
5.9571983 , 6.84797506, 5.9571983 , 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
4.62103317, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 5.06642155, 5.9571983 , 6.40258668, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 5.9571983 , 4.62103317,
6.84797506, 5.9571983 , 5.06642155, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.9571983 ,
6.40258668, 6.40258668, 6.84797506, 6.84797506, 6.40258668,
4.17564479, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
3.73025641, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 5.06642155, 6.84797506, 6.40258668, 5.06642155,
5.06642155, 6.84797506, 5.9571983 , 6.84797506, 6.84797506,
6.84797506, 6.40258668, 4.62103317, 6.84797506, 6.84797506,
6.84797506, 5.9571983 , 6.84797506, 6.84797506, 6.84797506,
5.06642155, 6.84797506, 6.84797506, 6.40258668, 5.51180993,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 5.51180993, 6.84797506, 6.84797506, 6.40258668,
6.84797506, 6.84797506, 5.51180993, 6.84797506, 6.40258668,
6.40258668, 6.84797506, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 6.40258668, 6.84797506, 5.06642155,
4.62103317, 5.06642155, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 4.62103317,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.40258668, 4.62103317, 4.62103317, 6.84797506, 5.51180993,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 6.84797506,
5.9571983 , 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.06642155, 6.84797506,
4.17564479, 6.84797506, 5.06642155, 6.84797506, 6.84797506,
6.84797506, 5.51180993, 6.84797506, 4.62103317, 6.84797506,
6.84797506, 5.06642155, 5.51180993, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.9571983 , 5.51180993,
6.84797506, 6.84797506, 4.17564479, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.06642155, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
4.62103317, 6.84797506, 6.84797506, 6.84797506, 5.9571983 ,
6.84797506, 5.51180993, 5.06642155, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 5.9571983 , 6.84797506, 5.9571983 ,
6.84797506, 4.62103317, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.9571983 , 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.9571983 ,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.9571983 , 6.84797506, 4.17564479,
6.40258668, 5.9571983 , 6.84797506, 4.62103317, 6.84797506,
5.9571983 , 5.51180993, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
5.9571983 , 6.84797506, 6.84797506, 6.84797506, 3.73025641,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 4.62103317, 5.06642155, 6.40258668, 6.84797506,
6.84797506, 4.62103317, 5.9571983 , 6.84797506, 6.84797506,
6.84797506, 4.62103317, 6.84797506, 6.84797506, 5.51180993,
6.40258668, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.06642155,
6.84797506, 5.51180993, 6.40258668, 6.84797506, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.51180993,
6.84797506, 6.84797506, 5.51180993, 6.84797506, 6.40258668,
6.84797506, 6.84797506, 5.9571983 , 6.84797506, 6.84797506,
3.73025641, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 5.9571983 , 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.40258668,
5.06642155, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 6.40258668, 5.06642155, 5.51180993,
5.9571983 , 6.84797506, 6.40258668, 6.40258668, 6.84797506,
6.40258668, 6.84797506, 5.51180993, 5.06642155, 5.9571983 ,
6.40258668, 6.84797506, 6.84797506, 5.9571983 , 5.51180993,
6.84797506, 6.40258668, 4.17564479, 6.40258668, 6.84797506,
5.9571983 , 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 3.73025641, 6.84797506, 6.84797506, 6.84797506,
5.51180993, 6.84797506, 6.40258668, 6.84797506, 6.40258668,
6.84797506, 6.40258668, 5.51180993, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 6.40258668, 6.84797506, 6.84797506,
6.40258668, 5.06642155, 6.84797506, 5.51180993, 6.84797506,
5.06642155, 6.84797506, 4.62103317, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
4.62103317, 6.40258668, 6.84797506, 4.17564479, 6.84797506,
6.84797506, 6.84797506, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 5.51180993, 6.84797506,
6.84797506, 5.9571983 , 6.40258668, 6.84797506, 5.06642155,
6.84797506, 6.84797506, 5.51180993, 6.84797506, 3.73025641,
6.40258668, 6.40258668, 6.84797506, 6.40258668, 6.84797506,
5.51180993, 6.40258668, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 5.9571983 , 4.62103317, 6.84797506, 6.84797506,
5.06642155, 5.06642155, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.9571983 , 6.84797506, 5.9571983 ,
4.62103317, 6.84797506, 4.62103317, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 3.73025641, 6.84797506, 5.51180993,
6.84797506, 6.40258668, 6.84797506, 6.40258668, 6.84797506,
5.06642155, 6.84797506, 6.84797506, 5.06642155, 3.73025641,
3.28486804, 4.17564479, 5.51180993, 6.40258668, 6.84797506,
4.62103317, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.40258668, 6.84797506, 6.84797506, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.06642155, 6.84797506,
6.84797506, 6.84797506, 5.9571983 , 6.84797506, 3.73025641,
6.84797506, 6.84797506, 5.51180993, 6.40258668, 4.17564479,
5.06642155, 6.84797506, 6.84797506, 4.17564479, 6.84797506,
6.40258668, 6.84797506, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.06642155, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 5.9571983 , 6.84797506, 4.62103317, 6.84797506,
6.84797506, 6.84797506, 5.06642155, 6.40258668, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.40258668, 5.06642155, 6.40258668, 6.84797506, 6.84797506,
5.51180993, 6.84797506, 5.9571983 , 6.84797506, 6.84797506,
6.84797506, 5.9571983 , 6.84797506, 6.40258668, 6.40258668,
6.84797506, 6.84797506, 6.40258668, 6.84797506, 5.51180993,
6.84797506, 5.51180993, 5.51180993, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
5.06642155, 4.62103317, 6.84797506, 6.40258668, 6.84797506,
5.9571983 , 6.84797506, 6.84797506, 6.84797506, 5.06642155,
6.84797506, 5.9571983 , 6.84797506, 5.06642155, 6.84797506,
6.84797506, 5.06642155, 5.9571983 , 6.40258668, 6.84797506,
4.62103317, 6.40258668, 6.84797506, 6.40258668, 5.9571983 ,
6.84797506, 4.62103317, 5.51180993, 5.06642155, 6.84797506,
6.84797506, 6.40258668, 5.51180993, 6.84797506, 5.9571983 ,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 4.62103317, 6.84797506,
6.40258668, 5.51180993, 5.9571983 , 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.40258668, 6.84797506, 6.84797506,
5.51180993, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 4.62103317,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 5.06642155, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.51180993, 6.40258668, 6.84797506,
5.9571983 , 5.9571983 , 6.84797506, 6.84797506, 6.84797506,
4.62103317, 6.84797506, 5.9571983 , 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 4.17564479, 6.40258668, 6.40258668,
5.51180993, 6.84797506, 5.51180993, 6.84797506, 6.84797506,
6.84797506, 4.62103317, 6.84797506, 4.17564479, 6.84797506,
6.84797506, 5.51180993, 6.40258668, 5.06642155, 6.84797506,
6.84797506, 6.84797506, 6.40258668, 6.84797506, 5.9571983 ,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.51180993,
4.62103317, 6.84797506, 6.40258668, 6.84797506, 6.84797506,
5.9571983 , 5.51180993, 5.9571983 , 6.84797506, 4.62103317,
6.84797506, 6.84797506, 5.06642155, 6.40258668, 6.84797506,
5.06642155, 5.9571983 , 6.84797506, 6.84797506, 6.40258668,
6.40258668, 5.9571983 , 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.06642155, 6.84797506, 6.40258668,
6.84797506, 5.06642155, 5.06642155, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.06642155,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
5.9571983 , 6.84797506, 5.06642155, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
3.73025641, 6.40258668, 5.51180993, 6.84797506, 5.51180993,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
5.06642155, 6.84797506, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 6.84797506,
6.40258668, 5.06642155, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.51180993, 5.06642155,
6.84797506, 6.84797506, 6.84797506, 4.62103317, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 4.17564479, 6.84797506,
5.51180993, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
3.73025641, 6.84797506, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 5.06642155, 6.84797506, 6.84797506, 4.62103317,
6.40258668, 6.84797506, 5.51180993, 6.84797506, 6.84797506,
5.9571983 , 6.84797506, 6.84797506, 6.84797506, 5.9571983 ,
5.06642155, 6.84797506, 5.06642155, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 4.17564479, 5.51180993, 6.84797506, 6.84797506,
6.40258668, 4.62103317, 6.84797506, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 4.17564479, 6.40258668, 6.84797506,
6.84797506, 5.9571983 , 6.84797506, 5.51180993, 6.84797506,
5.9571983 , 5.06642155, 6.84797506, 6.84797506, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 5.9571983 ,
5.51180993, 6.84797506, 5.9571983 , 6.40258668, 4.62103317,
6.84797506, 5.06642155, 4.17564479, 5.51180993, 6.84797506,
6.40258668, 5.9571983 , 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.40258668, 5.9571983 , 6.40258668, 6.84797506,
5.9571983 , 6.84797506, 5.9571983 , 5.51180993, 4.17564479,
5.9571983 , 6.40258668, 6.84797506, 5.51180993, 6.40258668,
5.51180993, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 5.9571983 , 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.06642155, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.06642155,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.51180993, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 3.73025641,
4.17564479, 6.84797506, 5.06642155, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 4.17564479,
5.51180993, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 5.9571983 , 6.84797506, 6.84797506, 6.84797506,
4.62103317, 6.84797506, 5.06642155, 5.06642155, 6.84797506,
6.40258668, 5.9571983 , 6.84797506, 4.62103317, 6.84797506,
6.84797506, 6.84797506, 5.9571983 , 6.84797506, 6.84797506,
5.9571983 , 5.51180993, 6.84797506, 5.06642155, 6.84797506,
4.62103317, 5.9571983 , 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 4.62103317, 6.84797506,
6.84797506, 6.84797506, 5.51180993, 5.51180993, 6.84797506,
6.84797506, 6.84797506, 5.9571983 , 6.40258668, 6.84797506,
6.84797506, 5.51180993, 6.84797506, 6.84797506, 5.9571983 ,
5.51180993, 6.84797506, 6.84797506, 6.84797506, 4.17564479,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.9571983 , 6.84797506, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 4.62103317, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 3.73025641,
6.84797506, 6.84797506, 6.84797506, 5.9571983 , 4.62103317,
5.51180993, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 5.9571983 , 6.40258668, 6.84797506,
5.51180993, 5.9571983 , 5.9571983 , 6.84797506, 6.84797506,
5.51180993, 6.84797506, 6.84797506, 5.51180993, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.51180993,
6.40258668, 5.51180993, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 5.51180993, 6.84797506, 5.9571983 , 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.06642155, 6.84797506,
6.84797506, 6.84797506, 6.40258668, 5.06642155, 5.51180993,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 6.84797506, 5.9571983 , 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 6.84797506,
5.06642155, 4.17564479, 6.84797506, 6.84797506, 5.06642155,
6.84797506, 6.84797506, 6.40258668, 6.84797506, 5.51180993,
6.84797506, 6.84797506, 6.84797506, 4.17564479, 5.9571983 ,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.9571983 ,
5.51180993, 6.84797506, 6.40258668, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 5.9571983 ,
5.9571983 , 6.84797506, 5.9571983 , 4.17564479, 6.40258668,
6.84797506, 6.84797506, 6.84797506, 6.40258668, 5.9571983 ,
5.51180993, 6.84797506, 5.51180993, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.51180993, 5.06642155,
6.84797506, 6.40258668, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 6.84797506, 6.84797506, 5.51180993, 6.40258668,
6.84797506, 6.84797506, 3.28486804, 5.9571983 , 6.84797506,
3.73025641, 6.84797506, 6.84797506, 6.84797506, 4.17564479,
6.84797506, 6.40258668, 6.40258668, 6.84797506, 5.51180993,
6.84797506, 6.84797506, 6.84797506, 6.84797506, 6.84797506,
6.84797506, 4.62103317, 6.40258668, 6.84797506, 6.40258668,
5.06642155, 6.84797506, 6.84797506, 5.51180993, 4.62103317,
6.84797506, 6.40258668, 6.84797506, 5.06642155, 5.9571983 ,
6.40258668, 5.51180993, 6.84797506, 6.84797506, 6.84797506,
6.40258668, 6.84797506, 6.84797506, 4.17564479, 6.84797506,
5.06642155, 6.84797506, 3.56230611, 4.89847125, 5.34385962,
4.45308287, 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.34385962, 5.34385962, 5.789248 , 5.789248 ,
4.00769449, 5.34385962, 4.45308287, 5.789248 , 5.34385962,
3.56230611, 2.67152936, 5.789248 , 5.34385962, 5.789248 ,
2.67152936, 5.789248 , 5.34385962, 3.56230611, 4.89847125,
5.789248 , 3.11691773, 5.789248 , 5.789248 , 4.89847125,
5.789248 , 3.56230611, 3.56230611, 5.789248 , 5.789248 ,
5.789248 , 4.89847125, 5.789248 , 4.89847125, 4.00769449,
5.789248 , 3.56230611, 5.789248 , 2.22614098, 3.11691773,
5.789248 , 5.789248 , 4.00769449, 3.11691773, 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
2.67152936, 5.789248 , 5.789248 , 4.00769449, 3.56230611,
4.45308287, 5.789248 , 5.789248 , 4.89847125, 5.789248 ,
3.56230611, 5.789248 , 4.89847125, 2.67152936, 5.34385962,
4.45308287, 5.789248 , 4.45308287, 5.789248 , 5.789248 ,
4.89847125, 4.45308287, 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 4.89847125, 5.789248 , 5.34385962,
5.34385962, 5.789248 , 5.789248 , 3.56230611, 5.789248 ,
3.56230611, 5.789248 , 4.45308287, 5.789248 , 5.789248 ,
5.34385962, 5.789248 , 3.11691773, 5.789248 , 5.789248 ,
3.11691773, 4.00769449, 5.789248 , 5.789248 , 5.34385962,
3.56230611, 3.11691773, 5.789248 , 4.45308287, 5.789248 ,
5.789248 , 5.789248 , 3.11691773, 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 4.45308287, 5.789248 , 4.00769449,
5.789248 , 4.45308287, 4.45308287, 5.789248 , 4.89847125,
4.00769449, 4.00769449, 4.89847125, 4.00769449, 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.34385962, 3.56230611, 5.789248 , 3.56230611,
5.789248 , 5.789248 , 5.789248 , 2.67152936, 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 4.89847125, 4.89847125,
5.789248 , 2.67152936, 5.789248 , 4.89847125, 5.789248 ,
5.789248 , 4.45308287, 3.11691773, 5.789248 , 4.89847125,
5.789248 , 2.67152936, 2.67152936, 5.34385962, 4.00769449,
5.789248 , 5.789248 , 5.34385962, 5.789248 , 5.789248 ,
4.00769449, 5.789248 , 5.34385962, 4.89847125, 5.789248 ,
2.67152936, 5.34385962, 5.789248 , 5.789248 , 4.45308287,
5.34385962, 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
2.67152936, 5.789248 , 3.56230611, 4.00769449, 5.34385962,
5.789248 , 3.11691773, 2.67152936, 5.789248 , 4.45308287,
5.789248 , 3.56230611, 5.34385962, 4.89847125, 5.789248 ,
3.56230611, 4.00769449, 5.789248 , 3.11691773, 5.789248 ,
5.789248 , 3.56230611, 5.34385962, 4.89847125, 4.89847125,
5.789248 , 5.789248 , 2.67152936, 3.11691773, 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.34385962, 5.34385962,
5.789248 , 5.789248 , 4.89847125, 5.789248 , 4.45308287,
5.34385962, 5.789248 , 4.45308287, 4.45308287, 5.789248 ,
5.789248 , 3.56230611, 4.89847125, 3.56230611, 4.89847125,
4.45308287, 5.789248 , 4.00769449, 5.789248 , 4.89847125,
5.789248 , 5.789248 , 5.789248 , 4.45308287, 4.00769449,
5.789248 , 4.89847125, 4.89847125, 3.56230611, 5.789248 ,
5.789248 , 5.34385962, 3.56230611, 3.11691773, 3.56230611,
4.00769449, 5.789248 , 4.45308287, 4.89847125, 5.789248 ,
5.789248 , 5.789248 , 4.00769449, 4.89847125, 2.67152936,
5.789248 , 5.789248 , 5.789248 , 4.89847125, 5.34385962,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 4.00769449, 5.34385962, 4.89847125,
5.789248 , 4.89847125, 4.00769449, 5.789248 , 5.789248 ,
4.89847125, 5.789248 , 5.34385962, 5.789248 , 2.67152936,
5.789248 , 5.34385962, 4.00769449, 4.00769449, 5.789248 ,
5.34385962, 3.56230611, 5.789248 , 4.89847125, 5.34385962,
5.789248 , 4.00769449, 4.45308287, 5.789248 , 5.34385962,
4.00769449, 3.56230611, 5.34385962, 2.67152936, 5.789248 ,
3.56230611, 4.89847125, 4.45308287, 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 4.00769449, 5.789248 ,
4.45308287, 5.789248 , 5.789248 , 5.34385962, 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.34385962, 5.34385962,
5.789248 , 5.34385962, 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.34385962, 4.45308287, 5.789248 , 5.34385962,
5.789248 , 5.34385962, 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 4.45308287, 5.34385962, 3.56230611, 2.67152936,
5.789248 , 5.789248 , 3.11691773, 5.789248 , 4.45308287,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 4.00769449,
4.00769449, 4.00769449, 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 4.89847125, 3.11691773, 4.45308287, 5.789248 ,
4.00769449, 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 4.00769449, 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 4.00769449, 5.789248 , 4.00769449, 3.56230611,
5.789248 , 4.89847125, 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
4.89847125, 5.34385962, 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.34385962,
3.56230611, 5.34385962, 5.789248 , 3.56230611, 5.789248 ,
4.00769449, 5.789248 , 5.789248 , 5.789248 , 4.00769449,
3.11691773, 5.789248 , 5.789248 , 4.00769449, 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 3.56230611, 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 3.56230611, 5.34385962, 4.00769449,
5.789248 , 4.89847125, 4.89847125, 4.00769449, 5.789248 ,
5.789248 , 4.45308287, 2.67152936, 5.789248 , 5.789248 ,
4.00769449, 5.789248 , 3.56230611, 4.00769449, 5.789248 ,
5.789248 , 4.89847125, 5.789248 , 4.45308287, 5.34385962,
5.34385962, 3.11691773, 3.56230611, 5.789248 , 4.45308287,
5.789248 , 4.89847125, 4.00769449, 4.89847125, 4.89847125,
5.789248 , 5.789248 , 5.34385962, 4.00769449, 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 3.56230611, 4.45308287, 4.00769449, 4.89847125,
4.45308287, 3.56230611, 4.00769449, 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 3.11691773, 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 4.00769449, 4.89847125,
5.34385962, 3.56230611, 3.11691773, 5.789248 , 4.00769449,
5.789248 , 3.56230611, 5.789248 , 5.789248 , 4.00769449,
5.789248 , 4.00769449, 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 4.89847125, 4.00769449, 4.89847125, 5.34385962,
2.67152936, 5.789248 , 4.45308287, 5.789248 , 4.89847125,
5.789248 , 5.34385962, 5.789248 , 5.789248 , 5.789248 ,
3.56230611, 2.67152936, 5.789248 , 5.789248 , 5.789248 ,
4.00769449, 4.89847125, 5.789248 , 5.34385962, 4.89847125,
5.34385962, 5.789248 , 5.789248 , 5.34385962, 5.789248 ,
5.789248 , 5.789248 , 2.67152936, 5.34385962, 5.789248 ,
5.789248 , 4.89847125, 4.89847125, 5.34385962, 5.789248 ,
5.789248 , 4.45308287, 3.11691773, 3.56230611, 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.34385962,
5.789248 , 5.789248 , 4.00769449, 4.89847125, 5.789248 ,
3.56230611, 5.789248 , 5.34385962, 2.67152936, 5.789248 ,
5.34385962, 5.789248 , 5.789248 , 5.789248 , 5.34385962,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 4.45308287, 5.789248 , 3.11691773, 5.789248 ,
5.34385962, 4.89847125, 5.34385962, 5.789248 , 4.89847125,
5.789248 , 4.00769449, 4.45308287, 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.34385962, 5.789248 , 4.00769449,
4.89847125, 4.00769449, 5.789248 , 3.56230611, 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 3.56230611, 5.789248 ,
4.89847125, 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 4.00769449,
2.22614098, 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
4.89847125, 5.789248 , 3.56230611, 5.789248 , 5.789248 ,
4.00769449, 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 3.11691773, 3.11691773, 5.789248 ,
5.789248 , 5.789248 , 4.00769449, 5.789248 , 5.34385962,
4.45308287, 5.34385962, 4.45308287, 4.45308287, 4.89847125,
5.789248 , 4.89847125, 5.789248 , 3.56230611, 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 3.11691773,
5.789248 , 4.00769449, 5.789248 , 4.89847125, 5.789248 ,
4.00769449, 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.34385962, 5.789248 , 4.45308287, 5.34385962,
4.45308287, 5.789248 , 4.00769449, 5.789248 , 3.56230611,
5.34385962, 5.789248 , 5.789248 , 4.45308287, 4.00769449,
3.56230611, 5.789248 , 5.789248 , 5.789248 , 4.45308287,
5.789248 , 5.789248 , 5.34385962, 4.89847125, 4.45308287,
3.11691773, 5.789248 , 3.56230611, 3.11691773, 5.789248 ,
5.789248 , 3.11691773, 3.11691773, 5.789248 , 4.45308287,
4.45308287, 5.789248 , 5.789248 , 4.00769449, 4.00769449,
3.56230611, 5.789248 , 4.00769449, 3.56230611, 5.789248 ,
4.00769449, 5.34385962, 5.789248 , 3.56230611, 5.789248 ,
5.34385962, 5.789248 , 4.45308287, 4.00769449, 5.789248 ,
5.789248 , 4.45308287, 5.34385962, 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.34385962,
3.11691773, 5.34385962, 2.67152936, 4.00769449, 5.789248 ,
3.56230611, 5.789248 , 5.34385962, 5.789248 , 2.67152936,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 3.11691773,
5.789248 , 5.34385962, 3.56230611, 4.45308287, 4.89847125,
4.00769449, 5.789248 , 5.789248 , 5.34385962, 4.00769449,
4.89847125, 4.45308287, 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 4.00769449, 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.34385962,
3.56230611, 5.789248 , 5.789248 , 5.34385962, 5.789248 ,
3.11691773, 5.789248 , 4.89847125, 5.789248 , 4.89847125,
5.789248 , 5.34385962, 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.34385962, 2.67152936, 5.789248 , 5.789248 ,
2.67152936, 3.56230611, 5.789248 , 5.789248 , 2.67152936,
4.45308287, 3.56230611, 4.45308287, 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 4.45308287,
4.89847125, 5.34385962, 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.34385962, 4.00769449, 5.789248 , 5.34385962,
5.789248 , 2.67152936, 2.67152936, 5.789248 , 3.56230611,
5.789248 , 3.56230611, 5.789248 , 4.45308287, 2.67152936,
5.789248 , 5.789248 , 2.67152936, 4.89847125, 5.789248 ,
5.789248 , 3.11691773, 5.789248 , 4.00769449, 5.789248 ,
5.789248 , 3.11691773, 4.00769449, 4.89847125, 4.89847125,
5.789248 , 4.00769449, 4.45308287, 5.789248 , 4.45308287,
5.789248 , 3.11691773, 4.45308287, 4.89847125, 3.56230611,
5.789248 , 5.789248 , 3.56230611, 3.56230611, 3.56230611,
5.789248 , 5.789248 , 4.00769449, 4.00769449, 3.11691773,
5.789248 , 5.789248 , 2.67152936, 4.00769449, 5.789248 ,
2.67152936, 3.56230611, 3.56230611, 4.45308287, 5.789248 ,
3.56230611, 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.34385962, 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 4.89847125, 5.789248 , 5.789248 , 4.89847125,
5.789248 , 5.789248 , 4.89847125, 4.89847125, 5.789248 ,
5.34385962, 4.45308287, 5.789248 , 4.89847125, 4.00769449,
4.45308287, 5.789248 , 2.67152936, 4.45308287, 5.34385962,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 3.11691773,
5.34385962, 5.789248 , 4.89847125, 5.789248 , 4.45308287,
5.789248 , 5.789248 , 4.89847125, 4.45308287, 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 4.45308287,
4.45308287, 3.11691773, 4.89847125, 5.789248 , 3.11691773,
3.11691773, 5.789248 , 4.00769449, 5.34385962, 3.11691773,
4.89847125, 3.11691773, 3.56230611, 4.89847125, 5.789248 ,
5.789248 , 4.45308287, 4.89847125, 5.789248 , 4.45308287,
5.789248 , 4.89847125, 4.45308287, 2.67152936, 5.789248 ,
5.789248 , 5.789248 , 4.89847125, 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 4.89847125, 5.34385962, 4.45308287,
5.789248 , 4.00769449, 3.56230611, 4.89847125, 5.789248 ,
4.45308287, 5.789248 , 5.789248 , 3.11691773, 5.789248 ,
5.34385962, 5.789248 , 5.789248 , 5.789248 , 3.56230611,
2.22614098, 5.789248 , 5.789248 , 3.56230611, 5.34385962,
4.00769449, 5.789248 , 2.67152936, 5.789248 , 4.00769449,
5.789248 , 5.789248 , 4.00769449, 2.67152936, 5.789248 ,
4.89847125, 4.45308287, 5.789248 , 5.34385962, 5.789248 ,
5.789248 , 4.45308287, 5.789248 , 5.789248 , 5.789248 ,
4.00769449, 4.89847125, 5.789248 , 4.89847125, 5.789248 ,
4.89847125, 3.56230611, 4.00769449, 5.789248 , 5.789248 ,
5.789248 , 5.34385962, 5.789248 , 5.789248 , 5.34385962,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
3.56230611, 3.56230611, 4.00769449, 5.789248 , 4.45308287,
3.56230611, 5.789248 , 4.89847125, 4.89847125, 3.11691773,
4.00769449, 5.789248 , 5.34385962, 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 4.45308287, 5.34385962, 4.45308287,
5.789248 , 3.11691773, 5.789248 , 4.45308287, 4.45308287,
4.89847125, 5.789248 , 4.89847125, 4.45308287, 5.789248 ,
5.34385962, 3.56230611, 3.56230611, 5.34385962, 5.789248 ,
4.00769449, 5.34385962, 4.45308287, 4.45308287, 3.56230611,
5.789248 , 4.00769449, 5.789248 , 5.789248 , 4.89847125,
5.34385962, 3.11691773, 4.00769449, 5.789248 , 5.34385962,
5.789248 , 5.789248 , 5.34385962, 5.789248 , 4.45308287,
5.34385962, 4.89847125, 5.789248 , 5.789248 , 5.34385962,
3.56230611, 5.789248 , 5.789248 , 5.789248 , 5.34385962,
5.34385962, 5.789248 , 3.11691773, 5.789248 , 5.789248 ,
3.56230611, 5.789248 , 5.789248 , 5.789248 , 4.45308287,
5.789248 , 4.89847125, 5.789248 , 5.789248 , 5.789248 ,
4.00769449, 4.45308287, 5.34385962, 5.789248 , 3.56230611,
4.00769449, 5.34385962, 4.45308287, 5.789248 , 5.34385962,
5.34385962, 2.67152936, 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 5.34385962, 5.789248 , 4.89847125,
5.789248 , 5.789248 , 5.789248 , 4.45308287, 5.789248 ,
3.11691773, 4.00769449, 5.789248 , 5.789248 , 5.34385962,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 5.789248 , 2.22614098, 5.789248 , 4.00769449,
5.34385962, 4.89847125, 5.789248 , 5.789248 , 4.00769449,
5.789248 , 3.56230611, 2.67152936, 5.789248 , 3.56230611,
2.67152936, 4.89847125, 5.789248 , 5.789248 , 5.789248 ,
4.45308287, 5.789248 , 4.89847125, 4.00769449, 2.67152936,
4.89847125, 5.789248 , 2.22614098, 3.56230611, 4.45308287,
5.34385962, 5.34385962, 3.11691773, 4.45308287, 4.45308287,
3.11691773, 4.45308287, 5.34385962, 4.45308287, 5.34385962,
4.00769449, 4.45308287, 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 4.00769449, 5.789248 , 5.789248 , 5.789248 ,
4.45308287, 5.789248 , 5.789248 , 5.789248 , 4.00769449,
5.34385962, 4.00769449, 4.45308287, 4.89847125, 4.00769449,
5.789248 , 5.789248 , 5.789248 , 2.67152936, 5.789248 ,
5.789248 , 5.789248 , 5.789248 , 5.789248 , 5.789248 ,
5.789248 , 4.00769449, 3.11691773, 5.34385962, 4.89847125,
5.34385962])
class Committee(object):
    """Expected GLM results for the committee dataset.

    Hard-coded reference values (generated with R and Stata) that the
    statsmodels GLM test suite compares its own fit against.
    """

    def __init__(self):
        # Residuals, one row per observation; each row holds five residual
        # variants -- presumably (response, pearson, working, anscombe,
        # deviance), TODO confirm column order against the consuming test.
        self.resids = np.array([[ -5.04950800e-01, -6.29721800e-01,
            -8.35499100e+01,
            -1.30628500e+00, -6.62028600e+00],
            [ -2.34152200e-01, -2.55423500e-01, -2.16830700e+02,
            -7.58866000e-01, -7.18370200e+00],
            [ 1.02423700e+00, 7.98775800e-01, 4.83736300e+02,
            2.50351500e+00, 2.25135300e+01],
            [ -2.85061700e-01, -3.17796600e-01, -7.04115100e+04,
            -2.37991800e+00, -1.41745600e+02],
            [ 2.09902500e-01, 1.96787700e-01, 2.24751400e+03,
            9.51945500e-01, 2.17724200e+01],
            [ -4.03483500e-01, -4.75741500e-01, -1.95633600e+04,
            -2.63502600e+00, -8.89461400e+01],
            [ -1.64413400e-01, -1.74401100e-01, -1.73310300e+04,
            -1.16235500e+00, -5.34213500e+01],
            [ -4.29607700e-01, -5.13466700e-01, -5.30037000e+03,
            -2.24496200e+00, -4.78260300e+01],
            [ 3.23713000e-01, 2.94184600e-01, 4.11079400e+03,
            1.48684400e+00, 3.65598400e+01],
            [ 1.50367200e-01, 1.43429400e-01, 7.28532100e+03,
            8.85542900e-01, 3.31355000e+01],
            [ 4.21288600e-01, 3.73428000e-01, 1.37315700e+03,
            1.52133200e+00, 2.41570200e+01],
            [ 4.50658700e-01, 3.96586700e-01, 1.70146900e+03,
            1.66177900e+00, 2.78032600e+01],
            [ 2.43537500e-01, 2.26174000e-01, 3.18402300e+03,
            1.13656200e+00, 2.79073400e+01],
            [ 1.05182900e+00, 8.16205400e-01, 6.00135200e+03,
            3.89079700e+00, 7.97131300e+01],
            [ -5.54450300e-01, -7.12749000e-01, -2.09485200e+03,
            -2.45496500e+00, -3.42189900e+01],
            [ -6.05750600e-01, -8.06411100e-01, -2.74738200e+02,
            -1.90774400e+00, -1.30510500e+01],
            [ -3.41215700e-01, -3.90244600e-01, -6.31138000e+02,
            -1.27022900e+00, -1.47600100e+01],
            [ 2.21898500e-01, 2.07328700e-01, 6.91135800e+02,
            8.16876400e-01, 1.24392900e+01],
            [ 2.45592500e-01, 2.26639200e-01, 1.99250600e-01,
            2.57948300e-01, 2.74723700e-01],
            [ -7.58952600e-01, -1.15300800e+00, -2.56739000e+02,
            -2.40716600e+00, -1.41474200e+01]])
        self.null_deviance = 27.81104693643434  # from R, Rpy bug
        # Fitted coefficients.
        self.params = np.array([-0.0268147 , 1.25103364, 2.91070663,
            -0.34799563, 0.00659808, -0.31303026, -6.44847076])
        # Standard errors of the coefficients.
        self.bse = np.array([ 1.99956263e-02, 4.76820254e-01,
            6.48362654e-01, 4.17956107e-01, 1.41512690e-03, 1.07770186e-01,
            1.99557656e+00])
        # Information criteria differ between R and Stata conventions, so
        # both reference values are kept.
        self.aic_R = 216.66573352377935
        self.aic_Stata = 10.83328660860436
        self.deviance = 5.615520158267981
        self.scale = 0.38528595746569905
        self.llf = -101.33286676188968  # from R
        self.llf_Stata = -101.3328660860436  # same as R
        self.bic_Stata = -33.32900074962649
        self.chi2 = 5.008550263545408
        self.df_model = 6
        self.df_resid = 13
        self.fittedvalues = np.array([12.62019383, 30.18289514, 21.48377849,
            496.74068604,
            103.23024673, 219.94693494, 324.4301163 , 110.82526477,
            112.44244488, 219.86056381, 56.84399998, 61.19840382,
            114.09290269, 75.29071944, 61.21994387, 21.05130889,
            42.75939828, 55.56133536, 0.72532053, 18.14664665])
class Wfs(object):
    """
    Wfs used for TestGlmPoissonOffset

    Expected results for a Poisson GLM with offset; reference values are
    from Stata and R.
    """
    def __init__(self):
        # The residual array is large, so it lives in the shared
        # glm_test_resids module rather than inline here.
        self.resids = glm_test_resids.wfs_resids
        self.null_deviance = 3731.85161919  # from R
        # Fitted coefficients.
        self.params = [.9969348, 1.3693953, 1.6137574, 1.7849111, 1.9764051,
            .11241858, .15166023, .02297282, -.10127377, -.31014953,
            -.11709716]
        # Standard errors of the coefficients.
        self.bse = [.0527437, .0510688, .0511949, .0512138, .0500341,
            .0324963, .0283292, .0226563, .0309871, .0552107, .0549118]
        self.aic_R = 522.14215776  # R adds 2 for dof to AIC
        self.aic_Stata = 7.459173652869477  # stata divides by nobs
        # self.deviance = 70.6652992116034 # from Stata
        self.deviance = 70.665301270867  # from R
        self.scale = 1.0
        self.llf = -250.0710778504317  # from Stata, ours with scale=1
        self.bic_Stata = -179.9959200693088  # no bic in R?
        self.df_model = 10
        self.df_resid = 59
        self.chi2 = 2699.138063147485  # TODO: taken from Stata not available
        # in sm yet
        self.fittedvalues = [7.11599,19.11356,33.76075,33.26743,11.94399,
            27.49849,35.07923,37.22563,64.18037,108.0408,100.0948,35.67896,
            24.10508,73.99577,52.2802,38.88975,35.06507,102.1198,107.251,
            41.53885,196.3685,335.8434,205.3413,43.20131,41.98048,96.65113,
            63.2286,30.78585,70.46306,172.2402,102.5898,43.06099,358.273,
            549.8983,183.958,26.87062,62.53445,141.687,52.47494,13.10253,
            114.9587,214.803,90.33611,18.32685,592.5995,457.4376,140.9273,
            3.812064,111.3119,97.62744,57.48056,19.43552,130.4872,
            151.7268,69.67963,13.04879,721.728,429.2136,128.2132,9.04735,
            301.7067,177.3487,46.40818,4.707507,330.4211,330.7497,84.38604,
            1456.757,451.005,67.51025]
|
wesm/statsmodels
|
scikits/statsmodels/genmod/tests/results/results_glm.py
|
Python
|
bsd-3-clause
| 270,087
|
[
"Gaussian"
] |
5d9f8563256ef024c9e0dc0f6409915905d62e5d85010db4e302abe5f5ceb526
|
# ============================================================================
#
# Copyright (C) 2007-2010 Conceptive Engineering bvba. All rights reserved.
# www.conceptive.be / project-camelot@conceptive.be
#
# This file is part of the Camelot Library.
#
# This file may be used under the terms of the GNU General Public
# License version 2.0 as published by the Free Software Foundation
# and appearing in the file license.txt included in the packaging of
# this file. Please review this information to ensure GNU
# General Public Licensing requirements will be met.
#
# If you are unsure which license is appropriate for your use, please
# visit www.python-camelot.com or contact project-camelot@conceptive.be
#
# This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
# WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
#
# For use of this library in commercial applications, please contact
# project-camelot@conceptive.be
#
# ============================================================================
"""Convenience functions and classes to present views to the user"""
from PyQt4 import QtGui
from PyQt4 import QtCore
from PyQt4.QtCore import Qt
#from camelot.view.art import Pixmap
import logging
logger = logging.getLogger('camelot.view.workspace')
from camelot.core.utils import ugettext as _
from camelot.view.model_thread import gui_function
class DesktopBackground(QtGui.QGraphicsView):
    """Graphics view displayed behind the workspace while no view tabs
    are open."""

    def __init__(self, parent=None):
        super(DesktopBackground, self).__init__(parent)
        # The background is static, so scroll bars are never useful.
        for disable_scrollbar in (self.setHorizontalScrollBarPolicy,
                                  self.setVerticalScrollBarPolicy):
            disable_scrollbar(Qt.ScrollBarAlwaysOff)
class DesktopTabbar(QtGui.QTabBar):
    # Emitted when the user double clicks the tab bar; the workspace uses
    # it to toggle between maximized and normal view mode.
    change_view_mode_signal = QtCore.pyqtSignal()
    def mouseDoubleClickEvent(self, event):
        # Request the view-mode toggle and mark the event as handled so it
        # does not propagate to the parent widget.
        self.change_view_mode_signal.emit()
        event.accept()
class DesktopWorkspace(QtGui.QWidget):
    """A tab based workspace that can be used by views
    to display themselves. In essence this is a wrapper around QTabWidget to
    do some initial setup and provide it with a background widget. This was
    implemented first using the QMdiArea, but the QMdiArea has too many
    drawbacks, like not being able to add close buttons to the tabs in
    a decent way.

    .. attribute:: background

    The widget class to be used as a background for when there are
    no open tabs on the desktop.
    """

    background = DesktopBackground

    # Emitted with the view widget whenever the active tab changes.
    view_activated_signal = QtCore.pyqtSignal(QtGui.QWidget)
    # Emitted when the user double clicks the tab bar.
    change_view_mode_signal = QtCore.pyqtSignal()
    # Emitted when the last open view has been closed.
    last_view_closed_signal = QtCore.pyqtSignal()

    @gui_function
    def __init__(self, parent):
        super(DesktopWorkspace, self).__init__(parent)
        layout = QtGui.QHBoxLayout()
        layout.setMargin(0)
        layout.setSpacing(0)
        # Setup the tab widget; it stays hidden until the first view is
        # added, at which point it replaces the background widget.
        self._tab_widget = QtGui.QTabWidget(self)
        tab_bar = DesktopTabbar(self._tab_widget)
        tab_bar.setToolTip(_('Double click to (un)maximize'))
        tab_bar.change_view_mode_signal.connect(self._change_view_mode)
        self._tab_widget.setTabBar(tab_bar)
        self._tab_widget.setDocumentMode(True)
        self._tab_widget.setMovable(True)
        self._tab_widget.setTabsClosable(True)
        self._tab_widget.hide()
        self._tab_widget.tabCloseRequested.connect(self._tab_close_request)
        self._tab_widget.currentChanged.connect(self._tab_changed)
        layout.addWidget(self._tab_widget)
        # Setup the background widget, shown while there are no open tabs
        self._background_widget = self.background(self)
        self._background_widget.show()
        layout.addWidget(self._background_widget)
        self.setLayout(layout)

    @QtCore.pyqtSlot()
    def _change_view_mode(self):
        """Forward the tab bar's double-click to interested listeners."""
        self.change_view_mode_signal.emit()

    @QtCore.pyqtSlot(int)
    def _tab_close_request(self, index):
        """request the removal of the tab at index"""
        self._tab_widget.removeTab(index)
        if self._tab_widget.currentIndex() < 0:
            # No tabs left: fall back to the background widget.
            self._tab_widget.hide()
            self._background_widget.show()
            self.last_view_closed_signal.emit()

    @QtCore.pyqtSlot(int)
    def _tab_changed(self, _index):
        """the active tab has changed, emit the view_activated signal"""
        self.view_activated_signal.emit(self.active_view())

    def active_view(self):
        """:return: the currently active view or None"""
        i = self._tab_widget.currentIndex()
        if i < 0:
            return None
        return self._tab_widget.widget(i)

    @QtCore.pyqtSlot( QtCore.QString )
    def change_title(self, new_title):
        """slot to be called when the title of a view needs to
        change

        :param new_title: the new tab text for the active view
        """
        # the request of the sender does not work in older pyqt versions
        # therefore, take the current index, notice this is not correct !!
        #
        # sender = self.sender()
        sender = self.active_view()
        if sender:
            index = self._tab_widget.indexOf(sender)
            if index >= 0:
                self._tab_widget.setTabText(index, new_title)

    def set_view(self, view, title='...'):
        """Remove the currently active view and replace it with a new
        view

        :param view: a widget implementing AbstractView
        :param title: the tab text for the new view
        """
        index = self._tab_widget.currentIndex()
        if index < 0:
            self.add_view(view, title)
        else:
            view.title_changed_signal.connect(self.change_title)
            self._tab_widget.removeTab(index)
            index = self._tab_widget.insertTab(index, view, title)
            self._tab_widget.setCurrentIndex(index)

    @gui_function
    def add_view(self, view, title='...'):
        """add a Widget implementing AbstractView to the workspace

        :param view: the view widget to add as a new tab
        :param title: the tab text for the new view
        """
        view.title_changed_signal.connect(self.change_title)
        index = self._tab_widget.addTab(view, title)
        self._tab_widget.setCurrentIndex(index)
        self._tab_widget.show()
        self._background_widget.hide()

    def close_all_views(self):
        """Remove all views from the workspace"""
        # Closing goes through tabCloseRequested (the same signal the close
        # buttons use) instead of clear(), so each view gets the normal
        # close handling.
        #
        # BUGFIX: the previous loop emitted indices count() .. 1, which is
        # off by one -- index count() does not exist and index 0 was never
        # requested, leaving one tab open. Emit count()-1 down to 0 so that
        # removal-induced index shifting cannot skip a tab.
        for index in reversed(range(self._tab_widget.count())):
            self._tab_widget.tabCloseRequested.emit(index)
def show_top_level(view, parent):
    """Show a widget as a top level window

    :param view: the widget extend AbstractView
    :param parent: the widget with regard to which the top level
    window will be placed.
    """
    view.setParent(parent)
    view.setWindowFlags(QtCore.Qt.Window)
    # A blank title prevents something like main.py or pythonw from
    # showing up in the title bar before the view sets its own title.
    view.setWindowTitle(u'')
    view.title_changed_signal.connect(view.setWindowTitle)
    view.setAttribute(QtCore.Qt.WA_DeleteOnClose)
    # Center the new window on the same screen as the parent widget.
    desktop = QtGui.QApplication.desktop()
    available = desktop.availableGeometry(desktop.screenNumber(parent))
    screen_center = QtCore.QPoint(available.x() + available.width()/2,
                                  available.y() + available.height()/2)
    top_left = QtCore.QPoint(screen_center.x() - view.width()/2,
                             screen_center.y() - view.height()/2)
    view.move(top_left)
    #view.setWindowModality(QtCore.Qt.WindowModal)
    view.show()
|
kurtraschke/camelot
|
camelot/view/workspace.py
|
Python
|
gpl-2.0
| 7,780
|
[
"VisIt"
] |
293d41dce708dbf6915941c14344969d26e56b705a4f9d844a0589d4502d14cd
|
""" PilotCommand
The PilotCommand class is a command class to know about present pilots
efficiency.
"""
from DIRAC import S_OK, S_ERROR
from DIRAC.ConfigurationSystem.Client.Helpers.Resources import getSites, getCESiteMapping
from DIRAC.ResourceStatusSystem.Command.Command import Command
from DIRAC.ResourceStatusSystem.Client.ResourceManagementClient import ResourceManagementClient
from DIRAC.WorkloadManagementSystem.Client.PilotManagerClient import PilotManagerClient
class PilotCommand(Command):
    """
    Pilot "master" Command.

    Collects grouped pilot efficiency summaries from the WMS and stores
    them in the ResourceStatus pilot cache.
    """

    def __init__(self, args=None, clients=None):
        super(PilotCommand, self).__init__(args, clients)

        # Clients may be injected through self.apis (e.g. for testing),
        # otherwise the default clients are instantiated.
        if "Pilots" in self.apis:
            self.pilots = self.apis["Pilots"]
        else:
            self.pilots = PilotManagerClient()

        if "ResourceManagementClient" in self.apis:
            self.rmClient = self.apis["ResourceManagementClient"]
        else:
            self.rmClient = ResourceManagementClient()

    def _storeCommand(self, result):
        """
        Stores the results of doNew method on the database.

        :param result: list of pilot summary dictionaries as built by doNew
        :return: S_OK() or the first failing S_ERROR from the database
        """
        for pilotDict in result:
            resQuery = self.rmClient.addOrModifyPilotCache(
                site=pilotDict["Site"],
                cE=pilotDict["CE"],
                vO=pilotDict.get("OwnerGroup", None),
                pilotsPerJob=pilotDict["PilotsPerJob"],
                pilotJobEff=pilotDict["PilotJobEff"],
                status=pilotDict["Status"],
            )
            if not resQuery["OK"]:
                return resQuery
        return S_OK()

    def _prepareCommand(self):
        """
        Validates self.args and extracts the parameters of the command.

        Requires three arguments:
          - element : 'Site' or 'Resource'
          - name    : <str>
          - vO      : virtual organisation

        :return: S_OK((element, name, vo)) or S_ERROR
        """
        self.log.debug("_prepareCommand: args:", self.args)

        if "name" not in self.args:
            return S_ERROR('"name" not found in self.args')
        name = self.args["name"]

        if "element" not in self.args:
            return S_ERROR("element is missing")
        element = self.args["element"]

        if "vO" not in self.args:
            return S_ERROR('_prepareCommand: "vO" not found in self.args')
        vo = self.args["vO"]

        if element not in ["Site", "Resource"]:
            return S_ERROR('"%s" is not Site nor Resource' % element)

        return S_OK((element, name, vo))

    def doNew(self, masterParams=None):
        """
        Fetches the grouped pilot summary for the given element, stores the
        per-CE/site efficiencies in the pilot cache and returns them.

        :param masterParams: optional (element, name) tuple as passed by
            doMaster; when None the parameters are taken from self.args
        :return: S_OK(list of pilot dictionaries) or S_ERROR
        """
        self.log.debug("PilotCommand doNew")

        if masterParams is not None:
            # doMaster passes plain (element, names) tuples.
            element, name = masterParams
        else:
            params = self._prepareCommand()
            if not params["OK"]:
                return params
            # BUGFIX: _prepareCommand returns a 3-tuple (element, name, vo);
            # the previous 2-name unpacking raised ValueError at runtime.
            element, name, _vo = params["Value"]

        if element == "Resource":
            pilotsResultPivot = self.pilots.getGroupedPilotSummary({}, ["GridSite", "DestinationSite", "OwnerGroup"])
        elif element == "Site":
            pilotsResultPivot = self.pilots.getGroupedPilotSummary({}, ["GridSite", "OwnerGroup"])
        else:
            # You should never see this error
            return S_ERROR('"%s" is not Site nor Resource' % element)

        if not pilotsResultPivot["OK"]:
            return pilotsResultPivot

        pilotsResults = pilotsResultPivot["Value"]
        if "ParameterNames" not in pilotsResults:
            return S_ERROR('Wrong result dictionary, missing "ParameterNames"')
        params = pilotsResults["ParameterNames"]

        if "Records" not in pilotsResults:
            return S_ERROR('Wrong formed result dictionary, missing "Records"')
        records = pilotsResults["Records"]

        uniformResult = []
        for record in records:
            # This returns a dictionary with the following keys:
            # 'Site', 'CE', 'Submitted', 'Ready', 'Scheduled', 'Waiting', 'Running',
            # 'Done', 'Aborted', 'Done_Empty', 'Aborted_Hour', 'Total', 'PilotsPerJob',
            # 'PilotJobEff', 'Status', 'InMask'
            pilotDict = dict(zip(params, record))
            pilotDict["PilotsPerJob"] = float(pilotDict["PilotsPerJob"])
            pilotDict["PilotJobEff"] = float(pilotDict["PilotJobEff"])
            uniformResult.append(pilotDict)

        storeRes = self._storeCommand(uniformResult)
        if not storeRes["OK"]:
            return storeRes
        return S_OK(uniformResult)

    def doCache(self):
        """
        Reads the cached pilot information for the element in self.args.

        :return: S_OK(list of row dictionaries) or S_ERROR
        """
        self.log.debug("PilotCommand doCache")

        params = self._prepareCommand()
        if not params["OK"]:
            return params
        element, name, vo = params["Value"]

        if element == "Site":
            # WMS returns Site entries with CE = 'Multiple'
            site, ce = name, "Multiple"
        elif element == "Resource":
            site, ce = None, name
        else:
            # You should never see this error
            return S_ERROR('"%s" is not Site nor Resource' % element)

        result = self.rmClient.selectPilotCache(site=site, cE=ce)
        if result["OK"]:
            result = S_OK([dict(zip(result["Columns"], res)) for res in result["Value"]])
        self.log.debug("PilotCommand doCache result: ", result)
        return result

    def doMaster(self):
        """
        Refreshes the pilot cache for all known sites and CEs.

        :return: S_OK(self.metrics); failures are accumulated in
            self.metrics['failed']
        """
        self.log.debug("PilotCommand doMaster")

        siteNames = getSites()
        if not siteNames["OK"]:
            return siteNames
        siteNames = siteNames["Value"]

        res = getCESiteMapping()
        if not res["OK"]:
            return res
        ces = list(res["Value"])

        pilotResults = self.doNew(("Site", siteNames))
        if not pilotResults["OK"]:
            self.metrics["failed"].append(pilotResults["Message"])

        pilotResults = self.doNew(("Resource", ces))
        if not pilotResults["OK"]:
            self.metrics["failed"].append(pilotResults["Message"])

        return S_OK(self.metrics)
|
DIRACGrid/DIRAC
|
src/DIRAC/ResourceStatusSystem/Command/PilotCommand.py
|
Python
|
gpl-3.0
| 6,080
|
[
"DIRAC"
] |
2daa34881108ab3ce5dba91346ab879972f6cd76ebf93e6117ee555940f79245
|
#!/usr/bin/python
import os
import json
import urllib
import subprocess
import sys
url_base = "http://admin.ci.centos.org:8080"
api_key = os.environ['API_KEY']
count = os.environ['MACHINE_COUNT'] if os.environ.get('MACHINE_COUNT') != None else "1"
ver = "7"
arch = "x86_64"
req_url = "%s/Node/get?key=%s&ver=%s&arch=%s&count=%s" % (url_base, api_key, ver, arch, count)
jsondata = urllib.urlopen(req_url).read()
data = json.loads(jsondata)
# Setup some variables. Can be passed as env variables via the job config. Otherwise defaults apply
repo_url = os.environ['REPO_URL'] if os.environ.get('REPO_URL') != None else 'https://github.com/projectatomic/vagrant-service-manager.git'
branch = os.environ['BRANCH'] if os.environ.get('BRANCH') != None else 'master'
def execute_on_host( host, cmd, error_message ):
# build command to execute install and test commands via ssh
ssh_cmd = "ssh -t -t "
ssh_cmd += "-o UserKnownHostsFile=/dev/null "
ssh_cmd += "-o StrictHostKeyChecking=no "
ssh_cmd += "root@%s " % (host)
cmd = '%s "%s"' % (ssh_cmd, cmd)
print "Executing: %s" % (cmd)
exit_code = subprocess.call(cmd, shell=True)
if exit_code != 0 : sys.exit(error_message)
return
def prepare_pull_request_build(host):
    """Prepare the clone on *host* to build a GitHub pull request by
    checking out the PR branch and pulling it from the author's repo."""
    source_branch = os.environ['ghprbSourceBranch']
    author_repo = os.environ['ghprbAuthorRepoGitUrl']

    checkout_cmd = ('cd vagrant-service-manager && '
                    'git checkout -b %s' % (source_branch))
    execute_on_host(host, checkout_cmd,
                    "Unable to create branch for pull request build")

    merge_cmd = ('cd vagrant-service-manager && '
                 'git pull --no-edit %s %s ' % (author_repo, source_branch))
    execute_on_host(host, merge_cmd, "Unable to pull pull request")
for host in data['hosts']:
# run the Ansible playbook
ansible_cmd = 'yum -y install git epel-release ansible1.9 && '
ansible_cmd += 'yum -y install ansible1.9 && '
ansible_cmd += 'git clone %s && ' % repo_url
ansible_cmd += 'cd vagrant-service-manager && '
ansible_cmd += 'git checkout %s && ' % branch
ansible_cmd += 'cd .ci/ansible && '
ansible_cmd += 'ANSIBLE_NOCOLOR=1 ansible-playbook site.yml'
execute_on_host(host, ansible_cmd, "Ansible playbook failed")
# if we deal with a pull request build we need to prepare the source
if os.environ.get('ghprbPullId') != None:
prepare_pull_request_build(host)
# setup the environment
setup_cmd = 'cd vagrant-service-manager && '
setup_cmd += 'gem install bundler && '
setup_cmd += 'bundle install --no-color'
execute_on_host(host, setup_cmd, "Unable to setup Ruby environment")
# run build and features
build_cmd = 'cd vagrant-service-manager && '
build_cmd += 'bundle exec rake rubocop && '
build_cmd += 'bundle exec rake test && '
build_cmd += 'bundle exec rake features CUCUMBER_OPTS=\'-p ci\' PROVIDER=libvirt BOX=adb,cdk && '
build_cmd += 'bundle exec rake build'
execute_on_host(host, build_cmd, "Tests failures")
done_nodes_url = "%s/Node/done?key=%s&sside=%s" % (url_base, api_key, data['ssid'])
print urllib.urlopen(done_nodes_url)
|
navidshaikh/vagrant-service-manager
|
.ci/jenkins-execute-script.py
|
Python
|
gpl-2.0
| 3,164
|
[
"CDK"
] |
4bd16cf9fce8bb29cb1757a2a8e46650e757b32953b9374f40e046e31e88bc41
|
# -*- coding: utf-8 -*-
"""
End-to-end tests for the main LMS Dashboard (aka, Student Dashboard).
"""
import datetime
from nose.plugins.attrib import attr
from common.test.acceptance.fixtures.course import CourseFixture
from common.test.acceptance.pages.common.auto_auth import AutoAuthPage
from common.test.acceptance.pages.lms.dashboard import DashboardPage
from common.test.acceptance.tests.helpers import UniqueCourseTest, generate_course_key
# Date formats the dashboard is expected to render: a short date
# ("Sep 23, 2015") and a date with a 24-hour HH:MM time suffix.
DEFAULT_SHORT_DATE_FORMAT = '{dt:%b} {dt.day}, {dt.year}'
TEST_DATE_FORMAT = '{dt:%b} {dt.day}, {dt.year} {dt.hour:02}:{dt.minute:02}'
class BaseLmsDashboardTest(UniqueCourseTest):
""" Base test suite for the LMS Student Dashboard """
def setUp(self):
"""
Initializes the components (page objects, courses, users) for this test suite
"""
# Some parameters are provided by the parent setUp() routine, such as the following:
# self.course_id, self.course_info, self.unique_id
super(BaseLmsDashboardTest, self).setUp()
# Load page objects for use by the tests
self.dashboard_page = DashboardPage(self.browser)
# Configure some aspects of the test course and install the settings into the course
self.course_fixture = CourseFixture(
self.course_info["org"],
self.course_info["number"],
self.course_info["run"],
self.course_info["display_name"],
)
# The social-sharing URL is asserted against by the sharing-widget tests.
self.course_fixture.add_advanced_settings({
u"social_sharing_url": {u"value": "http://custom/course/url"}
})
self.course_fixture.install()
# Unique per-run credentials derived from the test's unique id.
self.username = "test_{uuid}".format(uuid=self.unique_id[0:6])
self.email = "{user}@example.com".format(user=self.username)
# Create the test user, register them for the course, and authenticate
AutoAuthPage(
self.browser,
username=self.username,
email=self.email,
course_id=self.course_id
).visit()
# Navigate the authenticated, enrolled user to the dashboard page and get testing!
self.dashboard_page.visit()
class BaseLmsDashboardTestMultiple(UniqueCourseTest):
""" Base test suite for the LMS Student Dashboard with Multiple Courses"""
def setUp(self):
"""
Initializes the components (page objects, courses, users) for this test suite
"""
# Some parameters are provided by the parent setUp() routine, such as the following:
# self.course_id, self.course_info, self.unique_id
super(BaseLmsDashboardTestMultiple, self).setUp()
# Load page objects for use by the tests
self.dashboard_page = DashboardPage(self.browser)
# Configure some aspects of the test course and install the settings into the course
# Three courses sharing one org/number, distinguished only by run name.
self.courses = {
'A': {
'org': 'test_org',
'number': self.unique_id,
'run': 'test_run_A',
'display_name': 'Test Course A'
},
'B': {
'org': 'test_org',
'number': self.unique_id,
'run': 'test_run_B',
'display_name': 'Test Course B'
},
'C': {
'org': 'test_org',
'number': self.unique_id,
'run': 'test_run_C',
'display_name': 'Test Course C'
}
}
self.username = "test_{uuid}".format(uuid=self.unique_id[0:6])
self.email = "{user}@example.com".format(user=self.username)
self.course_keys = {}
self.course_fixtures = {}
# NOTE(review): dict.iteritems() is Python-2-only.
for key, value in self.courses.iteritems():
course_key = generate_course_key(
value['org'],
value['number'],
value['run'],
)
course_fixture = CourseFixture(
value['org'],
value['number'],
value['run'],
value['display_name'],
)
course_fixture.add_advanced_settings({
u"social_sharing_url": {u"value": "http://custom/course/url"}
})
course_fixture.install()
self.course_keys[key] = course_key
self.course_fixtures[key] = course_fixture
# Create the test user, register them for the course, and authenticate
# (re-visiting AutoAuthPage for each course enrolls the same user in all three).
AutoAuthPage(
self.browser,
username=self.username,
email=self.email,
course_id=course_key
).visit()
# Navigate the authenticated, enrolled user to the dashboard page and get testing!
self.dashboard_page.visit()
class LmsDashboardPageTest(BaseLmsDashboardTest):
""" Test suite for the LMS Student Dashboard page """
def setUp(self):
super(LmsDashboardPageTest, self).setUp()
# now datetime for usage in tests
self.now = datetime.datetime.now()
def test_dashboard_course_listings(self):
"""
Perform a general validation of the course listings section
"""
course_listings = self.dashboard_page.get_course_listings()
self.assertEqual(len(course_listings), 1)
def test_dashboard_social_sharing_feature(self):
"""
Validate the behavior of the social sharing feature
"""
# The expected URLs embed the custom social_sharing_url configured in setUp().
twitter_widget = self.dashboard_page.get_course_social_sharing_widget('twitter')
twitter_url = ("https://twitter.com/intent/tweet?text=Testing+feature%3A%20http%3A%2F%2Fcustom%2Fcourse%2Furl"
"%3Futm_campaign%3Dsocial-sharing%26utm_medium%3Dsocial-post%26utm_source%3Dtwitter")
self.assertEqual(twitter_widget.attrs('title')[0], 'Share on Twitter')
self.assertEqual(twitter_widget.attrs('data-tooltip')[0], 'Share on Twitter')
self.assertEqual(twitter_widget.attrs('aria-haspopup')[0], 'true')
self.assertEqual(twitter_widget.attrs('aria-expanded')[0], 'false')
self.assertEqual(twitter_widget.attrs('target')[0], '_blank')
self.assertIn(twitter_url, twitter_widget.attrs('href')[0])
self.assertIn(twitter_url, twitter_widget.attrs('onclick')[0])
facebook_widget = self.dashboard_page.get_course_social_sharing_widget('facebook')
facebook_url = ("https://www.facebook.com/sharer/sharer.php?u=http%3A%2F%2Fcustom%2Fcourse%2Furl%3F"
"utm_campaign%3Dsocial-sharing%26utm_medium%3Dsocial-post%26utm_source%3Dfacebook&"
"quote=I%27m+taking+Test")
self.assertEqual(facebook_widget.attrs('title')[0], 'Share on Facebook')
self.assertEqual(facebook_widget.attrs('data-tooltip')[0], 'Share on Facebook')
self.assertEqual(facebook_widget.attrs('aria-haspopup')[0], 'true')
self.assertEqual(facebook_widget.attrs('aria-expanded')[0], 'false')
self.assertEqual(facebook_widget.attrs('target')[0], '_blank')
self.assertIn(facebook_url, facebook_widget.attrs('href')[0])
self.assertIn(facebook_url, facebook_widget.attrs('onclick')[0])
def test_ended_course_date(self):
"""
Scenario:
Course Date should have the format 'Ended - Sep 23, 2015'
if the course on student dashboard has ended.
As a Student,
Given that I have enrolled to a course
And the course has ended in the past
When I visit dashboard page
Then the course date should have the following format "Ended - %b %d, %Y" e.g. "Ended - Sep 23, 2015"
"""
course_start_date = datetime.datetime(1970, 1, 1)
course_end_date = self.now - datetime.timedelta(days=90)
self.course_fixture.add_course_details({
'start_date': course_start_date,
'end_date': course_end_date
})
self.course_fixture.configure_course()
end_date = DEFAULT_SHORT_DATE_FORMAT.format(dt=course_end_date)
expected_course_date = "Ended - {end_date}".format(end_date=end_date)
# reload the page for changes to course date changes to appear in dashboard
self.dashboard_page.visit()
course_date = self.dashboard_page.get_course_date()
# Test that proper course date with 'ended' message is displayed if a course has already ended
self.assertEqual(course_date, expected_course_date)
def test_running_course_date(self):
"""
Scenario:
Course Date should have the format 'Started - Sep 23, 2015'
if the course on student dashboard is running.
As a Student,
Given that I have enrolled to a course
And the course has started
And the course is in progress
When I visit dashboard page
Then the course date should have the following format "Started - %b %d, %Y" e.g. "Started - Sep 23, 2015"
"""
course_start_date = datetime.datetime(1970, 1, 1)
course_end_date = self.now + datetime.timedelta(days=90)
self.course_fixture.add_course_details({
'start_date': course_start_date,
'end_date': course_end_date
})
self.course_fixture.configure_course()
start_date = DEFAULT_SHORT_DATE_FORMAT.format(dt=course_start_date)
expected_course_date = "Started - {start_date}".format(start_date=start_date)
# reload the page for changes to course date changes to appear in dashboard
self.dashboard_page.visit()
course_date = self.dashboard_page.get_course_date()
# Test that proper course date with 'started' message is displayed if a course is in running state
self.assertEqual(course_date, expected_course_date)
def test_future_course_date(self):
"""
Scenario:
Course Date should have the format 'Starts - Sep 23, 2015'
if the course on student dashboard starts in future.
As a Student,
Given that I have enrolled to a course
And the course starts in future
And the course does not start within 5 days
When I visit dashboard page
Then the course date should have the following format "Starts - %b %d, %Y" e.g. "Starts - Sep 23, 2015"
"""
course_start_date = self.now + datetime.timedelta(days=30)
course_end_date = self.now + datetime.timedelta(days=365)
self.course_fixture.add_course_details({
'start_date': course_start_date,
'end_date': course_end_date
})
self.course_fixture.configure_course()
start_date = DEFAULT_SHORT_DATE_FORMAT.format(dt=course_start_date)
expected_course_date = "Starts - {start_date}".format(start_date=start_date)
# reload the page for changes to course date changes to appear in dashboard
self.dashboard_page.visit()
course_date = self.dashboard_page.get_course_date()
# Test that proper course date with 'starts' message is displayed if a course is about to start in future,
# and course does not start within 5 days
self.assertEqual(course_date, expected_course_date)
def test_near_future_course_date(self):
"""
Scenario:
Course Date should have the format 'Starts - Wednesday at 5am UTC'
if the course on student dashboard starts within 5 days.
As a Student,
Given that I have enrolled to a course
And the course starts within 5 days
When I visit dashboard page
Then the course date should have the following format "Starts - %A at %-I%P UTC"
e.g. "Starts - Wednesday at 5am UTC"
"""
course_start_date = self.now + datetime.timedelta(days=2)
course_end_date = self.now + datetime.timedelta(days=365)
self.course_fixture.add_course_details({
'start_date': course_start_date,
'end_date': course_end_date
})
self.course_fixture.configure_course()
start_date = TEST_DATE_FORMAT.format(dt=course_start_date)
expected_course_date = "Starts - {start_date} UTC".format(start_date=start_date)
# reload the page for changes to course date changes to appear in dashboard
self.dashboard_page.visit()
course_date = self.dashboard_page.get_course_date()
# Test that proper course date with 'starts' message is displayed if a course is about to start in future,
# and course starts within 5 days
self.assertEqual(course_date, expected_course_date)
def test_advertised_start_date(self):
"""
Scenario:
Course Date should be advertised start date
if the course on student dashboard has `Course Advertised Start` set.
As a Student,
Given that I have enrolled to a course
And the course has `Course Advertised Start` set.
When I visit dashboard page
Then the advertised start date should be displayed rather course start date"
"""
course_start_date = self.now + datetime.timedelta(days=2)
course_advertised_start = "Winter 2018"
self.course_fixture.add_course_details({
'start_date': course_start_date,
})
self.course_fixture.configure_course()
self.course_fixture.add_advanced_settings({
u"advertised_start": {u"value": course_advertised_start}
})
# NOTE(review): calls the fixture's private _add_advanced_settings();
# presumably needed to push settings after configure_course() — confirm.
self.course_fixture._add_advanced_settings()
expected_course_date = "Starts - {start_date}".format(start_date=course_advertised_start)
self.dashboard_page.visit()
course_date = self.dashboard_page.get_course_date()
self.assertEqual(course_date, expected_course_date)
def test_profile_img_alt_empty(self):
"""
Validate value of profile image alt attribue is null
"""
profile_img = self.dashboard_page.get_profile_img()
self.assertEqual(profile_img.attrs('alt')[0], '')
@attr('a11y')
class LmsDashboardA11yTest(BaseLmsDashboardTestMultiple):
"""
Class to test lms student dashboard accessibility.
"""
def test_dashboard_course_listings_a11y(self):
"""
Test the accessibility of the course listings
"""
# The base class enrolls the user in three courses (A, B, C).
course_listings = self.dashboard_page.get_courses()
self.assertEqual(len(course_listings), 3)
self.dashboard_page.a11y_audit.check_for_accessibility_errors()
|
miptliot/edx-platform
|
common/test/acceptance/tests/lms/test_lms_dashboard.py
|
Python
|
agpl-3.0
| 14,461
|
[
"VisIt"
] |
2541249637eedcf3ae3f1b17226e1f9b2b44d584094e4cc5c6d35b2ae37ce5a3
|
#!/usr/bin/env python
#coding: utf-8
import json
import sys
import os
import re
import numpy as np
import neuron
from neuron import h
from builtins import range
# This is super annoying: without neuron.gui, need
# to explicit load 'standard' hoc routines like 'run',
# but this is chatty on stdout, which means we get
# junk in our data if capturing output.
def hoc_execute_quiet(arg):
"""Execute a hoc statement with NEURON's stdout chatter suppressed.

Redirects the OS-level stdout file descriptor (not just sys.stdout) to
/dev/null around the h() call, because hoc writes directly to fd 1.
"""
with open(os.devnull, 'wb') as null:
fd = sys.stdout.fileno()
# Keep a duplicate of the real stdout so we can restore it afterwards.
# NOTE(review): `keep` is never os.close()d, leaking one fd per call.
keep = os.dup(fd)
sys.stdout.flush()
os.dup2(null.fileno(), fd)
h(arg)
sys.stdout.flush()
os.dup2(keep, fd)
def hoc_setup():
# Quietly load NEURON's standard run system ('run', 'stdinit', ...).
hoc_execute_quiet('load_file("stdrun.hoc")')
def hoc_quit():
# Quietly terminate the NEURON process.
hoc_execute_quiet('quit()')
#h('quit()')
# Baseline parameter set shared by every model and simulation in this
# module; individual entries can be overridden from the command line via
# override_defaults_from_args().
default_model_parameters = {
    'gnabar_hh': 0.12,    # H-H sodium conductance in S/cm^2
    'gkbar_hh': 0.036,    # H-H potassium conductance in S/cm^2
    'gl_hh': 0.0003,      # H-H leak conductance in S/cm^2
    'el_hh': -54.3,       # H-H reversal potential in mV
    'g_pas': 0.001,       # Passive conductance in S/cm^2
    'e_pas': -65.0,       # Leak reversal potential in mV
    'Ra': 100.0,          # Intracellular resistivity in ohm cm
    'cm': 1.0,            # Membrane areal capacitance in uF/cm^2
    'tau': 2.0,           # Exponential synapse time constant
    'tau1': 0.5,          # Exp2 synapse tau1
    'tau2': 2.0,          # Exp2 synapse tau2
    'ncomp': 1001,        # Number of compartments (nseg) in dendrites
    'dt': 0.0,            # (Simulation parameter) default dt, 0 => use cvode adaptive
    'abstol': 1e-6        # (Simulation parameter) abstol for cvode if used
}

def override_defaults_from_args(args=sys.argv):
    """Overwrite default_model_parameters entries from 'key=value' strings.

    Each element of `args` of the form '<known-key>=<value>' replaces the
    corresponding entry with float(value); all other elements are ignored.
    """
    for arg in args:
        key, sep, value = arg.partition('=')
        if sep and key in default_model_parameters:
            default_model_parameters[key] = float(value)
def combine(*dicts, **kw):
    """Merge any number of dicts left-to-right, with keyword overrides last.

    Later sources win on key collisions; none of the inputs are modified.
    """
    merged = {}
    for source in dicts + (kw,):
        merged.update(source)
    return merged
class VModel:
"""Thin builder around NEURON sections: a soma plus named dendrites,
with current clamps, synapses and spike sources attached to them."""
def __init__(self):
self.soma = None
self.sections = {}
self.stims = []
self.synapses = []
self.netcons = []
def set_ncomp(self, n):
# Set the number of compartments on every dendrite (not the soma).
for s in self.sections.values():
s.nseg = int(n)
def add_iclamp(self, t0, dt, i, to=None, pos=1):
"""Attach a current clamp: onset t0, duration dt, amplitude i (nA)."""
# If no section specified, attach to middle of soma
if to is None:
sec = self.soma
pos = 0.5
else:
sec = self.sections[to]
stim = h.IClamp(sec(pos))
stim.delay = t0
stim.dur = dt
stim.amp = i
self.stims.append(stim)
def add_exp_syn(self, secname, pos=0.5, **kw):
"""Add a single-exponential synapse; returns its index for add_spike()."""
p = combine(default_model_parameters, kw)
syn = h.ExpSyn(self.sections[secname](pos))
syn.tau = p['tau']
self.synapses.append(syn)
return len(self.synapses)-1
def add_exp2_syn(self, secname, pos=0.5, **kw):
"""Add a double-exponential synapse; returns its index for add_spike()."""
p = combine(default_model_parameters, kw)
syn = h.Exp2Syn(self.sections[secname](pos))
syn.tau1 = p['tau1']
syn.tau2 = p['tau2']
self.synapses.append(syn)
return len(self.synapses)-1
def add_spike(self, t, weight, target=0):
"""Deliver one spike event at time t to synapse `target`.

The event time is encoded as the NetCon delay from a t=0 NetStim.
"""
stim = h.NetStim()
stim.number = 1
stim.start = 0
nc = h.NetCon(stim, self.synapses[target])
nc.delay = t
nc.weight[0] = weight
self.stims.append(stim)
self.netcons.append(nc)
def add_soma(self, diam, **kw):
"""Create the (spherical: L == diam) soma with active H-H channels."""
p = combine(default_model_parameters, kw)
soma = h.Section(name='soma')
soma.diam = diam
soma.L = diam
soma.Ra = p['Ra']
soma.cm = p['cm']
# Insert active Hodgkin-Huxley channels in the soma.
soma.insert('hh')
soma.gnabar_hh = p['gnabar_hh']
soma.gkbar_hh = p['gkbar_hh']
soma.gl_hh = p['gl_hh']
soma.el_hh = p['el_hh']
# For reversal potentials we use those computed using
# the Nernst equation with the following values:
# R 8.3144598
# F 96485.33289
# nao 140 mM
# nai 10 mM
# ko 2.5 mM
# ki 64.4 nM
# We don't use the default values for ena and ek taken
# from the HH paper:
# ena = 115.0mV + -65.0mV,
# ek = -12.0mV + -65.0mV,
soma.ena = 63.55148117386
soma.ek = -74.17164678272
# This is how we would get NEURON to use Nernst equation, when they
# correct the Nernst equation implementation.
#h.ion_style('k_ion', 3, 2, 1, 1, 1)
#h.ion_style('na_ion', 3, 2, 1, 1, 1)
self.soma = soma
def add_dendrite(self, name, geom, to=None, **kw):
"""Add a passive dendrite with 3-d geometry `geom` = [(x, diam), ...].

Connects its 0-end to the 1-end of section `to`, or to the soma when
`to` is None (and a soma exists).
"""
p = combine(default_model_parameters, kw)
dend = h.Section(name=name)
dend.push()
for x, d in geom:
h.pt3dadd(x, 0, 0, d)
h.pop_section()
dend.Ra = p['Ra']
dend.cm = p['cm']
# Add passive membrane properties to dendrite.
dend.insert('pas')
dend.g_pas = p['g_pas']
dend.e_pas = p['e_pas']
dend.nseg = int(p['ncomp'])
if to is None:
if self.soma is not None:
dend.connect(self.soma(1))
else:
dend.connect(self.sections[to](1))
self.sections[name] = dend
# Run 'current' model, return list of traces.
# Samples at cable mid- and end-points taken every `sample_dt`;
# Voltage on all compartments per section reported every `report_dt`.
def run_nrn_sim(tend, sample_dt=0.025, report_t=None, report_dt=None, dt=None, **meta):
"""Simulate the currently-built NEURON model until `tend` ms.

Returns a list of trace dicts: one time-series trace for all section
mid/end points, plus one spatial profile per section per report time.
Extra keyword arguments (`meta`) are merged into every trace dict.
dt == 0 selects CVODE adaptive integration.
"""
if dt is None:
dt = default_model_parameters['dt']
# Instrument mid-point and ends of each section for traces.
vtraces = []
vtrace_t_hoc = h.Vector()
# If all dendrites share one nseg, record it in the trace metadata.
ncomps = set([s.nseg for s in h.allsec() if s.name()!='soma'])
if len(ncomps)==1:
common_ncomp = { 'ncomp': ncomps.pop() }
else:
common_ncomp = {}
# NOTE(review): the local names are swapped (`vend` records s(0.5),
# `vmid` records s(1.0)) but the ".mid"/".end" labels are correct.
for s in h.allsec():
vend = h.Vector()
vend.record(s(0.5)._ref_v, sample_dt)
vtraces.append((s.name()+".mid", vend))
if s.nseg!=1 or s.name()!='soma':
vmid = h.Vector()
vmid.record(s(1.0)._ref_v, sample_dt)
vtraces.append((s.name()+".end", vmid))
vtrace_t_hoc.record(h._ref_t, sample_dt)
# Instrument every segment for section voltage reports.
if report_t is None:
if report_dt is not None:
report_t = [report_dt*(1+i) for i in range(int(tend/report_dt))]
else:
report_t = []
elif not isinstance(report_t, list):
report_t = [report_t]
vreports = []
vreport_t_hoc = h.Vector(report_t)
if report_t:
for s in h.allsec():
nseg = s.nseg;
# Sample positions: both ends plus every compartment centre.
ps = [0] + [(i+0.5)/nseg for i in range(nseg)] + [1]
vs = [h.Vector() for p in ps]
for p, v in zip(ps, vs):
v.record(s(p)._ref_v, vreport_t_hoc)
vreports.append((s.name(), s.L, s.nseg, ps, vs))
# Run sim
if dt==0:
# Use CVODE instead
h.cvode.active(1)
abstol = default_model_parameters['abstol']
h.cvode.atol(abstol)
common_meta = { 'dt': 0, 'cvode': True, 'abstol': abstol }
else:
h.dt = dt
h.steps_per_ms = 1/dt # or else NEURON might noisily fudge dt
common_meta = { 'dt': dt, 'cvode': False }
h.secondorder = 2
h.tstop = tend
h.run()
# convert results to traces with metadata
traces = []
vtrace_t = list(vtrace_t_hoc)
traces.append(combine(common_meta, meta, common_ncomp, {
'name': 'membrane voltage',
'sim': 'neuron',
'units': 'mV',
'data': combine({n: list(v) for n, v in vtraces}, time=vtrace_t)
}))
# and section reports too
vreport_t = list(vreport_t_hoc)
for name, length, nseg, ps, vs in vreports:
# One column per sample position, one row per report time.
obs = np.column_stack([np.array(v) for v in vs])
xs = [length*p for p in ps]
for i, t in enumerate(report_t):
if i>=obs.shape[0]:
break
traces.append(combine(common_meta, meta, {
'name': 'membrane voltage',
'sim': 'neuron',
'units': {'x': 'µm', name: 'mV'},
'ncomp': nseg,
'time': t,
'data': {
'x': xs,
name: list(obs[i,:])
}
}))
return traces
def nrn_assert_no_sections():
# Sanity check used by callers: fail if any NEURON section still exists.
for s in h.allsec():
assert False, 'a section exists'
def nrn_stop():
# Shut down the NEURON process quietly.
hoc_quit()
# Run hoc setup on load
hoc_setup()
|
eth-cscs/nestmc-proto
|
validation/ref/neuron/nrn_validation.py
|
Python
|
bsd-3-clause
| 8,771
|
[
"NEURON"
] |
8ed27dfcee0c6e630cf2a3006f95edce037f7058ac9b79e3d1628534e1924de3
|
# Copyright (C) 2011-2012 CRS4.
#
# This file is part of Seal.
#
# Seal is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Seal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Seal. If not, see <http://www.gnu.org/licenses/>.
import logging
logging.basicConfig(level=logging.DEBUG)
import time, os, glob, math
import itertools as it
import ctypes as ct
import Bio.SeqIO
import seal.lib.aligner.bwa.bwa_core as bwa
from seal.lib.aligner.bwa.bwa_aligner import BwaAligner
#import seal.lib.generator as sg
import seal.lib.aligner.bwa.constants as bwa_const
from seal.lib.io.sam_formatter import SamFormatter
##---------------------------------##
# find or build the bwa executable
##---------------------------------##
# Locate the bwa source tree, either from the SRC_DIR environment variable
# or relative to this file, and make sure a bwa executable exists there
# (building it with make if necessary).
if 'SRC_DIR' in os.environ:  # provide a way to override the default location
    SRC_DIR = os.environ['SRC_DIR']
else:
    # our expected path: tests/seal/lib/aligner/bwa/testing_utilities.py
    tree_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..', '..', '..'))
    SRC_DIR = os.path.join(tree_root, 'seal', 'lib', 'aligner', 'bwa', 'libbwa')
if not os.path.exists(SRC_DIR):
    raise ValueError("%r not found: redefine SRC_DIR in %s or in an environment variable" % (SRC_DIR, __name__))
BWA_EXE = os.path.join(SRC_DIR, 'bwa')
logging.debug("BWA_EXE: %r" % BWA_EXE)
if not os.path.exists(BWA_EXE):
    # logging.warning: logging.warn is a deprecated alias
    logging.warning("bwa executable not found; building source code")
    cmd = "make -C %s" % SRC_DIR
    ret = os.system(cmd)
    if ret:
        raise RuntimeError("%r failed -- could not make bwa executable" % cmd)
##---------------------------------##
##---------------------------------##
# Fixture helpers
##---------------------------------##
# Absolute path to the reference FASTA fixture used by the index helpers.
reference = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'fixtures/foobar.fa')
def build_ref_index():
# Build the BWA index for the fixture reference, unless the '.bwt'
# index file already exists; bwa output is discarded.
if not os.path.exists('%s.bwt' % reference):
os.system('%s index %s >/dev/null 2>&1' % (BWA_EXE, reference))
def remove_ref_index():
# Remove every 'foobar.fa.*' index file created by build_ref_index().
os.system('rm -f %s.*' % reference)
##---------------------------------##
def clean_up_aux_files(fname):
    """Delete every '<fname>.*' sidecar file except gzipped ('.gz') ones."""
    for path in glob.glob(fname + '.*'):
        _, extension = os.path.splitext(path)
        if extension != ".gz":
            os.unlink(path)
def create_seq_file(fname, seq, qtype="fastq-sanger"):
    """Write `seq` to `fname` and a Sanger-encoded copy to `fname + '.sanger'`.

    `seq` is a single 4-line FASTQ record. For "fastq-illumina" and
    "fastq-solexa" input, the quality string of the '.sanger' copy is
    re-encoded to the Sanger offset (and, for Solexa, converted from the
    Solexa to the Phred quality scale); otherwise the copy is identical.
    """
    if qtype == "fastq-illumina" or qtype == "fastq-solexa":
        hdr1, s, hdr2, q = seq.splitlines(False)
        # FIXME: duplicate code from bwa_core
        # Bug fix: the module is imported as `bwa` above; the previous
        # reference to `bwa_core` raised NameError on this branch.
        q = [ord(x) - bwa.Q_OFFSET[qtype] for x in q]
        if qtype == "fastq-solexa":
            # Solexa -> Phred quality scale conversion.
            q = [int(round(x + 10*math.log10(1 + 10**(-x/10.)))) for x in q]
        q = "".join([chr(x + bwa.Q_OFFSET["fastq-sanger"]) for x in q])
        seq_sanger = "%s\n%s\n%s\n%s\n" % (hdr1, s, hdr2, q)
    else:
        seq_sanger = seq
    with open(fname, 'w') as fp:
        fp.write(seq)
    with open("%s.sanger" % fname, 'w') as fp_sanger:
        fp_sanger.write(seq_sanger)
def read_sam_file(fname):
    """Parse a SAM file and return the list of parsed alignment records.

    Header lines (starting with '@') are skipped; blank lines are also
    skipped (previously `l[0]` raised IndexError on an empty line).
    """
    formatter = SamFormatter()
    records = []
    with open(fname) as fp:
        for line in fp:
            line = line.strip()
            if not line or line[0] == "@":  # SAM header or blank line
                continue
            records.append(formatter.parse(line))
    return records
def build_index(refseq_fname):
# Build the BWA FM-index for the given reference FASTA; creates the
# 'refseq_fname.*' index files on disk as a side effect.
os.system('%s index %s' % (BWA_EXE, refseq_fname))
def run_bwa_samse(refseq_fname, hit_fname):
"""Run the reference 'bwa aln' + 'bwa samse' single-end pipeline on the
Sanger-encoded copy of `hit_fname` and return the parsed SAM records."""
hit_fname += ".sanger"
os.system('%s aln %s %s > %s.aln' % (BWA_EXE, refseq_fname,
hit_fname, hit_fname))
os.system('%s samse %s %s.aln %s > %s.sam' % (BWA_EXE, refseq_fname,
hit_fname, hit_fname,
hit_fname))
return read_sam_file(hit_fname + '.sam')
def run_bwa_sampe(refseq_fname, read_fname, mate_fname):
"""Run the reference 'bwa aln' (per mate) + 'bwa sampe' paired-end
pipeline on the Sanger-encoded copies and return parsed SAM records."""
read_fname += ".sanger"
mate_fname += ".sanger"
os.system('%s aln %s %s > %s.aln' % (BWA_EXE, refseq_fname,
read_fname, read_fname))
os.system('%s aln %s %s > %s.aln' % (BWA_EXE, refseq_fname,
mate_fname, mate_fname))
os.system('%s sampe %s %s.aln %s.aln %s %s > %s.sam' % (BWA_EXE, refseq_fname,
read_fname,
mate_fname,
read_fname,
mate_fname,
read_fname))
return read_sam_file(read_fname + '.sam')
def run_bwa_py_sampe(refseq_fname, read_fname, mate_fname,
log_level=logging.INFO, pairing_batch_size=None,
seq_list_len=None, fastq_subfmt="fastq-illumina"):
"""Align paired reads with the Python BwaAligner (instead of shelling
out to bwa) and return the hits as plain dicts (name, aux, seq).

NOTE(review): Python-2-only code (it.izip, print statement, seq.tostring());
`pairing_batch_size` is accepted but never used.
"""
logger = logging.getLogger("PY")
logger.setLevel(log_level)
logger.info("RUNNING PYTHON VERSION")
def debug_dump(seq, state):
# Helper for dumping one aligned sequence at debug level.
logger.debug("%s: name=%s" % (state, seq.get_name()))
logger.debug("%s: qual=%s" % (state, seq.get_qual_string()))
logger.debug("%s: strand=%d" % (state, seq.strand))
logger.debug("%s: pos=%d" % (state, seq.pos))
logger.debug("%s: mapQ=%d" % (state, seq.mapQ))
read_flow = Bio.SeqIO.parse(open(read_fname), fastq_subfmt)
mate_flow = Bio.SeqIO.parse(open(mate_fname), fastq_subfmt)
pairs_flow = it.izip(read_flow, mate_flow)
class ResultCollector(object):
# Hit visitor: collects both mates of every processed pair.
def __init__(self):
self.result = []
def process(self, pair):
self.result.append(pair[0])
self.result.append(pair[1])
result = ResultCollector()
# Process the pair stream in batches of `seq_list_len` pairs.
while 1:
start = time.time()
pairs = list(it.islice(pairs_flow, 0, seq_list_len))
if len(pairs) == 0:
break
# turn the biopython SeqRecords into simple tuples
# NOTE(review): only the first 5 pairs of each batch are aligned
# (pairs[0:5] here and tuples[0:5] below) — confirm intentional.
tuples = map(lambda t: (t[0].name, t[0].seq.tostring(), None, t[1].seq.tostring(), None), pairs[0:5])
for t in tuples:
print t
logger.info('reading seqs %f sec' % (time.time() - start))
start = time.time()
aligner = BwaAligner()
aligner.reference = refseq_fname
aligner.hit_visitor = result
for t in tuples[0:5]:
aligner.load_pair_record(t)
aligner.run_alignment()
aligner.clear_batch()
logger.info('alignment %f sec' % (time.time() - start))
# map bwa mappings to dictionaries
def bwam_to_hash(bwa_m):
h = dict(
name=bwa_m.name,
aux=bwa_m.tags,
seq=bwa_m.get_seq_5()
)
return h
return map(bwam_to_hash, result.result)
def get_fixture_path(fixture_name):
    """Return the absolute path of `fixture_name` inside the 'fixtures'
    directory that sits next to this module."""
    here = os.path.dirname(os.path.abspath(__file__))
    return os.path.join(here, "fixtures", fixture_name)
# Nucleotide character -> small-integer code (case-insensitive), matching
# the encoding build_bwa_seq_t() writes into bwa_seq_t.seq/rseq.
base_key = {
'A':0, 'a':0,
'C':1, 'c':1,
'G':2, 'g':2,
'T':3, 't':3,
'N':4, 'n':4
}
# CIGAR operation character -> numeric op code used in packed CIGAR entries.
op_key = {
'M':0,
'I':1,
'D':2,
'S':3
}
# Inverse mappings: integer code -> base, and code -> complemented base.
key_base = 'ACGTN'
complement_key_base = 'TGCAN'
# Base character -> Watson-Crick complement (N maps to itself).
complement = {
'A':'T',
'C':'G',
'G':'C',
'T':'A',
'N':'N'
}
def build_bwa_seq_t(hit):
"""Create a real bwa_seq_t from a dictionary containing its values"""
# Populates a ctypes bwa_seq_t struct field-by-field from `hit`;
# seq/rseq are integer-encoded via base_key and the CIGAR is packed
# into 16-bit (op << SHIFT | len) entries.
# NOTE(review): Python-2-only (xrange below).
struct = bwa.bwa_seq_t()
struct.name = ct.c_char_p(hit['name'])
struct.seq = ct.cast(ct.create_string_buffer(len(hit['seq'])), ct.POINTER(ct.c_uint8))
struct.rseq = ct.cast(ct.create_string_buffer(len(hit['rseq'])), ct.POINTER(ct.c_uint8))
struct.qual = ct.cast(ct.c_char_p(hit['qual']), ct.POINTER(ct.c_uint8))
struct.len = hit['len']
struct.strand = hit['strand']
struct.type = hit['type']
struct.dummy = hit['dummy']
struct.extra_flag = hit['extra_flag']
struct.n_mm = hit['n_mm']
struct.n_gapo = hit['n_gapo']
struct.n_gape = hit['n_gape']
struct.mapQ = hit['mapQ']
struct.score = hit['score']
struct.clip_len = hit['clip_len']
struct.n_aln = hit['n_aln']
struct.aln = None
struct.n_multi = hit['n_multi']
struct.multi = None
struct.sa = hit['sa']
struct.pos = hit['pos']
struct.c1 = hit['c1']
struct.c2 = hit['c2']
struct.seQ = hit['seQ']
struct.n_cigar = hit['n_cigar']
struct.cigar = ct.cast(ct.create_string_buffer( len(hit['cigar'])*ct.sizeof(ct.c_uint16) ), ct.POINTER(ct.c_uint16))
struct.tid = hit['tid']
struct.full_len = hit['full_len']
struct.nm = hit['nm']
struct.md = None
# Integer-encode the forward and reverse sequences in place.
# NOTE(review): assumes len(seq) == len(rseq) — confirm with callers.
for i in xrange(len(hit['seq'])):
struct.seq[i] = base_key[ hit['seq'][i] ]
struct.rseq[i] = base_key[ hit['rseq'][i] ]
# create the CIGAR array
for i in xrange(len(hit['cigar'])):
size, op = hit['cigar'][i]
encoded_v = (op << bwa_const.CIGAR_OP_SHIFT) | (size & bwa_const.CIGAR_LN_MASK)
struct.cigar[i] = ct.c_uint16(encoded_v)
return struct
|
QwertyManiac/seal-cdh4
|
tests/seal/lib/aligner/bwa/testing_utilities.py
|
Python
|
gpl-3.0
| 8,921
|
[
"BWA",
"Biopython"
] |
e2d19e7bf017696c9b9908868dd9fc82556cdffb4bca0cbf60c9fe4d2f9754ee
|
"""
K-means Clustering and Vector Quantization Module
Provides routines for k-means clustering, generating code books
from k-means models, and quantizing vectors by comparing them with
centroids in a code book.
The k-means algorithm takes as input the number of clusters to
generate, k, and a set of observation vectors to cluster. It
returns a set of centroids, one for each of the k clusters. An
observation vector is classified with the cluster number or
centroid index of the centroid closest to it.
A vector v belongs to cluster i if it is closer to centroid i than
any other centroids. If v belongs to i, we say centroid i is the
dominating centroid of v. The k-means algorithm tries to
minimize distortion, which is defined as the sum of the squared distances
between each observation vector and its dominating centroid. Each
step of the k-means algorithm refines the choices of centroids to
reduce distortion. The change in distortion is used as a
stopping criterion: when the change is lower than a threshold, the
k-means algorithm is not making sufficient progress and
terminates. One can also define a maximum number of iterations.
Since vector quantization is a natural application for k-means,
information theory terminology is often used. The centroid index
or cluster index is also referred to as a "code" and the table
mapping codes to centroids and vice versa is often referred to as a
"code book". The result of k-means, a set of centroids, can be
used to quantize vectors. Quantization aims to find an encoding of
vectors that reduces the expected distortion.
All routines expect obs to be a M by N array where the rows are
the observation vectors. The codebook is a k by N array where the
i'th row is the centroid of code word i. The observation vectors
and centroids have the same feature dimension.
As an example, suppose we wish to compress a 24-bit color image
(each pixel is represented by one byte for red, one for blue, and
one for green) before sending it over the web. By using a smaller
8-bit encoding, we can reduce the amount of data by two
thirds. Ideally, the colors for each of the 256 possible 8-bit
encoding values should be chosen to minimize distortion of the
color. Running k-means with k=256 generates a code book of 256
codes, which fills up all possible 8-bit sequences. Instead of
sending a 3-byte value for each pixel, the 8-bit centroid index
(or code word) of the dominating centroid is transmitted. The code
book is also sent over the wire so each 8-bit code can be
translated back to a 24-bit pixel value representation. If the
image of interest was of an ocean, we would expect many 24-bit
blues to be represented by 8-bit codes. If it was an image of a
human face, more flesh tone colors would be represented in the
code book.
Functions
---------
`whiten` :
Normalize a group of observations so each feature has unit
variance.
`vq` :
Calculate code book membership of a set of observation
vectors.
`kmeans` :
Clusters a set of observation vectors. Learns centroids with
the k-means algorithm, trying to minimize distortion. A code
book is generated that can be used to quantize vectors.
`kmeans2` :
A different implementation of k-means with more methods for
initializing centroids. Uses maximum number of iterations as
opposed to a distortion threshold as its stopping criterion.
"""
__docformat__ = 'restructuredtext'
__all__ = ['whiten', 'vq', 'kmeans', 'kmeans2']
# TODO:
# - implement a high-level method for running k-means several times with
# different initializations
# - warning: what happens if there are different numbers of clusters? For now,
# emit a warning, but that is not great, because it may not really make sense
# to succeed in this case (maybe an exception would be better?)
import warnings
from numpy.random import randint
from numpy import shape, zeros, sqrt, argmin, minimum, array, \
newaxis, arange, compress, equal, common_type, single, double, take, \
std, mean
import numpy as np
class ClusterError(Exception):
"""Raised when a clustering routine cannot produce a valid result
(e.g. k-means initialization yields an empty cluster)."""
pass
def whiten(obs):
    """ Normalize a group of observations on a per feature basis.

    Before running k-means, it is beneficial to rescale each feature
    dimension of the observation set with whitening. Each feature
    (column) is divided by its standard deviation across all
    observations (rows) to give it unit variance.

    :Parameters:
        obs : ndarray
            Each row of the array is an observation. The
            columns are the features seen during each observation.

    :Returns:
        result : ndarray
            Contains the values in obs scaled by the standard deviation
            of each column.

    Examples
    --------
    >>> from numpy import array
    >>> from scipy.cluster.vq import whiten
    >>> features = array([[ 1.9, 2.3, 1.7],
    ...                   [ 1.5, 2.5, 2.2],
    ...                   [ 0.8, 0.6, 1.7,]])
    >>> whiten(features)
    array([[ 3.41250074,  2.20300046,  5.88897275],
           [ 2.69407953,  2.39456571,  7.62102355],
           [ 1.43684242,  0.57469577,  5.88897275]])
    """
    # XXX perhaps should have an axis variable here.
    return obs / std(obs, axis=0)
def vq(obs, code_book):
    """Vector Quantization: assign codes from a code book to observations.

    Each row of the N by M `obs` array is compared with every centroid in
    `code_book` and receives the index of the closest one (Euclidean
    distance). Observations should have unit variance per feature, which
    can be achieved by passing them through the `whiten` function.

    :Parameters:
        obs : ndarray
            Each row of the NxM array is an observation. The columns are the
            "features" seen during each observation. The features must be
            whitened first using the whiten function or something equivalent.
        code_book : ndarray.
            The code book is usually generated using the k-means algorithm.
            Each row of the array holds a different code, and the columns are
            the features of the code.

    :Returns:
        code : ndarray
            A length N array holding the code book index for each observation.
        dist : ndarray
            The distortion (distance) between the observation and its nearest
            code.

    Notes
    -----
    The accelerated C implementation (`_vq`) handles only single and double
    precision. Other dtypes — or a missing C extension — fall back to the
    pure-Python `py_vq`.

    Examples
    --------
    >>> from numpy import array
    >>> from scipy.cluster.vq import vq
    >>> code_book = array([[1.,1.,1.],
    ...                    [2.,2.,2.]])
    >>> features = array([[ 1.9,2.3,1.7],
    ...                   [ 1.5,2.5,2.2],
    ...                   [ 0.8,0.6,1.7]])
    >>> vq(features,code_book)
    (array([1, 1, 0],'i'), array([ 0.43588989,  0.73484692,  0.83066239]))
    """
    try:
        import _vq
        ct = common_type(obs, code_book)
        # The C routine is identical for both supported precisions; anything
        # else goes through the pure-Python path.
        if ct is single or ct is double:
            return _vq.vq(obs.astype(ct), code_book.astype(ct))
        return py_vq(obs, code_book)
    except ImportError:
        return py_vq(obs, code_book)
def py_vq(obs, code_book):
    """Pure-Python version of the vq algorithm.

    Computes the Euclidean distance between each observation and every
    code in `code_book`, assigning each observation the index of its
    nearest code.

    :Parameters:
        obs : ndarray
            Expects a rank 2 array. Each row is one observation.
        code_book : ndarray
            Code book to use. Same format than obs. Should have same number of
            features (eg columns) than obs.

    :Note:
        This function is slower than the C version but works for
        all input types. If the inputs have the wrong types for the
        C versions of the function, this one is called as a last resort.
        It is about 20 times slower than the C version.

    :Returns:
        code : ndarray
            code[i] gives the label of the ith observation, that is its code
            is code_book[code[i]].
        min_dist : ndarray
            min_dist[i] gives the distance between the ith observation and its
            corresponding code.
    """
    # Rank-1 input is delegated to the (currently disabled) 1-d routine.
    if np.ndim(obs) == 1:
        if np.ndim(obs) != np.ndim(code_book):
            raise ValueError(
                "Observation and code_book should have the same rank")
        return _py_vq_1d(obs, code_book)

    n_obs, n_feat = shape(obs)
    if np.ndim(obs) != np.ndim(code_book):
        raise ValueError("Observation and code_book should have the same rank")
    if n_feat != code_book.shape[1]:
        raise ValueError("Code book(%d) and obs(%d) should have the same "
                         "number of features (eg columns)" %
                         (code_book.shape[1], n_feat))

    codes = zeros(n_obs, dtype=int)
    best_sq = zeros(n_obs)
    for row in range(n_obs):
        # Squared distance from this observation to every code.
        sq_dists = np.sum((obs[row] - code_book) ** 2, 1)
        winner = argmin(sq_dists)
        codes[row] = winner
        best_sq[row] = sq_dists[winner]
    return codes, sqrt(best_sq)
def _py_vq_1d(obs, code_book):
""" Python version of vq algorithm for rank 1 only.
:Parameters:
obs : ndarray
Expects a rank 1 array. Each item is one observation.
code_book : ndarray
Code book to use. Same format than obs. Should rank 1 too.
:Returns:
code : ndarray
code[i] gives the label of the ith obversation, that its code is
code_book[code[i]].
mind_dist : ndarray
min_dist[i] gives the distance between the ith observation and its
corresponding code.
"""
raise RuntimeError("_py_vq_1d buggy, do not use rank 1 arrays for now")
n = obs.size
nc = code_book.size
dist = np.zeros((n, nc))
for i in range(nc):
dist[:, i] = np.sum(obs - code_book[i])
print dist
code = argmin(dist)
min_dist = dist[code]
return code, sqrt(min_dist)
def py_vq2(obs, code_book):
    """2nd Python version of vq algorithm.

    Builds one fully broadcast distance array between every observation
    and every code, then reduces it.

    :Parameters:
        obs : ndarray
            Expect a rank 2 array. Each row is one observation.
        code_book : ndarray
            Code book to use. Same format than obs. Should have same number of
            features (eg columns) than obs.

    :Note:
        This could be faster when number of codebooks is small, but it
        becomes a real memory hog when codebook is large. It requires
        N by M by O storage where N=number of obs, M = number of
        features, and O = number of codes.

    :Returns:
        code : ndarray
            code[i] gives the label of the ith observation, that is its code
            is code_book[code[i]].
        min_dist : ndarray
            min_dist[i] gives the distance between the ith observation and its
            corresponding code.
    """
    n_feat = shape(obs)[1]
    # Code books and observations should have same number of features.
    if n_feat != code_book.shape[1]:
        raise ValueError("""
            code book(%d) and obs(%d) should have the same
            number of features (eg columns)""" % (code_book.shape[1], n_feat))

    # deltas has shape (n_codes, n_obs, n_features).
    deltas = obs[newaxis, :, :] - code_book[:, newaxis, :]
    dists = sqrt(np.sum(deltas * deltas, -1))
    winners = argmin(dists, 0)
    # Reducing over the code axis yields the distance to the chosen code.
    return winners, minimum.reduce(dists, 0)
def _kmeans(obs, guess, thresh=1e-5):
    """ "Raw" single run of k-means starting from the codebook `guess`.

    Alternates (assign each observation to its nearest code, recompute each
    code as the centroid of its members) until the decrease in average
    distortion between two consecutive passes is <= `thresh`. Codes that end
    up with no members are dropped, so the returned codebook may be smaller
    than `guess`.

    :Parameters:
        obs : ndarray
            One (whitened) observation per row.
        guess : ndarray
            Initial codebook, one code per row.
        thresh : float
            Convergence threshold on the drop in average distortion.

    :Returns:
        code_book :
            the lowest distortion codebook found.
        avg_dist :
            the average distance a observation is from a code in the book.
            Lower means the code_book matches the data better.

    :SeeAlso:
        - kmeans : wrapper around k-means

    XXX should have an axis variable here.

    Examples
    --------
    Note: not whitened in this example.

    >>> from numpy import array
    >>> from scipy.cluster.vq import _kmeans
    >>> features = array([[ 1.9,2.3],
    ...                   [ 1.5,2.5],
    ...                   [ 0.8,0.6],
    ...                   [ 0.4,1.8],
    ...                   [ 1.0,1.0]])
    >>> book = array((features[0],features[2]))
    >>> _kmeans(features,book)
    (array([[ 1.7       ,  2.4       ],
           [ 0.73333333,  1.13333333]]), 0.40563916697728591)
    """
    code_book = array(guess, copy = True)
    avg_dist = []
    diff = thresh+1.
    while diff > thresh:
        nc = code_book.shape[0]
        # Compute membership and distances between obs and code_book.
        obs_code, distort = vq(obs, code_book)
        avg_dist.append(mean(distort, axis=-1))
        # Recalc code_book as centroids of associated obs.
        # NOTE(review): `diff` is only updated at the bottom of the loop, so
        # this guard is always true here — it is effectively a no-op.
        if(diff > thresh):
            has_members = []
            for i in arange(nc):
                cell_members = compress(equal(obs_code, i), obs, 0)
                if cell_members.shape[0] > 0:
                    code_book[i] = mean(cell_members, 0)
                    has_members.append(i)
            # Remove code_books that didn't have any members.
            code_book = take(code_book, has_members, 0)
        if len(avg_dist) > 1:
            diff = avg_dist[-2] - avg_dist[-1]
    return code_book, avg_dist[-1]
def kmeans(obs, k_or_guess, iter=20, thresh=1e-5):
    """
    Performs k-means on a set of observation vectors forming k clusters.

    The k-means algorithm adjusts the centroids until sufficient
    progress cannot be made, i.e. the change in distortion since
    the last iteration is less than some threshold. This yields
    a code book mapping centroids to codes and vice versa.

    Distortion is defined as the sum of the squared differences
    between the observations and the corresponding centroid.

    Parameters
    ----------
    obs : ndarray
        Each row of the M by N array is an observation vector. The
        columns are the features seen during each observation.
        The features must be whitened first with the `whiten` function.
    k_or_guess : int or ndarray
        The number of centroids to generate. A code is assigned to
        each centroid, which is also the row index of the centroid
        in the code_book matrix generated.

        The initial k centroids are chosen by randomly selecting
        observations from the observation matrix. Alternatively,
        passing a k by N array specifies the initial k centroids.
    iter : int, optional
        The number of times to run k-means, returning the codebook
        with the lowest distortion. This argument is ignored if
        initial centroids are specified with an array for the
        ``k_or_guess`` parameter. This parameter does not represent the
        number of iterations of the k-means algorithm.
    thresh : float, optional
        Terminates the k-means algorithm if the change in
        distortion since the last k-means iteration is less than
        or equal to thresh.

    Returns
    -------
    codebook : ndarray
        A k by N array of k centroids. The i'th centroid
        codebook[i] is represented with the code i. The centroids
        and codes generated represent the lowest distortion seen,
        not necessarily the globally minimal distortion.
    distortion : float
        The distortion between the observations passed and the
        centroids generated.

    See Also
    --------
    kmeans2 : a different implementation of k-means clustering
        with more methods for generating initial centroids but without
        using a distortion change threshold as a stopping criterion.
    whiten : must be called prior to passing an observation matrix
        to kmeans.

    Examples
    --------
    >>> from numpy import array
    >>> from scipy.cluster.vq import vq, kmeans, whiten
    >>> features = array([[ 1.9,2.3],
    ...                   [ 1.5,2.5],
    ...                   [ 0.8,0.6],
    ...                   [ 0.4,1.8],
    ...                   [ 0.1,0.1],
    ...                   [ 0.2,1.8],
    ...                   [ 2.0,0.5],
    ...                   [ 0.3,1.5],
    ...                   [ 1.0,1.0]])
    >>> whitened = whiten(features)
    >>> book = array((whitened[0],whitened[2]))
    >>> kmeans(whitened,book)
    (array([[ 2.3110306 ,  2.86287398],
           [ 0.93218041,  1.24398691]]), 0.85684700941625547)

    >>> from numpy import random
    >>> random.seed((1000,2000))
    >>> codes = 3
    >>> kmeans(whitened,codes)
    (array([[ 2.3110306 ,  2.86287398],
           [ 1.32544402,  0.65607529],
           [ 0.40782893,  2.02786907]]), 0.5196582527686241)
    """
    if int(iter) < 1:
        raise ValueError('iter must be at least 1.')
    # An explicit initial codebook was supplied: run a single k-means pass
    # from it. Using isinstance (rather than the old exact `type(...) ==`
    # comparison) also accepts ndarray subclasses such as np.matrix.
    if isinstance(k_or_guess, np.ndarray):
        guess = k_or_guess
        if guess.size < 1:
            raise ValueError("Asked for 0 cluster ? initial book was %s" % \
                             guess)
        result = _kmeans(obs, guess, thresh = thresh)
    else:
        # k given as a count: run `iter` restarts and keep the best codebook.
        best_dist = np.inf
        No = obs.shape[0]
        k = k_or_guess
        if k < 1:
            raise ValueError("Asked for 0 cluster ? ")
        for i in range(iter):
            # The initial code book is randomly selected from observations.
            guess = take(obs, randint(0, No, k), 0)
            book, dist = _kmeans(obs, guess, thresh = thresh)
            if dist < best_dist:
                best_book = book
                best_dist = dist
        result = best_book, best_dist
    return result
def _kpoints(data, k):
"""Pick k points at random in data (one row = one observation).
This is done by taking the k first values of a random permutation of 1..N
where N is the number of observation.
:Parameters:
data : ndarray
Expect a rank 1 or 2 array. Rank 1 are assumed to describe one
dimensional data, rank 2 multidimensional data, in which case one
row is one observation.
k : int
Number of samples to generate.
"""
if data.ndim > 1:
n = data.shape[0]
else:
n = data.size
p = np.random.permutation(n)
x = data[p[:k], :].copy()
return x
def _krandinit(data, k):
"""Returns k samples of a random variable which parameters depend on data.
More precisely, it returns k observations sampled from a Gaussian random
variable which mean and covariances are the one estimated from data.
:Parameters:
data : ndarray
Expect a rank 1 or 2 array. Rank 1 are assumed to describe one
dimensional data, rank 2 multidimensional data, in which case one
row is one observation.
k : int
Number of samples to generate.
"""
def init_rank1(data):
mu = np.mean(data)
cov = np.cov(data)
x = np.random.randn(k)
x *= np.sqrt(cov)
x += mu
return x
def init_rankn(data):
mu = np.mean(data, 0)
cov = np.atleast_2d(np.cov(data, rowvar = 0))
# k rows, d cols (one row = one obs)
# Generate k sample of a random variable ~ Gaussian(mu, cov)
x = np.random.randn(k, mu.size)
x = np.dot(x, np.linalg.cholesky(cov).T) + mu
return x
nd = np.ndim(data)
if nd == 1:
return init_rank1(data)
else:
return init_rankn(data)
# Registry mapping `minit` names (see kmeans2) to initialization functions.
_valid_init_meth = {'random': _krandinit, 'points': _kpoints}
def _missing_warn():
"""Print a warning when called."""
warnings.warn("One of the clusters is empty. "
"Re-run kmean with a different initialization.")
def _missing_raise():
    """Raise a ClusterError about an empty cluster."""
    message = ("One of the clusters is empty. "
               "Re-run kmean with a different initialization.")
    raise ClusterError(message)
# Registry mapping `missing` names (see kmeans2) to empty-cluster handlers.
_valid_miss_meth = {'warn': _missing_warn, 'raise': _missing_raise}
def kmeans2(data, k, iter = 10, thresh = 1e-5, minit = 'random',
            missing = 'warn'):
    """Classify a set of observations into k clusters using the k-means
    algorithm.

    The algorithm attempts to minimize the Euclidian distance between
    observations and centroids. Several initialization methods are
    included.

    :Parameters:
        data : ndarray
            A M by N array of M observations in N dimensions or a length
            M array of M one-dimensional observations.
        k : int or ndarray
            The number of clusters to form as well as the number of
            centroids to generate. If minit initialization string is
            'matrix', or if a ndarray is given instead, it is
            interpreted as initial cluster to use instead.
        iter : int
            Number of iterations of the k-means algorithm to run. Note
            that this differs in meaning from the iters parameter to
            the kmeans function.
        thresh : float
            (not used yet).
        minit : string
            Method for initialization. Available methods are 'random',
            'points', 'uniform', and 'matrix':

            'random': generate k centroids from a Gaussian with mean and
            variance estimated from the data.

            'points': choose k observations (rows) at random from data for
            the initial centroids.

            'uniform': generate k observations from the data from a uniform
            distribution defined by the data set (unsupported).

            'matrix': interpret the k parameter as a k by M (or length k
            array for one-dimensional data) array of initial centroids.
        missing : string
            'warn' or 'raise': what to do when a cluster becomes empty.

    :Returns:
        centroid : ndarray
            A k by N array of centroids found at the last iteration of
            k-means.
        label : ndarray
            label[i] is the code or index of the centroid the
            i'th observation is closest to.
    """
    # Membership test on the dict itself; the old `.keys()` call was
    # redundant. Also fixes the "Unkown" typo in the error message.
    if missing not in _valid_miss_meth:
        raise ValueError("Unknown missing method: %s" % str(missing))
    # If data is rank 1, then we have a 1-dimensional problem.
    nd = np.ndim(data)
    if nd == 1:
        d = 1
    elif nd == 2:
        d = data.shape[1]
    else:
        raise ValueError("Input of rank > 2 not supported")
    if np.size(data) < 1:
        raise ValueError("Input has 0 items.")

    # If k is not a single value, then it should be compatible with data's
    # shape and is used directly as the initial codebook.
    if np.size(k) > 1 or minit == 'matrix':
        if not nd == np.ndim(k):
            raise ValueError("k is not an int and has not same rank than data")
        if d == 1:
            nc = len(k)
        else:
            (nc, dc) = k.shape
            if not dc == d:
                raise ValueError("k is not an int and has not same rank than "
                                 "data")
        clusters = k.copy()
    else:
        try:
            nc = int(k)
        except TypeError:
            raise ValueError("k (%s) could not be converted to an integer " % str(k))
        if nc < 1:
            raise ValueError("kmeans2 for 0 clusters ? (k was %s)" % str(k))
        if not nc == k:
            warnings.warn("k was not an integer, was converted.")
        try:
            init = _valid_init_meth[minit]
        except KeyError:
            raise ValueError("unknown init method %s" % str(minit))
        clusters = init(data, k)

    if int(iter) < 1:
        raise ValueError("iter = %s is not valid. iter must be a positive integer." % iter)

    return _kmeans2(data, clusters, iter, nc, _valid_miss_meth[missing])
def _kmeans2(data, code, niter, nc, missing):
    """ "Raw" version of kmeans2. Do not use directly.

    Runs exactly `niter` passes of k-means from the initial codebook `code`,
    calling `missing()` whenever a cluster has no members (its centroid is
    then left unchanged).
    """
    for _ in range(niter):
        # Label each observation with its nearest code under the current book.
        labels = vq(data, code)[0]
        # Move each code to the centroid of the observations it attracted.
        for idx in range(nc):
            selected = np.flatnonzero(labels == idx)
            if selected.size:
                code[idx] = data[selected].mean(axis=0)
            else:
                missing()
    return code, labels
# Ad-hoc manual test scaffolding for the C extension, intentionally disabled.
if __name__ == '__main__':
    pass
    #import _vq
    #a = np.random.randn(4, 2)
    #b = np.random.randn(2, 2)
    #print _vq.vq(a, b)
    #print _vq.vq(np.array([[1], [2], [3], [4], [5], [6.]]),
    #             np.array([[2.], [5.]]))
    #print _vq.vq(np.array([1, 2, 3, 4, 5, 6.]), np.array([2., 5.]))
    #_vq.vq(a.astype(np.float32), b.astype(np.float32))
    #_vq.vq(a, b.astype(np.float32))
    #_vq.vq([0], b)
|
jasonmccampbell/scipy-refactor
|
scipy/cluster/vq.py
|
Python
|
bsd-3-clause
| 25,832
|
[
"Gaussian"
] |
47074512896e115a0948316b2a7f69638164eae42a94bf82ef74adf361c84e0f
|
# Generated by Django 1.11.2 on 2017-06-22 10:22
import bitfield.models
import django.contrib.auth.models
import django.core.validators
import django.db.models.deletion
import django.utils.timezone
from django.conf import settings
from django.db import migrations, models
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from zerver.models import generate_email_token_for_stream
def migrate_existing_attachment_data(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    """Backfill `realm` and `is_realm_public` on every existing Attachment.

    `realm` is copied from the attachment's owner. `is_realm_public` becomes
    True if any message the owner sent with this attachment went to a
    non-invite-only stream outside a Zephyr-mirror realm.
    """
    Attachment = apps.get_model("zerver", "Attachment")
    Recipient = apps.get_model("zerver", "Recipient")
    Stream = apps.get_model("zerver", "Stream")

    for attachment in Attachment.objects.all():
        uploader = attachment.owner
        attachment.realm = uploader.realm
        for message in attachment.messages.all():
            # Only messages sent by the uploader to a stream can make the
            # attachment realm-public.
            if uploader != message.sender:
                continue
            if message.recipient.type != Recipient.STREAM:
                continue
            stream = Stream.objects.get(id=message.recipient.type_id)
            publicly_visible = (
                not stream.realm.is_zephyr_mirror_realm and not stream.invite_only
            )
            attachment.is_realm_public = attachment.is_realm_public or publicly_visible
        attachment.save()
class Migration(migrations.Migration):
initial = True
dependencies = [
("auth", "0001_initial"),
]
if settings.POSTGRESQL_MISSING_DICTIONARIES:
fts_sql = """
CREATE TEXT SEARCH CONFIGURATION zulip.english_us_search (COPY=pg_catalog.english);
"""
else:
fts_sql = """
CREATE TEXT SEARCH DICTIONARY english_us_hunspell
(template = ispell, DictFile = en_us, AffFile = en_us, StopWords = zulip_english);
CREATE TEXT SEARCH CONFIGURATION zulip.english_us_search (COPY=pg_catalog.english);
ALTER TEXT SEARCH CONFIGURATION zulip.english_us_search
ALTER MAPPING FOR asciiword, asciihword, hword_asciipart, word, hword, hword_part
WITH english_us_hunspell, english_stem;
"""
fts_sql += """
CREATE FUNCTION escape_html(text) RETURNS text IMMUTABLE LANGUAGE 'sql' AS $$
SELECT replace(replace(replace(replace(replace($1, '&', '&'), '<', '<'),
'>', '>'), '"', '"'), '''', ''');
$$ ;
ALTER TABLE zerver_message ADD COLUMN search_tsvector tsvector;
CREATE INDEX zerver_message_search_tsvector ON zerver_message USING gin(search_tsvector);
ALTER INDEX zerver_message_search_tsvector SET (fastupdate = OFF);
CREATE TABLE fts_update_log (id SERIAL PRIMARY KEY, message_id INTEGER NOT NULL);
CREATE FUNCTION do_notify_fts_update_log() RETURNS trigger LANGUAGE plpgsql AS
$$ BEGIN NOTIFY fts_update_log; RETURN NEW; END $$;
CREATE TRIGGER fts_update_log_notify AFTER INSERT ON fts_update_log
FOR EACH STATEMENT EXECUTE PROCEDURE do_notify_fts_update_log();
CREATE FUNCTION append_to_fts_update_log() RETURNS trigger LANGUAGE plpgsql AS
$$ BEGIN INSERT INTO fts_update_log (message_id) VALUES (NEW.id); RETURN NEW; END $$;
CREATE TRIGGER zerver_message_update_search_tsvector_async
BEFORE INSERT OR UPDATE OF subject, rendered_content ON zerver_message
FOR EACH ROW EXECUTE PROCEDURE append_to_fts_update_log();
"""
operations = [
migrations.CreateModel(
name="UserProfile",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("password", models.CharField(max_length=128, verbose_name="password")),
(
"last_login",
models.DateTimeField(
default=django.utils.timezone.now, verbose_name="last login"
),
),
(
"is_superuser",
models.BooleanField(
default=False,
help_text="Designates that this user has all permissions without explicitly assigning them.",
verbose_name="superuser status",
),
),
("email", models.EmailField(db_index=True, max_length=75, unique=True)),
("is_staff", models.BooleanField(default=False)),
("is_active", models.BooleanField(default=True)),
("is_bot", models.BooleanField(default=False)),
("date_joined", models.DateTimeField(default=django.utils.timezone.now)),
("is_mirror_dummy", models.BooleanField(default=False)),
("full_name", models.CharField(max_length=100)),
("short_name", models.CharField(max_length=100)),
("pointer", models.IntegerField()),
("last_pointer_updater", models.CharField(max_length=64)),
("api_key", models.CharField(max_length=32)),
("enable_stream_desktop_notifications", models.BooleanField(default=True)),
("enable_stream_sounds", models.BooleanField(default=True)),
("enable_desktop_notifications", models.BooleanField(default=True)),
("enable_sounds", models.BooleanField(default=True)),
("enable_offline_email_notifications", models.BooleanField(default=True)),
("enable_offline_push_notifications", models.BooleanField(default=True)),
("enable_digest_emails", models.BooleanField(default=True)),
("default_desktop_notifications", models.BooleanField(default=True)),
(
"last_reminder",
models.DateTimeField(default=django.utils.timezone.now, null=True),
),
("rate_limits", models.CharField(default="", max_length=100)),
("default_all_public_streams", models.BooleanField(default=False)),
("enter_sends", models.NullBooleanField(default=True)),
("autoscroll_forever", models.BooleanField(default=False)),
("twenty_four_hour_time", models.BooleanField(default=False)),
(
"avatar_source",
models.CharField(
choices=[
("G", "Hosted by Gravatar"),
("U", "Uploaded by user"),
("S", "System generated"),
],
default="G",
max_length=1,
),
),
(
"tutorial_status",
models.CharField(
choices=[("W", "Waiting"), ("S", "Started"), ("F", "Finished")],
default="W",
max_length=1,
),
),
("onboarding_steps", models.TextField(default="[]")),
("invites_granted", models.IntegerField(default=0)),
("invites_used", models.IntegerField(default=0)),
("alert_words", models.TextField(default="[]")),
("muted_topics", models.TextField(default="[]")),
(
"bot_owner",
models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.SET_NULL,
to=settings.AUTH_USER_MODEL,
),
),
],
options={
"abstract": False,
},
),
migrations.CreateModel(
name="Client",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("name", models.CharField(db_index=True, max_length=30, unique=True)),
],
),
migrations.CreateModel(
name="DefaultStream",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
],
),
migrations.CreateModel(
name="Huddle",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("huddle_hash", models.CharField(db_index=True, max_length=40, unique=True)),
],
),
migrations.CreateModel(
name="Message",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("subject", models.CharField(db_index=True, max_length=60)),
("content", models.TextField()),
("rendered_content", models.TextField(null=True)),
("rendered_content_version", models.IntegerField(null=True)),
("pub_date", models.DateTimeField(db_index=True, verbose_name="date published")),
("last_edit_time", models.DateTimeField(null=True)),
("edit_history", models.TextField(null=True)),
("has_attachment", models.BooleanField(db_index=True, default=False)),
("has_image", models.BooleanField(db_index=True, default=False)),
("has_link", models.BooleanField(db_index=True, default=False)),
],
),
migrations.CreateModel(
name="PreregistrationUser",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("email", models.EmailField(max_length=75)),
("invited_at", models.DateTimeField(auto_now=True)),
("status", models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name="PushDeviceToken",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("kind", models.PositiveSmallIntegerField(choices=[(1, "apns"), (2, "gcm")])),
("token", models.CharField(max_length=4096, unique=True)),
(
"last_updated",
models.DateTimeField(auto_now=True, default=django.utils.timezone.now),
),
("ios_app_id", models.TextField(null=True)),
(
"user",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
],
),
migrations.CreateModel(
name="Realm",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("domain", models.CharField(db_index=True, max_length=40, unique=True)),
("name", models.CharField(max_length=40, null=True)),
("restricted_to_domain", models.BooleanField(default=True)),
("invite_required", models.BooleanField(default=False)),
("invite_by_admins_only", models.BooleanField(default=False)),
("mandatory_topics", models.BooleanField(default=False)),
("show_digest_email", models.BooleanField(default=True)),
("name_changes_disabled", models.BooleanField(default=False)),
("date_created", models.DateTimeField(default=django.utils.timezone.now)),
("deactivated", models.BooleanField(default=False)),
],
options={
"permissions": (("administer", "Administer a realm"),),
},
),
migrations.CreateModel(
name="RealmAlias",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("domain", models.CharField(db_index=True, max_length=80, unique=True)),
(
"realm",
models.ForeignKey(
null=True, on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
),
),
],
),
migrations.CreateModel(
name="RealmEmoji",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("name", models.TextField()),
("img_url", models.TextField()),
(
"realm",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
),
),
],
),
migrations.CreateModel(
name="RealmFilter",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("pattern", models.TextField()),
("url_format_string", models.TextField()),
(
"realm",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
),
),
],
),
migrations.CreateModel(
name="Recipient",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("type_id", models.IntegerField(db_index=True)),
("type", models.PositiveSmallIntegerField(db_index=True)),
],
),
migrations.CreateModel(
name="Referral",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("email", models.EmailField(max_length=75)),
("timestamp", models.DateTimeField(auto_now_add=True)),
(
"user_profile",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
],
),
migrations.CreateModel(
name="ScheduledJob",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("scheduled_timestamp", models.DateTimeField()),
("type", models.PositiveSmallIntegerField()),
("data", models.TextField()),
("filter_id", models.IntegerField(null=True)),
("filter_string", models.CharField(max_length=100)),
],
),
migrations.CreateModel(
name="Stream",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("name", models.CharField(db_index=True, max_length=60)),
("invite_only", models.NullBooleanField(default=False)),
(
"email_token",
models.CharField(default=generate_email_token_for_stream, max_length=32),
),
("description", models.CharField(default="", max_length=1024)),
("date_created", models.DateTimeField(default=django.utils.timezone.now)),
("deactivated", models.BooleanField(default=False)),
(
"realm",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
),
),
],
),
migrations.CreateModel(
name="Subscription",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("active", models.BooleanField(default=True)),
("in_home_view", models.NullBooleanField(default=True)),
("color", models.CharField(default="#c2c2c2", max_length=10)),
("desktop_notifications", models.BooleanField(default=True)),
("audible_notifications", models.BooleanField(default=True)),
("notifications", models.BooleanField(default=False)),
(
"recipient",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Recipient"
),
),
(
"user_profile",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
],
),
migrations.CreateModel(
name="UserActivity",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("query", models.CharField(db_index=True, max_length=50)),
("count", models.IntegerField()),
("last_visit", models.DateTimeField(verbose_name="last visit")),
(
"client",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Client"
),
),
(
"user_profile",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
],
),
migrations.CreateModel(
name="UserActivityInterval",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("start", models.DateTimeField(db_index=True, verbose_name="start time")),
("end", models.DateTimeField(db_index=True, verbose_name="end time")),
(
"user_profile",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
],
),
migrations.CreateModel(
name="UserMessage",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
(
"flags",
bitfield.models.BitField(
[
"read",
"starred",
"collapsed",
"mentioned",
"wildcard_mentioned",
"summarize_in_home",
"summarize_in_stream",
"force_expand",
"force_collapse",
"has_alert_word",
"historical",
"is_me_message",
],
default=0,
),
),
(
"message",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Message"
),
),
(
"user_profile",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
],
),
migrations.CreateModel(
name="UserPresence",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("timestamp", models.DateTimeField(verbose_name="presence changed")),
("status", models.PositiveSmallIntegerField(default=1)),
(
"client",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Client"
),
),
(
"user_profile",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
],
),
migrations.AlterUniqueTogether(
name="userpresence",
unique_together={("user_profile", "client")},
),
migrations.AlterUniqueTogether(
name="usermessage",
unique_together={("user_profile", "message")},
),
migrations.AlterUniqueTogether(
name="useractivity",
unique_together={("user_profile", "client", "query")},
),
migrations.AlterUniqueTogether(
name="subscription",
unique_together={("user_profile", "recipient")},
),
migrations.AlterUniqueTogether(
name="stream",
unique_together={("name", "realm")},
),
migrations.AlterUniqueTogether(
name="recipient",
unique_together={("type", "type_id")},
),
migrations.AlterUniqueTogether(
name="realmfilter",
unique_together={("realm", "pattern")},
),
migrations.AlterUniqueTogether(
name="realmemoji",
unique_together={("realm", "name")},
),
migrations.AddField(
model_name="realm",
name="notifications_stream",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="+",
to="zerver.Stream",
),
),
migrations.AddField(
model_name="preregistrationuser",
name="realm",
field=models.ForeignKey(
null=True, on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"
),
),
migrations.AddField(
model_name="preregistrationuser",
name="referred_by",
field=models.ForeignKey(
null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
migrations.AddField(
model_name="preregistrationuser",
name="streams",
field=models.ManyToManyField(null=True, to="zerver.Stream"),
),
migrations.AddField(
model_name="message",
name="recipient",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Recipient"
),
),
migrations.AddField(
model_name="message",
name="sender",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
migrations.AddField(
model_name="message",
name="sending_client",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Client"
),
),
migrations.AddField(
model_name="defaultstream",
name="realm",
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"),
),
migrations.AddField(
model_name="defaultstream",
name="stream",
field=models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to="zerver.Stream"
),
),
migrations.AlterUniqueTogether(
name="defaultstream",
unique_together={("realm", "stream")},
),
migrations.AddField(
model_name="userprofile",
name="default_events_register_stream",
field=models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="+",
to="zerver.Stream",
),
),
migrations.AddField(
model_name="userprofile",
name="default_sending_stream",
field=models.ForeignKey(
null=True,
on_delete=django.db.models.deletion.CASCADE,
related_name="+",
to="zerver.Stream",
),
),
migrations.AddField(
model_name="userprofile",
name="groups",
field=models.ManyToManyField(
blank=True,
help_text="The groups this user belongs to. A user will get all permissions granted to each of their groups.",
related_name="user_set",
related_query_name="user",
to="auth.Group",
verbose_name="groups",
),
),
migrations.AddField(
model_name="userprofile",
name="realm",
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to="zerver.Realm"),
),
migrations.AddField(
model_name="userprofile",
name="user_permissions",
field=models.ManyToManyField(
blank=True,
help_text="Specific permissions for this user.",
related_name="user_set",
related_query_name="user",
to="auth.Permission",
verbose_name="user permissions",
),
),
migrations.RunSQL(
sql=fts_sql,
),
migrations.AlterModelManagers(
name="userprofile",
managers=[
("objects", django.contrib.auth.models.UserManager()),
],
),
migrations.AlterField(
model_name="preregistrationuser",
name="email",
field=models.EmailField(max_length=254),
),
migrations.AlterField(
model_name="preregistrationuser",
name="streams",
field=models.ManyToManyField(to="zerver.Stream"),
),
migrations.AlterField(
model_name="pushdevicetoken",
name="last_updated",
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name="referral",
name="email",
field=models.EmailField(max_length=254),
),
migrations.AlterField(
model_name="userprofile",
name="email",
field=models.EmailField(db_index=True, max_length=254, unique=True),
),
migrations.AlterField(
model_name="userprofile",
name="last_login",
field=models.DateTimeField(blank=True, null=True, verbose_name="last login"),
),
migrations.RunSQL(
sql="CREATE INDEX upper_subject_idx ON zerver_message ((upper(subject)));",
reverse_sql="DROP INDEX upper_subject_idx;",
),
migrations.RunSQL(
sql="CREATE INDEX upper_stream_name_idx ON zerver_stream ((upper(name)));",
reverse_sql="DROP INDEX upper_stream_name_idx;",
),
migrations.AddField(
model_name="userprofile",
name="left_side_userlist",
field=models.BooleanField(default=False),
),
migrations.AlterModelOptions(
name="realm",
options={
"permissions": (
("administer", "Administer a realm"),
("api_super_user", "Can send messages as other users for mirroring"),
)
},
),
migrations.RunSQL(
sql="CREATE INDEX upper_userprofile_email_idx ON zerver_userprofile ((upper(email)));",
reverse_sql="DROP INDEX upper_userprofile_email_idx;",
),
migrations.AlterField(
model_name="userprofile",
name="is_active",
field=models.BooleanField(db_index=True, default=True),
),
migrations.AlterField(
model_name="userprofile",
name="is_bot",
field=models.BooleanField(db_index=True, default=False),
),
migrations.RunSQL(
sql="CREATE INDEX upper_preregistration_email_idx ON zerver_preregistrationuser ((upper(email)));",
reverse_sql="DROP INDEX upper_preregistration_email_idx;",
),
migrations.AlterField(
model_name="userprofile",
name="enable_stream_desktop_notifications",
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name="userprofile",
name="enable_stream_sounds",
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name="userprofile",
name="is_api_super_user",
field=models.BooleanField(db_index=True, default=False),
),
migrations.AddField(
model_name="userprofile",
name="is_realm_admin",
field=models.BooleanField(db_index=True, default=False),
),
migrations.AlterField(
model_name="realmemoji",
name="img_url",
field=models.URLField(),
),
migrations.AlterField(
model_name="realmemoji",
name="name",
field=models.TextField(
validators=[
django.core.validators.MinLengthValidator(1),
django.core.validators.RegexValidator(regex="^[0-9a-zA-Z.\\-_]+(?<![.\\-_])$"),
]
),
),
migrations.AlterField(
model_name="realmemoji",
name="img_url",
field=models.URLField(max_length=1000),
),
migrations.CreateModel(
name="Attachment",
fields=[
(
"id",
models.AutoField(
auto_created=True, primary_key=True, serialize=False, verbose_name="ID"
),
),
("file_name", models.CharField(db_index=True, max_length=100)),
("path_id", models.TextField(db_index=True)),
(
"create_time",
models.DateTimeField(db_index=True, default=django.utils.timezone.now),
),
("messages", models.ManyToManyField(to="zerver.Message")),
(
"owner",
models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL
),
),
("is_realm_public", models.BooleanField(default=False)),
],
),
migrations.AddField(
model_name="realm",
name="create_stream_by_admins_only",
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name="userprofile",
name="bot_type",
field=models.PositiveSmallIntegerField(db_index=True, null=True),
),
migrations.AlterField(
model_name="realmemoji",
name="name",
field=models.TextField(
validators=[
django.core.validators.MinLengthValidator(1),
django.core.validators.RegexValidator(
message="Invalid characters in emoji name",
regex="^[0-9a-zA-Z.\\-_]+(?<![.\\-_])$",
),
]
),
),
migrations.AddField(
model_name="preregistrationuser",
name="realm_creation",
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name="attachment",
name="realm",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.CASCADE,
to="zerver.Realm",
),
),
migrations.RunPython(
code=migrate_existing_attachment_data,
elidable=True,
),
migrations.AddField(
model_name="subscription",
name="pin_to_top",
field=models.BooleanField(default=False),
),
migrations.AddField(
model_name="userprofile",
name="default_language",
field=models.CharField(default="en", max_length=50),
),
migrations.AddField(
model_name="realm",
name="allow_message_editing",
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name="realm",
name="message_content_edit_limit_seconds",
field=models.IntegerField(default=600),
),
migrations.AddField(
model_name="realm",
name="default_language",
field=models.CharField(default="en", max_length=50),
),
migrations.AddField(
model_name="userprofile",
name="tos_version",
field=models.CharField(max_length=10, null=True),
),
]
|
eeshangarg/zulip
|
zerver/migrations/0001_initial.py
|
Python
|
apache-2.0
| 35,999
|
[
"VisIt"
] |
de18a337a304f8b2151e3c880314e943d0a10894505d0e537a6912deafa48821
|
"""//////////////////////////////////////////////////////////////////////////////
// //
// Copyright © 2016 Juan P. Dominguez-Morales //
// //
// This file is part of Multilayer Spiking Neural Network for audio //
// samples classification using SpiNNaker. //
// //
// This code is free software: you can redistribute it and/or modify //
// it under the terms of the GNU General Public License as published by //
// the Free Software Foundation, either version 3 of the License, or //
// (at your option) any later version. //
// //
// The code of Multilayer Spiking Neural Network for audio samples //
// classification using SpiNNaker is distributed in the hope that it will //
// be useful, but WITHOUT ANY WARRANTY; without even the implied warranty //
// of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.See the GNU //
// General Public License for more details. //
// //
// You should have received a copy of the GNU General Public License //
// along with NAVIS Tool. If not, see<http://www.gnu.org/licenses/> . //
// //
//////////////////////////////////////////////////////////////////////////////"""
import pyNN.spiNNaker as p
import sys
import struct
import numpy as np
import csv
# Initialise the PyNN/SpiNNaker backend: 1 ms resolution, delays 1-16 ms.
p.setup(timestep=1.0, min_delay=1.0, max_delay=16.0)

# LIF cell parameters for the first output layer.
cell_params_lif = {
    'cm': 0.25,  # nF
    'i_offset': 0.0,
    'tau_m': 20.0,
    'tau_refrac': 2.0,
    'tau_syn_E': 5.0,
    'tau_syn_I': 5.0,
    'v_reset': -70.0,
    'v_rest': -65.0,
    'v_thresh': -50.0,
}

# Same LIF parameters, but a lower firing threshold for the second layer.
cell_params_output = {
    'cm': 0.25,  # nF
    'i_offset': 0.0,
    'tau_m': 20.0,
    'tau_refrac': 2.0,
    'tau_syn_E': 5.0,
    'tau_syn_I': 5.0,
    'v_reset': -70.0,
    'v_rest': -65.0,
    'v_thresh': -55.0,
}

weight_to_spike = 1.0
delay = 0

# Bookkeeping updated by loadAedat32 while parsing the recording.
aedatLength = 0
maxTimestamp = 0
minTimestamp = sys.maxsize

# Pure-tone AEDAT recordings; sys.argv[1] selects one by index.
files = [
    "tones\\130.aedat",
    "tones\\174.aedat",
    "tones\\261.aedat",
    "tones\\349.aedat",
    "tones\\523.aedat",
    "tones\\698.aedat",
    "tones\\1046.aedat",
    "tones\\1396.aedat",
]

endTime = 0
timestampBeforeSecondFile = 0

# Synaptic weights loaded from the CSV files for each projection.
wList = []
wList_2ndLayer = []
def createSpikeSourceArray16(aedat):
    """Convert a decoded AEDAT event list into per-neuron spike-time lists.

    :param aedat: sequence of [address, timestamp] rows (as produced by
        loadAedat32); the neuron index is address / 2.
    :return: list of 128 lists, one per possible neuron index, each holding
        that neuron's spike timestamps in file order.
    """
    # The original branched on the parity of the computed index but executed
    # the exact same append in both branches, so the dead branch is removed.
    listaResultado = [[] for i in range(128)]
    for row in aedat:
        listaResultado[int(row[0] / 2)].append(row[1])
    return listaResultado
def loadAedat32(path):
    """Read a 32-bit AEDAT recording and return a list of event rows.

    Each row is a numpy array [address, timestamp] with the timestamp in
    milliseconds relative to the first event.  As side effects the module
    globals minTimestamp, maxTimestamp, aedatLength and endTime are updated
    so the caller can size the simulation run.

    NOTE(review): the EOF test (lec == "") only works under Python 2, where
    read() on a binary file returns str; under Python 3 it returns bytes and
    the loop would only terminate via a struct.error.
    """
    f = open(path, "rb")
    # pos only tracks how many bits have been consumed; it is never read back
    pos = 0
    evt = 0        # event address being assembled
    timestamp = 0  # event timestamp being assembled
    rowID = 0      # number of events decoded so far
    aedatFile = []
    global minTimestamp
    global maxTimestamp
    global aedatLength
    global endTime
    try:
        while True:
            # ---- first 4 bytes of the record: event address ----
            lec = f.read(1)
            if lec == "":
                break
            evt = struct.unpack("<B", lec)
            pos += 8
            evt = evt[0] << 8
            evt = evt | struct.unpack("<B", f.read(1))[0]
            pos += 8
            # NOTE(review): this assignment discards the value assembled from
            # the first two bytes; combined with the 0xFF mask below, only the
            # fourth byte survives as the address.  Confirm this is intended
            # for the AEDAT variant being read.
            evt = struct.unpack("<B", f.read(1))[0]
            pos += 8
            evt <<= 8
            evt = evt | struct.unpack("<B", f.read(1))[0]
            evt &= 0x000000FF
            pos += 8
            # ---- next 4 bytes, big-endian: timestamp in microseconds ----
            timestamp = f.read(1)
            timestamp = struct.unpack("<B", timestamp)
            pos += 8
            timestamp = timestamp[0] << 8
            timestamp = timestamp | struct.unpack("<B", f.read(1))[0]
            pos += 8
            timestamp <<= 8
            timestamp = timestamp | struct.unpack("<B", f.read(1))[0]
            pos += 8
            timestamp <<= 8
            # microseconds -> milliseconds (integer division under Python 2)
            timestamp = (timestamp | struct.unpack("<B", f.read(1))[0])/1000
            pos += 8
            if maxTimestamp < timestamp:
                maxTimestamp = timestamp
            # the first event defines the time origin for the whole file
            if rowID == 0:
                minTimestamp = timestamp
            row = np.array([evt, timestamp - minTimestamp])
            aedatFile.append(row)
            rowID += 1
        # Shift the recorded extremes to the same origin as the rows and
        # publish the totals through the module globals.
        maxTimestamp = maxTimestamp - minTimestamp
        minTimestamp = 0
        endTime = maxTimestamp
        aedatLength = rowID
    finally:
        f.close()
    return aedatFile
def loadWeights(path):
    """Append the first column of every row of the CSV at *path* to the
    module-level wList (first-layer synaptic weights).

    NOTE: the file is opened in 'rb' mode, the Python 2 convention for the
    csv module.
    """
    global wList
    with open(path, 'rb') as weight_file:
        for record in csv.reader(weight_file, delimiter=','):
            wList.append(record[0])
def loadWeights_2ndLayer(path):
    """Append the first column of every row of the CSV at *path* to the
    module-level wList_2ndLayer (second-layer synaptic weights).

    NOTE: the file is opened in 'rb' mode, the Python 2 convention for the
    csv module.
    """
    global wList_2ndLayer
    with open(path, 'rb') as weight_file:
        for record in csv.reader(weight_file, delimiter=','):
            wList_2ndLayer.append(record[0])
# First-layer weights: one CSV value per (input, output) pair.
loadWeights("synapticWeights.csv")
ssa_times = None
# Spike source: per-neuron spike-time lists decoded from the selected tone.
ssa_times = {'spike_times': createSpikeSourceArray16(loadAedat32(files[int(sys.argv[1])]))}
ssa = p.Population(64, p.SpikeSourceArray, ssa_times, label='ssa')
lif_output = p.Population(8, p.IF_curr_exp, cell_params_lif, label='lif_output')
# Build the (input -> layer 1) connection list from the CSV weights.
weighted_connections = []
cont = 0
esPos = 0  # NOTE(review): esPos is never used afterwards
# NOTE(review): range(0, 63) covers inputs 0..62 only, so neuron 63 of the
# 64-neuron source population is never connected -- confirm whether the
# intended bound was 64.  Also, only 8 weights are consumed per input while
# cont advances by 16, implying 16 stored values per input row in the CSV --
# verify against the file layout.
for i in range (0,63):
    for x in range(8):
        weighted_connections.append((i, x, float(float(wList[x + cont])), delay))
    cont += 16
lif_to_lif_output_proj = p.Projection(ssa, lif_output, p.FromListConnector(weighted_connections), target="excitatory")
""" SECOND LAYER"""
second_lif_layer = p.Population(8, p.IF_curr_exp, cell_params_output, label='second_lif_layer')
loadWeights_2ndLayer("synapticWeights_2ndLayer.csv")
# 8x8 fully-connected (layer 1 -> layer 2) weight list.
weighted_connections_2ndLayer = []
cont = 0
for i in range (8):
    for x in range(8):
        weighted_connections_2ndLayer.append((x, i, float(wList_2ndLayer[x + cont]), delay))
    cont += 8
lif_1_to_lif_2_proj = p.Projection(lif_output, second_lif_layer, p.FromListConnector(weighted_connections_2ndLayer), target="excitatory")
# Record spikes from both layers, run the simulation, collect results.
lif_output.record()
second_lif_layer.record()
p.run(endTime*2)
spikes_output_def = lif_output.getSpikes()
spikes_output_def_2nd = second_lif_layer.getSpikes()
weights = lif_to_lif_output_proj.getWeights()
# Tally spikes per output neuron for both layers.  Each record returned by
# getSpikes() is (neuron_id, spike_time); only ids exactly equal to 0..7 are
# binned, but every record is counted in the totals -- this matches the
# original code, which used two copy-pasted 8-branch elif ladders.
neuronFirings = [0 for i in range(8)]
neuronTotal = 0
neuronFirings_2nd = [0 for i in range(8)]
neuronTotal_2nd = 0
for x in spikes_output_def:
    neuronTotal += 1
    nid = x[0]
    # exact-equality guard mirrors the original comparisons against 0..7
    if nid == int(nid) and 0 <= nid <= 7:
        neuronFirings[int(nid)] += 1
for x in spikes_output_def_2nd:
    neuronTotal_2nd += 1
    nid = x[0]
    if nid == int(nid) and 0 <= nid <= 7:
        neuronFirings_2nd[int(nid)] += 1
print "\n\nSPIKES FIRED FOR EACH OUTPUT NEURON - LAYER 1. FILE No. "+ str(sys.argv[1])
print "Neuron0:", neuronFirings[0]
print "Neuron1:", neuronFirings[1]
print "Neuron2:", neuronFirings[2]
print "Neuron3:", neuronFirings[3]
print "Neuron4:", neuronFirings[4]
print "Neuron5:", neuronFirings[5]
print "Neuron6:", neuronFirings[6]
print "Neuron7:", neuronFirings[7]
print "NeuronTotal", neuronTotal
print "\n\nSPIKES FIRED FOR EACH OUTPUT NEURON - LAYER 2. FILE No. "+ str(sys.argv[1])
print "Neuron0:", neuronFirings_2nd[0]
print "Neuron1:", neuronFirings_2nd[1]
print "Neuron2:", neuronFirings_2nd[2]
print "Neuron3:", neuronFirings_2nd[3]
print "Neuron4:", neuronFirings_2nd[4]
print "Neuron5:", neuronFirings_2nd[5]
print "Neuron6:", neuronFirings_2nd[6]
print "Neuron7:", neuronFirings_2nd[7]
print "NeuronTotal", neuronTotal_2nd
|
jpdominguez/Multilayer-SNN-for-audio-samples-classification-using-SpiNNaker
|
Main_SNN_Classifier.py
|
Python
|
gpl-3.0
| 8,611
|
[
"NEURON"
] |
e8a04b0cedfd4e89e14714e563bf23455d989629981669a40823e1ef8635c756
|
#!/usr/bin/env python3
# ver 0.1 - copy from rdf_itf.py (v0.1) and modify codes on 2/3/2018
import argparse
# Command-line interface; defaults follow GROMACS naming conventions
# (traj.trr trajectory, topol.tpr topology).
parser = argparse.ArgumentParser(
    formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    description='calculation end-to-end distance of molecules you select')
## args
parser.add_argument('-i', '--input', default='traj.trr', nargs='?',
    help='input trajectory file')
parser.add_argument('-s', '--structure', default='topol.tpr', nargs='?',
    help='.tpr structure file')
# select1/select2 name files containing MDAnalysis selection strings for the
# two chain ends (they are read with open().read() below)
parser.add_argument('-select1', '--select1', nargs='?',
    help='selection of end1 of each molecule')
parser.add_argument('-select2', '--select2', nargs='?',
    help='selection of end2 of each molecule')
parser.add_argument('-nmol', '--nmol', nargs='?', type=int,
    help='# molecules')
parser.add_argument('-cutoff', '--cutoff', default=0.0, nargs='?', type=float,
    help='cut-off checking distance between atoms in a molecule (d_cutoff < d_neighbor_atoms: stop)')
parser.add_argument('-b', '--begin', default=-1, nargs='?', type=int,
    help='begining frame (-1: last half trajectory)')
parser.add_argument('-o', '--output', default='pol', nargs='?',
    help='output prefix filename for Ree files (.ree)')
# Swallow any leftover positional arguments instead of erroring out
parser.add_argument('args', nargs=argparse.REMAINDER)
parser.add_argument('-v', '--version', action='version', version='%(prog)s 0.1')
## read args
args = parser.parse_args()
## Check arguments for log
print(" input arguments: {0}".format(args))
## import modules
# The sys.path extension makes the author's private 'hjung' helper package
# importable; imports are deliberately placed after argument parsing so that
# --help stays fast.
import sys
sys.path.append('/home/htjung/Utility/python/')
import hjung
from hjung import *
import MDAnalysis as mda
from MDAnalysis.analysis import distances
import numpy as np
# All output files share the '.ree' suffix appended to the -o prefix
args.output = args.output + '.ree'
## timer
# Reference timestamps consumed by hjung.time.end_print at the end of the run
start_proc, start_prof = hjung.time.init()
## read files
# Build the Universe from topology + trajectory and decide how many leading
# frames to skip: --begin -1 means "analyze only the last half".
u = mda.Universe(args.structure,args.input)
n_frames = len(u.trajectory)
if args.begin == -1:
    skip_frames = int(n_frames/2)
    print(" skip {} frames".format(skip_frames))
else:
    skip_frames = args.begin
    if args.begin >= n_frames:
        raise ValueError("wrong args.begin because of > n_frames")
n_frames = n_frames - skip_frames
#print("goal n_frames = {}".format(n_frames))
# Read the two atom-selection strings.  Context managers close the handles
# deterministically (the original leaked them via bare open().read()).
with open(args.select1) as sel_file:
    atomtxt1 = sel_file.read()
with open(args.select2) as sel_file:
    atomtxt2 = sel_file.read()
print(" your selection1: {}".format(atomtxt1))
print(" your selection2: {}".format(atomtxt2))
#hjung.polymer.check_traj_connectivity(u,str(atomtxt),args.nmol,args.cutoff,'simple')
## data setting
# Per-frame end-to-end distances (n_frames x nmol) and vectors (x nmol x 3).
data_ree = np.zeros((n_frames,args.nmol))
data_ree_vec = np.zeros((n_frames,args.nmol,3))
select_mol1 = u.select_atoms(str(atomtxt1))
select_mol2 = u.select_atoms(str(atomtxt2))
# Sanity check: each selection must divide evenly among the nmol molecules.
if len(select_mol1)%args.nmol != 0:
    raise ValueError("wrong # molecules, (args.nmol, select_mol) {} {} ".format(args.nmol, len(select_mol1)))
if len(select_mol2)%args.nmol != 0:
    raise ValueError("wrong # molecules, (args.nmol, select_mol) {} {} ".format(args.nmol, len(select_mol2)))
## read trajectory
# Walk the trajectory after the skipped frames, recording the per-molecule
# end-to-end distance and the corresponding end-to-end vector per frame.
imod = hjung.time.process_init()
i_frame = 0
for ts in u.trajectory[skip_frames:]:
    data_ree[i_frame] = distances.dist(select_mol1,select_mol2)[2]
    data_ree_vec[i_frame] = select_mol2.positions - select_mol1.positions
    i_frame += 1
    imod = hjung.time.process_print(i_frame, n_frames, imod)
print("read total {} frames".format(i_frame))
# save raw Ree data: text file with a summary header plus a binary .npy copy
np.savetxt(args.output, data_ree,
    header='Ree (mean = {} +- {} with {} frames'.format(np.mean(data_ree),np.std(data_ree),n_frames), fmt='%f', comments='# ')
np.save(args.output, data_ree)
print("Ree = {:.3f} +- {:.3f}".format(np.mean(data_ree),np.std(data_ree)))
print(" saved ree files")
# ree.vec: flattened (frame, molecule, xyz) end-to-end vectors
np.save(args.output+str('.vec'), data_ree_vec.flatten())
# save per-molecule time-averaged Ree as (mean, std) columns
data_ree_tavg = np.column_stack((np.mean(data_ree, axis=0),np.std(data_ree, axis=0)))
np.savetxt(args.output+'.tavg', data_ree_tavg,
    header='averaged Ree for each molecule with {} frames'.format(n_frames), fmt='%f', comments='# ')
print(" saved average Ree files")
## timer
hjung.time.end_print(start_proc, start_prof)
|
jht0664/Utility_python_gromacs
|
python/ree.py
|
Python
|
mit
| 4,168
|
[
"MDAnalysis"
] |
242b2378f88f070d1711958f3a2154ee8758f5992b962544780d91ad06a01ad0
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
## \package pts.modeling.preparation.datapreparation Contains the DataPreparer class
# -----------------------------------------------------------------
# Ensure Python 3 compatibility
from __future__ import absolute_import, division, print_function
# Import the relevant PTS classes and modules
from ...magic.core.image import Image
from ...magic.core.frame import Frame
from ...magic.basics.region import Region
from .component import PreparationComponent
from ...magic.prepare.preparer import ImagePreparer
from ...core.tools import filesystem as fs
from ...core.tools.logging import log
from ...magic.tools import regions
from ...magic.misc.kernels import AnianoKernels, aniano_names
from ...magic.misc.calibration import CalibrationError
from ...magic.misc.extinction import GalacticExtinction
from ...core.basics.filter import Filter
# -----------------------------------------------------------------
class DataPreparer(PreparationComponent):
"""
This class ...
"""
# -----------------------------------------------------------------
    def __init__(self, config=None):
        """
        The constructor.
        :param config: optional configuration object passed on to the
            PreparationComponent base class
        :return:
        """
        # Call the constructor of the base class
        super(DataPreparer, self).__init__(config)
        # -- Attributes --
        # The paths to the initialized images that still need preparation
        # (filled by check_images)
        self.paths = []
        # Galactic attenuation per filter name (filled by get_attenuations)
        self.attenuations = dict()
        # The FWHM of the reference image (set by setup)
        self.reference_fwhm = None
        # The coordinate of the center of the galaxy (set by setup)
        self.center_coordinate = None
        # The Aniano kernels service (created in setup)
        self.aniano = None
# -----------------------------------------------------------------
@classmethod
def from_arguments(cls, arguments):
"""
This function ...
:param arguments:
:return:
"""
# Create a new DataPreparer instance
preparer = cls(arguments.config)
# Whether to write the results of intermediate steps
preparer.config.preparation.write_steps = arguments.steps
# Set the reference image
if arguments.reference is not None: preparer.reference_image = arguments.reference
# Set the modeling path
preparer.config.path = arguments.path
# A single image can be specified so the preparation is only run with that image
preparer.config.single_image = arguments.image
# Make visualisations
preparer.config.visualise = arguments.visualise
# Return the new instance
return preparer
# -----------------------------------------------------------------
def run(self):
"""
This function runs the data preparation ...
:return:
"""
# 1. Call the setup function
self.setup()
# 2. Check which images can be prepared
self.check_images()
# If all images have already been prepared, break
if len(self.paths) == 0:
log.success("All images are already prepared")
return
# 3. Get attenuations
self.get_attenuations()
# 4. Prepare the images
self.prepare_images()
# -----------------------------------------------------------------
    def setup(self):
        """
        Set up the preparer: create the ImagePreparer child, run the base
        class setup, and derive the properties shared by all target images
        (rebinning target, reference FWHM, galaxy center, Aniano kernels).
        :return:
        """
        # -- Children --
        # Create the preparation object
        self.add_child("image_preparer", ImagePreparer, self.config.preparation)
        # -- Setup of the base class --
        # Call the setup function of the base class
        super(DataPreparer, self).setup()
        # -- Fixed properties for the image preparer (valid for all target images)
        # Set the path to the reference image for the rebinning
        reference_path = fs.join(self.prep_paths[self.config.reference_image], "initialized.fits")
        # Set the path of the rebinning reference path and the kernel image
        self.image_preparer.config.rebinning.rebin_to = reference_path
        # Get the FWHM of the reference image
        reference_frame = Frame.from_file(reference_path)
        self.reference_fwhm = reference_frame.fwhm
        # Get the center coordinate of the galaxy
        # NOTE(review): coordinate_range[0] is assumed to be the center of the
        # frame's sky coverage -- confirm against Frame.coordinate_range
        self.center_coordinate = reference_frame.coordinate_range[0]
        # Create the Aniano kernels service
        self.aniano = AnianoKernels()
# -----------------------------------------------------------------
def check_images(self):
"""
This function ...
:return:
"""
# Inform the user
log.info("Checking the initialized images ...")
# Loop over all subdirectories of the preparation directory
for path in fs.directories_in_path(self.prep_path):
# Debugging
log.debug("Opening " + path + " ...")
# Look if an initialized image file is present
image_path = fs.join(path, "initialized.fits")
if not fs.is_file(image_path):
log.warning("Initialized image could not be found for " + path)
continue
# Look if the 'sources' directory is present
sources_path = fs.join(path, "sources")
if not fs.is_directory(sources_path):
log.warning("Sources directory could not be found for " + path)
continue
# Check if a prepared image is already present
result_path = fs.join(path, "result.fits")
if fs.is_file(result_path): continue
# Add the path to the initialized image to the list
self.paths.append(image_path)
# -----------------------------------------------------------------
def get_attenuations(self):
"""
This function ...
:return:
"""
# Inform the user
log.info("Getting the galactic extinction values for the different images ...")
# Create the galactic extinction calculator
extinction = GalacticExtinction(self.center_coordinate)
# Loop over all image paths
for image_path in self.paths:
# Get the filter name
filter_name = fs.name(fs.directory_of(image_path))
# Create a filter instance
fltr = Filter.from_string(filter_name)
# Get the exintinction
self.attenuations[filter_name] = extinction.extinction_for_filter(fltr)
# -----------------------------------------------------------------
def prepare_images(self):
"""
This function ...
:return:
"""
# Inform the user
log.info("Preparing the images ...")
# Loop over the image paths
for image_path in self.paths:
# Get the directory containing this image = the output path for that image
output_path = fs.directory_of(image_path)
# Get the directory containing the output from the SourceFinder
sources_path = fs.join(output_path, "sources")
# Get the name
name = fs.name(output_path)
# Inform the user
log.info("Starting preparation of " + name + " image ...")
# -----------------------------------------------------------------
# Load the initialized image
image = Image.from_file(image_path)
image.name = name
# Load the output of the source finder
galaxy_region, star_region, saturation_region, other_region, galaxy_segments, star_segments, other_segments = load_sources(sources_path)
# -----------------------------------------------------------------
# Reset all flags to True
self.enable_all_preparation_steps()
# Set options for the ImagePreparation class
self.set_preparation_options(image, output_path)
# Check if the intermediate results have already been produced for this image and saved to the
# corresponding preparation subdirectory
extracted_path = fs.join(output_path, "extracted.fits")
corrected_path = fs.join(output_path, "corrected_for_extinction.fits")
converted_path = fs.join(output_path, "converted_unit.fits")
convolved_path = fs.join(output_path, "convolved.fits")
rebinned_path = fs.join(output_path, "rebinned.fits")
subtracted_path = fs.join(output_path, "sky_subtracted.fits")
## CURRENT ORDER OF STEPS IN IMAGEPREPARER:
# 1. Setup
# 2. Extract stars and galaxies from the image
# extract_sources
# 3. If requested, calculate the poisson noise
# calculate_poisson_noise
# 4. If requested, correct for galactic extinction
# correct_for_extinction
# 5. If requested, convert the unit
# convert_unit
# 6. If requested, convolve
# convolve
# 7. If requested, rebin
# rebin
# 8. If requested, subtract the sky
# subtract_sky
# 9. Calculate the calibration uncertainties
# calculate_calibration_uncertainties
# 10. If requested, set the uncertainties
# set_uncertainties
##
# Check if the sky-subtracted image is present
if fs.is_file(subtracted_path):
# Disable all steps preceeding and including the sky subtraction
self.image_preparer.config.extract_sources = False
self.image_preparer.config.calculate_poisson_noise = False
self.image_preparer.config.correct_for_extinction = False
self.image_preparer.config.convert_unit = False
self.image_preparer.config.convolve = False
self.image_preparer.config.rebin = False
self.image_preparer.config.subtract_sky = False
# Set the principal ellipse and saturation region in sky coordinates
self.image_preparer.principal_ellipse_sky = regions.largest_ellipse(galaxy_region).to_sky(self.image.wcs)
self.image_preparer.saturation_region_sky = saturation_region.to_sky(self.image.wcs) if saturation_region is not None else None
# Load the sky-subtracted image
image = Image.from_file(subtracted_path)
image.name = name
# Check if the rebinned image is present
elif fs.is_file(rebinned_path):
# Disable all steps preceeding and including the rebinning
self.image_preparer.config.extract_sources = False
self.image_preparer.config.calculate_poisson_noise = False
self.image_preparer.config.correct_for_extinction = False
self.image_preparer.config.convert_unit = False
self.image_preparer.config.convolve = False
self.image_preparer.config.rebin = False
# Set the principal ellipse and saturation region in sky coordinates
self.image_preparer.principal_ellipse_sky = regions.largest_ellipse(galaxy_region).to_sky(image.wcs)
self.image_preparer.saturation_region_sky = saturation_region.to_sky(image.wcs) if saturation_region is not None else None
# Load the rebinned image
image = Image.from_file(rebinned_path)
image.name = name
# Check if the convolved image is present
elif fs.is_file(convolved_path):
# Disable all steps preceeding and including the convolution
self.image_preparer.config.extract_sources = False
self.image_preparer.config.calculate_poisson_noise = False
self.image_preparer.config.correct_for_extinction = False
self.image_preparer.config.convert_unit = False
self.image_preparer.config.convolve = False
# Set the principal ellipse and saturation region in sky coordinates
self.image_preparer.principal_ellipse_sky = regions.largest_ellipse(galaxy_region).to_sky(image.wcs)
self.image_preparer.saturation_region_sky = saturation_region.to_sky(image.wcs) if saturation_region is not None else None
# Load the convolved image
image = Image.from_file(convolved_path)
image.name = name
# Check if the converted image is present
elif fs.is_file(converted_path):
# Disable all steps preceeding and including the unit conversion
self.image_preparer.config.extract_sources = False
self.image_preparer.config.calculate_poisson_noise = False
self.image_preparer.config.correct_for_extinction = False
self.image_preparer.config.convert_unit = False
# Set the principal ellipse and saturation region in sky coordinates
self.image_preparer.principal_ellipse_sky = regions.largest_ellipse(galaxy_region).to_sky(image.wcs)
self.image_preparer.saturation_region_sky = saturation_region.to_sky(image.wcs) if saturation_region is not None else None
# Load the converted image
image = Image.from_file(converted_path)
image.name = name
# Check if the extinction-corrected image is present
elif fs.is_file(corrected_path):
# Disable all steps preceeding and including the correction for extinction
self.image_preparer.config.extract_sources = False
self.image_preparer.config.calculate_poisson_noise = False
self.image_preparer.config.correct_for_extinction = False
# Set the principal ellipse and saturation region in sky coordinates
self.image_preparer.principal_ellipse_sky = regions.largest_ellipse(galaxy_region).to_sky(image.wcs)
self.image_preparer.saturation_region_sky = saturation_region.to_sky(image.wcs) if saturation_region is not None else None
# Load the extinction-corrected image
image = Image.from_file(corrected_path)
image.name = name
# Check if the source-extracted image is present
elif fs.is_file(extracted_path):
# Disable all steps preceeding and including the source extraction
self.image_preparer.config.extract_sources = False
# Set the principal ellipse and saturation region in sky coordinates
self.image_preparer.principal_ellipse_sky = regions.largest_ellipse(galaxy_region).to_sky(image.wcs)
self.image_preparer.saturation_region_sky = saturation_region.to_sky(image.wcs) if saturation_region is not None else None
# Load the extracted image
image = Image.from_file(extracted_path)
image.name = name
# -----------------------------------------------------------------
# Write out sky annuli frames
sky_path = fs.join(output_path, "sky")
if not fs.is_directory(sky_path): fs.create_directory(sky_path)
self.image_preparer.config.write_sky_annuli = True
self.image_preparer.config.sky_annuli_path = sky_path
# Set the visualisation path for the image preparer
visualisation_path = self.visualisation_path if self.config.visualise else None
# -----------------------------------------------------------------
# Run the image preparation
self.image_preparer.run(image, galaxy_region, star_region, saturation_region, other_region, galaxy_segments, star_segments, other_segments, visualisation_path)
# -----------------------------------------------------------------
# Inform the user
log.success("Preparation of " + name + " image finished")
# Clear the image preparer
self.image_preparer.clear()
# -----------------------------------------------------------------
def enable_all_preparation_steps(self):
    """
    Switch every stage of the image preparation pipeline back on.
    :return:
    """
    # All preparation stages, listed in pipeline order.
    stage_flags = (
        "calculate_calibration_uncertainties",
        "extract_sources",
        "correct_for_extinction",
        "convert_unit",
        "convolve",
        "rebin",
        "subtract_sky",
        "set_uncertainties",
    )
    configuration = self.image_preparer.config
    for flag in stage_flags: setattr(configuration, flag, True)
# -----------------------------------------------------------------
def set_preparation_options(self, image, output_path):
"""
Configure the image preparer for a single image.
:param image: the image to prepare; its name, filter and primary frame are read
:param output_path: directory where the preparation output will be written
:return:
"""
# Set the attenuation value
self.image_preparer.config.attenuation = self.attenuations[image.name]
# If this image is not the reference image, set the appropriate options for rebinning and convolution
# or this image does not need to be convolved (e.g. SPIRE images)
# NOTE(review): aniano_names maps image names to convolution kernel names;
# a None entry presumably means "no convolution needed" -- confirm.
if image.name == self.config.reference_image or aniano_names[image.name] is None:
self.image_preparer.config.rebin = False
self.image_preparer.config.convolve = False
# Images that do need to be convolved
else:
# Debugging information
log.debug("Setting the path to the convolution kernel ...")
# Get the path to the local convolution kernel file
this_aniano_name = aniano_names[image.name]
reference_aniano_name = aniano_names[self.config.reference_image]
kernel_file_path = self.aniano.get_kernel_path(this_aniano_name, reference_aniano_name)
# Set the kernel path and FWHM
self.image_preparer.config.convolution.kernel_path = kernel_file_path # set kernel path
self.image_preparer.config.convolution.kernel_fwhm = self.reference_fwhm # set kernel FWHM (is a quantity here)
# Set flags to True
self.image_preparer.config.rebin = True
self.image_preparer.config.convolve = True
# Convolve the SDSS images remotely
# NOTE(review): "nancy" looks like the name of a configured remote host -- verify.
if "SDSS" in image.name: self.image_preparer.config.convolution.remote = "nancy"
else: self.image_preparer.config.convolution.remote = None
# Check whether the image has to be sky subtracted
if image.frames.primary.sky_subtracted:
log.debug("The " + image.name + " image has already been sky subtracted")
self.image_preparer.config.subtract_sky = False
else: self.image_preparer.config.subtract_sky = True # Has yet to be sky subtracted
# Set the calibration error
self.image_preparer.config.uncertainties.calibration_error = CalibrationError.from_filter(image.filter)
# Set the output directory
self.image_preparer.config.output_path = output_path
# -----------------------------------------------------------------
# The units of the Halpha image don't have to be converted
if "Halpha" in image.name: self.image_preparer.config.convert_unit = False
else: self.image_preparer.config.convert_unit = True
# -----------------------------------------------------------------
def load_sources(path):
    """
    Load the source regions and segmentation maps found in a preparation directory.
    :param path: directory containing the region files and segments.fits
    :return: tuple of (galaxy_region, star_region, saturation_region, other_region,
             galaxy_segments, star_segments, other_segments); optional entries are None
    """
    def optional_region(filename):
        # Load a region file when it exists, otherwise return None.
        region_path = fs.join(path, filename)
        return Region.from_file(region_path) if fs.is_file(region_path) else None
    # The galaxy region is always expected to be present
    galaxy_region = Region.from_file(fs.join(path, "galaxies.reg"))
    # The star, saturation and other-sources regions are optional
    star_region = optional_region("stars.reg")
    saturation_region = optional_region("saturation.reg")
    other_region = optional_region("other_sources.reg")
    # Open the image that holds the segmentation maps
    segments = Image.from_file(fs.join(path, "segments.fits"), no_filter=True)
    frames = segments.frames
    # Pick out the individual segmentation frames (stars/other may be absent)
    galaxy_segments = frames.galaxies
    star_segments = frames.stars if "stars" in frames else None
    other_segments = frames.other_sources if "other_sources" in frames else None
    # Return the regions and segmentation maps
    return galaxy_region, star_region, saturation_region, other_region, galaxy_segments, star_segments, other_segments
# -----------------------------------------------------------------
|
Stargrazer82301/CAAPR
|
CAAPR/CAAPR_AstroMagic/PTS/pts/modeling/preparation/preparer.py
|
Python
|
mit
| 21,533
|
[
"Galaxy"
] |
78ad23b338517f3acd3423724d92ed88f2176d692e4d8180adacb344d828064e
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import re
class Texinfo(AutotoolsPackage, GNUMirrorPackage):
"""Texinfo is the official documentation format of the GNU project.
It was invented by Richard Stallman and Bob Chassell many years ago,
loosely based on Brian Reid's Scribe and other formatting languages
of the time. It is used by many non-GNU projects as well."""
homepage = "https://www.gnu.org/software/texinfo/"
# Location of the tarball below the GNU mirror root (used by GNUMirrorPackage).
gnu_mirror_path = "texinfo/texinfo-6.0.tar.gz"
# Regex for the binary that identifies an external installation
# (presumably consumed by `spack external find` -- see determine_version below).
executables = ['^info$']
version('6.5', sha256='d34272e4042c46186ddcd66bd5d980c0ca14ff734444686ccf8131f6ec8b1427')
version('6.3', sha256='300a6ba4958c2dd4a6d5ce60f0a335daf7e379f5374f276f6ba31a221f02f606')
version('6.0', sha256='83d3183290f34e7f958d209d0b20022c6fe9e921eb6fe94c27d988827d4878d2')
version('5.2', sha256='6b8ca30e9b6f093b54fe04439e5545e564c63698a806a48065c0bba16994cf74')
version('5.1', sha256='50e8067f9758bb2bf175b69600082ac4a27c464cb4bcd48a578edd3127216600')
version('5.0', sha256='2c579345a39a2a0bb4b8c28533f0b61356504a202da6a25d17d4d866af7f5803')
depends_on('perl')
# Fix unescaped braces in regexps.
# Ref: https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=898994
patch('fix_unescaped_braces.patch', when='@6.3:')
patch('fix_unescaped_braces_2.patch', when='@5.1:6.0')
patch('fix_unescaped_braces_3.patch', when='@5.0')
# Apply this fix to perform thread-safe processing in code
# that uses the global locale.
# Ref: https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=902771
patch('update_locale_handling.patch', when='@6.3:')
# Fix applied only when building with the NVIDIA HPC compiler.
patch('nvhpc.patch', when='%nvhpc')
@classmethod
def determine_version(cls, exe):
"""Parse the texinfo version out of `info --version` output."""
output = Executable(exe)('--version', output=str, error=str)
match = re.search(r'info \(GNU texinfo\)\s+(\S+)', output)
# None signals "not a GNU texinfo binary" to the caller.
return match.group(1) if match else None
|
iulian787/spack
|
var/spack/repos/builtin/packages/texinfo/package.py
|
Python
|
lgpl-2.1
| 2,071
|
[
"Brian"
] |
9557b258b471f770a0323f41e62dab0113f02dd7c47ba201460e9600f069c939
|
"""
Code for kernel density estimation.
- Source: http://nbviewer.jupyter.org/gist/tillahoffmann/f844bce2ec264c1c8cb5
"""
import numpy as np
from scipy.spatial.distance import cdist
class gaussian_kde(object):
    """Representation of a weighted kernel-density estimate using Gaussian
    kernels.

    Kernel density estimation is a way to estimate the probability density
    function (PDF) of a random variable in a non-parametric way. This class
    works for both uni-variate and multi-variate data and extends the
    scipy.stats.gaussian_kde interface with per-datapoint weights.

    Parameters
    ----------
    dataset : array_like
        Datapoints to estimate from. In case of univariate data this is a 1-D
        array, otherwise a 2-D array with shape (# of dims, # of data).
    bw_method : str, scalar or callable, optional
        The method used to calculate the estimator bandwidth: 'scott'
        (default), 'silverman', a scalar constant used directly as
        `kde.factor`, or a callable taking the `gaussian_kde` instance as its
        only parameter and returning a scalar.
    weights : array_like, shape (n, ), optional, default: None
        Weight of each datapoint; normalized internally to sum to 1.
        Uniform weights are used when omitted.

    Attributes
    ----------
    dataset : ndarray
        The dataset with which `gaussian_kde` was initialized.
    d : int
        Number of dimensions.
    n : int
        Number of datapoints.
    neff : float
        Effective sample size using Kish's approximation.
    factor : float
        The bandwidth factor, obtained from `kde.covariance_factor`.
    covariance : ndarray
        The covariance matrix of `dataset`, scaled by `factor**2`.
    inv_cov : ndarray
        The inverse of `covariance`.
    """

    def __init__(self, dataset, bw_method=None, weights=None):
        self.dataset = np.atleast_2d(dataset)
        if not self.dataset.size > 1:
            raise ValueError("`dataset` input should have multiple elements.")
        self.d, self.n = self.dataset.shape
        if weights is not None:
            # Normalize so the weights sum to one.
            self.weights = weights / np.sum(weights)
        else:
            self.weights = np.ones(self.n) / self.n
        # Effective sample size, Kish's approximate formula:
        # http://surveyanalysis.org/wiki/Design_Effects_and_Effective_Sample_Size#Kish.27s_approximate_formula_for_computing_effective_sample_size
        self.neff = 1.0 / np.sum(self.weights ** 2)
        self.set_bandwidth(bw_method=bw_method)

    def evaluate(self, points):
        """Evaluate the estimated pdf on a set of points.

        Parameters
        ----------
        points : (# of dimensions, # of points)-array
            Alternatively, a (# of dimensions,) vector can be passed in and
            treated as a single point.

        Returns
        -------
        values : (# of points,)-array
            The values at each point.

        Raises
        ------
        ValueError : if the dimensionality of the input points is different
            than the dimensionality of the KDE.
        """
        points = np.atleast_2d(points)
        d, m = points.shape
        if d != self.d:
            if d == 1 and m == self.d:
                # points was passed in as a row vector
                points = np.reshape(points, (self.d, 1))
                m = 1
            else:
                msg = "points have dimension %s, dataset has dimension %s" % (d,
                    self.d)
                raise ValueError(msg)
        # Squared Mahalanobis distance of every point to every datapoint,
        # using the (scaled) inverse kernel covariance.
        chi2 = cdist(points.T, self.dataset.T, 'mahalanobis', VI=self.inv_cov) ** 2
        # Weighted sum of the Gaussian kernels, normalized by the kernel norm.
        result = np.sum(np.exp(-.5 * chi2) * self.weights, axis=1) / self._norm_factor
        return result

    __call__ = evaluate

    def scotts_factor(self):
        # Scott's rule of thumb: neff**(-1/(d+4)).
        return np.power(self.neff, -1. / (self.d + 4))

    def silverman_factor(self):
        # Silverman's rule of thumb: (neff*(d+2)/4)**(-1/(d+4)).
        return np.power(self.neff * (self.d + 2.0) / 4.0, -1. / (self.d + 4))

    # Default method to calculate bandwidth, can be overwritten by subclass
    covariance_factor = scotts_factor

    def set_bandwidth(self, bw_method=None):
        """Compute the estimator bandwidth with given method.

        The new bandwidth calculated after a call to `set_bandwidth` is used
        for subsequent evaluations of the estimated density.

        Parameters
        ----------
        bw_method : str, scalar or callable, optional
            'scott', 'silverman', a scalar constant used directly as
            `kde.factor`, or a callable taking the instance and returning a
            scalar. If None (default), the current `covariance_factor` method
            is kept.
        """
        if bw_method is None:
            pass
        elif bw_method == 'scott':
            self.covariance_factor = self.scotts_factor
        elif bw_method == 'silverman':
            self.covariance_factor = self.silverman_factor
        elif np.isscalar(bw_method) and not isinstance(bw_method, str):
            # BUG FIX: the original tested isinstance(bw_method, string_types),
            # a Python-2/six name that is undefined in this module, so ANY
            # scalar bandwidth raised NameError. `str` is the correct
            # Python-3 check (np.isscalar is also True for strings, which must
            # fall through to the error branch below).
            self._bw_method = 'use constant'
            self.covariance_factor = lambda: bw_method
        elif callable(bw_method):
            self._bw_method = bw_method
            self.covariance_factor = lambda: self._bw_method(self)
        else:
            msg = "`bw_method` should be 'scott', 'silverman', a scalar " \
                  "or a callable."
            raise ValueError(msg)
        self._compute_covariance()

    def _compute_covariance(self):
        """Computes the covariance matrix for each Gaussian kernel using
        covariance_factor().
        """
        self.factor = self.covariance_factor()
        # Cache covariance and inverse covariance of the data; these only
        # depend on the dataset/weights, not on the bandwidth factor.
        if not hasattr(self, '_data_inv_cov'):
            # Weighted mean and residuals
            _mean = np.sum(self.weights * self.dataset, axis=1)
            _residual = (self.dataset - _mean[:, None])
            # Biased weighted covariance
            self._data_covariance = np.atleast_2d(np.dot(_residual * self.weights, _residual.T))
            # Correct for bias (http://en.wikipedia.org/wiki/Weighted_arithmetic_mean#Weighted_sample_covariance)
            self._data_covariance /= (1 - np.sum(self.weights ** 2))
            self._data_inv_cov = np.linalg.inv(self._data_covariance)
        self.covariance = self._data_covariance * self.factor ** 2
        self.inv_cov = self._data_inv_cov / self.factor ** 2
        self._norm_factor = np.sqrt(np.linalg.det(2 * np.pi * self.covariance))  # * self.n
|
DamienIrving/ocean-analysis
|
modules/kde.py
|
Python
|
mit
| 11,449
|
[
"Gaussian"
] |
98968dc4256413e148d7ff194478e20b4da250202434f408fa2945e8fb3c3d18
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for Chromium.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""
import re
import subprocess
import sys
# Paths (regexes against the local path) that are skipped by several of the
# checks below, e.g. third-party-ish and generated code.
_EXCLUDED_PATHS = (
r"^breakpad[\\\/].*",
r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_rules.py",
r"^native_client_sdk[\\\/]src[\\\/]build_tools[\\\/]make_simple.py",
r"^net[\\\/]tools[\\\/]spdyshark[\\\/].*",
r"^skia[\\\/].*",
r"^v8[\\\/].*",
r".*MakeFile$",
r".+_autogen\.h$",
r"^cc[\\\/].*",
r"^webkit[\\\/]compositor_bindings[\\\/].*",
r".+[\\\/]pnacl_shim\.c$",
)
# Message emitted by _CheckNoProductionCodeUsingTestOnlyFunctions.
_TEST_ONLY_WARNING = (
'You might be calling functions intended only for testing from\n'
'production code. It is OK to ignore this warning if you know what\n'
'you are doing, as the heuristics used to detect the situation are\n'
'not perfect. The commit queue will not block on this warning.\n'
'Email joi@chromium.org if you have questions.')
# Message emitted by the #include-order checks further below.
_INCLUDE_ORDER_WARNING = (
'Your #include order seems to be broken. Send mail to\n'
'marja@chromium.org if this is not the case.')
# Each entry is a 3-tuple (needle, message_lines, treat_as_error):
# |needle| is substring-matched against every changed line, |message_lines|
# is the explanation printed with a hit, and |treat_as_error| selects between
# a presubmit error (True) and a warning (False). Consumed by
# _CheckNoBannedFunctions.
_BANNED_OBJC_FUNCTIONS = (
(
'addTrackingRect:',
(
'The use of -[NSView addTrackingRect:owner:userData:assumeInside:] is'
'prohibited. Please use CrTrackingArea instead.',
'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
),
False,
),
(
'NSTrackingArea',
(
'The use of NSTrackingAreas is prohibited. Please use CrTrackingArea',
'instead.',
'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
),
False,
),
(
'convertPointFromBase:',
(
'The use of -[NSView convertPointFromBase:] is almost certainly wrong.',
'Please use |convertPoint:(point) fromView:nil| instead.',
'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
),
True,
),
(
'convertPointToBase:',
(
'The use of -[NSView convertPointToBase:] is almost certainly wrong.',
'Please use |convertPoint:(point) toView:nil| instead.',
'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
),
True,
),
(
'convertRectFromBase:',
(
'The use of -[NSView convertRectFromBase:] is almost certainly wrong.',
'Please use |convertRect:(point) fromView:nil| instead.',
'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
),
True,
),
(
'convertRectToBase:',
(
'The use of -[NSView convertRectToBase:] is almost certainly wrong.',
'Please use |convertRect:(point) toView:nil| instead.',
'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
),
True,
),
(
'convertSizeFromBase:',
(
'The use of -[NSView convertSizeFromBase:] is almost certainly wrong.',
'Please use |convertSize:(point) fromView:nil| instead.',
'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
),
True,
),
(
'convertSizeToBase:',
(
'The use of -[NSView convertSizeToBase:] is almost certainly wrong.',
'Please use |convertSize:(point) toView:nil| instead.',
'http://dev.chromium.org/developers/coding-style/cocoa-dos-and-donts',
),
True,
),
)
# Same 3-tuple schema as _BANNED_OBJC_FUNCTIONS, applied to C++ sources.
_BANNED_CPP_FUNCTIONS = (
# Make sure that gtest's FRIEND_TEST() macro is not used; the
# FRIEND_TEST_ALL_PREFIXES() macro from base/gtest_prod_util.h should be
# used instead since that allows for FLAKY_ and DISABLED_ prefixes.
(
'FRIEND_TEST(',
(
'Chromium code should not use gtest\'s FRIEND_TEST() macro. Include',
'base/gtest_prod_util.h and use FRIEND_TEST_ALL_PREFIXES() instead.',
),
False,
),
(
'ScopedAllowIO',
(
'New code should not use ScopedAllowIO. Post a task to the blocking',
'pool or the FILE thread instead.',
),
True,
),
(
'FilePathWatcher::Delegate',
(
'New code should not use FilePathWatcher::Delegate. Use the callback',
'interface instead.',
),
False,
),
(
'chrome::FindLastActiveWithProfile',
(
'This function is deprecated and we\'re working on removing it. Pass',
'more context to get a Browser*, like a WebContents, window, or session',
'id. Talk to robertshield@ for more information.',
),
True,
),
(
'browser::FindAnyBrowser',
(
'This function is deprecated and we\'re working on removing it. Pass',
'more context to get a Browser*, like a WebContents, window, or session',
'id. Talk to robertshield@ for more information.',
),
True,
),
(
'browser::FindOrCreateTabbedBrowser',
(
'This function is deprecated and we\'re working on removing it. Pass',
'more context to get a Browser*, like a WebContents, window, or session',
'id. Talk to robertshield@ for more information.',
),
True,
),
(
'browser::FindTabbedBrowserDeprecated',
(
'This function is deprecated and we\'re working on removing it. Pass',
'more context to get a Browser*, like a WebContents, window, or session',
'id. Talk to robertshield@ for more information.',
),
True,
),
(
'RunAllPending()',
(
'This function is deprecated and we\'re working on removing it. Rename',
'to RunUntilIdle',
),
True,
),
)
def _CheckNoProductionCodeUsingTestOnlyFunctions(input_api, output_api):
    """Attempts to prevent use of functions intended only for testing in
    non-testing code. For now this is just a best-effort implementation
    that ignores header files and may have some false positives. A
    better implementation would probably need a proper C++ parser.
    """
    # We only scan .cc files and the like, as the declaration of
    # for-testing functions in header files are hard to distinguish from
    # calls to such functions without a proper C++ parser.
    platform_specifiers = r'(_(android|chromeos|gtk|mac|posix|win))?'
    source_extensions = r'\.(cc|cpp|cxx|mm)$'
    file_inclusion_pattern = r'.+%s' % source_extensions
    # Test/support/mock sources are allowed to call for-testing functions.
    file_exclusion_patterns = (
        r'.*[/\\](fake_|test_|mock_).+%s' % source_extensions,
        r'.+_test_(base|support|util)%s' % source_extensions,
        r'.+_(api|browser|perf|unit|ui)?test%s%s' % (platform_specifiers,
                                                     source_extensions),
        r'.+profile_sync_service_harness%s' % source_extensions,
    )
    path_exclusion_patterns = (
        r'.*[/\\](test|tool(s)?)[/\\].*',
        # At request of folks maintaining this folder.
        r'chrome[/\\]browser[/\\]automation[/\\].*',
    )
    base_function_pattern = r'ForTest(ing)?|for_test(ing)?'
    inclusion_pattern = input_api.re.compile(r'(%s)\s*\(' % base_function_pattern)
    # Qualified references (Foo::BarForTesting) and definitions are not hits.
    exclusion_pattern = input_api.re.compile(
        r'::[A-Za-z0-9_]+(%s)|(%s)[^;]+\{' % (
            base_function_pattern, base_function_pattern))

    def FilterFile(affected_file):
        # Restrict the scan to implementation files not excluded above.
        black_list = (file_exclusion_patterns + path_exclusion_patterns +
                      _EXCLUDED_PATHS + input_api.DEFAULT_BLACK_LIST)
        return input_api.FilterSourceFile(
            affected_file,
            white_list=(file_inclusion_pattern, ),
            black_list=black_list)

    problems = []
    for f in input_api.AffectedSourceFiles(FilterFile):
        local_path = f.LocalPath()
        # BUG FIX: the original counter started at 0 and was only incremented
        # after appending, so every problem was reported one line too early
        # (0-based). enumerate(..., 1) gives conventional 1-based numbers.
        for line_number, line in enumerate(
                input_api.ReadFile(f).splitlines(), 1):
            if (inclusion_pattern.search(line) and
                not exclusion_pattern.search(line)):
                problems.append(
                    '%s:%d\n %s' % (local_path, line_number, line.strip()))
    if problems:
        if not input_api.is_committing:
            return [output_api.PresubmitPromptWarning(_TEST_ONLY_WARNING, problems)]
        else:
            # We don't warn on commit, to avoid stopping commits going through CQ.
            return [output_api.PresubmitNotifyResult(_TEST_ONLY_WARNING, problems)]
    else:
        return []
def _CheckNoIOStreamInHeaders(input_api, output_api):
"""Checks to make sure no .h files include <iostream>."""
files = []
pattern = input_api.re.compile(r'^#include\s*<iostream>',
input_api.re.MULTILINE)
for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
if not f.LocalPath().endswith('.h'):
continue
contents = input_api.ReadFile(f)
if pattern.search(contents):
files.append(f)
if len(files):
return [ output_api.PresubmitError(
'Do not #include <iostream> in header files, since it inserts static '
'initialization into every file including the header. Instead, '
'#include <ostream>. See http://crbug.com/94794',
files) ]
return []
def _CheckNoUNIT_TESTInSourceFiles(input_api, output_api):
"""Checks to make sure no source files use UNIT_TEST"""
problems = []
for f in input_api.AffectedFiles():
if (not f.LocalPath().endswith(('.cc', '.mm'))):
continue
for line_num, line in f.ChangedContents():
if 'UNIT_TEST' in line:
problems.append(' %s:%d' % (f.LocalPath(), line_num))
if not problems:
return []
return [output_api.PresubmitPromptWarning('UNIT_TEST is only for headers.\n' +
'\n'.join(problems))]
def _CheckNoNewWStrings(input_api, output_api):
"""Checks to make sure we don't introduce use of wstrings."""
problems = []
for f in input_api.AffectedFiles():
if (not f.LocalPath().endswith(('.cc', '.h')) or
f.LocalPath().endswith('test.cc')):
continue
allowWString = False
for line_num, line in f.ChangedContents():
if 'presubmit: allow wstring' in line:
allowWString = True
elif not allowWString and 'wstring' in line:
problems.append(' %s:%d' % (f.LocalPath(), line_num))
allowWString = False
else:
allowWString = False
if not problems:
return []
return [output_api.PresubmitPromptWarning('New code should not use wstrings.'
' If you are calling a cross-platform API that accepts a wstring, '
'fix the API.\n' +
'\n'.join(problems))]
def _CheckNoDEPSGIT(input_api, output_api):
"""Make sure .DEPS.git is never modified manually."""
if any(f.LocalPath().endswith('.DEPS.git') for f in
input_api.AffectedFiles()):
return [output_api.PresubmitError(
'Never commit changes to .DEPS.git. This file is maintained by an\n'
'automated system based on what\'s in DEPS and your changes will be\n'
'overwritten.\n'
'See http://code.google.com/p/chromium/wiki/UsingNewGit#Rolling_DEPS\n'
'for more information')]
return []
def _CheckNoBannedFunctions(input_api, output_api):
    """Make sure that banned functions are not used."""
    warnings = []
    errors = []

    def _ScanFiles(extensions, banned):
        # Append a problem entry to |warnings| or |errors| (depending on the
        # table entry's error flag) for every changed line mentioning a
        # banned function. The two passes below previously duplicated this
        # loop verbatim; it is factored out here.
        file_filter = lambda f: f.LocalPath().endswith(extensions)
        for f in input_api.AffectedFiles(file_filter=file_filter):
            for line_num, line in f.ChangedContents():
                for func_name, message, error in banned:
                    if func_name in line:
                        problems = errors if error else warnings
                        problems.append(' %s:%d:' % (f.LocalPath(), line_num))
                        for message_line in message:
                            problems.append(' %s' % message_line)

    # Objective-C(++) sources and headers.
    _ScanFiles(('.mm', '.m', '.h'), _BANNED_OBJC_FUNCTIONS)
    # C++ sources and headers (headers may be shared with Objective-C++).
    _ScanFiles(('.cc', '.mm', '.h'), _BANNED_CPP_FUNCTIONS)
    result = []
    if warnings:
        result.append(output_api.PresubmitPromptWarning(
            'Banned functions were used.\n' + '\n'.join(warnings)))
    if errors:
        result.append(output_api.PresubmitError(
            'Banned functions were used.\n' + '\n'.join(errors)))
    return result
def _CheckNoPragmaOnce(input_api, output_api):
"""Make sure that banned functions are not used."""
files = []
pattern = input_api.re.compile(r'^#pragma\s+once',
input_api.re.MULTILINE)
for f in input_api.AffectedSourceFiles(input_api.FilterSourceFile):
if not f.LocalPath().endswith('.h'):
continue
contents = input_api.ReadFile(f)
if pattern.search(contents):
files.append(f)
if files:
return [output_api.PresubmitError(
'Do not use #pragma once in header files.\n'
'See http://www.chromium.org/developers/coding-style#TOC-File-headers',
files)]
return []
def _CheckNoTrinaryTrueFalse(input_api, output_api):
"""Checks to make sure we don't introduce use of foo ? true : false."""
problems = []
pattern = input_api.re.compile(r'\?\s*(true|false)\s*:\s*(true|false)')
for f in input_api.AffectedFiles():
if not f.LocalPath().endswith(('.cc', '.h', '.inl', '.m', '.mm')):
continue
for line_num, line in f.ChangedContents():
if pattern.match(line):
problems.append(' %s:%d' % (f.LocalPath(), line_num))
if not problems:
return []
return [output_api.PresubmitPromptWarning(
'Please consider avoiding the "? true : false" pattern if possible.\n' +
'\n'.join(problems))]
def _CheckUnwantedDependencies(input_api, output_api):
"""Runs checkdeps on #include statements added in this
change. Breaking - rules is an error, breaking ! rules is a
warning.
"""
# We need to wait until we have an input_api object and use this
# roundabout construct to import checkdeps because this file is
# eval-ed and thus doesn't have __file__.
original_sys_path = sys.path
try:
sys.path = sys.path + [input_api.os_path.join(
input_api.PresubmitLocalPath(), 'tools', 'checkdeps')]
import checkdeps
from cpp_checker import CppChecker
from rules import Rule
finally:
# Restore sys.path to what it was before.
sys.path = original_sys_path
# Collect [path, changed_lines] pairs for every affected C++ file.
added_includes = []
for f in input_api.AffectedFiles():
if not CppChecker.IsCppFile(f.LocalPath()):
continue
changed_lines = [line for line_num, line in f.ChangedContents()]
added_includes.append([f.LocalPath(), changed_lines])
deps_checker = checkdeps.DepsChecker()
# Split checkdeps results into hard errors (DISALLOW rules) and warnings.
error_descriptions = []
warning_descriptions = []
for path, rule_type, rule_description in deps_checker.CheckAddedCppIncludes(
added_includes):
description_with_path = '%s\n %s' % (path, rule_description)
if rule_type == Rule.DISALLOW:
error_descriptions.append(description_with_path)
else:
warning_descriptions.append(description_with_path)
results = []
if error_descriptions:
results.append(output_api.PresubmitError(
'You added one or more #includes that violate checkdeps rules.',
error_descriptions))
if warning_descriptions:
if not input_api.is_committing:
warning_factory = output_api.PresubmitPromptWarning
else:
# We don't want to block use of the CQ when there is a warning
# of this kind, so we only show a message when committing.
warning_factory = output_api.PresubmitNotifyResult
results.append(warning_factory(
'You added one or more #includes of files that are temporarily\n'
'allowed but being removed. Can you avoid introducing the\n'
'#include? See relevant DEPS file(s) for details and contacts.',
warning_descriptions))
return results
def _CheckFilePermissions(input_api, output_api):
"""Check that all files have their permissions properly set."""
args = [sys.executable, 'tools/checkperms/checkperms.py', '--root',
input_api.change.RepositoryRoot()]
for f in input_api.AffectedFiles():
args += ['--file', f.LocalPath()]
errors = []
(errors, stderrdata) = subprocess.Popen(args).communicate()
results = []
if errors:
results.append(output_api.PresubmitError('checkperms.py failed.',
errors))
return results
def _CheckNoAuraWindowPropertyHInHeaders(input_api, output_api):
"""Makes sure we don't include ui/aura/window_property.h
in header files.
"""
pattern = input_api.re.compile(r'^#include\s*"ui/aura/window_property.h"')
errors = []
for f in input_api.AffectedFiles():
if not f.LocalPath().endswith('.h'):
continue
for line_num, line in f.ChangedContents():
if pattern.match(line):
errors.append(' %s:%d' % (f.LocalPath(), line_num))
results = []
if errors:
results.append(output_api.PresubmitError(
'Header files should not include ui/aura/window_property.h', errors))
return results
def _CheckIncludeOrderForScope(scope, input_api, file_path, changed_linenums):
"""Checks that the lines in scope occur in the right order.
1. C system files in alphabetical order
2. C++ system files in alphabetical order
3. Project's .h files
"""
c_system_include_pattern = input_api.re.compile(r'\s*#include <.*\.h>')
cpp_system_include_pattern = input_api.re.compile(r'\s*#include <.*>')
custom_include_pattern = input_api.re.compile(r'\s*#include ".*')
C_SYSTEM_INCLUDES, CPP_SYSTEM_INCLUDES, CUSTOM_INCLUDES = range(3)
state = C_SYSTEM_INCLUDES
previous_line = ''
previous_line_num = 0
problem_linenums = []
for line_num, line in scope:
if c_system_include_pattern.match(line):
if state != C_SYSTEM_INCLUDES:
problem_linenums.append((line_num, previous_line_num))
elif previous_line and previous_line > line:
problem_linenums.append((line_num, previous_line_num))
elif cpp_system_include_pattern.match(line):
if state == C_SYSTEM_INCLUDES:
state = CPP_SYSTEM_INCLUDES
elif state == CUSTOM_INCLUDES:
problem_linenums.append((line_num, previous_line_num))
elif previous_line and previous_line > line:
problem_linenums.append((line_num, previous_line_num))
elif custom_include_pattern.match(line):
if state != CUSTOM_INCLUDES:
state = CUSTOM_INCLUDES
elif previous_line and previous_line > line:
problem_linenums.append((line_num, previous_line_num))
else:
problem_linenums.append(line_num)
previous_line = line
previous_line_num = line_num
warnings = []
for (line_num, previous_line_num) in problem_linenums:
if line_num in changed_linenums or previous_line_num in changed_linenums:
warnings.append(' %s:%d' % (file_path, line_num))
return warnings
def _CheckIncludeOrderInFile(input_api, f, is_source, changed_linenums):
  """Checks the #include order for the given file f.

  Args:
    input_api: presubmit input API (provides re and os_path).
    f: affected file object.
    is_source: True for .cc files, which may start with their own header.
    changed_linenums: set of line numbers touched by this change.

  Returns:
    A list of warning strings produced by _CheckIncludeOrderForScope.
  """
  system_include_pattern = input_api.re.compile(r'\s*#include \<.*')
  # Exclude #include <.../...> includes from the check; e.g., <sys/...> includes
  # often need to appear in a specific order.
  excluded_include_pattern = input_api.re.compile(r'\s*#include \<.*/.*')
  custom_include_pattern = input_api.re.compile(r'\s*#include "(?P<FILE>.*)"')
  if_pattern = input_api.re.compile(r'\s*#\s*(if|elif|else|endif).*')
  contents = f.NewContents()
  warnings = []
  line_num = 0

  # Handle the special first include for source files. If the header file is
  # some/path/file.h, the corresponding source file can be some/path/file.cc,
  # some/other/path/file.cc, some/path/file_platform.cc etc. It's also possible
  # that no special first include exists.
  if is_source:
    for line in contents:
      line_num += 1
      if system_include_pattern.match(line):
        # No special first include -> process the line again along with normal
        # includes.
        line_num -= 1
        break
      match = custom_include_pattern.match(line)
      if match:
        match_dict = match.groupdict()
        # Compare the included file's basename (sans .h) against the source
        # file's basename to detect the "own header" include.
        header_basename = input_api.os_path.basename(
            match_dict['FILE']).replace('.h', '')
        if header_basename not in input_api.os_path.basename(f.LocalPath()):
          # No special first include -> process the line again along with
          # normal includes.
          line_num -= 1
        break

  # Split into scopes: Each region between #if and #endif is its own scope.
  scopes = []
  current_scope = []
  for line in contents[line_num:]:
    line_num += 1
    if if_pattern.match(line):
      scopes.append(current_scope)
      current_scope = []
    elif ((system_include_pattern.match(line) or
           custom_include_pattern.match(line)) and
          not excluded_include_pattern.match(line)):
      current_scope.append((line_num, line))
  scopes.append(current_scope)

  for scope in scopes:
    warnings.extend(_CheckIncludeOrderForScope(scope, input_api, f.LocalPath(),
                                               changed_linenums))
  return warnings
def _CheckIncludeOrder(input_api, output_api):
  """Checks that the #include order is correct.

  1. The corresponding header for source files.
  2. C system files in alphabetical order
  3. C++ system files in alphabetical order
  4. Project's .h files in alphabetical order

  Each region between #if and #endif follows these rules separately.
  """
  warnings = []
  for affected in input_api.AffectedFiles():
    local_path = affected.LocalPath()
    if not (local_path.endswith('.cc') or local_path.endswith('.h')):
      continue
    changed = set(num for num, _ in affected.ChangedContents())
    warnings.extend(_CheckIncludeOrderInFile(
        input_api, affected, local_path.endswith('.cc'), changed))

  if not warnings:
    return []
  return [output_api.PresubmitPromptWarning(_INCLUDE_ORDER_WARNING, warnings)]
def _CheckForVersionControlConflictsInFile(input_api, f):
pattern = input_api.re.compile('^(?:<<<<<<<|>>>>>>>) |^=======$')
errors = []
for line_num, line in f.ChangedContents():
if pattern.match(line):
errors.append(' %s:%d %s' % (f.LocalPath(), line_num, line))
return errors
def _CheckForVersionControlConflicts(input_api, output_api):
  """Usually this is not intentional and will cause a compile failure."""
  all_errors = []
  for affected in input_api.AffectedFiles():
    all_errors.extend(
        _CheckForVersionControlConflictsInFile(input_api, affected))

  if not all_errors:
    return []
  return [output_api.PresubmitError(
      'Version control conflict markers found, please resolve.', all_errors)]
def _CommonChecks(input_api, output_api):
  """Checks common to both upload and commit."""
  results = []
  results.extend(input_api.canned_checks.PanProjectChecks(
      input_api, output_api, excluded_paths=_EXCLUDED_PATHS))
  # All project-specific checks share the (input_api, output_api) signature;
  # run them in a fixed order.
  per_change_checks = (
      _CheckAuthorizedAuthor,
      _CheckNoProductionCodeUsingTestOnlyFunctions,
      _CheckNoIOStreamInHeaders,
      _CheckNoUNIT_TESTInSourceFiles,
      _CheckNoNewWStrings,
      _CheckNoDEPSGIT,
      _CheckNoBannedFunctions,
      _CheckNoPragmaOnce,
      _CheckNoTrinaryTrueFalse,
      _CheckUnwantedDependencies,
      _CheckFilePermissions,
      _CheckNoAuraWindowPropertyHInHeaders,
      _CheckIncludeOrder,
      _CheckForVersionControlConflicts,
  )
  for check in per_change_checks:
    results.extend(check(input_api, output_api))
  # When PRESUBMIT.py itself is modified, its own unit tests must pass.
  if any('PRESUBMIT.py' == f.LocalPath() for f in input_api.AffectedFiles()):
    results.extend(input_api.canned_checks.RunUnitTestsInDirectory(
        input_api, output_api,
        input_api.PresubmitLocalPath(),
        whitelist=[r'.+_test\.py$']))
  return results
def _CheckSubversionConfig(input_api, output_api):
"""Verifies the subversion config file is correctly setup.
Checks that autoprops are enabled, returns an error otherwise.
"""
join = input_api.os_path.join
if input_api.platform == 'win32':
appdata = input_api.environ.get('APPDATA', '')
if not appdata:
return [output_api.PresubmitError('%APPDATA% is not configured.')]
path = join(appdata, 'Subversion', 'config')
else:
home = input_api.environ.get('HOME', '')
if not home:
return [output_api.PresubmitError('$HOME is not configured.')]
path = join(home, '.subversion', 'config')
error_msg = (
'Please look at http://dev.chromium.org/developers/coding-style to\n'
'configure your subversion configuration file. This enables automatic\n'
'properties to simplify the project maintenance.\n'
'Pro-tip: just download and install\n'
'http://src.chromium.org/viewvc/chrome/trunk/tools/build/slave/config\n')
try:
lines = open(path, 'r').read().splitlines()
# Make sure auto-props is enabled and check for 2 Chromium standard
# auto-prop.
if (not '*.cc = svn:eol-style=LF' in lines or
not '*.pdf = svn:mime-type=application/pdf' in lines or
not 'enable-auto-props = yes' in lines):
return [
output_api.PresubmitNotifyResult(
'It looks like you have not configured your subversion config '
'file or it is not up-to-date.\n' + error_msg)
]
except (OSError, IOError):
return [
output_api.PresubmitNotifyResult(
'Can\'t find your subversion config file.\n' + error_msg)
]
return []
def _CheckAuthorizedAuthor(input_api, output_api):
"""For non-googler/chromites committers, verify the author's email address is
in AUTHORS.
"""
# TODO(maruel): Add it to input_api?
import fnmatch
author = input_api.change.author_email
if not author:
input_api.logging.info('No author, skipping AUTHOR check')
return []
authors_path = input_api.os_path.join(
input_api.PresubmitLocalPath(), 'AUTHORS')
valid_authors = (
input_api.re.match(r'[^#]+\s+\<(.+?)\>\s*$', line)
for line in open(authors_path))
valid_authors = [item.group(1).lower() for item in valid_authors if item]
if input_api.verbose:
print 'Valid authors are %s' % ', '.join(valid_authors)
if not any(fnmatch.fnmatch(author.lower(), valid) for valid in valid_authors):
return [output_api.PresubmitPromptWarning(
('%s is not in AUTHORS file. If you are a new contributor, please visit'
'\n'
'http://www.chromium.org/developers/contributing-code and read the '
'"Legal" section\n'
'If you are a chromite, verify the contributor signed the CLA.') %
author)]
return []
def CheckChangeOnUpload(input_api, output_api):
  """Top-level hook run by the presubmit framework at upload time."""
  return list(_CommonChecks(input_api, output_api))
def CheckChangeOnCommit(input_api, output_api):
  """Top-level hook run by the presubmit framework at commit time."""
  results = []
  results.extend(_CommonChecks(input_api, output_api))
  # TODO(thestig) temporarily disabled, doesn't work in third_party/
  #results.extend(input_api.canned_checks.CheckSvnModifiedDirectories(
  #    input_api, output_api, sources))
  # Make sure the tree is 'open'.
  results.extend(input_api.canned_checks.CheckTreeIsOpen(
      input_api,
      output_api,
      json_url='http://chromium-status.appspot.com/current?format=json'))
  # NOTE(review): 'mac_rel, win:compile' below is one string; it looks like it
  # was meant to be the two entries 'mac_rel' and 'win:compile' -- confirm
  # before relying on those try jobs having run.
  results.extend(input_api.canned_checks.CheckRietveldTryJobExecution(input_api,
      output_api, 'http://codereview.chromium.org',
      ('win_rel', 'linux_rel', 'mac_rel, win:compile'),
      'tryserver@chromium.org'))
  # Commit messages must carry a BUG= field and a description.
  results.extend(input_api.canned_checks.CheckChangeHasBugField(
      input_api, output_api))
  results.extend(input_api.canned_checks.CheckChangeHasDescription(
      input_api, output_api))
  results.extend(_CheckSubversionConfig(input_api, output_api))
  return results
def GetPreferredTrySlaves(project, change):
  """Returns the try bots to use for the files touched by |change|."""
  files = change.LocalPaths()
  if not files:
    return []

  def _all_match(pattern):
    return all(re.search(pattern, f) for f in files)

  # Changes confined to a single platform only need that platform's bots.
  # Order matters: checked in the same sequence as the original chain.
  platform_exclusive = (
      ('\.(m|mm)$|(^|[/_])mac[/_.]', ['mac_rel', 'mac_asan']),
      ('(^|[/_])win[/_.]', ['win_rel']),
      ('(^|[/_])android[/_.]', ['android_dbg', 'android_clang_dbg']),
      ('^native_client_sdk',
       ['linux_nacl_sdk', 'win_nacl_sdk', 'mac_nacl_sdk']),
      ('[/_]ios[/_.]', ['ios_rel_device', 'ios_dbg_simulator']),
  )
  for pattern, slaves in platform_exclusive:
    if _all_match(pattern):
      return list(slaves)

  trybots = [
      'android_clang_dbg',
      'android_dbg',
      'ios_dbg_simulator',
      'ios_rel_device',
      'linux_asan',
      'linux_chromeos',
      'linux_clang:compile',
      'linux_rel',
      'mac_asan',
      'mac_rel',
      'win_rel',
  ]

  # Match things like path/aura/file.cc and path/file_aura.cc.
  # Same for ash and chromeos.
  if any(re.search('[/_](ash|aura)', f) for f in files):
    trybots += ['linux_chromeos_clang:compile', 'win_aura',
                'linux_chromeos_asan']
  elif any(re.search('[/_]chromeos', f) for f in files):
    trybots += ['linux_chromeos_clang:compile', 'linux_chromeos_asan']
  return trybots
|
leighpauls/k2cro4
|
PRESUBMIT.py
|
Python
|
bsd-3-clause
| 29,745
|
[
"VisIt"
] |
6469389707a86d9565dbdb2e80cb5718a42c81936fc6a2d3cb1c068bdd134fab
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# --- BEGIN_HEADER ---
#
# jobobjsubmit - submit helper to submit job objects directly
# Copyright (C) 2003-2009 The MiG Project lead by Brian Vinter
#
# This file is part of MiG.
#
# MiG is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# MiG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# -- END_HEADER ---
#
"""Low level frontend for submitting jobs"""
import cgi
import cgitb
cgitb.enable()
from shared.functionality.jobobjsubmit import main
from shared.cgiscriptstub import run_cgi_script
run_cgi_script(main)
|
heromod/migrid
|
mig/cgi-bin/jobobjsubmit.py
|
Python
|
gpl-2.0
| 1,147
|
[
"Brian"
] |
4417621e5e289ecad8a50eb4ce9e19f0c2c2c4dabe6dfab1e273437b99e374c3
|
# -*- coding: utf-8 -*-s
import numpy as np
import create_timeseries as cts

# Create grid: lon/lat centres and bounds, then the pressure levels.
manual_grid_option = True
lons, lats, lonbs, latbs, nlon, nlat, nlonb, nlatb = cts.create_grid(manual_grid_option)
p_full, p_half, npfull, nphalf = cts.create_pressures()

# Create times: a non-climatology time axis spanning num_years.
is_climatology = False
num_years = 100
time_spacing = num_years
time_arr, day_number, ntime, time_units, time_bounds = cts.create_time_arr(num_years, is_climatology, time_spacing)

# Create the CO2 time series: 1% growth per 360-day model year from a base
# value of 300, uniform over all levels and grid points at each time.
# NOTE(review): the original comment mentioned "1.e-6 is to convert from
# ppmv" but no such factor is applied here -- confirm the intended units.
co2 = np.zeros((ntime, npfull, nlat, nlon))
for tick in np.arange(0, len(day_number)):
    co2[tick, ...] = 300. * (1.01 ** (day_number[tick] / 360.))  # Some scenario in dimensionless units.

# Output it to a netCDF file.
file_name = 'co2_test_new_routine_2.nc'
variable_name = 'co2'
number_dict = {}
number_dict['nlat'] = nlat
number_dict['nlon'] = nlon
number_dict['nlatb'] = nlatb
number_dict['nlonb'] = nlonb
number_dict['npfull'] = npfull
number_dict['nphalf'] = nphalf
number_dict['ntime'] = ntime
cts.output_to_file(co2, lats, lons, latbs, lonbs, p_full, p_half, time_arr, time_units, file_name, variable_name, number_dict)
|
jamesp/Isca
|
src/extra/python/scripts/create_co2_timeseries.py
|
Python
|
gpl-3.0
| 1,095
|
[
"NetCDF"
] |
b81c8cdebe366f521933527281b240edab5e40cf3b758f99c2cd697725a4ad83
|
import optparse
import numpy as np
from ase.atoms import Atoms
from gpaw import FermiDirac
from gpaw.tasks.convergence import ConvergenceTestTask
class EggboxTestTask(ConvergenceTestTask):
    """Convergence task measuring the 'eggbox' (grid-translation) error."""

    taskname = 'eggbox'

    def __init__(self, **kwargs):
        """Calculate size of eggbox error.

        A single atom is translated from (0, 0, 0) to (h / 2, 0, 0) in
        25 steps in order to measure to eggbox error."""
        ConvergenceTestTask.__init__(self, **kwargs)

    def calculate(self, name, atoms):
        # Small Fermi smearing, Gamma point only.
        atoms.calc.set(occupations=FermiDirac(0.1),
                       kpts=[1, 1, 1])
        data = {}
        for g in self.gs:
            # Cubic real-space grid with g points per axis (spacing h = L/g).
            atoms.calc.set(gpts=(g, g, g))
            energies = []
            forces = []
            for i in range(25):
                # Step up to x = L/g * 24/48 = h/2 at the last iteration.
                # NOTE(review): assigning the scalar x to positions[0]
                # broadcasts to all three coordinates, so the atom moves
                # along the cube diagonal, not only along x -- confirm.
                x = self.L / g * i / 48
                atoms.positions[0] = x
                e = atoms.calc.get_potential_energy(atoms,
                                                    force_consistent=True)
                energies.append(e)
                forces.append(atoms.get_forces()[0, 0])
            data[g] = (energies, forces)
        return data

    def analyse(self):
        # One 'dE' column per grid spacing, reported in meV.
        self.summary_header = [('name', '')] + [
            ('dE(h=%.2f)' % (self.L / g), 'meV') for g in self.gs]
        for name, data in self.data.items():
            results = []
            for g in self.gs:
                # NOTE(review): keys are str(g) here (vs int g in calculate)
                # and .ptp() implies a numpy array, so self.data has
                # presumably been round-tripped through serialization --
                # confirm.
                de = data[str(g)][0].ptp()
                results.append(de * 1000)
            self.results[name] = results
if __name__ == '__main__':
    # Command-line entry point: parse task arguments and run the eggbox test.
    task = EggboxTestTask()
    args = task.parse_args()
    task.run(args)
|
ajylee/gpaw-rtxs
|
gpaw/tasks/eggbox.py
|
Python
|
gpl-3.0
| 1,622
|
[
"ASE",
"GPAW"
] |
a9554f15c521d698fa7e69145c8e37c3d435118e9b5bbff85f2b697a202bf465
|
import numpy as np
from ..geometry.point import Point
class DataGenerator:
    """Factory helpers producing reproducible arrays of 2-D Points."""

    @staticmethod
    def generate_random_set(n=10, d=2, seed_value=431):
        """Generate n points with coordinates drawn uniformly from [0, 1)."""
        np.random.seed(seed_value)
        coords = np.random.rand(n, d)
        return np.array([Point(row[0], row[1]) for row in coords])

    @staticmethod
    def generate_normal_set(n=10, d=2, seed_value=431):
        """ Generate random test data with Gaussian distribution."""
        np.random.seed(seed_value)
        centre = [0, 0]
        identity_cov = [[1, 0], [0, 1]]
        samples = np.random.multivariate_normal(centre, identity_cov, n)
        return np.array([Point(row[0], row[1]) for row in samples])
|
nhonaitran/aries
|
aries/utils/datagenerator.py
|
Python
|
gpl-2.0
| 700
|
[
"Gaussian"
] |
6241eb8db41338578543dc9dacc737fc7b7c48a56490ec4d658c3ba3582f56c6
|
"""Handles IMAP operations for tMail, the terminal Gmail client"""
import os
import time
from os import path
from simplecrypt import decrypt, encrypt
import imaplib
from requests_oauthlib import OAuth2Session
# OAuth2 client credentials for this installed application.
# NOTE(review): embedding the client secret in source is the normal pattern
# for installed-app OAuth flows; it is not treated as confidential there.
CLIENT_ID = '47637480825-5d3ndp33q8m6eojt015p9th1q5cig3bm.apps.googleusercontent.com'
CLIENT_SECRET = 'xjFxdgVhJZjypUUoW7sC8R4Y'
# Out-of-band redirect: the user copy/pastes the authorization code manually.
REDIRECT_URI = 'urn:ietf:wg:oauth:2.0:oob'
SCOPE = 'https://mail.google.com/'
# Locations (under the user's home directory) of settings and key files.
SETTINGS_DIR = '.tmail'
SETTINGS_FILE = 'settings.txt'
REFRESH_KEY_FILE = '.refresh_key'
ACCESS_KEY_FILE = '.access_key'
def _generate_auth_string(user, token):
"""Generates the string to use when authenticating within IMAP"""
return 'user=%s\1auth=Bearer %s\1\1' % (user, token)
def get_expiration():
    """Return the stored access-token expiration timestamp.

    Raises IOError when the settings file holds no 'expiration' entry.
    """
    settings_path = path.join(
        path.expanduser('~'), SETTINGS_DIR, SETTINGS_FILE)
    with open(settings_path) as settings_file:
        for line in settings_file:
            if line.startswith('expiration'):
                return float(line.split('=')[1])
    raise IOError('No expiration in settings')
def refresh_access_token():
    """Refreshes the access token.

    Reads the encrypted refresh token from disk, decrypts it with the
    client secret and exchanges it at Google's token endpoint.

    Returns:
        (access_token, expires_in): the new bearer token and its lifetime
        in seconds (relative, not an absolute timestamp).
    """
    # BUG FIX: the original read the refresh token from ACCESS_KEY_FILE,
    # but authenticate() stores the refresh token in REFRESH_KEY_FILE (the
    # access token lives in ACCESS_KEY_FILE), so refreshing sent the stale
    # access token to the token endpoint.
    with open(path.join(
            path.expanduser('~'),
            SETTINGS_DIR,
            REFRESH_KEY_FILE), 'rb') as key_file:
        refresh_token = key_file.read()
    refresh_token = decrypt(CLIENT_SECRET, refresh_token).decode('utf-8')
    google = OAuth2Session(client_id=CLIENT_ID)
    token = google.refresh_token(
        'https://accounts.google.com/o/oauth2/token',
        refresh_token=refresh_token,
        client_secret=CLIENT_SECRET,
        client_id=CLIENT_ID)
    return token['access_token'], token['expires_in']
def save_expiration(expiration):
    """Saves over any existing expiration in the settings file"""
    settings_path = path.join(
        path.expanduser('~'), SETTINGS_DIR, SETTINGS_FILE)
    # Snapshot the current contents.
    with open(settings_path, 'r+') as settings_file:
        existing_lines = settings_file.readlines()
    # Rewrite the file without any previous expiration entry.
    with open(settings_path, 'w+') as settings_file:
        settings_file.writelines(
            line for line in existing_lines
            if not line.startswith('expiration'))
    # Append the new absolute expiration timestamp.
    with open(settings_path, 'a+') as settings_file:
        settings_file.write('expiration=' + str(time.time() + expiration))
def get_access_token():
    """Retrieves the access token from the keys file.

    Propagates IOError from get_expiration() when no settings exist yet;
    refreshes the token first when the stored one has expired.
    """
    # BUG FIX: the original used `assert expiration > time.time()` and
    # caught AssertionError to trigger a refresh. Asserts are stripped
    # under `python -O`, which would silently disable token refreshing,
    # so use an explicit comparison instead.
    expiration = get_expiration()
    if expiration <= time.time():
        access_token, expiration = refresh_access_token()
        save_expiration(expiration)
        return access_token
    with open(path.join(
            path.expanduser('~'),
            SETTINGS_DIR,
            ACCESS_KEY_FILE), 'rb') as key_file:
        access_token = key_file.read()
    return decrypt(CLIENT_SECRET, access_token).decode('utf-8')
def get_username():
    """Retrieves the username from the settings file.

    Raises IOError when the settings file holds no 'username' entry.
    """
    with open(path.join(
            path.expanduser('~'),
            SETTINGS_DIR,
            SETTINGS_FILE)) as settings_file:
        for line in settings_file:
            if line.startswith('username'):
                # BUG FIX: save_username() writes 'username=<value>\n'; the
                # original returned the value with the trailing newline,
                # which would corrupt the IMAP XOAUTH2 auth string.
                return line.split('=')[1].strip()
    raise IOError('No username in settings')
def oauth_process():
    """Goes through the OAuth2 process for Gmail.

    Returns:
        (refresh_token, access_token, expires_in) from the token endpoint.
    """
    os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = '1'
    os.environ['OAUTHLIB_RELAX_TOKEN_SCOPE'] = '1'
    google = OAuth2Session(
        client_id=CLIENT_ID,
        scope=SCOPE,
        redirect_uri=REDIRECT_URI)
    # BUG FIX: authorization_url() returns a (url, state) tuple; the original
    # printed the whole tuple, showing "('https://...', 'state')" to the user.
    authorization_url, _ = google.authorization_url(
        'https://accounts.google.com/o/oauth2/auth')
    print('Please visit this url to authenticate: ')
    print(authorization_url)
    authorization_response = input('Please enter the authorization code: ')
    token = google.fetch_token(
        'https://accounts.google.com/o/oauth2/token',
        client_secret=CLIENT_SECRET,
        code=authorization_response)
    return token['refresh_token'], token['access_token'], float(token['expires_in'])
def save_username():
    """Prompt for the user's Gmail address and append it to settings."""
    username = input('Please enter your Gmail address: ')
    settings_path = path.join(
        path.expanduser('~'), SETTINGS_DIR, SETTINGS_FILE)
    with open(settings_path, 'a+') as settings_file:
        settings_file.write('username=' + username + '\n')
    return username
def authenticate():
    """Authenticates the current user using OAuth2.

    Returns an authenticated imaplib.IMAP4_SSL connection to Gmail.
    On first run (no settings on disk) this walks the interactive OAuth
    consent flow and stores the encrypted tokens under ~/.tmail.
    """
    username = None
    try:
        access_token = get_access_token()
    except IOError:
        # First run: no settings file yet. Create the settings directory,
        # ask for the address, and run the interactive OAuth flow.
        os.makedirs(path.join(path.expanduser('~'), SETTINGS_DIR), exist_ok=True)
        username = save_username()
        refresh_token, access_token, expiration = oauth_process()
        save_expiration(expiration)
        # Persist both tokens, encrypted with the client secret.
        with open(
                path.join(path.expanduser('~'), SETTINGS_DIR, REFRESH_KEY_FILE),
                'wb+') as refresh_key_file:
            refresh_key_file.write(encrypt(CLIENT_SECRET, refresh_token))
        with open(
                path.join(path.expanduser('~'), SETTINGS_DIR, ACCESS_KEY_FILE),
                'wb+') as access_key_file:
            access_key_file.write(encrypt(CLIENT_SECRET, access_token))
    if username is None:
        # Settings existed but we have not asked for the username this run.
        try:
            username = get_username()
        except IOError:
            username = save_username()
    # Build the XOAUTH2 string and open the IMAP connection.
    auth_string = _generate_auth_string(username, access_token)
    imap_conn = imaplib.IMAP4_SSL('imap.gmail.com')
    imap_conn.authenticate('XOAUTH2', lambda x: auth_string.encode('ascii'))
    return imap_conn
|
nortonprojects/terMail
|
authentication.py
|
Python
|
lgpl-2.1
| 5,601
|
[
"VisIt"
] |
25f7e34fc1ce86c12e278249d6362387982d4369ed175c8393bd65922ac9f5cb
|
# ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
import numpy as np
from ._base import Ordination, OrdinationResults
from ._utils import corr, svd_rank, scale
from skbio.util._decorator import experimental
class RDA(Ordination):
    r"""Compute redundancy analysis, a type of canonical analysis.

    It is related to PCA and multiple regression because the explained
    variables `Y` are fitted to the explanatory variables `X` and PCA
    is then performed on the fitted values. A similar process is
    performed on the residuals.

    RDA should be chosen if the studied gradient is small, and CCA
    when it's large, so that the contingency table is sparse.

    Parameters
    ----------
    Y : array_like
        :math:`n \times p` response matrix. Its columns need be
        dimensionally homogeneous (or you can set `scale_Y=True`).
    X : array_like
        :math:`n \times m, n \geq m` matrix of explanatory
        variables. Its columns need not be standardized, but doing so
        turns regression coefficients into standard regression
        coefficients.
    scale_Y : bool, optional
        Controls whether the response matrix columns are scaled to
        have unit standard deviation. Defaults to `False`.

    Notes
    -----
    The algorithm is based on [1]_, \S 11.1, and is expected to
    give the same results as ``rda(Y, X)`` in R's package vegan.

    See Also
    --------
    CCA

    References
    ----------
    .. [1] Legendre P. and Legendre L. 1998. Numerical
       Ecology. Elsevier, Amsterdam.
    """
    short_method_name = 'RDA'
    long_method_name = 'Redundancy Analysis'

    @experimental(as_of="0.4.0")
    def __init__(self, Y, X, site_ids, species_ids, scale_Y=False):
        # Coerce inputs to float64 once; all the linear algebra below
        # assumes floating point.
        self.Y = np.asarray(Y, dtype=np.float64)
        self.X = np.asarray(X, dtype=np.float64)
        self.site_ids = site_ids
        self.species_ids = species_ids
        self._rda(scale_Y)

    def _rda(self, scale_Y):
        """Fit Y on X, then run PCA on the fitted values and the residuals."""
        n, p = self.Y.shape
        n_, m = self.X.shape
        if n != n_:
            raise ValueError(
                "Both data matrices must have the same number of rows.")
        if n < m:
            # Mmm actually vegan is able to do this case, too
            raise ValueError(
                "Explanatory variables cannot have less rows than columns.")
        # Centre response variables (they must be dimensionally
        # homogeneous)
        Y = scale(self.Y, with_std=scale_Y)
        # Centre explanatory variables
        X = scale(self.X, with_std=False)
        # Distribution of variables should be examined and transformed
        # if necessary (see paragraph 4 in p. 580 L&L 1998)
        # Compute Y_hat (fitted values by multivariate linear
        # regression, that is, linear least squares). Formula 11.6 in
        # L&L 1998 involves solving the normal equations, but that fails
        # when cond(X) ~ eps**(-0.5). A more expensive but much more
        # stable solution (fails when cond(X) ~ eps**-1) is computed
        # using the QR decomposition of X = QR:
        # (11.6) Y_hat = X [X' X]^{-1} X' Y
        #              = QR [R'Q' QR]^{-1} R'Q' Y
        #              = QR [R' R]^{-1} R'Q' Y
        #              = QR R^{-1} R'^{-1} R' Q' Y
        #              = Q Q' Y
        # and B (matrix of regression coefficients)
        # (11.4) B = [X' X]^{-1} X' Y
        #          = R^{-1} R'^{-1} R' Q' Y
        #          = R^{-1} Q'
        # Q, R = np.linalg.qr(X)
        # Y_hat = Q.dot(Q.T).dot(Y)
        # B = scipy.linalg.solve_triangular(R, Q.T.dot(Y))
        # This works provided X has full rank. When not, you can still
        # fix it using R's pseudoinverse or partitioning R. To avoid any
        # issues, like the numerical instability when trying to
        # reproduce an example in L&L where X was rank-deficient, we'll
        # just use `np.linalg.lstsq`, which uses the SVD decomposition
        # under the hood and so it's also more expensive.
        B, _, rank_X, _ = np.linalg.lstsq(X, Y)
        Y_hat = X.dot(B)
        # Now let's perform PCA on the fitted values from the multiple
        # regression
        u, s, vt = np.linalg.svd(Y_hat, full_matrices=False)
        # vt are the right eigenvectors, which is what we need to
        # perform PCA. That is, we're changing points in Y_hat from the
        # canonical basis to the orthonormal basis given by the right
        # eigenvectors of Y_hat (or equivalently, the eigenvectors of
        # the covariance matrix Y_hat.T.dot(Y_hat))
        # See 3) in p. 583 in L&L 1998
        rank = svd_rank(Y_hat.shape, s)
        # Theoretically, there're at most min(p, m, n - 1) non-zero eigenvalues
        U = vt[:rank].T  # U as in Fig. 11.2
        # Ordination in the space of response variables. Its columns are
        # site scores. (Eq. 11.12)
        F = Y.dot(U)
        # Ordination in the space of explanatory variables. Its columns
        # are fitted site scores. (Eq. 11.13)
        Z = Y_hat.dot(U)
        # Canonical coefficients (formula 11.14)
        # C = B.dot(U)  # Not used
        Y_res = Y - Y_hat
        # PCA on the residuals
        u_res, s_res, vt_res = np.linalg.svd(Y_res, full_matrices=False)
        # See 9) in p. 587 in L&L 1998
        rank_res = svd_rank(Y_res.shape, s_res)
        # Theoretically, there're at most min(p, n - 1) non-zero eigenvalues
        U_res = vt_res[:rank_res].T
        F_res = Y_res.dot(U_res)  # Ordination in the space of residuals
        # Storing values needed to compute scores
        iter_ = (('U', U), ('U_res', U_res),
                 ('F', F),
                 ('F_res', F_res),
                 ('Z', Z),
                 ('u', u[:, :rank]))
        for val_name, val in iter_:
            setattr(self, val_name, val)
        # Constrained eigenvalues first, then the residual (unconstrained)
        # ones, concatenated into a single vector.
        self.eigenvalues = np.r_[s[:rank], s_res[:rank_res]]

    @experimental(as_of="0.4.0")
    def scores(self, scaling):
        r"""Compute site, species and biplot scores for different scalings.

        Parameters
        ----------
        scaling : int
            Scaling type 1 produces a distance biplot. It focuses on
            the ordination of rows (sites) because their transformed
            distances approximate their original euclidean
            distances. Especially interesting when most explanatory
            variables are binary.

            Scaling type 2 produces a correlation biplot. It focuses
            on the relationships among explained variables (`Y`). It
            is interpreted like scaling type 1, but taking into
            account that distances between objects don't approximate
            their euclidean distances.

            See more details about distance and correlation biplots in
            [1]_, \S 9.1.4.

        Returns
        -------
        OrdinationResults
            Object that stores the computed eigenvalues, the
            proportion explained by each of them (per unit),
            transformed coordinates for species and sites, biplot
            scores, site constraints, etc.

        See Also
        --------
        OrdinationResults

        References
        ----------
        .. [1] Legendre P. and Legendre L. 1998. Numerical
           Ecology. Elsevier, Amsterdam.
        """
        if scaling not in {1, 2}:
            raise NotImplementedError("Only scalings 1, 2 available for RDA.")
        # According to the vegan-FAQ.pdf, the scaling factor for scores
        # is (notice that L&L 1998 says in p. 586 that such scaling
        # doesn't affect the interpretation of a biplot):
        eigvals = self.eigenvalues
        const = np.sum(eigvals**2)**0.25
        if scaling == 1:
            scaling_factor = const
        elif scaling == 2:
            scaling_factor = eigvals / const
        species_scores = np.hstack((self.U, self.U_res)) * scaling_factor
        site_scores = np.hstack((self.F, self.F_res)) / scaling_factor
        # TODO not yet used/displayed
        site_constraints = np.hstack((self.Z, self.F_res)) / scaling_factor
        # vegan seems to compute them as corr(self.X[:, :rank_X],
        # self.u) but I don't think that's a good idea. In fact, if
        # you take the example shown in Figure 11.3 in L&L 1998 you
        # can see that there's an arrow for each of the 4
        # environmental variables (depth, coral, sand, other) even if
        # other = not(coral or sand)
        biplot_scores = corr(self.X, self.u)
        # The "Correlations of environmental variables with site
        # scores" from table 11.4 are quite similar to vegan's biplot
        # scores, but they're computed like this:
        #   corr(self.X, self.F))
        return OrdinationResults(eigvals=eigvals,
                                 proportion_explained=eigvals / eigvals.sum(),
                                 species=species_scores,
                                 site=site_scores,
                                 biplot=biplot_scores,
                                 site_constraints=site_constraints,
                                 site_ids=self.site_ids,
                                 species_ids=self.species_ids)
|
demis001/scikit-bio
|
skbio/stats/ordination/_redundancy_analysis.py
|
Python
|
bsd-3-clause
| 9,550
|
[
"scikit-bio"
] |
7d988203dce790a4f117ffcba66fe95a810da1f013a1fbac5f4a601100f2a89e
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RGostats(RPackage):
    """Tools for manipulating GO and microarrays.

    A set of tools for interacting with GO and microarray data. A variety of
    basic manipulation tools for graphs, hypothesis testing and other simple
    calculations."""

    homepage = "https://bioconductor.org/packages/GOstats"
    # Bioconductor packages are fetched from the Bioconductor git mirror.
    git = "https://git.bioconductor.org/packages/GOstats.git"

    # Each GOstats release is pinned to its Bioconductor git commit.
    version('2.56.0', commit='8f988c3b4b1ce7e05626aae8956004c7bbdd6f3a')
    version('2.50.0', commit='ee13f84341988d537a5485dcdcfb71f69e6e4930')
    version('2.48.0', commit='5db7020f4bab725cd729b32bd1d5e819b31f2485')
    version('2.46.0', commit='489d7a437488f77c3010f6212f3b81f4e240cd17')
    version('2.44.0', commit='fc64ca2aa37c52656d396d6e46611f39d6efd48a')
    version('2.42.0', commit='8b29709064a3b66cf1d963b2be0c996fb48c873e')

    # R itself (built with X support) plus R-package dependencies; minimum
    # versions follow the DESCRIPTION files of the pinned releases, with
    # 'when=' constraints for requirements that changed across releases.
    depends_on('r+X', type=('build', 'run'))
    depends_on('r@2.10:', type=('build', 'run'))
    depends_on('r-biobase@1.15.29:', type=('build', 'run'))
    depends_on('r-category@2.3.26:', type=('build', 'run'))
    depends_on('r-category@2.43.2:', when='@2.44.0:', type=('build', 'run'))
    depends_on('r-graph@1.15.15:', when='@2.42.0', type=('build', 'run'))
    depends_on('r-graph', when='@2.44.0:', type=('build', 'run'))
    depends_on('r-annotationdbi@0.0.89:', type=('build', 'run'))
    depends_on('r-go-db@1.13.0:', type=('build', 'run'))
    depends_on('r-rbgl', type=('build', 'run'))
    depends_on('r-annotate@1.13.2:', type=('build', 'run'))
    depends_on('r-annotationforge', type=('build', 'run'))
    depends_on('r-rgraphviz', when='@2.44.0:', type=('build', 'run'))
|
LLNL/spack
|
var/spack/repos/builtin/packages/r-gostats/package.py
|
Python
|
lgpl-2.1
| 1,837
|
[
"Bioconductor"
] |
40be587b101737c3cf8a49570e92eeec4600c14a96af9583fb288d7f8a856ff4
|
# Copyright 2020-2021 Peppy Player peppy.player@gmail.com
#
# This file is part of Peppy Player.
#
# Peppy Player is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Peppy Player is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Peppy Player. If not, see <http://www.gnu.org/licenses/>.
import os
import pygame
import base64
import logging
import codecs
import random
from util.config import SHOW_EMBEDDED_IMAGES, USAGE, USE_WEB, COLORS, COLOR_DARK_LIGHT, COLOR_MUTE, IMAGE_SIZE, \
SCREEN_INFO, WIDTH, HEIGHT, BACKGROUND, BLUR_RADIUS, OVERLAY_COLOR, OVERLAY_OPACITY, BACKGROUND_DEFINITIONS, \
BGR_FILENAME, SCREEN_BGR_NAMES, ICONS, ICONS_COLOR_1_MAIN, ICONS_COLOR_1_ON, ICONS_COLOR_2_MAIN, ICONS_COLOR_2_ON, \
IMAGE_SIZE_WITHOUT_LABEL, ICONS_TYPE, ICON_SIZE, GENERATED_IMAGE, COLOR_MEDIUM, HIDE_FOLDER_NAME, ENABLE_EMBEDDED_IMAGES
from PIL import Image, ImageFilter
from PIL.ImageColor import getcolor, getrgb
from PIL.ImageOps import grayscale
from io import BytesIO
from svg import Parser, Rasterizer
from util.fileutil import FOLDER, FOLDER_WITH_ICON, FILE_AUDIO, FILE_PLAYLIST, FILE_IMAGE, FILE_CD_DRIVE
from urllib import request
from urllib.request import urlopen
from mutagen.id3 import ID3
from mutagen.flac import FLAC
from mutagen.mp4 import MP4
FOLDER_ICONS = "icons"
FOLDER_BACKGROUNDS = "backgrounds"
ICON_FOLDER = "folder"
ICON_FILE_AUDIO = "audio-file"
ICON_FILE_PLAYLIST = "playlist"
ICON_CD_DRIVE = "cd-player"
ICON_IMAGE_FILE = "image-file"
FILE_COLON = "colon.png"
DEFAULT_CD_IMAGE = "cd.png"
SVG_DEFAULT_COLOR_1 = "#808080"
SVG_DEFAULT_COLOR_2 = "#C0C0C0"
SVG_DEFAULT_GRADIENT_COLOR_1 = "#404040"
SVG_DEFAULT_GRADIENT_COLOR_2 = "#A0A0A0"
EXT_PNG = ".png"
EXT_JPG = ".jpg"
EXT_SVG = ".svg"
EXT_MP3 = ".mp3"
EXT_FLAC = ".flac"
EXT_MP4 = ".mp4"
EXT_M4A = ".m4a"
MONOCHROME = "monochrome"
BI_COLOR = "bi-color"
GRADIENT = "gradient"
class ImageUtil(object):
""" Image Utility class """
def __init__(self, util):
""" Initializer.
:param util: utility object
"""
self.util = util
self.config = util.config
self.discogs_util = util.discogs_util
self.COLOR_MAIN_1 = self.color_to_hex(self.config[ICONS][ICONS_COLOR_1_MAIN])
self.COLOR_ON_1 = self.color_to_hex(self.config[ICONS][ICONS_COLOR_1_ON])
try:
self.COLOR_MAIN_2 = self.color_to_hex(self.config[ICONS][ICONS_COLOR_2_MAIN])
self.COLOR_ON_2 = self.color_to_hex(self.config[ICONS][ICONS_COLOR_2_ON])
except:
self.COLOR_MAIN_2 = self.COLOR_ON_2 = "#000000"
self.COLOR_OFF = self.color_to_hex(self.config[COLORS][COLOR_DARK_LIGHT])
self.COLOR_MUTE = self.color_to_hex(self.config[COLORS][COLOR_MUTE])
self.image_cache = {}
self.image_cache_base64 = {}
self.svg_cache = {}
self.background_cache = {}
self.album_art_url_cache = {}
self.FILE_EXTENSIONS_EMBEDDED_IMAGES = None
if self.config[SHOW_EMBEDDED_IMAGES]:
self.FILE_EXTENSIONS_EMBEDDED_IMAGES = ["." + s for s in self.config[SHOW_EMBEDDED_IMAGES]]
def load_image(self, path, base64=False, bounding_box=None):
""" Load and return image
:param path: image path
:param base64: True - encode image using base64 algorithm (for web), False - don't encode
:param bounding_box: bounding box
"""
if base64:
return self.load_base64_image(path)
else:
return self.load_pygame_image(path, bounding_box)
def load_base64_image(self, path, cache_key=None):
""" Load image and encode it using base64 encoding.
:param path: image path
:param cache_key: cache key
:return: base64 encoded image
"""
try:
img = self.image_cache_base64[path]
return img
except:
pass
key = path
if cache_key:
key = cache_key
if EXT_SVG in path:
svg_image = self.svg_cache[path]
img = base64.b64encode(svg_image.encode()).decode()
self.image_cache_base64[key] = img
return img
else:
p = path.lower()
if p.endswith(EXT_MP3) or p.endswith(EXT_FLAC) or p.endswith(EXT_MP4) or p.endswith(EXT_M4A):
image_buffer = self.get_image_from_audio_file(path, True)
if image_buffer:
img = base64.b64encode(image_buffer.read()).decode()
self.image_cache_base64[key] = img
return img
with open(path, 'rb') as f:
img = base64.b64encode(f.read()).decode()
self.image_cache_base64[key] = img
return img
    def load_pygame_image(self, path, bounding_box=None, use_cache=True):
        """ Load image.

        First, check if image is in the cache.
        If yes, return the image from the cache.
        If not load image file and place it in the cache.

        :param path: image path
        :param bounding_box: optional (w, h) constraint; image is scaled to fit it
        :param use_cache: True - use cache, False - don't use cache
        :return: tuple (path, image) or None if the file could not be loaded
        """
        image = None
        if use_cache:
            try:
                # the cache key includes the bounding box width so the same
                # file can be cached at several sizes
                p = path
                if bounding_box:
                    p = path + str(bounding_box[0])
                i = self.image_cache[p]
                return (path, i)
            except KeyError:
                pass
        try:
            image = pygame.image.load(path.encode("utf-8")).convert_alpha()
        except Exception:
            pass
        if image:
            img = image
            p = path
            if bounding_box:
                scale_ratio = self.get_scale_ratio(bounding_box, img)
                img = self.scale_image(image, scale_ratio)
                p = path + str(bounding_box[0])
            if use_cache:
                self.image_cache[p] = img
            return (path, img)
        else:
            return None
def get_scale_ratio(self, bounding_box, img, fit_height=False, fit_width=False, fit_all=False):
""" Return scale ratio calculated from provided constraints (bounding box) and image
:param bounding_box: bounding box
:param img: image
:param fit_height: True - fit image height to bounding box
:return: tuple representing scale ratio
"""
if not img: return
w = bounding_box[0]
h = bounding_box[1]
width = img.get_size()[0]
height = img.get_size()[1]
if (width >= w and height > h) or (width > w and height >= h):
k1 = w/width
k2 = h/height
if fit_height:
width = int(width * k2)
height = int(height * k2)
elif fit_width:
width = int(width * k1)
height = int(height * k1)
elif fit_all:
width = int(width * (max(k1, k2)))
height = int(height * (max(k1, k2)))
else:
width = int(width * (min(k1, k2)))
height = int(height * (min(k1, k2)))
elif width > w and height < h:
k = w/width
width = int(width * k)
height = int(height * k)
elif width < w and height > h:
k = h/height
width = int(width * k)
height = int(height * k)
elif width < w and height < h:
k1 = w/width
k2 = h/height
width = int(width * (min(k1, k2)))
height = int(height * (min(k1, k2)))
return (width, height)
    def scale_image(self, image, ratio):
        """ Scale image using specified ratio

        :param image: image to scale (pygame Surface or (path, Surface) tuple)
        :param ratio: target (width, height)
        :return: scaled surface or None
        """
        if image == None:
            return None
        s = pygame.Surface(ratio, flags=pygame.SRCALPHA)
        if isinstance(image, tuple):
            # (path, surface) tuple - unwrap the surface
            image = image[1]
        # downscale huge images first to avoid out-of-memory in tostring
        image = self.pre_scale(image)
        if image:
            # round-trip through PIL to do the actual resampling
            d = pygame.image.tostring(image, "RGBA", False)
            img = Image.frombytes("RGBA", image.get_size(), d)
            i = img.resize(ratio)
            d = pygame.image.fromstring(i.tobytes(), i.size, i.mode)
            s.blit(d, (0, 0))
            return s
        else:
            return None
def pre_scale(self, image):
""" Scale down very large image to avoid Out of Memory exception in 'pygame.image.tostring'
:param image: input image
:return: original input image if pre-scale not reuired, scaled image if pre-scale required
"""
if image == None: return None
size = image.get_size()
large_image_size = 2000
if size[0] > large_image_size or size[1] > large_image_size:
k_x = size[0] / large_image_size
k_y = size[1] / large_image_size
width = int(size[0] / k_x)
height = int(size[1] / k_y)
return pygame.transform.scale(image, (width, height))
else:
return image
def get_image_from_audio_file(self, filename, return_buffer=False):
""" Fetch image from audio file.
Supported formats: MP3, FLAC, MP4, M4A
:param filename: file name
:param return_buffer: True - return image buffer, False - return Pygame image
:return: image or None if not found
"""
if not filename: return None
name = filename.lower()
if name.endswith(EXT_MP3):
if self.FILE_EXTENSIONS_EMBEDDED_IMAGES and EXT_MP3 in self.FILE_EXTENSIONS_EMBEDDED_IMAGES:
return self.get_image_from_mp3(filename, return_buffer)
elif name.endswith(EXT_FLAC):
if self.FILE_EXTENSIONS_EMBEDDED_IMAGES and EXT_FLAC in self.FILE_EXTENSIONS_EMBEDDED_IMAGES:
return self.get_image_from_flac(filename, return_buffer)
elif name.endswith(EXT_MP4) or name.endswith(EXT_M4A):
if self.FILE_EXTENSIONS_EMBEDDED_IMAGES and ((EXT_MP4 in self.FILE_EXTENSIONS_EMBEDDED_IMAGES) or (EXT_M4A in self.FILE_EXTENSIONS_EMBEDDED_IMAGES)):
return self.get_image_from_mp4(filename, return_buffer)
return None
    def get_image_from_mp3(self, filename, return_buffer=False):
        """ Fetch image from mp3 file

        :param filename: file name
        :param return_buffer: True - return image buffer, False - return Pygame image
        :return: image or None if not found
        """
        try:
            tags = ID3(filename)
        except:
            return None
        # the APIC frame holds the attached picture (album art)
        if tags and tags.get("APIC:"):
            try:
                data = tags.get("APIC:").data
                buffer = BytesIO(data)
                if return_buffer:
                    return buffer
                else:
                    return pygame.image.load(buffer).convert_alpha()
            except:
                return None
        else:
            return None
    def get_image_from_flac(self, filename, return_buffer=False):
        """ Fetch image from flac file

        :param filename: file name
        :param return_buffer: True - return image buffer, False - return Pygame image
        :return: image or None if not found
        """
        try:
            # the first embedded picture is used as album art
            pictures = FLAC(filename).pictures
            if pictures:
                data = pictures[0].data
                buffer = BytesIO(data)
                if return_buffer:
                    return buffer
                else:
                    return pygame.image.load(buffer).convert_alpha()
            else:
                return None
        except:
            return None
    def get_image_from_mp4(self, filename, return_buffer=False):
        """ Fetch image from mp4/m4a file

        :param filename: file name
        :param return_buffer: True - return image buffer, False - return Pygame image
        :return: image or None if not found
        """
        try:
            f = MP4(filename)
            t = f.tags
            # the 'covr' atom holds the cover art
            pictures = t['covr']
            if pictures:
                data = pictures[0]
                buffer = BytesIO(data)
                if return_buffer:
                    return buffer
                else:
                    return pygame.image.load(buffer).convert_alpha()
            else:
                return None
        except:
            return None
    def get_audio_file_icon(self, folder, bb, url=None):
        """ Return audio file icon which is album art image.
        If it's not available then CD image will be returned.

        :param folder: folder name
        :param bb: bounding box
        :param url: audio file name
        :return: tuple (path, audio file icon)
        """
        if url:
            img = self.get_image_from_audio_file(url)
            if img:
                ratio = self.get_scale_ratio((bb.w, bb.h), img)
                scaled_img = self.scale_image(img, ratio)
                return (url, scaled_img)
        # no embedded art - fall back to the folder image or the default CD image
        d = os.path.join(FOLDER_ICONS, DEFAULT_CD_IMAGE)
        p = self.util.get_folder_image_path(folder)
        if not p: p = d
        img = self.load_image(p, False, (bb.w, bb.h))
        return (p, img[1])
    def get_base64_surface(self, surface):
        """ Encode Pygame Surface using Base 64

        :param surface: Pygame Surface object
        :return: base 64 encoded surface as PNG image, None on failure
        """
        if surface == None:
            return None
        s = None
        try:
            # Surface -> PIL image -> in-memory PNG -> base64 string
            d = pygame.image.tostring(surface, "RGBA", False)
            img = Image.frombytes("RGBA", surface.get_size(), d)
            buffer = BytesIO()
            img.save(buffer, "PNG")
            s = base64.b64encode(buffer.getvalue()).decode()
        except Exception as e:
            logging.debug(e)
        return s
    def blur_image(self, surface, blur_radius, argb=False):
        """ Blur image using Gaussian method

        :param surface: surface to blur
        :param blur_radius: integer number defining how blurry the surface should be
        :param argb: True - ARGB order, False - RGBA order
        :return: blurred surface
        """
        size = surface.get_size()
        if argb:
            # reorder the channels in place so tostring() emits RGBA
            r, g, b, a = surface.get_shifts()
            rm, gm, bm, am = surface.get_masks()
            surface.set_shifts((b, g, r, a))
            surface.set_masks((bm, gm, rm, am))
        # blur via PIL and convert back to a pygame surface
        s = pygame.image.tostring(surface, "RGBA", False)
        img = Image.frombytes("RGBA", size, s)
        blurred = img.filter(ImageFilter.GaussianBlur(blur_radius))
        b = blurred.tobytes("raw", 'RGBA')
        return pygame.image.fromstring(b, size, 'RGBA')
    def invert_image(self, surface):
        """ Invert colors of the Pygame surface

        :param surface: image to invert
        :return: inverted image
        """
        size = surface.get_size()
        img = pygame.Surface(size, pygame.SRCALPHA)
        # subtract the source from white to invert the RGB channels
        img.fill((255, 255, 255, 255))
        img.blit(surface, (0, 0), None, pygame.BLEND_RGB_SUB)
        return img
def load_icon_main(self, filename, bounding_box=None, scale=1.0):
""" Load icon with main color
:param filename: icon filename
:param bounding_box: icon bounding box
:param scale: icon scale ratio
:return: icon object
"""
if self.config[ICONS][ICONS_TYPE] == MONOCHROME:
return self.load_svg_icon(filename, self.COLOR_MAIN_1, bounding_box, scale, self.COLOR_MAIN_1)
elif self.config[ICONS][ICONS_TYPE] == BI_COLOR:
return self.load_svg_icon(filename, self.COLOR_MAIN_1, bounding_box, scale, self.COLOR_MAIN_2)
elif self.config[ICONS][ICONS_TYPE] == GRADIENT:
return self.load_svg_icon(filename, self.COLOR_MAIN_1, bounding_box, scale, self.COLOR_MAIN_2, True)
    def load_icon_on(self, filename, bounding_box=None, scale=1.0):
        """ Load icon with selection color

        :param filename: icon filename
        :param bounding_box: icon bounding box
        :param scale: icon scale ratio
        :return: icon object
        """
        if self.config[ICONS][ICONS_TYPE] == MONOCHROME:
            return self.load_svg_icon(filename, self.COLOR_ON_1, bounding_box, scale, self.COLOR_ON_1)
        elif self.config[ICONS][ICONS_TYPE] == BI_COLOR:
            return self.load_svg_icon(filename, self.COLOR_ON_1, bounding_box, scale, self.COLOR_ON_2)
        elif self.config[ICONS][ICONS_TYPE] == GRADIENT:
            return self.load_svg_icon(filename, self.COLOR_ON_1, bounding_box, scale, self.COLOR_ON_2, True)
    def load_icon_off(self, filename, bounding_box=None, scale=1.0):
        """ Load icon with disabled color

        :param filename: icon filename
        :param bounding_box: icon bounding box
        :param scale: icon scale ratio
        :return: icon object
        """
        # disabled icons are always rendered single-color
        return self.load_svg_icon(filename, self.COLOR_OFF, bounding_box, scale, self.COLOR_OFF)
    def load_icon_mute(self, filename, bounding_box=None, scale=1.0):
        """ Load icon with mute color

        :param filename: icon filename
        :param bounding_box: icon bounding box
        :param scale: icon scale ratio
        :return: icon object
        """
        # mute icons are always rendered single-color
        return self.load_svg_icon(filename, self.COLOR_MUTE, bounding_box, scale, self.COLOR_MUTE)
def load_svg_icon(self, filename, color_1, bounding_box=None, scale=1.0, color_2=None, gradient=False):
""" Load monochrome SVG image with replaced color
:param filename: svg image file name
:param color_1: base icon hex color
:param bounding_box: image bounding box
:param scale: scale factor
:param color_2: second hex color
:param gradient: True - create gradient, False - use solid colors
:return: bitmap image rasterized from svg image
"""
filename += EXT_SVG
path = os.path.join(FOLDER_ICONS, filename)
t = path.replace('\\','/')
if color_2:
c_2 = "_" + color_2
else:
c_2 = ""
cache_path = t + "_" + str(scale) + "_" + color_1 + c_2
try:
i = self.image_cache[cache_path]
return (cache_path, i)
except KeyError:
pass
s = codecs.open(path, "r").read()
if gradient:
g = "url(#gradient)"
s = s.replace(SVG_DEFAULT_GRADIENT_COLOR_1, color_2)
s = s.replace(SVG_DEFAULT_GRADIENT_COLOR_2, color_1)
s = s.replace(SVG_DEFAULT_COLOR_1, g)
s = s.replace(SVG_DEFAULT_COLOR_2, g)
else:
if color_2:
s = s.replace(SVG_DEFAULT_COLOR_2, color_1)
s = s.replace(SVG_DEFAULT_COLOR_1, color_2)
else:
s = s.replace(SVG_DEFAULT_COLOR_1, color_1)
try:
bitmap_image = Parser.parse(s)
except:
logging.debug("Problem parsing file %s", path)
return None
if self.config[USAGE][USE_WEB]:
self.svg_cache[cache_path] = s
return self.scale_svg_image(cache_path, bitmap_image, bounding_box, scale)
def load_multi_color_svg_icon(self, filename, bounding_box=None, scale=1.0):
""" Load SVG image
:param filename: svg image file name
:param bounding_box: image bounding box
:param scale: scale factor
:return: bitmap image rasterized from svg image
"""
filename += EXT_SVG
path = os.path.join(FOLDER_ICONS, filename)
cache_path = path + "_" + str(scale)
try:
i = self.image_cache[cache_path]
return (cache_path, i)
except KeyError:
pass
try:
svg_image = Parser.parse_file(path)
except:
logging.debug("Problem parsing file %s", path)
return None
if self.config[USAGE][USE_WEB]:
try:
self.svg_cache[cache_path]
except KeyError:
t = cache_path.replace('\\','/')
self.svg_cache[t] = codecs.open(path, "r").read()
return self.scale_svg_image(cache_path, svg_image, bounding_box, scale)
    def scale_svg_image(self, cache_path, svg_image, bounding_box=None, scale=1.0):
        """ Scale SVG image

        :param cache_path: cache key for image
        :param svg_image: SVG image
        :param bounding_box: image bounding box
        :param scale: scale factor
        :return: tuple (cache_path, rasterized bitmap image)
        """
        # +2 leaves a one-pixel margin around the rasterized icon
        w = svg_image.width + 2
        h = svg_image.height + 2
        if bounding_box == None:
            bb_w = w * scale
            bb_h = h * scale
        else:
            bb_w = bounding_box.w * scale
            bb_h = bounding_box.h * scale
        # keep the aspect ratio: use the smaller of the two scale factors
        w_scaled = bb_w / w
        h_scaled = bb_h / h
        scale_factor = min(w_scaled, h_scaled)
        w_final = int(w * scale_factor)
        h_final = int(h * scale_factor)
        r = Rasterizer()
        buff = r.rasterize(svg_image, w_final, h_final, scale_factor)
        image = pygame.image.frombuffer(buff, (w_final, h_final), 'RGBA')
        self.image_cache[cache_path] = image
        return (cache_path, image)
def get_image_names_from_folder(self, folder):
""" Get image names from folder
:param folder: image folder
:return: list of image names
"""
image_names = []
for f in os.listdir(folder):
path = os.path.join(folder, f)
p = path.lower()
if p.endswith(EXT_PNG) or p.endswith(EXT_JPG):
image_names.append(path)
return image_names
    def get_scaled_image(self, img):
        """ Scale the image to the configured screen size if it does not match already.

        :param img: tuple (path, surface)
        :return: tuple (path, surface) scaled to fit the screen height
        """
        width = img[1].get_size()[0]
        height = img[1].get_size()[1]
        w = self.config[SCREEN_INFO][WIDTH]
        h = self.config[SCREEN_INFO][HEIGHT]
        if width == w and height == h:
            # already screen-sized - nothing to do
            return img
        else:
            scale_ratio = self.get_scale_ratio((w, h), img[1], True)
            img_scaled = self.scale_image(img[1], scale_ratio)
            return (img[0], img_scaled)
def load_scaled_images(self, folder):
""" Load all images from folder, scale to fit screen
:param folder: image folder
:return: list of images
"""
images = []
for f in os.listdir(folder):
path = os.path.join(folder, f)
img = self.load_image(path)
if img == None: continue
images.append(self.get_scaled_image(img))
return images
def load_scaled_image(self, path):
img = self.load_pygame_image(path, bounding_box=None, use_cache=False)
if img == None:
return None
else:
return self.get_scaled_image(img)
def load_images_from_folder(self, folder):
""" Load all images from folder
:param folder: image folder
:return: list of images
"""
images = []
for f in os.listdir(folder):
path = os.path.join(folder, f)
img = self.load_image(path)
if img:
images.append(img)
return images
    def load_background_images(self, folder):
        """ Load background images and scale them to the screen size

        :param folder: images folder
        :return: list of (path, surface) tuples
        """
        image_files = self.load_images_from_folder(folder)
        w = self.config[SCREEN_INFO][WIDTH]
        h = self.config[SCREEN_INFO][HEIGHT]
        images = []
        for image in image_files:
            width = image[1].get_size()[0]
            height = image[1].get_size()[1]
            if width == w and height == h:
                images.append(image)
            else:
                # fit the image height to the screen
                scale_ratio = self.get_scale_ratio((w, h), image[1], True)
                img = self.scale_image(image[1], scale_ratio)
                t = (image[0], img)
                images.append(t)
        return images
    def prepare_flag_image(self, path, button_bounding_box):
        """ Prepare flag image

        :param path: image file path
        :param button_bounding_box: button bounding box
        :return: flag image scaled to fit the button
        """
        flag = self.load_image(path)
        bb_w = button_bounding_box.w
        bb_h = button_bounding_box.h
        scale_ratio = self.get_scale_ratio((bb_w, bb_h), flag[1])
        im = self.scale_image(flag, (scale_ratio[0], scale_ratio[1]))
        # blit onto a fresh per-pixel-alpha surface of the scaled size
        img = pygame.Surface((scale_ratio[0], scale_ratio[1]), pygame.SRCALPHA)
        img.blit(im, (0, 0))
        return img
    def get_file_icon(self, file_type, file_image_path=None, icon_bb=None, scale_factor=0.6, url=None, show_label=True):
        """ Load image representing file. Six types of icons supported:
        1. Folder icon
        2. Audio file icon
        3. Image fetched from audio file
        4. Folder with folder icon (folder icon will be displayed in this case)
        5. Playlist icon
        6. CD Drive

        :param file_type: defines file type
        :param file_image_path: path to image file
        :param icon_bb: image bounding box
        :param scale_factor: scale factor
        :param url: file name
        :param show_label: True - take label into account
        :return: image representing file
        """
        if icon_bb:
            bb = pygame.Rect(0, 0, icon_bb[0], icon_bb[1])
        else:
            bb = None
        icon_folder = self.load_icon_main(ICON_FOLDER, bb, scale_factor)
        icon_file_audio = self.load_icon_main(ICON_FILE_AUDIO, bb, scale_factor)
        icon_file_playlist = self.load_icon_main(ICON_FILE_PLAYLIST, bb, scale_factor)
        icon_cd_drive = self.load_icon_main(ICON_CD_DRIVE, bb, scale_factor)
        icon_image_file = self.load_icon_main(ICON_IMAGE_FILE, bb, scale_factor)
        # icon/image sizes are configured as percentages of the bounding box
        icon_size = self.config[ICON_SIZE]
        if icon_bb:
            w = (icon_bb[0] / 100) * icon_size
            h = (icon_bb[1] / 100) * icon_size
            icon_box = (w, h)
        else:
            icon_box = (icon_size, icon_size)
        image_size = self.config[IMAGE_SIZE]
        # NOTE(review): image_box is only assigned when icon_bb is provided;
        # the branches below that use it appear to rely on callers always
        # passing icon_bb for those file types - confirm
        if icon_bb:
            if show_label:
                image_box = ((icon_bb[0] /100) * image_size, (icon_bb[1] / 100) * image_size)
            else:
                w = (icon_bb[0] / 100) * self.config[IMAGE_SIZE_WITHOUT_LABEL]
                h = (icon_bb[1] / 100) * self.config[IMAGE_SIZE_WITHOUT_LABEL]
                image_box = (w, h)
        if file_type == FOLDER:
            ratio = self.get_scale_ratio(icon_box, icon_folder[1])
            scaled_img = self.scale_image(icon_folder, ratio)
            return (icon_folder[0], scaled_img)
        elif file_type == FILE_AUDIO:
            if self.config[ENABLE_EMBEDDED_IMAGES]:
                img = self.get_image_from_audio_file(url)
            else:
                img = None
            if img:
                # embedded album art found - use it instead of the generic icon
                ratio = self.get_scale_ratio(image_box, img)
                scaled_img = self.scale_image(img, ratio)
                return (url, scaled_img)
            else:
                ratio = self.get_scale_ratio(icon_box, icon_file_audio[1])
                scaled_img = self.scale_image(icon_file_audio, ratio)
                return (icon_file_audio[0], scaled_img)
        elif file_type == FILE_PLAYLIST:
            ratio = self.get_scale_ratio(icon_box, icon_file_playlist[1])
            scaled_img = self.scale_image(icon_file_playlist, ratio)
            return (icon_file_playlist[0], scaled_img)
        elif file_type == FILE_CD_DRIVE:
            return icon_cd_drive
        elif file_type == FOLDER_WITH_ICON:
            img = self.load_image(file_image_path, bounding_box=image_box)
            if img:
                return img
            else:
                return icon_folder
        elif file_type == FILE_IMAGE:
            if file_image_path:
                img = self.load_image(file_image_path, bounding_box=image_box)
                if img:
                    return img
                else:
                    return icon_image_file
            else:
                return icon_image_file
    def get_cd_album_art(self, album, bb):
        """ Return album art image

        :param album: artist name, song name
        :param bb: bounding box
        :return: tuple (url, album art image)
        """
        img = url = None
        if album != None:
            try:
                url = self.album_art_url_cache[album]
            except:
                # not cached yet - resolve through the discogs service
                url = self.discogs_util.get_album_art_url(album)
                if url != None:
                    self.album_art_url_cache[album] = url
        if url == None:
            # no album art found - fall back to the default CD image
            d = os.path.join(FOLDER_ICONS, DEFAULT_CD_IMAGE)
            img = self.load_image(d)
            url = d
        else:
            try:
                i = self.image_cache[url]
                return (url, i)
            except KeyError:
                pass
            img = self.load_image_from_url(url)
        ratio = self.get_scale_ratio((bb.w, bb.h), img[1])
        # force both dimensions even
        # NOTE(review): presumably needed for a clean rotation center in the
        # CD animation - confirm
        if ratio[0] % 2 != 0:
            ratio = (ratio[0] - 1, ratio[1])
        if ratio[1] % 2 != 0:
            ratio = (ratio[0], ratio[1] - 1)
        img = self.scale_image(img, ratio)
        if url != None:
            self.image_cache[url] = img
        return (url, img)
    def get_flipclock_digits(self, bb):
        """ Get digits for the flip clock

        :param bb: digit image bounding box
        :return: list of (path, image) tuples for digits 0-9
        """
        digits = []
        for n in map(str, range(10)):
            filename = n + EXT_PNG
            path = os.path.join(FOLDER_ICONS, filename)
            t = path.replace('\\','/')
            image = self.load_image(t)
            # each digit occupies a quarter of the clock width
            r = self.get_scale_ratio((bb.w/4, bb.h), image[1], True)
            i = self.scale_image(image, r)
            digits.append((path, i))
        return digits
    def get_flipclock_separator(self, height):
        """ Get image for flip clock separator/colon

        :param height: image height
        :return: tuple (path, separator image)
        """
        path = os.path.join(FOLDER_ICONS, FILE_COLON)
        t = path.replace('\\','/')
        image = self.load_image(t)
        r = self.get_scale_ratio((height, height), image[1], True)
        i = self.scale_image(image, r)
        return (path, i)
    def get_flipclock_key(self, image_name, height):
        """ Get key image for flip clock

        :param image_name: image name
        :param height: image height
        :return: tuple (path, key image)
        """
        path = os.path.join(FOLDER_ICONS, image_name)
        t = path.replace('\\','/')
        image = self.load_image(t)
        s = image[1].get_size()
        # NOTE(review): 7.05 appears to be an empirically chosen divisor
        # matching the clock layout - confirm
        h = height / 7.05
        k = h / s[1]
        w = s[0] * k
        r = self.get_scale_ratio((w, h), image[1], True)
        i = self.scale_image(image, r)
        return (path, i)
def color_to_hex(self, color):
""" Convert color tuple into hex representation for web
:param color: list of integer numbers
:return: hex representation of the color
"""
if not color:
return None
if len(color) == 4:
return "#%08x" % ((color[0] << 24) + (color[1] << 16) + (color[2] << 8) + color[3])
else:
return "#%06x" % ((color[0] << 16) + (color[1] << 8) + color[2])
    def load_image_from_url(self, url):
        """ Load image from specified URL

        :param url: image url
        :return: tuple (url, image) or None on any failure
        """
        try:
            # some image hosts reject requests without a User-Agent header
            hdrs = {'User-Agent': 'PeppyPlayer + https://github.com/project-owner/Peppy'}
            req = request.Request(url, headers=hdrs)
            stream = urlopen(req).read()
            buf = BytesIO(stream)
            image = pygame.image.load(buf).convert_alpha()
            return (url, image)
        except Exception as e:
            logging.debug(e)
            return None
def scale_image_with_padding(self, w, h, img, padding=0, scale_factor=1):
""" Scale image using specified padding and sacle factor
:param w: image width
:param h: image height
:param img: image
:param padding: padding
:param scale_factor: scale factor
:return: scaled image
"""
w_adjusted = (w - (padding * 2)) * scale_factor
h_adjusted = (h - (padding * 2)) * scale_factor
scale_ratio = self.get_scale_ratio((w_adjusted, h_adjusted), img)
return self.scale_image(img, scale_ratio)
def load_menu_screen_image(self, url, w, h):
""" Load image
:param url: image url
:param w: image width
:param h: image height
:return: hash of the input string
"""
img_scaled = None
img = self.load_image_from_url(url)
image_padding = 4
if img:
img_scaled = self.scale_image_with_padding(w, h, img[1], image_padding, 1.0)
if not img_scaled:
return None
else:
return img_scaled
    def get_bgr_info(self, name):
        """ Get background definition by name

        :param name: definition section name
        :return: definition section or None if not found/not selected
        """
        definitions = self.config[BACKGROUND_DEFINITIONS]
        names = self.config[BACKGROUND][SCREEN_BGR_NAMES]
        # a single empty entry means "use all defined backgrounds"
        if len(names) == 1 and len(names[0]) == 0:
            names = list(definitions.keys())
        for n in names:
            if n not in definitions.keys():
                continue
            section = definitions[n]
            if n == name:
                # NOTE(review): this writes into the shared config section
                # ("num" = section name) - confirm the mutation is intended
                section["num"] = n
                return section
        return None
    def prepare_background(self, surface, section):
        """
        Paint surface by specified color.

        :param surface: surface to paint
        :param section: background definition
        :return: new painted surface
        """
        image = surface
        size = surface.get_size()
        result = pygame.Surface(size)
        result.fill((0, 0, 0), None, pygame.BLEND_RGB_ADD)
        overlay = section[OVERLAY_COLOR]
        overlay_opacity = section[OVERLAY_OPACITY]
        blur_radius = section[BLUR_RADIUS]
        if blur_radius and blur_radius > 0:
            image = self.blur_image(image, blur_radius)
        if overlay:
            # tint with the overlay color, then apply the configured opacity
            image.fill(overlay, None, pygame.BLEND_RGBA_ADD)
            image.fill((255, 255, 255, overlay_opacity), None, pygame.BLEND_RGBA_MULT)
        result.blit(image, (0, 0))
        return result
def get_background_count(self):
""" Get background count
:return: the number of available backgrounds
"""
definitions = self.config[BACKGROUND_DEFINITIONS]
names = self.config[BACKGROUND][SCREEN_BGR_NAMES]
if len(names) == 1 and len(names[0]) == 0:
names = list(definitions.keys())
return len(names)
def get_screen_bgr_image(self, index=None, blur_radius=None):
""" Get screen background image.
First check cache, if not in cache load and prepare image and put to cache.
:param index: image index in the list of definitions
:param blur_radius: blur radius
:return: background image tuple - (filename, image) or None if not found
"""
definitions = self.config[BACKGROUND_DEFINITIONS]
names = self.config[BACKGROUND][SCREEN_BGR_NAMES]
if len(names) == 1 and len(names[0]) == 0:
names = list(definitions.keys())
if index != None:
name = names[index]
else:
name = names[random.randrange(0, len(names))]
info = self.get_bgr_info(name)
if not info:
return None
filename = info[BGR_FILENAME]
image = None
cache_key = filename
if blur_radius:
cache_key = filename + "." + str(blur_radius)
info[BLUR_RADIUS] = blur_radius
try:
image = self.background_cache[cache_key]
return image
except:
pass
path = os.path.join(FOLDER_BACKGROUNDS, filename)
image = self.load_pygame_image(path, None, use_cache=False)
if not image:
return None
w = self.config[SCREEN_INFO][WIDTH]
h = self.config[SCREEN_INFO][HEIGHT]
width = image[1].get_size()[0]
height = image[1].get_size()[1]
if width == w and height == h:
i = self.prepare_background(image, info)
else:
ratio = min(width / w, height / h)
scale_ratio = (int(width / ratio), int(height / ratio))
img = self.scale_image(image[1], scale_ratio)
i = self.prepare_background(img, info)
self.background_cache[cache_key] = (filename, i, info["num"])
return self.background_cache[cache_key]
    def tint_image(self, src):
        """ Tint the provided image

        The solution was borrowed here:
        https://stackoverflow.com/questions/29332424/changing-colour-of-an-image/29379704#29379704

        :param src: the source image
        :return: the tinted image
        """
        tint_color = "#ffffff"
        tr, tg, tb = getrgb(tint_color)
        # luminosity of the tint color; guard against division by zero
        tl = getcolor(tint_color, "L")
        if not tl: tl = 1
        tl = float(tl)
        sr, sg, sb = map(lambda tv: tv/tl, (tr, tg, tb))
        # per-channel lookup tables mapping grayscale values to tinted values
        luts = (tuple(map(lambda lr: int(lr*sr + 0.5), range(256))) +
            tuple(map(lambda lg: int(lg*sg + 0.5), range(256))) +
            tuple(map(lambda lb: int(lb*sb + 0.5), range(256))))
        l = grayscale(src)
        if Image.getmodebands(src.mode) < 4:
            merge_args = (src.mode, (l, l, l))
        else:
            # preserve the alpha channel of RGBA sources with an identity LUT
            a = Image.new("L", src.size)
            a.putdata(src.getdata(3))
            merge_args = (src.mode, (l, l, l, a))
            luts += tuple(range(256))
        return Image.merge(*merge_args).point(luts)
    def create_button_images(self, path, bounding_box, bar_height):
        """ Create image button images

        :param path: image path
        :param bounding_box: button bounding box
        :param bar_height: bottom bar height (percent of the button height)
        :return: tuple with images for On/Off states
        """
        overlay = self.config[COLORS][COLOR_MEDIUM]
        # alpha applied to the Off state to make it look dimmed
        image_off_alpha = 80
        image = self.load_image(path)
        bb_w = bounding_box.w
        bb_h = bounding_box.h
        overlay_height = int((bb_h / 100) * bar_height)
        scale_ratio = self.get_scale_ratio((bb_w, bb_h), image[1], fit_all=True)
        im = self.scale_image(image, scale_ratio)
        image_on = pygame.Surface((bounding_box.w, bounding_box.h), pygame.SRCALPHA)
        image_on.blit(im, (0, 0), pygame.Rect(0, 0, bb_w, bb_h))
        # darken the bottom bar area of the On image
        image_on.fill(overlay, pygame.Rect(0, bb_h - overlay_height, bb_w, overlay_height), pygame.BLEND_RGB_MULT)
        size = image_on.get_size()
        # Off image: translucent, tinted copy of the On image
        s = pygame.image.tostring(image_on, "RGBA", False)
        image_off = Image.frombytes("RGBA", size, s)
        image_off.putalpha(image_off_alpha)
        tinted = self.tint_image(image_off)
        b = tinted.tobytes("raw", 'RGBA')
        image_off = pygame.image.fromstring(b, size, 'RGBA')
        image_off.fill(overlay, pygame.Rect(0, bb_h - overlay_height, bb_w, overlay_height), pygame.BLEND_RGB_MULT)
        return (image_on, image_off)
    def set_button_images(self, state, bar_height):
        """ Set button images for On/Off states

        :param state: button state object; updated in place
        :param bar_height: bottom bar height
        """
        # prefer the JPG variant, fall back to PNG; give up if neither exists
        path = os.path.join(state.folder, state.filename + EXT_JPG)
        if not os.path.exists(path):
            path = os.path.join(state.folder, state.filename + EXT_PNG)
            if not os.path.exists(path):
                return
        images = self.create_button_images(path, state.bounding_box, bar_height)
        state.state_off_image = state.icon_base = (GENERATED_IMAGE + state.name + ".off", images[1])
        state.state_on_image = (GENERATED_IMAGE + state.name + ".on", images[0])
    def add_file_icon(self, page, icon_box, icon_box_without_label):
        """ Set file icons

        :param page: page items; each item is updated in place
        :param icon_box: icon bounding box
        :param icon_box_without_label: icon bounding box without label
        """
        for s in page:
            if getattr(s, "icon_base", None) != None:
                # icon already assigned
                continue
            has_embedded_image = getattr(s, "has_embedded_image", False)
            # image-like items can use the larger label-free box when
            # folder names are hidden
            if (s.file_type == FOLDER_WITH_ICON or s.file_type == FILE_IMAGE or has_embedded_image) and self.config[HIDE_FOLDER_NAME]:
                s.show_label = False
                w = icon_box_without_label.w
                h = icon_box_without_label.h
            else:
                s.show_label = True
                w = icon_box.w
                h = icon_box.h
            s.icon_base = self.get_file_icon(s.file_type, getattr(s, "file_image_path", ""), (w, h), url=s.url, show_label=s.show_label)
|
project-owner/Peppy
|
util/imageutil.py
|
Python
|
gpl-3.0
| 42,494
|
[
"Gaussian"
] |
6add39883de662498ace68146da13005a1cd51d9bef36b7788d33bce1b669810
|
# -*- coding: utf-8 -*-
#
# test_status.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""
Test if Set/GetStatus work properly
"""
import unittest
import nest
@nest.ll_api.check_stack
class StatusTestCase(unittest.TestCase):
    """Tests of Get/SetStatus, Get/SetDefaults, and Get/SetKernelStatus via get/set"""

    def test_kernel_attributes(self):
        """Test nest attribute access of kernel attributes"""
        nest.ResetKernel()
        # Attribute access mirrors the dictionary-based kernel status API.
        self.assertEqual(nest.GetKernelStatus(), nest.kernel_status)
        self.assertEqual(nest.GetKernelStatus("resolution"), nest.resolution)
        nest.resolution = 0.4
        self.assertEqual(0.4, nest.resolution)
        # Read-only kernel properties must reject assignment.
        self.assertRaises(AttributeError, setattr, nest, "network_size", 120)

    def test_GetKernelStatus(self):
        """GetKernelStatus"""
        nest.ResetKernel()
        kernel_status = nest.GetKernelStatus()
        self.assertIsInstance(kernel_status, dict)
        self.assertGreater(len(kernel_status), 1)
        self.assertRaises(KeyError, nest.GetKernelStatus, "nonexistent_status_key")
        # A tuple of keys (duplicates allowed) yields one value per key.
        test_keys = ("resolution", ) * 3
        kernel_status = nest.GetKernelStatus(test_keys)
        self.assertEqual(len(kernel_status), len(test_keys))
        self.assertRaises(TypeError, nest.GetKernelStatus, 42)

    def test_SetKernelStatus(self):
        """SetKernelStatus"""
        nest.ResetKernel()
        # Empty dict is a no-op but must be accepted.
        nest.SetKernelStatus({})
        nest.SetKernelStatus({'resolution': 0.2})
        self.assertRaises(ValueError, nest.SetKernelStatus, {'nonexistent_status_key': 0})
        # Readonly check
        self.assertRaises(ValueError, nest.SetKernelStatus, {'network_size': 120})

    def test_GetDefaults(self):
        """GetDefaults"""
        nest.ResetKernel()
        for model in nest.node_models + nest.synapse_models:
            model_status = nest.GetDefaults(model)
            self.assertIsInstance(model_status, dict)
            self.assertGreater(len(model_status), 1)
            self.assertRaises(TypeError, nest.GetDefaults, model, 42)
            # Only membrane-potential models support the V_m key queries.
            if "V_m" in model_status:
                test_value = nest.GetDefaults(model, "V_m")
                self.assertIsInstance(test_value, float)
                test_keys = ("V_m", ) * 3
                model_status = nest.GetDefaults(model, test_keys)
                self.assertEqual(len(model_status), len(test_keys))

    def test_SetDefaults(self):
        """SetDefaults"""
        nest.ResetKernel()
        for model in nest.node_models:
            if 'V_m' in nest.GetDefaults(model):
                # Round-trip: set a new default, then restore the original.
                v_m = nest.GetDefaults(model)['V_m']
                nest.SetDefaults(model, {'V_m': -1.})
                self.assertEqual(nest.GetDefaults(model, 'V_m'), -1.)
                nest.SetDefaults(model, 'V_m', v_m)
                self.assertEqual(nest.GetDefaults(model, 'V_m'), v_m)
                self.assertRaisesRegex(
                    nest.kernel.NESTError, "DictError",
                    nest.SetDefaults, model, 'nonexistent_status_key', 0)

    def test_GetStatus(self):
        """GetStatus"""
        for model in nest.node_models:
            if 'V_m' in nest.GetDefaults(model):
                nest.ResetKernel()
                n = nest.Create(model)
                d = nest.GetStatus(n)
                self.assertIsInstance(d, tuple)
                self.assertIsInstance(d[0], dict)
                self.assertGreater(len(d[0]), 1)
                # Keyed access must agree with full-dict access.
                v1 = nest.GetStatus(n)[0]['V_m']
                v2 = nest.GetStatus(n, 'V_m')[0]
                self.assertEqual(v1, v2)
                n = nest.Create(model, 10)
                d = nest.GetStatus(n, 'V_m')
                self.assertEqual(len(d), len(n))
                self.assertIsInstance(d[0], float)
                test_keys = ("V_m", ) * 3
                d = nest.GetStatus(n, test_keys)
                self.assertEqual(len(d), len(n))
                self.assertEqual(len(d[0]), len(test_keys))

    def test_SetStatus(self):
        """SetStatus with dict"""
        for model in nest.node_models:
            if 'V_m' in nest.GetDefaults(model):
                nest.ResetKernel()
                n = nest.Create(model)
                nest.SetStatus(n, {'V_m': 1.})
                self.assertEqual(nest.GetStatus(n, 'V_m')[0], 1.)

    def test_SetStatusList(self):
        """SetStatus with list"""
        for model in nest.node_models:
            if 'V_m' in nest.GetDefaults(model):
                nest.ResetKernel()
                n = nest.Create(model)
                # One dict per node in the collection.
                nest.SetStatus(n, [{'V_m': 2.}])
                self.assertEqual(nest.GetStatus(n, 'V_m')[0], 2.)

    def test_SetStatusParam(self):
        """SetStatus with parameter"""
        for model in nest.node_models:
            if 'V_m' in nest.GetDefaults(model):
                nest.ResetKernel()
                n = nest.Create(model)
                nest.SetStatus(n, 'V_m', 3.)
                self.assertEqual(nest.GetStatus(n, 'V_m')[0], 3.)

    def test_SetStatusVth_E_L(self):
        """SetStatus of reversal and threshold potential """
        # NOTE(review): these models are excluded, presumably because their
        # V_th/E_L coupling differs -- confirm against the model documentation.
        excluded = ['a2eif_cond_exp_HW', 'mat2_psc_exp', 'amat2_psc_exp']
        models = nest.node_models + nest.synapse_models
        for model in [m for m in models if m not in excluded]:
            if all(key in nest.GetDefaults(model) for key in ('V_th', 'E_L')):
                nest.ResetKernel()
                neuron1 = nest.Create(model)
                neuron2 = nest.Create(model)
                # must not depend on the order
                new_EL = -90.
                new_Vth = -10.
                if 'V_reset' in nest.GetDefaults(model):
                    nest.SetStatus(neuron1 + neuron2, {'V_reset': new_EL})
                # Same two updates applied in opposite order to each neuron.
                nest.SetStatus(neuron1, {'E_L': new_EL})
                nest.SetStatus(neuron2, {'V_th': new_Vth})
                nest.SetStatus(neuron1, {'V_th': new_Vth})
                nest.SetStatus(neuron2, {'E_L': new_EL})
                vth1, vth2 = nest.GetStatus(neuron1 + neuron2, 'V_th')
                self.assertEqual(vth1, vth2)

    def test_SetStatusV_th_smaller_V_reset(self):
        """SetStatus of reversal and threshold potential
        check if error is raised if V_reset > V_th"""
        for model in nest.node_models + nest.synapse_models:
            if all(key in nest.GetDefaults(model) for key in ('V_th', 'V_reset')):
                nest.ResetKernel()
                neuron = nest.Create(model)
                # should raise exception
                self.assertRaisesRegex(
                    nest.kernel.NESTError, "BadProperty",
                    nest.SetStatus, neuron,
                    {'V_reset': 10., 'V_th': 0.}
                )
def suite():
    """Collect all tests of this module into a test suite.

    Uses ``unittest.TestLoader`` because ``unittest.makeSuite`` is
    deprecated since Python 3.11 (and removed in 3.13); the loader picks
    up every method whose name starts with 'test', matching the old
    ``makeSuite(StatusTestCase, 'test')`` behavior.
    """
    return unittest.TestLoader().loadTestsFromTestCase(StatusTestCase)
def run():
    """Execute the module's test suite with a verbose text runner."""
    unittest.TextTestRunner(verbosity=2).run(suite())


if __name__ == "__main__":
    run()
|
steffengraber/nest-simulator
|
testsuite/pytests/test_status.py
|
Python
|
gpl-2.0
| 7,597
|
[
"NEURON"
] |
73408e965e2c6e0ea876be0c52496a3ba59733b81a4104bb0cb026b99c3172e1
|
# $HeadURL: $
''' SpaceTokenOccupancyCommand
The Command gets information of the SpaceTokenOccupancy from the lcg_utils
'''
import lcg_util
from DIRAC import gLogger, S_OK, S_ERROR
from DIRAC.Core.Utilities.Subprocess import pythonCall
from DIRAC.ResourceStatusSystem.Command.Command import Command
from DIRAC.ResourceStatusSystem.Client.ResourceManagementClient import ResourceManagementClient
from DIRAC.ResourceStatusSystem.Utilities import CSHelpers
__RCSID__ = '$Id: $'
class SpaceTokenOccupancyCommand( Command ):
  '''
  Uses lcg_util to query status of endpoint for a given token.
  '''

  def __init__( self, args = None, clients = None ):
    super( SpaceTokenOccupancyCommand, self ).__init__( args, clients )

    # Reuse an injected ResourceManagementClient when provided, otherwise
    # create a private instance.
    if 'ResourceManagementClient' in self.apis:
      self.rmClient = self.apis[ 'ResourceManagementClient' ]
    else:
      self.rmClient = ResourceManagementClient()

  def _storeCommand( self, results ):
    '''
    Stores the results of doNew method on the database.
    Returns the first failing query result, or S_OK() if all rows stored.
    '''
    for result in results:
      resQuery = self.rmClient.addOrModifySpaceTokenOccupancyCache( result[ 'Endpoint' ],
                                                                    result[ 'Token' ],
                                                                    result[ 'Total' ],
                                                                    result[ 'Guaranteed' ],
                                                                    result[ 'Free' ] )
      if not resQuery[ 'OK' ]:
        return resQuery
    return S_OK()

  def _prepareCommand( self ):
    '''
    SpaceTokenOccupancy requires one argument:
    - elementName : <str>
    Given a (storage)elementName, we calculate its endpoint and spaceToken,
    which are used to query the srm interface.
    '''
    if not 'name' in self.args:
      return S_ERROR( '"name" not found in self.args' )
    elementName = self.args[ 'name' ]

    endpoint = CSHelpers.getStorageElementEndpoint( elementName )
    if not endpoint[ 'OK' ]:
      return endpoint
    endpoint = endpoint[ 'Value' ]

    spaceToken = CSHelpers.getStorageElementSpaceToken( elementName )
    if not spaceToken[ 'OK' ]:
      return spaceToken
    spaceToken = spaceToken[ 'Value']

    return S_OK( ( endpoint, spaceToken ) )

  def doNew( self, masterParams = None ):
    '''
    Gets the parameters to run, either from the master method or from its
    own arguments.
    It queries the srm interface, and hopefully it will not crash. Out of the
    results, we keep totalsize, guaranteedsuze, and unusedsize.
    Then, they are recorded and returned.
    '''
    if masterParams is not None:
      spaceTokenEndpoint, spaceToken = masterParams
    else:
      params = self._prepareCommand()
      if not params[ 'OK' ]:
        return params
      spaceTokenEndpoint, spaceToken = params[ 'Value' ]

    # 10 secs of timeout. If it works, the reply is immediate.
    occupancy = pythonCall( 10, lcg_util.lcg_stmd, spaceToken, spaceTokenEndpoint, True, 0 )
    if not occupancy[ 'OK' ]:
      return occupancy
    occupancy = occupancy[ 'Value' ]

    #Timeout does not work here...
    #occupancy = lcg_util.lcg_stmd( spaceToken, spaceTokenEndpoint, True, 0 )

    # lcg_stmd returns a ( returnCode, output ) pair; non-zero code is failure.
    if occupancy[ 0 ] != 0:
      return S_ERROR( occupancy )
    output = occupancy[ 1 ][ 0 ]

    sTokenDict = {}
    sTokenDict[ 'Endpoint' ] = spaceTokenEndpoint
    sTokenDict[ 'Token' ] = spaceToken
    sTokenDict[ 'Total' ] = float( output.get( 'totalsize', '0' ) ) / 1e12 # Bytes to Terabytes
    sTokenDict[ 'Guaranteed' ] = float( output.get( 'guaranteedsize', '0' ) ) / 1e12
    sTokenDict[ 'Free' ] = float( output.get( 'unusedsize', '0' ) ) / 1e12

    storeRes = self._storeCommand( [ sTokenDict ] )
    if not storeRes[ 'OK' ]:
      return storeRes

    return S_OK( [ sTokenDict ] )

  def doCache( self ):
    '''
    Method that reads the cache table and tries to read from it. It will
    return a list of dictionaries if there are results.
    '''
    params = self._prepareCommand()
    if not params[ 'OK' ]:
      return params
    spaceTokenEndpoint, spaceToken = params[ 'Value' ]

    result = self.rmClient.selectSpaceTokenOccupancyCache( spaceTokenEndpoint, spaceToken )
    if result[ 'OK' ]:
      # Re-pack DB rows as dictionaries keyed by column name.
      result = S_OK( [ dict( zip( result[ 'Columns' ], res ) ) for res in result[ 'Value' ] ] )

    return result

  def doMaster( self ):
    '''
    Master method. Gets all endpoints from the storage elements and all
    the spaceTokens. Could have taken from Shares/Disk as well.
    It queries for all their possible combinations, unless there are records
    in the database for those combinations, which then are not queried.
    '''
    storageElementNames = CSHelpers.getStorageElements()
    if not storageElementNames[ 'OK' ]:
      return storageElementNames
    storageElementNames = storageElementNames[ 'Value' ]

    endpointTokenSet = set()
    for storageElementName in storageElementNames:
      # Elements without a resolvable endpoint or space token are skipped
      # silently (best-effort discovery).
      endpoint = CSHelpers.getStorageElementEndpoint( storageElementName )
      if not endpoint[ 'OK' ]:
        continue
      endpoint = endpoint[ 'Value' ]

      spaceToken = CSHelpers.getStorageElementSpaceToken( storageElementName )
      if not spaceToken[ 'OK' ]:
        continue
      spaceToken = spaceToken[ 'Value' ]

      endpointTokenSet.add( ( endpoint, spaceToken ) )

    gLogger.verbose( 'Processing %s' % endpointTokenSet )
    for elementToQuery in endpointTokenSet:
      result = self.doNew( elementToQuery )
      if not result[ 'OK' ]:
        self.metrics[ 'failed' ].append( result )

    return S_OK( self.metrics )
#...............................................................................
#EOF
|
marcelovilaca/DIRAC
|
ResourceStatusSystem/Command/SpaceTokenOccupancyCommand.py
|
Python
|
gpl-3.0
| 6,123
|
[
"DIRAC"
] |
ea55c8041c41a3eb929a5d844d96ca8946470753d7e31a79009ba3069386e838
|
# Public sub-modules of the bfx package.
__all__ = [
    "bedtools",
    "bvatools",
    "bwa",
    "cufflinks",
    "differential_expression",
    "gatk",
    "gq_seq_utils",
    "htseq",
    "igvtools",
    "metrics",
    "mummer",
    "pacbio_tools",
    "picard",
    "ray",
    "readset",
    "rrna_amplicons",
    # BUG FIX: the comma after "run_processing_aligner" was missing, so
    # implicit string concatenation produced "run_processing_alignersample"
    # and silently dropped "sample" from the public API.
    "run_processing_aligner",
    "sample",
    "samtools",
    "sequence_dictionary",
    "smrtanalysis",
    "snpeff",
    "tools",
    "tophat",
    "trimmomatic",
    "vcftools",
]
|
ccmbioinfo/mugqic_pipelines
|
bfx/__init__.py
|
Python
|
lgpl-3.0
| 458
|
[
"BWA",
"HTSeq"
] |
0d2d1ef825c96af000330a44cf931e39428a4b24cea4120506ee2539ec81eefd
|
# -*- coding: utf-8 -*-
#
#
# TheVirtualBrain-Scientific Package. This package holds all simulators, and
# analysers necessary to run brain-simulations. You can use it stand alone or
# in conjunction with TheVirtualBrain-Framework Package. See content of the
# documentation-folder for more details. See also http://www.thevirtualbrain.org
#
# (c) 2012-2013, Baycrest Centre for Geriatric Care ("Baycrest")
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by the Free
# Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details. You should have received a copy of the GNU General
# Public License along with this program; if not, you can download it here
# http://www.gnu.org/licenses/old-licenses/gpl-2.0
#
#
# CITATION:
# When using The Virtual Brain for scientific publications, please cite it as follows:
#
# Paula Sanz Leon, Stuart A. Knock, M. Marmaduke Woodman, Lia Domide,
# Jochen Mersmann, Anthony R. McIntosh, Viktor Jirsa (2013)
# The Virtual Brain: a simulator of primate brain network dynamics.
# Frontiers in Neuroinformatics (7:10. doi: 10.3389/fninf.2013.00010)
#
#
"""
.. moduleauthor:: Bogdan Neacsa <bogdan.neacsa@codemart.ro>
"""
if __name__ == "__main__":
    # The console test environment must be initialized BEFORE the tvb
    # datatype imports below, hence this guard precedes them.
    from tvb.tests.library import setup_test_console_env
    setup_test_console_env()
import numpy
import unittest
from tvb.datatypes import patterns, equations, connectivity, surfaces
from tvb.tests.library.base_testcase import BaseTestCase
class PatternsTest(BaseTestCase):
    """
    Tests the defaults for `tvb.datatypes.patterns` module.
    """

    def test_spatialpattern(self):
        """Configure a plain SpatialPattern and check its summary and shapes."""
        dt = patterns.SpatialPattern()
        dt.spatial = equations.DoubleGaussian()
        dt.spatial_pattern = numpy.arange(100).reshape((10, 10))
        dt.configure_space(numpy.arange(100).reshape((10, 10)))
        dt.configure()
        summary = dt.summary_info
        self.assertEqual(summary['Type'], 'SpatialPattern')
        self.assertEqual(dt.space.shape, (10, 10))
        self.assertTrue(isinstance(dt.spatial, equations.DoubleGaussian))
        # BUG FIX: was assertTrue(shape, (10, 1)) -- assertTrue's second
        # argument is the failure message, so the check always passed for
        # any non-empty shape. assertEqual matches the sibling test below.
        self.assertEqual(dt.spatial_pattern.shape, (10, 1))

    def test_spatiotemporalpattern(self):
        """SpatioTemporalPattern: spatial part configured, temporal part left unset."""
        dt = patterns.SpatioTemporalPattern()
        dt.spatial = equations.DoubleGaussian()
        dt.temporal = equations.Gaussian()
        dt.spatial_pattern = numpy.arange(100).reshape((10, 10))
        dt.configure_space(numpy.arange(100).reshape((10, 10)))
        dt.configure()
        summary = dt.summary_info
        self.assertEqual(summary['Type'], 'SpatioTemporalPattern')
        self.assertEqual(dt.space.shape, (10, 10))
        self.assertTrue(isinstance(dt.spatial, equations.DoubleGaussian))
        self.assertEqual(dt.spatial_pattern.shape, (10, 1))
        self.assertTrue(isinstance(dt.temporal, equations.Gaussian))
        # Temporal pattern is only built when the stimulus is evaluated.
        self.assertTrue(dt.temporal_pattern is None)
        self.assertTrue(dt.time is None)

    def test_stimuliregion(self):
        """StimuliRegion built on the default connectivity (76 regions)."""
        conn = connectivity.Connectivity(load_default=True)
        conn.configure()
        dt = patterns.StimuliRegion()
        dt.connectivity = conn
        dt.spatial = equations.DiscreteEquation()
        dt.temporal = equations.Gaussian()
        dt.weight = [0 for _ in range(conn.number_of_regions)]
        dt.configure_space()
        self.assertEqual(dt.summary_info['Type'], 'StimuliRegion')
        self.assertTrue(dt.connectivity is not None)
        self.assertEqual(dt.space.shape, (76, 1))
        self.assertEqual(dt.spatial_pattern.shape, (76, 1))
        self.assertTrue(isinstance(dt.temporal, equations.Gaussian))
        self.assertTrue(dt.temporal_pattern is None)
        self.assertTrue(dt.time is None)

    def test_stimulisurface(self):
        """StimuliSurface on the default cortical surface (16384 vertices)."""
        srf = surfaces.CorticalSurface(load_default=True)
        srf.configure()
        dt = patterns.StimuliSurface()
        dt.surface = srf
        dt.spatial = equations.DiscreteEquation()
        dt.temporal = equations.Gaussian()
        dt.focal_points_surface = [0, 1, 2]
        dt.focal_points_triangles = [0, 1, 2]
        dt.configure()
        dt.configure_space()
        summary = dt.summary_info
        self.assertEqual(summary['Type'], "StimuliSurface")
        self.assertEqual(dt.space.shape, (16384, 3))
        self.assertTrue(isinstance(dt.spatial, equations.DiscreteEquation))
        self.assertEqual(dt.spatial_pattern.shape, (16384, 1))
        self.assertTrue(dt.surface is not None)
        self.assertTrue(isinstance(dt.temporal, equations.Gaussian))
        self.assertTrue(dt.temporal_pattern is None)
        self.assertTrue(dt.time is None)

    def test_spatialpatternvolume(self):
        """An unconfigured SpatialPatternVolume exposes empty defaults."""
        dt = patterns.SpatialPatternVolume()
        self.assertTrue(dt.space is None)
        self.assertTrue(dt.spatial is None)
        self.assertTrue(dt.spatial_pattern is None)
        self.assertTrue(dt.volume is None)
        self.assertEqual(dt.focal_points_volume.shape, (0,))
def suite():
    """
    Gather all the tests in a test suite.

    Uses ``unittest.TestLoader`` because ``unittest.makeSuite`` is
    deprecated (removed in Python 3.13); the default 'test' prefix
    matches the original makeSuite behavior.
    """
    test_suite = unittest.TestSuite()
    test_suite.addTest(unittest.TestLoader().loadTestsFromTestCase(PatternsTest))
    return test_suite
if __name__ == "__main__":
    # Allow running the tests of this package individually.
    TEST_SUITE = suite()
    TEST_RUNNER = unittest.TextTestRunner()
    TEST_RUNNER.run(TEST_SUITE)
|
stuart-knock/tvb-library
|
tvb/tests/library/datatypes/patterns_test.py
|
Python
|
gpl-2.0
| 5,663
|
[
"Gaussian"
] |
6d7b3fa8b092ab4ec0dd6818cac2f97a34e3f5a984af790bf75c0c54b17035a2
|
#
# Copyright (C) 2000 greg Landrum
#
""" unit tests for the Neural network trainer implementation
this basically works out **all** of the network code
"""
from __future__ import print_function
import unittest
from rdkit.ML.Neural.ActFuncs import Sigmoid, TanH
from rdkit.ML.Neural.NetNode import NetNode
from rdkit.ML.Neural.Network import Network
class TestCaseActFuncs(unittest.TestCase):
    """Unit tests for the neural-network activation functions."""

    def test_Sigmoid(self):
        """Sigmoid: value at zero, symmetry, monotonicity, derivative."""
        act = Sigmoid()
        # Midpoint value; __call__ and Eval must agree.
        self.assertAlmostEqual(act(0), 0.5)
        self.assertAlmostEqual(act(0), act.Eval(0))
        self.assertAlmostEqual(act.Deriv(0), 0.25)
        # Point symmetry about (0, 0.5).
        self.assertAlmostEqual(act(1), 1.0 - act(-1))
        self.assertAlmostEqual(act(2), 1.0 - act(-2))
        # The derivative is an even function.
        self.assertAlmostEqual(act.Deriv(1), act.Deriv(-1))
        self.assertAlmostEqual(act.Deriv(2), act.Deriv(-2))
        # Monotone increasing; derivative decays away from zero.
        self.assertLess(act(1), act(2))
        self.assertLess(act.Deriv(2), act.Deriv(1))
        self.assertAlmostEqual(act.Deriv(1), act.DerivFromVal(act(1)))

    def test_TanH(self):
        """TanH: value at zero, odd symmetry, monotonicity, derivative."""
        act = TanH()
        self.assertAlmostEqual(act(0), 0.0)
        self.assertAlmostEqual(act(0), act.Eval(0))
        self.assertAlmostEqual(act.Deriv(0), 1.0)
        # tanh is odd.
        self.assertAlmostEqual(act(1), -act(-1))
        self.assertAlmostEqual(act(2), -act(-2))
        # Its derivative is even.
        self.assertAlmostEqual(act.Deriv(1), act.Deriv(-1))
        self.assertAlmostEqual(act.Deriv(2), act.Deriv(-2))
        self.assertLess(act(1), act(2))
        self.assertLess(act.Deriv(2), act.Deriv(1))
        self.assertAlmostEqual(act.Deriv(1), act.DerivFromVal(act(1)))
class TestCaseNetNode(unittest.TestCase):
    """Unit tests for the NetNode building block."""

    def test_NetNode(self):
        """A node without inputs evaluates to 1; weight/input setters validate length."""
        # A node without input always returns 1
        all_nodes = [None] * 2
        root = NetNode(0, all_nodes)
        all_nodes[0] = root
        values = [None] * 2
        self.assertEqual(root.Eval(values), 1)
        # Eval records the node's value at its index in the value vector.
        self.assertEqual(values, [1, None])

        child = NetNode(1, all_nodes, inputNodes=[0], weights=[0.1])
        # Mismatched lengths are rejected.
        self.assertRaises(AssertionError, child.SetWeights, [0, 1])
        self.assertRaises(AssertionError, child.SetInputs, [0, 1])
class TestCaseNetwork(unittest.TestCase):
    """Unit tests for the Network container."""

    def test_Network(self):
        """Layer layout of a 2-2-1-2 network and its string representation."""
        layer_sizes = [2, 2, 1, 2]
        net = Network(layer_sizes)
        # 2 input + 2 hidden + 1 hidden + 2 output = 7 nodes total.
        self.assertEqual(net.GetNumNodes(), 7)
        self.assertEqual(len(net.GetAllNodes()), 7)
        self.assertEqual(net.GetInputNodeList(), [0, 1])
        self.assertEqual(net.GetHiddenLayerNodeList(0), [2, 3])
        self.assertEqual(net.GetHiddenLayerNodeList(1), [4])
        self.assertEqual(net.GetOutputNodeList(), [5, 6])
        # We get a representation of the network
        text = str(net)
        self.assertIn('Network', text)
if __name__ == '__main__':  # pragma: nocover
    # Run all test cases defined in this module.
    unittest.main()
|
rvianello/rdkit
|
rdkit/ML/Neural/UnitTestOther.py
|
Python
|
bsd-3-clause
| 2,498
|
[
"RDKit"
] |
47392c63726cdc2c661329554c2a741d7daaabfef27b723580df75e7a31530f4
|
import argparse
import pysam
import numpy
def print_sex(bam):
    """
    Print sex ('male', 'female' or 'unknown') based on chr X read ratio.

    The X-chromosome mapped-read/length ratio is compared against the mean
    ratio over the first 24 reference sequences: females (two X copies) fall
    within 2 standard deviations of the mean, males fall clearly below it.

    Args:
        bam (str): Path to bam file (a .bai index must exist for idxstats)
    """
    # NOTE(review): assumes pysam.idxstats returns an indexable sequence of
    # per-chromosome lines with chr1-22, X, Y first and X at index 22 --
    # verify against the pysam version and reference genome in use.
    idxstats = pysam.idxstats(bam)
    chr_ratio = []
    # Mapped-read / chromosome-length ratio per chromosome.
    for stats_line in idxstats[0:24]:
        fields = stats_line.strip('\n').split('\t')
        chr_length = float(fields[1])
        chr_mapped = float(fields[2])
        chr_ratio.append(chr_mapped / chr_length)

    chr_ratio_std = numpy.std(chr_ratio)
    chr_ratio_mean = numpy.mean(chr_ratio)

    chr_x = idxstats[22].strip('\n').split('\t')
    chr_x_ratio = float(chr_x[2]) / float(chr_x[1])

    # Two-sided 2-sigma window around the autosomal mean.
    # print statements converted to py2/py3-compatible calls.
    if ((chr_x_ratio > chr_ratio_mean - (2 * chr_ratio_std)) and
            (chr_x_ratio < chr_ratio_mean + (2 * chr_ratio_std))):
        print('female')
    elif chr_x_ratio < chr_ratio_mean - (2 * chr_ratio_std):
        print('male')
    else:
        print('unknown')  # BUG FIX: output typo, was 'unkown'
if __name__ == "__main__":
    # Parse arguments.
    # BUG FIX: the parser was constructed twice and the first instance
    # silently discarded; build it once.
    parser = argparse.ArgumentParser(
        formatter_class=lambda prog: argparse.HelpFormatter(prog, max_help_position=100, width=200),
        description='Determine and print sex based on bam file')
    parser.add_argument('-b', '--bam', type=str, action='append', help='Bam file', required=True)
    args = parser.parse_args()

    for bam in args.bam:
        print_sex(bam)
|
CuppenResearch/IAP
|
scripts/determine_sex.py
|
Python
|
mit
| 1,337
|
[
"pysam"
] |
9a37f1d837eb9a4d3304c9402c8de8e4cbd9f1164a301f1fecaddf61ee3e3554
|
########################################################################
#
# (C) 2015, Brian Coca <bcoca@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
########################################################################
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import datetime
import os
import tarfile
import tempfile
import yaml
from distutils.version import LooseVersion
from shutil import rmtree
import ansible.constants as C
from ansible.errors import AnsibleError
from ansible.module_utils.urls import open_url
from ansible.playbook.role.requirement import RoleRequirement
from ansible.galaxy.api import GalaxyAPI
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class GalaxyRole(object):
    """A role on Ansible Galaxy: metadata access, download, installation
    and removal relative to the configured roles paths."""

    SUPPORTED_SCMS = set(['git', 'hg'])
    # Relative paths of the role metadata and the install-info marker file.
    META_MAIN = os.path.join('meta', 'main.yml')
    META_INSTALL = os.path.join('meta', '.galaxy_install_info')
    ROLE_DIRS = ('defaults','files','handlers','meta','tasks','templates','vars','tests')

    def __init__(self, galaxy, name, src=None, version=None, scm=None, path=None):
        """
        :param galaxy: Galaxy context (provides ``options`` and ``roles_paths``)
        :param name: role name
        :param src: role source (URL, tarball path or galaxy name); defaults to name
        :param version: requested version (tag/branch), if any
        :param scm: 'git' or 'hg' when installing from an SCM url
        :param path: explicit install path; otherwise derived from roles_paths
        """
        self._metadata = None
        self._install_info = None
        self._validate_certs = not galaxy.options.ignore_certs

        display.debug('Validate TLS certificates: %s' % self._validate_certs)

        self.options = galaxy.options
        self.galaxy = galaxy

        self.name = name
        self.version = version
        self.src = src or name
        self.scm = scm

        if path is not None:
            if self.name not in path:
                path = os.path.join(path, self.name)
            self.path = path
            # NOTE(review): self.paths is NOT set on this branch, but
            # install() reads self.paths in its OSError fallback -- confirm
            # callers always hit the else branch before install() recovers.
        else:
            for role_path_dir in galaxy.roles_paths:
                role_path = os.path.join(role_path_dir, self.name)
                if os.path.exists(role_path):
                    self.path = role_path
                    break
            else:
                # use the first path by default
                self.path = os.path.join(galaxy.roles_paths[0], self.name)
            # create list of possible paths
            self.paths = [x for x in galaxy.roles_paths]
            self.paths = [os.path.join(x, self.name) for x in self.paths]

    def __repr__(self):
        """
        Returns "rolename (version)" if version is not null
        Returns "rolename" otherwise
        """
        if self.version:
            return "%s (%s)" % (self.name, self.version)
        else:
            return self.name

    def __eq__(self, other):
        # Equality is by role name only; version and source are ignored.
        return self.name == other.name

    @property
    def metadata(self):
        """
        Returns role metadata (parsed meta/main.yml), lazily loaded.
        Returns False if the file exists but cannot be parsed.
        """
        if self._metadata is None:
            meta_path = os.path.join(self.path, self.META_MAIN)
            if os.path.isfile(meta_path):
                try:
                    f = open(meta_path, 'r')
                    self._metadata = yaml.safe_load(f)
                except:
                    display.vvvvv("Unable to load metadata for %s" % self.name)
                    return False
                finally:
                    # NOTE(review): if open() itself raised, f is unbound and
                    # this line raises NameError -- a with-statement is safer.
                    f.close()

        return self._metadata

    @property
    def install_info(self):
        """
        Returns role install info (parsed .galaxy_install_info), lazily loaded.
        Returns False if the file exists but cannot be parsed.
        """
        if self._install_info is None:
            info_path = os.path.join(self.path, self.META_INSTALL)
            if os.path.isfile(info_path):
                try:
                    f = open(info_path, 'r')
                    self._install_info = yaml.safe_load(f)
                except:
                    display.vvvvv("Unable to load Galaxy install info for %s" % self.name)
                    return False
                finally:
                    # NOTE(review): same unbound-f hazard as in `metadata`.
                    f.close()
        return self._install_info

    def _write_galaxy_install_info(self):
        """
        Writes a YAML-formatted file to the role's meta/ directory
        (named .galaxy_install_info) which contains some information
        we can use later for commands like 'list' and 'info'.
        """
        info = dict(
            version=self.version,
            install_date=datetime.datetime.utcnow().strftime("%c"),
        )
        if not os.path.exists(os.path.join(self.path, 'meta')):
            os.makedirs(os.path.join(self.path, 'meta'))
        info_path = os.path.join(self.path, self.META_INSTALL)
        with open(info_path, 'w+') as f:
            try:
                # NOTE(review): safe_dump(data, stream) returns None, so this
                # also resets the cached _install_info -- confirm intended.
                self._install_info = yaml.safe_dump(info, f)
            except:
                return False

        return True

    def remove(self):
        """
        Removes the specified role from the roles path.

        There is a sanity check to make sure there's a meta/main.yml file at this
        path so the user doesn't blow away random directories.
        """
        if self.metadata:
            try:
                rmtree(self.path)
                return True
            except:
                pass

        return False

    def fetch(self, role_data):
        """
        Downloads the archived role from github to a temp location.

        Returns the temp file path on success, False on failure.
        """
        if role_data:

            # first grab the file and save it to a temp location
            if "github_user" in role_data and "github_repo" in role_data:
                archive_url = 'https://github.com/%s/%s/archive/%s.tar.gz' % (role_data["github_user"], role_data["github_repo"], self.version)
            else:
                archive_url = self.src

            display.display("- downloading role from %s" % archive_url)

            try:
                url_file = open_url(archive_url, validate_certs=self._validate_certs)
                temp_file = tempfile.NamedTemporaryFile(delete=False)
                data = url_file.read()
                # Stream the archive to disk chunk by chunk.
                while data:
                    temp_file.write(data)
                    data = url_file.read()
                temp_file.close()
                return temp_file.name
            except Exception as e:
                display.error("failed to download the file: %s" % str(e))

        return False

    def install(self):
        """
        Resolves the role source (SCM, local tarball, URL or Galaxy lookup),
        downloads it if necessary, validates the archive and extracts it into
        self.path. Returns True on success, False otherwise.
        """
        # the file is a tar, so open it that way and extract it
        # to the specified (or default) roles directory
        local_file = False

        if self.scm:
            # create tar file from scm url
            tmp_file = RoleRequirement.scm_archive_role(**self.spec)
        elif self.src:
            if os.path.isfile(self.src):
                # installing a local tar.gz
                local_file = True
                tmp_file = self.src
            elif '://' in self.src:
                role_data = self.src
                tmp_file = self.fetch(role_data)
            else:
                api = GalaxyAPI(self.galaxy)
                role_data = api.lookup_role_by_name(self.src)
                if not role_data:
                    raise AnsibleError("- sorry, %s was not found on %s." % (self.src, api.api_server))

                if role_data.get('role_type') == 'CON' and not os.environ.get('ANSIBLE_CONTAINER'):
                    # Container Enabled, running outside of a container
                    display.warning("%s is a Container Enabled role and should only be installed using "
                                    "Ansible Container" % self.name)

                if role_data.get('role_type') == 'APP':
                    # Container Role
                    display.warning("%s is a Container App role and should only be installed using Ansible "
                                    "Container" % self.name)

                role_versions = api.fetch_role_related('versions', role_data['id'])
                if not self.version:
                    # convert the version names to LooseVersion objects
                    # and sort them to get the latest version. If there
                    # are no versions in the list, we'll grab the head
                    # of the master branch
                    if len(role_versions) > 0:
                        loose_versions = [LooseVersion(a.get('name',None)) for a in role_versions]
                        loose_versions.sort()
                        self.version = str(loose_versions[-1])
                    elif role_data.get('github_branch', None):
                        self.version = role_data['github_branch']
                    else:
                        self.version = 'master'
                elif self.version != 'master':
                    if role_versions and str(self.version) not in [a.get('name', None) for a in role_versions]:
                        raise AnsibleError("- the specified version (%s) of %s was not found in the list of available versions (%s)." % (self.version, self.name, role_versions))

                tmp_file = self.fetch(role_data)

        else:
            raise AnsibleError("No valid role data found")

        if tmp_file:

            display.debug("installing from %s" % tmp_file)

            if not tarfile.is_tarfile(tmp_file):
                raise AnsibleError("the file downloaded was not a tar.gz")
            else:
                if tmp_file.endswith('.gz'):
                    role_tar_file = tarfile.open(tmp_file, "r:gz")
                else:
                    role_tar_file = tarfile.open(tmp_file, "r")
                # verify the role's meta file
                meta_file = None
                members = role_tar_file.getmembers()
                # next find the metadata file
                for member in members:
                    if self.META_MAIN in member.name:
                        meta_file = member
                        break
                if not meta_file:
                    raise AnsibleError("this role does not appear to have a meta/main.yml file.")
                else:
                    try:
                        self._metadata = yaml.safe_load(role_tar_file.extractfile(meta_file))
                    except:
                        raise AnsibleError("this role does not appear to have a valid meta/main.yml file.")

                # we strip off the top-level directory for all of the files contained within
                # the tar file here, since the default is 'github_repo-target', and change it
                # to the specified role's name
                installed = False
                while not installed:
                    display.display("- extracting %s to %s" % (self.name, self.path))
                    try:
                        if os.path.exists(self.path):
                            if not os.path.isdir(self.path):
                                raise AnsibleError("the specified roles path exists and is not a directory.")
                            elif not getattr(self.options, "force", False):
                                raise AnsibleError("the specified role %s appears to already exist. Use --force to replace it." % self.name)
                            else:
                                # using --force, remove the old path
                                if not self.remove():
                                    raise AnsibleError("%s doesn't appear to contain a role.\n  please remove this directory manually if you really want to put the role here." % self.path)
                        else:
                            os.makedirs(self.path)

                        # now we do the actual extraction to the path
                        for member in members:
                            # we only extract files, and remove any relative path
                            # bits that might be in the file for security purposes
                            # and drop the leading directory, as mentioned above
                            if member.isreg() or member.issym():
                                parts = member.name.split(os.sep)[1:]
                                final_parts = []
                                for part in parts:
                                    if part != '..' and '~' not in part and '$' not in part:
                                        final_parts.append(part)
                                member.name = os.path.join(*final_parts)
                                role_tar_file.extract(member, self.path)

                        # write out the install info file for later use
                        self._write_galaxy_install_info()
                        installed = True
                    except OSError as e:
                        # On permission errors (errno 13), retry in the next
                        # configured roles path, if any.
                        error = True
                        # NOTE(review): e[0] is Python-2-only indexing of the
                        # exception; on Python 3 this raises TypeError --
                        # e.errno is the portable spelling.
                        if e[0] == 13 and len(self.paths) > 1:
                            current = self.paths.index(self.path)
                            nextidx = current + 1
                            if len(self.paths) >= current:
                                self.path = self.paths[nextidx]
                                error = False
                        if error:
                            raise AnsibleError("Could not update files in %s: %s" % (self.path, str(e)))

                # return the parsed yaml metadata
                display.display("- %s was installed successfully" % str(self))
                if not local_file:
                    try:
                        os.unlink(tmp_file)
                    except (OSError,IOError) as e:
                        display.warning("Unable to remove tmp file (%s): %s" % (tmp_file, str(e)))
                return True

        return False

    @property
    def spec(self):
        """
        Returns role spec info
        {
           'scm': 'git',
           'src': 'http://git.example.com/repos/repo.git',
           'version': 'v1.0',
           'name': 'repo'
        }
        """
        return dict(scm=self.scm, src=self.src, version=self.version, name=self.name)
|
emersonsoftware/ansiblefork
|
lib/ansible/galaxy/role.py
|
Python
|
gpl-3.0
| 14,352
|
[
"Brian",
"Galaxy"
] |
f041eafb4184032030175ed850716ac46d960b8b60683bcf89794413e9a39ead
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import tensorflow as tf
import warnings
from tensorflow_addons.utils.types import TensorLike
@tf.keras.utils.register_keras_serializable(package="Addons")
def gelu(x: TensorLike, approximate: bool = True) -> tf.Tensor:
    r"""Gaussian Error Linear Unit.

    Computes $x \Phi(x)$, where $\Phi$ is either the exact Gaussian CDF
    (`approximate=False`) or its tanh-based approximation
    (`approximate=True`), as introduced in
    [Gaussian Error Linear Units (GELUs)](https://arxiv.org/abs/1606.08415)
    and used in
    [BERT](https://arxiv.org/abs/1810.04805).

    Consider using `tf.nn.gelu` instead; note that its default for
    `approximate` is `False`, not `True`.

    Usage:

    >>> x = tf.constant([-1.0, 0.0, 1.0])
    >>> tfa.activations.gelu(x, approximate=False)
    <tf.Tensor: shape=(3,), dtype=float32, numpy=array([-0.15865529,  0.        ,  0.8413447 ], dtype=float32)>
    >>> tfa.activations.gelu(x, approximate=True)
    <tf.Tensor: shape=(3,), dtype=float32, numpy=array([-0.15880796,  0.        ,  0.841192  ], dtype=float32)>

    Args:
        x: A `Tensor` of type `float16`, `float32` or `float64`.
        approximate: bool, whether to use the tanh approximation.

    Returns:
        A `Tensor` with the same type as `x`.
    """
    # This wrapper only forwards to the core implementation; warn callers
    # about the migration and the changed `approximate` default.
    deprecation_notice = (
        "gelu activation has been migrated to core TensorFlow, "
        "and will be deprecated in Addons 0.13. "
        "Note that the default of `approximate` changed to `False` in `tf.nn.gelu`."
    )
    warnings.warn(deprecation_notice, DeprecationWarning)
    return tf.nn.gelu(x, approximate)
|
tensorflow/addons
|
tensorflow_addons/activations/gelu.py
|
Python
|
apache-2.0
| 2,575
|
[
"Gaussian"
] |
80b1787c441cd59c8ee3a68f09712176a1e659bb62ca95506fa605c0790700b5
|
# -*- coding: utf-8 -*-
# Copyright 2008-2013 Jaap Karssenberg <jaap.karssenberg@gmail.com>
'''Package with source formats for pages.
Each module in zim.formats should contains exactly one subclass of
DumperClass and exactly one subclass of ParserClass
(optional for export formats). These can be loaded by L{get_parser()}
and L{get_dumper()} respectively. The requirement to have exactly one
subclass per module means you can not import other classes that derive
from these base classes directly into the module.
For format modules it is safe to import '*' from this module.
Parse tree structure
====================
Parse trees are built using the (c)ElementTree module (included in
python 2.5 as xml.etree.ElementTree). It is basically an XML structure
supporting a subset of "html like" tags.
Supported tags:
- page root element for grouping paragraphs
- p for paragraphs
- h for heading, level attribute can be 1..6
- pre for verbatim paragraphs (no further parsing in these blocks)
- em for emphasis, rendered italic by default
- strong for strong emphasis, rendered bold by default
- mark for highlighted text, rendered with background color or underlined
- strike for text that is removed, usually rendered as strike through
- code for inline verbatim text
- ul for bullet and checkbox lists
- ol for numbered lists
- li for list items
- link for links, attribute href gives the target
- img for images, attributes src, width, height and optionally href and alt
- type can be used to control plugin functionality, e.g. type=equation
- table for tables, attributes
* aligns - comma separated values: right,left,center
* wraps - 0 for not wrapped, 1 for auto-wrapped line display
- thead for table header row
- th for table header cell
- trow for table row
- td for table data cell
Unlike html we respect line breaks and other whitespace as is.
When rendering as html use the "white-space: pre" CSS definition to
get the same effect.
Since elements are based on the functional markup instead of visual
markup it is not allowed to nest elements in arbitrary ways.
TODO: allow links to be nested in other elements
TODO: allow strike to have sub elements
TODO: add HR element
If a page starts with a h1 this heading is considered the page title,
else we can fall back to the page name as title.
NOTE: To avoid confusion: "headers" refers to meta data, usually in
the form of rfc822 headers at the top of a page. But "heading" refers
to a title or subtitle in the document.
'''
import re
import string
import logging
import types
from zim.fs import Dir, File
from zim.parsing import link_type, is_url_re, \
url_encode, url_decode, URL_ENCODE_READABLE, URL_ENCODE_DATA
from zim.parser import Builder
from zim.config import data_file, ConfigDict
from zim.objectmanager import ObjectManager
import zim.plugins
logger = logging.getLogger('zim.formats')

# pango is only needed to determine RTL text direction; it is missing when
# the gtk bindings are not installed, so degrade gracefully without it.
# Catch ImportError specifically - a bare "except:" would also swallow
# KeyboardInterrupt / SystemExit.
try:
    import pango
except ImportError:
    pango = None
    logger.warning('Could not load pango - RTL scripts may look bad')

# Prefer the C implementation of ElementTree when it is available.
try:
    import xml.etree.cElementTree as ElementTreeModule
except ImportError: #pragma: no cover
    logger.warning('Could not load cElementTree, defaulting to ElementTree')
    import xml.etree.ElementTree as ElementTreeModule
# Bitmask flags describing what a format module can be used for
EXPORT_FORMAT = 1
IMPORT_FORMAT = 2
NATIVE_FORMAT = 4
TEXT_FORMAT = 8 # Used for "Copy As" menu - these all provide "text/plain" mimetype

# States for checkbox bullets in lists
UNCHECKED_BOX = 'unchecked-box'
CHECKED_BOX = 'checked-box'
XCHECKED_BOX = 'xchecked-box'
BULLET = '*' # FIXME make this 'bullet'

# Tag names used in the parse tree - see the module docstring for the
# meaning of each tag
FORMATTEDTEXT = 'zim-tree'
FRAGMENT = 'zim-tree'

HEADING = 'h'
PARAGRAPH = 'p'
VERBATIM_BLOCK = 'pre' # should be same as verbatim
BLOCK = 'div'
IMAGE = 'img'
OBJECT = 'object'

BULLETLIST = 'ul'
NUMBEREDLIST = 'ol'
LISTITEM = 'li'

# Inline formatting tags
EMPHASIS = 'emphasis' # TODO change to "em" to be in line with html
STRONG = 'strong'
MARK = 'mark'
VERBATIM = 'code'
STRIKE = 'strike'
SUBSCRIPT = 'sub'
SUPERSCRIPT = 'sup'

LINK = 'link'
TAG = 'tag'
ANCHOR = 'anchor'

# Table structure tags
TABLE = 'table'
HEADROW = 'thead'
HEADDATA = 'th'
TABLEROW = 'trow'
TABLEDATA = 'td'

# Tags that form block level elements; everything else is inline
BLOCK_LEVEL = (PARAGRAPH, HEADING, VERBATIM_BLOCK, BLOCK, OBJECT, IMAGE, LISTITEM, TABLE)
# Letter bullets run uppercase first, then lowercase, and wrap around
_letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"


def increase_list_iter(listiter):
    '''Get the next item in a list for a numbered list

    E.g if C{listiter} is C{"1"} this function returns C{"2"}, if it
    is C{"a"} it returns C{"b"}.

    @param listiter: the current item, either an integer number or
    single letter
    @returns: the next item, or C{None} when the item is not recognized
    '''
    try:
        return str(int(listiter) + 1)
    except ValueError:
        pass # not a numbered bullet, fall through to letters

    position = _letters.find(listiter)
    if position < 0:
        return None # not a single letter either
    elif position + 1 < len(_letters):
        return _letters[position + 1]
    else:
        return _letters[0] # wrap to start of list after the last letter
def encode_xml(text):
    '''Encode text such that it can be used in xml

    Replaces the five XML special characters by their entities.
    The ampersand is replaced first so already-inserted entities are
    not escaped twice.

    @param text: label text as string
    @returns: encoded text
    '''
    # NOTE: in the reviewed copy these replacements had been collapsed to
    # no-ops by entity un-escaping; restored to the actual XML entities.
    return text.replace('&', '&amp;') \
        .replace('>', '&gt;') \
        .replace('<', '&lt;') \
        .replace('"', '&quot;') \
        .replace("'", '&apos;')
def list_formats(type):
    '''Return the list of user-visible format names for C{type}

    Only C{EXPORT_FORMAT} and C{TEXT_FORMAT} are supported.
    '''
    if type == TEXT_FORMAT:
        return ['Text', 'Wiki', 'Markdown (pandoc)', 'RST (sphinx)']
    elif type == EXPORT_FORMAT:
        return ['HTML', 'LaTeX', 'Markdown (pandoc)', 'RST (sphinx)']
    else:
        assert False, 'TODO'
def canonical_name(name):
    '''Map a display name to the canonical (module) format name

    Examples: "HTML" -> "html", "Markdown (pandoc)" -> "markdown",
    "Text" -> "plain".
    '''
    base, _, _ = name.lower().partition(' ')
    return 'plain' if base == 'text' else base
def get_format(name):
    '''Returns the module object for a specific format.

    Deprecated in favor of L{get_parser()} and L{get_dumper()}; kept
    because existing callers still use it. If this method is removed,
    class names in formats/*.py can be made more explicit.
    '''
    return get_format_module(name)
def get_format_module(name):
    '''Look up the module implementing a named format

    @param name: the format name
    @returns: a module object
    '''
    module_name = 'zim.formats.' + canonical_name(name)
    return zim.plugins.get_module(module_name)
def get_parser(name, *arg, **kwarg):
    '''Construct a parser instance for a named format

    @param name: format name
    @param arg: positional arguments passed to the parser constructor
    @param kwarg: keyword arguments passed to the parser constructor
    @returns: parser object instance (subclass of L{ParserClass})
    '''
    module = get_format_module(name)
    parser_class = zim.plugins.lookup_subclass(module, ParserClass)
    return parser_class(*arg, **kwarg)
def get_dumper(name, *arg, **kwarg):
    '''Construct a dumper instance for a named format

    @param name: format name
    @param arg: positional arguments passed to the dumper constructor
    @param kwarg: keyword arguments passed to the dumper constructor
    @returns: dumper object instance (subclass of L{DumperClass})
    '''
    module = get_format_module(name)
    dumper_class = zim.plugins.lookup_subclass(module, DumperClass)
    return dumper_class(*arg, **kwarg)
class ParseTree(object):
    '''Wrapper for zim parse trees.'''

    # No longer derives from ElementTree, internals are not private
    # TODO, also remove etree args from init
    # TODO, rename to FormattedText

    def __init__(self, *arg, **kwarg):
        # Arguments are passed through to ElementTree (typically the root
        # element); see TODO above about removing this pass-through.
        self._etree = ElementTreeModule.ElementTree(*arg, **kwarg)
        self._object_cache = {} # cache for custom objects, see _get_object()

    @property
    def hascontent(self):
        '''Returns True if the tree contains any content at all.'''
        root = self._etree.getroot()
        return bool(root.getchildren()) or (root.text and not root.text.isspace())

    @property
    def ispartial(self):
        '''Returns True when this tree is a segment of a page
        (like a copy-paste buffer).
        '''
        # "partial" is set on the root by ParseTreeBuilder(partial=True)
        return self._etree.getroot().attrib.get('partial', False)

    @property
    def israw(self):
        '''Returns True when this is a raw tree (which is representation
        of TextBuffer, but not really valid).
        '''
        return self._etree.getroot().attrib.get('raw', False)

    def extend(self, tree):
        '''Concatenate C{tree} onto this tree in place; returns self'''
        # Do we need a deepcopy here ?
        myroot = self._etree.getroot()
        otherroot = tree._etree.getroot()
        if otherroot.text:
            # Leading text of the other root must be merged into the tail
            # of our last child (or into our root text if we are empty).
            children = myroot.getchildren()
            if children:
                last = children[-1]
                last.tail = (last.tail or '') + otherroot.text
            else:
                myroot.text = (myroot.text or '') + otherroot.text
        for element in otherroot.getchildren():
            myroot.append(element)
        return self

    __add__ = extend # so "tree1 + tree2" extends tree1 in place

    def fromstring(self, string):
        '''Set the contents of this tree from XML representation.'''
        parser = ElementTreeModule.XMLTreeBuilder()
        parser.feed(string)
        root = parser.close()
        self._etree._setroot(root)
        return self # allow ParseTree().fromstring(..)

    def tostring(self):
        '''Serialize the tree to a XML representation'''
        from cStringIO import StringIO

        # Parent dies when we have attributes that are not a string
        for element in self._etree.getiterator('*'):
            for key in element.attrib.keys():
                element.attrib[key] = str(element.attrib[key])

        xml = StringIO()
        xml.write("<?xml version='1.0' encoding='utf-8'?>\n")
        ElementTreeModule.ElementTree.write(self._etree, xml, 'utf-8')
        return xml.getvalue()

    def copy(self):
        '''Return a deep copy of this tree'''
        # By using serialization we are absolutely sure all refs are new
        xml = self.tostring()
        try:
            return ParseTree().fromstring(xml)
        except:
            print ">>>", xml, "<<<"
            raise

    def _get_heading_element(self, level=1):
        # Return the first child when it is a heading of at least C{level}
        # and there is no leading text before it; else None.
        root = self._etree.getroot()
        children = root.getchildren()
        if root.text and not root.text.isspace():
            return None

        if children:
            first = children[0]
            # NOTE(review): attrib['level'] may be a str here; this
            # comparison relies on Python 2 mixed-type ordering -- verify
            if first.tag == 'h' and first.attrib['level'] >= level:
                return first
        return None

    def get_heading_level(self):
        '''Return the level of the heading the tree starts with,
        or C{None} when the tree does not start with a heading.
        '''
        heading_elem = self._get_heading_element()
        if heading_elem is not None:
            return int(heading_elem.attrib['level'])
        else:
            return None

    def get_heading(self, level=1):
        '''Return the text of the first heading (of at least C{level}),
        or the empty string when there is none.
        '''
        heading_elem = self._get_heading_element(level)
        if heading_elem is not None:
            return heading_elem.text
        else:
            return ""

    def set_heading(self, text, level=1):
        '''Set the first heading of the parse tree to 'text'. If the tree
        already has a heading of the specified level or higher it will be
        replaced. Otherwise the new heading will be prepended.
        '''
        heading = self._get_heading_element(level)
        if heading is not None:
            heading.text = text
        else:
            root = self._etree.getroot()
            heading = ElementTreeModule.Element('h', {'level': level})
            heading.text = text
            heading.tail = root.text # keep text that was at the top
            root.text = None
            root.insert(0, heading)

    def pop_heading(self, level=-1):
        '''If the tree starts with a heading, remove it and any trailing
        whitespace.
        Will modify the tree.
        @param level: only pop headings up to this level; C{-1} means
        any level
        @returns: a 2-tuple of text and heading level or C{(None, None)}
        '''
        root = self._etree.getroot()
        children = root.getchildren()
        if root.text and not root.text.isspace():
            return None, None

        if children:
            first = children[0]
            if first.tag == 'h':
                mylevel = int(first.attrib['level'])
                if level == -1 or mylevel <= level:
                    root.remove(first)
                    if first.tail and not first.tail.isspace():
                        root.text = first.tail # Keep trailing text
                    return first.text, mylevel
                else:
                    return None, None
            else:
                return None, None
        else:
            return None, None

    def cleanup_headings(self, offset=0, max=6):
        '''Change the heading levels throughout the tree. This makes sure that
        all headings are nested directly under their parent (no gaps in the
        levels of the headings). Also you can set an offset for the top level
        and a max depth.
        '''
        path = [] # stack of (original level, assigned level) pairs
        for heading in self._etree.getiterator('h'):
            level = int(heading.attrib['level'])
            # find parent header in path using old level
            while path and path[-1][0] >= level:
                path.pop()

            if not path:
                newlevel = offset + 1
            else:
                newlevel = path[-1][1] + 1

            if newlevel > max:
                newlevel = max

            heading.attrib['level'] = newlevel
            path.append((level, newlevel))

    def resolve_images(self, notebook=None, path=None):
        '''Resolves the source files for all images relative to a page path and
        adds a '_src_file' attribute to the elements with the full file path.
        '''
        if notebook is None:
            # No notebook context: treat src as a plain filesystem path
            for element in self._etree.getiterator('img'):
                filepath = element.attrib['src']
                element.attrib['_src_file'] = File(filepath)
        else:
            for element in self._etree.getiterator('img'):
                filepath = element.attrib['src']
                element.attrib['_src_file'] = notebook.resolve_file(element.attrib['src'], path)

    def unresolve_images(self):
        '''Undo effect of L{resolve_images()}, mainly intended for
        testing.
        '''
        for element in self._etree.getiterator('img'):
            if '_src_file' in element.attrib:
                element.attrib.pop('_src_file')

    def encode_urls(self, mode=URL_ENCODE_READABLE):
        '''Calls encode_url() on all links that contain urls.
        See zim.parsing for details. Modifies the parse tree.
        '''
        for link in self._etree.getiterator('link'):
            href = link.attrib['href']
            if is_url_re.match(href):
                link.attrib['href'] = url_encode(href, mode=mode)
                if link.text == href:
                    # keep the visible text in sync when it mirrors the URL
                    link.text = link.attrib['href']

    def decode_urls(self, mode=URL_ENCODE_READABLE):
        '''Calls decode_url() on all links that contain urls.
        See zim.parsing for details. Modifies the parse tree.
        '''
        for link in self._etree.getiterator('link'):
            href = link.attrib['href']
            if is_url_re.match(href):
                link.attrib['href'] = url_decode(href, mode=mode)
                if link.text == href:
                    # keep the visible text in sync when it mirrors the URL
                    link.text = link.attrib['href']

    def count(self, text):
        '''Returns the number of occurrences of 'text' in this tree.'''
        count = 0
        for element in self._etree.getiterator():
            if element.text:
                count += element.text.count(text)
            if element.tail:
                count += element.tail.count(text)

        return count

    def countre(self, regex):
        '''Returns the number of matches for a regular expression
        in this tree.
        '''
        count = 0
        for element in self._etree.getiterator():
            if element.text:
                newstring, n = regex.subn('', element.text)
                count += n
            if element.tail:
                newstring, n = regex.subn('', element.tail)
                count += n

        return count

    def get_ends_with_newline(self):
        '''Checks whether this tree ends in a newline or not'''
        return self._get_element_ends_with_newline(self._etree.getroot())

    def _get_element_ends_with_newline(self, element):
        # Inspect the deepest last descendant to decide whether the
        # rendered text ends in a newline.
        if element.tail:
            return element.tail.endswith('\n')
        elif element.tag in ('li', 'h'):
            return True # implicit newline
        else:
            children = element.getchildren()
            if children:
                return self._get_element_ends_with_newline(children[-1]) # recurs
            elif element.text:
                return element.text.endswith('\n')
            else:
                return False # empty element like image

    def visit(self, visitor):
        '''Visit all nodes of this tree

        @note: If the visitor modifies the attrib dict on nodes, this
        will modify the tree.

        @param visitor: a L{Visitor} or L{Builder} object
        '''
        try:
            self._visit(visitor, self._etree.getroot())
        except VisitorStop:
            pass

    def _visit(self, visitor, node):
        try:
            if len(node): # Has children
                visitor.start(node.tag, node.attrib)
                if node.text:
                    visitor.text(node.text)
                for child in node:
                    self._visit(visitor, child) # recurs
                    if child.tail:
                        visitor.text(child.tail)
                visitor.end(node.tag)
            else:
                # Leaf node: one append() call instead of start/text/end
                visitor.append(node.tag, node.attrib, node.text)
        except VisitorSkip:
            pass

    def find(self, tag):
        '''Find first occurrence of C{tag} in the tree
        @returns: a L{Node} object or C{None}
        '''
        for elt in self.findall(tag):
            return elt # return first
        else:
            return None

    def findall(self, tag):
        '''Find all occurrences of C{tag} in the tree
        @param tag: tag name
        @returns: yields L{Node} objects
        '''
        for elt in self._etree.getiterator(tag):
            yield Element.new_from_etree(elt)

    def replace(self, tag, func):
        '''Modify the tree by replacing all occurrences of C{tag}
        by the return value of C{func}.

        @param tag: tag name
        @param func: function to generate replacement values.
        Function will be called as::

            func(node)

        Where C{node} is a L{Node} object representing the subtree.
        If the function returns another L{Node} object or modifies
        C{node} and returns it, the subtree will be replaced by this
        new node.
        If the function raises L{VisitorSkip} the replace is skipped.
        If the function raises L{VisitorStop} the replacement of all
        nodes will stop.
        '''
        try:
            self._replace(self._etree.getroot(), tag, func)
        except VisitorStop:
            pass

    def _replace(self, elt, tag, func):
        # Two-step replace in order to do items in order
        # of appearance.
        replacements = []
        for i, child in enumerate(elt):
            if child.tag == tag:
                try:
                    replacement = func(Element.new_from_etree(child))
                except VisitorSkip:
                    pass
                else:
                    replacements.append((i, child, replacement))
            elif len(child):
                self._replace(child, tag, func) # recurs
            else:
                pass

        if replacements:
            self._do_replace(elt, replacements)

    def _do_replace(self, elt, replacements):
        offset = 0 # offset due to replacements
        for i, child, node in replacements:
            i += offset
            if node is None or len(node) == 0:
                # Remove element
                tail = child.tail
                elt.remove(child)
                if tail:
                    self._insert_text(elt, i, tail)
                offset -= 1
            elif isinstance(node, Element):
                # Just replace elements
                newchild = self._node_to_etree(node)
                newchild.tail = child.tail
                elt[i] = newchild
            elif isinstance(node, DocumentFragment):
                # Insert list of elements and text
                tail = child.tail
                elt.remove(child)
                offset -= 1
                for item in node:
                    if isinstance(item, basestring):
                        # text merges into a tail, does not take an index
                        self._insert_text(elt, i, item)
                    else:
                        assert isinstance(item, Element)
                        elt.insert(i, self._node_to_etree(item))
                        i += 1
                        offset += 1
                if tail:
                    self._insert_text(elt, i, tail)
            else:
                raise TypeError, 'BUG: invalid replacement result'

    @staticmethod
    def _node_to_etree(node):
        # Convert a Node back into an etree element by replaying it
        # through a ParseTreeBuilder.
        builder = ParseTreeBuilder()
        node.visit(builder)
        return builder._b.close()

    def _insert_text(self, elt, i, text):
        # Insert text at child index C{i}: append to the tail of the
        # previous sibling, or to the parent text when i == 0.
        if i == 0:
            if elt.text:
                elt.text += text
            else:
                elt.text = text
        else:
            prev = elt[i - 1]
            if prev.tail:
                prev.tail += text
            else:
                prev.tail = text

    def get_objects(self, type=None):
        '''Generator that yields all custom objects in the tree,
        or all objects of a certain type.
        @param type: object type to return or C{None} to get all
        @returns: yields objects (as provided by L{ObjectManager})
        '''
        for elt in self._etree.getiterator(OBJECT):
            if type and elt.attrib.get('type') != type:
                pass
            else:
                obj = self._get_object(elt)
                if obj is not None:
                    yield obj

    def _get_object(self, elt):
        ## TODO optimize using self._object_cache or new API for
        ## passing on objects in the tree
        type = elt.attrib.get('type')
        if elt.tag == OBJECT and type:
            return ObjectManager.get_object(type, elt.attrib, elt.text)
        else:
            return None
class VisitorStop(Exception):
    '''Raised from a visitor callback to abort the tree traversal
    without signalling an error.
    '''
class VisitorSkip(Exception):
    '''Raised from a visitor callback when the visitor wants to skip
    a leaf node instead of descending into it.
    '''
class Visitor(object):
    '''Callback interface for walking a parse tree.

    Conceptually the opposite of a builder, but with the same API:
    each node in the tree triggers one of the callbacks below.
    See e.g. L{ParseTree.visit()}.
    '''

    def start(self, tag, attrib=None):
        '''Called when a formatted region opens

        Two exceptions can be raised here to steer the traversal:

        1. L{VisitorStop} cancels the current parsing without raising
        an error, so code implementing a visit method should catch it.
        2. L{VisitorSkip} tells the implementation not to descend
        further into this node.

        @note: If the visitor modifies the attrib dict on nodes, this
        will modify the tree. If this is not intended, the implementation
        needs to take care to copy the attrib to break the reference.

        @param tag: the tag name
        @param attrib: optional dict with attributes
        @implementation: optional for subclasses
        '''
        pass

    def text(self, text):
        '''Called for a run of text

        @param text: text to be appended as string
        @implementation: optional for subclasses
        '''
        pass

    def end(self, tag):
        '''Called when a formatted region closes

        @param tag: the tag name
        @raises AssertionError: when tag does not match current state
        @implementation: optional for subclasses
        '''
        pass

    def append(self, tag, attrib=None, text=None):
        '''Convenience callback for a leaf node: open a tag, append its
        text and close it immediately.

        Can raise L{VisitorStop} or L{VisitorSkip}, see C{start()}
        for the conditions.

        @param tag: the tag name
        @param attrib: optional dict with attributes
        @param text: formatted text
        @implementation: optional for subclasses, default implementation
        calls L{start()}, L{text()}, and L{end()}
        '''
        self.start(tag, attrib)
        if text is not None:
            self.text(text)
        self.end(tag)
class ParseTreeBuilder(Builder):
    '''Builder object that builds a L{ParseTree}'''

    def __init__(self, partial=False):
        # partial=True builds a tree fragment (e.g. a copy-paste buffer)
        # and relaxes the trailing-newline normalization in end()
        self.partial = partial
        self._b = ElementTreeModule.TreeBuilder()
        self.stack = [] #: keeps track of current open elements
        self._last_char = None # last character of text in the current block

    def get_parsetree(self):
        '''Returns the constructed L{ParseTree} object.
        Can only be called once, after calling this method the object
        can not be re-used.
        '''
        root = self._b.close()
        if self.partial:
            root.attrib['partial'] = True
        return zim.formats.ParseTree(root)

    def start(self, tag, attrib=None):
        self._b.start(tag, attrib)
        self.stack.append(tag)
        if tag in BLOCK_LEVEL:
            self._last_char = None

    def text(self, text):
        self._last_char = text[-1]

        # FIXME hack for backward compat
        if self.stack and self.stack[-1] in (HEADING, LISTITEM):
            text = text.strip('\n')

        self._b.data(text)

    def end(self, tag):
        if tag != self.stack[-1]:
            raise AssertionError, 'Unmatched tag closed: %s' % tag

        if tag in BLOCK_LEVEL:
            # Normalize: block level elements should end in a newline
            if self._last_char is not None and not self.partial:
                #~ assert self._last_char == '\n', 'Block level text needs to end with newline'
                if self._last_char != '\n' and tag not in (HEADING, LISTITEM):
                    self._b.data('\n')
                # FIXME check for HEADING LISTITME for backward compat
            # TODO if partial only allow missing \n at end of tree,
            # delay message and trigger if not followed by get_parsetree ?

        self._b.end(tag)
        self.stack.pop()

        # FIXME hack for backward compat
        if tag == HEADING:
            self._b.data('\n')
            self._last_char = None

    def append(self, tag, attrib=None, text=None):
        if tag in BLOCK_LEVEL:
            if text and not text.endswith('\n'):
                text += '\n'

        # FIXME hack for backward compat
        if text and tag in (HEADING, LISTITEM):
            text = text.strip('\n')

        self._b.start(tag, attrib)
        if text:
            self._b.data(text)
        self._b.end(tag)

        # FIXME hack for backward compat
        if tag == HEADING:
            self._b.data('\n')
            self._last_char = None
# Matches the run of newlines at the very end of a string; used to count
# how many trailing line ends were already emitted.
count_eol_re = re.compile(r'\n+\Z')
# Splits text on blocks of two or more empty (whitespace-only) lines,
# i.e. on paragraph boundaries.
split_para_re = re.compile(r'((?:^[ \t]*\n){2,})', re.M)
class OldParseTreeBuilder(object):
    '''This class supplies an alternative for xml.etree.ElementTree.TreeBuilder
    which cleans up the tree on the fly while building it. The main use
    is to normalize the tree that is produced by the editor widget, but it can
    also be used on other "dirty" interfaces.

    This builder takes care of the following issues:
    - Inline tags ('emphasis', 'strong', 'h', etc.) can not span multiple lines
    - Tags can not contain only whitespace
    - Tags can not be empty (with the exception of the 'img' tag)
    - There should be an empty line before each 'h', 'p' or 'pre'
    (with the exception of the first tag in the tree)
    - The 'p' and 'pre' elements should always end with a newline ('\\n')
    - Each 'p', 'pre' and 'h' should be postfixed with a newline ('\\n')
    (as a result 'p' and 'pre' are followed by an empty line, the
    'h' does not end in a newline itself, so it is different)
    - Newlines ('\\n') after a <li> element are removed (optional)
    - The element '_ignore_' is silently ignored
    '''

    ## TODO TODO this also needs to be based on Builder ##

    def __init__(self, remove_newlines_after_li=True):
        assert remove_newlines_after_li, 'TODO'
        self._stack = [] # stack of elements for open tags
        self._last = None # last element opened or closed
        self._data = [] # buffer with data
        self._tail = False # True if we are after an end tag
        self._seen_eol = 2 # track line ends on flushed data
        # starts with "2" so check is ok for first top level element

    def start(self, tag, attrib=None):
        # Flush pending text first; headings need an empty line before
        # them (2 newlines), paragraphs just a line end (1 newline).
        if tag == '_ignore_':
            return self._last
        elif tag == 'h':
            self._flush(need_eol=2)
        elif tag in ('p', 'pre'):
            self._flush(need_eol=1)
        else:
            self._flush()
        #~ print 'START', tag

        # Repair missing mandatory attributes instead of failing
        if tag == 'h':
            if not (attrib and 'level' in attrib):
                logger.warn('Missing "level" attribute for heading')
                attrib = attrib or {}
                attrib['level'] = 1
        elif tag == 'link':
            if not (attrib and 'href' in attrib):
                logger.warn('Missing "href" attribute for link')
                attrib = attrib or {}
                attrib['href'] = "404"
        # TODO check other mandatory properties !

        if attrib:
            self._last = ElementTreeModule.Element(tag, attrib)
        else:
            self._last = ElementTreeModule.Element(tag)
        if self._stack:
            self._stack[-1].append(self._last)
        else:
            assert tag == 'zim-tree', 'root element needs to be "zim-tree"'
        self._stack.append(self._last)

        self._tail = False
        return self._last

    def end(self, tag):
        if tag == '_ignore_':
            return None
        elif tag in ('p', 'pre'):
            self._flush(need_eol=1)
        else:
            self._flush()
        #~ print 'END', tag

        self._last = self._stack[-1]
        assert self._last.tag == tag, \
            "end tag mismatch (expected %s, got %s)" % (self._last.tag, tag)
        self._tail = True

        if len(self._stack) > 1 and not (
            tag in (IMAGE, OBJECT, HEADDATA, TABLEDATA)
            or (self._last.text and not self._last.text.isspace())
            or self._last.getchildren()
        ):
            # purge empty tags
            if self._last.text and self._last.text.isspace():
                self._append_to_previous(self._last.text)

            empty = self._stack.pop()
            self._stack[-1].remove(empty)
            # Rewind state to the previous sibling (or the parent) and
            # reclaim its tail/text into the data buffer so following
            # text merges correctly.
            children = self._stack[-1].getchildren()
            if children:
                self._last = children[-1]
                if not self._last.tail is None:
                    self._data = [self._last.tail]
                    self._last.tail = None
            else:
                self._last = self._stack[-1]
                self._tail = False
                if not self._last.text is None:
                    self._data = [self._last.text]
                    self._last.text = None

            return empty
        else:
            return self._stack.pop()

    def data(self, text):
        # Text is only buffered here; all normalization happens in _flush()
        assert isinstance(text, basestring)
        self._data.append(text)

    def append(self, tag, text):
        '''Convenience function to open a tag, append text and close it'''
        self.start(tag)
        self.data(text)
        self.end(tag)

    def _flush(self, need_eol=0):
        # need_eol makes sure previous data ends with \n

        #~ print 'DATA:', self._data
        text = ''.join(self._data)

        # Fix trailing newlines
        if text:
            m = count_eol_re.search(text)
            if m: self._seen_eol = len(m.group(0))
            else: self._seen_eol = 0

        if need_eol > self._seen_eol:
            text += '\n' * (need_eol - self._seen_eol)
            self._seen_eol = need_eol

        # Fix prefix newlines
        if self._tail and self._last.tag in ('h', 'p') \
        and not text.startswith('\n'):
            if text:
                text = '\n' + text
            else:
                text = '\n'
                self._seen_eol = 1
        elif self._tail and self._last.tag == 'li' \
        and text.startswith('\n'):
            # remove_newlines_after_li behavior, see class docstring
            text = text[1:]
            if not text.strip('\n'):
                self._seen_eol -= 1

        if text:
            assert not self._last is None, 'data seen before root element'
            self._data = []

            # Tags that are not allowed to have newlines
            if not self._tail and self._last.tag in (
            'h', 'emphasis', 'strong', 'mark', 'strike', 'code'):
                # assume no nested tags in these types ...
                if self._seen_eol:
                    text = text.rstrip('\n')
                    self._data.append('\n' * self._seen_eol)
                    self._seen_eol = 0
                lines = text.split('\n')

                # Split a multi-line inline tag into one element per line,
                # re-opening the same tag with copied attributes.
                for line in lines[:-1]:
                    assert self._last.text is None, "internal error (text)"
                    assert self._last.tail is None, "internal error (tail)"
                    if line and not line.isspace():
                        self._last.text = line
                        self._last.tail = '\n'
                        attrib = self._last.attrib.copy()
                        self._last = ElementTreeModule.Element(self._last.tag, attrib)
                        self._stack[-2].append(self._last)
                        self._stack[-1] = self._last
                    else:
                        self._append_to_previous(line + '\n')

                assert self._last.text is None, "internal error (text)"
                self._last.text = lines[-1]
            else:
                # TODO split paragraphs

                if self._tail:
                    assert self._last.tail is None, "internal error (tail)"
                    self._last.tail = text
                else:
                    assert self._last.text is None, "internal error (text)"
                    self._last.text = text
        else:
            self._data = []

    def close(self):
        assert len(self._stack) == 0, 'missing end tags'
        assert not self._last is None and self._last.tag == 'zim-tree', 'missing root element'
        return self._last

    def _append_to_previous(self, text):
        '''Add text before current element'''
        parent = self._stack[-2]
        children = parent.getchildren()[:-1]
        if children:
            if children[-1].tail:
                children[-1].tail = children[-1].tail + text
            else:
                children[-1].tail = text
        else:
            if parent.text:
                parent.text = parent.text + text
            else:
                parent.text = text
class ParserClass(object):
    '''Base class for parsers

    Each format that can be used natively should define a class
    'Parser' which inherits from this base class.
    '''

    def parse(self, input):
        '''ABSTRACT METHOD: needs to be overloaded by sub-classes.

        This method takes a text or an iterable with lines and returns
        a ParseTree object.

        @raises NotImplementedError: always, in this base class
        '''
        raise NotImplementedError

    @classmethod
    def parse_image_url(cls, url):
        '''Parse urls style options for images like "foo.png?width=500" and
        returns a dict with the options. The base url will be in the dict
        as 'src'.

        @param url: image url, optionally with a "?key=value&..." suffix
        @returns: dict with at least a 'src' key
        '''
        # Fixed: first parameter of a classmethod is conventionally "cls",
        # not "self"; also logger.warn() is a deprecated alias of warning().
        i = url.find('?')
        if i > 0:
            attrib = {'src': url[:i]}
            for option in url[i + 1:].split('&'):
                if option.find('=') == -1:
                    logger.warning('Mal-formed options in "%s"', url)
                    break # NOTE: also discards any remaining well-formed options

                k, v = option.split('=', 1)
                if k in ('width', 'height', 'type', 'href'):
                    if len(v) > 0:
                        value = url_decode(v, mode=URL_ENCODE_DATA)
                        attrib[str(k)] = value # str to avoid unicode key
                else:
                    logger.warning('Unknown attribute "%s" in "%s"', k, url)
            return attrib
        else:
            return {'src': url}
import collections

# Lightweight record for one open tag on the dumper stack: the tag name,
# its attribute dict (or None) and the list of formatted text fragments
# collected for it so far.
DumperContextElement = collections.namedtuple('DumperContextElement', ('tag', 'attrib', 'text'))
# FIXME unify this class with a generic Element class (?)
class DumperClass(Visitor):
'''Base class for dumper classes. Dumper classes serialize the content
of a parse tree back to a text representation of the page content.
Therefore this class implements the visitor API, so it can be
used with any parse tree implementation or parser object that supports
this API.
To implement a dumper class, you need to define handlers for all
tags that can appear in a page. Tags that are represented by a simple
prefix and postfix string can be defined in the dictionary C{TAGS}.
For example to define the italic tag in html output the dictionary
should contain a definition like: C{EMPHASIS: ('<i>', '</i>')}.
For tags that require more complex logic you can define a method to
format the tag. Typical usage is to format link attributes in such
a method. The method name should be C{dump_} + the name of the tag,
e.g. C{dump_link()} for links (see the constants with tag names for
the other tags). Such a dump method will get 3 arguments: the tag
name itself, a dictionary with the tag attributes and a list of
strings that form the tag content. The method should return a list
of strings that represents the formatted text.
This base class takes care of a stack of nested formatting tags and
when a tag is closed either picks the appropriate prefix and postfix
from C{TAGS} or calls the corresponding C{dump_} method. As a result
tags are serialized depth-first.
@ivar linker: the (optional) L{Linker} object, used to resolve links
@ivar template_options: a L{ConfigDict} with options that may be set
in a template (so inherently not safe !) to control the output style.
Formats using this need to define the supported keys in the dict
C{TEMPLATE_OPTIONS}.
@ivar context: the stack of open tags maintained by this class. Can
be used in C{dump_} methods to inspect the parent scope of the
format. Elements on this stack have "tag", "attrib" and "text"
attributes. Keep in mind that the parent scope is not yet complete
when a tag is serialized.
'''
TAGS = {} #: dict mapping formatting tags to 2-tuples of a prefix and a postfix string
TEMPLATE_OPTIONS = {} #: dict mapping ConfigDefinitions for template options
def __init__(self, linker=None, template_options=None):
self.linker = linker
self.template_options = ConfigDict(template_options)
self.template_options.define(self.TEMPLATE_OPTIONS)
self.context = []
self._text = []
def dump(self, tree):
'''Convenience methods to dump a given tree.
@param tree: a parse tree object that supports a C{visit()} method
'''
# FIXME - issue here is that we need to reset state - should be in __init__
self._text = []
self.context = [DumperContextElement(None, None, self._text)]
tree.visit(self)
if len(self.context) != 1:
raise AssertionError, 'Unclosed tags on tree: %s' % self.context[-1].tag
#~ import pprint; pprint.pprint(self._text)
return self.get_lines() # FIXME - maybe just return text ?
def get_lines(self):
'''Return the dumped content as a list of lines
Should only be called after closing the top level element
'''
return u''.join(self._text).splitlines(1)
def start(self, tag, attrib=None):
if attrib:
attrib = attrib.copy() # Ensure dumping does not change tree
self.context.append(DumperContextElement(tag, attrib, []))
def text(self, text):
assert not text is None
if self.context[-1].tag != OBJECT:
text = self.encode_text(self.context[-1].tag, text)
self.context[-1].text.append(text)
def end(self, tag):
if not tag or tag != self.context[-1].tag:
raise AssertionError, 'Unexpected tag closed: %s' % tag
_, attrib, strings = self.context.pop()
if tag in self.TAGS:
assert strings, 'Can not append empty %s element' % tag
start, end = self.TAGS[tag]
strings.insert(0, start)
strings.append(end)
elif tag == FORMATTEDTEXT:
pass
else:
try:
method = getattr(self, 'dump_'+tag)
except AttributeError:
raise AssertionError, 'BUG: Unknown tag: %s' % tag
strings = method(tag, attrib, strings)
#~ try:
#~ u''.join(strings)
#~ except:
#~ print "BUG: %s returned %s" % ('dump_'+tag, strings)
if strings is not None:
self.context[-1].text.extend(strings)
def append(self, tag, attrib=None, text=None):
strings = None
if tag in self.TAGS:
assert text is not None, 'Can not append empty %s element' % tag
start, end = self.TAGS[tag]
text = self.encode_text(tag, text)
strings = [start, text, end]
elif tag == FORMATTEDTEXT:
if text is not None:
strings = [self.encode_text(tag, text)]
else:
if attrib:
attrib = attrib.copy() # Ensure dumping does not change tree
try:
method = getattr(self, 'dump_'+tag)
except AttributeError:
raise AssertionError, 'BUG: Unknown tag: %s' % tag
if text is None:
strings = method(tag, attrib, [])
elif tag == OBJECT:
strings = method(tag, attrib, [text])
else:
strings = method(tag, attrib, [self.encode_text(tag, text)])
if strings is not None:
self.context[-1].text.extend(strings)
def encode_text(self, tag, text):
'''Optional method to encode text elements in the output
@note: Do not apply text encoding in the C{dump_} methods, the
list of strings given there may contain prefix and postfix
formatting of nested tags.
@param tag: formatting tag
@param text: text to be encoded
@returns: encoded text
@implementation: optional, default just returns unmodified input
'''
return text
def prefix_lines(self, prefix, strings):
'''Convenience method to wrap a number of lines with e.g. an
indenting sequence.
@param prefix: a string to prefix each line
@param strings: a list of pieces of text
@returns: a new list of lines, each starting with prefix
'''
lines = u''.join(strings).splitlines(1)
return [prefix + l for l in lines]
def dump_object(self, tag, attrib, strings=None):
'''Dumps object using proper ObjectManager'''
format = str(self.__class__.__module__).split('.')[-1]
if 'type' in attrib:
obj = ObjectManager.get_object(attrib['type'], attrib, u''.join(strings))
output = obj.dump(format, self, self.linker)
if isinstance(output, basestring):
return [output]
elif output is not None:
return output
return self.dump_object_fallback(tag, attrib, strings)
# TODO put content in attrib, use text for caption (with full recursion)
# See img
def dump_object_fallback(self, tag, attrib, strings=None):
'''Method to serialize objects that do not have their own
handler for this format.
@implementation: must be implemented in sub-classes
'''
raise NotImplementedError
def isrtl(self, text):
'''Check for Right To Left script
@param text: the text to check
@returns: C{True} if C{text} starts with characters in a
RTL script, or C{None} if direction is not determined.
'''
if pango is None:
return None
# It seems the find_base_dir() function is not documented in the
# python language bindings. The Gtk C code shows the signature:
#
# pango.find_base_dir(text, length)
#
# It either returns a direction, or NEUTRAL if e.g. text only
# contains punctuation but no real characters.
dir = pango.find_base_dir(text, len(text))
if dir == pango.DIRECTION_NEUTRAL:
return None
else:
return dir == pango.DIRECTION_RTL
class BaseLinker(object):
    '''Abstract interface for linker objects.

    A linker translates the links found in zim pages to (relative) URLs,
    e.g. while exporting content. Relative URLs start with "./" or "../"
    and follow the same interpretation rules as in HTML. All URLs and
    relative URLs returned by a linker are already URL encoded.

    All methods below are abstract; concrete subclasses must override
    every one of them.
    '''

    def link(self, link):
        '''Translate a link of any type found in a zim page.
        @param link: the link to be translated
        @returns: a url, uri, or relative path valid in the context of
        this linker
        @implementation: must be implemented by child classes
        '''
        raise NotImplementedError

    def img(self, src):
        '''Translate an image source 'src' to an url.
        @implementation: must be implemented by child classes
        '''
        raise NotImplementedError

    def resource(self, path):
        '''Translate a template resource path to an url.
        @implementation: must be implemented by child classes
        '''
        raise NotImplementedError

    def resolve_source_file(self, link):
        '''Locate the source file behind an attachment link.

        Used e.g. by the latex format to find files for equations to be
        inlined. Never use this to resolve links: the file returned here
        may be temporary and is not guaranteed to exist after the export.
        @returns: a L{File} object or C{None} if no file was found
        @implementation: must be implemented by child classes
        '''
        raise NotImplementedError

    def page_object(self, path):
        '''Turn a L{Path} object into a relative link or URI.'''
        raise NotImplementedError

    def file_object(self, file):
        '''Turn a L{File} object into a relative link or URI.
        @implementation: must be implemented by child classes
        '''
        raise NotImplementedError
class StubLinker(BaseLinker):
    '''Linker used for testing - just gives back the link as it was
    parsed. DO NOT USE outside of testing.
    '''

    def __init__(self, source_dir=None):
        self.source_dir = source_dir

    def link(self, link):
        # Renamed local from "type" - it shadowed the builtin
        kind = link_type(link)
        if kind == 'mailto' and not link.startswith('mailto:'):
            return 'mailto:' + link
        elif kind == 'interwiki':
            return 'interwiki:' + link
        else:
            return link

    def img(self, src):
        # Pass image sources through unchanged
        return src

    def resource(self, path):
        # Pass template resource paths through unchanged
        return path

    def resolve_source_file(self, link):
        # Resolve relative to source_dir when one was given, else None
        if self.source_dir:
            return self.source_dir.file(link)
        else:
            return None

    def page_object(self, path):
        return path.name

    def file_object(self, file):
        return file.name
class Node(list):
    '''Base class for DOM-like access to the document structure.
    @note: This class is not optimized for keeping large structures
    in memory.
    @ivar tag: tag name
    @ivar attrib: dict with attributes
    '''

    __slots__ = ('tag', 'attrib')

    def __init__(self, tag, attrib=None, *content):
        self.tag = tag
        self.attrib = attrib
        if content:
            self.extend(content)

    @classmethod
    def new_from_etree(klass, elt):
        # Recursively convert an ElementTree element into a Node tree,
        # preserving both .text and each child's .tail text.
        obj = klass(elt.tag, dict(elt.attrib))
        if elt.text:
            obj.append(elt.text)
        for child in elt:
            subnode = klass.new_from_etree(child)  # recurs
            obj.append(subnode)
            if child.tail:
                obj.append(child.tail)
        return obj

    def get(self, key, default=None):
        # Attribute lookup; "attrib" may be None when no attribute was set
        if self.attrib:
            return self.attrib.get(key, default)
        else:
            return default

    def set(self, key, value):
        # Lazily create the attribute dict on first assignment
        if not self.attrib:
            self.attrib = {}
        self.attrib[key] = value

    def append(self, item):
        # Fragments are spliced into this node rather than nested as a child
        if isinstance(item, DocumentFragment):
            list.extend(self, item)
        else:
            list.append(self, item)

    def gettext(self):
        '''Get text as string
        Ignores any markup and attributes and simply returns textual
        content.
        @note: do _not_ use as replacement for exporting to plain text
        @returns: string
        '''
        strings = self._gettext()
        return u''.join(strings)

    def _gettext(self):
        # Depth-first collection of all string children.
        # NOTE(review): uses the Python-2-only builtin "basestring";
        # this module targets Python 2.
        strings = []
        for item in self:
            if isinstance(item, basestring):
                strings.append(item)
            else:
                strings.extend(item._gettext())
        return strings

    def toxml(self):
        '''Serialize this node and its children to an XML string.'''
        strings = self._toxml()
        return u''.join(strings)

    def _toxml(self):
        # Build the XML serialization as a flat list of string pieces;
        # attributes are emitted in sorted order for deterministic output.
        strings = []
        if self.attrib:
            strings.append('<%s' % self.tag)
            for key in sorted(self.attrib):
                strings.append(' %s="%s"' % (key, encode_xml(self.attrib[key])))
            strings.append('>')
        else:
            strings.append("<%s>" % self.tag)
        for item in self:
            if isinstance(item, basestring):
                strings.append(encode_xml(item))
            else:
                strings.extend(item._toxml())
        strings.append("</%s>" % self.tag)
        return strings

    __repr__ = toxml  # repr() shows the XML serialization

    def visit(self, visitor):
        # A single text child is dispatched via visitor.append() as a
        # shortcut; otherwise emit start / text-or-recurse / end events.
        if len(self) == 1 and isinstance(self[0], basestring):
            visitor.append(self.tag, self.attrib, self[0])
        else:
            visitor.start(self.tag, self.attrib)
            for item in self:
                if isinstance(item, basestring):
                    visitor.text(item)
                else:
                    item.visit(visitor)
            visitor.end(self.tag)
class Element(Node):
    '''Element class for DOM-like access.
    Concrete Node subclass representing a single element; all behavior
    is inherited from L{Node}.
    '''
    pass
class DocumentFragment(Node):
    '''Document fragment class for DOM-like access.

    Unlike L{Element}, a fragment has the fixed tag C{FRAGMENT}, never
    carries attributes, and is spliced into its parent on append rather
    than nested (see L{Node.append}).
    '''

    def __init__(self, *content):
        self.tag = FRAGMENT
        self.attrib = None
        # extending with an empty tuple is a no-op, so no guard is needed
        self.extend(content)
class TableParser():
    '''Common functions for converting a table from its xml structure to
    another format'''

    @staticmethod
    def width2dim(lines):
        '''
        Calculates the characters on each column and returns a list of widths
        :param lines: 2-dim multiline rows
        :return: the number of characters of the longest cell-value by column
        '''
        return [max(map(len, column)) for column in zip(*lines)]

    @staticmethod
    def width3dim(lines):
        '''
        Calculates the characters on each column and returns a list of widths
        :param lines: 3-dim multiline rows
        :return: the number of characters of the longest cell-value by column
        '''
        # Flatten one level (rows of lines -> lines); a comprehension works
        # on both Python 2 and 3, unlike the py2-only builtin reduce()
        flattened = [line for row in lines for line in row]
        return [max(map(len, column)) for column in zip(*flattened)]

    @staticmethod
    def convert_to_multiline_cells(rows):
        '''
        Each cell of a list of lists is split by "\n" and a 3-dimensional list
        is returned, where each inner list represents a line, multiple lines
        represent a row and multiple rows represent the table.
        c11a = Cell in Row 1 in Column 1 in first = a line
        :param rows: format like (('c11a \n c11b', 'c12a \n c12b'), ('c21', 'c22a \n 22b'))
        :return: format like (((c11a, c12a), (c11b, c12b)), ((c21, c22a), ('', c22b)))
        '''
        multi_rows = [[cell.split("\n") for cell in row] for row in rows]
        # Group by line, not by row: cells with fewer lines than the tallest
        # cell in their row are padded with empty strings. (Replaces the
        # old multi-iterable map() call, whose None-padding behavior was
        # Python-2-only.)
        strings = []
        for row in multi_rows:
            height = max(len(cell) for cell in row)
            strings.append([[cell[i] if i < len(cell) else '' for cell in row]
                            for i in range(height)])
        return strings

    @staticmethod
    def get_options(attrib):
        '''
        Lists the attributes as tuple
        :param attrib: dict with comma-separated 'aligns' and 'wraps' entries
        :return: tuple of (aligns, wraps); both are lists
        '''
        aligns = attrib['aligns'].split(',')
        # Materialize as a list so the result can be iterated more than
        # once (py3 map() would return a one-shot iterator)
        wraps = [int(w) for w in attrib['wraps'].split(',')]
        return aligns, wraps

    @staticmethod
    def rowsep(maxwidths, x='+', y='-'):
        '''
        Displays a row separator
        example: rowsep((3,0), '+', '-') -> +-----+--+
        :param maxwidths: list of column lengths
        :param x: point-separator
        :param y: line-separator
        :return: a textline
        '''
        # each column gets width + 2 separator chars (one space of padding
        # on either side of the cell content)
        return x + x.join((width + 2) * y for width in maxwidths) + x

    @staticmethod
    def headsep(maxwidths, aligns, x='|', y='-'):
        '''
        Displays a header separation with alignment infos
        example: headsep((3,), ('left',)) -> |:----|
        :param maxwidths: list of column lengths
        :param aligns: list of alignments
        :param x: point-separator
        :param y: line-separator
        :return: a textline
        '''
        cells = []
        for width, align in zip(maxwidths, aligns):
            line = width * y
            if align == 'left':
                cell = ':' + line + y
            elif align == 'right':
                cell = y + line + ':'
            elif align == 'center':
                cell = ':' + line + ':'
            else:
                cell = y + line + y
            cells.append(cell)
        return x + x.join(cells) + x

    @staticmethod
    def headline(row, maxwidths, aligns, wraps, x='|', y=' '):
        '''
        Displays a header line in text format
        :param row: tuple of cells
        :param maxwidths: list of column length
        :param aligns: list of alignments
        :param wraps: list of wrap flags; 1 marks a wrapped column
        :param x: point-separator
        :param y: space-separator
        :return: a textline
        '''
        row = TableParser.alignrow(row, maxwidths, aligns, y)
        cells = []
        for val, wrap in zip(row, wraps):
            if wrap == 1:
                # replace the last padding char with '<' to mark the column
                val = val[:-1] + '<'
            cells.append(val)
        return x + x.join(cells) + x

    @staticmethod
    def rowline(row, maxwidths, aligns, x='|', y=' '):
        '''
        Displays a normal column line in text format
        example: rowline(('a',), (3,), ('left',)) -> | a   |
        :param row: tuple of cells
        :param maxwidths: list of column length
        :param aligns: list of alignments
        :param x: point-separator
        :param y: space-separator
        :return: a textline
        '''
        cells = TableParser.alignrow(row, maxwidths, aligns, y)
        return x + x.join(cells) + x

    @staticmethod
    def alignrow(row, maxwidths, aligns, y=' '):
        '''
        Formats a row with the right alignments
        :param row: tuple of cells
        :param maxwidths: list of column length
        :param aligns: list of alignments
        :param y: space-separator
        :return: list of padded cell strings
        '''
        cells = []
        for val, align, maxwidth in zip(row, aligns, maxwidths):
            if align == 'left':
                (lspace, rspace) = (1, maxwidth - len(val) + 1)
            elif align == 'right':
                (lspace, rspace) = (maxwidth - len(val) + 1, 1)
            elif align == 'center':
                # floor division: plain "/" yields a float on Python 3,
                # which would break the string multiplication below
                lspace = (maxwidth - len(val)) // 2 + 1
                rspace = (maxwidth - lspace - len(val) + 2)
            else:
                (lspace, rspace) = (1, maxwidth - len(val) + 1)
            cells.append(lspace * y + val + rspace * y)
        return cells
|
hjq300/zim-wiki
|
zim/formats/__init__.py
|
Python
|
gpl-2.0
| 47,634
|
[
"VisIt"
] |
424c74eeae4e7ddf88179f90e99a3cba9f42e26874bfe248d7c90860a8cc975f
|
# -*- coding: utf-8 -*-
"""
zine.plugins.eric_the_fish.fortunes
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Erics fortune cookies (ripped and stripped from the ubuntu `fortune`
fortune data file.)
:copyright: (c) 2010 by the Zine Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
FORTUNES = [
'A day for firm decisions!!!!! Or is it?',
'A few hours grace before the madness begins again.',
'A gift of a flower will soon be made to you.',
'A tall, dark stranger will have more fun than you.',
'A visit to a fresh place will bring strange work.',
'A visit to a strange place will bring fresh work.',
'A vivid and creative mind characterizes you.',
'Abandon the search for Truth; settle for a good fantasy.',
'Accent on helpful side of your nature. Drain the moat.',
'Advancement in position.',
'After your lover has gone you will still have PEANUT BUTTER!',
'Afternoon very favorable for romance. Try a single person for a change.',
'Alimony and bribes will engage a large share of your wealth.',
'All the troubles you have will pass away very quickly.',
'Among the lucky, you are the chosen one.',
'An avocado-tone refrigerator would look good on your resume.',
'An exotic journey in downtown Newark is in your future.',
'Another good night not to sleep in a eucalyptus tree.',
'Are you a turtle?',
'Are you making all this up as you go along?',
'Are you sure the back door is locked?',
'Artistic ventures highlighted. Rob a museum.',
'Avert misunderstanding by calm, poise, and balance.',
'Avoid gunfire in the bathroom tonight.',
'Avoid reality at all costs.',
'Bank error in your favor. Collect $200.',
'Be careful! Is it classified?',
'Be careful! UGLY strikes 9 out of 10!',
'Be cautious in your daily affairs.',
'Be cheerful while you are alive. -- Phathotep, 24th Century B.C.',
'Be different: conform.',
'Be security conscious -- National defense is at stake.',
'Beauty and harmony are as necessary to you as the very breath of life.',
'Best of all is never to have been born. Second best is to die soon.',
'Beware of a dark-haired man with a loud tie.',
'Beware of a tall black man with one blond shoe.',
'Beware of a tall blond man with one black shoe.',
'Beware of Bigfoot!',
'Beware of low-flying butterflies.',
'Beware the one behind you.',
'Blow it out your ear.',
'Break into jail and claim police brutality.',
'Bridge ahead. Pay troll.',
'Caution: breathing may be hazardous to your health.',
'Caution: Keep out of reach of children.',
'Celebrate Hannibal Day this year. Take an elephant to lunch.',
'Change your thoughts and you change your world.',
'Cheer Up! Things are getting worse at a slower rate.',
'Chess tonight.',
'Chicken Little only has to be right once.',
'Chicken Little was right.',
'Cold hands, no gloves.',
"Communicate! It can't make things any worse.",
'Courage is your greatest present need.',
'Day of inquiry. You will be subpoenaed.',
'Do not overtax your powers.',
'Do not sleep in a eucalyptus tree tonight.',
'Do nothing unless you must, and when you must act -- hesitate.',
'Do something unusual today. Pay a bill.',
'Do what comes naturally. See the and fume and throw a tantrum.',
'Domestic happiness and faithful friends.',
"Don't feed the bats tonight.",
"Don't get stuck in a closet -- wear yourself out.",
"Don't get to bragging.",
"Don't go surfing in South Dakota for a while.",
"Don't hate yourself in the morning -- sleep till noon.",
"Don't kiss an elephant on the lips today.",
"Don't let your mind wander -- it's too little to be let out alone.",
"Don't look back, the lemmings are gaining on you.",
"Don't look now, but the man in the moon is laughing at you.",
"Don't look now, but there is a multi-legged creature on your shoulder.",
"Don't plan any hasty moves. You'll be evicted soon anyway.",
"Don't read any sky-writing for the next two weeks.",
"Don't read everything you believe.",
"Don't relax! It's only your tension that's holding you together.",
"Don't tell any big lies today. Small ones can be just as effective.",
"Don't worry so loud, your roommate can't think.",
"Don't Worry, Be Happy. -- Meher Baba",
"Don't worry. Life's too long. -- Vincent Sardi, Jr.",
"Don't you feel more like you do now than you did when you came in?",
"Don't you wish you had more energy... or less ambition?",
'Everything that you know is wrong, but you can be straightened out.',
'Everything will be just tickety-boo today.',
'Excellent day for putting Slinkies on an escalator.',
'Excellent day to have a rotten day.',
'Excellent time to become a missing person.',
'Executive ability is prominent in your make-up.',
'Exercise caution in your daily affairs.',
'Expect a letter from a friend who will ask a favor of you.',
"Expect the worst, it's the least you can do.",
'Fine day for friends. So-so day for you.',
'Fine day to work off excess energy. Steal something heavy.',
'Future looks spotty. You will spill soup in late evening.',
'Generosity and perfection are your everlasting goals.',
'Give him an evasive answer.',
"Give your very best today. Heaven knows it's little enough.",
'Go to a movie tonight. Darkness becomes you.',
'Good day for a change of scene. Repaper the bedroom wall.',
'Good day for overcoming obstacles. Try a steeplechase.',
'Good day to let down old friends who need help.',
'Good news from afar can bring you a welcome visitor.',
'Good news. Ten weeks from Friday will be a pretty good day.',
'Hope that the day after you die is a nice day.',
"If you can read this, you're too close.",
'If you sow your wild oats, hope for a crop failure.',
'If you stand on your head, you will get footprints in your hair.',
"If your life was a horse, you'd have to shoot it.",
"In the stairway of life, you'd best take the elevator.",
"Increased knowledge will help you now. Have mate's phone bugged.",
'Is that really YOU that is reading this?',
'Is this really happening?',
'It may or may not be worthwhile, but it still has to be done.',
'It was all so different before everything changed.',
"It's all in the mind, ya know.",
'Just to have it is enough.',
'Keep emotionally active. Cater to your favorite neurosis.',
'Keep it short for pithy sake.',
'Lady Luck brings added income today. Lady friend takes it away tonight.',
'Learn to pause -- or nothing worthwhile can catch up to you.',
'Let me put it this way: today is going to be a learning experience.',
'Life is to you a dashing and bold adventure.',
'Live in a world of your own, but always welcome visitors.',
'Long life is in store for you.',
'Look afar and see the end from the beginning.',
'Love is in the offing. Be affectionate to one who adores you.',
'Make a wish, it might come true.',
'Many changes of mind and mood; do not hesitate too long.',
'Never be led astray onto the path of virtue.',
'Never commit yourself! Let someone else commit you.',
'Never give an inch!',
'Never look up when dragons fly overhead.',
'Never reveal your best argument.',
'Of course you have a purpose -- to find a purpose.',
'Perfect day for scrubbing the floor and other exciting things.',
'Questionable day. Ask somebody something.',
'Reply hazy, ask again later.',
'Save energy: be apathetic.',
'Ships are safe in harbor, but they were never meant to stay there.',
'Slow day. Practice crawling.',
'Snow Day -- stay home.',
"So this is it. We're going to die.",
"So you're back... about time...",
'Someone is speaking well of you.',
'Someone is speaking well of you. How unusual!',
'Someone whom you reject today, will reject you tomorrow.',
'Stay away from flying saucers today.',
'Stay away from hurricanes for a while.',
'Stay the curse.',
"That secret you've been guarding, isn't.",
'The time is right to make new friends.',
'There is a 20% chance of tomorrow.',
'There is a fly on your nose.',
'There was a phone call for you.',
'There will be big changes for you but you will be happy.',
'Things will be bright in P.M. A cop will shine a light in your face.',
'Think twice before speaking, but don\'t say "think think click click".',
'This will be a memorable month -- no matter how hard you try to forget it.',
'Time to be aggressive. Go after a tattooed Virgo.',
'Today is National Existential Ennui Awareness Day.',
'Today is the first day of the rest of the mess.',
'Today is the first day of the rest of your life.',
'Today is the last day of your life so far.',
'Today is the tomorrow you worried about yesterday.',
'Today is what happened to yesterday.',
"Today's weirdness is tomorrow's reason why. -- Hunter S. Thompson",
'Tomorrow will be cancelled due to lack of interest.',
'Tomorrow, you can be anywhere.',
"Tonight you will pay the wages of sin; Don't forget to leave a tip.",
"Tonight's the night: Sleep in a eucalyptus tree.",
'Truth will out this morning. (Which may really mess things up.)',
'Try the Moo Shu Pork. It is especially good today.',
'Try to get all of your posthumous medals in advance.',
'Try to have as good a life as you can under the circumstances.',
'Try to relax and enjoy the crisis. -- Ashleigh Brilliant',
'Try to value useful qualities in one who loves you.',
'Tuesday After Lunch is the cosmic time of the week.',
'Tuesday is the Wednesday of the rest of your life.',
'What happened last night can happen again.',
'Write yourself a threatening letter and pen a defiant reply.',
'You are a bundle of energy, always on the go.',
'You are a fluke of the universe; you have no right to be here.',
"You are a very redundant person, that's what kind of person you are.",
'You are always busy.',
'You are as I am with You.',
'You are capable of planning your future.',
'You are confused; but this is your normal state.',
'You are deeply attached to your friends and acquaintances.',
'You are dishonest, but never to the point of hurting a friend.',
'You are fairminded, just and loving.',
'You are fighting for survival in your own sweet and gentle way.',
'You are going to have a new love affair.',
'You are magnetic in your bearing.',
'You are not dead yet. But watch for further reports.',
'You are number 6! Who is number one?',
'You are only young once, but you can stay immature indefinitely.',
'You are sick, twisted and perverted. I like that in a person.',
'You are so boring that when I see you my feet go to sleep.',
'You are standing on my toes.',
'You are taking yourself far too seriously.',
'You are the only person to ever get this message.',
'You can rent this space for only $5 a week.',
'You could live a better life, if you had a better mind and a better body.',
'You definitely intend to start living sometime soon.',
'You dialed 5483.',
'You display the wonderful traits of charm and courtesy.',
"You don't become a failure until you're satisfied with being one.",
'You enjoy the company of other people.',
'You feel a whole lot more like you do now than you did when you used to.',
'You fill a much-needed gap.',
'You get along very well with everyone except animals and people.',
'You have a deep appreciation of the arts and music.',
'You have a deep interest in all that is artistic.',
'You have a strong appeal for members of the opposite sex.',
'You have a strong appeal for members of your own sex.',
'You have a strong desire for a home and your family interests come first.',
'You have a truly strong individuality.',
'You have an ability to sense and know higher truth.',
'You have an ambitious nature and may make a name for yourself.',
'You have an unusual equipment for success. Be sure to use it properly.',
'You have an unusual understanding of the problems of human relationships.',
'You have been selected for a secret mission.',
"You have Egyptian flu: you're going to be a mummy.",
'You have had a long-term stimulation relative to business.',
'You have literary talent that you should take pains to develop.',
'You have many friends and very few living enemies.',
'You have no real enemies.',
'You have taken yourself too seriously.',
"You have the capacity to learn from mistakes. You'll learn a lot today.",
'You have the power to influence all with whom you come in contact.',
'You like to form new friendships and make new acquaintances.',
'You look like a million dollars. All green and wrinkled.',
'You look tired.',
'You love peace.',
'You love your home and want it to be beautiful.',
'You may be recognized soon. Hide.',
'You may get an opportunity for advancement today. Watch it!',
'You need more time; and you probably always will.',
'You never hesitate to tackle the most difficult problems.',
'You now have Asian Flu.',
'You own a dog, but you can only feed a cat.',
'You possess a mind not merely twisted, but actually sprained.',
'You recoil from the crude; you tend naturally toward the exquisite.',
'You seek to shield those you love and you like the role of the provider.',
'You shall be rewarded for a dastardly deed.',
'You should go home.',
'You single-handedly fought your way into this hopeless mess.',
'You teach best what you most need to learn.',
'You too can wear a nose mitten.',
'You will always get the greatest recognition for the job you least like.',
'You will always have good luck in your personal affairs.',
'You will attract cultured and artistic people to your home.',
'You will be a winner today. Pick a fight with a four-year-old.',
'You will be advanced socially, without any special effort on your part.',
'You will be aided greatly by a person whom you thought to be unimportant.',
'You will be audited by the Internal Revenue Service.',
'You will be awarded a medal for disregarding safety in saving someone.',
'You will be awarded some great honor.',
'You will be awarded the Nobel Peace Prize... posthumously.',
'You will be called upon to help a friend in trouble.',
'You will be divorced within a year.',
'You will be given a post of trust and responsibility.',
'You will be held hostage by a radical group.',
'You will be married within a year, and divorced within two.',
'You will be married within a year.',
'You will be misunderstood by everyone.',
'You will be recognized and honored as a community leader.',
'You will be reincarnated as a toad; and you will be much happier.',
'You will be run over by a beer truck.',
'You will be run over by a bus.',
'You will be singled out for promotion in your work.',
'You will be successful in love.',
'You will be surprised by a loud noise.',
'You will be surrounded by luxury.',
'You will be the last person to buy a Chrysler.',
'You will be the victim of a bizarre joke.',
'You will be Told about it Tomorrow. Go Home and Prepare Thyself.',
'You will be traveling and coming into a fortune.',
'You will be winged by an anti-aircraft battery.',
"You will become rich and famous unless you don't.",
'You will contract a rare disease.',
'You will engage in a profitable business activity.',
'You will experience a strong urge to do good; but it will pass.',
'You will feel hungry again in another hour.',
'You will forget that you ever knew me.',
'You will gain money by a fattening action.',
'You will gain money by a speculation or lottery.',
'You will gain money by an illegal action.',
'You will gain money by an immoral action.',
'You will get what you deserve.',
'You will give someone a piece of your mind, which you can ill afford.',
'You will have a long and boring life.',
'You will have a long and unpleasant discussion with your supervisor.',
'You will have domestic happiness and faithful friends.',
'You will have good luck and overcome many hardships.',
'You will have long and healthy life.',
'You will hear good news from one you thought unfriendly to you.',
'You will inherit millions of dollars.',
'You will inherit some money or a small piece of land.',
'You will live a long, healthy, happy life and make bags of money.',
'You will live to see your grandchildren.',
'You will never know hunger.',
'You will not be elected to public office this year.',
'You will obey or molten silver will be poured into your ears.',
'You will outgrow your usefulness.',
'You will overcome the attacks of jealous associates.',
'You will pass away very quickly.',
'You will pioneer the first Martian colony.',
'You will probably marry after a very brief courtship.',
'You will reach the highest possible point in your business or profession.',
'You will receive a legacy which will place you above want.',
'You will remember something that you should not have forgotten.',
'You will soon forget this.',
'You will soon meet a person who will play an important role in your life.',
'You will step on the night soil of many countries.',
'You will triumph over your enemy.',
'You will visit the Dung Pits of Glive soon.',
'You will win success in whatever calling you adopt.',
"You will wish you hadn't.",
"You work very hard. Don't try to think as well.",
"You would if you could but you can't so you won't.",
"You'd like to do it instantaneously, but that's too slow.",
"You'll be called to a post requiring ability in handling groups of people.",
"You'll be sorry...",
"You'll feel much better once you've given up hope.",
"You'll never be the man your mother was!",
"You're a card which will have to be dealt with.",
"You're almost as happy as you think you are.",
"You're at the end of the road again.",
"You're being followed. Cut out the hanky-panky for a few days.",
"You're not my type. For that matter, you're not even my species!!!",
"You're ugly and your mother dresses you funny.",
"You're working under a slight handicap. You happen to be human.",
"You've been leading a dog's life. Stay off the furniture.",
'Your aim is high and to the right.',
'Your aims are high, and you are capable of much.',
'Your boss climbed the corporate ladder, wrong by wrong.',
'Your boss is a few sandwiches short of a picnic.',
'Your boyfriend takes chocolate from strangers.',
'Your business will assume vast proportions.',
'Your business will go through a period of considerable expansion.',
'Your depth of comprehension may tend to make you lax in worldly ways.',
'Your domestic life may be harmonious.',
"Your fly might be open (but don't check it just now).",
'Your goose is cooked. (Your current chick is burned up too!)',
'Your heart is pure, and your mind clear, and your soul devout.',
'Your ignorance cramps my conversation.',
'Your life would be very empty if you had nothing to regret.',
'Your love life will be happy and harmonious.',
'Your love life will be... interesting.',
'Your lover will never wish to leave you.',
'Your lucky color has faded.',
'Your lucky number has been disconnected.',
'Your lucky number is 3552664958674928. Watch for it everywhere.',
'Your nature demands love and your happiness depends on it.',
'Your object is to save the world, while still leading a pleasant life.',
'Your own qualities will help prevent your advancement in the world.',
'Your present plans will be successful.',
'Your reasoning powers are good, and you are a fairly good planner.',
'Your sister swims out to meet troop ships.',
'Your society will be sought by people of taste and refinement.',
'Your step will soil many countries.',
'Your supervisor is thinking about you.',
'Your talents will be recognized and suitably rewarded.',
'Your true value depends entirely on what you are compared with.'
]
|
mitsuhiko/zine
|
external-plugins/eric_the_fish/fortunes.py
|
Python
|
bsd-3-clause
| 20,673
|
[
"VisIt",
"exciting"
] |
539e5deebc76041f623e66547016bcc26fb1fa93032035ca91ef7cd3a55fc2ca
|
#!/usr/bin/python
#------------------------------------------------------------------------------
# Name: getThresh.py
# Author: Ra Inta, 20150716
# Last Modified: 20150716
# This is a pared-down version of lookThresh.py. It gives an estimate of the
# (1 - alpha confidence) 2F threshold for a given number of templates, i.e.
# the 'further look' threshold for the maximum 2F value from a directed CW
# (continuous gravitational wave) search.
# NOTE(review): the claim that this script also creates a top_jobs.txt list
# of candidates and generates figures in a 'figures' folder appears to be
# left over from lookThresh.py -- confirm, the code below does not do this.
#------------------------------------------------------------------------------
from scipy.stats import chi2
import numpy as np
from math import pow
import os
from sys import argv
# the only input parameter is the total number of templates
Ntot = float(argv[1])
if argv[2]:
max2F = float(argv[2])
if argv[3]:
effective_ratio = float(argv[3])
else:
effective_ratio = 1.0
#Confidence level
alpha=0.05
# Kludge approach: specify support for 2F
# TODO come up with a way to do this without a priori support
min2Fthresh = 20
max2Fthresh = 400
############################################################
#1) Find further look threshold by evaluating where [ CDF(Chi2(Neff,4))== \alpha ] for confidence level \alpha
############################################################
# Simple theoretical probability the overall max2F came from Gaussian noise
def prob(N, max2F):
    """Probability measure for the loudest event: for N independent templates
    in Gaussian noise, N * p(2F) * P(2F)**N with 2F chi-squared, 4 d.o.f."""
    cdf_at_max = chi2.cdf(max2F, 4)
    tail = 1 - cdf_at_max
    return N * tail * cdf_at_max ** N
# If a loudest 2F value was supplied, precompute the strings reporting it and
# its single-template / loudest-event probabilities in Gaussian noise.
# NOTE(review): max2F is only bound when argv[2] was given -- if it was not,
# this line raises NameError (see the argument-parsing block above).
if max2F:
    # Single-trial false-alarm probability for this 2F value.
    P2Fmax = 1 - chi2.cdf(max2F, 4)
    # Loudest-event probability over the effective number of templates.
    Pval = prob( Ntot * effective_ratio, max2F)
    max2F_string = 'Maximum 2F value overall: 2F_max=' + str(max2F) + '\n'
    prob2F_string = 'Probability of this in Gaussian noise: P(2F_max)=' + str(Pval) +'\n'
else:
    # Nothing to report.
    max2F_string = ''
    prob2F_string = ''
############################################################
# Find x, where p(x) is first expected to be > 95%
############################################################
def getLookThresh(Ntot, min2Fthresh, max2Fthresh ):
    """Return the 'further look' 2F threshold: the smallest 2F value past the
    peak of the loudest-event probability curve where that probability first
    drops below the module-level confidence level ``alpha``.

    Ntot -- (effective) number of templates searched.
    min2Fthresh / max2Fthresh -- a priori support over which 2F is scanned.
    TODO get rid of explicit domain support for 2F range."""
    x2F = np.arange(min2Fthresh, max2Fthresh, 0.1)
    probVector = np.array([prob(Ntot, x) for x in x2F])
    # Only evaluate the distribution after its maximum: values below alpha on
    # the rising side are not the upper tail we care about.  np.argmax picks
    # the first maximum, matching the original np.where(... == max(...))[0][0].
    peakIdx = int(np.argmax(probVector))
    # Indices (relative to the peak) where the probability has fallen below
    # alpha; min() raises on an empty result, mirroring the original
    # np.min-of-empty failure mode when alpha is never reached in the support.
    belowAlpha = np.flatnonzero(probVector[peakIdx:] < alpha)
    return x2F[peakIdx + int(belowAlpha.min())]
# Evaluate the further-look threshold for the effective number of templates.
# (An older inline copy of getLookThresh's body, previously kept here as
# commented-out code, has been removed.)
lookThresh = getLookThresh(Ntot*effective_ratio, min2Fthresh, max2Fthresh)
############################################################
# 2) Display everything
############################################################
print("Total number of templates: " + str( Ntot ) )
print("Effective ratio: " + str(effective_ratio) )
print(max2F_string + prob2F_string)
# Typo fix: "theshold" -> "threshold" in the user-facing output line.
print("Further look threshold: " + str( lookThresh ) )
############################################################
# End of getThresh.py
############################################################
|
NotAFakeRa/Front_end_CW_searches
|
getThresh.py
|
Python
|
mit
| 3,741
|
[
"Gaussian"
] |
02c8d71cb009080f5431970bc47dedc06d62a53a1745497eadbd583cc6a7993e
|
#!/usr/local/bin/python
# coding: utf-8
'''
PocketPidge.py - Produce PDF pocket diaries for users.
'''
import datetime
from reportlab.pdfgen.canvas import Canvas
from reportlab.lib.pagesizes import A4
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
from reportlab.lib.enums import TA_JUSTIFY, TA_CENTER, TA_RIGHT
from reportlab.platypus import Paragraph, KeepTogether, BaseDocTemplate, PageTemplate, FrameBreak, Macro, Spacer, Image, PageBreak, Table, TableStyle
from reportlab.platypus import Frame as PlatypusFrame
from reportlab.platypus.flowables import Flowable
from reportlab.lib import colors
from PIL import Image as ImageHandler
from django.core.management import setup_environ
import settings
setup_environ(settings)
from MyPidge.Users.models import User, Membership
from MyPidge.Events.models import Observance, Event, EventTime
from MyPidge.Groups.models import Group
def trunc(string, maxlen):
    '''
    Return a version of the input string which is of a maximum length and ends with "..."
    '''
    if len(string) <= maxlen:
        return string
    return string[:maxlen - 3] + "..."
class Line(Flowable):
    '''
    A short horizontal rule flowable.
    '''
    def __init__(self, xoffset=0):
        self.xoffset = xoffset
    def wrap(self, *args):
        # Reports zero height; width equals the configured x offset.
        return (self.xoffset, 0)
    def draw(self):
        # Draw a thin black line on the frame's canvas.
        c = self.canv
        c.setLineWidth(1)
        c.setFillColorRGB(0, 0, 0)
        c.setStrokeColorRGB(0, 0, 0)
        c.line(5, 5, 80, 5)
class TheMap(Flowable):
    '''
    Image of Cambridge Map.
    '''
    def __init__(self, xoffset=0):
        self.xoffset = xoffset
    def wrap(self, *args):
        # Zero-height flowable; reported width is the configured x offset.
        return (self.xoffset, 0)
    def draw(self):
        # Place the pre-rotated city-centre map image relative to the frame.
        self.canv.drawImage("centre2_8000_rotated.jpg", -10,-260, width=400,height=280)
# Create the styles for the document
# Large banner style used for the MyPidge logo lines.
logoStyle = ParagraphStyle('normal')
logoStyle.fontSize=16
logoStyle.leading=15
logoStyle.rightindent=5
logoStyle.backColor='grey'
#logoStyle.textColor='white'
logoStyle.borderWidth=5
logoStyle.borderColor='grey'
# Right-aligned style (e.g. the "PocketPidge" strapline).
rightStyle = ParagraphStyle('normal')
rightStyle.alignment = TA_RIGHT
rightStyle.spaceBefore = 10
# Default justified body text.
paraStyle = ParagraphStyle('normal')
paraStyle.spaceAfter = 10
paraStyle.alignment = TA_JUSTIFY
# Medium (8pt) justified text for dense listings.
medStyle = ParagraphStyle('normal')
medStyle.fontSize = 8
medStyle.leading = 8
medStyle.spaceAfter = 10
medStyle.alignment = TA_JUSTIFY
# Small (6pt) justified text for per-event lines.
smallStyle = ParagraphStyle('normal')
smallStyle.fontSize = 6
smallStyle.leading = 6
smallStyle.spaceAfter = 0
smallStyle.alignment = TA_JUSTIFY
# Centred body text.
centreNormalStyle = ParagraphStyle('normal')
centreNormalStyle.alignment = TA_CENTER
centreNormalStyle.spaceAfter = 10
# Stock ReportLab heading styles; the big heading is centred in place.
style = getSampleStyleSheet()
headlineStyle = style["Heading2"]
centreStyle = style["Heading1"]
centreStyle.alignment = TA_CENTER
class Frame:
    '''
    A placeholder for the contents of one side of a page.
    '''
    def __init__(self, user):
        '''Remember the owning user and start with an empty flowable list.'''
        self.user = user
        self.items = []
    def makeItems(self):
        '''Subclass hook: populate self.items with Flowables.'''
        pass
    def makeFrame(self):
        '''Build the items and return them wrapped in a KeepTogether.'''
        self.makeItems()
        return KeepTogether(self.items)
class BasicFrame(Frame):
    '''
    Frame with a heading and line.
    '''
    heading = "Hey"
    def makeItems(self):
        '''Start the frame with the pidge-logo heading and a horizontal rule.'''
        banner = "<img src='pidge.png' width=15 height=10 valign=middle /> " + self.heading
        self.items.extend([Paragraph(banner, headlineStyle), Line()])
class FrontCover(Frame):
    '''
    Front page frame: branding, owner's name and (if found) college crest.
    '''
    def makeItems(self):
        # Branding block at the top of the cover.
        self.items.append(Spacer(60,60))
        self.items.append(Paragraph("<img src='pidge.png' width=88 height=57 valign=middle /> MyPidge.com", logoStyle))
        self.items.append(Paragraph("PocketPidge", rightStyle))
        self.items.append(Spacer(30,30))
        self.items.append(Paragraph("This PocketPidge belongs to:", centreNormalStyle))
        self.items.append(Paragraph(self.user.firstname + ' ' + self.user.lastname, centreStyle))
        try:
            # First membership in a group of category "College", if any.
            colleges = Membership.objects.filter(group__category__name="College", user=self.user)
            self.college = colleges[0]
        # NOTE(review): bare except silently skips the crest on ANY failure
        # (no membership, DB error, ...); consider narrowing to IndexError.
        except:
            pass
        else:
            # Rewrite the web logo URL into a local filesystem path.
            imageSrc = "../I" + self.college.group.logo_url[2:]
            print imageSrc
            collegeCrest = ImageHandler.open(imageSrc)
            # Scale the crest down to fit a 25x25 box, preserving aspect ratio.
            if (collegeCrest.size[0] > 25) or (collegeCrest.size[1] > 25):
                aspect = float(collegeCrest.size[1])/float(collegeCrest.size[0])
                if (aspect>1):
                    newheight = 25
                    newwidth = int(collegeCrest.size[0] * (float(newheight) / float(collegeCrest.size[1])))
                else:
                    newwidth = 25
                    newheight = int(collegeCrest.size[1] * (float(newwidth) / float(collegeCrest.size[0])))
            # NOTE(review): newwidth/newheight are unbound when the crest is
            # already <= 25x25 -- the next line raises NameError in that case.
            self.items.append(Paragraph("<img src='" + imageSrc + "' width=" + str(newwidth) + " height=" + str(newheight) + " valign=middle /> " + self.college.group.official_name, centreStyle))
class BackCover(BasicFrame):
    '''Back cover: shows only the closing heading.'''
    heading = "All Good Things..."
class LocationsOfInterest(BasicFrame):
    '''Heading-only page reserved for locations of interest.'''
    heading = "Locations of Interest"
class FutureAppointmentsFirst(BasicFrame):
    '''
    Future Appointments frame - front side.
    '''
    heading = "Future Appointments"
    def makeItems(self):
        BasicFrame.makeItems(self)
        # Hard-coded Cambridge term dates for Lent/Easter 2009; the full-term
        # boundaries are emphasised in bold.
        self.items.append(Paragraph("""
Mon, 5th Jan 2009 - Lent Term Begins<br />
<b>Tue, 13th Jan 2009 - Full Lent Term Begins<br />
Fri, 13th Mar 2009 - Full Lent Term Ends</b><br />
Wed, 25th Mar 2009 - Lent Term Ends<br /><br />
Fri, 10th Apr 2009 - Easter Term Begins<br />
<b>Tue, 21st Apr 2009 - Full Easter Term Begins<br />
Fri, 12th Jun 2009 - Full Easter Term Ends</b><br />
Thu, 18th Jun 2009 - Easter Term Ends
""",medStyle))
class FutureAppointments(BasicFrame):
    '''
    Future Appointments frame - back side, intentionally left blank.
    '''
    heading = "Future Appointments"
class CambridgeMap(BasicFrame):
    '''
    Cambridge map page (the map flowable itself is currently disabled).
    '''
    heading = "Cambridge Map"
    def makeItems(self):
        BasicFrame.makeItems(self)
        # Map drawing disabled; TheMap flowable kept for future use.
        #self.items.append(TheMap())
class UserGuide(BasicFrame):
    '''
    Introduction to the PocketPidge.
    '''
    heading = "PocketPidge User Guide"
    def makeItems(self):
        BasicFrame.makeItems(self)
        # Static guide text pointing readers at the companion web site.
        self.items.append(Paragraph("""<b>Further Event Information</b><br />
It's impossible to print full details in, so we've designed this diary to complement the web site - visit <b>MyPidge.com/Events</b> for further details [locations, full descriptions] on any of the events printed herein. The web site also has many more events than we could print here.<br /><br />
<b>Questions? Comments!</b><br />Our goal is to connect you with Cambridge. Please help us to do that, and forward your questions, comments and ideas to <b>thePidge@myPidge.com</b>.""", paraStyle))
class CareersInfo(BasicFrame):
    '''
    Careers spam.
    '''
    heading = "Career Opportunities"
    def makeItems(self):
        BasicFrame.makeItems(self)
        # Two sponsored notices separated by a rule; the unicode literals are
        # encoded to UTF-8 byte strings for the pound-sign characters
        # (Python 2 string handling).
        self.items.append(Paragraph(u"<b>Looking at Grad Recruiters?</b><br />Save yourself time, and request information automatically from any or all of the employers featured in <i>The Times' Top 100 Graduate Employers</i>.<br /><br />Just visit <b>Top100GraduateEmployers.co.uk</b> now. Every completed entry will be entered into a special prize draw to win £5,000, or one of 50 Nintendo DS Lites. ".encode("utf-8"), medStyle))
        self.items.append(Line())
        self.items.append(Paragraph(u"<b>Final Year Undergrad? Want Some Spare Cash?</b><br/>If you're actively seeking graduate employment I want to hear from you.<br /><br />You will be <b>well-paid</b> [~£10/hour] for an hour or two of your time filling out surveys of your opinions on graduate recruitment.<br /><br />I can involve you in several such surveys, but please email me [Fergus] soon as there are a limited number of places: <b>frf21@cam.ac.uk</b>.".encode("utf-8"), medStyle))
class UsefulInformation(BasicFrame):
    '''Static page of welfare phone numbers and local services.'''
    heading = "Useful Information"
    def makeItems(self):
        BasicFrame.makeItems(self)
        self.items.append(Paragraph("""
<b>Welfare</b><br />
Emergency Services - <b>999</b> or <b>112</b><br /><i>For use in emergency situations. 24hrs.</i><br /><br />
Cambridgeshire Police - <b>0845 456 456 4</b><br /><i>For all non-emergencies. 24hrs. </i><br /><br />
Samaritans - <b>08457 90 90 90</b><br /><i>Confidential listening service. 24hrs.</i><br /><br />
NHS Direct - <b>0845 4647</b><br /><i>Medical help and advice. 24hrs.<br />Call 999 in an emergency.</i><br /><br />
<b>Local Services</b><br />
Panther Taxis - <b>01223 715 715</b>""", medStyle))
class ToDo(BasicFrame):
    '''To-do page; points readers at the printable web version.'''
    heading = "To Do Items"
    def makeItems(self):
        BasicFrame.makeItems(self)
        self.items.append(Paragraph("Print more @ myPidge.com/Pocket", paraStyle))
class Notes(BasicFrame):
    '''Blank notes page with a hint on adding more pages.'''
    heading = "Notes"
    def makeItems(self):
        BasicFrame.makeItems(self)
        self.items.append(Paragraph("To add more pages, just cut up A4 paper to A7-sized pieces, and hole-punch.", smallStyle))
class Groups(BasicFrame):
    '''Lists the user's group memberships and the abbreviations used for them.'''
    heading = "Your Groups"
    def makeItems(self):
        BasicFrame.makeItems(self)
        self.items.append(Paragraph("Your groups, and the abbreviations we've used in listing events. ", paraStyle))
        # One row per distinct group the user belongs to, ordered by name.
        userAssocs = Membership.objects.filter(user=self.user).values('group__official_name', 'group__friendly_name').order_by('group__official_name').distinct()
        for eachAssoc in userAssocs:
            # Find a suitable moniker for society
            uniqueName = ""
            if eachAssoc['group__friendly_name']:
                uniqueName = eachAssoc['group__friendly_name']
            else:
                uniqueName = eachAssoc['group__official_name']
            # talks.cam listings are displayed simply as "Talk".
            if uniqueName == "talks.cam": uniqueName = "Talk"
            self.items.append(Paragraph(eachAssoc['group__official_name'] + " - abbreviated as: <b>" + trunc(uniqueName,15) + "</b>", smallStyle))
class WeekToView(BasicFrame):
    '''
    Create a diary page: half a week per side, with up to three events per
    day chosen from the user's groups, interests and talks.cam.
    '''
    heading = "Week"
    # Date of the first Thursday of Week 1 -- all week offsets are relative
    # to this. Hard-coded for the 2008/09 academic year.
    Week1Start = datetime.date(2008, 10, 9)
    daydifference = datetime.timedelta(days=1)
    def __init__(self, user, week, isEnd=False):
        '''user: owner; week: 1-based week number; isEnd: True for the
        back half (last 3 days) of the week, False for the first 4 days.'''
        BasicFrame.__init__(self, user)
        self.week = week
        self.startDate = self.Week1Start + self.daydifference*((week-1)*7)
        self.isEnd = isEnd
        if isEnd:
            # Back side starts 4 days into the week.
            self.startDate = self.startDate + self.daydifference*4
        if week > 8:
            # Weeks past full term are labelled "Week 8+n".
            self.heading = "Week 8+" + str(week-8)
        else:
            self.heading = "Week " + str(week)
    def getObservanceText(self, theDate):
        ''' Get day's observance (last matching one wins, bolded). '''
        observanceText = ""
        daysObservances = Observance.objects.filter(date=theDate)
        for myObs in daysObservances:
            observanceText = "<b>" + myObs.title + "</b>"
        return observanceText
    def makeItems(self):
        ''' Create the page. '''
        BasicFrame.makeItems(self)
        import random
        # 4 days on the front side, 3 on the back side.
        rangeHeight=4;
        if self.isEnd: rangeHeight=3
        for daysinweek in range(0, rangeHeight):
            timeinquestion = self.startDate + self.daydifference*daysinweek
            dayText = Paragraph("<i>%s</i>" % timeinquestion.strftime("%a, %d %b"), paraStyle)
            obsText = Paragraph("<b>%s</b>" % self.getObservanceText(timeinquestion), smallStyle)
            # Get events
            eventsText = []
            allEvents = EventTime.objects.filter(start__gte=timeinquestion, start__lt=(timeinquestion+self.daydifference))
            # Maximum of three, spilling down this priority list:
            eventSet = []
            # NOTE(review): allowedevents is never read -- the limit of 3 is
            # hard-coded throughout the selection logic below.
            allowedevents = 3
            # 1) Any events for groups they're into
            myGroups = Membership.objects.filter(user=self.user).values_list('group', flat=True).distinct()
            myGroupEvents = []
            for eachGroup in myGroups:
                group = Group.objects.get(id=eachGroup)
                groupEvents = allEvents.filter(event__group = group).values_list('event', flat=True).distinct()
                myGroupEvents.extend(groupEvents)
            # Randomly sample 3 when the user's groups have more than 3 events.
            if len(myGroupEvents) > 3:
                random.shuffle(myGroupEvents)
                eventSet.extend(myGroupEvents[0:3])
            else:
                eventSet.extend(myGroupEvents)
            # 2) Any events with categories they're into, except talks.cam
            # Get full list of eventtags they like
            # For each tag, find events and append to list
            # Pop up to 3 randomly whilst checking the event ids are unique
            if len(eventSet) < 3:
                userTags = self.user.interests.all()
                taggedEvents = []
                # Aprise events not including talks.cam events
                excludedEvents = allEvents.exclude(event__group__friendly_name__exact = "talks.cam")
                for userTag in userTags:
                    tagEvents = excludedEvents.filter(event__tags=userTag)
                    for tagEvent in tagEvents:
                        # This does not return unique events. If an event ticks more than one of their boxes, we want a higher probability that it will show up
                        taggedEvents.append(tagEvent.event.id)
                random.shuffle(taggedEvents)
                for eachEvent in taggedEvents:
                    # Add it to our list if it's not already in the list
                    # (for/else: the else runs only when no duplicate broke out).
                    for item in eventSet:
                        if item == eachEvent: break
                    else:
                        eventSet.append(eachEvent)
                    if len(eventSet) == 3:
                        break
            # 3) Talks.cam stuff if they like talks, broadcastable
            # Get talks that are labelled as broadcastable by talks.cam
            # If too many, pop randomly
            if len(eventSet) < 3:
                if self.user.interests.filter(name__exact='Academic Events / Talks').count() == 1:
                    # and bit set to publishable
                    talkscamEvents = allEvents.filter(event__group__friendly_name__exact = "talks.cam").distinct()
                    talkSet = []
                    for talk in talkscamEvents:
                        talkSet.append(talk.event.id)
                    random.shuffle(talkSet)
                    for eachTalk in talkSet:
                        eventSet.append(eachTalk)
                        if len(eventSet) == 3: break
            # Sort based on earliest start time of each event, and remove duplicates
            allEventTimes = allEvents.filter(event__id__in=eventSet).order_by("start")
            eventSetTracker = {}
            eventSetSorted = []
            for eventTime in allEventTimes:
                currentEventID = eventTime.event.id
                if not eventSetTracker.has_key(currentEventID):
                    eventSetTracker[currentEventID] = True
                    eventSetSorted.append(currentEventID)
            # Vary length of title based on the number of events being shown
            totalEvents = len(eventSetSorted)
            eventDescriptionLength = [0, 120, 60, 40]
            thisDescriptionLength = eventDescriptionLength[totalEvents]
            for eventID in eventSetSorted:
                event = Event.objects.get(id=eventID)
                # Print all times of that event on that day
                allEventTimes = allEvents.filter(event=event).order_by("start")
                eventTimes = ""
                for eachEventTime in allEventTimes:
                    if eventTimes:
                        eventTimes += " & " + eachEventTime.start.strftime("%H.%M")
                    else:
                        eventTimes = eachEventTime.start.strftime("%H.%M")
                        firstEventTime = eachEventTime.start
                # Find a suitable moniker for society
                uniqueName = ""
                if event.group.friendly_name:
                    uniqueName = event.group.friendly_name
                else:
                    uniqueName = event.group.official_name
                if uniqueName == "talks.cam": uniqueName = "Talk"
                # Use abbreviated name
                eventHost = trunc(uniqueName, 15)
                eventLocation = trunc(event.location_additional.split("|")[0].title(), 20)
                eventTitle = event.title.title()
                if eventLocation:
                    eventText = "<b>%s</b> %s - %s - <i>%s</i>" % (eventTimes, eventHost, trunc(eventTitle, thisDescriptionLength), eventLocation)
                else:
                    # No location: let the title run 20 characters longer.
                    eventText = "<b>%s</b> %s - %s" % (eventTimes, eventHost, trunc(eventTitle, thisDescriptionLength+20))
                eventsText.append(Paragraph(eventText,smallStyle))
            # Print date next to observance
            data= [[dayText, obsText]]
            t = Table(data, colWidths=[65,135], rowHeights=[10], style=None, splitByRow=1, repeatRows=0, repeatCols=0)
            t.setStyle(TableStyle([('VALIGN',(0,0),(1,0),"TOP")]))
            self.items.append(t)
            # Print events on right
            data= [[None, eventsText]]
            t = Table(data, colWidths=[100,100], rowHeights=[50], style=None, splitByRow=1, repeatRows=0, repeatCols=0)
            t.setStyle(TableStyle([('VALIGN',(0,0),(1,0),"TOP")]))
            self.items.append(t)
        # Back side gets a notes area; week 2 additionally carries an advert.
        if self.isEnd:
            self.items.append(Line())
            self.items.append(Paragraph("<i>Notes</i>", paraStyle))
        if self.week == 2:
            self.items.append(Paragraph("Looking for graduate work? Get the top employers to send you the information you want and save yourself time. Visit <b>Top100GraduateEmployers.co.uk</b> now.", medStyle))
class PromoFront(BasicFrame):
    '''
    Selling PocketPidge.
    '''
    heading = "Free Pocket Diary"
    def makeItems(self):
        BasicFrame.makeItems(self)
        # Bullet list of selling points, logo block, then the call to action.
        self.items.append(Paragraph("<br />- Events Prefilled to Your Interests<br />- Notepages for Important Stuff <br />- Handy Maps of Town<br />- Easily Fits in Your Pocket<br />- Recyclable!<br />", paraStyle))
        self.items.append(Paragraph("<br /><img src='pidge.png' width=88 height=57 valign=middle /> MyPidge.com", logoStyle))
        self.items.append(Paragraph("PocketPidge", rightStyle))
        self.items.append(Paragraph("<br/><br />Get yours at <b>MyPidge.com/Pocket</b>", paraStyle))
        self.items.append(Paragraph("[If you have already applied, please pass this on to a friend]", paraStyle))
class PromoBack(BasicFrame):
    '''
    Selling MyPidge Homepage.
    '''
    heading = "MyPidge.com/Homepage"
    def makeItems(self):
        BasicFrame.makeItems(self)
        # Feature list, screenshot, call to action.
        self.items.append(Paragraph("- <b>One-Click</b> to Your Favourite Sites<br />- Search Google, Wikipedia, And More<br />- <b>Boatie</b>? Get the River Flag Status<br />- Discover Cambridge Events/Societies<br />- Available at <b>Any Computer You Use</b>", paraStyle))
        self.items.append(Image("start2.png", width=155, height=120))
        self.items.append(Paragraph("<br/>Try it out at <b>MyPidge.com/Homepage</b>", paraStyle))
class FormFront(BasicFrame):
    '''
    Front side of paper application form for PocketPidge.
    '''
    heading = "Get Your MyPidge.com PocketPidge"
    def makeItems(self):
        BasicFrame.makeItems(self)
        self.items.append(Spacer(10,10))
        self.items.append(Paragraph("It's a <b>free personal organiser</b>, customised for you, and delivered to your college in a few days' time. ", paraStyle))
        self.items.append(Paragraph("Your details will be kept safe and only used to provide you with your <b>PocketPidge</b> and to customise <b>MyPidge.com</b> for you - both student-run non-profit services. ", paraStyle))
        self.items.append(Paragraph("<b>PLEASE COMPLETE ALL, IN BLOCK CAPITALS</b>", centreNormalStyle))
        # Hand-filled table: one labelled row per personal detail.
        tabledata = [
            ["FIRST NAME:", ""],
            ["SURNAME:", ""],
            ["CAMBRIDGE EMAIL:", "...........@CAM.AC.UK"],
            ["COLLEGE:", ""],
            ["SUBJECT:", ""],
            ["YEAR:", ""],
            ["BOATIE?", "YES / NO"],
        ]
        # Two 140pt columns, 25pt rows.
        t = Table(tabledata, 2*[140], 1*25)
        t.setStyle(TableStyle([('ALIGN',(0,0),(0,6),'LEFT'),
                               ('ALIGN',(1,0),(1,6),'LEFT'),
                               ('VALIGN',(0,0),(1,6),'MIDDLE'),
                               ]))
        self.items.append(t)
        self.items.append(Spacer(30,30))
        self.items.append(Paragraph("<b>PLEASE TURN OVER</b><br/>Just two more questions.", centreNormalStyle))
class FormBack(BasicFrame):
    '''
    Back side of paper application form for PocketPidge.
    '''
    heading = "Get Your MyPidge.com PocketPidge"
    def makeItems(self):
        # NOTE(review): unlike the other frames, this does not call
        # BasicFrame.makeItems, so no heading/rule is rendered on this side --
        # presumably intentional for the form's reverse; confirm.
        self.items.append(Paragraph("<b>Tick Your Interests</b><br/>...and you'll get relevant events <u>printed into your PocketPidge for you</u>! ", paraStyle))
        # Two-column checkbox grid of interest categories.
        tabledata = [
            ["[ ] Theatre - Watching", "[ ] Theatre - Being Involved In"],
            ["[ ] Gigs / Contemporary Music", "[ ] Classical Music"],
            ["[ ] Christian Events", "[ ] Careers Events"],
            ["[ ] Cinema", "[ ] Club Nights"],
            ["[ ] LesBiGayTrans", "[ ] Politics"],
            ["[ ] Academic Events / Lectures", "[ ] International Events"],
            ["[ ] Sport", "[ ] Volunteering"],
        ]
        t = Table(tabledata, 2*[140], 1*20)
        t.setStyle(TableStyle([('ALIGN',(0,0),(1,6),'LEFT'),
                               ('VALIGN',(0,0),(1,6),'TOP'),
                               ]))
        self.items.append(t)
        self.items.append(Spacer(20,20))
        self.items.append(Paragraph("<b>Your Groups and Societies</b><br/>Please list any societies you are a member of or interested in. ", paraStyle))
        self.items.append(Spacer(70,70))
        self.items.append(Paragraph("Would you be interested in helping to run and develop MyPidge?<br /> YES / NO", paraStyle))
        self.items.append(Paragraph("That's it - just hand the form back for your free sweet. ", paraStyle))
def makeMapCutOut():
    ''' Make PDF of PocketPidge's generic Map insert: 8 careers/info sides
    backed by 8 map sides, laid out 4x2 per landscape A4 sheet. '''
    posts = []
    # Landscape A4: swap the portrait dimensions.
    pageHeight, pageWidth = A4
    A4_Landscape = [pageWidth, pageHeight]
    # NOTE(review): TownMapLeft/TownMapRight are not defined anywhere in this
    # module -- calling this function raises NameError; presumably these
    # classes existed in an earlier revision. Confirm before use.
    formPages = (
        CareersInfo(None),
        UsefulInformation(None),
        CareersInfo(None),
        UsefulInformation(None),
        CareersInfo(None),
        UsefulInformation(None),
        CareersInfo(None),
        UsefulInformation(None),
        TownMapLeft(None),
        TownMapRight(None),
        TownMapLeft(None),
        TownMapRight(None),
        TownMapLeft(None),
        TownMapRight(None),
        TownMapLeft(None),
        TownMapRight(None),
        )
    for (offset, frame) in enumerate(formPages):
        posts.append(frame.makeFrame())
        if len(formPages)==(offset+1):
            # Break page at end of a PocketPidge
            # But need to do this twice if on an odd page
            posts.append(PageBreak())
        else:
            posts.append(FrameBreak())
    # Build File
    document = BaseDocTemplate("map.pdf", pagesize=A4_Landscape)
    template = PageTemplate(frames=createFrames(pageWidth, pageHeight, 4, 2))
    document.addPageTemplates(template)
    document.build(posts)
def makePocketPidge(filename):
    ''' Make PDF of PocketPidges: batches of users' diaries interleaved so
    that duplex printing on the short edge pairs each frame with its back.
    Marks each processed user's pocketpidge timestamp. (Python 2 only:
    uses iterator .next(), print statements and dict.has_key.) '''
    import datetime
    # Maps absolute frame slot number -> frame, filled out of order below.
    tempposts = {}
    posts = []
    # Landscape A4: swap the portrait dimensions.
    pageHeight, pageWidth = A4
    A4_Landscape = [pageWidth, pageHeight]
    # 4 users per sheet-set, 5 sets per run, 24 frames (12 sides) per user.
    totalusers = 4
    setsofusers = 5
    framesperuser = 24
    # Users who have not yet received a PocketPidge.
    myUsers = User.objects.filter(pocketpidge__isnull=True).order_by("lastname").distinct()
    if len(myUsers) >= (totalusers*setsofusers):
        myUsers = myUsers[:(totalusers*setsofusers)]
    # 4x2 frames per side, two sides per sheet.
    framesperpage = 16
    # Index of back slot given duplexing on short edge, and sweeping left to right, then top to bottom
    A4BackMap = {
        1: '12',
        2: '11',
        3: '10',
        4: '9',
        5: '16',
        6: '15',
        7: '14',
        8: '13'}
    enumerateUsers = enumerate(myUsers)
    for batch in range(0,setsofusers):
        currentuser = 0
        for subbatch in range(0,totalusers):
            try:
                (userNumber, user) = enumerateUsers.next()
            except StopIteration:
                # Ran out of users part-way through a batch.
                break
            print user.__unicode__()
            startPage = (currentuser * 2) + 1
            startIndex = batch*totalusers*framesperuser
            # Each inner tuple is (front-left, front-right-back, second-left,
            # second-right-back) for one fold of the booklet.
            # NOTE(review): WeekTimetable is not defined in this module --
            # presumably from an earlier revision; confirm before running.
            userPocketPidge = (
                [WeekTimetable(user),
                 Groups(user),
                 FrontCover(user),
                 UserGuide(user),],
                [WeekToView(user, 2),
                 WeekToView(user, 2, True),
                 WeekToView(user, 7),
                 WeekToView(user, 7, True),],
                [WeekToView(user, 3),
                 WeekToView(user, 3, True),
                 WeekToView(user, 8),
                 WeekToView(user, 8, True),],
                [WeekToView(user, 4),
                 WeekToView(user, 4, True),
                 WeekToView(user, 9),
                 WeekToView(user, 9, True),],
                [WeekToView(user, 5),
                 WeekToView(user, 5, True),
                 FutureAppointmentsFirst(user),
                 FutureAppointments(user),],
                [WeekToView(user, 6),
                 WeekToView(user, 6, True),
                 Notes(user),
                 Notes(user)]
                )
            # Add to posts dictionary in the order they'll be processed
            for (offset, frames) in enumerate(userPocketPidge):
                # first - pfoffset*framesperpage + startindex
                first = (offset*framesperpage)+ startPage + startIndex
                tempposts[first] = frames[0]
                # second - pfoffset*framesperpage + A4BackMap[startindex]
                second = (offset*framesperpage) + int(A4BackMap[startPage]) + startIndex
                tempposts[second] = frames[1]
                # third - first + 1
                third = first + 1
                tempposts[third] = frames[2]
                # fourth - second - 1
                fourth = second - 1
                tempposts[fourth] = frames[3]
            currentuser += 1
            # Stamp the user so they are excluded from future runs.
            user.pocketpidge = datetime.datetime.now()
            user.save()
    # Flatten the slot map into a linear flowable list; unused slots get a
    # placeholder paragraph so the frame grid stays aligned.
    for value in range(0,totalusers*framesperuser*setsofusers):
        if tempposts.has_key(value+1):
            frame = tempposts[value+1]
            posts.append(frame.makeFrame())
            posts.append(FrameBreak())
        else:
            posts.append(Paragraph("---", medStyle))
            posts.append(FrameBreak())
    # Build File
    document = BaseDocTemplate(filename + ".pdf", pagesize=A4_Landscape)
    template = PageTemplate(frames=createFrames(pageWidth, pageHeight, 4, 2))
    document.addPageTemplates(template)
    document.build(posts)
def makePromo():
    ''' Make PDF of MyPidge promotional flyer: 8 front sides backed by 8
    back sides, laid out 4x2 per landscape A4 sheet. '''
    # Landscape A4: swap the portrait dimensions.
    pageHeight, pageWidth = A4
    A4_Landscape = [pageWidth, pageHeight]
    # Distinct instances per slot (frames accumulate items when built).
    formPages = tuple(PromoFront(None) for _ in range(8)) \
              + tuple(PromoBack(None) for _ in range(8))
    posts = []
    lastIndex = len(formPages) - 1
    for idx, frame in enumerate(formPages):
        posts.append(frame.makeFrame())
        # Break page at end of a PocketPidge
        # But need to do this twice if on an odd page
        posts.append(PageBreak() if idx == lastIndex else FrameBreak())
    # Build File
    document = BaseDocTemplate("promo.pdf", pagesize=A4_Landscape)
    document.addPageTemplates(PageTemplate(frames=createFrames(pageWidth, pageHeight, 4, 2)))
    document.build(posts)
def makeForm():
    ''' Make PDF of MyPidge app form: 4 front sides backed by 4 back sides,
    laid out 2x2 per portrait A4 sheet. '''
    pageWidth, pageHeight = A4
    # Distinct instances per slot (frames accumulate items when built).
    formPages = tuple(FormFront(None) for _ in range(4)) \
              + tuple(FormBack(None) for _ in range(4))
    posts = []
    for idx, frame in enumerate(formPages):
        posts.append(frame.makeFrame())
        if idx + 1 == len(formPages):
            # Break page at end of a PocketPidge
            # But need to do this twice if on an odd page
            posts.append(PageBreak())
        else:
            posts.append(FrameBreak())
    # Build File
    document = BaseDocTemplate("form.pdf", pagesize=A4)
    document.addPageTemplates(PageTemplate(frames=createFrames(pageWidth, pageHeight, 2, 2)))
    document.build(posts)
def createFrames(pageWidth, pageHeight, framesAcross, framesUp, framePadding=10):
    ''' Set up the page layout: a framesAcross x framesUp grid of frames,
    swept left-to-right then top-to-bottom, each inset by framePadding. '''
    cellWidth = pageWidth/framesAcross
    cellHeight = pageHeight/framesUp
    frames = []
    for row in range(0, framesUp):
        # ReportLab's y axis grows upward, so the top row has the largest y.
        originY = (framesUp - row - 1)*cellHeight
        for col in range(0, framesAcross):
            frames.append(PlatypusFrame(
                col*cellWidth + framePadding,
                originY + framePadding,
                cellWidth - 2*framePadding,
                cellHeight - 2*framePadding))
    return frames
|
fergusrossferrier/mypidge.com
|
MyPidge/PocketPidge.py
|
Python
|
agpl-3.0
| 30,632
|
[
"VisIt"
] |
9cabbf7f9ef33f9d88e05feb8703aac0ec5849ac910cbe7ea53ec575a270c257
|
# -*- coding: utf-8 -*-
import shutil
import tempfile
import os
import optparse
import traceback
import re
import threading
import time
from io import open
from lwr.lwr_client import submit_job
from lwr.lwr_client import finish_job
from lwr.lwr_client import LwrOutputs
from lwr.lwr_client import ClientOutputs
from lwr.lwr_client import build_client_manager
from lwr.lwr_client import ClientJobDescription
from galaxy.tools.deps.dependencies import DependenciesDescription
from galaxy.tools.deps.requirements import ToolRequirement
from .test_common import write_config
TEST_SCRIPT = b"""
import sys
from os import getenv
from os import makedirs
from os import listdir
from os.path import join
from os.path import basename
from os.path import dirname
config_input = open(sys.argv[1], 'r')
input_input = open(sys.argv[2], 'r')
input_extra = open(sys.argv[8], 'r')
output = open(sys.argv[3], 'w')
output2 = open(sys.argv[5], 'w')
output2_contents = sys.argv[6]
output3 = open(sys.argv[7], 'w')
version_output = open(sys.argv[9], 'w')
index_path = sys.argv[10]
assert len(listdir(dirname(index_path))) == 2
assert len(listdir(join(dirname(dirname(index_path)), "seq"))) == 1
output4_index_path = open(sys.argv[11], 'w')
try:
assert input_input.read() == "Hello world input!!@!"
assert input_extra.read() == "INPUT_EXTRA_CONTENTS"
contents = config_input.read(1024)
output.write(contents)
open("workdir_output", "w").write("WORK DIR OUTPUT")
open("env_test", "w").write(getenv("TEST_ENV", "DEFAULT"))
open("rewrite_action_test", "w").write(sys.argv[12])
output2.write(output2_contents)
with open("galaxy.json", "w") as f: f.write("GALAXY_JSON")
output3.write(getenv("MOO", "moo_default"))
output1_extras_path = "%s_files" % sys.argv[3][0:-len(".dat")]
makedirs(output1_extras_path)
open(join(output1_extras_path, "extra"), "w").write("EXTRA_OUTPUT_CONTENTS")
version_output.write("1.0.1")
output4_index_path.write(index_path)
finally:
output.close()
config_input.close()
output2.close()
output3.close()
version_output.close()
output4_index_path.close()
"""
EXPECTED_OUTPUT = b"hello world output"
EXAMPLE_UNICODE_TEXT = u'єχαмρℓє συтρυт'
TEST_REQUIREMENT = ToolRequirement(name="dep1", version="1.1", type="package")
TEST_DEPENDENCIES = DependenciesDescription(requirements=[TEST_REQUIREMENT])
class MockTool(object):
    '''Minimal stand-in for a Galaxy tool: fixed id/version plus a tool dir.'''
    def __init__(self, tool_dir):
        self.tool_dir = tool_dir
        self.id = "client_test"
        self.version = "1.0"
def run(options):
    """End-to-end exercise of a running LWR server.

    Stages a scripted job into a temporary directory, submits it through the
    LWR client, waits for completion, and asserts on every produced output.
    ``options`` is an optparse-style namespace; optional attributes such as
    ``test_unicode``, ``test_requirement``, ``test_env``,
    ``test_rewrite_action`` toggle individual sub-scenarios.
    """
    try:
        temp_directory = tempfile.mkdtemp()
        # Directory layout staged for the job: index dir plus a sibling "seq"
        # dir (exercises depth-based transfer), a shared dir for rewrite
        # tests, the job working dir, and the tool dir.
        temp_index_dir = os.path.join(temp_directory, "idx", "bwa")
        temp_index_dir_sibbling = os.path.join(temp_directory, "idx", "seq")
        temp_shared_dir = os.path.join(temp_directory, "shared", "test1")
        temp_work_dir = os.path.join(temp_directory, "w")
        temp_tool_dir = os.path.join(temp_directory, "t")
        __makedirs([temp_tool_dir, temp_work_dir, temp_index_dir, temp_index_dir_sibbling, temp_shared_dir])
        # Paths for inputs, outputs, the tool script, and the version file.
        temp_input_path = os.path.join(temp_directory, "dataset_0.dat")
        temp_input_extra_path = os.path.join(temp_directory, "dataset_0_files", "input_subdir", "extra")
        temp_index_path = os.path.join(temp_index_dir, "human.fa")
        temp_config_path = os.path.join(temp_work_dir, "config.txt")
        temp_tool_path = os.path.join(temp_directory, "t", "script.py")
        temp_output_path = os.path.join(temp_directory, "dataset_1.dat")
        temp_output2_path = os.path.join(temp_directory, "dataset_2.dat")
        temp_output3_path = os.path.join(temp_directory, "dataset_3.dat")
        temp_output4_path = os.path.join(temp_directory, "dataset_4.dat")
        temp_version_output_path = os.path.join(temp_directory, "GALAXY_VERSION_1234")
        temp_output_workdir_destination = os.path.join(temp_directory, "dataset_77.dat")
        temp_output_workdir = os.path.join(temp_work_dir, "env_test")
        temp_output_workdir_destination2 = os.path.join(temp_directory, "dataset_78.dat")
        temp_output_workdir2 = os.path.join(temp_work_dir, "rewrite_action_test")
        # Populate the staged files the TEST_SCRIPT asserts on.
        __write_to_file(temp_input_path, b"Hello world input!!@!")
        __write_to_file(temp_input_extra_path, b"INPUT_EXTRA_CONTENTS")
        __write_to_file(temp_config_path, EXPECTED_OUTPUT)
        __write_to_file(temp_tool_path, TEST_SCRIPT)
        __write_to_file(temp_index_path, b"AGTC")
        # Implicit files that should also get transferred since depth > 0
        __write_to_file("%s.fai" % temp_index_path, b"AGTC")
        __write_to_file(os.path.join(temp_index_dir_sibbling, "human_full_seqs"), b"AGTC")
        empty_input = u"/foo/bar/x"
        test_unicode = getattr(options, "test_unicode", False)  # TODO Switch this in integration tests
        cmd_text = EXAMPLE_UNICODE_TEXT if test_unicode else "Hello World"
        # Positional arguments passed to TEST_SCRIPT (see its argv comment).
        command_line_params = (
            temp_tool_path,
            temp_config_path,
            temp_input_path,
            temp_output_path,
            empty_input,
            temp_output2_path,
            cmd_text,
            temp_output3_path,
            temp_input_extra_path,
            temp_version_output_path,
            temp_index_path,
            temp_output4_path,
            temp_shared_dir,
        )
        assert os.path.exists(temp_index_path)
        command_line = u'python %s "%s" "%s" "%s" "%s" "%s" "%s" "%s" "%s" "%s" "%s" "%s" "%s"' % command_line_params
        config_files = [temp_config_path]
        input_files = [temp_input_path, empty_input]
        output_files = [
            temp_output_path,
            temp_output2_path,
            temp_output3_path,
            temp_output4_path,
            temp_output_workdir_destination,
            temp_output_workdir_destination2
        ]
        # Build the client, describe the job, submit, and block until done.
        client, client_manager = __client(temp_directory, options)
        waiter = Waiter(client, client_manager)
        client_outputs = ClientOutputs(
            working_directory=temp_work_dir,
            work_dir_outputs=[
                (temp_output_workdir, temp_output_workdir_destination),
                (temp_output_workdir2, temp_output_workdir_destination2),
            ],
            output_files=output_files,
            version_file=temp_version_output_path,
        )
        job_description = ClientJobDescription(
            command_line=command_line,
            tool=MockTool(temp_tool_dir),
            config_files=config_files,
            input_files=input_files,
            client_outputs=client_outputs,
            working_directory=temp_work_dir,
            **__extra_job_description_kwargs(options)
        )
        submit_job(client, job_description)
        result_status = waiter.wait()
        __finish(options, client, client_outputs, result_status)
        # Verify every output the script was expected to produce.
        __assert_contents(temp_output_path, EXPECTED_OUTPUT, result_status)
        __assert_contents(temp_output2_path, cmd_text, result_status)
        __assert_contents(os.path.join(temp_work_dir, "galaxy.json"), b"GALAXY_JSON", result_status)
        __assert_contents(os.path.join(temp_directory, "dataset_1_files", "extra"), b"EXTRA_OUTPUT_CONTENTS", result_status)
        if getattr(options, "test_rewrite_action", False):
            __assert_contents(temp_output_workdir_destination2, os.path.join(temp_directory, "shared2", "test1"), result_status)
        if job_description.env:
            __assert_contents(temp_output_workdir_destination, b"TEST_ENV_VALUE", result_status)
        __assert_contents(temp_version_output_path, b"1.0.1", result_status)
        if job_description.dependencies_description:
            __assert_contents(temp_output3_path, "moo_override", result_status)
        else:
            __assert_contents(temp_output3_path, "moo_default", result_status)
        if client.default_file_action != "none":
            rewritten_index_path = open(temp_output4_path, 'r', encoding='utf-8').read()
            # Path written to this file will differ between Windows and Linux.
            assert re.search(r"123456[/\\]unstructured[/\\]\w+[/\\]bwa[/\\]human.fa", rewritten_index_path) is not None
        __exercise_errors(options, client, temp_output_path, temp_directory)
        client_manager.shutdown()
    except BaseException:
        if not options.suppress_output:
            traceback.print_exc()
        raise
    finally:
        # Always remove the scratch directory, even on failure.
        shutil.rmtree(temp_directory)
class Waiter(object):
    """Wait for a submitted LWR job to reach a terminal state.

    If the client manager exposes ``ensure_has_status_update_callback`` the
    waiter registers a callback and blocks on a ``threading.Event``;
    otherwise it polls ``client.raw_check_complete()``.
    """

    def __init__(self, client, client_manager):
        self.client = client
        self.client_manager = client_manager
        # BUG FIX: the flag was previously stored as ``self.async`` —
        # ``async`` is a reserved keyword since Python 3.7, which made this
        # class a SyntaxError on modern interpreters.
        self.is_async = hasattr(client_manager, 'ensure_has_status_update_callback')
        self.__setup_callback()

    def __setup_callback(self):
        """Register a status-update callback when running asynchronously."""
        if self.is_async:
            self.event = threading.Event()

            def on_update(message):
                # Record the terminal status and release wait().
                if message["status"] in ["complete", "cancelled"]:
                    self.final_status = message
                    self.event.set()

            self.client_manager.ensure_has_status_update_callback(on_update)

    def wait(self):
        """Return the final status message.

        :raises Exception: if the job does not finish within ~5 seconds.
        """
        final_status = None
        if not self.is_async:
            # Poll up to 5 times, one second apart.
            i = 0
            while i < 5:
                complete_response = self.client.raw_check_complete()
                if complete_response["status"] in ["complete", "cancelled"]:
                    final_status = complete_response
                    break
                time.sleep(1)
                i = i + 1
        else:
            self.event.wait(5)
            if self.event.is_set():
                final_status = self.final_status
        if not final_status:
            raise Exception("Job not completed properly")
        return final_status
def __assert_contents(path, expected_contents, lwr_state):
    """Assert that the file at ``path`` exists and holds ``expected_contents``.

    ``lwr_state`` (the final LWR status response) is embedded in failure
    messages to aid debugging.

    :raises AssertionError: if the file is missing or its contents differ.
    """
    if not os.path.exists(path):
        raise AssertionError("File %s not created. Final LWR response state [%s]" % (path, lwr_state))
    # Use a context manager so the handle is always closed, and avoid
    # shadowing the builtin ``file`` as the previous version did.
    with open(path, 'r', encoding="utf-8") as contents_file:
        contents = contents_file.read()
    if contents != expected_contents:
        message = "File (%s) contained invalid contents [%s]." % (path, contents)
        message = "%s Expected contents [%s]. Final LWR response state [%s]" % (message, expected_contents, lwr_state)
        raise AssertionError(message)
def __exercise_errors(options, client, temp_output_path, temp_directory):
    """Trigger a failing fetch of a nonexistent output to exercise the
    client's error path.

    TODO: Improve. Something should be checked here.
    """
    if not getattr(options, 'test_errors', False):
        return
    try:
        client._fetch_output(temp_output_path + "x")
    except BaseException:
        if not options.suppress_output:
            traceback.print_exc()
def __client(temp_directory, options):
    """Build a configured LWR client plus its manager from ``options``."""
    default_file_action = getattr(options, "default_file_action", None)
    path_defs = [
        dict(
            path=os.path.join(temp_directory, "idx"),
            path_types="unstructured",
            depth=2,
            action=default_file_action or "transfer",
        ),
    ]
    if getattr(options, "test_rewrite_action", False):
        # Map the staged "shared" directory onto "shared2" via rewriting.
        path_defs.append(dict(
            path=os.path.join(temp_directory, "shared"),
            path_types="unstructured",
            action="rewrite",
            source_directory=os.path.join(temp_directory, "shared"),
            destination_directory=os.path.join(temp_directory, "shared2"),
        ))
    client_options = {
        "url": getattr(options, "url", None),
        "private_token": getattr(options, "private_token", None),
        "file_action_config": write_config(temp_directory, dict(paths=path_defs)),
    }
    if default_file_action:
        client_options["default_file_action"] = default_file_action
    # Forward optional pass-through settings when present on options.
    for optional_name in ("jobs_directory", "files_endpoint"):
        if hasattr(options, optional_name):
            client_options[optional_name] = getattr(options, optional_name)
    user = getattr(options, 'user', None)
    if user:
        client_options["submit_user"] = user
    client_manager = __client_manager(options)
    return client_manager.get_client(client_options, "123456"), client_manager
def __client_manager(options):
    """Translate option attributes into ``build_client_manager`` kwargs."""
    manager_args = {}
    for option_name in ('cache', 'job_manager', 'file_cache'):
        value = getattr(options, option_name, None)
        if value:
            manager_args[option_name] = value
    transport = getattr(options, 'transport', None)
    if transport:
        manager_args['transport'] = transport
    manager_url = getattr(options, 'manager_url', None)
    if manager_url:
        manager_args['url'] = manager_url
    return build_client_manager(**manager_args)
def __write_to_file(path, contents):
    """Write ``contents`` (bytes) to ``path``, creating parent directories."""
    parent = os.path.dirname(path)
    if not os.path.exists(parent):
        os.makedirs(parent)
    with open(path, "wb") as out:
        out.write(contents)
def __makedirs(directories):
    """Create every directory path in ``directories``."""
    for directory_path in directories:
        os.makedirs(directory_path)
def __extra_job_description_kwargs(options):
    """Assemble optional dependency/env kwargs for ClientJobDescription."""
    dependencies_description = None
    if getattr(options, "test_requirement", False):
        dependencies_description = DependenciesDescription(
            requirements=[TEST_REQUIREMENT])
    env = []
    if getattr(options, "test_env", False):
        env.append(dict(name="TEST_ENV", value="TEST_ENV_VALUE"))
    return dict(dependencies_description=dependencies_description, env=env)
def __finish(options, client, client_outputs, result_status):
    """Run the standard finish/cleanup step and assert it succeeded."""
    cleanup_job = 'always' if getattr(options, 'cleanup', True) else 'never'
    failed = finish_job(
        client=client,
        job_completed_normally=True,
        cleanup_job=cleanup_job,  # Default should 'always' if overridden via options.
        client_outputs=client_outputs,
        lwr_outputs=LwrOutputs.from_status_response(result_status),
    )
    if failed:
        failed_message_template = "Failed to complete job correctly, final status %s, finish exceptions %s."
        assert False, failed_message_template % (result_status, failed)
def main():
    """Exercise a running lwr server application with the lwr client."""
    parser = optparse.OptionParser()
    parser.add_option('--url', dest='url', default='http://localhost:8913/')
    parser.add_option('--private_token', default=None)
    parser.add_option('--transport', default=None)  # set to curl to use pycurl
    parser.add_option('--cache', default=False, action="store_true")
    parser.add_option('--test_errors', default=False, action="store_true")
    parser.add_option('--suppress_output', default=False, action="store_true")
    parser.add_option('--disable_cleanup', dest="cleanup", default=True, action="store_false")
    options, _ = parser.parse_args()
    run(options)


if __name__ == "__main__":
    main()
|
jmchilton/lwr
|
test/check.py
|
Python
|
apache-2.0
| 15,187
|
[
"BWA",
"Galaxy"
] |
cbda052e219566927e4d7cf87c966ff448fb7823fac2cfe5027f984b21c49c75
|
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import logging
import sys
try:
from urllib.parse import urlparse, unquote
except ImportError:
from urlparse import urlparse # type: ignore
from urllib2 import unquote # type: ignore
try:
from yarl import URL
except ImportError:
pass
try:
from azure.core.pipeline.transport import AioHttpTransport
except ImportError:
AioHttpTransport = None
from azure.core.exceptions import ClientAuthenticationError
from azure.core.pipeline.policies import SansIOHTTPPolicy
from . import sign_string
logger = logging.getLogger(__name__)
# Re-wrap an exception as a different (usually more specific) exception type.
def _wrap_exception(ex, desired_type):
    """Return ``ex`` re-expressed as an instance of ``desired_type``."""
    msg = ex.args[0] if ex.args else ""
    if sys.version_info >= (3,):
        # Automatic chaining in Python 3 means we keep the trace
        return desired_type(msg)
    # There isn't a good solution in 2 for keeping the stack trace
    # in general, or that will not result in an error in 3
    # However, we can keep the previous error type and message
    # TODO: In the future we will log the trace
    return desired_type('{}: {}'.format(ex.__class__.__name__, msg))
class AzureSigningError(ClientAuthenticationError):
    """
    Represents a fatal error when attempting to sign a request.
    In general, the cause of this exception is user error. For example, the given account key is not valid.
    Please visit https://docs.microsoft.com/en-us/azure/storage/common/storage-create-storage-account for more info.
    """
    # Raised (via _wrap_exception) from _add_authorization_header when
    # computing the SharedKey signature fails.
# pylint: disable=no-self-use
class SharedKeyCredentialPolicy(SansIOHTTPPolicy):
    """Pipeline policy that signs each outgoing request with the storage
    account's shared key (SharedKey authorization scheme).

    The string-to-sign is assembled in ``on_request`` from the HTTP verb,
    selected standard headers, all ``x-ms-*`` headers, the canonicalized
    resource path, and the query string.
    """

    def __init__(self, account_name, account_key):
        # account_name: storage account name used in the canonicalized
        # resource; account_key: base64 shared key passed to sign_string.
        self.account_name = account_name
        self.account_key = account_key
        super(SharedKeyCredentialPolicy, self).__init__()

    @staticmethod
    def _get_headers(request, headers_to_sign):
        """Return the newline-joined values of ``headers_to_sign`` (lowercased
        lookup, empty string when absent), each terminated by a newline."""
        headers = dict((name.lower(), value) for name, value in request.http_request.headers.items() if value)
        # A zero content-length must be signed as an empty value.
        if 'content-length' in headers and headers['content-length'] == '0':
            del headers['content-length']
        return '\n'.join(headers.get(x, '') for x in headers_to_sign) + '\n'

    @staticmethod
    def _get_verb(request):
        """Return the HTTP method followed by a newline."""
        return request.http_request.method + '\n'

    def _get_canonicalized_resource(self, request):
        """Return '/<account>/<path>' for the request URL.

        When the transport is aiohttp-based (possibly wrapped one or two
        levels deep), the path is routed through ``yarl.URL`` — presumably to
        match aiohttp's URL encoding behavior; TODO confirm.
        """
        uri_path = urlparse(request.http_request.url).path
        try:
            if isinstance(request.context.transport, AioHttpTransport) or \
                    isinstance(getattr(request.context.transport, "_transport", None), AioHttpTransport) or \
                    isinstance(getattr(getattr(request.context.transport, "_transport", None), "_transport", None),
                               AioHttpTransport):
                uri_path = URL(uri_path)
                return '/' + self.account_name + str(uri_path)
        except TypeError:
            # AioHttpTransport may be None (import failed); isinstance then
            # raises TypeError and we fall through to the plain path.
            pass
        return '/' + self.account_name + uri_path

    @staticmethod
    def _get_canonicalized_headers(request):
        """Return all x-ms-* headers as 'name:value\\n' lines, sorted by
        lowercased header name."""
        string_to_sign = ''
        x_ms_headers = []
        for name, value in request.http_request.headers.items():
            if name.startswith('x-ms-'):
                x_ms_headers.append((name.lower(), value))
        x_ms_headers.sort()
        for name, value in x_ms_headers:
            if value is not None:
                string_to_sign += ''.join([name, ':', value, '\n'])
        return string_to_sign

    @staticmethod
    def _get_canonicalized_resource_query(request):
        """Return the sorted query parameters as '\\nname:value' segments,
        with values URL-unquoted."""
        sorted_queries = list(request.http_request.query.items())
        sorted_queries.sort()
        string_to_sign = ''
        for name, value in sorted_queries:
            if value is not None:
                string_to_sign += '\n' + name.lower() + ':' + unquote(value)
        return string_to_sign

    def _add_authorization_header(self, request, string_to_sign):
        """HMAC-sign ``string_to_sign`` and set the Authorization header.

        :raises AzureSigningError: when signing fails for any reason.
        """
        try:
            signature = sign_string(self.account_key, string_to_sign)
            auth_string = 'SharedKey ' + self.account_name + ':' + signature
            request.http_request.headers['Authorization'] = auth_string
        except Exception as ex:
            # Wrap any error that occurred as signing error
            # Doing so will clarify/locate the source of problem
            raise _wrap_exception(ex, AzureSigningError)

    def on_request(self, request):
        """Build the string-to-sign for this request and attach the
        SharedKey Authorization header before it is sent."""
        string_to_sign = \
            self._get_verb(request) + \
            self._get_headers(
                request,
                [
                    'content-encoding', 'content-language', 'content-length',
                    'content-md5', 'content-type', 'date', 'if-modified-since',
                    'if-match', 'if-none-match', 'if-unmodified-since', 'byte_range'
                ]
            ) + \
            self._get_canonicalized_headers(request) + \
            self._get_canonicalized_resource(request) + \
            self._get_canonicalized_resource_query(request)
        self._add_authorization_header(request, string_to_sign)
        #logger.debug("String_to_sign=%s", string_to_sign)
|
Azure/azure-sdk-for-python
|
sdk/storage/azure-storage-queue/azure/storage/queue/_shared/authentication.py
|
Python
|
mit
| 5,369
|
[
"VisIt"
] |
60f87e28954c535682e41b8e7e232322ebac5ab494d05c18457ca6999f843f9b
|
"""
FitPanel class contains fields allowing to display results when
fitting a model and one data
"""
import sys
import wx
import wx.lib.newevent
import numpy as np
import copy
import math
import time
import traceback
from sasmodels.weights import MODELS as POLYDISPERSITY_MODELS
from sas.sasgui.guiframe.events import StatusEvent, NewPlotEvent, \
PlotQrangeEvent
from sas.sasgui.guiframe.dataFitting import check_data_validity
from sas.sasgui.guiframe.utils import format_number, check_float
from sas.sasgui.guiframe.documentation_window import DocumentationWindow
from sas.sasgui.perspectives.fitting.basepage import BasicPage as BasicPage
from sas.sasgui.perspectives.fitting.basepage import PageInfoEvent as \
PageInfoEvent
from sas.sascalc.data_util.qsmearing import smear_selection
from .basepage import ModelTextCtrl
# wx event used to deliver chi^2 recomputation results back to the fit page
# (bound to on_complete_chisqr in FitPage.__init__).
(Chi2UpdateEvent, EVT_CHI2_UPDATE) = wx.lib.newevent.NewEvent()
# Widget sizing constants, in pixels.
_BOX_WIDTH = 76
_DATA_BOX_WIDTH = 300
# Default custom smear height; presumably 0.0 means "no smearing" — TODO confirm.
SMEAR_SIZE_H = 0.00
# Category label under which user plugin models appear.
CUSTOM_MODEL = 'Plugin Models'
class FitPage(BasicPage):
"""
FitPanel class contains fields allowing to display results when
fitting a model and one data
:note: For Fit to be performed the user should check at least one parameter
on fit Panel window.
"""
    def __init__(self, parent, color=None):
        """
        Initialization of the Panel: build the data, model, and range sizers,
        wire up events, and seed the page with default (empty) data.
        """
        BasicPage.__init__(self, parent, color=color)
        # draw sizer
        self._fill_data_sizer()
        self.is_2D = None
        self.fit_started = False
        self.weightbt_string = None
        self.m_name = None
        # get smear info from data
        self._get_smear_info()
        self._fill_model_sizer(self.sizer1)
        self._get_defult_custom_smear()
        self._fill_range_sizer()
        self._set_smear(self.data)
        # chi^2 results arrive asynchronously via this custom event
        self.Bind(EVT_CHI2_UPDATE, self.on_complete_chisqr)
        # bind key event
        self.Bind(wx.EVT_RIGHT_DOWN, self.on_right_down)
        # start with all toolbar-style flags off until data/model are valid
        self._set_bookmark_flag(False)
        self._set_save_flag(False)
        self._set_preview_flag(False)
        self._set_copy_flag(False)
        self._set_paste_flag(False)
        self.btFit.SetFocus()
        self.enable_fit_button()
        self.fill_data_combobox(data_list=self.data_list)
        # create a default data for an empty panel
        self.create_default_data()
        self._manager.frame.Bind(wx.EVT_SET_FOCUS, self.on_set_focus)
def enable_fit_button(self):
"""
Enable fit button if data is valid and model is valid
"""
flag = check_data_validity(self.data) & (self.model is not None)
self.btFit.Enable(flag)
def on_set_focus(self, event):
"""
Override the basepage focus method to ensure the save flag is set
properly when focusing on the fit page.
"""
flag = check_data_validity(self.data) & (self.model is not None)
self._set_save_flag(flag)
self.parent.on_set_focus(event)
self.on_tap_focus()
    def _fill_data_sizer(self):
        """
        Fill sizer 0 with the I(q) data-source box: a read-only combobox of
        loaded datasets, with the box title colored by data validity.
        """
        self.data_box_description = wx.StaticBox(self, wx.ID_ANY,
                                                 'I(q) Data Source')
        # Blue title for valid data, red when the data is missing/invalid.
        if check_data_validity(self.data):
            dname_color = wx.BLUE
        else:
            dname_color = wx.RED
        self.data_box_description.SetForegroundColour(dname_color)
        boxsizer1 = wx.StaticBoxSizer(self.data_box_description, wx.VERTICAL)
        # ----------------------------------------------------------
        sizer_data = wx.BoxSizer(wx.HORIZONTAL)
        self.dataSource = wx.ComboBox(self, wx.ID_ANY, style=wx.CB_READONLY)
        wx.EVT_COMBOBOX(self.dataSource, wx.ID_ANY, self.on_select_data)
        self.dataSource.SetMinSize((_DATA_BOX_WIDTH, -1))
        sizer_data.Add(wx.StaticText(self, wx.ID_ANY, 'Name : '))
        sizer_data.Add(self.dataSource)
        sizer_data.Add((0, 5))
        boxsizer1.Add(sizer_data, 0, wx.ALL, 10)
        self.sizer0.Add(boxsizer1, 0, wx.EXPAND | wx.ALL, 10)
        self.sizer0.Layout()
def enable_datasource(self):
"""
Enable or disable data source control depending on existing data
"""
if not self.data_list:
self.dataSource.Disable()
else:
self.dataSource.Enable()
    def fill_data_combobox(self, data_list):
        """
        Get a list of data and fill the corresponding combobox; also resets
        the page's Q range to span the union of all datasets.
        """
        self.dataSource.Clear()
        self.data_list = data_list
        self.enable_datasource()
        if len(data_list) > 0:
            # find the maximum range covering all data
            qmin, qmax, npts = self.compute_data_set_range(data_list)
            self.qmin_data_set = qmin
            self.qmax_data_set = qmax
            self.npts_data_set = npts
            # push the combined range into the Q-range text controls
            self.qmin.SetValue(str(self.qmin_data_set))
            self.qmax.SetValue(str(self.qmax_data_set))
            self.qmin.SetBackgroundColour("white")
            self.qmax.SetBackgroundColour("white")
            self.qmin_x = self.qmin_data_set
            self.qmax_x = self.qmax_data_set
            self.state.qmin = self.qmin_x
            self.state.qmax = self.qmax_x
        is_data = False
        for data in self.data_list:
            if data is not None:
                self.dataSource.Append(str(data.name), clientData=data)
                # remember whether at least one dataset is valid
                if not is_data:
                    is_data = check_data_validity(data)
        if is_data:
            # auto-select the first entry and load it into the page
            self.dataSource.SetSelection(0)
            self.on_select_data(event=None)
        if len(data_list) == 1:
            # nothing to choose between — lock the combobox
            self.dataSource.Disable()
def on_select_data(self, event=None):
"""
On_select_data
"""
if self.dataSource.GetCount() > 0:
pos = self.dataSource.GetSelection() if event is not None else 0
data = self.dataSource.GetClientData(pos)
self.set_data(data)
    def _on_fit_complete(self):
        """
        When fit is complete, reset the fit button label.
        """
        # Clear the flag first — set_fitbutton presumably reads fit_started
        # to decide the label/state; keep this order. TODO confirm.
        self.fit_started = False
        self.set_fitbutton()
def _is_2D(self):
"""
Check if data_name is Data2D
:return: True or False
"""
if self.data.__class__.__name__ == "Data2D" or \
self.enable2D:
return True
return False
    def _fill_range_sizer(self):
        """
        Fill the Fitting sizer on the fit panel which contains: the smearing
        information (dq), the weighting information (dI or other), the plotting
        range, access to the 2D mask editor, the compute, fit, and help
        buttons, xi^2, number of points etc.
        """
        is_2d_data = False
        # Check if data is 2D
        if self.data.__class__.__name__ == "Data2D" or \
                        self.enable2D:
            is_2d_data = True
        title = "Fitting"
        # smear messages & titles
        smear_message_none = "No smearing is selected..."
        smear_message_dqdata = "The dQ data is being used for smearing..."
        smear_message_2d = \
            "Higher accuracy is very time-expensive. Use it with care..."
        smear_message_new_ssmear = \
            "Please enter only the value of interest to customize smearing..."
        smear_message_new_psmear = \
            "Please enter a fixed percentage to be applied to all Q values..."
        smear_message_2d_x_title = "<dQp>[1/A]:"
        smear_message_2d_y_title = "<dQs>[1/A]:"
        smear_message_pinhole_percent_title = "dQ[%]:"
        smear_message_slit_height_title = "Slit height[1/A]:"
        smear_message_slit_width_title = "Slit width[1/A]:"
        self._get_smear_info()
        # Sizers
        box_description_range = wx.StaticBox(self, wx.ID_ANY, str(title))
        box_description_range.SetForegroundColour(wx.BLUE)
        boxsizer_range = wx.StaticBoxSizer(box_description_range, wx.VERTICAL)
        self.sizer_set_smearer = wx.BoxSizer(wx.VERTICAL)
        sizer_smearer = wx.BoxSizer(wx.HORIZONTAL)
        self.sizer_new_smear = wx.BoxSizer(wx.HORIZONTAL)
        self.sizer_set_masking = wx.BoxSizer(wx.HORIZONTAL)
        sizer_chi2 = wx.BoxSizer(wx.VERTICAL)
        smear_set_box = wx.StaticBox(self, wx.ID_ANY,
                                     'Set Instrumental Smearing')
        sizer_smearer_box = wx.StaticBoxSizer(smear_set_box, wx.HORIZONTAL)
        sizer_smearer_box.SetMinSize((_DATA_BOX_WIDTH, 60))
        weighting_set_box = wx.StaticBox(self, wx.ID_ANY,
                                         'Set Weighting by Selecting dI Source')
        weighting_box = wx.StaticBoxSizer(weighting_set_box, wx.HORIZONTAL)
        sizer_weighting = wx.BoxSizer(wx.HORIZONTAL)
        weighting_box.SetMinSize((_DATA_BOX_WIDTH, 40))
        # Filling the sizer containing weighting info.
        self.dI_noweight = wx.RadioButton(self, wx.ID_ANY,
                                          'No Weighting', style=wx.RB_GROUP)
        self.dI_didata = wx.RadioButton(self, wx.ID_ANY, 'Use dI Data')
        self.dI_sqrdata = wx.RadioButton(self, wx.ID_ANY, 'Use |sqrt(I Data)|')
        self.dI_idata = wx.RadioButton(self, wx.ID_ANY, 'Use |I Data|')
        # all four radio buttons share the same handler
        self.Bind(wx.EVT_RADIOBUTTON, self.onWeighting,
                  id=self.dI_noweight.GetId())
        self.Bind(wx.EVT_RADIOBUTTON, self.onWeighting,
                  id=self.dI_didata.GetId())
        self.Bind(wx.EVT_RADIOBUTTON, self.onWeighting,
                  id=self.dI_sqrdata.GetId())
        self.Bind(wx.EVT_RADIOBUTTON, self.onWeighting,
                  id=self.dI_idata.GetId())
        self.dI_noweight.SetValue(True)
        # add 4 types of weighting to the sizer
        sizer_weighting.Add(self.dI_noweight, 0, wx.LEFT, 10)
        sizer_weighting.Add((14, 10))
        sizer_weighting.Add(self.dI_didata)
        sizer_weighting.Add((14, 10))
        sizer_weighting.Add(self.dI_sqrdata)
        sizer_weighting.Add((14, 10))
        sizer_weighting.Add(self.dI_idata)
        sizer_weighting.Add((10, 10))
        # only "No Weighting" available until data provides dI
        self.dI_noweight.Enable(True)
        self.dI_didata.Enable(False)
        self.dI_sqrdata.Enable(False)
        self.dI_idata.Enable(False)
        weighting_box.Add(sizer_weighting)
        # combobox for smear2d accuracy selection
        self.smear_accuracy = wx.ComboBox(self, wx.ID_ANY,
                                          size=(50, -1), style=wx.CB_READONLY)
        self._set_accuracy_list()
        self.smear_accuracy.SetValue(self.smear2d_accuracy)
        self.smear_accuracy.SetSelection(0)
        self.smear_accuracy.SetToolTipString(
            "'Higher' uses more Gaussian points for smearing computation.")
        wx.EVT_COMBOBOX(self.smear_accuracy, wx.ID_ANY,
                        self._on_select_accuracy)
        # Fit button
        self.btFit = wx.Button(self, self._ids.next(), 'Fit')
        self.default_bt_colour = self.btFit.GetDefaultAttributes()
        self.btFit.Bind(wx.EVT_BUTTON, self._onFit, id=self.btFit.GetId())
        self.btFit.SetToolTipString("Start fitting.")
        # General Help button
        self.btFitHelp = wx.Button(self, wx.ID_ANY, 'Help')
        self.btFitHelp.SetToolTipString("General fitting help.")
        self.btFitHelp.Bind(wx.EVT_BUTTON, self._onFitHelp)
        # Resolution Smearing Help button (for now use same technique as
        # used for dI help to get tiniest possible button that works
        # both on MAC and PC. Should completely rewrite the fitting sizer
        # in future. This is minimum to get out release 3.1
        # comment June 14, 2015 --- PDB
        if sys.platform.count("win32") > 0:
            size_q = (20, 15)  # on PC
        else:
            size_q = (30, 20)  # on MAC
        self.btSmearHelp = wx.Button(self, wx.ID_ANY, '?',
                                     style=wx.BU_EXACTFIT, size=size_q)
        self.btSmearHelp.SetToolTipString("Resolution smearing help.")
        self.btSmearHelp.Bind(wx.EVT_BUTTON, self._onSmearHelp)
        # textcntrl for custom resolution
        self.smear_pinhole_percent = ModelTextCtrl(self, wx.ID_ANY,
                                                   size=(_BOX_WIDTH - 25, 20),
                                                   style=wx.TE_PROCESS_ENTER,
                                                   text_enter_callback=
                                                   self.onPinholeSmear)
        self.smear_slit_height = ModelTextCtrl(self, wx.ID_ANY,
                                               size=(_BOX_WIDTH - 25, 20),
                                               style=wx.TE_PROCESS_ENTER,
                                               text_enter_callback=self.onSlitSmear)
        self.smear_slit_width = ModelTextCtrl(self, wx.ID_ANY,
                                              size=(_BOX_WIDTH - 25, 20),
                                              style=wx.TE_PROCESS_ENTER,
                                              text_enter_callback=self.onSlitSmear)
        # smear
        self.smear_data_left = BGTextCtrl(self, wx.ID_ANY,
                                          size=(_BOX_WIDTH - 25, 20), style=0)
        self.smear_data_left.SetValue(str(self.dq_l))
        self.smear_data_right = BGTextCtrl(self, wx.ID_ANY,
                                           size=(_BOX_WIDTH - 25, 20), style=0)
        self.smear_data_right.SetValue(str(self.dq_r))
        # set default values for smear
        self.smear_pinhole_percent.SetValue(str(self.dx_percent))
        self.smear_slit_height.SetValue(str(self.dxl))
        self.smear_slit_width.SetValue(str(self.dxw))
        # Filling the sizer containing instruments smearing info.
        self.disable_smearer = wx.RadioButton(self, wx.ID_ANY,
                                              'None', style=wx.RB_GROUP)
        self.enable_smearer = wx.RadioButton(self, wx.ID_ANY, 'Use dQ Data')
        # self.enable_smearer.SetToolTipString(
        # "Click to use the loaded dQ data for smearing.")
        self.pinhole_smearer = wx.RadioButton(self, wx.ID_ANY,
                                              'Custom Pinhole Smear')
        # self.pinhole_smearer.SetToolTipString
        # ("Click to input custom resolution for pinhole smearing.")
        self.slit_smearer = wx.RadioButton(self, wx.ID_ANY, 'Custom Slit Smear')
        # self.slit_smearer.SetToolTipString
        # ("Click to input custom resolution for slit smearing.")
        self.Bind(wx.EVT_RADIOBUTTON, self.onSmear,
                  id=self.disable_smearer.GetId())
        self.Bind(wx.EVT_RADIOBUTTON, self.onSmear,
                  id=self.enable_smearer.GetId())
        self.Bind(wx.EVT_RADIOBUTTON, self.onPinholeSmear,
                  id=self.pinhole_smearer.GetId())
        self.Bind(wx.EVT_RADIOBUTTON, self.onSlitSmear,
                  id=self.slit_smearer.GetId())
        self.disable_smearer.SetValue(True)
        sizer_smearer.Add(self.disable_smearer, 0, wx.LEFT, 10)
        sizer_smearer.Add(self.enable_smearer)
        sizer_smearer.Add(self.pinhole_smearer)
        sizer_smearer.Add(self.slit_smearer)
        sizer_smearer.Add(self.btSmearHelp)
        sizer_smearer.Add((10, 10))
        # StaticText for chi2, N(for fitting), Npts + Log/linear spacing
        self.tcChi = BGTextCtrl(self, wx.ID_ANY, "-", size=(75, 20), style=0)
        self.tcChi.SetToolTipString("Chi2/Npts(Fit)")
        self.Npts_fit = BGTextCtrl(self, wx.ID_ANY, "-", size=(75, 20), style=0)
        self.Npts_fit.SetToolTipString(
            " Npts : number of points selected for fitting")
        self.Npts_total = ModelTextCtrl(self, wx.ID_ANY, size=(_BOX_WIDTH, 20),
                                        style=wx.TE_PROCESS_ENTER,
                                        text_enter_callback=self._onQrangeEnter)
        self.Npts_total.SetValue(format_number(self.npts_x))
        self.Npts_total.SetToolTipString(
            " Total Npts : total number of data points")
        # Update and Draw button
        self.draw_button = wx.Button(self, self._ids.next(), 'Compute')
        self.draw_button.Bind(wx.EVT_BUTTON,
                              self._onDraw, id=self.draw_button.GetId())
        self.draw_button.SetToolTipString("Compute and Draw.")
        self.points_sizer = wx.BoxSizer(wx.HORIZONTAL)
        self.pointsbox = wx.CheckBox(self, wx.ID_ANY, 'Log?', (10, 10))
        self.pointsbox.SetValue(False)
        self.pointsbox.SetToolTipString("Check mark to use log spaced points")
        wx.EVT_CHECKBOX(self, self.pointsbox.GetId(), self.select_log)
        self.points_sizer.Add(wx.StaticText(self, wx.ID_ANY, 'Npts '))
        self.points_sizer.Add(self.pointsbox)
        box_description_1 = wx.StaticText(self, wx.ID_ANY, '   Chi2/Npts')
        box_description_2 = wx.StaticText(self, wx.ID_ANY, 'Npts(Fit)')
        # StaticText for smear
        self.smear_description_none = wx.StaticText(self, wx.ID_ANY,
                                                    smear_message_none, style=wx.ALIGN_LEFT)
        self.smear_description_dqdata = wx.StaticText(self, wx.ID_ANY,
                                                      smear_message_dqdata, style=wx.ALIGN_LEFT)
        self.smear_description_type = wx.StaticText(self, wx.ID_ANY,
                                                    "Type:", style=wx.ALIGN_LEFT)
        self.smear_description_accuracy_type = wx.StaticText(self, wx.ID_ANY,
                                                             "Accuracy:", style=wx.ALIGN_LEFT)
        self.smear_description_smear_type = BGTextCtrl(self, wx.ID_ANY,
                                                       size=(57, 20), style=0)
        self.smear_description_smear_type.SetValue(str(self.dq_l))
        self.SetBackgroundColour(self.GetParent().GetBackgroundColour())
        self.smear_description_2d = wx.StaticText(self, wx.ID_ANY,
                                                  smear_message_2d, style=wx.ALIGN_LEFT)
        self.smear_message_new_s = wx.StaticText(self, wx.ID_ANY,
                                                 smear_message_new_ssmear, style=wx.ALIGN_LEFT)
        self.smear_message_new_p = wx.StaticText(self, wx.ID_ANY,
                                                 smear_message_new_psmear, style=wx.ALIGN_LEFT)
        self.smear_description_2d_x = wx.StaticText(self, wx.ID_ANY,
                                                    smear_message_2d_x_title, style=wx.ALIGN_LEFT)
        self.smear_description_2d_x.SetToolTipString(
            "  dQp(parallel) in q_r direction.")
        self.smear_description_2d_y = wx.StaticText(self, wx.ID_ANY,
                                                    smear_message_2d_y_title, style=wx.ALIGN_LEFT)
        self.smear_description_2d_y.SetToolTipString(
            " dQs(perpendicular) in q_phi direction.")
        self.smear_description_pin_percent = wx.StaticText(self, wx.ID_ANY,
                                                           smear_message_pinhole_percent_title,
                                                           style=wx.ALIGN_LEFT)
        self.smear_description_slit_height = wx.StaticText(self, wx.ID_ANY,
                                                           smear_message_slit_height_title, style=wx.ALIGN_LEFT)
        self.smear_description_slit_width = wx.StaticText(self, wx.ID_ANY,
                                                          smear_message_slit_width_title, style=wx.ALIGN_LEFT)
        # arrange sizers
        self.sizer_set_smearer.Add(sizer_smearer)
        self.sizer_set_smearer.Add((10, 10))
        self.sizer_set_smearer.Add(self.smear_description_none,
                                   0, wx.CENTER, 10)
        self.sizer_set_smearer.Add(self.smear_description_dqdata,
                                   0, wx.CENTER, 10)
        self.sizer_set_smearer.Add(self.smear_description_2d,
                                   0, wx.CENTER, 10)
        self.sizer_new_smear.Add(self.smear_description_type,
                                 0, wx.CENTER, 10)
        self.sizer_new_smear.Add(self.smear_description_accuracy_type,
                                 0, wx.CENTER, 10)
        self.sizer_new_smear.Add(self.smear_accuracy)
        self.sizer_new_smear.Add(self.smear_description_smear_type,
                                 0, wx.CENTER, 10)
        self.sizer_new_smear.Add((15, -1))
        self.sizer_new_smear.Add(self.smear_description_2d_x, 0, wx.CENTER, 10)
        self.sizer_new_smear.Add(self.smear_description_slit_height,
                                 0, wx.CENTER, 10)
        self.sizer_new_smear.Add(self.smear_slit_height, 0, wx.CENTER, 10)
        self.sizer_new_smear.Add(self.smear_data_left, 0, wx.CENTER, 10)
        self.sizer_new_smear.Add((20, -1))
        self.sizer_new_smear.Add(self.smear_description_2d_y,
                                 0, wx.CENTER, 10)
        self.sizer_new_smear.Add(self.smear_description_pin_percent,
                                 0, wx.CENTER, 10)
        self.sizer_new_smear.Add(self.smear_description_slit_width,
                                 0, wx.CENTER, 10)
        self.sizer_new_smear.Add(self.smear_pinhole_percent, 0, wx.CENTER, 10)
        self.sizer_new_smear.Add(self.smear_slit_width, 0, wx.CENTER, 10)
        self.sizer_new_smear.Add(self.smear_data_right, 0, wx.CENTER, 10)
        self.sizer_set_smearer.Add(self.smear_message_new_s, 0, wx.CENTER, 10)
        self.sizer_set_smearer.Add(self.smear_message_new_p, 0, wx.CENTER, 10)
        self.sizer_set_smearer.Add((5, 2))
        self.sizer_set_smearer.Add(self.sizer_new_smear, 0, wx.CENTER, 10)
        # add all to chi2 sizer
        sizer_smearer_box.Add(self.sizer_set_smearer)
        sizer_chi2.Add(sizer_smearer_box)
        sizer_chi2.Add((-1, 5))
        sizer_chi2.Add(weighting_box)
        sizer_chi2.Add((-1, 5))
        # hide all smear messages and textctrl
        self._hide_all_smear_info()
        # get smear_selection
        self.current_smearer = smear_selection(self.data, self.model)
        # Show only the relevant smear messages, etc
        if self.current_smearer is None:
            if not is_2d_data:
                self.smear_description_none.Show(True)
                self.enable_smearer.Disable()
            else:
                self.smear_description_none.Show(True)
                self.slit_smearer.Disable()
            if self.data is None:
                # no data at all: no smearing option is usable
                self.slit_smearer.Disable()
                self.pinhole_smearer.Disable()
                self.enable_smearer.Disable()
        else:
            self._show_smear_sizer()
        boxsizer_range.Add(self.sizer_set_masking)
        # 2D data? default
        is_2d_data = False
        # check if it is 2D data
        if self.data.__class__.__name__ == "Data2D" or self.enable2D:
            is_2d_data = True
        self.sizer5.Clear(True)
        # Q-range entry controls with click/key bindings for interactive range
        self.qmin = ModelTextCtrl(self, wx.ID_ANY, size=(_BOX_WIDTH, 20),
                                  style=wx.TE_PROCESS_ENTER,
                                  set_focus_callback=self.qrang_set_focus,
                                  text_enter_callback=self._onQrangeEnter,
                                  name='qmin')
        self.qmin.SetValue(str(self.qmin_x))
        q_tip = "Click outside of the axes\n to remove the lines."
        qmin_tip = "Minimun value of Q.\n"
        qmin_tip += q_tip
        self.qmin.SetToolTipString(qmin_tip)
        self.qmax = ModelTextCtrl(self, wx.ID_ANY, size=(_BOX_WIDTH, 20),
                                  style=wx.TE_PROCESS_ENTER,
                                  set_focus_callback=self.qrang_set_focus,
                                  text_enter_callback=self._onQrangeEnter,
                                  name='qmax')
        self.qmax.SetValue(str(self.qmax_x))
        qmax_tip = "Maximum value of Q.\n"
        qmax_tip += q_tip
        self.qmax.SetToolTipString(qmax_tip)
        self.qmin.Bind(wx.EVT_MOUSE_EVENTS, self.qrange_click)
        self.qmax.Bind(wx.EVT_MOUSE_EVENTS, self.qrange_click)
        self.qmin.Bind(wx.EVT_KEY_DOWN, self.on_key)
        self.qmax.Bind(wx.EVT_KEY_DOWN, self.on_key)
        self.qmin.Bind(wx.EVT_TEXT, self.on_qrange_text)
        self.qmax.Bind(wx.EVT_TEXT, self.on_qrange_text)
        wx_id = self._ids.next()
        self.reset_qrange = wx.Button(self, wx_id, 'Reset')
        self.reset_qrange.Bind(wx.EVT_BUTTON, self.on_reset_clicked, id=wx_id)
        self.reset_qrange.SetToolTipString("Reset Q range to the default")
        # 5x5 grid laying out Q range, mask editor, chi2/Npts, and buttons
        sizer = wx.GridSizer(5, 5, 2, 6)
        self.btEditMask = wx.Button(self, self._ids.next(), 'Editor')
        self.btEditMask.Bind(wx.EVT_BUTTON, self._onMask,
                             id=self.btEditMask.GetId())
        self.btEditMask.SetToolTipString("Edit Mask.")
        self.EditMask_title = wx.StaticText(self, wx.ID_ANY, ' Masking(2D)')
        sizer.Add(wx.StaticText(self, wx.ID_ANY, ' Q range'))
        sizer.Add(wx.StaticText(self, wx.ID_ANY, ' Min[1/A]'))
        sizer.Add(wx.StaticText(self, wx.ID_ANY, ' Max[1/A]'))
        sizer.Add(self.EditMask_title)
        sizer.Add((-1, 5))
        sizer.Add(self.reset_qrange)
        sizer.Add(self.qmin)
        sizer.Add(self.qmax)
        sizer.Add(self.btEditMask)
        sizer.Add((-1, 5))
        sizer.AddMany(5*[(-1, 5)])
        sizer.Add(box_description_1, 0, 0)
        sizer.Add(box_description_2, 0, 0)
        sizer.Add(self.points_sizer, 0, 0)
        sizer.Add(self.draw_button, 0, 0)
        sizer.Add((-1, 5))
        sizer.Add(self.tcChi, 0, 0)
        sizer.Add(self.Npts_fit, 0, 0)
        sizer.Add(self.Npts_total, 0, 0)
        sizer.Add(self.btFit, 0, 0)
        sizer.Add(self.btFitHelp, 0, 0)
        boxsizer_range.Add(sizer_chi2)
        boxsizer_range.Add(sizer)
        # mask editing only applies to 2D data
        if is_2d_data:
            self.btEditMask.Enable()
            self.EditMask_title.Enable()
        else:
            self.btEditMask.Disable()
            self.EditMask_title.Disable()
        # save state
        self.save_current_state()
        self.sizer5.Add(boxsizer_range, 0, wx.EXPAND | wx.ALL, 10)
        self.sizer5.Layout()
    def _set_sizer_dispersion(self):
        """
        draw sizer with gaussian dispersity parameters

        Builds the polydispersity grid (self.sizer4_4).  Each dispersible
        model parameter gets one row of
        [checkbox, PD value, +/- sign, error, min, max, Npts, Nsigs,
        distribution-function combobox].  Rows for non-orientation (size)
        parameters are laid out first, orientation parameters second; the
        orientation widgets are only shown for 2D data.  The widget rows are
        recorded in self.fittable_param, self.fixed_param and
        self.orientation_params_disp, and the result is snapshot into
        self.state.
        """
        self.fittable_param = []
        self.fixed_param = []
        self.orientation_params_disp = []
        self.sizer4_4.Clear(True)
        if self.model is None:
            # no model is selected
            return
        if not self.enable_disp.GetValue():
            # the user didn't select dispersity display
            return
        self._reset_dispersity()
        # fill a sizer with the combobox to select dispersion type
        model_disp = wx.StaticText(self, wx.ID_ANY, 'Function')
        CHECK_STATE = False
        # ---- header row of the grid (column titles) ----
        ix = 0
        iy = 0
        disp = wx.StaticText(self, wx.ID_ANY, ' ')
        self.sizer4_4.Add(disp, (iy, ix), (1, 1),
                          wx.LEFT | wx.EXPAND | wx.ADJUST_MINSIZE, 15)
        ix += 1
        values = wx.StaticText(self, wx.ID_ANY, 'PD[ratio]')
        polytext = "Polydispersity (= STD/mean); "
        polytext += "the standard deviation over the mean value."
        values.SetToolTipString(polytext)
        self.sizer4_4.Add(values, (iy, ix), (1, 1),
                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
        ix += 2
        # On Mac the error column is always visible, so give it a title.
        if self.is_mac:
            err_text = 'Error'
        else:
            err_text = ''
        self.text_disp_1 = wx.StaticText(self, wx.ID_ANY, err_text)
        self.sizer4_4.Add(self.text_disp_1, (iy, ix), (1, 1),
                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
        ix += 1
        self.text_disp_min = wx.StaticText(self, wx.ID_ANY, 'Min')
        self.sizer4_4.Add(self.text_disp_min, (iy, ix), (1, 1),
                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
        ix += 1
        self.text_disp_max = wx.StaticText(self, wx.ID_ANY, 'Max')
        self.sizer4_4.Add(self.text_disp_max, (iy, ix), (1, 1),
                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
        ix += 1
        npts = wx.StaticText(self, wx.ID_ANY, 'Npts')
        npts.SetToolTipString("Number of sampling points for the numerical\n\
        integration over the distribution function.")
        self.sizer4_4.Add(npts, (iy, ix), (1, 1),
                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
        ix += 1
        nsigmas = wx.StaticText(self, wx.ID_ANY, 'Nsigs')
        nsigmas.SetToolTipString("Number of sigmas between which the range\n\
        of the distribution function will be used for weighting. \n\
        The value '3' covers 99.5% for Gaussian distribution \n\
        function. Note: Not recommended to change this value.")
        self.sizer4_4.Add(nsigmas, (iy, ix), (1, 1),
                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
        ix += 1
        self.sizer4_4.Add(model_disp, (iy, ix), (1, 1),
                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
        self.text_disp_max.Show(True)
        self.text_disp_min.Show(True)
        # ---- rows for non-orientation (size) parameters ----
        for item in self.model.dispersion.keys():
            if not self.magnetic_on:
                if item in self.model.magnetic_params:
                    continue
            if item not in self.model.orientation_params:
                if item not in self.disp_cb_dict:
                    self.disp_cb_dict[item] = None
                name0 = "Distribution of " + item
                name1 = item + ".width"
                name2 = item + ".npts"
                name3 = item + ".nsigmas"
                if name1 not in self.model.details:
                    self.model.details[name1] = ["", None, None]
                iy += 1
                for p in self.model.dispersion[item].keys():
                    if p == "width":
                        ix = 0
                        cb = wx.CheckBox(self, wx.ID_ANY, name0, (10, 10))
                        cb.SetValue(CHECK_STATE)
                        cb.SetToolTipString("Check mark to fit")
                        wx.EVT_CHECKBOX(self, cb.GetId(), self.select_param)
                        self.sizer4_4.Add(cb, (iy, ix), (1, 1),
                                          wx.LEFT | wx.EXPAND | wx.ADJUST_MINSIZE, 5)
                        ix = 1
                        value = self.model.getParam(name1)
                        ctl1 = ModelTextCtrl(self, wx.ID_ANY,
                                             size=(_BOX_WIDTH / 1.3, 20),
                                             style=wx.TE_PROCESS_ENTER)
                        ctl1.SetLabel('PD[ratio]')
                        poly_text = "Polydispersity (STD/mean) of %s\n" % item
                        poly_text += "STD: the standard deviation"
                        poly_text += " from the mean value."
                        ctl1.SetToolTipString(poly_text)
                        ctl1.SetValue(str(format_number(value, True)))
                        self.sizer4_4.Add(ctl1, (iy, ix), (1, 1), wx.EXPAND)
                        # text to show error sign
                        ix = 2
                        text2 = wx.StaticText(self, wx.ID_ANY, '+/-')
                        self.sizer4_4.Add(text2, (iy, ix), (1, 1),
                                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
                        if not self.is_mac:
                            text2.Hide()
                        ix = 3
                        ctl2 = wx.TextCtrl(self, wx.ID_ANY,
                                           size=(_BOX_WIDTH / 1.3, 20),
                                           style=0)
                        self.sizer4_4.Add(ctl2, (iy, ix), (1, 1),
                                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
                        if not self.is_mac:
                            ctl2.Hide()
                        # fit-range min / max entry boxes
                        ix = 4
                        ctl3 = ModelTextCtrl(self, wx.ID_ANY,
                                             size=(_BOX_WIDTH / 2, 20),
                                             style=wx.TE_PROCESS_ENTER,
                                             text_enter_callback=self._onparamRangeEnter)
                        self.sizer4_4.Add(ctl3, (iy, ix), (1, 1),
                                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
                        ix = 5
                        ctl4 = ModelTextCtrl(self, wx.ID_ANY,
                                             size=(_BOX_WIDTH / 2, 20),
                                             style=wx.TE_PROCESS_ENTER,
                                             text_enter_callback=self._onparamRangeEnter)
                        self.sizer4_4.Add(ctl4, (iy, ix), (1, 1),
                                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
                        ctl3.Show(True)
                        ctl4.Show(True)
                    elif p == "npts":
                        ix = 6
                        value = self.model.getParam(name2)
                        Tctl = ModelTextCtrl(self, wx.ID_ANY,
                                             size=(_BOX_WIDTH / 2.2, 20),
                                             style=wx.TE_PROCESS_ENTER)
                        Tctl.SetValue(str(format_number(value)))
                        self.sizer4_4.Add(Tctl, (iy, ix), (1, 1),
                                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
                        self.fixed_param.append([None, name2, Tctl, None, None,
                                                 None, None, None])
                    elif p == "nsigmas":
                        ix = 7
                        value = self.model.getParam(name3)
                        Tct2 = ModelTextCtrl(self, wx.ID_ANY,
                                             size=(_BOX_WIDTH / 2.2, 20),
                                             style=wx.TE_PROCESS_ENTER)
                        Tct2.SetValue(str(format_number(value)))
                        self.sizer4_4.Add(Tct2, (iy, ix), (1, 1),
                                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
                        self.fixed_param.append([None, name3, Tct2,
                                                 None, None, None,
                                                 None, None])
                # distribution-function selector for this parameter
                ix = 8
                disp_box = wx.ComboBox(self, wx.ID_ANY, size=(65, -1),
                                       style=wx.CB_READONLY, name='%s' % name1)
                for key, value in POLYDISPERSITY_MODELS.iteritems():
                    name_disp = str(key)
                    disp_box.Append(name_disp, value)
                disp_box.SetStringSelection("gaussian")
                wx.EVT_COMBOBOX(disp_box, wx.ID_ANY, self._on_disp_func)
                self.sizer4_4.Add(disp_box, (iy, ix), (1, 1), wx.EXPAND)
                self.fittable_param.append([cb, name1, ctl1, text2,
                                            ctl2, ctl3, ctl4, disp_box])
        # spacer row between size and orientation parameters
        ix = 0
        iy += 1
        self.sizer4_4.Add((20, 20), (iy, ix), (1, 1),
                          wx.LEFT | wx.EXPAND | wx.ADJUST_MINSIZE, 15)
        # ---- rows for orientation parameters (shown only for 2D) ----
        first_orient = True
        for item in self.model.dispersion.keys():
            if not self.magnetic_on:
                if item in self.model.magnetic_params:
                    continue
            if item in self.model.orientation_params:
                if item not in self.disp_cb_dict:
                    self.disp_cb_dict[item] = None
                name0 = "Distribution of " + item
                name1 = item + ".width"
                name2 = item + ".npts"
                name3 = item + ".nsigmas"
                if name1 not in self.model.details:
                    self.model.details[name1] = ["", None, None]
                iy += 1
                for p in self.model.dispersion[item].keys():
                    if p == "width":
                        ix = 0
                        cb = wx.CheckBox(self, wx.ID_ANY, name0, (10, 10))
                        cb.SetValue(CHECK_STATE)
                        cb.SetToolTipString("Check mark to fit")
                        wx.EVT_CHECKBOX(self, cb.GetId(), self.select_param)
                        self.sizer4_4.Add(cb, (iy, ix), (1, 1),
                                          wx.LEFT | wx.EXPAND | wx.ADJUST_MINSIZE, 5)
                        if self.data.__class__.__name__ == "Data2D" or \
                                self.enable2D:
                            cb.Show(True)
                        elif cb.IsShown():
                            cb.Hide()
                        ix = 1
                        value = self.model.getParam(name1)
                        ctl1 = ModelTextCtrl(self, wx.ID_ANY,
                                             size=(_BOX_WIDTH / 1.3, 20),
                                             style=wx.TE_PROCESS_ENTER)
                        poly_tip = "Absolute Sigma for %s." % item
                        ctl1.SetToolTipString(poly_tip)
                        ctl1.SetValue(str(format_number(value, True)))
                        if self.data.__class__.__name__ == "Data2D" or \
                                self.enable2D:
                            # update the header tooltip once, on the first
                            # visible orientation row
                            if first_orient:
                                values.SetLabel('PD[ratio], Sig[deg]')
                                poly_text = "PD(polydispersity for lengths):\n"
                                poly_text += "It should be a value between"
                                poly_text += "0 and 1\n"
                                poly_text += "Sigma for angles: \n"
                                poly_text += "It is the STD (ratio*mean)"
                                poly_text += " of the distribution.\n "
                                values.SetToolTipString(poly_text)
                                first_orient = False
                            ctl1.Show(True)
                        elif ctl1.IsShown():
                            ctl1.Hide()
                        self.sizer4_4.Add(ctl1, (iy, ix), (1, 1), wx.EXPAND)
                        # text to show error sign
                        ix = 2
                        text2 = wx.StaticText(self, wx.ID_ANY, '+/-')
                        self.sizer4_4.Add(text2, (iy, ix), (1, 1),
                                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
                        text2.Hide()
                        ix = 3
                        ctl2 = wx.TextCtrl(self, wx.ID_ANY,
                                           size=(_BOX_WIDTH / 1.3, 20),
                                           style=0)
                        self.sizer4_4.Add(ctl2, (iy, ix), (1, 1),
                                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
                        ctl2.Hide()
                        if self.data.__class__.__name__ == "Data2D" or \
                                self.enable2D:
                            if self.is_mac:
                                text2.Show(True)
                                ctl2.Show(True)
                        ix = 4
                        ctl3 = ModelTextCtrl(self, wx.ID_ANY,
                                             size=(_BOX_WIDTH / 2, 20),
                                             style=wx.TE_PROCESS_ENTER,
                                             text_enter_callback=self._onparamRangeEnter)
                        self.sizer4_4.Add(ctl3, (iy, ix), (1, 1),
                                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
                        ctl3.Hide()
                        ix = 5
                        ctl4 = ModelTextCtrl(self, wx.ID_ANY,
                                             size=(_BOX_WIDTH / 2, 20),
                                             style=wx.TE_PROCESS_ENTER,
                                             text_enter_callback=self._onparamRangeEnter)
                        self.sizer4_4.Add(ctl4, (iy, ix), (1, 1),
                                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
                        ctl4.Hide()
                        if self.data.__class__.__name__ == "Data2D" or \
                                self.enable2D:
                            ctl3.Show(True)
                            ctl4.Show(True)
                    elif p == "npts":
                        ix = 6
                        value = self.model.getParam(name2)
                        Tctl = ModelTextCtrl(self, wx.ID_ANY,
                                             size=(_BOX_WIDTH / 2.2, 20),
                                             style=wx.TE_PROCESS_ENTER)
                        Tctl.SetValue(str(format_number(value)))
                        if self.data.__class__.__name__ == "Data2D" or \
                                self.enable2D:
                            Tctl.Show(True)
                        else:
                            Tctl.Hide()
                        self.sizer4_4.Add(Tctl, (iy, ix), (1, 1),
                                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
                        self.fixed_param.append([None, name2, Tctl, None, None,
                                                 None, None, None])
                        self.orientation_params_disp.append([None, name2,
                                                             Tctl, None, None,
                                                             None, None, None])
                    elif p == "nsigmas":
                        ix = 7
                        value = self.model.getParam(name3)
                        Tct2 = ModelTextCtrl(self, wx.ID_ANY,
                                             size=(_BOX_WIDTH / 2.2, 20),
                                             style=wx.TE_PROCESS_ENTER)
                        Tct2.SetValue(str(format_number(value)))
                        if self.data.__class__.__name__ == "Data2D" or \
                                self.enable2D:
                            Tct2.Show(True)
                        else:
                            Tct2.Hide()
                        self.sizer4_4.Add(Tct2, (iy, ix), (1, 1),
                                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
                        self.fixed_param.append([None, name3, Tct2,
                                                 None, None, None, None, None])
                        self.orientation_params_disp.append([None, name3,
                                                             Tct2, None, None, None, None, None])
                # distribution-function selector for this parameter
                ix = 8
                disp_box = wx.ComboBox(self, wx.ID_ANY, size=(65, -1),
                                       style=wx.CB_READONLY, name='%s' % name1)
                for key, value in POLYDISPERSITY_MODELS.iteritems():
                    name_disp = str(key)
                    disp_box.Append(name_disp, value)
                disp_box.SetStringSelection("gaussian")
                wx.EVT_COMBOBOX(disp_box, wx.ID_ANY, self._on_disp_func)
                self.sizer4_4.Add(disp_box, (iy, ix), (1, 1), wx.EXPAND)
                self.fittable_param.append([cb, name1, ctl1, text2,
                                            ctl2, ctl3, ctl4, disp_box])
                self.orientation_params_disp.append([cb, name1, ctl1,
                                                     text2, ctl2, ctl3, ctl4, disp_box])
                if self.data.__class__.__name__ == "Data2D" or \
                        self.enable2D:
                    disp_box.Show(True)
                else:
                    disp_box.Hide()
        # ---- snapshot the new layout into the page state ----
        self.state.disp_cb_dict = copy.deepcopy(self.disp_cb_dict)
        self.state.model = self.model.clone()
        # save state into
        self._copy_parameters_state(self.parameters, self.state.parameters)
        self._copy_parameters_state(self.orientation_params_disp,
                                    self.state.orientation_params_disp)
        self._copy_parameters_state(self.fittable_param,
                                    self.state.fittable_param)
        self._copy_parameters_state(self.fixed_param, self.state.fixed_param)
        wx.PostEvent(self.parent,
                     StatusEvent(status=" Selected Distribution: Gaussian"))
        # Fill the list of fittable parameters
        self.get_all_checked_params()
        self.Layout()
def _onDraw(self, event):
"""
Update and Draw the model
"""
if self.model is None:
msg = "Please select a Model first..."
wx.MessageBox(msg, 'Info')
return
"""
if not self.data.is_data:
self.npts_x = self.Npts_total.GetValue()
self.Npts_fit.SetValue(self.npts_x)
self.create_default_data()
"""
flag, is_modified = self._update_paramv_on_fit()
wx.CallAfter(self._onparamEnter_helper, is_modified)
if not flag:
msg = "The parameters are invalid"
wx.PostEvent(self._manager.parent, StatusEvent(status=msg))
return
def _onFit(self, event):
"""
Allow to fit
"""
if event is not None:
event.Skip()
if self.fit_started:
self._StopFit()
self.fit_started = False
wx.CallAfter(self.set_fitbutton)
return
if self.data is None:
msg = "Please get Data first..."
wx.MessageBox(msg, 'Info')
wx.PostEvent(self._manager.parent,
StatusEvent(status="Fit: %s" % msg))
return
if self.model is None:
msg = "Please select a Model first..."
wx.MessageBox(msg, 'Info')
wx.PostEvent(self._manager.parent,
StatusEvent(status="Fit: %s" % msg, type="stop"))
return
if len(self.param_toFit) <= 0:
msg = "Select at least one parameter to fit"
wx.MessageBox(msg, 'Info')
wx.PostEvent(self._manager.parent,
StatusEvent(status=msg, type="stop"))
return
flag = self._update_paramv_on_fit()
if self.batch_on and not self._is_2D():
if not self._validate_Npts_1D():
return
if not flag:
msg = "Fitting range or parameters are invalid"
wx.PostEvent(self._manager.parent,
StatusEvent(status=msg, type="stop"))
return
self.select_param()
# Remove or do not allow fitting on the Q=0 point, especially
# when y(q=0)=None at x[0].
self.qmin_x = float(self.qmin.GetValue())
self.qmax_x = float(self.qmax.GetValue())
self._manager._reset_schedule_problem(value=0, uid=self.uid)
self._manager.schedule_for_fit(uid=self.uid, value=1)
self._manager.set_fit_range(uid=self.uid, qmin=self.qmin_x,
qmax=self.qmax_x)
# single fit
# self._manager.onFit(uid=self.uid)
self.fit_started = self._manager.onFit(uid=self.uid)
wx.CallAfter(self.set_fitbutton)
def _onFitHelp(self, event):
"""
Bring up the Full Fitting Documentation whenever the HELP button is
clicked.
Calls DocumentationWindow with the path of the location within the
documentation tree (after /doc/ ....". Note that when using old
versions of Wx (before 2.9) and thus not the release version of
installers, the help comes up at the top level of the file as
web browser does not pass anything past the # to the browser when it is
running "file:///...."
:param evt: Triggers on clicking the help button
"""
_TreeLocation = "user/sasgui/perspectives/fitting/fitting_help.html"
_doc_viewer = DocumentationWindow(self, wx.ID_ANY, _TreeLocation, "",
"General Fitting Help")
    def _onSmearHelp(self, event):
        """
        Bring up the instrumental resolution smearing documentation when
        the ? button in the smearing box is clicked.

        Calls DocumentationWindow with the path of the location within the
        documentation tree (after /doc/).  Note that with old versions of
        Wx (before 2.9), and thus not the release version of installers,
        the help comes up at the top level of the file because the web
        browser does not pass anything past the # when it is running
        "file:///....".

        :param event: Triggers on clicking the help button
        """
        _TreeLocation = "user/sasgui/perspectives/fitting/sm_help.html"
        _doc_viewer = DocumentationWindow(self, wx.ID_ANY, _TreeLocation, "",
                                          "Instrumental Resolution Smearing \
                                           Help")
def set_fitbutton(self):
"""
Set fit button label depending on the fit_started[bool]
"""
# Skip this feature if we are not on Windows
# NOTE: the is_mac data member actually means "is no Windows".
if self.is_mac:
return
if self.fit_started:
label = "Stop"
color = "red"
else:
label = "Fit"
color = "black"
# self.btFit.Enable(False)
self.btFit.SetLabel(label)
self.btFit.SetForegroundColour(color)
self.btFit.Enable(True)
def get_weight_flag(self):
"""
Get flag corresponding to a given weighting dI data.
"""
button_list = [self.dI_noweight,
self.dI_didata,
self.dI_sqrdata,
self.dI_idata]
flag = 1
for item in button_list:
if item.GetValue():
if button_list.index(item) == 0:
flag = 0 # dy = np.ones_like(dy_data)
elif button_list.index(item) == 1:
flag = 1 # dy = dy_data
elif button_list.index(item) == 2:
flag = 2 # dy = np.sqrt(np.abs(data))
elif button_list.index(item) == 3:
flag = 3 # dy = np.abs(data)
break
return flag
def _StopFit(self, event=None):
"""
Stop fit
"""
if event is not None:
event.Skip()
self._manager.stop_fit(self.uid)
self._manager._reset_schedule_problem(value=0)
self._on_fit_complete()
def rename_model(self):
"""
find a short name for model
"""
if self.model is not None:
self.model.name = "M" + str(self.index_model)
    def _on_select_model(self, event=None):
        """
        call back for model selection

        Rebuilds the parameter sizer for the newly selected model, resets
        dispersity/smearing state, notifies the fit manager with a model
        event, and — when the change came from the combo boxes — restores
        the previous parameter values and polydispersity setting.
        """
        self.Show(False)
        copy_flag = False
        is_poly_enabled = None
        if event is not None:
            # Only remember the old parameter values when the change came
            # from the form-factor / structure-factor / multiplicity boxes.
            if (event.GetEventObject() == self.formfactorbox
                    and self.structurebox.GetLabel() != 'None')\
                    or event.GetEventObject() == self.structurebox\
                    or event.GetEventObject() == self.multifactorbox:
                copy_flag = self.get_copy_params()
                is_poly_enabled = self.enable_disp.GetValue()
        self._on_select_model_helper()
        self.set_model_param_sizer(self.model)
        if self.model is None:
            self._set_bookmark_flag(False)
            self._keep.Enable(False)
            self._set_save_flag(False)
        # dispersity is switched off for the freshly selected model
        self.enable_disp.SetValue(False)
        self.disable_disp.SetValue(True)
        # TODO: should not have an untrapped exception when displaying disperser
        # TODO: do we need to create the disperser panel on every model change?
        # Note: if we fix this, then remove ID_DISPERSER_HELP from basepage
        try:
            self.set_dispers_sizer()
        except:
            pass
        # snapshot the combobox / smearing selections into the page state
        self.state.enable_disp = self.enable_disp.GetValue()
        self.state.disable_disp = self.disable_disp.GetValue()
        self.state.pinhole_smearer = self.pinhole_smearer.GetValue()
        self.state.slit_smearer = self.slit_smearer.GetValue()
        self.state.structurecombobox = self.structurebox.GetValue()
        self.state.formfactorcombobox = self.formfactorbox.GetValue()
        self.state.categorycombobox = self.categorybox.GetValue()
        self.enable_fit_button()
        if self.model is not None:
            self.m_name = self.model.name
            self.state.m_name = self.m_name
            self.rename_model()
            self._set_copy_flag(True)
            self._set_paste_flag(True)
            if self.data is not None:
                is_data = check_data_validity(self.data)
                if is_data:
                    self._set_bookmark_flag(not self.batch_on)
                    self._keep.Enable(not self.batch_on)
                    self._set_save_flag(True)
            # Setting smearing for cases with and without data.
            self._set_smear(self.data)
            # more disables for 2D
            self._set_smear_buttons()
            try:
                # update smearer sizer
                # This call for smearing set up caused double evaluation of
                # I(q) and double compilation as results
                # self.onSmear(None)
                temp_smear = None
                if not self.disable_smearer.GetValue():
                    # Set the smearer environments
                    temp_smear = self.current_smearer
            except:
                raise
                # error occured on chisqr computation
                # pass
            # event to post model to fit to fitting plugins
            (ModelEventbox, EVT_MODEL_BOX) = wx.lib.newevent.NewEvent()
            # set smearing value whether or not data contain the smearing info
            evt = ModelEventbox(model=self.model,
                                smearer=temp_smear,
                                enable_smearer=not self.disable_smearer.GetValue(),
                                qmin=float(self.qmin_x),
                                uid=self.uid,
                                caption=self.window_caption,
                                qmax=float(self.qmax_x))
            self._manager._on_model_panel(evt=evt)
            self.mbox_description.SetLabel("Model [ %s ]" %
                                           str(self.model.name))
            self.mbox_description.SetForegroundColour(wx.BLUE)
            self.state.model = self.model.clone()
            self.state.model.name = self.model.name
        if event is not None:
            # post state to fit panel
            new_event = PageInfoEvent(page=self)
            wx.PostEvent(self.parent, new_event)
            # update list of plugins if new plugin is available
            custom_model = CUSTOM_MODEL
            mod_cat = self.categorybox.GetStringSelection()
            if mod_cat == custom_model:
                temp = self.parent.update_model_list()
                if temp:
                    self.model_list_box = temp
                    current_val = self.formfactorbox.GetLabel()
                    pos = self.formfactorbox.GetSelection()
                    self._show_combox_helper()
                    self.formfactorbox.SetSelection(pos)
                    self.formfactorbox.SetValue(current_val)
            # when select a model only from guictr/button
            if is_poly_enabled is not None:
                self.enable_disp.SetValue(is_poly_enabled)
                self.disable_disp.SetValue(not is_poly_enabled)
                self._set_dipers_Param(event=None)
                self.state.enable_disp = self.enable_disp.GetValue()
                self.state.disable_disp = self.disable_disp.GetValue()
            # Keep the previous param values
            if copy_flag:
                self.get_paste_params(copy_flag)
                wx.CallAfter(self._onDraw, None)
        else:
            self._draw_model()
        if self.batch_on:
            # batch mode never exposes custom smearing or 2D masking
            self.slit_smearer.Enable(False)
            self.pinhole_smearer.Enable(False)
            self.btEditMask.Disable()
            self.EditMask_title.Disable()
        self.Show(True)
        self.SetupScrolling()
    def _onparamEnter(self, event):
        """
        when enter value on panel redraw model according to changed

        Validates the edited control, updates the active smearer if
        needed, and posts a page-state event when a value actually
        changed.
        """
        if self.model is None:
            msg = "Please select a Model first..."
            wx.MessageBox(msg, 'Info')
            return
        # default flag
        flag = False
        self.fitrange = True
        # get event object
        tcrtl = event.GetEventObject()
        # Clear msg if previously shown.
        msg = ""
        wx.PostEvent(self._manager.parent, StatusEvent(status=msg))
        if check_float(tcrtl):
            flag = self._onparamEnter_helper()
            self.show_npts2fit()
            if self.fitrange:
                temp_smearer = None
                if not self.disable_smearer.GetValue():
                    temp_smearer = self.current_smearer
                # set smearing value whether or not data contain the
                # smearing info
                if self.slit_smearer.GetValue():
                    flag1 = self.update_slit_smear()
                    flag = flag or flag1
                elif self.pinhole_smearer.GetValue():
                    flag1 = self.update_pinhole_smear()
                    flag = flag or flag1
                elif self.data.__class__.__name__ != "Data2D" and \
                        not self.enable2D:
                    enable_smearer = not self.disable_smearer.GetValue()
                    self._manager.set_smearer(smearer=temp_smearer,
                                              fid=self.data.id,
                                              uid=self.uid,
                                              qmin=float(self.qmin_x),
                                              qmax=float(self.qmax_x),
                                              enable_smearer=enable_smearer)
                if flag:
                    # self.compute_chisqr(smearer= temp_smearer)
                    # new state posted
                    if self.state_change:
                        # self._undo.Enable(True)
                        event = PageInfoEvent(page=self)
                        wx.PostEvent(self.parent, event)
                    self.state_change = False
            else:
                # invalid fit range: do nothing here:
                # msg already displayed in validate
                return
        else:
            self.save_current_state()
            msg = "Cannot Plot: Must enter a number!!! "
            wx.PostEvent(self._manager.parent, StatusEvent(status=msg))
            # NOTE(review): save_current_state() is called twice on this
            # branch — presumably idempotent; confirm before cleaning up.
            self.save_current_state()
            return
    def _onparamRangeEnter(self, event):
        """
        Check validity of value enter in the parameters range field

        Revalidates all parameter entries; the edited control is coloured
        white on success and pink on error, and the updated page state is
        posted to the parent.
        """
        tcrtl = event.GetEventObject()
        # Clear msg if previously shown.
        msg = ""
        wx.PostEvent(self._manager.parent, StatusEvent(status=msg))
        # Flag to register when a parameter has changed.
        is_modified = False
        if tcrtl.GetValue().lstrip().rstrip() != "":
            try:
                tcrtl.SetBackgroundColour(wx.WHITE)
                self._check_value_enter(self.fittable_param)
                self._check_value_enter(self.parameters)
            except:
                # sys.exc_value carries the exception just raised (Py2)
                tcrtl.SetBackgroundColour("pink")
                msg = "Model Error:wrong value entered : %s" % sys.exc_value
                wx.PostEvent(self._manager.parent, StatusEvent(status=msg))
                return
        else:
            # empty field: nothing to validate, just reset the colour
            tcrtl.SetBackgroundColour(wx.WHITE)
        # self._undo.Enable(True)
        self.save_current_state()
        event = PageInfoEvent(page=self)
        wx.PostEvent(self.parent, event)
        self.state_change = False
def qrang_set_focus(self, event=None):
"""
ON Qrange focus
"""
if event is not None:
event.Skip()
# tcrtl = event.GetEventObject()
self._validate_qrange(self.qmin, self.qmax)
def qrange_click(self, event):
"""
On Qrange textctrl click, make the qrange lines in the plot
"""
if event is not None:
event.Skip()
if self.data.__class__.__name__ == "Data2D":
return
is_click = event.LeftDown()
if is_click:
d_id = self.data.id
d_group_id = self.data.group_id
act_ctrl = event.GetEventObject()
wx.PostEvent(self._manager.parent,
PlotQrangeEvent(ctrl=[self.qmin, self.qmax], id=d_id,
group_id=d_group_id, leftdown=is_click,
active=act_ctrl))
def on_qrange_text(self, event):
"""
#On q range value updated. DO not combine with qrange_click().
"""
if event is not None:
event.Skip()
if self.data.__class__.__name__ == "Data2D":
return
act_ctrl = event.GetEventObject()
d_id = self.data.id
d_group_id = self.data.group_id
wx.PostEvent(self._manager.parent,
PlotQrangeEvent(ctrl=[self.qmin, self.qmax], id=d_id,
group_id=d_group_id, leftdown=False,
active=act_ctrl))
self._validate_qrange(self.qmin, self.qmax)
def on_key(self, event):
"""
On Key down
"""
event.Skip()
if self.data.__class__.__name__ == "Data2D":
return
ctrl = event.GetEventObject()
try:
x_data = float(ctrl.GetValue())
except:
return
key = event.GetKeyCode()
length = len(self.data.x)
indx = (np.abs(self.data.x - x_data)).argmin()
# return array.flat[idx]
if key == wx.WXK_PAGEUP or key == wx.WXK_NUMPAD_PAGEUP:
indx += 1
if indx >= length:
indx = length - 1
elif key == wx.WXK_PAGEDOWN or key == wx.WXK_NUMPAD_PAGEDOWN:
indx -= 1
if indx < 0:
indx = 0
else:
return
ctrl.SetValue(str(self.data.x[indx]))
self._validate_qrange(self.qmin, self.qmax)
def _onQrangeEnter(self, event):
"""
Check validity of value enter in the Q range field
"""
tcrtl = event.GetEventObject()
# Clear msg if previously shown.
msg = ""
wx.PostEvent(self._manager.parent, StatusEvent(status=msg))
# For theory mode
if not self.data.is_data:
self.npts_x = self.Npts_total.GetValue()
self.Npts_fit.SetValue(self.npts_x)
self.create_default_data()
# Flag to register when a parameter has changed.
if tcrtl.GetValue().lstrip().rstrip() != "":
try:
tcrtl.SetBackgroundColour(wx.WHITE)
# If qmin and qmax have been modified, update qmin and qmax
if self._validate_qrange(self.qmin, self.qmax):
tempmin = float(self.qmin.GetValue())
if tempmin != self.qmin_x:
self.qmin_x = tempmin
tempmax = float(self.qmax.GetValue())
if tempmax != self.qmax_x:
self.qmax_x = tempmax
else:
tcrtl.SetBackgroundColour("pink")
msg = "Model Error:wrong value entered : %s" % sys.exc_value
wx.PostEvent(self._manager.parent, StatusEvent(status=msg))
return
except:
tcrtl.SetBackgroundColour("pink")
msg = "Model Error:wrong value entered : %s" % sys.exc_value
wx.PostEvent(self._manager.parent, StatusEvent(status=msg))
return
# Check if # of points for theory model are valid(>0).
# check for 2d
if self.data.__class__.__name__ == "Data2D" or \
self.enable2D:
# set mask
radius = np.sqrt(self.data.qx_data * self.data.qx_data +
self.data.qy_data * self.data.qy_data)
index_data = ((self.qmin_x <= radius) & (radius <= self.qmax_x))
index_data = (index_data) & (self.data.mask)
index_data = (index_data) & (np.isfinite(self.data.data))
if len(index_data[index_data]) < 10:
msg = "Cannot Plot :No or too little npts in"
msg += " that data range!!! "
wx.PostEvent(self._manager.parent,
StatusEvent(status=msg))
return
else:
# self.data.mask = index_data
# self.Npts_fit.SetValue(str(len(self.data.mask)))
self.show_npts2fit()
else:
index_data = ((self.qmin_x <= self.data.x) &
(self.data.x <= self.qmax_x))
self.Npts_fit.SetValue(str(len(self.data.x[index_data])))
self.npts_x = self.Npts_total.GetValue()
self.create_default_data()
self._save_plotting_range()
else:
tcrtl.SetBackgroundColour("pink")
msg = "Model Error:wrong value entered!!!"
wx.PostEvent(self._manager.parent, StatusEvent(status=msg))
self._draw_model()
self.save_current_state()
event = PageInfoEvent(page=self)
wx.PostEvent(self.parent, event)
self.state_change = False
return
def _clear_Err_on_Fit(self):
"""
hide the error text control shown
after fitting
"""
if self.is_mac:
return
if hasattr(self, "text2_3"):
self.text2_3.Hide()
if len(self.parameters) > 0:
for item in self.parameters:
if item[0].IsShown():
# Skip the angle parameters if 1D data
if self.data.__class__.__name__ != "Data2D" and \
not self.enable2D:
if item in self.orientation_params:
continue
if item in self.param_toFit:
continue
# hide statictext +/-
if len(item) < 4:
continue
if item[3] is not None and item[3].IsShown():
item[3].Hide()
# hide textcrtl for error after fit
if item[4] is not None and item[4].IsShown():
item[4].Hide()
if len(self.fittable_param) > 0:
for item in self.fittable_param:
if item[0].IsShown():
# Skip the angle parameters if 1D data
if self.data.__class__.__name__ != "Data2D" and \
not self.enable2D:
if item in self.orientation_params:
continue
if item in self.param_toFit:
continue
if len(item) < 4:
continue
# hide statictext +/-
if item[3] is not None and item[3].IsShown():
item[3].Hide()
# hide textcrtl for error after fit
if item[4] is not None and item[4].IsShown():
item[4].Hide()
return
def _get_defult_custom_smear(self):
"""
Get the defult values for custum smearing.
"""
# get the default values
if self.dxl is None:
self.dxl = 0.0
if self.dxw is None:
self.dxw = ""
if self.dx_percent is None:
self.dx_percent = SMEAR_SIZE_H
def _get_smear_info(self):
"""
Get the smear info from data.
:return: self.smear_type, self.dq_l and self.dq_r,
respectively the type of the smear, dq_min and
dq_max for pinhole smear data
while dxl and dxw for slit smear
"""
# default
self.smear_type = None
self.dq_l = None
self.dq_r = None
data = self.data
if self.data is None:
return
elif self.data.__class__.__name__ == "Data2D" or \
self.enable2D:
if data.dqx_data is None or data.dqy_data is None:
return
elif self.current_smearer is not None \
and data.dqx_data.any() != 0 \
and data.dqx_data.any() != 0:
self.smear_type = "Pinhole2d"
self.dq_l = format_number(np.average(data.dqx_data))
self.dq_r = format_number(np.average(data.dqy_data))
return
else:
return
# check if it is pinhole smear and get min max if it is.
if data.dx is not None and np.any(data.dx):
self.smear_type = "Pinhole"
self.dq_l = data.dx[0]
self.dq_r = data.dx[-1]
# check if it is slit smear and get min max if it is.
elif data.dxl is not None or data.dxw is not None:
self.smear_type = "Slit"
if data.dxl is not None and np.all(data.dxl, 0):
self.dq_l = data.dxl[0]
if data.dxw is not None and np.all(data.dxw, 0):
self.dq_r = data.dxw[0]
# return self.smear_type,self.dq_l,self.dq_r
def _show_smear_sizer(self):
"""
Show only the sizers depending on smear selection
"""
# smear disabled
if self.disable_smearer.GetValue():
self.smear_description_none.Show(True)
# 2Dsmear
elif self._is_2D():
self.smear_description_accuracy_type.Show(True)
self.smear_accuracy.Show(True)
self.smear_description_accuracy_type.Show(True)
self.smear_description_2d.Show(True)
self.smear_description_2d_x.Show(True)
self.smear_description_2d_y.Show(True)
if self.pinhole_smearer.GetValue():
self.smear_pinhole_percent.Show(True)
# smear from data
elif self.enable_smearer.GetValue():
self.smear_description_dqdata.Show(True)
if self.smear_type is not None:
self.smear_description_smear_type.Show(True)
if self.smear_type == 'Slit':
self.smear_description_slit_height.Show(True)
self.smear_description_slit_width.Show(True)
elif self.smear_type == 'Pinhole':
self.smear_description_pin_percent.Show(True)
self.smear_description_smear_type.Show(True)
self.smear_description_type.Show(True)
self.smear_data_left.Show(True)
self.smear_data_right.Show(True)
# custom pinhole smear
elif self.pinhole_smearer.GetValue():
if self.smear_type == 'Pinhole':
self.smear_message_new_p.Show(True)
self.smear_description_pin_percent.Show(True)
self.smear_pinhole_percent.Show(True)
# custom slit smear
elif self.slit_smearer.GetValue():
self.smear_message_new_s.Show(True)
self.smear_description_slit_height.Show(True)
self.smear_slit_height.Show(True)
self.smear_description_slit_width.Show(True)
self.smear_slit_width.Show(True)
def _hide_all_smear_info(self):
"""
Hide all smearing messages in the set_smearer sizer
"""
self.smear_description_none.Hide()
self.smear_description_dqdata.Hide()
self.smear_description_type.Hide()
self.smear_description_smear_type.Hide()
self.smear_description_accuracy_type.Hide()
self.smear_description_2d_x.Hide()
self.smear_description_2d_y.Hide()
self.smear_description_2d.Hide()
self.smear_accuracy.Hide()
self.smear_data_left.Hide()
self.smear_data_right.Hide()
self.smear_description_pin_percent.Hide()
self.smear_pinhole_percent.Hide()
self.smear_description_slit_height.Hide()
self.smear_slit_height.Hide()
self.smear_description_slit_width.Hide()
self.smear_slit_width.Hide()
self.smear_message_new_p.Hide()
self.smear_message_new_s.Hide()
def _set_accuracy_list(self):
"""
Set the list of an accuracy in 2D custum smear:
Xhigh, High, Med, or Low
"""
# list of accuracy choices
list = ['Low', 'Med', 'High', 'Xhigh']
for idx in range(len(list)):
self.smear_accuracy.Append(list[idx], idx)
def _set_fun_box_list(self, fun_box):
"""
Set the list of func for multifunctional models
"""
# Check if it is multi_functional model
if self.model.__class__ not in self.model_list_box["Multi-Functions"] \
and not self.temp_multi_functional:
return None
# Get the func name list
list = self.model.fun_list
if len(list) == 0:
return None
# build function (combo)box
ind = 0
while(ind < len(list)):
for key, val in list.iteritems():
if (val == ind):
fun_box.Append(key, val)
break
ind += 1
def _on_select_accuracy(self, event):
"""
Select an accuracy in 2D custom smear: Xhigh, High, Med, or Low
"""
# event.Skip()
# Check if the accuracy is same as before
# self.smear2d_accuracy = event.GetEventObject().GetValue()
self.smear2d_accuracy = self.smear_accuracy.GetValue()
if self.pinhole_smearer.GetValue():
self.onPinholeSmear(event=None)
else:
self.onSmear(event=None)
if self.current_smearer is not None:
self.current_smearer.set_accuracy(accuracy=\
self.smear2d_accuracy)
event.Skip()
def _on_fun_box(self, event):
"""
Select an func: Erf,Rparabola,LParabola
"""
fun_val = None
fun_box = event.GetEventObject()
name = fun_box.Name
value = fun_box.GetValue()
if value in self.model.fun_list:
fun_val = self.model.fun_list[value]
self.model.setParam(name, fun_val)
# save state
self._copy_parameters_state(self.str_parameters,
self.state.str_parameters)
# update params
self._update_paramv_on_fit()
# draw
self._draw_model()
self.Refresh()
# get ready for new event
event.Skip()
def _onMask(self, event):
    """
    Open a modal panel that lets the user edit the 2D data mask.
    """
    # Imported here (not at module level) to avoid pulling the
    # plotting perspective in until the mask editor is requested.
    from sas.sasgui.guiframe.local_perspectives.plotting.masking import \
        MaskPanel as MaskDialog
    self.panel = MaskDialog(base=self, data=self.data, id=wx.NewId())
    self.panel.ShowModal()
def _draw_masked_model(self, event):
    """
    Redraw the model image with the (possibly edited) mask applied.

    Releases the mask dialog's modality and, depending on whether a
    model is set and the q-range is valid, either redraws the model,
    posts a status message, or asks the user to check the Q range.
    """
    # _update_paramv_on_fit() also validates the q-range
    is_valid_qrange = self._update_paramv_on_fit()
    if is_valid_qrange and self.model is not None:
        self.panel.MakeModal(False)
        event.Skip()
        # try re draw the model plot if it exists
        self._draw_model()
        self.show_npts2fit()
    elif self.model is None:
        self.panel.MakeModal(False)
        event.Skip()
        self.show_npts2fit()
        msg = "No model is found on updating MASK in the model plot... "
        wx.PostEvent(self._manager.parent, StatusEvent(status=msg))
    else:
        # invalid q-range: keep the dialog up and warn the user
        event.Skip()
        msg = ' Please consider your Q range, too.'
        self.panel.ShowMessage(msg)
def _set_smear(self, data):
    """
    Select and store the smearer matching the given data, and keep
    the enable/disable smearer radio buttons consistent with it.

    :param data: data whose resolution info drives the smearer choice
        (None is a no-op)
    """
    if data is None:
        return
    self.current_smearer = smear_selection(data, self.model)
    # flag is True when the user has explicitly disabled smearing
    flag = self.disable_smearer.GetValue()
    if self.current_smearer is None:
        self.enable_smearer.Disable()
    else:
        self.enable_smearer.Enable()
    if not flag:
        self.onSmear(None)
def get_view_mode(self):
    """
    Report the panel's current view mode.

    :return: True when the panel allows 2D, False for 1D
    """
    is_2d = self.enable2D
    return is_2d
def compute_data_set_range(self, data_list):
    """
    Widen the stored q-range so it covers every data set in the list
    and accumulate the total number of points.

    :param data_list: iterable of data objects (may be None or empty)
    :return: tuple (qmin, qmax, npts) covering the whole set
    """
    if data_list:
        for dataset in data_list:
            dmin, dmax, count = self.compute_data_range(dataset)
            # keep the widest range seen so far
            self.qmin_data_set = min(self.qmin_data_set, dmin)
            self.qmax_data_set = max(self.qmax_data_set, dmax)
            self.npts_data_set += count
    return self.qmin_data_set, self.qmax_data_set, self.npts_data_set
def compute_data_range(self, data):
    """
    Compute the minimum and maximum q of the data and the number of
    points it contains.

    :param data: 1D data (has ``x``) or 2D data (has ``data`` plus
        ``xmin``/``xmax``/``ymin``/``ymax``); may be None
    :return: tuple (qmin, qmax, npts); (None, None, None) when data
        is None
    :raises ValueError: when the data's range cannot be determined
    """
    qmin, qmax, npts = None, None, None
    if data is not None:
        if not hasattr(data, "data"):
            # 1D data: the range comes straight from the x values
            try:
                qmin = min(data.x)
                # Maximum value of data
                qmax = max(data.x)
                npts = len(data.x)
            except Exception:
                msg = "Unable to find min/max/length of \n data named %s" %\
                    data.filename
                wx.PostEvent(self._manager.parent, StatusEvent(status=msg,
                                                               info="error"))
                # raise ValueError(msg) is valid on both Python 2 and 3,
                # unlike the old "raise ValueError, msg" statement
                raise ValueError(msg)
        else:
            # 2D data: qmax is the largest |q| reachable at the corners
            qmin = 0
            try:
                x = max(math.fabs(data.xmin), math.fabs(data.xmax))
                y = max(math.fabs(data.ymin), math.fabs(data.ymax))
            except Exception:
                msg = "Unable to find min/max of \n data named %s" % \
                    data.filename
                wx.PostEvent(self._manager.parent, StatusEvent(status=msg,
                                                               info="error"))
                raise ValueError(msg)
            # Maximum value of data
            qmax = math.sqrt(x * x + y * y)
            npts = len(data.data)
    return qmin, qmax, npts
def set_data(self, data):
    """
    Replace the panel's current data with ``data`` and refresh every
    dependent widget: smearer buttons, weighting radios, point
    counts, mask editor availability and the model plot.

    :param data: new data object (1D or 2D); may be None
    """
    # NOTE: 'id' shadows the builtin here; it only serves as a None
    # placeholder for the old plot ids below.
    id = None
    flag = False
    is_data = False
    npts = 0
    # remember the previous plot ids so the old plot can be deleted
    try:
        old_id = self.data.id
        old_group_id = self.data.group_id
    except:
        old_id = id
        old_group_id = id
    if self.data is not None:
        is_data = check_data_validity(self.data)
        if not is_data and data is not None:
            flag = True
    if data is not None:
        if is_data:
            self.graph_id = self.data.group_id
            # flag means: the selection actually changed data sets
            flag = (data.id != self.data.id)
    self.data = data
    if check_data_validity(data):
        self.graph_id = data.group_id
        self.data.group_id = self.graph_id
    if self.data is None:
        data_name = ""
        self._set_bookmark_flag(False)
        self._keep.Enable(False)
        self._set_save_flag(False)
    else:
        if self.model is not None:
            self._set_bookmark_flag(not self.batch_on)
            self._keep.Enable(not self.batch_on)
        if self.data.is_data:
            self._set_save_flag(True)
            self._set_preview_flag(True)
        # more disables for 2D
        di_flag = False
        dq_flag = False
        if self.data.__class__.__name__ == "Data2D" or \
                self.enable2D:
            # 2D: slit smearing is not applicable
            self.slit_smearer.Disable()
            self.pinhole_smearer.Enable(True)
            self.default_mask = copy.deepcopy(self.data.mask)
            if self.data.err_data is not None \
                    and np.any(self.data.err_data):
                di_flag = True
            if self.data.dqx_data is not None \
                    and np.any(self.data.dqx_data):
                dq_flag = True
        else:
            self.slit_smearer.Enable(True)
            self.pinhole_smearer.Enable(True)
            if self.data.dy is not None and np.any(self.data.dy):
                di_flag = True
            if self.data.dx is not None and np.any(self.data.dx):
                dq_flag = True
            elif self.data.dxl is not None and np.any(self.data.dxl):
                dq_flag = True
        # dq_flag: data carries resolution info -> default smearing on
        if dq_flag:
            self.enable_smearer.Enable(True)
            self.enable_smearer.SetValue(True)
            self.disable_smearer.SetValue(False)
        else:
            self.enable_smearer.Disable()
            self.disable_smearer.Enable(True)
            self.disable_smearer.SetValue(True)
        # di_flag: data carries intensity errors -> weight by dI
        if di_flag:
            self.dI_didata.Enable(True)
            self.dI_didata.SetValue(True)
            self.weightbt_string = self.dI_didata.GetLabelText()
        else:
            self.dI_didata.Enable(False)
            self.dI_noweight.SetValue(True)
            self.weightbt_string = self.dI_noweight.GetLabelText()
        # Enable weighting radio buttons
        self.dI_noweight.Enable(True)
        self.dI_sqrdata.Enable(True)
        self.dI_idata.Enable(True)
        self.formfactorbox.Enable()
        self.structurebox.Enable()
        data_name = self.data.name
        _, _, npts = self.compute_data_range(self.data)
        # set maximum range for x in linear scale
        if not hasattr(self.data, "data"):  # Display only for 1D data fit
            self.btEditMask.Disable()
            self.EditMask_title.Disable()
        else:
            self.btEditMask.Enable()
            self.EditMask_title.Enable()
    self.Npts_total.SetValue(str(npts))
    # default:number of data points selected to fit
    self.Npts_fit.SetValue(str(npts))
    self.Npts_total.SetEditable(False)
    self.Npts_total.SetBackgroundColour(
        self.GetParent().GetBackgroundColour())
    self.Npts_total.Bind(wx.EVT_MOUSE_EVENTS, self._npts_click)
    self.pointsbox.Disable()
    self.dataSource.SetValue(data_name)
    self.state.data = data
    self.enable_fit_button()
    # send graph_id to page_finder
    self._manager.set_graph_id(uid=self.uid, graph_id=self.graph_id)
    # focus the page
    if check_data_validity(data):
        self.data_box_description.SetForegroundColour(wx.BLUE)
    if self.batch_on:
        # batch mode: smearing and mask editing are not available
        self.slit_smearer.Enable(False)
        self.pinhole_smearer.Enable(False)
        self.btEditMask.Disable()
        self.EditMask_title.Disable()
    self.on_smear_helper()
    self.on_set_focus(None)
    self.Refresh()
    # update model plot with new data information
    if flag:
        if self.data.__class__.__name__ == "Data2D":
            self.enable2D = True
            self.model_view.SetLabel("2D Mode")
        else:
            self.enable2D = False
            self.model_view.SetLabel("1D Mode")
        self.model_view.Disable()
        # replace data plot on combo box selection
        # by removing the previous selected data
        try:
            wx.PostEvent(self._manager.parent,
                         NewPlotEvent(action="delete",
                                      group_id=old_group_id, id=old_id))
        except:
            pass
        # plot the current selected data
        wx.PostEvent(self._manager.parent,
                     NewPlotEvent(action="check", plot=self.data,
                                  title=str(self.data.title)))
        self._draw_model()
def _npts_click(self, event):
    """
    Prevent further handling of the mouse event on Npts_total
    by not calling Skip().
    """
    # Npts_total is a read-only display; swallowing the event (no
    # Skip()) stops text selection/highlighting in the control.
    pass
def reset_page(self, state, first=False):
    """
    Restore the panel from a previously saved page state.

    :param state: saved page state to restore
    :param first: unused here; kept for interface compatibility
    """
    try:
        self.reset_page_helper(state)
        self.select_param()
        # Save state_fit
        self.save_current_state_fit()
    except:
        # a stale/incompatible state file: fall back to a clean combo
        # box and report the traceback to the status bar
        self._show_combox_helper()
        msg = "Error: This model state has missing or outdated "
        msg += "information.\n"
        msg += traceback.format_exc()
        wx.PostEvent(self._manager.parent,
                     StatusEvent(status=msg, info="error"))
    self._lay_out()
    self.Refresh()
def get_range(self):
    """
    Return the current fitting range.

    :return: tuple (qmin, qmax) as floats
    """
    qmin = float(self.qmin_x)
    qmax = float(self.qmax_x)
    return qmin, qmax
def get_npts2fit(self):
    """
    Count the data points falling inside the current q-range.

    :Note: used to normalize chi2 by the number of fitted points
    :return: point count, or None when no data is loaded
    """
    if self.data is None:
        return
    qmin, qmax = self.get_range()
    if self.data.__class__.__name__ == "Data2D" or self.enable2D:
        # 2D: keep finite, unmasked points whose |q| lies in range
        radius = np.sqrt(self.data.qx_data * self.data.qx_data +
                         self.data.qy_data * self.data.qy_data)
        selected = (self.qmin_x <= radius) & (radius <= self.qmax_x)
        selected = selected & (self.data.mask)
        selected = selected & (np.isfinite(self.data.data))
        return len(self.data.data[selected])
    # 1D: count the x values inside [qmin, qmax]
    return sum(1 for qx in self.data.x if qmin <= qx <= qmax)
def show_npts2fit(self):
    """
    Display the number of points selected for fitting in the panel.
    """
    count = self.get_npts2fit()
    self.Npts_fit.SetValue(str(count))
def get_chi2(self):
    """
    Return the chi2 value currently shown in the panel.
    """
    chi2_text = self.tcChi.GetValue()
    return chi2_text
def onsetValues(self, chisqr, p_name, out, cov):
    """
    Build the panel from the fit result.

    :param chisqr: Value of the goodness of fit metric
    :param p_name: the name of parameters
    :param out: list of parameter with the best value found during fitting
    :param cov: Covariance matrix
    :raises ValueError: when the fit failed (out is None / chi2 not
        finite) or a fitted parameter row is malformed
    """
    # make sure stop button to fit button all the time
    self._on_fit_complete()
    if out is None or not np.isfinite(chisqr):
        raise ValueError, "Fit error occured..."
    is_modified = False
    has_error = False
    dispersity = ''
    # Hide textctrl boxes of errors.
    self._clear_Err_on_Fit()
    # Check if chi2 is finite
    if chisqr is not None and np.isfinite(chisqr):
        # format chi2
        chi2 = format_number(chisqr, True)
        self.tcChi.SetValue(chi2)
        self.tcChi.Refresh()
    else:
        self.tcChi.SetValue("-")
    # Hide error title
    if self.text2_3.IsShown() and not self.is_mac:
        self.text2_3.Hide()
    try:
        if self.enable_disp.GetValue():
            if hasattr(self, "text_disp_1"):
                if self.text_disp_1 is not None and not self.is_mac:
                    self.text_disp_1.Hide()
    except:
        # no dispersion controls available
        dispersity = None
        pass
    i = 0
    # Set the panel when fit result are list
    for item in self.param_toFit:
        if len(item) > 5 and item is not None:
            if item[0].IsShown():
                # reset error value to initial state
                if not self.is_mac:
                    item[3].Hide()
                    item[4].Hide()
                # locate this parameter's index in the result arrays;
                # NOTE: if no name matches, ind is the last index
                for ind in range(len(out)):
                    if item[1] == p_name[ind]:
                        break
                if len(out) > 0 and out[ind] is not None:
                    val_out = format_number(out[ind], True)
                    item[2].SetValue(val_out)
                if(cov is not None and len(cov) == len(out)):
                    try:
                        if dispersity is not None:
                            if self.enable_disp.GetValue():
                                if hasattr(self, "text_disp_1"):
                                    if self.text_disp_1 is not None:
                                        if not self.text_disp_1.IsShown()\
                                                and not self.is_mac:
                                            self.text_disp_1.Show(True)
                    except:
                        pass
                    if cov[ind] is not None:
                        # show the uncertainty; NaN is flagged in red
                        if np.isfinite(float(cov[ind])):
                            val_err = format_number(cov[ind], True)
                            item[4].SetForegroundColour(wx.BLACK)
                        else:
                            val_err = 'NaN'
                            item[4].SetForegroundColour(wx.RED)
                        if not self.is_mac:
                            item[3].Show(True)
                            item[4].Show(True)
                        item[4].SetValue(val_err)
                        has_error = True
            i += 1
        else:
            raise ValueError, "onsetValues: Invalid parameters..."
    # Show error title when any errors displayed
    if has_error:
        if not self.text2_3.IsShown():
            self.text2_3.Show(True)
    # save current state
    self.save_current_state()
    # plot model ( when drawing, do not update chisqr value again)
    self._draw_model(update_chisqr=False, source='fit')
def onWeighting(self, event):
    """
    Handle a weighting radio-button event: remember which button was
    chosen and push the new weight into the fit problem.
    """
    button = event.GetEventObject()
    self.weightbt_string = button.GetLabelText()
    self._set_weight()
def _set_weight(self, is_2D=None):
"""
Set weight in fit problem
"""
# compute weight for the current data
flag_weight = self.get_weight_flag()
if is_2D is None:
is_2D = self._is_2D()
self._manager.set_fit_weight(uid=self.uid,
flag=flag_weight,
is2d=is_2D,
fid=None)
def onPinholeSmear(self, event):
    """
    Create a custom pinhole smear object that will change the way residuals
    are computed when fitting.

    :param event: radio-button event, or None when called
        programmatically (forces a rebuild)
    :Note: accuracy is given by strings 'High', 'Med', 'Low' FOR 2d,
        None for 1D
    """
    # Need update param values
    self._update_paramv_on_fit()
    if event is not None:
        tcrtl = event.GetEventObject()
        # event case of radio button
        if tcrtl.GetValue():
            # freshly selected: start from a zero percentage
            self.dx_percent = 0.0
            is_new_pinhole = True
        else:
            is_new_pinhole = self._is_changed_pinhole()
    else:
        is_new_pinhole = True
    # if any value is changed
    if is_new_pinhole:
        self._set_pinhole_smear()
    # hide all silt sizer
    self._hide_all_smear_info()
    # show relevant slit sizers
    self._show_smear_sizer()
    self.sizer_set_smearer.Layout()
    # we need FitInside here not just self.Layout to ensure all the sizers
    # end up with the necessasary space to in the scroll panel. In
    # particular the compute and fit buttons end up on top of each other
    # PDB Nov 28 2015.
    self.FitInside()
    if event is not None:
        event.Skip()
        # self._undo.Enable(True)
        self.save_current_state()
        event = PageInfoEvent(page=self)
        wx.PostEvent(self.parent, event)
def _is_changed_pinhole(self):
"""
check if any of pinhole smear is changed
:return: True or False
"""
# get the values
pin_percent = self.smear_pinhole_percent.GetValue()
# Check changes in slit heigth
try:
dx_percent = float(pin_percent)
except:
return True
if self.dx_percent != dx_percent:
return True
return False
def _set_pinhole_smear(self):
    """
    Build a custom pinhole smearer from the percentage entered in the
    panel and register it with the fit manager.

    :return: error message string, or None when the value is valid
    """
    # copy data so the stored data's resolution columns stay untouched
    data = copy.deepcopy(self.data)
    if self._is_2D():
        self.smear_type = 'Pinhole2d'
        len_data = len(data.data)
        data.dqx_data = np.zeros(len_data)
        data.dqy_data = np.zeros(len_data)
    else:
        self.smear_type = 'Pinhole'
        len_data = len(data.x)
        data.dx = np.zeros(len_data)
        data.dxl = None
        data.dxw = None
    msg = None
    get_pin_percent = self.smear_pinhole_percent
    if not check_float(get_pin_percent):
        get_pin_percent.SetBackgroundColour("pink")
        msg = "Model Error:wrong value entered!!!"
    else:
        if len_data < 2:
            len_data = 2
        self.dx_percent = float(get_pin_percent.GetValue())
        if self.dx_percent < 0:
            get_pin_percent.SetBackgroundColour("pink")
            msg = "Model Error:This value can not be negative!!!"
        elif self.dx_percent is not None:
            # dq is a fixed percentage of q at every point
            percent = self.dx_percent/100
            if self._is_2D():
                data.dqx_data[data.dqx_data == 0] = percent * data.qx_data
                data.dqy_data[data.dqy_data == 0] = percent * data.qy_data
            else:
                data.dx = percent * data.x
        self.current_smearer = smear_selection(data, self.model)
        # 2D need to set accuracy
        if self._is_2D():
            self.current_smearer.set_accuracy(
                accuracy=self.smear2d_accuracy)
    if msg is not None:
        wx.PostEvent(self._manager.parent, StatusEvent(status=msg))
    else:
        get_pin_percent.SetBackgroundColour("white")
    # set smearing value whether or not the data contain the smearing info
    enable_smearer = not self.disable_smearer.GetValue()
    self._manager.set_smearer(smearer=self.current_smearer,
                              fid=self.data.id,
                              qmin=float(self.qmin_x),
                              qmax=float(self.qmax_x),
                              enable_smearer=enable_smearer,
                              uid=self.uid)
    return msg
def update_pinhole_smear(self):
    """
    Called by kill_focus on the pinhole TextCtrl to push any change
    into the smearer.

    :return: False when a wrong value was entered, True otherwise
    """
    msg = None
    # only rebuild the smearer when the percentage actually changed
    if self._is_changed_pinhole():
        msg = self._set_pinhole_smear()
        wx.CallAfter(self.save_current_state)
    return msg is None
def onSlitSmear(self, event):
    """
    Create a custom slit smear object that will change the way residuals
    are computed when fitting.

    :param event: radio-button event, or None when called
        programmatically (forces a rebuild)
    """
    # Need update param values
    self._update_paramv_on_fit()
    # msg default
    msg = None
    # for event given
    if event is not None:
        tcrtl = event.GetEventObject()
        # event case of radio button
        if tcrtl.GetValue():
            # freshly selected: reset both slit lengths
            self.dxl = 0.0
            self.dxw = 0.0
            is_new_slit = True
        else:
            is_new_slit = self._is_changed_slit()
    else:
        is_new_slit = True
    # if any value is changed
    if is_new_slit:
        msg = self._set_slit_smear()
    # hide all silt sizer
    self._hide_all_smear_info()
    # show relevant slit sizers
    self._show_smear_sizer()
    self.sizer_set_smearer.Layout()
    # we need FitInside here not just self.Layout to ensure all the sizers
    # end up with the necessasary space to in the scroll panel. In
    # particular the compute and fit buttons end up on top of each other
    # PDB Nov 28 2015.
    self.FitInside()
    if event is not None:
        event.Skip()
        self.save_current_state()
        event = PageInfoEvent(page=self)
        wx.PostEvent(self.parent, event)
    if msg is not None:
        wx.PostEvent(self._manager.parent, StatusEvent(status=msg))
def _is_changed_slit(self):
"""
check if any of slit lengths is changed
:return: True or False
"""
# get the values
width = self.smear_slit_width.GetValue()
height = self.smear_slit_height.GetValue()
# check and change the box bg color if it was pink
# but it should be white now
# because this is the case that _set_slit_smear() will not handle
if height.lstrip().rstrip() == "":
self.smear_slit_height.SetBackgroundColour(wx.WHITE)
if width.lstrip().rstrip() == "":
self.smear_slit_width.SetBackgroundColour(wx.WHITE)
# Check changes in slit width
if width == "":
dxw = 0.0
else:
try:
dxw = float(width)
except:
return True
if self.dxw != dxw:
return True
# Check changes in slit heigth
if height == "":
dxl = 0.0
else:
try:
dxl = float(height)
except:
return True
if self.dxl != dxl:
return True
return False
def _set_slit_smear(self):
    """
    Build a custom slit smearer from the slit height/width entered in
    the panel and register it with the fit manager.  1D data only.

    :return: message to inform the user about the validity
        of the values entered for slit smear (None when valid)
    """
    if self.data.__class__.__name__ == "Data2D" or self.enable2D:
        return
    # make sure once more if it is smearer
    data = copy.deepcopy(self.data)
    data_len = len(data.x)
    data.dx = None
    data.dxl = None
    data.dxw = None
    msg = None
    # slit height: a parse failure on a non-empty entry is an error;
    # an empty entry silently falls back to zeros
    try:
        self.dxl = float(self.smear_slit_height.GetValue())
        data.dxl = self.dxl * np.ones(data_len)
        self.smear_slit_height.SetBackgroundColour(wx.WHITE)
    except:
        self.dxl = None
        data.dxl = np.zeros(data_len)
        if self.smear_slit_height.GetValue().lstrip().rstrip() != "":
            self.smear_slit_height.SetBackgroundColour("pink")
            msg = "Wrong value entered... "
        else:
            self.smear_slit_height.SetBackgroundColour(wx.WHITE)
    # slit width: same policy as height
    try:
        self.dxw = float(self.smear_slit_width.GetValue())
        self.smear_slit_width.SetBackgroundColour(wx.WHITE)
        data.dxw = self.dxw * np.ones(data_len)
    except:
        self.dxw = None
        data.dxw = np.zeros(data_len)
        if self.smear_slit_width.GetValue().lstrip().rstrip() != "":
            self.smear_slit_width.SetBackgroundColour("pink")
            msg = "Wrong Fit value entered... "
        else:
            self.smear_slit_width.SetBackgroundColour(wx.WHITE)
    self.current_smearer = smear_selection(data, self.model)
    # set smearing value whether or not the data contain the smearing info
    enable_smearer = not self.disable_smearer.GetValue()
    self._manager.set_smearer(smearer=self.current_smearer,
                              fid=self.data.id,
                              qmin=float(self.qmin_x),
                              qmax=float(self.qmax_x),
                              enable_smearer=enable_smearer,
                              uid=self.uid)
    return msg
def update_slit_smear(self):
    """
    Called by kill_focus on the slit TextCtrls to push any change
    into the smearer.

    :return: False when a wrong value was entered, True otherwise
    """
    msg = None
    # only rebuild the smearer when a slit length actually changed
    if self._is_changed_slit():
        msg = self._set_slit_smear()
    # self._undo.Enable(True)
    self.save_current_state()
    return msg is None
def onSmear(self, event):
    """
    Create a smear object that will change the way residuals
    are computed when fitting, and record the radio-button state.
    """
    if event is not None:
        event.Skip()
    if self.data is None:
        return
    # Need update param values
    self._update_paramv_on_fit()
    if self.model is not None:
        if self.data.is_data:
            self._manager.page_finder[self.uid].add_data(data=self.data)
    temp_smearer = self.on_smear_helper()
    self.sizer_set_smearer.Layout()
    # we need FitInside here not just self.Layout to ensure all the sizers
    # end up with the necessasary space to in the scroll panel. In
    # particular the compute and fit buttons end up on top of each other
    # PDB Nov 28 2015.
    self.FitInside()
    self._set_weight()
    # set smearing value whether or not the data contain the smearing info
    enable_smearer = not self.disable_smearer.GetValue()
    # deferred so the GUI stays responsive while the smearer is wired
    wx.CallAfter(self._manager.set_smearer, uid=self.uid,
                 smearer=temp_smearer,
                 fid=self.data.id,
                 qmin=float(self.qmin_x),
                 qmax=float(self.qmax_x),
                 enable_smearer=enable_smearer)
    # snapshot the four radio buttons into the saved page state
    self.state.enable_smearer = self.enable_smearer.GetValue()
    self.state.disable_smearer = self.disable_smearer.GetValue()
    self.state.pinhole_smearer = self.pinhole_smearer.GetValue()
    self.state.slit_smearer = self.slit_smearer.GetValue()
def on_smear_helper(self, update=False):
    """
    Helper for onSmear: refresh the smear info widgets, pick the
    smearer matching the data, and dispatch to the pinhole/slit
    handlers when those radio buttons are active.

    :param update: force or not to update the stored smearer
    :return: the smearer to use (may be None)
    """
    self._get_smear_info()
    # renew smear sizer
    if self.smear_type is not None:
        self.smear_description_smear_type.SetValue(str(self.smear_type))
        self.smear_data_left.SetValue(str(self.dq_l))
        self.smear_data_right.SetValue(str(self.dq_r))
    self._hide_all_smear_info()
    data = copy.deepcopy(self.data)
    # make sure once more if it is smearer
    temp_smearer = smear_selection(data, self.model)
    if self.current_smearer != temp_smearer or update:
        self.current_smearer = temp_smearer
    if self.enable_smearer.GetValue():
        # "use data smearing": report whether the data has any
        if self.current_smearer is None:
            wx.PostEvent(self._manager.parent,
                         StatusEvent(status="Data contains no smearing information"))
        else:
            wx.PostEvent(self._manager.parent,
                         StatusEvent(status="Data contains smearing information"))
            self.smear_data_left.Show(True)
            self.smear_data_right.Show(True)
        temp_smearer = self.current_smearer
    elif self.disable_smearer.GetValue():
        self.smear_description_none.Show(True)
    elif self.pinhole_smearer.GetValue():
        self.onPinholeSmear(None)
    elif self.slit_smearer.GetValue():
        self.onSlitSmear(None)
    self._show_smear_sizer()
    return temp_smearer
def on_complete_chisqr(self, event):
    """
    Display the resulting chi2 on the panel.

    :param event: activated by fitting/complete after draw; None or
        an event carrying an ``output`` value
    """
    try:
        # a missing event or a non-finite chi2 is shown as "-"
        if event is None or not np.isfinite(event.output):
            output = "-"
        else:
            output = event.output
        self.tcChi.SetValue(str(format_number(output, True)))
        self.state.tcChi = self.tcChi.GetValue()
    except:
        pass
def get_all_checked_params(self):
    """
    Collect every checked (and shown) parameter into the list of
    parameters to fit, then notify the fit panel and the manager.
    """
    self.param_toFit = []
    # scan the primary parameters first, then the dispersion ones
    for item in self.parameters + self.fittable_param:
        if item[0].GetValue() and item not in self.param_toFit \
                and item[0].IsShown():
            self.param_toFit.append(item)
    self.save_current_state_fit()
    event = PageInfoEvent(page=self)
    wx.PostEvent(self.parent, event)
    param2fit = [item[1] for item in self.param_toFit
                 if item[0] and item[0].IsShown()]
    self._manager.set_param2fit(self.uid, param2fit)
def select_param(self, event=None):
    """
    Select TextCtrl checked for fitting purpose and stores them
    in self.param_toFit=[] list.

    :param event: checkbox event; when given, the updated state is
        posted to the fit panel
    """
    self.param_toFit = []
    for item in self.parameters:
        # Skip the angle parameters if 1D data
        if self.data.__class__.__name__ != "Data2D" and\
                not self.enable2D:
            if item in self.orientation_params:
                continue
        # Select parameters to fit for list of primary parameters
        if item[0].GetValue() and item[0].IsShown():
            if not (item in self.param_toFit):
                self.param_toFit.append(item)
        else:
            # remove parameters from the fitting list
            if item in self.param_toFit:
                self.param_toFit.remove(item)
    # Select parameters to fit for list of fittable parameters
    # with dispersion
    for item in self.fittable_param:
        # Skip the angle parameters if 1D data
        if self.data.__class__.__name__ != "Data2D" and\
                not self.enable2D:
            if item in self.orientation_params:
                continue
        if item[0].GetValue() and item[0].IsShown():
            if not (item in self.param_toFit):
                self.param_toFit.append(item)
        else:
            # remove parameters from the fitting list
            if item in self.param_toFit:
                self.param_toFit.remove(item)
    # Calculate num. of angle parameters
    if self.data.__class__.__name__ == "Data2D" or \
            self.enable2D:
        len_orient_para = 0
    else:
        len_orient_para = len(self.orientation_params)  # assume even len
    # Total num. of angle parameters
    if len(self.fittable_param) > 0:
        len_orient_para *= 2
    self.save_current_state_fit()
    if event is not None:
        # post state to fit panel
        event = PageInfoEvent(page=self)
        wx.PostEvent(self.parent, event)
    # forward the names of the selected, visible parameters
    param2fit = []
    for item in self.param_toFit:
        if item[0] and item[0].IsShown():
            param2fit.append(item[1])
    self._manager.set_param2fit(self.uid, param2fit)
def set_model_param_sizer(self, model):
    """
    Build the panel from the model content.

    Lays out one row per model parameter (checkbox, value, error,
    min/max bounds, units) plus a separate section for orientation
    and magnetic parameters, then registers the checked parameters.

    :param model: the model selected in combo box for fitting purpose
    """
    self.sizer3.Clear(True)
    self.parameters = []
    self.str_parameters = []
    self.param_toFit = []
    self.fittable_param = []
    self.fixed_param = []
    self.orientation_params = []
    self.orientation_params_disp = []
    if model is None:
        self.sizer3.Layout()
        self.SetupScrolling()
        return
    box_description = wx.StaticBox(self, wx.ID_ANY, str("Model Parameters"))
    boxsizer1 = wx.StaticBoxSizer(box_description, wx.VERTICAL)
    sizer = wx.GridBagSizer(5, 5)
    # save the current model
    self.model = model
    keys = self.model.getParamList()
    # list of dispersion parameters
    self.disp_list = self.model.getDispParamList()

    def custom_compare(a, b):
        """
        Custom compare to order, first by alphabets then second by number.
        (Python 2 ``cmp``-style comparator; kept for reference since
        keys now come from an ordered dict.)
        """
        # number at the last digit
        a_last = a[len(a) - 1]
        b_last = b[len(b) - 1]
        # default
        num_a = None
        num_b = None
        # split the names
        a2 = a.lower().split('_')
        b2 = b.lower().split('_')
        # check length of a2, b2
        len_a2 = len(a2)
        len_b2 = len(b2)
        # check if it contains a int number(<10)
        try:
            num_a = int(a_last)
        except:
            pass
        try:
            num_b = int(b_last)
        except:
            pass
        # Put 'scale' near the top; happens
        # when numbered param name exists
        if a == 'scale':
            return -1
        # both have a number
        if num_a is not None and num_b is not None:
            if num_a > num_b:
                return -1
            # same number
            elif num_a == num_b:
                # different last names
                if a2[len_a2 - 1] != b2[len_b2 - 1] and num_a != 0:
                    return -cmp(a2[len_a2 - 1], b2[len_b2 - 1])
                else:
                    return cmp(a, b)
            else:
                return 1
        # one of them has a number
        elif num_a is not None:
            return 1
        elif num_b is not None:
            return -1
        # no numbers
        else:
            return cmp(a.lower(), b.lower())
    # keys obtained now from ordered dict, so commenting alphabetical
    # ordering keys.sort(custom_compare)
    # --- header row: Parameter | Value | Error | Min | Max | [Units]
    iy = 0
    ix = 0
    sizer.Add(wx.StaticText(self, wx.ID_ANY, 'Parameter'),
              (iy, ix), (1, 1), wx.EXPAND | wx.ADJUST_MINSIZE, 0)
    ix += 1
    self.text2_2 = wx.StaticText(self, wx.ID_ANY, 'Value')
    sizer.Add(self.text2_2, (iy, ix), (1, 1),
              wx.EXPAND | wx.ADJUST_MINSIZE, 0)
    ix += 2
    self.text2_3 = wx.StaticText(self, wx.ID_ANY, 'Error')
    sizer.Add(self.text2_3, (iy, ix), (1, 1),
              wx.EXPAND | wx.ADJUST_MINSIZE, 0)
    if not self.is_mac:
        self.text2_3.Hide()
    ix += 1
    self.text2_min = wx.StaticText(self, wx.ID_ANY, 'Min')
    sizer.Add(self.text2_min, (iy, ix), (1, 1),
              wx.EXPAND | wx.ADJUST_MINSIZE, 0)
    # self.text2_min.Hide()
    ix += 1
    self.text2_max = wx.StaticText(self, wx.ID_ANY, 'Max')
    sizer.Add(self.text2_max, (iy, ix), (1, 1),
              wx.EXPAND | wx.ADJUST_MINSIZE, 0)
    # self.text2_max.Hide()
    ix += 1
    self.text2_4 = wx.StaticText(self, wx.ID_ANY, '[Units]')
    sizer.Add(self.text2_4, (iy, ix), (1, 1),
              wx.EXPAND | wx.ADJUST_MINSIZE, 0)
    self.text2_4.Hide()
    CHECK_STATE = False
    # --- one row per ordinary (non-dispersion, non-orientation) param
    for item in keys:
        if item not in self.disp_list and not item in \
                self.model.orientation_params:
            # prepare a spot to store errors
            if item not in self.model.details:
                self.model.details[item] = ["", None, None]
            iy += 1
            ix = 0
            if (self.model.__class__ in
                    self.model_list_box["Multi-Functions"] or
                    self.temp_multi_functional)\
                    and (item in self.model.non_fittable):
                # non-fittable multi-function parameter: plain label
                # plus either a function combo box or a text entry
                non_fittable_name = wx.StaticText(self, wx.ID_ANY, item)
                sizer.Add(non_fittable_name, (iy, ix), (1, 1),
                          wx.LEFT | wx.EXPAND | wx.ADJUST_MINSIZE, 21)
                # add parameter value
                ix += 1
                value = self.model.getParam(item)
                if len(self.model.fun_list) > 0:
                    # num = item.split('_')[1][5:7]
                    fun_box = wx.ComboBox(self, wx.ID_ANY, size=(100, -1),
                                          style=wx.CB_READONLY, name='%s' % item)
                    self._set_fun_box_list(fun_box)
                    fun_box.SetSelection(0)
                    # self.fun_box.SetToolTipString("A function
                    # describing the interface")
                    wx.EVT_COMBOBOX(fun_box, wx.ID_ANY, self._on_fun_box)
                else:
                    fun_box = ModelTextCtrl(self, wx.ID_ANY,
                                            size=(_BOX_WIDTH, 20),
                                            style=wx.TE_PROCESS_ENTER, name='%s' % item)
                    fun_box.SetToolTipString(
                        "Hit 'Enter' after typing to update the plot.")
                    fun_box.SetValue(format_number(value, True))
                sizer.Add(fun_box, (iy, ix), (1, 1), wx.EXPAND)
                self.str_parameters.append([None, item, fun_box,
                                            None, None, None,
                                            None, None])
            else:
                # add parameters name with checkbox for selecting to fit
                cb = wx.CheckBox(self, wx.ID_ANY, item)
                cb.SetValue(CHECK_STATE)
                cb.SetToolTipString(" Check mark to fit.")
                # cb.SetValue(True)
                wx.EVT_CHECKBOX(self, cb.GetId(), self.select_param)
                sizer.Add(cb, (iy, ix), (1, 1),
                          wx.LEFT | wx.EXPAND | wx.ADJUST_MINSIZE, 5)
                # add parameter value
                ix += 1
                value = self.model.getParam(item)
                ctl1 = ModelTextCtrl(self, wx.ID_ANY, size=(_BOX_WIDTH, 20),
                                     style=wx.TE_PROCESS_ENTER)
                ctl1.SetToolTipString(
                    "Hit 'Enter' after typing to update the plot.")
                ctl1.SetValue(format_number(value, True))
                sizer.Add(ctl1, (iy, ix), (1, 1), wx.EXPAND)
                # text to show error sign
                ix += 1
                text2 = wx.StaticText(self, wx.ID_ANY, '+/-')
                sizer.Add(text2, (iy, ix), (1, 1),
                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
                if not self.is_mac:
                    text2.Hide()
                ix += 1
                ctl2 = wx.TextCtrl(self, wx.ID_ANY,
                                   size=(_BOX_WIDTH / 1.2, 20), style=0)
                sizer.Add(ctl2, (iy, ix), (1, 1),
                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
                if not self.is_mac:
                    ctl2.Hide()
                ix += 1
                ctl3 = ModelTextCtrl(self, wx.ID_ANY,
                                     size=(_BOX_WIDTH / 1.9, 20),
                                     style=wx.TE_PROCESS_ENTER,
                                     text_enter_callback=self._onparamRangeEnter)
                min_bound = self.model.details[item][1]
                if min_bound is not None:
                    ctl3.SetValue(format_number(min_bound, True))
                sizer.Add(ctl3, (iy, ix), (1, 1),
                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
                ix += 1
                ctl4 = ModelTextCtrl(self, wx.ID_ANY,
                                     size=(_BOX_WIDTH / 1.9, 20),
                                     style=wx.TE_PROCESS_ENTER,
                                     text_enter_callback=self._onparamRangeEnter)
                max_bound = self.model.details[item][2]
                if max_bound is not None:
                    ctl4.SetValue(format_number(max_bound, True))
                sizer.Add(ctl4, (iy, ix), (1, 1),
                          wx.EXPAND | wx.FIXED_MINSIZE, 0)
                ix += 1
                # Units
                if item in self.model.details:
                    units = wx.StaticText(self, wx.ID_ANY,
                                          self.model.details[item][0], style=wx.ALIGN_LEFT)
                else:
                    units = wx.StaticText(self, wx.ID_ANY, "",
                                          style=wx.ALIGN_LEFT)
                sizer.Add(units, (iy, ix), (1, 1),
                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
                self.parameters.append([cb, item, ctl1,
                                        text2, ctl2, ctl3, ctl4, units])
    iy += 1
    sizer.Add((10, 10), (iy, ix), (1, 1),
              wx.LEFT | wx.EXPAND | wx.ADJUST_MINSIZE, 15)
    # type can be either Guassian or Array
    if len(self.model.dispersion.values()) > 0:
        type = self.model.dispersion.values()[0]["type"]
    else:
        type = "Gaussian"
    iy += 1
    ix = 0
    # Add tile for orientational angle
    for item in keys:
        if item in self.model.orientation_params:
            orient_angle = wx.StaticText(self, wx.ID_ANY, '[For 2D only]:')
            mag_on_button = wx.Button(self, wx.ID_ANY, "Magnetic ON")
            mag_on_button.SetToolTipString("Turn Pol Beam/Mag scatt on/off")
            mag_on_button.Bind(wx.EVT_BUTTON, self._on_mag_on)
            mag_angle_help_button = wx.Button(self, wx.ID_ANY,
                                              "Magnetic angles?")
            mag_angle_help_button.SetToolTipString("see angle definitions")
            mag_help_button = wx.Button(self, wx.ID_ANY, "Mag HELP")
            mag_help_button.SetToolTipString("Help on pol beam/mag fitting")
            mag_help_button.Bind(wx.EVT_BUTTON, self._on_mag_help)
            mag_angle_help_button.Bind(wx.EVT_BUTTON,
                                       self._on_mag_angle_help)
            sizer.Add(orient_angle, (iy, ix), (1, 1),
                      wx.LEFT | wx.EXPAND | wx.ADJUST_MINSIZE, 15)
            iy += 1
            sizer.Add(mag_on_button, (iy, ix), (1, 1),
                      wx.LEFT | wx.EXPAND | wx.ADJUST_MINSIZE, 15)
            ix += 1
            sizer.Add(mag_angle_help_button, (iy, ix), (1, 1),
                      wx.LEFT | wx.EXPAND | wx.ADJUST_MINSIZE, 15)
            sizer.Add(mag_help_button, (iy, ix + 1), (1, 1),
                      wx.LEFT | wx.EXPAND | wx.ADJUST_MINSIZE, 15)
            # handle the magnetic buttons
            # clean this up so that assume mag is off then turn
            # all buttons on IF mag has mag and has 2D
            if not self._has_magnetic:
                mag_on_button.Show(False)
            elif not self.data.__class__.__name__ == "Data2D":
                mag_on_button.Show(False)
            else:
                mag_on_button.Show(True)
            mag_help_button.Show(False)
            mag_angle_help_button.Show(False)
            if mag_on_button.IsShown():
                if self.magnetic_on:
                    mag_on_button.SetLabel("Magnetic OFF")
                    mag_help_button.Show(True)
                    mag_angle_help_button.Show(True)
                else:
                    mag_on_button.SetLabel("Magnetic ON")
                    mag_help_button.Show(False)
                    mag_angle_help_button.Show(False)
            if not self.data.__class__.__name__ == "Data2D" and \
                    not self.enable2D:
                orient_angle.Hide()
            else:
                orient_angle.Show(True)
            # only one orientation title/button row is needed
            break
    # For Gaussian only
    if type.lower() != "array":
        for item in self.model.orientation_params:
            if not self.magnetic_on:
                if item in self.model.magnetic_params:
                    continue
            if item not in self.disp_list:
                # prepare a spot to store min max
                if item not in self.model.details:
                    self.model.details[item] = ["", None, None]
                iy += 1
                ix = 0
                # add parameters name with checkbox for selecting to fit
                cb = wx.CheckBox(self, wx.ID_ANY, item)
                cb.SetValue(CHECK_STATE)
                cb.SetToolTipString("Check mark to fit")
                wx.EVT_CHECKBOX(self, cb.GetId(), self.select_param)
                if self.data.__class__.__name__ == "Data2D" or \
                        self.enable2D:
                    cb.Show(True)
                else:
                    cb.Hide()
                sizer.Add(cb, (iy, ix), (1, 1),
                          wx.LEFT | wx.EXPAND | wx.ADJUST_MINSIZE, 5)
                # add parameter value
                ix += 1
                value = self.model.getParam(item)
                ctl1 = ModelTextCtrl(self, -1, size=(_BOX_WIDTH, 20),
                                     style=wx.TE_PROCESS_ENTER)
                ctl1.SetToolTipString(
                    "Hit 'Enter' after typing to update the plot.")
                ctl1.SetValue(format_number(value, True))
                if self.data.__class__.__name__ == "Data2D" or \
                        self.enable2D:
                    ctl1.Show(True)
                else:
                    ctl1.Hide()
                sizer.Add(ctl1, (iy, ix), (1, 1), wx.EXPAND)
                # text to show error sign
                ix += 1
                text2 = wx.StaticText(self, -1, '+/-')
                sizer.Add(text2, (iy, ix), (1, 1),
                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
                text2.Hide()
                ix += 1
                ctl2 = wx.TextCtrl(self, -1,
                                   size=(_BOX_WIDTH / 1.2, 20), style=0)
                sizer.Add(ctl2, (iy, ix), (1, 1),
                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
                ctl2.Hide()
                ix += 1
                ctl3 = ModelTextCtrl(self, -1,
                                     size=(_BOX_WIDTH / 1.8, 20),
                                     style=wx.TE_PROCESS_ENTER,
                                     text_enter_callback=self._onparamRangeEnter)
                sizer.Add(ctl3, (iy, ix), (1, 1),
                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
                ctl3.Hide()
                ix += 1
                ctl4 = ModelTextCtrl(self, -1,
                                     size=(_BOX_WIDTH / 1.8, 20),
                                     style=wx.TE_PROCESS_ENTER,
                                     text_enter_callback=self._onparamRangeEnter)
                sizer.Add(ctl4, (iy, ix), (1, 1),
                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
                ctl4.Hide()
                if self.data.__class__.__name__ == "Data2D" or \
                        self.enable2D:
                    if self.is_mac:
                        text2.Show(True)
                        ctl2.Show(True)
                    ctl3.Show(True)
                    ctl4.Show(True)
                ix += 1
                # Units
                if item in self.model.details:
                    units = wx.StaticText(self, -1,
                                          self.model.details[item][0],
                                          style=wx.ALIGN_LEFT)
                else:
                    units = wx.StaticText(self, -1, "",
                                          style=wx.ALIGN_LEFT)
                if self.data.__class__.__name__ == "Data2D" or \
                        self.enable2D:
                    units.Show(True)
                else:
                    units.Hide()
                sizer.Add(units, (iy, ix), (1, 1),
                          wx.EXPAND | wx.ADJUST_MINSIZE, 0)
                self.parameters.append([cb, item, ctl1,
                                        text2, ctl2, ctl3, ctl4, units])
                self.orientation_params.append([cb, item, ctl1,
                                                text2, ctl2, ctl3, ctl4, units])
    iy += 1
    box_description.SetForegroundColour(wx.BLUE)
    # Display units text on panel
    for item in keys:
        if item in self.model.details:
            self.text2_4.Show()
    # Fill the list of fittable parameters
    self.get_all_checked_params()
    self.save_current_state_fit()
    boxsizer1.Add(sizer)
    self.sizer3.Add(boxsizer1, 0, wx.EXPAND | wx.ALL, 10)
    self.sizer3.Layout()
    self.Layout()
def on_right_down(self, event):
    """
    Handle a mouse event carrying modifier-key state.

    Alt+Shift together set ``flag`` True; Alt or Shift alone set it
    False; any other combination is ignored.  When ``flag`` is False a
    warning StatusEvent is posted to the manager's parent window.

    :param event: wx mouse event providing AltDown()/ShiftDown()
    """
    if self.data is None:
        return
    # Figuring out key combo: Cmd for copy, Alt for paste
    if event.AltDown() and event.ShiftDown():
        flag = True
    elif event.AltDown() or event.ShiftDown():
        flag = False
    else:
        return
    # make event free
    event.Skip()
    # messages depending on the flag
    if not flag:
        infor = 'warning'
        # inform msg to wx
        # NOTE(review): `msg` is not defined anywhere in this method, so
        # this branch raises NameError if reached -- confirm the intended
        # message text against version history.
        wx.PostEvent(self._manager.parent,
                     StatusEvent(status=msg, info=infor))
def _onModel2D(self, event):
    """
    Toggle the panel between the 1D and 2D model views.

    Flips ``enable2D``, relabels the toggle button, regenerates the
    default data set, rebuilds the parameter/dispersion sizers and
    redraws the model.  The panel is hidden during the rebuild to
    avoid flicker.

    :param event: wx button event (unused)
    """
    if self.model_view.GetLabelText() == "Show 2D":
        self.model_view.SetLabel("Show 1D")
        self.enable2D = True
    else:
        self.model_view.SetLabel("Show 2D")
        self.enable2D = False
    self.Show(False)
    self.create_default_data()
    # push the regenerated data to the manager before rebuilding sizers
    self._manager.store_data(self.uid, data_list=[self.data])
    self.set_model_param_sizer(self.model)
    self._set_sizer_dispersion()
    self._set_weight(is_2D=self.enable2D)
    self._set_smear_buttons()
    self.Show(True)
    self.SetupScrolling()
    self._draw_model()
    # remember the current view mode in the saved page state
    self.state.enable2D = copy.deepcopy(self.enable2D)
def _set_smear_buttons(self):
    """
    Set smearer radio buttons according to the 1D/2D view mode.

    Slit smearing is disabled for 2D data; pinhole smearing is
    available in both modes.
    """
    # more disables for 2D
    if self.data.__class__.__name__ == "Data2D" or \
                    self.enable2D:
        self.slit_smearer.Disable()
        self.pinhole_smearer.Enable(True)
        # keep a pristine copy of the data mask so it can be restored
        self.default_mask = copy.deepcopy(self.data.mask)
    else:
        self.slit_smearer.Enable(True)
        self.pinhole_smearer.Enable(True)
class BGTextCtrl(wx.TextCtrl):
    """
    Read-only text control for displaying output values.

    Editing is disabled, the background colour is copied from the
    grand-parent window so the widget appears grayed out, and all
    mouse events are swallowed so the user cannot select the text.
    """

    def __init__(self, *args, **kwds):
        wx.TextCtrl.__init__(self, *args, **kwds)
        self.SetEditable(False)
        bg_colour = self.GetParent().parent.GetBackgroundColour()
        self.SetBackgroundColour(bg_colour)
        # Intercept every mouse event so text cannot be highlighted;
        # the handler deliberately never calls event.Skip(), which
        # stops any further processing of the event.
        self.Bind(wx.EVT_MOUSE_EVENTS, self._click)

    def _click(self, event):
        """Discard the mouse event (no Skip() -> no further handling)."""
|
lewisodriscoll/sasview
|
src/sas/sasgui/perspectives/fitting/fitpage.py
|
Python
|
bsd-3-clause
| 129,061
|
[
"Gaussian"
] |
04d3c1cfa55eded8d7edbd7e97451749691bc325c876bcfaaef9eceef4e53539
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import uuid
class Migration(migrations.Migration):
    """Make ``visit.key`` a unique, non-editable UUID field."""

    # must run after the migration that introduced the key field
    dependencies = [
        ('visit', '0037_visit_key'),
    ]

    operations = [
        migrations.AlterField(
            model_name='visit',
            name='key',
            field=models.UUIDField(default=uuid.uuid4, unique=True, editable=False),
        ),
    ]
|
koebbe/homeworks
|
visit/migrations/0038_auto_20150722_2116.py
|
Python
|
mit
| 433
|
[
"VisIt"
] |
3aa883eae7f78056b77b8f4014affa3bf963866a26f1695053053cbee23de18f
|
# prepare_receptors.py
# Create the .pdbqt files and receptors coordinates file of receptors
# for VinaMPI Docking
# Usage:
# python prepare_receptors.py
#
# Specify the correct paths of prepare_receptor4.py pythonsh VMD
# Make sure the get_AS_grid.tcl file uses the correct residue number
# of the active sites
# Run the scripts in the folder contains all receptors .pdb file
#
# Output: .pdbqt file for each .pdb
#         receptors.txt: used in VinaMPI
#
# Authors: Xiaofei Zhang, Sally R. Ellingson
# Date: June 21 2016
import os, glob, sys, shlex, subprocess

mypath = os.path.realpath(__file__)
tclpath = os.path.split(mypath)[0] + os.path.sep + 'tcl' + os.path.sep
# Set the path of prepare_receptor4.py
prepReceptor = '/Users/Xiaofei/Documents/2016SpringRA/mgltools_i86Darwin9_1.5.6/MGLToolsPckgs/AutoDockTools/Utilities24/prepare_receptor4.py'
# Set the path of pythonsh
pythonsh = '/Users/Xiaofei/Documents/2016SpringRA/mgltools_i86Darwin9_1.5.6/bin/pythonsh'
# Set the path of VMD
vmd = '/Volumes/VMD-1.9.2/VMD 1.9.2.app/Contents/vmd/vmd_MACOSXX86'

receptor_list = glob.glob('*.pdb')

# Create pdbqt files.  Argument lists (no shell) are used so that paths
# containing spaces -- such as the VMD app-bundle path above -- are
# passed intact, and nothing is interpreted by a shell.
for pdbfile in receptor_list:
    pdbqtfile = pdbfile[:-3] + 'pdbqt'
    subprocess.call([pythonsh, prepReceptor, '-r', pdbfile,
                     '-o', pdbqtfile, '-A', 'hydrogens'])

# Create receptors.txt file.  Only the header line is written here; the
# per-receptor grid values are produced by running get_AS_grid.tcl under
# VMD for each structure -- TODO confirm the tcl script appends to the
# file itself.
with open('receptors.txt', 'w') as f:
    f.write('receptor size_x size_y size_z center_x center_y center_z cpu=1\n')
    for pdbfile in receptor_list:
        pdbid = pdbfile[:-4]
        # VMD reads the tcl script on stdin (this replaces the former
        # shell "< script" redirection) and receives the pdb id via -args.
        with open(tclpath + 'get_AS_grid.tcl') as tcl_script:
            subprocess.call([vmd, '-dispdev', 'text', '-eofexit',
                             '-args', pdbid], stdin=tcl_script)
|
Xiaofei-Zhang/NAMD_Docking_pipeline
|
pre_DOCKING/prepare_receptors.py
|
Python
|
mit
| 1,588
|
[
"VMD"
] |
f323b1142ed131671bab751f5e483d65892b20a755fb07ecd16edd83fd2fb211
|
#!/usr/bin/env python
import numpy
import subprocess
import vtk
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
class fluid:
    """
    Navier-Stokes two-dimensional fluid flow simulation object.

    Holds the simulation parameters and the P/U/V field arrays, and
    performs the raw binary I/O used to exchange state with the
    companion C solver ``ns2dfd`` (see :meth:`read`/:meth:`write`,
    whose field order must match the C program exactly).
    """

    def __init__(self, name = 'unnamed'):
        '''
        A Navier-Stokes two-dimensional fluid flow simulation object. Most
        simulation values are assigned default values upon initialization.
        :param name: Simulation identifier
        :type name: str
        '''
        self.sim_id = name
        # each setter below installs its documented default value
        self.init_grid()
        self.current_time()
        self.end_time()
        self.file_time()
        self.safety_factor()
        self.max_iterations()
        self.tolerance_criteria()
        self.relaxation_parameter()
        self.upwind_differencing_factor()
        self.boundary_conditions()
        self.reynolds_number()
        self.gravity()

    def init_grid(self, nx = 10, ny = 10, dx = 0.1, dy = 0.1):
        '''
        Initializes the numerical grid.
        :param nx: Fluid grid width in number of cells
        :type nx: int
        :param ny: Fluid grid height in number of cells
        :type ny: int
        :param dx: Grid cell width (meters)
        :type dx: float
        :param dy: Grid cell height (meters)
        :type dy: float
        '''
        self.nx = numpy.asarray(nx)
        self.ny = numpy.asarray(ny)
        self.dx = numpy.asarray(dx)
        self.dy = numpy.asarray(dy)
        # fields are padded with one ghost cell on every side (+2)
        self.P = numpy.zeros((nx+2, ny+2))
        self.U = numpy.zeros((nx+2, ny+2))
        self.V = numpy.zeros((nx+2, ny+2))

    def current_time(self, t = 0.0):
        '''
        Set the current simulation time. Default value = 0.0.
        :param t: The current time value.
        :type t: float
        '''
        self.t = numpy.asarray(t)

    def end_time(self, t_end = 1.0):
        '''
        Set the simulation end time.
        :param t_end: The time when to stop the simulation.
        :type t_end: float
        '''
        self.t_end = numpy.asarray(t_end)

    def file_time(self, t_file = 0.1):
        '''
        Set the simulation output file interval.
        :param t_file: The interval between output files.
        :type t_file: float
        '''
        self.t_file = numpy.asarray(t_file)

    def safety_factor(self, tau = 0.5):
        '''
        Define the safety factor for the time step size control. Default value =
        0.5.
        :param tau: Safety factor in ]0;1]
        :type tau: float
        '''
        self.tau = numpy.asarray(tau)

    def max_iterations(self, itermax = 5000):
        '''
        Set the maximal allowed iterations per time step. Default value = 5000.
        :param itermax: Max. solution iterations in [1;inf[
        :type itermax: int
        '''
        self.itermax = numpy.asarray(itermax)

    def tolerance_criteria(self, epsilon = 1.0e-4):
        '''
        Set the tolerance criteria for the fluid solver. Default value = 1.0e-4.
        :param epsilon: Criteria value
        :type epsilon: float
        '''
        self.epsilon = numpy.asarray(epsilon)

    def relaxation_parameter(self, omega = 1.7):
        '''
        Set the relaxation parameter for the successive overrelaxation (SOR)
        solver. The solver is identical to the Gauss-Seidel method when omega =
        1. Default value = 1.7.
        :param omega: Relaxation parameter value, in ]0;2[
        :type omega: float
        '''
        self.omega = numpy.asarray(omega)

    def upwind_differencing_factor(self, gamma = 0.9):
        '''
        Set the upwind diffencing factor used in the finite difference
        approximations. Default value = 0.9.
        :param gamma: Upward differencing factor value, in ]0;1[
        :type gamma: float
        '''
        self.gamma = numpy.asarray(gamma)

    def boundary_conditions(self, left = 1, right = 1, top = 1, bottom = 1):
        '''
        Set the wall boundary conditions. The values correspond to the following
        conditions: 1) free-slip, 2) no-slip, 3) outflow, 4) periodic
        :param left, right, top, bottom: The wall to specify the BC for
        :type left, right, top, bottom: int
        '''
        self.w_left = numpy.asarray(left)
        self.w_right = numpy.asarray(right)
        self.w_top = numpy.asarray(top)
        self.w_bottom = numpy.asarray(bottom)

    def reynolds_number(self, re = 100):
        '''
        Define the simulation Reynolds number.
        :param re: Reynolds number in ]0;infty[
        :type re: float
        '''
        self.re = numpy.asarray(re)

    def gravity(self, gx = 0.0, gy = 0.0):
        '''
        Set the gravitational acceleration on the fluid.
        :param gx: Horizontal gravitational acceleration.
        :type gx: float
        :param gy: Vertical gravitational acceleration. Negative values are
            downward.
        :type gy: float
        '''
        self.gx = numpy.asarray(gx)
        self.gy = numpy.asarray(gy)

    def read(self, path, verbose = True):
        '''
        Read data file from disk.

        Field order and dtypes must mirror :meth:`write` exactly; every
        scalar comes back as a 1-element numpy array.
        :param path: Path to data file
        :type path: str
        '''
        fh = None
        try:
            targetfile = path
            if verbose == True:
                print('Input file: ' + targetfile)
            fh = open(targetfile, 'rb')
            self.t = numpy.fromfile(fh, dtype=numpy.float64, count=1)
            self.t_end = numpy.fromfile(fh, dtype=numpy.float64, count=1)
            self.t_file = numpy.fromfile(fh, dtype=numpy.float64, count=1)
            self.tau = numpy.fromfile(fh, dtype=numpy.float64, count=1)
            self.itermax = numpy.fromfile(fh, dtype=numpy.int32, count=1)
            self.epsilon = numpy.fromfile(fh, dtype=numpy.float64, count=1)
            self.omega = numpy.fromfile(fh, dtype=numpy.float64, count=1)
            self.gamma = numpy.fromfile(fh, dtype=numpy.float64, count=1)
            self.gx = numpy.fromfile(fh, dtype=numpy.float64, count=1)
            self.gy = numpy.fromfile(fh, dtype=numpy.float64, count=1)
            self.re = numpy.fromfile(fh, dtype=numpy.float64, count=1)
            self.w_left = numpy.fromfile(fh, dtype=numpy.int32, count=1)
            self.w_right = numpy.fromfile(fh, dtype=numpy.int32, count=1)
            self.w_top = numpy.fromfile(fh, dtype=numpy.int32, count=1)
            self.w_bottom = numpy.fromfile(fh, dtype=numpy.int32, count=1)
            self.dx = numpy.fromfile(fh, dtype=numpy.float64, count=1)
            self.dy = numpy.fromfile(fh, dtype=numpy.float64, count=1)
            self.nx = numpy.fromfile(fh, dtype=numpy.int32, count=1)
            self.ny = numpy.fromfile(fh, dtype=numpy.int32, count=1)
            # re-allocate P/U/V to the sizes just read
            # NOTE(review): nx/ny are 1-element arrays here; the range()
            # calls below rely on numpy accepting them as integers --
            # confirm with the numpy version in use
            self.init_grid(dx = self.dx, dy = self.dy,\
                    nx = self.nx, ny = self.ny)
            for i in range(self.nx+2):
                for j in range(self.ny+2):
                    self.P[i,j] = \
                            numpy.fromfile(fh, dtype=numpy.float64, count=1)
            for i in range(self.nx+2):
                for j in range(self.ny+2):
                    self.U[i,j] = \
                            numpy.fromfile(fh, dtype=numpy.float64, count=1)
            for i in range(self.nx+2):
                for j in range(self.ny+2):
                    self.V[i,j] = \
                            numpy.fromfile(fh, dtype=numpy.float64, count=1)
        finally:
            if fh is not None:
                fh.close()

    def write(self, verbose = True, folder = './'):
        '''
        Write the simulation parameters to disk so that the fluid flow solver
        can read it.

        Field order and dtypes must mirror :meth:`read` and the C
        solver's reader exactly.
        '''
        fh = None
        try:
            targetfile = folder + '/' + self.sim_id + '.dat'
            if verbose == True:
                print('Output file: ' + targetfile)
            fh = open(targetfile, 'wb')
            fh.write(self.t.astype(numpy.float64))
            fh.write(self.t_end.astype(numpy.float64))
            fh.write(self.t_file.astype(numpy.float64))
            fh.write(self.tau.astype(numpy.float64))
            fh.write(self.itermax.astype(numpy.int32))
            fh.write(self.epsilon.astype(numpy.float64))
            fh.write(self.omega.astype(numpy.float64))
            fh.write(self.gamma.astype(numpy.float64))
            fh.write(self.gx.astype(numpy.float64))
            fh.write(self.gy.astype(numpy.float64))
            fh.write(self.re.astype(numpy.float64))
            fh.write(self.w_left.astype(numpy.int32))
            fh.write(self.w_right.astype(numpy.int32))
            fh.write(self.w_top.astype(numpy.int32))
            fh.write(self.w_bottom.astype(numpy.int32))
            fh.write(self.dx.astype(numpy.float64))
            fh.write(self.dy.astype(numpy.float64))
            fh.write(self.nx.astype(numpy.int32))
            fh.write(self.ny.astype(numpy.int32))
            for i in range(self.nx+2):
                for j in range(self.ny+2):
                    fh.write(self.P[i,j].astype(numpy.float64))
            for i in range(self.nx+2):
                for j in range(self.ny+2):
                    fh.write(self.U[i,j].astype(numpy.float64))
            for i in range(self.nx+2):
                for j in range(self.ny+2):
                    fh.write(self.V[i,j].astype(numpy.float64))
        finally:
            if fh is not None:
                fh.close()

    def run(self):
        '''
        Run the simulation using the C program.

        Writes the current state to ``<sim_id>.dat`` first, then invokes
        the external ``ns2dfd`` binary from the current directory.
        '''
        self.write()
        subprocess.call('./ns2dfd ' + self.sim_id + '.dat', shell=True)

    def writeVTK(self, folder = './', verbose = True):
        '''
        Writes a VTK file for the fluid grid to the current folder by default.
        The file name will be in the format ``<self.sid>.vti``. The vti files
        can be used for visualizing the fluid in ParaView.
        The fluid grid is visualized by opening the vti files, and pressing
        "Apply" to import all fluid field properties. To visualize the scalar
        fields, such as the pressure, the porosity, the porosity change or the
        velocity magnitude, choose "Surface" or "Surface With Edges" as the
        "Representation". Choose the desired property as the "Coloring" field.
        It may be desirable to show the color bar by pressing the "Show" button,
        and "Rescale" to fit the color range limits to the current file. The
        coordinate system can be displayed by checking the "Show Axis" field.
        All adjustments by default require the "Apply" button to be pressed
        before regenerating the view.
        The fluid vector fields (e.g. the fluid velocity) can be visualizing by
        e.g. arrows. To do this, select the fluid data in the "Pipeline
        Browser". Press "Glyph" from the "Common" toolbar, or go to the
        "Filters" mennu, and press "Glyph" from the "Common" list. Make sure
        that "Arrow" is selected as the "Glyph type", and "Velocity" as the
        "Vectors" value. Adjust the "Maximum Number of Points" to be at least as
        big as the number of fluid cells in the grid. Press "Apply" to visualize
        the arrows.
        If several data files are generated for the same simulation (e.g. using
        the :func:`writeVTKall()` function), it is able to step the
        visualization through time by using the ParaView controls.
        :param folder: The folder where to place the output binary file (default
            (default = './')
        :type folder: str
        :param verbose: Show diagnostic information (default = True)
        :type verbose: bool
        '''
        filename = folder + '/' + self.sim_id + '.vti' # image grid
        # initialize VTK data structure
        grid = vtk.vtkImageData()
        grid.SetOrigin([0.0, 0.0, 0.0])
        grid.SetSpacing([self.dx, self.dy, 1])
        grid.SetDimensions([self.nx+2, self.ny+2, 1])
        # array of scalars: hydraulic pressures
        pres = vtk.vtkDoubleArray()
        pres.SetName("Pressure")
        pres.SetNumberOfComponents(1)
        pres.SetNumberOfTuples(grid.GetNumberOfPoints())
        # array of vectors: hydraulic velocities
        vel = vtk.vtkDoubleArray()
        vel.SetName("Velocity")
        vel.SetNumberOfComponents(2)
        vel.SetNumberOfTuples(grid.GetNumberOfPoints())
        # insert values, flattening the 2D (x, y) index to a linear point id
        for y in range(self.ny+2):
            for x in range(self.nx+2):
                idx = x + (self.nx+2)*y
                pres.SetValue(idx, self.P[x,y])
                vel.SetTuple(idx, [self.U[x,y], self.V[x,y]])
        # add pres array to grid
        grid.GetPointData().AddArray(pres)
        grid.GetPointData().AddArray(vel)
        # write VTK XML image data file
        writer = vtk.vtkXMLImageDataWriter()
        writer.SetFileName(filename)
        # NOTE(review): SetInput() is the VTK 5 API; VTK >= 6 renamed it
        # SetInputData() -- confirm the targeted VTK version
        writer.SetInput(grid)
        writer.Update()
        if (verbose == True):
            print('Output file: {0}'.format(filename))

    def plot_PUV(self, format = 'png'):
        '''
        Plot the pressure field as a heat map with velocity quiver
        arrows overlaid, and save it to ``<sim_id>-PUV.<format>``.
        :param format: Image file format passed to matplotlib (default 'png')
        :type format: str
        '''
        plt.figure(figsize=[8,8])
        #ax = plt.subplot(1, 3, 1)
        plt.title("Pressure")
        # transpose so the x axis of the array maps to the plot x axis
        imgplt = plt.imshow(self.P.T, origin='lower')
        imgplt.set_interpolation('nearest')
        #imgplt.set_interpolation('bicubic')
        #imgplt.set_cmap('hot')
        plt.xlabel('$x$')
        plt.ylabel('$y$')
        plt.colorbar()
        # show velocities as arrows
        Q = plt.quiver(self.U, self.V)
        # show velocities as stream lines
        #plt.streamplot(numpy.arange(self.nx+2),numpy.arange(self.ny+2),\
                #self.U, self.V)
        '''
        # show velocities as heat maps
        ax = plt.subplot(1, 3, 2)
        plt.title("U")
        imgplt = plt.imshow(self.U.T, origin='lower')
        imgplt.set_interpolation('nearest')
        #imgplt.set_interpolation('bicubic')
        #imgplt.set_cmap('hot')
        plt.xlabel('$x$')
        plt.ylabel('$y$')
        plt.colorbar()
        ax = plt.subplot(1, 3, 3)
        plt.title("V")
        imgplt = plt.imshow(self.V.T, origin='lower')
        imgplt.set_interpolation('nearest')
        #imgplt.set_interpolation('bicubic')
        #imgplt.set_cmap('hot')
        plt.xlabel('$x$')
        plt.ylabel('$y$')
        plt.colorbar()
        '''
        plt.savefig(self.sim_id + '-PUV.' + format, transparent=False)
|
anders-dc/ns2dfd
|
ns2dfd.py
|
Python
|
gpl-3.0
| 14,344
|
[
"ParaView",
"VTK"
] |
851bfa37276b7442b45f267b0d49db132e4fec835cb68e6b2f8223f2d5633593
|
# Determines perovskite structures: classifies by vacancy and substitutional defects (impurities)
# Author: Evgeny Blokhin
import math
import random
import six
from ase.data import chemical_symbols
from ase.data import covalent_radii
from ase.spacegroup import crystal
from tilde.core.constants import Perovskite_Structure
from tilde.apps.perovskite_tilting.perovskite_tilting import Perovskite_tilting
# hierarchy API: __order__ to apply classifier
__order__ = 10
def classify(tilde_obj):
    """
    Tag *tilde_obj* as a perovskite (tag ``0x4``) when its composition
    and Goldschmidt geometry permit, and annotate substitutional
    impurities and C-site (anion) deficiency for 3D-periodic cases.

    Returns *tilde_obj* unchanged as soon as any perovskite criterion
    fails (early-exit style throughout).
    """
    # a single element can never form a perovskite
    if len(tilde_obj.info['elements']) == 1: return tilde_obj
    C_site = [e for e in tilde_obj.info['elements'] if e in Perovskite_Structure.C]
    if not C_site: return tilde_obj
    A_site = [e for e in tilde_obj.info['elements'] if e in Perovskite_Structure.A]
    B_site = [e for e in tilde_obj.info['elements'] if e in Perovskite_Structure.B]
    # proportional content coefficient D_prop: anion count over cation count
    AB, C = 0, 0
    for i in set(A_site + B_site):
        AB += tilde_obj.info['contents'][ tilde_obj.info['elements'].index(i) ]
    for i in C_site:
        C += tilde_obj.info['contents'][ tilde_obj.info['elements'].index(i) ]
    try: D_prop = float(C) / AB
    except ZeroDivisionError: return tilde_obj
    # 2-component pseudo-perovskites
    # TODO account other pseudo-perovskites e.g. Mn2O3 or binary-metal ones
    if tilde_obj.info['elements'][0] in ['W', 'Re'] and len(tilde_obj.info['elements']) == 2:
        if round(D_prop) == 3: tilde_obj.info['tags'].append(0x4)
        return tilde_obj
    if not A_site or not B_site: return tilde_obj
    if not 1.3 < D_prop < 2.3: return tilde_obj # D_prop grows for 2D adsorption cases (>1.9)
    # check every distinct (A, B, C) combination against the tolerance factor
    n_combs, n_offs = 0, 0
    for A in A_site:
        for B in B_site:
            if B == A: continue
            for C in C_site:
                rA = covalent_radii[chemical_symbols.index(A)]
                rB = covalent_radii[chemical_symbols.index(B)]
                rC = covalent_radii[chemical_symbols.index(C)]
                # Goldschmidt tolerance factor
                # t = (rA + rC) / sqrt(2) * (rB + rC)
                # 0.71 =< t =< 1.2
                # t < 0.71 ilmenite, corundum or KNbO3 structure
                # t > 1 hexagonal perovskite polytypes
                # http://en.wikipedia.org/wiki/Goldschmidt_tolerance_factor
                factor = (rA + rC) / (math.sqrt(2) * (rB + rC))
                if not 0.71 <= factor <= 1.4: n_offs += 1
                n_combs += 1
    # perovskite only when at least one combination passed
    if n_offs == n_combs: return tilde_obj
    tilde_obj.info['tags'].append(0x4)
    if tilde_obj.structures[-1].periodicity != 3: return tilde_obj # all below is for 3d case : TODO
    contents = []
    impurities, A_hosts, B_hosts = {}, {}, {}
    # What is a defect?
    # Empirical criteria of defect for ab initio modeling: =< 25% of the content
    for n, i in enumerate(tilde_obj.info['elements']):
        contents.append( [n, i, float(tilde_obj.info['contents'][n])/sum(tilde_obj.info['contents'])] )
    # sort elements by ascending partial weight so impurities come first
    contents = sorted(contents, key = lambda i: i[2])
    for num in range(len(contents)):
        try: contents[num+1]
        except IndexError: break
        # defect content differs at least 2x from the smallest content; defect partial weight <= 1/16
        if contents[num][2] <= 0.0625 and contents[num][2] / contents[num+1][2] <= 0.5:
            impurities[ contents[num][1] ] = tilde_obj.info['contents'][ contents[num][0] ] # ex: ['Fe', 2]
        elif contents[num][1] in Perovskite_Structure.A:
            A_hosts[ contents[num][1] ] = tilde_obj.info['contents'][ contents[num][0] ]
        elif contents[num][1] in Perovskite_Structure.B:
            B_hosts[ contents[num][1] ] = tilde_obj.info['contents'][ contents[num][0] ]
    #print impurities, A_hosts, B_hosts
    if len(A_hosts) > 1 or len(B_hosts) > 1: return tilde_obj # skip complex perovskites and those where an element may occupy either A or B
    # A site or B site? merge each impurity's content into its host site
    num = 0
    for impurity_element, content in six.iteritems(impurities):
        e = tilde_obj.info['elements'].index(impurity_element)
        tilde_obj.info['elements'].pop(e) # TODO
        tilde_obj.info['contents'].pop(e) # TODO
        tilde_obj.info['impurity' + str(num)] = impurity_element + str(content) if content > 1 else impurity_element
        num += 1
        if impurity_element in Perovskite_Structure.A:
            A_hosts[list(A_hosts.keys())[0]] += content
        elif impurity_element in Perovskite_Structure.B:
            B_hosts[list(B_hosts.keys())[0]] += content
    for n, i in enumerate(tilde_obj.info['elements']):
        if i in A_hosts:
            tilde_obj.info['contents'][n] = A_hosts[i] # TODO
        elif i in B_hosts:
            tilde_obj.info['contents'][n] = B_hosts[i] # TODO
    # flag anion (C-site) deficiency
    for i in C_site:
        c_content = tilde_obj.info['contents'][ tilde_obj.info['elements'].index(i) ]
        tot_content = sum(tilde_obj.info['contents'])
        D_O = float(c_content) / tot_content
        if D_O < 0.6: # C-site lack
            tilde_obj.info['lack'] = i
            break # TODO
    return tilde_obj
def generate_random_perovskite(lat=None):
    '''
    Build a random valid perovskite structure in ASE format.

    Useful for testing.  Binary and organic perovskites are not
    considered.

    :param lat: pseudo-cubic lattice constant; drawn at random when
        omitted
    '''
    if not lat:
        upper = Perovskite_tilting.OCTAHEDRON_BOND_LENGTH_LIMIT * 2
        lat = round(random.uniform(3.5, upper), 3)

    def radius(symbol):
        # covalent radius looked up by element symbol
        return covalent_radii[chemical_symbols.index(symbol)]

    A_site = random.choice(Perovskite_Structure.A)
    B_site = random.choice(Perovskite_Structure.B)
    Ci_site = random.choice(Perovskite_Structure.C)
    Cii_site = random.choice(Perovskite_Structure.C)
    # redraw A and B until their radius difference falls in [0.05, 0.5]
    while not 0.05 <= radius(A_site) - radius(B_site) <= 0.5:
        A_site = random.choice(Perovskite_Structure.A)
        B_site = random.choice(Perovskite_Structure.B)

    diag = lat * math.sqrt(2)
    return crystal(
        [A_site, B_site, Ci_site, Cii_site],
        [(0.5, 0.25, 0.0), (0.0, 0.0, 0.0), (0.0, 0.25, 0.0), (0.25, 0.0, 0.75)],
        spacegroup=62, cellpar=[diag, 2 * lat, diag, 90, 90, 90]
    )
|
tilde-lab/tilde
|
tilde/classifiers/perovskites.py
|
Python
|
mit
| 6,214
|
[
"ASE",
"CRYSTAL"
] |
7445ca7a88d612baea79266224c4f31dc770fde57e22af68c4736b2b3d4d6f7a
|
""" This version of the code runs a swarm of simulations of various viscosities and
temperatures per viscosity. Since 5-29, it also includes an adiabatic temperature variance at the LAB.
All quantities are scaled using option B in Cian_RaNotes.pdf until they are written out."""
import math
import os
import errno
from shutil import copyfile
from time import clock
from dolfin import *
# Specify optimization parameters
set_log_level(PROGRESS)
parameters['form_compiler']['optimize'] = True
parameters['form_compiler']['cpp_optimize'] = True
parameters['form_compiler']['representation'] = 'quadrature'

# Constant Definitions
rho_0 = 3300.0   # reference density -- presumably kg/m^3; TODO confirm
alpha = 2.5e-5   # thermal expansivity -- presumably 1/K; TODO confirm
g = 9.81         # gravitational acceleration, m/s^2
# kappa = 1.0E-6 not actually used.. (in cian's notes he is using the non dimensional kappa,
# which here i have defined as 1 (as kappa/kappa_0) so i think i can get away with this)
b = 12.7         # viscosity-law exponent numerator (see Ep = b / dTemp in runJob)
cc = math.log(128)
theta = 0.5      # time-stepping weight: 0.5 -> Crank-Nicolson-style average
h = 1000000.0    # length scale used for non-dimensionalisation
kappa_0 = 1.0E-6
outputInterval = 1  # write output every N solver steps

# Number of separations in each dimension
nx = 16
ny = 16
nz = 16

# Relative dimensions of the domain
meshWidth = 1.0 # defined to be 1.0 -> 1000 km roughly
meshHeight = 0.4
LABHeight = 0.75 * meshHeight # 100 km from surface
class PeriodicBoundary(SubDomain):
    """Identify the x = meshWidth face with the x = 0 face (x-periodic)."""

    def inside(self, x, on_boundary):
        # master side of the periodic pair: the x = 0 boundary
        return on_boundary and near(x[0], 0)

    def map(self, x, y):
        # slave side maps back by one domain width in x; y and z unchanged
        y[0] = x[0] - meshWidth
        y[1] = x[1]
        y[2] = x[2]
class LithosExp(Expression):
    """
    Height field of the lithosphere-asthenosphere boundary (LAB): a
    flat interface at LABHeight with a smooth, tanh-edged bump of
    amplitude ``height`` and radius ``width`` centred in the domain.
    """
    def eval(self, values, x):
        height = 0.25 * meshHeight
        width = 0.2 * meshWidth # radius of protrusion
        scale = width / 4 # slope of edge of protrusion
        # two opposing tanh steps form a smooth plateau of radius `width`
        tanhStep = lambda radius: tanh((radius - width) / scale) + tanh((-radius - width) / scale)
        # rescale so the plateau centre reaches exactly `height`
        normalize = height / abs(tanhStep(0))
        # radial distance from the domain centre in the x-y plane
        r = sqrt((x[0] - meshWidth / 2) ** 2 + (x[1] - meshWidth / 2) ** 2)
        values[0] = LABHeight + normalize * tanhStep(r)
def runJob(T_b, mu_value, path):
    """
    Run one mantle-convection simulation for LAB temperature ``T_b``
    (degrees C, converted to K below) and reference viscosity
    ``mu_value``, writing XDMF output files and a per-rank log file
    under ``path``.

    All quantities are non-dimensionalised with the module-level scales
    (h, kappa_0, ...) before being handed to the solver.
    """
    # Create data files -- write all data to one file?
    comm = mpi_comm_world()
    rank = MPI.rank(comm)

    def createXDMF(filePath):
        # one XDMF time-series file per output field
        f = XDMFFile(comm, filePath)
        f.parameters['flush_output'] = True # Write out data at every step, at a small performance cost
        f.parameters['rewrite_function_mesh'] = False # Avoid rewriting the same function mesh
        # f.parameters['multi_file'] = 10 # issue 278 https://bitbucket.org/fenics-project/dolfin/issue/278/hdf5-file-integrity
        return f
    fileNames = ['t6t', 'mu', 'velocity', 'gradp', 'pstar']
    [tFile, muFile, uFile, gradpFile, pFile] = [createXDMF(path + name + '.xdmf') for name in fileNames]
    logFile = open(path + 'LogFile' + str(rank) + '.txt', 'w', 1) # bufSize = 1 -> line buffered

    def logText(s):
        # echo to stdout and to the per-rank log file
        print(s)
        logFile.write(s + '\n')

    # Calculate all values dependent on Tb and mu_value
    # temps: [surface, LAB, asthenosphere top, bottom] in K, then scaled by dTemp
    temps = [27.0 + 273, T_b + 273, 1300.0 + 273, 1500.0 + 273]
    dTemp = temps[3] - temps[0]
    temps = [x / dTemp for x in temps] # non-dimensionalising temperatures
    Ep = b / dTemp
    mu_a = mu_value # Assumption taken from the blankenbach paper
    mu_bot = exp(-Ep * (temps[3] * dTemp - 1573) + cc) * mu_a
    # Rayleigh number and the derived velocity/time/pressure scales
    Ra = rho_0 * alpha * g * dTemp * h ** 3 / (kappa_0 * mu_a)
    w0 = rho_0 * alpha * g * dTemp * h ** 2 / mu_a
    tau = h / w0
    p0 = mu_a * w0 / h
    vslipx = 1.6e-09 / w0 # non-dimensional
    vslip = Constant((vslipx, 0.0, 0.0))
    noslip = Constant((0.0, 0.0, 0.0))
    dt = 3.0E11 / tau * 10 # TODO:
    tEnd = 3.0E15 / tau # non-dimensionalising times

    # Specify Mesh and Functions
    mesh = BoxMesh(Point(0, 0, 0), Point(meshWidth, meshWidth, meshHeight), nx, ny, nz)
    pbc = PeriodicBoundary()
    Svel = VectorFunctionSpace(mesh, 'CG', 2, constrained_domain = pbc)
    Spre = FunctionSpace(mesh, 'CG', 1, constrained_domain = pbc)
    Stemp = FunctionSpace(mesh, 'CG', 1, constrained_domain = pbc)
    Smu = FunctionSpace(mesh, 'CG', 1, constrained_domain = pbc)
    Sgradp = VectorFunctionSpace(mesh, 'CG', 2, constrained_domain = pbc)
    # Create the corresponding functions, S0 is used for convenience
    S0 = MixedFunctionSpace([Svel, Spre, Stemp])
    u = Function(S0)
    du = TrialFunction(S0)
    v, p, T = split(u)
    v_t, p_t, T_t = TestFunctions(S0)
    LAB = LithosExp(element=Stemp.ufl_element())

    class TempExp(Expression):
        # piecewise-linear initial temperature: one gradient above the
        # LAB surface, another below it
        def eval(self, value, x):
            if x[2] >= LAB(x):
                value[0] = temps[0] + (temps[1] - temps[0]) * (meshHeight - x[2]) / (meshHeight - LAB(x))
            else:
                value[0] = temps[3] - (temps[3] - temps[2]) * x[2] / LAB(x)
    T0 = Function(Stemp, name='Temperature')
    T0.interpolate(TempExp(element=Stemp.ufl_element()))
    v0 = Function(Svel, name='Velocity')
    mu = Function(Smu, name='mu')
    # temperature- and depth-dependent viscosity law, re-interpolated
    # from T0 after every solve
    muExp = Expression('exp(-Ep * (T_val * dTemp - 1573) + cc * x[2] / meshHeight)', Smu.ufl_element(),
                       Ep=Ep, dTemp=dTemp, cc=cc, meshHeight=meshHeight, T_val=T0)
    mu.interpolate(muExp)

    # Specify boundaries and boundary conditions for velocity, temperature, and pressure
    def top(x, on_boundary): return near(x[2], meshHeight)
    def bottom(x, on_boundary): return near(x[2], 0)
    def left(x, on_boundary): return (x[2] <= LAB(x) and near(x[0], 0))
    def right(x, on_boundary): return (x[2] <= LAB(x) and near(x[0], meshWidth))
    def back(x, on_boundary): return (x[2] <= LAB(x) and near(x[1], meshWidth))
    def front(x, on_boundary): return (x[2] <= LAB(x) and near(x[1], 0))
    bcv0 = DirichletBC(S0.sub(0), noslip, top)
    bcv1 = DirichletBC(S0.sub(0), vslip, bottom)
    bcv2 = DirichletBC(S0.sub(0).sub(1), Constant(0.0), back)
    bcv3 = DirichletBC(S0.sub(0).sub(1), Constant(0.0), front)
    bcp0 = DirichletBC(S0.sub(1), Constant(0.0), top)
    bct0 = DirichletBC(S0.sub(2), Constant(temps[0]), top)
    bct1 = DirichletBC(S0.sub(2), Constant(temps[3]), bottom)
    bcs = [bcv0, bcv1, bcv2, bcv3, bcp0, bct0, bct1]

    # Form definitions: theta-weighted Stokes + energy residuals
    v_theta = (1.0 - theta) * v0 + theta * v
    T_theta = (1.0 - theta) * T0 + theta * T
    r_v = (inner(sym(grad(v_t)), 2.0 * mu * sym(grad(v))) - div(v_t) * p - T * v_t[2]) * dx
    r_p = p_t * div(v) * dx
    r_T = (T_t * ((T - T0) + dt * inner(v_theta, grad(T_theta))) \
           + (dt / Ra) * inner(grad(T_t), grad(T_theta))) * dx
    r = r_v + r_p + r_T
    J = derivative(r, u, du)
    problem = NonlinearVariationalProblem(r, u, bcs, J)
    solver = NonlinearVariationalSolver(problem)
    prm = solver.parameters
    prm['newton_solver']['linear_solver'] = 'mumps'
    print(info(prm, True))
    # prm['nonlinear_solver'] = 'snes'
    # prm['snes_solver']['line_search'] = 'basic'
    # prm['snes_solver']['linear_solver']= 'lu'
    # prm['newton_solver']['krylov_solver']['nonzero_initial_guess'] = True
    # prm['newton_solver']['krylov_solver']['monitor_convergence'] = True

    # Begin the simulation loop
    runtimeInit = clock()
    t = 0
    count = 0
    while t < tEnd:
        solver.solve()
        # copy the new solution into the "previous step" functions
        nV, nP, nT = u.split()
        assign(T0, nT)
        assign(v0, nV)
        mu.interpolate(muExp)
        t += dt
        count += 1
        if count % outputInterval == 0:
            # file << (data, timeStamp)
            pFile << (nP, t)
            uFile << (nV, t)
            tFile << (nT, t)
            muFile << (mu, t)
            gradpFile << (project(grad(nP), Sgradp), t)
            # TODO: Output to same function so we can run simulation in paraview
            #TODO: write out melt-solid relative motion vectors
            timeElapsed = clock() - runtimeInit
            rate = timeElapsed / count
            perComp = t / tEnd * 100
            tEst = (tEnd - t) / t * (clock() - runtimeInit) / 3600
            logText('Step %g' % count + ': rate = %g' % rate +
                    ' sec/step --- %g' % perComp + '%%, %g' % tEst + ' hrs')
    # TODO: Write with the scaled versions
    # Learn how to script paraview to apply common operations
    logText('Case mu=%g, Tb=%g complete.' % (mu_a, T_b) + ' Run time = %g' % (clock() - runtimeInit) + 's')
    logFile.close()
if __name__ == '__main__':
    base = 'run/'
    # keep a copy of this script alongside the results for provenance;
    # failure to copy is deliberately non-fatal
    try:
        copyfile(__file__, base + 'code_copy.py')
    except:
        pass
    # Mus = [1e19, 1e20, 1e21]
    # Tbs = [800, 1000, 1300]
    Mus = [1e19];
    Tbs = [1000];
    # sweep the (viscosity, LAB-temperature) grid, one output directory
    # per combination
    for mu in Mus:
        for temp in Tbs:
            work_path = base + 'mu=' + str(mu) + '/Tb=' + str(temp) + '/'
            try:
                os.makedirs(work_path)
            except OSError as exception:
                # tolerate pre-existing directories only
                if exception.errno != errno.EEXIST:
                    raise
            runJob(temp, mu, work_path)
|
Johnson-A/UNM_Research
|
mantle_simulation/mantle_3d_no_advection.py
|
Python
|
mit
| 8,674
|
[
"ParaView"
] |
56df94c49ca3b077fcd9994d9b15550448801fb2aee5b86acd8edff252f43a01
|
import os
import pymc
import pymbar
import dipoles
import numpy as np
import pandas as pd
import simtk.openmm.app as app
import simtk.openmm as mm
import simtk.unit as u
import mdtraj as md
# Number of dipole molecules per simulation box.
n_molecules = 500
traj = md.load("./dipoles.pdb")
out_dir = os.path.join(os.getenv("HOME"), "dat", "dipoles")

# Uniform priors over the dipole force-field parameters (charge q0,
# Lennard-Jones sigma/epsilon for the two sites).
q0 = pymc.Uniform("q0", 0.1, 0.9)
sigma0 = pymc.Uniform("sigma0", 0.1, 0.3)
sigma1 = pymc.Uniform("sigma1", 0.1, 0.3)
epsilon0 = pymc.Uniform("epsilon0", 0.2, 1.0)
epsilon1 = pymc.Uniform("epsilon1", 0.2, 1.0)
# bond length r0 is held fixed (observed) at 0.2 -- units not stated
# here; presumably nm, TODO confirm against dipoles.Dipole
r0 = pymc.Uniform("r0", 0.05, 0.25, value=0.2, observed=True)
model = pymc.Model([q0, sigma0, epsilon0, sigma1, epsilon1, r0])

temperatures = [280 * u.kelvin, 300 * u.kelvin, 320 * u.kelvin]
pressure = 1.0 * u.atmospheres

# Draw one parameter set from the priors, then simulate the density at
# each temperature with that same parameter set.
model.draw_from_prior()
for temperature in temperatures:
    dipole = dipoles.Dipole(n_molecules, q0=q0.value, sigma0=sigma0.value, epsilon0=epsilon0.value, sigma1=sigma1.value, epsilon1=epsilon1.value, r0=r0.value)
    traj = dipole.build_box()
    print(dipole)
    try:
        values, mu, sigma = dipoles.simulate_density(dipole, temperature, pressure, out_dir)
    except Exception as e:
        # best-effort sweep: a failed simulation at one temperature
        # should not abort the remaining ones; the error is only printed
        print(e)
|
kyleabeauchamp/DBayes
|
dbayes/simulate/simulate_dipoles.py
|
Python
|
gpl-2.0
| 1,151
|
[
"MDTraj",
"OpenMM"
] |
1d5617a67f8bee2e87b47b2dcfaba47b3eecd1aecea8627842a4eb5664acf85e
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
This module provides classes to define everything related to band structures.
"""
import collections
import itertools
import math
import re
import warnings
import numpy as np
from monty.json import MSONable
from pymatgen.core.lattice import Lattice
from pymatgen.core.periodic_table import Element, get_el_sp
from pymatgen.core.structure import Structure
from pymatgen.electronic_structure.core import Orbital, Spin
from pymatgen.symmetry.analyzer import SpacegroupAnalyzer
from pymatgen.util.coord import pbc_diff
__author__ = "Geoffroy Hautier, Shyue Ping Ong, Michael Kocher"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "1.0"
__maintainer__ = "Geoffroy Hautier"
__email__ = "geoffroy@uclouvain.be"
__status__ = "Development"
__date__ = "March 14, 2012"
class Kpoint(MSONable):
    """
    A k-point: a point in a reciprocal lattice, stored together with
    that lattice in both fractional and cartesian coordinates.  The
    syntax mirrors the site object in pymatgen.core.structure.
    """

    def __init__(
        self,
        coords,
        lattice,
        to_unit_cell=False,
        coords_are_cartesian=False,
        label=None,
    ):
        """
        Args:
            coords: coordinate of the kpoint as a numpy array
            lattice: A pymatgen.core.lattice.Lattice lattice object representing
                the reciprocal lattice of the kpoint
            to_unit_cell: Translates fractional coordinate to the basic unit
                cell, i.e., all fractional coordinates satisfy 0 <= a < 1.
                Defaults to False.
            coords_are_cartesian: Boolean indicating if the coordinates given are
                in cartesian or fractional coordinates (by default fractional)
            label: the label of the kpoint if any (None by default)
        """
        self._lattice = lattice
        if coords_are_cartesian:
            self._fcoords = lattice.get_fractional_coords(coords)
        else:
            self._fcoords = coords
        self._label = label
        if to_unit_cell:
            # wrap each fractional coordinate into [0, 1)
            for i, fc in enumerate(self._fcoords):
                self._fcoords[i] = fc - math.floor(fc)
        self._ccoords = lattice.get_cartesian_coords(self._fcoords)

    @property
    def lattice(self):
        """
        The reciprocal lattice the kpoint lives in, as a
        pymatgen.core.lattice.Lattice object.
        """
        return self._lattice

    @property
    def label(self):
        """
        The label associated with the kpoint, or None.
        """
        return self._label

    @property
    def frac_coords(self):
        """
        A copy of the kpoint's fractional coordinates as a numpy array.
        """
        return np.copy(self._fcoords)

    @property
    def cart_coords(self):
        """
        A copy of the kpoint's cartesian coordinates as a numpy array.
        """
        return np.copy(self._ccoords)

    @property
    def a(self):
        """
        Fractional a coordinate of the kpoint.
        """
        return self._fcoords[0]

    @property
    def b(self):
        """
        Fractional b coordinate of the kpoint.
        """
        return self._fcoords[1]

    @property
    def c(self):
        """
        Fractional c coordinate of the kpoint.
        """
        return self._fcoords[2]

    def __str__(self):
        """
        Fractional coordinates, cartesian coordinates and label,
        space-separated.
        """
        return "{} {} {}".format(self.frac_coords, self.cart_coords, self.label)

    def as_dict(self):
        """
        Json-serializable dict representation of a kpoint.
        """
        doc = {
            "lattice": self.lattice.as_dict(),
            "fcoords": self.frac_coords.tolist(),
            "ccoords": self.cart_coords.tolist(),
            "label": self.label,
        }
        doc["@module"] = self.__class__.__module__
        doc["@class"] = self.__class__.__name__
        return doc
class BandStructure:
    """
    This is the most generic band structure data possible
    it's defined by a list of kpoints + energies for each of them

    .. attribute:: kpoints:
        the list of kpoints (as Kpoint objects) in the band structure

    .. attribute:: lattice_rec

        the reciprocal lattice of the band structure.

    .. attribute:: efermi

        the fermi energy

    .. attribute:: is_spin_polarized

        True if the band structure is spin-polarized, False otherwise

    .. attribute:: bands

        The energy eigenvalues as a {spin: ndarray}. Note that the use of an
        ndarray is necessary for computational as well as memory efficiency
        due to the large amount of numerical data. The indices of the ndarray
        are [band_index, kpoint_index].

    .. attribute:: nb_bands

        returns the number of bands in the band structure

    .. attribute:: structure

        returns the structure

    .. attribute:: projections

        The projections as a {spin: ndarray}. Note that the use of an
        ndarray is necessary for computational as well as memory efficiency
        due to the large amount of numerical data. The indices of the ndarray
        are [band_index, kpoint_index, orbital_index, ion_index].
    """

    def __init__(
        self,
        kpoints,
        eigenvals,
        lattice,
        efermi,
        labels_dict=None,
        coords_are_cartesian=False,
        structure=None,
        projections=None,
    ):
        """
        Args:
            kpoints: list of kpoint as numpy arrays, in frac_coords of the
                given lattice by default
            eigenvals: dict of energies for spin up and spin down
                {Spin.up:[][],Spin.down:[][]}, the first index of the array
                [][] refers to the band and the second to the index of the
                kpoint. The kpoints are ordered according to the order of the
                kpoints array. If the band structure is not spin polarized, we
                only store one data set under Spin.up
            lattice: The reciprocal lattice as a pymatgen Lattice object.
                Pymatgen uses the physics convention of reciprocal lattice vectors
                WITH a 2*pi coefficient
            efermi: fermi energy
            labels_dict: (dict) of {} this links a kpoint (in frac coords or
                cartesian coordinates depending on the coords) to a label.
            coords_are_cartesian: Whether coordinates are cartesian.
            structure: The crystal structure (as a pymatgen Structure object)
                associated with the band structure. This is needed if we
                provide projections to the band structure
            projections: dict of orbital projections as {spin: ndarray}. The
                indices of the ndarray are [band_index, kpoint_index,
                orbital_index, ion_index]. If the band structure is not spin
                polarized, we only store one data set under Spin.up.
        """
        self.efermi = efermi
        self.lattice_rec = lattice
        self.kpoints = []
        self.labels_dict = {}
        self.structure = structure
        self.projections = projections or {}
        self.projections = {k: np.array(v) for k, v in self.projections.items()}
        if labels_dict is None:
            labels_dict = {}
        if len(self.projections) != 0 and self.structure is None:
            raise Exception("if projections are provided a structure object needs also to be given")
        for k in kpoints:
            # let see if this kpoint has been assigned a label
            label = None
            for c in labels_dict:
                if np.linalg.norm(k - np.array(labels_dict[c])) < 0.0001:
                    label = c
                    self.labels_dict[label] = Kpoint(
                        k,
                        lattice,
                        label=label,
                        coords_are_cartesian=coords_are_cartesian,
                    )
            self.kpoints.append(Kpoint(k, lattice, label=label, coords_are_cartesian=coords_are_cartesian))
        self.bands = {spin: np.array(v) for spin, v in eigenvals.items()}
        self.nb_bands = len(eigenvals[Spin.up])
        self.is_spin_polarized = len(self.bands) == 2

    def get_projection_on_elements(self):
        """
        Method returning a dictionary of projections on elements.

        Returns:
            a dictionary in the {Spin.up:[][{Element:values}],
            Spin.down:[][{Element:values}]} format
            if there is no projections in the band structure
            returns an empty dict
        """
        result = {}
        structure = self.structure
        for spin, v in self.projections.items():
            result[spin] = [
                [collections.defaultdict(float) for i in range(len(self.kpoints))] for j in range(self.nb_bands)
            ]
            for i, j, k in itertools.product(
                range(self.nb_bands),
                range(len(self.kpoints)),
                range(structure.num_sites),
            ):
                # Sum over all orbitals of site k for band i at kpoint j.
                result[spin][i][j][str(structure[k].specie)] += np.sum(v[i, j, :, k])
        return result

    def get_projections_on_elements_and_orbitals(self, el_orb_spec):
        """
        Method returning a dictionary of projections on elements and specific
        orbitals

        Args:
            el_orb_spec: A dictionary of Elements and Orbitals for which we want
                to have projections on. It is given as: {Element:[orbitals]},
                e.g., {'Cu':['d','s']}

        Returns:
            A dictionary of projections on elements in the
            {Spin.up:[][{Element:{orb:values}}],
            Spin.down:[][{Element:{orb:values}}]} format
            if there is no projections in the band structure returns an empty
            dict.
        """
        result = {}
        structure = self.structure
        el_orb_spec = {get_el_sp(el): orbs for el, orbs in el_orb_spec.items()}
        for spin, v in self.projections.items():
            result[spin] = [
                [{str(e): collections.defaultdict(float) for e in el_orb_spec} for i in range(len(self.kpoints))]
                for j in range(self.nb_bands)
            ]
            for i, j, k in itertools.product(
                range(self.nb_bands),
                range(len(self.kpoints)),
                range(structure.num_sites),
            ):
                sp = structure[k].specie
                for orb_i in range(len(v[i][j])):
                    # Collapse orbitals to their subshell letter (s, p, d, f).
                    o = Orbital(orb_i).name[0]
                    if sp in el_orb_spec:
                        if o in el_orb_spec[sp]:
                            result[spin][i][j][str(sp)][o] += v[i][j][orb_i][k]
        return result

    def is_metal(self, efermi_tol=1e-4):
        """
        Check if the band structure indicates a metal by looking if the fermi
        level crosses a band.

        Returns:
            True if a metal, False if not
        """
        for spin, values in self.bands.items():
            for i in range(self.nb_bands):
                # A band that has eigenvalues both below and above the Fermi
                # level (beyond the tolerance) crosses it -> metallic.
                if np.any(values[i, :] - self.efermi < -efermi_tol) and np.any(values[i, :] - self.efermi > efermi_tol):
                    return True
        return False

    def get_vbm(self):
        """
        Returns data about the VBM.

        Returns:
            dict as {"band_index","kpoint_index","kpoint","energy"}
            - "band_index": A dict with spin keys pointing to a list of the
            indices of the band containing the VBM (please note that you
            can have several bands sharing the VBM) {Spin.up:[],
            Spin.down:[]}
            - "kpoint_index": The list of indices in self.kpoints for the
            kpoint VBM. Please note that there can be several
            kpoint_indices relating to the same kpoint (e.g., Gamma can
            occur at different spots in the band structure line plot)
            - "kpoint": The kpoint (as a kpoint object)
            - "energy": The energy of the VBM
            - "projections": The projections along sites and orbitals of the
            VBM if any projection data is available (else it is an empty
            dictionnary). The format is similar to the projections field in
            BandStructure: {spin:{'Orbital': [proj]}} where the array
            [proj] is ordered according to the sites in structure
        """
        if self.is_metal():
            return {
                "band_index": [],
                "kpoint_index": [],
                "kpoint": [],
                "energy": None,
                "projections": {},
            }
        max_tmp = -float("inf")
        index = None
        kpointvbm = None
        for spin, v in self.bands.items():
            # Highest eigenvalue strictly below the Fermi level.
            for i, j in zip(*np.where(v < self.efermi)):
                if v[i, j] > max_tmp:
                    max_tmp = float(v[i, j])
                    index = j
                    kpointvbm = self.kpoints[j]
        list_ind_kpts = []
        if kpointvbm.label is not None:
            # A labeled kpoint may occur several times along a line plot;
            # collect every occurrence.
            for i in range(len(self.kpoints)):
                if self.kpoints[i].label == kpointvbm.label:
                    list_ind_kpts.append(i)
        else:
            list_ind_kpts.append(index)
        # get all other bands sharing the vbm
        list_ind_band = collections.defaultdict(list)
        for spin in self.bands:
            for i in range(self.nb_bands):
                if math.fabs(self.bands[spin][i][index] - max_tmp) < 0.001:
                    list_ind_band[spin].append(i)
        proj = {}
        for spin, v in self.projections.items():
            if len(list_ind_band[spin]) == 0:
                continue
            proj[spin] = v[list_ind_band[spin][0]][list_ind_kpts[0]]
        return {
            "band_index": list_ind_band,
            "kpoint_index": list_ind_kpts,
            "kpoint": kpointvbm,
            "energy": max_tmp,
            "projections": proj,
        }

    def get_cbm(self):
        """
        Returns data about the CBM.

        Returns:
            {"band_index","kpoint_index","kpoint","energy"}
            - "band_index": A dict with spin keys pointing to a list of the
            indices of the band containing the CBM (please note that you
            can have several bands sharing the CBM) {Spin.up:[],
            Spin.down:[]}
            - "kpoint_index": The list of indices in self.kpoints for the
            kpoint CBM. Please note that there can be several
            kpoint_indices relating to the same kpoint (e.g., Gamma can
            occur at different spots in the band structure line plot)
            - "kpoint": The kpoint (as a kpoint object)
            - "energy": The energy of the CBM
            - "projections": The projections along sites and orbitals of the
            CBM if any projection data is available (else it is an empty
            dictionnary). The format is similar to the projections field in
            BandStructure: {spin:{'Orbital': [proj]}} where the array
            [proj] is ordered according to the sites in structure
        """
        if self.is_metal():
            return {
                "band_index": [],
                "kpoint_index": [],
                "kpoint": [],
                "energy": None,
                "projections": {},
            }
        max_tmp = float("inf")
        index = None
        kpointcbm = None
        for spin, v in self.bands.items():
            # Lowest eigenvalue at or above the Fermi level.
            for i, j in zip(*np.where(v >= self.efermi)):
                if v[i, j] < max_tmp:
                    max_tmp = float(v[i, j])
                    index = j
                    kpointcbm = self.kpoints[j]
        list_index_kpoints = []
        if kpointcbm.label is not None:
            for i in range(len(self.kpoints)):
                if self.kpoints[i].label == kpointcbm.label:
                    list_index_kpoints.append(i)
        else:
            list_index_kpoints.append(index)
        # get all other bands sharing the cbm
        list_index_band = collections.defaultdict(list)
        for spin in self.bands:
            for i in range(self.nb_bands):
                if math.fabs(self.bands[spin][i][index] - max_tmp) < 0.001:
                    list_index_band[spin].append(i)
        proj = {}
        for spin, v in self.projections.items():
            if len(list_index_band[spin]) == 0:
                continue
            proj[spin] = v[list_index_band[spin][0]][list_index_kpoints[0]]
        return {
            "band_index": list_index_band,
            "kpoint_index": list_index_kpoints,
            "kpoint": kpointcbm,
            "energy": max_tmp,
            "projections": proj,
        }

    def get_band_gap(self):
        r"""
        Returns band gap data.

        Returns:
            A dict {"energy","direct","transition"}:
            "energy": band gap energy
            "direct": A boolean telling if the gap is direct or not
            "transition": kpoint labels of the transition (e.g., "\\Gamma-X")
        """
        if self.is_metal():
            return {"energy": 0.0, "direct": False, "transition": None}
        cbm = self.get_cbm()
        vbm = self.get_vbm()
        result = dict(direct=False, energy=0.0, transition=None)
        result["energy"] = cbm["energy"] - vbm["energy"]
        # The gap is direct when VBM and CBM sit at the same kpoint (same
        # label, or nearly identical cartesian coordinates).
        if (cbm["kpoint"].label is not None and cbm["kpoint"].label == vbm["kpoint"].label) or np.linalg.norm(
            cbm["kpoint"].cart_coords - vbm["kpoint"].cart_coords
        ) < 0.01:
            result["direct"] = True
        result["transition"] = "-".join(
            [
                str(c.label)
                if c.label is not None
                else "(" + ",".join(["{0:.3f}".format(c.frac_coords[i]) for i in range(3)]) + ")"
                for c in [vbm["kpoint"], cbm["kpoint"]]
            ]
        )
        return result

    def get_direct_band_gap_dict(self):
        """
        Returns a dictionary of information about the direct
        band gap

        Returns:
            a dictionary of the band gaps indexed by spin
            along with their band indices and k-point index
        """
        if self.is_metal():
            raise ValueError("get_direct_band_gap_dict should only be used with non-metals")
        direct_gap_dict = {}
        for spin, v in self.bands.items():
            # Bands entirely above / entirely below the Fermi level.
            above = v[np.all(v > self.efermi, axis=1)]
            min_above = np.min(above, axis=0)
            below = v[np.all(v < self.efermi, axis=1)]
            max_below = np.max(below, axis=0)
            diff = min_above - max_below
            kpoint_index = np.argmin(diff)
            band_indices = [
                np.argmax(below[:, kpoint_index]),
                np.argmin(above[:, kpoint_index]) + len(below),
            ]
            direct_gap_dict[spin] = {
                "value": diff[kpoint_index],
                "kpoint_index": kpoint_index,
                "band_indices": band_indices,
            }
        return direct_gap_dict

    def get_direct_band_gap(self):
        """
        Returns the direct band gap.

        Returns:
            the value of the direct band gap
        """
        if self.is_metal():
            return 0.0
        dg = self.get_direct_band_gap_dict()
        return min(v["value"] for v in dg.values())

    def get_sym_eq_kpoints(self, kpoint, cartesian=False, tol=1e-2):
        """
        Returns a list of unique symmetrically equivalent k-points.

        Args:
            kpoint (1x3 array): coordinate of the k-point
            cartesian (bool): kpoint is in cartesian or fractional coordinates
            tol (float): tolerance below which coordinates are considered equal

        Returns:
            ([1x3 array] or None): if structure is not available returns None
        """
        if not self.structure:
            return None
        sg = SpacegroupAnalyzer(self.structure)
        symmops = sg.get_point_group_operations(cartesian=cartesian)
        points = np.dot(kpoint, [m.rotation_matrix for m in symmops])
        rm_list = []
        # identify and remove duplicates from the list of equivalent k-points:
        for i in range(len(points) - 1):
            for j in range(i + 1, len(points)):
                # Bug fix: `tol` must be the *absolute* tolerance. Passing it
                # positionally made it the relative tolerance, which is
                # meaningless when comparing against [0, 0, 0].
                if np.allclose(pbc_diff(points[i], points[j]), [0, 0, 0], atol=tol):
                    rm_list.append(i)
                    break
        return np.delete(points, rm_list, axis=0)

    def get_kpoint_degeneracy(self, kpoint, cartesian=False, tol=1e-2):
        """
        Returns degeneracy of a given k-point based on structure symmetry

        Args:
            kpoint (1x3 array): coordinate of the k-point
            cartesian (bool): kpoint is in cartesian or fractional coordinates
            tol (float): tolerance below which coordinates are considered equal

        Returns:
            (int or None): degeneracy or None if structure is not available
        """
        all_kpts = self.get_sym_eq_kpoints(kpoint, cartesian, tol=tol)
        if all_kpts is not None:
            return len(all_kpts)
        return None

    def as_dict(self):
        """
        Json-serializable dict representation of BandStructure.
        """
        d = {
            "@module": self.__class__.__module__,
            "@class": self.__class__.__name__,
            "lattice_rec": self.lattice_rec.as_dict(),
            "efermi": self.efermi,
            "kpoints": [],
        }
        # kpoints are not kpoint objects dicts but are frac coords (this makes
        # the dict smaller and avoids the repetition of the lattice
        for k in self.kpoints:
            d["kpoints"].append(k.as_dict()["fcoords"])
        d["bands"] = {str(int(spin)): self.bands[spin].tolist() for spin in self.bands}
        d["is_metal"] = self.is_metal()
        vbm = self.get_vbm()
        d["vbm"] = {
            "energy": vbm["energy"],
            "kpoint_index": vbm["kpoint_index"],
            "band_index": {str(int(spin)): vbm["band_index"][spin] for spin in vbm["band_index"]},
            "projections": {str(spin): v.tolist() for spin, v in vbm["projections"].items()},
        }
        cbm = self.get_cbm()
        d["cbm"] = {
            "energy": cbm["energy"],
            "kpoint_index": cbm["kpoint_index"],
            "band_index": {str(int(spin)): cbm["band_index"][spin] for spin in cbm["band_index"]},
            "projections": {str(spin): v.tolist() for spin, v in cbm["projections"].items()},
        }
        d["band_gap"] = self.get_band_gap()
        d["labels_dict"] = {}
        d["is_spin_polarized"] = self.is_spin_polarized
        # MongoDB does not accept keys starting with $. Add a blank space to fix the problem
        for c in self.labels_dict:
            mongo_key = c if not c.startswith("$") else " " + c
            d["labels_dict"][mongo_key] = self.labels_dict[c].as_dict()["fcoords"]
        d["projections"] = {}
        if len(self.projections) != 0:
            d["structure"] = self.structure.as_dict()
            d["projections"] = {str(int(spin)): np.array(v).tolist() for spin, v in self.projections.items()}
        return d

    @classmethod
    def from_dict(cls, d):
        """
        Create from dict.

        Args:
            A dict with all data for a band structure object.

        Returns:
            A BandStructure object
        """
        # Strip the label to recover initial string
        # (see trick used in as_dict to handle $ chars)
        labels_dict = {k.strip(): v for k, v in d["labels_dict"].items()}
        projections = {}
        structure = None
        if isinstance(list(d["bands"].values())[0], dict):
            eigenvals = {Spin(int(k)): np.array(d["bands"][k]["data"]) for k in d["bands"]}
        else:
            eigenvals = {Spin(int(k)): d["bands"][k] for k in d["bands"]}
        if "structure" in d:
            structure = Structure.from_dict(d["structure"])
        try:
            if d.get("projections"):
                if isinstance(d["projections"]["1"][0][0], dict):
                    raise ValueError("Old band structure dict format detected!")
                projections = {Spin(int(spin)): np.array(v) for spin, v in d["projections"].items()}
            return cls(
                d["kpoints"],
                eigenvals,
                Lattice(d["lattice_rec"]["matrix"]),
                d["efermi"],
                labels_dict,
                structure=structure,
                projections=projections,
            )
        except Exception:
            warnings.warn(
                "Trying from_dict failed. Now we are trying the old "
                "format. Please convert your BS dicts to the new "
                "format. The old format will be retired in pymatgen "
                "5.0."
            )
            return cls.from_old_dict(d)

    @classmethod
    def from_old_dict(cls, d):
        """
        Args:
            d (dict): A dict with all data for a band structure symm line
                object.

        Returns:
            A BandStructureSymmLine object
        """
        # Strip the label to recover initial string (see trick used in as_dict to handle $ chars)
        labels_dict = {k.strip(): v for k, v in d["labels_dict"].items()}
        projections = {}
        structure = None
        if "projections" in d and len(d["projections"]) != 0:
            structure = Structure.from_dict(d["structure"])
            projections = {}
            # Old format stored projections as nested lists of
            # {orbital_name: [values]} dicts; rebuild the ndarray layout.
            for spin in d["projections"]:
                dd = []
                for i in range(len(d["projections"][spin])):
                    ddd = []
                    for j in range(len(d["projections"][spin][i])):
                        dddd = []
                        for k in range(len(d["projections"][spin][i][j])):
                            ddddd = []
                            orb = Orbital(k).name
                            for m in range(len(d["projections"][spin][i][j][orb])):
                                ddddd.append(d["projections"][spin][i][j][orb][m])
                            dddd.append(np.array(ddddd))
                        ddd.append(np.array(dddd))
                    dd.append(np.array(ddd))
                projections[Spin(int(spin))] = np.array(dd)
        return BandStructure(
            d["kpoints"],
            {Spin(int(k)): d["bands"][k] for k in d["bands"]},
            Lattice(d["lattice_rec"]["matrix"]),
            d["efermi"],
            labels_dict,
            structure=structure,
            projections=projections,
        )
class BandStructureSymmLine(BandStructure, MSONable):
    r"""
    This object stores band structures along selected (symmetry) lines in the
    Brillouin zone. We call the different symmetry lines (ex: \\Gamma to Z)
    "branches".
    """
    def __init__(
        self,
        kpoints,
        eigenvals,
        lattice,
        efermi,
        labels_dict,
        coords_are_cartesian=False,
        structure=None,
        projections=None,
    ):
        """
        Args:
            kpoints: list of kpoint as numpy arrays, in frac_coords of the
                given lattice by default
            eigenvals: dict of energies for spin up and spin down
                {Spin.up:[][],Spin.down:[][]}, the first index of the array
                [][] refers to the band and the second to the index of the
                kpoint. The kpoints are ordered according to the order of the
                kpoints array. If the band structure is not spin polarized, we
                only store one data set under Spin.up.
            lattice: The reciprocal lattice.
                Pymatgen uses the physics convention of reciprocal lattice vectors
                WITH a 2*pi coefficient
            efermi: fermi energy
            labels_dict: (dict) of {} this links a kpoint (in frac coords or
                cartesian coordinates depending on the coords).
            coords_are_cartesian: Whether coordinates are cartesian.
            structure: The crystal structure (as a pymatgen Structure object)
                associated with the band structure. This is needed if we
                provide projections to the band structure.
            projections: dict of orbital projections as {spin: ndarray}. The
                indices of the ndarrayare [band_index, kpoint_index, orbital_index,
                ion_index].If the band structure is not spin polarized, we only
                store one data set under Spin.up.
        """
        super().__init__(
            kpoints,
            eigenvals,
            lattice,
            efermi,
            labels_dict,
            coords_are_cartesian,
            structure,
            projections,
        )
        self.distance = []
        self.branches = []
        one_group = []
        branches_tmp = []
        # get labels and distance for each kpoint
        previous_kpoint = self.kpoints[0]
        previous_distance = 0.0
        previous_label = self.kpoints[0].label
        for i in range(len(self.kpoints)):
            label = self.kpoints[i].label
            # Two consecutive labeled kpoints mark a discontinuity in the
            # plot: no distance is accumulated between them.
            if label is not None and previous_label is not None:
                self.distance.append(previous_distance)
            else:
                self.distance.append(
                    np.linalg.norm(self.kpoints[i].cart_coords - previous_kpoint.cart_coords) + previous_distance
                )
            previous_kpoint = self.kpoints[i]
            previous_distance = self.distance[i]
            # A labeled kpoint immediately after another labeled kpoint
            # closes the current branch and starts a new one.
            if label:
                if previous_label:
                    if len(one_group) != 0:
                        branches_tmp.append(one_group)
                    one_group = []
            previous_label = label
            one_group.append(i)
        if len(one_group) != 0:
            branches_tmp.append(one_group)
        for b in branches_tmp:
            self.branches.append(
                {
                    "start_index": b[0],
                    "end_index": b[-1],
                    "name": str(self.kpoints[b[0]].label) + "-" + str(self.kpoints[b[-1]].label),
                }
            )
        self.is_spin_polarized = False
        if len(self.bands) == 2:
            self.is_spin_polarized = True
    def get_equivalent_kpoints(self, index):
        """
        Returns the list of kpoint indices equivalent (meaning they are the
        same frac coords) to the given one.

        Args:
            index: the kpoint index

        Returns:
            a list of equivalent indices

        TODO: now it uses the label we might want to use coordinates instead
        (in case there was a mislabel)
        """
        # if the kpoint has no label it can"t have a repetition along the band
        # structure line object
        if self.kpoints[index].label is None:
            return [index]
        list_index_kpoints = []
        for i in range(len(self.kpoints)):
            if self.kpoints[i].label == self.kpoints[index].label:
                list_index_kpoints.append(i)
        return list_index_kpoints
    def get_branch(self, index):
        r"""
        Returns in what branch(es) is the kpoint. There can be several
        branches.

        Args:
            index: the kpoint index

        Returns:
            A list of dictionaries [{"name","start_index","end_index","index"}]
            indicating all branches in which the k_point is. It takes into
            account the fact that one kpoint (e.g., \\Gamma) can be in several
            branches
        """
        to_return = []
        for i in self.get_equivalent_kpoints(index):
            for b in self.branches:
                if b["start_index"] <= i <= b["end_index"]:
                    to_return.append(
                        {
                            "name": b["name"],
                            "start_index": b["start_index"],
                            "end_index": b["end_index"],
                            "index": i,
                        }
                    )
        return to_return
    def apply_scissor(self, new_band_gap):
        """
        Apply a scissor operator (shift of the CBM) to fit the given band gap.
        If it's a metal. We look for the band crossing the fermi level
        and shift this one up. This will not work all the time for metals!

        Args:
            new_band_gap: the band gap the scissor band structure need to have.

        Returns:
            a BandStructureSymmLine object with the applied scissor shift
        """
        if self.is_metal():
            # moves then the highest index band crossing the fermi level
            # find this band...
            max_index = -1000
            # spin_index = None
            for i in range(self.nb_bands):
                below = False
                above = False
                for j in range(len(self.kpoints)):
                    if self.bands[Spin.up][i][j] < self.efermi:
                        below = True
                    if self.bands[Spin.up][i][j] > self.efermi:
                        above = True
                if above and below:
                    if i > max_index:
                        max_index = i
                        # spin_index = Spin.up
                if self.is_spin_polarized:
                    below = False
                    above = False
                    for j in range(len(self.kpoints)):
                        if self.bands[Spin.down][i][j] < self.efermi:
                            below = True
                        if self.bands[Spin.down][i][j] > self.efermi:
                            above = True
                    if above and below:
                        if i > max_index:
                            max_index = i
                            # spin_index = Spin.down
            old_dict = self.as_dict()
            shift = new_band_gap
            # For a metal: shift every band at or above max_index up by the
            # requested gap, opening a gap of that size.
            for spin in old_dict["bands"]:
                for k in range(len(old_dict["bands"][spin])):
                    for v in range(len(old_dict["bands"][spin][k])):
                        if k >= max_index:
                            old_dict["bands"][spin][k][v] = old_dict["bands"][spin][k][v] + shift
        else:
            # For an insulator: shift all conduction states so the existing
            # gap is widened (or narrowed) to new_band_gap.
            shift = new_band_gap - self.get_band_gap()["energy"]
            old_dict = self.as_dict()
            for spin in old_dict["bands"]:
                for k in range(len(old_dict["bands"][spin])):
                    for v in range(len(old_dict["bands"][spin][k])):
                        if old_dict["bands"][spin][k][v] >= old_dict["cbm"]["energy"]:
                            old_dict["bands"][spin][k][v] = old_dict["bands"][spin][k][v] + shift
            old_dict["efermi"] = old_dict["efermi"] + shift
        return self.from_dict(old_dict)
    def as_dict(self):
        """
        Json-serializable dict representation of BandStructureSymmLine.
        """
        d = super().as_dict()
        d["branches"] = self.branches
        return d
class LobsterBandStructureSymmLine(BandStructureSymmLine):
    """
    Lobster subclass of BandStructure with customized functions.
    """
    def as_dict(self):
        """
        Json-serializable dict representation of BandStructureSymmLine.
        """
        d = {
            "@module": self.__class__.__module__,
            "@class": self.__class__.__name__,
            "lattice_rec": self.lattice_rec.as_dict(),
            "efermi": self.efermi,
            "kpoints": [],
        }
        # kpoints are not kpoint objects dicts but are frac coords (this makes
        # the dict smaller and avoids the repetition of the lattice
        for k in self.kpoints:
            d["kpoints"].append(k.as_dict()["fcoords"])
        d["branches"] = self.branches
        d["bands"] = {str(int(spin)): self.bands[spin].tolist() for spin in self.bands}
        d["is_metal"] = self.is_metal()
        vbm = self.get_vbm()
        d["vbm"] = {
            "energy": vbm["energy"],
            "kpoint_index": [int(x) for x in vbm["kpoint_index"]],
            "band_index": {str(int(spin)): vbm["band_index"][spin] for spin in vbm["band_index"]},
            # Lobster projections are nested dicts, not ndarrays, so no
            # tolist() conversion is applied here (unlike BandStructure).
            "projections": {str(spin): v for spin, v in vbm["projections"].items()},
        }
        cbm = self.get_cbm()
        d["cbm"] = {
            "energy": cbm["energy"],
            "kpoint_index": [int(x) for x in cbm["kpoint_index"]],
            "band_index": {str(int(spin)): cbm["band_index"][spin] for spin in cbm["band_index"]},
            "projections": {str(spin): v for spin, v in cbm["projections"].items()},
        }
        d["band_gap"] = self.get_band_gap()
        d["labels_dict"] = {}
        d["is_spin_polarized"] = self.is_spin_polarized
        # MongoDB does not accept keys starting with $. Add a blank space to fix the problem
        for c in self.labels_dict:
            mongo_key = c if not c.startswith("$") else " " + c
            d["labels_dict"][mongo_key] = self.labels_dict[c].as_dict()["fcoords"]
        if len(self.projections) != 0:
            d["structure"] = self.structure.as_dict()
            d["projections"] = {str(int(spin)): np.array(v).tolist() for spin, v in self.projections.items()}
        return d
    @classmethod
    def from_dict(cls, d):
        """
        Args:
            d (dict): A dict with all data for a band structure symm line
                object.

        Returns:
            A BandStructureSymmLine object
        """
        try:
            # Strip the label to recover initial string (see trick used in as_dict to handle $ chars)
            labels_dict = {k.strip(): v for k, v in d["labels_dict"].items()}
            projections = {}
            structure = None
            if d.get("projections"):
                if isinstance(d["projections"]["1"][0][0], dict):
                    raise ValueError("Old band structure dict format detected!")
                structure = Structure.from_dict(d["structure"])
                projections = {Spin(int(spin)): np.array(v) for spin, v in d["projections"].items()}
            return LobsterBandStructureSymmLine(
                d["kpoints"],
                {Spin(int(k)): d["bands"][k] for k in d["bands"]},
                Lattice(d["lattice_rec"]["matrix"]),
                d["efermi"],
                labels_dict,
                structure=structure,
                projections=projections,
            )
        except Exception:
            warnings.warn(
                "Trying from_dict failed. Now we are trying the old "
                "format. Please convert your BS dicts to the new "
                "format. The old format will be retired in pymatgen "
                "5.0."
            )
            return LobsterBandStructureSymmLine.from_old_dict(d)
    @classmethod
    def from_old_dict(cls, d):
        """
        Args:
            d (dict): A dict with all data for a band structure symm line
                object.

        Returns:
            A BandStructureSymmLine object
        """
        # Strip the label to recover initial string (see trick used in as_dict to handle $ chars)
        labels_dict = {k.strip(): v for k, v in d["labels_dict"].items()}
        projections = {}
        structure = None
        if "projections" in d and len(d["projections"]) != 0:
            structure = Structure.from_dict(d["structure"])
            projections = {}
            # Rebuild the per-spin nested arrays from the old list format.
            for spin in d["projections"]:
                dd = []
                for i in range(len(d["projections"][spin])):
                    ddd = []
                    for j in range(len(d["projections"][spin][i])):
                        ddd.append(d["projections"][spin][i][j])
                    dd.append(np.array(ddd))
                projections[Spin(int(spin))] = np.array(dd)
        return LobsterBandStructureSymmLine(
            d["kpoints"],
            {Spin(int(k)): d["bands"][k] for k in d["bands"]},
            Lattice(d["lattice_rec"]["matrix"]),
            d["efermi"],
            labels_dict,
            structure=structure,
            projections=projections,
        )
    def get_projection_on_elements(self):
        """
        Method returning a dictionary of projections on elements.
        It sums over all available orbitals for each element.

        Returns:
            a dictionary in the {Spin.up:[][{Element:values}],
            Spin.down:[][{Element:values}]} format
            if there is no projections in the band structure
            returns an empty dict
        """
        result = {}
        for spin, v in self.projections.items():
            result[spin] = [
                [collections.defaultdict(float) for i in range(len(self.kpoints))] for j in range(self.nb_bands)
            ]
            for i, j in itertools.product(range(self.nb_bands), range(len(self.kpoints))):
                for key, item in v[i][j].items():
                    for key2, item2 in item.items():
                        # Lobster keys look like "Si1"; strip the trailing
                        # site index to recover the element symbol.
                        specie = str(Element(re.split(r"[0-9]+", key)[0]))
                        result[spin][i][j][specie] += item2
        return result
    def get_projections_on_elements_and_orbitals(self, el_orb_spec):
        """
        Method returning a dictionary of projections on elements and specific
        orbitals

        Args:
            el_orb_spec: A dictionary of Elements and Orbitals for which we want
                to have projections on. It is given as: {Element:[orbitals]},
                e.g., {'Si':['3s','3p']} or {'Si':['3s','3p_x', '3p_y', '3p_z']} depending on input files

        Returns:
            A dictionary of projections on elements in the
            {Spin.up:[][{Element:{orb:values}}],
            Spin.down:[][{Element:{orb:values}}]} format
            if there is no projections in the band structure returns an empty
            dict.
        """
        result = {}
        el_orb_spec = {get_el_sp(el): orbs for el, orbs in el_orb_spec.items()}
        for spin, v in self.projections.items():
            result[spin] = [
                [{str(e): collections.defaultdict(float) for e in el_orb_spec} for i in range(len(self.kpoints))]
                for j in range(self.nb_bands)
            ]
            for i, j in itertools.product(range(self.nb_bands), range(len(self.kpoints))):
                for key, item in v[i][j].items():
                    for key2, item2 in item.items():
                        specie = str(Element(re.split(r"[0-9]+", key)[0]))
                        if get_el_sp(str(specie)) in el_orb_spec:
                            if key2 in el_orb_spec[get_el_sp(str(specie))]:
                                result[spin][i][j][specie][key2] += item2
        return result
def get_reconstructed_band_structure(list_bs, efermi=None):
    """
    This method takes a list of band structures and reconstructs
    one band structure object from all of them.

    This is typically very useful when you split non self consistent
    band structure runs in several independent jobs and want to merge back
    the results

    Args:
        list_bs: A list of BandStructure or BandStructureSymmLine objects.
        efermi: The Fermi energy of the reconstructed band structure. If
            None is assigned an average of all the Fermi energy in each
            object in the list_bs is used.

    Returns:
        A BandStructure or BandStructureSymmLine object (depending on
        the type of the list_bs objects)
    """
    if efermi is None:
        # Default: average of the Fermi energies of the merged objects.
        efermi = sum(b.efermi for b in list_bs) / len(list_bs)
    rec_lattice = list_bs[0].lattice_rec
    # Only the bands common to every object can be merged.
    nb_bands = min(bs.nb_bands for bs in list_bs)
    kpoints = np.concatenate([[k.frac_coords for k in bs.kpoints] for bs in list_bs])
    # Later labels_dict entries override earlier ones for duplicate labels.
    dicts = [bs.labels_dict for bs in list_bs]
    labels_dict = {k: v.frac_coords for d in dicts for k, v in d.items()}

    eigenvals = {}
    eigenvals[Spin.up] = np.concatenate([bs.bands[Spin.up][:nb_bands] for bs in list_bs], axis=1)
    if list_bs[0].is_spin_polarized:
        eigenvals[Spin.down] = np.concatenate([bs.bands[Spin.down][:nb_bands] for bs in list_bs], axis=1)

    projections = {}
    if len(list_bs[0].projections) != 0:
        projs = [bs.projections[Spin.up][:nb_bands] for bs in list_bs]
        projections[Spin.up] = np.concatenate(projs, axis=1)
        if list_bs[0].is_spin_polarized:
            projs = [bs.projections[Spin.down][:nb_bands] for bs in list_bs]
            projections[Spin.down] = np.concatenate(projs, axis=1)

    if isinstance(list_bs[0], BandStructureSymmLine):
        return BandStructureSymmLine(
            kpoints,
            eigenvals,
            rec_lattice,
            efermi,
            labels_dict,
            structure=list_bs[0].structure,
            projections=projections,
        )
    return BandStructure(
        kpoints,
        eigenvals,
        rec_lattice,
        efermi,
        labels_dict,
        structure=list_bs[0].structure,
        projections=projections,
    )
|
richardtran415/pymatgen
|
pymatgen/electronic_structure/bandstructure.py
|
Python
|
mit
| 45,088
|
[
"CRYSTAL",
"pymatgen"
] |
f538ca024a3d2ee37b95ad53e51a63198c1d0426b89753afb97e32c4a87c7cbb
|
import unittest
import numpy as np
import pysal
import pysal.spreg as EC
from scipy import sparse
PEGP = pysal.examples.get_path
class TestBaseOLS(unittest.TestCase):
    """Regression tests for spreg OLS with a scipy-sparse design matrix.

    Expected values are hard-coded results for the columbus example
    dataset; they pin both the bare BaseOLS solver and the user-facing
    OLS wrapper together with all of its diagnostic statistics.
    """

    def setUp(self):
        # Dependent variable: housing values (HOVAL) as a 49x1 column vector.
        db = pysal.open(PEGP('columbus.dbf'),'r')
        y = np.array(db.by_col("HOVAL"))
        self.y = np.reshape(y, (49,1))
        # Explanatory variables: income and crime, stacked column-wise.
        X = []
        X.append(db.by_col("INC"))
        X.append(db.by_col("CRIME"))
        self.X = np.array(X).T
        # Rook-contiguity spatial weights, used by the spatial diagnostics.
        self.w = pysal.weights.rook_from_shapefile(PEGP("columbus.shp"))

    def test_ols(self):
        """BaseOLS: coefficient estimates and variance matrix."""
        # BaseOLS does not add the constant itself, so prepend a ones column.
        self.X = np.hstack((np.ones(self.y.shape),self.X))
        self.X = sparse.csr_matrix(self.X)
        ols = EC.ols.BaseOLS(self.y,self.X)
        np.testing.assert_array_almost_equal(ols.betas, np.array([[ 46.42818268], [ 0.62898397], [ -0.48488854]]))
        vm = np.array([[ 1.74022453e+02, -6.52060364e+00, -2.15109867e+00],
                       [ -6.52060364e+00, 2.87200008e-01, 6.80956787e-02],
                       [ -2.15109867e+00, 6.80956787e-02, 3.33693910e-02]])
        np.testing.assert_array_almost_equal(ols.vm, vm,6)

    def test_OLS(self):
        """OLS wrapper: coefficients plus the full diagnostic battery."""
        # The OLS wrapper adds the constant itself, so only INC/CRIME go in.
        self.X = sparse.csr_matrix(self.X)
        ols = EC.OLS(self.y, self.X, self.w, spat_diag=True, moran=True,
                     name_y='home value', name_x=['income','crime'],
                     name_ds='columbus', nonspat_diag=True)
        np.testing.assert_array_almost_equal(ols.aic, 408.73548964604873 ,7)
        np.testing.assert_array_almost_equal(ols.ar2, 0.32123239427957662 ,7)
        np.testing.assert_array_almost_equal(ols.betas,
            np.array([[ 46.42818268], [ 0.62898397], [ -0.48488854]]), 7)
        # Breusch-Pagan heteroskedasticity test.
        bp = np.array([2, 5.7667905131212587, 0.05594449410070558])
        ols_bp = np.array([ols.breusch_pagan['df'], ols.breusch_pagan['bp'], ols.breusch_pagan['pvalue']])
        np.testing.assert_array_almost_equal(bp, ols_bp, 7)
        np.testing.assert_array_almost_equal(ols.f_stat,
            (12.358198885356581, 5.0636903313953024e-05), 7)
        # Jarque-Bera normality test.
        jb = np.array([2, 39.706155069114878, 2.387360356860208e-09])
        ols_jb = np.array([ols.jarque_bera['df'], ols.jarque_bera['jb'], ols.jarque_bera['pvalue']])
        np.testing.assert_array_almost_equal(ols_jb,jb, 7)
        # White heteroskedasticity test.
        white = np.array([5, 2.90606708, 0.71446484])
        ols_white = np.array([ols.white['df'], ols.white['wh'], ols.white['pvalue']])
        np.testing.assert_array_almost_equal(ols_white,white, 7)
        np.testing.assert_equal(ols.k, 3)
        # Koenker-Bassett heteroskedasticity test.
        kb = {'df': 2, 'kb': 2.2700383871478675, 'pvalue': 0.32141595215434604}
        for key in kb:
            self.assertAlmostEqual(ols.koenker_bassett[key], kb[key], 7)
        # Lagrange-multiplier spatial dependence diagnostics.
        np.testing.assert_array_almost_equal(ols.lm_error,
            (4.1508117035117893, 0.041614570655392716),7)
        np.testing.assert_array_almost_equal(ols.lm_lag,
            (0.98279980617162233, 0.32150855529063727), 7)
        np.testing.assert_array_almost_equal(ols.lm_sarma,
            (4.3222725729143736, 0.11519415308749938), 7)
        np.testing.assert_array_almost_equal(ols.logll, -201.3677448230244 ,7)
        np.testing.assert_array_almost_equal(ols.mean_y, 38.436224469387746,7)
        # Moran's I on the residuals: (I, z-value, p-value).
        np.testing.assert_array_almost_equal(ols.moran_res[0], 0.20373540938,7)
        np.testing.assert_array_almost_equal(ols.moran_res[1], 2.59180452208,7)
        np.testing.assert_array_almost_equal(ols.moran_res[2], 0.00954740031251,7)
        np.testing.assert_array_almost_equal(ols.mulColli, 12.537554873824675 ,7)
        np.testing.assert_equal(ols.n, 49)
        np.testing.assert_equal(ols.name_ds, 'columbus')
        np.testing.assert_equal(ols.name_gwk, None)
        np.testing.assert_equal(ols.name_w, 'unknown')
        np.testing.assert_equal(ols.name_x, ['CONSTANT', 'income', 'crime'])
        np.testing.assert_equal(ols.name_y, 'home value')
        np.testing.assert_array_almost_equal(ols.predy[3], np.array([ 33.53969014]),7)
        np.testing.assert_array_almost_equal(ols.r2, 0.34951437785126105 ,7)
        # Robust LM diagnostics.
        np.testing.assert_array_almost_equal(ols.rlm_error,
            (3.3394727667427513, 0.067636278225568919),7)
        np.testing.assert_array_almost_equal(ols.rlm_lag,
            (0.17146086940258459, 0.67881673703455414), 7)
        np.testing.assert_equal(ols.robust, 'unadjusted')
        np.testing.assert_array_almost_equal(ols.schwarz, 414.41095054038061,7 )
        np.testing.assert_array_almost_equal(ols.sig2, 231.4568494392652,7 )
        np.testing.assert_array_almost_equal(ols.sig2ML, 217.28602192257551,7 )
        np.testing.assert_array_almost_equal(ols.sig2n, 217.28602192257551, 7)
        # t statistic and p-value for the CRIME coefficient.
        np.testing.assert_array_almost_equal(ols.t_stat[2][0], -2.65440864272,7)
        np.testing.assert_array_almost_equal(ols.t_stat[2][1], 0.0108745049098,7)
# Allow running this test module directly with ``python test_ols_sparse.py``.
if __name__ == '__main__':
    unittest.main()
|
AlanZatarain/pysal
|
pysal/spreg/tests/test_ols_sparse.py
|
Python
|
bsd-3-clause
| 5,118
|
[
"COLUMBUS"
] |
54dbad4fc63fd5f4abec2d367401fea6ec3b1c1265a6aa84140c92c48e632a08
|
from sys import exit
from random import randint
class Scene(object):
    """Base class for all game scenes; subclasses must override enter()."""

    def enter(self):
        # Abstract-method stand-in: abort the game if a subclass forgot to
        # implement enter().
        print "This scene is not yet configured. Subclass it and implement enter()."
        exit(1)
class Engine(object):
    """Drives the game: runs scenes from a map until the final scene.

    The map must provide opening_scene() and next_scene(name); each
    scene's enter() returns the name of the scene to run next.
    """

    def __init__(self, scene_map):
        self.scene_map = scene_map

    def play(self):
        scene = self.scene_map.opening_scene()
        final_scene = self.scene_map.next_scene('finished')
        # Keep entering scenes until we arrive at the final one.
        while scene != final_scene:
            scene = self.scene_map.next_scene(scene.enter())
        # Enter the final scene too so it can report the outcome.
        scene.enter()
class Death(Scene):
    """Terminal scene: prints a random taunt and ends the game."""

    # Pool of snarky death messages; one is picked at random in enter().
    quips = [
        "You died. You kinda suck at this.",
        "Your mom would be proud...if she were smarter.",
        "Such a luser.",
        "I have a small puppy that's better at this."
    ]

    def enter(self):
        # Death.quips and self.quips name the same class attribute here.
        print Death.quips[randint(0, len(self.quips)-1)]
        exit(1)
class CentralCorridor(Scene):
    """Opening scene: a Gothon blocks the way to the Weapon Armory.

    Returns the name of the next scene: 'death' on a bad choice,
    'laser_weapon_armory' on the winning choice, or 'central_corridor'
    again on unrecognized input.
    """

    def enter(self):
        print "The Gothons of Planet Percal #25 have invaded your ship and destroyed"
        print "your entire crew. You are the last surviving member and your last"
        print "mission is to get the neutron destruct bomb from the Weapons Armory,"
        print "put it in the bridge, and blow the ship up after getting into an "
        print "escape pod."
        print "\n"
        print "You're running down the central corridor to the Weapons Armory when"
        print "a Gothon jumps out, red scaly skin, dark grimy teeth, and evil clown costume"
        print "flowing around his hate filled body. He's blocking the door to the"
        print "Armory and about to pull a weapon to blast you."
        action = raw_input("> ")
        if action == "shoot!":
            print "Quick on the draw you yank out your blaster and fire it at the Gothon."
            print "His clown costume is flowing and moving around his body, which throws"
            print "off your aim. Your laser hits his costume but misses him entirely. This"
            print "completely ruins his brand new costume his mother bought him, which"
            print "makes him fly into an insane rage and blast you repeatedly in the face until"
            print "you are dead. Then he eats you."
            return 'death'
        elif action == "dodge!":
            print "Like a world class boxer you dodge, weave, slip and slide right"
            print "as the Gothon's blaster cranks a laser past your head."
            print "In the middle of your artful dodge your foot slips and you"
            print "bang your head on the metal wall and pass out."
            print "You wake up shortly after only to die as the Gothon stomps on"
            print "your head and eats you."
            return 'death'
        elif action == "tell a joke":
            # The winning branch: the "joke" is ROT13-encoded text.
            print "Lucky for you they made you learn Gothon insults in the academy."
            print "You tell the one Gothon joke you know:"
            print "Lbhe zbgure vf fb sng, jura fur fvgf nebhaq gur ubhfr, fur fvgf nebhaq gur ubhfr."
            print "The Gothon stops, tries not to laugh, then busts out laughing and can't move."
            print "While he's laughing you run up and shoot him square in the head"
            print "putting him down, then jump through the Weapon Armory door."
            return 'laser_weapon_armory'
        else:
            # Unknown input: repeat this scene.
            print "DOES NOT COMPUTE!"
            return 'central_corridor'
class LaserWeaponArmory(Scene):
def enter(self):
print "You do a dive roll into the Weapon Armory, crouch and scan the room"
print "for more Gothons that might be hiding. It's dead quiet, too quiet."
print "You stand up and run to the far side of the room and find the"
print "neutron bomb in its container. There's a keypad lock on the box"
print "and you need the code to get the bomb out. If you get the code"
print "wrong 10 times then the lock closes forever and you can't"
print "get the bomb. The code is 3 digits."
code = "%d%d%d" % (randint(1,9), randint(1,9), randint(1,9))
guess = raw_input("[keypad]> ")
guesses = 0
while guess != code and guesses < 10:
print "BZZZZEDDD!"
guesses += 1
guess = raw_input("[keypad]> ")
if guess == code:
print "The container clicks open and the seal breaks, letting gas out."
print "You grab the neutron bomb and run as fast as you can to the"
print "bridge where you must place it in the right spot."
return 'the_bridge'
else:
print "The lock buzzes one last time and then you hear a sickening"
print "melting sound as the mechanism is fused together."
print "You decide to sit there, and finally the Gothons blow up the"
print "ship from their ship and you die."
return 'death'
class TheBridge(Scene):
    """Bomb-placement scene on the bridge.

    Returns 'death' on the wrong choice, 'escape_pod' on the right one,
    or 'the_bridge' again on unrecognized input.
    """

    def enter(self):
        print "You burst onto the Bridge with the netron destruct bomb"
        print "under your arm and surprise 5 Gothons who are trying to"
        print "take control of the ship. Each of them has an even uglier"
        print "clown costume than the last. They haven't pulled their"
        print "weapons out yet, as they see the active bomb under your"
        print "arm and don't want to set it off."
        action = raw_input("> ")
        if action == "throw the bomb":
            print "In a panic you throw the bomb at the group of Gothons"
            print "and make a leap for the door. Right as you drop it a"
            print "Gothon shoots you right in the back killing you."
            print "As you die you see another Gothon frantically try to disarm"
            print "the bomb. You die knowing they will probably blow up when"
            print "it goes off."
            return 'death'
        elif action == "slowly place the bomb":
            print "You point your blaster at the bomb under your arm"
            print "and the Gothons put their hands up and start to sweat."
            print "You inch backward to the door, open it, and then carefully"
            print "place the bomb on the floor, pointing your blaster at it."
            print "You then jump back through the door, punch the close button"
            print "and blast the lock so the Gothons can't get out."
            print "Now that the bomb is placed you run to the escape pod to"
            print "get off this tin can."
            return 'escape_pod'
        else:
            # Unknown input: repeat this scene.
            print "DOES NOT COMPUTE!"
            return "the_bridge"
class EscapePod(Scene):
def enter(self):
print "You rush through the ship desperately trying to make it to"
print "the escape pod before the whole ship explodes. It seems like"
print "hardly any Gothons are on the ship, so your run is clear of"
print "interference. You get to the chamber with the escape pods, and"
print "now need to pick one to take. Some of them could be damaged"
print "but you don't have time to look. There's 5 pods, which one"
print "do you take?"
good_pod = randint(1,5)
guess = raw_input("[pod #]> ")
if int(guess) != good_pod:
print "You jump into pod %s and hit the eject button." % guess
print "The pod escapes out into the void of space, then"
print "implodes as the hull ruptures, crushing your body"
print "into jam jelly."
return 'death'
else:
print "You jump into pod %s and hit the eject button." % guess
print "The pod easily slides out into space heading to"
print "the planet below. As it flies to the planet, you look"
print "back and see your ship implode then explode like a"
print "bright star, taking out the Gothon ship at the same"
print "time. You won!"
return 'finished'
class Finished(Scene):
    """Victory scene shown when the player wins the game."""

    def enter(self):
        print "You won! Good job."
        return 'finished'
class Map(object):
    """Registry of every scene in the game, keyed by scene name."""

    # One shared instance per scene; Engine asks for them by name.
    scenes = {
        'central_corridor': CentralCorridor(),
        'laser_weapon_armory': LaserWeaponArmory(),
        'the_bridge': TheBridge(),
        'escape_pod': EscapePod(),
        'death': Death(),
        'finished': Finished()
    }

    def __init__(self, start_scene):
        self.start_scene = start_scene

    def next_scene(self, scene_name):
        """Look up a scene instance by name (None if unknown)."""
        return Map.scenes.get(scene_name)

    def opening_scene(self):
        """Return the scene the game starts in."""
        return self.next_scene(self.start_scene)
# Wire up the scene map and start the game in the central corridor.
a_map = Map('central_corridor')
a_game = Engine(a_map)
a_game.play()
|
amolborcar/learnpythonthehardway
|
ex43.py
|
Python
|
mit
| 7,999
|
[
"BLAST"
] |
57443ef0a15a140551c90fb7df29ebd693da8d0f90054e18a45004006e298779
|
########################################################################
# File : CSCLI.py
# Author : Adria Casajus
########################################################################
import sys
import types
import atexit
import os
import readline
from DIRAC import gLogger
from DIRAC.Core.Utilities.File import mkDir
from DIRAC.Core.Base.CLI import CLI, colorize
from DIRAC.ConfigurationSystem.private.Modificator import Modificator
from DIRAC.ConfigurationSystem.Client.ConfigurationData import gConfigurationData
from DIRAC.Core.DISET.RPCClient import RPCClient
__RCSID__ = "$Id$"
def _showTraceback():
  """Print the current exception (type, value, traceback) framed by separators."""
  import traceback
  excepType, execpValue = sys.exc_info()[:2]
  print "________________________\n"
  print "Exception", excepType, ":", execpValue
  traceback.print_tb( sys.exc_info()[2] )
  print "________________________\n"
def _printComment( comment ):
  """Print each line of a multi-line comment prefixed with '# '.

  NOTE(review): the ``[:-1]`` slice assumes *comment* ends with a newline
  (so the last split element is empty); a comment without a trailing
  newline would lose its final line -- confirm against the Modificator's
  getComment() output.
  """
  commentList = comment.split( "\n" )
  for commentLine in commentList[ :-1 ]:
    print "# %s" % commentLine.strip()
def _appendExtensionIfMissing( filename ):
dotPosition = filename.rfind( "." )
if dotPosition > -1:
filename = filename[ :dotPosition ]
return "%s.cfg" % filename
class CSCLI( CLI ):
  """Interactive console for editing the DIRAC Configuration Service.

  Commands are the ``do_*`` methods, dispatched by the command loop of
  the CLI base class (``self.cmdloop()``).  Edits accumulate locally in
  a Modificator and reach the master server only on an explicit
  ``writeToServer``; unsaved changes are dumped to a backup file on quit.
  """

  def __init__( self ):
    CLI.__init__( self )
    # Connection state; do_connect() below fills these in.
    self.connected = False
    self.masterURL = "unset"
    self.writeEnabled = False
    self.modifiedData = False
    self.rpcClient = None
    self.do_connect()
    # The Modificator holds the locally edited configuration; hand it the
    # RPC client when connected so it can pull/push remote data.
    if self.connected:
      self.modificator = Modificator ( self.rpcClient )
    else:
      self.modificator = Modificator()
    self.indentSpace = 20
    # Base filename used to dump unsaved changes on quit.
    self.backupFilename = "dataChanges"
    # store history
    histfilename = os.path.basename(sys.argv[0])
    historyFile = os.path.expanduser( "~/.dirac/%s.history" % histfilename[0:-3])
    mkDir(os.path.dirname(historyFile))
    if os.path.isfile( historyFile ):
      readline.read_history_file( historyFile )
    readline.set_history_length(1000)
    # Persist the readline history when the interpreter exits.
    atexit.register( readline.write_history_file, historyFile )

  def start( self ):
    """Load remote data and credentials, then run the command loop."""
    if self.connected:
      self.modificator.loadFromRemote()
      retVal = self.modificator.loadCredentials()
      if not retVal[ 'OK' ]:
        print "There was an error gathering your credentials"
        print retVal[ 'Message' ]
        self._setStatus( False )
    try:
      self.cmdloop()
    except KeyboardInterrupt:
      # Ctrl-C is treated as a clean quit (do_quit backs up changes).
      gLogger.warn( "Received a keyboard interrupt." )
      self.do_quit( "" )

  def _setConnected( self, connected, writeEnabled ):
    """Record connection state and refresh the prompt accordingly."""
    self.connected = connected
    self.modifiedData = False
    self.writeEnabled = writeEnabled
    # Prompt shows the master URL plus a color-coded connection status.
    if connected:
      if writeEnabled:
        self.prompt = "(%s)-%s> " % ( self.masterURL, colorize( "Connected", "green" ) )
      else:
        self.prompt = "(%s)-%s> " % ( self.masterURL, colorize( "Connected (RO)", "yellow" ) )
    else:
      self.prompt = "(%s)-%s> " % ( self.masterURL, colorize( "Disconnected", "red" ) )

  def do_quit( self, dummy ):
    """
    Exits the application without sending changes to server

    Usage: quit
    """
    print
    # Do not lose unsent edits: dump them to the backup file first.
    if self.modifiedData:
      print "Changes are about to be written to file for later use."
      self.do_writeToFile( self.backupFilename )
      print "Changes written to %s.cfg" % self.backupFilename
    sys.exit( 0 )

  # def retrieveData( self ):
  #   if not self.connected:
  #     return False
  #   response = self.rpcClient.dumpCompressed()
  #   if response[ 'Status' ] == 'OK':
  #     self.cDataHolder.loadFromCompressedSource( response[ 'Value' ] )
  #     gLogger.info( "Data retrieved from server." )
  #     return True
  #   else:
  #     gLogger.error( "Can't retrieve updated data from server." )
  #     return False

  def _setStatus( self, connected = True ):
    """Query write permission from the server and update the prompt/state."""
    if not connected:
      self.masterURL = "unset"
      self._setConnected( False, False )
    else:
      retVal = self.rpcClient.writeEnabled()
      if retVal[ 'OK' ]:
        if retVal[ 'Value' ] == True:
          self._setConnected( True, True )
        else:
          self._setConnected( True, False )
      else:
        print "Server returned an error: %s" % retVal[ 'Message' ]
        self._setConnected( True, False )

  def _tryConnection( self ):
    """Attempt an RPC connection to self.masterURL and record the outcome."""
    print "Trying connection to %s" % self.masterURL
    try:
      self.rpcClient = RPCClient( self.masterURL )
      self._setStatus()
    except Exception as x:
      gLogger.error( "Couldn't connect to master CS server", "%s (%s)" % ( self.masterURL, str( x ) ) )
      self._setStatus( False )

  def do_connect( self, args = '' ):
    """
    Connects to configuration master server (in specified url if provided).

    Usage: connect <url>
    """
    # No argument: fall back to the master server from the local config.
    if not args or type( args ) not in types.StringTypes:
      self.masterURL = gConfigurationData.getMasterServer()
      if self.masterURL != "unknown" and self.masterURL:
        self._tryConnection()
      else:
        self._setStatus( False )
    else:
      splitted = args.split()
      if len( splitted ) == 0:
        print "Must specify witch url to connect"
        self._setStatus( False )
      else:
        self.masterURL = splitted[0].strip()
        self._tryConnection()

  def do_sections( self, args ):
    """
    Shows all sections with their comments.
    If no section is specified, root is taken.

    Usage: sections <section>
    """
    try:
      argList = args.split()
      if argList:
        baseSection = argList[0].strip()
      else:
        baseSection = "/"
      if not self.modificator.existsSection( baseSection ):
        print "Section %s does not exist" % baseSection
        return
      sectionList = self.modificator.getSections( baseSection )
      if not sectionList:
        print "Section %s is empty" % baseSection
        return
      for section in sectionList:
        section = "%s/%s" % ( baseSection, section )
        self.printPair( section, self.modificator.getComment( section ) , " #" )
    except:
      _showTraceback()

  def do_options( self, args ):
    """
    Shows all options and values of a specified section

    Usage: options <section>
    """
    try:
      argList = args.split()
      if argList:
        section = argList[0].strip()
      else:
        print "Which section?"
        return
      if not self.modificator.existsSection( section ):
        print "Section %s does not exist" % section
        return
      optionsList = self.modificator.getOptions( section )
      if not optionsList:
        print "Section %s has no options" % section
        return
      for option in optionsList:
        _printComment( self.modificator.getComment( "%s/%s" % ( section, option ) ) )
        self.printPair( option, self.modificator.getValue( "%s/%s" % ( section, option ) ), "=" )
    except:
      _showTraceback()

  def do_get( self, args ):
    """
    Shows value and comment for specified option in section

    Usage: get <path to option>
    """
    try:
      argList = args.split()
      if argList:
        optionPath = argList[0].strip()
      else:
        print "Which option?"
        return
      if self.modificator.existsOption( optionPath ):
        option = optionPath.split( "/" )[-1]
        _printComment( self.modificator.getComment( optionPath ) )
        self.printPair( option, self.modificator.getValue( optionPath ), "=" )
      else:
        print "Option %s does not exist" % optionPath
    except:
      _showTraceback()

  def do_writeToServer( self, dummy ):
    """
    Sends changes to server.

    Usage: writeToServer
    """
    if not self.connected:
      print "You are not connected!"
      return
    try:
      if not self.writeEnabled:
        print "This server can't receive data modifications"
        return
      # Warn when nothing was modified locally; the user may still force
      # an upload (answering anything but yes aborts).
      if not self.modifiedData:
        while True:
          choice = raw_input( "Data has not been modified, do you still want to upload changes? yes/no [no]: " )
          choice = choice.lower()
          if choice in ( "yes", "y" ):
            break
          else:
            print "Commit aborted"
            return
      choice = raw_input( "Do you really want to send changes to server? yes/no [no]: " )
      choice = choice.lower()
      if choice in ( "yes", "y" ):
        print "Uploading changes to %s (It may take some seconds)..." % self.masterURL
        response = self.modificator.commit()
        if response[ 'OK' ]:
          self.modifiedData = False
          print "Data sent to server."
          # Re-sync with the server copy after a successful commit.
          self.modificator.loadFromRemote()
        else:
          print "Error sending data, server said: %s" % response['Message']
          return
      else:
        print "Commit aborted"
    except Exception as x:
      _showTraceback()
      print "Could not upload changes. ", str( x )

  def do_set( self, args ):
    """
    Sets option's value

    Usage: set <optionPath> <value>...

    From second argument until the last one is considered option's value

    NOTE: If specified section does not exist it is created.
    """
    try:
      argsList = args.split()
      if len( argsList ) < 2:
        print "Must specify option and value to use"
        return
      optionPath = argsList[0].strip()
      value = " ".join( argsList[1:] ).strip()
      self.modificator.setOptionValue( optionPath, value )
      self.modifiedData = True
    except Exception as x:
      print "Cannot insert value: ", str( x )

  def do_removeOption( self, args ):
    """
    Removes an option.

    Usage: removeOption <option>

    There can be empty sections.
    """
    try:
      argsList = args.split()
      if len( argsList ) < 1:
        print "Must specify option to delete"
        return
      optionPath = argsList[0].strip()
      # Destructive operation: require explicit confirmation.
      choice = raw_input( "Are you sure you want to delete %s? yes/no [no]: " % optionPath )
      choice = choice.lower()
      if choice in ( "yes", "y", "true" ):
        if self.modificator.removeOption( optionPath ):
          self.modifiedData = True
        else:
          print "Can't be deleted"
      else:
        print "Aborting removal."
    except Exception as x:
      print "Error removing option, %s" % str( x )

  def do_removeSection( self, args ):
    """
    Removes a section.

    Usage: removeSection <section>
    """
    try:
      argsList = args.split()
      if len( argsList ) < 1:
        print "Must specify section to delete"
        return
      section = argsList[0].strip()
      # Destructive operation: require explicit confirmation.
      choice = raw_input( "Are you sure you want to delete %s? yes/no [no]: " % section )
      choice = choice.lower()
      if choice in ( "yes", "y", "true" ):
        if self.modificator.removeSection( section ):
          self.modifiedData = True
        else:
          print "Can't be deleted"
      else:
        print "Aborting removal."
    except Exception as x:
      print "Error removing section, %s" % str( x )

  def do_setComment( self, args ):
    """
    Sets option or section's comment. Requested entry MUST exist.

    Usage: set <option/section> <comment>...

    From third argument until the last one is considered option's comment.
    """
    try:
      argsList = args.split()
      if len( argsList ) < 2:
        print "Must specify option and value to use"
        return
      entryPath = argsList[0].strip()
      value = " ".join( argsList[1:] ).strip()
      self.modificator.setComment( entryPath, value )
      self.modifiedData = True
    except Exception as x:
      print "Cannot insert comment: ", str( x )

  def do_writeToFile( self, args ):
    """
    Writes modification to file for later use.

    Usage: writeToFile <filename>.cfg

    Note that if a file extension is specified, it is replaced by .cfg suffix.
    If not it is added automatically
    """
    try:
      if len( args ) == 0:
        print "Filename to write must be specified!"
        return
      filename = args.split()[0].strip()
      filename = _appendExtensionIfMissing( filename )
      self.modificator.dumpToFile( filename )
    except Exception as x:
      print "Couldn't write to file %s: %s" % ( filename, str( x ) )

  def do_readFromFile( self, args ):
    """
    Reads data from filename to be used. Actual data will be replaced!

    Usage: readFromFile <filename>.cfg

    Note that if a file extension is specified, it is replaced by .cfg suffix.
    If not it is added automatically
    """
    try:
      if len( args ) == 0:
        print "Filename to read must be specified!"
        return
      filename = args.split()[0].strip()
      filename = _appendExtensionIfMissing( filename )
      self.modificator.loadFromFile( filename )
      self.modifiedData = True
    except Exception as x:
      print "Couldn't read from file %s: %s" % ( filename, str( x ) )

  def do_mergeFromFile( self, args ):
    """
    Reads data from filename and merges it with current data.
    Data read from file has more precedence that current one.

    Usage: mergeFromFile <filename>.cfg

    Note that if a file extension is specified, it is replaced by .cfg suffix.
    If not it is added automatically
    """
    try:
      if len( args ) == 0:
        print "Filename to read must be specified!"
        return
      filename = args.split()[0].strip()
      filename = _appendExtensionIfMissing( filename )
      self.modificator.mergeFromFile( filename )
      self.modifiedData = True
    except Exception as x:
      _showTraceback()
      print "Couldn't read from file %s: %s" % ( filename, str( x ) )

  def do_showData( self, dummy ):
    """
    Shows the current modified configuration

    Usage: showData
    """
    print self.modificator

  def do_showHistory( self, args ):
    """
    Shows the last commit history

    Usage: showHistory <update limit>
    """
    try:
      argsList = args.split()
      # Default to the last 100 commits when no limit is given.
      limit = 100
      if len( argsList ) > 0:
        limit = int( argsList[0] )
      history = self.modificator.getHistory( limit )
      print "%s recent commits:" % limit
      for entry in history:
        self.printPair( entry[0], entry[1], "@" )
    except:
      _showTraceback()

  def do_showDiffWithServer( self, dummy ):
    """
    Shows diff with lastest version in server

    Usage: showDiffWithServer
    """
    try:
      diffData = self.modificator.showCurrentDiff()
      print "Diff with latest from server ( + local - remote )"
      # Color the unified-diff markers: removals red, additions green,
      # '?' hint lines yellow (trailing comma suppresses the newline).
      for line in diffData:
        if line[0] in ( '-' ):
          print colorize( line, "red" )
        elif line[0] in ( '+' ):
          print colorize( line, "green" )
        elif line[0] in ( '?' ):
          print colorize( line, "yellow" ),
    except:
      _showTraceback()

  def do_showDiffBetweenVersions( self, args ):
    """
    Shows diff between two versions

    Usage: showDiffBetweenVersions <version 1 with spaces> <version 2 with spaces>
    """
    try:
      argsList = args.split()
      # Each version identifier is expected to be two space-separated tokens.
      if len( argsList ) < 4:
        print "What are the two versions to compare?"
        return
      v1 = " ".join ( argsList[0:2] )
      v2 = " ".join ( argsList[2:4] )
      print "Comparing '%s' with '%s' " % ( v1, v2 )
      diffData = self.modificator.getVersionDiff( v1, v2 )
      print "Diff with latest from server ( + %s - %s )" % ( v2, v1 )
      for line in diffData:
        if line[0] in ( '-' ):
          print colorize( line, "red" )
        elif line[0] in ( '+' ):
          print colorize( line, "green" )
        elif line[0] in ( '?' ):
          print colorize( line, "yellow" ),
        else:
          print line
    except:
      _showTraceback()

  def do_rollbackToVersion( self, args ):
    """
    rolls back to user selected version of the configuration

    Usage: rollbackToVersion <version with spaces>>
    """
    try:
      argsList = args.split()
      # The version identifier is expected to be two space-separated tokens.
      if len( argsList ) < 2:
        print "What version to rollback?"
        return
      version = " ".join ( argsList[0:2] )
      choice = raw_input( "Do you really want to rollback to version %s? yes/no [no]: " % version )
      choice = choice.lower()
      if choice in ( "yes", "y" ):
        response = self.modificator.rollbackToVersion( version )
        if response[ 'OK' ]:
          self.modifiedData = False
          print "Rolled back."
          self.modificator.loadFromRemote()
        else:
          print "Error sending data, server said: %s" % response['Message']
    except:
      _showTraceback()

  def do_mergeWithServer( self, dummy ):
    """
    Shows diff with lastest version in server

    Usage: diffWithServer
    """
    try:
      choice = raw_input( "Do you want to merge with server configuration? yes/no [no]: " )
      choice = choice.lower()
      if choice in ( "yes", "y" ):
        retVal = self.modificator.mergeWithServer()
        if retVal[ 'OK' ]:
          print "Merged"
        else:
          print "There was an error: ", retVal[ 'Message' ]
      else:
        print "Merge aborted"
    except:
      _showTraceback()
|
andresailer/DIRAC
|
ConfigurationSystem/Client/CSCLI.py
|
Python
|
gpl-3.0
| 16,811
|
[
"DIRAC"
] |
2cdfe70a6ad704a035eb968089682c3d151bd12f379a89de82399797cd6e491d
|
from os.path import join
from os.path import relpath
from re import compile
from contextlib import contextmanager
from ..staging import COMMAND_VERSION_FILENAME
from ..action_mapper import FileActionMapper
from logging import getLogger
log = getLogger(__name__)
# All output files marked with from_work_dir attributes will be copied or
# downloaded. This pattern picks up additional files to copy back - such as
# those associated with multiple outputs and metadata configuration. Set to .*
# to just copy everything.
COPY_FROM_WORKING_DIRECTORY_PATTERN = compile(r"primary_.*|galaxy.json|metadata_.*|dataset_\d+\.dat|__instrument_.*|dataset_\d+_files.+")
def finish_job(client, cleanup_job, job_completed_normally, client_outputs, lwr_outputs):
    """Download results from the remote LWR server and clean up its staging
    directory (when the cleanup policy allows it).

    Returns the list of exceptions raised while collecting outputs; empty
    when everything was fetched successfully or the job did not complete
    normally.
    """
    failure_exceptions = []
    if job_completed_normally:
        # Only a normally completed job has outputs worth collecting.
        results_stager = ResultsCollector(
            ClientOutputCollector(client),
            FileActionMapper(client),
            client_outputs,
            lwr_outputs,
        )
        failure_exceptions = results_stager.collect()
    __clean(failure_exceptions, cleanup_job, client)
    return failure_exceptions
class ClientOutputCollector(object):
    """Fetches locally staged outputs from the remote server via the client."""

    def __init__(self, client):
        self.client = client

    def collect_output(self, results_collector, output_type, action, name):
        # Outputs whose staging action is remote were already handled by
        # the LWR server itself; nothing to do on the client side.
        if not action.staging_action_local:
            return False
        self.client.fetch_output(
            path=action.path,
            name=name,
            working_directory=results_collector.client_outputs.working_directory,
            output_type=output_type,
            action_type=action.action_type,
        )
        return True
class ResultsCollector(object):
    """Walks every expected job output and downloads it via the supplied
    output_collector, using the action_mapper to decide how each path is
    fetched.  Download failures are recorded, never raised."""

    def __init__(self, output_collector, action_mapper, client_outputs, lwr_outputs):
        self.output_collector = output_collector
        self.action_mapper = action_mapper
        self.client_outputs = client_outputs
        self.lwr_outputs = lwr_outputs
        # Working-directory files already fetched, so they are not downloaded twice.
        self.downloaded_working_directory_files = []
        self.exception_tracker = DownloadExceptionTracker()
        self.output_files = client_outputs.output_files
        # working_directory_contents may be None for legacy LWR servers.
        self.working_directory_contents = lwr_outputs.working_directory_contents or []

    def collect(self):
        """Fetch all output categories; return the list of collection failures."""
        self.__collect_working_directory_outputs()
        self.__collect_outputs()
        self.__collect_version_file()
        self.__collect_other_working_directory_files()
        return self.exception_tracker.collection_failure_exceptions

    def __collect_working_directory_outputs(self):
        working_directory = self.client_outputs.working_directory
        # Fetch explicit working directory outputs.
        for source_file, output_file in self.client_outputs.work_dir_outputs:
            name = relpath(source_file, working_directory)
            lwr_name = self.lwr_outputs.path_helper.remote_name(name)
            if self._attempt_collect_output('output_workdir', path=output_file, name=lwr_name):
                self.downloaded_working_directory_files.append(lwr_name)
            # Remove from full output_files list so don't try to download directly.
            try:
                self.output_files.remove(output_file)
            except ValueError:
                raise Exception("Failed to remove %s from %s" % (output_file, self.output_files))

    def __collect_outputs(self):
        # Legacy LWR not returning list of files, iterate over the list of
        # expected outputs for tool.
        for output_file in self.output_files:
            # Fetch output directly...
            output_generated = self.lwr_outputs.has_output_file(output_file)
            if output_generated is None:
                # None means the server could not say; attempt a legacy fetch.
                self._attempt_collect_output('legacy', output_file)
            elif output_generated:
                self._attempt_collect_output('output', output_file)
                # Also fetch any extra files associated with this output
                # (e.g. composite datasets).
                for galaxy_path, lwr_name in self.lwr_outputs.output_extras(output_file).iteritems():
                    self._attempt_collect_output('output', path=galaxy_path, name=lwr_name)
            # else not output generated, do not attempt download.

    def __collect_version_file(self):
        version_file = self.client_outputs.version_file
        # output_directory_contents may be none for legacy LWR servers.
        lwr_output_directory_contents = (self.lwr_outputs.output_directory_contents or [])
        if version_file and COMMAND_VERSION_FILENAME in lwr_output_directory_contents:
            self._attempt_collect_output('output', version_file, name=COMMAND_VERSION_FILENAME)

    def __collect_other_working_directory_files(self):
        working_directory = self.client_outputs.working_directory
        # Fetch remaining working directory outputs of interest.
        for name in self.working_directory_contents:
            if name in self.downloaded_working_directory_files:
                continue
            # Only pull files matching the module-level interesting-files
            # pattern (primary_*, galaxy.json, metadata_*, ...).
            if COPY_FROM_WORKING_DIRECTORY_PATTERN.match(name):
                output_file = join(working_directory, self.lwr_outputs.path_helper.local_name(name))
                if self._attempt_collect_output(output_type='output_workdir', path=output_file, name=name):
                    self.downloaded_working_directory_files.append(name)

    def _attempt_collect_output(self, output_type, path, name=None):
        # path is final path on galaxy server (client)
        # name is the 'name' of the file on the LWR server (possible a relative)
        # path.
        collected = False
        with self.exception_tracker():
            # output_action_type cannot be 'legacy' but output_type may be
            # eventually drop support for legacy mode (where type wasn't known)
            # ahead of time.
            output_action_type = 'output_workdir' if output_type == 'output_workdir' else 'output'
            action = self.action_mapper.action(path, output_action_type)
            if self._collect_output(output_type, action, name):
                collected = True
        return collected

    def _collect_output(self, output_type, action, name):
        return self.output_collector.collect_output(self, output_type, action, name)
class DownloadExceptionTracker(object):
    """Context manager factory that records (instead of propagating) any
    exception raised while collecting an output."""

    def __init__(self):
        self.collection_failure_exceptions = []

    @contextmanager
    def __call__(self):
        try:
            yield
        except Exception as failure:
            # Swallow the error but remember it for the final report.
            self.collection_failure_exceptions.append(failure)
def __clean(collection_failure_exceptions, cleanup_job, client):
    """Remove the remote staging directory according to the cleanup policy:
    'always' forces cleanup, 'never' suppresses it, anything else cleans up
    only when every output was collected successfully."""
    failed = bool(collection_failure_exceptions)
    should_cleanup = cleanup_job == "always" or (not failed and cleanup_job != "never")
    if should_cleanup:
        try:
            client.clean()
        except Exception:
            # Best-effort cleanup: a failure here must not fail the job.
            log.warn("Failed to cleanup remote LWR job")
__all__ = [finish_job]
|
jmchilton/lwr
|
lwr/lwr_client/staging/down.py
|
Python
|
apache-2.0
| 7,030
|
[
"Galaxy"
] |
c2d2e354a70fe9af8c7d8d91bbff8ae14dc153f040baa2bffe539ef7b98a46cc
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.