text stringlengths 38 1.54M |
|---|
class Node:
    """A node of a singly linked list.

    Each node stores a value and a reference to the next node (or None at
    the tail).
    """

    def __init__(self, value):
        self.value = value
        self.next = None

    def add(self, value):
        # NOTE: this overwrites self.next with a fresh node rather than
        # appending at the tail of the list.
        self.next = Node(value)

    def middle_node(self):
        """Return the value of the middle node of the list starting here.

        Uses the classic slow/fast two-pointer walk; for an even-length
        list the earlier of the two central nodes is returned.
        """
        current_node = self
        if not current_node.next:
            return current_node.value
        else:
            double_node = current_node.next
            while double_node:
                current_value = current_node.value
                if double_node.next == None:
                    return current_value
                if double_node.next.next == None:
                    current_node = current_node.next
                    return current_node.value
                current_node = current_node.next
                double_node = double_node.next.next

    def reverse(self):
        """Reverse the list in place starting at this node, then print the
        values of the reversed list (one per line). Returns None."""
        current_node = self
        next_node = None
        prev_node = None
        while current_node:
            # save the next node before the link is overwritten
            next_node = current_node.next
            # point the current node back at the previous one
            current_node.next = prev_node
            prev_node = current_node
            current_node = next_node
        # BUG FIX: the original restarted the printing walk from `self`
        # (now the *tail*, whose .next is None) and advanced with
        # `main = self.next`, so it printed a single value. Walk from the
        # new head (prev_node) and advance with `main = main.next`.
        main = prev_node
        while main:
            print(main.value)
            main = main.next
# Build the linked list 1 -> 2 -> 3 -> 4 -> 5 and print it reversed.
node1 = Node(1)
node2 = Node(2)
node3 = Node(3)
node4 = Node(4)
node5 = Node(5)
for left, right in ((node1, node2), (node2, node3), (node3, node4), (node4, node5)):
    left.next = right
print(node1.reverse())
from flask import Flask, request, session, redirect, url_for, render_template, flash
app = Flask(__name__)
# NOTE(review): a hard-coded secret key is insecure outside of toy demos --
# load it from an environment variable or config in real deployments.
app.secret_key = 'hello'
@app.route('/login', methods=["POST", "GET"])
def login():
    """Render the login form on GET; on POST, store the submitted name in
    the session and redirect to the user page."""
    if request.method != 'POST':
        return render_template('jinja 6.html')
    # NOTE(review): form key 'nam' looks like a typo for 'name' -- confirm
    # against the input name used in the template before changing it.
    session['user'] = request.form['nam']
    return redirect(url_for('user'))
@app.route('/user')
def user():
    """Greet the logged-in user, or bounce anonymous visitors to login."""
    if 'user' not in session:
        return redirect(url_for('login'))
    name = session['user']
    return f'hello {name}'
@app.route('/logout')
def logout():
    """Flash a goodbye message, clear the session user, go back to login."""
    if 'user' in session:
        name = session['user']
        flash(f'you have been logged out!{name}', 'info')
        session.pop('user', None)
    return redirect(url_for('login'))
|
import json
import typing
import itertools
import collections
import numpy as np
from real_human_team.util import print_board
class State:
    """Game state for a RoPaSci-360-style hex board game.

    Tokens are stored as two canonically sorted tuples (one per player),
    alongside the set of playable hexes and per-player throw counts.
    """
    # NOTE(review): earlier comments claimed this class subclasses
    # namedtuple; it does not (it defines a plain __init__), so instances
    # use default identity-based __eq__/__hash__. Confirm whether value
    # semantics are required by callers.

    # This class stores the game state in the format of two lists:
    # One holding the positions and symbols of all upper tokens:
    upper_tokens: tuple
    # And one for all the lower tokens:
    lower_tokens: tuple
    # There is also a set of valid hexes (all of those not blocked by
    # block tokens):
    all_hexes: frozenset
    # Hold information about how many times each player has thrown
    upper_throws: int
    lower_throws: int

    # Control creation of instances through a separate classmethod so the
    # token collections are always stored as sorted tuples (keeps logically
    # equal states attribute-identical).
    @classmethod
    def new(cls, upper_tokens, lower_tokens, all_hexes, upper_throws, lower_throws):
        return cls(
            # TODO: Instead of sorted tuples, implement a frozen bag?
            upper_tokens=tuple(sorted(upper_tokens)),
            lower_tokens=tuple(sorted(lower_tokens)),
            all_hexes=all_hexes,
            upper_throws=upper_throws,
            lower_throws=lower_throws,
        )

    # Alternative-constructor idiom: create the first state from the JSON
    # data dictionary.
    @classmethod
    def from_json(cls, file):
        data = json.load(file)
        upper_tokens = (Token(Hex(r, q), s) for s, r, q in data["upper"])
        lower_tokens = (Token(Hex(r, q), s) for s, r, q in data["lower"])
        all_hexes = ALL_HEXES
        return cls.new(upper_tokens, lower_tokens, all_hexes, 0, 0)

    def __init__(self, upper_tokens, lower_tokens, all_hexes, upper_throws, lower_throws):
        self.upper_tokens = upper_tokens
        self.lower_tokens = lower_tokens
        self.all_hexes = all_hexes
        self.upper_throws = upper_throws
        self.lower_throws = lower_throws

    # The core functionality of the state is to compute its available
    # actions and their corresponding successor states.
    def actions_successors(self):
        # NOTE(review): self.actions() is not defined anywhere in this
        # file -- confirm it is provided elsewhere (subclass or patch).
        for action in self.actions():
            yield action, self.successor(action)

    # Generate list of upper actions
    def genUpActions(self):
        """Return a list of ('u', atomic_action) pairs: THROWs plus
        SLIDE/SWING moves for every upper token."""
        # Get upper tokens
        xs = [x for x, _s in self.upper_tokens]
        xs_occupied_hexes = set(xs)

        # Get adjacent (and playable) hexes
        def _adjacent(x):
            return self.all_hexes & {x + y for y in HEX_STEPS}

        # Generate throws: upper starts at row 4 and the reachable zone
        # grows one row per throw already made, up to 9 throws total.
        def _upper_throw_actions():
            if self.upper_throws >= 9:
                return
            for row in range(self.upper_throws + 1):
                if 4 - row >= 0:
                    col_range = range(-4, row + 1)
                else:
                    col_range = range(-8 + row, 4 + 1)
                for col in col_range:
                    for symbol in ['r', 'p', 's']:
                        yield 'u', ('THROW', symbol, Hex(4 - row, col))

        # Generate slides and swings
        def _upper_token_actions(x):
            adjacent_x = _adjacent(x)
            for y in adjacent_x:
                yield 'u', ("SLIDE", x, y)
                if y in xs_occupied_hexes:
                    # swing around the friendly token at y, landing on hexes
                    # adjacent to y but not adjacent to (or equal to) x
                    opposite_y = _adjacent(y) - adjacent_x - {x}
                    for z in opposite_y:
                        yield 'u', ("SWING", x, z)
                    # (removed a dead `adjacent_y = _adjacent(x)` assignment
                    # whose result was never read)

        # Return upper actions: throws first, then per-token moves
        upper_moves = list(_upper_throw_actions())
        for gen in map(_upper_token_actions, xs):
            upper_moves += [*gen]
        return upper_moves

    # Generate list of lower actions
    def genLowerActions(self):
        """Return a list of ('l', atomic_action) pairs, mirroring
        genUpActions for the lower player (throw zone grows from row -4)."""
        # Get lower tokens and occupied hexes
        ys = [y for y, _s in self.lower_tokens]
        ys_occupied_hexes = set(ys)

        # Get adjacent (and playable) hexes
        def _adjacent(x):
            return self.all_hexes & {x + y for y in HEX_STEPS}

        # Generate THROW actions
        def _lower_throw_actions():
            if self.lower_throws >= 9:
                return
            for row in range(self.lower_throws + 1):
                if -4 + row <= 0:
                    col_range = range(-row, 4 + 1)
                else:
                    col_range = range(-4, 8 - row + 1)
                for col in col_range:
                    for symbol in ['r', 'p', 's']:
                        yield 'l', ('THROW', symbol, Hex(-4 + row, col))

        # Generate SLIDE, SWING actions
        def _lower_token_actions(x):
            adjacent_y = _adjacent(x)
            for y in adjacent_y:
                yield 'l', ("SLIDE", x, y)
                if y in ys_occupied_hexes:
                    opposite_y = _adjacent(y) - adjacent_y - {x}
                    for z in opposite_y:
                        yield 'l', ("SWING", x, z)
                    # (removed a dead `adjacent_y = _adjacent(x)` assignment
                    # whose result was never read)

        # Return list of lower actions
        lower_moves = list(_lower_throw_actions())
        for gen in map(_lower_token_actions, ys):
            lower_moves += [*gen]
        return lower_moves

    def successor(self, action):
        """Apply `action` (an iterable of (player, atomic_action) pairs)
        and return the resulting State, resolving battles on shared hexes.

        For a THROW the atomic action is ('THROW', symbol, hex); for
        SLIDE/SWING it is (kind, source_hex, dest_hex).
        """
        # move upper and lower tokens
        new_upper_tokens = list(self.upper_tokens)
        new_lower_tokens = list(self.lower_tokens)
        new_upper_throws = self.upper_throws
        new_lower_throws = self.lower_throws
        for p, (_a, x, y) in action:
            # lookup the symbol (any token on this hex will do, since all
            # tokens here will have the same symbol since the last battle)
            if p == 'u':
                # Add new token if thrown
                if _a == 'THROW':
                    new_upper_tokens.append(Token(y, x))
                    new_upper_throws += 1
                    continue
                # Lookup symbol if SLIDE or SWING
                for t in range(len(new_upper_tokens)):
                    if new_upper_tokens[t].hex == x:
                        s = new_upper_tokens[t].symbol
                        new_upper_tokens.pop(t)
                        break
                new_upper_tokens.append(Token(y, s))
            if p == 'l':
                # Add new token if thrown
                if _a == 'THROW':
                    new_lower_tokens.append(Token(y, x))
                    new_lower_throws += 1
                    continue
                for t in range(len(new_lower_tokens)):
                    if new_lower_tokens[t].hex == x:
                        s = new_lower_tokens[t].symbol
                        new_lower_tokens.pop(t)
                        break
                new_lower_tokens.append(Token(y, s))
        # where tokens clash, do battle
        # TODO: only necessary to check this at destinations of actions
        # (but then will have to find another way to fill the lists)
        safe_upper_tokens = []
        safe_lower_tokens = []
        for x in self.all_hexes:
            ups_at_x = [t for t in new_upper_tokens if t.hex == x]
            los_at_x = [t for t in new_lower_tokens if t.hex == x]
            symbols = {t.symbol for t in ups_at_x + los_at_x}
            if len(symbols) > 1:
                # each symbol present eliminates the symbol it beats
                for s in symbols:
                    p = BEATS_WHAT[s.lower()]
                    ups_at_x = [t for t in ups_at_x if t.symbol != p]
                    los_at_x = [t for t in los_at_x if t.symbol != p]
            safe_upper_tokens.extend(ups_at_x)
            safe_lower_tokens.extend(los_at_x)
        return self.new(safe_upper_tokens, safe_lower_tokens, self.all_hexes,
                        new_upper_throws, new_lower_throws)

    # For easier debugging, a helper method to print the current state.
    def print(self, message="", **kwargs):
        board = collections.defaultdict(str)
        for t in self.upper_tokens:
            board[t.hex] += t.symbol.upper()
        for t in self.lower_tokens:
            board[t.hex] += t.symbol.lower()
        for x, s in board.items():
            board[x] = f"({s})"
        print_board(board, message, **kwargs)
# (Some classes and constants supporting the implementation above)
class Hex(typing.NamedTuple):
    """
    Hexagonal axial coordinates with basic operations and hexagonal
    Manhattan distance.

    Thanks to https://www.redblobgames.com/grids/hexagons/ for some
    of the ideas implemented here.
    """
    r: int
    q: int

    @staticmethod
    def dist(x, y):
        """Hexagonal Manhattan distance between two hex coordinates."""
        dr, dq = x.r - y.r, x.q - y.q
        return (abs(dr) + abs(dq) + abs(dr + dq)) // 2

    def __add__(self, other):
        # `other` may be another Hex or any (dr, dq) pair
        return Hex(self.r + other[0], self.q + other[1])
# Board geometry: axial coordinates r, q each lie in [-4, 4], restricted so
# that the implicit third coordinate -r-q is also in range.
HEX_RANGE = range(-4, +4+1)
ALL_HEXES = frozenset(
    Hex(r, q) for r in HEX_RANGE for q in HEX_RANGE if -r-q in HEX_RANGE
)
# The six unit steps to neighbouring hexes.
HEX_STEPS = [Hex(r, q) for r, q in [(1,-1),(1,0),(0,1),(-1,1),(-1,0),(0,-1)]]
# Rock-paper-scissors relations, keyed by lower-case symbol.
BEATS_WHAT = {'r': 's', 'p': 'r', 's': 'p'}
WHAT_BEATS = {'r': 'p', 'p': 's', 's': 'r'}
class Token(typing.NamedTuple):
    """A game token: its board position plus its 'r'/'p'/'s' symbol."""
    hex: Hex
    symbol: str
from decimal import Decimal as D, getcontext
# 100 significant digits so continued-fraction quotients stay accurate
getcontext().prec = 100
def contfrac_to_frac(seq):
    """Collapse a continued fraction [a0, a1, ...] into a plain fraction,
    returned as a (numerator, denominator) pair."""
    numerator, denominator = 1, 0
    # fold from the innermost term outwards
    for term in reversed(seq):
        numerator, denominator = denominator + numerator * term, numerator
    return numerator, denominator
def CF(num1, n=10):
a = [int(num1)]
num = num1 % 1 # Mantissa
while num != 1:
num = 1 / num
whole_num = int(num)
a.append(whole_num)
num -= whole_num
if len(a) == n + 1:
break
return a
# 50 digits of pi as an exact Decimal ratio (uses the 100-digit context)
pi = D(31415926535897932384626433832795028841971693993751) / D(10 ** 49)
a = CF(pi, n=27)  # n represents length of CF
print(a)
# Expected output
# [3, 7, 15, 1, 292, 1, 1, 1, 2, 1, 3, 1, 14, 2, 1, 1, 2, 2, 2, 2, 1, 84, 2, 1,
# 1, 15, 3, 13]
# BUG FIX: the original passed `a[0] + a[1:]` (int + list -> TypeError);
# the list `a` already is the full continued fraction.
num, den = contfrac_to_frac(a)
print(num, den)
|
"""Definitions for an algebra on spin (angular momentum) Hilbert spaces, both
for integer and half-integer spin"""
from abc import ABCMeta
# BUG FIX: was `from collections.__init__ import OrderedDict` (an IDE
# auto-import artifact); import from the package itself.
from collections import OrderedDict

import sympy
from sympy import sqrt, sympify

from ..core.hilbert_space_algebra import LocalSpace
from ..core.operator_algebra import LocalOperator, PseudoInverse
from ..core.state_algebra import BasisKet
from ..utils.indices import SpinIndex

__all__ = [
    'SpinSpace',
    'SpinBasisKet',
    'SpinOperator',
    'Jz',
    'Jplus',
    'Jminus',
]

__private__ = ['Jpjmcoeff', 'Jzjmcoeff', 'Jmjmcoeff']
class SpinSpace(LocalSpace):
    """A Hilbert space for an integer or half-integer spin system.

    For a given spin $N$, the resulting Hilbert space has dimension $2 N + 1$
    with levels labeled from $-N$ to $+N$ (as strings)

    For an integer spin::

        >>> hs = SpinSpace(label=0, spin=1)
        >>> hs.dimension
        3
        >>> hs.basis_labels
        ('-1', '0', '+1')

    For a half-integer spin::

        >>> hs = SpinSpace(label=0, spin=sympy.Rational(3, 2))
        >>> hs.spin
        3/2
        >>> hs.dimension
        4
        >>> hs.basis_labels
        ('-3/2', '-1/2', '+1/2', '+3/2')

    For convenience, you may also give `spin` as a tuple or a string::

        >>> hs = SpinSpace(label=0, spin=(3, 2))
        >>> assert hs == SpinSpace(label=0, spin=sympy.Rational(3, 2))
        >>> hs = SpinSpace(label=0, spin='3/2')
        >>> assert hs == SpinSpace(label=0, spin=(3, 2))

    You may use custom labels, e.g.::

        >>> hs = SpinSpace(label='s', spin='1/2', basis=('-', '+'))
        >>> hs.basis_labels
        ('-', '+')

    The labels "up" and "down" are recognized and printed as the appropriate
    arrow symbols::

        >>> hs = SpinSpace(label='s', spin='1/2', basis=('down', 'up'))
        >>> unicode(BasisKet('up', hs=hs))
        '|↑⟩⁽ˢ⁾'
        >>> unicode(BasisKet('down', hs=hs))
        '|↓⟩⁽ˢ⁾'

    Raises:
        ValueError: if `spin` is not an integer or half-integer greater than
            zero
    """
    _basis_label_types = (str, SpinIndex)  # acceptable types for labels

    def __init__(
            self,
            label,
            *,
            spin,
            basis=None,
            local_identifiers=None,
            order_index=None):
        # normalize `spin` to a sympy number: accept (numer, denom) tuples,
        # strings like '3/2', or plain numbers
        if isinstance(spin, tuple):
            spin = sympy.sympify(spin[0]) / spin[1]
        else:
            spin = sympy.sympify(spin)
        self._spin = spin
        if not (2 * spin).is_integer:
            raise ValueError(
                "spin %s must be an integer or half-integer" % spin)
        try:
            dimension = int(2 * spin) + 1
        except TypeError:
            raise ValueError(
                "spin %s must be an integer or half-integer" % spin)
        if dimension <= 1:
            raise ValueError("spin %s must be greater than zero" % spin)
        bottom = -spin
        if basis is None:
            # default labels: "-N", ..., "+N" rendered via SpinIndex
            basis = tuple(
                [
                    SpinIndex._static_render(bottom + n)
                    for n in range(dimension)
                ])
        else:
            # sometimes people don't think and use some of the "canonical" TLS
            # labels in the wrong order. We can catch it, so why not?
            if basis == ('up', 'down') or basis == ('+', '-'):
                raise ValueError(
                    "Invalid basis: you've switched %s and %s" % basis)
        super().__init__(
            label=label,
            basis=basis,
            dimension=dimension,
            local_identifiers=local_identifiers,
            order_index=order_index)
        # rewrite the kwargs from super()
        self._kwargs = OrderedDict(
            [
                ('spin', self._spin),
                ('local_identifiers', self._sorted_local_identifiers),
                ('order_index', self._order_index),
            ])
        self._minimal_kwargs = self._kwargs.copy()
        if local_identifiers is None:
            del self._minimal_kwargs['local_identifiers']
        if order_index is None:
            del self._minimal_kwargs['order_index']

    def next_basis_label_or_index(self, label_or_index, n=1):
        """Given the label or index of a basis state, return the label
        the next basis state.

        More generally, if `n` is given, return the `n`'th next basis state
        label/index; `n` may also be negative to obtain previous basis state
        labels. Returns a :class:`str` label if `label_or_index` is a
        :class:`str` or :class:`int`, or a :class:`.SpinIndex` if
        `label_or_index` is a :class:`.SpinIndex`.

        Args:
            label_or_index (int or str or SpinIndex): If `int`, the
                zero-based index of a basis state; if `str`, the label of a
                basis state
            n (int): The increment

        Raises:
            IndexError: If going beyond the last or first basis state
            ValueError: If `label` is not a label for any basis state in the
                Hilbert space
            .BasisNotSetError: If the Hilbert space has no defined basis
            TypeError: if `label_or_index` is neither a :class:`str` nor an
                :class:`int`, nor a :class:`.SpinIndex`

        Note:
            This differs from its super-method only by never returning an
            integer index (which is not accepted when instantiating a
            :class:`.BasisKet` for a :class:`.SpinSpace`)
        """
        if isinstance(label_or_index, int):
            new_index = label_or_index + n
            if new_index < 0:
                raise IndexError("index %d < 0" % new_index)
            if new_index >= self.dimension:
                raise IndexError(
                    "index %d out of range for basis %s"
                    % (new_index, self._basis))
            return self.basis_labels[new_index]
        elif isinstance(label_or_index, str):
            label_index = self.basis_labels.index(label_or_index)
            new_index = label_index + n
            if (new_index < 0) or (new_index >= len(self._basis)):
                raise IndexError(
                    "index %d out of range for basis %s"
                    % (new_index, self._basis))
            return self.basis_labels[new_index]
        elif isinstance(label_or_index, SpinIndex):
            return label_or_index.__class__(expr=label_or_index.expr + n)
        else:
            # BUG FIX: the docstring promises a TypeError for unsupported
            # types, but the original silently returned None here.
            raise TypeError(
                "label_or_index must be an int, str, or SpinIndex, not %s"
                % type(label_or_index).__name__)

    @property
    def spin(self) -> sympy.Rational:
        """The spin-number associated with the :class:`.SpinSpace`

        This can be a SymPy integer or a half-integer.
        """
        return self._spin

    @property
    def multiplicity(self) -> int:
        """The multiplicity of the Hilbert space, $2 S + 1$.

        This is equivalent to the dimension::

            >>> hs = SpinSpace('s', spin=sympy.Rational(3, 2))
            >>> hs.multiplicity == 4 == hs.dimension
            True
        """
        return int(2 * self._spin) + 1
def SpinBasisKet(*numer_denom, hs):
    """Constructor for a :class:`.BasisKet` for a :class:`.SpinSpace`.

    For a half-integer spin system::

        >>> hs = SpinSpace('s', spin=(3, 2))
        >>> assert SpinBasisKet(1, 2, hs=hs) == BasisKet("+1/2", hs=hs)

    For an integer spin system::

        >>> hs = SpinSpace('s', spin=1)
        >>> assert SpinBasisKet(1, hs=hs) == BasisKet("+1", hs=hs)

    Note that ``BasisKet(1, hs=hs)`` with an integer index (which would
    hypothetically refer to ``BasisKet("0", hs=hs)`` is not allowed for spin
    systems::

        >>> BasisKet(1, hs=hs)
        Traceback (most recent call last):
        ...
        TypeError: label_or_index must be an instance of one of str, SpinIndex; not int

    Raises:
        TypeError: if `hs` is not a :class:`.SpinSpace` or the wrong number of
            positional arguments is given
        ValueError: if any of the positional arguments are out range for the
            given `hs`
    """
    try:
        spin_numer, spin_denom = hs.spin.as_numer_denom()
    except AttributeError:
        raise TypeError(
            "hs=%s for SpinBasisKet must be a SpinSpace instance" % hs
        )
    assert spin_denom in (1, 2)
    if spin_denom == 1:  # integer spin
        if len(numer_denom) != 1:
            raise TypeError(
                "SpinBasisKet requires exactly one positional argument for an "
                "integer-spin Hilbert space"
            )
        (numer,) = numer_denom
        if numer < -spin_numer or numer > spin_numer:
            raise ValueError(
                "spin quantum number %s must be in range (%s, %s)"
                % (numer, -spin_numer, spin_numer)
            )
        # positive quantum numbers carry an explicit "+" in the label
        label = ("+%s" % numer) if numer > 0 else str(numer)
        return BasisKet(label, hs=hs)
    # half-integer spin
    if len(numer_denom) != 2:
        raise TypeError(
            "SpinBasisKet requires exactly two positional arguments for a "
            "half-integer-spin Hilbert space"
        )
    numer, denom = numer_denom
    numer = int(numer)
    denom = int(denom)
    if denom != 2:
        raise ValueError(
            "The second positional argument (denominator of the spin "
            "quantum number) must be 2, not %s" % denom
        )
    if numer < -spin_numer or numer > spin_numer:
        raise ValueError(
            "spin quantum number %s/%s must be in range (%s/2, %s/2)"
            % (numer, denom, -spin_numer, spin_numer)
        )
    sign = "+" if numer > 0 else ""
    return BasisKet("%s%s/2" % (sign, numer), hs=hs)
class SpinOperator(LocalOperator, metaclass=ABCMeta):
    """Base class for operators in a spin space"""
    # the Hilbert-space class all subclasses must act on
    _hs_cls = SpinSpace

    def __init__(self, *args, hs):
        super().__init__(*args, hs=hs)
        # enforce the SpinSpace requirement after super() has resolved `hs`
        if not isinstance(self.space, SpinSpace):
            raise TypeError(
                "hs %s must be an instance of SpinSpace" % self.space
            )
class Jz(SpinOperator):
    # BUG FIX: docstring is now a raw string so LaTeX macros like \Op are
    # not treated as (invalid) string escape sequences.
    r"""Spin (angular momentum) operator in z-direction

    $\Op{J}_z$ is the $z$ component of a general spin operator acting
    on a particular :class:`SpinSpace` `hs` of freedom with well defined spin
    quantum number $s$. It is Hermitian::

        >>> hs = SpinSpace(1, spin=(1, 2))
        >>> print(ascii(Jz(hs=hs).adjoint()))
        J_z^(1)

    :class:`Jz`, :class:`Jplus` and :class:`Jminus` satisfy the angular
    momentum commutator algebra::

        >>> print(ascii((Jz(hs=hs) * Jplus(hs=hs) -
        ...     Jplus(hs=hs)*Jz(hs=hs)).expand()))
        J_+^(1)

        >>> print(ascii((Jz(hs=hs) * Jminus(hs=hs) -
        ...     Jminus(hs=hs)*Jz(hs=hs)).expand()))
        -J_-^(1)

        >>> print(ascii((Jplus(hs=hs) * Jminus(hs=hs)
        ...     - Jminus(hs=hs)*Jplus(hs=hs)).expand()))
        2 * J_z^(1)

        >>> Jplus(hs=hs).dag() == Jminus(hs=hs)
        True

        >>> Jminus(hs=hs).dag() == Jplus(hs=hs)
        True

    Printers should represent this operator with the default identifier::

        >>> Jz._identifier
        'J_z'

    A custom identifier may be defined using `hs`'s `local_identifiers`
    argument.
    """
    _identifier = 'J_z'

    def __init__(self, *, hs):
        super().__init__(hs=hs)

    def _adjoint(self):
        # J_z is Hermitian: it is its own adjoint
        return self

    def _pseudo_inverse(self):
        return PseudoInverse(self)
class Jplus(SpinOperator):
    # BUG FIX: docstring is now a raw string so LaTeX macros like \Op are
    # not treated as (invalid) string escape sequences.
    r"""Raising operator of a spin space

    $\Op{J}_{+} = \Op{J}_x + i \op{J}_y$ is the raising ladder operator
    of a general spin operator acting on a particular :class:`SpinSpace` `hs`
    with well defined spin quantum number $s$. Its adjoint is the
    lowering operator::

        >>> hs = SpinSpace(1, spin=(1, 2))
        >>> print(ascii(Jplus(hs=hs).adjoint()))
        J_-^(1)

    :class:`Jz`, :class:`Jplus` and :class:`Jminus` satisfy that angular
    momentum commutator algebra, see :class:`Jz`

    Printers should represent this operator with the default identifier::

        >>> Jplus._identifier
        'J_+'

    A custom identifier may be defined using `hs`'s `local_identifiers`
    argument.
    """
    _identifier = 'J_+'

    def __init__(self, *, hs):
        super().__init__(hs=hs)

    def _adjoint(self):
        return Jminus(hs=self.space)

    def _pseudo_inverse(self):
        return PseudoInverse(self)
class Jminus(SpinOperator):
    # BUG FIX: docstring is now a raw string so LaTeX macros like \Op are
    # not treated as (invalid) string escape sequences.
    r"""Lowering operator on a spin space

    $\Op{J}_{-} = \Op{J}_x - i \op{J}_y$ is the lowering ladder operator of
    a general spin operator acting on a particular :class:`SpinSpace` `hs`
    with well defined spin quantum number $s$. Its adjoint is the raising
    operator::

        >>> hs = SpinSpace(1, spin=(1, 2))
        >>> print(ascii(Jminus(hs=hs).adjoint()))
        J_+^(1)

    :class:`Jz`, :class:`Jplus` and :class:`Jminus` satisfy that angular
    momentum commutator algebra, see :class:`Jz`.

    Printers should represent this operator with the default identifier::

        >>> Jminus._identifier
        'J_-'

    A custom identifier may be defined using `hs`'s `local_identifiers`
    argument.
    """
    _identifier = 'J_-'

    def __init__(self, *, hs):
        super().__init__(hs=hs)

    def _adjoint(self):
        return Jplus(hs=self.space)

    def _pseudo_inverse(self):
        return PseudoInverse(self)
def Jpjmcoeff(ls, m, shift=False) -> sympy.Expr:
    r"""Eigenvalue of the $\Op{J}_{+}$ (:class:`Jplus`) operator

    .. math::
        \Op{J}_{+} \ket{s, m} = \sqrt{s (s+1) - m (m+1)} \ket{s, m+1}

    where the multiplicity $s$ is implied by the size of the Hilbert space
    `ls`: there are $2s+1$ eigenstates with $m = -s, -s+1, \dots, s$.

    Args:
        ls (LocalSpace): The Hilbert space in which the $\Op{J}_{+}$ operator
            acts.
        m (str or int): If str, the label of the basis state of `hs` to which
            the operator is applied. If integer together with ``shift=True``,
            the zero-based index of the basis state. Otherwise, directly the
            quantum number $m$.
        shift (bool): If True for a integer value of `m`, treat `m` as the
            zero-based index of the basis state (i.e., shift `m` down by $s$ to
            obtain the quantum number $m$)
    """
    assert isinstance(ls, SpinSpace)
    n = ls.dimension
    # spin quantum number s from the dimension n = 2s + 1
    s = sympify(n - 1) / 2
    assert n == int(2 * s + 1)
    if isinstance(m, str):
        # NOTE(review): this uses ls.basis_labels.index while Jzjmcoeff and
        # Jmjmcoeff use ls.basis.index -- confirm the two attributes index
        # labels identically before unifying.
        m = ls.basis_labels.index(m) - s  # m is now Sympy expression
    elif isinstance(m, int):
        if shift:
            assert 0 <= m < n
            m = m - s
    return sqrt(s * (s + 1) - m * (m + 1))
def Jzjmcoeff(ls, m, shift=False) -> sympy.Expr:
    r"""Eigenvalue of the $\Op{J}_z$ (:class:`Jz`) operator

    .. math::
        \Op{J}_{z} \ket{s, m} = m \ket{s, m}

    See also :func:`Jpjmcoeff`.
    """
    # `shift` now defaults to False for consistency with Jpjmcoeff
    # (backward compatible: existing positional callers are unaffected).
    assert isinstance(ls, SpinSpace)
    n = ls.dimension
    # spin quantum number s from the dimension n = 2s + 1
    s = sympify(n - 1) / 2
    assert n == int(2 * s + 1)
    if isinstance(m, str):
        # NOTE(review): uses ls.basis.index while Jpjmcoeff uses
        # ls.basis_labels.index -- confirm both index labels identically.
        return ls.basis.index(m) - s
    elif isinstance(m, int):
        if shift:
            assert 0 <= m < n
        return m - s
    else:
        return sympify(m)
def Jmjmcoeff(ls, m, shift=False) -> sympy.Expr:
    r"""Eigenvalue of the $\Op{J}_{-}$ (:class:`Jminus`) operator

    .. math::
        \Op{J}_{-} \ket{s, m} = \sqrt{s (s+1) - m (m-1)} \ket{s, m-1}

    See also :func:`Jpjmcoeff`.
    """
    # `shift` now defaults to False for consistency with Jpjmcoeff
    # (backward compatible: existing positional callers are unaffected).
    assert isinstance(ls, SpinSpace)
    n = ls.dimension
    # spin quantum number s from the dimension n = 2s + 1
    s = sympify(n - 1) / 2
    assert n == int(2 * s + 1)
    if isinstance(m, str):
        # NOTE(review): uses ls.basis.index while Jpjmcoeff uses
        # ls.basis_labels.index -- confirm both index labels identically.
        m = ls.basis.index(m) - s  # m is now Sympy expression
    elif isinstance(m, int):
        if shift:
            assert 0 <= m < n
            m = m - s
    return sqrt(s * (s + 1) - m * (m - 1))
|
# Read an integer and report whether it is even (偶数) or odd (奇数).
num = int(input("请输入"))
if num % 2:
    print("%d奇数"%(num))
else:
    print("%d偶数"%(num))
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-24 05:06
# NOTE: auto-generated migration -- do not hand-edit the operations below.
from __future__ import unicode_literals
from django.db import migrations, models


class Migration(migrations.Migration):
    # Adds the PrependedBody model (id + optional prepended_body text).

    dependencies = [
        ('inventory_email_support', '0003_auto_20170323_2255'),
    ]

    operations = [
        migrations.CreateModel(
            name='PrependedBody',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('prepended_body', models.TextField(blank=True, null=True)),
            ],
        ),
    ]
|
def recProduct(a, b):
    """Multiply two integers using only recursive addition/subtraction."""
    # Base case: anything times zero is zero.
    if a == 0 or b == 0:
        return 0
    # Positive b: peel off one copy of `a` per recursion step.
    if b > 0:
        return a + recProduct(a, b - 1)
    # a > 0, b < 0: accumulate copies of the negative `b` instead.
    if a > 0:
        return b + recProduct(a - 1, b)
    # Both negative: (-a) * |b|, accumulated one -a at a time.
    return -a + recProduct(a, b + 1)
print(recProduct(-10, -2))
|
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from collections import namedtuple
import random
import matplotlib
import matplotlib.pyplot as plt
import math
# data: synthetic "price" series -- a sum of three cosines around 100.
duration = 3000
Ilength = 10  # length of the input window fed to the network
ts = torch.arange(duration + Ilength - 1)
amplitude = 2
# optionally add torch.randn(ts.size()) * amplitude / 10 for noise
ys = amplitude * (torch.cos(ts / 4) + torch.cos(ts / 2) + torch.cos(ts / 10)) + 100
T = []
L = []
Rs = []
class RNN(nn.Module):
    """Single-layer GRU followed by a residual two-layer head."""

    def __init__(self, la_inputsize, la_hiddensize, la_outputsize):
        super(RNN, self).__init__()
        self.gru1 = nn.GRU(input_size=la_inputsize, hidden_size=la_hiddensize,
                           num_layers=1, dropout=0)
        self.fc4 = nn.Linear(la_hiddensize, la_hiddensize, True)
        self.fc5 = nn.Linear(la_hiddensize, la_outputsize, True)

    def forward(self, x, h):
        gru_out, h = self.gru1(x, h)
        # residual connection around the first fully-connected layer
        residual = gru_out + self.fc4(F.relu(gru_out))
        return self.fc5(F.relu(residual)), h

    def reduce_weights(self):
        """Divide every parameter/buffer by 10 (shrinks initial outputs)."""
        scaled = self.state_dict()
        for name in scaled:
            scaled[name] = scaled[name] / 10
        self.load_state_dict(scaled)
# A single experience tuple for Q-learning.
Transition = namedtuple('Transition',
                        ('state', 'action', 'next_state', 'reward'))


class ReplayMemory(object):
    """Fixed-capacity ring buffer of Transition tuples."""

    def __init__(self, capacity):
        self.capacity = capacity
        self.memory = []
        self.position = 0

    def push(self, *args):
        """Store a transition, overwriting the oldest entry once full."""
        entry = Transition(*args)
        if len(self.memory) < self.capacity:
            self.memory.append(entry)
        else:
            self.memory[self.position] = entry
        self.position = (self.position + 1) % self.capacity

    def sample(self, batch_size):
        """Return `batch_size` transitions drawn uniformly at random."""
        return random.sample(self.memory, batch_size)

    def __len__(self):
        return len(self.memory)
class TradingAgent(object):
    """Deep-Q trading agent.

    Holds EUR/XBT balances and three copies of the RNN: NNp (the online net
    being trained), NN (a slowly-updated target net for stable Q-targets)
    and NNbest (a best-so-far snapshot persisted to disk).
    """
    def __init__(self):
        self.inputlength=10
        self.hiddenlength=20
        # number of discrete actions; play() treats 0 and 1 as the two
        # trades and anything else as "wait"
        self.na=3;
        # state / next-state buffers: [input window | GRU hidden state]
        self.s=torch.zeros(1,1,self.inputlength+self.hiddenlength);
        self.sp=torch.zeros(1,1,self.inputlength+self.hiddenlength);
        self.NNp= RNN(self.inputlength,self.hiddenlength,self.na); #the net to be trained
        # shrink initial weights three times (overall factor 1/1000)
        self.NNp.reduce_weights();
        self.NNp.reduce_weights();
        self.NNp.reduce_weights();
        self.NN= RNN(self.inputlength,self.hiddenlength,self.na); #is only updated every so often for stability
        self.NN.load_state_dict(self.NNp.state_dict())
        self.NN.eval();
        self.NNbest= RNN(self.inputlength,self.hiddenlength,self.na); #is only updated every so often for stability
        self.NNbest.load_state_dict(self.NNp.state_dict())
        self.NNbest.eval();
        self.criterion = nn.MSELoss()
        self.optimizer = optim.Adam(self.NNp.parameters(), lr=0.001, betas=(0.9, 0.999), eps=1e-08, weight_decay=0, amsgrad=False)
        #self.optimizer = optim.SGD(self.NNp.parameters(), lr=0.00001)
        self.R = ReplayMemory(100000);
        self.gamma=0.95;   # discount factor
        self.epsilon=1;    # exploration rate (1 = fully random at start)
        self.meanerror=0;
        self.value=100;    # portfolio value in EUR
        self.XBT=0;
        self.EUR=100;
        self.meanreward=0;
        self.iterations=0;
        self.record_on=1;  # 1 = store transitions in replay memory
        self.C=0;          # target-net blending weight, see updateNN()
        self.lastoutput=[];
        self.lastr=0;
        self.lasth=torch.zeros(1,1,self.hiddenlength);
        self.lasta=0;
        self.best_sd=[];
        self.savebestSD()
    def play(self,I):
        """Observe price window I, choose an epsilon-greedy action, apply it
        to the balances and push the transition into replay memory."""
        #compute s
        #compute next state
        current_price=I[self.inputlength-2];
        next_price=I[self.inputlength-1];
        #rI=I;
        rI=I/torch.mean(I)-1;
        old_value=self.EUR+self.XBT*current_price;
        input=torch.zeros(1,1,self.inputlength);
        # first input feature: signed portfolio allocation, then the window
        input[0][0][0]=self.EUR/old_value-self.XBT*current_price/old_value;
        input[0][0][1:self.inputlength]=rI[0:self.inputlength-1]
        self.s=torch.cat((input,self.lasth),2) #save state
        #print(s)
        self.lastoutput,h=self.NN(input,self.lasth)
        if random.random() > self.epsilon:
            with torch.no_grad(): #makes it faster
                a=self.lastoutput.max(2)[1].view(1, 1)
        else:
            a=torch.tensor(random.randrange(self.na)).view(1,1)
        # apply the chosen action here
        #print(a[0][0])
        self.lasta=a;
        if a==0 :
            self.XBT=0.9*self.XBT+0.997*self.EUR/current_price; #a transaction has a cost of 1% to have less actions
            self.EUR=0;
        if a==1 : #sell
            self.EUR=0.9*self.EUR+0.997*self.XBT*current_price;
            self.XBT=0;
        #else wait
        #calculate new state
        #to make sure it's not clipped
        current_price=next_price;
        self.value=self.EUR+self.XBT*current_price
        next_input=torch.zeros(1,1,self.inputlength);
        next_input[0][0][0]=(self.EUR/self.value-self.XBT*current_price/self.value)
        next_input[0][0][1:self.inputlength]=rI[1:self.inputlength]
        self.sp=torch.cat((next_input,h),2) #save state
        self.lasth=h
        #calculate reward: relative change in portfolio value
        lar=((self.value-old_value)/old_value).view(1,1)
        self.lastr=lar
        # keep balances strictly positive to avoid degenerate zero states
        if self.XBT>0:
            self.XBT=max(self.XBT,0.000000000001)
        if self.EUR>0:
            self.EUR=max(self.EUR,0.000000000001)
        self.meanreward=(self.iterations*self.meanreward+lar)/(self.iterations+1) #compute mean reward
        self.iterations+=1
        if self.record_on==1:
            self.R.push(self.s,self.lasta,self.sp,self.lastr)
    def trainbatch(self,n):
        """Sample n transitions and take one DQN optimization step on NNp,
        using NN for the bootstrapped targets."""
        transitions=self.R.sample(n);
        batch = Transition(*zip(*transitions))
        s_b=torch.cat(batch.state);
        a_b=torch.cat(batch.action);
        r_b=torch.cat(batch.reward);
        sp_b=torch.cat(batch.next_state);
        # split stored state back into input window and GRU hidden state
        inputs,h0s=torch.split(s_b,[self.inputlength,self.hiddenlength],dim=2)
        inputs=inputs.permute(1,0,2) #second dimension is batch
        h0s=h0s.permute(1,0,2)
        outputs,hns=self.NNp(inputs,h0s)
        actuals=outputs.gather(2, a_b.view(1,n,1));
        #print(actual)
        inputsp,h0sp=torch.split(sp_b,[self.inputlength,self.hiddenlength],dim=2)
        inputsp=inputsp.permute(1,0,2) #second dimension is batch
        h0sp=h0sp.permute(1,0,2)
        outputsp,hnsp=self.NN(inputsp,h0sp) #use Hn or H0p ???
        targets=r_b.view(1,n,1)+self.gamma*outputsp.max(2)[0].clone().view(1,n,1)
        #print(targets)
        self.optimizer.zero_grad() # zeroes the gradient buffers of all parameters
        loss=self.criterion(actuals, targets)
        loss.backward(retain_graph=True)
        # clamp gradients for stability
        for param in self.NNp.parameters():
            param.grad.data.clamp_(-1, 1)
        self.optimizer.step()
        self.meanerror=math.sqrt(loss)
        #loss.backward(retain_graph=False)
        #print(loss)
    def updateNN(self):
        """Blend the online net into the target net: NN <- (C*NN + NNp)/(C+1)."""
        with torch.no_grad(): #makes it faster
            sd = self.NN.state_dict()
            sdp = self.NNp.state_dict()
            for a in sd:
                sd[a]=(self.C*sd[a]+sdp[a])/(self.C+1)
            self.NN.load_state_dict(sd)
    def savebestSD(self):
        """Snapshot the current target net as the best net and persist it."""
        print('NN saved')
        self.NNbest.load_state_dict(self.NN.state_dict())
        torch.save(self.NNbest.state_dict(), 'NNbest.sd')
    def importbestSD(self,path):
        """Load a saved best state dict from `path` and adopt it."""
        self.NNbest.load_state_dict(torch.load(path))
        self.NNbest.eval()
        self.loadbestSD()
    def loadbestSD(self):
        """Reset NNp and NN to the best snapshot and rebuild the optimizer."""
        self.NNp.load_state_dict(self.NNbest.state_dict())
        self.NN.load_state_dict(self.NNbest.state_dict())
        self.NN.eval()
        self.NNp.train()
        self.optimizer = optim.Adam(self.NNp.parameters(), lr=0.001, betas=(0.9, 0.999), eps=1e-08, weight_decay=0, amsgrad=False)
        #self.NN.load_state_dict(self.NNbest.state_dict())
    def cleanR(self,maxtresh):
        """Drop low-TD-error transitions from replay memory.

        NOTE(review): this calls self.NNp(s_b) / self.NN(sp_b) with a single
        argument although RNN.forward requires (x, h) -- likely stale code
        from a non-recurrent version; it would raise TypeError if called.
        """
        transitions=self.R.memory;
        batch = Transition(*zip(*transitions))
        s_b=torch.cat(batch.state);
        a_b=torch.cat(batch.action);
        r_b=torch.cat(batch.reward);
        sp_b=torch.cat(batch.next_state);
        actual=self.NNp(s_b).gather(1, a_b).detach();
        #print(actual)
        targets=r_b+self.gamma*self.NN(sp_b).max(1)[0].view(self.R.memory.__len__(),1).detach()
        #print(targets)
        delta=torch.sqrt((actual-targets)**2)
        tresh=torch.mean(delta)/2
        for k in reversed(range(self.R.memory.__len__())):
            if delta[k]<min(tresh,maxtresh):
                self.R.memory.pop(k)
        self.R.position=self.R.__len__();
# Main script Here
ag1=TradingAgent();
time=0
score=0;
bestscore=0;
ag1.epsilon=0.1;   # mostly greedy: only 10% random exploration
ag1.gamma=0.5;     # short-horizon discounting for this run
# SECURITY/STYLE NOTE(review): exec(open(...).read()) runs arbitrary code
# from these files in the current namespace; prefer importing them as
# modules (and note the file handles are never closed).
exec(open('importData.py').read())
exec(open('makebatch.py').read())
Rs=[];
|
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# from https://robertvandeneynde.be/
# Draw a single orange triangle on a pale background with a minimal
# GLSL 3.30 pipeline (VAO + VBO + pass-through shaders).
from OpenGL.GL import *
from OpenGL.GL import shaders
import ctypes
import pygame
from vecutils import *
pygame.init()
# Vertex shader: forwards the incoming position unchanged.
vertex_shader = """
#version 330
in vec4 position;
void main() {
gl_Position = position;
}
"""
# Fragment shader: constant orange color.
fragment_shader = """
#version 330
out vec4 pixel;
void main() {
pixel = vec4(1, 0.5, 0, 1);
}
"""
# Three vertices, 4 floats (x, y, z, w) each.
vertices = farray([
    0.6, 0.6, 0, 1.0,
    -0.6, 0.6, 0, 1.0,
    0.0, -0.6, 0, 1.0,
])
pygame.display.set_mode((512, 512), pygame.OPENGL | pygame.DOUBLEBUF)
glClearColor(0.9, 0.9, 0.5, 1.0)
shader_program = shaders.compileProgram(
    shaders.compileShader(vertex_shader, GL_VERTEX_SHADER),
    shaders.compileShader(fragment_shader, GL_FRAGMENT_SHADER))
# Create a new VAO (Vertex Array Object) and bind it
vertex_array_object = glGenVertexArrays(1)
glBindVertexArray(vertex_array_object)
# Get the position of the 'position' in parameter of our shader_program and bind it.
position = glGetAttribLocation(shader_program, 'position')
if position != -1:  # maybe the attribute is useless and was discarded by the compiler
    glEnableVertexAttribArray(position)
    # Generate buffers to hold our vertices
    vertex_buffer = glGenBuffers(1)
    glBindBuffer(GL_ARRAY_BUFFER, vertex_buffer)
    # Describe the position data layout in the buffer
    glVertexAttribPointer(position, 4, GL_FLOAT, False, 0, ctypes.c_void_p(0))
    # Send the data over to the buffer
    glBufferData(GL_ARRAY_BUFFER, 48, vertices, GL_STATIC_DRAW) # 48 bytes = ArrayDatatype.arrayByteCount(vertices)
else:
    print('Inactive attribute "{}"'.format('position'))
# Unbind the VAO
glBindVertexArray(0)
# Unbind the VBO
glBindBuffer(GL_ARRAY_BUFFER, 0)
clock = pygame.time.Clock()
done = False
while not done:
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            done = True
    # tick
    # draw
    glClear(GL_COLOR_BUFFER_BIT)
    glUseProgram(shader_program)
    glBindVertexArray(vertex_array_object)
    glDrawArrays(GL_TRIANGLES, 0, 3) # 3 points = len(vertices) // 4
    glBindVertexArray(0)
    glUseProgram(0)
    pygame.display.flip()
    clock.tick(60)  # cap at 60 FPS
pygame.quit()
|
import contextlib
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import scipy.stats
from sklearn.linear_model import LinearRegression
# Global plotting defaults: classic matplotlib look, explicit color cycle,
# and 60 dpi for both on-screen figures and saved images.
matplotlib.style.use('classic')
plt.rcParams['axes.prop_cycle'] = plt.cycler('color', 'bgrcmyk')
matplotlib.rcParams['savefig.dpi'] = 60
matplotlib.rcParams['figure.dpi'] = 60
def get_existing_twin_axis(ax):
    """Return the axis that shares *ax*'s bounding box (its twin), or None.

    A twin axis (e.g. created via ``twinx``) lives in the same figure and
    occupies exactly the same bounding box as the original axis.
    """
    candidates = (
        other for other in ax.figure.axes
        if other is not ax and other.bbox.bounds == ax.bbox.bounds
    )
    return next(candidates, None)
@contextlib.contextmanager
def default_axis(axis=None, figsize=(16, 10), axes_shape=(1, 1),
                 add_legend=False, add_twinx_legend=False,
                 legend_loc='upper right', twin_legend_loc='upper right',
                 add_grid=True, title=None, xlabel=None, ylabel=None,
                 facecolor=None, facecolor_fig=None, show_lims_without_offset=False, plot_zero_line=False,
                 save_fname=None):
    """Context manager yielding a matplotlib axis (or grid of axes), then
    applying common decoration after the caller has plotted on it.

    If *axis* is None a new figure of *figsize* with *axes_shape* subplots
    is created.  On exit, each axis optionally gets legend(s), grid, title,
    labels, a y=0 dashed line, offset-free tick formatting and a facecolor;
    the figure is saved to *save_fname* when given.

    NOTE(review): title/labels are applied to EVERY axis in the grid, and
    ``fig`` is only bound when the figure is created here -- passing an
    existing *axis* together with *save_fname*/*facecolor_fig* would raise
    NameError.  Confirm intended usage.
    """
    if axis is None:
        fig, axis = plt.subplots(*axes_shape, figsize=figsize)
        if facecolor_fig:
            fig.set_facecolor(facecolor_fig)
    # Hand control to the caller's plotting code.
    yield axis
    # Normalize to a flat iterable of axes for uniform decoration.
    if axes_shape == (1, 1):
        axis = [axis]
    for ax in np.array(axis).ravel():
        if facecolor:
            ax.set_facecolor(facecolor)
        if add_legend:
            legend_kwargs = dict(fancybox=True, shadow=True, framealpha=0.5)
            # Avoid stacking both legends in the same corner.
            if add_twinx_legend and legend_loc == twin_legend_loc:
                legend_loc = 'upper left'
            ax.legend(loc=legend_loc, **legend_kwargs)
            twin_axis = get_existing_twin_axis(ax)
            if twin_axis is not None:
                twin_axis.legend(loc=twin_legend_loc, **legend_kwargs)
        if title is not None:
            ax.set_title(title)
        if add_grid:
            ax.grid()
        if xlabel is not None:
            ax.set_xlabel(xlabel)
        if ylabel is not None:
            ax.set_ylabel(ylabel)
        if plot_zero_line:
            ax.axhline(0, c='k', ls='--')
        if show_lims_without_offset:
            # Show absolute tick values instead of an offset (e.g. +1e6) label.
            formatter = matplotlib.ticker.ScalarFormatter(useOffset=False)
            ax.yaxis.set_major_formatter(formatter)
            ax.xaxis.set_major_formatter(formatter)
    if save_fname:
        fig.savefig(save_fname)
def binned_plot(x_t, y_t, n_bins=101, color="blue", **kwargs):
    """Diagnostic plot of y vs x: binned means with standard errors, an OLS
    fit line annotated with slope/intercept/correlation, and (on a twin
    axis) a log-scale density histogram of x.

    NaNs in either input are dropped pairwise.  *kwargs* are forwarded to
    default_axis.  Naming convention: ``_t`` = per-sample, ``_T`` = per-bin.
    """
    with default_axis(add_twinx_legend=True, **kwargs) as axis:
        # Work on copies so the caller's arrays are never mutated.
        x_t = x_t.copy()
        y_t = y_t.copy()
        at_least_one_nan_t = (np.isnan(x_t) | np.isnan(y_t))
        x_t = x_t[~at_least_one_nan_t]
        y_t = y_t[~at_least_one_nan_t]
        y_mean_T, bin_edges_T, _ = scipy.stats.binned_statistic(x_t, y_t, statistic="mean", bins=n_bins)
        # Convert bin edges to bin centers.
        bin_edges_T = 0.5 * (bin_edges_T[1:] + bin_edges_T[:-1])
        y_std_T = scipy.stats.binned_statistic(x_t, y_t, statistic="std", bins=n_bins)[0]
        # Per-bin std -> standard error of the per-bin mean.
        y_std_T /= np.sqrt(scipy.stats.binned_statistic(x_t, y_t, statistic="count", bins=n_bins)[0])
        clf = LinearRegression()
        clf.fit(x_t[:, None], y_t)
        rho = np.corrcoef(x_t, y_t)[0][1]
        label = "y = %.3f x + %.3f;\n" % (clf.coef_[0],clf.intercept_)
        label += f"rho = {rho:.3f}; t-stat = {rho * np.sqrt(x_t.size):.2f}"
        axis.plot(bin_edges_T, clf.intercept_ + clf.coef_[0] * bin_edges_T,
                  color=color, label=label)
        axis.errorbar(bin_edges_T, y_mean_T, y_std_T, fmt="o", color=color)
        twin_axis = axis.twinx()
        twin_axis.hist(x_t, bins=n_bins, histtype="step", color=color,
                       density=True, log=True, label="mu=%.3f, sigma=%.3f" % (x_t.mean(), x_t.std()))
        twin_axis.set_ylabel("pdf")
        axis.legend(loc="upper left")
        twin_axis.legend(loc="upper right")
from datetime import datetime
import os
from flask import Flask, request, render_template, jsonify
from werkzeug.exceptions import HTTPException
from app import database, routes, brand
from app.helpers import user_manager
def create_app():
    """Flask application factory.

    Builds the app, loads config (env-overridable secret key and DB URI),
    initializes flask_login and the database, registers blueprints, adds a
    Jinja currency filter and template context, and installs an error
    handler that returns JSON for /api requests and HTML otherwise.
    @returns: the fully configured Flask application.
    """
    app = Flask(__name__)
    app.config.from_mapping(
        SECRET_KEY=os.environ.get('SECRET_KEY', 'dev'),
        SQLALCHEMY_DATABASE_URI=os.environ.get('SQLALCHEMY_DATABASE_URI', 'sqlite:///./database/app.db'),
        SQLALCHEMY_TRACK_MODIFICATIONS=False
    )
    # flask_login initialization
    user_manager.init_app(app)
    # database initialization
    database.init_app(app)
    routes.register_blueprints(app)
    # Map '/' to the 'index' endpoint (view registered elsewhere -- presumably
    # by a blueprint; confirm).
    app.add_url_rule('/', 'index')
    app.jinja_env.add_extension('jinja2.ext.do')
    @app.template_filter()
    def currency_float(value):
        # Format a number as money, e.g. 1234.5 -> '1,234.50'.
        return "{:,.2f}".format(value)
    @app.context_processor
    def inject_context():
        # Expose the current year and the brand module to every template.
        return dict(year=datetime.utcnow().year, brand=brand)
    @app.errorhandler(HTTPException)
    def unauthorized_error(error: HTTPException):
        # API clients get a JSON error payload; browser requests get a page.
        if request.path.startswith('/api'):
            return jsonify(
                dict(message=error.name, description=error.description, status=error.code)
            ), error.code
        return render_template('error.html', error=error), error.code
    return app
# in the previous sections, we assumed a linear relationship between
# explanatory and response variables but we can have sth like :
# y = w0 + w1x + w2x²x² + ... + wdx^d
# we will use PolynomialFeatures transformer class from scikit
# to add a quadratic term (d = 2) to a simple reg problem with
# one explanatory variable and compare the pol to the linear fit
import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures
# np.newaxis transform it to a col vect
# Demo: compare a simple linear fit against a degree-2 polynomial fit on a
# small one-feature dataset, then report training MSE and R^2 for both.
# np.newaxis transform it to a col vect
X = np.array([258.0, 270.0, 294.0, 320.0, 342.0, 368.0,
              396.0, 446.0, 480.0, 586.0])[:, np.newaxis]
y = np.array([236.4, 234.4, 252.8, 298.6, 314.2, 342.2,
              360.8, 368.0, 391.2, 390.8])
lr = LinearRegression()
pr = LinearRegression()
# add a second degree polynomial term
quadratic = PolynomialFeatures(degree=2)
X_quad = quadratic.fit_transform(X)
# fit a simple linear reg model for comparison:
lr.fit(X, y)
# Evaluation grid for plotting: [[250] [260] [270] ... [590]]
X_fit = np.arange(250,600,10)[:, np.newaxis]
y_lin_fit = lr.predict(X_fit)
# fit a multiple reg model on the transformed features for
# polynomial regression:
pr.fit(X_quad, y)
# need to transform to polynomial features before predicting with pr
y_quad_fit = pr.predict(quadratic.fit_transform(X_fit))
plt.scatter(X, y, label='training points') # scatter plot of the raw data
plt.plot(X_fit, y_lin_fit, label='linear fit', linestyle='--')
plt.plot(X_fit, y_quad_fit, label='quadratic fit')
plt.legend(loc='upper left')
plt.show()
# Goodness-of-fit on the training data itself (no held-out set here).
from sklearn.metrics import mean_squared_error
from sklearn.metrics import r2_score
y_lin_pred = lr.predict(X)
y_quad_pred = pr.predict(X_quad)
print('Training MSE linear: %.3f, quadractic: %.3f' % (
    mean_squared_error(y, y_lin_pred),
    mean_squared_error(y, y_quad_pred)))
print('Training R^2 linear: %.3f, quadratic: %.3f' % (
    r2_score(y, y_lin_pred),
    r2_score(y, y_quad_pred)))
import asyncio
from aiohttp import web
import sys
import json
def get_throughput(output_file):
    """Collect throughput samples from a YCSB-style output file.

    Lines containing the token "Throughput(ops/sec)," are expected to end
    with a numeric value; lines without a parseable number are skipped.

    @param output_file: path to the text file to scan.
    @returns: dict {'rps_value': [float, ...]} with samples in file order.
    """
    rps_value = []
    with open(output_file, 'r') as ufile:
        for rpsline in ufile:  # stream line-by-line instead of readlines()
            rps = rpsline.split("Throughput(ops/sec),")[-1]
            try:
                rps_value.append(float(rps))
            except ValueError:
                # Not a throughput line (or no numeric tail) -- ignore it.
                # Was a bare `except:` which also hid real errors.
                pass
    return {'rps_value': rps_value}
# CLI entry: expects a workload name and a YCSB output file, then serves the
# parsed throughput samples as JSON on http://0.0.0.0:8080/latency.
if len(sys.argv) < 3:
    print("Expected: python3 aiohttp-ycsb.py ${WORKLOAD} ${OUTPUT_FILE}")
    exit()
workload = sys.argv[1]
output_file = sys.argv[2]
results = get_throughput(output_file)
print("workload:", workload)
async def hello(request):
    # Return the precomputed results dict serialized as JSON text.
    return web.Response(text="%s" % json.dumps(results))
app = web.Application()
app.add_routes([web.get('/latency', hello)])
web.run_app(app, port=8080)
|
import configparser
def read_configs(path='config.ini'):
    """Read model configuration and split "name,weight" values apart.

    Every section whose name ends with 'MODELS' is expected to map each
    model key to a value of the form "<model_name>,<weight>".  This
    function rewrites those sections to hold just the model name, and
    collects the weights into a new 'MODEL_WEIGHTS' section.

    @param path: configuration file to read (default 'config.ini', which
        preserves the original behavior).
    @returns: the mutated ConfigParser instance.
    """
    config = configparser.ConfigParser()
    config.read(path)
    # NOTE(review): raises DuplicateSectionError if the file itself already
    # defines MODEL_WEIGHTS, and IndexError below if a value has no comma --
    # both inherited from the original behavior.
    config.add_section('MODEL_WEIGHTS')
    for model_section in filter(lambda x: x.endswith('MODELS'), config.sections()):
        for model in config[model_section]:
            items = config[model_section][model].split(',')
            config['MODEL_WEIGHTS'][model] = items[1]
            config[model_section][model] = items[0]
    return config
|
# Simple text menu loop (Python 2: raw_input / print statements).
# Re-prompts on out-of-range numbers; 0 exits the process; 1-3 print and stop.
while True:
    # Choice Collection
    choice = int(raw_input( "1 Thing\n2 Thing\n3 Thing\n0 Exit\n\n>>" ))
    if choice not in range(4):
        # Not a menu option: ask again.
        continue
    if choice == 1:
        print "Thing 1"
    elif choice == 2:
        print "Thing 2"
    elif choice == 3:
        print "Thing 3"
    # Terminate Operations - Must hold value 0 at this point
    else:
        quit()
    # Only gets to this point of the loop if the value is in range(4)
    break
|
# coding:utf8
from PyQt4.QtGui import *
import sys
import os
# Python 2 only: re-expose sys.setdefaultencoding (removed by site.py) and
# force UTF-8 so the Chinese message strings below encode correctly.
reload(sys)
sys.setdefaultencoding('utf-8')
class Remind:
    """PyQt4 message-box helpers reporting upload/download status to the
    user (Renderbus render-farm workflow; messages are in Chinese).
    """
    def __init__(self):
        # A QApplication must exist before any QMessageBox can be shown.
        self.app = QApplication(sys.argv)
    def remind_success(self,file_number):
        # "<file_number> uploaded successfully, check the Renderbus site!"
        QMessageBox.information(None, 'INFORMATION', file_number + u'上传成功,请查看Renderbus网站!')
        return
        # NOTE(review): unreachable, and `app` is undefined (should be self.app).
        sys.exit(app.exec_())
    def remind_start(self,file_number):
        # "<file_number> upload started, check the Renderbus site later!"
        QMessageBox.information(None, 'INFORMATION', file_number + u'开始上传,请稍后查看Renderbus网站!')
        return
        # NOTE(review): unreachable, and `app` is undefined (should be self.app).
        sys.exit(app.exec_())
    def remind_fail(self, fail_info, file_number):
        # "<file_number> upload failed, error: <fail_info>"
        QMessageBox.information(None, 'INFORMATION', file_number + u'上传失败,错误信息:' + fail_info)
        return
        # NOTE(review): unreachable, and `app` is undefined (should be self.app).
        sys.exit(app.exec_())
    def download_success(self):
        # "Download succeeded!"
        QMessageBox.information(None, 'INFORMATION', u'下载成功!')
        return
        # NOTE(review): unreachable dead code.
        sys.exit(self.app.exec_())
    def download_fail(self):
        # "Download failed!"
        QMessageBox.information(None, 'INFORMATION', u'下载失败!')
        return
        # NOTE(review): unreachable dead code.
        sys.exit(self.app.exec_())
    def remind_ask(self):
        # "Same file already uploaded -- re-upload?"  Yes/No confirmation.
        re = QMessageBox.question(None, "INFORMATION", u"相同文件已上传,确定重新上传?", QMessageBox.Yes, QMessageBox.No) ## pop up a confirmation dialog
        if re == QMessageBox.No:
            try:
                # Hard-exit the process when the user declines.
                os._exit(0)
            except:
                print('exit')
        return
        # NOTE(review): unreachable, and `app` is undefined (should be self.app).
        app.exec_()
if __name__ == '__main__':
    # Manual smoke test: show the "upload succeeded" box.
    Remind().remind_success('')
    # Remind().remind_fail('', '')
    # Remind().remind_start('')
    # Remind().remind_ask()
import os, sys
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../../../'))
from constants import CRConstants
from s3 import S3Agent
class FileSystemFactory(object):
    '''
    Factory implementation which can be used to instantiate concrete file
    storage system agents.
    '''
    # Registry mapping an infrastructure identifier to its agent class.
    file_systems = {
        CRConstants.INFRA_AWS: S3Agent,
    }
    def create_agent(self, infrastructure):
        """
        Instantiate a new file storage system agent.
        @param infrastructure:
            A string indicating the type of infrastructure where the file
            storage system agent is needed.
        @returns:
            A file storage system agent instance that implements the
            BaseFSAgent API.
        @raises NameError:
            If the given input string does not map to any known
            agent type.
        """
        try:
            agent_class = self.file_systems[infrastructure]
        except KeyError:
            raise NameError('Unrecognized infrastructure: ' + infrastructure)
        return agent_class()
|
# -*- coding: utf-8 -*-
import codecs
import os
from nltk import ParserI, TaggerI, word_tokenize
from inco.nlp.freeling_base import FreeLingBase
from inco.nlp.parse.tree.freeling_tree_builder import FreeLingTreeBuilder
__author__ = 'Matias Laino'
class FreeLing(FreeLingBase, ParserI):
    """
    Wrapper class for the FreeLing parser, exposing it through NLTK's
    ParserI interface.
    """
    def __init__(self, path_to_executable=None, verbose=False, tagger=None):
        """
        Constructor.
        @param path_to_executable: path to the FreeLing executable
        @type path_to_executable: str
        @param verbose: when True, echo FreeLing's raw output while processing.
        @param tagger: POS-tagger to be used, in case tagging is required. Defaults to using FreeLing.
        @type tagger: nltk.tag.TaggerI
        @raise Exception: if executable is not found.
        """
        # Parser mode, not the full FreeLing pipeline (used by FreeLingBase).
        self.is_full = False
        self._initialize(path_to_executable, verbose)
        self._tagger = tagger
    def process_output(self, file_path):
        """
        Processes the complete output of FreeLing when configured as a parser.
        @param file_path: path to the output to process.
        @return: the processed parse tree.
        @rtype: nltk.tree.Tree
        """
        # Accumulate the whole bracketed-tree text produced by FreeLing.
        parse_tree_str = ""
        with codecs.open(file_path, encoding='utf8') as temp_output:
            for line in temp_output:
                parse_tree_str += line
                if self.verbose:
                    print(line)
        if self.verbose:
            print("FreeLing raw output: " + parse_tree_str)
            print("--- Building parse tree ---")
        tree_builder = FreeLingTreeBuilder()
        tree = tree_builder.build(parse_tree_str)
        return tree
    def raw_parse(self, sent, language='spanish'):
        """
        Parse a sentence using NLTK's word-tokenizer.
        Tokenization defaults to spanish.
        @param sent: The sentence to be parsed
        @type sent:unicode
        @rtype: iter(Tree)
        """
        tokens = word_tokenize(sent, language)
        return self.parse(tokens)
    def parse(self, sent, *args, **kwargs):
        """
        @return: An iterator that generates parse trees for the sentence.
        When possible this list is sorted from most likely to least likely.
        @param sent: The sentence to be parsed, tokenized
        @type sent: list(str)
        @rtype: iter(Tree)
        """
        # One token per line is the input format FreeLing expects here.
        # NOTE(review): formatted_str is unused when a tagger is supplied.
        formatted_str = "\n".join(sent)
        if self._tagger is not None and issubclass(type(self._tagger), TaggerI):
            # Tag first with the external tagger, then parse the tagged text.
            tagged = self._tagger.tag(sent)
            iterator = self.tagged_parse(tagged)
        else:
            # Let FreeLing do its own tagging as part of parsing.
            iterator = iter([self.execute(formatted_str, self._format_type_tokenized, self._format_type_parsed)])
        return iterator
    def tagged_parse(self, sent, verbose=False):
        """
        @return: An iterator that generates parse trees for the sentence.
        When possible this list is sorted from most likely to least likely.
        @param sent: The sentence to be parsed, tagged
        @type sent: list(str)
        @rtype: iter(Tree)
        """
        # the expected FreeLing format is:
        # word TAB lemma TAB pos_tag
        # NOTE(review): the format string joins with spaces, not tabs, and
        # reuses the word as its own lemma -- confirm FreeLing accepts this.
        formatted_str_list = map(lambda item: u"{} {} {}".format(item[0], item[0], item[1]), sent)
        formatted_str = "\n".join(formatted_str_list)
        tree = self.execute(formatted_str, self._format_type_tagged, self._format_type_parsed)
        return iter([tree])
    def grammar(self):
        # Required by ParserI; FreeLing does not expose its grammar.
        raise NotImplementedError()
def demo():
    """Parse a sample Spanish sentence with FreeLing and draw the tree."""
    freeling = FreeLing()
    tree = freeling.raw_parse(u"En el tramo de Telefónica, un toro descolgado ha creado peligro "
                              u"tras embestir contra un grupo de mozos.")
    next(tree).draw()
if __name__ == '__main__':
    demo()
class Solution:
    def longestPalindrome(self, s):
        """Return the longest palindromic substring of s.

        :type s: str
        :rtype: str  (empty string for empty input)

        Replaces the original two-pass prefix/suffix scan, whose flag/reset
        bookkeeping was fragile and hard to verify, with the standard
        expand-around-center algorithm: every palindrome is symmetric about
        either a character (odd length) or a gap between two characters
        (even length), so trying all 2n-1 centers finds the longest one.
        O(n^2) time, O(1) extra space.
        """
        if not s:
            return ''

        def _expand(lo, hi):
            # Grow the window outwards while it remains a palindrome, then
            # return the last valid palindromic slice.
            while lo >= 0 and hi < len(s) and s[lo] == s[hi]:
                lo -= 1
                hi += 1
            return s[lo + 1:hi]

        best = ''
        for center in range(len(s)):
            # Odd-length palindrome centered on s[center].
            odd = _expand(center, center)
            if len(odd) > len(best):
                best = odd
            # Even-length palindrome centered between s[center] and s[center+1].
            even = _expand(center, center + 1)
            if len(even) > len(best):
                best = even
        return best
from .models import Restaurant
from django import forms
class RestaurantForm(forms.ModelForm):
    """ModelForm exposing every field of the Restaurant model."""
    class Meta:
        model=Restaurant
        fields='__all__'
'''
'''
import sys
import os
import configparser
import random
import urllib.request
import datetime
import time
import re
from locale import str
from bs4.tests.test_docs import __metaclass__
from bs4 import BeautifulSoup
import asyncio
# Directory layout: Config and Logs are siblings of the current directory's
# parent (i.e. <parent>/Config and <parent>/Logs).
pwd=os.getcwd() #get pwd
cnf_path=os.path.join(os.path.dirname(os.getcwd()),'Config')
log_path=os.path.join(os.path.dirname(os.getcwd()),'Logs')
class Singleton(object):
    """Base class implementing the singleton pattern via __new__.

    The first construction stores the instance on the class as
    ``_instance``; every later call returns that same object.  Note that
    each subclass gets its own singleton (``hasattr`` walks the MRO, so a
    subclass inherits the parent's instance unless it defines its own).
    """
    def __new__(cls, *args, **kw):
        # Lazily create the one shared instance on first construction.
        if not hasattr(cls, '_instance'):
            cls._instance = super(Singleton, cls).__new__(cls, *args, **kw)
        return cls._instance
'''
URLManager
'''
class URLManager(Singleton):
    '''URLManager
    Singleton bookkeeping for crawl URLs: a repository of URLs that have
    been handed out (visited) and a frontier of newly discovered ones.
    All state is class-level, shared by every reference to the class.
    '''
    url_reposity = set([])  # every URL ever handed out (visited)
    new_url = set([])       # URLs discovered but not yet fetched
    @staticmethod
    def add_new_url(urls):
        """Queue every URL in *urls* that has not already been visited."""
        if len(urls) == 0:
            return
        # Set-based filter replaces the original loop (which also carried an
        # unused counter variable).
        URLManager.new_url.update(
            url for url in urls if url not in URLManager.url_reposity)
    @staticmethod
    def url_is_empty():
        """Return True when no unvisited URL remains."""
        return len(URLManager.new_url) == 0
    @staticmethod
    def url_size():
        """Return the number of unvisited URLs."""
        return len(URLManager.new_url)
    @staticmethod
    def get_url():
        """Pop an arbitrary unvisited URL, mark it visited, and return it.

        Returns None when the frontier is empty.
        """
        if len(URLManager.new_url) == 0:
            return
        url = URLManager.new_url.pop()
        URLManager.url_reposity.add(url)
        return url
    @staticmethod
    def get_url_repository():
        """Return the (mutable) set of visited URLs."""
        return URLManager.url_reposity
'''ok
'''
class Requester(Singleton):
    """Fetches pages over HTTP using a disguised User-Agent header."""
    @classmethod
    def open_url(self, url):  # NOTE(review): first arg of a classmethod is conventionally `cls`
        """Fetch *url* and return its body decoded as UTF-8.

        Returns '' when the request fails or the status is not 200; always
        sleeps a random interval afterwards (politeness delay).
        """
        head = Disguiser.get_head()
        req = urllib.request.Request(url=url, headers=head)
        html_str = ''
        try:
            # open url and get the response
            Response = urllib.request.urlopen(req)
            if Response.status == 200:
                html_str = Response.read().decode('utf-8')
            else:
                # handle other status
                print(Response.header_items())
        except Exception as e:
            # Best-effort crawl: log the error and fall through with ''.
            print(e)
        Disguiser.delay()
        return html_str
class Disguiser(Singleton):
    '''Disguiser
    Builds HTTP request headers that disguise the crawler as a browser,
    and provides a random politeness delay between requests.
    '''
    user_agent = []  # class-level cache of user-agent strings read from file
    def __init__(self):
        '''
        Constructor: prepare a browser-like header template; the
        User-Agent slot is filled in by get_head().
        '''
        self.head = {
            'Connection': 'keep-alive',
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
            # 'Accept-Encoding':'gzip, deflate, sdch,utf-8', #here is a problem
            'Accept-Language': 'zh-CN,zh;q=0.8,en;q=0.6',
            'User-Agent': ''
        }
    def get_agent(self):
        '''Return a user-agent string from file, or a default one.

        On the first call, user agents are read from Config/user_agent.cnf
        (one per line) into the class-level cache; later calls pick one at
        random.  On any read error a hard-coded Firefox UA is returned.
        '''
        if len(self.user_agent) == 0:
            try:
                # BUG FIX: the file handle was previously never closed;
                # `with` guarantees it is released even on error.
                with open(os.path.join(cnf_path, 'user_agent.cnf')) as agent:
                    for line in agent.readlines():
                        # when line is null,return a default one.
                        if line == '':
                            raise Exception
                        line = line[:-1] # delete '\n' at end of string.
                        self.user_agent.append(line)
            except Exception:
                print('read file err')
                # if err, return a default agent
                return 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:49.0) Gecko/20100101 Firefox/49.0'
        # NOTE(review): an existing-but-empty file leaves the cache empty and
        # random.choice would raise IndexError -- inherited behavior.
        return random.choice(self.user_agent)
    @staticmethod
    def get_head():
        """Return a full header dict with a freshly chosen User-Agent."""
        d = Disguiser()
        d.head['User-Agent'] = d.get_agent()
        return d.head
    @staticmethod
    def delay():
        """Sleep a random 0.1-10 s interval to throttle request rate."""
        sec = random.randint(100, 10000)
        time.sleep(sec / 1000)
|
import numpy as np
import pytest
from pysweep import sweep
from pysweep.data_storage.webui_writer import WebUIWriter
def test():
    """End-to-end check of WebUIWriter: sweep x over four coordinates while
    measuring m, then verify the generated JSON page exposes the correct
    plot type, data arrays and units for both axes."""
    # Independent parameter: x in volts, one entry per sweep point.
    param = lambda s, n, v: {"x": {"unit": "V", "value": v, "independent_parameter": True}}
    measure_values = np.linspace(-1, 1, 4)
    # Generator hands out one measurement value per measure() call.
    g = (i for i in measure_values)
    measure = lambda s, n: {"m": {"unit": "A", "value": next(g)}}
    coordinate_values = np.array([0, 1, 2, 3])
    so = sweep(param, coordinate_values)(measure)
    store = WebUIWriter("")
    # Feed every sweep result into the writer.
    for i in so:
        store.add(i)
    pages = store.get_pages()
    json_dict = store.page_to_json(pages["m"])
    assert json_dict["type"] == "linear"
    assert np.all(json_dict["x"]["data"] == coordinate_values)
    assert json_dict["x"]["unit"] == "V"
    assert np.all(json_dict["y"]["data"] == measure_values)
    assert json_dict["y"]["unit"] == "A"
|
# Спортсмен занимается ежедневными пробежками.
# В первый день его результат составил a километров.
# Каждый день спортсмен увеличивал результат на 10 % относительно предыдущего.
# Требуется определить номер дня, на который общий результат спортсмена составить не менее b километров.
# Программа должна принимать значения параметров a и b и выводить одно натуральное число — номер дня.
# Read and validate the two inputs (both must be non-negative integers).
while True:
    a = input('Введите, сколько километров пробежал спортсмен в первый день\n')
    b = input('Введите, какого результата должен достичь спортсмен\n')
    if a.isdigit() and b.isdigit():
        a = int(a)
        b = int(b)
        break
    print('Ошибка ввода, повторите')
day = 1
progress = a
# The daily distance grows by 10% each day.
# NOTE(review): the task statement above asks for the *cumulative* total to
# reach b, but this loop compares the single-day distance against b --
# confirm which interpretation is intended.
while progress <= b:
    progress *= 1.1
    day += 1
print('Увеличивая свой результат на 10% каждый день, спортсмену потребовалось: ' + '%01d' % day + ' дней')
|
#SOURCE:https://github.com/hay/wiki-text-nlp/blob/master/wiki-text-nlp.ipynb
import urllib.request
import re
import nltk
import operator
from bs4 import BeautifulSoup
import spacy
import textacy
import requests
import json
# nltk.download('punkt')
# nltk.download('stopwords')
# Fetch the rendered HTML of the Wikipedia article and collect paragraph text.
scraped_data = urllib.request.urlopen('https://en.wikipedia.org/api/rest_v1/page/html/Pakistan')
article = scraped_data.read()
parsed_article = BeautifulSoup(article,'lxml')
paragraphs = parsed_article.find_all('p')
article_text = ""
for p in paragraphs:
    article_text += p.text
# Removing Square Brackets and Extra Spaces
article_text = re.sub(r'\[[0-9]*\]', ' ', article_text)
article_text = re.sub(r'\s+', ' ', article_text)
# Removing special characters and digits
formatted_article_text = re.sub('[^a-zA-Z]', ' ', article_text )
formatted_article_text = re.sub(r'\s+', ' ', formatted_article_text)
sentence_list = nltk.sent_tokenize(article_text)
stopwords = nltk.corpus.stopwords.words('english')
# Count word frequencies (stopwords excluded), tracking the most common word.
word_frequencies = {}
maxi=""
maxc=-1
for word in nltk.word_tokenize(formatted_article_text):
    if word not in stopwords:
        if word not in word_frequencies.keys():
            word_frequencies[word] = 1
        else:
            word_frequencies[word] += 1
        if(word_frequencies[word]>maxc):
            maxc=word_frequencies[word]
            maxi=word
maximum_frequncy = max(word_frequencies.values())
# Sort words by descending frequency.
cd = sorted(word_frequencies.items(),key=operator.itemgetter(1),reverse=True)
# Take top words until their cumulative count reaches a threshold.
# NOTE(review): the threshold len(parsed_article) is the number of top-level
# soup nodes, not a word count -- confirm this is intended.
l=[]
i=1
s=cd[0][1]
l.append(cd[0][0])
while(i<len(cd) and s+cd[i][1]<(len(parsed_article))):
    l.append(cd[i][0])
    s+=cd[i][1]
    i+=1
#l has buzzwords
nlp = spacy.load('en_core_web_sm')
# text ="""
#
# """
ARTICLE = "Pakistan" #we'll have to take input from user in this field
# ENDPOINT = "https://en.wikipedia.org/api/rest_v1/page/html/"
# ENDPOINT+=ARTICLE
# req = requests.get(ENDPOINT)
#
# soup = BeautifulSoup(req.text, "lxml")
# Run spaCy over the full page text for statement extraction below.
text = parsed_article.select("body")[0].get_text().strip()
doc = nlp(text)
# most_occuring=textacy.extract.named_entities(doc,exclude_types="NUMERIC",drop_determiners=True, min_freq=5)
# for i in most_occuring:
#     print(i)
# abc123=textacy.keyterms.key_terms_from_semantic_network(doc,ranking_algo='pagerank')
# for i in abc123:
#     print(i)
# for i in textacy.keyterms.textrank(doc, normalize='lemma'):
#     print (i)
def cleanup(s):
    """Normalize an extracted fact string.

    Drops citation markers like ".[12]" and "[3]", collapses runs of
    whitespace to single spaces, trims the ends, and removes one trailing
    period.  Returns the cleaned string (possibly empty).
    """
    strip_refs = re.compile(r"\.?\[\d+\]?")  # raw string for clarity
    s = strip_refs.sub("", s).strip()
    s = re.sub(r'\[[0-9]*\]', ' ', s)
    s = re.sub(r'\s+', ' ', s)
    # BUG FIX: guard against empty input -- s[-1] raised IndexError when the
    # string was empty (or became empty after stripping).
    if s and s[-1] == ".":
        s = s[0:-1]
    return s
#print (l)
# For every buzzword, extract simple subject-verb-fact statements and keep
# the cleaned-up fact strings.
m=[]
for i in l:
    statements = textacy.extract.semistructured_statements(doc,i)
    for statement in statements:
        subject, verb, fact = statement
        fact = cleanup(str(fact))
        #print(fact)
        m.append(fact)
#print (m)
i= (json.dumps({ARTICLE:m}))
r = ((json.loads(i)))
# BUG FIX: json.stringify is JavaScript -- Python's serializer is json.dumps;
# the original line raised AttributeError at runtime.
print(json.dumps(r))
# print("Did you know this...")
# for statement in statements:
#     subject, verb, fact = statement
#     fact = cleanup(str(fact))
#     print(fact)
|
#
# earthquake_tsunami function
#
"""This function returns a callable object representing an initial water
displacement generated by a submarine earthquake.
Using input parameters:
Required
 length  along-strike length of rupture area (km)
width down-dip width of rupture area (km)
strike azimuth (degrees, measured from north) of fault axis
dip angle of fault dip in degrees w.r.t. horizontal
depth depth to base of rupture area (km)
ns the really used number of rectangular sources
NSMAX the upper limit of ns
Optional
x0 x origin (0)
y0 y origin (0)
z0 z origin
slip metres of fault slip (1)
rake angle of slip (w.r.t. horizontal) in fault plane (90 degrees)
The returned object is a callable okada function that represents
the initial water displacement generated by a submarine earthquake.
"""
import numpy as num
from cresthh.anuga.utilities import log as log
def earthquake_tsunami(ns, NSMAX, length, width, strike, depth,
                       dip, xi, yi, z0, slip, rake,
                       domain=None, verbose=False):
    """Build and return an Okada_func initial-water-displacement function
    for ns rectangular fault sources (Python 2 code).

    Sets up the elevation quantity (zrec) from z0, converts the fault
    origin(s) (xi, yi) to coordinates relative to the domain's lower-left
    corner, sanity-checks that the rupture lies below the seafloor, and
    packages everything into an Okada_func instance.
    """
    from anuga.abstract_2d_finite_volumes.quantity import Quantity
    from math import sin, radians
    #zrec0 = Quantity(domain)
    zrec0 = Quantity(domain)
    zrec0.set_values(z0)
    zrec=zrec0.get_vertex_values(xy=True)
    # Fault origin arrays: scalars for a single source, arrays otherwise.
    x0= num.zeros(ns,num.float)
    y0= num.zeros(ns,num.float)
    if ns ==1:
        x0[0]=xi
        y0[0]=yi
    else:
        x0=xi
        y0=yi
    if domain is not None:
        # Convert origins to coordinates relative to the domain corner.
        xllcorner = domain.geo_reference.get_xllcorner()
        yllcorner = domain.geo_reference.get_yllcorner()
        for i in range(0,ns):
            x0[i] = x0[i] - xllcorner # fault origin (relative)
            y0[i] = y0[i] - yllcorner
    #a few temporary print statements
    if verbose is True:
        log.critical('\nThe Earthquake ...')
        log.critical('\tns: %s' % str(ns))
        log.critical('\tNSMAX: %s' % str(NSMAX))
        log.critical('\tLength: %s' % str(length))
        log.critical('\tDepth: %s' % str(depth))
        log.critical('\tStrike: %s' % str(strike))
        log.critical('\tWidth: %s' % str(width))
        log.critical('\tDip: %s' % str(dip))
        log.critical('\tSlip: %s' % str(slip))
        log.critical('\tx0: %s' % str(x0))
        log.critical('\ty0: %s' % str(y0))
    # warning state
    # Both width and depth are in km; both sides converted to metres here.
    # test = width*1000.0*sin(radians(dip)) - depth
    test = width*1000.0*sin(radians(dip)) - depth*1000
    if test > 0.0:
        msg = 'Earthquake source not located below seafloor - check depth'
        raise Exception, msg
    return Okada_func(ns=ns,NSMAX=NSMAX,length=length, width=width, dip=dip, \
                      x0=x0, y0=y0, strike=strike, depth=depth, \
                      slip=slip, rake=rake, zrec=zrec)
#
# Okada class
#
"""This is a callable class representing the initial water displacment
generated by an earthquake.
Using input parameters:
Required
 length  along-strike length of rupture area
width down-dip width of rupture area
strike azimuth (degrees, measured from north) of fault axis
dip angle of fault dip in degrees w.r.t. horizontal
depth depth to base of rupture area
Optional
x0 x origin (0)
y0 y origin (0)
slip metres of fault slip (1)
rake angle of slip (w.r.t. horizontal) in fault plane (90 degrees)
"""
class Okada_func:
    def __init__(self, ns,NSMAX,length, width, dip, x0, y0, strike, \
                 depth, slip, rake,zrec):
        """Store fault-plane parameters for later evaluation in __call__.

        NOTE(review): NSMAX is accepted but never stored or used here.
        """
        self.dip = dip
        self.length = length
        self.width = width
        self.x0 = x0
        self.y0 = y0
        self.strike = strike
        self.depth = depth
        self.slip = slip
        self.rake = rake
        self.ns=ns
        # Vertex elevation table [x-coords, y-coords, elevations].
        self.zrec=zrec
    def __call__(self, x, y):
        """Make Okada_func a callable object.
        If called as a function, this object returns z values representing
        the initial 3D distribution of water heights at the points (x,y,z)
        produced by a submarine mass failure.
        """
        from string import replace,strip
        from math import sin, cos, radians, exp, cosh
        #ensure vectors x and y have the same length
        N = len(x)
        assert N == len(y)
        #nrec=N*N
        depth = self.depth
        dip = self.dip
        length = self.length
        width = self.width
        # NOTE(review): x0/y0 are deliberately swapped here (Aki vs Okada
        # axis conventions) -- confirm against the coordinate transform below.
        y0 = self.x0
        x0 = self.y0
        strike = self.strike
        dip = self.dip
        rake = self.rake
        dislocation = self.slip
        ns=self.ns
        zrec=self.zrec
        #initialization
        disp0=num.zeros(3,num.float)
        strain0=num.zeros(6,num.float)
        tilt0 = num.zeros(2,num.float)
        dislocations=num.zeros(ns,num.float)
        depths=num.zeros(ns,num.float)
        strikes= num.zeros(ns,num.float)
        lengths= num.zeros(ns,num.float)
        slips= num.zeros(ns,num.float)
        rakes= num.zeros(ns,num.float)
        widths= num.zeros(ns,num.float)
        dips= num.zeros(ns,num.float)
        strikes= num.zeros(ns,num.float)
        strikes= num.zeros(ns,num.float)
        strain = num.zeros((N,6),num.float)
        disp = num.zeros((N,3),num.float)
        tilt = num.zeros((N,2),num.float)
        xs =num.zeros(ns,num.float)
        ys =num.zeros(ns,num.float)
        z=[]
        # Broadcast scalar fault parameters into per-source arrays.
        if ns==1:
            dislocations[0]=dislocation
            depths[0]=depth
            strikes[0]=strike
            lengths[0]=length
            rakes[0]=rake
            widths[0]=width
            dips[0]=dip
            try:
                xs[0]=x0
                ys[0]=y0
            except:
                xs[0]=x0[0]
                ys[0]=y0[0]
        else:
            dislocations=dislocation
            strikes=strike
            lengths=length
            rakes=rake
            widths=width
            dips=dip
            xs=x0
            ys=y0
            depths=depth
        #double Gaussian calculation assumes water displacement is oriented
        #E-W, so, for displacement at some angle alpha clockwise from the E-W
        #direction, rotate (x,y) coordinates anti-clockwise by alpha
        ALPHA = 0.5
        POT3 = 0.0
        POT4 = 0.0
        DISL3 = 0.0
        AL1 = 0.0
        AW2 = 0.0
        #rad=1.745329252e-2
        #zrec = domain.get_quantity('elevation').get_values(interpolation_points=[[x, y]]) #get Zrec... has to be constant!!!
        #zrec=zreci.get_values(interpolation_points=[[x, y]])
        #zrec=0
        #Z=-zrec
        eps = 1.0e-6
        #
        # Main loop: for each receiver point, sum contributions of every
        # fault source in Okada coordinates, then rotate back and collect
        # the vertical displacement.
        for irec in range(0,N):
            xrec=y
            yrec=x
            # Look up the receiver elevation Z (km) from the vertex table.
            for i in range(0,len(zrec[0])):
                if (num.any(zrec[0][i]==yrec) and num.any(zrec[1][i]==xrec)):
                    Z=zrec[2][i]
                    Z=0.001*Z
                    break
                else: continue
            #zrec=zreci.get_values(interpolation_points=[[xrec[irec],yrec[irec]]],location='edges')
            for ist in range(0,ns):
                #st = radians(strikes[ist])
                st=radians(strikes[ist])
                csst = cos(st)
                ssst = sin(st)
                cs2st = cos(2.0*st)
                ss2st = sin(2.0*st)
                #
                #di = radians(dips[ist])
                di=radians(dips[ist])
                csdi =cos(di)
                ssdi=sin(di)
                #
                #ra= radians(rakes[ist])
                ra=radians(rakes[ist])
                csra=cos(ra)
                ssra=sin(ra)
                # transform from Aki's to Okada's system
                #first attribute x to north and y to east to match OKADA axis
                #X=0.001*(x[irec]-xs[ist])*ssst+0.001*(y[irec]-ys[ist])*csst
                #Y=-0.001*(x[irec]-xs[ist])*csst+0.001*(y[irec]-ys[ist])*ssst
                X=0.001*((xrec[irec]-xs[ist])*csst+(yrec[irec]-ys[ist])*ssst)
                Y=0.001*((xrec[irec]-xs[ist])*ssst-(yrec[irec]-ys[ist])*csst)
                DEPTH=depths[ist]
                DIP=dips[ist]
                if lengths[ist]==0 and widths[ist]== 0 :
                    #
                    # point source
                    #
                    POT1=dislocations[ist]*csra
                    POT2=dislocations[ist]*ssra
                    IRET=1
                    self.DC3D0(ALPHA,X,Y,Z,DEPTH,DIP,POT1,POT2,POT3,POT4)
                    UX=self.UX
                    UY=self.UY
                    UZ=self.UZ
                    UXX=self.UXX
                    UYX=self.UYX
                    UZX=self.UZX
                    UXY=self.UXY
                    UYY=self.UYY
                    UZY=self.UZY
                    UXZ=self.UXZ
                    UYZ=self.UYZ
                    UZZ=self.UZZ
                    IRET=self.IRET
                    if IRET==1:
                        log.critical('There is a problem in Okada subroutine!')
                        break
                else:
                    # finite source
                    AL2=lengths[ist]
                    AW1=-widths[ist]
                    DISL1=dislocations[ist]*csra
                    DISL2=dislocations[ist]*ssra
                    # Degenerate (zero-length or zero-width) rectangles are
                    # approximated by a thin sliver of size eps.
                    if lengths[ist]==0:
                        AL2=widths[ist]*eps
                        DISL1=DISL1/AL2
                        DISL2=DISL2/AL2
                    elif widths[ist]==0.0:
                        AW1=-lengths[ist]*eps
                        DISL1=DISL1/(-AW1)
                        DISL2=DISL2/(-AW1)
                    IRET=1
                    self.DC3D(ALPHA,X,Y,Z,DEPTH,DIP,AL1,AL2,AW1,AW2,\
                              DISL1,DISL2,DISL3)
                    UX=self.UX
                    UY=self.UY
                    UZ=self.UZ
                    UXX=self.UXX
                    UYX=self.UYX
                    UZX=self.UZX
                    UXY=self.UXY
                    UYY=self.UYY
                    UZY=self.UZY
                    UXZ=self.UXZ
                    UYZ=self.UYZ
                    UZZ=self.UZZ
                    IRET=self.IRET
                    #if X==-6 and Y==-1:
                        #print UY
                        #print 'hello'
                    if IRET==1:
                        print ' There is a problem in Okada subroutine!'
                        break
                #
                # transform from Okada's to Aki's system
                #
                disp0[0]=UX*csst+UY*ssst
                disp0[1]=UX*ssst-UY*csst
                disp0[2]=-UZ
                tilt0[0]=-(UXZ*csst+UYZ*ssst)
                tilt0[1]=-(UXZ*ssst-UYZ*csst)
                #
                strain0[0]=(UXX*csst*csst+UYY*ssst*ssst
                            +0.5*(UXY+UYX)*ss2st)
                strain0[1]=(UXX*ssst*ssst+UYY*csst*csst
                            -0.5*(UXY+UYX)*ss2st)
                strain0[2]=(UZZ)
                strain0[3]=(0.5*(UXX-UYY)*ss2st
                            -0.5*(UXY+UYX)*cs2st)
                strain0[4]=(-0.5*(UZX+UXZ)*ssst
                            +0.5*(UYZ+UZY)*csst)
                strain0[5]=(-0.5*(UZX+UXZ)*csst
                            -0.5*(UYZ+UZY)*ssst)
                # Accumulate this source's contribution for the receiver.
                for j in range(0,3):
                    disp[irec][j]= disp[irec][j] + disp0[j]
                for j in range(0,2):
                    tilt[irec][j]= tilt[irec][j] + tilt0[j]
                for j in range(0,6):
                    strain[irec][j]= strain[irec][j] + strain0[j]
            # Water height = minus the accumulated vertical displacement.
            z.append(-disp[irec][2])
        return z
# works on nautilus when have already done
# f2py -c okada.f -m okada
#z1 = okada(xr,yr,depth,length,width,dip,rake,slip)
    def DC3D0(self,ALPHA,X,Y,Z,DEPTH,DIP,POT1,POT2,POT3,POT4):
        """********************************************************************
        *****                                                          *****
        #*****    DISPLACEMENT AND STRAIN AT DEPTH                      *****
        #*****    DUE TO BURIED POINT SOURCE IN A SEMIINFINITE MEDIUM   *****
        #*****              CODED BY  Y.OKADA ... SEP.1991              *****
        #*****              REVISED     Y.OKADA ... NOV.1991            *****
        #*****                                                          *****
        #********************************************************************
        #***** INPUT
        #*****   ALPHA : MEDIUM CONSTANT  (LAMBDA+MYU)/(LAMBDA+2*MYU)
        #*****   X,Y,Z : COORDINATE OF OBSERVING POINT
        #*****   DEPTH : SOURCE DEPTH
        #*****   DIP   : DIP-ANGLE (DEGREE)
        #*****   POT1-POT4 : STRIKE-, DIP-, TENSILE- AND INFLATE-POTENCY
        #*****       POTENCY=(  MOMENT OF DOUBLE-COUPLE  )/MYU     FOR POT1,2
        #*****       POTENCY=(INTENSITY OF ISOTROPIC PART)/LAMBDA  FOR POT3
        #*****       POTENCY=(INTENSITY OF LINEAR DIPOLE )/MYU     FOR POT4
        #***** OUTPUT
        #*****   UX, UY, UZ  : DISPLACEMENT ( UNIT=(UNIT OF POTENCY) /
        #*****               :                     (UNIT OF X,Y,Z,DEPTH)**2  )
        #*****   UXX,UYX,UZX : X-DERIVATIVE ( UNIT= UNIT OF POTENCY) /
        #*****   UXY,UYY,UZY : Y-DERIVATIVE        (UNIT OF X,Y,Z,DEPTH)**3  )
        #*****   UXZ,UYZ,UZZ : Z-DERIVATIVE
        #*****   IRET        : RETURN CODE  ( =0....NORMAL,   =1....SINGULAR )"""
        # Direct translation of Okada's Fortran DC3D0: results are stored on
        # self (self.UX ... self.UZZ, self.IRET) rather than returned.
        #     COMMON /C1/DUMMY(8),R,dumm(15)
        #      DIMENSION  U(12),DUA(12),DUB(12),DUC(12)
        F0=0.0
        U=num.zeros((12,1),num.float)
        DUA=num.zeros((12,1),num.float)
        DUB=num.zeros((12,1),num.float)
        DUC=num.zeros((12,1),num.float)
        if Z>0: print'(''0** POSITIVE Z WAS GIVEN IN SUB-DC3D0'')'
        for I in range(0,12):
            U[I]=F0
            DUA[I]=F0
            DUB[I]=F0
            DUC[I]=F0
        AALPHA=ALPHA
        DDIP=DIP
        # Precompute dip-dependent trig constants (Fortran COMMON /C0/).
        self.DCC0N0(AALPHA,DDIP)
        #======================================
        #=====  REAL-SOURCE CONTRIBUTION  =====
        #======================================
        XX=X
        YY=Y
        ZZ=Z
        DD=DEPTH+Z
        self.DCCON1(XX,YY,DD)
        R=self.R
        if R==F0:
            # Observation point coincides with the source: singular case.
            # NOTE(review): these assignments are to locals, not self.*, so
            # callers reading self.UX etc. after this early return see stale
            # values; only inherited behavior is documented here.
            UX=F0
            UY=F0
            UZ=F0
            UXX=F0
            UYX=F0
            UZX=F0
            UXY=F0
            UYY=F0
            UZY=F0
            UXZ=F0
            UYZ=F0
            UZZ=F0
            IRET=1
            return
        else:
            PP1=POT1
            PP2=POT2
            PP3=POT3
            PP4=POT4
            self.UA0(XX,YY,DD,PP1,PP2,PP3,PP4)
            DUA=self.DUA
            for I in range(0,12):
                if I<10: U[I]=U[I]-DUA[I]
                if I>=10: U[I]=U[I]+DUA[I]
            #=======================================
            #=====  IMAGE-SOURCE CONTRIBUTION  =====
            #=======================================
            DD=DEPTH-Z
            self.DCCON1(XX,YY,DD)
            self.UA0(XX,YY,DD,PP1,PP2,PP3,PP4)
            DUA=self.DUA
            self.UB0(XX,YY,DD,ZZ,PP1,PP2,PP3,PP4)
            DUB=self.DUB
            self.UC0(XX,YY,DD,ZZ,PP1,PP2,PP3,PP4)
            DUC=self.DUC
            #-----
            # Combine real + image + depth-weighted C terms.
            for I in range(0,12):
                DU=DUA[I]+DUB[I]+ZZ*DUC[I]
                if I>=9: DU=DU+DUC[I-9]
                U[I]=U[I]+DU
            #=====
            # Publish results as attributes (Fortran output arguments).
            self.UX=U[0]
            self.UY=U[1]
            self.UZ=U[2]
            self.UXX=U[3]
            self.UYX=U[4]
            self.UZX=U[5]
            self.UXY=U[6]
            self.UYY=U[7]
            self.UZY=U[8]
            self.UXZ=U[9]
            self.UYZ=U[10]
            self.UZZ=U[11]
            self.IRET=0
def UA0(self,X,Y,D,POT1,POT2,POT3,POT4):
#SUBROUTINE UA0(X,Y,D,POT1,POT2,POT3,POT4,U)
# IMPLICIT REAL*8 (A-H,O-Z)
# DIMENSION U(12),DU(12)
"""********************************************************************
C***** DISPLACEMENT AND STRAIN AT DEPTH (PART-A) *****
C***** DUE TO BURIED POINT SOURCE IN A SEMIINFINITE MEDIUM *****
C********************************************************************
C***** INPUT
C***** X,Y,D : STATION COORDINATES IN FAULT SYSTEM
C***** POT1-POT4 : STRIKE-, DIP-, TENSILE- AND INFLATE-POTENCY
C***** OUTPUT
C***** U(12) : DISPLACEMENT AND THEIR DERIVATIVES """
#
# COMMON /C0/ALP1,ALP2,ALP3,ALP4,ALP5,SD,CD,SDSD,CDCD,SDCD,S2D,C2D
# COMMON /C1/P,Q,S,T,XY,X2,Y2,D2,R,R2,R3,R5,QR,QRX,A3,A5,B3,C3,
# * UY,VY,WY,UZ,VZ,WZ
F0 = 0.0
F1 = 1.0
F3 = 3.0
PI2=6.283185307179586
ALP1=self.ALP1
ALP2=self.ALP2
ALP3=self.ALP3
ALP4=self.ALP4
ALP5=self.ALP4
SD=self.SD
CD=self.CD
SDSD=self.SDSD
CDCD=self.CDCD
SDCD=self.SDCD
S2D=self.S2D
C2D=self.C2D
P=self.P
Q=self.Q
S=self.S
T=self.T
XY=self.XY
X2=self.X2
Y2=self.Y2
D2=self.D2
R=self.R
R2=self.R2
R3=self.R3
R5=self.R5
QR=self.QR
QRX=self.QRX
A3=self.A3
A5=self.A5
B3=self.B3
C3=self.C3
UY=self.UY
VY=self.VY
WY=self.WY
UZ=self.UZ
VZ=self.VZ
WZ=self.WZ
DUA=num.zeros((12,1),num.float)
DU=num.zeros((12,1),num.float)
U=num.zeros((12,1),num.float)
#-----
for I in range(0,12):
U[I]=F0
#======================================
#===== STRIKE-SLIP CONTRIBUTION =====
#======================================
if POT1 != F0:
DU[0]= ALP1*Q/R3 +ALP2*X2*QR
DU[1]= ALP1*X/R3*SD +ALP2*XY*QR
DU[2]=-ALP1*X/R3*CD +ALP2*X*D*QR
DU[3]= X*QR*(-ALP1 +ALP2*(F1+A5) )
DU[4]= ALP1*A3/R3*SD +ALP2*Y*QR*A5
DU[5]=-ALP1*A3/R3*CD +ALP2*D*QR*A5
DU[6]= ALP1*(SD/R3-Y*QR) +ALP2*F3*X2/R5*UY
DU[7]= F3*X/R5*(-ALP1*Y*SD +ALP2*(Y*UY+Q) )
DU[8]= F3*X/R5*( ALP1*Y*CD +ALP2*D*UY )
DU[9]= ALP1*(CD/R3+D*QR) +ALP2*F3*X2/R5*UZ
DU[10]= F3*X/R5*( ALP1*D*SD +ALP2*Y*UZ )
DU[11]= F3*X/R5*(-ALP1*D*CD +ALP2*(D*UZ-Q) )
for I in range(0,12):
U[I]=U[I]+POT1/PI2*DU[I]
#===================================
#===== DIP-SLIP CONTRIBUTION =====
#===================================
if POT2 != F0:
DU[0]= ALP2*X*P*QR
DU[1]= ALP1*S/R3 +ALP2*Y*P*QR
DU[2]=-ALP1*T/R3 +ALP2*D*P*QR
DU[3]= ALP2*P*QR*A5
DU[4]=-ALP1*F3*X*S/R5 -ALP2*Y*P*QRX
DU[5]= ALP1*F3*X*T/R5 -ALP2*D*P*QRX
DU[6]= ALP2*F3*X/R5*VY
DU[7]= ALP1*(S2D/R3-F3*Y*S/R5) +ALP2*(F3*Y/R5*VY+P*QR)
DU[8]=-ALP1*(C2D/R3-F3*Y*T/R5) +ALP2*F3*D/R5*VY
DU[9]= ALP2*F3*X/R5*VZ
DU[10]= ALP1*(C2D/R3+F3*D*S/R5) +ALP2*F3*Y/R5*VZ
DU[11]= ALP1*(S2D/R3-F3*D*T/R5) +ALP2*(F3*D/R5*VZ-P*QR)
for I in range(0,12):
U[I]=U[I]+POT2/PI2*DU[I]
#========================================
#===== TENSILE-FAULT CONTRIBUTION =====
#========================================
if POT3!=F0:
DU[0]= ALP1*X/R3 -ALP2*X*Q*QR
DU[1]= ALP1*T/R3 -ALP2*Y*Q*QR
DU[2]= ALP1*S/R3 -ALP2*D*Q*QR
DU[3]= ALP1*A3/R3 -ALP2*Q*QR*A5
DU[4]=-ALP1*F3*X*T/R5 +ALP2*Y*Q*QRX
DU[5]=-ALP1*F3*X*S/R5 +ALP2*D*Q*QRX
DU[6]=-ALP1*F3*XY/R5 -ALP2*X*QR*WY
DU[7]= ALP1*(C2D/R3-F3*Y*T/R5) -ALP2*(Y*WY+Q)*QR
DU[8]= ALP1*(S2D/R3-F3*Y*S/R5) -ALP2*D*QR*WY
DU[9]= ALP1*F3*X*D/R5 -ALP2*X*QR*WZ
DU[10]=-ALP1*(S2D/R3-F3*D*T/R5) -ALP2*Y*QR*WZ
DU[11]= ALP1*(C2D/R3+F3*D*S/R5) -ALP2*(D*WZ-Q)*QR
for I in range(0,12):
U[I]=U[I]+POT3/PI2*DU[I]
#=========================================
#===== INFLATE SOURCE CONTRIBUTION =====
#=========================================
if POT4 != F0:
DU[0]=-ALP1*X/R3
DU[1]=-ALP1*Y/R3
DU[2]=-ALP1*D/R3
DU[3]=-ALP1*A3/R3
DU[4]= ALP1*F3*XY/R5
DU[5]= ALP1*F3*X*D/R5
DU[6]= DU[4]
DU[7]=-ALP1*B3/R3
DU[8]= ALP1*F3*Y*D/R5
DU[9]=-DU[5]
DU[10]=-DU[8]
DU[11]= ALP1*C3/R3
for I in range(0,12):
U[I]=U[I]+POT4/PI2*DU[I]
#for I in range(0,12):
#DUA[I]=U[I]
self.DUA=U
def UB0(self,X,Y,D,Z,POT1,POT2,POT3,POT4):
# SUBROUTINE UB0(X,Y,D,Z,POT1,POT2,POT3,POT4,U)
# IMPLICIT REAL*8 (A-H,O-Z)
# DIMENSION U(12),DU(12)
#
"""********************************************************************
C***** DISPLACEMENT AND STRAIN AT DEPTH (PART-B) *****
C***** DUE TO BURIED POINT SOURCE IN A SEMIINFINITE MEDIUM *****
C********************************************************************
C
C***** INPUT
C***** X,Y,D,Z : STATION COORDINATES IN FAULT SYSTEM
C***** POT1-POT4 : STRIKE-, DIP-, TENSILE- AND INFLATE-POTENCY
C***** OUTPUT
C***** U(12) : DISPLACEMENT AND THEIR DERIVATIVES """
#
# COMMON /C0/ALP1,ALP2,ALP3,ALP4,ALP5,SD,CD,SDSD,CDCD,SDCD,S2D,C2D
# COMMON /C1/P,Q,S,T,XY,X2,Y2,D2,R,R2,R3,R5,QR,QRX,A3,A5,B3,C3,
# * UY,VY,WY,UZ,VZ,WZ
# F0,F1,F2,F3,F4,F5,F8,F9
# * /0.D0,1.D0,2.D0,3.D0,4.D0,5.D0,8.D0,9.D0/
# DATA PI2/6.283185307179586D0/
DUB=num.zeros((12,1),num.float)
DU=num.zeros((12,1),num.float)
U=num.zeros((12,1),num.float)
F0=0.0
F1=1.0
F2=2.0
F3=3.0
F4=4.0
F5=5.0
F8=8.0
F9=9.0
PI2=6.283185307179586
ALP1=self.ALP1
ALP2=self.ALP2
ALP3=self.ALP3
ALP4=self.ALP4
ALP5=self.ALP4
SD=self.SD
CD=self.CD
SDSD=self.SDSD
CDCD=self.CDCD
SDCD=self.SDCD
S2D=self.S2D
C2D=self.C2D
P=self.P
Q=self.Q
S=self.S
T=self.T
XY=self.XY
X2=self.X2
Y2=self.Y2
D2=self.D2
R=self.R
R2=self.R2
R3=self.R3
R5=self.R5
QR=self.QR
QRX=self.QRX
A3=self.A3
A5=self.A5
B3=self.B3
C3=self.C3
UY=self.UY
VY=self.VY
WY=self.WY
UZ=self.UZ
VZ=self.VZ
WZ=self.WZ
#-----
C=D+Z
RD=R+D
D12=F1/(R*RD*RD)
D32=D12*(F2*R+D)/R2
D33=D12*(F3*R+D)/(R2*RD)
D53=D12*(F8*R2+F9*R*D+F3*D2)/(R2*R2*RD)
D54=D12*(F5*R2+F4*R*D+D2)/R3*D12
#-----
FI1= Y*(D12-X2*D33)
FI2= X*(D12-Y2*D33)
FI3= X/R3-FI2
FI4=-XY*D32
FI5= F1/(R*RD)-X2*D32
FJ1=-F3*XY*(D33-X2*D54)
FJ2= F1/R3-F3*D12+F3*X2*Y2*D54
FJ3= A3/R3-FJ2
FJ4=-F3*XY/R5-FJ1
FK1=-Y*(D32-X2*D53)
FK2=-X*(D32-Y2*D53)
FK3=-F3*X*D/R5-FK2
#-----
for I in range(0,12):
U[I]=F0
#======================================
#===== STRIKE-SLIP CONTRIBUTION =====
#======================================
if POT1!=F0:
DU[0]=-X2*QR -ALP3*FI1*SD
DU[1]=-XY*QR -ALP3*FI2*SD
DU[2]=-C*X*QR -ALP3*FI4*SD
DU[3]=-X*QR*(F1+A5) -ALP3*FJ1*SD
DU[4]=-Y*QR*A5 -ALP3*FJ2*SD
DU[5]=-C*QR*A5 -ALP3*FK1*SD
DU[6]=-F3*X2/R5*UY -ALP3*FJ2*SD
DU[7]=-F3*XY/R5*UY-X*QR -ALP3*FJ4*SD
DU[8]=-F3*C*X/R5*UY -ALP3*FK2*SD
DU[9]=-F3*X2/R5*UZ +ALP3*FK1*SD
DU[10]=-F3*XY/R5*UZ +ALP3*FK2*SD
DU[11]= F3*X/R5*(-C*UZ +ALP3*Y*SD)
for I in range(0,12):
U[I]=U[I]+POT1/PI2*DU[I]
#===================================
#===== DIP-SLIP CONTRIBUTION =====
#===================================
if POT2!=F0:
DU[0]=-X*P*QR +ALP3*FI3*SDCD
DU[1]=-Y*P*QR +ALP3*FI1*SDCD
DU[2]=-C*P*QR +ALP3*FI5*SDCD
DU[3]=-P*QR*A5 +ALP3*FJ3*SDCD
DU[4]= Y*P*QRX +ALP3*FJ1*SDCD
DU[5]= C*P*QRX +ALP3*FK3*SDCD
DU[6]=-F3*X/R5*VY +ALP3*FJ1*SDCD
DU[7]=-F3*Y/R5*VY-P*QR +ALP3*FJ2*SDCD
DU[8]=-F3*C/R5*VY +ALP3*FK1*SDCD
DU[9]=-F3*X/R5*VZ -ALP3*FK3*SDCD
DU[10]=-F3*Y/R5*VZ -ALP3*FK1*SDCD
DU[11]=-F3*C/R5*VZ +ALP3*A3/R3*SDCD
for I in range(0,12):
U[I]=U[I]+POT2/PI2*DU[I]
#========================================
#===== TENSILE-FAULT CONTRIBUTION =====
#========================================
if POT3!=F0:
DU[0]= X*Q*QR -ALP3*FI3*SDSD
DU[1]= Y*Q*QR -ALP3*FI1*SDSD
DU[2]= C*Q*QR -ALP3*FI5*SDSD
DU[3]= Q*QR*A5 -ALP3*FJ3*SDSD
DU[4]=-Y*Q*QRX -ALP3*FJ1*SDSD
DU[5]=-C*Q*QRX -ALP3*FK3*SDSD
DU[6]= X*QR*WY -ALP3*FJ1*SDSD
DU[7]= QR*(Y*WY+Q) -ALP3*FJ2*SDSD
DU[8]= C*QR*WY -ALP3*FK1*SDSD
DU[9]= X*QR*WZ +ALP3*FK3*SDSD
DU[10]= Y*QR*WZ +ALP3*FK1*SDSD
DU[11]= C*QR*WZ -ALP3*A3/R3*SDSD
for I in range(0,12):
U[I]=U[I]+POT3/PI2*DU[I]
#=========================================
#===== INFLATE SOURCE CONTRIBUTION =====
#=========================================
if POT4!=F0:
DU[0]= ALP3*X/R3
DU[1]= ALP3*Y/R3
DU[2]= ALP3*D/R3
DU[3]= ALP3*A3/R3
DU[4]=-ALP3*F3*XY/R5
DU[5]=-ALP3*F3*X*D/R5
DU[6]= DU[4]
DU[7]= ALP3*B3/R3
DU[8]=-ALP3*F3*Y*D/R5
DU[9]=-DU[5]
DU[10]=-DU[8]
DU[11]=-ALP3*C3/R3
for I in range(0,12):
U[I]=U[I]+POT4/PI2*DU[I]
#for I in range(0,12):
#DUB[I]=U[I]
self.DUB=U
def UC0(self,X,Y,D,Z,POT1,POT2,POT3,POT4):
# SUBROUTINE UC0(X,Y,D,Z,POT1,POT2,POT3,POT4,U)
# IMPLICIT REAL*8 (A-H,O-Z)
# DIMENSION U(12),DU(12)
"""********************************************************************
C***** DISPLACEMENT AND STRAIN AT DEPTH (PART-B) *****
C***** DUE TO BURIED POINT SOURCE IN A SEMIINFINITE MEDIUM *****
C********************************************************************
C***** INPUT
C***** X,Y,D,Z : STATION COORDINATES IN FAULT SYSTEM
C***** POT1-POT4 : STRIKE-, DIP-, TENSILE- AND INFLATE-POTENCY
C***** OUTPUT
C***** U(12) : DISPLACEMENT AND THEIR DERIVATIVES"""
# COMMON /C0/ALP1,ALP2,ALP3,ALP4,ALP5,SD,CD,SDSD,CDCD,SDCD,S2D,C2D
# COMMON /C1/P,Q,S,T,XY,X2,Y2,D2,R,R2,R3,R5,QR,QRX,A3,A5,B3,C3,um(6)
# DATA F0,F1,F2,F3,F5,F7,F10,F15
# * /0.D0,1.D0,2.D0,3.D0,5.D0,7.D0,10.D0,15.D0/
# DATA PI2/6.283185307179586D0/
DUC=num.zeros((12,1),num.float)
DU=num.zeros((12,1),num.float)
U=num.zeros((12,1),num.float)
F0=0.0
F1=1.0
F2=2.0
F3=3.0
F5=5.0
F7=7.0
F10=10.0
F15=15.0
PI2=6.283185307179586
ALP1=self.ALP1
ALP2=self.ALP2
ALP3=self.ALP3
ALP4=self.ALP4
ALP5=self.ALP4
SD=self.SD
CD=self.CD
SDSD=self.SDSD
CDCD=self.CDCD
SDCD=self.SDCD
S2D=self.S2D
C2D=self.C2D
P=self.P
Q=self.Q
S=self.S
T=self.T
XY=self.XY
X2=self.X2
Y2=self.Y2
D2=self.D2
R=self.R
R2=self.R2
R3=self.R3
R5=self.R5
QR=self.QR
QRX=self.QRX
A3=self.A3
A5=self.A5
B3=self.B3
C3=self.C3
UY=self.UY
VY=self.VY
WY=self.WY
UZ=self.UZ
VZ=self.VZ
WZ=self.WZ
#-----
C=D+Z
Q2=Q*Q
R7=R5*R2
A7=F1-F7*X2/R2
B5=F1-F5*Y2/R2
B7=F1-F7*Y2/R2
C5=F1-F5*D2/R2
C7=F1-F7*D2/R2
D7=F2-F7*Q2/R2
QR5=F5*Q/R2
QR7=F7*Q/R2
DR5=F5*D/R2
#-----
for I in range(0,12):
U[I]=F0
#======================================
#===== STRIKE-SLIP CONTRIBUTION =====
#======================================
if POT1!=F0:
DU[0]=-ALP4*A3/R3*CD +ALP5*C*QR*A5
DU[1]= F3*X/R5*( ALP4*Y*CD +ALP5*C*(SD-Y*QR5) )
DU[2]= F3*X/R5*(-ALP4*Y*SD +ALP5*C*(CD+D*QR5) )
DU[3]= ALP4*F3*X/R5*(F2+A5)*CD -ALP5*C*QRX*(F2+A7)
DU[4]= F3/R5*( ALP4*Y*A5*CD +ALP5*C*(A5*SD-Y*QR5*A7) )
DU[5]= F3/R5*(-ALP4*Y*A5*SD +ALP5*C*(A5*CD+D*QR5*A7) )
DU[6]= DU[4]
DU[7]= F3*X/R5*( ALP4*B5*CD -ALP5*F5*C/R2*(F2*Y*SD+Q*B7) )
DU[8]= F3*X/R5*(-ALP4*B5*SD +ALP5*F5*C/R2*(D*B7*SD-Y*C7*CD) )
DU[9]= F3/R5* (-ALP4*D*A5*CD +ALP5*C*(A5*CD+D*QR5*A7) )
DU[10]= F15*X/R7*( ALP4*Y*D*CD +ALP5*C*(D*B7*SD-Y*C7*CD) )
DU[11]= F15*X/R7*(-ALP4*Y*D*SD +ALP5*C*(F2*D*CD-Q*C7) )
for I in range(0,12):
U[I]=U[I]+POT1/PI2*DU[I]
#===================================
#===== DIP-SLIP CONTRIBUTION =====
#===================================
if POT2!=F0:
DU[0]= ALP4*F3*X*T/R5 -ALP5*C*P*QRX
DU[1]=-ALP4/R3*(C2D-F3*Y*T/R2) +ALP5*F3*C/R5*(S-Y*P*QR5)
DU[2]=-ALP4*A3/R3*SDCD +ALP5*F3*C/R5*(T+D*P*QR5)
DU[3]= ALP4*F3*T/R5*A5 -ALP5*F5*C*P*QR/R2*A7
DU[4]= F3*X/R5*(ALP4*(C2D-F5*Y*T/R2)-ALP5*F5*C/R2*(S-Y*P*QR7))
DU[5]= F3*X/R5*(ALP4*(F2+A5)*SDCD -ALP5*F5*C/R2*(T+D*P*QR7))
DU[6]= DU[4]
DU[7]= (F3/R5*(ALP4*(F2*Y*C2D+T*B5)
+ALP5*C*(S2D-F10*Y*S/R2-P*QR5*B7)))
DU[8]= F3/R5*(ALP4*Y*A5*SDCD-ALP5*C*((F3+A5)*C2D+Y*P*DR5*QR7))
DU[9]= F3*X/R5*(-ALP4*(S2D-T*DR5) -ALP5*F5*C/R2*(T+D*P*QR7))
DU[10]= (F3/R5*(-ALP4*(D*B5*C2D+Y*C5*S2D)
-ALP5*C*((F3+A5)*C2D+Y*P*DR5*QR7)))
DU[11]= F3/R5*(-ALP4*D*A5*SDCD-ALP5*C*(S2D-F10*D*T/R2+P*QR5*C7))
for I in range(0,12):
U[I]=U[I]+POT2/PI2*DU[I]
#========================================
#===== TENSILE-FAULT CONTRIBUTION =====
#========================================
if POT3!=F0:
DU[0]= F3*X/R5*(-ALP4*S +ALP5*(C*Q*QR5-Z))
DU[1]= ALP4/R3*(S2D-F3*Y*S/R2)+ALP5*F3/R5*(C*(T-Y+Y*Q*QR5)-Y*Z)
DU[2]=-ALP4/R3*(F1-A3*SDSD) -ALP5*F3/R5*(C*(S-D+D*Q*QR5)-D*Z)
DU[3]=-ALP4*F3*S/R5*A5 +ALP5*(C*QR*QR5*A7-F3*Z/R5*A5)
DU[4]= (F3*X/R5*(-ALP4*(S2D-F5*Y*S/R2)
-ALP5*F5/R2*(C*(T-Y+Y*Q*QR7)-Y*Z)))
DU[5]= (F3*X/R5*( ALP4*(F1-(F2+A5)*SDSD)
+ALP5*F5/R2*(C*(S-D+D*Q*QR7)-D*Z)))
DU[6]= DU[4]
DU[7]= (F3/R5*(-ALP4*(F2*Y*S2D+S*B5)
-ALP5*(C*(F2*SDSD+F10*Y*(T-Y)/R2-Q*QR5*B7)+Z*B5)))
DU[8]= (F3/R5*( ALP4*Y*(F1-A5*SDSD)
+ALP5*(C*(F3+A5)*S2D-Y*DR5*(C*D7+Z))))
DU[9]= (F3*X/R5*(-ALP4*(C2D+S*DR5)
+ALP5*(F5*C/R2*(S-D+D*Q*QR7)-F1-Z*DR5)))
DU[10]= (F3/R5*( ALP4*(D*B5*S2D-Y*C5*C2D)
+ALP5*(C*((F3+A5)*S2D-Y*DR5*D7)-Y*(F1+Z*DR5))))
DU[11]= (F3/R5*(-ALP4*D*(F1-A5*SDSD)
-ALP5*(C*(C2D+F10*D*(S-D)/R2-Q*QR5*C7)+Z*(F1+C5))))
for I in range(0,12):
U[I]=U[I]+POT3/PI2*DU[I]
#=========================================
#===== INFLATE SOURCE CONTRIBUTION =====
#=========================================
if POT4!=F0:
DU[0]= ALP4*F3*X*D/R5
DU[1]= ALP4*F3*Y*D/R5
DU[2]= ALP4*C3/R3
DU[3]= ALP4*F3*D/R5*A5
DU[4]=-ALP4*F15*XY*D/R7
DU[5]=-ALP4*F3*X/R5*C5
DU[6]= DU[4]
DU[7]= ALP4*F3*D/R5*B5
DU[8]=-ALP4*F3*Y/R5*C5
DU[9]= DU[5]
DU[10]= DU[8]
DU[11]= ALP4*F3*D/R5*(F2+C5)
for I in range(0,12):
U[I]=U[I]+POT4/PI2*DU[I]
#for I in range(0,12):
#DUC[I]=U[I]
self.DUC=U
def DC3D(self,ALPHA,X,Y,Z,DEPTH,DIP,AL1,AL2,AW1,AW2,DISL1,DISL2,DISL3):
# SUBROUTINE DC3D(ALPHA,X,Y,Z,DEPTH,DIP,
# * AL1,AL2,AW1,AW2,DISL1,DISL2,DISL3,
# * UX,UY,UZ,UXX,UYX,UZX,UXY,UYY,UZY,UXZ,UYZ,UZZ,IRET)
# IMPLICIT REAL*8 (A-H,O-Z)
# REAL*4 ALPHA,X,Y,Z,DEPTH,DIP,AL1,AL2,AW1,AW2,DISL1,DISL2,DISL3,
# * UX,UY,UZ,UXX,UYX,UZX,UXY,UYY,UZY,UXZ,UYZ,UZZ,EPS
"""********************************************************************
C***** *****
C***** DISPLACEMENT AND STRAIN AT DEPTH *****
C***** DUE TO BURIED FINITE FAULT IN A SEMIINFINITE MEDIUM *****
C***** CODED BY Y.OKADA ... SEP.1991 *****
C***** REVISED ... NOV.1991, APR.1992, MAY.1993, *****
C***** JUL.1993 *****
C********************************************************************
C
C***** INPUT
C***** ALPHA : MEDIUM CONSTANT (LAMBDA+MYU)/(LAMBDA+2*MYU)
C***** X,Y,Z : COORDINATE OF OBSERVING POINT
C***** DEPTH : DEPTH OF REFERENCE POINT
C***** DIP : DIP-ANGLE (DEGREE)
C***** AL1,AL2 : FAULT LENGTH RANGE
C***** AW1,AW2 : FAULT WIDTH RANGE
C***** DISL1-DISL3 : STRIKE-, DIP-, TENSILE-DISLOCATIONS
C
C***** OUTPUT
C***** UX, UY, UZ : DISPLACEMENT ( UNIT=(UNIT OF DISL)
C***** UXX,UYX,UZX : X-DERIVATIVE ( UNIT=(UNIT OF DISL) /
C***** UXY,UYY,UZY : Y-DERIVATIVE (UNIT OF X,Y,Z,DEPTH,AL,AW) )
C***** UXZ,UYZ,UZZ : Z-DERIVATIVE
C***** IRET : RETURN CODE ( =0....NORMAL, =1....SINGULAR ) """
#
# COMMON /C0/DUMMY(5),SD,CD,dumm(5)
# DIMENSION XI(2),ET(2),KXI(2),KET(2)
# DIMENSION U(12),DU(12),DUA(12),DUB(12),DUC(12)
from math import sqrt
F0 =0.0
EPS=0.000001
XI=num.zeros(2,num.float)
ET=num.zeros(2,num.float)
KXI=num.zeros(2,num.float)
KET=num.zeros(2,num.float)
U=num.zeros(12,num.float)
DU=num.zeros(12,num.float)
DUA=num.zeros(12,num.float)
DUB=num.zeros(12,num.float)
DUC=num.zeros(12,num.float)
#-----
if Z>0: log.critical('** POSITIVE Z WAS GIVEN IN SUB-DC3D')
for I in range(0,12):
U[I]=F0
DUA[I]=F0
DUB[I]=F0
DUC[I]=F0
AALPHA=ALPHA
DDIP=DIP
self.DCC0N0(AALPHA,DDIP)
CD=self.CD
SD=self.SD
#-----
ZZ=Z
DD1=DISL1
DD2=DISL2
DD3=DISL3
XI[0]=X-AL1
XI[1]=X-AL2
#if X==-6 and Y==-1:
#print AL1
#print AL2
#print 'hello'
if abs(XI[0])<EPS: XI[0]=F0
if abs(XI[1])<EPS: XI[1]=F0
#======================================
#===== REAL-SOURCE CONTRIBUTION =====
#======================================
D=DEPTH+Z
P=Y*CD+D*SD
Q=Y*SD-D*CD
ET[0]=P-AW1
ET[1]=P-AW2
if abs(Q)<EPS: Q=F0
if abs(ET[0])<EPS: ET[0]=F0
if abs(ET[1])<EPS: ET[1]=F0
#--------------------------------
#----- REJECT SINGULAR CASE -----
#--------------------------------
#----- ON FAULT EDGE
if (Q==F0 and ((XI[0]*XI[1]<F0 and ET[0]*ET[1]==F0)
or (ET[0]*ET[1]<F0 and XI[0]*XI[1]==F0) )):
self.UX=F0
self.UY=F0
self.UZ=F0
self.UXX=F0
self.UYX=F0
self.UZX=F0
self.UXY=F0
self.UYY=F0
self.UZY=F0
self.UXZ=F0
self.UYZ=F0
self.UZZ=F0
self.IRET=1
return
#----- ON NEGATIVE EXTENSION OF FAULT EDGE
KXI[0]=0
KXI[1]=0
KET[0]=0
KET[1]=0
R12= sqrt(XI[0]*XI[0]+ET[1]*ET[1]+Q*Q)
R21= sqrt(XI[1]*XI[1]+ET[0]*ET[0]+Q*Q)
R22= sqrt(XI[1]*XI[1]+ET[1]*ET[1]+Q*Q)
if XI[0]<F0 and R21+XI[1]<EPS: KXI[0]=1
if XI[0]<F0 and R22+XI[1]<EPS: KXI[1]=1
if ET[0]<F0 and R12+ET[1]<EPS: KET[0]=1
if ET[0]<F0 and R22+ET[1]<EPS: KET[1]=1
#=====
for K in range(0,2):
for J in range(0,2):
self.DCCON2(XI[J],ET[K],Q,SD,CD,KXI[K],KET[J])
self.UA(XI[J],ET[K],Q,DD1,DD2,DD3)
DUA=self.DUA
for I in range(0,10,3):
DU[I] =-DUA[I]
DU[I+1]=-DUA[I+1]*CD+DUA[I+2]*SD
DU[I+2]=-DUA[I+1]*SD-DUA[I+2]*CD
if I==9:
DU[I] =-DU[I]
DU[I+1]=-DU[I+1]
DU[I+2]=-DU[I+2]
else: continue
for I in range(0,12):
if (J+K)!=1: U[I]=U[I]+DU[I]
if (J+K)==1: U[I]=U[I]-DU[I]
#=======================================
#===== IMAGE-SOURCE CONTRIBUTION =====
#=======================================
D=DEPTH-Z
P=Y*CD+D*SD
Q=Y*SD-D*CD
ET[0]=P-AW1
ET[1]=P-AW2
if abs(Q)<EPS: Q=F0
if abs(ET[0])<EPS: ET[0]=F0
if abs(ET[1])<EPS: ET[1]=F0
#--------------------------------
#----- REJECT SINGULAR CASE -----
#--------------------------------
#----- ON FAULT EDGE
if (Q==F0 and ((XI[0]*XI[1]<F0 and ET[0]*ET[1]==F0)
or (ET[0]*ET[1]<F0 and XI[0]*XI[1]==F0) )):
self.UX=F0
self.UY=F0
self.UZ=F0
self.UXX=F0
self.UYX=F0
self.UZX=F0
self.UXY=F0
self.UYY=F0
self.UZY=F0
self.UXZ=F0
self.UYZ=F0
self.UZZ=F0
self.IRET=1
return
#----- ON NEGATIVE EXTENSION OF FAULT EDGE
KXI[0]=0
KXI[1]=0
KET[0]=0
KET[1]=0
R12=sqrt(XI[0]*XI[0]+ET[1]*ET[1]+Q*Q)
R21=sqrt(XI[1]*XI[1]+ET[0]*ET[0]+Q*Q)
R22=sqrt(XI[1]*XI[1]+ET[1]*ET[1]+Q*Q)
if XI[0]<F0 and R21+XI[1]<EPS: KXI[0]=1
if XI[0]<F0 and R22+XI[1]<EPS: KXI[1]=1
if ET[0]<F0 and R12+ET[1]<EPS: KET[0]=1
if ET[0]<F0 and R22+ET[1]<EPS: KET[1]=1
#=====
for K in range(0,2):
for J in range(0,2):
self.DCCON2(XI[J],ET[K],Q,SD,CD,KXI[K],KET[J])
self.UA(XI[J],ET[K],Q,DD1,DD2,DD3)
DUA=self.DUA
self.UB(XI[J],ET[K],Q,DD1,DD2,DD3)
DUB=self.DUB
self.UC(XI[J],ET[K],Q,ZZ,DD1,DD2,DD3)
DUC=self.DUC
#-----
for I in range(0,10,3):
DU[I]=DUA[I]+DUB[I]+Z*DUC[I]
DU[I+1]=((DUA[I+1]+DUB[I+1]+Z*DUC[I+1])*CD
-(DUA[I+2]+DUB[I+2]+Z*DUC[I+2])*SD )
DU[I+2]=((DUA[I+1]+DUB[I+1]-Z*DUC[I+1])*SD
+(DUA[I+2]+DUB[I+2]-Z*DUC[I+2])*CD)
if I==9:
DU[9]=DU[9]+DUC[0]
DU[10]=DU[10]+DUC[1]*CD-DUC[2]*SD
DU[11]=DU[11]-DUC[1]*SD-DUC[2]*CD
for I in range(0,12):
if (J+K)!=1: U[I]=U[I]+DU[I]
if (J+K)==1: U[I]=U[I]-DU[I]
#=====
self.UX=U[0]
self.UY=U[1]
self.UZ=U[2]
self.UXX=U[3]
self.UYX=U[4]
self.UZX=U[5]
self.UXY=U[6]
self.UYY=U[7]
self.UZY=U[8]
self.UXZ=U[9]
self.UYZ=U[10]
self.UZZ=U[11]
self.IRET=0
def UA(self,XI,ET,Q,DISL1,DISL2,DISL3):
# IMPLICIT REAL*8 (A-H,O-Z)
# DIMENSION U(12),DU(12)
#
#********************************************************************
#***** DISPLACEMENT AND STRAIN AT DEPTH (PART-A) *****
#***** DUE TO BURIED FINITE FAULT IN A SEMIINFINITE MEDIUM *****
#********************************************************************
#
#***** INPUT
#***** XI,ET,Q : STATION COORDINATES IN FAULT SYSTEM
#***** DISL1-DISL3 : STRIKE-, DIP-, TENSILE-DISLOCATIONS
#***** OUTPUT
#***** U(12) : DISPLACEMENT AND THEIR DERIVATIVES
#
# COMMON /C0/ALP1,ALP2,ALP3,ALP4,ALP5,SD,CD,SDSD,CDCD,SDCD,S2D,C2D
# COMMON /C2/XI2,ET2,Q2,R,R2,R3,R5,Y,D,TT,ALX,ALE,X11,Y11,X32,Y32,
# * EY,EZ,FY,FZ,GY,GZ,HY,HZ
# DATA F0,F2,PI2/0.D0,2.D0,6.283185307179586D0/
U=num.zeros(12,num.float)
DU=num.zeros(12,num.float)
DUA=num.zeros(12,num.float)
F0 =0.0
F2=2.0
PI2=6.283185307179586
ALP1=self.ALP1
ALP2=self.ALP2
ALP3=self.ALP3
ALP4=self.ALP4
ALP5=self.ALP4
SD=self.SD
CD=self.CD
SDSD=self.SDSD
CDCD=self.CDCD
SDCD=self.SDCD
S2D=self.S2D
C2D=self.C2D
XI2=self.XI2
ET2=self.ET2
Q2=self.Q2
R=self.R
R2=self.R2
R3=self.R3
R5=self.R5
Y=self.Y
D=self.D
TT=self.TT
ALX=self.ALX
ALE=self.ALE
X11=self.X11
Y11=self.Y11
X32=self.X32
Y32=self.Y32
EY=self.EY
EZ=self.EZ
FY=self.FY
FZ=self.FZ
GY=self.GY
GZ=self.GZ
HY=self.HY
HZ=self.HZ
#-----
for I in range(0,12):
U[I]=F0
XY=XI*Y11
QX=Q *X11
QY=Q *Y11
#======================================
#===== STRIKE-SLIP CONTRIBUTION =====
#======================================
if DISL1 != F0 :
DU[0]= TT/F2 +ALP2*XI*QY
DU[1]= ALP2*Q/R
DU[2]= ALP1*ALE -ALP2*Q*QY
DU[3]=-ALP1*QY -ALP2*XI2*Q*Y32
DU[4]= -ALP2*XI*Q/R3
DU[5]= ALP1*XY +ALP2*XI*Q2*Y32
DU[6]= ALP1*XY*SD +ALP2*XI*FY+D/F2*X11
DU[7]= ALP2*EY
DU[8]= ALP1*(CD/R+QY*SD) -ALP2*Q*FY
DU[9]= ALP1*XY*CD +ALP2*XI*FZ+Y/F2*X11
DU[10]= ALP2*EZ
DU[11]=-ALP1*(SD/R-QY*CD) -ALP2*Q*FZ
for I in range(0,12):
U[I]=U[I]+DISL1/PI2*DU[I]
#======================================
#===== DIP-SLIP CONTRIBUTION =====
#======================================
if DISL2!=F0:
DU[0]= ALP2*Q/R
DU[1]= TT/F2 +ALP2*ET*QX
DU[2]= ALP1*ALX -ALP2*Q*QX
DU[3]= -ALP2*XI*Q/R3
DU[4]= -QY/F2 -ALP2*ET*Q/R3
DU[5]= ALP1/R +ALP2*Q2/R3
DU[6]= ALP2*EY
DU[7]= ALP1*D*X11+XY/F2*SD +ALP2*ET*GY
DU[8]= ALP1*Y*X11 -ALP2*Q*GY
DU[9]= ALP2*EZ
DU[10]= ALP1*Y*X11+XY/F2*CD +ALP2*ET*GZ
DU[11]=-ALP1*D*X11 -ALP2*Q*GZ
for I in range(0,12):
U[I]=U[I]+DISL2/PI2*DU[I]
#========================================
#===== TENSILE-FAULT CONTRIBUTION =====
#========================================
if DISL3!=F0:
DU[0]=-ALP1*ALE -ALP2*Q*QY
DU[1]=-ALP1*ALX -ALP2*Q*QX
DU[2]= TT/F2 -ALP2*(ET*QX+XI*QY)
DU[3]=-ALP1*XY +ALP2*XI*Q2*Y32
DU[4]=-ALP1/R +ALP2*Q2/R3
DU[5]=-ALP1*QY -ALP2*Q*Q2*Y32
DU[6]=-ALP1*(CD/R+QY*SD) -ALP2*Q*FY
DU[7]=-ALP1*Y*X11 -ALP2*Q*GY
DU[8]= ALP1*(D*X11+XY*SD) +ALP2*Q*HY
DU[9]= ALP1*(SD/R-QY*CD) -ALP2*Q*FZ
DU[10]= ALP1*D*X11 -ALP2*Q*GZ
DU[11]= ALP1*(Y*X11+XY*CD) +ALP2*Q*HZ
for I in range(0,12):
U[I]=U[I]+DISL3/PI2*DU[I]
#for I in range (0,12):
#DUA[I]=U[I]
self.DUA=U
def UB(self,XI,ET,Q,DISL1,DISL2,DISL3):
from math import sqrt, atan,log
#SUBROUTINE UB(XI,ET,Q,DISL1,DISL2,DISL3,U)
# IMPLICIT REAL*8 (A-H,O-Z)
# DIMENSION U(12),DU(12)
"""********************************************************************
C***** DISPLACEMENT AND STRAIN AT DEPTH (PART-B) *****
C***** DUE TO BURIED FINITE FAULT IN A SEMIINFINITE MEDIUM *****
C********************************************************************
C
C***** INPUT
C***** XI,ET,Q : STATION COORDINATES IN FAULT SYSTEM
C***** DISL1-DISL3 : STRIKE-, DIP-, TENSILE-DISLOCATIONS
C***** OUTPUT
C***** U(12) : DISPLACEMENT AND THEIR DERIVATIVES """
# COMMON /C0/ALP1,ALP2,ALP3,ALP4,ALP5,SD,CD,SDSD,CDCD,SDCD,S2D,C2D
# COMMON /C2/XI2,ET2,Q2,R,R2,R3,R5,Y,D,TT,ALX,ALE,X11,Y11,X32,Y32,
# * EY,EZ,FY,FZ,GY,GZ,HY,HZ
# DATA F0,F1,F2,PI2/0.D0,1.D0,2.D0,6.283185307179586D0/
DUB=num.zeros(12,num.float)
DU=num.zeros(12,num.float)
U=num.zeros(12,num.float)
F0=0.0
F1=1.0
F2=2.0
PI2=6.283185307179586
ALP1=self.ALP1
ALP2=self.ALP2
ALP3=self.ALP3
ALP4=self.ALP4
ALP5=self.ALP4
SD=self.SD
CD=self.CD
SDSD=self.SDSD
CDCD=self.CDCD
SDCD=self.SDCD
S2D=self.S2D
C2D=self.C2D
XI2=self.XI2
ET2=self.ET2
Q2=self.Q2
R=self.R
R2=self.R2
R3=self.R3
R5=self.R5
Y=self.Y
D=self.D
TT=self.TT
ALX=self.ALX
ALE=self.ALE
X11=self.X11
Y11=self.Y11
X32=self.X32
Y32=self.Y32
EY=self.EY
EZ=self.EZ
FY=self.FY
FZ=self.FZ
GY=self.GY
GZ=self.GZ
HY=self.HY
HZ=self.HZ
RD=R+D
D11=F1/(R*RD)
AJ2=XI*Y/RD*D11
AJ5=-(D+Y*Y/RD)*D11
if CD!=F0:
if XI==F0:
AI4=F0
else:
X=sqrt(XI2+Q2)
AI4=(F1/CDCD*( XI/RD*SDCD
+F2*atan((ET*(X+Q*CD)+X*(R+X)*SD)/(XI*(R+X)*CD)) ))
AI3=(Y*CD/RD-ALE+SD*log(RD))/CDCD
AK1=XI*(D11-Y11*SD)/CD
AK3=(Q*Y11-Y*D11)/CD
AJ3=(AK1-AJ2*SD)/CD
AJ6=(AK3-AJ5*SD)/CD
else:
RD2=RD*RD
AI3=(ET/RD+Y*Q/RD2-ALE)/F2
AI4=XI*Y/RD2/F2
AK1=XI*Q/RD*D11
AK3=SD/RD*(XI2*D11-F1)
AJ3=-XI/RD2*(Q2*D11-F1/F2)
AJ6=-Y/RD2*(XI2*D11-F1/F2)
#-----
XY=XI*Y11
AI1=-XI/RD*CD-AI4*SD
AI2= log(RD)+AI3*SD
AK2= F1/R+AK3*SD
AK4= XY*CD-AK1*SD
AJ1= AJ5*CD-AJ6*SD
AJ4=-XY-AJ2*CD+AJ3*SD
#=====
for I in range(0,12):
U[I]=F0
QX=Q*X11
QY=Q*Y11
#======================================
#===== STRIKE-SLIP CONTRIBUTION =====
#======================================
if DISL1!=F0:
DU[0]=-XI*QY-TT -ALP3*AI1*SD
DU[1]=-Q/R +ALP3*Y/RD*SD
DU[2]= Q*QY -ALP3*AI2*SD
DU[3]= XI2*Q*Y32 -ALP3*AJ1*SD
DU[4]= XI*Q/R3 -ALP3*AJ2*SD
DU[5]=-XI*Q2*Y32 -ALP3*AJ3*SD
DU[6]=-XI*FY-D*X11 +ALP3*(XY+AJ4)*SD
DU[7]=-EY +ALP3*(F1/R+AJ5)*SD
DU[8]= Q*FY -ALP3*(QY-AJ6)*SD
DU[9]=-XI*FZ-Y*X11 +ALP3*AK1*SD
DU[10]=-EZ +ALP3*Y*D11*SD
DU[11]= Q*FZ +ALP3*AK2*SD
for I in range(0,12):
U[I]=U[I]+DISL1/PI2*DU[I]
#======================================
#===== DIP-SLIP CONTRIBUTION =====
#======================================
if DISL2!=F0:
DU[0]=-Q/R +ALP3*AI3*SDCD
DU[1]=-ET*QX-TT -ALP3*XI/RD*SDCD
DU[2]= Q*QX +ALP3*AI4*SDCD
DU[3]= XI*Q/R3 +ALP3*AJ4*SDCD
DU[4]= ET*Q/R3+QY +ALP3*AJ5*SDCD
DU[5]=-Q2/R3 +ALP3*AJ6*SDCD
DU[6]=-EY +ALP3*AJ1*SDCD
DU[7]=-ET*GY-XY*SD +ALP3*AJ2*SDCD
DU[8]= Q*GY +ALP3*AJ3*SDCD
DU[9]=-EZ -ALP3*AK3*SDCD
DU[10]=-ET*GZ-XY*CD -ALP3*XI*D11*SDCD
DU[11]= Q*GZ -ALP3*AK4*SDCD
for I in range(0,12):
U[I]=U[I]+DISL2/PI2*DU[I]
#========================================
#===== TENSILE-FAULT CONTRIBUTION =====
#========================================
if DISL3!=F0:
DU[0]= Q*QY -ALP3*AI3*SDSD
DU[1]= Q*QX +ALP3*XI/RD*SDSD
DU[2]= ET*QX+XI*QY-TT -ALP3*AI4*SDSD
DU[3]=-XI*Q2*Y32 -ALP3*AJ4*SDSD
DU[4]=-Q2/R3 -ALP3*AJ5*SDSD
DU[5]= Q*Q2*Y32 -ALP3*AJ6*SDSD
DU[6]= Q*FY -ALP3*AJ1*SDSD
DU[7]= Q*GY -ALP3*AJ2*SDSD
DU[8]=-Q*HY -ALP3*AJ3*SDSD
DU[9]= Q*FZ +ALP3*AK3*SDSD
DU[10]= Q*GZ +ALP3*XI*D11*SDSD
DU[11]=-Q*HZ +ALP3*AK4*SDSD
for I in range(0,12):
U[I]=U[I]+DISL3/PI2*DU[I]
#for I in range(0,12):
#DUB[I]=U[I]
self.DUB=U
def UC(self,XI,ET,Q,Z,DISL1,DISL2,DISL3):
# SUBROUTINE UC(XI,ET,Q,Z,DISL1,DISL2,DISL3,U)
# IMPLICIT REAL*8 (A-H,O-Z)
# DIMENSION U(12),DU(12)
"""********************************************************************
C***** DISPLACEMENT AND STRAIN AT DEPTH (PART-C) *****
C***** DUE TO BURIED FINITE FAULT IN A SEMIINFINITE MEDIUM *****
C********************************************************************
C
C***** INPUT
C***** XI,ET,Q,Z : STATION COORDINATES IN FAULT SYSTEM
C***** DISL1-DISL3 : STRIKE-, DIP-, TENSILE-DISLOCATIONS
C***** OUTPUT
C***** U(12) : DISPLACEMENT AND THEIR DERIVATIVES """
# COMMON /C0/ALP1,ALP2,ALP3,ALP4,ALP5,SD,CD,SDSD,CDCD,SDCD,S2D,C2D
# COMMON /C2/XI2,ET2,Q2,R,R2,R3,R5,Y,D,TT,ALX,ALE,X11,Y11,X32,Y32,
# * EY,EZ,FY,FZ,GY,GZ,HY,HZ
# DATA F0,F1,F2,F3,PI2/0.D0,1.D0,2.D0,3.D0,6.283185307179586D0/
DUC=num.zeros(12,num.float)
DU=num.zeros(12,num.float)
U=num.zeros(12,num.float)
F0=0.0
F1=1.0
F2=2.0
F3=3.0
PI2=6.283185307179586
ALP1=self.ALP1
ALP2=self.ALP2
ALP3=self.ALP3
ALP4=self.ALP4
ALP5=self.ALP4
SD=self.SD
CD=self.CD
SDSD=self.SDSD
CDCD=self.CDCD
SDCD=self.SDCD
S2D=self.S2D
C2D=self.C2D
XI2=self.XI2
ET2=self.ET2
Q2=self.Q2
R=self.R
R2=self.R2
R3=self.R3
R5=self.R5
Y=self.Y
D=self.D
TT=self.TT
ALX=self.ALX
ALE=self.ALE
X11=self.X11
Y11=self.Y11
X32=self.X32
Y32=self.Y32
EY=self.EY
EZ=self.EZ
FY=self.FY
FZ=self.FZ
GY=self.GY
GZ=self.GZ
HY=self.HY
HZ=self.HZ
#-----
C=D+Z
X53=(8.0*R2+9.0*R*XI+F3*XI2)*X11*X11*X11/R2
Y53=(8.0*R2+9.0*R*ET+F3*ET2)*Y11*Y11*Y11/R2
H=Q*CD-Z
Z32=SD/R3-H*Y32
Z53=F3*SD/R5-H*Y53
Y0=Y11-XI2*Y32
Z0=Z32-XI2*Z53
PPY=CD/R3+Q*Y32*SD
PPZ=SD/R3-Q*Y32*CD
QQ=Z*Y32+Z32+Z0
QQY=F3*C*D/R5-QQ*SD
QQZ=F3*C*Y/R5-QQ*CD+Q*Y32
XY=XI*Y11
QX=Q*X11
QY=Q*Y11
QR=F3*Q/R5
CQX=C*Q*X53
CDR=(C+D)/R3
YY0=Y/R3-Y0*CD
#=====
for I in range(1,12):
U[I]=F0
#======================================
#===== STRIKE-SLIP CONTRIBUTION =====
#======================================
if DISL1!=F0:
DU[0]= ALP4*XY*CD -ALP5*XI*Q*Z32
DU[1]= ALP4*(CD/R+F2*QY*SD) -ALP5*C*Q/R3
DU[2]= ALP4*QY*CD -ALP5*(C*ET/R3-Z*Y11+XI2*Z32)
DU[3]= ALP4*Y0*CD -ALP5*Q*Z0
DU[4]=-ALP4*XI*(CD/R3+F2*Q*Y32*SD) +ALP5*C*XI*QR
DU[5]=-ALP4*XI*Q*Y32*CD +ALP5*XI*(F3*C*ET/R5-QQ)
DU[6]=-ALP4*XI*PPY*CD -ALP5*XI*QQY
DU[7]= (ALP4*F2*(D/R3-Y0*SD)*SD-Y/R3*CD
-ALP5*(CDR*SD-ET/R3-C*Y*QR))
DU[8]=-ALP4*Q/R3+YY0*SD +ALP5*(CDR*CD+C*D*QR-(Y0*CD+Q*Z0)*SD)
DU[9]= ALP4*XI*PPZ*CD -ALP5*XI*QQZ
DU[10]= ALP4*F2*(Y/R3-Y0*CD)*SD+D/R3*CD -ALP5*(CDR*CD+C*D*QR)
DU[11]= YY0*CD -ALP5*(CDR*SD-C*Y*QR-Y0*SDSD+Q*Z0*CD)
for I in range(0,12):
U[I]=U[I]+DISL1/PI2*DU[I]
#======================================
#===== DIP-SLIP CONTRIBUTION =====
#======================================
if DISL2!=F0 :
DU[0]= ALP4*CD/R -QY*SD -ALP5*C*Q/R3
DU[1]= ALP4*Y*X11 -ALP5*C*ET*Q*X32
DU[2]= -D*X11-XY*SD -ALP5*C*(X11-Q2*X32)
DU[3]=-ALP4*XI/R3*CD +ALP5*C*XI*QR +XI*Q*Y32*SD
DU[4]=-ALP4*Y/R3 +ALP5*C*ET*QR
DU[5]= D/R3-Y0*SD +ALP5*C/R3*(F1-F3*Q2/R2)
DU[6]=-ALP4*ET/R3+Y0*SDSD -ALP5*(CDR*SD-C*Y*QR)
DU[7]= ALP4*(X11-Y*Y*X32) -ALP5*C*((D+F2*Q*CD)*X32-Y*ET*Q*X53)
DU[8]= XI*PPY*SD+Y*D*X32 +ALP5*C*((Y+F2*Q*SD)*X32-Y*Q2*X53)
DU[9]= -Q/R3+Y0*SDCD -ALP5*(CDR*CD+C*D*QR)
DU[10]= ALP4*Y*D*X32 -ALP5*C*((Y-F2*Q*SD)*X32+D*ET*Q*X53)
DU[11]=-XI*PPZ*SD+X11-D*D*X32-ALP5*C*((D-F2*Q*CD)*X32-D*Q2*X53)
for I in range(0,12):
U[I]=U[I]+DISL2/PI2*DU[I]
#========================================
#===== TENSILE-FAULT CONTRIBUTION =====
#========================================
if DISL3!=F0:
DU[0]=-ALP4*(SD/R+QY*CD) -ALP5*(Z*Y11-Q2*Z32)
DU[1]= ALP4*F2*XY*SD+D*X11 -ALP5*C*(X11-Q2*X32)
DU[2]= ALP4*(Y*X11+XY*CD) +ALP5*Q*(C*ET*X32+XI*Z32)
DU[3]= ALP4*XI/R3*SD+XI*Q*Y32*CD+ALP5*XI*(F3*C*ET/R5-F2*Z32-Z0)
DU[4]= ALP4*F2*Y0*SD-D/R3 +ALP5*C/R3*(F1-F3*Q2/R2)
DU[5]=-ALP4*YY0 -ALP5*(C*ET*QR-Q*Z0)
DU[6]= ALP4*(Q/R3+Y0*SDCD) +ALP5*(Z/R3*CD+C*D*QR-Q*Z0*SD)
DU[7]=(-ALP4*F2*XI*PPY*SD-Y*D*X32
+ALP5*C*((Y+F2*Q*SD)*X32-Y*Q2*X53))
DU[8]=(-ALP4*(XI*PPY*CD-X11+Y*Y*X32)
+ALP5*(C*((D+F2*Q*CD)*X32-Y*ET*Q*X53)+XI*QQY))
DU[9]= -ET/R3+Y0*CDCD -ALP5*(Z/R3*SD-C*Y*QR-Y0*SDSD+Q*Z0*CD)
DU[10]= (ALP4*F2*XI*PPZ*SD-X11+D*D*X32
-ALP5*C*((D-F2*Q*CD)*X32-D*Q2*X53))
DU[11]= (ALP4*(XI*PPZ*CD+Y*D*X32)
+ALP5*(C*((Y-F2*Q*SD)*X32+D*ET*Q*X53)+XI*QQZ))
for I in range(0,12):
U[I]=U[I]+DISL3/PI2*DU[I]
#for I in range(0,12):
#DUC[I]=U[I]
self.DUC=U
def DCC0N0(self,ALPHA,DIP):
from math import sin, cos
# SUBROUTINE DCCON0(ALPHA,DIP)
# IMPLICIT REAL*8 (A-H,O-Z)
#
"""*******************************************************************
C***** CALCULATE MEDIUM CONSTANTS AND FAULT-DIP CONSTANTS *****
C*******************************************************************
C
C***** INPUT
C***** ALPHA : MEDIUM CONSTANT (LAMBDA+MYU)/(LAMBDA+2*MYU)
C***** DIP : DIP-ANGLE (DEGREE)
C### CAUTION ### IF COS(DIP) IS SUFFICIENTLY SMALL, IT IS SET TO ZERO """
# COMMON /C0/ALP1,ALP2,ALP3,ALP4,ALP5,SD,CD,SDSD,CDCD,SDCD,S2D,C2D
# DATA F0,F1,F2,PI2/0.D0,1.D0,2.D0,6.283185307179586D0/
# DATA EPS/1.D-6/
F0=0.0
F1=1.0
F2=2.0
PI2=6.283185307179586
EPS=1.0e-6
#-----
ALP1=(F1-ALPHA)/F2
ALP2= ALPHA/F2
ALP3=(F1-ALPHA)/ALPHA
ALP4= F1-ALPHA
ALP5= ALPHA
#print ALP1
#-----
P18=PI2/360.0
SD=sin(DIP*P18)
CD=cos(DIP*P18)
if abs(CD)<EPS:
CD=F0
if SD>F0: SD= F1
if SD<F0: SD=-F1
SDSD=SD*SD
CDCD=CD*CD
SDCD=SD*CD
S2D=F2*SDCD
C2D=CDCD-SDSD
self.ALP1=ALP1
self.ALP2=ALP2
self.ALP3=ALP3
self.ALP4=ALP4
self.ALP5=ALP5
self.SD=SD
self.CD=CD
self.SDSD=SDSD
self.CDCD=CDCD
self.SDCD=SDCD
self.S2D=S2D
self.C2D=C2D
    def DCCON1(self,X,Y,D):
        from math import sqrt
        # SUBROUTINE DCCON1(X,Y,D)
        # IMPLICIT REAL*8 (A-H,O-Z)
        """Calculate station geometry constants for a point source (Okada DCCON1).

        X, Y, D are the station coordinates in the fault system; values
        smaller than EPS in magnitude are snapped to exactly zero.  Reads
        the dip constants SD/CD stored by DCC0N0 and stores P, Q, S, T,
        XY, X2, Y2, D2, R, R2, R3, R5, R7, A3, A5, B3, C3, QR, QRX, UY,
        UZ, VY, VZ, WY and WZ on self for use by UA0/UB0/UC0.
        """
        # COMMON /C0/DUMMY(5),SD,CD,dumm(5)
        # COMMON /C1/P,Q,S,T,XY,X2,Y2,D2,R,R2,R3,R5,QR,QRX,A3,A5,B3,C3,
        #            UY,VY,WY,UZ,VZ,WZ
        F0=0.0
        F1=1.0
        F3=3.0
        F5=5.0
        EPS=1.0e-6
        SD=self.SD
        CD=self.CD
        #-----
        # Snap tiny coordinates to exactly zero (matches the Fortran original).
        if abs(X)<EPS: X=F0
        if abs(Y)<EPS: Y=F0
        if abs(D)<EPS: D=F0
        # Fault-system projections (P,Q) and their rotations (S,T).
        P=Y*CD+D*SD
        Q=Y*SD-D*CD
        S=P*SD+Q*CD
        T=P*CD-Q*SD
        XY=X*Y
        X2=X*X
        Y2=Y*Y
        D2=D*D
        R2=X2+Y2+D2
        R =sqrt(R2)
        # NOTE(review): on R==0 (station at the source) nothing is stored on
        # self, so a caller reading e.g. self.R afterwards may see a stale
        # value from a previous call -- confirm callers guard this case.
        if R==F0: return
        R3=R *R2
        R5=R3*R2
        R7=R5*R2
        #-----
        # Dimensionless anisotropy factors A3/A5/B3/C3.
        A3=F1-F3*X2/R2
        A5=F1-F5*X2/R2
        B3=F1-F3*Y2/R2
        C3=F1-F3*D2/R2
        #-----
        QR=F3*Q/R5
        QRX=F5*QR*X/R2
        #-----
        # Free-surface correction terms.
        UY=SD-F5*Y*Q/R2
        UZ=CD+F5*D*Q/R2
        VY=S -F5*Y*P*Q/R2
        VZ=T +F5*D*P*Q/R2
        WY=UY+SD
        WZ=UZ+CD
        self.P=P
        self.Q=Q
        self.S=S
        self.T=T
        self.XY=XY
        self.X2=X2
        self.Y2=Y2
        self.D2=D2
        self.R2=R2
        self.R=R
        self.R3=R3
        self.R5=R5
        self.R7=R7
        self.A3=A3
        self.A5=A5
        self.B3=B3
        self.C3=C3
        self.QR=QR
        self.QRX=QRX
        self.UY=UY
        self.UZ=UZ
        self.VZ=VZ
        self.VY=VY
        self.WY=WY
        self.WZ=WZ
    def DCCON2(self,XI,ET,Q,SD,CD,KXI,KET):
        from math import sqrt,atan,log
        # SUBROUTINE DCCON2(XI,ET,Q,SD,CD,KXI,KET)
        # IMPLICIT REAL*8 (A-H,O-Z)
        """Calculate station geometry constants for a finite source (Okada DCCON2).

        XI, ET, Q : station coordinates in the fault system (magnitudes
                    below EPS are snapped to exactly zero).
        SD, CD    : sin and cos of the dip angle.
        KXI, KET  : flags; KXI=1 / KET=1 mean R+XI<EPS / R+ET<EPS, i.e.
                    the station lies on the negative extension of a fault
                    edge where log(R+XI) / log(R+ET) would be singular.

        Stores XI2, ET2, Q2, R, R2, R3, R5, Y, D, TT, ALX, ALE, X11, Y11,
        X32, Y32 and EY..HZ on self for use by UA/UB/UC.
        """
        F0=0.0
        F1=1.0
        F2=2.0
        EPS=1.0e-6
        #-----
        # Snap tiny inputs to exactly zero (matches the Fortran original).
        if abs(XI)<EPS: XI=F0
        if abs(ET)<EPS: ET=F0
        if abs(Q)<EPS: Q=F0
        XI2=XI*XI
        ET2=ET*ET
        Q2=Q*Q
        R2=XI2+ET2+Q2
        R =sqrt(R2)
        # NOTE(review): on R==0 nothing is stored on self, so callers may
        # see stale values from a previous call -- DC3D is expected to have
        # rejected the singular case beforehand; confirm other callers do.
        if R==F0: return
        R3=R *R2
        R5=R3*R2
        Y =ET*CD+Q*SD
        D =ET*SD-Q*CD
        #-----
        # TT = theta term; atan is singular at Q==0, handled explicitly.
        if Q==F0:
            TT=F0
        else:
            TT=atan(XI*ET/(Q*R))
        #-----
        # ALX/X11/X32 in the xi direction; the KXI==1 branch switches to
        # the numerically stable form -log(R-XI) near the singular line.
        if KXI==1:
            ALX=-log(R-XI)
            X11=F0
            X32=F0
        else:
            RXI=R+XI
            ALX=log(RXI)
            X11=F1/(R*RXI)
            X32=(R+RXI)*X11*X11/R
        #-----
        # Same construction in the eta direction.
        if KET==1:
            ALE=-log(R-ET)
            Y11=F0
            Y32=F0
        else:
            RET=R+ET
            ALE=log(RET)
            Y11=F1/(R*RET)
            Y32=(R+RET)*Y11*Y11/R
        #-----
        # E/F/G/H kernel terms shared by UA, UB and UC.
        EY=SD/R-Y*Q/R3
        EZ=CD/R+D*Q/R3
        FY=D/R3+XI2*Y32*SD
        FZ=Y/R3+XI2*Y32*CD
        GY=F2*X11*SD-Y*Q*X32
        GZ=F2*X11*CD+D*Q*X32
        HY=D*Q*X32+XI*Q*Y32*SD
        HZ=Y*Q*X32+XI*Q*Y32*CD
        self.XI2=XI2
        self.Q2=Q2
        self.R=R
        self.R2=R2
        self.R3=R3
        self.R5=R5
        self.Y=Y
        self.D=D
        self.TT=TT
        self.ALX=ALX
        self.ALE=ALE
        self.X11=X11
        self.Y11=Y11
        self.X32=X32
        self.Y32=Y32
        self.EY=EY
        self.EZ=EZ
        self.FY=FY
        self.FZ=FZ
        self.GY=GY
        self.GZ=GZ
        self.HY=HY
        self.HZ=HZ
        self.ET2=ET2
|
import os
import tempfile
from detect_secrets import SecretsCollection
from detect_secrets.settings import default_settings
from prowler.lib.check.models import Check, Check_Report_AWS
from prowler.providers.aws.services.awslambda.awslambda_client import awslambda_client
class awslambda_function_no_secrets_in_code(Check):
    """Check whether AWS Lambda function code contains hard-coded secrets.

    For each Lambda function whose code package is available, the ZIP is
    extracted to a temporary directory and every extracted file is scanned
    with detect-secrets. One report per function: FAIL as soon as any file
    yields a potential secret, PASS otherwise.
    """

    def execute(self):
        findings = []
        for function in awslambda_client.functions.values():
            if function.code:
                report = Check_Report_AWS(self.metadata())
                report.region = function.region
                report.resource_id = function.name
                report.resource_arn = function.arn
                report.status = "PASS"
                report.status_extended = (
                    f"No secrets found in Lambda function {function.name} code"
                )
                with tempfile.TemporaryDirectory() as tmp_dir_name:
                    function.code.code_zip.extractall(tmp_dir_name)
                    # Walk the whole extracted tree. The previous version only
                    # looked at top-level files (next(os.walk(...))[2]), so any
                    # code nested in sub-directories of the deployment package
                    # was silently skipped.
                    secret_found = False
                    for root, _dirs, files in os.walk(tmp_dir_name):
                        for file in files:
                            secrets = SecretsCollection()
                            with default_settings():
                                secrets.scan_file(os.path.join(root, file))
                            if secrets.json():
                                report.status = "FAIL"
                                report.status_extended = f"Potential secret found in Lambda function {function.name} code"
                                secret_found = True
                                break
                        if secret_found:
                            break
                findings.append(report)
        return findings
|
# This code snippet is adapated from AWS ECS Documentation
# The plan is 'create' environments by uploading Secrets to AWS Secrets Manager
# and retrieve the secrets upon deployment.
# The idea is the container runner will have an assumed IAM role granting access
# specifically to the secret
import os
import logging
from dcicutils.misc_utils import override_environ
from dcicutils.deployment_utils import BasicOrchestratedFourfrontIniFileManager
from dcicutils.secrets_utils import assume_identity
from dcicutils.env_utils import EnvUtils
logging.basicConfig(level=logging.INFO)
# NOTE(review): logging.getLogger(__name__) is the usual convention; using
# __file__ names the logger after the script's path instead -- confirm intent.
logger = logging.getLogger(__file__)
_MY_DIR = os.path.dirname(__file__)  # directory containing this script
class FourfrontDockerIniFileManager(BasicOrchestratedFourfrontIniFileManager):
    """ This runs at top level, so path is slightly different. """
    # should work but doesn't (missing fourfront): os.path.join(os.path.dirname(_MY_DIR), "pyproject.toml")
    # expected = <hardwired>
    # actual = <computed>
    # assert actual == expected, "The actual value %s was not what we expected, %s." % (actual, expected)
    # Paths are hard-wired to the Docker image layout (/home/nginx/fourfront),
    # overriding whatever the base class would compute.
    TEMPLATE_DIR = '/home/nginx/fourfront/deploy/ini_files'
    PYPROJECT_FILE_NAME = '/home/nginx/fourfront/pyproject.toml'
def build_production_ini_from_global_application_configuration():
    """ This function makes a request to secrets manager for the identity passed to the container.
    See documentation on API in dcicutils.

    Side effects: renders /home/nginx/fourfront/production.ini from the
    fourfront_any_alpha.ini template using the retrieved identity.
    """
    identity = assume_identity()
    # build production.ini
    # Temporarily export every key of the identity as an environment variable
    # while the ini file is rendered, so the template can interpolate them.
    with override_environ(**identity):
        # load env_utils
        EnvUtils.init()
        # TODO: this probably needs configuring but minimal
        FourfrontDockerIniFileManager.build_ini_file_from_template(
            '/home/nginx/fourfront/deploy/ini_files/fourfront_any_alpha.ini',
            '/home/nginx/fourfront/production.ini'
        )
# Script entry point: render production.ini when run inside the container.
if __name__ == '__main__':
    build_production_ini_from_global_application_configuration()
|
from django.contrib import admin
from import_export.admin import ImportExportModelAdmin
from lifequotesbot.models import Questions, Answers, ChatRecord, NoRecordFoundResponse
# Register your models here.
class AdminQuestions(ImportExportModelAdmin):
    """Admin for question keywords, with CSV/XLSX import-export support."""
    list_display = ('question_keyword',)
    search_fields = ('question_keyword',)
class AdminAnswers(ImportExportModelAdmin):
    """Admin for answers; searchable by related keyword text and answer body."""
    list_display = ('question_keyword', 'answer')
    search_fields = ('question_keyword__question_keyword', 'answer')
class AdminNoRecordFoundResponse(ImportExportModelAdmin):
    """Admin for the fallback responses used when no answer matches."""
    list_display = ('responsetext', )
    search_fields = ('responsetext', )
class AdminChatRecord(ImportExportModelAdmin):
    """Admin for the chat log (user, message, bot response, timestamp)."""
    list_display = ('fb_user', 'message', 'response', 'timestamp')
    search_fields = ('fb_user', 'message', 'response', 'timestamp')
# Register each model with its customised admin class.
admin.site.register(Questions, AdminQuestions)
admin.site.register(Answers, AdminAnswers)
admin.site.register(NoRecordFoundResponse, AdminNoRecordFoundResponse)
admin.site.register(ChatRecord, AdminChatRecord)
def evenOdd(number):
if number%2 == 0:
print('Even Number')
else:
print('Odd Number')
def factorial(number):
f = 1
for i in range(1,number+1):
f = f * i
print(f) |
import tensorflow as tf
#from tensorflow.python.ops import ctc_ops as ctc
import numpy as np
import random
import time
import datetime
from PIL import Image
from tools import DataLoader
from tools import CharactorSource

# Inference script for a 3-layer 1-D ConvLSTM + CTC text recogniser.
# NOTE(review): TensorFlow 1.x only (tf.contrib, placeholders, sessions);
# will not run on TF 2.x without migration.

# Load one grayscale image and reshape it to (width, 50, 1): the network is
# time-major over the image width, 50-pixel-high columns are the features.
input_image_path = '/home/melt61/PictureGenerator/GenImage10/423.jpg'
input_image = Image.open(input_image_path).convert('L')
input_image.show()
input_image = np.asarray(input_image)
input_image = input_image.transpose(1, 0)
input_length = input_image.shape[0]
input_image = input_image.reshape(input_length, 50, 1)
network_input = []
network_input.append(input_image)
input_seq_len = []
input_seq_len.append(input_image.shape[0])
#learning parameters
learning_rate = 0.001
beta1 = 0.9
beta2 = 0.99
epochs = 100
batch_size = 1
validation_steps = 500
#network parameters
input_size = 50
hidden_neuron = 512
layer_num = 2
num_classes = 96
#num_classes = 5689
fc_hidden_neuron = 256
#define graph
graph_1 = tf.Graph()
with graph_1.as_default():
    #input
    inputs = tf.placeholder(tf.float32,[batch_size, None, input_size]+[1])
    labels = tf.sparse_placeholder(tf.int32)
    seq_len = tf.placeholder(tf.int32,[None])
    in_shape = tf.shape(inputs)
    batch_s, max_timesteps = in_shape[0], in_shape[1]
    #build LSTM
    # Three stacked 1-D ConvLSTM layers: 32 -> 64 -> 128 output channels.
    rnn_cell_layer_1 = tf.contrib.rnn.ConvLSTMCell(conv_ndims=1, input_shape=[50, 1], output_channels=32, kernel_shape=[3], name='convlstm_layer_01')
    #initial_state = rnn_cells.zero_state(batch_size, dtype=tf.float32)
    #stacked_rnn_cell = tf.contrib.rnn.MultiRNNCell(rnn_cells)
    output_1, _= tf.nn.dynamic_rnn(rnn_cell_layer_1, inputs, dtype=tf.float32, scope='convlstm_layer_01')
    rnn_cell_layer_2 = tf.contrib.rnn.ConvLSTMCell(conv_ndims=1, input_shape=[50, 32], output_channels=64, kernel_shape=[3], name='convlstm_layer_02')
    output_2, _= tf.nn.dynamic_rnn(rnn_cell_layer_2, output_1, dtype=tf.float32, scope='convlstm_layer_02')
    rnn_cell_layer_3 = tf.contrib.rnn.ConvLSTMCell(conv_ndims=1, input_shape=[50, 64], output_channels=128, kernel_shape=[3], name='convlstm_layer_03')
    #stacked_rnn_cell = tf.contrib.rnn.MultiRNNCell([rnn_cell_layer_1, rnn_cell_layer_2, rnn_cell_layer_3])
    outputs, _= tf.nn.dynamic_rnn(rnn_cell_layer_3, output_2, dtype=tf.float32, scope='convlstm_layer_03')
    #outputs, _ , _= tf.contrib.rnn.stack_bidirectional_dynamic_rnn(cells_fw = rnn_cells_fw,
    # cells_bw = rnn_cells_bw,
    # inputs = inputs, sequence_length = seq_len, dtype = tf.float32)
    #classcification process
    # Flatten each timestep's 50x128 feature map for the final linear layer.
    outputs = tf.reshape(outputs, [-1, 50*128])
    #wh = tf.get_variable(name = 'wh',
    # shape = [hidden_neuron, fc_hidden_neuron],
    # dtype = tf.float32,
    # initializer = tf.contrib.layers.xavier_initializer())
    #bh = tf.get_variable(name = 'bh',
    # shape = [fc_hidden_neuron],
    # dtype = tf.float32,
    # initializer = tf.constant_initializer())
    w = tf.get_variable(name = 'w',
                        shape = [50*128, num_classes],
                        dtype = tf.float32,
                        initializer = tf.contrib.layers.xavier_initializer())
    b = tf.get_variable(name = 'b',
                        shape = [num_classes],
                        dtype = tf.float32,
                        initializer = tf.constant_initializer())
    #logits = tf.matmul(outputs, wh) + bh
    #logits = tf.nn.tanh(logits)
    logits = tf.matmul(outputs, w) + b
    logits = tf.reshape(logits, [batch_s, -1, num_classes])
    # CTC expects time-major logits: (time, batch, classes).
    logits = tf.transpose(logits, [1, 0, 2])
    #ctc loss
    global_step = tf.Variable(0, trainable = False)
    loss = tf.nn.ctc_loss(labels = labels, inputs = logits, sequence_length = seq_len)
    cost = tf.reduce_mean(loss)
    tf.summary.scalar('cost', cost)
    #optimizer
    optimizer = tf.train.AdamOptimizer(learning_rate = learning_rate,
                                       beta1 = beta1,
                                       beta2 = beta2).minimize(loss,
                                                               global_step = global_step)
    #ctc decoder
    decoded, log_prob = tf.nn.ctc_beam_search_decoder(logits, seq_len, merge_repeated = False)
    dense_decoded = tf.sparse_tensor_to_dense(decoded[0], default_value = -1)
    #error rate
    acc_rate = tf.reduce_mean(tf.edit_distance(tf.cast(decoded[0], tf.int32), labels))
    merged_summay = tf.summary.merge_all()
# Restore the trained checkpoint and decode the single input image on CPU.
with tf.device('/cpu:0'):
    config = tf.ConfigProto(allow_soft_placement = True)
    with tf.Session(config = config,graph=graph_1) as sess:
        saver = tf.train.Saver()
        saver.restore(sess, './model/model_05(convlstm_eng)/model_05.ckpt')
        feeds = {
            inputs : network_input,
            seq_len : input_seq_len
        }
        predict_result = sess.run([dense_decoded], feeds)
        print("outputs: ", predict_result)
        # Map the integer class ids back to characters.
        ens_ins =CharactorSource.charactorsource()
        predict_characters = []
        for i in predict_result[0]:
            eng_char = ens_ins.eng_int2char(i)
            predict_characters.append(eng_char)
        print("predict: ",predict_characters)
|
import logging
import numpy
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity
class CosineSimilarity():
    """TF-IDF based cosine similarity between two text documents."""

    def cosinesimilarity(self, doc1, doc2):
        """Compute and log the pairwise cosine similarity of two text files.

        :param doc1: path to the first (reference) document, UTF-8 encoded
        :param doc2: path to the second document, UTF-8 encoded
        """
        logging.basicConfig(level=logging.DEBUG, filename='logs/cosine.logs', filemode='w')
        # Context managers close the files deterministically (the original
        # leaked both handles), and read() replaces the quadratic
        # line-by-line string concatenation with a single read.
        with open(doc1, 'r', encoding='utf8') as first_file:
            dataset1 = first_file.read()
        with open(doc2, 'r', encoding='utf8') as second_file:
            dataset2 = second_file.read()
        dataset = [dataset1, dataset2]
        vectorizer = TfidfVectorizer()
        X_tfidf = vectorizer.fit_transform(dataset)
        sims = cosine_similarity(X_tfidf, X_tfidf)
        # Rank documents by similarity to the first one, most similar first.
        rank = list(reversed(numpy.argsort(sims[0])))
        #logging.debug("\nTdidf: \n%s" % X_tfidf.toarray())
        logging.debug("\nSims: \n%s", sims)
        logging.debug("\nRank: \n%s", rank)
#继承
class Animal():
def eat(self):
print("好吃")
def drink(self):
print("好喝")
def __haode(self):
print("私有")
class Dog(Animal):
    """A Dog inherits Animal's public behaviour and adds jumping."""

    def tiao(self):
        """Print a jump action."""
        print("tiao")
# Demonstrate inherited (eat) and own (tiao) behaviour.
dog = Dog()
dog.eat()
dog.tiao()
class Cat(object):
def __init__(self, name, color="白色"):
self.name = name
self.color = color
def run(self):
print("%s--在跑"%self.name)
class Bosi(Cat):
    """A Persian cat that can be renamed and can eat."""

    def set_name(self, newName):
        """Replace this cat's name with *newName*."""
        self.name = newName

    def eat(self):
        """Print that this cat is eating."""
        print("%s--在吃" % self.name)
# Construct a Bosi (inherits Cat.__init__) and let it eat.
bs = Bosi("aa")
# bs.set_name(22)
bs.eat()
class base(object):
def test(self):
print('----base test----')
class A(base):
    """First subclass of base; overrides test()."""

    def test(self):
        print('----A test----')
# Second subclass of base, also overriding test().
class B(base):
    def test(self):
        print('----B test----')
# C inherits from both A and B; the MRO (C, A, B, base) picks A.test first.
class C(A, B):
    pass
obj_C = C()
obj_C.test()        # prints '----A test----' via the MRO
print(C.__mro__)    # show the full method resolution order
import tkinter
import segyio
from threading import Thread
from time import sleep
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg
import matplotlib.pyplot as plt
import numpy as np
'''Создаем граффический интерфейс, и вставляем в него 5 окон из матплотлиба'''
# (Build the GUI and embed five matplotlib canvases into it.)
root = tkinter.Tk()
root.wm_title("Track plotter")
# Figure for trace A1; spines are moved so the vertical axis is centred.
fig_A1, ax_A1 = plt.subplots(figsize=(3, 4))
'Перестовляем оси коардинат на нужные места'
# (Move the coordinate axes to the required positions.)
ax_A1.spines['left'].set_position('center')
ax_A1.spines['bottom'].set_position(('axes', 1))
ax_A1.spines['top'].set_visible(False)
ax_A1.spines['right'].set_visible(False)
canvas = FigureCanvasTkAgg(fig_A1, master=root)  # A tk.DrawingArea.
canvas.draw()
canvas.get_tk_widget().grid(row=1, column=0)
# Figure for trace A2, same spine layout.
fig_A2, ax_A2 = plt.subplots(figsize=(3, 4))
ax_A2.spines['left'].set_position('center')
ax_A2.spines['bottom'].set_position(('axes', 1))
ax_A2.spines['top'].set_visible(False)
ax_A2.spines['right'].set_visible(False)
canvas1 = FigureCanvasTkAgg(fig_A2, master=root)  # A tk.DrawingArea.
canvas1.draw()
canvas1.get_tk_widget().grid(row=1, column=1)
# Figure for the rotated X component.
fig_X, ax_X = plt.subplots(figsize=(3, 4))
ax_X.spines['left'].set_position('center')
ax_X.spines['bottom'].set_position(('axes', 1))
ax_X.spines['top'].set_visible(False)
ax_X.spines['right'].set_visible(False)
canvas2 = FigureCanvasTkAgg(fig_X, master=root)  # A tk.DrawingArea.
canvas2.draw()
canvas2.get_tk_widget().grid(row=1, column=2)
# Figure for the rotated Y component.
fig_Y, ax_Y = plt.subplots(figsize=(3, 4))
ax_Y.spines['left'].set_position('center')
ax_Y.spines['bottom'].set_position(('axes', 1))
ax_Y.spines['top'].set_visible(False)
ax_Y.spines['right'].set_visible(False)
canvas3 = FigureCanvasTkAgg(fig_Y, master=root)  # A tk.DrawingArea.
canvas3.draw()
canvas3.get_tk_widget().grid(row=1, column=3)
# Figure for the X-vs-Y parametric curve, with both spines centred.
fig_par, ax_par = plt.subplots(figsize=(3, 4))
ax_par.spines['left'].set_position('center')
ax_par.spines['bottom'].set_position('center')
ax_par.spines['top'].set_visible(False)
ax_par.spines['right'].set_visible(False)
# NOTE(review): the first three calls below all set the X label, so the first
# two are overwritten; only the last set_xlabel/set_ylabel pair takes effect.
ax_par.set_xlabel('X')
ax_par.set_xlabel('Y')
ax_par.set_xlabel('X', loc='right')
ax_par.set_ylabel('Y', loc='top')
canvas4 = FigureCanvasTkAgg(fig_par, master=root)  # A tk.DrawingArea.
canvas4.draw()
canvas4.get_tk_widget().grid(row=1, column=4)
'Создаем функцию для загрузки данных из файла'
# (Next: the function that loads data from a file.)
def load_data():
    """Load two traces from the SEG-Y file named in the entry widget and plot them."""
    global a_1, a_2, time, err, mem_ang
    'Берем из поля для ввода текста имя файла'
    # Read the file name from the text-entry widget.
    pach = Ef.get()
    'Берем из поля для ввода текста номера треков и обрабатываем ошибку при неправильном вводе номеров'
    # Parse the two comma-separated trace numbers; report bad input in the
    # status label. NOTE(review): on failure i/j keep their previous values.
    try:
        num = E.get()
        i = int(num.split(',')[0])
        j = int(num.split(',')[1])
        err['text'] = ''
    except ValueError:
        err['text'] = 'не верные номера трасс'
    'открываем файл обрабатывая ошибку'
    # Open the SEG-Y file; NOTE(review): the bare except hides the actual
    # failure reason (bad path, bad index, ...).
    try:
        with segyio.open(pach, ignore_geometry=True) as f:
            a_1 = f.trace[i]
            a_2 = f.trace[j]
            time = f.samples
            err['text'] = ''
    except:
        err['text'] = 'Неудалось открыть файл'
    'Строим граффики A1 и A2'
    # Redraw the A1 and A2 plots (clear, restore spine layout, plot).
    ax_A1.clear()
    ax_A1.spines['left'].set_position('center')
    ax_A1.spines['bottom'].set_position(('axes', 1))
    ax_A1.spines['top'].set_visible(False)
    ax_A1.spines['right'].set_visible(False)
    ax_A1.invert_yaxis()
    ax_A1.plot(a_1, time)
    canvas.draw()  # draw() refreshes the embedded Tk canvas
    ax_A2.clear()
    ax_A2.spines['left'].set_position('center')
    ax_A2.spines['bottom'].set_position(('axes', 1))
    ax_A2.spines['top'].set_visible(False)
    ax_A2.spines['right'].set_visible(False)
    ax_A2.invert_yaxis()
    ax_A2.plot(a_2, time)
    canvas1.draw()
    # Force the background thread to recompute X/Y for the new data.
    mem_ang = None
'Создаем функцию для сохранения результатов в файл'  # (Next: the function that saves results to a file.)
def save():
    """Save a_1, a_2, the rotated X/Y traces and the angle as a 5-trace SEG-Y file."""
    global a_1, a_2, X, Y
    ang = int(slide.get())
    'Генерируем имя файла из названия входных данных и угла'
    # Output name combines the input file name and the chosen angle.
    path = f'{Ef.get()}_angle_{ang}.sgy'
    spec = segyio.spec()
    spec.samples = list(range(len(a_1)))
    spec.format = 1
    spec.tracecount = 5
    with segyio.create(path, spec) as f:
        f.trace[0] = a_1
        f.trace[1] = a_2
        f.trace[2] = X
        f.trace[3] = Y
        # Trace 4 stores the angle, repeated to match the trace length.
        f.trace[4] = np.array([ang for _ in range(len(a_1))])
    'Выводим сообщение об успешном сохранении данных'
    # Report success in the status label.
    err['text'] = f'Треки сохранены в {path}'
'Создаем функцию для рассчетов при передвижении ползунка'  # (Next: the function that recomputes when the slider moves.)
def caunt_XY():
    """Background worker: recompute the X/Y rotation and redraw when the slider moves.

    Runs in an endless loop on a separate thread, polling the angle slider
    every 100 ms and redrawing only when the angle actually changed.
    """
    global X, Y, mem_ang
    while True:
        sleep(0.1)
        # Current slider angle, converted to radians.
        ang = np.radians(int(slide.get()))
        # mem_ang remembers the last processed angle, so work happens only
        # when the slider moved (or load_data() reset mem_ang to None).
        if ang != mem_ang:
            # A NameError is raised until load_data() has defined a_1/a_2.
            try:
                # 2-D rotation of the two input traces by `ang`.
                X = a_1 * np.cos(ang) + a_2 * np.sin(ang)
                Y = a_1 * -np.sin(ang) + a_2 * np.cos(ang)
                # Redraw the plots, clearing the old data first.
                ax_X.clear()
                ax_X.spines['left'].set_position('center')
                ax_X.spines['bottom'].set_position(('axes', 1))
                ax_X.spines['top'].set_visible(False)
                ax_X.spines['right'].set_visible(False)
                ax_X.invert_yaxis()
                ax_X.plot(X, time)
                canvas2.draw_idle()
                ax_Y.clear()
                ax_Y.spines['left'].set_position('center')
                ax_Y.spines['bottom'].set_position(('axes', 1))
                ax_Y.spines['top'].set_visible(False)
                ax_Y.spines['right'].set_visible(False)
                ax_Y.invert_yaxis()
                ax_Y.plot(Y, time)
                canvas3.draw_idle()
                ax_par.clear()
                ax_par.spines['left'].set_position('center')
                ax_par.spines['bottom'].set_position('center')
                ax_par.spines['top'].set_visible(False)
                ax_par.spines['right'].set_visible(False)
                # BUG FIX: the second call used set_xlabel for 'Y', which
                # overwrote the x label and left the y axis unlabelled.
                # Label the axes the same way as the initial figure setup.
                ax_par.set_xlabel('X', loc='right')
                ax_par.set_ylabel('Y', loc='top')
                ax_par.plot(X, Y)
                canvas4.draw_idle()
                mem_ang = ang
            except NameError:
                sleep(1)
'Создаем злементы оформления'
# (Create the decorative/label widgets.)
tkinter.Label(master=root, text="A1").grid(row=0, column=0)
tkinter.Label(master=root, text="A2").grid(row=0, column=1)
tkinter.Label(master=root, text="X").grid(row=0, column=2)
tkinter.Label(master=root, text="Y").grid(row=0, column=3)
tkinter.Label(master=root, text="Граффик параметрической кривой").grid(row=0, column=4)
# Status label used by load_data()/save() for error and success messages.
err = tkinter.Label(master=root, text="")
err.grid(row=2, column=0)
tkinter.Label(master=root, text="номера трасс через запятую").grid(row=2, column=1)
tkinter.Label(master=root, text="Угол (градусы)").grid(row=2, column=2)
tkinter.Label(master=root, text="Имя фала").grid(row=2, column=3)
'Создаем поля для ввода'
# (Create the entry fields: trace numbers and input file name.)
s1 = tkinter.StringVar()
s1.set('0,1')
E = tkinter.Entry(master=root, textvariable=s1)
E.grid(row=3, column=1)
s2 = tkinter.StringVar()
s2.set('1580_rot.sgy')
Ef = tkinter.Entry(master=root, textvariable=s2)
Ef.grid(row=3, column=3)
'создаем кнопки'
# (Create the buttons: reload/redraw and save.)
button_delit = tkinter.Button(master=root, text="Удалить данные, и построить граффики заново", command=load_data)
button_delit.grid(row=3, column=0)
tkinter.Button(master=root, text="Сохранить", command=save).grid(row=3, column=4)
'Создаем слайдер для выбора угла'
# (Slider for choosing the rotation angle, 0..90 degrees.)
slide = tkinter.Scale(root, orient=tkinter.HORIZONTAL, length=300, from_=0, to=90, tickinterval=10, resolution=1)
slide.grid(row=3, column=2)
'Создаем переменную для "Запоминания угла(используется в фенкции caunt_XY)"'
# (mem_ang remembers the last processed angle; used by caunt_XY.)
mem_ang = None
'Запускаем поток для рассчетов и перестроении граффиков при смене угла на слайдере'
# (Start the worker thread that recomputes/redraws on slider changes.)
# NOTE(review): not a daemon thread, so the process keeps running after the
# window is closed -- confirm whether that is intended.
ang_update = Thread(target=caunt_XY)
ang_update.start()
'Запускаем программу'
# (Enter the Tk main loop.)
tkinter.mainloop()
|
from pathlib import Path
import pytest
import fpdf
from fpdf.errors import FPDFException
from fpdf.html import px2mm
from test.conftest import assert_pdf_equal
HERE = Path(__file__).resolve().parent  # directory containing this test file
class MyFPDF(fpdf.FPDF, fpdf.HTMLMixin):
    """FPDF subclass with the HTML mixin (write_html); used by the tests below."""

    pass
def test_html_images(tmp_path):
    """A centered <img> advances the y position by its height (in mm)."""
    pdf = MyFPDF()
    pdf.add_page()
    initial = 10
    mm_after_image = initial + px2mm(300)
    assert round(pdf.get_x()) == 10
    assert round(pdf.get_y()) == 10
    assert round(pdf.w) == 210
    img_path = HERE.parent / "image/png_images/c636287a4d7cb1a36362f7f236564cef.png"
    pdf.write_html(
        f"<center><img src=\"{img_path}\" height='300' width='300'></center>"
    )
    # Unable to test the position of the image, as write_html moves to a new
    # line after adding the image; but it can be seen in the produced PDF file.
    assert round(pdf.get_x()) == 10
    assert pdf.get_y() == pytest.approx(mm_after_image, abs=0.01)
    assert_pdf_equal(pdf, HERE / "html_images.pdf", tmp_path)
def test_html_features(tmp_path):
    """Smoke-test the full range of supported HTML tags against a reference PDF."""
    pdf = MyFPDF()
    pdf.add_page()
    # Inline styling and links.
    pdf.write_html("<p><b>hello</b> world. i am <i>tired</i>.</p>")
    pdf.write_html("<p><u><b>hello</b> world. i am <i>tired</i>.</u></p>")
    pdf.write_html("<p><u><strong>hello</strong> world. i am <em>tired</em>.</u></p>")
    pdf.write_html('<p><a href="https://github.com">github</a></p>')
    pdf.write_html('<p align="right">right aligned text</p>')
    pdf.write_html("<p>i am a paragraph <br />in two parts.</p>")
    # Font colour, size and face.
    pdf.write_html('<font color="#00ff00"><p>hello in green</p></font>')
    pdf.write_html('<font size="7"><p>hello small</p></font>')
    pdf.write_html('<font face="helvetica"><p>hello helvetica</p></font>')
    pdf.write_html('<font face="times"><p>hello times</p></font>')
    # Headings h1-h6.
    pdf.write_html("<h1>h1</h1>")
    pdf.write_html("<h2>h2</h2>")
    pdf.write_html("<h3>h3</h3>")
    pdf.write_html("<h4>h4</h4>")
    pdf.write_html("<h5>h5</h5>")
    pdf.write_html("<h6>h6</h6>")
    pdf.write_html("<br />")
    pdf.write_html("<hr />")
    pdf.write_html("<br />")
    pdf.write_html("<br />")
    pdf.write_html("<pre>i am preformatted text.</pre>")
    pdf.write_html("<blockquote>hello blockquote</blockquote>")
    pdf.write_html("<ul><li>li1</li><li>another</li><li>l item</li></ul>")
    pdf.write_html("<ol><li>li1</li><li>another</li><li>l item</li></ol>")
    # Tables: empty, fixed-width, percent-width, with colspan.
    pdf.write_html('<table width="50"></table>')
    pdf.write_html("<img></img>")
    pdf.write_html(
        "<table>"
        "  <thead>"
        "    <tr>"
        '      <th width="30%">ID</th>'
        '      <th width="70%">Name</th>'
        "    </tr>"
        "  </thead>"
        "  <tbody>"
        "    <tr>"
        "      <td>1</td>"
        "      <td>Alice</td>"
        "    </tr>"
        "    <tr>"
        "      <td>2</td>"
        "      <td>Bob</td>"
        "    </tr>"
        "  </tbody>"
        "  <tfoot>"
        "    <tr>"
        '      <td width="50%">id</td>'
        '      <td width="50%">name</td>'
        "    </tr>"
        "  </tfoot>"
        "</table>"
    )
    pdf.write_html('<table width="50"></table>')
    pdf.write_html(
        '<table width="50%">'
        "  <thead>"
        "    <tr>"
        '      <th width="30%">ID</th>'
        '      <th width="70%">Name</th>'
        "    </tr>"
        "  </thead>"
        "  <tbody>"
        "    <tr>"
        "      <td>1</td>"
        "      <td>Alice</td>"
        "    </tr>"
        "    <tr>"
        "      <td>2</td>"
        "      <td>Bob</td>"
        "    </tr>"
        "  </tbody>"
        "  <tfoot>"
        "    <tr>"
        '      <td width="50%">id</td>'
        '      <td width="50%">name</td>'
        "    </tr>"
        "  </tfoot>"
        "</table>"
    )
    name = [
        "Alice",
        "Carol",
        "Chuck",
        "Craig",
        "Dan",
        "Erin",
        "Eve",
        "Faythe",
        "Frank",
        "Grace",
        "Heidi",
        "Ivan",
        "Judy",
        "Mallory",
        "Michael",
        "Niaj",
        "Olivia",
        "Oscar",
        "Peggy",
        "Rupert",
        "Sybil",
        "Trent",
        "Trudy",
        "Victor",
        "Walter",
        "Wendy",
    ]

    def getrow(i):
        # One <tr> per name; used to build a table long enough to paginate.
        return f"<tr><td>{i}</td><td>{name[i]}</td></tr>"

    pdf.write_html(
        (
            '<table width="50%">'
            "  <thead>"
            "    <tr>"
            '      <th width="30%">ID</th>'
            '      <th width="70%">Name</th>'
            "    </tr>"
            "  </thead>"
            "  <tbody>"
            "    <tr>"
            '      <td colspan="2">Alice</td>'
            "    </tr>"
        )
        + "".join(getrow(i) for i in range(26))
        + "  </tbody>"
        + "</table>"
    )
    pdf.add_page()
    img_path = HERE.parent / "image/png_images/c636287a4d7cb1a36362f7f236564cef.png"
    pdf.write_html(f"<img src=\"{img_path}\" height='300' width='300'>")
    assert_pdf_equal(pdf, HERE / "html_features.pdf", tmp_path)
def test_html_simple_table(tmp_path):
    """A plain 3-column table with percentage widths renders as expected."""
    pdf = MyFPDF()
    pdf.set_font_size(30)
    pdf.add_page()
    pdf.write_html(
        """<table><thead><tr>
        <th width="25%">left</th><th width="50%">center</th><th width="25%">right</th>
        </tr></thead><tbody><tr>
        <td>1</td><td>2</td><td>3</td>
        </tr><tr>
        <td>4</td><td>5</td><td>6</td>
        </tr></tbody></table>"""
    )
    assert_pdf_equal(pdf, HERE / "html_simple_table.pdf", tmp_path)
def test_html_table_line_separators(tmp_path):
    """table_line_separators=True draws horizontal rules between table rows."""
    pdf = MyFPDF()
    pdf.set_font_size(30)
    pdf.add_page()
    pdf.write_html(
        """<table><thead><tr>
        <th width="25%">left</th><th width="50%">center</th><th width="25%">right</th>
        </tr></thead><tbody><tr>
        <td>1</td><td>2</td><td>3</td>
        </tr><tr>
        <td>4</td><td>5</td><td>6</td>
        </tr></tbody></table>""",
        table_line_separators=True,
    )
    assert_pdf_equal(pdf, HERE / "html_table_line_separators.pdf", tmp_path)
def test_html_table_th_inside_tr_issue_137(tmp_path):
    """Regression test for issue #137: <th> cells inside body rows render correctly."""
    pdf = MyFPDF()
    pdf.add_page()
    pdf.write_html(
        """<table border="1">
        <tr>
        <th width="40%">header1</th>
        <th width="60%">header2</th>
        </tr>
        <tr>
        <th width="40%">value1</th>
        <td width="60%">value2</td>
        </tr>
        </table>"""
    )
    assert_pdf_equal(pdf, HERE / "html_table_line_separators_issue_137.pdf", tmp_path)
def test_html_table_with_border(tmp_path):
    """border="1" on a table draws cell borders."""
    pdf = MyFPDF()
    pdf.set_font_size(30)
    pdf.add_page()
    pdf.write_html(
        """<table border="1"><thead><tr>
        <th width="25%">left</th><th width="50%">center</th><th width="25%">right</th>
        </tr></thead><tbody><tr>
        <td>1</td><td>2</td><td>3</td>
        </tr><tr>
        <td>4</td><td>5</td><td>6</td>
        </tr></tbody></table>"""
    )
    assert_pdf_equal(pdf, HERE / "html_table_with_border.pdf", tmp_path)
def test_html_bold_italic_underline(tmp_path):
    """<B>, <I>, <U> render alone and nested together."""
    pdf = MyFPDF()
    pdf.set_font_size(30)
    pdf.add_page()
    pdf.write_html(
        """<B>bold</B>
           <I>italic</I>
           <U>underlined</U>
           <B><I><U>all at once!</U></I></B>"""
    )
    assert_pdf_equal(pdf, HERE / "html_bold_italic_underline.pdf", tmp_path)
def test_customize_ul(tmp_path):
    """<ul> indent and bullet char can be customised three equivalent ways."""
    html = """<ul>
        <li><b>term1</b>: definition1</li>
        <li><b>term2</b>: definition2</li>
    </ul>"""
    # 1. Customizing through class attributes:
    class CustomPDF(fpdf.FPDF, fpdf.HTMLMixin):
        li_tag_indent = 5
        ul_bullet_char = "\x86"
    pdf = CustomPDF()
    pdf.set_font_size(30)
    pdf.add_page()
    pdf.write_html(html)
    pdf.ln()
    # 2. Customizing through instance attributes:
    pdf.li_tag_indent = 10
    pdf.ul_bullet_char = "\x9b"
    pdf.write_html(html)
    pdf.ln()
    # 3. Customizing through optional method arguments:
    for indent, bullet in ((15, "\xac"), (20, "\xb7")):
        pdf.write_html(html, li_tag_indent=indent, ul_bullet_char=bullet)
        pdf.ln()
    assert_pdf_equal(pdf, HERE / "test_customize_ul.pdf", tmp_path)
def test_img_inside_html_table(tmp_path):
    """Images with explicit dimensions render inside table cells."""
    pdf = MyFPDF()
    pdf.add_page()
    pdf.write_html(
        """<table>
        <tr>
        <td width="50%">
        <img src="test/image/png_images/affc57dfffa5ec448a0795738d456018.png" height="235" width="435"/>
        </td>
        <td width="50%">
        <img src="test/image/image_types/insert_images_insert_png.png" height="162" width="154"/>
        </td>
        </tr>
        </table>"""
    )
    assert_pdf_equal(pdf, HERE / "test_img_inside_html_table.pdf", tmp_path)
def test_img_inside_html_table_without_explicit_dimensions(tmp_path):
    """Images without height/width attributes still render inside table cells."""
    pdf = MyFPDF()
    pdf.add_page()
    pdf.write_html(
        """<table>
        <tr>
        <td width="50%">
        <img src="test/image/png_images/affc57dfffa5ec448a0795738d456018.png"/>
        </td>
        <td width="50%">
        <img src="test/image/image_types/insert_images_insert_png.png"/>
        </td>
        </tr>
        </table>"""
    )
    assert_pdf_equal(
        pdf,
        HERE / "test_img_inside_html_table_without_explicit_dimensions.pdf",
        tmp_path,
    )
def test_img_inside_html_table_centered(tmp_path):
    """Images wrapped in <center> are centred inside their table cells."""
    pdf = MyFPDF()
    pdf.add_page()
    pdf.write_html(
        """<table>
        <tr>
        <td width="50%"><center>
        <img src="test/image/png_images/affc57dfffa5ec448a0795738d456018.png" height="235" width="435"/>
        </center></td>
        <td width="50%"><center>
        <img src="test/image/image_types/insert_images_insert_png.png" height="162" width="154"/>
        </center></td>
        </tr>
        </table>"""
    )
    assert_pdf_equal(pdf, HERE / "test_img_inside_html_table_centered.pdf", tmp_path)
def test_img_inside_html_table_centered_with_align(tmp_path):
    """align="center" on <td> centres the contained images (no <center> tag)."""
    pdf = MyFPDF()
    pdf.add_page()
    pdf.write_html(
        """<table>
        <tr>
        <td width="50%" align="center">
        <img src="test/image/png_images/affc57dfffa5ec448a0795738d456018.png" height="235" width="435"/>
        </td>
        <td width="50%" align="center">
        <img src="test/image/image_types/insert_images_insert_png.png" height="162" width="154"/>
        </td>
        </tr>
        </table>"""
    )
    assert_pdf_equal(
        pdf, HERE / "test_img_inside_html_table_centered_with_align.pdf", tmp_path
    )
def test_img_inside_html_table_centered_with_caption(tmp_path):
    """Side-by-side centred images with header and caption rows render correctly."""
    pdf = MyFPDF()
    pdf.add_page()
    pdf.write_html(
        """<table border="1">
        <tr>
        <td colspan="2" align="center"><b>Side by side centered pictures and captions</b></td>
        </tr>
        <tr>
        <td width="50%" align="center"><img src="docs/fpdf2-logo.png" height="200" width="200"/></td>
        <td width="50%" align="center"><img src="docs/fpdf2-logo.png" height="200" width="200"/></td>
        </tr>
        <tr>
        <td width="50%" align="center">left caption</td>
        <td width="50%" align="center">right caption</td>
        </tr>
        </table>"""
    )
    assert_pdf_equal(
        pdf, HERE / "test_img_inside_html_table_centered_with_caption.pdf", tmp_path
    )
def test_html_justify_paragraph(tmp_path):
    """align="justify" on <p> justifies the paragraph text."""
    pdf = MyFPDF()
    pdf.add_page()
    pdf.write_html(
        '<p align="justify">'
        "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua."
        " Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat."
        " Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur."
        " Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum."
        "</p>"
    )
    assert_pdf_equal(pdf, HERE / "html_justify_paragraph.pdf", tmp_path)
def test_issue_156(tmp_path):
    """Regression test for issue #156: a missing regular font variant raises a clear error."""
    pdf = MyFPDF()
    pdf.add_font("Roboto", style="B", fname="test/fonts/Roboto-Bold.ttf", uni=True)
    pdf.set_font("Roboto", style="B")
    pdf.add_page()
    # Only the bold variant is registered, so plain text must fail loudly.
    with pytest.raises(FPDFException) as error:
        pdf.write_html("Regular text<br><b>Bold text</b>")
    assert (
        str(error.value)
        == "Undefined font: roboto - Use built-in fonts or FPDF.add_font() beforehand"
    )
    pdf.add_font("Roboto", fname="test/fonts/Roboto-Regular.ttf", uni=True)
    pdf.write_html("Regular text<br><b>Bold text</b>")
    assert_pdf_equal(pdf, HERE / "issue_156.pdf", tmp_path)
def test_html_font_color_name(tmp_path):
    """Font colours work as CSS names (any case), #rgb and #rrggbb hex codes."""
    pdf = MyFPDF()
    pdf.add_page()
    pdf.write_html(
        '<font color="crimson"><p>hello in crimson</p></font>'
        '<font color="#f60"><p>hello in orange</p></font>'
        '<font color="LIGHTBLUE"><p><b>bold hello in light blue</b></p></font>'
        '<font color="royalBlue"><p>hello in royal blue</p></font>'
        '<font color="#000"><p>hello in black</p></font>'
        '<font color="beige"><p><i>italic hello in beige</i></p></font>'
    )
    assert_pdf_equal(pdf, HERE / "html_font_color_name.pdf", tmp_path)
|
import FWCore.ParameterSet.Config as cms
# Per-subjet kinematic quantities.
# NOTE(review): unused in this fragment; presumably consumed by downstream
# configuration -- confirm before removing.
subjet_variables = ["px", "py", "pz", "e", "pt", "m", "eta", "phi"]
# EDProducer extracting up to 4 subjets from the CA R=1.2 CHS PF jets.
Subjetter = cms.EDProducer("SubjetProducer",
    src=cms.InputTag("ca12PFJetsCHS"),
    nSubjets=cms.uint32(4),
)
|
import os, cv2
def rename(file_path, new_file_path):
    """Rename/move *file_path* to *new_file_path* (thin wrapper over os.rename)."""
    os.rename(file_path, new_file_path)
def delete(file_path):
    """Delete the file at *file_path* (thin wrapper over os.remove)."""
    os.remove(file_path)
def getFiles(folder_path, full_path=True):
    """List image files directly inside *folder_path*.

    When full_path is False the bare file names are returned instead of
    joined paths.
    """
    found = []
    for entry in os.listdir(folder_path):
        candidate = os.path.join(folder_path, entry)
        if not os.path.isfile(candidate):
            continue
        if not isImage(candidate):
            continue
        # `is False` kept deliberately: any other falsy value still yields
        # the full path, matching the original behaviour.
        found.append(entry if full_path is False else candidate)
    return found
def getFolders(folder_path):
    """Return the full paths of the immediate sub-directories of *folder_path*."""
    joined = (os.path.join(folder_path, entry) for entry in os.listdir(folder_path))
    return [path for path in joined if os.path.isdir(path)]
def getAllFiles(folder_path):
    """Recursively collect every image file under *folder_path*."""
    collected = []
    for entry in os.listdir(folder_path):
        path = os.path.join(folder_path, entry)
        if os.path.isfile(path):
            if isImage(path):
                collected.append(path)
        else:
            # Descend into sub-directories and merge their results.
            collected.extend(getAllFiles(path))
    return collected
def isImage(file_path):
    """Return True if *file_path* has an image extension (png/jpg/jpeg/bmp).

    Fixes two defects of the substring-based original: matching anywhere in
    the path ('archive.png.bak', 'x.png/notes.txt' were wrongly accepted) and
    case sensitivity ('photo.PNG' was wrongly rejected).
    """
    return file_path.lower().endswith(('.png', '.jpg', '.jpeg', '.bmp'))
def readCv2Image(image_path, flags=None):
    """Read an image with OpenCV; *flags* (e.g. cv2.IMREAD_GRAYSCALE) are forwarded when given."""
    if flags is None:
        return cv2.imread(image_path)
    else:
        return cv2.imread(image_path, flags)
def saveImage(image_path, cv2_img, params=None):
    """Write *cv2_img* to *image_path*; *params* are optional cv2.imwrite flags.

    Only forward *params* when the caller supplied them: passing None as the
    third positional argument can make cv2.imwrite raise, since it expects a
    numeric sequence there -- TODO confirm against the OpenCV version in use.
    """
    if params is None:
        cv2.imwrite(image_path, cv2_img)
    else:
        cv2.imwrite(image_path, cv2_img, params)
def isExists(file_path):
    """Return True if *file_path* exists (file or directory)."""
    return os.path.exists(file_path)
def isFolder(folder_path):
    """Return True if *folder_path* is an existing directory."""
    return os.path.isdir(folder_path)
def isFile(file_path):
    # NOTE(review): defined as "not a directory", so a non-existent path also
    # counts as a file -- confirm that is the intended semantics.
    """Return True if *file_path* is not an existing directory."""
    return not isFolder(file_path)
def correctFolderPath(folder_path):
    """Return a folder path guaranteed to end with '/'.

    If *folder_path* is not an existing directory, its dirname is used
    instead (so a file path yields its containing folder).
    """
    if not isFolder(folder_path):
        folder_path = dirName(folder_path)
    if folder_path[-1] != '/':
        return folder_path + '/'
    return folder_path
def createDir(*folder_path):
    """Create each given directory (including parents) if it does not already exist."""
    for target in folder_path:
        # os.path.exists inlined here in place of the isExists helper.
        if not os.path.exists(target):
            os.makedirs(target)
def getFullPath(file_path):
    """Return the absolute, normalised version of *file_path*."""
    return os.path.abspath(file_path)
def dirName(file_path):
    """Return the directory component of *file_path* ('' if none)."""
    return os.path.dirname(file_path)
def fileName(file_path, withExtension=True):
    """Return the base name of *file_path*, optionally stripped of its extension."""
    base = os.path.basename(file_path)
    if withExtension:
        return base
    return os.path.splitext(base)[0]
def join(folder_path, file_path):
    """Join *folder_path* and *file_path* with exactly one '/' between them.

    Fixes an IndexError in the original (`folder_path[-1]`) when
    *folder_path* is the empty string.
    """
    if folder_path.endswith('/'):
        return folder_path + file_path
    return folder_path + '/' + file_path
def folderName(folder_path):
    """Return the last path component of *folder_path* (the folder's own name)."""
    return os.path.basename(folder_path)
def extensionName(file_path):
    """Return the extension of *file_path* including the dot, or '' if none.

    os.path.splitext always returns a 2-tuple, so the original
    `len(split) == 1` branch was unreachable; this keeps the same results
    while dropping the dead code (fileName's default call is inlined as
    os.path.basename).
    """
    return os.path.splitext(os.path.basename(file_path))[-1]
def generateEmptyFolder(generate_path, total_number):
    # NOTE(review): Python 2 only -- uses xrange and the print statement.
    """Create numbered empty sub-folders 0..total_number-1 under *generate_path*."""
    createDir(generate_path)
    generate_path = correctFolderPath(generate_path)
    for i in xrange(total_number):
        createDir(generate_path+str(i))
        print 'generated folder:',generate_path+str(i)
class Node:
    """A single binary-tree node holding a value and two child links."""

    def __init__(self, data=None):
        # Children start out empty; Tree.insert attaches subtrees later.
        self.data = data
        self.left = None
        self.right = None
class Tree:
    """Unbalanced binary search tree; values >= a node go to its right subtree."""

    def __init__(self):
        self.root = None

    def insert(self, angka):
        """Insert `angka`, recursing left for smaller values and right otherwise."""
        if self.root is None:
            # Empty subtree: place the value here and hang two fresh empty
            # subtrees off the new node so recursion always has a Tree.
            self.root = Node(angka)
            self.root.left = Tree()
            self.root.right = Tree()
        elif self.root.data > angka:
            self.root.left.insert(angka)
        else:
            self.root.right.insert(angka)

    def preOrder(self):
        """Print node value, then the left subtree, then the right subtree."""
        if self.root is not None:
            print(self.root.data)
            self.root.left.preOrder()
            self.root.right.preOrder()

    def inOrder(self):
        """Print left subtree, node value, then right subtree (sorted order)."""
        if self.root is not None:
            self.root.left.inOrder()
            print(self.root.data)
            self.root.right.inOrder()

    def postOrder(self):
        """Print left subtree, right subtree, then the node value."""
        if self.root is not None:
            self.root.left.postOrder()
            self.root.right.postOrder()
            print(self.root.data)
# Build a small demo tree and show all three traversal orders.
tree = Tree()
for value in (5, 6, 4):
    tree.insert(value)
tree.preOrder()
print("=====")
tree.inOrder()
print("====")
tree.postOrder()
|
import pickle
import sqlite3
class DefaultTablesTable(object):
    """Builds SQL for a registry table mapping a table name to pickled kwargs."""

    def __init__(self):
        # The table is named after the class, so subclasses get their own table.
        self.table_name = self.__class__.__name__

    def get_create_query(self, **kwargs):
        """Return the CREATE TABLE statement for this registry table."""
        return f"CREATE TABLE {self.table_name} (id INTEGER PRIMARY KEY," \
               f"table_name VARCHAR(255),kwargs BLOB);"

    def get_insert_query(self, table_name, **kwargs):
        """Return an INSERT statement storing table_name plus pickled kwargs.

        Bug fix: the previous version interpolated an unquoted string and
        the repr of a sqlite3.Binary object into the SQL, producing an
        invalid statement.  The string is now quoted and the blob emitted
        as an X'..' hexadecimal literal, which SQLite parses natively.
        """
        bdata = pickle.dumps(kwargs, pickle.HIGHEST_PROTOCOL)
        return f"INSERT INTO {self.table_name} (table_name,kwargs) VALUES " \
               f"('{table_name}', X'{bdata.hex()}');"
class ResultsByView(object):
    """Builds SQL for a per-image dice-score results table with one column per class."""

    def __init__(self, n_classes):
        self.n_classes = n_classes
        self.table_name = self.__class__.__name__

    def get_create_query(self, **kwargs):
        """Return CREATE TABLE with mean_dice plus a dice column per class."""
        per_class = "".join(f"class_{i}_dice FLOAT(5)," for i in range(self.n_classes))
        query = (f"CREATE TABLE {self.table_name} (id INTEGER PRIMARY KEY,"
                 f"image_id VARCHAR(255) NOT NULL,mean_dice FLOAT(5),"
                 + per_class)
        # Drop the trailing comma and close the statement.
        return query[:-1] + ");"

    def get_insert_query(self, image_id, mean_dice, per_class_dices):
        """Return INSERT storing the mean dice and each per-class dice value."""
        columns = "".join(f"class_{i}_dice," for i in range(self.n_classes))
        values = "".join(f"{per_class_dices[i]}," for i in range(self.n_classes))
        query = (f"INSERT INTO {self.table_name} (image_id,mean_dice,"
                 + columns[:-1]
                 + f") VALUES ('{image_id}',{mean_dice},"
                 + values)
        return query[:-1] + ");"
|
from django.urls import re_path
from channels.routing import URLRouter
from django.core.asgi import get_asgi_application
from . import consumers
# ASGI routing: server-sent-event connections are handled by the channels
# consumer; every other path falls through to the regular Django ASGI app.
urlpatterns = URLRouter([
    re_path(r'events/', consumers.ServerSentEventConsumer.as_asgi()),
    re_path(r'^.*$', get_asgi_application())
])
]) |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Usage example:
$ python gen.py ru.txt result.txt 10
"""
import argparse
import random
# CLI: positional alphabet file, output path and target size (GB), plus
# optional knobs for the write buffer and word/space shaping.
parser = argparse.ArgumentParser(description='String generator.')
parser.add_argument('alphabet', type=str,
                    help='path to the alphabet file')
parser.add_argument('output', type=str,
                    help='output path')
parser.add_argument('size', type=int,
                    help='output size in GB')
parser.add_argument('--output-buffer', type=int, default=10 << 10,
                    help='output buffer size in KB [DEFAULT: 10MB]')
parser.add_argument('--min-spaces', type=int, default=0,
                    help='minimum number of spaces per string [DEFAULT: 0]')
parser.add_argument('--max-spaces', type=int, default=3,
                    help='maximum number of spaces per string [DEFAULT: 3]')
parser.add_argument('--min-string-length',
                    type=int, default=20,
                    help='minimum word length [DEFAULT: 20]')
parser.add_argument('--max-string-length',
                    type=int, default=30,
                    help='maximum word length [DEFAULT: 30]')
def load_alphabet(path):
    """Read the alphabet file and return its contents as a unicode string.

    Opens the file with an explicit UTF-8 encoding; the previous
    `fd.read().decode('utf-8')` only worked on Python 2, where read()
    returned bytes.
    """
    with open(path, 'r', encoding='utf-8') as fd:
        return fd.read()
def main():
    """Generate random space-separated strings until the output file reaches the target size.

    Characters are drawn from the alphabet file; the final write is trimmed
    so the output is exactly `args.size` GB of UTF-8 bytes.
    """
    args = parser.parse_args()
    alphabet = load_alphabet(args.alphabet)
    max_size = args.size << 30  # GB -> bytes
    min_spaces = args.min_spaces
    max_spaces = args.max_spaces
    min_string_length = args.min_string_length
    max_string_length = args.max_string_length
    with open(args.output, 'wb', args.output_buffer << 10) as fd:
        size = 0
        run = True
        while run:
            string = u''
            letters_count = random.randint(min_string_length,
                                           max_string_length)
            spaces_count = random.randint(min_spaces,
                                          max_spaces)
            spaces = [random.randint(1, letters_count-1)
                      for _ in range(spaces_count)]
            for i in range(letters_count):
                string += random.choice(alphabet) if i not in spaces else ' '
            string += '\n'
            encoded_string = string.encode('utf-8')
            size += len(encoded_string)
            if size > (max_size):
                run = False
                # Trim the overflow in BYTES.  The original sliced the
                # unicode string by a byte count, which over- or under-shot
                # the target size for multi-byte alphabets.
                encoded_string = encoded_string[:(max_size - size)]
            if size == (max_size):
                run = False
            fd.write(encoded_string)
# Script entry point.
if __name__ == "__main__":
    main()
|
import numpy as np
import matplotlib.pyplot as plt
# Number of frames to render.
N = 55
# For each frame, load the range grid, velocity grid and FFT magnitude
# map from text files, draw a labelled range-Doppler contour plot, and
# save it in both JPEG and SVG form before closing the figure.
for i in range(1,N+1):
    Rres = np.loadtxt('./Rres/Rres'+str(i)+'.txt')
    Vres = np.loadtxt('./Vres/Vres'+str(i)+'.txt')
    sigDfft = np.loadtxt('./sigDfft/sigDfft'+str(i)+'.txt')
    plt.contourf(Rres,Vres,sigDfft)
    plt.title('Range-Doppler Map')
    plt.xlabel('Range/m')
    plt.ylabel('Velocity/mps')
    plt.savefig('./pic/jpeg/RDM'+str(i)+'.jpeg')
    plt.savefig('./pic/svg/RDM'+str(i)+'.svg')
    # Close the figure so successive frames don't accumulate in memory.
    plt.close()
# -*- coding=utf-8 -*-
from app import db
from werkzeug.security import generate_password_hash, check_password_hash
from flask_login import UserMixin, AnonymousUserMixin
from app import login_manager
import datetime
from flask import current_app
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
import hashlib
import random
import string
class Permission:
    """Bit-flag permission constants combined into role permission masks."""
    FOLLOW = 0x01
    COMMENT = 0x02
    WRITE_ARTICLES = 0x04
    MODERATE_COMMENTS = 0x08
    ADMINISTER = 0x80
class Role(db.Model):
    """Named permission bundle (User / Moderator / Administrator) assigned to users."""
    __tablename__ = 'roles'
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(64), unique=True)
    # Role given to newly registered users; exactly one role should have this set.
    default = db.Column(db.Boolean, default=False, index=True)
    # Bitmask of Permission flags.
    permissions = db.Column(db.Integer)
    users = db.relationship('User', backref='role', lazy='dynamic')
    def __repr__(self):
        return self.name
    @staticmethod
    def insert_roles():
        """Create or update the built-in roles; safe to run repeatedly."""
        roles = {
            'User': (Permission.FOLLOW |
                     Permission.COMMENT |
                     Permission.WRITE_ARTICLES, True),
            'Moderator': (Permission.FOLLOW |
                          Permission.COMMENT |
                          Permission.WRITE_ARTICLES |
                          Permission.MODERATE_COMMENTS, False),
            'Administrator': (0xff, False)
        }
        for r in roles:
            role = Role.query.filter_by(name=r).first()
            if role is None:
                role = Role(name=r)
            role.permissions = roles[r][0]
            role.default = roles[r][1]
            db.session.add(role)
        db.session.commit()
class User(UserMixin, db.Model):
    """Registered account with role-based permissions, VIP status and site credits."""
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True)
    email = db.Column(db.String(64), index=True)
    username = db.Column(db.String(64), unique=True, index=True)
    # NOTE(review): this column is shadowed by the `password` property
    # defined below (the later class-body assignment wins), so it is never
    # part of the mapped schema -- confirm and consider removing it.
    password = db.Column(db.String(64))
    password_hash = db.Column(db.String(128))
    last_seen = db.Column(db.DateTime(), default=datetime.datetime.utcnow)
    confirmed = db.Column(db.Boolean, default=False)
    role_id = db.Column(db.Integer, db.ForeignKey('roles.id'))
    isvip = db.Column(db.Boolean, default=False, index=True)
    vip_expired = db.Column(db.DateTime())
    # MD5 of the lowercase... NOTE(review): computed from the raw email in
    # __init__ below; used for Gravatar URLs.
    avatar_hash = db.Column(db.String(32))
    # Loyalty points; `coin` is the site currency balance (starts at 500).
    jifen = db.Column(db.Integer, default=0)
    regip = db.Column(db.String(32))
    lastip = db.Column(db.String(32))
    coin = db.Column(db.Integer, default=500)
    invitecode = db.Column(db.String(64))
    status = db.Column(db.Boolean, default=1)
    def __repr__(self):
        return self.username
    def __init__(self, **kwargs):
        """Assign a default role (admin role for username 'admin') and the avatar hash."""
        super(User, self).__init__(**kwargs)
        if self.role is None:
            if self.username == 'admin':
                self.role = Role.query.filter_by(permissions=0xff).first()
            else:
                self.role = Role.query.filter_by(default=True).first()
        if self.email is not None and self.avatar_hash is None:
            self.avatar_hash = hashlib.md5(
                self.email.encode('utf-8')).hexdigest()
    @staticmethod
    def insert_admin(email, password, username='admin'):
        """Create the administrator account with a far-future VIP expiry."""
        admin = User(email=email, password=password,
                     username=username, isvip=True, vip_expired=datetime.datetime.now() + datetime.timedelta(days=+99999))
        admin.role = Role.query.filter_by(permissions=0xff).first()
        db.session.add(admin)
        db.session.commit()
    @property
    def password(self):
        # Write-only credential: reading raises, writing stores a salted hash.
        raise AttributeError('password is not a readable attribute')
    @password.setter
    def password(self, password):
        self.password_hash = generate_password_hash(password)
    def verify_password(self, password):
        """Return True if `password` matches the stored hash."""
        return check_password_hash(self.password_hash, password)
    def can(self, permissions):
        """Return True if this user's role includes ALL of the given permission bits."""
        return self.role is not None and \
            (self.role.permissions & permissions) == permissions
    def is_administrator(self):
        return self.can(Permission.ADMINISTER)
    def generate_reset_token(self, expiration=3600):
        """Return a signed, expiring token for password reset."""
        s = Serializer(current_app.config['SECRET_KEY'], expiration)
        return s.dumps({'reset': self.id})
    def reset_password(self, token, new_password):
        """Validate a reset token and set the new password; False on any failure."""
        s = Serializer(current_app.config['SECRET_KEY'])
        try:
            data = s.loads(token)
        except:
            # NOTE(review): bare except; itsdangerous raises BadSignature /
            # SignatureExpired -- consider catching those explicitly.
            return False
        if data.get('reset') != self.id:
            return False
        self.password = new_password
        db.session.add(self)
        db.session.commit()
        return True
    def generate_confirmation_token(self, expiration=3600):
        """Return a signed, expiring token for email confirmation."""
        s = Serializer(current_app.config['SECRET_KEY'], expiration)
        return s.dumps({'confirm': self.id})
    def confirm(self, token):
        """Validate a confirmation token and mark the account confirmed."""
        s = Serializer(current_app.config['SECRET_KEY'])
        try:
            data = s.loads(token)
        except:
            return False
        if data.get('confirm') != self.id:
            return False
        self.confirmed = True
        db.session.add(self)
        db.session.commit()
        return True
    def gravatar(self, size=40, default='identicon', rating='g'):
        """Return the Gravatar avatar URL for this user's email hash."""
        # if request.is_secure:
        #     url = 'https://secure.gravatar.com/avatar'
        # else:
        #     url = 'http://www.gravatar.com/avatar'
        url = 'http://www.gravatar.com/avatar'
        hash = self.avatar_hash or hashlib.md5(
            self.email.encode('utf-8')).hexdigest()
        return '{url}/{hash}?s={size}&d={default}&r={rating}'.format(
            url=url, hash=hash, size=size, default=default, rating=rating)
    @staticmethod
    def insert_avatar():
        """Backfill avatar_hash for existing users that are missing one."""
        for user in User.query.all():
            if user.email is not None and user.avatar_hash is None:
                user.avatar_hash = hashlib.md5(
                    user.email.encode('utf-8')).hexdigest()
                db.session.add(user)
        db.session.commit()
class AnonymousUser(AnonymousUserMixin):
    """Flask-Login anonymous user: no permissions, not an admin, not a VIP."""
    def can(self, permissions):
        return False
    def is_administrator(self):
        return False
    def vip_(self):
        return False
# Flask-Login hooks: anonymous sessions use AnonymousUser, and logged-in
# sessions are re-hydrated from the user's primary key.
login_manager.anonymous_user = AnonymousUser
@login_manager.user_loader
def load_user(user_id):
    return User.query.get(int(user_id))
# category
class Category(db.Model):
    """Product category; each Good belongs to exactly one category."""
    __tablename__ = 'categories'
    cateid = db.Column(db.Integer, primary_key=True)
    cate_name = db.Column(db.String(64), index=True)
    cate_info = db.Column(db.String(64))
    goods = db.relationship('Good', backref='category', lazy='dynamic')
    def __repr__(self):
        return self.cate_name
# goods
class Good(db.Model):
    """Sellable product with price, sales counter and availability flag."""
    __tablename__ = 'goods'
    good_id = db.Column(db.Integer, primary_key=True)
    good_name = db.Column(db.String(64), index=True)
    good_info = db.Column(db.Text(1000))
    good_price = db.Column(db.Float, default=0)
    good_sales = db.Column(db.Integer, default=0)
    good_status = db.Column(db.Boolean, default=True)
    cate_id = db.Column(db.Integer, db.ForeignKey('categories.cateid'))
    #kms = db.relationship('KM', backref='good', lazy='dynamic')
    #orders = db.relationship('Order', backref='good', lazy='dynamic')
    def __repr__(self):
        return self.good_name
# km
class KM(db.Model):
    """Redeemable key/card code attached to a good; km_status marks it unused."""
    __tablename__ = 'km'
    km_id = db.Column(db.Integer, primary_key=True)
    km_value = db.Column(db.String(64), index=True)
    km_status = db.Column(db.Boolean, default=True)
    good_id = db.Column(db.Integer, db.ForeignKey('goods.good_id'))
    orders = db.relationship('Order', backref='km', lazy='dynamic')
    def __repr__(self):
        return self.km_value
# order
class Order(db.Model):
    """Purchase record linking a good and (once paid) its delivered key code."""
    __tablename__ = 'orders'
    order_id = db.Column(db.Integer, primary_key=True)
    trade_id = db.Column(db.String(64), index=True)
    # False until the payment is confirmed.
    trade_status = db.Column(db.Boolean, default=False)
    starttime = db.Column(db.DateTime, default=datetime.datetime.now)
    endtime = db.Column(db.DateTime)
    lx = db.Column(db.Integer)
    good_id = db.Column(db.Integer, db.ForeignKey('goods.good_id'))
    km_id = db.Column(db.Integer, db.ForeignKey('km.km_id'))
    def __repr__(self):
        return self.trade_id
|
import json
import time
import unittest
import datetime
from dataserv_client import cli
from dataserv_client import api
from dataserv_client import exceptions
# Known test addresses keyed by greek-letter name, plus the local dataserv
# server these integration tests run against.
# NOTE(review): the fixtures file handle is never closed -- harmless in
# tests, but a `with open(...)` would be cleaner.
fixtures = json.load(open("tests/fixtures.json"))
addresses = fixtures["addresses"]
url = "http://127.0.0.1:5000"
class AbstractTestSetup(object):
    """Shared setUp: pause between tests so server requests don't collide."""
    def setUp(self):
        time.sleep(2)  # avoid collision
class TestClientRegister(AbstractTestSetup, unittest.TestCase):
    """Integration tests for Client.register against the local server."""
    def test_register(self):
        client = api.Client(addresses["alpha"], url=url)
        self.assertTrue(client.register())
    def test_already_registered(self):
        # Registering the same address twice must raise.
        def callback():
            client = api.Client(addresses["beta"], url=url)
            client.register()
            client.register()
        self.assertRaises(exceptions.AddressAlreadyRegistered, callback)
    def test_invalid_address(self):
        def callback():
            client = api.Client("xyz", url=url)
            client.register()
        self.assertRaises(exceptions.InvalidAddress, callback)
    def test_invalid_farmer(self):
        # A bogus URL suffix means the farmer endpoint does not exist.
        def callback():
            client = api.Client(addresses["nu"], url=url + "/xyz")
            client.register()
        self.assertRaises(exceptions.FarmerNotFound, callback)
    def test_address_required(self):
        def callback():
            api.Client().register()
        self.assertRaises(exceptions.AddressRequired, callback)
class TestClientPing(AbstractTestSetup, unittest.TestCase):
    """Integration tests for Client.ping (requires a prior register)."""
    def test_ping(self):
        client = api.Client(addresses["gamma"], url=url)
        self.assertTrue(client.register())
        self.assertTrue(client.ping())
    def test_invalid_address(self):
        def callback():
            client = api.Client("xyz", url=url)
            client.ping()
        self.assertRaises(exceptions.InvalidAddress, callback)
    def test_invalid_farmer(self):
        def callback():
            client = api.Client(addresses["delta"], url=url + "/xyz")
            client.ping()
        self.assertRaises(exceptions.FarmerNotFound, callback)
    def test_address_required(self):
        def callback():
            api.Client().ping()
        self.assertRaises(exceptions.AddressRequired, callback)
class TestClientPoll(AbstractTestSetup, unittest.TestCase):
    """Integration tests for Client.poll (register + repeated ping loop)."""
    def test_poll(self):
        client = api.Client(addresses["zeta"], url=url)
        self.assertTrue(client.poll(register_address=True, limit=60))
    def test_address_required(self):
        def callback():
            api.Client().poll()
        self.assertRaises(exceptions.AddressRequired, callback)
class TestClientVersion(AbstractTestSetup, unittest.TestCase):
    """Client.version must report the package version."""
    def test_version(self):
        client = api.Client(url=url)
        self.assertEqual(client.version(), api.__version__)
class TestInvalidArgument(AbstractTestSetup, unittest.TestCase):
    """Negative retry settings must be rejected at construction time."""
    def test_invalid_retry_limit(self):
        def callback():
            api.Client(connection_retry_limit=-1)
        self.assertRaises(exceptions.InvalidArgument, callback)
    def test_invalid_retry_delay(self):
        def callback():
            api.Client(connection_retry_delay=-1)
        self.assertRaises(exceptions.InvalidArgument, callback)
class TestConnectionRetry(AbstractTestSetup, unittest.TestCase):
    """Retry settings are verified indirectly via elapsed wall-clock time."""
    def test_no_retry(self):
        # With zero retries/delay the failure must come back quickly (<15s).
        def callback():
            client = api.Client(address=addresses["kappa"],
                                url="http://invalid.url",
                                connection_retry_limit=0,
                                connection_retry_delay=0)
            client.register()
        before = datetime.datetime.now()
        self.assertRaises(exceptions.ConnectionError, callback)
        after = datetime.datetime.now()
        self.assertTrue(datetime.timedelta(seconds=15) > (after - before))
    def test_default_retry(self):
        # Five retries at five seconds each must take at least 25s overall.
        def callback():
            client = api.Client(address=addresses["kappa"],
                                url="http://invalid.url",
                                connection_retry_limit=5,
                                connection_retry_delay=5)
            client.register()
        before = datetime.datetime.now()
        self.assertRaises(exceptions.ConnectionError, callback)
        after = datetime.datetime.now()
        self.assertTrue(datetime.timedelta(seconds=25) < (after - before))
class TestClientBuild(AbstractTestSetup, unittest.TestCase):
    """Client.build must generate data shards scaled to max_size."""
    def test_build(self):
        client = api.Client(addresses["pi"], url=url, debug=True,
                            max_size=1024*1024*256)  # 256MB
        client.register()
        generated = client.build(cleanup=True)
        self.assertTrue(len(generated))
        # Doubling max_size to 512MB must yield exactly four shards.
        client = api.Client(addresses["omicron"], url=url, debug=True,
                            max_size=1024*1024*512)  # 512MB
        client.register()
        generated = client.build(cleanup=True)
        self.assertTrue(len(generated) == 4)
    def test_address_required(self):
        def callback():
            api.Client().build()
        self.assertRaises(exceptions.AddressRequired, callback)
class TestClientCliArgs(AbstractTestSetup, unittest.TestCase):
    """End-to-end tests of the command-line argument handling in cli.main."""
    def test_poll(self):
        args = [
            "--address=" + addresses["eta"],
            "--url=" + url,
            "poll",
            "--register_address",
            "--delay=5",
            "--limit=60"
        ]
        self.assertTrue(cli.main(args))
    def test_register(self):
        args = ["--address=" + addresses["theta"], "--url=" + url, "register"]
        self.assertTrue(cli.main(args))
    def test_ping(self):
        # Register first so the subsequent ping has a known address.
        args = ["--address=" + addresses["iota"], "--url=" + url, "register"]
        self.assertTrue(cli.main(args))
        args = ["--address=" + addresses["iota"], "--url=" + url, "ping"]
        self.assertTrue(cli.main(args))
    def test_no_command_error(self):
        # argparse exits the process when no sub-command is given.
        def callback():
            cli.main(["--address=" + addresses["lambda"]])
        self.assertRaises(SystemExit, callback)
    def test_input_error(self):
        # Non-numeric --limit must surface as a ValueError.
        def callback():
            cli.main([
                "--address=" + addresses["mu"],
                "--url=" + url,
                "poll",
                "--register_address",
                "--delay=5",
                "--limit=xyz"
            ])
        self.assertRaises(ValueError, callback)
    def test_api_error(self):
        def callback():
            cli.main(["--address=xyz", "--url=" + url, "register"])
        self.assertRaises(exceptions.InvalidAddress, callback)
# Run the integration suite when executed directly.
if __name__ == '__main__':
    unittest.main()
|
import inspect
import logging
from functools import wraps
# Module logger: INFO+ records go to logs/app.client.log in UTF-8.
# NOTE(review): FileHandler assumes the logs/ directory already exists.
logger = logging.getLogger("app.client")
logger.setLevel(logging.INFO)
fh = logging.FileHandler("logs/app.client.log", encoding="utf-8")
formatter = logging.Formatter("%(asctime)s %(levelname)s %(message)s")
fh.setFormatter(formatter)
logger.addHandler(fh)
def log(level="info"):
    """Decorator that logs every call of the wrapped function.

    `level` selects the record severity, "info" (default) or "error".
    Calls made from inside another function also record the caller's name.
    The no-op statement `level == "error"` in the final branch (a bare
    comparison whose result was discarded) has been removed.
    """
    def decorator(func):
        @wraps(func)
        def wrapped(*args, **kwargs):
            name = func.__name__
            # If another function called us, capture its name for the record;
            # top-level calls show up as "<module>".
            main_function = inspect.stack()[1][3]
            result = func(*args, **kwargs)
            if main_function != "<module>":
                if level == "info":
                    logger.info(f"Функция {name} вызвана из функции {main_function} с аргументами {args},{kwargs}, результат {result}")
                if level == "error":
                    logger.error(
                        f"Функция {name} вызвана из функции {main_function} с аргументами {args},{kwargs}, получена ошибка {result}"
                    )
            elif level == "info":
                logger.info(f"Функция {name} вызвана с аргументами {args},{kwargs}, результат {result}")
            elif level == "error" and result is None:
                # An "error"-level call without a failure payload is recorded
                # at info severity.
                logger.info(f"Функция {name} вызвана с аргументами {args},{kwargs}")
            else:
                logger.error(f"Функция {name} вызвана с аргументами {args},{kwargs}, получена ошибка {result}")
            return result
        return wrapped
    return decorator
|
from __future__ import absolute_import
import collections, json, re
from django.contrib.contenttypes.models import ContentType
from django.db import transaction
class JSONLD(object):
    """Maps a JSON-LD/RDFa payload onto a registered Django model instance."""

    # Subjects look like "</<content_type_id>/<object_id>/>".
    subject_re = re.compile(r'^</(?P<content_type>\d+)/(?P<object_id>.+)/>$')
    rdfa_re = re.compile(r'^<http://viejs.org/ns/(?P<property>.*)>$')
    content_re = re.compile(r'^content-(?P<section>[A-Za-z]\w*)-(?P<oid>\d+)')
    # Content types that may be edited through this interface (see register_model).
    registered_content_types = set()

    def __init__(self, d):
        """Validate the JSON-LD dict and resolve its @subject to a model object.

        Raises ValueError for any structural problem or unregistered type.
        """
        self.rdfa = d.copy()
        if not self.rdfa.get('@type') == "<http://www.w3.org/2002/07/owl#Thing>":
            raise ValueError("Not valid JSON-LD - no @type found")
        del self.rdfa['@type']
        subject = self.rdfa.get("@subject")
        if not subject:
            raise ValueError("Not valid JSON-LD - no @subject found")
        del self.rdfa['@subject']
        m = self.subject_re.match(subject)
        if not m:
            raise ValueError("Couldn't understand @subject")
        md = m.groupdict()
        content_type = ContentType.objects.get_for_id(md['content_type'])
        if content_type not in self.registered_content_types:
            raise ValueError("This contenttype not available through JSONLD")
        self.object = content_type.get_object_for_this_type(pk=md['object_id'])

    @staticmethod
    def rdfa_property(property):
        """Return the RDFa property URI for `property`.

        Bug fix: the closing '>' was missing, so the produced URI could
        never match `rdfa_re` above.
        """
        return "<http://viejs.org/ns/{0}>".format(property)

    @classmethod
    def from_json(cls, body):
        """Parse a JSON request body and build a JSONLD wrapper from it."""
        try:
            d = json.loads(body)
        except ValueError:
            raise ValueError("Syntactically invalid JSON")
        if not isinstance(d, dict):
            raise ValueError("Not valid JSON-LD - not a dict")
        return cls(d)

    @classmethod
    def register_model(cls, model):
        """Allow instances of `model` to be updated through JSONLD payloads."""
        cls.registered_content_types.add(ContentType.objects.get_for_model(model))

    @transaction.commit_on_success()
    def update_model(self, save=False):
        """Copy the parsed attributes onto the target object, optionally saving.

        NOTE(review): transaction.commit_on_success was removed in Django
        1.8 -- replace with transaction.atomic when upgrading.
        """
        attributes = RDFAttributes.from_rdfa(self.rdfa)
        content = attributes.pop('content')
        for section, l in content.items():
            s = getattr(self.object.content, section)
            assert len(l) == len(s)
            # should have done earlier checks to prevent us getting this far
            for (i, c) in enumerate(l):
                cm = s[i]  # one contentmodel instance
                cm.text = c
                if save:
                    cm.save()
        for k, v in attributes.items():
            setattr(self.object, k, v)
        if save:
            self.object.save()

    def save(self):
        self.update_model(save=True)
class RDFAttributes(dict):
    """Dict of model attributes parsed from an RDFa property mapping."""

    @classmethod
    def from_rdfa(cls, rdfa):
        """Parse raw rdfa key/value pairs into plain attributes plus ordered content lists.

        Content properties ("content-<section>-<oid>") are collected per
        section and converted to lists ordered by oid.  Raises ValueError
        on unrecognised keys or sections with missing indices (the debug
        print statements that preceded that raise have been removed).
        """
        attributes = { "content": collections.defaultdict(dict) }
        for (k, v) in rdfa.items():
            m = JSONLD.rdfa_re.match(k)
            if not m:
                raise ValueError("Could not understand rdfa: {0}".format(k))
            property = m.groupdict()["property"]
            if property.startswith('content'):
                m = JSONLD.content_re.match(property)
                if not m:
                    raise ValueError("Could not understand content id: {0}".format(m))
                md = m.groupdict()
                attributes['content'][md['section']][int(md['oid'])] = v
            else:
                attributes[property] = v
        # Sanity-check each content section (indices must be 0..len-1) and
        # turn its {index: value} dict into an ordered list.
        for (section, d) in attributes['content'].items():
            if sorted(d.keys()) != list(range(len(d))):
                raise ValueError("Some content missing from rdfa content: {0}".format(section))
            attributes['content'][section] = [v for (k, v) in sorted(d.items())]
        return cls(**attributes)
|
# --------------------------------------------------------
# PYTHON PROGRAM
# Here is where we are going to define our set of...
# - Imports
# - Global Variables
# - Functions
# ...to achieve the functionality required.
# When executing > python 'this_file'.py in a terminal,
# the Python interpreter will load our program,
# but it will execute nothing yet.
# --------------------------------------------------------
import sys
import codecs
def process(line, l_p):
    """Map one pageview log line to a (language-or-project, count) key.

    l_p selects per-language (True) or per-project (False) aggregation.
    Bug fix: the body previously read the module-level global
    `per_language_or_project` instead of its own `l_p` parameter, so the
    argument was silently ignored.  The unused `key = ()` initialiser and
    the bare `except:` were also tightened.
    """
    word = line.split()
    temp = word[0]
    if '.' in temp:
        temp = word[0].split('.')[1]
        if l_p:
            # Per-language: keep the language prefix before the dot.
            temp = word[0].split('.')[0]
    elif not l_p:
        # No project suffix: the counts belong to plain "wikipedia".
        temp = "wikipedia"
    try:
        key = (temp, int(word[-2]))
    except (ValueError, IndexError):
        key = (temp, int(word[1]))
    return key
# ------------------------------------------
# FUNCTION my_main
# ------------------------------------------
def my_main(dataset_dir, o_file_dir, per_language_or_project):
    """Aggregate pageview counts per language or per project and save the result.

    NOTE(review): relies on the Databricks notebook globals `dbutils` and
    `sc` (SparkContext) being in scope -- confirm the execution context.
    """
    # 1. We remove the solution directory, to rewrite into it
    dbutils.fs.rm(o_file_dir, True)
    inputRDD = sc.textFile(dataset_dir)
    inputRDD.persist()
    # Grand total of all view counts, used for the percentage column below.
    total = inputRDD.map(lambda x: int(x.split()[-2])).sum()
    mapRDD = inputRDD.map(lambda x: process(x, per_language_or_project))
    # combineByKey accumulates (sum_of_views, line_count) per key.
    eachRDD = mapRDD.combineByKey(lambda value: (value, 1),
                                  lambda x, value: (x[0] + value, x[1] + 1),
                                  lambda x, y: (x[0] + y[0], x[1] + y[1]))
    # Emit (key, (total_views, percentage_of_all_views)).
    solutionRDD = eachRDD.map(lambda x: (x[0], (x[1][0], x[1][0] / total * 100)))
    solutionRDD.saveAsTextFile(o_file_dir)
# ---------------------------------------------------------------
# PYTHON EXECUTION
# This is the main entry point to the execution of our program.
# It provides a call to the 'main function' defined in our
# Python program, making the Python interpreter to trigger
# its execution.
# ---------------------------------------------------------------
# Notebook entry point: True aggregates per language, False per project.
if __name__ == '__main__':
    dataset_dir = "/FileStore/tables/my_dataset/"
    o_file_dir = "/FileStore/tables/my_result/"
    per_language_or_project = True  # True for language and False for project
    my_main(dataset_dir, o_file_dir, per_language_or_project)
|
# Utils
from Utils.file import write_json
from Utils.preprocessing import remove_seen
def inference(question_data, model, result_path):
    """Run like/dislike inference for one question and separate confident picks.

    Items recommended from the like playlist but not the dislike playlist
    are the confident ("true like") result, which is also written to
    result_path as JSON; items recommended by both are returned separately
    as the ambiguous "maybe like" result.
    """
    question = question_data[0]
    like = model.inference([question['like']], save=False)[0]
    dislike = model.inference([question['dislike']], save=False)[0]

    # Confident picks: recommended for the likes, absent from the dislikes.
    true_like = {
        'id': like['id'],
        'songs': remove_seen(dislike['songs'], like['songs']),
        'tags': remove_seen(dislike['tags'], like['tags']),
    }
    # Ambiguous picks: recommended by both inference passes.
    maybe_like = {
        'id': like['id'],
        'songs': list(set(like['songs']) & set(dislike['songs'])),
        'tags': list(set(like['tags']) & set(dislike['tags'])),
    }
    write_json(true_like, result_path)
    return true_like, maybe_like
def multi_lists(length, q_dataloader, model, result_path, id2song_dict, id2tag_dict, num_songs=192019):
    """Run inference `length` times and collect the (true_like, maybe_like) pairs.

    Bug fix: inference() takes only (question_data, model, result_path);
    the previous call passed the dict/num_songs arguments as well, raising
    TypeError on every invocation.  The extra parameters are kept in the
    signature for backward compatibility with existing callers.
    """
    return [inference(q_dataloader, model, result_path) for _ in range(length)]
import requests
from datetime import datetime
# NOTE(review): hard-coded API key -- move it to an environment variable.
api_key = '758093beb3bd2776951491e29bcab162'
city = input("Enter the city name: ")
# Current-weather endpoint; the API reports temperature in Kelvin.
complete_api_link = "https://api.openweathermap.org/data/2.5/weather?q="+city+"&appid="+api_key
api_link = requests.get(complete_api_link)
api_data = api_link.json()
#variables to store and display data
temperature=((api_data['main']['temp'])-273.15)  # Kelvin -> Celsius
weather_desc=api_data['weather'][0]['description']
humidity=api_data['main']['humidity']
wind_speed=api_data['wind']['speed']
date_time = datetime.now().strftime("%d %b %Y | %I:%M:%S %p")
#print the data
print ("-------------------------------------------------------------")
print ("Weather Stats for - {} || {}".format(city.upper(), date_time))
print ("-------------------------------------------------------------")
print("Current weather description : ",weather_desc)
print("Currrent temp : {:.2f} deg C".format(temperature))
print("Current humidity : ",humidity,'%')
print("Current wind speed : ",wind_speed,'kmph')
#saving in txt file -- append mode keeps a running history per lookup
with open('weather_data.txt', 'a') as f:
    f.write("-------------------------------------------------------------"+"\n")
    f.write("Weather Stats for - {} || {}".format(city.upper(), date_time+"\n"))
    f.write("-------------------------------------------------------------"+"\n")
    f.write("Current weather description : "+str(weather_desc)+"\n")
    f.write("Currrent temp : {:.2f} deg C".format(temperature)+"\n")
    f.write("Current humidity : "+str(humidity)+'%'+"\n")
    f.write("Current wind speed : "+str(wind_speed)+'kmph'+"\n")
    f.close()  # NOTE(review): redundant inside `with`; the context manager closes f
|
from hiveminder.flower import Flower
from hiveminder.utils import (distance_between_hex_cells, nearest_hex_cell,
furthest_hex_cell, apply_command_and_advance,
is_on_course_with)
import pytest
from hiveminder.game_params import DEFAULT_GAME_PARAMETERS
# Hex-grid distance is symmetric, so every case is checked in both directions.
@pytest.mark.parametrize("col1, row1, col2, row2, distance", [(5, 5, 5, 5, 0),
                                                              (5, 5, 6, 4, 1),
                                                              (5, 5, 6, 5, 1),
                                                              (5, 5, 5, 6, 1),
                                                              (5, 5, 4, 5, 1),
                                                              (5, 5, 5, 4, 1),
                                                              (5, 5, 4, 4, 1),
                                                              (4, 4, 5, 4, 1),
                                                              (4, 4, 4, 5, 1),
                                                              (4, 4, 3, 4, 1),
                                                              (4, 4, 4, 3, 1),
                                                              (4, 4, 5, 5, 1),
                                                              (4, 4, 3, 5, 1),
                                                              (0, 0, 0, 1, 1),
                                                              (0, 0, 0, 2, 2),
                                                              (0, 0, 0, 3, 3),
                                                              (0, 0, 0, 4, 4),
                                                              (0, 0, 0, 5, 5),
                                                              (0, 0, 0, 6, 6),
                                                              (0, 0, 0, 7, 7),
                                                              (0, 0, 1, 0, 1),
                                                              (0, 0, 2, 0, 2),
                                                              (0, 0, 3, 0, 3),
                                                              (0, 0, 4, 0, 4),
                                                              (0, 0, 5, 0, 5),
                                                              (0, 0, 6, 0, 6),
                                                              (0, 0, 7, 0, 7),
                                                              (0, 0, 1, 1, 1),
                                                              (0, 0, 2, 2, 3),
                                                              (0, 0, 3, 3, 4),
                                                              (0, 0, 4, 4, 6),
                                                              (0, 0, 5, 5, 7),
                                                              (0, 0, 6, 6, 9),
                                                              (0, 0, 7, 7, 10),
                                                              ])
def test_can_find_distance_between_two_tiles(col1, row1, col2, row2, distance):
    assert distance == distance_between_hex_cells((col1, row1), (col2, row2))
    assert distance == distance_between_hex_cells((col2, row2), (col1, row1))
# nearest_hex_cell / furthest_hex_cell over empty, single and multi-candidate lists.
def test_find_nearest_returns_none_if_no_cells():
    assert nearest_hex_cell((1, 1), []) is None
def test_find_nearest_returns_nearest_of_one():
    assert nearest_hex_cell((1, 1), [(1, 2)]) == (1, 2)
def test_find_nearest_returns_nearest_of_two():
    assert nearest_hex_cell((1, 1), [(1, 2), (5, 5)]) == (1, 2)
def test_find_nearest_returns_nearest_of_three():
    assert nearest_hex_cell((1, 1), [(1, 2), (5, 5), (100, 100)]) == (1, 2)
def test_find_furthest_returns_none_if_no_cells():
    assert furthest_hex_cell((1, 1), []) is None
def test_find_furthest_returns_furthest_of_one():
    assert furthest_hex_cell((1, 1), [(1, 2)]) == (1, 2)
def test_find_furthest_returns_furthest_of_two():
    assert furthest_hex_cell((1, 1), [(1, 2), (5, 5)]) == (5, 5)
def test_find_furthest_returns_furthest_of_three():
    assert furthest_hex_cell((1, 1), [(1, 2), (5, 5), (100, 100)]) == (100, 100)
# apply_command_and_advance scenarios: the inflight tuples are
# (kind, col, row, heading, energy, game_params, extra).
def test_apply_command_and_advance_no_command_nothing_happens():
    crashed, landed, lost = apply_command_and_advance(board_width=10,
                                                     board_height=10,
                                                     hives=[],
                                                     flowers=[],
                                                     inflight={"abee": ("Bee", 1, 1, 0, 100, DEFAULT_GAME_PARAMETERS._asdict(), 0)},
                                                     turn_num=0,
                                                     cmd=None)
    assert not crashed['collided']
    assert not crashed['exhausted']
    assert not crashed['headon']
    assert not crashed['seeds']
    assert not landed
    assert not lost
# Heading 180 from the corner carries the bee off the board edge.
def test_apply_command_and_advance_no_command_bee_leaves_board():
    crashed, landed, lost = apply_command_and_advance(board_width=10,
                                                     board_height=10,
                                                     hives=[],
                                                     flowers=[],
                                                     inflight={"abee": ("Bee", 0, 0, 180, 100, DEFAULT_GAME_PARAMETERS._asdict(), 0)},
                                                     turn_num=0,
                                                     cmd=None)
    assert not crashed['collided']
    assert not crashed['exhausted']
    assert not crashed['headon']
    assert not crashed['seeds']
    assert not landed
    assert lost == {"abee"}
# Zero energy with no flower in reach kills the bee.
def test_apply_command_and_advance_no_command_bee_dies_of_exhaustion():
    crashed, landed, lost = apply_command_and_advance(board_width=10,
                                                     board_height=10,
                                                     hives=[],
                                                     flowers=[],
                                                     inflight={"abee": ("Bee", 0, 0, 0, 0, DEFAULT_GAME_PARAMETERS._asdict(), 0)},
                                                     turn_num=0,
                                                     cmd=None)
    assert not crashed['collided']
    assert crashed['exhausted'] == {"abee"}
    assert not crashed['headon']
    assert not crashed['seeds']
    assert not landed
    assert not lost
# A hive directly on the bee's path means it lands rather than flying on.
def test_apply_command_and_advance_no_command_bee_lands():
    crashed, landed, lost = apply_command_and_advance(board_width=10,
                                                     board_height=10,
                                                     hives=[(0, 1)],
                                                     flowers=[],
                                                     inflight={"abee": ("Bee", 0, 0, 0, 100, DEFAULT_GAME_PARAMETERS._asdict(), 0)},
                                                     turn_num=0,
                                                     cmd=None)
    assert not crashed['collided']
    assert not crashed['exhausted']
    assert not crashed['headon']
    assert not crashed['seeds']
    assert landed == {"abee"}
    assert not lost
def test_apply_command_and_advance_no_command_bee_saved_from_exhaustion_by_flower():
crashed, landed, lost = apply_command_and_advance(board_width=10,
board_height=10,
hives=[],
flowers=[Flower(0, 1, DEFAULT_GAME_PARAMETERS, 1, 0, 1000).to_json()],
inflight={"abee": ("Bee", 0, 0, 0, 0, DEFAULT_GAME_PARAMETERS._asdict(), 0)},
turn_num=0,
cmd=None)
assert not crashed['collided']
assert not crashed['exhausted']
assert not crashed['headon']
assert not crashed['seeds']
assert not landed
assert not lost
def test_apply_command_and_advance_no_command_two_bees_crash_headon():
crashed, landed, lost = apply_command_and_advance(board_width=10,
board_height=10,
hives=[],
flowers=[],
inflight={"abee": ("Bee", 0, 0, 0, 100, DEFAULT_GAME_PARAMETERS._asdict(), 0),
"anotherbee": ("Bee", 0, 1, 180, 100, DEFAULT_GAME_PARAMETERS._asdict(), 0)},
turn_num=0,
cmd=None)
assert not crashed['collided']
assert not crashed['exhausted']
assert crashed['headon'] == {"abee", "anotherbee"}
assert not crashed['seeds']
assert not landed
assert not lost
def test_apply_command_and_advance_no_command_two_bees_crash():
crashed, landed, lost = apply_command_and_advance(board_width=10,
board_height=10,
hives=[],
flowers=[],
inflight={"abee": ("Bee", 0, 0, 0, 100, DEFAULT_GAME_PARAMETERS._asdict(), 0),
"anotherbee": ("Bee", 0, 2, 180, 100, DEFAULT_GAME_PARAMETERS._asdict(), 0)},
turn_num=0,
cmd=None)
assert crashed['collided'] == {"abee", "anotherbee"}
assert not crashed['exhausted']
assert not crashed['headon']
assert not crashed['seeds']
assert not landed
assert not lost
def test_apply_command_and_advance_command_stops_two_bees_crashing():
    """Same setup as the two-bee collision test, but steering 'anotherbee'
    to heading 120 avoids the crash entirely."""
    crashed, landed, lost = apply_command_and_advance(board_width=10,
                                                      board_height=10,
                                                      hives=[],
                                                      flowers=[],
                                                      inflight={"abee": ("Bee", 0, 0, 0, 100, DEFAULT_GAME_PARAMETERS._asdict(), 0),
                                                                "anotherbee": ("Bee", 0, 2, 180, 100, DEFAULT_GAME_PARAMETERS._asdict(), 0)},
                                                      turn_num=0,
                                                      cmd=dict(entity="anotherbee", command=120))
    assert not crashed['collided']
    assert not crashed['exhausted']
    assert not crashed['headon']
    assert not crashed['seeds']
    assert not landed
    assert not lost
@pytest.mark.parametrize("heading", [60, 120, 180, -120, -60])
def test_is_on_course_with_itself(heading):
    """A position is always on course with itself, whatever the heading."""
    assert is_on_course_with((0, 0), heading, (0, 0))
@pytest.mark.parametrize("y", range(1, 10))
def test_is_on_course_with(y):
    """Heading 0 from the origin is on course with every point straight up
    the y axis."""
    assert is_on_course_with((0, 0), 0, (0, y))
@pytest.mark.parametrize("heading", [60, 120, 180, -120, -60])
@pytest.mark.parametrize("y", range(1, 10))
def test_is_not_on_course_with(heading, y):
    """Any of these non-zero headings from the origin misses every point on
    the positive y axis."""
    assert not is_on_course_with((0, 0), heading, (0, y))
|
from PyQt5.QtWidgets import *
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from delete_dialog import Ui_delete_dialog as Ui_Delete_Dialog
from add_book import Ui_Dialog as Ui_Add_Dialog
from Edit_dialog import Ui_Dialog as Ui_Edit_Dialog
from library import Ui_MainWindow
import my_functions as lib
class Add_Dialog(QDialog):
    """Modal dialog wrapping the generated add-book form (Ui_Add_Dialog)."""

    def __init__(self, parent=None):
        super(Add_Dialog, self).__init__(parent)
        self.ui = Ui_Add_Dialog()
        self.ui.setupUi(self)
        # Close the dialog with the standard accepted/rejected results.
        self.ui.buttonBox.accepted.connect(self.accept)
        self.ui.buttonBox.rejected.connect(self.reject)
class Edit_Dialog(QDialog):
    """Modal dialog wrapping the generated edit-book form (Ui_Edit_Dialog)."""

    def __init__(self, parent=None):
        super(Edit_Dialog, self).__init__(parent)
        self.ui = Ui_Edit_Dialog()
        self.ui.setupUi(self)
        # Close the dialog with the standard accepted/rejected results.
        self.ui.buttonBox.accepted.connect(self.accept)
        self.ui.buttonBox.rejected.connect(self.reject)
class Delete_Dialog(QDialog):
    """Modal confirmation dialog wrapping the generated delete form
    (Ui_Delete_Dialog)."""

    def __init__(self, parent=None):
        super(Delete_Dialog, self).__init__(parent)
        self.ui = Ui_Delete_Dialog()
        self.ui.setupUi(self)
        # Close the dialog with the standard accepted/rejected results.
        self.ui.buttonBox.accepted.connect(self.accept)
        self.ui.buttonBox.rejected.connect(self.reject)
class MainWindow(QMainWindow, Ui_MainWindow):
    """Main library window.

    Wires the add/edit/delete dialogs and the three book tables
    (issued / unissued / all) to the ``my_functions`` persistence layer
    (imported as ``lib``).  Improvements over the original: the triplicated
    table-population loop is factored into ``_populate_table``, the
    duplicated form-reading code into ``_book_from_ui``, and ``== None``
    comparisons replaced with ``is None``.  All public method names and
    signatures are unchanged.
    """

    def __init__(self, parent=None):
        super(MainWindow, self).__init__(parent)
        self.setupUi(self)
        self.new_book_btn.pressed.connect(self.show_add_dialog)
        # Initial population of all three tables.
        self.load_issued_table()
        self.load_unissued_table()
        self.load_all_books_table()
        # edit
        self.edit_issued.clicked.connect(
            lambda: self.edit_book(self.issued_books_table))
        self.edit_unissued.clicked.connect(
            lambda: self.edit_book(self.unissued_books_table))
        # delete
        self.delete_issued.pressed.connect(
            lambda: self.delete_book(self.issued_books_table))
        self.delete_unissued.pressed.connect(
            lambda: self.delete_book(self.unissued_books_table))
        # refresh
        self.refresh_issued.clicked.connect(self.load_issued_table)
        self.refresh_unissued.clicked.connect(self.load_unissued_table)
        self.refresh_btn.clicked.connect(self.load_all_books_table)
        # search
        self.search_btn.clicked.connect(self.search_book)

    @staticmethod
    def _book_from_ui(ui):
        """Collect an add/edit dialog's form fields into a plain book dict."""
        return {
            'id': int(ui.id_spinbox.text()),
            'name': ui.name_input.text(),
            'description': ui.description_input.text(),
            'isbn': ui.isbn_input.text(),
            'page_count': int(ui.page_count_spinbox.text()),
            'issued': ui.yes.isChecked(),
            'author': ui.author_input.text(),
            'year': int(ui.year_spinbox.text()),
        }

    def _populate_table(self, table, books):
        """Fill *table* with one row per book; every cell is read-only."""
        table.setRowCount(len(books))
        for row, book in enumerate(books):
            record = book.to_dict()
            for col, attr in enumerate(record):
                table.setItem(row, col, QTableWidgetItem(str(record[str(attr)])))
                table.item(row, col).setFlags(
                    Qt.ItemIsSelectable | Qt.ItemIsEnabled
                )

    def save_existing_book(self, ui):
        """Persist the edits made in the edit dialog."""
        lib.update_book(self._book_from_ui(ui))

    def edit_book(self, table):
        """Open the edit dialog pre-filled with the selected row's book."""
        selected_row = table.currentRow()
        if selected_row != -1:
            # Column 0 holds the book id.
            book_id = int(table.item(selected_row, 0).text())
            book = lib.find_book(book_id)
            dialog = Edit_Dialog()
            dialog.ui.id_spinbox.setValue(int(book.id))
            dialog.ui.name_input.setText(book.name)
            dialog.ui.description_input.setText(book.description)
            dialog.ui.isbn_input.setText(book.isbn)
            dialog.ui.page_count_spinbox.setValue(int(book.page_count))
            dialog.ui.yes.setChecked(book.issued)
            if not book.issued:
                dialog.ui.no.setChecked(True)
            dialog.ui.author_input.setText(book.author)
            dialog.ui.year_spinbox.setValue(int(book.year))
            dialog.ui.buttonBox.accepted.connect(
                lambda: self.save_existing_book(dialog.ui))
            dialog.exec()
            self.load_issued_table()
            self.load_unissued_table()

    def save_new_book(self, ui):
        """Validate the add dialog's fields and persist a new book.

        Returns False (and saves nothing) when any field is empty.
        """
        new_book = self._book_from_ui(ui)
        for value in new_book.values():
            if value is None or str(value) == "":
                return False
        lib.add_book(new_book)
        self.load_issued_table()
        self.load_unissued_table()

    def delete_book(self, table):
        """Ask for confirmation, then delete the selected row's book."""
        selected_row = table.currentRow()
        if selected_row != -1:
            book_id = int(table.item(selected_row, 0).text())
            dialog = Delete_Dialog()
            dialog.ui.buttonBox.accepted.connect(
                lambda: lib.delete_book(book_id))
            dialog.exec()
            self.load_issued_table()
            self.load_unissued_table()

    def search_book(self):
        """Look up a book by the id typed in the search box and display it."""
        if self.search_input.text() != "":
            book = lib.find_book(int(self.search_input.text()))
            if book is not None:
                self._populate_table(self.search_table, [book])

    def load_issued_table(self):
        """Refresh the issued-books table from storage."""
        self._populate_table(self.issued_books_table, lib.get_issued_books())

    def load_unissued_table(self):
        """Refresh the unissued-books table from storage."""
        self._populate_table(self.unissued_books_table, lib.get_unissued_books())

    def load_all_books_table(self):
        """Refresh the all-books table from storage."""
        self._populate_table(self.all_books_table, lib.load_books())

    def show_add_dialog(self):
        """Open the add-book dialog; save the new book on accept."""
        input_dlg = Add_Dialog()
        input_dlg.ui.buttonBox.accepted.connect(
            lambda: self.save_new_book(input_dlg.ui)
        )
        input_dlg.exec()
# Application entry point: build the Qt application, show the main window
# and enter the event loop.
app = QApplication([])
window = MainWindow()
window.show()
app.exec()
|
from datetime import datetime, timedelta
from threading import Timer
import requests
import json
import webbrowser
from pprint import pprint
import arrow
#testing boy
# Telegram bot endpoints; the placeholder 'large string' must be replaced
# with the real bot token for sendMessage to work.
TELEGRAM_URL = 'https://api.telegram.org/bot{}'.format('large string') # the large string is the bmsqbot token
TELEGRAM_SEND_MESSAGE_URL = TELEGRAM_URL + '/sendMessage?chat_id={}&text={}'
import time
# Query window: now .. now (zero-width window keeps the response small).
start = arrow.utcnow()
end = arrow.utcnow().shift(hours= +0)
#'''
# Fetch surf conditions for (lat 39, lng -74) from the Stormglass API.
# NOTE(review): the Authorization header is empty -- a real API key is
# required for this request to succeed.
response = requests.get(
    "https://api.stormglass.io/v2/weather/point",
    params = {
        'lat' : 39,
        'lng' : -74,
        'params' : ','.join(['waveHeight', 'windDirection', 'windSpeed', 'wavePeriod']),
        'start' : start,
        'end' : end
    },
    headers = {
        "Authorization" : ""
    }
)
oceanInfo = response.json()
message = 'Bomb Squad Alarm: '
message = message + arrow.now().format('MM/DD/YYYY HH:mm:ss') + '\n'
# Average each metric over the per-source values of the first hour entry.
aveDict = dict()
for ele in oceanInfo.get('hours')[0]:
    if ele != 'time':
        lis = oceanInfo.get('hours')[0].get(ele).values()
        total = sum(lis)
        times = len(lis)
        average = total / times
        aveDict[ele] = average
        #oceanInfo.get('hours')[0].get(ele).values()
# NOTE(review): alarm is unconditionally True -- no threshold is checked
# before sending, so the message is always sent.
alarm = True
for ele in aveDict:
    if ele == 'waveHeight':
        # 3.28084 is the metres-to-feet conversion factor.
        message = message + "Wave Height : " + str(aveDict.get(ele)*3.28084) + "ft\n"
    if ele == 'windDirection':
        message = message + "Wind Direction : " + str(aveDict.get(ele)) + "degrees\n"
    if ele == 'windSpeed':
        # 2.23694 is the m/s-to-mph conversion factor.
        message = message + "Wind Speed : " + str(aveDict.get(ele)*2.23694) + "mph\n"
    if ele == 'wavePeriod':
        message = message + "Wave Period : " + str(aveDict.get(ele)) + "seconds\n"
if alarm:
    # Send the summary to Telegram; 'int chat ID' is a placeholder chat id.
    requests.get(TELEGRAM_SEND_MESSAGE_URL.format( 'int chat ID' , message))
|
from fastapi import Form
import json
import requests_async as requests
from config import configs
from serving import Controller, mapping
from serving.controller import json_wrapper
class ReviewPredictionController(Controller):
    """HTTP front-end for the review star-rating model.

    Proxies POST /review/predict to the backing model service whose
    host/port come from configs['review_prediction'].
    """

    def __init__(self, fast_api):
        super().__init__(fast_api, base_url='/review')
        mac_configs = configs['review_prediction']
        # Base URL of the backing model service, e.g. http://host:port
        self._host_url = 'http://{}:{}'.format(mac_configs['host'], mac_configs['port'])

    @mapping('predict', methods=['POST'])
    async def classify(self, *, sentence: str = Form(...)):
        """Forward the form-posted sentence to the model's /predict endpoint
        and wrap its JSON reply: {'stars': ...} on success, otherwise the
        backend's error message, always preserving the status code."""
        params = {'sentence': sentence}
        response = await requests.post(self._host_url + '/predict', params=params)
        # parse response
        response_dict = json.loads(response.text)
        data = None
        message = ''
        if response.ok:
            data = {
                'stars': response_dict['stars'],
            }
        else:
            message = response_dict['message']
        return json_wrapper(data, status_code=response.status_code, message=message)
|
from django.shortcuts import render, redirect
from django.contrib.auth.models import User
from django.contrib.auth.forms import AuthenticationForm
from .forms import RegisterForm
from django.contrib.auth import authenticate, login as auth_login, logout as auth_logout
# Create your views here.
def login(request):
    """Log a user in; authenticated users are sent straight to the calculator.

    On POST, checks the submitted credentials; on failure re-renders the
    login page with an error message and the bound form.
    """
    error = ''
    if request.user.is_authenticated:
        return redirect('core:calculation')
    if request.method == 'POST':
        username = request.POST['username']
        password = request.POST['password']
        # Verify the credentials against the user database.
        user = authenticate(request, username=username, password=password)
        if user is not None:
            auth_login(request, user)
            return redirect('core:calculation')
        else:
            error = 'Invalid Username or Password'
            # Bug fix: AuthenticationForm's first positional argument is the
            # request object; the submitted data must go in ``data=``.  The
            # original passed request.POST as the request, leaving the form
            # unbound.
            form = AuthenticationForm(request, data=request.POST)
            return render(request, 'accounts/login.html', {'form': form, 'error': error})
    else:
        form = AuthenticationForm()
        return render(request, 'accounts/login.html', {'form': form})
def signup(request):
    """Register a new account, then send the user to the login page."""
    if request.user.is_authenticated:
        return redirect('core:calculation')
    if request.method != 'POST':
        # Plain GET: show an empty registration form.
        return render(request, 'accounts/signup.html', {'form': RegisterForm()})
    form = RegisterForm(request.POST)
    if not form.is_valid():
        # Re-render with the bound form so field errors are shown.
        return render(request, 'accounts/signup.html', {'form': form})
    form.save()
    return redirect('accounts:login')
def logout(request):
    """End the current session and return to the login page."""
    auth_logout(request)
    return redirect('accounts:login')
|
# -*- coding: utf-8 -*-
# @Time : 2020/12/12 18:47
# @Author : fcj11
# @Email : yangfit@126.com
# @File : browser.py
# @Project : crm自动化测试
from selenium import webdriver
def chrome():  # Google Chrome
    """Create a maximised Chrome WebDriver with a 30-second implicit wait."""
    driver = webdriver.Chrome()
    driver.maximize_window()
    driver.implicitly_wait(30)
    return driver
def orderinglogicForThreadFromEXPID(i):
    """Sort key: the integer experiment id, i.e. the second-to-last
    '-'-separated field of the file name (…/L-…-<expid>-<rate>.dat).
    Python 2 code (print statement)."""
    # Earlier variant sorted by id minus rate; kept for reference.
    # id1=int(s1.split("/")[-1].split("-")[6])
    # ratelist1=s1.split("/")[-1].split("-")[7].split(".")
    # ratelist1.pop()
    # rate1=float(".".join(ratelist1))
    #
    # return id1-rate1
    expID = i.split("/")[-1].split("-")[-2]
    print expID
    return int(expID)
def orderinglogic(i):
    """Sort key: 10 divided by the rate encoded in the file name's last
    '-'-separated field, which looks like '<int>.<frac>.dat'."""
    last_field = i.split("/")[-1].split("-")[-1]
    pieces = last_field.split(".")
    rate = float(pieces[0] + "." + pieces[1])
    return 10 / rate
# Latency box-plot generation script (Python 2).  Reads latency .dat files
# selected by expID, appends describe() summaries to a text report, and
# builds one boxplot series per file.
import pandas as pd
import numpy as np
import matplotlib
import os
matplotlib.use('Agg')  # Must be before importing matplotlib.pyplot or pylab!
import matplotlib.pyplot as plt
import sys
import os.path
# outDirForBoxplot="/Users/anshushukla/PycharmProjects/DataAnlytics1/scheduler/float/log1/plot/"
# expID="*-0.29*"
# print expID
# cmd="ls -1 "+"/Users/anshushukla/PycharmProjects/DataAnlytics1/scheduler/float/log1/dat/*-"+expID+"-*.dat"
# outDirForBoxplot="/Users/anshushukla/PycharmProjects/DataAnlytics1/scheduler/parsing/logwithstringxml/plot/"
# expID="*-0.27.*"
# print expID
# cmd="ls -1 "+"/Users/anshushukla/PycharmProjects/DataAnlytics1/scheduler/parsing/logwithstringxml/dat/*"+expID+"*.dat"
outDirForBoxplot="/Users/anshushukla/PycharmProjects/tpctc/FullpaperPlots/log1/plot/"
expID="-13*"
# expID="*-0.11.*"
print expID
# Shell out to ls to glob the matching .dat files.
cmd="ls -1 "+"/Users/anshushukla/PycharmProjects/tpctc/FullpaperPlots/log1/dat/L-*"+expID+"*.dat"
# for out_dir in os.popen(cmd).read().split("\n"):
#     if(len(out_dir)!=0):
#         # print out_dir.split("/")[-1]
#         temp=out_dir.split("/")
#         # print temp
#         temp[-1]="clear-"+temp[-1]
#         newout_dir="/".join(temp)
#
#         cmdforGrep="grep Cpu "+out_dir +"> " +newout_dir
#         print cmdforGrep
#         os.popen(cmdforGrep)
# Collect the non-empty lines of ls output as file paths.
out_dir_list=[]
for out_dir in os.popen(cmd).read().split("\n"):
    if out_dir:
        out_dir_list.append(out_dir)
print out_dir_list
## using x as method declared above
# out_dir_list=sorted(out_dir_list,key=orderinglogic)
# print out_dir_list
# Order the files by the experiment id embedded in their names.
out_dir_list=sorted(out_dir_list,key=orderinglogicForThreadFromEXPID)
print out_dir_list
# exit()
## Full box plot
# Remove any previous summary report so the appends below start fresh.
filename1=outDirForBoxplot + "/Latency-BoxplotDetails-"+expID+".txt"
if os.path.exists(filename1 ):
    print "yes present"
    os.remove(filename1)
p=[]
for i in out_dir_list:
    # Each .dat file is a headerless CSV: msgid, ts1, ts2, latency.
    dSpout = pd.read_csv(i, engine='python', header=None, names=['msgid', 'ts1', 'ts2', 'latency'])
    # print dSpout['ts1']-dSpout['ts1'][0]
    # Rate is the '<int>.<frac>' prefix of the file name's last field.
    rate = i.split("/")[-1].split("-")[-1].split(".")[0] + "." + i.split("/")[-1].split("-")[-1].split(".")[1]
    print rate, 10 / float(rate)
    print dSpout.head(10)
    p.append(dSpout['latency'])
    print dSpout.describe()
    # Append this file's latency summary to the report.
    with open(filename1, "a") as myfile:
        myfile.write(i+"\n\n")
        myfile.write("\t\t" + str(dSpout['latency'].describe()) + "\n")
# exit()
#
fig = plt.figure(1, figsize=(9, 6))
# Create an axes instance
ax = fig.add_subplot(111)
# Create the boxplot
bp = ax.boxplot(p)
ax.set_ylabel('Latency (in millisec.)', color='b')
ax.set_ylim([0,15000])
def slice(i):
    """Tick label for a file: '<expid>-<lastfield>(<int(100/rate)>)'.
    NOTE(review): shadows the builtin slice(); Python 2 (print statement)."""
    rate = i.split("/")[-1].split("-")[-1].split(".")[0] + "." + i.split("/")[-1].split("-")[-1].split(".")[1]
    print rate, 100 / float(rate)
    return i.split("/")[-1].split("-")[-2] + "-" + i.split("/")[-1].split("-")[-1] + "(" + str(int(100 / float(rate))) + ")"
# [-3]+ "-" + x.split("/")[-1].split("-")[-2] +"-"+x.split("/")[-1].split("-")[-1]
print map(slice,out_dir_list)
print len(out_dir_list)
# ax.set_yticklabels(1000,2000,3000,4000,5000,7000,1000,15000,20000)
# ax.set_xticklabels(map(slice,out_dir_list), rotation='90')
# Label the axes: one x tick per file, y grid every 1000 ms.
plt.yticks(np.arange(0, 15000, 1000))
ax.yaxis.grid(which='minor', alpha=0.5)
ax.yaxis.grid(which='major', alpha=0.5)
ax.set_xticklabels(map(slice,out_dir_list), rotation='45',ha='right')
# plt.yticks(np.arange(0, 15000, 1000))
#
# ax.yaxis.grid(which='minor', alpha=0.5)
# ax.yaxis.grid(which='major', alpha=0.5)
fig = matplotlib.pyplot.gcf()
fig.set_size_inches(15.5, 6.5)
#
# Save the figure
fig.savefig(outDirForBoxplot+"FullBoxPlot-Latency-"+ expID +".png", bbox_inches='tight')
import numpy
ARCHIVO_BANCOS = 'bancos.txt'
ARCHIVO_CUENTAS = 'cuentas.txt'
ARCHIVO_NOVEDAD = 'novedad.txt'
def subir_bancos():
    """Read ARCHIVO_BANCOS (comma-separated: codigo,nombre,...) into a
    {codigo: nombre} dict; later duplicate codes overwrite earlier ones."""
    with open(ARCHIVO_BANCOS, 'r') as archivo:
        campos_por_linea = (linea.split(',') for linea in archivo)
        return {campos[0]: campos[1] for campos in campos_por_linea}
def subir_cuentas():
    """Read ARCHIVO_CUENTAS (comma-separated: numero,dato,...) into a
    {numero: dato} dict.

    Bug fix: the original returned the *bancos* mapping (re-read via
    subir_bancos()) instead of the cuentas dict it had just built; the
    now-pointless subir_bancos() call is dropped with it.
    """
    cuentas = {}
    with open(ARCHIVO_CUENTAS, 'r') as f:
        for line in f:
            reg = line.split(',')
            cuentas[reg[0]] = reg[1]
    return cuentas
# Interactive entry point (Python 2: raw_input / print statement).
# NOTE(review): mes and anio are read but never used below.
mes = int(raw_input("Mes: "))
anio = int(raw_input("Anio: "))
print len(subir_bancos())
|
"""
@file
@author John C. Linford (jlinford@paratools.com)
@version 1.0
@brief
This file is part of the TAU Performance System
@section COPYRIGHT
Copyright (c) 2013, ParaTools, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
(1) Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
(2) Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
(3) Neither the name of ParaTools, Inc. nor the names of its contributors may
be used to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
# System modules
import sys
from pkgutil import walk_packages
# TAU modules
import logger
import error
LOGGER = logger.getLogger(__name__)
class UnknownCommandError(error.ConfigurationError):
    """
    Indicates that a specified command is unknown
    """
    # Format template consumed by the error base class: %(value)r is the bad
    # command, %(hint)s the suggestion text.
    message_fmt = """
%(value)r is not a valid TAU command.
%(hint)s"""
    def __init__(self, value, hint="Try 'tau --help'."):
        super(UnknownCommandError, self).__init__(value, hint)
class AmbiguousCommandError(error.ConfigurationError):
    """
    Indicates that a specified partial command is ambiguous
    """
    message_fmt = """
Command %(value)r is ambiguous: %(matches)r
%(hint)s"""
    # NOTE(review): __init__ passes a pre-formatted string as the value, yet
    # message_fmt also expects a %(matches)r key -- confirm which one the
    # ConfigurationError base class actually uses.
    def __init__(self, value, matches, hint="Try 'tau --help'."):
        super(AmbiguousCommandError, self).__init__('Command %s is ambiguous: %s' % (value, matches), hint)
# Lazily populated tree of command modules, rooted at this package's name;
# filled in by getCommands() as subpackages are discovered.
_commands = {__name__: {}}
def getCommands(root=__name__):
    """
    Returns commands at the specified level
    """
    # Walk the nested _commands dict following the dotted-name parts.
    def _lookup(c, d):
        if len(c) == 1: return d[c[0]]
        else: return _lookup(c[1:], d[c[0]])
    # Create any missing tree nodes along the dotted path, importing the
    # module and recording it under '__module__' at the leaf.
    def _walking_import(module, c, d):
        car, cdr = c[0], c[1:]
        if cdr:
            _walking_import(module, cdr, d[car])
        elif not car in d:
            d[car] = {}
            __import__(module)
            d[car]['__module__'] = sys.modules[module]
    command_module = sys.modules[__name__]
    # Discover every submodule under this package; import each one on first
    # sight so the _commands tree is complete before the final lookup.
    for _, module, _ in walk_packages(command_module.__path__, command_module.__name__+'.'):
        try:
            _lookup(module.split('.'), _commands)
        except KeyError:
            _walking_import(module, module.split('.'), _commands)
    return _lookup(root.split('.'), _commands)
def getCommandsHelp(root=__name__):
    """
    Builds listing of command names with short description
    """
    # Group commands by their module-declared GROUP (None = ungrouped);
    # Python 2 code (iteritems).
    groups = {}
    commands = sorted([i for i in getCommands(root).iteritems() if i[0] != '__module__'])
    for cmd, topcmd in commands:
        module = topcmd['__module__']
        # Fall back to a placeholder when a command module lacks a summary.
        descr = getattr(module, 'SHORT_DESCRIPTION', "FIXME: No description")
        group = getattr(module, 'GROUP', None)
        name = '{:<12}'.format(cmd)
        groups.setdefault(group, []).append('  %s %s' % (name, descr))
    parts = []
    for group, members in groups.iteritems():
        if group:
            parts.append(group+' subcommands:')
        else:
            parts.append('subcommands:')
        parts.extend(members)
        parts.append('')
    return '\n'.join(parts)
def executeCommand(cmd, cmd_args=[]):
    """
    Import the command module and run its main routine
    """
    # Resolve possibly-abbreviated command parts against the command tree,
    # e.g. ['pro', 'cre'] -> ['project', 'create'].  Raises
    # UnknownCommandError / AmbiguousCommandError on 0 / >1 matches.
    def _resolve(c, d):
        if not c:
            return []
        car, cdr = c[0], c[1:]
        try:
            matches = [(car, d[car])]
        except KeyError:
            matches = [i for i in d.iteritems() if i[0].startswith(car)]
        if len(matches) == 1:
            return [matches[0][0]] + _resolve(cdr, matches[0][1])
        elif len(matches) == 0:
            raise UnknownCommandError(' '.join(cmd))
        elif len(matches) > 1:
            raise AmbiguousCommandError(' '.join(cmd), [m[0] for m in matches])
    while len(cmd):
        root = '.'.join([__name__] + cmd)
        try:
            main = getCommands(root)['__module__'].main
        except KeyError:
            LOGGER.debug('%r not recognized as a TAU command' % cmd)
            try:
                resolved = _resolve(cmd, _commands[__name__])
            except UnknownCommandError:
                if len(cmd) <= 1:
                    raise # We finally give up
                # Unknown leaf: show the parent command's help instead.
                parent = cmd[:-1]
                LOGGER.debug('Getting help from parent command %r' % parent)
                return executeCommand(parent, ['--help'])
            else:
                LOGGER.debug('Resolved ambiguous command %r to %r' % (cmd, resolved))
                return executeCommand(resolved, cmd_args)
        except AttributeError:
            # Bug fix: InternalError was referenced unqualified, which would
            # itself raise NameError; it lives in the imported error module.
            raise error.InternalError("'main(argv)' undefined in command %r" % cmd)
        else:
            return main(cmd_args)
|
#! /usr/bin/env python
# coding=utf-8
# ================================================================
#
# Author : miemie2013
# Created date: 2020-06-10 10:20:27
# Description : 配置文件。
#
# ================================================================
class YOLOv4_Config_1(object):
    """
    Default YOLOv4 configuration.
    """
    def __init__(self):
        self.algorithm = 'YOLOv4'

        # Custom (VOC-style) dataset -- kept for reference:
        # self.train_path = 'annotation_json/voc2012_train.json'
        # self.val_path = 'annotation_json/voc2012_val.json'
        # self.classes_path = 'data/voc_classes.txt'
        # self.train_pre_path = '../VOCdevkit/VOC2012/JPEGImages/'   # path prefix for training images
        # self.val_pre_path = '../VOCdevkit/VOC2012/JPEGImages/'     # path prefix for validation images
        # COCO dataset
        self.train_path = '../data/annotations/instances_train2017.json'
        # self.train_path = '../data/data7122/annotations/instances_val2017.json'
        self.val_path = '../data/annotations/instances_val2017.json'
        self.classes_path = 'data/coco_classes.txt'
        self.train_pre_path = '../data/train2017/'   # path prefix for training images
        # self.train_pre_path = '../data/data7122/val2017/'   # path prefix for validation images
        self.val_pre_path = '../data/val2017/'       # path prefix for validation images

        # During training, a predicted box whose IoU with every gt is below
        # this threshold is treated as a negative example.
        self.iou_loss_thresh = 0.7

        # Mode: 0 = train from scratch, 1 = resume from a saved model
        # (model_path may be e.g. 'yolov4.h5' or './weights/step00001000.h5').
        self.pattern = 0
        self.lr = 0.0001
        self.batch_size = 32
        # When self.pattern == 1, model_path names the checkpoint to resume from.
        self.model_path = 'weights1/best_model'
        # self.model_path = './weights/1000'

        # ========= misc settings =========
        # Save the model every N steps.
        self.save_iter = 1000
        # Evaluate mAP on the eval set every N steps.
        self.eval_iter = 5000
        # Total number of training steps.
        self.max_iters = 70000

        # Validation.
        # A larger input_shape raises accuracy but lowers speed.
        # self.input_shape = (320, 320)
        self.input_shape = (416, 416)
        #self.input_shape = (608, 608)
        # Score threshold and NMS IoU threshold used during validation.
        self.conf_thresh = 0.001
        self.nms_thresh = 0.45
        # Whether to draw the validation images.
        self.draw_image = False
        # Batch size during validation.
        self.eval_batch_size = 16

        # ============= training-time preprocessing =============
        self.with_mixup = False
        self.context = {'fields': ['image', 'gt_bbox', 'gt_class', 'gt_score']}
        # PadBox
        self.num_max_boxes = 70
        # Gt2YoloTarget
        self.anchor_masks = [[6, 7, 8], [3, 4, 5], [0, 1, 2]]
        self.anchors = [[12, 16], [19, 36], [40, 28],
                        [36, 75], [76, 55], [72, 146],
                        [142, 110], [192, 243], [459, 401]]
        self.downsample_ratios = [32, 16, 8]

        # ============= inference / export =============
        # Model to load for inference.
        self.infer_model_path = 'yolov4'
        # self.infer_model_path = './weights/66000'
        # A larger infer_input_shape raises accuracy but lowers speed.
        # self.infer_input_shape = (320, 320)
        self.infer_input_shape = (416, 416)
        # self.infer_input_shape = (608, 608)
        # Score threshold and NMS IoU threshold used during inference.
        self.infer_conf_thresh = 0.05
        self.infer_nms_thresh = 0.45
        self.infer_keep_top_k = 100
        self.infer_nms_top_k = 100
        # Whether to draw boxes on the images.
        self.infer_draw_image = True
        # self.infer_draw_image = False
class YOLOv3_Config_1(object):
    """
    Default YOLOv3 configuration.
    """
    def __init__(self):
        self.algorithm = 'YOLOv3'

        # Custom (VOC-style) dataset -- kept for reference:
        # self.train_path = 'annotation_json/voc2012_train.json'
        # self.val_path = 'annotation_json/voc2012_val.json'
        # self.classes_path = 'data/voc_classes.txt'
        # self.train_pre_path = '../VOCdevkit/VOC2012/JPEGImages/'   # path prefix for training images
        # self.val_pre_path = '../VOCdevkit/VOC2012/JPEGImages/'     # path prefix for validation images
        # COCO dataset
        self.train_path = '../data/annotations/instances_train2017.json'
        # self.train_path = '../data/data7122/annotations/instances_val2017.json'
        self.val_path = '../data/annotations/instances_val2017.json'
        self.classes_path = 'data/coco_classes.txt'
        self.train_pre_path = '../data/train2017/'   # path prefix for training images
        # self.train_pre_path = '../data/data7122/val2017/'   # path prefix for validation images
        self.val_pre_path = '../data/val2017/'       # path prefix for validation images

        # During training, a predicted box whose IoU with every gt is below
        # this threshold is treated as a negative example.
        self.iou_loss_thresh = 0.7

        # Mode: 0 = train from scratch, 1 = resume from a saved model
        # (model_path may be e.g. 'yolov4.h5' or './weights/step00001000.h5').
        self.pattern = 0
        self.lr = 0.0001
        self.batch_size = 64
        # When self.pattern == 1, model_path names the checkpoint to resume from.
        self.model_path = 'yolov3_r50vd_dcn_obj365_dropblock_iouloss'
        # self.model_path = './weights/1000'

        # ========= misc settings =========
        # Save the model every N steps.
        self.save_iter = 1000
        # Evaluate mAP on the eval set every N steps.
        self.eval_iter = 1000
        # Total number of training steps.
        self.max_iters = 60000

        # Validation.
        # A larger input_shape raises accuracy but lowers speed.
        # self.input_shape = (320, 320)
        self.input_shape = (416, 416)
        # self.input_shape = (608, 608)
        # Score threshold and NMS IoU threshold used during validation.
        self.conf_thresh = 0.001
        self.nms_thresh = 0.45
        # Whether to draw the validation images.
        self.draw_image = False
        # Batch size during validation.
        self.eval_batch_size = 4

        # ============= training-time preprocessing =============
        self.with_mixup = False
        self.context = {'fields': ['image', 'gt_bbox', 'gt_class', 'gt_score']}
        # PadBox
        self.num_max_boxes = 70
        # Gt2YoloTarget
        self.anchor_masks = [[6, 7, 8], [3, 4, 5], [0, 1, 2]]
        self.anchors = [[10, 13], [16, 30], [33, 23],
                        [30, 61], [62, 45], [59, 119],
                        [116, 90], [156, 198], [373, 326]]
        self.downsample_ratios = [32, 16, 8]

        # ============= inference / export =============
        # Model to load for inference.
        self.infer_model_path = 'yolov3_r50vd_dcn_obj365_dropblock_iouloss'
        # self.infer_model_path = './weights/1000'
        # A larger infer_input_shape raises accuracy but lowers speed.
        # self.infer_input_shape = (320, 320)
        self.infer_input_shape = (416, 416)
        # self.infer_input_shape = (608, 608)
        # Score threshold and NMS IoU threshold used during inference.
        self.infer_conf_thresh = 0.05
        self.infer_nms_thresh = 0.45
        self.infer_keep_top_k = 100
        self.infer_nms_top_k = 100
        # Whether to draw boxes on the images.
        self.infer_draw_image = True
        # self.infer_draw_image = False
class PostprocessNumpyNMSConfig(object):
    """
    Post-processing configuration for deploy_infer.py (numpy NMS).
    """
    def __init__(self):
        # Anchor (w, h) pairs, matching the YOLOv4 defaults above.
        self.anchors = [[12, 16], [19, 36], [40, 28],
                        [36, 75], [76, 55], [72, 146],
                        [142, 110], [192, 243], [459, 401]]
        # Score threshold and NMS IoU threshold.
        self.conf_thresh = 0.05
        self.nms_thresh = 0.45
        self.keep_top_k = 100
        # Fix: this assignment was duplicated in the original.
        self.nms_top_k = 100
class TrainConfig_2(object):
    """
    Placeholder for additional configurations.
    """
    def __init__(self):
        # Intentionally empty.
        pass
|
from __future__ import annotations
from typing import Any
import sys
# 保存するキャッシュの数XはX=3としました
class Node(object):
    """A doubly linked list node holding an arbitrary payload."""

    def __init__(self, data: Any, next_node: Node = None, prev_node: Node = None) -> None:
        # Store the payload and both neighbour links in one shot.
        self.data, self.next, self.prev = data, next_node, prev_node
class LinkedList(object):
    """Doubly linked list with head/tail references.

    Note: append() walks from the head and never updates ``tail`` --
    behaviour preserved from the original implementation.
    """

    def __init__(self, head: Node = None, tail: Node = None) -> None:
        self.head = head
        self.tail = tail

    def append(self, data: Any) -> None:
        """Attach *data* in a new node after the last reachable node."""
        fresh = Node(data)
        if self.head is None:
            self.head = fresh
            return
        cursor = self.head
        while cursor.next is not None:
            cursor = cursor.next
        cursor.next = fresh
        fresh.prev = cursor
class HashTable(object):
    """Hash table plus linked list acting as an LRU-style cache capped at 3
    entries (the X=3 mentioned at the top of the file).

    NOTE(review): hash() sums character codes without a modulo, so a page
    name whose codes sum past 999 would index out of self.table -- confirm
    expected key lengths.
    """

    def __init__(self) -> None:
        self.table = [None] * 1000
        self.linkedList = LinkedList()

    def hash(self, data: tuple) -> int:
        """Hash = sum of the character codes of the page name (data[0])."""
        cnt = 0
        for c in data[0].strip():
            cnt += ord(c)
        return cnt

    def add(self, data: tuple) -> None:
        """Insert *data*, evicting the tail entry once the list holds 3."""
        hashed_key = self.hash(data)
        head_node = self.linkedList.head
        tail_node = self.linkedList.tail
        tmp_node = head_node
        len_list = 0
        # ALEX_COMMENT: the loop below is the main cause of ORDER N.
        # you don't need to search for the last entry. You
        # can just use tail_node - it should always have the last entry.
        while tmp_node:
            len_list += 1
            tmp_node = tmp_node.next
        if len_list >= 3:
            # Entry already cached and its node has both neighbours: unlink
            # it and move it to the front (most recently used).
            # ALEX_COMMENT: the search algorithm is excluding both head and tail (is not None)...
            # that is probably why you are getting duplicates.
            if self.table[hashed_key] and (self.table[hashed_key].prev is not None) and (self.table[hashed_key].next is not None):
                node = self.table[hashed_key]
                prev_node = node.prev
                next_node = node.next
                prev_node.next = next_node
                next_node.prev = prev_node
                node.prev = None
                head_node.prev = node
                node.next = head_node
                self.linkedList.head = node
            # Otherwise the data is not in the list: push a new node at the
            # front and drop the tail node (eviction).
            else:
                new_node = Node(data, head_node)
                self.table[hashed_key] = new_node
                self.linkedList.head = new_node
                head_node.prev = new_node
                tail_node.prev.next = None
                self.linkedList.tail = tail_node.prev
                tail_node.prev = None
        # Fewer than 3 entries: plain push-front, fixing head/tail as needed.
        else:
            new_node = Node(data, head_node)
            self.table[hashed_key] = new_node
            self.linkedList.head = new_node
            if head_node is not None:
                head_node.prev = new_node
            if tail_node is None:
                self.linkedList.tail = new_node

    def print(self) -> None:
        """Dump cache contents from most to least recently used."""
        current_node = self.linkedList.head
        while current_node:
            print(current_node.data)
            current_node = current_node.next
# Interactive driver: read "<pageName> <url>" pairs forever and show the
# cache contents after each insertion.
hash_table = HashTable()
while 1:
    page_name, url = input("<pageName, url>: ").split()
    data = (page_name, url)
    hash_table.add(data)
    print("linked list: ")
    hash_table.print()
|
import sys
def solution(n, lines):
    """Return the largest piece length such that cutting every element of
    *lines* into pieces of that length yields at least *n* pieces, or -1 if
    even length 1 cannot produce n pieces.

    Binary search over the answer in [1, max(lines)].

    Bug fix: the original looped ``while l < r``, which skips the final
    l == r probe -- e.g. solution(1, [1]) returned -1 instead of 1.
    """
    answer = -1
    l, r = 1, max(lines)
    while l <= r:
        mid = (l + r) // 2
        # Total pieces of length `mid` obtainable from all lines.
        total = sum(line // mid for line in lines)
        if total >= n:
            # Feasible: remember it and try a longer piece length.
            answer = mid
            l = mid + 1
        else:
            r = mid - 1
    return answer
if __name__ == "__main__":
    # Redirect stdin to the test data file, then read k (number of lines)
    # and n (pieces needed), followed by the k line lengths.
    sys.stdin = open("data/data.txt", "rt")
    k, n = map(int, input().split())
    lines = [int(input()) for _ in range(k)]
    print(solution(n, lines))
|
"""
Program name: animated_color_wheel_1.py
Objective: Draw a progressive arc of a circle while mixing controlled
amounts of red, green and blue. Animate the display of corresponding
hex color value.
Keywords: circle, arc, progressive color wheel, hex color, animation
============================================================================79
Explanation:
The periphery of the disk is divided into 60 degree sectors with
ramp-and-fall functions that govern the amount of primary color that should
be present.
Author: Mike Ohlson de Fine
Note: If you try to complete an entire disk you end up with the only excluded
portion - merely a line
"""
# animated_color_wheel_1.py
# >>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>
# Python 2 / Tkinter setup: canvas, starting color state and the per-sector
# color ramp rates used by the drawing loop below.
from Tkinter import *
root = Tk()
root.title("Animated Color Wheel")
cw = 300 # canvas width
ch = 300 # canvas height
canvas_1 = Canvas(root, width=cw, height=ch, background="black")
canvas_1.grid(row=0, column=1)
cycle_period = 200
# Current color components as floats so fractional per-degree rates accumulate.
redFl = 255.0
greenFl = 0
blueFl = 0
kula = "#000000"
# The wheel is drawn as successive 1-degree arcs starting near 90 degrees.
arcStart = 89
arcEnd = 90
xCentr = 150
yCentr = 160
radius = 130
# Bounding box of the circle for create_arc.
circ = xCentr - radius, yCentr + radius, xCentr + radius, yCentr - radius
# angular position markers, degrees
A_ANG = 0
B_ANG = 60
C_ANG = 120
D_ANG = 180
E_ANG = 240
F_ANG = 300
#G_ANG = 1
G_ANG = 359
intervals = 60 # degrees
# Percent color at each position marker
# index        0    1    2    3    4    5    6    7
redShift   = 100, 100,   0,   0,   0, 100, 100  # percent of red
greenShift =   0, 100, 100, 100,   0,   0,   0  # percent of green
blueShift  =   0,   0,   0, 100, 100, 100,   0  # percent of blue
# Rate of change of color per degree, rgb integer counts per degree.
red_rate = [0,1,2,3,4,5,6,7]
green_rate = [0,1,2,3,4,5,6,7]
blue_rate = [0,1,2,3,4,5,6,7]
# Calibrate counts-per-degree in each interval, place in xrate list
for i in range(0,6):
    red_rate[i] = 256.0 * (redShift[i+1] - redShift[i])/(100 * intervals)
    green_rate[i] = 256.0 * (greenShift[i+1] - greenShift[i])/(100 * intervals)
    blue_rate[i] = 256.0 * (blueShift[i+1] - blueShift[i])/(100 * intervals)
def rgb2hex(redFl, greenFl, blueFl):
    """Convert float RGB components (0..255) to a '#rrggbb' hex string."""
    channels = (int(redFl), int(greenFl), int(blueFl))
    return '#%02x%02x%02x' % channels
# Draw the wheel one degree at a time, updating the color at each sector
# boundary and animating the current hex value as text.
for i in range (0, 359):
    canvas_1.create_arc(circ, start=arcStart, extent=arcStart - arcEnd,\
                        fill= kula, outline= kula)
    arcStart = arcEnd
    arcEnd -=1
    # Color component transitions in 60 degree sectors
    if i>A_ANG and i<B_ANG:
        redFl += red_rate[0]
        greenFl += green_rate[0]
        blueFl += blue_rate[0]
        kula = rgb2hex(redFl, greenFl, blueFl)
    if i>B_ANG and i<C_ANG:
        redFl += red_rate[1]
        greenFl += green_rate[1]
        blueFl += blue_rate[1]
        kula = rgb2hex(redFl, greenFl, blueFl)
    if i>C_ANG and i<D_ANG:
        redFl += red_rate[2]
        greenFl += green_rate[2]
        blueFl += blue_rate[2]
        kula = rgb2hex(redFl, greenFl, blueFl)
    if i>D_ANG and i<E_ANG:
        redFl += red_rate[3]
        greenFl += green_rate[3]
        blueFl += blue_rate[3]
        kula = rgb2hex(redFl, greenFl, blueFl)
    if i>E_ANG and i<F_ANG:
        redFl += red_rate[4]
        greenFl += green_rate[4]
        blueFl += blue_rate[4]
        kula = rgb2hex(redFl, greenFl, blueFl)
    if i>F_ANG and i<G_ANG:
        redFl += red_rate[5]
        greenFl += green_rate[5]
        blueFl += blue_rate[5]
        kula = rgb2hex(redFl, greenFl, blueFl)
    #kula = rgb2hex(redFl, greenFl, blueFl)
    canvas_1.create_text(100, 20, text=kula, fill='white', width=200,\
                         font='SansSerif 12 ', tag= 'degreesAround', anchor= SW)
    canvas_1.update() # This refreshes the drawing on the canvas.
    canvas_1.after(cycle_period) # This makes execution pause for 200 milliseconds.
    canvas_1.delete('degreesAround') # This erases the changing text
root.mainloop()
|
# -*- coding: utf-8 -*-
import time
import threading
import Queue
class TasksRunner(object):
    # Class-level FIFO queue shared by every instance; maxsize=-1 means unbounded.
    tq = Queue.Queue(maxsize = -1)
    def __init__(self):
        # Start the consumer loop on a background thread.
        # NOTE(review): the thread is not a daemon, so it keeps the process alive.
        t = threading.Thread(target=self.run, args=())
        t.start()
    def put(self, task):
        # Enqueue a task for the background loop to run.
        self.__class__.tq.put(task)
    def run(self):
        # Consumer loop: pop one task at a time, start it and wait for it.
        # Tasks are expected to be thread-like objects (have start()/join()).
        while True:
            print u"等待任务数量: ", self.__class__.tq.qsize()
            try:
                _task = self.__class__.tq.get(block=True, timeout=10)
                _task.start()
                _task.join()
            except Queue.Empty:
                # No task arrived within 10 seconds; idle briefly and poll again.
                time.sleep(1)
            except Exception:
                # A task blew up (or was not thread-like); log and keep serving.
                import traceback
                print traceback.print_exc()
                time.sleep(3)
if __name__ == "__main__":
    # Smoke test: enqueue a plain string -- run() will fail on it ("666" has no
    # start() method), exercising the error-handling branch.
    tester = TasksRunner()
    time.sleep(3)
    tester.put("666")
time.sleep(30) |
from tkinter import *
from quiz_brain import QuizBrain
# Background color used throughout the quiz window.
THEME_COLOR = "#375362"
class QuizInterface:
    """Tkinter front end for a QuizBrain quiz.

    Shows the current score, the question on a canvas, and True/False image
    buttons; the canvas flashes green/red as answer feedback.
    """

    def __init__(self, quiz: QuizBrain):
        self.quiz = quiz
        self.window = Tk()
        self.window.title("Quiz GUI")
        self.window.config(padx=20, pady=20, bg=THEME_COLOR)
        self.score_label = Label(text="Score: 0", fg="white", highlightthickness=0, bg=THEME_COLOR)
        self.score_label.grid(column=1, row=0)
        self.Canvas = Canvas(height=250, width=300)
        self.question_text = self.Canvas.create_text(
            150,
            125,
            width=280,
            text="Chai Pi lo",
            font=["Arial", 20, "italic"]
        )
        self.Canvas.grid(column=0, row=1, columnspan=2, pady=50)
        # Robustness fix: keep the PhotoImage objects alive on self.  Tkinter
        # does not hold a Python reference to a Button's image, so an image kept
        # only in a local variable can be garbage-collected and the button
        # renders blank (works here only because mainloop() runs inside
        # __init__ and keeps the frame's locals alive).
        self.true_image = PhotoImage(file="images/true.png")
        self.right_button = Button(image=self.true_image, highlightthickness=0, command=self.right_answer)
        self.right_button.grid(row=2, column=1)
        self.false_image = PhotoImage(file="images/false.png")
        self.wrong_button = Button(image=self.false_image, highlightthickness=0, command=self.wrong_answer)
        self.wrong_button.grid(row=2, column=0)
        self.get_next_question()
        self.window.mainloop()

    def get_next_question(self):
        """Reset the canvas colour and display the next question, or end the quiz."""
        self.Canvas.config(bg="White")
        if self.quiz.still_has_questions():
            self.score_label.config(text=f"Score: {self.quiz.score}")
            q_text = self.quiz.next_question()
            self.Canvas.itemconfig(self.question_text, text=q_text)
        else:
            self.Canvas.itemconfig(self.question_text, text="You've completed the quiz.")
            self.right_button.config(state="disabled")
            self.wrong_button.config(state="disabled")

    def right_answer(self):
        """Handle a click on the 'True' button."""
        self.feedback(self.quiz.check_answer("True"))

    def wrong_answer(self):
        """Handle a click on the 'False' button."""
        self.feedback(self.quiz.check_answer("False"))

    def feedback(self, answer):
        """Flash the canvas green (right) or red (wrong), then advance after 1 s."""
        if answer:
            self.Canvas.config(bg="Green")
        else:
            self.Canvas.config(bg="Red")
        self.window.after(1000, self.get_next_question)
|
# Generated by Django 3.1.5 on 2021-01-29 08:48
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: add a nullable integer `count` field to Book."""
    dependencies = [
        ('libraryapp', '0003_book_description'),
    ]
    operations = [
        migrations.AddField(
            model_name='book',
            name='count',
            field=models.IntegerField(blank=True, null=True),
        ),
    ]
|
import cv2
import face_recognition
import pyttsx3
import os
import numpy as np
from keras.preprocessing.image import img_to_array
from keras.models import load_model
# Text-to-speech engine used to greet recognized people.
engine = pyttsx3.init()
voices = engine.getProperty('voices')
engine.setProperty('voice', voices[1].id)  # second installed voice; assumes it exists
path='trainingData'
detection_model_path = 'haarcascade_files/haarcascade_frontalface_default.xml'
emotion_model_path = 'models/_mini_XCEPTION.24-0.77.hdf5'
face_detection = cv2.CascadeClassifier(detection_model_path)
emotion_classifier = load_model(emotion_model_path, compile=False)
# NOTE(review): confirm this checkpoint really outputs exactly these 3 classes,
# in this order -- the labels are positional over the model's output vector.
EMOTIONS = ["Sad","Happy","Neutral"]
# Build one reference face encoding per image in trainingData; the file name
# (text before the first '.') is used as the person's display name.
imagePaths = [os.path.join(path,f) for f in os.listdir(path)]
names=[]
known_faces=list()
for imagePath in imagePaths:
    name_temp=str(os.path.split(imagePath)[-1].split(".")[0])
    names.append(name_temp)
    image = cv2.imread(imagePath)
    # Assumes every training image contains at least one detectable face.
    known_faces.append(face_recognition.face_encodings(image)[0])
face_locations = []
face_encodings = []
face_names = []
# Webcam capture at 640x480 (property ids 3/4 = width/height).
cam = cv2.VideoCapture(0)
cam.set(3, 640)
cam.set(4, 480)
# Main capture loop: per frame, classify the emotion of the largest
# haar-detected face, then identify and annotate every face_recognition match.
label = ""  # bug fix: pre-initialise the emotion label (see note at putText)
while True:
    ret, frame = cam.read()
    if not ret:
        break
    # --- emotion detection on the largest haar-detected face ---
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    faces = face_detection.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5, minSize=(30, 30), flags=cv2.CASCADE_SCALE_IMAGE)
    if len(faces) > 0:
        faces = sorted(faces, reverse=True,
                       key=lambda x: (x[2] - x[0]) * (x[3] - x[1]))[0]
        (fX, fY, fW, fH) = faces
        roi = gray[fY:fY + fH, fX:fX + fW]
        roi = cv2.resize(roi, (48, 48))
        roi = roi.astype("float") / 255.0
        roi = img_to_array(roi)
        roi = np.expand_dims(roi, axis=0)
        preds = emotion_classifier.predict(roi)[0]
        emotion_probability = np.max(preds)
        label = EMOTIONS[preds.argmax()]
    # --- identity recognition ---
    # Convert BGR (OpenCV) to RGB (face_recognition).
    rgb_frame = frame[:, :, ::-1]
    face_locations = face_recognition.face_locations(rgb_frame)
    face_encodings = face_recognition.face_encodings(rgb_frame, face_locations)
    face_names = []
    for (top, right, bottom, left), face_encoding in zip(face_locations, face_encodings):
        match = face_recognition.compare_faces(known_faces, face_encoding, tolerance=0.50)
        name = "Unknown"
        if (True in match):
            name = names[match.index(True)]
        cv2.rectangle(frame, (left, top), (right, bottom), (0, 0, 255), 2)
        font = cv2.FONT_HERSHEY_DUPLEX
        # Bug fix: `label` used to be assigned only inside the haar branch, so
        # this raised NameError whenever face_recognition found a face on a
        # frame where the cascade found none; it now falls back to "".
        cv2.putText(frame, label, (left + 6, bottom - 6), font, 0.5, (255, 255, 255), 1)
        cv2.putText(frame, name, (left + 6, bottom - 30), font, 0.5, (255, 255, 255), 1)
        if (name != 'Unknown'):
            # NOTE(review): this greets on every frame the person is visible;
            # consider rate-limiting the greeting.
            engine.say('Hello ' + name + '... Welcome!!!')
            engine.runAndWait()
    cv2.imshow('camera', frame)
    k = cv2.waitKey(10) & 0xff
    if (k == 27 or k == ord('q')):  # Esc or 'q' quits
        break
print("\n [INFO] Exiting Program and cleanup stuff")
cam.release()
cv2.destroyAllWindows() |
'''
Escreva um programa para aprovar o empréstimo
bancário para a compra de uma casa. O programa
vai perguntar o valor da casa, o salário do
comprador e em quantos anos ele vai pagar.
Calcule o valor da prestação mensal, sabendo
que ela não pode exceder 30% do salário ou
então o empréstimo será negado.
'''
# Read the purchase data from the user.
casa = float(input('Qual o valor da casa? '))
salario = float(input('Qual o salário do comprador? '))
anos = int(input('Em quantos anos vai financiar? '))
# Monthly installment: total price split evenly over the months (no interest).
prestação = casa / (anos * 12)
# Percentage of the salary the installment would consume.
comprometido = (prestação / salario) * 100
if prestação >= (salario * 0.3):
    # Installment takes 30% or more of the salary: loan denied.
    print('O empréstimo foi \033[1;31mNEGADO\033[m, pois a prestação ocuparia {:.2f}% do seu salário. Ficando com parcelas de R${:.2f}'.format(comprometido, prestação))
else:
    print('O empréstimo foi \033[1;32mAPROVADO\033[m, pois ele só ocupará {:.2f}% do seu salário. Ficando com parcelas de R${:.2f}'.format(comprometido, prestação))
# Demonstrate correcting wrong values in a list via index and slice assignment.
odd = [2, 4, 6, 8]
odd[0] = 1  # fix the first item
print(odd)
odd[1:4] = [3, 5, 7]  # fix items 2-4 with one slice assignment
print(odd)
print(odd) |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#a test for traverse directory
__author__ = 'AlbertS'
import os
import os.path
# Global output file mirroring everything dfs_showdir() prints to the console.
f = open('outline.txt','w')
def dfs_showdir(path, depth):
    """Recursively print a tree view of `path` (skipping .git entries), mirroring
    every line into the global output file `f`."""
    if depth == 0:
        root_line = "root:[" + path + "]"
        print(root_line)
        f.write(root_line + '\n')
    for entry in os.listdir(path):
        if '.git' in entry:
            continue
        tree_line = "| " * depth + "|--" + entry
        print(tree_line)
        f.write(tree_line + '\n')
        child = path + '/' + entry
        if os.path.isdir(child):
            dfs_showdir(child, depth + 1)
if __name__ == '__main__':
    # Dump the tree of the current working directory.
    dfs_showdir('.', 0)
f.close() |
# RBFNN
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import xlrd
from pandas import DataFrame
from numpy.linalg import pinv
import math
from sklearn.cross_validation import train_test_split
from sklearn.cluster import KMeans
def find_max(x):
    """Return the index of the first largest element of x.

    Raises IndexError on empty input (same as the original behavior).
    """
    best = 0
    largest = x[0]
    for pos, value in enumerate(x):
        if value > largest:
            largest = value
            best = pos
    return best
def sigmoid(x):
    """Logistic function 1 / (1 + e^-x); operates elementwise on numpy arrays."""
    return 1.0 / (1.0 + np.exp(-x))
def basis_func(x, mu):
    """Cubic radial basis kernel: ||x - mu|| cubed."""
    distance = np.linalg.norm(x - mu)
    return distance ** 3
def main():
    """Train and evaluate an RBF network classifier on dataset.xlsx.

    Pipeline: load 7 feature columns plus a 3-class label column ('row8'),
    normalize, 70/30 train/test split, pick 8 RBF centres with k-means, solve
    the output weights with the pseudo-inverse, and print test accuracy.
    NOTE(review): sklearn.cross_validation (imported at file top) was removed in
    scikit-learn 0.20 -- modern versions need sklearn.model_selection.
    """
    data = pd.read_excel('dataset.xlsx')
    var1 = data['row1']
    var2 = data['row2']
    var3 = data['row3']
    var4 = data['row4']
    var5 = data['row5']
    var6 = data['row6']
    var7 = data['row7']
    y = data['row8']
    # Assemble the feature matrix row by row.
    x = []
    for i in data.index:
        temp = []
        # temp.append(1)
        temp.append(var1[i])
        temp.append(var2[i])
        temp.append(var3[i])
        temp.append(var4[i])
        temp.append(var5[i])
        temp.append(var6[i])
        temp.append(var7[i])
        x.append(temp)
    x = np.array(x)
    y = np.array(y)
    # x = sigmoid(x)
    # normalizing the data
    # NOTE(review): this divides by the variance, not the standard deviation --
    # confirm that is intentional (a z-score would use sqrt(variance)).
    mean = np.sum(x,axis = 0)/len(x)
    variance = (np.sum((x - mean)**2,axis = 0))/len(x)
    x = (x - mean)/variance
    xtra, xte, ytr, yte = train_test_split(x, y, train_size=0.7)
    # One-hot encode the training labels (classes 1, 2, 3).
    yt = []
    for i in range(len(ytr)):
        if ytr[i] == 1: yt.append([1, 0, 0])
        elif ytr[i] == 2: yt.append([0, 1, 0])
        else: yt.append([0, 0, 1])
    nhid = 8
    # RBF centres = k-means cluster centres of the training inputs.
    kmeans = KMeans(n_clusters=nhid, random_state=0).fit(xtra)
    mu = kmeans.cluster_centers_
    # training the data: hidden-layer activation matrix, then least-squares weights.
    h = np.zeros((len(xtra), nhid))
    for i in range(len(xtra)):
        for j in range(nhid):
            h[i][j] = basis_func(xtra[i], mu[j])
    w = np.matmul(pinv(h), yt)
    # testing the data
    # accuracy is the objective function
    cnt = 0
    tot = 0
    ht = np.zeros((len(xte), nhid))
    for i in range(len(xte)):
        for j in range(nhid):
            ht[i][j] = basis_func(xte[i], mu[j])
    yp = np.matmul(ht, w)
    # Predicted class = argmax over the 3 outputs, shifted back to labels 1..3.
    for i in range(len(yp)):
        ind = find_max(yp[i])+1
        if ind==yte[i] : cnt+=1
        tot+=1
    print('Accuracy : ')
    print(cnt/tot)
# Script entry point.
if __name__ == "__main__":
    main()
|
#이 파일은 nester.py모듈이며 print_lol()함수 하나를 제공합니다. 이 함수는 포함된 리스트가 있을 경우 그것을 포함해서 리스트의 모든 힝목을 화면애 츌력합니다
def print_lol(the_list, ident=False , level=0):
"""이 함수는 the_list한 이름의 인자를 갖고 있으며, 파이썬 리스트를 받습니다.
이 리스트는 항목으로 포함할 수 있습니다. 매 라인마다 리스트에 있는 항목이 하나씩 재귀적으로 화면에 출력됩니다.
"""
for each_item in the_list:
if isinstance(each_item,list):
print_lol(each_item, ident, level+1)
else:
if ident:
for tab_stop in range(level):
print("\t", end='')
print(each_item)
|
import pexpect, sys, time, re
def setcommand( s ):
    """Disable the shell idle timeout on an open phone DEBUG session.

    s is a pexpect spawn object already sitting at the DEBUG> prompt.  The
    commented-out lines are alternative debug switches kept for reference.
    """
    print "setting the debug command"
    s.sendline('shelltimeout -1')
    s.expect('DEBUG>', timeout = 90)
    #s.sendline('debug sip-reg-state')
    #s.expect('DEBUG>', timeout = 90)
    #s.sendline('sdump')
    #s.expect( pexpect.EOF, timeout = 60)
    #print s.before
    # s.sendline('debug sip-message')
    # s.expect('DEBUG>', timeout = 90)
    # s.sendline('debug fsm gsm lsm')
    # s.expect('DEBUG>', timeout = 90)
    # s.sendline('debug jvm tftp')
    # s.expect('DEBUG>', timeout = 90)
    # s.sendline('debug jvm http')
    # s.expect('DEBUG>', timeout = 90)
def _startlogin( phone_addr ):
    """SSH into one phone, drop into its debug shell, log the session to a file
    named after the phone address, then run setcommand().

    Finishes via success() on a clean run or fail() on EOF/timeout.
    """
    s = pexpect.spawn('ssh -o MACs=hmac-md5 cisco %s' % phone_addr)
    sshKeyStr = re.escape("(yes/no)?")
    # Expect either the password prompt or a first-time host-key confirmation.
    expVal = s.expect(["password:", sshKeyStr], timeout=120)
    if expVal == 1:
        s.sendline('yes')
        expVal = s.expect("password:", timeout=120)
    s.sendline('cisco')
    s.expect('\\(none\\) login:')
    s.sendline('debug')
    try:
        s.expect("Password:")
        s.sendline('debug')
        s.expect('DEBUG\\>', timeout = 60)
        print "Login Successfull"
        # Mirror all subsequent session output into a per-phone log file.
        fout = file( phone_addr, 'w')
        s.logfile = fout
        setcommand( s )
    except pexpect.EOF:
        print "EOF...... %s" % phone_addr
        # NOTE(review): if EOF fires before fout is created, this line raises
        # UnboundLocalError on fout -- confirm whether that path can occur.
        return fail( s , fout, phone_addr )
    except pexpect.TIMEOUT:
        print "timeout...... %s" % phone_addr
        return fail( s , fout, phone_addr )
    return success( s , fout )
def startlogin( phone_addr ):
    """Public wrapper: announce and attempt the debug login for one phone."""
    print "attempting to connect to phone %s" % phone_addr
    _startlogin( phone_addr )
def fail( s, fout ):
s.close()
fout.close()
print "Login fail %s" % phone_addr
def success( s, fout ):
    """Close the session and log file after the debug command was set."""
    s.close()
    fout.close()
    print "set command successful, and exit."
if __name__ == '__main__':
    # Read one phone IP per line from the 'ips' file and configure each phone.
    fips = open( 'ips', 'r')
    row = 0
    for phone_addr in fips:
        row = row + 1
        print '--------- %d -----------' % row
        startlogin( phone_addr.rstrip('\n') )
|
from googleapiclient.discovery import build
from google.auth.transport.requests import Request
from google_auth_oauthlib.flow import Flow, InstalledAppFlow
from googleapiclient.http import MediaFileUpload
from googleapiclient.errors import HttpError
from time import sleep
from threading import Thread
from tzlocal import get_localzone
import pytz
import os
import pickle
import json
import datetime
import logging
class youtube_uploader():
    """Background uploader that pushes recorded vod files to a YouTube channel.

    A daemon thread drains self.queue, uploading each entry and (optionally)
    inserting it into a playlist which can be kept sorted by the custom
    'tvid:<id>[p<part>]' tag embedded in each video's tags.

    Fixes vs. the previous revision: sort_playlist's duplicate-tvid diagnostic
    used the literal key 'tvid' instead of each video's tvid, and the playlist
    reorder loop tested its index bound after indexing (off-by-one, wrong list).
    """

    def __init__(self, parent, jsonfile, youtube_args, sort=True):
        self.parent = parent
        self.logger = logging.getLogger(f'vodloader.{self.parent.channel}.uploader')
        self.end = False
        self.pause = False
        self.sort = sort
        self.jsonfile = jsonfile
        self.youtube_args = youtube_args
        self.youtube = self.setup_youtube(jsonfile)
        self.queue = []
        # Daemon worker so the process can exit without joining the uploader.
        self.upload_process = Thread(target=self.upload_loop, args=(), daemon=True)
        self.upload_process.start()

    def stop(self):
        """Ask the upload loop to exit after its current iteration."""
        self.end = True

    def setup_youtube(self, jsonfile, scopes=['https://www.googleapis.com/auth/youtube.upload', 'https://www.googleapis.com/auth/youtube']):
        """Build an authorized YouTube API client, caching credentials in a
        per-channel pickle file and refreshing/re-authorizing as needed."""
        self.logger.info(f'Building YouTube flow for {self.parent.channel}')
        api_name = 'youtube'
        api_version = 'v3'
        pickle_dir = os.path.join(os.path.dirname(__file__), 'pickles')
        if not os.path.exists(pickle_dir):
            self.logger.info(f'Creating pickle directory')
            os.mkdir(pickle_dir)
        pickle_file = os.path.join(pickle_dir, f'token_{self.parent.channel}.pickle')
        creds = None
        if os.path.exists(pickle_file):
            with open(pickle_file, 'rb') as token:
                creds = pickle.load(token)
        if not creds or not creds.valid:
            if creds and creds.expired and creds.refresh_token:
                self.logger.info(f'YouTube credential pickle file for {self.parent.channel} is expired. Attempting to refresh now')
                creds.refresh(Request())
            else:
                # No usable cached credentials: run the interactive console flow.
                print(f'Please log into the YouTube account that will host the vods of {self.parent.channel} below')
                flow = InstalledAppFlow.from_client_secrets_file(jsonfile, scopes)
                creds = flow.run_console()
            with open(pickle_file, 'wb') as token:
                pickle.dump(creds, token)
            self.logger.info(f'YouTube credential pickle file for {self.parent.channel} has been written to {pickle_file}')
        else:
            self.logger.info(f'YouTube credential pickle file for {self.parent.channel} found!')
        return build(api_name, api_version, credentials=creds)

    def upload_loop(self):
        """Worker loop: upload queued videos one at a time; on quota exhaustion
        sleep until the quota resets, then resume."""
        while True:
            if len(self.queue) > 0:
                try:
                    self.upload_video(*self.queue[0])
                    del self.queue[0]
                except YouTubeOverQuota:
                    self.wait_for_quota()
            else: sleep(1)
            if self.end: break

    def upload_video(self, path, body, id, keep=False, chunk_size=4194304, retry=3):
        """Upload one file, add it to the configured playlist, mark it done in the
        parent's status store, and delete the local file unless keep=True."""
        self.logger.info(f'Uploading file {path} to YouTube account for {self.parent.channel}')
        uploaded = False
        attempts = 0
        response = None
        while uploaded == False:
            media = MediaFileUpload(path, mimetype='video/mpegts', chunksize=chunk_size, resumable=True)
            upload = self.youtube.videos().insert(part=",".join(body.keys()), body=body, media_body=media)
            try:
                response = upload.execute()
                self.logger.debug(response)
                uploaded = response['status']['uploadStatus'] == 'uploaded'
            except HttpError as e:
                # Re-raises YouTubeOverQuota when the quota is exhausted.
                self.check_over_quota(e)
            except (BrokenPipeError, ConnectionResetError) as e:
                self.logger.error(e)
            if not uploaded:
                attempts += 1
                if attempts >= retry:
                    self.logger.error(f'Number of retry attempts exceeded for {path}')
                    break
        if response and 'id' in response:
            self.logger.info(f'Finished uploading {path} to https://youtube.com/watch?v={response["id"]}')
            # NOTE(review): assumes youtube_args always has a 'playlistId' key.
            if self.youtube_args['playlistId']:
                self.add_video_to_playlist(response["id"], self.youtube_args['playlistId'])
                if self.sort:
                    self.sort_playlist(self.youtube_args['playlistId'])
            self.parent.status[id] = True
            self.parent.status.save()
            if not keep: os.remove(path)
        else:
            self.logger.info(f'Could not parse a video ID from uploading {path}')

    def wait_for_quota(self):
        """Block until the YouTube quota reset at midnight US/Pacific."""
        self.pause = True
        now = datetime.datetime.now()
        until = now + datetime.timedelta(days=1)
        # Truncate to the next midnight, interpreted in the Pacific timezone.
        until = until - datetime.timedelta(microseconds=until.microsecond, seconds=until.second, minutes=until.minute, hours=until.hour)
        until = pytz.timezone('US/Pacific').localize(until)
        now = get_localzone().localize(now)
        wait = until - now
        if wait.days > 0:
            wait = wait - datetime.timedelta(days=wait.days)
        self.logger.error(f'YouTube upload quota has been exceeded, waiting for reset at Midnight Pacific Time in {wait.seconds} seconds')
        sleep(wait.seconds + 15)
        self.pause = False

    def get_playlist_items(self, playlist_id):
        """Return every playlistItem of a playlist, following pagination."""
        items = []
        npt = ""
        i = 1
        while True:
            request = self.youtube.playlistItems().list(
                part="snippet",
                maxResults=50,
                pageToken=npt,
                playlistId=playlist_id
            )
            try:
                response = request.execute()
            except HttpError as e:
                # NOTE(review): if this is not a quota error, check_over_quota
                # returns and `response` is unbound below -- confirm intent.
                self.check_over_quota(e)
            self.logger.debug(f'Retrieved page {i} from playlist {playlist_id}')
            items.extend(response['items'])
            if 'nextPageToken' in response:
                npt = response['nextPageToken']
                i += 1
            else:
                break
        return items

    def get_videos_from_playlist_items(self, playlist_items):
        """Resolve playlist items to full video resources (batched 50 at a time),
        annotating each with its parsed 'tvid' and 'part'."""
        videos = []
        max_results = 50
        length = len(playlist_items)
        i = 0
        while i * max_results < length:
            top = max_results * (i + 1)
            if top > length: top = length
            ids = ",".join([x['snippet']['resourceId']['videoId'] for x in playlist_items[max_results*i:top]])
            request = self.youtube.videos().list(
                part="snippet",
                id=ids
            )
            try:
                response = request.execute()
            except HttpError as e:
                self.check_over_quota(e)
            self.logger.debug(f'Retrieved video info for videos: {ids}')
            videos.extend(response['items'])
            i += 1
        for video in videos:
            video['tvid'], video['part'] = self.get_tvid_from_yt_video(video)
        return videos

    def get_playlist_videos(self, playlist_id):
        """Return the full video resources for every entry of a playlist."""
        return self.get_videos_from_playlist_items(self.get_playlist_items(playlist_id))

    def get_channel_videos(self):
        """Return every video on the authenticated channel's uploads playlist."""
        request = self.youtube.channels().list(part="contentDetails", mine=True)
        try:
            r = request.execute()
            self.logger.debug('Retrieved channel upload playlist')
            uploads = r['items'][0]['contentDetails']['relatedPlaylists']['uploads']
        except HttpError as e:
            self.check_over_quota(e)
        return self.get_playlist_videos(uploads)

    @staticmethod
    def get_tvid_from_yt_video(item):
        """Parse a video's 'tvid:<id>[p<part>]' tag; returns (id, part) or (None, None)."""
        if 'tags' in item['snippet']:
            tvid = None
            for tag in item['snippet']['tags']:
                if tag[:5] == 'tvid:':
                    tvid = tag[5:]
            if tvid:
                tvid = tvid.split('p', 1)
                id = int(tvid[0])
                if len(tvid) > 1: part = int(tvid[1])
                else: part = None
                return id, part
            else: return None, None
        else: return None, None

    def add_video_to_playlist(self, video_id, playlist_id, pos=-1):
        """Insert a video into a playlist; pos=-1 appends at the end."""
        if pos == -1:
            pos = len(self.get_playlist_items(playlist_id))
        request = self.youtube.playlistItems().insert(
            part="snippet",
            body={
                "snippet": {
                    "playlistId": playlist_id,
                    "position": pos,
                    "resourceId": {
                        "kind": "youtube#video",
                        "videoId": video_id
                    }
                }
            }
        )
        try:
            r = request.execute()
            self.logger.debug(f'Added video {video_id} to playlist {playlist_id}')
            return r
        except HttpError as e:
            self.check_over_quota(e)

    def set_video_playlist_pos(self, video_id, playlist_item_id, playlist_id, pos):
        """Move an existing playlist item to the given position."""
        request = self.youtube.playlistItems().update(
            part="snippet",
            body={
                "id": playlist_item_id,
                "snippet": {
                    "playlistId": playlist_id,
                    "position": pos,
                    "resourceId": {
                        "kind": "youtube#video",
                        "videoId": video_id
                    }
                }
            }
        )
        try:
            r = request.execute()
            self.logger.debug(f'Moved item {video_id} to position {pos} in playlist {playlist_id}')
            return r
        except HttpError as e:
            self.check_over_quota(e)

    def sort_playlist(self, playlist_id, reverse=False):
        """Sort a playlist in place by (tvid, part), refusing to sort when the
        tags are missing or ambiguously duplicated."""
        self.logger.debug(f'Sorting playlist {playlist_id} according to tvid and part')
        playlist_items = self.get_playlist_items(playlist_id)
        videos = self.get_videos_from_playlist_items(playlist_items)
        unsortable = []
        for video in videos:
            if video['tvid'] == None:
                unsortable.append(video['id'])
        if unsortable != []:
            self.logger.error(f"There were videos found in the specified playlist to be sorted without a valid tvid tag. As such this playlist cannot be reliably sorted. The videos specified are: {','.join(unsortable)}")
            return
        try:
            videos.sort(reverse=reverse, key=lambda x: (x['tvid'], x['part']))
        except TypeError:
            # Sorting failed: some videos share a tvid but lack a part, making
            # their ordering undefined.  Report which ones.
            dupes = {}
            invalid = []
            for video in videos:
                # Bug fix: group under each video's tvid value -- the previous
                # code used the literal string 'tvid' as the only dict key.
                if video['tvid'] in dupes:
                    dupes[video['tvid']].append(video)
                else:
                    dupes[video['tvid']] = [video]
            for tvid in dupes:
                if len(dupes[tvid]) > 1:
                    for video in dupes[tvid]:
                        if video['part'] == None:
                            invalid.append(video['id'])
            self.logger.error(f"There were videos found in the specified playlist to be sorted that has duplicate tvid tags, but no part specified. As such this playlist cannot be reliably sorted. The videos specified are: {','.join(invalid)}")
            return
        # Selection-style reorder: move the video that belongs at position i
        # into place via the API, mirroring the move locally.
        i = 0
        while i < len(videos):
            if videos[i]['id'] != playlist_items[i]['snippet']['resourceId']['videoId']:
                j = i + 1
                # Bug fix: test the bound BEFORE indexing, and bound by
                # playlist_items (the list being indexed), not videos.
                while j < len(playlist_items) and videos[i]['id'] != playlist_items[j]['snippet']['resourceId']['videoId']: j += 1
                if j < len(videos):
                    self.set_video_playlist_pos(playlist_items[j]['snippet']['resourceId']['videoId'], playlist_items[j]['id'], playlist_id, i)
                    playlist_items.insert(i, playlist_items.pop(j))
                else:
                    self.logger.error('An error has occured while sorting the playlist')
                    return
            i += 1

    def check_over_quota(self, e: HttpError):
        """Raise YouTubeOverQuota if the HttpError is a quotaExceeded error;
        otherwise just log it."""
        c = json.loads(e.content)
        if c['error']['errors'][0]['domain'] == 'youtube.quota' and c['error']['errors'][0]['reason'] == 'quotaExceeded':
            self.logger.error(f'YouTube client quota has been exceeded!')
            raise YouTubeOverQuota
        else:
            self.logger.error(e.resp)
            self.logger.error(e.content)
class YouTubeOverQuota(Exception):
    """ called when youtube upload quota is exceeded """
    pass
import frappe
from frappe.desk.moduleview import get_desktop_settings, get
@frappe.whitelist()
def get_menu(parent=None, is_root=False):
    """Return the desktop settings for the root menu, or the module view for
    the given parent module; None when neither applies."""
    if is_root:
        return get_desktop_settings()
    if parent:
        return get(parent)
#!/usr/bin/env python
import sys
import re
def is_number(n):
    """This is an implementation of `is_number` and is not needed in this exercise ;).
    >>> is_number('44')
    True
    >>> is_number('44.0') # tricky :D
    False
    >>> is_number('44.5')
    False
    >>> is_number('not a number')
    False
    """
    # A string is a "number" here exactly when int() accepts it.
    try:
        int(n)
    except ValueError:
        return False
    return True
def _test():
    """Run doctests."""
    # Executes the examples embedded in is_number's docstring.
    import doctest
    doctest.testmod()
if __name__ == '__main__':
    # Run the doctests first, then parse a phone number from the CLI argument.
    _test()
    if len(sys.argv) != 2:
        print 'Usage: python %s "a phone number xx xx xx xx xx"' % sys.argv[0]
        exit(1)
    # Five two-digit groups separated by whitespace.
    match_obj = re.search(r'(\d\d\s){4}(\d\d)', sys.argv[1])
    if not match_obj:
        print "No phone number found"
    else:
        phone = match_obj.group().strip()
        print tuple(p for p in phone.split())
|
import json
from css_html_js_minify import html_minify
import re
from os import path
from PIL import Image
import base64
from subprocess import Popen, PIPE
# Matches {image:some-name} placeholders inside the SCSS template.
pattern = re.compile('{image:[a-zA-Z0-9-_]*}')
def beautify_css(style):
    """Compile an SCSS string to CSS by piping it through the `sass` CLI.

    Bug fix: the previous write/close/read sequence never waited for or reaped
    the child process and could deadlock once the output exceeded the OS pipe
    buffer; communicate() handles both ends safely and waits for exit.
    """
    sass = Popen(
        ['sass', '--stdin', '--no-source-map'],
        stdin=PIPE,
        stdout=PIPE,
        stderr=PIPE,
        universal_newlines=True,
        bufsize=0
    )
    stdout, _stderr = sass.communicate(style)
    return stdout
def encode_image(match, images_path):
    """Turn one {image:name} regex match into inline CSS: explicit pixel
    width/height plus the PNG embedded as a base64 data URI."""
    name = match.group().split(':')[1][:-1]
    image_path = path.join(images_path, name + '.png')
    width, height = Image.open(image_path).size
    with open(image_path, 'rb') as raw:
        encoded = str(base64.b64encode(raw.read()), 'utf-8')
    return (
        'width: ' + str(width) + 'px; height: ' + str(height) + 'px; ' +
        'background: url(data:image/png;base64,' + encoded + ');'
    )
def build_scss(scss_template_path, scss_result_path, images_path):
    """Render the SCSS template: inline every {image:...} placeholder, compile
    the result with sass, and write the CSS to scss_result_path."""
    with open(scss_template_path, 'r') as template:
        raw_style = template.read()
    inlined = pattern.sub(lambda m: encode_image(m, images_path), raw_style)
    compiled = beautify_css(inlined)
    with open(scss_result_path, 'w') as output:
        output.write(compiled)
def build_html(html_description_path, i18n_json_path):
    """Minify the HTML description and store it under widget.description in the
    i18n JSON file, rewriting that file in place."""
    with open(i18n_json_path, 'r') as handle:
        i18n = json.load(handle)
    with open(html_description_path, 'r') as handle:
        i18n['widget']['description'] = html_minify(handle.read())
    with open(i18n_json_path, 'w') as handle:
        handle.write(json.dumps(i18n, ensure_ascii=False, indent=2))
|
import codecs
from json import JSONDecoder, JSONDecodeError
import requests
# import ujson as json
import re
import time
import datetime
import json
import threading
import os
# Script start time, as a raw epoch and a formatted string (used in log lines).
# NOTE(review): the format is '%Y-%d-%m ...' -- day and month are swapped
# relative to ISO order; confirm this is intentional.
ts = time.time()
st = datetime.datetime.fromtimestamp(ts).strftime('%Y-%d-%m %H:%M:%S')
# Small byte-count to human-readable-size converter.
suffixes = ['B', 'KB', 'MB', 'GB', 'TB', 'PB']
def humansize(nbytes):
    """Format a byte count, e.g. 1536 -> '1.5 KB', 0 -> '0 B'."""
    value = nbytes
    index = 0
    while value >= 1024 and index < len(suffixes) - 1:
        value /= 1024.
        index += 1
    # Two decimals with trailing zeros (and a dangling dot) stripped.
    text = ('%.2f' % value).rstrip('0').rstrip('.')
    return '%s %s' % (text, suffixes[index])
# Base endpoint of the PushShift Reddit archive API.
PUSHSHIFT_REDDIT_URL = "http://api.pushshift.io/reddit"
# gets a filename as a string, the file should contain ALL the names and ages (sorted by age)
def handleAuthorInput(filename):
    """Bucket author names by age from a text listing and return the buckets.

    Each input line is expected to start with a two-digit age 13-19 (or 'OLD')
    followed by two separator characters and the author name; the trailing
    newline is stripped via the [..:-1] slices.  Also writes one '<age>.txt'
    file per bucket.  Returns a list of eight lists: ages 13..19 plus OLD.
    """
    file = open(filename, "r")
    thirteen = []
    fourteen = []
    fifteen = []
    sixteen = []
    seventeen = []
    eighteen = []
    nineteen = []
    old = []
    for index, line in enumerate(file):
        if line[0:2] == "13":
            thirteen.append(line[4:-1])
        if line[0:2] == "14":
            fourteen.append(line[4:-1])
        if line[0:2] == "15":
            fifteen.append(line[4:-1])
        if line[0:2] == "16":
            sixteen.append(line[4:-1])
        if line[0:2] == "17":
            seventeen.append(line[4:-1])
        if line[0:2] == "18":
            eighteen.append(line[4:-1])
        if line[0:2] == "19":
            nineteen.append(line[4:-1])
        # NOTE(review): leftover debugging hook (breakpoint anchor); does nothing.
        if index == 2185:
            print()
        if line[0:3] == "OLD":
            old.append(line[5:-1])
    all_ages = []
    all_ages.append(thirteen)
    all_ages.append(fourteen)
    all_ages.append(fifteen)
    all_ages.append(sixteen)
    all_ages.append(seventeen)
    all_ages.append(eighteen)
    all_ages.append(nineteen)
    all_ages.append(old)
    # Persist each bucket to its own <age>.txt file.
    for index, age in enumerate(all_ages):
        cur_file = open(str(13 + index) + ".txt", "w")
        print("There are ",len(all_ages[index]), " users in age ", (13+index))
        for author in age:
            print(author, file=cur_file)
        cur_file.close()
    file.close()
    return all_ages
# all_ages = handleAuthorInput("2017authorsCommentsSubmissions.txt")
# using PushShift, there's only 2 endpoints - submissions and comments - so we'll do it separately
# example of a search : all submissions in 2017 for certain author
# https://api.pushshift.io/reddit/search/submission/?author=Itwentrightin&sort=asc&size=1000&after=1483221600&before=1514757600
def fetchObjects(**kwargs):
    """Query the PushShift API for comments or submissions and return the
    records sorted by base-36 id.  Returns None on a non-200 response."""
    # Default query parameters; caller-supplied kwargs override them.
    params = {"sort_type": "created_utc", "sort": "asc", "size": 1000}
    for key, value in kwargs.items():
        params[key] = value
    print(params)
    endpoint = "comment"
    if 'type' in kwargs and kwargs['type'].lower() == "submission":
        endpoint = "submission"
    r = requests.get(PUSHSHIFT_REDDIT_URL + "/" + endpoint + "/search/", params=params)
    if r.status_code != 200:
        return None
    data = json.loads(r.text)['data']
    return sorted(data, key=lambda record: int(record['id'], 36))
# Shared debug log for the whole collection run.
debug = open("redditdebug.txt","w")
def process(age,**kwargs):
    """Download all 2017 PushShift posts matching **kwargs for one age group and
    append them, one JSON object per line, to \\jsons\\<age><type>.json.

    Pages through the API by repeatedly requesting everything newer than the
    latest timestamp seen so far, until a page yields nothing new.  Deleted
    authors are skipped.  Returns None.
    NOTE(review): the paths start with "\\jsons\\" (drive root on Windows) --
    presumably the relative "jsons" directory was intended; confirm.
    """
    print("$$$$$$$$$$$$$$$$$$$$ ", age, " $$$$$$$$$$")
    if not os.path.exists("\\jsons\\"):
        os.makedirs("\\jsons\\")
    last_year_utc = 1483221600
    # 1483221600 is 1.1.2017 - since we want to be as sure as we can with flairs
    max_created_utc = 1483221600
    max_id = 0
    # Output files are opened in append mode, so clean them first to start over.
    json_file = open("\\jsons\\" + str(age) + str(kwargs['type']) + ".json", "a")
    corpus_file = codecs.open("\\jsons\\" + (str(age) + str(kwargs['type'])) + ".txt", "a")
    backup = open("backupReddit.txt", "a")
    counter = 1
    print("started collecting posts by author at epoch time:", ts, "which is:", st, file=debug)
    while 1:
        nothing_processed = True
        # 1514757600 is 1.1.2018: page forward from the newest record seen so far.
        objects = fetchObjects(**kwargs, after=max_created_utc, before=1514757600)
        if objects is None:
            print("weired none object 1", file=debug)
            continue
        for obj_index, object in enumerate(objects):
            if object is None:
                print("weired none object 2", file=debug)
                break
            id = int(object['id'], 36)
            if id > max_id:
                nothing_processed = False
                created_utc = object['created_utc']
                max_id = id
                if created_utc > max_created_utc:
                    max_created_utc = created_utc
                print("object number:", obj_index)
                if object['author'] == '[deleted]':
                    continue
                print(json.dumps(object, sort_keys=True, ensure_ascii=True), file=json_file)
        if nothing_processed:
            break
        max_created_utc -= 1
        counter -= 1
    backup.close()
    json_file.close()
    corpus_file.close()
    # Bug fix: this previously ended with `return all_ages`, which raised
    # NameError (all_ages is not defined in this scope).  No caller uses the
    # return value, so returning None is safe.
    return None
# retrieving all posts (comments and submissions) of all users in different ages
def handleAges(all_ages):
    """For every author in every age bucket, download their 2017 submissions and comments."""
    for index, age in enumerate(all_ages):
        for author in age:
            # The assigned values are never used afterwards; process() writes
            # its results to files as a side effect.
            all_ages_submissions = process((13 + index), type="submission", author=author, size=1000)
            all_ages_comments = process((13 + index), type="comment", author=author, size=1000)
# handleAges(all_ages)
#TODO:
# text = text.replace(u'\ufeff', '') # Char fix for some of the files
# gets a directory , runs through all it's json files and making new text file with just the body
def handleJson(input_path):
    """For every JSON-lines file in input_path, write a sibling .txt file that
    contains only the post bodies (submission selftext or comment body) with
    embedded newlines removed; '[removed]' and empty bodies are skipped.

    Bug fix: the JSON source file handle was never closed; it is now managed
    with a context manager.
    """
    dir_files = os.listdir(input_path)
    for file in dir_files:
        sents = codecs.open(input_path + "\\" + file[:-4] + "txt", 'w', "utf-8")
        with open(input_path + "\\" + file, 'r') as js:
            for line in js:
                item = json.loads(line)
                # Submissions
                if 'selftext' in item:
                    if item['selftext'] != '[removed]' and len(item['selftext'])>0:
                        print(item['selftext'].replace("\n",""), file=sents)
                # Comments
                elif 'body' in item:
                    if item['body'] != '[removed]' and len(item['body'])>0:
                        print(item['body'].replace("\n",""), file=sents)
        sents.close()
# gets the json directory name
# handleJson("jsons")
# Characters treated as whitespace when deciding sentence boundaries in splitSent().
WHITE_SPACE = [' ', '\n', '\r', '\t']
def splitSent(input_path):
    """Split each .txt file in input_path into sentences, one per line, writing
    the result to input_path\\splitsents\\<file>.splitsentences.

    A sentence ends at a line break, or at '.', ';', '?' or '!' followed by
    whitespace (with special handling for runs of dots and for quotes after a
    dot).  Sentences in the output are separated by '\r\n'.
    """
    dir_files = os.listdir(input_path)
    #TODO: also , take care of skipping the folder - it is considered as a file now
    if not os.path.exists(input_path + "\\splitsents\\"):
        os.makedirs(input_path + "\\splitsents\\")
    for file in dir_files:
        if file.endswith(".txt"):
            # new file
            sents = codecs.open(input_path + "\\splitsents\\" + file + ".splitsentences", 'w', "utf-8")
            # source file
            src = codecs.open(input_path + "\\" + file, 'r',"utf-8")
            # for line in src:
            #     print()
            #
            # sents.close()
            sentences = []
            # Concatenate the whole file, then scan it with two indices:
            # i marks the start of the current sentence, j scans forward.
            final_text = ""
            for line in src:
                final_text += line
            i = 0
            for j in range(len(final_text)):
                if i < j:
                    # any line break (we entered) ends a sentence
                    if final_text[j] in ['\r', '\n']:
                        sentences.append(final_text[i:j])
                        i = j + 1
                    # look for characters which signify end of sentence
                    elif final_text[j] in ['.', ';', '?', '!']:
                        # a '.'
                        if final_text[j] == '.':
                            k = j + 1
                            # a '.' with whitespace afterwards
                            if (k < len(final_text)) and (final_text[k] in WHITE_SPACE):
                                sentences.append(final_text[i:k])
                                i = k + 1
                            # a '.' with quotes afterwards
                            elif (k < len(final_text)) and (final_text[k] in ['"', '״', '\'']):
                                if (k + 1 < len(final_text)) and (final_text[k + 1] in WHITE_SPACE):
                                    sentences.append(final_text[i:k + 1])
                                    i = k + 2
                            else:
                                # look for more '.'s
                                while (k < len(final_text)) and (final_text[k] == '.'):
                                    k += 1
                                # many '.'s. otherwise - a letter after a '.' - not the end of the sentence
                                if k>= len(final_text):
                                    print("debug")
                                # NOTE(review): when k == len(final_text) the next
                                # line indexes past the end -- confirm inputs
                                # always end with a line break.
                                if final_text[k] in WHITE_SPACE:
                                    sentences.append(final_text[i:k])
                                    i = k + 1
                        # a '?' or a '!'
                        elif final_text[j] in ['?', '!']:
                            k = j + 1
                            while (k < len(final_text)) and (final_text[k] in ['?', '!']):
                                k += 1
                            # a whitespace should appear after them
                            if final_text[k] in WHITE_SPACE:
                                sentences.append(final_text[i:k])
                                i = k + 1
                        # a ';'
                        else:
                            k = j + 1
                            if (k < len(final_text)) and (final_text[k] in WHITE_SPACE):
                                sentences.append(final_text[i:k])
                                i = k + 1
            # Join the collected sentences with '\r\n' separators (no trailing one).
            first_sentence = True
            for sent in sentences:
                if first_sentence:
                    first_sentence = False
                else:
                    sents.write('\r\n')
                sents.write(sent)
            sents.close()
# Entry point: split the extracted text files into one sentence per line.
splitSent("jsons\\texts")
print()
debug.close() |
#!/usr/bin/env python3
import sys
def parse_in(std_in):
    """Strip surrounding whitespace from every input line."""
    cleaned = []
    for raw_line in std_in:
        cleaned.append(raw_line.strip())
    return cleaned
def plus(a, b):
    # Addition callback for OPER_DICT.
    return a + b
def times(a, b):
    # Multiplication callback for OPER_DICT.
    return a * b
# Maps an operator character to its implementation.
OPER_DICT = {'+': plus, '*': times}
def add_parens(text):
    """Parenthesize the leading sub-expression of *text*.

    Scans forward until the closing ')' of the enclosing expression
    (depth starts at 1) and wraps everything before it in parentheses;
    if no such ')' exists, the whole string is wrapped.
    """
    depth = 1
    for pos, ch in enumerate(text):
        if ch == '(':
            depth += 1
        elif ch == ')':
            depth -= 1
        if depth == 0:
            return '({})'.format(text[:pos]) + text[pos:]
    return '({})'.format(text)
def calculate(text):
    """Evaluate one expression string and return the result as a string.

    Implements AoC-2020 day-18 "part 2" precedence via add_parens():
    '+' binds tighter than '*'.  Recursion handles '(' groups; the returned
    string may carry the unconsumed remainder of an outer expression.
    """
    while True:
        # Collapse a leading parenthesized group first.
        if text[0] == '(':
            text = calculate(text[1:])
        space = text.find(' ')
        # Locate the end of the first operand; a ')' or end-of-string here
        # means the expression is already reduced to a single value.
        for idx, l in enumerate(text):
            if l == ' ':
                space = idx
                break
            elif l == ')':
                # Drop the ')' and hand the rest back to the caller.
                return text[:idx] + text[idx+1:]
            elif idx == len(text) - 1:
                return text
        first_num = int(text[:space])
        text = text[space+1:]
        operation = OPER_DICT[text[0]]
        text = text[2:]
        #Comment next 2 lines to get star 1
        # For '*', parenthesize the right side so pending '+' evaluates first.
        if operation == times:
            text = add_parens(text)
        if text[0] == '(':
            text = calculate(text[1:])
        # Locate the end of the second operand and fold the two together.
        for idx, l in enumerate(text):
            if l == ' ':
                fin = idx
                break
            elif l == ')':
                fin = idx
                next_num = int(text[:fin])
                res = operation(first_num, next_num)
                return str(res) + text[fin+1:]
            elif idx == len(text) - 1:
                next_num = int(text)
                res = operation(first_num, next_num)
                return str(res)
        next_num = int(text[:fin])
        text = text[fin:]
        res = operation(first_num, next_num)
        # Splice the partial result back in and keep reducing.
        text = str(res) + text
def second_task(parsed_lines):
    """Evaluate every expression line and return the grand total."""
    return sum(int(calculate(expr)) for expr in parsed_lines)
def main():
    """Read expressions from stdin and print the part-2 total."""
    expressions = parse_in(sys.stdin.readlines())
    #res = first_task(parsed)
    #print(f"task 1: {res}")
    answer = second_task(expressions)
    print(f"task 2: {answer}")
if __name__ == "__main__":
main()
|
import cv2
from pykinect2 import PyKinectV2
from pykinect2.PyKinectV2 import *
from pykinect2 import PyKinectRuntime
import numpy as np
import Kinect.const as const
import time
from kivy.graphics.texture import Texture
import imutils
class SimpleCalibrator(object):
    """Interactive chessboard-based calibrator for the Kinect v2 color camera.

    Workflow: takePicture() grabs a frame, find_points() locates the chessboard
    corners and the area of interest, Distortion()/Exit() derive the x/y aspect
    correction and mm-per-pixel scale from the corner grid.
    """
    def __init__(self):
        # x/y aspect-ratio correction; 1.0 until Distortion() measures it.
        self.xyratio=1.0
        return super(SimpleCalibrator, self).__init__()

    def takePicture(self,chessboard):
        """Show the live RGB stream; on ESC, keep the frame in self.colorframe.

        With chessboard=True the frame is only accepted (and saved to disk)
        once a chessboard is detected in it; otherwise the first ESC frame wins.
        """
        kinect = PyKinectRuntime.PyKinectRuntime(PyKinectV2.FrameSourceTypes_Color |
                                                 PyKinectV2.FrameSourceTypes_Infrared |
                                                 PyKinectV2.FrameSourceTypes_Depth)
        pictureok = False
        while not pictureok:
            cv2.namedWindow('RGB',cv2.WINDOW_NORMAL)
            cv2.resizeWindow('RGB', const.screenresolution)
            while(cv2.waitKey(1) != 27):#wait ESC press
                colorFrame = kinect.get_last_color_frame()
                # Kinect delivers a flat BGRA buffer; reshape to H x W x 4.
                colorFrame = colorFrame.reshape(const.rgb_image_size[1],const.rgb_image_size[0],4)
                # Mirror horizontally so the preview behaves like a mirror.
                colorFrame = cv2.flip(colorFrame,+1);
                cv2.imshow('RGB',colorFrame)
            if not chessboard:
                pictureok = True
            found, corners = cv2.findChessboardCorners(colorFrame, const.pattern_size, flags=cv2.CALIB_CB_ADAPTIVE_THRESH)
            if found:
                rgbFilePath = const.rootfolder + "SimpleColorCalibration.jpg"
                cv2.imwrite(rgbFilePath, colorFrame)
                print("ok")
                pictureok = True
            else:
                print("Dont found chessboard in rgb frame. Repeat recording Picture")
            cv2.destroyAllWindows()
        self.colorframe = colorFrame
        return

    def PictureWithCross(self,rot,x,y):
        """Return a Kivy texture of the stored frame, rotated by *rot* degrees,
        with a red cross drawn centered at pixel (x, y)."""
        localcolorframe = self.colorframe.copy()
        localcolorframe = imutils.rotate(localcolorframe,rot)
        # Vertical bar of the cross (3 px wide, 60 px tall, BGRA red).
        for k in range(-30,30):
            localcolorframe[y+k,x-1,:]=(0,0,255,255)
            localcolorframe[y+k,x,:]=(0,0,255,255)
            localcolorframe[y+k,x+1,:]=(0,0,255,255)
        # Horizontal bar of the cross.
        for k in range(-30,30):
            localcolorframe[y-1,x+k,:]=(0,0,255,255)
            localcolorframe[y,x+k,:]=(0,0,255,255)
            localcolorframe[y+1,x+k,:]=(0,0,255,255)
        return _ColorFrameToKivyPicture_(localcolorframe)

    def find_points(self,rot):
        """Detect chessboard corners in the stored frame (rotated by *rot*).

        Applies the previously measured xyratio as a resize correction, retries
        with a transposed pattern size if the first detection looks rotated,
        and stores self.areaofinterest / self.sortedcorners on success.
        Returns the sorted area of interest, or 0 when no board is found.
        """
        image = self.colorframe.copy()
        image = imutils.rotate(image,rot)
        # Undo the measured aspect distortion before corner detection.
        if self.xyratio > 1:
            image = cv2.resize(image, (0,0), fx=(1/self.xyratio), fy=1.0)
        elif self.xyratio < 1:
            image = cv2.resize(image, (0,0), fx=1.0, fy=self.xyratio)
        color_image = image.copy()
        image = cv2.cvtColor(image,cv2.COLOR_BGRA2GRAY)
        cv2.namedWindow('Color',cv2.WINDOW_NORMAL)
        cv2.resizeWindow('Color', const.screenresolution)
        cv2.imshow('Color',image)
        cv2.waitKey(0)
        debug_images = []
        found, corners = cv2.findChessboardCorners(image, const.pattern_size, flags=cv2.CALIB_CB_ADAPTIVE_THRESH)
        if found:
            cv2.cornerSubPix(image, corners, (5, 5), (-1, -1), (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 0.001))
            # Heuristic: if the 4th corner sits well below the 1st, the board
            # was detected rotated — transpose the pattern and retry.
            if (corners[3][0][1] - 30) >corners[0][0][1]:
                const.pattern_size = const.pattern_size[::-1]
                found, corners = cv2.findChessboardCorners(image, const.pattern_size, flags=cv2.CALIB_CB_ADAPTIVE_THRESH)
                if found:
                    cv2.cornerSubPix(image, corners, (5, 5), (-1, -1), (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 0.001))
            cv2.drawChessboardCorners(color_image, const.pattern_size, corners, found)
            # Outer four corners of the detected grid (TL, TR, BL, BR order
            # is established by _SortAreaOfInterest_ below).
            localareaofinterest = np.array([corners[0][0],corners[const.pattern_size[0]-1][0],corners[const.pattern_size[0]*(const.pattern_size[1]-1)][0],corners[(const.pattern_size[0]*const.pattern_size[1])-1][0]])
            print(corners)
            print("Interesse")
            print(localareaofinterest)
            areaofinterest = _SortAreaOfInterest_(localareaofinterest)
            print(areaofinterest)
            self.areaofinterest = areaofinterest
            self.sortedcorners = _SortCorners_(corners)
            cv2.namedWindow('Corner',cv2.WINDOW_NORMAL)
            cv2.resizeWindow('Corner', const.screenresolution)
            cv2.imshow('Corner',color_image)
            cv2.waitKey(0)
            cv2.imwrite(const.rootfolder+"\Chessbaord.jpg",color_image)
        else:
            print("Chessboard not found. No Area of Interst defined")
            areaofinterest = 0
        return areaofinterest

    def Distortion(self,x,y):
        """Measure the x/y aspect ratio of the detected grid and correct (x, y).

        Ratio = (mean horizontal square size) / (mean vertical square size);
        stored in self.xyratio for later resizing in find_points().
        """
        bigxyratio = ((((self.areaofinterest[1][0]-self.areaofinterest[0][0])+(self.areaofinterest[3][0]-self.areaofinterest[2][0]))/2)/(const.pattern_size[0]-1))/((((self.areaofinterest[2][1]-self.areaofinterest[0][1])+(self.areaofinterest[3][1]-self.areaofinterest[1][1]))/2)/(const.pattern_size[1]-1))
        print(bigxyratio)
        self.xyratio = bigxyratio
        if bigxyratio > 1:
            x=int(x/bigxyratio)
        elif bigxyratio < 1:
            y=int(y*bigxyratio)
        return bigxyratio,x,y

    def Exit(self):
        """Return (mm per pixel in x, mm per pixel in y) derived from the grid
        spacing and the known physical square size (const.square_size)."""
        bigxyratio = ((((self.areaofinterest[1][0]-self.areaofinterest[0][0])+(self.areaofinterest[3][0]-self.areaofinterest[2][0]))/2)/(const.pattern_size[0]-1))/((((self.areaofinterest[2][1]-self.areaofinterest[0][1])+(self.areaofinterest[3][1]-self.areaofinterest[1][1]))/2)/(const.pattern_size[1]-1))
        print(bigxyratio)
        xlenperpix = const.square_size/((((self.areaofinterest[1][0]-self.areaofinterest[0][0])+(self.areaofinterest[3][0]-self.areaofinterest[2][0]))/2)/(const.pattern_size[0]-1))
        ylenperpix = const.square_size/((((self.areaofinterest[2][1]-self.areaofinterest[0][1])+(self.areaofinterest[3][1]-self.areaofinterest[1][1]))/2)/(const.pattern_size[1]-1))
        return xlenperpix, ylenperpix
def _SortAreaOfInterest_(localareaofinterest):
    """Order four corner points as [top-left, top-right, bottom-left, bottom-right].

    Each point is assigned to the quadrant it occupies relative to the centroid
    of all four points (strict comparisons, matching the original behavior: a
    point exactly on a centroid axis matches no quadrant and, as before, an
    empty quadrant raises IndexError).

    Args:
        localareaofinterest: sequence of four (x, y) points.
    Returns:
        (4, 2) float ndarray in [TL, TR, BL, BR] order.
    """
    # Centroid of the four points (same accumulation order as before).
    avgx = 0
    avgy = 0
    for point in localareaofinterest:
        avgx = avgx + point[0]
        avgy = avgy + point[1]
    avgx = avgx / 4
    avgy = avgy / 4

    def first_match(cond):
        # First point satisfying cond; IndexError if none (as the original's
        # unbounded scan loops did).
        i = 0
        while not cond(localareaofinterest[i]):
            i = i + 1
        return localareaofinterest[i]

    areaofinterest = np.zeros((4, 2), dtype=float)
    areaofinterest[0] = first_match(lambda p: p[0] < avgx and p[1] < avgy)  # TL
    areaofinterest[1] = first_match(lambda p: p[0] > avgx and p[1] < avgy)  # TR
    areaofinterest[2] = first_match(lambda p: p[0] < avgx and p[1] > avgy)  # BL
    areaofinterest[3] = first_match(lambda p: p[0] > avgx and p[1] > avgy)  # BR
    return areaofinterest
def _ColorFrameToKivyPicture_(colorframe):
    """Convert a 1920x1080 BGRA frame into a Kivy BGR texture.

    The frame is flipped vertically because Kivy textures have their origin
    at the bottom-left, then flattened for blit_buffer.
    """
    texturecolor = Texture.create(size=(1920,1080),colorfmt='bgr')
    colorframe= cv2.cvtColor(colorframe,cv2.COLOR_BGRA2BGR)
    colorframe = cv2.flip(colorframe,0);
    colorframe = colorframe.reshape(1080*1920*3)
    texturecolor.blit_buffer(colorframe,bufferfmt='ubyte',colorfmt='bgr')
    return texturecolor
def _SortCorners_(corners):
    """Normalize detected chessboard corners into a row-major (rows, cols, 2) grid.

    If the detection ran bottom-to-top or right-to-left (first corner below /
    right of the last one), the grid is flipped so corner [0][0] is always the
    top-left of the board.
    """
    # Detection ran bottom-to-top?
    if corners[0][0][1] > corners[(const.pattern_size[0]*const.pattern_size[1])-1][0][1]:
        horswift=True
    else:
        horswift=False
    # Detection ran right-to-left?
    if corners[0][0][0] > corners[(const.pattern_size[0]*const.pattern_size[1])-1][0][0]:
        verswift=True
    else:
        verswift=False
    # Flatten the (N, 1, 2) OpenCV corner array, then shape into the grid.
    corners = corners.reshape(const.pattern_size[0]*const.pattern_size[1]*2)
    corners = corners.reshape(const.pattern_size[1],const.pattern_size[0],2)
    if horswift:
        corners=np.flipud(corners)
    if verswift:
        corners=np.fliplr(corners)
    return corners
# Generated by Django 3.0 on 2020-01-03 14:57
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter work_exp.end_date: nullable DateTimeField with a fixed default."""

    dependencies = [
        ('core', '0033_auto_20200103_2022'),
    ]

    operations = [
        migrations.AlterField(
            model_name='work_exp',
            name='end_date',
            # NOTE: the default is the concrete datetime captured when this
            # migration was generated, not a callable evaluated per row.
            field=models.DateTimeField(default=datetime.datetime(2020, 1, 3, 20, 27, 23, 977879), null=True),
        ),
    ]
|
import streamlit as st
import pandas as pd
# Title based on the dataset name.
st.title("Cardiovascular disease prediction")
st.write('')
st.write('')
st.markdown("**To choose attributes, click on the arrow in the upper left corner of the screen**")
# Static explanation of the dataset's encoded columns.
st.subheader("Data information: ")
st.write("Systolic blood pressure is higher number.")
st.write("Diastolic blood pressure is lower number.")
st.write("Cholesterol levels are: 1 - normal, 2 - above normal, 3 - well above normal")
st.write("Glucose levels are: 1 - normal, 2 - above normal, 3 - well above normal")
st.sidebar.markdown("## Choose attributes")
def get_user_input():
    """Render the sidebar attribute pickers and collect the chosen values.

    Replaces eleven copy-pasted checkbox/slider branches with one data-driven
    loop; widget labels, ranges, defaults and keys are unchanged.

    Returns:
        features: one-row DataFrame of the selected values, or 0 when nothing
            is selected (must exist even then, as the original noted).
        numbers: boolean mask over the dataset's feature columns, plus a
            trailing False for the target column.
        data: dict of selected values, or False when nothing is selected.
    """
    # (checkbox label, slider caption, min, max, default, widget key, column)
    specs = [
        ('Select age', '20 - 90', 20, 90, 55, 1, 'age'),
        ('Select sex', '1 - female, 2 - male', 1, 2, 1, 2, 'sex'),
        ('Select height', 'in cm', 100, 210, 155, 3, 'height'),
        ('Select weight', 'in kg', 30, 200, 115, 4, 'weight'),
        ('Select systolic blood pressure', ' ', 80, 180, 130, 5, 'ap_hi'),
        ('Select diastolic blood pressure', ' ', 40, 140, 90, 6, 'ap_lo'),
        ('Select cholesterol', 'levels(3): 1 to 3', 1, 3, 2, 7, 'cholesterol'),
        ('Select glucose levels', 'levels(3): 1 to 3', 1, 3, 2, 8, 'gluc'),
        ('Select smoking', '0 - not smoking, 1 - smoking', 0, 1, 0, 9, 'smoke'),
        ('Select alcohol drinking', '0 - not drinking, 1 - drinking', 0, 1, 0, 10, 'alco'),
        ('Select physical activity', '0 - no, 1 - yes', 0, 1, 0, 11, 'active'),
    ]
    numbers = []
    features = 0  # must be defined even when no attribute is selected
    data = False
    for label, caption, low, high, default, key, col in specs:
        if st.sidebar.checkbox(label):
            value = st.sidebar.slider(caption, low, high, default, key = key)
            if col == 'age':
                value = value * 365  # the dataset stores age in days
            if data:
                features[col] = value
            else:
                data = {col: value}
                features = pd.DataFrame(data, index = [0])
            numbers.append(True)
        else:
            numbers.append(False)
    numbers.append(False)  # the target column is never a feature
    return features, numbers, data
# Run the sidebar once per rerun and keep the user's selections.
user_input, numbers, data = get_user_input()

from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
from sklearn.ensemble import RandomForestClassifier, GradientBoostingClassifier
from sklearn.neighbors import KNeighborsClassifier

st.write('')
# Classifier choice plus the trigger button for training/prediction.
option = st.selectbox("Choose clasification algorithm", ("Logistic Regression", "Random Forest", "k-Nearest Neighbors", "Gradient Boosting"))
button = st.button("Get prediction")
def RFC_fun(X_train, X_test, Y_train, Y_test, user_input):
    """Train a random forest, show its test accuracy, and predict the user row."""
    model = RandomForestClassifier(n_estimators = 120, max_depth = 10, min_samples_split = 10)
    model.fit(X_train, Y_train)
    accuracy = accuracy_score(Y_test, model.predict(X_test)) * 100
    st.write(str(accuracy) + '%')
    return model.predict(user_input)
def LR_fun(X_train, X_test, Y_train, Y_test, user_input):
    """Train logistic regression, show its test accuracy, and predict the user row."""
    model = LogisticRegression()
    model.fit(X_train, Y_train)
    accuracy = accuracy_score(Y_test, model.predict(X_test)) * 100
    st.write(str(accuracy) + '%')
    return model.predict(user_input)
def kN_fun(X_train, X_test, Y_train, Y_test, user_input):
    """Train k-NN (k=40), show its test accuracy, and predict the user row."""
    model = KNeighborsClassifier(n_neighbors = 40)
    model.fit(X_train, Y_train)
    accuracy = accuracy_score(Y_test, model.predict(X_test)) * 100
    st.write(str(accuracy) + '%')
    return model.predict(user_input)
def GBC_fun(X_train, X_test, Y_train, Y_test, user_input):
    """Train gradient boosting, show its test accuracy, and predict the user row."""
    model = GradientBoostingClassifier( n_estimators = 100, max_depth = 1, learning_rate = 1,
                                        random_state = 0)
    model.fit(X_train, Y_train)
    accuracy = accuracy_score(Y_test, model.predict(X_test)) * 100
    st.write(str(accuracy) + '%')
    return model.predict(user_input)
if button:
    # The dataset is large and slow to read, so load it only when the button
    # is pressed — loading on every attribute change would slow the app
    # considerably.
    if data == False:
        st.markdown("**Please choose at least 1 attribute.**")
    else:
        heart_df = pd.read_csv ("cardio_train.csv", index_col = 0 )
        # `numbers` is a boolean mask selecting only the chosen feature columns;
        # the last column of the CSV is the target label.
        X = heart_df.iloc[:, numbers]
        Y = heart_df.iloc[:, -1]
        X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size = 0.25, random_state = 0)
        st.subheader('User Input:')
        st.write(user_input)
        st.subheader('Model Test Accuracy Score:')
        # Dispatch to the model matching the selectbox choice.
        if (option == "Random Forest"):
            prediction = RFC_fun(X_train, X_test, Y_train, Y_test, user_input)
        elif (option == "Logistic Regression"):
            prediction = LR_fun(X_train, X_test, Y_train, Y_test, user_input)
        elif (option == "k-Nearest Neighbors"):
            prediction = kN_fun(X_train, X_test, Y_train, Y_test, user_input)
        elif (option == "Gradient Boosting"):
            prediction = GBC_fun(X_train, X_test, Y_train, Y_test, user_input)
        st.subheader('Classification result: ')
        st.write('')
        if (prediction == 0):
            st.markdown("**Predicted that there is no cardiovascular disease.**")
        else:
            st.markdown("**Predicted that cardiovascular disease is present.**")
        st.write(prediction)
|
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
import inspect
import json
import re
from typing import Dict
from opentelemetry.instrumentation.botocore.extensions.types import (
_AttributeMapT,
_AwsSdkCallContext,
_AwsSdkExtension,
)
from opentelemetry.propagate import inject
from opentelemetry.semconv.trace import SpanAttributes
from opentelemetry.trace.span import Span
class _LambdaOperation(abc.ABC):
    """Base class for per-operation AWS Lambda instrumentation hooks."""

    @classmethod
    @abc.abstractmethod
    def operation_name(cls):
        """Return the botocore operation name this class handles."""
        pass

    @classmethod
    def prepare_attributes(
        cls, call_context: _AwsSdkCallContext, attributes: _AttributeMapT
    ):
        pass

    @classmethod
    def extract_attributes(
        cls, call_context: _AwsSdkCallContext, attributes: _AttributeMapT
    ):
        # _LambdaExtension dispatches to extract_attributes(); declare the
        # no-op default here so concrete operations that don't override it
        # cannot raise AttributeError (previously only prepare_attributes
        # existed on the base class).
        pass

    @classmethod
    def before_service_call(cls, call_context: _AwsSdkCallContext, span: Span):
        """Hook invoked just before the SDK call is made (default: no-op)."""
        pass
class _OpInvoke(_LambdaOperation):
    """Instrumentation hooks for the Lambda ``Invoke`` operation."""

    # https://docs.aws.amazon.com/lambda/latest/dg/API_Invoke.html#API_Invoke_RequestParameters
    # Group 1 captures the bare function name out of a full or partial ARN.
    ARN_LAMBDA_PATTERN = re.compile(
        "(?:arn:(?:aws[a-zA-Z-]*)?:lambda:)?"
        "(?:[a-z]{2}(?:-gov)?-[a-z]+-\\d{1}:)?(?:\\d{12}:)?"
        "(?:function:)?([a-zA-Z0-9-_\\.]+)(?::(?:\\$LATEST|[a-zA-Z0-9-_]+))?"
    )

    @classmethod
    def operation_name(cls):
        return "Invoke"

    @classmethod
    def extract_attributes(
        cls, call_context: _AwsSdkCallContext, attributes: _AttributeMapT
    ):
        # Record the FaaS-invocation semantic-convention attributes.
        attributes[SpanAttributes.FAAS_INVOKED_PROVIDER] = "aws"
        attributes[
            SpanAttributes.FAAS_INVOKED_NAME
        ] = cls._parse_function_name(call_context)
        attributes[SpanAttributes.FAAS_INVOKED_REGION] = call_context.region

    @classmethod
    def _parse_function_name(cls, call_context: _AwsSdkCallContext):
        # "FunctionName" may be a plain name, partial ARN or full ARN; fall
        # back to the raw value when the pattern captures nothing.
        function_name_or_arn = call_context.params.get("FunctionName")
        matches = cls.ARN_LAMBDA_PATTERN.match(function_name_or_arn)
        function_name = matches.group(1)
        return function_name_or_arn if function_name is None else function_name

    @classmethod
    def before_service_call(cls, call_context: _AwsSdkCallContext, span: Span):
        cls._inject_current_span(call_context)

    @classmethod
    def _inject_current_span(cls, call_context: _AwsSdkCallContext):
        """Propagate the current trace context via the payload's "headers" key."""
        payload_str = call_context.params.get("Payload")
        if payload_str is None:
            return
        # TODO: reconsider propagation via payload as it manipulates input of the called lambda function
        try:
            payload = json.loads(payload_str)
            headers = payload.get("headers", {})
            inject(headers)
            payload["headers"] = headers
            call_context.params["Payload"] = json.dumps(payload)
        except ValueError:
            # Non-JSON payloads are left untouched.
            pass
################################################################################
# Lambda extension
################################################################################

# Operation name -> handler class, built by scanning this module's globals for
# concrete _LambdaOperation subclasses.
_OPERATION_MAPPING = {
    op.operation_name(): op
    for op in globals().values()
    if inspect.isclass(op)
    and issubclass(op, _LambdaOperation)
    and not inspect.isabstract(op)
}  # type: Dict[str, _LambdaOperation]
class _LambdaExtension(_AwsSdkExtension):
    """Routes botocore Lambda calls to the handler for their operation, if any."""

    def __init__(self, call_context: _AwsSdkCallContext):
        super().__init__(call_context)
        # None for operations this extension does not instrument.
        self._op = _OPERATION_MAPPING.get(call_context.operation)

    def extract_attributes(self, attributes: _AttributeMapT):
        if self._op is not None:
            self._op.extract_attributes(self._call_context, attributes)

    def before_service_call(self, span: Span):
        if self._op is not None:
            self._op.before_service_call(self._call_context, span)
|
#proyecto final - BDD en Python - Fritz, Mariano
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String, Sequence, ForeignKey, Table, Text
from sqlalchemy.orm import sessionmaker, relationship
#para exportar los datos
import csv
engine = create_engine("sqlite:///:memory:")
Base = declarative_base()
class CursoNoExisteException(Exception):
    """Raised when a lookup targets a course that does not exist."""
    pass

class CursoRepetidoException(Exception):
    """Raised when creating a course whose year/division already exists."""
    pass

class OperacionCanceladaException(Exception):
    """Raised when the user declines to continue an operation."""
    pass
def cargar_un_curso(s, a, d, cantidad_alumnos=5, cantidad_profes=4):
    """Create one sample course (year *a*, division *d*) in session *s*.

    Args:
        s: active SQLAlchemy session.
        a: course year string.
        d: course division string.
        cantidad_alumnos: number of placeholder students to create.
        cantidad_profes: number of placeholder teachers to create.
    """
    un_curso = Curso(anyo=a, division=d)
    # Sample students.
    for i in range(cantidad_alumnos):
        un_alumno = Alumno(nombre=f"nombre{i}", apellido=f"apellido{i}")
        un_curso.alumnos.append(un_alumno)
    s.add(un_curso)
    # Sample teachers, each linked to the course through a Horario row.
    # FIX: the original passed curso_id=un_curso.id / profesor_id=un_profe.id,
    # but both ids are still None before the session is flushed; the two
    # relationship appends below are what actually populate the foreign keys,
    # so the explicit (None-valued) id kwargs are dropped.
    for i in range(cantidad_profes):
        un_profe = Profesor(nombre=f"{a}{d}_nombre{i}", apellido=f"apellido{i}")
        un_horario = Horario(dia="Lunes", desde=f"hora{i}", hasta=f"hora{i+1}")
        un_profe.cursos.append(un_horario)
        un_curso.profesores.append(un_horario)
        s.add(un_profe)
def cargar_datos_ejemplo(s):
    """Seed the session with demo courses 1a, 1b, 2a, 2b."""
    for anyo in ("1", "2"):
        for division in ("a", "b"):
            cargar_un_curso(s, anyo, division)
class Alumno(Base):
    """A student, belonging to at most one course."""
    __tablename__ = 'alumno'
    id = Column(Integer, Sequence('alumno_id_seq'), primary_key=True)
    nombre = Column(String)
    apellido = Column(String)
    curso_id = Column(Integer, ForeignKey('curso.id'))
    # Relationship between the student and its course.
    curso = relationship('Curso', back_populates='alumnos')

    def __repr__(self):
        return f"Estudiante {self.nombre} {self.apellido}"

    def campos_valores(self):
        """Return (column names, values) for tabular display / CSV export."""
        return ["id","apellido","nombre"],[self.id, self.apellido, self.nombre]
class Profesor(Base):
    """A teacher, linked to courses through Horario rows."""
    __tablename__ = 'profesor'
    id = Column(Integer, Sequence('profesor_id_seq'), primary_key=True)
    nombre = Column(String)
    apellido = Column(String)
    # Many-to-many teacher - schedule - course association.
    cursos = relationship('Horario', backref='profesores')

    def __repr__(self):
        return f"Prof. {self.nombre} {self.apellido}"

    def campos_valores(self):
        """Return (column names, values) for tabular display / CSV export."""
        return ["id","apellido","nombre"],[self.id, self.apellido, self.nombre]
class Curso(Base):
    """A course (year + division) with students and scheduled teachers."""
    __tablename__ = 'curso'
    id = Column(Integer, Sequence('curso_id_seq'), primary_key=True)
    anyo = Column(String)
    division = Column(String)
    # Many-to-many course - schedule - teacher association.
    profesores = relationship('Horario', backref='cursos')
    # One-to-many relationship with students.
    alumnos = relationship('Alumno', back_populates='curso')

    def __repr__(self):
        return f"Curso: {self.anyo} {self.division}"

    def campos_valores(self):
        """Return (column names, values) for tabular display / CSV export."""
        return ["id","anyo","division"],[self.id, self.anyo, self.division]
# Instead of a plain secondary association table, the link between Curso and
# Profesor is modeled as this Horario class so it can carry schedule data.
class Horario(Base):
    """Association row: a teacher's time slot in a course."""
    __tablename__ = 'horario'
    id = Column(Integer, Sequence('horario_id_seq'), primary_key=True)
    profesor_id = Column(Integer, ForeignKey('profesor.id'))
    curso_id = Column(Integer, ForeignKey('curso.id'))
    dia = Column(String)
    desde = Column(String)
    hasta = Column(String)

    def __repr__(self):
        return f"Horario: {self.dia} de {self.desde} a {self.hasta}"
def preguntar_sino(m):
    """Ask prompt *m* until a valid yes/no answer is typed; True for yes."""
    validas = ["s", "n", "si", "no"]
    while True:
        respuesta = input(m).lower()
        if respuesta in validas:
            return respuesta in ["s", "si"]
def pedir_datos():
    """Prompt for and return a (nombre, apellido) pair."""
    print("Ingrese los datos: ")
    # Tuple elements evaluate left to right, so the prompts keep their order.
    return input("Nombre: "), input("Apellido: ")
def pedir_profesor():
    """Prompt for a teacher's (nombre, apellido)."""
    return pedir_datos()
def pedir_horario():
    """Prompt for a time slot and return an unsaved Horario."""
    print("Ingrese los datos del horario: ")
    dia_semana = input("Dia de la semana: ")
    hora_inicio = input("Hora de inicio: ")
    hora_fin = input("hora de fin: ")
    return Horario(dia=dia_semana, desde=hora_inicio, hasta=hora_fin)
def seleccionar_horario():
    """Interactively build a schedule slot for a teacher."""
    print("Generar un horario para el profesor")
    return pedir_horario()
def nuevo_profesor(s):
    """Interactively create a Profesor linked to a (possibly new) course.

    May raise OperacionCanceladaException via seleccionar_curso().
    """
    print("Nuevo profesor---")
    nombre, apellido = pedir_datos()
    nuevo = Profesor(nombre=nombre, apellido=apellido)
    # Pick (or create) a course and build its time slot.
    curso = seleccionar_curso(s, True)
    horario = seleccionar_horario()
    # Wire the associations: the Horario row links teacher and course.
    nuevo.cursos.append(horario)
    curso.profesores.append(horario)
    return nuevo
def pedir_alumno():
    """Prompt for a student's (nombre, apellido)."""
    return pedir_datos()
def nuevo_alumno(s):
    """Interactively create an Alumno attached to a (possibly new) course."""
    print("Nuevo alumno---")
    nombre, apellido = pedir_alumno()
    alumno = Alumno(nombre=nombre, apellido=apellido)
    alumno.curso = seleccionar_curso(s, True)
    return alumno
def pedir_curso():
    """Prompt for and return a course's (anyo, division)."""
    print("Ingrese los datos del curso: ")
    return input("Año: "), input("Division:")
def seleccionar_curso(s, puede_crear=False):
    """Look up a course by the year/division typed by the user.

    With puede_crear=True a missing course may be created after confirmation;
    a declined creation raises OperacionCanceladaException. With
    puede_crear=False a missing course raises CursoNoExisteException.
    """
    print("Seleccionar curso")
    anyo, divis = pedir_curso()
    curso = None
    try:
        curso = s.query(Curso).filter(Curso.anyo==anyo).filter(Curso.division==divis).one()
    # NOTE(review): broad catch — .one() failing (no row / multiple rows) is the
    # intended case, but any other query error is swallowed into this path too.
    except Exception:
        if puede_crear:
            rta = preguntar_sino("El curso no existe. ¿Desea crearlo? (s/n)")
            if rta:
                curso = Curso(anyo=anyo, division=divis)
            else:
                raise OperacionCanceladaException("Operación cancelada por el usuario")
        else:
            raise CursoNoExisteException(f"No existe el curso {anyo} {divis}")
    return curso
def nuevo_curso(s):
    """Interactively create a course; CursoRepetidoException if it exists."""
    print("Nuevo curso---")
    anyo, divis = pedir_curso()
    ya_existe = s.query(Curso).filter(Curso.anyo==anyo).filter(Curso.division==divis).count() != 0
    if ya_existe:
        raise CursoRepetidoException(f"El curso {anyo} {divis} ya existe")
    return Curso(anyo=anyo, division=divis)
def menu():
    """Print the main menu until a valid option (0-9) is typed; return it."""
    opciones_validas = ("1","2","3","4","5","6","7","8","9","0")
    opt = "x"
    while opt not in opciones_validas:
        print("---- Menú de opciones ----")
        print("1. Nuevo alumno")
        print("2. Nuevo profesor")
        print("3. Nuevo curso")
        print("-"*30)
        print("4. Listar alumnos por curso")
        print("5. Listar horarios por profesor")
        print("6. Listar horarios por curso")
        print("--------------------------")
        print("7. Listar todos los alumnos")
        print("8. Listar todos los profesores")
        print("9. Listar todos los cursos")
        print("--------------------------")
        print("0. Salir")
        opt = input(">")
    return opt
def volcar_csv(ruta, campos, filas):
    """Write header *campos* followed by rows *filas* to the CSV file *ruta*."""
    with open(ruta, "w", newline="") as salida:
        escritor = csv.writer(salida)
        escritor.writerow(campos)
        for fila in filas:
            escritor.writerow(fila)
    print(f"Se ha generado el archivo {ruta}")
def mostrar_fila(fila, ancho=12):
    """Print one table row: each cell centered to *ancho*, '|'-separated."""
    celdas = [str(celda).center(ancho) for celda in fila]
    print("|".join(celdas))
def mostrar_filas(filas, ancho=12):
    """Print every row of *filas* with mostrar_fila."""
    for fila_actual in filas:
        mostrar_fila(fila_actual, ancho)
def listar_alumnos_por_curso(s):
    """Print the students of a course chosen by the user; optional CSV export."""
    # Default export path.
    ruta = "informe.csv"
    anyo, divis = pedir_curso()
    print(f"Alumnos de {anyo} {divis}")
    campos = ["Apellido","Nombre","Anyo","Curso"]
    registros = s.query(Alumno.apellido, Alumno.nombre, Curso.anyo, Curso.division).join(Curso).filter(Curso.anyo==anyo).filter(Curso.division==divis).all()
    mostrar_fila(campos)
    mostrar_filas(registros)
    print(f"{len(registros)} registros")
    if preguntar_sino(f"¿Generar {ruta}? s/n"):
        volcar_csv(ruta, campos, registros)
def listar_horarios_por_curso(s):
    """Print the schedule of a course chosen by the user; optional CSV export.

    May raise CursoNoExisteException via seleccionar_curso().
    """
    # Default export path.
    ruta = "informe.csv"
    curso = seleccionar_curso(s)
    print(f"Horarios de {curso.anyo} {curso.division}")
    campos = ["Anyo", "Division", "Dia", "Desde", "Hasta", "Apellido", "Nombre"]
    registros = s.query(Curso.anyo, Curso.division, Horario.dia, Horario.desde, Horario.hasta, Profesor.apellido, Profesor.nombre).join(Horario, Curso.id==Horario.curso_id).join(Profesor, Horario.profesor_id==Profesor.id).filter(Curso.anyo==curso.anyo).filter(Curso.division==curso.division).all()
    mostrar_fila(campos)
    mostrar_filas(registros)
    print(f"{len(registros)} registros")
    if preguntar_sino(f"¿Generar {ruta}? s/n"):
        volcar_csv(ruta, campos, registros)
def listar_horarios_por_profesor(s):
    """Print the schedule of a teacher typed by the user; optional CSV export."""
    ruta = "informe.csv"
    nombre, apellido = pedir_profesor()
    campos = ["Dia","Desde","Hasta","Anyo", "Division"]
    registros = s.query(Horario.dia, Horario.desde, Horario.hasta, Curso.anyo, Curso.division).join(Curso).join(Profesor).filter(Profesor.nombre==nombre).filter(Profesor.apellido==apellido).all()
    mostrar_fila(campos)
    mostrar_filas(registros)
    print(f"{len(registros)} registros")
    if preguntar_sino(f"¿Generar {ruta}? s/n"):
        volcar_csv(ruta, campos, registros)
def listado_completo(s, obj):
    """Print every row of mapped class *obj*; optional CSV export.

    Relies on the model's campos_valores() to supply headers and values.
    """
    ruta = "informe.csv"
    print(f"Listado completo {obj.__tablename__}")
    registros_crudos = s.query(obj).all()
    registros = []
    for r in registros_crudos:
        campos, valores = r.campos_valores()
        registros.append(valores)
    mostrar_fila(campos)
    mostrar_filas(registros)
    print(f"{len(registros)} registros")
    if preguntar_sino(f"¿Generar {ruta}? s/n"):
        volcar_csv(ruta, campos, registros)
def listar_todos_los_alumnos(s):
    """List every student."""
    listado_completo(s, Alumno)

def listar_todos_los_profesores(s):
    """List every teacher."""
    listado_completo(s, Profesor)

def listar_todos_los_cursos(s):
    """List every course."""
    listado_completo(s, Curso)
def principal(s):
    """Main menu loop: dispatch user choices until "0", then commit and exit.

    Note: the session is committed only once, after the loop ends.
    """
    opt = menu()
    while opt != "0":
        if opt == "1":
            try:
                a = nuevo_alumno(s)
                s.add(a)
            except OperacionCanceladaException as ex:
                print(ex)
        elif opt == "2":
            try:
                p = nuevo_profesor(s)
                s.add(p)
            except OperacionCanceladaException as ex:
                print(ex)
        elif opt == "3":
            try:
                c = nuevo_curso(s)
                s.add(c)
            except CursoRepetidoException as ex:
                print(ex)
        elif opt == "4":
            try:
                listar_alumnos_por_curso(s)
            except CursoNoExisteException as ex:
                print(ex)
        elif opt == "5":
            listar_horarios_por_profesor(s)
        elif opt == "6":
            try:
                listar_horarios_por_curso(s)
            except CursoNoExisteException as ex:
                print(ex)
        elif opt == "7":
            listar_todos_los_alumnos(s)
        elif opt == "8":
            listar_todos_los_profesores(s)
        elif opt == "9":
            listar_todos_los_cursos(s)
        opt = menu()
    s.commit()
    print("Programa terminado")
# Bootstrap: create the schema, open a session, seed demo data, run the menu.
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)
sesion = Session()
cargar_datos_ejemplo(sesion)
principal(sesion)
#!/usr/bin/env python
import socket
from time import sleep
import MySQLdb as mysql
import time
host = '192.168.1.116'
port = 6789
def setupSocket():
    """Open and return a TCP connection to the configured host/port."""
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect((host, port))
    return sock
def sendReceive(s, message):
    """Send *message*, read one reply, then send the EXIT sentinel and close *s*.

    Returns the reply decoded as UTF-8.
    """
    s.send(str.encode(message))
    raw_reply = s.recv(1024)
    print("We have received a reply")
    print("Send closing message.")
    s.send(str.encode("EXIT"))
    s.close()
    return raw_reply.decode('utf-8')
def transmit(message):
    """One-shot request: connect, exchange *message*, return the reply."""
    conn = setupSocket()
    return sendReceive(conn, message)
def mySQLConnection():
    """Open a connection to the local `users` MySQL database.

    SECURITY NOTE(review): credentials are hardcoded in source — move them to
    configuration/environment.
    """
    connection=mysql.connect(host = "localhost", user = "root", passwd = "student", db = "users")
    return connection
# Poll the remote server forever: fetch a command with 'GET' and, when it is
# non-empty, execute it against the local MySQL database.
while True:
    recvData=transmit('GET')
    print('GET:'+str(len(recvData)))
    if len(recvData.strip())!=0:
        con=mySQLConnection()
        if con is None:
            print('Error DB Connection')
        else:
            cur=con.cursor()
            # SECURITY: this executes raw SQL received over the network verbatim —
            # any peer that can reach the socket can run arbitrary SQL here.
            cur.execute(recvData)
            con.commit()
            con.close()
|
from app.base.func import CSVToList
from sqlalchemy.sql import func
from sqlalchemy.orm import relationship
from sqlalchemy import or_
from flask import current_app
from werkzeug.security import generate_password_hash, check_password_hash
from itsdangerous import TimedJSONWebSignatureSerializer as Serializer
from flask_sqlalchemy import SQLAlchemy
from config import config
import json
db = SQLAlchemy()
# Short aliases so the model definitions below read like plain SQLAlchemy.
Base = db.Model
Column = db.Column
Integer = db.Integer
String = db.String
Date = db.Date
DateTime = db.DateTime
Boolean = db.Boolean
ForeignKey = db.ForeignKey
Float = db.Float

# Display dictionaries: integer codes stored in the DB index into these lists
# of (Chinese) labels. Do not reorder — positions are the stored codes.
type_dict = ['不限', '景点', '酒店', '活动', '餐馆', '购物']
why_dict = ['圆满', '历险', '思考人生', '天伦之乐', '商务考察', '游学']
stay_dict = ['经济型', '普通型', '奢华型']
# Work-order status labels (customer side).
wk_status_dict = ['待补充', '匹配中', '待选择', '待发布', '下线中', '出行中',
                  '已完成']
# Supplier-side status labels.
sup_status_dict = ['待处理', '制作中', '待选择', '募集中', '出行中',
                   '已拒绝', '未中标', '已完成']
topic_dict = ['亲子旅行', '游学旅行', '企业考察', '度假旅行',
              '团队建设或奖励', '时尚之旅', '运动旅行', '美食之旅',
              '邮轮旅行', '火车之旅', '蜜月旅行', '文化之旅', '全球节日之旅',
              '粉丝之旅', '彩虹之旅', '医疗保健旅行', '财富投资之旅',
              '自然风光之旅', '著名酒店体验之旅', '自驾之旅', '海岛旅行',
              '公务机旅行', '同学聚会', '闺蜜之旅']
tran_dict = ['飞机', '火车', '大巴', '自驾']
airplane_dict = ['经济舱', '公务舱', '头等舱']
hotel_dict = ['国际连锁酒店', 'Airbnb(公寓/别墅', '精品酒店(当地特色', '度假村']
hotel_brand_dict = ['STARWOOD', 'MARRIOTT', 'HILTON', 'HYATT', 'IHG', 'ACCOR',
                    'FOUR SEASON']
food_dict = ['当地特色美食', '经济快餐', '米其林餐厅', '餐饮自理', '中餐', '其他']
visa_dict = ['已有签证', '自己办理', '协助办理']
budget_dict = ['5000-15000(经济型)', '15000-25000(品质型)',
               '25000-35000(高价型)', '35000以上(奢华型)']
class User(Base):
    """Supplier account record (table ``userinfo``) with token helpers."""
    __tablename__ = 'userinfo'
    id = Column(Integer, primary_key=True)
    email = Column(String(128), nullable=False)
    pwd = Column(String(64), nullable=False)
    phone = Column(String(32), nullable=False)
    licence = Column(String(128), nullable=False)
    company = Column(String(128), nullable=False)
    location = Column(String(128), default='')
    introduce = Column(String(1024), nullable=False)
    service = Column(String(512))
    active = Column(Integer, default=0)
    createtime = Column(DateTime, default=func.now())

    def __init__(self, id, email):
        self.id = id
        self.email = email

    @property
    def password(self):
        """Write-only attribute; reading always raises."""
        raise AttributeError('password is not a readable attribute')

    @password.setter
    def password(self, password):
        # NOTE(review): this writes `password_hash`, which is NOT a mapped
        # column (the column is `pwd`) — confirm the hash actually persists.
        self.password_hash = generate_password_hash(password)

    def verify_password(self, password):
        """Check *password* against the stored hash."""
        return check_password_hash(self.password_hash, password)

    def generate_confirmation_token(self, expiration=3600):
        """Return a signed, expiring token carrying this user's id."""
        s = Serializer(current_app.config['SECRET_KEY'], expiration)
        # NOTE(review): payload key is 'chgpwd' but confirm() below checks
        # 'confirm' — as written confirm() can never succeed; verify which
        # key is intended.
        return s.dumps({'chgpwd': self.id})

    def confirm(self, token):
        """Validate a confirmation token; True and mark confirmed on success."""
        s = Serializer(current_app.config['SECRET_KEY'])
        try:
            data = s.loads(token)
        except:
            return False
        if data.get('confirm') != self.id:
            return False
        self.confirmed = True
        # db.session.add(self)
        return True

    def generate_reset_token(self, expiration=3600):
        """Return a signed, expiring password-reset token for this user."""
        s = Serializer(current_app.config['SECRET_KEY'], expiration)
        return s.dumps({'reset': self.id})

    def reset_password(self, token, new_password):
        """Validate a reset token and set the new password on success."""
        s = Serializer(current_app.config['SECRET_KEY'])
        try:
            data = s.loads(token)
        except:
            return False
        if data.get('reset') != self.id:
            return False
        self.password = new_password
        # db.session.add(self)
        return True

    def __repr__(self):
        # NOTE(review): `self.name` is not defined on this model — repr will
        # raise AttributeError; probably meant `self.company` or `self.email`.
        return '<User %r>' % self.name

    def serialize(self):
        """Return the public JSON-safe representation of this supplier."""
        r = {
            'id': self.id,
            'company': self.company,
            'introduce': self.introduce,
        }
        return r
class SupplierOrder(Base):
    """Association row linking a supplier (User) to an Order it may serve,
    optionally carrying the itinerary the supplier proposed."""
    __tablename__ = 'sup_and_order'
    id = Column(Integer, primary_key=True)
    supplier_id = Column(Integer, ForeignKey('userinfo.id'))
    status = Column(Integer)   # index into sup_status_dict
    order_id = Column(Integer, ForeignKey('wk_order.id'))
    it_id = Column(Integer, ForeignKey('itinerary.id'))
    order = relationship('Order', back_populates='supplier')
    it = relationship('Itinerary')

    def __init__(self, supplier_id, order_id):
        self.supplier_id = supplier_id
        self.order_id = order_id
        # New links start at status 0 ('待处理', pending).
        self.status = 0

    @property
    def status_cn(self):
        """Chinese label of the supplier-side status ('None' when out of range)."""
        try:
            return sup_status_dict[self.status]
        except:
            return 'None'
class Order(Base):
    """A customer's trip request, stored in ``wk_order``.

    The ``wk_*`` columns describe the requesting customer; detailed
    preferences live in the related ``OrderInfo`` row (``self.info``).
    """
    __tablename__ = 'wk_order'
    id = Column(Integer, primary_key=True)
    wk_id = Column(Integer, nullable=False)        # requesting customer's id
    wk_name = Column(String(32), nullable=False)
    wk_port = Column(String(128), nullable=False)
    begin_date = Column(Date, nullable=False)
    end_date = Column(Date, nullable=False)
    people_num = Column(Integer, nullable=False)
    is_determined = Column(Boolean, nullable=False)
    city_id = Column(String(255))                  # JSON: list of {'city': [...], ...}
    departure = Column(String(32))
    recity = Column(String(32))                    # return city
    topic = Column(String(32))                     # CSV of indexes into topic_dict
    status = Column(Integer, default=0)            # index into wk_status_dict
    create_time = Column(DateTime, default=func.now())
    update_time = Column(DateTime, default=func.now())
    info = relationship('OrderInfo', uselist=False, back_populates='order')
    supplier = relationship('SupplierOrder', back_populates='order')

    @property
    def city_id_cn(self):
        """Chinese names of the requested cities.

        NOTE(review): this passes a generator to ``or_`` (SQLAlchemy
        expects ``or_(*conditions)``) and returns the Query object itself
        rather than fetched rows -- verify callers expect that.
        """
        citys = []
        cityname = ''
        countrys = json.loads(self.city_id)
        for country in countrys:
            for city in country['city']:
                citys.append(city)
        cityname = db.session.query(City.cityname_cn).filter(
            or_(City.cityid == i for i in citys))
        return cityname

    @property
    def status_cn(self):
        """Chinese label of the order status ('None' when out of range)."""
        try:
            return wk_status_dict[self.status]
        except:
            return 'None'

    @property
    def topic_cn(self):
        """Chinese labels of the requested topics (empty list on bad data)."""
        try:
            li = []
            for x in CSVToList(self.topic):
                li.append(topic_dict[x])
            return li
        except:
            return []

    @property
    def days_num(self):
        """Trip length in whole days."""
        return (self.end_date - self.begin_date).days

    def serialize(self, get_all=False):
        """JSON-safe dict of the order; with get_all, merge in OrderInfo fields."""
        r = {
            'id': self.id,
            'wk_id': self.wk_id,
            'wk_name': self.wk_name,
            'wk_port': self.wk_port,
            'begin_date': str(self.begin_date),
            'end_date': str(self.end_date),
            'people_num': self.people_num,
            'is_determined': self.is_determined,
            'city_id': json.loads(self.city_id),
            'departure': self.departure,
            'recity': self.recity,
            'topic': CSVToList(self.topic),
            'status': self.status,
            'create_time': str(self.create_time),
            'update_time': str(self.update_time)
        }
        if get_all:
            s = self.info
            rr = {
                'favor_tran': s.favor_tran,
                'favor_airplane': s.favor_airplane,
                'favor_hotel': s.favor_hotel,
                'favor_hotel_brand': s.favor_hotel_brand,
                'favor_food': s.favor_food,
                'favor_visa': s.favor_visa,
                'favor_budget': s.favor_budget,
                'remark': s.remark
            }
            r.update(rr)
        return r

    def php_serialize(self):
        """Minimal projection for the PHP backend."""
        return {
            'origin': self.departure,
            'destinations': self.city_id_cn,
            'pdtnums': self.people_num
        }
class OrderInfo(Base):
    """Extended preference details attached 1:1 to an ``Order``.

    Each ``favor_*`` column stores an integer index into the corresponding
    module-level lookup table (``tran_dict``, ``hotel_dict``, ...); the
    ``*_cn`` properties translate an index into its Chinese label.
    """
    __tablename__ = 'order_info'
    id = Column(Integer, primary_key=True)
    favor_tran = Column(Integer)         # index into tran_dict
    favor_airplane = Column(Integer)     # index into airplane_dict
    favor_hotel = Column(Integer)        # index into hotel_dict
    favor_hotel_brand = Column(Integer)  # index into hotel_brand_dict
    favor_food = Column(Integer)         # index into food_dict
    favor_visa = Column(Integer)         # index into visa_dict
    favor_why = Column(Integer)          # index into why_dict (defined elsewhere)
    favor_stay = Column(Integer)         # index into stay_dict (defined elsewhere)
    # BUG FIX: favor_budget was declared twice; the second declaration
    # silently replaced the first, so a single definition is kept here
    # (in the position of the effective, second one).
    favor_budget = Column(Integer)       # index into budget_dict
    is_shopping = Column(Integer)
    remark = Column(String(255))
    order_id = Column(Integer, ForeignKey('wk_order.id'), unique=True)
    order = relationship('Order', back_populates='info')

    @staticmethod
    def _label(table, index):
        """Return table[index], or 'None' when the index is missing/invalid."""
        try:
            return table[index]
        except Exception:
            return 'None'

    @property
    def tran_cn(self):
        """Chinese label of the preferred transport."""
        return self._label(tran_dict, self.favor_tran)

    @property
    def airplane_cn(self):
        """Chinese label of the preferred cabin class."""
        return self._label(airplane_dict, self.favor_airplane)

    @property
    def hotel_cn(self):
        """Chinese label of the preferred hotel type."""
        return self._label(hotel_dict, self.favor_hotel)

    @property
    def hotel_brand_cn(self):
        """Chinese label of the preferred hotel brand."""
        return self._label(hotel_brand_dict, self.favor_hotel_brand)

    @property
    def food_cn(self):
        """Chinese label of the preferred food style."""
        return self._label(food_dict, self.favor_food)

    @property
    def visa_cn(self):
        """Chinese label of the visa arrangement."""
        return self._label(visa_dict, self.favor_visa)

    @property
    def budget_cn(self):
        """Chinese label of the budget band."""
        return self._label(budget_dict, self.favor_budget)

    @property
    def why_cn(self):
        """Chinese label of the trip motivation (why_dict defined elsewhere)."""
        return self._label(why_dict, self.favor_why)

    @property
    def stay_cn(self):
        """Chinese label of the stay preference (stay_dict defined elsewhere)."""
        return self._label(stay_dict, self.favor_stay)
class City(Base):
    """Reference table of selectable destination cities."""
    __tablename__ = 'city_config'
    cityid = Column(Integer, primary_key=True)
    countryid = Column(Integer, nullable=False)        # parent Country.countryid
    cityname_cn = Column(String(255), nullable=False)  # Chinese display name
    cityname_en = Column(String(255), nullable=False)  # English display name
    city_cover = Column(String(128), nullable=False)   # cover image path
class Country(Base):
    """Reference table of selectable destination countries."""
    __tablename__ = 'country_config'
    countryid = Column(Integer, primary_key=True)
    continentid = Column(Integer, nullable=False)         # parent continent id
    countryname_cn = Column(String(255), nullable=False)  # Chinese display name
    countryname_en = Column(String(255), nullable=False)  # English display name
    country_cover = Column(String(128), nullable=False)   # cover image path
class Itinerary(Base):
    """A travel itinerary proposed by a supplier for an order."""
    __tablename__ = 'itinerary'
    id = Column(Integer, primary_key=True)
    supplier_id = Column(Integer, ForeignKey('userinfo.id'))
    title = Column(String(512), nullable=False)
    coverimg = Column(String(256), nullable=False)   # cover image path (relative)
    pieces = Column(String(1024))
    price = Column(String(1024))                     # JSON: {'count', 'chlid', 'soloroom'}
    notice = Column(String(1024))
    light = Column(String(1024), nullable=False)     # JSON list of highlight dicts
    start_date = Column(String(10), nullable=False)  # 'YYYY-MM-DD' -- TODO confirm format
    end_date = Column(String(10), nullable=False)
    status = Column(Integer, nullable=False, default=0)

    @property
    def status_cn(self):
        """Chinese label of the itinerary status (editing / done / discarded)."""
        return ['编辑中', '已完成', '废弃'][self.status]

    def serialize(self):
        """JSON-safe projection, with image paths expanded to full URLs."""
        r = {
            'id': self.id,
            'sup_id': self.supplier_id,
            'title': self.title,
            'coverimg': config.host_url + self.coverimg,
            'start_date': str(self.start_date),
            'end_date': str(self.end_date),
            'light': json.loads(self.light),
            'pieces': self.pieces,
            'notice': self.notice,
            'prices': self.price
        }
        for l in r['light']:
            l['light_img'] = config.host_url + l['light_img']
        return r

    def php_serialize(self):
        """Projection for the PHP backend.

        BUG FIXES versus the previous version:
        - 'pdtprice' was the bare list literal ['count'] instead of the
          adult price looked up from the decoded price JSON;
        - 'goofftime' referenced the non-existent ``self.begin_date``
          (the mapped column is ``start_date``);
        - 'pdtday' subtracted the String(10) columns directly (TypeError)
          and in the wrong order; the dates are parsed as ISO dates and
          subtracted end - start.  -- TODO confirm the stored date format.
        """
        from datetime import date  # stdlib; local to keep module imports unchanged
        price = json.loads(self.price)
        start = date.fromisoformat(self.start_date)
        end = date.fromisoformat(self.end_date)
        return {
            'pdtprice': price['count'],
            'pdtcprice': price['chlid'],
            'pdtdprice': price['soloroom'],
            'itineraryid': self.id,
            'pdttitle': self.title,
            'img': config.host_url + self.coverimg,
            'goofftime': str(self.start_date),
            'pdtday': (end - start).days,
            'token': config.php_token
        }
class Frag(Base):
    """A reusable itinerary fragment (sight, restaurant, ...) within a city."""
    __tablename__ = 'frag'
    id = Column(Integer, primary_key=True)
    cityid = Column(ForeignKey('city_config.cityid'))
    sup_id = Column(Integer)       # owning supplier; None = platform-provided
    name_cn = Column(String(255))
    name_en = Column(String(255))
    price = Column(Float)
    description = Column(String(255))
    period = Column(String(255))   # opening hours / duration -- TODO confirm
    cover = Column(String(255))    # cover image path (relative)
    ticket = Column(String(255))
    tel = Column(String(255))
    url = Column(String(255))
    address = Column(String(255))
    lat = Column(Float, default=0)
    lng = Column(Float, default=0)
    type = Column(Integer)
    city_name = relationship('City', foreign_keys=[cityid])

    def serialize(self):
        """JSON-safe projection; the cover URL host depends on ownership."""
        r = {
            'id': self.id,
            'sup_id': self.sup_id,
            'name_cn': self.name_cn,
            'name_en': self.name_en,
            'price': str(self.price),
            'description': self.description,
            'type': self.type
        }
        # Platform fragments live on the shared static host; supplier
        # fragments are served from this service's own host.
        if self.sup_id is None:
            r['cover'] = 'http://p.igenwo.com/static/' + self.cover
        else:
            r['cover'] = config.host_url + '/static/' + self.cover
        return r
class Match(Base):
    """Supplier capability row: supplier `sup_id` serves `countryid` for `topic`."""
    __tablename__ = 'match_table'
    id = Column(Integer, primary_key=True)
    sup_id = Column(ForeignKey('userinfo.id'))
    countryid = Column(ForeignKey('country_config.countryid'))
    topic = Column(Integer)   # index into topic_dict

    def __init__(self, sup_id, countryid, topic):
        self.sup_id = sup_id
        self.countryid = countryid
        self.topic = topic
|
"""
1.While Loop Basics
a.create a while loop that prints out a string 5 times (should not use a break statement)
b.create a while loop that appends 1, 2, and 3 to an empty string and prints that string
c.print the list you created in step 1.b.
2.while/else and break statements
a.create a while loop that does the same thing as the the while loop you created in step 1.a. but uses a
break statement to end the loop instead of what was used in step 1.a.
b.use the input function to make the user of the program guess your favorite fruit
c.create a while/else loop that continues to prompt the user to guess what your favorite fruit is
until they guess correctly (use the input function for this.)
The else should be triggered when the user correctly guesses your favorite fruit.
When the else is triggered, it should output a message saying that the user has correctly guessed your favorite fruit.
"""
counter =0
while counter < 5:
print("Nishant")
counter +=1
a_counter = 1
emp_li = []
while a_counter <4:
emp_li.append(a_counter)
a_counter +=1
print(emp_li)
str = input("Please Enter you Favourite Fruit")
|
from Flask_project.sweater import app

# Start the development server only when this file is executed directly
# (i.e. the program entry point is this file).
if __name__ == '__main__':
    app.run(debug=True)  # debug=True: show tracebacks in the browser
|
import time


def _is_prime(value):
    """Return True when `value` is prime (trial division; O(value))."""
    if value <= 1:
        return False
    for divisor in range(2, value):
        if value % divisor == 0:
            return False
    return True


print("Welcome to the Prime Number App")
flag = True
while flag:
    print("Enter 1 to determine if a specific number is prime.")
    print("Enter 2 to determine all prime numbers within a set range.")
    choice = int(input("Enter your choice 1 or 2: "))
    if choice == 1:
        num = int(input("Enter a number to determine if it is prime or not: "))
        # BUG FIX: numbers <= 1 were previously reported as prime because
        # the trial-division loop body never ran for them.
        if _is_prime(num):
            print("{} is prime".format(num))
        else:
            print("{} is not prime".format(num))
    elif choice == 2:
        lower = int(input("Enter the lower bound of your range: "))
        upper = int(input("Enter the upper bound of your range: "))
        start_time = time.time()
        prime = [j for j in range(lower, upper + 1) if _is_prime(j)]
        end_time = time.time()
        # BUG FIX: elapsed time was computed as start - end, which is
        # always negative; it is now end - start.
        total_time = round(end_time - start_time, 4)
        print("Calculations took a total of {} seconds".format(total_time))
        print("The following numbers between {} and {} are prime:".format(lower, upper))
        for k in prime:
            print(k)
    else:
        print("That is not a valid option.")
    res = input("Would you like to run the program again (y/n): ")
    if res.startswith('n'):
        flag = False
print("Thank you for using the program. Goodbye.")
class Solution:
    def lengthOfLongestSubstring(self, s: str) -> int:
        """Return the length of the longest substring of `s` without
        repeating characters.

        Sliding-window scan in O(len(s)): `last_seen` maps each character
        to the index of its most recent occurrence; when the character at
        `right` already occurs inside the window, the left edge jumps just
        past that previous occurrence.
        """
        left = 0        # inclusive left edge of the current window
        right = 0       # scan position / inclusive right edge candidate
        longest = 0
        last_seen = {}  # renamed from `dict`: don't shadow the builtin
        while right < len(s):
            previous = last_seen.get(s[right], -1)
            if previous < left:
                # Not seen inside the window (or never seen): extend it.
                last_seen[s[right]] = right
                longest = max(longest, right - left + 1)
                right += 1
            else:
                # Duplicate inside the window: move the left edge past it.
                left = previous + 1
        return longest
if __name__ == '__main__':
    # Quick manual check with CPU-time measurement.
    import time

    timer_start = time.process_time()
    source = "abcabcbb"
    result = Solution().lengthOfLongestSubstring(source)
    print(result)
    timer_end = time.process_time()
    print(str(timer_end - timer_start))
|
import numpy as np


def g_str(x):
    """Format `x` (expected 0..255) as lowercase hex, zero-padded to 2 digits.

    Replaces the old ``hex()``/``partition('L')`` dance -- a Python 2
    leftover that stripped the long-integer suffix -- and the manual
    zero-padding loop with a single format call.
    """
    return format(x, '02x')
def main(start, end, steps):
    """Return `steps` '#rrggbb' colour strings linearly interpolated
    between the hex colours `start` and `end` (given without '#')."""
    channels = []
    # Interpolate each of the R, G, B channels independently.
    for lo, hi in ((start[:2], end[:2]), (start[2:4], end[2:4]), (start[4:], end[4:])):
        ramp = np.linspace(int(lo, 16), int(hi, 16), steps)
        channels.append([int(level) for level in ramp])
    return ['#' + g_str(r) + g_str(g) + g_str(b)
            for r, g, b in zip(*channels)]
if __name__ == '__main__':
    # Demo: build a 10-step cyan -> magenta gradient and use it to colour
    # histograms of gaussians with increasing standard deviation.
    import pylab as pl
    import random as rnd
    g_len = 10
    c = main('00FFFF', 'FF00FF', g_len)
    print(c)
    for i, sd in enumerate(np.linspace(0, 5, g_len)):
        x = [rnd.gauss(0,sd) for a in np.arange(1000)]
        pl.hist(x, histtype='step', lw=5, color=c[i])
    pl.show()
    # x = np.arange(0, g_len, 1)
    # y = [0]*g_len
    # pl.scatter(x, y, c=c, s=1000, linewidths = 0)
    # pl.show()
|
#!/usr/bin/env python
################################################################################
# COPYRIGHT(c) 2018 STMicroelectronics #
# #
# Redistribution and use in source and binary forms, with or without #
# modification, are permitted provided that the following conditions are met: #
# 1. Redistributions of source code must retain the above copyright notice, #
# this list of conditions and the following disclaimer. #
# 2. Redistributions in binary form must reproduce the above copyright #
# notice, this list of conditions and the following disclaimer in the #
# documentation and/or other materials provided with the distribution. #
# 3. Neither the name of STMicroelectronics nor the names of its #
# contributors may be used to endorse or promote products derived from #
# this software without specific prior written permission. #
# #
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" #
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE #
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE #
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE #
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR #
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF #
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS #
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN #
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) #
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE #
# POSSIBILITY OF SUCH DAMAGE. #
################################################################################
################################################################################
# Author: Davide Aliprandi, STMicroelectronics #
################################################################################
# DESCRIPTION
#
# This application example shows how to connect IO-Link devices to a Linux
# gateway and to get data from them.
# IMPORT
from __future__ import print_function
import sys
import os
import time
import getopt
import json
import logging
from enum import Enum
import threading
import serial
from serial import SerialException
from serial import SerialTimeoutException
import wire_st_sdk.iolink.iolink_protocol as iolink_protocol
from wire_st_sdk.iolink.iolink_master import IOLinkMaster
from wire_st_sdk.iolink.iolink_master import IOLinkMasterListener
from wire_st_sdk.iolink.iolink_device import IOLinkDevice
from wire_st_sdk.utils.wire_st_exceptions import WireSTInvalidOperationException
# PRECONDITIONS
#
# In case you want to modify the SDK, clone the repository and add the location
# of the "WireSTSDK_Python" folder to the "PYTHONPATH" environment variable.
#
# On Linux:
# export PYTHONPATH=/home/<user>/WireSTSDK_Python
# CONSTANTS

# Presentation message shown at startup.
INTRO = """###################
# IO-Link Example #
###################"""

# IO-Link serial-link settings.
SERIAL_PORT_NAME = '/dev/ttyUSB0'
SERIAL_PORT_BAUDRATE_bs = 230400  # [b/s]
SERIAL_PORT_TIMEOUT_s = 5         # read timeout [s]

# Hardware identifiers of the two devices this example expects to find.
IOT_DEVICE_1_ID = '393832383035511900430037'
IOT_DEVICE_2_ID = '3938323830355119003B0038'


# FUNCTIONS

def print_intro():
    """Print the application's presentation message."""
    print('\n' + INTRO + '\n')
# CLASSES

class MyIOLinkMasterListener(IOLinkMasterListener):
    """Implementation of the interface used by the IOLinkMaster class to
    notify the status of the connection; events are printed to stdout."""

    def on_status_change(self, masterboard, new_status, old_status):
        """Called whenever a masterboard changes its status.

        :param masterboard: IOLinkMaster instance that has changed its status.
        :param new_status: New status.
        :param old_status: Old status.
        """
        print('Masterboard on port \"%s\" from \"%s\" to \"%s\".' %
            (masterboard.get_port().port, str(old_status), str(new_status)))

    def on_device_found(self, masterboard, device_id, device_position):
        """Called whenever a masterboard finds a new device connected.

        :param masterboard: (IOLinkMaster) Masterboard that has found a new device.
        :param device_id: (str) New device found.
        :param device_position: (int) Position of the new device found.
        """
        print('Masterboard on port \"%s\" found device \"%s\" on position \"%d\".' %
            (masterboard.get_port().port, device_id, device_position))
# MAIN APPLICATION

#
# Main application.
#
def main(argv):
    """Connect to the IO-Link masterboard over serial, enumerate the two
    expected devices, print their information and measures, then exit."""

    # Printing intro.
    print_intro()

    try:
        # Initializing Serial Port.
        serial_port = serial.Serial()
        serial_port.port = SERIAL_PORT_NAME
        serial_port.baudrate = SERIAL_PORT_BAUDRATE_bs
        serial_port.parity = serial.PARITY_NONE
        serial_port.stopbits = serial.STOPBITS_ONE
        serial_port.bytesize = serial.EIGHTBITS
        serial_port.timeout = SERIAL_PORT_TIMEOUT_s
        serial_port.write_timeout = None

        # Initializing an IO-Link Masterboard and connecting it to the host.
        print('\nInitializing Masterboard on port \"%s\" with a baud rate of ' \
            '\"%d\" [b/s]...' % (serial_port.port, serial_port.baudrate))
        master = IOLinkMaster(serial_port)
        master_listener = MyIOLinkMasterListener()
        master.add_listener(master_listener)
        # NOTE(review): the returned status is never checked.
        status = master.connect()

        # Initializing IO-Link Devices (looked up by their hardware ids).
        print('\nInitializing IO-Link Devices...')
        devices = []
        devices.append(master.get_device(IOT_DEVICE_1_ID))
        devices.append(master.get_device(IOT_DEVICE_2_ID))

        # Checking setup: abort unless both devices were found.
        for device in devices:
            if not device:
                print('IO-Link setup incomplete. Exiting...\n')
                sys.exit(0)

        # IO-Link setup complete.
        print('\nIO-Link setup complete.\n')

        # Getting information about devices.
        for device in devices:
            print('Device \"%d\":' % (device.get_position()))
            print('\tDevice Id:\n\t\t\"%s\"' % (device.get_id()))
            print('\tFirmware:\n\t\t\"%s\"' % (device.get_firmware()))
            print('\tFeatures:\n\t\t%s' % (device.get_features()))
            print()

        # Setting devices' parameters (kept disabled; enable as needed).
        # odr = iolink_protocol.ODR.ODR_6660
        # fls = iolink_protocol.FLS.FLS_16
        # sze = iolink_protocol.SZE.SZE_1024
        # sub = iolink_protocol.SUB.SUB_64
        # acq = iolink_protocol.ACQ_MIN
        # ovl = iolink_protocol.OVL_MIN
        # for device in devices:
        #     print('Device %d:' % (device.get_position()))
        #     print('\tSetting ODR to \"%s\"...' % (odr.value), end='')
        #     sys.stdout.flush()
        #     print('Done' if device.set_odr(odr) else 'Error')
        #     print('\tSetting FLS to \"%s\"...' % (fls.value), end='')
        #     sys.stdout.flush()
        #     print('Done' if device.set_fls(fls) else 'Error')
        #     print('\tSetting SZE to \"%s\"...' % (sze.value), end='')
        #     sys.stdout.flush()
        #     print('Done' if device.set_sze(sze) else 'Error')
        #     print('\tSetting SUB to \"%s\"...' % (sub.value), end='')
        #     sys.stdout.flush()
        #     print('Done' if device.set_sub(sub) else 'Error')
        #     print('\tSetting ACQ to \"%s\"...' % (acq), end='')
        #     sys.stdout.flush()
        #     print('Done' if device.set_acq(acq) else 'Error')
        #     print('\tSetting OVL to \"%s\"...' % (ovl), end='')
        #     sys.stdout.flush()
        #     print('Done' if device.set_ovl(ovl) else 'Error')
        #     print()

        # Getting measures from devices (environment, time-domain, FFT).
        for device in devices:
            print('Device \"%d\":' % (device.get_position()))
            print('\tEnvironmental data (P[mbar], H[%%], T[C]):\n\t\t%s' \
                % (device.get_env()))
            print('\tTime domain data, RMS Speed [mm/s] and Peak Acceleration' \
                ' [m/s2]:\n\t\t%s' % (device.get_tdm()))
            print('\tFast Fourier Transform of vibration data [m/s2]:')
            i = 0
            for l in device.get_fft():
                print('\t\t%d) %s' % (i, l))
                i += 1
            print()

        master.disconnect()
        sys.exit(0)

    except (WireSTInvalidOperationException, \
        SerialException, SerialTimeoutException, \
        ValueError) as e:
        print(e)
        # NOTE(review): `master` is unbound if serial.Serial() itself raised.
        master.disconnect()
        print('Exiting...\n')
        sys.exit(0)

    except KeyboardInterrupt:
        try:
            master.disconnect()
            print('\nExiting...\n')
            sys.exit(0)
        except SystemExit:
            os._exit(0)
if __name__ == "__main__":
try:
main(sys.argv[1:])
except KeyboardInterrupt:
try:
sys.exit(0)
except SystemExit:
os._exit(0)
|
"""
samplefunction8 class3
return if a number is even or odd
"""
def returnEO(number):
if number % 2 == 0:
strout = "Even"
else:
strout = "Odd"
return strout
print (returnEO(5))
print (returnEO(6))
print (returnEO(-4))
print (returnEO(-1.2))
|
from django.shortcuts import render
from django.http import HttpResponse
from docker import Client
import os
import datetime
import json
# Create your views here.
def home(request):
    """Render the dashboard landing page with a time-of-day greeting."""
    date = datetime.datetime.now()
    greeting = 'Hi '
    # Before noon -> morning greeting, otherwise the evening one.
    if int(date.strftime('%H')) < 12:
        greeting += 'Good Morning, Welcome to the Docker Dashboard'
    else:
        greeting += 'Good Evening, How was your day?, Hope your Day was Good'
    context = {
        'date_now': date,
        'username': 'Dhanesh',
        'greetings': greeting,
    }
    return render(request, 'docker_stats/index.html', context=context)
def docker_containers(request):
    """List the containers reported by the local Docker daemon."""
    cli = Client(base_url='unix://var/run/docker.sock')
    # Project each raw container record onto the fields the template needs.
    container_info = [
        {
            'container_name': container['Names'],
            'container_id': container['Id'],
            'container_state': container['State'],
            'container_status': container['Status'],
            'container_image': container['Image'],
        }
        for container in cli.containers()
    ]
    return render(request, 'docker_stats/containers.html', context={'data': container_info})
import heapq

# Greedy job selection: there are n jobs, job (a, b) becomes available on
# day a and pays b; over m days, take at most one job per day, always the
# highest-paying one already available.
n, m = map(int, input().split())
day = [[] for _ in range(10**5 + 1)]
for _ in range(n):
    a, b = map(int, input().split())
    day[a].append(b)

benefit = []  # max-heap emulated with negated values
ans = 0
for i in range(1, m + 1):
    # Jobs that become available today join the candidate pool.
    for j in day[i]:
        heapq.heappush(benefit, -j)
    # BUG FIX: removed the leftover debug print of the whole heap, which
    # corrupted the program's output; also dropped the no-op heapify of
    # the initially empty list.
    if benefit:
        ans -= heapq.heappop(benefit)  # add the current maximum payout
print(ans)
|
#coding: UTF-8
# Python 2 script: print "CPdescarga.txt" line by line.
archivo = open("CPdescarga.txt", "r")
# Skip the first 512 bytes -- TODO confirm this header skip is intended.
archivo.read(512)
# NOTE(review): this prints the file object's repr, not its contents.
print archivo
while True:
    linea = archivo.readline()
    if not linea: break
    print linea
# Dev's Journey
# Terrance Corley
import cmd
import textwrap
import sys
import os
import time
import random
screen_width = 100


#### Player Setup ####
class player:
    """Mutable bag of the player's game state."""

    def __init__(self):
        self.name = ''          # chosen during setup_game()
        self.job = ''           # one of the valid developer roles
        self.location = 'b2'    # starting zone: Home
        self.game_over = False  # flips True to end main_game_loop()


myPlayer = player()
#### Title Screen ####
def title_screen_selections():
    """Read title-screen commands until a valid one is given, then run it.

    The previous version duplicated the whole dispatch block (once before
    the validation loop and once inside it); both copies are folded into a
    single validate-then-dispatch flow with identical behaviour.
    """
    option = input('> ')
    while option.lower() not in ['play', 'help', 'quit']:
        print('Please enter a valid command.')
        option = input('> ')
    if option.lower() == 'play':
        setup_game()
    elif option.lower() == 'help':
        help_menu()
    elif option.lower() == 'quit':
        sys.exit()
def title_screen():
    """Clear the terminal, draw the main menu, then read a command."""
    os.system('clear')
    for line in (
        '###############################',
        ' # Welcome to Dev\'s Journey ',
        '###############################',
        ' - Play - ',
        ' - Help - ',
        ' - Quit - ',
        '- Copyright 2017 Terrance Corley -',
    ):
        print(line)
    title_screen_selections()
def help_menu():
    """Show the help text, then return to command selection."""
    for line in (
        '###############################',
        ' # Welcome to Dev\'s Journey ',
        '###############################',
        '- Use commands up, down, left, right to move.',
        '- Type your commands to execute them.',
        '- Use command "knock" to inspect the doors.',
        '- Good luck and have fun!',
    ):
        print(line)
    title_screen_selections()
#### Map ####
"""
a1 a2 a3 a4
-----------------
| | | | | a1
-----------------
| | | | | b2
-----------------
| | | | | c3
-----------------
| | | | | d4
-----------------
"""
# Keys used inside each zone record.
# NOTE(review): the direction "constants" are tuples of synonyms
# ('up', 'north') and SOLVED is the bool False, so the zone dicts are
# keyed by tuples/bool -- it works (they are hashable) but is fragile.
ZONENAME = ''
DESCRIPTION = 'description'
EXAMINATION = 'examine'
SOLVED = False
UP = 'up', 'north'
DOWN = 'down', 'south'
LEFT = 'left', 'west'
RIGHT = 'right', 'east'

# Per-zone puzzle-solved flags (currently unused; zonemap[...][SOLVED]
# is what player_examine actually mutates).
solved_places = {
    'a1': False,
    'a2': False,
    'a3': False,
    'a4': False,
    'b1': False,
    'b2': False,
    'b3': False,
    'b4': False,
}

# The world: zone id -> record with display info and the neighbouring
# zone id in each direction ('' = no exit that way).
zonemap = {
    'a1': {
        ZONENAME: 'Town Market',
        DESCRIPTION: 'description',
        EXAMINATION: 'examine',
        SOLVED: False,
        UP: '',
        DOWN: 'b1',
        LEFT: '',
        RIGHT: 'a2',
    },
    'a2': {
        ZONENAME: 'Mr. Petrov - Lead Web Designer',
        DESCRIPTION: 'You stand in front of a bright red door, you can hear the buzzing sound of what appears to be bad 90\'s techno music playing on the other side.',
        EXAMINATION: 'examine',
        SOLVED: False,
        UP: '',
        DOWN: 'b2',
        LEFT: 'a1',
        RIGHT: 'a3',
    },
    'a3': {
        ZONENAME: 'Town Square',
        DESCRIPTION: 'description',
        EXAMINATION: 'examine',
        SOLVED: False,
        UP: '',
        DOWN: 'b3',
        LEFT: 'a2',
        RIGHT: 'a4',
    },
    'a4': {
        ZONENAME: 'Town Hall',
        DESCRIPTION: 'description',
        EXAMINATION: 'examine',
        SOLVED: False,
        UP: '',
        DOWN: 'b4',
        LEFT: 'a3',
        RIGHT: '',
    },
    'b1': {
        ZONENAME: '',
        DESCRIPTION: 'description',
        EXAMINATION: 'examine',
        SOLVED: False,
        UP: 'a1',
        DOWN: '',
        LEFT: '',
        RIGHT: 'b2',
    },
    'b2': {
        ZONENAME: 'Home',
        DESCRIPTION: 'This is your home.',
        EXAMINATION: 'Your home looks the same - nothing has changed.',
        SOLVED: False,
        UP: 'a2',
        DOWN: '',
        LEFT: 'b1',
        RIGHT: 'b3',
    },
    'b3': {
        ZONENAME: '',
        DESCRIPTION: 'description',
        EXAMINATION: 'examine',
        SOLVED: False,
        UP: 'a3',
        DOWN: '',
        LEFT: 'b2',
        RIGHT: 'b4',
    },
    'b4': {
        ZONENAME: '',
        DESCRIPTION: 'description',
        EXAMINATION: 'examine',
        SOLVED: False,
        UP: 'a4',
        DOWN: '',
        LEFT: 'b3',
        RIGHT: '',
    },
}
#### GAME INTERACTIVITY ####
def print_location():
    """Print a banner with the current zone's name and description."""
    zone = zonemap[myPlayer.location]
    border = '#' * (4 + len(myPlayer.location))
    print('\n' + border)
    print('# ' + zone[ZONENAME].upper() + ' #')
    print('# ' + zone[DESCRIPTION] + ' #\n')
    print('\n' + border)
def new_line():
    """Emit a blank separator line (a newline plus print's own newline)."""
    sys.stdout.write('\n\n')
def prompt():
    """Read one player action (re-prompting until valid) and dispatch it."""
    print('\n' + '============================')
    print('What would you like to do?')
    action = input('> ')
    # Every verb the parser understands; synonyms share a handler below.
    acceptable_actions = [ 'move', 'go', 'travel', 'walk', 'examine', 'inspect', 'interact', 'knock', 'quit', 'exit', 'list', 'options'
    ]
    while action.lower() not in acceptable_actions:
        print('Unknown action, type "list" to see available actions.\n')
        action = input('> ')
    if action.lower() in ['quit', 'exit']:
        sys.exit()
    elif action.lower() in ['move', 'go', 'travel', 'walk']:
        player_move(action.lower())
    elif action.lower() in ['examine', 'inspect', 'interact', 'knock']:
        player_examine(action.lower())
    elif action.lower() in ['list', 'options']:
        player_options(action.lower())
def player_move_invalid(destination):
    """When `destination` is '' (no door that way), tell the player and re-prompt.

    NOTE(review): this recurses back into player_move, and the original
    caller still runs movement_handler afterwards -- movement_handler's
    own '' check is what keeps that safe.
    """
    if destination == '':
        input('That door does not exist, try again.')
        player_move('move')
def player_move(my_action):
    """Ask for a direction and move the player there.

    The four direction branches were identical except for the zonemap key;
    they are folded into one loop over the direction-synonym tuples
    (UP/DOWN/LEFT/RIGHT each hold the accepted words for that direction).
    Unrecognised input falls through silently, as before.
    """
    dest = input('Where would you like to move to?\n')
    for direction in (UP, DOWN, LEFT, RIGHT):
        if dest in direction:
            destination = zonemap[myPlayer.location][direction]
            player_move_invalid(destination)
            movement_handler(destination)
            return
def movement_handler(destination):
    """Relocate the player when `destination` names a real zone."""
    if destination == '':
        return  # no door that way; nothing to do
    print('\n' + 'You have moved to the ' + destination + '.')
    myPlayer.location = destination
    print_location()
def text_speech2(text):
    """Print `text` one character at a time (typewriter effect).

    NOTE: duplicate of text_speech below.
    """
    for ch in text:
        print(ch, end='', flush=True)
        time.sleep(0.03)
def player_examine(action):
    """Trigger the current zone's interaction/puzzle, if it has one."""
    if zonemap[myPlayer.location][SOLVED]:
        print('* A mysterious voice on the company loudspeaker yells *\nYou have already been through this door!! Why on earth would you ever want to go back?! Fool!!')
    elif zonemap[myPlayer.location][ZONENAME] == 'Mr. Petrov - Lead Web Designer':
        os.system('clear')
        # Scripted dialogue, played line by line with the typewriter effect.
        for line in (
            '(AMBIANCE): uhn tis uhn tis uhn tis uhn tis\n',
            'Mr. Petrov: Yas! Yas! Come on innnnnn, recruit!\n',
            'Terrance: ' + 'Umm, hi there it\'s nice to mee-\n',
            'Mr. Petrov: Hold on comrade! Hold on...\n',
            'Mr. Petrov: Before I even shake your hand, answer me this...\n',
            'Mr. Petrov: ...do you...like-a de technaw?\n',
            'Terrance: ' + '...the technaw? Do you mean techn-\n',
            'Mr. Pretov: Yas comrade! Thee techNAWWWWWW!\n',
            'Terrance: ' + 'I mean sure, techno is...alright.\n',
            'Mr. Pretov: Alright? ALRIGHT?! No, no, no. This will not work. Recruit, how can you nawt love et huh? What else are you going to say next? That you are still using PNG in place of SVG for your site logos?!\n',
            'Mr. Pretov: Wait...I know almost nothing about your design skills, comrade. How about you riddle me this...if I have a wonderful high res photo of me and all my friends outside the best technaw club in San Francisco...which image format should this photo be saved as?\n',
        ):
            text_speech2(line)
        answer = input('> JPG, PNG, SVG, GIF?...')
        if answer == 'JPG':
            zonemap[myPlayer.location][SOLVED] = True
            text_speech2('Mr. Pretov: HAAAAAYYYYY! Shots! Shots! Shots! Take a shot comrade, I knew you were a smart one!\n')
        else:
            # BUG FIX: this branch was a bare string expression (a no-op),
            # so the failure message was never shown to the player.
            text_speech2('Mr. Pretov: That is wrong comrade...')
    else:
        print('You can trigger a puzzle here.')
def player_options(action):
    """Clear the screen and list the commands the parser accepts."""
    os.system('clear')
    available = 'These commands are available: "move", "knock", "quit"'
    print(available)
#### Game Functionality ####
def text_speech(text):
    """Print `text` one character at a time, 0.03 s apart (typewriter effect)."""
    for ch in text:
        print(ch, end='', flush=True)
        time.sleep(0.03)
def enter_to_continue():
    """Block until the player presses Enter, then clear the screen."""
    input('> Press "Enter" to continue')
    os.system('clear')
def main_game_loop():
    """Keep prompting for actions until the game-over flag is raised."""
    while not myPlayer.game_over:
        prompt()
    # here handle if puzzles solved, boss defeated, explored everything
def setup_game():
    """Collect the player's name and job, play the intro, start the game loop."""
    os.system('clear')

    #### NAME COLLECTING ####
    question1 = 'Hello, what\'s your name?\n'
    text_speech(question1)
    player_name = input('> ')
    myPlayer.name = player_name

    #### JOB HANDLING ####
    question2 = 'What role would you like to play?\n'
    question2_added = '(You can play as a front-end developer, back-end developer, or web designer.)\n'
    text_speech(question2)
    text_speech(question2_added)
    player_job = input('> ')
    valid_jobs = ['front-end developer', 'back-end developer', 'web designer']
    # BUG FIX: the retry loop tested `player_job.lower` -- the bound
    # method object, which is never in the list -- instead of calling it,
    # so once a bad job was entered the loop never terminated even after
    # a valid answer.  Re-prompt until the (lowercased) input is valid.
    while player_job.lower() not in valid_jobs:
        player_job = input('> ')
    myPlayer.job = player_job

    #### PLAYER STATS ####
    # Build stats here

    #### INTRODUCTION ####
    # (Removed a redundant second `myPlayer.name = player_name` here.)
    title1 = '#### Noogle HQ Lobby ####\n'
    speech1 = 'HR Lady: Welcome ' + player_name + '!\n'
    speech2 = 'We are really excited to have you interview with us today at Noogle!\n'
    speech3 = 'So just to recap, you will be having 8 interviews today and they will all be taking place on the 11th floor.\n'
    speech4 = 'If you will just follow me to the elevator we can get you started!\n'
    speech5 = '#### Noogle 11th Floor ####\n'
    speech6 = 'Here we are!\n'
    speech7 = 'Okay, step on out now and get to it! Try not to get too lost now...'
    speech8 = 'Hehehehe...\n'
    os.system('clear')
    text_speech(title1)
    new_line()
    text_speech(speech1)
    text_speech(speech2)
    text_speech(speech3)
    text_speech(speech4)
    enter_to_continue()
    text_speech(speech5)
    new_line()
    text_speech(speech6)
    text_speech(speech7)
    text_speech(speech8)
    enter_to_continue()
    print('########################')
    print('# Let\'s start now! #')
    print('########################')
    main_game_loop()
title_screen() |
__all__ = ()
from datetime import datetime as DateTime
from hata import Embed
from hata.ext.slash import Button, InteractionResponse, Row
from .calendar_events import CALENDAR_EVENTS
from .constants import (
COLOR_CODE_RESET, BUTTON_BACK_DISABLED, BUTTON_CLOSE, BUTTON_NEXT_DISABLED, DAY_NAMES_SHORT, EMOJI_BACK, EMOJI_NEXT,
MONTH_MAX, MONTH_MIN, MONTH_NAMES, YEAR_MAX, YEAR_MIN
)
from .filtering import get_events_for_month
def add_month_field(embed, year, month_number):
    """
    Adds a new field to the embed for the given year-month combination.

    Parameters
    ----------
    embed : ``Embed``
        The embed to extend.
    year : `int`
        The year's number to use.
    month_number : `int`
        The month's number.
    """
    events_by_day = get_events_for_month(month_number, CALENDAR_EVENTS)

    parts = ['```']
    if events_by_day:
        # ansi code block so the per-event color codes render.
        parts.append('ansi\n')
        for day_index, (day_number, day_events) in enumerate(events_by_day):
            if day_index:
                parts.append('\n')

            week_day = DateTime(year, month_number, day_number).weekday()
            parts.append(DAY_NAMES_SHORT[week_day])
            parts.append(' ')
            parts.append(format(day_number, '>2'))
            parts.append(' ')

            # Events sharing a day are joined with " & ", each wrapped in its
            # own color code.
            for event_index, event in enumerate(day_events):
                if event_index:
                    parts.append(' & ')
                parts.append(event.color_code)
                parts.append(event.name)
                parts.append(COLOR_CODE_RESET)
    else:
        # Zero-width space keeps the empty code block from collapsing.
        parts.append('\n\u200b')

    parts.append('\n```')
    embed.add_field(MONTH_NAMES[month_number], ''.join(parts), inline = True)
def build_month_embed(year):
    """
    Builds a calendar embed for the given year, with one field per month.

    Parameters
    ----------
    year : `int`
        The year to render.

    Returns
    -------
    embed : ``Embed``
    """
    embed = Embed(f'{year} Touhou calendar')
    # One inline field per month, January through December.
    for month_number in range(MONTH_MIN, MONTH_MAX + 1):
        add_month_field(embed, year, month_number)
    return embed
def build_year_component(year):
    """
    Builds the navigation component row for the given year.

    Parameters
    ----------
    year : `int`
        The year to get the components for.

    Returns
    -------
    component : ``Row``
    """
    # Back button, disabled at the lower bound of the supported year range.
    button_back = BUTTON_BACK_DISABLED
    if year > YEAR_MIN:
        previous_year = year - 1
        button_back = Button(
            str(previous_year),
            EMOJI_BACK,
            custom_id = f'touhou_calendar.year.{previous_year}',
        )

    # Next button, disabled at the upper bound.
    button_next = BUTTON_NEXT_DISABLED
    if year < YEAR_MAX:
        next_year = year + 1
        button_next = Button(
            str(next_year),
            EMOJI_NEXT,
            custom_id = f'touhou_calendar.year.{next_year}',
        )

    return Row(button_back, button_next, BUTTON_CLOSE)
# year -> ``InteractionResponse``; responses are immutable per year, so they
# are built once and reused.
RESPONSE_CACHE = {}


def get_response_for_year(year):
    """
    Gets the cached response for the given year, building it on first use.

    Parameters
    ----------
    year : `int`
        The year to get the response for.

    Returns
    -------
    response : ``InteractionResponse``
    """
    response = RESPONSE_CACHE.get(year)
    if response is None:
        response = InteractionResponse(
            embed = build_month_embed(year),
            components = build_year_component(year),
        )
        RESPONSE_CACHE[year] = response
    return response
|
import pathlib
import re
import requests
import pandas
from pkg_resources import parse_version
# Minimum number of available spaces
spaces = 2

# Comment out trailheads you'd like to start from
exclude = [
    # 'HI → LYV',
    # 'HI → Sunrise/Merced Lakes (Pass through)',
    # "Glacier Point → LYV",
    # "Sunrise Lakes",
    # "Lyell Canyon",
]

# Dates you'd like to start on (inclusive of end date)
dates = pandas.date_range(start="2020-06-01", end="2020-10-05", freq="D")

# Write output to this file. If the generated output is identical to
# the existing output at this path, suppress notification. To disable
# writing any files, set output_path=None as shown below.
# FIX: the original wrote pathlib.Path("__file__") -- the quoted string names
# a literal file called "__file__" relative to the current working directory,
# so .parent was always ".". The dunder ``__file__`` (this script's own path)
# is what anchors the output next to the script, as intended.
output_path = pathlib.Path(__file__).parent.joinpath("hackjohn-output.txt")
# output_path = None  # None disables writing to a file

# If the Report Date is before this day, suppress Telegram notification.
# You probably do not need to change this setting unless you have disabled
# output_path
min_report_date = "2020-01-01"
def get_trailhead_df():
    """
    Convert the current "Donohue Exit Quota and Trailhead Space Available" HTML table
    to a pandas.DataFrame.

    Returns
    -------
    (response, trailhead_df) : the raw ``requests.Response`` and a long-format
    frame with Date / Trailhead / Spaces columns, sorted by Date.

    Raises
    ------
    ImportError : when running under pandas v0.23, whose read_html is broken.
    requests.HTTPError : when the page cannot be fetched.
    """
    # FIX: use the public Version.release attribute instead of the private
    # ._version.release path, which breaks on newer setuptools/packaging.
    pandas_version = parse_version(pandas.__version__).release
    if pandas_version[:2] == (0, 23):
        # read_html malfunctions in pandas v0.23
        raise ImportError("pandas v0.23 is not supported due to https://git.io/fp9Zn")
    url = "https://www.nps.gov/yose/planyourvisit/fulltrailheads.htm"
    response = requests.get(url)
    response.raise_for_status()
    # Exactly one table matches this id; the 1-tuple unpack asserts that.
    (wide_df,) = pandas.read_html(
        response.text,
        header=2,
        attrs={"id": "cs_idLayout2"},
        flavor="html5lib",
        parse_dates=["Date"],
    )
    # Keep only the Date column plus the five trailhead columns.
    wide_df = wide_df.iloc[:, :6]
    # Wide -> long: one row per (Date, Trailhead) pair with its space count.
    trailhead_df = (
        wide_df.melt(id_vars="Date", var_name="Trailhead", value_name="Spaces")
        .dropna()
        # Stable mergesort keeps trailheads in column order within each date.
        .sort_values(by=["Date"], kind="mergesort")
    )
    trailhead_df.Spaces = trailhead_df.Spaces.astype(int)
    assert len(trailhead_df) > 0
    return response, trailhead_df
def get_last_updated():
    """
    Fetch the trailhead report page and return its "last updated" cell.

    Raises
    ------
    ImportError : when running under pandas v0.23, whose read_html is broken.
    requests.HTTPError : when the page cannot be fetched.
    """
    # FIX: use the public Version.release attribute instead of the private
    # ._version.release path, which breaks on newer setuptools/packaging.
    pandas_version = parse_version(pandas.__version__).release
    if pandas_version[:2] == (0, 23):
        # read_html malfunctions in pandas v0.23
        raise ImportError("pandas v0.23 is not supported due to https://git.io/fp9Zn")
    url = "https://www.nps.gov/yose/planyourvisit/fulltrailheads.htm"
    response = requests.get(url)
    response.raise_for_status()
    # The update timestamp sits in the second row / second column of the
    # first table on the page.
    tables = pandas.read_html(
        response.text,
        flavor="html5lib"
    )
    return tables[0].iloc[1, 1]
# Fetch the current availability table once, at import time.
yose_response, trailhead_df = get_trailhead_df()

# Extract report date
try:
    # The page embeds e.g. "Report Date: 06/01/2020"; prefer that over headers.
    match = re.search(r"Report Date: ([0-9/]+)", yose_response.text)
    report_date = match.group(1)
    report_date = pandas.to_datetime(report_date, dayfirst=False)
    print("Report date is, ", report_date)
except Exception:
    # Best-effort fallback: either no report date on the page (match is None,
    # so .group raises AttributeError) or an unparseable value -- fall back to
    # the HTTP Date response header.
    report_date = yose_response.headers["Date"]
    report_date = pandas.to_datetime(report_date, utc=True)
    report_date = report_date.date().isoformat()
# NOTE(review): the success path leaves report_date as a pandas Timestamp
# while the fallback leaves an ISO string; the later
# ``min_report_date <= report_date`` comparison relies on pandas accepting
# both -- confirm.
# Rows matching the configured dates, minimum spaces, and trailhead filter.
space_df = trailhead_df.query(
    "Date in @dates and Spaces >= @spaces and Trailhead not in @exclude"
)
space_str = "NO VACANCY" if space_df.empty else space_df.to_string(index=False)
text = f"{space_str}"
print(text)

# Render the availability table as HTML (via a CSV round-trip) for the
# notification body.
# FIX: the original always read 'hackcsv.csv' even when space_df was empty,
# in which case to_csv had never written it -- crashing on a missing file or
# silently reusing a stale one from a previous run. It also renamed columns
# through csv_space_df, which aliased (not copied) space_df.
if space_df.empty:
    csv_data = "NO VACANCY"
    html_file = "NO VACANCY"
else:
    csv_space_df = space_df.copy()
    csv_space_df.columns = ['Date', 'Trailhead', 'Spaces']
    # to_csv returns None when given a path; kept for parity with the
    # original variable set.
    csv_data = csv_space_df.to_csv(r'hackcsv.csv', index=False)
    html_file = pandas.read_csv('hackcsv.csv').to_html()
# Timestamp shown in the notification so readers know how fresh the data is.
last_updated_time = get_last_updated()

phone_hours = f"""
LAST UPDATED AT {last_updated_time}
According to https://www.nps.gov/yose/planyourvisit/fulltrailheads.htm
Yosemite Reservations: 209-372-0740 (Monday–Friday 9:00am–4:30pm)
Apply at https://yosemite.org/yosemite-wilderness-permit-request-form/
"""

# Detect if output_path has changed. If so, rewrite output. Also write previous text to previous_output_path so we can find the diff
output_has_changed = True
if output_path:
    output_path = pathlib.Path(output_path)
    if output_path.is_file():
        # Compare against the previous run's output; identical text means
        # nothing new, so the notification below is suppressed.
        previous_text = output_path.read_text()
        output_has_changed = text != previous_text
    if output_has_changed:
        output_path.write_text(text)
print(f"output has changed: {output_has_changed}")
# Notify only when there is availability, the output actually changed since
# the last run, and the report is recent enough.
notify = not space_df.empty and output_has_changed and min_report_date <= report_date

# Zapier webhook notification
enable_zapier = True
zapier_url = "https://hooks.zapier.com/hooks/catch/7560244/oi4zve2"
if notify and enable_zapier:
    # FIX: the second key was "value_2", inconsistent with "value1" above;
    # Zapier catch hooks expose posted form fields as value1, value2, ...
    report = {
        "value1": html_file,
        "value2": phone_hours,
    }
    response = requests.post(zapier_url, data=report)
    print("SENDING EMAIL notify is ", notify)
    print("zapier status code", response.status_code)
    print(response.text)
|
"""URL definition for the beer site"""
from django.urls import path, include
from beers.views import user, validation, contest
import beers.api.views as api_views
# REST API routes, mounted under /api/ in ``urlpatterns`` below.
api_patterns = [
    # Players
    path('players/', api_views.PlayerList.as_view(), name='player-list',),
    path('players/<slug:user__username>', api_views.PlayerDetail.as_view(), name='player-detail',),
    # Contests and their nested resources
    path('contests/', api_views.ContestList.as_view(), name='contest-list',),
    path('contests/<int:id>', api_views.ContestDetail.as_view(), name='contest-detail',),
    path('contests/<int:contest_id>/players/',
         api_views.ContestPlayerList.as_view(),
         name='contest-player-list',),
    path('contests/<int:contest_id>/players/<slug:username>',
         api_views.ContestPlayerDetail.as_view(),
         name='contest-player-detail',),
    path('contests/<int:contest_id>/beers/',
         api_views.ContestBeerList.as_view(),
         name='contest-beer-list',),
    path('contests/<int:contest_id>/beers/<int:contest_beer_id>',
         api_views.ContestBeerDetail.as_view(),
         name='contest-beer-detail',),
    path('contests/<int:contest_id>/breweries/',
         api_views.ContestBreweryList.as_view(),
         name='contest-brewery-list',),
    path('contests/<int:contest_id>/breweries/<int:contest_brewery_id>',
         api_views.ContestBreweryDetail.as_view(),
         name='contest-brewery-detail',),
    path('contests/<int:contest_id>/bonuses/',
         api_views.ContestBonusList.as_view(),
         name='contest-bonus-list',),
    path('contests/<int:contest_id>/bonuses/<int:contest_bonus_id>',
         api_views.ContestBonusDetail.as_view(),
         name='contest-bonus-detail',),
    # Checkin validation queue
    path('contests/<int:contest_id>/unvalidated_checkins',
         api_views.UnvalidatedCheckinList.as_view(),
         name='unvalidated-checkin-list',),
    path('unvalidated_checkins/<int:id>',
         api_views.UnvalidatedCheckinDetail.as_view(),
         name='unvalidated-checkin-detail',),
    # Search endpoints
    path('lookup/beer', api_views.BeerLookup.as_view(), name='beer-lookup'),
    path('lookup/brewery', api_views.BreweryLookup.as_view(), name='brewery-lookup'),
]
# HTML views, mounted under /contests/ in ``urlpatterns`` below. Routes for a
# single contest are grouped via include() so they share the contest_id prefix.
contest_patterns = [
    path('', contest.contests, name='contests'),
    path('add', contest.contest_add, name='contest-add'),
    path('<int:contest_id>/', include([
        path('', contest.contest, name='contest'),
        path('join', contest.contest_join, name='contest-join'),
        # Checkin validation workflow
        path('validate', validation.unvalidated_checkins, name='unvalidated-checkins'),
        path('unvalidated_checkins',
             validation.unvalidated_checkins_json,
             name='unvalidated-checkins-json'),
        path('unvalidated_checkins/<int:uv_checkin>',
             validation.delete_checkin,
             name='delete-checkin'),
        path('checkins', validation.validate_checkin, name='validate-checkin'),
        # Per-contest listings and detail pages
        path('players/<slug:username>', contest.contest_player, name='contest-player'),
        path('players', contest.contest_players, name='contest-players'),
        path('beers/', contest.contest_beers, name='contest-beers'),
        path('beers/<int:beer_id>', contest.contest_beer, name='contest-beer'),
        path('breweries/', contest.contest_breweries, name='contest-breweries'),
        path('breweries/<int:brewery_id>', contest.contest_brewery, name='contest-brewery'),
        path('challenges/', contest.contest_challenges, name='contest-challenges'),
        path('challenges/<int:beer_id>', contest.contest_challenge, name='contest-challenge'),
        path('bonuses/', contest.contest_bonuses, name='contest-bonuses'),
        path('bonuses/<slug:bonus_tag>', contest.contest_bonus, name='contest-bonus'),
    ])),
]
# Top-level URL configuration: site pages, Django's stock auth views,
# contest pages, and the REST API.
urlpatterns = [
    path('', contest.index, name='index'),
    path('', include('django.contrib.auth.urls')),
    path('signup', user.signup, name='signup'),
    path('profile', user.update_profile, name='profile'),
    path('instructions', contest.instructions, name='instructions'),
    path('contests/', include(contest_patterns)),
    path('api/', include(api_patterns)),
]
|
#!/usr/bin/python3
# Copyright (c) 2018, 2019 Peter Palfrader
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Loader for .site formatted graphs."""
# When run directly as a script (not imported as part of a package), add the
# directory __LEVEL levels above this file to sys.path so the absolute
# ``ORD53.*`` imports below resolve without installation.
if __name__ == '__main__' and __package__ is None:
    import os
    __LEVEL = 2
    os.sys.path.append(os.path.abspath(os.path.join(*([os.path.dirname(__file__)] + ['..']*__LEVEL))))
from ORD53.graph.Graph import GeometricGraph
from ORD53.common.geometry import Vertex2
from ORD53.common.iter import pair_iterator, PeekIterator
import os
class SiteLoader:
    """Loads ``.site`` formatted graph files."""
    extension = '.site'

    @classmethod
    def load(cls, content, name="unknown", args=None):
        """Load graph from a valid .site file"""
        graph = GeometricGraph(source=name, fmt=os.path.basename(__file__))
        lines = iter(content.splitlines())
        while True:
            # Elements come in pairs: a type line followed by a data line;
            # a trailing unpaired line is silently ignored.
            try:
                element_type = next(lines).rstrip()
                element_data = next(lines).rstrip()
            except StopIteration:
                break
            if element_type == "0":  # segment: x1 y1 x2 y2
                coords = [float(field) for field in element_data.split()]
                graph.add_edge_by_vertex(
                    Vertex2(coords[0], coords[1]),
                    Vertex2(coords[2], coords[3]),
                )
        return graph
def main():
    """Load a graph from stdin or a file."""
    import argparse
    import sys

    # Both positionals default to the standard streams so the tool composes
    # in a shell pipeline.
    parser = argparse.ArgumentParser(description='Load a graph from a .site file')
    parser.add_argument('inputfile', help='Inputfile (.site)', nargs='?', type=argparse.FileType('r'), default=sys.stdin)
    parser.add_argument('outputfile', help='Outputfile (.graphml)', nargs='?', type=argparse.FileType('wb'), default=sys.stdout.buffer)
    parser.add_argument('-r', '--randomize-weights', action='store_true', default=False, help='randomize edge weights')
    arguments = parser.parse_args()

    graph = SiteLoader.load(arguments.inputfile.read())
    if arguments.randomize_weights:
        graph.randomize_weights()
    graph.write_graphml(arguments.outputfile)
    arguments.outputfile.close()
# Script entry point; the ``__package__ is None`` check mirrors the sys.path
# bootstrap guard at the top of the file.
if __name__ == '__main__' and __package__ is None:
    main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.