hexsha stringlengths 40 40 | size int64 3 1.03M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 972 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 972 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 972 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 3 1.03M | avg_line_length float64 1.13 941k | max_line_length int64 2 941k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
99d90f0037bec0f478fe5d4636df8a2ae1954387 | 4,079 | py | Python | research/delf/delf/python/pooling_layers/pooling.py | w07wong/models | ca011eaa5a35adc46b92fe957f3e57bea53c9f69 | [
"Apache-2.0"
] | 2 | 2021-03-03T07:54:33.000Z | 2021-03-09T16:13:01.000Z | research/delf/delf/python/pooling_layers/pooling.py | w07wong/models | ca011eaa5a35adc46b92fe957f3e57bea53c9f69 | [
"Apache-2.0"
] | null | null | null | research/delf/delf/python/pooling_layers/pooling.py | w07wong/models | ca011eaa5a35adc46b92fe957f3e57bea53c9f69 | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Pooling layers definitions."""
import tensorflow as tf
class MAC(tf.keras.layers.Layer):
  """Global max pooling (MAC) layer.

  Maximum Activations of Convolutions (MAC) is simply constructed by
  max-pooling over all dimensions per feature map. See
  https://arxiv.org/abs/1511.05879 for a reference.
  """

  def __init__(self):
    """Initialization of the global max pooling (MAC) layer."""
    super(MAC, self).__init__()

  def call(self, x, axis=(1, 2)):
    """Invokes the MAC pooling instance.

    Args:
      x: [B, H, W, D] A float32 Tensor.
      axis: Dimensions to reduce. Defaults to the spatial dimensions (1, 2).

    Returns:
      output: [B, D] A float32 Tensor.
    """
    # Fix: the default was a mutable list ([1, 2]); an immutable tuple avoids
    # the shared-mutable-default-argument pitfall while behaving identically.
    return mac(x, axis=axis)
class SPoC(tf.keras.layers.Layer):
  """Average pooling (SPoC) layer.

  Sum-pooled convolutional features (SPoC) is based on the sum pooling of the
  deep features. See https://arxiv.org/pdf/1510.07493.pdf for a reference."""

  def __init__(self):
    """Initialization of the SPoC layer."""
    super(SPoC, self).__init__()

  def call(self, x, axis=(1, 2)):
    """Invokes the SPoC instance.

    Args:
      x: [B, H, W, D] A float32 Tensor.
      axis: Dimensions to reduce. Defaults to the spatial dimensions (1, 2).

    Returns:
      output: [B, D] A float32 Tensor.
    """
    # Fix: immutable tuple default instead of a mutable list.
    return spoc(x, axis)
class GeM(tf.keras.layers.Layer):
  """Generalized mean pooling (GeM) layer.

  Generalized Mean Pooling (GeM) computes the generalized mean of each
  channel in a tensor. See https://arxiv.org/abs/1711.02512 for a reference.
  """

  def __init__(self, power=3.):
    """Initialization of the generalized mean pooling (GeM) layer.

    Args:
      power: Float power > 0 is an inverse exponent parameter, used during
        the generalized mean pooling computation. Setting this exponent as
        power > 1 increases the contrast of the pooled feature map and focuses
        on the salient features of the image. GeM is a generalization of the
        average pooling commonly used in classification networks (power = 1)
        and of spatial max-pooling layer (power = inf).
    """
    super(GeM, self).__init__()
    self.power = power
    self.eps = 1e-6  # numerical-stability floor applied before tf.pow

  def call(self, x, axis=(1, 2)):
    """Invokes the GeM instance.

    Args:
      x: [B, H, W, D] A float32 Tensor.
      axis: Dimensions to reduce. Defaults to the spatial dimensions (1, 2).

    Returns:
      output: [B, D] A float32 Tensor.
    """
    # Fix: immutable tuple default instead of a mutable list.
    return gem(x, power=self.power, eps=self.eps, axis=axis)
def mac(x, axis=(1, 2)):
  """Performs global max pooling (MAC).

  Args:
    x: [B, H, W, D] A float32 Tensor.
    axis: Dimensions to reduce. Defaults to the spatial dimensions (1, 2).

  Returns:
    output: [B, D] A float32 Tensor.
  """
  # Fix: tuple default instead of a mutable list default argument.
  return tf.reduce_max(x, axis=axis, keepdims=False)
def spoc(x, axis=(1, 2)):
  """Performs average pooling (SPoC).

  Args:
    x: [B, H, W, D] A float32 Tensor.
    axis: Dimensions to reduce. Defaults to the spatial dimensions (1, 2).

  Returns:
    output: [B, D] A float32 Tensor.
  """
  # Fix: tuple default instead of a mutable list default argument.
  return tf.reduce_mean(x, axis=axis, keepdims=False)
def gem(x, axis=(1, 2), power=3., eps=1e-6):
  """Performs generalized mean pooling (GeM).

  Args:
    x: [B, H, W, D] A float32 Tensor.
    axis: Dimensions to reduce. Defaults to the spatial dimensions (1, 2).
    power: Float, power > 0 is an inverse exponent parameter (GeM power).
    eps: Float, parameter for numerical stability (floor applied before the
      power so tf.pow never sees non-positive values).

  Returns:
    output: [B, D] A float32 Tensor.
  """
  # Fix: tuple default instead of a mutable list default argument.
  tmp = tf.pow(tf.maximum(x, eps), power)
  out = tf.pow(tf.reduce_mean(tmp, axis=axis, keepdims=False), 1. / power)
  return out
| 28.326389 | 80 | 0.648934 |
09539e0ca368b78938dad4516841b5e05cefb990 | 299 | py | Python | problems/3.py | christofferaakre/project-euler | 4b42802233be10e4a592798205171fb5156dae6b | [
"MIT"
] | null | null | null | problems/3.py | christofferaakre/project-euler | 4b42802233be10e4a592798205171fb5156dae6b | [
"MIT"
] | null | null | null | problems/3.py | christofferaakre/project-euler | 4b42802233be10e4a592798205171fb5156dae6b | [
"MIT"
] | null | null | null | from main import Solver
solver = Solver()  # Project Euler helper (from main) used to submit answers.
big = 600851475143  # Problem 3 input: find the largest prime factor of this.
def largest_prime_factor(number):
    """Return the largest prime factor of *number* by trial division.

    Repeatedly divides out the smallest factor; when no factor smaller than
    the remainder exists, the remainder itself is the largest prime factor.
    For number <= 1 the input is returned unchanged.
    """
    factor = 2
    remaining = number
    while remaining > factor:
        if remaining % factor == 0:
            # Fix: use integer (floor) division; the original `a /= b`
            # produced a float in Python 3, losing exactness for large
            # inputs and requiring int() truncation at the end.
            remaining //= factor
            # No need to reset factor: any factor of `remaining` smaller
            # than `factor` would already have been divided out.
        else:
            factor += 1
    return remaining
solver.solve(3, largest_prime_factor(big))
| 16.611111 | 42 | 0.518395 |
33bbe447bb44f229d8ceb14e9cd167a21fc7c1ad | 446 | py | Python | quotrapp/main/routes.py | joshbduncan/quotr | dc35cdffe3133f07e7d2471fa40d5291f16ceee2 | [
"MIT"
] | 1 | 2020-09-12T04:56:20.000Z | 2020-09-12T04:56:20.000Z | quotrapp/main/routes.py | joshbduncan/quotr | dc35cdffe3133f07e7d2471fa40d5291f16ceee2 | [
"MIT"
] | null | null | null | quotrapp/main/routes.py | joshbduncan/quotr | dc35cdffe3133f07e7d2471fa40d5291f16ceee2 | [
"MIT"
] | null | null | null | from flask import render_template, request, Blueprint, current_app
from sqlalchemy import func
from quotrapp.models import Quote, Author
main_bp = Blueprint('main_bp', __name__)
@main_bp.route('/')
@main_bp.route('/index')
def index():
    """Render the home page with one randomly selected quote."""
    random_quote = Quote.query.order_by(func.random()).first()
    return render_template('index.html', quote=random_quote)
@main_bp.route('/about')
def about():
    """Render the static about page."""
    page = 'about.html'
    return render_template(page, title='About')
| 23.473684 | 66 | 0.733184 |
05a2e1662ddf1ad651bdca507b842eab3d478998 | 12,091 | py | Python | stack/stack/scp.py | leipan/ariamh | aa307ddad7d0f3f75303e9a5bb53d25ba171605e | [
"Apache-2.0"
] | null | null | null | stack/stack/scp.py | leipan/ariamh | aa307ddad7d0f3f75303e9a5bb53d25ba171605e | [
"Apache-2.0"
] | null | null | null | stack/stack/scp.py | leipan/ariamh | aa307ddad7d0f3f75303e9a5bb53d25ba171605e | [
"Apache-2.0"
] | null | null | null | # scp.py
# Copyright (C) 2008 James Bardin <jbardin@bu.edu>
"""
Utilities for sending files over ssh using the scp1 protocol.
"""
import os
import re
from socket import timeout as SocketTimeout
DEBUG = False
class SCPClient(object):
    """
    An scp1 implementation, compatible with openssh scp.
    Raises SCPException for all transport related errors. Local filesystem
    and OS errors pass through.
    Main public methods are .put and .get
    The get method is controlled by the remote scp instance, and behaves
    accordingly. This means that symlinks are resolved, and the transfer is
    halted after too many levels of symlinks are detected.
    The put method uses os.walk for recursion, and sends files accordingly.
    Since scp doesn't support symlinks, we send file symlinks as the file
    (matching scp behaviour), but we make no attempt at symlinked directories.

    NOTE(review): this is Python 2 code (`file(...)`, `except E, e` syntax,
    str/bytes conflation on the channel) and will not run under Python 3.
    """

    def __init__(self, transport, buff_size=16384, socket_timeout=5.0,
                 progress=None):
        """
        Create an scp1 client.
        @param transport: an existing paramiko L{Transport}
        @type transport: L{Transport}
        @param buff_size: size of the scp send buffer.
        @type buff_size: int
        @param socket_timeout: channel socket timeout in seconds
        @type socket_timeout: float
        @param progress: callback - called with (filename, size, sent) during
            transfers
        @type progress: function(string, int, int)
        """
        self.transport = transport
        self.buff_size = buff_size
        self.socket_timeout = socket_timeout
        self.channel = None
        self.preserve_times = False
        self._progress = progress
        self._recv_dir = ''      # current local directory while receiving
        self._utime = None       # pending (atime, mtime) from a 'T' command
        self._dirtimes = {}      # dir path -> times, applied after transfer

    def put(self, files, remote_path='.',
            recursive=False, preserve_times=False):
        """
        Transfer files to remote host.
        @param files: A single path, or a list of paths to be transfered.
            recursive must be True to transfer directories.
        @type files: string OR list of strings
        @param remote_path: path in which to receive the files on the remote
            host. defaults to '.'
        @type remote_path: str
        @param recursive: transfer files and directories recursively
        @type recursive: bool
        @param preserve_times: preserve mtime and atime of transfered files
            and directories.
        @type preserve_times: bool
        """
        self.preserve_times = preserve_times
        self.channel = self.transport.open_session()
        self.channel.settimeout(self.socket_timeout)
        # Index a 2-tuple by the bool: False -> plain, True -> recursive (-r).
        scp_command = ('scp -t %s', 'scp -r -t %s')[recursive]
        self.channel.exec_command(scp_command % remote_path)
        self._recv_confirm()
        if not isinstance(files, (list, tuple)):
            files = [files]
        if recursive:
            self._send_recursive(files)
        else:
            self._send_files(files)
        if self.channel:
            self.channel.close()

    def get(self, remote_path, local_path='',
            recursive=False, preserve_times=False):
        """
        Transfer files from remote host to localhost
        @param remote_path: path to retreive from remote host. since this is
            evaluated by scp on the remote host, shell wildcards and
            environment variables may be used.
        @type remote_path: str
        @param local_path: path in which to receive files locally
        @type local_path: str
        @param recursive: transfer files and directories recursively
        @type recursive: bool
        @param preserve_times: preserve mtime and atime of transfered files
            and directories.
        @type preserve_times: bool
        """
        self._recv_dir = local_path or os.getcwd()
        rcsv = ('', ' -r')[recursive]
        prsv = ('', ' -p')[preserve_times]
        self.channel = self.transport.open_session()
        self.channel.settimeout(self.socket_timeout)
        self.channel.exec_command("scp%s%s -f %s" %
                                  (rcsv, prsv, _sh_quote(remote_path)))
        self._recv_all()
        if self.channel:
            self.channel.close()

    def _read_stats(self, name):
        """return just the file stats needed for scp"""
        stats = os.stat(name)
        mode = oct(stats.st_mode)[-4:]  # last 4 octal digits = permissions
        size = stats.st_size
        atime = int(stats.st_atime)
        mtime = int(stats.st_mtime)
        return (mode, size, mtime, atime)

    def _send_files(self, files):
        # Send each file with a 'C<mode> <size> <name>' header, the raw
        # bytes, then a NUL terminator, waiting for confirmation each step.
        for name in files:
            basename = os.path.basename(name)
            (mode, size, mtime, atime) = self._read_stats(name)
            if self.preserve_times:
                self._send_time(mtime, atime)
            file_hdl = file(name, 'rb')
            self.channel.sendall("C%s %d %s\n" %
                                 (mode, size, _sh_quote(basename)))
            self._recv_confirm()
            file_pos = 0
            if self._progress:
                self._progress(basename, size, 0)
            buff_size = self.buff_size
            chan = self.channel
            while file_pos < size:
                chan.sendall(file_hdl.read(buff_size))
                file_pos = file_hdl.tell()
                if self._progress:
                    self._progress(basename, size, file_pos)
            chan.sendall('\x00')
            file_hdl.close()
            self._recv_confirm()

    def _chdir(self, from_dir, to_dir):
        # Pop until we're one level up from our next push.
        # Push *once* into to_dir.
        # This is dependent on the depth-first traversal from os.walk
        # add path.sep to each when checking the prefix, so we can use
        # path.dirname after
        common = os.path.commonprefix([from_dir + os.path.sep,
                                       to_dir + os.path.sep])
        # now take the dirname, since commonprefix is character based,
        # and we either have a seperator, or a partial name
        common = os.path.dirname(common)
        cur_dir = from_dir.rstrip(os.path.sep)
        while cur_dir != common:
            cur_dir = os.path.split(cur_dir)[0]
            self._send_popd()
        # now we're in our common base directory, so on
        self._send_pushd(to_dir)

    def _send_recursive(self, files):
        # Walk each base path depth-first, mirroring directory changes on
        # the remote side via pushd/popd commands.
        for base in files:
            if not os.path.isdir(base):
                # filename mixed into the bunch
                self._send_files([base])
                continue
            last_dir = base
            for root, dirs, fls in os.walk(base):
                self._chdir(last_dir, root)
                self._send_files([os.path.join(root, f) for f in fls])
                last_dir = root

    def _send_pushd(self, directory):
        # 'D' command: enter a remote subdirectory with the given mode.
        (mode, size, mtime, atime) = self._read_stats(directory)
        basename = os.path.basename(directory)
        if self.preserve_times:
            self._send_time(mtime, atime)
        self.channel.sendall('D%s 0 %s\n' % (mode, basename))
        self._recv_confirm()

    def _send_popd(self):
        # 'E' command: leave the current remote subdirectory.
        self.channel.sendall('E\n')
        self._recv_confirm()

    def _send_time(self, mtime, atime):
        # 'T' command: set times for the next file/directory sent.
        self.channel.sendall('T%d 0 %d 0\n' % (mtime, atime))
        self._recv_confirm()

    def _recv_confirm(self):
        # read scp response: \x00 = OK, \x01 = warning/error with message.
        msg = ''
        try:
            msg = self.channel.recv(512)
        except SocketTimeout:
            # NOTE(review): "Timout" typo is in the runtime message;
            # left unchanged here since it is a behavioral string.
            raise SCPException('Timout waiting for scp response')
        if msg and msg[0] == '\x00':
            return
        elif msg and msg[0] == '\x01':
            raise SCPException(msg[1:])
        elif self.channel.recv_stderr_ready():
            msg = self.channel.recv_stderr(512)
            raise SCPException(msg)
        elif not msg:
            raise SCPException('No response from server')
        else:
            raise SCPException('Invalid response from server: ' + msg)

    def _recv_all(self):
        # loop over scp commands, and recive as necessary
        command = {'C': self._recv_file,
                   'T': self._set_time,
                   'D': self._recv_pushd,
                   'E': self._recv_popd}
        while not self.channel.closed:
            # wait for command as long as we're open
            self.channel.sendall('\x00')
            msg = self.channel.recv(1024)
            if not msg:  # chan closed while recving
                break
            code = msg[0]
            try:
                command[code](msg[1:])
            except KeyError:
                raise SCPException(repr(msg))
        # directory times can't be set until we're done writing files
        self._set_dirtimes()

    def _set_time(self, cmd):
        # Parse a 'T<mtime> 0 <atime> 0' payload and stash the times.
        try:
            times = cmd.split()
            mtime = int(times[0])
            atime = int(times[2]) or mtime
        except:
            self.channel.send('\x01')
            raise SCPException('Bad time format')
        # save for later
        self._utime = (atime, mtime)

    def _recv_file(self, cmd):
        # Parse a 'C<mode> <size> <name>' payload and receive the file body.
        chan = self.channel
        parts = cmd.strip().split(' ', 2)
        try:
            mode = int(parts[0], 8)
            size = int(parts[1])
            path = os.path.join(self._recv_dir, parts[2])
        except:
            chan.send('\x01')
            chan.close()
            raise SCPException('Bad file format')
        try:
            file_hdl = file(path, 'wb')
        except IOError, e:
            chan.send('\x01' + e.message)
            chan.close()
            raise
        buff_size = self.buff_size
        pos = 0
        chan.send('\x00')
        try:
            while pos < size:
                # we have to make sure we don't read the final byte
                if size - pos <= buff_size:
                    buff_size = size - pos
                file_hdl.write(chan.recv(buff_size))
                pos = file_hdl.tell()
                if self._progress:
                    self._progress(path, size, pos)
            msg = chan.recv(512)
            if msg and msg[0] != '\x00':
                raise SCPException(msg[1:])
        except SocketTimeout:
            chan.close()
            raise SCPException('Error receiving, socket.timeout')
        file_hdl.truncate()
        try:
            os.utime(path, self._utime)
            self._utime = None
            os.chmod(path, mode)
            # should we notify the other end?
        finally:
            file_hdl.close()
        # '\x00' confirmation sent in _recv_all

    def _recv_pushd(self, cmd):
        # Parse a 'D<mode> 0 <name>' payload: create/enter a local directory.
        parts = cmd.split()
        try:
            mode = int(parts[0], 8)
            path = os.path.join(self._recv_dir, parts[2])
        except:
            self.channel.send('\x01')
            raise SCPException('Bad directory format')
        try:
            if not os.path.exists(path):
                os.mkdir(path, mode)
            elif os.path.isdir(path):
                os.chmod(path, mode)
            else:
                raise SCPException('%s: Not a directory' % path)
            self._dirtimes[path] = (self._utime)
            self._utime = None
            self._recv_dir = path
        except (OSError, SCPException), e:
            self.channel.send('\x01' + e.message)
            raise

    def _recv_popd(self, *cmd):
        # 'E' command: step back up one local directory level.
        self._recv_dir = os.path.split(self._recv_dir)[0]

    def _set_dirtimes(self):
        # Apply the deferred directory timestamps collected in _recv_pushd.
        try:
            for d in self._dirtimes:
                os.utime(d, self._dirtimes[d])
        finally:
            self._dirtimes = {}
class SCPException(Exception):
    """Raised for all SCP transport-level errors."""
# this is quote from the shlex module, added in py3.3
_find_unsafe = re.compile(r'[^\w@%+=:,./-]').search
def _sh_quote(s):
"""Return a shell-escaped version of the string *s*."""
if not s:
return "''"
if _find_unsafe(s) is None:
return s
# use single quotes, and put single quotes into double quotes
# the string $'b is then quoted as '$'"'"'b'
return "'" + s.replace("'", "'\"'\"'") + "'"
| 34.644699 | 78 | 0.565214 |
5c88d6baefe4affb74b244f1bcd279aff06f427e | 1,377 | py | Python | RPI/Wifi_old/WiFi_Socket_old.py | sguertl/Flying_Raspberry | d4f4ec82c4ceab7560d00265d7214aff84cbffd7 | [
"Apache-2.0"
] | 1 | 2018-03-10T10:50:29.000Z | 2018-03-10T10:50:29.000Z | RPI/Wifi_old/WiFi_Socket_old.py | sguertl/Flying_Pi | d4f4ec82c4ceab7560d00265d7214aff84cbffd7 | [
"Apache-2.0"
] | null | null | null | RPI/Wifi_old/WiFi_Socket_old.py | sguertl/Flying_Pi | d4f4ec82c4ceab7560d00265d7214aff84cbffd7 | [
"Apache-2.0"
] | null | null | null | ### This version is used for multiclients and python 3.4.x ###
import socket
import sys
from _thread import *
HOST = '' # Symbolic name meaning all available interfaces
PORT = 5050 # Abritray non-privileged port
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
print('Socket created')
# Bind socket to local host and port
try:
s.bind((HOST, PORT))
except socket.error as msg:
print('Bind failed. Error code: ' + str(msg[0]) + ' Message: ' (msg[1]))
sys.exit()
print('Socket bind complete')
# Start listening on socket
s.listen(10)
print('Socket now listening')
# Function for handling connections, this will be used to create threads
def clientthread(conn):
    """Serve one client on *conn*: send a greeting, then echo every message
    back prefixed with 'OK ...' until the client disconnects.

    Fixes over the original:
    - sockets carry bytes in Python 3; sending str raised TypeError,
    - an empty recv() (peer closed) previously spun forever; now it ends
      the loop,
    - the connection is always closed when the handler exits.
    """
    try:
        # Sending greeting to connected client (bytes, not str).
        conn.send(b'Welcome!\n')
        while True:
            # Receiving data from client; b'' means the peer disconnected.
            data = conn.recv(1024)
            if not data:
                break
            reply = b'OK ...' + data
            conn.sendall(reply)
    finally:
        conn.close()
# Now keep talking with the client
while 1:
    # Wait to accept a connection - blocking call
    conn, addr = s.accept()
    print('Connected with ' + addr[0] + ':' + str(addr[1]))
    # Start a new thread that takes 1st argument as function name to be run, 2nd argument is the tuple of args given to the function
    start_new_thread(clientthread, (conn,))
# NOTE(review): unreachable — the `while 1` loop above never exits.
s.close()
b69c70f1b9bcd40e17ea582a03bf21b21d06eaf9 | 476 | py | Python | src/grepros/__init__.py | suurjaak/grepros | 4e719252858b6895d2ee071fcf0c332a3a5dafaa | [
"BSD-3-Clause"
] | 10 | 2021-11-05T12:43:21.000Z | 2022-03-17T06:08:30.000Z | src/grepros/__init__.py | suurjaak/grepros | 4e719252858b6895d2ee071fcf0c332a3a5dafaa | [
"BSD-3-Clause"
] | 1 | 2022-03-01T09:19:53.000Z | 2022-03-01T21:38:52.000Z | src/grepros/__init__.py | suurjaak/grepros | 4e719252858b6895d2ee071fcf0c332a3a5dafaa | [
"BSD-3-Clause"
] | 1 | 2022-01-24T23:46:00.000Z | 2022-01-24T23:46:00.000Z | # -*- coding: utf-8 -*-
"""
------------------------------------------------------------------------------
This file is part of grepros - grep for ROS bag files and live topics.
Released under the BSD License.
@author Erki Suurjaak
@created 31.10.2021
@modified 26.05.2022
------------------------------------------------------------------------------
"""
## @namespace grepros
# Package metadata: distribution name plus a human-readable version string
# and a comparable version tuple.
__title__ = "grepros"
__version__ = "0.4.6"
__version_info__ = (0, 4, 6)  # keep in sync with __version__
| 29.75 | 78 | 0.428571 |
bd3b8f3b6614c1af17096515e3d815d719a413b6 | 1,863 | py | Python | bandwitch/ClonesObservations/band_patterns_discrepancy.py | Edinburgh-Genome-Foundry/BandWitch | 23f7faee9a955313ade66c77ab474a12712e14fc | [
"MIT"
] | 12 | 2018-02-12T13:12:00.000Z | 2021-08-15T11:36:28.000Z | bandwitch/ClonesObservations/band_patterns_discrepancy.py | Edinburgh-Genome-Foundry/BandWitch | 23f7faee9a955313ade66c77ab474a12712e14fc | [
"MIT"
] | 2 | 2020-09-07T21:53:27.000Z | 2020-09-20T18:49:17.000Z | bandwitch/ClonesObservations/band_patterns_discrepancy.py | Edinburgh-Genome-Foundry/BandWitch | 23f7faee9a955313ade66c77ab474a12712e14fc | [
"MIT"
] | null | null | null | import numpy as np
from ..tools import max_min_distance
def band_patterns_discrepancy(
    bands1,
    bands2,
    ladder,
    relative_tolerance=0.1,
    reference_and_gel=False,
    zone=None,
):
    """Return a discrepancy indicating whether the band patterns are
    perfectly matching (0) or dissimilar (>=1) or in-between (0-1).

    Similarity is determined by the fact that the min-max distance between
    the two migration patterns is below some threshold. The threshold is some
    proportion of the ladder's migration span.

    If mode ``reference_and_gel`` is set to True, then ``bands1`` is
    considered a "truth" (= expected bands pattern) and the function will
    automatically return a discrepancy of 2.0 if the observed pattern on gel
    (bands2) has more bands than bands1.

    Parameters
    ----------
    bands1
        A list of bands sizes forming the first pattern
    bands2
        A list of bands sizes forming the second pattern
    ladder
        A Ladder object used to compute migration distances.
    relative_tolerance
        Proportion of the ladder's migration span (between lowest and highest
        migration distances) that is the threshold for the min-max distance.
    reference_and_gel
        Set to True if ``bands1`` is a reference (= expected bands) and
        ``bands2`` is the observed bands.
    zone
        Optional pair of band sizes delimiting the region of the gel in which
        the patterns are compared. If None, the comparison is not restricted
        to any zone.
    """
    # Degenerate cases: two empty patterns match perfectly; one empty
    # pattern (or extra observed bands in reference mode) is a clear miss.
    if bands1 == bands2 == []:
        return 0
    elif min(len(bands1), len(bands2)) == 0:
        return 2.0
    if reference_and_gel and (len(bands2) > len(bands1)):
        return 2.0
    m1, m2 = (
        ladder.dna_size_to_migration(np.array(b)) for b in (bands1, bands2)
    )
    mini, maxi = ladder.migration_distances_span
    tolerance = relative_tolerance * (maxi - mini)
    # Fix: the default zone=None previously crashed with a TypeError when
    # iterated. A None zone is now forwarded as-is (no zone restriction);
    # otherwise band sizes are converted to migration distances, reversed
    # because larger fragments migrate shorter distances.
    if zone is None:
        migration_zone = None
    else:
        migration_zone = [ladder.dna_size_to_migration(b) for b in zone][::-1]
    return 1.0 * max_min_distance(m1, m2, zone=migration_zone) / tolerance
| 30.540984 | 79 | 0.678476 |
48cb94a0e298a0bddcd134b5a3174c3904d978b0 | 2,230 | py | Python | modules/video/classification/videotag_tsn_lstm/resource/utils/config_utils.py | chunzhang-hub/PaddleHub | c5cfd021f77fd59340fb26e223e09a592e6a345f | [
"Apache-2.0"
] | 8,360 | 2019-01-18T10:46:45.000Z | 2022-03-31T14:50:02.000Z | modules/video/classification/videotag_tsn_lstm/resource/utils/config_utils.py | dwuping/PaddleHub | 9a3b23295947e22149cc85c17cb4cf23c03f9e06 | [
"Apache-2.0"
] | 1,158 | 2019-04-11T09:22:43.000Z | 2022-03-31T12:12:09.000Z | modules/video/classification/videotag_tsn_lstm/resource/utils/config_utils.py | dwuping/PaddleHub | 9a3b23295947e22149cc85c17cb4cf23c03f9e06 | [
"Apache-2.0"
] | 1,677 | 2019-04-09T15:07:40.000Z | 2022-03-31T06:41:10.000Z | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserve.
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
import logging
from .utility import AttrDict
# Module-level logger, named after this module.
logger = logging.getLogger(__name__)

# Valid configuration section names; merge_configs() only accepts these.
CONFIG_SECS = [
    'train',
    'valid',
    'test',
    'infer',
]
def parse_config(cfg_file):
    """Load a YAML config file into an AttrDict (recursively converted)."""
    import yaml
    with open(cfg_file, 'r') as fopen:
        raw_config = yaml.load(fopen, Loader=yaml.Loader)
    yaml_config = AttrDict(raw_config)
    create_attr_dict(yaml_config)
    return yaml_config
def create_attr_dict(yaml_config):
    """Recursively convert nested dicts in *yaml_config* to AttrDict and
    parse string values that are Python literals (e.g. "1e-3", "[1, 2]")
    into their actual values, in place.
    """
    from ast import literal_eval
    for key, value in yaml_config.items():
        if type(value) is dict:
            yaml_config[key] = value = AttrDict(value)
        if isinstance(value, str):
            try:
                value = literal_eval(value)
            # Fix: the original caught BaseException, which also swallowed
            # KeyboardInterrupt/SystemExit. literal_eval raises ValueError
            # or SyntaxError for strings that are not Python literals.
            except (ValueError, SyntaxError):
                pass  # not a literal; keep the raw string
        if isinstance(value, AttrDict):
            create_attr_dict(yaml_config[key])
        else:
            yaml_config[key] = value
    return
def merge_configs(cfg, sec, args_dict):
    """Overwrite section *sec* of *cfg* with the non-None values from
    *args_dict* and return the (mutated) cfg.

    Only keys that already exist on the section are overwritten.
    """
    assert sec in CONFIG_SECS, "invalid config section {}".format(sec)
    sec_dict = getattr(cfg, sec.upper())
    for k, v in args_dict.items():
        if v is None:
            continue
        try:
            if hasattr(sec_dict, k):
                setattr(sec_dict, k, v)
        # Fix: the bare `except:` also trapped KeyboardInterrupt/SystemExit;
        # restrict to ordinary errors while keeping the best-effort behavior.
        except Exception:
            pass
    return cfg
def print_configs(cfg, mode):
    """Log every section, key and value of *cfg*, labelled with *mode*."""
    header = "---------------- {:>5} Arguments ----------------".format(mode)
    logger.info(header)
    for section_name, section_items in cfg.items():
        logger.info("{}:".format(section_name))
        for key, value in section_items.items():
            logger.info("    {}:{}".format(key, value))
    logger.info("-------------------------------------------------")
| 29.342105 | 81 | 0.615695 |
f1126fefbaeaa1ea0dabd1a4a9a79fda14c3c53a | 2,260 | py | Python | experiment_tests/basic_experiments.py | probablytom/bpi_13_python | ef042674d3d40857237511af1fbca59ede97e75e | [
"MIT"
] | null | null | null | experiment_tests/basic_experiments.py | probablytom/bpi_13_python | ef042674d3d40857237511af1fbca59ede97e75e | [
"MIT"
] | null | null | null | experiment_tests/basic_experiments.py | probablytom/bpi_13_python | ef042674d3d40857237511af1fbca59ede97e75e | [
"MIT"
] | null | null | null | import unittest
from theatre_au import Clock
from actor_au import Troupe
from domain_model import construct_universe, action_log, generate_XES, new_trace
class ExperimentalScratchpad(unittest.TestCase):
    """Smoke tests wiring up the simulated company and checking that
    submitted work actually flows through the actors."""

    def setUp(self):
        # Build a fresh simulation universe for every test.
        self.clock = Clock()
        self.reps = Troupe()
        self.specialists = Troupe()
        self.company = Troupe()
        self.num_reps = 5
        self.num_specialists = 2
        construct_universe(self.clock,
                           self.specialists,
                           self.reps,
                           self.company,
                           self.num_reps,
                           self.num_specialists)

    def test_actors_actually_act(self):
        # Submit some work for the company to do.
        self.company.recieve_message('a_submitted')
        self.clock.tick(2)
        # Check work has moved on.
        # Fix: the original used `len(...) is not 0`, an identity comparison
        # on an int; compare by value instead.
        self.assertNotEqual(len(action_log), 0)

    def test_handoff_to_specialists(self):
        self.company.recieve_message('w_nabellen_incomplete_dossiers_scheduled')
        self.clock.tick(2)
        # Flatten the nested event log and look for the specialist event.
        events = [event[1].lower()
                  for event_sequence in action_log
                  for event in event_sequence]
        self.assertIn('w_valideren_aanvraag_complete', events)
class TestExperimentsMakeXES(unittest.TestCase):
    """Checks that simulation runs can be exported as XES event logs."""

    def setUp(self):
        # Fresh simulation universe per test, same shape as the scratchpad.
        self.clock = Clock()
        self.reps = Troupe()
        self.specialists = Troupe()
        self.company = Troupe()
        self.num_reps = 5
        self.num_specialists = 2
        construct_universe(self.clock,
                           self.specialists,
                           self.reps,
                           self.company,
                           self.num_reps,
                           self.num_specialists)

    def test_simple_XES_trace(self):
        self.company.recieve_message('start')
        self.clock.tick(100)
        generate_XES()

    def test_generate_50_traces(self):
        def run_sim():
            self.company.recieve_message('start')
            self.clock.tick(100)

        # 49 runs each followed by a new trace, plus one final run: 50 total.
        for _ in range(49):
            run_sim()
            new_trace()
        run_sim()
        generate_XES(log_path="50_traces.xes")
if __name__ == '__main__':
    unittest.main()  # Run all tests when executed as a script.
| 29.350649 | 109 | 0.569027 |
1cf8262decc4c80d6d76a90447aac2554e27d5b7 | 15,868 | py | Python | mvqag/experiments/abnormality/2020.py | VirkSaab/Medical-VQA | 6d77963cc81940fc680a18d931e0d88a3264f5fa | [
"MIT"
] | null | null | null | mvqag/experiments/abnormality/2020.py | VirkSaab/Medical-VQA | 6d77963cc81940fc680a18d931e0d88a3264f5fa | [
"MIT"
] | null | null | null | mvqag/experiments/abnormality/2020.py | VirkSaab/Medical-VQA | 6d77963cc81940fc680a18d931e0d88a3264f5fa | [
"MIT"
] | null | null | null | import torch
import torch.nn as nn
import torchvision.transforms as T
import pandas as pd
from typing import Dict, List
from torchcontrib.optim import SWA
from pathlib import Path
from mvqag import CNF_PATH
from mvqag.utils import load_yaml, load_json, get_recent_githash, load_qa_file
from mvqag.data import (
VQADataset,
DataLoaders,
Tokenizer,
vqa_collate_fn,
generate_new_questions_dataframe
)
from mvqag.model import VQANet, SANNet
from mvqag.train import (
get_device,
get_metrics,
LabelSmoothingCrossEntropyWithSuperLoss,
LabelSmoothingCrossEntropy,
SuperLoss,
VQATrainer,
TrainingLogger,
Checkpointer,
mixup_data_vqa,
mixup_criterion_vqa,
)
class PrepareCLEF2020DataWithQ:
    """Builds the ImageCLEF-VQA-Med 2020 abnormality training/validation
    pipeline: dataframes, transforms, tokenizer, datasets and dataloaders.

    Reads everything from the *CNF* config object (paths, class count,
    model input size, question-generation flag, batch size).
    """

    def __init__(self, CNF) -> None:
        self.n_classes = CNF.data.n_classes  # 330 (no yes/no) or 332
        self.QG = CNF.data.QG                # question-generation flag
        # TRAINING DATA
        # This is the main dataset for training
        train20_df = self._make_dataframe(
            columns=CNF.clef_cols.default,
            qa_path=CNF.paths.clef_20_train_qa,
            imgs_path=CNF.paths.clef_20_train_imgs,
            is_main_df=True
        )
        # Get categorical abnormality classes
        self.classes = train20_df.A.unique().tolist()
        if self.n_classes == 330:
            # 330-class setting drops the binary yes/no answers.
            if 'no' in self.classes:
                self.classes.remove('no')
            if 'yes' in self.classes:
                self.classes.remove('yes')
        self.classes = sorted(self.classes)
        assert len(self.classes) == self.n_classes
        # Remove yes/no classes from train20_df
        train20_df = pd.DataFrame([
            row for row in train20_df.itertuples() if row.A in self.classes
        ]).drop("Index", axis=1)
        # Filter abnormality data from other ImageCLEF datasets
        train19_df = self._make_dataframe(
            columns=CNF.clef_cols.default,
            qa_path=CNF.paths.clef_19_train_qa,
            imgs_path=CNF.paths.clef_19_train_imgs
        )
        val19_df = self._make_dataframe(
            columns=CNF.clef_cols.default,
            qa_path=CNF.paths.clef_19_val_qa,
            imgs_path=CNF.paths.clef_19_val_imgs
        )
        test19_df = self._make_dataframe(
            columns=CNF.clef_cols.test19,
            qa_path=CNF.paths.clef_19_test_qa,
            imgs_path=CNF.paths.clef_19_test_imgs
        )
        test19_df = test19_df.drop('Task', axis=1)
        # Training pool = 2019 train/val/test + 2020 train.
        training_dfs = [train19_df, val19_df, test19_df, train20_df]
        self.train_df = pd.concat(
            training_dfs, ignore_index=True
        ).reset_index(drop=True)
        # Validation = official 2020 validation split.
        self.val_df = self._make_dataframe(
            columns=CNF.clef_cols.default,
            qa_path=CNF.paths.clef_20_val_qa,
            imgs_path=CNF.paths.clef_20_val_imgs
        )
        # Augmentation: resize slightly larger, random-crop back to the model
        # input size, horizontal flip, ImageNet normalization (train only).
        self.train_tfms = T.Compose([
            T.Resize(size=(CNF.model.inp_size + 8, CNF.model.inp_size + 8)),
            # T.AutoAugment(),
            T.RandomCrop(size=(CNF.model.inp_size, CNF.model.inp_size)),
            T.RandomHorizontalFlip(),
            T.ToTensor(),
            T.Normalize(
                mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225)
            )
        ])
        self.val_tfms = T.Compose([
            T.Resize(size=(CNF.model.inp_size, CNF.model.inp_size)),
            T.ToTensor(),
            T.Normalize(
                mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225)
            )
        ])
        # Generate new questions for training dataset
        if CNF.data.QG:
            print(f"Before QG: # training samples = {self.train_df.shape[0]}")
            self.train_df = self._generate_questions(self.train_df,
                                                     CNF.task_keywords)
            print(f"After QG: # training samples = {self.train_df.shape[0]}")
        # Tokenizer vocabulary is built from the (possibly augmented)
        # training questions only.
        self.tokenizer = Tokenizer.from_list(
            self.train_df.Q.unique().tolist(),
            max_len=CNF.model.max_len
        )
        # Make dataset classes
        self.trainset = VQADataset(
            self.train_df, self.train_tfms, self.classes, self.tokenizer
        )
        self.valset = VQADataset(
            self.val_df, self.val_tfms, self.classes, self.tokenizer
        )
        # Make dataloaders (validation batch size fixed to 1).
        self.dls = DataLoaders.from_dataset(
            trainset=self.trainset,
            train_bs=CNF.train.bs,
            valset=self.valset,
            val_bs=1,
            collate_fn=vqa_collate_fn
        )

    def _make_dataframe(self,
                        columns,
                        qa_path,
                        imgs_path,
                        is_main_df=False):
        # Load a QA file into a dataframe (columns: ID/Q/A...), attach full
        # image paths; for auxiliary datasets keep only rows whose answer is
        # one of the known abnormality classes.
        df = load_qa_file(qa_filepath=qa_path, columns=columns)
        df['PATH'] = df.ID.apply(lambda x: f"{imgs_path}/{x}.jpg")
        if not is_main_df:
            df = pd.DataFrame([
                row for row in df.itertuples() if row.A in self.classes
            ]).drop("Index", axis=1)
        return df

    def _generate_questions(
        self,
        train_df: pd.DataFrame,
        task_keywords: Dict[str, List[str]]
    ) -> pd.DataFrame:
        # Augment with generated question variants, then keep only the
        # categorical abnormality subset relevant to this experiment.
        new_train_df = generate_new_questions_dataframe(
            train_df, task_keywords
        )
        new_train_df = new_train_df[new_train_df.Task == 'abnormality']
        new_train_df = new_train_df[new_train_df.SubTask == 'categorical']
        return new_train_df

    def check(self) -> None:
        """Sanity-check dataframe shapes, class counts, dataset sizes and one
        batch from the train dataloader; raises AssertionError on mismatch."""
        print(f"Data augmentation:\n\t{self.train_tfms}", end='')
        print(f"\n\t{self.val_tfms}")
        print(f"# training samples = {self.train_df.shape}")
        print(f"# validation samples = {self.val_df.shape}")
        # Hard-coded expected shapes for the known configurations.
        if self.n_classes == 330:
            assert self.train_df.shape == (4963, 4)
            assert self.val_df.shape == (472, 4)
        elif self.n_classes == 332:
            if self.QG:
                assert self.train_df.shape == (39704, 6)
            else:
                assert self.train_df.shape == (6583, 4)
            assert self.val_df.shape == (500, 4)
        else:
            _errmsg = f"check not added for classes = {self.n_classes}"
            raise NotImplementedError(_errmsg)
        assert self.train_df.A.nunique() <= self.n_classes
        assert self.val_df.A.nunique() <= self.n_classes
        assert len(self.trainset) == self.train_df.shape[0]
        assert len(self.valset) == self.val_df.shape[0]
        # Pull one batch to verify the collate function and tensor shapes.
        batch = next(iter(self.dls.trainloader))
        print(f"data batch:")
        print(f"\tV = {batch['inputs']['V'].shape}")
        print(f"\tQ = {batch['inputs']['Q']['input_ids'].shape}")
        print(f"\tA = {batch['target'].shape}")
        print("data check: [green]PASSED[/green]")
class CLEFTrainer(VQATrainer):
    """VQA trainer with optional VQA-MixUp training and flip TTA validation."""

    def __init__(self, dls, net, loss_fn, optm_fn, device,
                 metrics=None,
                 checkpointer=None,
                 logger=None,
                 step_lrs=None,
                 epoch_lrs=None,
                 mixup: float = 0.):
        """Same as VQATrainer, plus ``mixup``: MixUp alpha (0 disables it)."""
        super().__init__(dls, net, loss_fn, optm_fn, device, metrics,
                         checkpointer, logger, step_lrs, epoch_lrs)
        self.mixup = mixup

    def train_one_batch(self, batch):
        """Run one optimization step; returns (loss, output, target)."""
        target = batch['target'].to(self.device)
        V = batch['inputs']['V'].to(self.device)
        Q = {
            k: v.to(self.device) for k, v in batch['inputs']['Q'].items()
        }
        if self.mixup > 0.0:
            # MixUp both images and questions; the effective output is the
            # lambda-weighted combination of both mixed forward passes.
            Q = Q['input_ids']
            mixed_v, a_a, a_b, q_a, q_b, lam = mixup_data_vqa(
                V, Q, target, alpha=self.mixup, use_cuda=True
            )
            output_a = self.net(mixed_v, {'input_ids': q_a})
            output_b = self.net(mixed_v, {'input_ids': q_b})
            loss = mixup_criterion_vqa(
                self.loss_fn, output_a, output_b, a_a, a_b, lam)
            output = (lam * output_a) + ((1 - lam) * output_b)
        else:
            output = self.net(V, Q)
            loss = self.loss_fn(output, target)
        # Backpropagation
        self.optm_fn.zero_grad()
        loss.backward()
        self.optm_fn.step()
        return loss, output, target

    def val_one_batch(self, batch):
        """Validate one batch with horizontal-flip test-time augmentation."""
        target = batch['target'].to(self.device)
        V = batch['inputs']['V'].to(self.device)
        Q = {
            k: v.to(self.device) for k, v in batch['inputs']['Q'].items()
        }
        output = self.net(V, Q)
        # test time augmentation
        assert V.dim() == 4, 'You need to provide a [B,C,H,W] image to flip'
        Vs_flip = torch.flip(V, [3])
        output_flip = self.net(Vs_flip, Q)
        # Average predictions of original and flipped images.
        output = (output + output_flip) / 2.0
        loss = self.loss_fn(output, target)
        return loss, output, target
def run(CNF: dict, dm=None, ret_model_and_dm: bool = False):
    """Seed, build data/model/trainer from the CNF config and train.

    Parameters:
        CNF: experiment configuration namespace/dict.
        dm: optional pre-built data module; built (and checked) if None.
        ret_model_and_dm: if True, return (trained net, data module).
    """
    # ---------------------------------------- SETTINGS:
    # Set seed
    torch.manual_seed(CNF.seed)
    torch.cuda.manual_seed(CNF.seed)
    torch.cuda.manual_seed_all(CNF.seed)
    # Set device
    CNF.device, CNF.cuda_ids = get_device()
    # Get the githash of last commit
    CNF.recent_githash = get_recent_githash()
    # ---------------------------------------- DATA:
    if dm is None:
        dm = PrepareCLEF2020DataWithQ(CNF)
        dm.check()
    # ---------------------------------------- MODEL:
    model_name = f"{CNF.model.vnet_name}_{CNF.model.qnet_name}"
    print(f"Loading {model_name} model...", end=' ')
    if CNF.model.use_SAN:
        # Stacked-attention fusion variant.
        CNF.wandb_run_name += f"+{model_name}+SAN"
        model = SANNet(
            n_classes=CNF.data.n_classes,
            vnet_name=CNF.model.vnet_name,
            qnet_name=CNF.model.qnet_name,
            vocab_dim=dm.tokenizer.vocab_dim,
            emb_dim=CNF.model.emb_dim,
            vdp=CNF.model.vdp,
            qdp=CNF.model.qdp
        )
    else:
        # Default multiplication-fusion VQA network.
        CNF.wandb_run_name += f"+{model_name}"
        model = VQANet(
            n_classes=CNF.data.n_classes,
            vnet_name=CNF.model.vnet_name,
            qnet_name=CNF.model.qnet_name,
            vocab_dim=dm.tokenizer.vocab_dim,
            emb_dim=CNF.model.emb_dim,
            hid_dim=1024,
            bidirect=True,
            vdp=CNF.model.vdp,
            qdp=CNF.model.qdp
        )
    model.to(CNF.device)
    model = torch.nn.DataParallel(model, device_ids=CNF.cuda_ids)
    print('done.')
    # ---------------------------------------- TRAIN:
    optm_fn = torch.optim.SGD(model.parameters(),
                              lr=CNF.optm.lr,
                              momentum=CNF.optm.mom,
                              weight_decay=CNF.optm.wd,
                              )
    # Select the loss criterion by name.
    if CNF.loss.fn.lower() == 'lscesl':
        loss_fn = LabelSmoothingCrossEntropyWithSuperLoss(rank=CNF.device)
    elif CNF.loss.fn.lower() == 'superloss':
        loss_fn = SuperLoss(rank=CNF.device)
    elif CNF.loss.fn.lower() == 'crossentropy':
        loss_fn = nn.CrossEntropyLoss(
            reduction='mean',
            label_smoothing=CNF.loss.smoothing
        )
    elif CNF.loss.fn.lower() == 'celsv2':
        loss_fn = LabelSmoothingCrossEntropy(classes=CNF.data.n_classes)
    else:
        raise NotImplementedError(f'Loss_fn {CNF.loss.fn} not supported.')
    print(f"Criterion = {CNF.loss.fn}")
    epoch_lrs = torch.optim.lr_scheduler.StepLR(optm_fn,
                                                step_size=20,
                                                gamma=0.60)
    logger = TrainingLogger(
        logs_dir=CNF.paths.logs_dir, config=CNF, run_name=CNF.wandb_run_name
    )
    # Keep the checkpoint with the best micro validation accuracy.
    checkpointer = Checkpointer(chkpts_dir=CNF.paths.chkpts_dir,
                                chkpt_of=[{'ValAccuracyMicro': 'max'}])
    if CNF.train.use_swa:  # Stochastic Weight Averaging
        optm_fn = SWA(optm_fn, swa_start=20, swa_freq=5, swa_lr=0.005)
    trainer = CLEFTrainer(dls=dm.dls,
                          net=model,
                          loss_fn=loss_fn,
                          optm_fn=optm_fn,
                          device=CNF.device,
                          metrics=get_metrics(CNF.data.n_classes),
                          logger=logger,
                          checkpointer=checkpointer,
                          epoch_lrs=epoch_lrs,
                          mixup=CNF.train.vqa_mixup
                          )
    if CNF.is_test_run:
        # Smoke test: 2 epochs with a handful of iterations each.
        trainer.train(2, max_train_iters=5, max_val_iters=5)
    else:
        trainer.train(CNF.train.n_epochs)
    if CNF.train.use_swa:
        # Swap in the averaged SWA weights after training.
        optm_fn.swap_swa_sgd()
    if ret_model_and_dm:
        return trainer.net, dm
if __name__ == '__main__':
    # Experiment launcher: pick an EXP_NO, override the baseline config
    # accordingly, then run training.
    # * EXPERIMENT NUMBER
    EXP_NO = 2
    # * Baseline settings
    CNF = load_yaml(CNF_PATH)
    CNF.data.n_classes = 332
    CNF.data.QG = False  # Questions generation
    CNF.model.use_SAN = False  # If False, use multiplication fusion
    CNF.train.use_swa = False  # Stochastic Weight Averaging
    # Run name derived from this script's path plus the experiment number.
    CNF.wandb_run_name = '/'.join(__file__.split('.')[0].split('/')[-2:])
    CNF.wandb_run_name = f"{EXP_NO}-{CNF.wandb_run_name}+332"
    CNF.is_test_run = False
    if EXP_NO == 1:
        # * EXP 1 - our baseline with SYSU curated dataset
        # Baseline experiment
        pass
    elif EXP_NO == 2:
        CNF.loss.fn = 'celsv2'
        CNF.train.vqa_mixup = 0.1
        CNF.wandb_run_name += '+VQAMixUp'
    elif EXP_NO == 3:
        CNF.loss.smoothing = 0.1
        CNF.train.vqa_mixup = 0.1
        CNF.wandb_run_name += '+VQAMixUp+LabelSmoothing'
    elif EXP_NO == 4:
        CNF.model.vnet_name = 'vgg16mixpool'
        CNF.loss.smoothing = 0.1
        CNF.train.vqa_mixup = 0.1
        CNF.wandb_run_name += '+VQAMixUp+LabelSmoothing'
    elif EXP_NO == 5:
        CNF.model.vnet_name = 'vgg16mixpool'
        CNF.loss.fn = 'lscesl'
        CNF.train.vqa_mixup = 0.1
        CNF.wandb_run_name += '+VQAMixup+LSCESL'
    elif EXP_NO == 6:
        CNF.train.use_swa = True
        CNF.wandb_run_name += '+SWA'
    elif EXP_NO == 7:
        CNF.train.use_swa = True
        CNF.train.vqa_mixup = 0.1
        CNF.wandb_run_name += '+SWA+VQAMixUp'
    elif EXP_NO == 8:
        CNF.data.QG = True
        CNF.train.vqa_mixup = 0.1
        CNF.loss.smoothing = 0.1
        CNF.wandb_run_name += '+QG+VQAMixUp+LabelSmoothing'
    elif EXP_NO == 9:
        CNF.data.QG = True
        CNF.model.use_SAN = True
        CNF.train.vqa_mixup = 0.1
        CNF.loss.smoothing = 0.1
        CNF.wandb_run_name += '+QG+VQAMixUp+LabelSmoothing'
    elif EXP_NO == 10:
        CNF.data.QG = True
        CNF.model.use_SAN = True
        CNF.train.vqa_mixup = 0.1
        CNF.loss.smoothing = 0.1
        CNF.loss.fn = 'lscesl'
        CNF.wandb_run_name += '+QG+VQAMixUp+LSCESL'
    elif EXP_NO == 11:
        CNF.data.QG = True
        CNF.model.vnet_name = 'vgg16mixpool'
        CNF.model.use_SAN = True
        CNF.train.vqa_mixup = 0.1
        CNF.loss.smoothing = 0.1
        CNF.loss.fn = 'lscesl'
        CNF.wandb_run_name += '+QG+VQAMixUp+LSCESL'
    elif EXP_NO == 12:
        CNF.data.QG = True
        CNF.model.vnet_name = 'vgg16mixpool'
        CNF.loss.fn = 'lscesl'
        CNF.train.vqa_mixup = 0.1
        CNF.wandb_run_name += '+QG+VQAMixup+LSCESL'
    elif EXP_NO == 13:
        CNF.model.vnet_name = 'vgg16mixpool'
        CNF.model.use_SAN = True
        CNF.train.vqa_mixup = 0.1
        CNF.loss.smoothing = 0.1
        CNF.wandb_run_name += '+VQAMixUp+LabelSmoothing'
    elif EXP_NO == 14:
        CNF.data.QG = True
        CNF.wandb_run_name += '+QG'
    elif EXP_NO == 15:
        CNF.model.use_SAN = True
    elif EXP_NO == 16:
        CNF.data.QG = True
        CNF.model.use_SAN = True
        CNF.loss.fn = 'celsv2'  # to run on older pytorch versions
        CNF.loss.smoothing = 0.1
        CNF.train.vqa_mixup = 0.1
        CNF.wandb_run_name += '+VQAMixUp+QG+LabelSmoothing'
    # Sanity check
    assert CNF.train.bs == 32
    run(CNF)
| 34.874725 | 78 | 0.564784 |
05406136bfe9824ae4cc599f52a81b536db42243 | 3,725 | py | Python | dechorate/utils/mds_utils.py | Chutlhu/DechorateDB | 378eda37ed296f2823e3306238101343c5f4084a | [
"MIT"
] | 7 | 2021-06-01T10:57:58.000Z | 2022-03-30T03:17:16.000Z | dechorate/utils/mds_utils.py | Chutlhu/DechorateDB | 378eda37ed296f2823e3306238101343c5f4084a | [
"MIT"
] | 3 | 2021-06-25T14:48:40.000Z | 2022-02-10T05:36:30.000Z | dechorate/utils/mds_utils.py | Chutlhu/DechorateDB | 378eda37ed296f2823e3306238101343c5f4084a | [
"MIT"
] | null | null | null | import numpy as np
import scipy as sp
from dechorate.externals.trilaterate import trilaterate
def edm(X, Y):
    '''
    Return the Euclidean Distance Matrix between two point sets.

    Parameters
    ----------
    X : np.ndarray of shape (D, N)
    Y : np.ndarray of shape (D, M)
        Points stored column-wise; D must be 2 or 3.

    Returns
    -------
    np.ndarray of shape (N, M)
        D[i, j] = || X[:, i] - Y[:, j] ||_2
    '''
    Dx, N = X.shape
    Dy, M = Y.shape
    assert (Dx == Dy == 3) or (Dx == Dy == 2)
    # Squared norms of every column, kept as row vectors for broadcasting.
    norm_X2 = np.sum(X ** 2, axis=0)[None, :]
    norm_Y2 = np.sum(Y ** 2, axis=0)[None, :]
    # |x - y|^2 = |x|^2 + |y|^2 - 2 x.y, evaluated for all pairs at once.
    D = norm_X2.T + norm_Y2 - 2 * X.T @ Y
    # Floating-point cancellation can leave tiny negative entries, which
    # would become NaN under the square root; clamp them to zero first.
    return np.maximum(D, 0.0) ** 0.5
def trilateration2(anchors, distances):
    # NOTE(review): looks like a debug stub -- both arguments are ignored
    # and trilaterate() is called with dummy string data; confirm intent.
    return trilaterate(['a',['a']])
def trilateration(anchors, distances, init=None):
    """Trilaterate a point from distances to fixed anchors (Beck et al.).

    Solves the constrained least-squares formulation via a generalized
    eigenvalue bound and bisection on the Lagrange multiplier.

    Args:
        anchors: (M, D) anchor locations, D in {1, 2, 3}.
        distances: (M,) distances from each anchor to the unknown point.
        init: unused (kept for interface compatibility).

    Returns:
        (estimatedLocation, totalError): (D, 1) location estimate and the
        sum of absolute distance residuals.
    """
    # function [estimatedLocation, totalError] = trilaterate_beck(anchors, distances)
    # %------------------------------------------------------------------------------
    # % Trilaterates the location of a point from distances to a fixed set of
    # % anchors. Uses algorithm described by Beck et al.
    # %
    # % INPUT : anchors ... anchor locations - if we have M anchors in D
    # %                     dimensions, a is an M by D matrix
    # %         distances ... distances between anchors and the point of
    # %                       interest
    # %
    # % OUTPUT: estimatedLocation ... estimated location (D by 1)
    # %         totalError ... sum of absolute distance errors from the
    # %                        estimated point to the anchors
    # %------------------------------------------------------------------------------
    # print(anchors.shape)  # M x D
    # print(distances.shape)  # M x 1
    assert len(distances.shape) == 1
    assert anchors.shape[1] in [1, 2, 3]
    assert anchors.shape[0] == distances.shape[0]
    # d = size(anchors, 2);
    d = anchors.shape[1]
    # m = length(distances);
    m = len(distances)
    # A = [-2 * anchors, ones(m, 1)];
    A = np.concatenate([-2 * anchors, np.ones([m, 1])], axis=1)
    assert A.shape == (m, d + 1)
    # b = distances.^2 - sum(anchors.^2, 2);
    b = distances**2 - np.sum(anchors**2, 1)
    assert len(b) == m
    b = b.reshape([m, 1])
    # D = [eye(d), zeros(d, 1); zeros(1, d), 0];
    D = np.concatenate([np.eye(d), np.zeros([d, 1])], axis=1)
    D = np.concatenate([D, np.zeros([1, d + 1])], axis=0)
    assert D.shape == (d + 1, d + 1)
    # f = [zeros(d, 1); -0.5];
    f = np.zeros([d + 1, 1])
    f[-1] = -0.5

    # y = @(lambda) (A'*A + lambda * D) \ (A'*b - lambda * f);
    def y(x):
        # Solution of the regularized normal equations for multiplier x.
        # phi = @(lambda) y(lambda)' * D * y(lambda) + 2 * f' * y(lambda);
        num = (A.T @ b - x * f)
        rden = np.linalg.pinv(A.T @ A + x * D)
        a = rden @ num
        assert a.shape == (d + 1, 1)
        return a

    def phi(x):
        # Constraint residual; its root is the optimal multiplier.
        p = (y(x).T @ D @ y(x) + 2 * f.T @ y(x)).squeeze()
        return p

    # Lower bound of the feasible interval from the largest generalized
    # eigenvalue of (D, A'A); bisection runs over [-1/lambda1, 1000].
    eigDAA = sp.linalg.eigvals(D, A.T @ A)
    lambda1 = eigDAA[-1]
    a1 = -1 / lambda1
    a2 = 1000
    epsAbs = 1e-6
    epsStep = 1e-6
    # warning off;
    # while (a2 - a1 >= epsStep || ( abs( phi(a1) ) >= epsAbs && abs( phi(a2) ) >= epsAbs ) )
    # NOTE(review): np.abs(phi(a2) >= epsAbs) takes the abs of a boolean --
    # the parenthesis placement differs from phi(a1); verify against the
    # original MATLAB condition. Also 'c' is unbound if the loop body never
    # runs -- confirm the interval always needs at least one bisection.
    while (a2 - a1) >= epsStep \
            or (np.abs(phi(a1)) >= epsAbs and np.abs(phi(a2) >= epsAbs)):
        c = (a1 + a2) / 2
        if (phi(c) == 0):
            break
        elif (phi(a1) * phi(c) < 0):
            a2 = c
        else:
            a1 = c
    estimatedLocation = np.real(y(c)[:d, :])
    # totalError = sum(abs(sqrt(sum(bsxfun(@minus, anchors', estimatedLocation).^2)) - distances(:)'))
    estimatedDistances = np.sqrt(np.sum((anchors.T - estimatedLocation)**2, 0))
    totalError = np.sum(np.abs(estimatedDistances - distances))
    return estimatedLocation, totalError
b237bae3ff329fd372f9bc9676ce515ebc403656 | 3,304 | py | Python | pyramid_peewee_conn/tests/__init__.py | inpool/pyramid_peewee_conn | b576af85f81a992479454ca79bdcd585529e23db | [
"MIT"
] | 2 | 2017-10-20T06:53:05.000Z | 2021-01-14T03:38:56.000Z | pyramid_peewee_conn/tests/__init__.py | inpool/pyramid_peewee_conn | b576af85f81a992479454ca79bdcd585529e23db | [
"MIT"
] | null | null | null | pyramid_peewee_conn/tests/__init__.py | inpool/pyramid_peewee_conn | b576af85f81a992479454ca79bdcd585529e23db | [
"MIT"
] | null | null | null | '''
The unit test for pyramid_peewee_conn using pytest.
Author: Inpool
Email: inpool@126.com
'''
from os.path import abspath, dirname, join as pathjoin
from configparser import ConfigParser, ExtendedInterpolation
from pyramid.config import Configurator
from peewee import SqliteDatabase, PostgresqlDatabase, MySQLDatabase, Proxy
from pytest import raises
from pyramid_peewee_conn import _data, get_db, get_proxy, init_proxies
CFG_FILE = abspath(pathjoin(dirname(__file__), 'config.ini'))
CONFIG = ConfigParser(interpolation=ExtendedInterpolation())
CONFIG.read(CFG_FILE)
def setup_function():
    'Run before each testing'
    # Reset the module-level registry so each test starts clean.
    for data in _data.values():
        data.clear()
def load_config(style):
    'Instantiate Configurator with specified setting'
    # 'style' names a section of config.ini (e.g. 'new', 'old', 'combined').
    config = Configurator(settings=CONFIG[style])
    config.include('pyramid_peewee_conn')
def test_get_db_before_included():
    "Test get_db() when Configurator hasn't been instantiated"
    # get_db() exits the process when no configuration is present.
    with raises(SystemExit):
        get_db()
def test_get_db_combined_style():
    'Test get_db() when config as both new style and old style present'
    load_config('combined')
    db_default = get_db()
    db_test = get_db('test')
    db_test_db = get_db('test.db')
    db_mysqldb = get_db('mysqldb')
    # New-style entries resolve by name; old-style URLs resolve by database.
    assert isinstance(db_default, SqliteDatabase)
    assert db_default.database == ':memory:'
    assert isinstance(db_test, PostgresqlDatabase)
    assert db_test.database == 'pgdbname'
    assert isinstance(db_test_db, SqliteDatabase)
    assert db_test_db.database == 'test.db'
    assert isinstance(db_mysqldb, MySQLDatabase)
    assert db_mysqldb.database == 'mysqldb'
def test_get_db_new_style():
    'Test get_db() when config as new style'
    load_config('new')
    db_default = get_db()
    db_test = get_db('test')
    assert isinstance(db_default, SqliteDatabase)
    assert db_default.database == ':memory:'
    assert isinstance(db_test, PostgresqlDatabase)
    assert db_test.database == 'pgdbname'
def test_get_db_old_style():
    'Test get_db() when config as old style'
    load_config('old')
    db_default = get_db()
    db_test_db = get_db('test.db')
    db_mysqldb = get_db('mysqldb')
    db_test = get_db('test')
    # With old-style URLs the first sqlite entry doubles as the default.
    assert isinstance(db_default, SqliteDatabase)
    assert isinstance(db_test_db, SqliteDatabase)
    assert db_default.database == db_test_db.database == 'test.db'
    assert isinstance(db_mysqldb, MySQLDatabase)
    assert db_mysqldb.database == 'mysqldb'
    assert isinstance(db_test, MySQLDatabase)
    assert db_test.database == 'test'
def test_get_proxy():
    "Test get_proxy()"
    proxy = get_proxy()
    assert isinstance(proxy, Proxy)
    assert proxy is _data['proxies'][None]
    # Unbound until configuration is loaded, then bound to the default db.
    assert proxy.obj is None
    load_config('new')
    assert proxy.obj is get_db()
def test_init_proxies_before_included():
    "Test init_proxies() when Configurator hasn't been instantiated"
    proxy = get_proxy()
    with raises(SystemExit):
        init_proxies()
    # The proxy must stay unbound after the failed initialization.
    assert proxy.obj is None
def test_init_proxies():
    "Test get_db() when Configurator hasbeen instantiated"
    load_config('new')
    proxy = get_proxy()
    init_proxies()
    assert proxy.obj is get_db()
    # Requesting a proxy for an undefined database makes init_proxies exit.
    proxy_undefined = get_proxy('undefined')
    with raises(SystemExit):
        init_proxies()
| 24.656716 | 75 | 0.720036 |
bc79219a2539ed78ac391580e4633eade37791b6 | 28,100 | py | Python | mtkclient/Library/xflash_ext.py | P-Salik/mtkclient | ca702a4ec84da4ec607f1e6484ff605e79a69f46 | [
"MIT"
] | null | null | null | mtkclient/Library/xflash_ext.py | P-Salik/mtkclient | ca702a4ec84da4ec607f1e6484ff605e79a69f46 | [
"MIT"
] | null | null | null | mtkclient/Library/xflash_ext.py | P-Salik/mtkclient | ca702a4ec84da4ec607f1e6484ff605e79a69f46 | [
"MIT"
] | null | null | null | import hmac
import os
from struct import unpack, pack
from mtkclient.config.payloads import pathconfig
from mtkclient.Library.error import ErrorHandler
from mtkclient.Library.hwcrypto import crypto_setup, hwcrypto
from mtkclient.Library.utils import LogBase, progress, logsetup, find_binary
from mtkclient.Library.seccfg import seccfg
from binascii import hexlify
import hashlib
from mtkclient.Library.utils import mtktee
import json
class XCmd:
    """Sub-command IDs of the custom DA extension payload (da_x.bin)."""
    CUSTOM_ACK = 0x0F0000
    CUSTOM_READ = 0x0F0001            # read arbitrary memory
    CUSTOM_READREGISTER = 0x0F0002    # read a 32-bit register
    CUSTOM_WRITE = 0x0F0003           # write arbitrary memory
    CUSTOM_WRITEREGISTER = 0x0F0004   # write a 32-bit register
    CUSTOM_INIT_RPMB = 0x0F0005       # eMMC RPMB key derivation
    CUSTOM_READ_RPMB = 0x0F0006
    CUSTOM_WRITE_RPMB = 0x0F0007
    CUSTOM_INIT_UFS_RPMB = 0x0F0008   # UFS RPMB key derivation
    CUSTOM_READ_UFS_RPMB = 0x0F0009
    CUSTOM_WRITE_UFS_RPMB = 0x0F000A
    CUSTOM_SET_RPMB_KEY = 0x0F000B
# Human-readable RPMB result strings, indexed by the status code the DA
# returns from RPMB operations (0 = success).
rpmb_error = [
    "",
    "General failure",
    "Authentication failure",
    "Counter failure",
    "Address failure",
    "Write failure",
    "Read failure",
    "Authentication key not yet programmed"
]
class xflashext(metaclass=LogBase):
def __init__(self, mtk, xflash, loglevel):
    """Bind logging, the mtk session and the xflash DA command helpers.

    Args:
        mtk: Mtk session object (port, config, DA loader).
        xflash: DAXFlash instance whose send/read primitives are reused.
        loglevel: logging verbosity for this extension.
    """
    self.pathconfig = pathconfig()
    self.__logger = logsetup(self, self.__logger, loglevel, mtk.config.gui)
    self.info = self.__logger.info
    self.debug = self.__logger.debug
    self.error = self.__logger.error
    self.warning = self.__logger.warning
    self.mtk = mtk
    self.loglevel = loglevel
    self.__logger = self.__logger
    self.eh = ErrorHandler()
    self.config = self.mtk.config
    # Shortcuts to the USB transport primitives.
    self.usbwrite = self.mtk.port.usbwrite
    self.usbread = self.mtk.port.usbread
    self.echo = self.mtk.port.echo
    self.rbyte = self.mtk.port.rbyte
    self.rdword = self.mtk.port.rdword
    self.rword = self.mtk.port.rword
    # Shortcuts to the xflash DA protocol helpers.
    self.xflash = xflash
    self.xsend = self.xflash.xsend
    self.send_devctrl = self.xflash.send_devctrl
    self.xread = self.xflash.xread
    self.status = self.xflash.status
    # Filled in by patch() from the DA configuration.
    self.da2 = None
    self.da2address = None
def patch(self):
    """Build the da_x extension payload patched with DA2 function pointers.

    Scans the loaded DA2 image for known instruction patterns (eMMC/UFS
    I/O and command registration helpers), converts the hits into Thumb
    function pointers relative to the DA2 load address, and writes them
    into the placeholder slots of da_x.bin.

    Returns:
        bytearray: the patched payload, or None if da_x.bin is missing or
        the essential placeholders could not be resolved.
    """
    self.da2 = self.xflash.daconfig.da2
    self.da2address = self.xflash.daconfig.da.region[2].m_start_addr  # at_address
    daextensions = os.path.join(self.pathconfig.get_payloads_path(), "da_x.bin")
    if os.path.exists(daextensions):
        daextdata = bytearray(open(daextensions, "rb").read())
        # open("out" + hex(self.da2address) + ".da", "wb").write(da2)
        # Locate DA2-internal helpers by their instruction signatures.
        register_cmd = find_binary(self.da2, b"\x38\xB5\x05\x46\x0C\x20")
        # sec_enc_seccfg = find_binary(self.da2, b"\x0E\x4B\x70\xB5\x06\x46")
        mmc_get_card = find_binary(self.da2, b"\x4B\x4F\xF4\x3C\x72")
        if mmc_get_card is not None:
            mmc_get_card -= 1
        else:
            # Fallback signature for other DA2 builds.
            mmc_get_card = find_binary(self.da2, b"\xA3\xEB\x00\x13\x18\x1A\x02\xEB\x00\x10")
            if mmc_get_card is not None:
                mmc_get_card -= 10
        # Iterate over candidate matches until the expected follow-up
        # bytes (b3 21) confirm the right mmc_set_part_config.
        pos = 0
        while True:
            mmc_set_part_config = find_binary(self.da2, b"\xC3\x69\x0A\x46\x10\xB5", pos)
            if mmc_set_part_config is None:
                break
            else:
                pos = mmc_set_part_config + 1
                if self.da2[mmc_set_part_config + 20:mmc_set_part_config + 22] == b"\xb3\x21":
                    break
        if mmc_set_part_config is None:
            mmc_set_part_config = find_binary(self.da2, b"\xC3\x69\x13\xF0\x01\x03")
        mmc_rpmb_send_command = find_binary(self.da2, b"\xF8\xB5\x06\x46\x9D\xF8\x18\x50")
        if mmc_rpmb_send_command is None:
            mmc_rpmb_send_command = find_binary(self.da2, b"\x2D\xE9\xF0\x41\x4F\xF6\xFD\x74")
        # Placeholder slots inside da_x.bin to be overwritten below.
        register_ptr = daextdata.find(b"\x11\x11\x11\x11")
        mmc_get_card_ptr = daextdata.find(b"\x22\x22\x22\x22")
        mmc_set_part_config_ptr = daextdata.find(b"\x33\x33\x33\x33")
        mmc_rpmb_send_command_ptr = daextdata.find(b"\x44\x44\x44\x44")
        ufshcd_queuecommand_ptr = daextdata.find(b"\x55\x55\x55\x55")
        ufshcd_get_free_tag_ptr = daextdata.find(b"\x66\x66\x66\x66")
        ptr_g_ufs_hba_ptr = daextdata.find(b"\x77\x77\x77\x77")
        # Resolve the global UFS host controller structure, if present.
        g_ufs_hba = None
        ptr_g_ufs_hba = find_binary(self.da2, b"\x20\x46\x0B\xB0\xBD\xE8\xF0\x83\x00\xBF")
        if ptr_g_ufs_hba is None:
            ptr_g_ufs_hba = find_binary(self.da2, b"\x21\x46\x02\xF0\x02\xFB\x1B\xE6\x00\xBF")
            if ptr_g_ufs_hba is not None:
                g_ufs_hba = int.from_bytes(self.da2[ptr_g_ufs_hba + 10 + 0x8:ptr_g_ufs_hba + 10 + 0x8 + 4],
                                           'little')
        else:
            g_ufs_hba = int.from_bytes(self.da2[ptr_g_ufs_hba + 10:ptr_g_ufs_hba + 10 + 4], 'little')
            # open("da2_"+hex(self.da2address)+".bin","wb").write(self.da2)
        if ptr_g_ufs_hba is not None:
            ufshcd_get_free_tag = find_binary(self.da2, b"\xB5.\xB1\x90\xF8")
            ufshcd_queuecommand = find_binary(self.da2, b"\x2D\xE9\xF8\x43\x01\x27")
        else:
            # No UFS support detected in this DA2 image.
            g_ufs_hba = None
            ufshcd_get_free_tag = None
            ufshcd_queuecommand = None
        if register_ptr != -1 and mmc_get_card_ptr != -1:
            # Convert offsets to absolute Thumb addresses (| 1 sets the
            # Thumb bit); missing helpers become null pointers.
            if register_cmd:
                register_cmd = register_cmd + self.da2address | 1
            else:
                register_cmd = 0
            if mmc_get_card:
                mmc_get_card = mmc_get_card + self.da2address | 1
            else:
                mmc_get_card = 0
            if mmc_set_part_config:
                mmc_set_part_config = mmc_set_part_config + self.da2address | 1
            else:
                mmc_set_part_config = 0
            if mmc_rpmb_send_command:
                mmc_rpmb_send_command = mmc_rpmb_send_command + self.da2address | 1
            else:
                mmc_rpmb_send_command = 0
            if ufshcd_get_free_tag:
                ufshcd_get_free_tag = ufshcd_get_free_tag + (self.da2address - 1) | 1
            else:
                ufshcd_get_free_tag = 0
            if ufshcd_queuecommand:
                ufshcd_queuecommand = ufshcd_queuecommand + self.da2address | 1
            else:
                ufshcd_queuecommand = 0
            if g_ufs_hba is None:
                g_ufs_hba = 0
            # Patch the addr
            daextdata[register_ptr:register_ptr + 4] = pack("<I", register_cmd)
            daextdata[mmc_get_card_ptr:mmc_get_card_ptr + 4] = pack("<I", mmc_get_card)
            daextdata[mmc_set_part_config_ptr:mmc_set_part_config_ptr + 4] = pack("<I", mmc_set_part_config)
            daextdata[mmc_rpmb_send_command_ptr:mmc_rpmb_send_command_ptr + 4] = pack("<I", mmc_rpmb_send_command)
            daextdata[ufshcd_get_free_tag_ptr:ufshcd_get_free_tag_ptr + 4] = pack("<I", ufshcd_get_free_tag)
            daextdata[ufshcd_queuecommand_ptr:ufshcd_queuecommand_ptr + 4] = pack("<I", ufshcd_queuecommand)
            daextdata[ptr_g_ufs_hba_ptr:ptr_g_ufs_hba_ptr + 4] = pack("<I", g_ufs_hba)
            # print(hexlify(daextdata).decode('utf-8'))
            # open("daext.bin","wb").write(daextdata)
            return daextdata
    return None
def patch_da1(self, da1):
    """Return a copy of DA1 with the DA version check neutralized.

    Args:
        da1: raw DA1 stage image.
    Returns:
        bytearray: patched copy (original is unchanged).
    """
    self.info("Patching da1 ...")
    da1patched = bytearray(da1)
    # Patch security
    da_version_check = find_binary(da1, b"\x40\xB1\x01\x23\x4F\xF0")
    if da_version_check is not None:
        # Overwrite the immediate so the version check always passes.
        da1patched[da_version_check+0x2] = 0x0
    else:
        self.warning("Error on patching da1 version check...")
    return da1patched
def patch_da2(self, da2):
    """Return a copy of DA2 with security, hash and write checks disabled.

    Three independent patches are attempted and a warning is logged for
    each one whose instruction pattern cannot be found:
      * the "security enabled" flag setter,
      * the image hash verification,
      * the "write not allowed" guards.

    Args:
        da2: raw DA2 stage image.
    Returns:
        bytearray: patched copy (original is unchanged).
    """
    self.info("Patching da2 ...")
    da2patched = bytearray(da2)
    # Patch security
    is_security_enabled = find_binary(da2, b"\x01\x23\x03\x60\x00\x20\x70\x47")
    # Fix: find_binary signals a miss with None (checked with `is None`
    # everywhere else in this file), not -1 -- the old `!= -1` test was
    # always true and crashed on a None slice when the pattern was absent.
    if is_security_enabled is not None:
        da2patched[is_security_enabled:is_security_enabled + 2] = b"\x00\x23"
    else:
        self.warning("Security check not patched.")
    # Patch hash check: three known signature variants, tried in order.
    authaddr = find_binary(da2, b"\x04\x00\x07\xC0")
    if authaddr:
        da2patched[authaddr:authaddr + 4] = b"\x00\x00\x00\x00"
    elif authaddr is None:
        authaddr = find_binary(da2, b"\x4F\xF0\x04\x09\xCC\xF2\x07\x09")
        if authaddr:
            da2patched[authaddr:authaddr + 8] = b"\x4F\xF0\x00\x09\x4F\xF0\x00\x09"
        else:
            authaddr = find_binary(da2, b"\x4F\xF0\x04\x09\x32\x46\x01\x98\x03\x99\xCC\xF2\x07\x09")
            if authaddr:
                da2patched[authaddr:authaddr + 14] = b"\x4F\xF0\x00\x09\x32\x46\x01\x98\x03\x99\x4F\xF0\x00\x09"
            else:
                self.warning("Hash check not patched.")
    # Patch write not allowed (bytearray.find legitimately returns -1 here).
    idx = 0
    patched = False
    while idx != -1:
        idx = da2patched.find(b"\x37\xB5\x00\x23\x04\x46\x02\xA8")
        if idx != -1:
            # Replace the guard prologue with "return 0".
            da2patched[idx:idx + 8] = b"\x37\xB5\x00\x20\x03\xB0\x30\xBD"
            patched = True
        else:
            idx = da2patched.find(b"\x0C\x23\xCC\xF2\x02\x03")
            if idx != -1:
                da2patched[idx:idx + 6] = b"\x00\x23\x00\x23\x00\x23"
                idx2 = da2patched.find(b"\x2A\x23\xCC\xF2\x02\x03")
                if idx2 != -1:
                    da2patched[idx2:idx2 + 6] = b"\x00\x23\x00\x23\x00\x23"
                patched = True
    if not patched:
        self.warning("Write not allowed not patched.")
    return da2patched
def cmd(self, cmd):
if self.xsend(self.xflash.Cmd.DEVICE_CTRL):
status = self.status()
if status == 0x0:
if self.xsend(cmd):
status = self.status()
if status == 0x0:
return True
return False
def custom_read(self, addr, length):
    """Read `length` bytes from device memory at `addr` via the extension.

    Returns the bytes read, or b"" on failure.
    """
    if self.cmd(XCmd.CUSTOM_READ):
        self.xsend(data=addr, is64bit=True)
        self.xsend(length)
        data = self.xread()
        status = self.status()
        if status == 0:
            return data
    return b""
def custom_readregister(self, addr):
    """Read one 32-bit register at `addr`; returns raw bytes or b"" on error."""
    if self.cmd(XCmd.CUSTOM_READREGISTER):
        self.xsend(addr)
        data = self.xread()
        status = self.status()
        if status == 0:
            return data
    return b""
def custom_write(self, addr, data):
    """Write `data` to device memory at `addr`; returns True on success."""
    if self.cmd(XCmd.CUSTOM_WRITE):
        self.xsend(data=addr, is64bit=True)
        self.xsend(len(data))
        self.xsend(data)
        status = self.status()
        if status == 0:
            return True
    return False
def custom_writeregister(self, addr, data):
    """Write one 32-bit value `data` to register `addr`; True on success."""
    if self.cmd(XCmd.CUSTOM_WRITEREGISTER):
        self.xsend(addr)
        self.xsend(data)
        status = self.status()
        if status == 0:
            return True
    return False
def readmem(self, addr, dwords=1):
    """Read `dwords` little-endian 32-bit words starting at `addr`.

    Returns a single int when dwords == 1, otherwise a list of ints;
    returns False if a register read fails.
    """
    res = []
    if dwords < 0x20:
        # Small reads: one register transaction per word.
        for pos in range(dwords):
            val = self.custom_readregister(addr + pos * 4)
            if val == b"":
                return False
            data = unpack("<I", val)[0]
            if dwords == 1:
                self.debug(f"RX: {hex(addr + (pos * 4))} -> " + hex(data))
                return data
            res.append(data)
    else:
        # Bulk reads: one memory transfer, then split into words.
        res = self.custom_read(addr, dwords * 4)
        res = [unpack("<I", res[i:i + 4])[0] for i in range(0, len(res), 4)]
    self.debug(f"RX: {hex(addr)} -> " + hexlify(b"".join(pack("<I", val) for val in res)).decode('utf-8'))
    return res
def writeregister(self, addr, dwords):
    """Write one or more 32-bit words starting at `addr`.

    Args:
        addr: start address.
        dwords: a single int or a list of ints (little-endian words).
    Returns:
        True on success, False if a register write fails.
    """
    if isinstance(dwords, int):
        dwords = [dwords]
    pos = 0
    if len(dwords) < 0x20:
        # Small writes: one register transaction per word.
        for val in dwords:
            self.debug(f"TX: {hex(addr + pos)} -> " + hex(val))
            if not self.custom_writeregister(addr + pos, val):
                return False
            pos += 4
    else:
        # Bulk writes: pack everything and use one memory transfer.
        dat = b"".join([pack("<I", val) for val in dwords])
        self.custom_write(addr, dat)
    return True
def writemem(self, addr, data):
    """Write a byte buffer to `addr` word-by-word (zero-padded to 4 bytes)."""
    for i in range(0, len(data), 4):
        value = data[i:i + 4]
        while len(value) < 4:
            value += b"\x00"
        self.writeregister(addr + i, unpack("<I", value))
    return True
def custom_rpmb_read(self, sector, ufs=False):
    """Read one RPMB frame (0x100 bytes) at `sector`.

    Args:
        sector: RPMB sector index.
        ufs: use the UFS RPMB command instead of the eMMC one.
    Returns:
        Frame bytes, or b'' on failure (error text logged via rpmb_error).
    """
    cmd = XCmd.CUSTOM_READ_RPMB
    if ufs:
        cmd = XCmd.CUSTOM_READ_UFS_RPMB
    if self.cmd(cmd):
        self.xsend(sector)
        resp = unpack("<H", self.xread())[0]
        if resp == 0x0:
            data = self.xread()
            status = self.status()
            if status == 0:
                return data
        else:
            self.error(rpmb_error[resp])
            status = self.status()
    return b''
def custom_rpmb_write(self, sector, data: bytes, ufs=False):
    """Write one RPMB frame (exactly 0x100 bytes) at `sector`.

    Args:
        sector: RPMB sector index.
        data: frame contents; must be exactly 0x100 bytes.
        ufs: use the UFS RPMB command instead of the eMMC one.
    Returns:
        The device response code (0) on success, False on failure.
    """
    if len(data) != 0x100:
        self.error("Incorrect rpmb frame length. Aborting")
        return False
    cmd = XCmd.CUSTOM_WRITE_RPMB
    if ufs:
        cmd = XCmd.CUSTOM_WRITE_UFS_RPMB
    if self.cmd(cmd):
        self.xsend(sector)
        self.xsend(data[:0x100])
        resp = unpack("<H", self.xread())[0]
        if resp != 0:
            self.error(rpmb_error[resp])
            status = self.status()
            return False
        status = self.status()
        if status == 0:
            return resp
    return False
def custom_rpmb_init(self):
    """Derive and program the RPMB key, then initialize RPMB access.

    If the SoC exposes a MEID, the SEJ-derived rpmbkey is generated and
    sent to the DA (verified by reading it back).  Afterwards the eMMC or
    UFS RPMB init command is issued and the derived key is logged.

    Returns:
        True if the DA reported a derived key, False otherwise.
    """
    hwc = self.cryptosetup()
    if self.config.chipconfig.meid_addr:
        # 0x1008ec is where the preloader stores the MEID in SRAM.
        meid = self.custom_read(0x1008ec, 16)
        if meid != b"":
            # self.config.set_meid(meid)
            self.info("Generating sej rpmbkey...")
            self.setotp(hwc)
            rpmbkey = hwc.aes_hwcrypt(mode="rpmb", data=meid, btype="sej")
            if rpmbkey is not None:
                if self.cmd(XCmd.CUSTOM_SET_RPMB_KEY):
                    self.xsend(rpmbkey)
                    read_key = self.xread()
                    if self.status() == 0x0:
                        if rpmbkey == read_key:
                            self.info("Setting rpmbkey: ok")
    # Pick eMMC vs. UFS init based on which storage reports RPMB support.
    ufs = False
    if self.xflash.emmc.rpmb_size != 0:
        ufs = False
    elif self.xflash.ufs.block_size != 0:
        ufs = True
    cmd = XCmd.CUSTOM_INIT_RPMB
    if ufs:
        cmd = XCmd.CUSTOM_INIT_UFS_RPMB
    if self.cmd(cmd):
        derivedrpmb = self.xread()
        status = self.status()
        if status == 0:
            self.info("Derived rpmb key:" + hexlify(derivedrpmb).decode('utf-8'))
            return True
    self.error("Failed to derive a valid rpmb key.")
    return False
def setotp(self, hwc):
    """Extract the OTP blob from the preloader (if any) and feed it to SEJ.

    Falls back to 32 zero bytes when no preloader or marker is available.
    """
    otp = None
    if self.mtk.config.preloader is not None:
        # The OTP data follows the "MMM\x010" marker in the preloader.
        idx = self.mtk.config.preloader.find(b"\x4D\x4D\x4D\x01\x30")
        if idx != -1:
            otp = self.mtk.config.preloader[idx + 0xC:idx + 0xC + 32]
    if otp is None:
        otp = 32 * b"\x00"
    hwc.sej.sej_set_otp(otp)
def read_rpmb(self, filename=None, display=True):
    """Dump the whole RPMB area to a file.

    Args:
        filename: output path; defaults to "rpmb.bin".
        display: show a progress bar while reading.
    Returns:
        True on success, False on any read error or missing RPMB.
    """
    progressbar = progress(1, self.mtk.config.guiprogress)
    sectors = 0
    # val = self.custom_rpmb_init()
    ufs = False
    if self.xflash.emmc.rpmb_size != 0:
        sectors = self.xflash.emmc.rpmb_size // 0x100
        ufs = False
    elif self.xflash.ufs.block_size != 0:
        # Fixed UFS RPMB size: 512 * 256 frames.
        sectors = (512 * 256)
        ufs = True
    if filename is None:
        filename = "rpmb.bin"
    if sectors > 0:
        with open(filename, "wb") as wf:
            for sector in range(sectors):
                if display:
                    progressbar.show_progress("RPMB read", sector * 0x100, sectors * 0x100, display)
                data = self.custom_rpmb_read(sector=sector, ufs=ufs)
                if data == b"":
                    self.error("Couldn't read rpmb.")
                    return False
                wf.write(data)
        self.info("Done reading rpmb to " + filename)
        return True
    return False
def write_rpmb(self, filename=None, display=True):
    """Write an RPMB image file frame-by-frame to the device.

    Args:
        filename: input file containing 0x100-byte RPMB frames (required).
        display: show a progress bar while writing.
    Returns:
        True on success, False on bad arguments or a write failure.
    """
    progressbar = progress(1, self.mtk.config.guiprogress)
    if filename is None:
        self.error("Filename has to be given for writing to rpmb")
        return False
    if not os.path.exists(filename):
        # Fix: include the actual filename (was a placeholder-less f-string).
        self.error(f"Couldn't find {filename} for writing to rpmb.")
        return False
    ufs = False
    sectors = 0
    if self.xflash.emmc.rpmb_size != 0:
        sectors = self.xflash.emmc.rpmb_size // 0x100
    elif self.xflash.ufs.block_size != 0:
        sectors = (512 * 256)
        # Fix: flag UFS so custom_rpmb_write uses the UFS command path
        # (mirrors read_rpmb; previously the flag was never set).
        ufs = True
    if self.custom_rpmb_init():
        if sectors > 0:
            with open(filename, "rb") as rf:
                for sector in range(sectors):
                    if display:
                        progressbar.show_progress("RPMB written", sector * 0x100, sectors * 0x100, display)
                    if not self.custom_rpmb_write(sector=sector, data=rf.read(0x100), ufs=ufs):
                        self.error(f"Couldn't write rpmb at sector {sector}.")
                        return False
            self.info(f"Done writing {filename} to rpmb")
            return True
    return False
def erase_rpmb(self, display=True):
    """Overwrite every RPMB frame with zeros.

    Args:
        display: show a progress bar while erasing.
    Returns:
        True on success, False on a write failure or missing RPMB.
    """
    progressbar = progress(1, self.mtk.config.guiprogress)
    ufs = False
    sectors = 0
    if self.xflash.emmc.rpmb_size != 0:
        sectors = self.xflash.emmc.rpmb_size // 0x100
    elif self.xflash.ufs.block_size != 0:
        sectors = (512 * 256)
        # Fix: flag UFS so custom_rpmb_write uses the UFS command path
        # (mirrors read_rpmb; previously the flag was never set).
        ufs = True
    if self.custom_rpmb_init():
        if sectors > 0:
            for sector in range(sectors):
                if display:
                    progressbar.show_progress("RPMB erased", sector * 0x100, sectors * 0x100, display)
                if not self.custom_rpmb_write(sector=sector, data=b"\x00" * 0x100, ufs=ufs):
                    self.error(f"Couldn't erase rpmb at sector {sector}.")
                    return False
            self.info(f"Done erasing rpmb")
            return True
    return False
def cryptosetup(self):
    """Build a hwcrypto instance wired to this chip's crypto peripherals.

    Copies the relevant base addresses from the chip configuration and
    binds this class's memory accessors as the crypto backend I/O.
    """
    setup = crypto_setup()
    setup.blacklist = self.config.chipconfig.blacklist
    setup.gcpu_base = self.config.chipconfig.gcpu_base
    setup.dxcc_base = self.config.chipconfig.dxcc_base
    setup.da_payload_addr = self.config.chipconfig.da_payload_addr
    setup.sej_base = self.config.chipconfig.sej_base
    setup.read32 = self.readmem
    setup.write32 = self.writeregister
    setup.writemem = self.writemem
    setup.hwcode = self.config.hwcode
    return hwcrypto(setup, self.loglevel, self.config.gui)
def seccfg(self, lockflag):
    """Lock or unlock the device by rewriting the seccfg partition.

    Reads the current seccfg, detects the hash algorithm (hw V3 ->
    hw V2 -> sw) by reproducing the stored hash, then writes a new
    seccfg with the requested lock state.

    Args:
        lockflag: "lock" or "unlock".
    Returns:
        (success: bool, message: str)
    """
    if lockflag not in ["unlock", "lock"]:
        return False, "Valid flags are: unlock, lock"
    hwc = self.cryptosetup()
    # 'seccfg' resolves to the imported seccfg class, not this method.
    sc_org = seccfg(hwc)
    data, guid_gpt = self.xflash.partition.get_gpt(self.mtk.config.gpt_settings, "user")
    seccfg_data = None
    partition = None
    if guid_gpt is None:
        return False, "Error getting the partition table."
    for rpartition in guid_gpt.partentries:
        if rpartition.name == "seccfg":
            partition = rpartition
            seccfg_data = self.xflash.readflash(
                addr=partition.sector * self.mtk.daloader.daconfig.pagesize,
                length=partition.sectors * self.mtk.daloader.daconfig.pagesize,
                filename="", parttype="user", display=False)
            break
    if seccfg_data is None:
        return False, "Couldn't detect existing seccfg partition. Aborting unlock."
    # 0x4D4D4D4D ("MMMM") is the seccfg magic.
    if seccfg_data[:4] != pack("<I", 0x4D4D4D4D):
        return False, "Unknown seccfg partition header. Aborting unlock."
    if not sc_org.parse(seccfg_data):
        return False, "Error on parsing seccfg"
    sc_new = seccfg(hwc)
    self.setotp(hwc)
    # Try hash variants until the recreated hash matches the stored one.
    hwtype = "hw"
    V3 = True
    sc_new.create(sc_org=sc_org, hwtype=hwtype, V3=V3)
    if sc_org.hash != sc_new.hash:
        V3 = False
        sc_new.create(sc_org=sc_org, hwtype=hwtype, V3=V3)
        if sc_org.hash != sc_new.hash:
            hwtype = "sw"
            sc_new.create(sc_org=sc_org, hwtype=hwtype)
            if sc_org.hash != sc_new.hash:
                return False, "Device has is either already unlocked or algo is unknown. Aborting."
    writedata = sc_new.create(sc_org=None, hwtype=hwtype, lockflag=lockflag, V3=V3)
    if self.xflash.writeflash(addr=partition.sector * self.mtk.daloader.daconfig.pagesize,
                              length=len(writedata),
                              filename=None, wdata=writedata, parttype="user", display=True):
        return True, "Successfully wrote seccfg."
    return False, "Error on writing seccfg config to flash."
def decrypt_tee(self, filename="tee1.bin", aeskey1: bytes = None, aeskey2: bytes = None):
    """Decrypt every MTK TEE blob found in a dumped tee image.

    Scans `filename` for the "EET KTM " marker, parses each blob with
    mtktee and writes the decrypted payload to tee_<offset>.dec.

    Args:
        filename: path of the dumped tee partition image.
        aeskey1, aeskey2: optional AES keys forwarded to hwcrypto.mtee.
    """
    hwc = self.cryptosetup()
    with open(filename, "rb") as rf:
        data = rf.read()
        idx = 0
        while idx != -1:
            idx = data.find(b"EET KTM ", idx + 1)
            if idx != -1:
                mt = mtktee()
                mt.parse(data[idx:])
                rdata = hwc.mtee(data=mt.data, keyseed=mt.keyseed, ivseed=mt.ivseed,
                                 aeskey1=aeskey1, aeskey2=aeskey2)
                # One output file per blob, named by its offset.
                open("tee_" + hex(idx) + ".dec", "wb").write(rdata)
def generate_keys(self):
    """Derive the device crypto keys and persist them to the hw settings store.

    Which keys get generated depends on which crypto engine the chip config
    exposes: dxcc (rpmb/fde/rpmb2/itrustee/prov), sej (rpmb/mtee) or gcpu
    (mtee2).  Every derived key is logged, written via
    ``self.config.hwparam.writesetting`` and collected in the returned dict.
    """
    hwc = self.cryptosetup()
    meid = self.config.get_meid()
    socid = self.config.get_socid()
    hwcode = self.config.get_hwcode()
    retval = {}
    # Persist the basic device identifiers first, independent of crypto engine.
    if meid is not None:
        self.info("MEID : " + hexlify(meid).decode('utf-8'))
        retval["meid"] = hexlify(meid).decode('utf-8')
        self.config.hwparam.writesetting("meid", hexlify(meid).decode('utf-8'))
    if socid is not None:
        self.info("SOCID : " + hexlify(socid).decode('utf-8'))
        retval["socid"] = hexlify(socid).decode('utf-8')
        self.config.hwparam.writesetting("socid", hexlify(socid).decode('utf-8'))
    if hwcode is not None:
        self.info("HWCODE : " + hex(hwcode))
        retval["hwcode"] = hex(hwcode)
        self.config.hwparam.writesetting("hwcode", hex(hwcode))
    if self.config.chipconfig.dxcc_base is not None:
        # dxcc engine: derive the full key set via hardware crypto.
        self.info("Generating dxcc rpmbkey...")
        rpmbkey = hwc.aes_hwcrypt(btype="dxcc", mode="rpmb")
        self.info("Generating dxcc fdekey...")
        fdekey = hwc.aes_hwcrypt(btype="dxcc", mode="fde")
        self.info("Generating dxcc rpmbkey2...")
        rpmb2key = hwc.aes_hwcrypt(btype="dxcc", mode="rpmb2")
        self.info("Generating dxcc km key...")
        ikey = hwc.aes_hwcrypt(btype="dxcc", mode="itrustee")
        # self.info("Generating dxcc platkey + provkey key...")
        # platkey, provkey = hwc.aes_hwcrypt(btype="dxcc", mode="prov")
        # self.info("Provkey : " + hexlify(provkey).decode('utf-8'))
        # self.info("Platkey : " + hexlify(platkey).decode('utf-8'))
        if rpmbkey is not None:
            self.info("RPMB : " + hexlify(rpmbkey).decode('utf-8'))
            self.config.hwparam.writesetting("rpmbkey", hexlify(rpmbkey).decode('utf-8'))
            retval["rpmbkey"] = hexlify(rpmbkey).decode('utf-8')
        if rpmb2key is not None:
            self.info("RPMB2 : " + hexlify(rpmb2key).decode('utf-8'))
            self.config.hwparam.writesetting("rpmb2key", hexlify(rpmb2key).decode('utf-8'))
            retval["rpmb2key"] = hexlify(rpmb2key).decode('utf-8')
        if fdekey is not None:
            self.info("FDE : " + hexlify(fdekey).decode('utf-8'))
            self.config.hwparam.writesetting("fdekey", hexlify(fdekey).decode('utf-8'))
            retval["fdekey"] = hexlify(fdekey).decode('utf-8')
        if ikey is not None:
            self.info("iTrustee : " + hexlify(ikey).decode('utf-8'))
            self.config.hwparam.writesetting("kmkey", hexlify(ikey).decode('utf-8'))
            retval["kmkey"] = hexlify(ikey).decode('utf-8')
        if self.config.chipconfig.prov_addr:
            # Provisioning key is read straight from a chip-specific address.
            provkey = self.custom_read(self.config.chipconfig.prov_addr, 16)
            self.info("PROV : " + hexlify(provkey).decode('utf-8'))
            self.config.hwparam.writesetting("provkey", hexlify(provkey).decode('utf-8'))
            retval["provkey"] = hexlify(provkey).decode('utf-8')
        """
        hrid = self.xflash.get_hrid()
        if hrid is not None:
            self.info("HRID : " + hexlify(hrid).decode('utf-8'))
            open(os.path.join("logs", "hrid.txt"), "wb").write(hexlify(hrid))
        """
        return retval
    elif self.config.chipconfig.sej_base is not None:
        # sej engine: keys are derived from the MEID.
        if os.path.exists("tee.json"):
            val = json.loads(open("tee.json", "r").read())
            self.decrypt_tee(val["filename"], bytes.fromhex(val["data"]), bytes.fromhex(val["data2"]))
        if meid == b"":
            # Fall back to reading the MEID from its fixed memory address.
            meid = self.custom_read(0x1008ec, 16)
        if meid != b"":
            # self.config.set_meid(meid)
            self.info("Generating sej rpmbkey...")
            self.setotp(hwc)
            rpmbkey = hwc.aes_hwcrypt(mode="rpmb", data=meid, btype="sej")
            if rpmbkey:
                self.info("RPMB : " + hexlify(rpmbkey).decode('utf-8'))
                self.config.hwparam.writesetting("rpmbkey", hexlify(rpmbkey).decode('utf-8'))
                retval["rpmbkey"] = hexlify(rpmbkey).decode('utf-8')
            self.info("Generating sej mtee...")
            mtee = hwc.aes_hwcrypt(mode="mtee", btype="sej")
            if mtee:
                self.config.hwparam.writesetting("mtee", hexlify(mtee).decode('utf-8'))
                self.info("MTEE : " + hexlify(mtee).decode('utf-8'))
                retval["mtee"] = hexlify(mtee).decode('utf-8')
        else:
            self.info("SEJ Mode: No meid found. Are you in brom mode ?")
    if self.config.chipconfig.gcpu_base is not None:
        # gcpu engine is only used for mtee2 on these specific hwcodes.
        if self.config.hwcode in [0x335, 0x8167]:
            self.info("Generating gcpu mtee2 key...")
            mtee2 = hwc.aes_hwcrypt(btype="gcpu", mode="mtee")
            if mtee2 is not None:
                self.info("MTEE2 : " + hexlify(mtee2).decode('utf-8'))
                self.config.hwparam.writesetting("mtee2", hexlify(mtee2).decode('utf-8'))
                retval["mtee2"] = hexlify(mtee2).decode('utf-8')
    return retval
| 43.90625 | 118 | 0.546335 |
37a341de6d547ca4ad9c80f542090b58b81faf18 | 1,110 | py | Python | questions/diameter-of-binary-tree/Solution.py | marcus-aurelianus/leetcode-solutions | 8b43e72fe1f51c84abc3e89b181ca51f09dc7ca6 | [
"MIT"
] | 141 | 2017-12-12T21:45:53.000Z | 2022-03-25T07:03:39.000Z | questions/diameter-of-binary-tree/Solution.py | marcus-aurelianus/leetcode-solutions | 8b43e72fe1f51c84abc3e89b181ca51f09dc7ca6 | [
"MIT"
] | 32 | 2015-10-05T14:09:52.000Z | 2021-05-30T10:28:41.000Z | questions/diameter-of-binary-tree/Solution.py | marcus-aurelianus/leetcode-solutions | 8b43e72fe1f51c84abc3e89b181ca51f09dc7ca6 | [
"MIT"
] | 56 | 2015-09-30T05:23:28.000Z | 2022-03-08T07:57:11.000Z | '''
Given a binary tree, you need to compute the length of the diameter of the tree. The diameter of a binary tree is the length of the longest path between any two nodes in a tree. This path may or may not pass through the root.
Example:
Given a binary tree
1
/ \
2 3
/ \
4 5
Return 3, which is the length of the path [4,2,1,3] or [5,2,1,3].
Note: The length of path between two nodes is represented by the number of edges between them.
'''
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    def diameterOfBinaryTree(self, root: TreeNode) -> int:
        """Return the length (edge count) of the longest path between any
        two nodes of the tree; the path need not pass through the root."""
        best = [0]

        def height(node):
            # Post-order: compute subtree heights while tracking the best
            # through-node path (left height + right height) seen so far.
            if node is None:
                return 0
            left = height(node.left)
            right = height(node.right)
            best[0] = max(best[0], left + right)
            return 1 + max(left, right)

        height(root)
        return best[0]
| 28.461538 | 225 | 0.552252 |
177059fec72cef65e3421848081499b0d3bbb5e7 | 808 | py | Python | tests/io/test_strava.py | GoldenCheetah/sweatpy | eed6b34ff75c16fcbad878caded8ee4d18dea589 | [
"MIT"
] | 58 | 2018-03-10T08:26:10.000Z | 2022-03-20T11:23:50.000Z | tests/io/test_strava.py | GoldenCheetah/sweatpy | eed6b34ff75c16fcbad878caded8ee4d18dea589 | [
"MIT"
] | 19 | 2018-03-10T13:09:49.000Z | 2022-03-18T10:31:19.000Z | tests/io/test_strava.py | GoldenCheetah/sweatpy | eed6b34ff75c16fcbad878caded8ee4d18dea589 | [
"MIT"
] | 20 | 2018-03-09T19:16:15.000Z | 2022-03-08T00:21:38.000Z | import pytest
import pandas as pd
import sweat
from sweat.io import strava
from .utils import sweatvcr
def test_top_level_import():
    """The package root should re-export the strava reader."""
    assert strava.read_strava == sweat.read_strava
@sweatvcr.use_cassette()
def test_read_strava():
    """read_strava should yield a time-indexed DataFrame containing exactly
    the expected activity stream columns (replayed from a VCR cassette)."""
    activity = sweat.read_strava(
        activity_id="3547667536", access_token="somerandomaccesstoken"
    )

    assert isinstance(activity, pd.DataFrame)
    assert isinstance(activity.index, pd.DatetimeIndex)

    expected_columns = {
        "elevation",
        "speed",
        "cadence",
        "grade",
        "heartrate",
        "power",
        "temperature",
        "distance",
        "moving",
        "latitude",
        "longitude",
    }
    assert set(activity.columns) == expected_columns
14fea5a2599eec0873f3cbe8fe71592621087fe4 | 2,616 | py | Python | lldb/third_party/Python/module/unittest2/unittest2/__init__.py | medismailben/llvm-project | e334a839032fe500c3bba22bf976ab7af13ce1c1 | [
"Apache-2.0"
] | 2,338 | 2018-06-19T17:34:51.000Z | 2022-03-31T11:00:37.000Z | third_party/Python/module/unittest2/unittest2/__init__.py | DalavanCloud/lldb | e913eaf2468290fb94c767d474d611b41a84dd69 | [
"Apache-2.0"
] | 3,740 | 2019-01-23T15:36:48.000Z | 2022-03-31T22:01:13.000Z | third_party/Python/module/unittest2/unittest2/__init__.py | DalavanCloud/lldb | e913eaf2468290fb94c767d474d611b41a84dd69 | [
"Apache-2.0"
] | 500 | 2019-01-23T07:49:22.000Z | 2022-03-30T02:59:37.000Z | """
unittest2
unittest2 is a backport of the new features added to the unittest testing
framework in Python 2.7. It is tested to run on Python 2.4 - 2.6.
To use unittest2 instead of unittest simply replace ``import unittest`` with
``import unittest2``.
Copyright (c) 1999-2003 Steve Purcell
Copyright (c) 2003-2010 Python Software Foundation
This module is free software, and you may redistribute it and/or modify
it under the same terms as Python itself, so long as this copyright message
and disclaimer are retained in their original form.
IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF
THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.
THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS,
AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
"""
import sys

if sys.version_info[0] >= 3:
    # Python 3 doesn't have the builtin `cmp` function anymore
    cmp_ = lambda x, y: (x > y) - (x < y)
else:
    cmp_ = cmp

# Comparator producing the reverse ordering of ``cmp_``.
reversed_cmp_ = lambda x, y: -cmp_(x, y)

# Public API of the package.
__all__ = ['TestResult', 'TestCase', 'TestSuite',
           'TextTestRunner', 'TestLoader', 'FunctionTestCase', 'main',
           'defaultTestLoader', 'SkipTest', 'skip', 'skipIf', 'skipUnless',
           'expectedFailure', 'TextTestResult', '__version__', 'collector']

__version__ = '0.5.1'

# Expose obsolete functions for backwards compatibility
__all__.extend(['getTestCaseNames', 'makeSuite', 'findTestCases'])

from unittest2.collector import collector
from unittest2.result import TestResult
from unittest2.case import (
    TestCase, FunctionTestCase, SkipTest, skip, skipIf,
    skipUnless, expectedFailure
)
from unittest2.suite import BaseTestSuite, TestSuite
from unittest2.loader import (
    TestLoader, defaultTestLoader, makeSuite, getTestCaseNames,
    findTestCases
)
from unittest2.main import TestProgram, main, main_
from unittest2.runner import TextTestRunner, TextTestResult

try:
    from unittest2.signals import (
        installHandler, registerResult, removeResult, removeHandler
    )
except ImportError:
    # Compatibility with platforms that don't have the signal module
    pass
else:
    # Signal helpers are only exported when the platform supports them.
    __all__.extend(['installHandler', 'registerResult', 'removeResult',
                    'removeHandler'])

# deprecated
_TextTestResult = TextTestResult

# Marker that tells unittest-aware tools to hide these frames in tracebacks.
__unittest = True
bae842713d6f1d73dc1ca8cf79801cd3e2000ecf | 1,522 | py | Python | taskwpomo/pomo.py | fsimkovic/taskwarrior-pomodoro | fc867e4aa7fe7ed1f94566cd27b68edd6b2370a0 | [
"MIT"
] | null | null | null | taskwpomo/pomo.py | fsimkovic/taskwarrior-pomodoro | fc867e4aa7fe7ed1f94566cd27b68edd6b2370a0 | [
"MIT"
] | null | null | null | taskwpomo/pomo.py | fsimkovic/taskwarrior-pomodoro | fc867e4aa7fe7ed1f94566cd27b68edd6b2370a0 | [
"MIT"
] | null | null | null | __author__ = 'Felix Simkovic'
__date__ = '2019-05-11'
__license__ = 'MIT License'
import datetime
import enum
import logging
import time
from taskwpomo.misc import log_call
log = logging.getLogger(__name__)
@enum.unique
class PomodoroTimes(enum.Enum):
    """Durations, in seconds, of the pomodoro interval types."""

    LONG_BREAK = 1_200  # 20 minutes
    SHORT_BREAK = 300  # 5 minutes
    WORK = 1_500  # 25 minutes
# Steps as outlined in:
# https://en.wikipedia.org/wiki/Pomodoro_Technique#Underlying_principles
STEPS = (
PomodoroTimes.WORK,
PomodoroTimes.SHORT_BREAK,
PomodoroTimes.WORK,
PomodoroTimes.SHORT_BREAK,
PomodoroTimes.WORK,
PomodoroTimes.SHORT_BREAK,
PomodoroTimes.WORK,
PomodoroTimes.SHORT_BREAK,
PomodoroTimes.WORK,
PomodoroTimes.LONG_BREAK,
)
def __init__(self):
self._cur_index = 0
@property
def completed(self):
return self._cur_index == len(self.STEPS)
@property
def current(self):
return self.STEPS[self._cur_index]
@property
def is_work_task(self):
return self.current in (PomodoroTimes.WORK, )
@property
def next(self):
if self._cur_index < len(self.STEPS) - 1:
return self.STEPS[self._cur_index + 1]
@log_call
def complete(self):
if self._cur_index < len(self.STEPS) - 1:
self._cur_index += 1
else:
self.reset()
@log_call
def reset(self):
self._cur_index = 0
@log_call
def skip(self):
self.complete()
| 20.567568 | 78 | 0.633377 |
30246f3b98e1db44853e0a65c158924504c22786 | 1,797 | py | Python | musictaxonomy/spotify/client.py | akurihara/music-taxonomy | 53eb2112e67b3ec9591411bc8117463af760adf8 | [
"MIT"
] | 3 | 2019-05-06T04:10:47.000Z | 2020-02-20T01:26:32.000Z | musictaxonomy/spotify/client.py | akurihara/music-taxonomy | 53eb2112e67b3ec9591411bc8117463af760adf8 | [
"MIT"
] | 3 | 2020-02-06T01:52:49.000Z | 2022-02-12T07:41:24.000Z | musictaxonomy/spotify/client.py | akurihara/musictaxonomy | 53eb2112e67b3ec9591411bc8117463af760adf8 | [
"MIT"
] | null | null | null | import json
import urllib.parse
from typing import Dict
from musictaxonomy.spotify import constants as spotify_constants
from settings import SPOTIFY_CLIENT_ID, SPOTIFY_CLIENT_SECRET
from tornado.httpclient import AsyncHTTPClient
__all__ = [
"get_access_token",
"get_current_user_profile",
"get_top_artists_in_time_range",
]
async def get_access_token(authorization_code: str, redirect_base_url: str) -> Dict:
    """Exchange an OAuth authorization code for a Spotify token response.

    Posts the standard authorization-code grant form to Spotify's token
    endpoint and returns the decoded JSON body.
    """
    form_fields = {
        "grant_type": "authorization_code",
        "code": authorization_code,
        "redirect_uri": "{}/callback/oauth".format(redirect_base_url),
        "client_id": SPOTIFY_CLIENT_ID,
        "client_secret": SPOTIFY_CLIENT_SECRET,
    }
    encoded_form = urllib.parse.urlencode(form_fields)

    response = await AsyncHTTPClient().fetch(
        spotify_constants.SPOTIFY_TOKEN_URL, method="POST", body=encoded_form
    )
    return json.loads(response.body)
async def get_current_user_profile(access_token: str) -> Dict:
    """Fetch the Spotify profile of the user that owns `access_token`."""
    auth_header = {"Authorization": "Bearer {}".format(access_token)}
    profile_url = "{base}/me".format(base=spotify_constants.SPOTIFY_API_BASE_URL)

    response = await AsyncHTTPClient().fetch(profile_url, method="GET", headers=auth_header)
    return json.loads(response.body)
async def get_top_artists_in_time_range(access_token: str, time_range: str) -> Dict:
    """Fetch the user's 50 most listened-to artists for the given time range."""
    params = {"time_range": time_range, "limit": 50}
    auth_header = {"Authorization": "Bearer {}".format(access_token)}
    top_artists_url = "{base}/me/top/artists?{query_string}".format(
        base=spotify_constants.SPOTIFY_API_BASE_URL,
        query_string=urllib.parse.urlencode(params),
    )

    response = await AsyncHTTPClient().fetch(top_artists_url, method="GET", headers=auth_header)
    return json.loads(response.body)
347d22c7d3534952ea871c3f76371d339bd0b66f | 1,130 | py | Python | examples/example_2.py | normanrichardson/StructGlassCalcs | 0862560d926d7a6bc44c4762de53c516e01ed782 | [
"MIT"
] | 9 | 2021-12-21T16:15:41.000Z | 2022-02-27T05:16:54.000Z | examples/example_2.py | normanrichardson/StructGlassCalcs | 0862560d926d7a6bc44c4762de53c516e01ed782 | [
"MIT"
] | null | null | null | examples/example_2.py | normanrichardson/StructGlassCalcs | 0862560d926d7a6bc44c4762de53c516e01ed782 | [
"MIT"
] | 1 | 2021-12-24T05:31:28.000Z | 2021-12-24T05:31:28.000Z | # Example from NCSEA Engineering Structural Glass Design Guide chapter 8
# example 3
import structuralglass.equiv_thick_models as et
import structuralglass.layers as lay
from structuralglass import Q_
# Plate dimensions
a = Q_(1, "m")
t1nom = Q_(12, "mm")
t2nom = Q_(12, "mm")
# Interlayer PVB at 30degC for 1 day load duration
G_pvb = Q_(0.281, "MPa")
t_pvb = Q_(0.89, "mm")
interlayer = lay.Interlayer.from_static(t_pvb, G_pvb)
# Plys
ply1 = lay.GlassPly.from_nominal_thickness(t1nom)
ply2 = lay.GlassPly.from_nominal_thickness(t2nom)
# Package specifying the model type
package = et.ShearTransferCoefMethod([ply1, interlayer, ply2], a)
# Results
print("-------------Package values-------------")
print("Effective displacement thickness:", end=" ")
print(f"{package.h_efw:.3f~P} ({ package.h_efw.to('in') :.3f~P})")
print("Eff. package thickness for stress with reference to ply1:", end=" ")
print(f"{package.h_efs[ply1]:.3f~P} ({package.h_efs[ply1].to('in'):.3f~P})")
print("Eff. package thickness for stress with reference to ply2:", end=" ")
print(f"{package.h_efs[ply2]:.3f~P} ({package.h_efs[ply2].to('in'):.3f~P})")
| 34.242424 | 76 | 0.707965 |
cac63251f48982c5bdf461150e409c22a86d5ae1 | 27,743 | py | Python | src/pybreaker.py | mnguyenngo/pybreaker | 5c632a9278bfecbe96f820976551c081bb18baab | [
"BSD-3-Clause"
] | null | null | null | src/pybreaker.py | mnguyenngo/pybreaker | 5c632a9278bfecbe96f820976551c081bb18baab | [
"BSD-3-Clause"
] | null | null | null | src/pybreaker.py | mnguyenngo/pybreaker | 5c632a9278bfecbe96f820976551c081bb18baab | [
"BSD-3-Clause"
] | null | null | null | #-*- coding:utf-8 -*-
"""
Threadsafe pure-Python implementation of the Circuit Breaker pattern, described
by Michael T. Nygard in his book 'Release It!'.
For more information on this and other patterns and best practices, buy the
book at http://pragprog.com/titles/mnee/release-it
"""
import types
import time
import calendar
import logging
from datetime import datetime, timedelta
from functools import wraps
import threading
import six
import sys
try:
from tornado import gen
HAS_TORNADO_SUPPORT = True
except ImportError:
HAS_TORNADO_SUPPORT = False
try:
from redis.exceptions import RedisError
HAS_REDIS_SUPPORT = True
except ImportError:
HAS_REDIS_SUPPORT = False
__all__ = (
'CircuitBreaker', 'CircuitBreakerListener', 'CircuitBreakerError',
'CircuitMemoryStorage', 'CircuitRedisStorage', 'STATE_OPEN', 'STATE_CLOSED',
'STATE_HALF_OPEN',)
STATE_OPEN = 'open'
STATE_CLOSED = 'closed'
STATE_HALF_OPEN = 'half-open'
class CircuitBreaker(object):
    """
    More abstractly, circuit breakers exists to allow one subsystem to fail
    without destroying the entire system.

    This is done by wrapping dangerous operations (typically integration points)
    with a component that can circumvent calls when the system is not healthy.

    This pattern is described by Michael T. Nygard in his book 'Release It!'.
    """

    def __init__(self, fail_max=5, reset_timeout=60, exclude=None,
                 listeners=None, state_storage=None, name=None):
        """
        Creates a new circuit breaker with the given parameters.
        """
        # All state transitions and listener mutations go through this lock.
        self._lock = threading.RLock()
        self._state_storage = state_storage or CircuitMemoryStorage(STATE_CLOSED)
        self._state = self._create_new_state(self.current_state)

        self._fail_max = fail_max
        self._reset_timeout = reset_timeout

        self._excluded_exceptions = list(exclude or [])
        self._listeners = list(listeners or [])
        self._name = name

    @property
    def fail_counter(self):
        """
        Returns the current number of consecutive failures.
        """
        return self._state_storage.counter

    @property
    def fail_max(self):
        """
        Returns the maximum number of failures tolerated before the circuit is
        opened.
        """
        return self._fail_max

    @fail_max.setter
    def fail_max(self, number):
        """
        Sets the maximum `number` of failures tolerated before the circuit is
        opened.
        """
        self._fail_max = number

    @property
    def reset_timeout(self):
        """
        Once this circuit breaker is opened, it should remain opened until the
        timeout period, in seconds, elapses.
        """
        return self._reset_timeout

    @reset_timeout.setter
    def reset_timeout(self, timeout):
        """
        Sets the `timeout` period, in seconds, this circuit breaker should be
        kept open.
        """
        self._reset_timeout = timeout

    def _create_new_state(self, new_state, prev_state=None, notify=False):
        """
        Return state object from state string, i.e.,
        'closed' -> <CircuitClosedState>
        """
        state_map = {
            STATE_CLOSED: CircuitClosedState,
            STATE_OPEN: CircuitOpenState,
            STATE_HALF_OPEN: CircuitHalfOpenState,
        }
        try:
            cls = state_map[new_state]
            return cls(self, prev_state=prev_state, notify=notify)
        except KeyError:
            msg = "Unknown state {!r}, valid states: {}"
            raise ValueError(msg.format(new_state, ', '.join(state_map)))

    @property
    def state(self):
        """
        Update (if needed) and returns the cached state object.
        """
        # Ensure cached state is up-to-date
        if self.current_state != self._state.name:
            # If cached state is out-of-date, that means that it was likely
            # changed elsewhere (e.g. another process instance). We still send
            # out a notification, informing others that this particular circuit
            # breaker instance noticed the changed circuit.
            self.state = self.current_state
        return self._state

    @state.setter
    def state(self, state_str):
        """
        Set cached state and notify listeners of newly cached state.
        """
        with self._lock:
            self._state = self._create_new_state(
                state_str, prev_state=self._state, notify=True)

    @property
    def current_state(self):
        """
        Returns a string that identifies the state of the circuit breaker as
        reported by the _state_storage. i.e., 'closed', 'open', 'half-open'.
        """
        return self._state_storage.state

    @property
    def excluded_exceptions(self):
        """
        Returns the list of excluded exceptions, e.g., exceptions that should
        not be considered system errors by this circuit breaker.
        """
        return tuple(self._excluded_exceptions)

    def add_excluded_exception(self, exception):
        """
        Adds an exception to the list of excluded exceptions.
        """
        with self._lock:
            self._excluded_exceptions.append(exception)

    def add_excluded_exceptions(self, *exceptions):
        """
        Adds exceptions to the list of excluded exceptions.
        """
        for exc in exceptions:
            self.add_excluded_exception(exc)

    def remove_excluded_exception(self, exception):
        """
        Removes an exception from the list of excluded exceptions.
        """
        with self._lock:
            self._excluded_exceptions.remove(exception)

    def _inc_counter(self):
        """
        Increments the counter of failed calls.
        """
        self._state_storage.increment_counter()

    def is_system_error(self, exception):
        """
        Returns whether the exception `exception` is considered a signal of
        system malfunction. Business exceptions should not cause this circuit
        breaker to open.
        """
        exception_type = type(exception)
        for exclusion in self._excluded_exceptions:
            if type(exclusion) is type:
                # Exclusion given as an exception class: match by subclass.
                if issubclass(exception_type, exclusion):
                    return False
            elif callable(exclusion):
                # Exclusion given as a predicate over the exception instance.
                if exclusion(exception):
                    return False
        return True

    def call(self, func, *args, **kwargs):
        """
        Calls `func` with the given `args` and `kwargs` according to the rules
        implemented by the current state of this circuit breaker.
        """
        with self._lock:
            return self.state.call(func, *args, **kwargs)

    def call_async(self, func, *args, **kwargs):
        """
        Calls async `func` with the given `args` and `kwargs` according to the rules
        implemented by the current state of this circuit breaker.

        Return a closure to prevent import errors when using without tornado present
        """

        @gen.coroutine
        def wrapped():
            with self._lock:
                ret = yield self.state.call_async(func, *args, **kwargs)
                raise gen.Return(ret)

        return wrapped()

    def open(self):
        """
        Opens the circuit, e.g., the following calls will immediately fail
        until timeout elapses.
        """
        with self._lock:
            self._state_storage.opened_at = datetime.utcnow()
            self.state = self._state_storage.state = STATE_OPEN

    def half_open(self):
        """
        Half-opens the circuit, e.g. lets the following call pass through and
        opens the circuit if the call fails (or closes the circuit if the call
        succeeds).
        """
        with self._lock:
            self.state = self._state_storage.state = STATE_HALF_OPEN

    def close(self):
        """
        Closes the circuit, e.g. lets the following calls execute as usual.
        """
        with self._lock:
            self.state = self._state_storage.state = STATE_CLOSED

    def __call__(self, *call_args, **call_kwargs):
        """
        Returns a wrapper that calls the function `func` according to the rules
        implemented by the current state of this circuit breaker.

        Optionally takes the keyword argument `__pybreaker_call_coroutine`,
        which will will call `func` as a Tornado co-routine.
        """
        call_async = call_kwargs.pop('__pybreaker_call_async', False)

        if call_async and not HAS_TORNADO_SUPPORT:
            raise ImportError('No module named tornado')

        def _outer_wrapper(func):
            @wraps(func)
            def _inner_wrapper(*args, **kwargs):
                if call_async:
                    return self.call_async(func, *args, **kwargs)
                return self.call(func, *args, **kwargs)
            return _inner_wrapper

        # Supports use both as a bare decorator (@cb) and called (@cb()).
        if call_args:
            return _outer_wrapper(*call_args)
        return _outer_wrapper

    @property
    def listeners(self):
        """
        Returns the registered listeners as a tuple.
        """
        return tuple(self._listeners)

    def add_listener(self, listener):
        """
        Registers a listener for this circuit breaker.
        """
        with self._lock:
            self._listeners.append(listener)

    def add_listeners(self, *listeners):
        """
        Registers listeners for this circuit breaker.
        """
        for listener in listeners:
            self.add_listener(listener)

    def remove_listener(self, listener):
        """
        Unregisters a listener of this circuit breaker.
        """
        with self._lock:
            self._listeners.remove(listener)

    @property
    def name(self):
        """
        Returns the name of this circuit breaker. Useful for logging.
        """
        return self._name

    @name.setter
    def name(self, name):
        """
        Set the name of this circuit breaker.
        """
        self._name = name
class CircuitBreakerStorage(object):
    """
    Defines the underlying storage for a circuit breaker - the underlying
    implementation should be in a subclass that overrides the method this
    class defines.
    """

    def __init__(self, name):
        """
        Creates a new instance identified by `name`.
        """
        self._name = name

    @property
    def name(self):
        """
        Returns a human friendly name that identifies this state.
        """
        return self._name

    @property
    def state(self):
        """
        Override this method to retrieve the current circuit breaker state.
        """
        pass

    @state.setter
    def state(self, state):
        """
        Override this method to set the current circuit breaker state.
        """
        pass

    def increment_counter(self):
        """
        Override this method to increase the failure counter by one.
        """
        pass

    def reset_counter(self):
        """
        Override this method to set the failure counter to zero.
        """
        pass

    @property
    def counter(self):
        """
        Override this method to retrieve the current value of the failure counter.
        """
        pass

    @property
    def opened_at(self):
        """
        Override this method to retrieve the most recent value of when the
        circuit was opened.
        """
        pass

    @opened_at.setter
    def opened_at(self, datetime):
        """
        Override this method to set the most recent value of when the circuit
        was opened.
        """
        pass
class CircuitMemoryStorage(CircuitBreakerStorage):
    """
    Implements a `CircuitBreakerStorage` that lives purely in local process
    memory (not shared between processes).
    """

    def __init__(self, state):
        """
        Creates a new in-memory storage starting out in the given `state`.
        """
        super(CircuitMemoryStorage, self).__init__('memory')
        self._current_state = state
        self._opened_timestamp = None
        self._failure_count = 0

    @property
    def state(self):
        """
        Returns the current circuit breaker state.
        """
        return self._current_state

    @state.setter
    def state(self, state):
        """
        Sets the current circuit breaker state to `state`.
        """
        self._current_state = state

    def increment_counter(self):
        """
        Bumps the failure counter by one.
        """
        self._failure_count = self._failure_count + 1

    def reset_counter(self):
        """
        Resets the failure counter back to zero.
        """
        self._failure_count = 0

    @property
    def counter(self):
        """
        Returns the current value of the failure counter.
        """
        return self._failure_count

    @property
    def opened_at(self):
        """
        Returns the most recent time the circuit was opened.
        """
        return self._opened_timestamp

    @opened_at.setter
    def opened_at(self, datetime):
        """
        Records `datetime` as the most recent time the circuit was opened.
        """
        self._opened_timestamp = datetime
class CircuitRedisStorage(CircuitBreakerStorage):
    """
    Implements a `CircuitBreakerStorage` using redis.
    """

    BASE_NAMESPACE = 'pybreaker'

    logger = logging.getLogger(__name__)

    def __init__(self, state, redis_object, namespace=None, fallback_circuit_state=STATE_CLOSED):
        """
        Creates a new instance with the given `state` and `redis` object. The
        redis object should be similar to pyredis' StrictRedis class. If there
        are any connection issues with redis, the `fallback_circuit_state` is
        used to determine the state of the circuit.
        """

        # Module does not exist, so this feature is not available
        if not HAS_REDIS_SUPPORT:
            raise ImportError("CircuitRedisStorage can only be used if the required dependencies exist")

        super(CircuitRedisStorage, self).__init__('redis')

        try:
            self.RedisError = __import__('redis').exceptions.RedisError
        except ImportError:
            # Module does not exist, so this feature is not available
            raise ImportError("CircuitRedisStorage can only be used if 'redis' is available")

        self._redis = redis_object
        self._namespace_name = namespace
        self._fallback_circuit_state = fallback_circuit_state
        self._initial_state = str(state)

        self._initialize_redis_state(self._initial_state)

    def _initialize_redis_state(self, state):
        # setnx: only seed the keys if another process has not already done so.
        self._redis.setnx(self._namespace('fail_counter'), 0)
        self._redis.setnx(self._namespace('state'), state)

    @property
    def state(self):
        """
        Returns the current circuit breaker state.

        If the circuit breaker state on Redis is missing, re-initialize it
        with the fallback circuit state and reset the fail counter.
        """
        try:
            state_bytes = self._redis.get(self._namespace('state'))
        except self.RedisError:
            self.logger.error('RedisError: falling back to default circuit state', exc_info=True)
            return self._fallback_circuit_state

        state = self._fallback_circuit_state
        if state_bytes is not None:
            state = state_bytes.decode('utf-8')
        else:
            # state retrieved from redis was missing, so we re-initialize
            # the circuit breaker state on redis
            self._initialize_redis_state(self._fallback_circuit_state)

        return state

    @state.setter
    def state(self, state):
        """
        Set the current circuit breaker state to `state`.
        """
        try:
            self._redis.set(self._namespace('state'), str(state))
        except self.RedisError:
            # Best-effort: a redis outage must not break the caller.
            self.logger.error('RedisError', exc_info=True)
            pass

    def increment_counter(self):
        """
        Increases the failure counter by one.
        """
        try:
            self._redis.incr(self._namespace('fail_counter'))
        except self.RedisError:
            self.logger.error('RedisError', exc_info=True)
            pass

    def reset_counter(self):
        """
        Sets the failure counter to zero.
        """
        try:
            self._redis.set(self._namespace('fail_counter'), 0)
        except self.RedisError:
            self.logger.error('RedisError', exc_info=True)
            pass

    @property
    def counter(self):
        """
        Returns the current value of the failure counter.
        """
        try:
            value = self._redis.get(self._namespace('fail_counter'))
            if value:
                return int(value)
            else:
                return 0
        except self.RedisError:
            self.logger.error('RedisError: Assuming no errors', exc_info=True)
            return 0

    @property
    def opened_at(self):
        """
        Returns a datetime object of the most recent value of when the circuit
        was opened.
        """
        try:
            timestamp = self._redis.get(self._namespace('opened_at'))
            if timestamp:
                # Stored as unix epoch seconds; convert back to a naive
                # UTC datetime.
                return datetime(*time.gmtime(int(timestamp))[:6])
        except self.RedisError:
            self.logger.error('RedisError', exc_info=True)
            return None

    @opened_at.setter
    def opened_at(self, now):
        """
        Atomically sets the most recent value of when the circuit was opened
        to `now`. Stored in redis as a simple integer of unix epoch time.
        To avoid timezone issues between different systems, the passed in
        datetime should be in UTC.
        """
        try:
            key = self._namespace('opened_at')

            def set_if_greater(pipe):
                # Run inside a WATCH/MULTI transaction so concurrent writers
                # can only move the timestamp forward, never backwards.
                current_value = pipe.get(key)
                next_value = int(calendar.timegm(now.timetuple()))
                pipe.multi()
                if not current_value or next_value > int(current_value):
                    pipe.set(key, next_value)

            self._redis.transaction(set_if_greater, key)
        except self.RedisError:
            self.logger.error('RedisError', exc_info=True)
            pass

    def _namespace(self, key):
        # Builds '<namespace>:pybreaker:<key>' (namespace part optional).
        name_parts = [self.BASE_NAMESPACE, key]
        if self._namespace_name:
            name_parts.insert(0, self._namespace_name)
        return ':'.join(name_parts)
class CircuitBreakerListener(object):
    """
    Listener class used to plug code to a ``CircuitBreaker`` instance when
    certain events happen.  All hooks are no-ops by default; subclasses
    override only the events they care about.
    """

    def before_call(self, cb, func, *args, **kwargs):
        """
        This callback function is called before the circuit breaker `cb` calls
        `fn`.
        """
        pass

    def failure(self, cb, exc):
        """
        This callback function is called when a function called by the circuit
        breaker `cb` fails.
        """
        pass

    def success(self, cb):
        """
        This callback function is called when a function called by the circuit
        breaker `cb` succeeds.
        """
        pass

    def state_change(self, cb, old_state, new_state):
        """
        This callback function is called when the state of the circuit breaker
        `cb` state changes.
        """
        pass
class CircuitBreakerState(object):
    """
    Implements the behavior needed by all circuit breaker states.
    """

    def __init__(self, cb, name):
        """
        Creates a new instance associated with the circuit breaker `cb` and
        identified by `name`.
        """
        self._breaker = cb
        self._name = name

    @property
    def name(self):
        """
        Returns a human friendly name that identifies this state.
        """
        return self._name

    def _handle_error(self, exc):
        """
        Handles a failed call to the guarded operation.
        """
        if self._breaker.is_system_error(exc):
            self._breaker._inc_counter()
            for listener in self._breaker.listeners:
                listener.failure(self._breaker, exc)
            self.on_failure(exc)
        else:
            # Excluded (business) exceptions count as a success for the
            # breaker, but are still re-raised to the caller below.
            self._handle_success()
        raise exc

    def _handle_success(self):
        """
        Handles a successful call to the guarded operation.
        """
        self._breaker._state_storage.reset_counter()
        self.on_success()
        for listener in self._breaker.listeners:
            listener.success(self._breaker)

    def call(self, func, *args, **kwargs):
        """
        Calls `func` with the given `args` and `kwargs`, and updates the
        circuit breaker state according to the result.
        """
        ret = None

        self.before_call(func, *args, **kwargs)
        for listener in self._breaker.listeners:
            listener.before_call(self._breaker, func, *args, **kwargs)

        try:
            ret = func(*args, **kwargs)
            if isinstance(ret, types.GeneratorType):
                # Generators are wrapped so success/failure is only decided
                # once the generator is actually consumed.
                return self.generator_call(ret)

        except BaseException as e:
            self._handle_error(e)
        else:
            self._handle_success()
        return ret

    def call_async(self, func, *args, **kwargs):
        """
        Calls async `func` with the given `args` and `kwargs`, and updates the
        circuit breaker state according to the result.

        Return a closure to prevent import errors when using without tornado present
        """

        @gen.coroutine
        def wrapped():
            ret = None

            self.before_call(func, *args, **kwargs)
            for listener in self._breaker.listeners:
                listener.before_call(self._breaker, func, *args, **kwargs)

            try:
                ret = yield func(*args, **kwargs)
                if isinstance(ret, types.GeneratorType):
                    raise gen.Return(self.generator_call(ret))

            except BaseException as e:
                self._handle_error(e)
            else:
                self._handle_success()
            raise gen.Return(ret)

        return wrapped()

    def generator_call(self, wrapped_generator):
        """
        Proxies `wrapped_generator`, reporting success once it is exhausted
        and failure if it raises anything else.
        """
        try:
            value = yield next(wrapped_generator)
            while True:
                value = yield wrapped_generator.send(value)
        except StopIteration:
            self._handle_success()
            raise
        except BaseException as e:
            self._handle_error(e)

    def before_call(self, func, *args, **kwargs):
        """
        Override this method to be notified before a call to the guarded
        operation is attempted.
        """
        pass

    def on_success(self):
        """
        Override this method to be notified when a call to the guarded
        operation succeeds.
        """
        pass

    def on_failure(self, exc):
        """
        Override this method to be notified when a call to the guarded
        operation fails.
        """
        pass
class CircuitClosedState(CircuitBreakerState):
    """
    In the normal "closed" state, the circuit breaker executes operations as
    usual. If the call succeeds, nothing happens. If it fails, however, the
    circuit breaker makes a note of the failure.
    Once the number of failures exceeds a threshold, the circuit breaker trips
    and "opens" the circuit.
    """
    def __init__(self, cb, prev_state=None, notify=False):
        """
        Moves the given circuit breaker `cb` to the "closed" state.
        """
        super(CircuitClosedState, self).__init__(cb, STATE_CLOSED)
        if notify:
            # We only reset the counter if notify is True, otherwise the CircuitBreaker
            # will lose it's failure count due to a second CircuitBreaker being created
            # using the same _state_storage object, or if the _state_storage objects
            # share a central source of truth (as would be the case with the redis
            # storage).
            self._breaker._state_storage.reset_counter()
            for listener in self._breaker.listeners:
                listener.state_change(self._breaker, prev_state, self)
    def on_failure(self, exc):
        """
        Moves the circuit breaker to the "open" state once the failures
        threshold is reached.
        """
        if self._breaker._state_storage.counter >= self._breaker.fail_max:
            self._breaker.open()
            error_msg = 'Failures threshold reached, circuit breaker opened'
            # six.reraise preserves the original traceback while swapping in
            # a CircuitBreakerError for the caller to handle.
            six.reraise(CircuitBreakerError, CircuitBreakerError(error_msg), sys.exc_info()[2])
class CircuitOpenState(CircuitBreakerState):
    """
    While the circuit is "open", every call fails fast with
    ``CircuitBreakerError`` instead of running the real operation.  Once the
    reset timeout has elapsed, the breaker transitions to "half-open" and a
    single trial call is allowed through.
    """

    def __init__(self, cb, prev_state=None, notify=False):
        """
        Moves the given circuit breaker `cb` to the "open" state.
        """
        super(CircuitOpenState, self).__init__(cb, STATE_OPEN)
        if notify:
            for listener in self._breaker.listeners:
                listener.state_change(self._breaker, prev_state, self)

    def before_call(self, func, *args, **kwargs):
        """
        Fail fast while the reset timeout has not elapsed; afterwards switch
        the breaker to "half-open" and attempt the call through it.
        """
        opened_at = self._breaker._state_storage.opened_at
        if opened_at:
            deadline = opened_at + timedelta(seconds=self._breaker.reset_timeout)
            if datetime.utcnow() < deadline:
                raise CircuitBreakerError('Timeout not elapsed yet, circuit breaker still open')
        self._breaker.half_open()
        return self._breaker.call(func, *args, **kwargs)

    def call(self, func, *args, **kwargs):
        """
        Delegates to ``before_call`` so that the timeout check (and possible
        half-open trial call) also happens for direct calls.
        """
        return self.before_call(func, *args, **kwargs)
class CircuitHalfOpenState(CircuitBreakerState):
    """
    In the "half-open" state a single trial call is let through.  Success
    closes the circuit again; failure re-opens it until the next reset
    timeout elapses.
    """

    def __init__(self, cb, prev_state=None, notify=False):
        """
        Moves the given circuit breaker `cb` to the "half-open" state.
        """
        super(CircuitHalfOpenState, self).__init__(cb, STATE_HALF_OPEN)
        if notify:
            # NOTE(review): this accesses the private ``_listeners`` attribute
            # while the other states use the ``listeners`` property — confirm
            # whether that inconsistency is intentional.
            for listener in self._breaker._listeners:
                listener.state_change(self._breaker, prev_state, self)

    def on_failure(self, exc):
        """
        Re-opens the circuit breaker, raising CircuitBreakerError with the
        original traceback attached.
        """
        self._breaker.open()
        six.reraise(
            CircuitBreakerError,
            CircuitBreakerError('Trial call failed, circuit breaker opened'),
            sys.exc_info()[2],
        )

    def on_success(self):
        """
        Closes the circuit breaker.
        """
        self._breaker.close()
class CircuitBreakerError(Exception):
    """
    Raised when a call is rejected because the circuit is open, so that
    callers can distinguish breaker rejections from failures of the guarded
    service itself.
    """
| 31.242117 | 117 | 0.610172 |
199bb9f42eaacfcfa748753210c8f6818db8735d | 607 | py | Python | pythod-demo/jichu/dict.py | gongchangwangpi/zb-demo | 8052860e7e3f4e3e9d9b1ce6e15747e574dd900a | [
"Apache-2.0"
] | 1 | 2018-05-11T04:30:05.000Z | 2018-05-11T04:30:05.000Z | pythod-demo/jichu/dict.py | gongchangwangpi/zb-demo | 8052860e7e3f4e3e9d9b1ce6e15747e574dd900a | [
"Apache-2.0"
] | 11 | 2020-06-08T09:52:52.000Z | 2022-02-26T00:46:25.000Z | pythod-demo/jichu/dict.py | 363230482/zb-demo | 8052860e7e3f4e3e9d9b1ce6e15747e574dd900a | [
"Apache-2.0"
] | null | null | null | #-*- coding = utf-8 -*-
# @author zhangbo
# @date 2021/3/27 18:59:20
# Demo script: basic dict operations (lookup, defaults, update, delete).
zhang = {"name":"zhangsan", "age":18}
print(zhang)
print(zhang["name"])
# Indexing a missing key raises KeyError; use get() for safe access instead.
# print(zhang["sex"])
# get() on a missing key returns None.
print(zhang.get("sex"))
# get() with an explicit default value.
print(zhang.get("sex", "M"))
# pop() removes the key and returns its value.
# print(zhang.pop("age"))
# print(zhang)
print(zhang.keys())
print(zhang.items())
zhang.update({"age":20})
print(zhang)
zhang["sex"] = "M"
print(zhang)
zhang["sex"] = "F"
print(zhang)
del zhang["sex"]
print(zhang)
zhang.clear()
print(zhang)
# NameError: name 'zhang' is not defined
# del zhang
# print(zhang)
| 14.804878 | 40 | 0.650741 |
6e48ab79227bb2c132a8f23588d320666a62293e | 2,550 | py | Python | lib/solutions/CHK/checkout.py | DPNT-Sourcecode/CHK-apll01 | cefd58c85575e8c775822837d8b445345d82e687 | [
"Apache-2.0"
] | null | null | null | lib/solutions/CHK/checkout.py | DPNT-Sourcecode/CHK-apll01 | cefd58c85575e8c775822837d8b445345d82e687 | [
"Apache-2.0"
] | null | null | null | lib/solutions/CHK/checkout.py | DPNT-Sourcecode/CHK-apll01 | cefd58c85575e8c775822837d8b445345d82e687 | [
"Apache-2.0"
] | null | null | null | import operator
class Checkout(object):
    """Supermarket checkout: accumulates SKUs and totals them using the
    shop's free-item and multi-buy offers.
    """
    def __init__(self, shop):
        # shop provides prices and offers: get_price(sku) -> int (-1 for
        # unknown), get_free_offers(), get_multi_offers().
        self.shop = shop
        # SKU -> quantity currently in the basket.
        self.items = dict()
    def add_item(self, sku):
        """Add one unit of *sku* to the basket."""
        if sku in self.items:
            self.items[sku] += 1
        else:
            self.items[sku] = 1
    def remove_item(self, sku):
        """Remove one unit of *sku*; drop the key when the count hits zero."""
        if sku in self.items:
            self.items[sku] -= 1
            if self.items[sku] == 0:
                self.items.pop(sku)
    def get_total(self):
        """Return the basket total, or -1 if any SKU has no price.

        Offers are applied on a working copy of the basket in a fixed
        precedence: free-item offers, then multi-buy offers, then the
        remaining items at list price.
        """
        total = 0
        # Work on a copy so applying offers does not mutate the basket.
        items = self.items.copy()
        free_offers = self.shop.get_free_offers()
        multi_offers = self.shop.get_multi_offers()
        #apply free offers first
        for free_offer in free_offers:
            sku = free_offer.get_sku()
            sku_price = self.shop.get_price(sku)
            if sku_price == -1:
                return -1
            quantity = free_offer.get_quantity()
            freebie_sku = free_offer.get_free_sku()
            if sku in items and freebie_sku in items:
                # Each `quantity` units of sku are charged and one freebie
                # is removed from the working basket.
                while quantity <= items[sku] and items[freebie_sku]>0:
                    total += quantity*sku_price
                    items[sku]-= quantity
                    items[freebie_sku]-=1
        #apply multibuy next
        for multi_offer in multi_offers:
            sku = multi_offer.get_sku()
            quantity = multi_offer.get_quantity()
            multi_price = multi_offer.get_price()
            if type(sku) == list:
                # Group offer: any `quantity` items from the SKU group.
                # NOTE(review): the ordering below only compares against
                # sku_list[0], so sku_list is not reliably sorted by price;
                # the customer may not get the most favourable grouping —
                # verify against the kata's expected results.
                sku_list= []
                for sku_item in sku:
                    if sku_item in items:
                        for i in range(items[sku_item]):
                            if len(sku_list) != 0 and self.shop.get_price(sku_list[0]) > self.shop.get_price(sku_item):
                                sku_list.append(sku_item)
                            else:
                                sku_list.insert(0,sku_item)
                while quantity<= len(sku_list):
                    total += multi_price
                    for i in range(quantity):
                        items[sku_list[0]]-=1
                        sku_list.pop(0)
            elif sku in items:
                # Simple multi-buy on a single SKU.
                while quantity<= items[sku]:
                    total += multi_price
                    items[sku] -= quantity
        #calculate rest of checkout
        for sku in items:
            sku_price = self.shop.get_price(sku)
            if sku_price == -1:
                return -1
            if items[sku] >=0: total=total+(items[sku]*sku_price)
        return(total)
b1565868a42aa52eab226672cba375196a69658a | 8,276 | py | Python | modules/models/lseg_blocks.py | whiteking64/lang-seg | 9d063b126f1b64e38ddb20cc75fc74435bfdcbd3 | [
"MIT"
] | 202 | 2022-01-11T02:34:40.000Z | 2022-03-31T23:12:11.000Z | modules/models/lseg_blocks.py | whiteking64/lang-seg | 9d063b126f1b64e38ddb20cc75fc74435bfdcbd3 | [
"MIT"
] | 17 | 2022-01-11T19:51:03.000Z | 2022-03-30T03:14:37.000Z | modules/models/lseg_blocks.py | whiteking64/lang-seg | 9d063b126f1b64e38ddb20cc75fc74435bfdcbd3 | [
"MIT"
] | 19 | 2022-01-11T10:02:31.000Z | 2022-03-23T12:41:36.000Z | import torch
import torch.nn as nn
from .lseg_vit import (
_make_pretrained_clip_vitl16_384,
_make_pretrained_clip_vitb32_384,
_make_pretrained_clipRN50x16_vitl16_384,
forward_vit,
)
def _make_encoder(
backbone,
features,
use_pretrained=True,
groups=1,
expand=False,
exportable=True,
hooks=None,
use_vit_only=False,
use_readout="ignore",
enable_attention_hooks=False,
):
if backbone == "clip_vitl16_384":
clip_pretrained, pretrained = _make_pretrained_clip_vitl16_384(
use_pretrained,
hooks=hooks,
use_readout=use_readout,
enable_attention_hooks=enable_attention_hooks,
)
scratch = _make_scratch(
[256, 512, 1024, 1024], features, groups=groups, expand=expand
)
elif backbone == "clipRN50x16_vitl16_384":
clip_pretrained, pretrained = _make_pretrained_clipRN50x16_vitl16_384(
use_pretrained,
hooks=hooks,
use_readout=use_readout,
enable_attention_hooks=enable_attention_hooks,
)
scratch = _make_scratch(
[256, 512, 1024, 1024], features, groups=groups, expand=expand
)
elif backbone == "clip_vitb32_384":
clip_pretrained, pretrained = _make_pretrained_clip_vitb32_384(
use_pretrained,
hooks=hooks,
use_readout=use_readout,
)
scratch = _make_scratch(
[96, 192, 384, 768], features, groups=groups, expand=expand
)
else:
print(f"Backbone '{backbone}' not implemented")
assert False
return clip_pretrained, pretrained, scratch
def _make_scratch(in_shape, out_shape, groups=1, expand=False):
scratch = nn.Module()
out_shape1 = out_shape
out_shape2 = out_shape
out_shape3 = out_shape
out_shape4 = out_shape
if expand == True:
out_shape1 = out_shape
out_shape2 = out_shape * 2
out_shape3 = out_shape * 4
out_shape4 = out_shape * 8
scratch.layer1_rn = nn.Conv2d(
in_shape[0],
out_shape1,
kernel_size=3,
stride=1,
padding=1,
bias=False,
groups=groups,
)
scratch.layer2_rn = nn.Conv2d(
in_shape[1],
out_shape2,
kernel_size=3,
stride=1,
padding=1,
bias=False,
groups=groups,
)
scratch.layer3_rn = nn.Conv2d(
in_shape[2],
out_shape3,
kernel_size=3,
stride=1,
padding=1,
bias=False,
groups=groups,
)
scratch.layer4_rn = nn.Conv2d(
in_shape[3],
out_shape4,
kernel_size=3,
stride=1,
padding=1,
bias=False,
groups=groups,
)
return scratch
class Interpolate(nn.Module):
    """Thin ``nn.Module`` wrapper around ``nn.functional.interpolate``."""

    def __init__(self, scale_factor, mode, align_corners=False):
        """Store the resampling configuration.

        Args:
            scale_factor (float): multiplier applied to the spatial dims
            mode (str): interpolation mode (e.g. "nearest", "bilinear")
            align_corners (bool): forwarded verbatim to ``interpolate``
        """
        super(Interpolate, self).__init__()
        self.interp = nn.functional.interpolate
        self.scale_factor = scale_factor
        self.mode = mode
        self.align_corners = align_corners

    def forward(self, x):
        """Resample *x* by ``scale_factor`` and return the result."""
        return self.interp(
            x,
            scale_factor=self.scale_factor,
            mode=self.mode,
            align_corners=self.align_corners,
        )
class ResidualConvUnit(nn.Module):
    """Two 3x3 convolutions with ReLU pre-activations and a skip connection."""

    def __init__(self, features):
        """
        Args:
            features (int): number of input/output channels
        """
        super().__init__()
        self.conv1 = nn.Conv2d(features, features, kernel_size=3, stride=1, padding=1, bias=True)
        self.conv2 = nn.Conv2d(features, features, kernel_size=3, stride=1, padding=1, bias=True)
        # NOTE: inplace ReLU mutates its input tensor, matching the original.
        self.relu = nn.ReLU(inplace=True)

    def forward(self, x):
        """Return ``conv2(relu(conv1(relu(x)))) + x``."""
        residual = self.conv2(self.relu(self.conv1(self.relu(x))))
        return residual + x
class FeatureFusionBlock(nn.Module):
    """Fuse up to two feature maps and upsample the result by 2x."""

    def __init__(self, features):
        """
        Args:
            features (int): channel count of every input/output map
        """
        super(FeatureFusionBlock, self).__init__()
        self.resConfUnit1 = ResidualConvUnit(features)
        self.resConfUnit2 = ResidualConvUnit(features)

    def forward(self, *xs):
        """Fuse ``xs`` (one or two tensors) and return the 2x-upsampled map."""
        fused = xs[0]
        if len(xs) == 2:
            # In-place accumulation, matching the original ``+=``.
            fused += self.resConfUnit1(xs[1])
        fused = self.resConfUnit2(fused)
        return nn.functional.interpolate(
            fused, scale_factor=2, mode="bilinear", align_corners=True
        )
class ResidualConvUnit_custom(nn.Module):
    """Residual unit with a configurable activation and optional batch norm."""

    def __init__(self, features, activation, bn):
        """
        Args:
            features (int): number of input/output channels
            activation: activation module applied before each convolution
            bn (bool): insert ``BatchNorm2d`` after each convolution when True
        """
        super().__init__()
        self.bn = bn
        self.groups = 1
        conv_kwargs = dict(
            kernel_size=3, stride=1, padding=1, bias=not self.bn, groups=self.groups
        )
        self.conv1 = nn.Conv2d(features, features, **conv_kwargs)
        self.conv2 = nn.Conv2d(features, features, **conv_kwargs)
        if self.bn == True:
            self.bn1 = nn.BatchNorm2d(features)
            self.bn2 = nn.BatchNorm2d(features)
        self.activation = activation
        # FloatFunctional so the residual add stays quantization-friendly.
        self.skip_add = nn.quantized.FloatFunctional()

    def forward(self, x):
        """Return the residual output of the activation/conv(/bn) stack."""
        out = self.conv1(self.activation(x))
        if self.bn == True:
            out = self.bn1(out)
        out = self.conv2(self.activation(out))
        if self.bn == True:
            out = self.bn2(out)
        if self.groups > 1:
            # NOTE(review): dead branch — ``groups`` is hard-coded to 1 above
            # and ``conv_merge`` is never defined; kept verbatim for fidelity.
            out = self.conv_merge(out)
        return self.skip_add.add(out, x)
class FeatureFusionBlock_custom(nn.Module):
    """Feature fusion block using custom residual units and a 1x1 output conv."""

    def __init__(
        self,
        features,
        activation,
        deconv=False,
        bn=False,
        expand=False,
        align_corners=True,
    ):
        """
        Args:
            features (int): input channel count
            activation: activation module for the residual units
            deconv (bool): stored but unused by ``forward``
            bn (bool): enable batch norm inside the residual units
            expand (bool): halve the output channels when True
            align_corners (bool): forwarded to the upsampling call
        """
        super(FeatureFusionBlock_custom, self).__init__()
        self.deconv = deconv
        self.align_corners = align_corners
        self.groups = 1
        self.expand = expand
        out_features = features // 2 if self.expand == True else features
        self.out_conv = nn.Conv2d(
            features,
            out_features,
            kernel_size=1,
            stride=1,
            padding=0,
            bias=True,
            groups=1,
        )
        self.resConfUnit1 = ResidualConvUnit_custom(features, activation, bn)
        self.resConfUnit2 = ResidualConvUnit_custom(features, activation, bn)
        self.skip_add = nn.quantized.FloatFunctional()

    def forward(self, *xs):
        """Fuse one or two feature maps, upsample 2x, and project channels."""
        fused = xs[0]
        if len(xs) == 2:
            fused = self.skip_add.add(fused, self.resConfUnit1(xs[1]))
        fused = self.resConfUnit2(fused)
        fused = nn.functional.interpolate(
            fused, scale_factor=2, mode="bilinear", align_corners=self.align_corners
        )
        return self.out_conv(fused)
| 22.988889 | 85 | 0.549903 |
a952b2cfcc61ff66e35b9ebf64af0c42944ee31c | 9,786 | py | Python | posts/models/post.py | lobotomoe/vas3k.club | 21786cc28072a816ca70b5b84247e8e63bd18b55 | [
"MIT"
] | null | null | null | posts/models/post.py | lobotomoe/vas3k.club | 21786cc28072a816ca70b5b84247e8e63bd18b55 | [
"MIT"
] | null | null | null | posts/models/post.py | lobotomoe/vas3k.club | 21786cc28072a816ca70b5b84247e8e63bd18b55 | [
"MIT"
] | null | null | null | from datetime import datetime, timedelta
from uuid import uuid4
from django.conf import settings
from django.db import models
from django.db.models import F
from django.template.defaultfilters import truncatechars
from django.urls import reverse
from django.utils.html import strip_tags
from simple_history.models import HistoricalRecords
from common.models import ModelDiffMixin
from posts.models.topics import Topic
from users.models.user import User
from utils.slug import generate_unique_slug
class Post(models.Model, ModelDiffMixin):
    """Club post: text, intro, link, question, event, battle, etc.

    Holds visibility/moderation flags, denormalized counters
    (comments, views, upvotes) and offer-type metadata in JSON fields.
    """
    TYPE_POST = "post"
    TYPE_INTRO = "intro"
    TYPE_LINK = "link"
    TYPE_QUESTION = "question"
    TYPE_PAIN = "pain"
    TYPE_IDEA = "idea"
    TYPE_PROJECT = "project"
    TYPE_EVENT = "event"
    TYPE_REFERRAL = "referral"
    TYPE_BATTLE = "battle"
    TYPE_WEEKLY_DIGEST = "weekly_digest"
    TYPES = [
        (TYPE_POST, "Текст"),
        (TYPE_INTRO, "#intro"),
        (TYPE_LINK, "Ссылка"),
        (TYPE_QUESTION, "Вопрос"),
        (TYPE_PAIN, "Боль"),
        (TYPE_IDEA, "Идея"),
        (TYPE_PROJECT, "Проект"),
        (TYPE_EVENT, "Событие"),
        (TYPE_REFERRAL, "Рефералка"),
        (TYPE_BATTLE, "Батл"),
        (TYPE_WEEKLY_DIGEST, "Журнал Клуба"),
    ]
    # UI decoration per post type (no emoji/prefix for the weekly digest).
    TYPE_TO_EMOJI = {
        TYPE_POST: "📝",
        TYPE_INTRO: "🙋♀️",
        TYPE_LINK: "🔗",
        TYPE_QUESTION: "❓",
        TYPE_PAIN: "😭",
        TYPE_IDEA: "💡",
        TYPE_PROJECT: "🏗",
        TYPE_EVENT: "📅",
        TYPE_REFERRAL: "🏢",
        TYPE_BATTLE: "🤜🤛"
    }
    TYPE_TO_PREFIX = {
        TYPE_POST: "",
        TYPE_INTRO: "#intro",
        TYPE_LINK: "➜",
        TYPE_PAIN: "Боль:",
        TYPE_IDEA: "Идея:",
        TYPE_QUESTION: "Вопрос:",
        TYPE_PROJECT: "Проект:",
        TYPE_EVENT: "Событие:",
        TYPE_REFERRAL: "Рефералка:",
        TYPE_BATTLE: "Батл:"
    }
    id = models.UUIDField(primary_key=True, default=uuid4, editable=False)
    slug = models.CharField(max_length=128, unique=True, db_index=True)
    author = models.ForeignKey(User, related_name="posts", db_index=True, on_delete=models.CASCADE)
    type = models.CharField(max_length=32, choices=TYPES, default=TYPE_POST, db_index=True)
    topic = models.ForeignKey(Topic, related_name="posts", null=True, db_index=True, on_delete=models.SET_NULL)
    label = models.JSONField(null=True)
    title = models.TextField(null=False)
    text = models.TextField(null=False)
    html = models.TextField(null=True)
    url = models.URLField(max_length=1024, null=True)
    image = models.URLField(max_length=1024, null=True)
    metadata = models.JSONField(null=True)
    created_at = models.DateTimeField(auto_now_add=True, db_index=True)
    updated_at = models.DateTimeField(auto_now=True)
    last_activity_at = models.DateTimeField(auto_now_add=True, db_index=True)
    published_at = models.DateTimeField(null=True, db_index=True)
    deleted_at = models.DateTimeField(null=True, db_index=True)
    # Denormalized counters, updated with F() expressions below.
    comment_count = models.IntegerField(default=0)
    view_count = models.IntegerField(default=0)
    upvotes = models.IntegerField(default=0, db_index=True)
    hotness = models.IntegerField(default=0, db_index=True)
    is_visible = models.BooleanField(default=False)  # published or draft
    is_visible_on_main_page = models.BooleanField(default=True)  # main page or room-only post
    is_commentable = models.BooleanField(default=True)  # allow comments
    is_approved_by_moderator = models.BooleanField(default=False)  # expose in newsletters, rss, etc
    is_public = models.BooleanField(default=False)  # visible for the outside world
    is_pinned_until = models.DateTimeField(null=True)  # pin on top of the main feed
    is_shadow_banned = models.BooleanField(default=False)  # hide from main page
    # Audit trail (django-simple-history); noisy/derived fields excluded.
    history = HistoricalRecords(
        user_model=User,
        table_name="posts_history",
        excluded_fields=[
            "html",
            "created_at",
            "updated_at",
            "last_activity_at",
            "published_at",
            "comment_count",
            "view_count",
            "upvotes",
            "hotness",
        ],
    )
    class Meta:
        db_table = "posts"
        ordering = ["-created_at"]
    def to_dict(self):
        """Serialize the post for API/search consumers.

        NOTE(review): ``published_at`` is nullable, so this raises
        AttributeError for unpublished drafts — verify callers only pass
        published posts.
        """
        return {
            "id": str(self.id),
            "type": self.type,
            "slug": self.slug,
            "author_slug": self.author.slug,
            "title": self.title,
            "text": self.text,
            "upvotes": self.upvotes,
            "metadata": self.metadata,
            "published_at": self.published_at.isoformat(),
            "updated_at": self.updated_at.isoformat(),
            "last_activity_at": self.last_activity_at.isoformat(),
        }
    def save(self, *args, **kwargs):
        # NOTE(review): slug from objects.count() is race-prone under
        # concurrent inserts; generate_unique_slug presumably resolves
        # collisions — confirm.
        if not self.slug:
            self.slug = generate_unique_slug(Post, str(Post.objects.count()))
        # First time a post becomes visible, stamp the publish time.
        if not self.published_at and self.is_visible:
            self.published_at = datetime.utcnow()
        # NOTE(review): redundant — updated_at already has auto_now=True.
        self.updated_at = datetime.utcnow()
        return super().save(*args, **kwargs)
    def get_absolute_url(self):
        return reverse("show_post", kwargs={"post_type": self.type, "post_slug": self.slug})
    def increment_view_count(self):
        # F() expression: atomic in-database increment, avoids lost updates.
        return Post.objects.filter(id=self.id).update(view_count=F("view_count") + 1)
    def increment_vote_count(self):
        return Post.objects.filter(id=self.id).update(upvotes=F("upvotes") + 1)
    def decrement_vote_count(self):
        return Post.objects.filter(id=self.id).update(upvotes=F("upvotes") - 1)
    @property
    def emoji(self):
        return self.TYPE_TO_EMOJI.get(self.type) or ""
    @property
    def prefix(self):
        return self.TYPE_TO_PREFIX.get(self.type) or ""
    @property
    def is_pinned(self):
        return self.is_pinned_until and self.is_pinned_until > datetime.utcnow()
    @property
    def is_searchable(self):
        return self.is_visible and not self.is_shadow_banned
    @property
    def is_safely_deletable_by_author(self):
        # Posts with few comments may be hard-deleted; busier posts are
        # cleared instead to preserve the discussion.
        return self.comment_count < settings.MAX_COMMENTS_FOR_DELETE_VS_CLEAR
    @property
    def description(self):
        # Plain-text teaser derived from the rendered HTML.
        return truncatechars(strip_tags(self.html or ""), 400)
    @property
    def effective_published_at(self):
        return self.published_at or self.created_at
    @property
    def event_datetime(self):
        """Parse metadata["event"] into a datetime; None when absent.

        NOTE(review): a missing "day" falls back to 0, which makes
        ``datetime(...)`` raise ValueError — confirm upstream validation.
        """
        if self.metadata and self.metadata.get("event"):
            hour, minute, second = map(int, self.metadata["event"]["time"].split(":", 2))
            day = int(self.metadata["event"].get("day") or 0)
            month = int(self.metadata["event"].get("month") or self.effective_published_at.month)
            # An earlier month than the publish month means "next year".
            if month < self.effective_published_at.month:
                year = self.effective_published_at.year + 1
            else:
                year = self.effective_published_at.year
            return datetime(year, month, day, hour, minute, second)
    @classmethod
    def check_duplicate(cls, user, title):
        # True when the user's most recent post has exactly the same title.
        latest_user_post = Post.objects.filter(author=user).order_by("-created_at").first()
        return latest_user_post and latest_user_post.title == title
    @classmethod
    def visible_objects(cls):
        return cls.objects.filter(is_visible=True).select_related("topic", "author")
    @classmethod
    def objects_for_user(cls, user):
        # Annotates per-user vote state and unread-comment counts via raw
        # subqueries.  user.id is a server-side UUID, not user input, so the
        # f-string interpolation is not an injection vector here.
        return cls.visible_objects()\
            .extra({
                "is_voted": "select 1 from post_votes "
                            "where post_votes.post_id = posts.id "
                            f"and post_votes.user_id = '{user.id}'",
                "upvoted_at": "select ROUND(extract(epoch from created_at) * 1000) from post_votes "
                              "where post_votes.post_id = posts.id "
                              f"and post_votes.user_id = '{user.id}'",
                "unread_comments": f"select unread_comments from post_views "
                                   f"where post_views.post_id = posts.id "
                                   f"and post_views.user_id = '{user.id}'"
            })  # TODO: i've been trying to use .annotate() here for 2 hours and I have no idea why it's not working
    @classmethod
    def check_rate_limits(cls, user):
        """Return True when the user may still post today (moderators always may)."""
        if user.is_moderator:
            return True
        day_post_count = Post.visible_objects()\
            .filter(author=user, created_at__gte=datetime.utcnow() - timedelta(hours=24))\
            .count()
        return day_post_count < settings.RATE_LIMIT_POSTS_PER_DAY
    @classmethod
    def get_user_intro(cls, user):
        return cls.objects.filter(author=user, type=Post.TYPE_INTRO).first()
    @classmethod
    def upsert_user_intro(cls, user, text, is_visible=True):
        """Create or update the user's single #intro post."""
        intro, is_created = cls.objects.update_or_create(
            author=user,
            type=cls.TYPE_INTRO,
            defaults=dict(
                slug=user.slug,
                title=f"#intro от @{user.slug}",
                text=text,
                is_visible=is_visible,
                is_public=False,
            ),
        )
        if not is_created:
            # Invalidate cached HTML so the new text gets re-rendered.
            intro.html = None
            intro.save()
        return intro
    def clear(self):
        """Wipe the post body and reassign it to the "deleted" system user."""
        self.text = settings.CLEARED_POST_TEXT
        self.html = None
        self.author = User.objects.filter(slug=settings.DELETED_USERNAME).first()
        self.save()
    def publish(self):
        self.is_visible = True
        self.published_at = datetime.utcnow()
        self.save()
    def unpublish(self):
        self.is_visible = False
        self.published_at = None
        self.save()
    def delete(self, *args, **kwargs):
        # Soft delete: records the timestamp instead of removing the row.
        self.deleted_at = datetime.utcnow()
        self.save()
    def undelete(self, *args, **kwargs):
        self.deleted_at = None
        self.save()
| 34.457746 | 116 | 0.62385 |
2c98a147fd5afe043edea802e0dbbe24093d2fa5 | 407 | py | Python | tests/fixtures/labels.py | ReeceHoffmann/virtool | f9befad060fe16fa29fb80124e674ac5a9c4f538 | [
"MIT"
] | 39 | 2016-10-31T23:28:59.000Z | 2022-01-15T00:00:42.000Z | tests/fixtures/labels.py | ReeceHoffmann/virtool | f9befad060fe16fa29fb80124e674ac5a9c4f538 | [
"MIT"
] | 1,690 | 2017-02-07T23:39:48.000Z | 2022-03-31T22:30:44.000Z | tests/fixtures/labels.py | ReeceHoffmann/virtool | f9befad060fe16fa29fb80124e674ac5a9c4f538 | [
"MIT"
] | 25 | 2017-02-08T18:25:31.000Z | 2021-09-20T22:55:25.000Z | import pytest
from sqlalchemy.ext.asyncio import AsyncSession
from virtool.labels.models import Label
@pytest.fixture
async def test_labels(pg):
    """Insert three canonical labels (ids 1-3) into the test Postgres DB."""
    labels = [
        Label(id=label_id, name=label_name)
        for label_id, label_name in enumerate(("Legacy", "Incomplete", "Complete"), start=1)
    ]
    async with AsyncSession(pg) as session:
        session.add_all(labels)
        await session.commit()
| 23.941176 | 52 | 0.707617 |
00483d027e7c22bb820c190c7fd22c5bb32b912f | 9,776 | py | Python | porcupine/plugins/run/dialog.py | Mannuel25/porcupine | c55969ea78249107d58e2d75e687f7c04b7e4b7b | [
"MIT"
] | null | null | null | porcupine/plugins/run/dialog.py | Mannuel25/porcupine | c55969ea78249107d58e2d75e687f7c04b7e4b7b | [
"MIT"
] | null | null | null | porcupine/plugins/run/dialog.py | Mannuel25/porcupine | c55969ea78249107d58e2d75e687f7c04b7e4b7b | [
"MIT"
] | null | null | null | from __future__ import annotations
import sys
import tkinter
from pathlib import Path
from tkinter import ttk
from typing import Callable, Generic, TypeVar
from porcupine import get_main_window, tabs, textutils, utils
from . import common, history
T = TypeVar("T")
class _FormattingEntryAndLabels(Generic[T]):
    """A labeled entry row whose content is live-formatted and validated.

    Each keystroke re-runs ``formatter`` on the entry text with the given
    substitutions, shows the result (or an error) in a read-only preview
    widget, and exposes the parsed value via ``self.value`` (None when
    invalid).
    """
    def __init__(
        self,
        entry_area: ttk.Frame,
        text: str,
        substitutions: dict[str, str],
        formatter: Callable[[str, dict[str, str]], T],
        value_validator: Callable[[T], bool],
        validated_callback: Callable[[], None],
    ):
        self._substitutions = substitutions
        self._value_validator = value_validator
        self._validated_callback = validated_callback
        self._formatter = formatter
        # Append below whatever rows the caller already placed in the grid.
        grid_y = entry_area.grid_size()[1]
        ttk.Label(entry_area, text=text).grid(row=grid_y, column=0, sticky="w")
        self.format_var = tkinter.StringVar()
        self.entry = ttk.Entry(entry_area, font="TkFixedFont", textvariable=self.format_var)
        self.entry.grid(row=grid_y, column=1, sticky="we", padx=(5, 0))
        self.entry.selection_range(0, "end")
        grid_y += 1
        # Read-only preview of the substituted command/path.
        self._command_display = textutils.create_passive_text_widget(
            entry_area, width=1, height=2, wrap="char", cursor="arrow"
        )
        self._command_display.grid(row=grid_y, column=1, sticky="we")
        # Re-validate on every edit of the entry.
        self.format_var.trace_add("write", self._validate)
        # Parsed value; None while the entry content is invalid.
        self.value: T | None = None
        self._validate()
    def _validate(self, *junk_from_var_trace: object) -> None:
        """Re-format the entry text, update the preview and ``self.value``."""
        try:
            value = self._formatter(self.format_var.get(), self._substitutions)
        except (ValueError, KeyError, IndexError):
            # Bad {placeholder} usage: show a message instead of a preview.
            self.value = None
            self._command_display.config(state="normal", font="TkDefaultFont")
            self._command_display.delete("1.0", "end")
            self._command_display.insert("1.0", "Substitution error")
            self._command_display.config(state="disabled")
        else:
            if self._value_validator(value):
                self.value = value
            else:
                self.value = None
            self._command_display.config(state="normal", font="TkFixedFont")
            self._command_display.delete("1.0", "end")
            self._command_display.insert("1.0", str(value))
            self._command_display.config(state="disabled")
        # _validated_callback might not work if called from __init__
        if junk_from_var_trace:
            self._validated_callback()
class _CommandAsker:
def __init__(
self,
file_path: Path,
project_path: Path,
suggestions: list[common.Command],
initial_key_id: int,
):
self.window = tkinter.Toplevel()
self._suggestions = suggestions
if sys.platform == "win32":
terminal_name = "command prompt"
else:
terminal_name = "terminal"
content_frame = ttk.Frame(self.window, borderwidth=10)
content_frame.pack(fill="both", expand=True)
entry_area = ttk.Frame(content_frame)
entry_area.pack(fill="x")
entry_area.grid_columnconfigure(1, weight=1)
substitutions = common.get_substitutions(file_path, project_path)
self.command: _FormattingEntryAndLabels[str] = _FormattingEntryAndLabels(
entry_area,
text="Run this command:",
substitutions=substitutions,
formatter=common.format_command,
value_validator=(lambda command: bool(command.strip())),
validated_callback=self.update_run_button,
)
self.cwd: _FormattingEntryAndLabels[Path] = _FormattingEntryAndLabels(
entry_area,
text="In this directory:",
substitutions=substitutions,
formatter=common.format_cwd,
value_validator=(lambda path: path.is_dir()),
validated_callback=self.update_run_button,
)
ttk.Label(content_frame, text="Substitutions:").pack(anchor="w")
sub_text = "\n".join("{%s} = %s" % pair for pair in substitutions.items())
sub_textbox = textutils.create_passive_text_widget(
content_frame, height=len(substitutions), width=1, wrap="none"
)
sub_textbox.pack(fill="x", padx=(15, 0), pady=(0, 20))
sub_textbox.config(state="normal")
sub_textbox.insert("1.0", sub_text)
sub_textbox.config(state="disabled")
porcupine_text = (
"Display the output inside the Porcupine window (does not support keyboard input)"
)
external_text = f"Use an external {terminal_name} window"
self.terminal_var = tkinter.BooleanVar()
ttk.Radiobutton(
content_frame,
variable=self.terminal_var,
value=False,
text=porcupine_text,
underline=porcupine_text.index("Porcupine"),
).pack(fill="x")
ttk.Radiobutton(
content_frame,
variable=self.terminal_var,
value=True,
text=external_text,
underline=external_text.index("external"),
).pack(fill="x")
self.window.bind("<Alt-p>", (lambda e: self.terminal_var.set(False)), add=True)
self.window.bind("<Alt-e>", (lambda e: self.terminal_var.set(True)), add=True)
self.repeat_bindings = [utils.get_binding(f"<<Run:Repeat{key_id}>>") for key_id in range(4)]
self.repeat_var = tkinter.StringVar(value=self.repeat_bindings[initial_key_id])
self.repeat_var.trace_add("write", self.update_run_button)
repeat_frame = ttk.Frame(content_frame)
repeat_frame.pack(fill="x", pady=10)
ttk.Label(
repeat_frame, text="This command can be repeated by pressing the following key:"
).pack(side="left")
ttk.Combobox(
repeat_frame, textvariable=self.repeat_var, values=self.repeat_bindings, width=3
).pack(side="left")
button_frame = ttk.Frame(content_frame)
button_frame.pack(fill="x")
cancel_button = ttk.Button(
button_frame, text="Cancel", command=self.window.destroy, width=1
)
cancel_button.pack(side="left", fill="x", expand=True, padx=(0, 5))
self.run_button = ttk.Button(button_frame, text="Run", command=self.on_run_clicked, width=1)
self.run_button.pack(side="left", fill="x", expand=True, padx=(5, 0))
self.run_clicked = False
for entry in [self.command.entry, self.cwd.entry]:
entry.bind("<Return>", (lambda e: self.run_button.invoke()), add=True)
entry.bind("<Escape>", (lambda e: self.window.destroy()), add=True)
if self._suggestions:
# Run _autocomplete when pressing a key without alt
self.command.entry.bind("<Key>", self._autocomplete, add=True)
self.command.entry.bind("<Alt-Key>", (lambda e: None), add=True)
self._select_command_autocompletion(self._suggestions[0], prefix="")
self.command.entry.selection_range(0, "end")
self.command.entry.focus_set()
def _select_command_autocompletion(self, command: common.Command, prefix: str) -> None:
assert command.command_format.startswith(prefix)
self.command.format_var.set(command.command_format)
self.command.entry.icursor(len(prefix))
self.command.entry.selection_range("insert", "end")
self.cwd.format_var.set(command.cwd_format)
self.terminal_var.set(command.external_terminal)
def _autocomplete(self, event: tkinter.Event[tkinter.Entry]) -> str | None:
    """Suggest a known command while the user types into the command entry.

    Returns "break" to stop tkinter's default key handling when a
    completion was inserted, or None to let the keypress through.
    """
    # Only react to single printable characters (not arrows, modifiers, ...).
    if len(event.char) != 1 or not event.char.isprintable():
        return None
    text_to_keep = self.command.entry.get()
    if self.command.entry.selection_present():
        # A previous autocompletion leaves the suggested tail selected.
        # Only treat the selection as replaceable if it runs to the end.
        if self.command.entry.index("sel.last") != self.command.entry.index("end"):
            return None
        text_to_keep = text_to_keep[: self.command.entry.index("sel.first")]
    # Complete with the first suggestion matching what was typed so far.
    for item in self._suggestions:
        if item.command_format.startswith(text_to_keep + event.char):
            self._select_command_autocompletion(item, text_to_keep + event.char)
            return "break"
    return None
def update_run_button(self, *junk: object) -> None:
    """Enable the Run button only while every input in the dialog is valid."""
    valid = (
        self.command.value is not None
        and self.cwd.value is not None
        and self.repeat_var.get() in self.repeat_bindings
    )
    self.run_button.config(state=("normal" if valid else "disabled"))
def on_run_clicked(self) -> None:
    """Remember that Run was chosen, then close the dialog.

    ask_command() inspects run_clicked after the window closes to tell
    "Run" apart from "Cancel"/escape.
    """
    self.run_clicked = True
    self.window.destroy()
def ask_command(
    tab: tabs.FileTab, project_path: Path, initial_key_id: int
) -> common.Command | None:
    """Show the "Run command" dialog and return the chosen command.

    Returns None if the user cancels or closes the window instead of
    clicking Run.
    """
    assert tab.path is not None
    asker = _CommandAsker(
        tab.path, project_path, history.get(tab, project_path, initial_key_id), initial_key_id
    )
    asker.window.title("Run command")
    asker.window.transient(get_main_window())
    # you probably don't wanna resize it in y, it's safe to do it here,
    # as the content is already packed
    asker.window.resizable(True, False)
    asker.window.wait_window()  # blocks until the dialog is closed
    if asker.run_clicked:
        # The Run button is enabled only while both values are valid.
        assert asker.command.value is not None
        assert asker.cwd.value is not None
        return common.Command(
            command_format=asker.command.format_var.get(),
            command=asker.command.value,
            cwd_format=asker.cwd.format_var.get(),
            cwd=str(asker.cwd.value),
            external_terminal=asker.terminal_var.get(),
            key_id=asker.repeat_bindings.index(asker.repeat_var.get()),
        )
    return None
| 38.948207 | 100 | 0.634002 |
1ec2d9077e8b4347616fb131ecc49188d7718565 | 5,935 | py | Python | src/sas/sasgui/guiframe/config.py | andyfaff/sasview | c00a797ab9c4ddc60f0fa8a64ae8a2067c225921 | [
"BSD-3-Clause"
] | null | null | null | src/sas/sasgui/guiframe/config.py | andyfaff/sasview | c00a797ab9c4ddc60f0fa8a64ae8a2067c225921 | [
"BSD-3-Clause"
] | null | null | null | src/sas/sasgui/guiframe/config.py | andyfaff/sasview | c00a797ab9c4ddc60f0fa8a64ae8a2067c225921 | [
"BSD-3-Clause"
] | null | null | null | """
Application settings
"""
from __future__ import print_function
import time
import os
from sas.sasgui.guiframe.gui_style import GUIFRAME
import sas.sasview
import logging
logger = logging.getLogger(__name__)
# Version of the application
__appname__ = "SasView"
__version__ = sas.sasview.__version__
__build__ = sas.sasview.__build__
__download_page__ = 'https://github.com/SasView/sasview/releases'
__update_URL__ = 'http://www.sasview.org/latestversion.json'
# Debug message flag
__EVT_DEBUG__ = False
# Flag for automated testing
__TEST__ = False
# Debug message should be written to a file?
__EVT_DEBUG_2_FILE__ = False
__EVT_DEBUG_FILENAME__ = "debug.log"
# About box info
_do_aboutbox = True
_do_acknowledge = True
_do_tutorial = True
_acknowledgement_preamble =\
'''To ensure the long term support and development of this software please''' +\
''' remember to:'''
_acknowledgement_preamble_bullet1 =\
'''Acknowledge its use in your publications as :'''
_acknowledgement_preamble_bullet2 =\
'''Reference SasView as:'''
_acknowledgement_preamble_bullet3 =\
'''Reference the model you used if appropriate (see documentation for refs)'''
_acknowledgement_preamble_bullet4 =\
'''Send us your reference for our records: developers@sasview.org'''
_acknowledgement_publications = \
'''This work benefited from the use of the SasView application, originally developed under NSF Award DMR-0520547. SasView also contains code developed with funding from the EU Horizon 2020 programme under the SINE2020 project Grant No 654000.'''
_acknowledgement_citation = \
'''M. Doucet et al. SasView Version 4.1, Zenodo, 10.5281/zenodo.438138'''
_acknowledgement = \
'''This work was originally developed as part of the DANSE project funded by the US NSF under Award DMR-0520547,\n but is currently maintained by a collaboration between UTK, UMD, NIST, ORNL, ISIS, ESS, ILL, ANSTO, TU Delft, DLS, and the scattering community.\n\n SasView also contains code developed with funding from the EU Horizon 2020 programme under the SINE2020 project (Grant No 654000).\nA list of individual contributors can be found at: http://www.sasview.org/contact.html
'''
_homepage = "http://www.sasview.org"
_download = __download_page__
_authors = []
_paper = "http://sourceforge.net/p/sasview/tickets/"
_license = "mailto:help@sasview.org"
icon_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "images"))
logger.info("icon path: %s" % icon_path)
media_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "media"))
test_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "test"))
_nist_logo = os.path.join(icon_path, "nist_logo.png")
_umd_logo = os.path.join(icon_path, "umd_logo.png")
_sns_logo = os.path.join(icon_path, "sns_logo.png")
_ornl_logo = os.path.join(icon_path, "ornl_logo.png")
_isis_logo = os.path.join(icon_path, "isis_logo.png")
_ess_logo = os.path.join(icon_path, "ess_logo.png")
_ill_logo = os.path.join(icon_path, "ill_logo.png")
_ansto_logo = os.path.join(icon_path, "ansto_logo.png")
_tudelft_logo = os.path.join(icon_path, "tudelft_logo.png")
_nsf_logo = os.path.join(icon_path, "nsf_logo.png")
_danse_logo = os.path.join(icon_path, "danse_logo.png")
_inst_logo = os.path.join(icon_path, "utlogo.gif")
_nist_url = "http://www.nist.gov/"
_umd_url = "http://www.umd.edu/"
_sns_url = "http://neutrons.ornl.gov/"
_ornl_url = "http://neutrons.ornl.gov/"
_nsf_url = "http://www.nsf.gov"
_isis_url = "http://www.isis.stfc.ac.uk/"
_ess_url = "http://ess-scandinavia.eu/"
_ill_url = "http://www.ill.eu/"
_ansto_url = "http://www.ansto.gov.au/"
_tudelft_url = "http://www.tnw.tudelft.nl/en/cooperation/facilities/reactor-instituut-delft/"
_dls_url = "http://www.diamond.ac.uk/"
_danse_url = "http://www.cacr.caltech.edu/projects/danse/release/index.html"
_inst_url = "http://www.utk.edu"
_corner_image = os.path.join(icon_path, "angles_flat.png")
_welcome_image = os.path.join(icon_path, "SVwelcome.png")
_copyright = "(c) 2009 - 2017, UTK, UMD, NIST, ORNL, ISIS, ESS, ILL, ANSTO, TU Delft, and DLS"
marketplace_url = "http://marketplace.sasview.org/"
#edit the list of file state your plugin can read
APPLICATION_WLIST = 'SasView files (*.svs)|*.svs'
APPLICATION_STATE_EXTENSION = '.svs'
GUIFRAME_WIDTH = 1150
GUIFRAME_HEIGHT = 840
PLUGIN_STATE_EXTENSIONS = ['.fitv', '.inv', '.prv', '.crf']
PLUGINS_WLIST = ['Fitting files (*.fitv)|*.fitv',
'Invariant files (*.inv)|*.inv',
'P(r) files (*.prv)|*.prv',
'Corfunc files (*.crf)|*.crf']
PLOPANEL_WIDTH = 415
PLOPANEL_HEIGTH = 370
DATAPANEL_WIDTH = 235
DATAPANEL_HEIGHT = 700
SPLASH_SCREEN_PATH = os.path.join(icon_path, "SVwelcome_mini.png")
TUTORIAL_PATH = os.path.join(media_path, "Tutorial.pdf")
DEFAULT_STYLE = GUIFRAME.MULTIPLE_APPLICATIONS|GUIFRAME.MANAGER_ON\
|GUIFRAME.CALCULATOR_ON|GUIFRAME.TOOLBAR_ON
SPLASH_SCREEN_WIDTH = 512
SPLASH_SCREEN_HEIGHT = 366
SS_MAX_DISPLAY_TIME = 2000
WELCOME_PANEL_ON = True
WELCOME_PANEL_SHOW = False
CLEANUP_PLOT = False
# OPEN and SAVE project menu
OPEN_SAVE_PROJECT_MENU = True
#VIEW MENU
VIEW_MENU = True
#EDIT MENU
EDIT_MENU = True
SetupIconFile_win = os.path.join(icon_path, "ball.ico")
SetupIconFile_mac = os.path.join(icon_path, "ball.icns")
DefaultGroupName = "."
OutputBaseFilename = "setupSasView"
FIXED_PANEL = True
DATALOADER_SHOW = True
CLEANUP_PLOT = False
WELCOME_PANEL_SHOW = False
#Show or hide toolbar at the start up
TOOLBAR_SHOW = True
# set a default perspective
DEFAULT_PERSPECTIVE = 'None'
# Time out for updating sasview
UPDATE_TIMEOUT = 2
#OpenCL option
SAS_OPENCL = None
def printEVT(message):
    """Print *message* with a timestamp when event debugging is enabled.

    The message goes to stdout and, when __EVT_DEBUG_2_FILE__ is set, is
    appended to __EVT_DEBUG_FILENAME__.  No-op unless __EVT_DEBUG__ is True.
    """
    if __EVT_DEBUG__:
        # time.clock() was removed in Python 3.8; prefer perf_counter when
        # available while staying compatible with older interpreters.
        timer = time.perf_counter if hasattr(time, "perf_counter") else time.clock
        print("%g: %s" % (timer(), message))
        if __EVT_DEBUG_2_FILE__:
            # Context manager closes the file even if the write fails.
            with open(__EVT_DEBUG_FILENAME__, 'a') as out:
                out.write("%10g: %s\n" % (timer(), message))
| 37.09375 | 482 | 0.739511 |
43250ce2b0d9eaaef22964d4c43f1efbd33fc286 | 1,362 | py | Python | SystemTests/src/python/LifeCycleAnalysis/LifeCyclePlots/HistoManager.py | vkuznet/DBS | 14df8bbe8ee8f874fe423399b18afef911fe78c7 | [
"Apache-2.0"
] | 8 | 2015-08-14T04:01:32.000Z | 2021-06-03T00:56:42.000Z | SystemTests/src/python/LifeCycleAnalysis/LifeCyclePlots/HistoManager.py | yuyiguo/DBS | 14df8bbe8ee8f874fe423399b18afef911fe78c7 | [
"Apache-2.0"
] | 162 | 2015-01-07T21:34:47.000Z | 2021-10-13T09:42:41.000Z | SystemTests/src/python/LifeCycleAnalysis/LifeCyclePlots/HistoManager.py | yuyiguo/DBS | 14df8bbe8ee8f874fe423399b18afef911fe78c7 | [
"Apache-2.0"
] | 16 | 2015-01-22T15:27:29.000Z | 2021-04-28T09:23:28.000Z | from itertools import chain
class HistoManager(object):
    """Keeps an ordered collection of histograms with unique names.

    A list (rather than a dict) is used so that the insertion order of
    the histograms is preserved.
    """

    def __init__(self, histos=()):
        """Create a manager holding *histos* (any iterable of histograms)."""
        # Materialize first: *histos* may be a one-shot iterator (see __add__).
        # The previous implementation iterated the argument twice, so names
        # were silently lost for generators, and assigned instead of
        # appending, so _histos_names ended up being a single name string.
        self._histos = list(histos)
        self._histos_names = [histo.name for histo in self._histos]

    def __add__(self, other):
        """Return a new manager containing the histograms of both operands."""
        return HistoManager(chain(self, other))

    def __iter__(self):
        return iter(self._histos)

    @property
    def histo_names(self):
        """Names of the managed histograms, in insertion order."""
        return self._histos_names

    def add_histo(self, histo):
        """Append *histo*; raises NameError if its name is already taken."""
        if histo.name in self._histos_names:
            raise NameError('Name %s already exists. Names must be unique.' % histo.name)
        self._histos.append(histo)
        self._histos_names.append(histo.name)

    def remove_histo(self, histo):
        """Remove the histogram with *histo*'s name; raises IndexError if absent."""
        try:
            index = self._histos_names.index(histo.name)
        except ValueError:
            # list.index raises ValueError (not IndexError) for a missing
            # value; translate it to the error type this API documents.
            raise IndexError('No histogram with name %s exists' % histo.name)
        self._histos_names.pop(index)
        self._histos.pop(index)

    def update_histos(self, data):
        """Feed *data* to every managed histogram."""
        for histo in self:
            histo.update(data)

    def draw_histos(self):
        """Draw every managed histogram."""
        for histo in self:
            histo.draw()

    def save_histos_as(self, output_directory, format="png"):
        """Save every histogram into *output_directory* in the given format."""
        for histo in self:
            histo.save_as(output_directory, format)
| 29.608696 | 89 | 0.623348 |
752df58e2b40cdd18a33f9d1cbc598fb4765a729 | 4,717 | py | Python | src/prechecks/frr.py | rgaensler/gcode | c6a6b617a04490dedefb2bae7b596a2e12ab4ab1 | [
"MIT"
] | null | null | null | src/prechecks/frr.py | rgaensler/gcode | c6a6b617a04490dedefb2bae7b596a2e12ab4ab1 | [
"MIT"
] | 314 | 2020-02-26T12:37:17.000Z | 2021-08-02T00:32:32.000Z | src/prechecks/frr.py | rgaensler/gcode | c6a6b617a04490dedefb2bae7b596a2e12ab4ab1 | [
"MIT"
] | 2 | 2020-11-12T16:07:48.000Z | 2020-11-16T09:14:48.000Z | # Functional Redundancy Resolution
from math import pi, ceil
from typing import List
import numpy as np
from src.kinematics.forward_kinematics import axang2tform
from src.kinematics.forward_kinematics import geometric_jacobian, right_generalized_inverse_jacobian, forward_kinematics
from src.kinematics.joints import BaseJoint
from src.prechecks.configs import melfa_rv_4a
from src.prechecks.trajectory_segment import CartesianTrajSegment
def frr(config: List[BaseJoint], initial_joints: List[float], weights=None, stop_threshold: float = 1e-5) \
        -> List[float]:
    """
    Functional Redundancy Resolution

    Iteratively adjusts the joints in the null space of the tool axis:
    motion that only rotates around the tool's z-axis does not change the
    task, so it is used to drive each joint towards the middle of its range.

    :param config: Tuple of joints, offsets are considered
    :param initial_joints: Tuple of joint coordinate values (either mm or radian)
    :param weights: Per-joint gains for the null-space motion, each < 1 (default 0.1 each)
    :param stop_threshold: Iteration stops once the norm of the joint update falls below this
    :return: Optimized joint values as a flat list
    """
    # Obtain tool column vector as z-axis of endeffector frame
    total_tform = forward_kinematics(config, initial_joints)
    e = total_tform[0:3, 2]
    e = e[:, None]  # 3x1 column vector
    # Calculate projector onto tool axis (3x3 outer product)
    t_proj = e @ e.T
    # Joint limits in radian as (lower, upper) pairs per joint.
    # NOTE(review): hard-coded for a six-axis robot (presumably Melfa RV-4A,
    # see the demo below) -- confirm before reusing with another robot.
    joint_limits = [
        -2.7925, 2.7925,
        -1.5708, 2.4435,
        +0.2618, 2.9496,
        -2.7925, 2.7925,
        -2.0944, 2.0944,
        -3.4907, 3.4907
    ]
    # Midpoint of each joint range; the resolution pulls joints towards these.
    median = [(lower + upper) / 2 for lower, upper in zip(joint_limits[::2], joint_limits[1::2])]
    # Initialize current joint column vector
    current_joints = np.asarray(initial_joints)
    current_joints = current_joints[:, None]
    # Optimize the joint vector in the orthogonal space
    while True:
        # Calculate jacobian and its right-hand pseudo-inverse for current joint values
        jac = geometric_jacobian(config, current_joints[:, 0].tolist())
        pinv_jac = right_generalized_inverse_jacobian(jac)
        # Create the manipulation column vector (distance to range midpoints)
        manip = [qm - q for qm, q in zip(median, current_joints[:, 0].tolist())]
        h = np.asarray(manip)
        h = h[:, None]
        if h.shape != (len(initial_joints), 1):
            raise TypeError(f'h must be a {len(initial_joints)}x1 vector.')
        # Set weights as diagonal matrix
        if weights is None:
            weights = [0.1] * len(initial_joints)
        elif any((i >= 1 for i in weights)):
            raise ValueError('Weights need to be < 1 for the algorithm to converge.')
        w = np.diag(weights)
        # Calculate delta for iteration: project the weighted joint-centering
        # motion onto the tool axis and map it back through the pseudo-inverse.
        effect = np.linalg.multi_dot([t_proj, jac[3:, :], w, h])
        delta_joints = pinv_jac @ np.vstack([np.zeros((3, 1)), effect])
        current_joints += delta_joints
        if np.linalg.norm(delta_joints) < stop_threshold:
            # Stop when the correcting effort is below a certain threshold
            break
    # Return flat list
    return current_joints[:, 0].tolist()
if __name__ == '__main__':
# Define test data
robot = melfa_rv_4a(rtoff=-50, atoff=200)
home = [0, 0, pi / 2, 0, pi / 2, pi]
tuning = [0, 0.1, 0.1, 0.1, 0.1, 0.01]
# Do optimization and print joints
new_joints = frr(robot, home, weights=tuning)
print(f'Original joints:\t{[f"{i:+.3f}" for i in home]}')
print(f'New joints:\t\t\t{[f"{i:+.3f}" for i in new_joints]}')
# Validate new position
tcp_pos = forward_kinematics(robot, home)
tcp_pos_new = forward_kinematics(robot, new_joints)
tcp_pos_dev = tcp_pos_new - tcp_pos
print(f'TCP Pos Deviation:\t{[f"{i:+.3f}" for i in tcp_pos_dev[0:3, 3]]}')
print(f'TCP ZDIR Deviation:\t{[f"{i:+.3f}" for i in tcp_pos_dev[0:3, 2]]}')
print(f'Weights:\t\t\t{tuning}')
def expand_task_trajectory(task_trajectory: List[CartesianTrajSegment], dphi: float) -> List[CartesianTrajSegment]:
    """
    Expand the task trajectory by adding equivalent points obtained by applying constant per segment rotation around
    the tool axis.

    :param task_trajectory: Trajectory segments to expand (their trajectory_points are modified in place)
    :param dphi: Angular step in radian between successive tool-axis rotations
    :return: The same list of segments, with the rotated variants added
    """
    # Create the angles around the tool axis that need to be sampled
    samples = ceil(pi / dphi)
    # Start is included but stop is not included; index 0 (the unrotated
    # original) is skipped because it is already part of the segment.
    angles = [(i, dphi * i,) for i in range(-samples + 1, samples) if i != 0]
    # Iterate over all segments
    for segment in task_trajectory:
        # Get the z-axis of the current segment (constant orientation)
        nvec = segment.unmodified_points[0][0:3, 2]
        orig_points = tuple(segment.unmodified_points)
        for angle_idx, angle in angles:
            # Calculate the transformation matrix around the tool axis
            modified_tform = axang2tform(nvec, angle)
            # TODO Check calculation of modified points
            segment.trajectory_points[angle_idx] = [np.dot(tform, modified_tform) for tform in orig_points]
    return task_trajectory
| 37.436508 | 120 | 0.662073 |
4d364dc4a1f4ac80f71f0d13b434ccc489641929 | 7,535 | py | Python | ssd_liverdet/models/ssd_multiphase.py | L0SG/Liver_segmentation | 178b2367cf606ba7d704e96f855389be4c1abd14 | [
"MIT"
] | 34 | 2019-02-04T07:35:11.000Z | 2022-02-08T07:10:57.000Z | ssd_liverdet/models/ssd_multiphase.py | L0SG/Liver_segmentation | 178b2367cf606ba7d704e96f855389be4c1abd14 | [
"MIT"
] | null | null | null | ssd_liverdet/models/ssd_multiphase.py | L0SG/Liver_segmentation | 178b2367cf606ba7d704e96f855389be4c1abd14 | [
"MIT"
] | 8 | 2019-03-28T04:07:25.000Z | 2021-04-19T18:18:22.000Z | import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from layers import *
from data import v2
import os
class SSD(nn.Module):
    """Single Shot Multibox Architecture
    The network is composed of a base VGG network followed by the
    added multibox conv layers.  Each multibox layer branches into
        1) conv2d for class conf scores
        2) conv2d for localization predictions
        3) associated priorbox layer to produce default bounding
           boxes specific to the layer's feature map size.
    See: https://arxiv.org/pdf/1512.02325.pdf for more details.

    Args:
        phase: (string) Can be "test" or "train"
        base: VGG16 layers for input, size of either 300 or 500
        extras: extra layers that feed to multibox loc and conf layers
        head: "multibox head" consists of loc and conf conv layers
    """

    def __init__(self, phase, base, extras, head, num_classes):
        super(SSD, self).__init__()
        self.phase = phase
        self.num_classes = num_classes
        # TODO: implement __call__ in PriorBox
        self.priorbox = PriorBox(v2)
        # NOTE(review): volatile=True is a pre-0.4 PyTorch idiom (no-grad);
        # confirm the targeted PyTorch version before upgrading.
        self.priors = Variable(self.priorbox.forward(), volatile=True)
        self.size = 300
        # SSD network
        self.vgg = nn.ModuleList(base)
        # Layer learns to scale the l2 normalized features from conv4_3
        self.L2Norm = L2Norm(512, 20)
        self.extras = nn.ModuleList(extras)
        self.loc = nn.ModuleList(head[0])
        self.conf = nn.ModuleList(head[1])
        if phase == 'test':
            self.softmax = nn.Softmax()
            self.detect = Detect(num_classes, 0, 200, 0.01, 0.45)

    def forward(self, x):
        """Applies network layers and ops on input image(s) x.

        Args:
            x: input image or batch of images. Shape: [batch,3*batch,300,300].

        Return:
            Depending on phase:
            test:
                Variable(tensor) of output class label predictions,
                confidence score, and corresponding location predictions for
                each object detected. Shape: [batch,topk,7]

            train:
                list of concat outputs from:
                    1: confidence layers, Shape: [batch*num_priors,num_classes]
                    2: localization layers, Shape: [batch,num_priors*4]
                    3: priorbox layers, Shape: [2,num_priors*4]
        """
        sources = list()
        loc = list()
        conf = list()

        # apply vgg up to conv4_3 relu
        for k in range(23):
            x = self.vgg[k](x)
        s = self.L2Norm(x)
        sources.append(s)

        # apply vgg up to fc7
        for k in range(23, len(self.vgg)):
            x = self.vgg[k](x)
        sources.append(x)

        # apply extra layers and cache source layer outputs
        # (every second extra layer output is used as a detection source)
        for k, v in enumerate(self.extras):
            x = F.relu(v(x), inplace=True)
            if k % 2 == 1:
                sources.append(x)

        # apply multibox head to source layers; permute to channels-last
        # before flattening so box predictions stay contiguous per location
        for (x, l, c) in zip(sources, self.loc, self.conf):
            loc.append(l(x).permute(0, 2, 3, 1).contiguous())
            conf.append(c(x).permute(0, 2, 3, 1).contiguous())

        loc = torch.cat([o.view(o.size(0), -1) for o in loc], 1)
        conf = torch.cat([o.view(o.size(0), -1) for o in conf], 1)
        if self.phase == "test":
            output = self.detect(
                loc.view(loc.size(0), -1, 4),                   # loc preds
                self.softmax(conf.view(-1, self.num_classes)),  # conf preds
                self.priors.type(type(x.data))                  # default boxes
            )
        else:
            output = (
                loc.view(loc.size(0), -1, 4),
                conf.view(conf.size(0), -1, self.num_classes),
                self.priors
            )
        return output

    def load_weights(self, base_file):
        """Load a state dict from *base_file* (.pth or .pkl only)."""
        other, ext = os.path.splitext(base_file)
        # Bug fix: "ext == '.pkl' or '.pth'" was always true because the
        # non-empty string '.pth' is truthy; test membership instead so the
        # unsupported-extension branch is actually reachable.
        if ext in ('.pkl', '.pth'):
            print('Loading weights into state dict...')
            weight_pretrained = torch.load(base_file, map_location=lambda storage, loc: storage)
            self.load_state_dict(weight_pretrained)
            print('Finished!')
        else:
            print('Sorry only .pth and .pkl files supported.')
# This function is derived from torchvision VGG make_layers()
# https://github.com/pytorch/vision/blob/master/torchvision/models/vgg.py
def vgg(cfg, i, batch_norm=False):
    """Build the VGG base network as a flat list of layers.

    cfg entries are output channel counts, with 'M' for max-pooling and
    'C' for max-pooling with ceil_mode.  *i* is the number of input
    channels.  The stock VGG tail (fc6/fc7) is replaced by a dilated 3x3
    conv and a 1x1 conv, as in the SSD paper.
    """
    layers = []
    channels = i
    for item in cfg:
        if item == 'M':
            layers.append(nn.MaxPool2d(kernel_size=2, stride=2))
        elif item == 'C':
            layers.append(nn.MaxPool2d(kernel_size=2, stride=2, ceil_mode=True))
        else:
            layers.append(nn.Conv2d(channels, item, kernel_size=3, padding=1))
            if batch_norm:
                layers.append(nn.BatchNorm2d(item))
            layers.append(nn.ReLU(inplace=True))
            channels = item
    # SSD-style tail: keep spatial size with stride-1 pooling, then replace
    # VGG's fully connected layers with conv6 (dilated) and conv7 (1x1).
    layers.extend([
        nn.MaxPool2d(kernel_size=3, stride=1, padding=1),
        nn.Conv2d(512, 1024, kernel_size=3, padding=6, dilation=6),
        nn.ReLU(inplace=True),
        nn.Conv2d(1024, 1024, kernel_size=1),
        nn.ReLU(inplace=True),
    ])
    return layers
def add_extras(cfg, i, batch_norm=False):
    """Build the extra feature-scaling layers appended after the VGG base.

    An 'S' entry in cfg marks a stride-2 conv whose output channels come
    from the following cfg entry (which is then consumed as a channel
    setter, not a layer).  Kernel size alternates 1x1 / 3x3 for each conv
    added.  Note: batch_norm is accepted for interface symmetry but unused.
    """
    layers = []
    channels = i
    use_3x3 = False  # alternates 1x1 and 3x3 kernels for successive convs
    for idx, item in enumerate(cfg):
        if channels != 'S':
            kernel = (1, 3)[use_3x3]
            if item == 'S':
                # Stride-2 downsampling conv; channels taken from next entry.
                layers.append(nn.Conv2d(channels, cfg[idx + 1],
                                        kernel_size=kernel, stride=2, padding=1))
            else:
                layers.append(nn.Conv2d(channels, item, kernel_size=kernel))
            use_3x3 = not use_3x3
        channels = item
    return layers
def multibox(vgg, extra_layers, cfg, num_classes):
    """Attach localization and confidence heads to the source feature maps.

    cfg[k] is the number of prior boxes per location of source map k; each
    head predicts 4 offsets (loc) or num_classes scores (conf) per box.
    Sources are conv4_3 (vgg[24]), fc7 (vgg[-2]) and every second extra layer.
    """
    loc_layers = []
    conf_layers = []

    def _add_head(in_channels, num_boxes):
        # One 3x3 loc/conf head pair per source feature map.
        loc_layers.append(nn.Conv2d(in_channels, num_boxes * 4,
                                    kernel_size=3, padding=1))
        conf_layers.append(nn.Conv2d(in_channels, num_boxes * num_classes,
                                     kernel_size=3, padding=1))

    for k, src_index in enumerate([24, -2]):
        _add_head(vgg[src_index].out_channels, cfg[k])
    for k, layer in enumerate(extra_layers[1::2], 2):
        _add_head(layer.out_channels, cfg[k])
    return vgg, extra_layers, (loc_layers, conf_layers)
# Per-input-size network configurations, keyed by input size ('300' = SSD300).
# Only the 300x300 variant is filled in; '512' placeholders are empty.
base = {
    '300': [64, 64, 'M', 128, 128, 'M', 256, 256, 256, 'C', 512, 512, 512, 'M',
            512, 512, 512],
    '512': [],
}
# Extra feature-scaling layers appended after the base network (see add_extras).
extras = {
    '300': [256, 'S', 512, 128, 'S', 256, 128, 256, 128, 256],
    '512': [],
}
mbox = {
    '300': [4, 6, 6, 6, 4, 4],  # number of boxes per feature map location
    '512': [],
}
def build_ssd(phase, size=300, num_classes=21):
    """Construct an SSD model for the given phase ('train' or 'test').

    Returns None (after printing an error) for an unknown phase or an
    unsupported input size.
    """
    if phase != "test" and phase != "train":
        print("Error: Phase not recognized")
        return
    if size != 300:
        print("Error: Sorry only SSD300 is supported currently!")
        return
    # change the input channel from i=3 to 12
    # (presumably 4 CT phases x 3 channels stacked -- TODO confirm)
    return SSD(phase, *multibox(vgg(base[str(size)], i=12),
                                add_extras(extras[str(size)], 1024),
                                mbox[str(size)], num_classes), num_classes)
| 36.052632 | 96 | 0.560053 |
1e8fabb1ab203e367fd6092cdf4aa674d44950f4 | 762 | py | Python | test/nn/conv/test_pgat_conv.py | ecom-research/pytorch_geometric | bca73a6f0808cfcf7133548edfbb5628ad6e49fa | [
"MIT"
] | 1 | 2021-01-17T18:39:44.000Z | 2021-01-17T18:39:44.000Z | test/nn/conv/test_pgat_conv.py | ecom-research/pytorch_geometric | bca73a6f0808cfcf7133548edfbb5628ad6e49fa | [
"MIT"
] | null | null | null | test/nn/conv/test_pgat_conv.py | ecom-research/pytorch_geometric | bca73a6f0808cfcf7133548edfbb5628ad6e49fa | [
"MIT"
] | 2 | 2019-11-07T14:20:29.000Z | 2020-05-01T19:03:35.000Z | import torch
from torch_geometric.nn import PGATConv
def test_pgat_conv():
    """Smoke test for PGATConv: when concatenating heads the output width is
    node_heads * out_channels, otherwise it is out_channels."""
    in_channels, out_channels = (16, 32)
    # Three-row edge index: source, target, and a third node per edge
    # (presumably the intermediate path node -- verify against PGATConv).
    edge_index = torch.tensor(
        [
            [0, 0, 0, 1, 2, 3],
            [1, 2, 3, 0, 0, 0],
            [3, 1, 2, 3, 3, 1]
        ]
    )
    num_nodes = edge_index.max().item() + 1
    x = torch.randn((num_nodes, in_channels))
    conv = PGATConv(in_channels, out_channels, path_heads=2, node_heads=2)
    assert conv.__repr__() == 'PGATConv(16, 32, path_heads=2)'
    assert conv(x, edge_index).size() == (num_nodes, 2 * out_channels)
    conv = PGATConv(in_channels, out_channels, path_heads=2, concat=False)
    assert conv(x, edge_index).size() == (num_nodes, out_channels)
if __name__ == '__main__':
test_pgat_conv() | 29.307692 | 74 | 0.612861 |
38885d2c2c15b204bf3158d0b648389af7b935bc | 680 | py | Python | 2015/17/eggnog.py | lvaughn/advent | ff3f727b8db1fd9b2a04aad5dcda9a6c8d1c271e | [
"CC0-1.0"
] | null | null | null | 2015/17/eggnog.py | lvaughn/advent | ff3f727b8db1fd9b2a04aad5dcda9a6c8d1c271e | [
"CC0-1.0"
] | null | null | null | 2015/17/eggnog.py | lvaughn/advent | ff3f727b8db1fd9b2a04aad5dcda9a6c8d1c271e | [
"CC0-1.0"
] | null | null | null | #!/usr/bin/env python3
amount = 150
# Load up the containers
with open('input.txt', 'r') as f:
containers = [int(l) for l in f]
memo_cache = {}
def n_combos(remaining, starting_loc, items=None, cache=None):
    """Count the ways to fill exactly *remaining* liters using the
    containers at index *starting_loc* and beyond.

    Each container can be used at most once; containers of equal size
    count as distinct (AoC 2015 day 17 semantics).

    :param items: container sizes (defaults to the module-level `containers`)
    :param cache: memoization dict keyed by (remaining, starting_loc)
    """
    if cache is None:
        # Only reuse the shared module cache for the shared module containers;
        # a caller-supplied list gets its own cache to avoid stale entries.
        cache = memo_cache if items is None else {}
    if items is None:
        items = containers
    if remaining == 0:
        return 1
    assert remaining > 0
    if starting_loc == len(items):
        return 0
    key = (remaining, starting_loc)
    if key not in cache:
        combos = 0
        # Branch 1: use the container at starting_loc (if it fits).
        if items[starting_loc] <= remaining:
            combos += n_combos(remaining - items[starting_loc], starting_loc + 1, items, cache)
        # Branch 2: skip the container at starting_loc.
        combos += n_combos(remaining, starting_loc + 1, items, cache)
        cache[key] = combos
    return cache[key]
print(n_combos(amount, 0))
| 24.285714 | 86 | 0.635294 |
666dc7b108f7a1cd6cf7d8bc88d0e3e949446aaf | 242 | py | Python | back-end/test.py | PiperLiu/five-in-a-row-AI | 5bed666d483d89f9ceec348a29e6b2601dc8631f | [
"MIT"
] | null | null | null | back-end/test.py | PiperLiu/five-in-a-row-AI | 5bed666d483d89f9ceec348a29e6b2601dc8631f | [
"MIT"
] | null | null | null | back-end/test.py | PiperLiu/five-in-a-row-AI | 5bed666d483d89f9ceec348a29e6b2601dc8631f | [
"MIT"
] | null | null | null | import requests
proxies = {"http":None,"https":None}
response = requests.post(
'https://aichess.piperliu.xyz',
json={'player': 1, 'last_move': 20, 'states': {30: 1, 20: 2}},
# verify=False
proxies=proxies
)
print(response)
| 18.615385 | 66 | 0.632231 |
d5fd2a262158ded1437efd755a67eb79ffcb1da2 | 105 | py | Python | bis/apps/gepiandashboard/apps.py | AgustinMachiavello/business-incubation-system | 983e1308697771570891568f99d1b8ba74441d32 | [
"MIT"
] | 2 | 2021-03-03T16:16:42.000Z | 2021-03-08T22:43:10.000Z | bis/apps/gepiandashboard/apps.py | AgustinMachiavello/business-incubation-system | 983e1308697771570891568f99d1b8ba74441d32 | [
"MIT"
] | null | null | null | bis/apps/gepiandashboard/apps.py | AgustinMachiavello/business-incubation-system | 983e1308697771570891568f99d1b8ba74441d32 | [
"MIT"
] | null | null | null | from django.apps import AppConfig
class GepiandashboardConfig(AppConfig):
    """Django application configuration for the gepiandashboard app."""
    # Full Python path to the application.
    name = 'gepiandashboard'
| 17.5 | 39 | 0.790476 |
b02a6acb3707e641df0fdebebb13e6cbbcb7b5a0 | 7,953 | py | Python | tests/test_expression.py | buhanec/super-expressive-py | a19e8513492cf78ecd6f8ce741b3378e00c4b68a | [
"MIT"
] | null | null | null | tests/test_expression.py | buhanec/super-expressive-py | a19e8513492cf78ecd6f8ce741b3378e00c4b68a | [
"MIT"
] | null | null | null | tests/test_expression.py | buhanec/super-expressive-py | a19e8513492cf78ecd6f8ce741b3378e00c4b68a | [
"MIT"
] | null | null | null | """Test core expression functionality."""
import re
from typing import Optional
import unicodedata
from hypothesis import example, given
from hypothesis.strategies import integers, sampled_from
import pytest
from superexpressive import SuperExpressive
from tests.const import NAMED_UNICODE
def test_empty() -> None:
assert str(SuperExpressive()) == ''
@pytest.mark.parametrize('se, flags', [
(SuperExpressive().line_by_line, re.MULTILINE | re.UNICODE),
(SuperExpressive().case_insensitive, re.IGNORECASE | re.UNICODE),
(SuperExpressive().unicode, re.UNICODE),
(SuperExpressive().ascii, re.ASCII),
(SuperExpressive().single_line, re.DOTALL | re.UNICODE),
])
def test_flags(se: SuperExpressive, flags: int) -> None:
assert se.compile().flags == flags
@pytest.mark.parametrize('se, string', [
(SuperExpressive().any_char, '.'),
(SuperExpressive().whitespace_char, r'\s'),
(SuperExpressive().non_whitespace_char, r'\S'),
(SuperExpressive().digit, r'\d'),
(SuperExpressive().non_digit, r'\D'),
(SuperExpressive().word, r'\w'),
(SuperExpressive().non_word, r'\W'),
(SuperExpressive().word_boundary, r'\b'),
(SuperExpressive().non_word_boundary, r'\B'),
(SuperExpressive().newline, r'\n'),
(SuperExpressive().carriage_return, r'\r'),
(SuperExpressive().tab, r'\t'),
(SuperExpressive().null_byte, r'\x00'),
])
def test_escapes(se: SuperExpressive, string: str) -> None:
assert str(se) == string
def test_any_of_basic():
assert str(
SuperExpressive()
.any_of
.string('hello')
.digit
.word
.char('.')
.char('#')
.end()
) == r'(?:hello|\d|\w|[\.#])'
def test_any_of_range_fusion():
assert str(
SuperExpressive()
.any_of
.range('a', 'z')
.range('A', 'Z')
.range('0', '9')
.char('.')
.char('#')
.end()
) == r'[a-zA-Z0-9\.#]'
def test_any_of_range_fusion_with_other_choices():
assert str(
SuperExpressive()
.any_of
.range('a', 'z')
.range('A', 'Z')
.range('0', '9')
.char('.')
.char('#')
.string('XXX')
.end()
) == r'(?:XXX|[a-zA-Z0-9\.#])'
def test_capture():
assert str(
SuperExpressive()
.capture
.string('hello ')
.word
.char('!')
.end()
) == r'(hello \w!)'
def test_named_capture():
assert str(
SuperExpressive()
.named_capture('this_is_the_name')
.string('hello ')
.word
.char('!')
.end()
) == r'(?P<this_is_the_name>hello \w!)'
def test_named_capture_bad_name():
with pytest.raises(ValueError):
(SuperExpressive()
.named_capture('hello world')
.string('hello ')
.word
.char('!')
.end())
def test_named_capture_duplicate_name():
with pytest.raises(ValueError):
(SuperExpressive()
.named_capture('hello world')
.string('hello ')
.word
.char('!')
.end()
.named_capture('hello world')
.string('hello ')
.word
.char('!')
.end())
def test_named_backreference():
assert str(
SuperExpressive()
.named_capture('this_is_the_name')
.string('hello ')
.word
.char('!')
.end()
.named_backreference('this_is_the_name')
) == r'(?P<this_is_the_name>hello \w!)\g<this_is_the_name>'
def test_missing_named_backreference():
with pytest.raises(ValueError):
SuperExpressive().named_backreference('not_here')
def test_backreference():
assert str(
SuperExpressive()
.capture
.string('hello ')
.word
.char('!')
.end()
.backreference(1)
) == r'(hello \w!)\1'
def test_backreference_missing():
with pytest.raises(ValueError):
SuperExpressive().backreference(1)
def test_group():
assert str(
SuperExpressive()
.group
.string('hello ')
.word
.char('!')
.end()
) == r'(?:hello \w!)'
def test_end_no_stack():
with pytest.raises(RuntimeError):
SuperExpressive().end()
def test_assert_ahead():
assert str(
SuperExpressive()
.assert_ahead
.range('a', 'f')
.end()
.range('a', 'z')
) == r'(?=[a-f])[a-z]'
def test_assert_not_ahead():
assert str(
SuperExpressive()
.assert_not_ahead
.range('a', 'f')
.end()
.range('0', '9')
) == r'(?![a-f])[0-9]'
@pytest.mark.parametrize('se, expected', [
(SuperExpressive().optional.word, r'\w?'),
(SuperExpressive().zero_or_more.word, r'\w*'),
(SuperExpressive().zero_or_more_lazy.word, r'\w*?'),
(SuperExpressive().one_or_more.word, r'\w+'),
(SuperExpressive().one_or_more_lazy.word, r'\w+?'),
(SuperExpressive().exactly(4).word, r'\w{4}'),
(SuperExpressive().at_least(4).word, r'\w{4,}'),
(SuperExpressive().between(4, 7).word, r'\w{4,7}'),
(SuperExpressive().between_lazy(4, 7).word, r'\w{4,7}?'),
])
def test_quantifier(se: SuperExpressive, expected) -> None:
assert str(se) == expected
@pytest.mark.parametrize('se, expected', [
(SuperExpressive().start_of_input, r'^'),
(SuperExpressive().end_of_input, r'$'),
(SuperExpressive().any_of_chars('aeiou.-'), r'[aeiou\.\-]'),
(SuperExpressive().anything_but_chars('aeiou.-'), r'[^aeiou\.\-]'),
(SuperExpressive().anything_but_range('0', '9'), r'[^0-9]'),
(SuperExpressive().string('hello'), r'hello'),
(SuperExpressive().string('h'), r'h'),
(SuperExpressive().range('a', 'z'), r'[a-z]'),
])
def test_simple_matchers(se: SuperExpressive, expected) -> None:
assert str(se) == expected
def test_char_more_than_one_char() -> None:
with pytest.raises(ValueError):
SuperExpressive().char('hello')
# Python Specific
@pytest.mark.parametrize('se, string', [
(SuperExpressive().ascii_bell, r'\a'),
(SuperExpressive().ascii_formfeed, r'\f'),
(SuperExpressive().ascii_vertical_tab, r'\v'),
(SuperExpressive().backslash, r'\\'),
(SuperExpressive().start_of_string, r'\A'),
(SuperExpressive().end_of_string, r'\Z'),
])
def test_extra_escapes(se: SuperExpressive, string: str) -> None:
assert str(se) == string
def test_ascii_backspace() -> None:
with pytest.raises(RuntimeError):
_ = SuperExpressive().ascii_backspace
assert str(SuperExpressive().any_of.ascii_backspace.end()) == r'(?:\b)'
def test_hex_char() -> None:
for n in range(0x00, 0xFF + 1):
code = hex(n)[2:].rjust(2, '0')
assert str(SuperExpressive().hex_char(code)) == f'\\x{code}'
@given(integers(0x00, 0xFF))
@example(0x00)
@example(0xFF)
def test_single_unicode_char(n: int) -> None:
code = hex(n)[2:].rjust(4, '0')
assert str(SuperExpressive().unicode_char(code)) == f'\\u{code}'
@given(integers(0x00, 0x00110000))
@example(0x00)
@example(0x00110000 - 1)
def test_double_unicode_char(n: int) -> None:
code = hex(n)[2:].rjust(8, '0')
assert str(SuperExpressive().unicode_char(code)) == f'\\U{code}'
def _unicode_name(n: int) -> Optional[str]:
try:
return unicodedata.name(chr(n))
except ValueError:
return None
@given(sampled_from(sorted(NAMED_UNICODE)))
def test_named_unicode_char(character: str) -> None:
    """Named characters render as \\N{...} escapes.

    Renamed: this test used to redefine test_double_unicode_char (defined
    just above), so the \\U (8-hex-digit) test was silently shadowed and
    never collected by pytest.
    """
    name = NAMED_UNICODE[character]
    assert str(SuperExpressive().unicode_char(name)) == f'\\N{{{name}}}'
| 27.143345 | 75 | 0.562681 |
840e18c1feaf615a9da8bc0c05db94aef62fcfa8 | 867 | py | Python | 03_CNNs/03_Deep_NNs/quiz_03.py | ivanbgd/Udacity-Deep-Learning-ND101 | 05f8fe15654f51e4d770af39ee0195f22a84e65c | [
"MIT"
] | 1 | 2017-12-06T23:23:26.000Z | 2017-12-06T23:23:26.000Z | 03_CNNs/03_Deep_NNs/quiz_03.py | ivanbgd/Udacity-Deep-Learning-ND101 | 05f8fe15654f51e4d770af39ee0195f22a84e65c | [
"MIT"
] | null | null | null | 03_CNNs/03_Deep_NNs/quiz_03.py | ivanbgd/Udacity-Deep-Learning-ND101 | 05f8fe15654f51e4d770af39ee0195f22a84e65c | [
"MIT"
] | 2 | 2019-09-02T05:27:35.000Z | 2020-03-28T18:27:07.000Z | # Solution is available in the other "solution.py" tab
import tensorflow as tf
# Two-layer fully connected network (ReLU hidden layer) evaluated on a
# fixed batch of three 4-feature examples.
output = None
hidden_layer_weights = [
    [0.1, 0.2, 0.4],
    [0.4, 0.6, 0.6],
    [0.5, 0.9, 0.1],
    [0.8, 0.2, 0.8]]
out_weights = [
    [0.1, 0.6],
    [0.2, 0.1],
    [0.7, 0.9]]

# Trainable parameters: one weight matrix and one (zero-initialised) bias
# vector per layer.
weights = [
    tf.Variable(hidden_layer_weights),
    tf.Variable(out_weights)]
biases = [
    tf.Variable(tf.zeros(3)),
    tf.Variable(tf.zeros(2))]

# Fixed input batch.
features = tf.Variable(
    [[1.0, 2.0, 3.0, 4.0],
     [-1.0, -2.0, -3.0, -4.0],
     [11.0, 12.0, 13.0, 14.0]])

# Model: output = relu(features @ W0 + b0) @ W1 + b1
hidden_layer = tf.nn.relu(tf.add(tf.matmul(features, weights[0]), biases[0]))
output = tf.add(tf.matmul(hidden_layer, weights[1]), biases[1])

# Evaluate the graph and print the network output.
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    print(sess.run(output))
| 24.083333 | 98 | 0.61707 |
21ac7d8b5c408f797f0ade7951d7982a051766b7 | 816 | py | Python | echo_lv/take_contour.py | zvvzuzin/us_left_ventricle | 0cb98bcab0e1405ea7aec8f20a15f8103494a6d2 | [
"MIT"
] | null | null | null | echo_lv/take_contour.py | zvvzuzin/us_left_ventricle | 0cb98bcab0e1405ea7aec8f20a15f8103494a6d2 | [
"MIT"
] | null | null | null | echo_lv/take_contour.py | zvvzuzin/us_left_ventricle | 0cb98bcab0e1405ea7aec8f20a15f8103494a6d2 | [
"MIT"
] | null | null | null | import numpy as np
import cv2
import lib_contour_LV as lib
#Чтение изображения с контуром
video_with_contour = cv2.VideoCapture('N120131007162257079.avi')
ret, frame = video_with_contour.read()
video_with_contour.release()
image_contour = lib.get_contour_image(frame)
cv2.imshow('image',image_contour)
#Чтение изображения УЗИ-видеопетли
video = cv2.VideoCapture('N120131007093530930.avi')
ret, frame = video.read()
video.release()
cv2.imshow('segment',lib.get_segment_image(frame))
# Выделим маску
#res = cv2.bitwise_and(frame,frame, mask = mask)
#frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
#frame = cv2.medianBlur(frame,5)
#ret,thr = cv2.threshold(frame, 1, 255,cv2.THRESH_BINARY)
#cv2.imshow('frame',thr)
#cv2.imshow('mask',mask)
#cv2.imshow('res',res)
#cv2.imwrite('1.png', frame)
k = cv2.waitKey(0)
| 28.137931 | 68 | 0.764706 |
548fd07d9c8e9572a03c762de72f8db415fd11e4 | 1,666 | py | Python | setup.py | niwinz/django-sites | db0ede952273d587aa3509fe95c75e6c6827b650 | [
"BSD-3-Clause"
] | 15 | 2015-10-20T20:20:07.000Z | 2021-12-07T17:22:51.000Z | setup.py | CuriousLearner/django-sites | db0ede952273d587aa3509fe95c75e6c6827b650 | [
"BSD-3-Clause"
] | 12 | 2015-12-03T07:18:11.000Z | 2021-12-07T11:34:45.000Z | setup.py | niwinz/django-sites | db0ede952273d587aa3509fe95c75e6c6827b650 | [
"BSD-3-Clause"
] | 7 | 2015-12-02T14:55:30.000Z | 2021-07-15T14:24:37.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
from setuptools import setup
# Use the packaged README as the long description shown on PyPI.
with open(os.path.join(os.path.dirname(__file__), "README.rst")) as readme:
    README = readme.read()

INSTALL_REQUIRES = [
    "django >=1.4",
]

# importlib is only part of the stdlib from Python 2.7 on; older
# interpreters need the PyPI backport.
if sys.version_info < (2, 7):
    INSTALL_REQUIRES.append("importlib")

setup(
    name="django-sites",
    version="0.11",
    description="Alternative implementation of django sites framework",
    long_description=README,
    keywords="django, sites",
    author="Andrey Antukh",
    author_email="niwi@niwi.nz",
    url="https://github.com/niwinz/django-sites",
    license="BSD",
    packages=[
        "django_sites",
        "django_sites.templatetags",
    ],
    install_requires=INSTALL_REQUIRES,
    classifiers=[
        "Development Status :: 4 - Beta",
        # Fix: "Framework :: Django" previously appeared twice in this
        # list; trove classifiers should be unique.
        "Framework :: Django",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: BSD License",
        "Operating System :: OS Independent",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.5",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Framework :: Django :: 1.9",
        "Framework :: Django :: 1.10",
        "Framework :: Django :: 2.0",
        "Framework :: Django :: 2.2",
        "Framework :: Django :: 3.0",
        "Framework :: Django :: 3.1",
        "Framework :: Django :: 3.2",
        "Topic :: Internet :: WWW/HTTP",
    ],
)
| 28.724138 | 75 | 0.584034 |
89d17660e9d8dfd8c1c8dc166aa1d07cff09df11 | 1,878 | py | Python | sciml_bench/benchmarks/em_denoise/data_loader.py | stfcsciml/sciml-benchmarks | 636aefb0089f79f57be3025b3acb902571117e6f | [
"MIT"
] | null | null | null | sciml_bench/benchmarks/em_denoise/data_loader.py | stfcsciml/sciml-benchmarks | 636aefb0089f79f57be3025b3acb902571117e6f | [
"MIT"
] | 6 | 2021-05-21T16:45:19.000Z | 2022-02-10T02:08:40.000Z | sciml_bench/benchmarks/em_denoise/data_loader.py | stfc-sciml/sciml-benchmarks-prerelease | 636aefb0089f79f57be3025b3acb902571117e6f | [
"MIT"
] | null | null | null | import h5py
import tensorflow as tf
import numpy as np
from pathlib import Path
import horovod.tensorflow as hvd
from sciml_bench.core.data_loader import DataLoader
from sciml_bench.benchmarks.em_denoise.constants import IMG_SIZE
class EMGrapheneDataset(DataLoader):
    """EM graphene denoising dataset.

    Streams paired (noisy, clean) grayscale images from two HDF5 files in
    ``data_dir`` and exposes them as a sharded, shuffled, batched
    ``tf.data.Dataset`` for Horovod workers.
    """

    def __init__(self, data_dir, seed=None, batch_size=10, **kwargs):
        """
        :param data_dir: directory holding ``graphene_img_noise.h5`` and
            ``graphene_img_clean.h5``
        :param seed: stored RNG seed (NOTE(review): not currently applied to
            the shuffle in ``to_dataset`` — confirm whether it should be)
        :param batch_size: number of image pairs per batch
        """
        self._seed = seed
        self._data_dir = Path(data_dir)
        # Fix: honour the caller-supplied batch size. This was previously
        # hard-coded to 10, silently ignoring the ``batch_size`` argument.
        self._batch_size = batch_size

    def _load_data(self, path):
        """Yield images one at a time from the HDF5 file at ``path``.

        ``path`` arrives as ``bytes`` because tf.data passes generator
        ``args`` through as tensors, so decode it back to ``str`` first.
        """
        path = path.decode()
        with h5py.File(path, "r") as hdf5_file:
            for i in range(len(hdf5_file['images'])):
                yield np.array(hdf5_file["images"][i])

    @property
    def input_shape(self):
        # Single-channel (grayscale) square image.
        return (IMG_SIZE, IMG_SIZE, 1)

    @property
    def output_shape(self):
        # Denoised output has the same geometry as the input.
        return (IMG_SIZE, IMG_SIZE, 1)

    def to_dataset(self):
        """Build this worker's (noisy, clean) training dataset."""
        types = tf.float32
        shapes = tf.TensorShape([IMG_SIZE, IMG_SIZE, 1])

        noise_path = str(self._data_dir / 'graphene_img_noise.h5')
        noise_dataset = tf.data.Dataset.from_generator(self._load_data,
                                                       output_types=types,
                                                       output_shapes=shapes,
                                                       args=(noise_path, ))

        clean_path = str(self._data_dir / 'graphene_img_clean.h5')
        clean_dataset = tf.data.Dataset.from_generator(self._load_data,
                                                       output_types=types,
                                                       output_shapes=shapes,
                                                       args=(clean_path, ))

        # Pair inputs with targets, then give each Horovod worker its own
        # shard before shuffling and batching.
        dataset = tf.data.Dataset.zip((noise_dataset, clean_dataset))
        dataset = dataset.shard(hvd.size(), hvd.rank())
        dataset = dataset.shuffle(1000)
        dataset = dataset.batch(self._batch_size)
        return dataset
| 34.777778 | 71 | 0.565495 |
29f83c08118b9b5236457f3eeedc60f0d11661e6 | 2,647 | py | Python | pypiper/stage.py | fhalbritter/pypiper | 67908f2ee5f51fa5fdddb67eb6d7891aefeeda6a | [
"BSD-2-Clause"
] | 24 | 2017-12-15T13:48:59.000Z | 2022-01-30T19:07:21.000Z | pypiper/stage.py | fhalbritter/pypiper | 67908f2ee5f51fa5fdddb67eb6d7891aefeeda6a | [
"BSD-2-Clause"
] | 117 | 2017-12-19T20:27:59.000Z | 2022-01-25T22:30:25.000Z | pypiper/stage.py | vreuter/pypiper | 67908f2ee5f51fa5fdddb67eb6d7891aefeeda6a | [
"BSD-2-Clause"
] | 7 | 2017-12-14T16:59:09.000Z | 2020-01-08T12:30:17.000Z | """ Conceptualize a pipeline processing phase/stage. """
import copy
from .utils import translate_stage_name
__author__ = "Vince Reuter"
__email__ = "vreuter@virginia.edu"
__all__ = ["Stage"]
class Stage(object):
    """
    Single stage/phase of a pipeline; a logical processing "unit". A stage is a
    collection of commands that is checkpointed.
    """

    def __init__(self, func, f_args=None, f_kwargs=None,
                 name=None, checkpoint=True):
        """
        Bind a callable (and optionally its arguments) as a pipeline stage.

        :param callable func: processing logic that defines the stage
        :param tuple f_args: positional arguments for func
        :param dict f_kwargs: keyword arguments for func
        :param str name: name for the phase/stage; defaults to func's name
        :param bool checkpoint: whether this stage participates in checkpointing
        :raise TypeError: if func is itself a Stage
        """
        if isinstance(func, Stage):
            raise TypeError("Cannot create Stage from Stage")
        super(Stage, self).__init__()
        self.f = func
        self.f_args = f_args if f_args else tuple()
        self.f_kwargs = f_kwargs if f_kwargs else dict()
        self.name = name if name else func.__name__
        self.checkpoint = checkpoint

    @property
    def checkpoint_name(self):
        """
        Determine the checkpoint name for this Stage.

        :return str | NoneType: checkpoint name for this stage; null if this
            Stage is designated as a non-checkpoint.
        """
        if not self.checkpoint:
            return None
        return translate_stage_name(self.name)

    def run(self, *args, **kwargs):
        """ Alternate form for direct call; execute stage. """
        self(*args, **kwargs)

    def __call__(self, *args, **update_kwargs):
        """ Execute the stage, allowing updates to args/kwargs. """
        # Deep-copy the stored kwargs so per-call overrides never mutate
        # this Stage's own configuration.
        kwargs = copy.deepcopy(self.f_kwargs)
        kwargs.update(update_kwargs)
        call_args = args if args else self.f_args
        self.f(*call_args, **kwargs)

    def __eq__(self, other):
        # Stages are equal when their functions share a name and all other
        # attributes (args, kwargs, name, checkpoint flag) match.
        if not isinstance(other, Stage):
            return False
        if self.f.__name__ != other.f.__name__:
            return False
        mine = {k: v for k, v in self.__dict__.items() if k != "f"}
        theirs = {k: v for k, v in other.__dict__.items() if k != "f"}
        return mine == theirs

    def __ne__(self, other):
        return not self.__eq__(other)

    def __repr__(self):
        return "%s '%s': f=%s, args=%s, kwargs=%s, checkpoint=%s" % (
            self.__class__.__name__, self.name, self.f,
            self.f_args, self.f_kwargs, self.checkpoint)

    def __str__(self):
        return "%s: '%s'" % (self.__class__.__name__, self.name)
| 30.425287 | 79 | 0.608991 |
097867d51213605f154a0d1cfd6daaafa9386604 | 2,651 | py | Python | Bot/cogs/members.py | AcidSkull/DiscordPythonBot | 91940b3a4fc2a043407bc1a8ad1fcd70faa6ef30 | [
"MIT"
] | null | null | null | Bot/cogs/members.py | AcidSkull/DiscordPythonBot | 91940b3a4fc2a043407bc1a8ad1fcd70faa6ef30 | [
"MIT"
] | null | null | null | Bot/cogs/members.py | AcidSkull/DiscordPythonBot | 91940b3a4fc2a043407bc1a8ad1fcd70faa6ef30 | [
"MIT"
] | null | null | null | from discord.ext import commands
import discord
class Members(commands.Cog):
    """Member-management cog: welcome messages plus basic moderation commands
    (kick/ban/unban and role add/remove)."""

    def __init__(self, client):
        self.client = client

    @commands.Cog.listener()
    async def on_ready(self):
        # Green ANSI banner in the bot console, then set the playing status.
        print('\033[92m' + 'Bot is ready!' + '\033[0m')
        await self.client.change_presence(activity=discord.Game('Minecraft 2'))

    @commands.Cog.listener()
    async def on_member_join(self, member):
        """Post a welcome embed in the guild's system channel when someone joins."""
        channel = member.guild.system_channel
        if channel is not None:
            embedVar = discord.Embed(
                title=f'Welcome {member}!',
                # Typos fixed: "Remeber"/"respectfull".
                description=f'It is nice to greet you to {member.guild}. Remember to be respectful to one another or we send you to the window of life.',
                color=0x0efbe0)
            embedVar.set_thumbnail(url=member.avatar_url)
            # Fix: send the welcome to the guild's system channel. The old
            # code DM'd the member, which made the channel None-check useless.
            await channel.send(embed=embedVar)

    @commands.command()
    async def ping(self, context):
        await context.send('Pong!')

    @commands.command()
    @commands.has_permissions(kick_members=True)
    async def kick(self, context, member: discord.Member, *, reason='unknown reason'):
        """Kick a member; the default reason no longer has a stray leading space."""
        await member.kick(reason=reason)
        await context.send(f'User {member} has been kicked for {reason}.')

    @commands.command()
    @commands.has_permissions(ban_members=True)
    async def ban(self, context, member: discord.Member, *, reason='unknown reason'):
        """Ban a member. (Removed a leftover debug print of the reason.)"""
        await member.ban(reason=reason)
        await context.send(f'User {member} has been banned for {reason}.')

    @commands.command()
    @commands.has_permissions(administrator=True)
    async def unban(self, context, *, member):
        """Unban a user given as 'name#discriminator'."""
        banned_users = await context.guild.bans()
        member_name, member_discriminator = member.split("#")
        for entry in banned_users:
            user = entry.user
            if (user.name, user.discriminator) == (member_name, member_discriminator):
                await context.guild.unban(user)
                await context.send(f'User {user.mention} has been unbanned.')

    @commands.command()
    @commands.has_permissions(manage_roles=True)
    async def addrole(self, context, member: discord.Member, *, role: discord.Role):
        await member.add_roles(role)
        await context.send(f'User {member} has been promoted to {role}.')

    @commands.command()
    @commands.has_permissions(manage_roles=True)
    async def delrole(self, context, member: discord.Member, *, role: discord.Role):
        await member.remove_roles(role)
        await context.send(f'User {member} has been demoted from {role}.')
def setup(client):
    # Extension entry point: discord.py calls this from load_extension to
    # register the Members cog on the bot.
    client.add_cog(Members(client))
816f7b6b7185cb13199d3edb926a3387c41075a8 | 5,671 | py | Python | src/ephemeris/sleep.py | cat-bro/ephemeris | 248ab13f846ec996a655bc02c5855291b45c7384 | [
"CC-BY-3.0"
] | 23 | 2016-06-15T18:34:44.000Z | 2021-09-06T00:30:38.000Z | src/ephemeris/sleep.py | cat-bro/ephemeris | 248ab13f846ec996a655bc02c5855291b45c7384 | [
"CC-BY-3.0"
] | 152 | 2016-06-15T18:58:00.000Z | 2022-03-23T08:45:59.000Z | src/ephemeris/sleep.py | cat-bro/ephemeris | 248ab13f846ec996a655bc02c5855291b45c7384 | [
"CC-BY-3.0"
] | 33 | 2016-06-16T13:08:29.000Z | 2021-02-12T23:27:33.000Z | #!/usr/bin/env python
'''Utility to do a blocking sleep until a Galaxy instance is responsive.
This is useful in docker images, in RUN steps, where one needs to wait
for a currently starting Galaxy to be alive, before API requests can be
made successfully.
The script functions by making repeated requests to
``http(s)://fqdn/api/version``, an API which requires no authentication
to access.'''
import sys
import time
from argparse import ArgumentParser
import requests
from galaxy.util import unicodify
from .common_parser import get_common_args
# Seconds to sleep between poll attempts.
DEFAULT_SLEEP_WAIT = 1

# Progress/error message templates; the %02d slot is the attempt counter.
MESSAGE_KEY_NOT_YET_VALID = "[%02d] Provided key not (yet) valid... %s\n"
MESSAGE_INVALID_JSON = "[%02d] No valid json returned... %s\n"
MESSAGE_FETCHING_USER = "[%02d] Connection error fetching user details, exiting with error code. %s\n"
MESSAGE_KEY_NOT_YET_ADMIN = "[%02d] Provided key not (yet) admin... %s\n"
MESSAGE_GALAXY_NOT_YET_UP = "[%02d] Galaxy not up yet... %s\n"
MESSAGE_TIMEOUT = "Failed to contact Galaxy within timeout (%s), exiting with error code.\n"
def _parser():
    """Build the argument parser for the Galaxy-wait utility."""
    common = get_common_args(login_required=False)
    parser = ArgumentParser(
        parents=[common],
        usage="usage: %(prog)s <options>",
        description="Script to sleep and wait for Galaxy to be alive.")
    parser.add_argument(
        "--timeout", type=int, default=0,
        help="Galaxy startup timeout in seconds. The default value of 0 waits forever")
    parser.add_argument(
        "-a", "--api_key", dest="api_key",
        help="Sleep until key becomes available.")
    parser.add_argument(
        "--ensure_admin", action="store_true", default=False)
    return parser
def _parse_cli_options():
    """
    Parse command line options, returning `parse_args` from `ArgumentParser`.
    """
    return _parser().parse_args()
class SleepCondition(object):
    """Mutable flag that lets another thread cancel the polling loop."""

    def __init__(self):
        # True while the wait loop should keep polling.
        self.sleep = True

    def cancel(self):
        """Signal the polling loop to stop."""
        self.sleep = False
def galaxy_wait(galaxy_url, verbose=False, timeout=0, sleep_condition=None, api_key=None, ensure_admin=False):
    """Pass user_key to ensure it works before returning.

    Polls ``<galaxy_url>/api/version`` once per DEFAULT_SLEEP_WAIT seconds
    until Galaxy answers with valid JSON. When ``api_key`` is given, the key
    is appended to the URL so the poll also waits for the key to become
    valid; with ``ensure_admin`` it additionally waits until the key maps to
    an admin user via ``/api/users/current``.

    :param galaxy_url: base URL of the Galaxy instance
    :param verbose: write progress messages to stdout
    :param timeout: give up after this many attempts; 0 waits forever
    :param sleep_condition: optional SleepCondition another thread can cancel
    :param api_key: API key to validate while waiting (required when
        ensure_admin is set)
    :param ensure_admin: also require the key to belong to an admin user
    :return: True when Galaxy (and, if requested, the admin key) is ready;
        False on timeout or on a failed user lookup
    """
    if verbose:
        sys.stdout.write("calling galaxy_wait with timeout=%s ensure_admin=%s\n\n\n" % (timeout, ensure_admin))
        sys.stdout.flush()

    version_url = galaxy_url + "/api/version"
    if api_key:
        # adding the key to the URL will ensure Galaxy returns invalid responses until
        # the key is available.
        version_url = "%s?key=%s" % (version_url, api_key)
        current_user_url = "%s/api/users/current?key=%s" % (galaxy_url, api_key)
    else:
        # An admin check is impossible without a key to test.
        assert not ensure_admin

    if sleep_condition is None:
        sleep_condition = SleepCondition()

    count = 0  # poll attempts so far; drives the timeout check below
    version_obtained = False  # version endpoint has answered with valid JSON
    while sleep_condition.sleep:
        try:
            if not version_obtained:
                result = requests.get(version_url)
                if result.status_code == 403:
                    # Galaxy is up, but the supplied key is not accepted yet.
                    if verbose:
                        sys.stdout.write(MESSAGE_KEY_NOT_YET_VALID % (count, result.__str__()))
                        sys.stdout.flush()
                else:
                    try:
                        result = result.json()
                        if verbose:
                            sys.stdout.write("Galaxy Version: %s\n" % result['version_major'])
                            sys.stdout.flush()
                        version_obtained = True
                    except ValueError:
                        # Response was not JSON (e.g. a proxy page while booting).
                        if verbose:
                            sys.stdout.write(MESSAGE_INVALID_JSON % (count, result.__str__()))
                            sys.stdout.flush()
            if version_obtained:
                if ensure_admin:
                    result = requests.get(current_user_url)
                    if result.status_code != 200:
                        # Unlike the version probe, a failed user lookup is
                        # treated as fatal rather than retried.
                        if verbose:
                            sys.stdout.write(MESSAGE_FETCHING_USER % (count, result.__str__()))
                            sys.stdout.flush()
                        return False

                    result = result.json()
                    is_admin = result['is_admin']
                    if is_admin:
                        if verbose:
                            sys.stdout.write("Verified supplied key an admin key.\n")
                            sys.stdout.flush()
                        break
                    else:
                        # Key works but is not (yet) an admin key; keep polling.
                        if verbose:
                            sys.stdout.write(MESSAGE_KEY_NOT_YET_ADMIN % (count, result.__str__()))
                            sys.stdout.flush()
                else:
                    break
        except requests.exceptions.ConnectionError as e:
            # Galaxy isn't listening yet; log a truncated error and retry.
            if verbose:
                sys.stdout.write(MESSAGE_GALAXY_NOT_YET_UP % (count, unicodify(e)[:100]))
                sys.stdout.flush()
        count += 1

        # If we cannot talk to galaxy and are over the timeout
        if timeout != 0 and count > timeout:
            sys.stderr.write(MESSAGE_TIMEOUT % timeout)
            return False

        time.sleep(DEFAULT_SLEEP_WAIT)

    return True
def main():
    """Command-line entry point: block until Galaxy responds, then exit.

    Exits with status 0 when Galaxy came up (and the key checks passed),
    1 otherwise.
    """
    options = _parse_cli_options()
    alive = galaxy_wait(
        galaxy_url=options.galaxy,
        verbose=options.verbose,
        timeout=options.timeout,
        api_key=options.api_key,
        ensure_admin=options.ensure_admin,
    )
    sys.exit(0 if alive else 1)


if __name__ == "__main__":
    main()
35de0c48d003f33bf03969d56d7d469491243f3f | 81,945 | py | Python | src/azure-cli/azure/cli/command_modules/storage/_help.py | toki95/azure-cli | a221f49b9af6cf889cde20bbe4b42956900acd37 | [
"MIT"
] | 1 | 2021-02-02T13:28:03.000Z | 2021-02-02T13:28:03.000Z | src/azure-cli/azure/cli/command_modules/storage/_help.py | toki95/azure-cli | a221f49b9af6cf889cde20bbe4b42956900acd37 | [
"MIT"
] | null | null | null | src/azure-cli/azure/cli/command_modules/storage/_help.py | toki95/azure-cli | a221f49b9af6cf889cde20bbe4b42956900acd37 | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from knack.help_files import helps # pylint: disable=unused-import
# pylint: disable=line-too-long, too-many-lines
helps['storage'] = """
type: group
short-summary: Manage Azure Cloud Storage resources.
"""
helps['storage account'] = """
type: group
short-summary: Manage storage accounts.
"""
helps['storage account blob-service-properties'] = """
type: group
short-summary: Manage the properties of a storage account's blob service.
"""
helps['storage account blob-service-properties show'] = """
type: command
short-summary: Show the properties of a storage account's blob service.
long-summary: >
Show the properties of a storage account's blob service, including
properties for Storage Analytics and CORS (Cross-Origin Resource
Sharing) rules.
examples:
- name: Show the properties of the storage account 'MyStorageAccount' in resource group 'MyResourceGroup'.
text: az storage account blob-service-properties show -n MyStorageAccount -g MyResourceGroup
"""
helps['storage account blob-service-properties update'] = """
type: command
short-summary: Update the properties of a storage account's blob service.
long-summary: >
Update the properties of a storage account's blob service, including
properties for Storage Analytics and CORS (Cross-Origin Resource
Sharing) rules.
parameters:
- name: --enable-change-feed
short-summary: 'Indicate whether change feed event logging is enabled. If it is true, you enable the storage account to begin capturing changes. The default value is true. You can see more details in https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blob-change-feed?tabs=azure-portal#register-by-using-azure-cli'
- name: --enable-delete-retention
short-summary: 'Indicate whether delete retention policy is enabled for the blob service.'
- name: --delete-retention-days
short-summary: 'Indicate the number of days that the deleted blob should be retained. The value must be in range [1,365]. It must be provided when `--enable-delete-retention` is true.'
examples:
- name: Enable the change feed for the storage account 'MyStorageAccount' in resource group 'MyResourceGroup'.
text: az storage account blob-service-properties update --enable-change-feed true -n MyStorageAccount -g MyResourceGroup
- name: Enable delete retention policy and set delete retention days to 100 for the storage account 'MyStorageAccount' in resource group 'MyResourceGroup'.
text: az storage account blob-service-properties update --enable-delete-retention true --delete-retention-days 100 -n MyStorageAccount -g MyResourceGroup
"""
helps['storage account create'] = """
type: command
short-summary: Create a storage account.
long-summary: >
The SKU of the storage account defaults to 'Standard_RAGRS'.
examples:
- name: Create a storage account 'mystorageaccount' in resource group 'MyResourceGroup' in the West US region with locally redundant storage.
text: az storage account create -n mystorageaccount -g MyResourceGroup -l westus --sku Standard_LRS
unsupported-profiles: 2017-03-09-profile
- name: Create a storage account 'mystorageaccount' in resource group 'MyResourceGroup' in the West US region with locally redundant storage.
text: az storage account create -n mystorageaccount -g MyResourceGroup -l westus --account-type Standard_LRS
supported-profiles: 2017-03-09-profile
- name: Create a storage account 'mystorageaccount' in resource group 'MyResourceGroup' in the eastus2euap region with account-scoped encryption key enabled for Table Service.
text: az storage account create -n mystorageaccount -g MyResourceGroup --kind StorageV2 -l eastus2euap -t Account
"""
helps['storage account delete'] = """
type: command
short-summary: Delete a storage account.
examples:
- name: Delete a storage account using a resource ID.
text: az storage account delete --ids /subscriptions/{SubID}/resourceGroups/{ResourceGroup}/providers/Microsoft.Storage/storageAccounts/{StorageAccount}
- name: Delete a storage account using an account name and resource group.
text: az storage account delete -n MyStorageAccount -g MyResourceGroup
"""
helps['storage account generate-sas'] = """
type: command
parameters:
- name: --services
short-summary: 'The storage services the SAS is applicable for. Allowed values: (b)lob (f)ile (q)ueue (t)able. Can be combined.'
- name: --resource-types
short-summary: 'The resource types the SAS is applicable for. Allowed values: (s)ervice (c)ontainer (o)bject. Can be combined.'
- name: --expiry
short-summary: Specifies the UTC datetime (Y-m-d'T'H:M'Z') at which the SAS becomes invalid.
- name: --start
short-summary: Specifies the UTC datetime (Y-m-d'T'H:M'Z') at which the SAS becomes valid. Defaults to the time of the request.
- name: --account-name
short-summary: 'Storage account name. Must be used in conjunction with either storage account key or a SAS token. Environment Variable: AZURE_STORAGE_ACCOUNT'
examples:
- name: Generate a sas token for the account that is valid for queue and table services on Linux.
text: |
end=`date -u -d "30 minutes" '+%Y-%m-%dT%H:%MZ'`
az storage account generate-sas --permissions cdlruwap --account-name MyStorageAccount --services qt --resource-types sco --expiry $end -o tsv
- name: Generate a sas token for the account that is valid for queue and table services on MacOS.
text: |
end=`date -v+30M '+%Y-%m-%dT%H:%MZ'`
az storage account generate-sas --permissions cdlruwap --account-name MyStorageAccount --services qt --resource-types sco --expiry $end -o tsv
- name: Generate a shared access signature for the account (autogenerated)
text: |
az storage account generate-sas --account-key 00000000 --account-name MyStorageAccount --expiry 2020-01-01 --https-only --permissions acuw --resource-types co --services bfqt
crafted: true
"""
helps['storage account keys'] = """
type: group
short-summary: Manage storage account keys.
"""
helps['storage account keys list'] = """
type: command
short-summary: List the access keys or Kerberos keys (if active directory enabled) for a storage account.
examples:
- name: List the access keys for a storage account.
text: az storage account keys list -g MyResourceGroup -n MyStorageAccount
- name: List the access keys and Kerberos keys (if active directory enabled) for a storage account.
text: az storage account keys list -g MyResourceGroup -n MyStorageAccount --expand-key-type kerb
"""
helps['storage account keys renew'] = """
type: command
short-summary: Regenerate one of the access keys or Kerberos keys (if active directory enabled) for a storage account.
long-summary: >
Kerberos key is generated per storage account for Azure Files identity based authentication either with
Azure Active Directory Domain Service (Azure AD DS) or Active Directory Domain Service (AD DS). It is used as the
password of the identity registered in the domain service that represents the storage account. Kerberos key does not
provide access permission to perform any control or data plane read or write operations against the storage account.
examples:
- name: Regenerate one of the access keys for a storage account.
text: az storage account keys renew -g MyResourceGroup -n MyStorageAccount --key primary
- name: Regenerate one of the Kerberos keys for a storage account.
text: az storage account keys renew -g MyResourceGroup -n MyStorageAccount --key secondary --key-type kerb
"""
helps['storage account list'] = """
type: command
short-summary: List storage accounts.
examples:
- name: List all storage accounts in a subscription.
text: az storage account list
- name: List all storage accounts in a resource group.
text: az storage account list -g MyResourceGroup
"""
helps['storage account management-policy'] = """
type: group
short-summary: Manage storage account management policies.
"""
helps['storage account management-policy create'] = """
type: command
short-summary: Creates the data policy rules associated with the specified storage account.
"""
helps['storage account management-policy update'] = """
type: command
short-summary: Updates the data policy rules associated with the specified storage account.
"""
helps['storage account network-rule'] = """
type: group
short-summary: Manage network rules.
"""
helps['storage account network-rule add'] = """
type: command
short-summary: Add a network rule.
long-summary: >
Rules can be created for an IPv4 address, address range (CIDR format), or a virtual network subnet.
examples:
- name: Create a rule to allow a specific address-range.
text: az storage account network-rule add -g myRg --account-name mystorageaccount --ip-address 23.45.1.0/24
- name: Create a rule to allow access for a subnet.
text: az storage account network-rule add -g myRg --account-name mystorageaccount --vnet myvnet --subnet mysubnet
"""
helps['storage account network-rule list'] = """
type: command
short-summary: List network rules.
examples:
- name: List network rules. (autogenerated)
text: |
az storage account network-rule list --account-name MyAccount --resource-group MyResourceGroup
crafted: true
"""
helps['storage account network-rule remove'] = """
type: command
short-summary: Remove a network rule.
examples:
- name: Remove a network rule. (autogenerated)
text: |
az storage account network-rule remove --account-name MyAccount --resource-group MyResourceGroup --subnet MySubnetID
crafted: true
- name: Remove a network rule. (autogenerated)
text: |
az storage account network-rule remove --account-name MyAccount --ip-address 23.45.1.0/24 --resource-group MyResourceGroup
crafted: true
"""
helps['storage account private-endpoint-connection'] = """
type: group
short-summary: Manage storage account private endpoint connection.
"""
helps['storage account private-endpoint-connection approve'] = """
type: command
short-summary: Approve a private endpoint connection request for storage account.
examples:
- name: Approve a private endpoint connection request for storage account by ID.
text: |
az storage account private-endpoint-connection approve --id "/subscriptions/0000-0000-0000-0000/resourceGroups/MyResourceGroup/providers/Microsoft.Storage/storageAccounts/mystorageaccount/privateEndpointConnections/mystorageaccount.b56b5a95-0588-4f8b-b348-15db61590a6c"
- name: Approve a private endpoint connection request for storage account by ID.
text: |
id = (az storage account show -n mystorageaccount --query "privateEndpointConnections[0].id")
az storage account private-endpoint-connection approve --id $id
- name: Approve a private endpoint connection request for storage account using account name and connection name.
text: |
az storage account private-endpoint-connection approve -g myRg --account-name mystorageaccount --name myconnection
- name: Approve a private endpoint connection request for storage account using account name and connection name.
text: |
name = (az storage account show -n mystorageaccount --query "privateEndpointConnections[0].name")
az storage account private-endpoint-connection approve -g myRg --account-name mystorageaccount --name $name
"""
helps['storage account private-endpoint-connection delete'] = """
type: command
short-summary: Delete a private endpoint connection request for storage account.
examples:
- name: Delete a private endpoint connection request for storage account by ID.
text: |
az storage account private-endpoint-connection delete --id "/subscriptions/0000-0000-0000-0000/resourceGroups/MyResourceGroup/providers/Microsoft.Storage/storageAccounts/mystorageaccount/privateEndpointConnections/mystorageaccount.b56b5a95-0588-4f8b-b348-15db61590a6c"
- name: Delete a private endpoint connection request for storage account by ID.
text: |
id = (az storage account show -n mystorageaccount --query "privateEndpointConnections[0].id")
az storage account private-endpoint-connection delete --id $id
- name: Delete a private endpoint connection request for storage account using account name and connection name.
text: |
az storage account private-endpoint-connection delete -g myRg --account-name mystorageaccount --name myconnection
- name: Delete a private endpoint connection request for storage account using account name and connection name.
text: |
name = (az storage account show -n mystorageaccount --query "privateEndpointConnections[0].name")
az storage account private-endpoint-connection delete -g myRg --account-name mystorageaccount --name $name
"""
helps['storage account private-endpoint-connection reject'] = """
type: command
short-summary: Reject a private endpoint connection request for storage account.
examples:
- name: Reject a private endpoint connection request for storage account by ID.
text: |
az storage account private-endpoint-connection reject --id "/subscriptions/0000-0000-0000-0000/resourceGroups/MyResourceGroup/providers/Microsoft.Storage/storageAccounts/mystorageaccount/privateEndpointConnections/mystorageaccount.b56b5a95-0588-4f8b-b348-15db61590a6c"
- name: Reject a private endpoint connection request for storage account by ID.
text: |
id = (az storage account show -n mystorageaccount --query "privateEndpointConnections[0].id")
az storage account private-endpoint-connection reject --id $id
- name: Reject a private endpoint connection request for storage account using account name and connection name.
text: |
az storage account private-endpoint-connection reject -g myRg --account-name mystorageaccount --name myconnection
- name: Reject a private endpoint connection request for storage account using account name and connection name.
text: |
name = (az storage account show -n mystorageaccount --query "privateEndpointConnections[0].name")
az storage account private-endpoint-connection reject -g myRg --account-name mystorageaccount --name $name
"""
helps['storage account private-endpoint-connection show'] = """
type: command
short-summary: Show details of a private endpoint connection request for storage account.
examples:
- name: Show details of a private endpoint connection request for storage account by ID.
text: |
az storage account private-endpoint-connection show --id "/subscriptions/0000-0000-0000-0000/resourceGroups/MyResourceGroup/providers/Microsoft.Storage/storageAccounts/mystorageaccount/privateEndpointConnections/mystorageaccount.b56b5a95-0588-4f8b-b348-15db61590a6c"
- name: Show details of a private endpoint connection request for storage account by ID.
text: |
id = (az storage account show -n mystorageaccount --query "privateEndpointConnections[0].id")
az storage account private-endpoint-connection show --id $id
- name: Show details of a private endpoint connection request for storage account using account name and connection name.
text: |
az storage account private-endpoint-connection show -g myRg --account-name mystorageaccount --name myconnection
- name: Show details of a private endpoint connection request for storage account using account name and connection name.
text: |
name = (az storage account show -n mystorageaccount --query "privateEndpointConnections[0].name")
az storage account private-endpoint-connection show -g myRg --account-name mystorageaccount --name $name
"""
helps['storage account private-link-resource'] = """
type: group
short-summary: Manage storage account private link resources.
"""
helps['storage account private-link-resource list'] = """
type: command
short-summary: Get the private link resources that need to be created for a storage account.
examples:
- name: Get the private link resources that need to be created for a storage account.
text: |
az storage account private-link-resource list --account-name mystorageaccount -g MyResourceGroup
"""
helps['storage account revoke-delegation-keys'] = """
type: command
short-summary: Revoke all user delegation keys for a storage account.
examples:
- name: Revoke all user delegation keys for a storage account by resource ID.
text: az storage account revoke-delegation-keys --ids /subscriptions/{SubID}/resourceGroups/{ResourceGroup}/providers/Microsoft.Storage/storageAccounts/{StorageAccount}
- name: Revoke all user delegation keys for a storage account 'MyStorageAccount' in resource group 'MyResourceGroup' in the West US region with locally redundant storage.
text: az storage account revoke-delegation-keys -n MyStorageAccount -g MyResourceGroup
"""
helps['storage account show'] = """
type: command
short-summary: Show storage account properties.
examples:
- name: Show properties for a storage account by resource ID.
text: az storage account show --ids /subscriptions/{SubID}/resourceGroups/{ResourceGroup}/providers/Microsoft.Storage/storageAccounts/{StorageAccount}
- name: Show properties for a storage account using an account name and resource group.
text: az storage account show -g MyResourceGroup -n MyStorageAccount
"""
helps['storage account show-connection-string'] = """
type: command
short-summary: Get the connection string for a storage account.
examples:
- name: Get a connection string for a storage account.
text: az storage account show-connection-string -g MyResourceGroup -n MyStorageAccount
- name: Get the connection string for a storage account. (autogenerated)
text: |
az storage account show-connection-string --name MyStorageAccount --resource-group MyResourceGroup --subscription MySubscription
crafted: true
"""
helps['storage account show-usage'] = """
type: command
short-summary: Show the current count and limit of the storage accounts under the subscription.
examples:
- name: Show the current count and limit of the storage accounts under the subscription. (autogenerated)
text: |
az storage account show-usage --location westus2
crafted: true
"""
helps['storage account update'] = """
type: command
short-summary: Update the properties of a storage account.
examples:
- name: Update the properties of a storage account. (autogenerated)
text: |
az storage account update --default-action Allow --name MyStorageAccount --resource-group MyResourceGroup
crafted: true
"""
# Help entries for the `az storage blob` group and its copy/delete commands.
# Fixes: doubled word "the the" in the --pattern long-summary; the
# --source-account-name description said "the source account is used" when
# the option itself IS the source account — corrected to "destination".
helps['storage blob'] = """
type: group
short-summary: Manage object storage for unstructured data (blobs).
long-summary: >
    Please specify one of the following authentication parameters for your commands: --auth-mode, --account-key,
    --connection-string, --sas-token. You also can use corresponding environment variables to store your authentication
    credentials, e.g. AZURE_STORAGE_KEY, AZURE_STORAGE_CONNECTION_STRING and AZURE_STORAGE_SAS_TOKEN.
"""

helps['storage blob copy'] = """
type: group
short-summary: Manage blob copy operations. Use `az storage blob show` to check the status of the blobs.
"""

helps['storage blob copy start'] = """
type: command
short-summary: Copies a blob asynchronously. Use `az storage blob show` to check the status of the blobs.
examples:
  - name: Copies a blob asynchronously. Use `az storage blob show` to check the status of the blobs. (autogenerated)
    text: |
        az storage blob copy start --account-key 00000000 --account-name MyAccount --destination-blob MyDestinationBlob --destination-container MyDestinationContainer --source-uri https://storage.blob.core.windows.net/photos
    crafted: true
  - name: Copies a blob asynchronously. Use `az storage blob show` to check the status of the blobs (autogenerated)
    text: |
        az storage blob copy start --account-name MyAccount --destination-blob MyDestinationBlob --destination-container MyDestinationContainer --sas-token $sas --source-uri https://storage.blob.core.windows.net/photos
    crafted: true
"""

helps['storage blob copy start-batch'] = """
type: command
short-summary: Copy multiple blobs to a blob container. Use `az storage blob show` to check the status of the blobs.
parameters:
  - name: --destination-container -c
    type: string
    short-summary: The blob container where the selected source files or blobs will be copied to.
  - name: --pattern
    type: string
    short-summary: The pattern used for globbing files or blobs in the source. The supported patterns are '*', '?', '[seq]', and '[!seq]'. For more information, please refer to https://docs.python.org/3.7/library/fnmatch.html.
    long-summary: When you use '*' in --pattern, it will match any character including the directory separator '/'.
  - name: --dryrun
    type: bool
    short-summary: List the files or blobs to be uploaded. No actual data transfer will occur.
  - name: --source-account-name
    type: string
    short-summary: The source storage account from which the files or blobs are copied to the destination. If omitted, the destination account is used.
  - name: --source-account-key
    type: string
    short-summary: The account key for the source storage account.
  - name: --source-container
    type: string
    short-summary: The source container from which blobs are copied.
  - name: --source-share
    type: string
    short-summary: The source share from which files are copied.
  - name: --source-uri
    type: string
    short-summary: A URI specifying a file share or blob container from which the files or blobs are copied.
    long-summary: If the source is in another account, the source must either be public or be authenticated by using a shared access signature.
  - name: --source-sas
    type: string
    short-summary: The shared access signature for the source storage account.
examples:
  - name: Copy multiple blobs to a blob container. Use `az storage blob show` to check the status of the blobs. (autogenerated)
    text: |
        az storage blob copy start-batch --account-key 00000000 --account-name MyAccount --destination-container MyDestinationContainer --source-account-key MySourceKey --source-account-name MySourceAccount --source-container MySourceContainer
    crafted: true
"""

helps['storage blob delete'] = """
type: command
short-summary: Mark a blob or snapshot for deletion.
long-summary: >
    The blob is marked for later deletion during garbage collection. In order to delete a blob, all of its snapshots must also be deleted.
    Both can be removed at the same time.
examples:
  - name: Delete a blob.
    text: az storage blob delete -c mycontainer -n MyBlob
  - name: Delete a blob using login credentials.
    text: az storage blob delete -c mycontainer -n MyBlob --account-name mystorageaccount --auth-mode login
"""
# Help entry for `az storage blob delete-batch` (recursive glob-based delete).
# Fixes: "az stroage remove" -> "az storage remove", doubled "the the",
# and ungrammatical --dryrun long-summary.
helps['storage blob delete-batch'] = """
type: command
short-summary: Delete blobs from a blob container recursively.
parameters:
  - name: --source -s
    type: string
    short-summary: The blob container from where the files will be deleted.
    long-summary: The source can be the container URL or the container name. When the source is the container URL, the storage account name will be parsed from the URL.
  - name: --pattern
    type: string
    short-summary: The pattern used for globbing files or blobs in the source. The supported patterns are '*', '?', '[seq]', and '[!seq]'. For more information, please refer to https://docs.python.org/3.7/library/fnmatch.html.
    long-summary: When you use '*' in --pattern, it will match any character including the directory separator '/'. You can also try "az storage remove" command with --include and --exclude with azure cli >= 2.0.70 to match multiple patterns.
  - name: --dryrun
    type: bool
    short-summary: Show the summary of the operations to be taken instead of actually deleting the file(s).
    long-summary: If this is specified, it will ignore all the Precondition Arguments that include --if-modified-since and --if-unmodified-since. So the file(s) to be deleted by the command without --dryrun may be different from the result list with --dryrun flag on.
  - name: --if-match
    type: string
    short-summary: An ETag value, or the wildcard character (*). Specify this header to perform the operation only if the resource's ETag matches the value specified.
  - name: --if-none-match
    type: string
    short-summary: An ETag value, or the wildcard character (*).
    long-summary: Specify this header to perform the operation only if the resource's ETag does not match the value specified. Specify the wildcard character (*) to perform the operation only if the resource does not exist, and fail the operation if it does exist.
examples:
  - name: Delete all blobs ending with ".py" in a container that have not been modified for 10 days.
    text: |
        date=`date -d "10 days ago" '+%Y-%m-%dT%H:%MZ'`
        az storage blob delete-batch -s mycontainer --account-name mystorageaccount --pattern *.py --if-unmodified-since $date --auth-mode login
  - name: Delete all the blobs in a directory named "dir" in a container named "mycontainer".
    text: |
        az storage blob delete-batch -s mycontainer --pattern dir/*
  - name: Delete the blobs with the format 'cli-2018-xx-xx.txt' or 'cli-2019-xx-xx.txt' in a container.
    text: |
        az storage blob delete-batch -s mycontainer --pattern cli-201[89]-??-??.txt
  - name: Delete all blobs with the format 'cli-201x-xx-xx.txt' except cli-2018-xx-xx.txt' and 'cli-2019-xx-xx.txt' in a container.
    text: |
        az storage blob delete-batch -s mycontainer --pattern cli-201[!89]-??-??.txt
"""
# Help entry for `az storage blob download-batch` (recursive glob-based download).
# Fix: doubled word "the the" in the --pattern long-summary.
helps['storage blob download-batch'] = """
type: command
short-summary: Download blobs from a blob container recursively.
parameters:
  - name: --source -s
    type: string
    short-summary: The blob container from where the files will be downloaded.
    long-summary: The source can be the container URL or the container name. When the source is the container URL, the storage account name will be parsed from the URL.
  - name: --destination -d
    type: string
    short-summary: The existing destination folder for this download operation.
  - name: --pattern
    type: string
    short-summary: The pattern used for globbing files or blobs in the source. The supported patterns are '*', '?', '[seq]', and '[!seq]'. For more information, please refer to https://docs.python.org/3.7/library/fnmatch.html.
    long-summary: When you use '*' in --pattern, it will match any character including the directory separator '/'.
  - name: --dryrun
    type: bool
    short-summary: Show the summary of the operations to be taken instead of actually downloading the file(s).
examples:
  - name: Download all blobs that end with .py
    text: |
        az storage blob download-batch -d . --pattern *.py -s mycontainer --account-name mystorageaccount --account-key 00000000
  - name: Download all blobs in a directory named "dir" from container named "mycontainer".
    text: |
        az storage blob download-batch -d . -s mycontainer --pattern dir/*
  - name: Download all blobs with the format 'cli-2018-xx-xx.txt' or 'cli-2019-xx-xx.txt' in container to current path.
    text: |
        az storage blob download-batch -d . -s mycontainer --pattern cli-201[89]-??-??.txt
  - name: Download all blobs with the format 'cli-201x-xx-xx.txt' except cli-2018-xx-xx.txt' and 'cli-2019-xx-xx.txt' in container to current path.
    text: |
        az storage blob download-batch -d . -s mycontainer --pattern cli-201[!89]-??-??.txt
"""
# Help entries for `az storage blob exists` and `az storage blob generate-sas`.
# Fix: container-name typo "myycontainer" -> "mycontainer" in the SAS example.
helps['storage blob exists'] = """
type: command
short-summary: Check for the existence of a blob in a container.
parameters:
  - name: --name -n
    short-summary: The blob name.
examples:
  - name: Check for the existence of a blob in a container. (autogenerated)
    text: |
        az storage blob exists --account-key 00000000 --account-name MyAccount --container-name MyContainer --name MyBlob
    crafted: true
"""

helps['storage blob generate-sas'] = """
type: command
short-summary: Generate a shared access signature for the blob.
examples:
  - name: Generate a sas token for a blob with read-only permissions.
    text: |
        end=`date -u -d "30 minutes" '+%Y-%m-%dT%H:%MZ'`
        az storage blob generate-sas -c mycontainer -n MyBlob --permissions r --expiry $end --https-only
  - name: Generate a shared access signature for the blob. (autogenerated)
    text: |
        az storage blob generate-sas --account-key 00000000 --account-name MyStorageAccount --container-name MyContainer --expiry 2018-01-01T00:00:00Z --name MyBlob --permissions r
    crafted: true
"""
# Help entries for `az storage blob incremental-copy`.
# Fix: first example's name said "Upload all files that end with .py unless
# blob exists and has been modified since given date" — copy-pasted from
# upload-batch; it now describes the incremental-copy command it illustrates.
helps['storage blob incremental-copy'] = """
type: group
short-summary: Manage blob incremental copy operations.
"""

helps['storage blob incremental-copy start'] = """
type: command
short-summary: Copies an incremental copy of a blob asynchronously.
long-summary: This operation returns a copy operation properties object, including a copy ID you can use to check or abort the copy operation. The Blob service copies blobs on a best-effort basis. The source blob for an incremental copy operation must be a page blob. Call get_blob_properties on the destination blob to check the status of the copy operation. The final blob will be committed when the copy completes.
examples:
  - name: Copy an incremental copy of a blob asynchronously from a source snapshot.
    text: az storage blob incremental-copy start --source-container MySourceContainer --source-blob MyBlob --source-account-name MySourceAccount --source-account-key MySourceKey --source-snapshot MySnapshot --destination-container MyDestinationContainer --destination-blob MyDestinationBlob
  - name: Copies an incremental copy of a blob asynchronously. (autogenerated)
    text: |
        az storage blob incremental-copy start --account-key 00000000 --account-name MyAccount --destination-blob MyDestinationBlob --destination-container MyDestinationContainer --source-account-key MySourceKey --source-account-name MySourceAccount --source-blob MyBlob --source-container MySourceContainer --source-snapshot MySnapshot
    crafted: true
"""
# Help entries for `az storage blob lease|list|metadata|restore`.
# Fix: "For examples," -> "For example," in the restore example name.
helps['storage blob lease'] = """
type: group
short-summary: Manage storage blob leases.
"""

helps['storage blob list'] = """
type: command
short-summary: List blobs in a given container.
parameters:
  - name: --include
    short-summary: 'Specifies additional datasets to include: (c)opy-info, (m)etadata, (s)napshots, (d)eleted-soft. Can be combined.'
examples:
  - name: List all storage blobs in a container whose names start with 'foo'; will match names such as 'foo', 'foobar', and 'foo/bar'
    text: az storage blob list -c MyContainer --prefix foo
"""

helps['storage blob metadata'] = """
type: group
short-summary: Manage blob metadata.
"""

helps['storage blob restore'] = """
type: command
short-summary: Restore blobs in the specified blob ranges.
examples:
  - name: Restore blobs in two specified blob ranges. For example, (container1/blob1, container2/blob2) and (container2/blob3..container2/blob4).
    text: az storage blob restore --account-name mystorageaccount -g MyResourceGroup -t 2020-02-27T03:59:59Z -r container1/blob1 container2/blob2 -r container2/blob3 container2/blob4
  - name: Restore blobs in the specified blob ranges from account start to account end.
    text: az storage blob restore --account-name mystorageaccount -g MyResourceGroup -t 2020-02-27T03:59:59Z -r "" ""
  - name: Restore blobs in the specified blob range.
    text: |
        time=`date -u -d "30 minutes" '+%Y-%m-%dT%H:%MZ'`
        az storage blob restore --account-name mystorageaccount -g MyResourceGroup -t $time -r container0/blob1 container0/blob2
  - name: Restore blobs in the specified blob range without wait and query blob restore status with 'az storage account show'.
    text: |
        time=`date -u -d "30 minutes" '+%Y-%m-%dT%H:%MZ'`
        az storage blob restore --account-name mystorageaccount -g MyResourceGroup -t $time -r container0/blob1 container0/blob2 --no-wait
"""
# Help entries for `az storage blob service-properties` and delete-policy.
# Fix: account-name typo "mystorageccount" -> "mystorageaccount" in all
# three autogenerated examples.
helps['storage blob service-properties'] = """
type: group
short-summary: Manage storage blob service properties.
"""

helps['storage blob service-properties delete-policy'] = """
type: group
short-summary: Manage storage blob delete-policy service properties.
"""

helps['storage blob service-properties delete-policy show'] = """
type: command
short-summary: Show the storage blob delete-policy.
examples:
  - name: Show the storage blob delete-policy. (autogenerated)
    text: |
        az storage blob service-properties delete-policy show --account-name mystorageaccount --account-key 00000000
    crafted: true
"""

helps['storage blob service-properties delete-policy update'] = """
type: command
short-summary: Update the storage blob delete-policy.
examples:
  - name: Update the storage blob delete-policy. (autogenerated)
    text: |
        az storage blob service-properties delete-policy update --account-name mystorageaccount --account-key 00000000 --days-retained 7 --enable true
    crafted: true
"""

helps['storage blob service-properties update'] = """
type: command
short-summary: Update storage blob service properties.
examples:
  - name: Update storage blob service properties. (autogenerated)
    text: |
        az storage blob service-properties update --404-document error.html --account-name mystorageaccount --account-key 00000000 --index-document index.html --static-website true
    crafted: true
"""
# Help entries for `az storage blob set-tier|show|sync`.
# Fixes: "only supports for page blobs" grammar, and account-name typo
# "mystorageccount" -> "mystorageaccount" in two examples.
helps['storage blob set-tier'] = """
type: command
short-summary: Set the block or page tiers on the blob.
parameters:
  - name: --type -t
    short-summary: The blob type
  - name: --tier
    short-summary: The tier value to set the blob to.
  - name: --timeout
    short-summary: The timeout parameter is expressed in seconds. This method may make multiple calls to the Azure service and the timeout will apply to each call individually.
long-summary: >
    For block blob this command only supports block blob on standard storage accounts.
    For page blob, this command only supports page blobs on premium accounts.
examples:
  - name: Set the block or page tiers on the blob. (autogenerated)
    text: |
        az storage blob set-tier --account-key 00000000 --account-name MyAccount --container-name MyContainer --name MyBlob --tier P10
    crafted: true
"""

helps['storage blob show'] = """
type: command
short-summary: Get the details of a blob.
examples:
  - name: Show all properties of a blob.
    text: az storage blob show -c MyContainer -n MyBlob
  - name: Get the details of a blob (autogenerated)
    text: |
        az storage blob show --account-name mystorageaccount --account-key 00000000 --container-name MyContainer --name MyBlob
    crafted: true
"""

helps['storage blob sync'] = """
type: command
short-summary: Sync blobs recursively to a storage blob container.
examples:
  - name: Sync a single blob to a container.
    text: az storage blob sync -c mycontainer -s "path/to/file" -d NewBlob
  - name: Sync a directory to a container.
    text: az storage blob sync -c mycontainer --account-name mystorageaccount --account-key 00000000 -s "path/to/directory"
"""
# Help entries for `az storage blob upload` and `upload-batch`.
# Fix: doubled word "the the" in the --pattern long-summary.
helps['storage blob upload'] = """
type: command
short-summary: Upload a file to a storage blob.
long-summary: Creates a new blob from a file path, or updates the content of an existing blob with automatic chunking and progress notifications.
parameters:
  - name: --type -t
    short-summary: Defaults to 'page' for *.vhd files, or 'block' otherwise.
  - name: --maxsize-condition
    short-summary: The max length in bytes permitted for an append blob.
  - name: --validate-content
    short-summary: Specifies that an MD5 hash shall be calculated for each chunk of the blob and verified by the service when the chunk has arrived.
  - name: --tier
    short-summary: A page blob tier value to set the blob to. The tier correlates to the size of the blob and number of allowed IOPS. This is only applicable to page blobs on premium storage accounts.
examples:
  - name: Upload to a blob.
    text: az storage blob upload -f /path/to/file -c MyContainer -n MyBlob
"""

helps['storage blob upload-batch'] = """
type: command
short-summary: Upload files from a local directory to a blob container.
parameters:
  - name: --source -s
    type: string
    short-summary: The directory where the files to be uploaded are located.
  - name: --destination -d
    type: string
    short-summary: The blob container where the files will be uploaded.
    long-summary: The destination can be the container URL or the container name. When the destination is the container URL, the storage account name will be parsed from the URL.
  - name: --pattern
    type: string
    short-summary: The pattern used for globbing files or blobs in the source. The supported patterns are '*', '?', '[seq]', and '[!seq]'. For more information, please refer to https://docs.python.org/3.7/library/fnmatch.html.
    long-summary: When you use '*' in --pattern, it will match any character including the directory separator '/'.
  - name: --dryrun
    type: bool
    short-summary: Show the summary of the operations to be taken instead of actually uploading the file(s).
  - name: --if-match
    type: string
    short-summary: An ETag value, or the wildcard character (*). Specify this header to perform the operation only if the resource's ETag matches the value specified.
  - name: --if-none-match
    type: string
    short-summary: An ETag value, or the wildcard character (*).
    long-summary: Specify this header to perform the operation only if the resource's ETag does not match the value specified. Specify the wildcard character (*) to perform the operation only if the resource does not exist, and fail the operation if it does exist.
  - name: --validate-content
    short-summary: Specifies that an MD5 hash shall be calculated for each chunk of the blob and verified by the service when the chunk has arrived.
  - name: --type -t
    short-summary: Defaults to 'page' for *.vhd files, or 'block' otherwise. The setting will override blob types for every file.
  - name: --maxsize-condition
    short-summary: The max length in bytes permitted for an append blob.
  - name: --lease-id
    short-summary: The active lease id for the blob
examples:
  - name: Upload all files that end with .py unless blob exists and has been modified since given date.
    text: |
        az storage blob upload-batch -d mycontainer --account-name mystorageaccount --account-key 00000000 -s <path-to-directory> --pattern *.py --if-unmodified-since 2018-08-27T20:51Z
  - name: Upload all files from local path directory to a container named "mycontainer".
    text: |
        az storage blob upload-batch -d mycontainer -s <path-to-directory>
  - name: Upload all files with the format 'cli-2018-xx-xx.txt' or 'cli-2019-xx-xx.txt' in local path directory.
    text: |
        az storage blob upload-batch -d mycontainer -s <path-to-directory> --pattern cli-201[89]-??-??.txt
  - name: Upload all files with the format 'cli-201x-xx-xx.txt' except cli-2018-xx-xx.txt' and 'cli-2019-xx-xx.txt' in a container.
    text: |
        az storage blob upload-batch -d mycontainer -s <path-to-directory> --pattern cli-201[!89]-??-??.txt
"""
helps['storage blob url'] = """
type: command
short-summary: Create the url to access a blob.
examples:
- name: Create the url to access a blob (autogenerated)
text: |
az storage blob url --connection-string $connectionString --container-name container1 --name blob1
crafted: true
- name: Create the url to access a blob (autogenerated)
text: |
az storage blob url --account-name storageacct --account-key 00000000 --container-name container1 --name blob1
crafted: true
"""
helps['storage container'] = """
type: group
short-summary: Manage blob storage containers.
long-summary: >
Please specify one of the following authentication parameters for your commands: --auth-mode, --account-key,
--connection-string, --sas-token. You also can use corresponding environment variables to store your authentication
credentials, e.g. AZURE_STORAGE_KEY, AZURE_STORAGE_CONNECTION_STRING and AZURE_STORAGE_SAS_TOKEN.
"""
helps['storage container create'] = """
type: command
short-summary: Create a container in a storage account.
long-summary: >
By default, container data is private ("off") to the account owner. Use "blob" to allow public read access for blobs.
Use "container" to allow public read and list access to the entire container.
You can configure the --public-access using `az storage container set-permission -n CONTAINER_NAME --public-access blob/container/off`.
examples:
- name: Create a storage container in a storage account.
text: az storage container create -n MyStorageContainer
- name: Create a storage container in a storage account and return an error if the container already exists.
text: az storage container create -n MyStorageContainer --fail-on-exist
- name: Create a storage container in a storage account and allow public read access for blobs.
text: az storage container create -n MyStorageContainer --public-access blob
"""
helps['storage container delete'] = """
type: command
short-summary: Marks the specified container for deletion.
long-summary: >
The container and any blobs contained within it are later deleted during garbage collection.
examples:
- name: Marks the specified container for deletion. (autogenerated)
text: |
az storage container delete --account-key 00000000 --account-name MyAccount --name MyContainer
crafted: true
"""
# Help entries for `az storage container exists|generate-sas`.
# Fix: account-name typo "mystorageccount" -> "mystorageaccount".
helps['storage container exists'] = """
type: command
short-summary: Check for the existence of a storage container.
examples:
  - name: Check for the existence of a storage container. (autogenerated)
    text: |
        az storage container exists --account-name mystorageaccount --account-key 00000000 --name mycontainer
    crafted: true
"""

helps['storage container generate-sas'] = """
type: command
short-summary: Generate a SAS token for a storage container.
examples:
  - name: Generate a sas token for blob container and use it to upload a blob.
    text: |
        end=`date -u -d "30 minutes" '+%Y-%m-%dT%H:%MZ'`
        sas=`az storage container generate-sas -n mycontainer --https-only --permissions dlrw --expiry $end -o tsv`
        az storage blob upload -n MyBlob -c mycontainer -f file.txt --sas-token $sas
  - name: Generates a shared access signature for the container (autogenerated)
    text: |
        az storage container generate-sas --account-key 00000000 --account-name mystorageaccount --expiry 2020-01-01 --name mycontainer --permissions dlrw
    crafted: true
"""
# Help entries for the remaining `az storage container` subgroups.
# Fix: account-name typo "mystorageccount" -> "mystorageaccount" in the
# legal-hold show example.
helps['storage container immutability-policy'] = """
type: group
short-summary: Manage container immutability policies.
"""

helps['storage container lease'] = """
type: group
short-summary: Manage blob storage container leases.
"""

helps['storage container legal-hold'] = """
type: group
short-summary: Manage container legal holds.
"""

helps['storage container legal-hold show'] = """
type: command
short-summary: Get the legal hold properties of a container.
examples:
  - name: Get the legal hold properties of a container. (autogenerated)
    text: |
        az storage container legal-hold show --account-name mystorageaccount --container-name MyContainer
    crafted: true
"""

helps['storage container list'] = """
type: command
short-summary: List containers in a storage account.
"""

helps['storage container metadata'] = """
type: group
short-summary: Manage container metadata.
"""

helps['storage container policy'] = """
type: group
short-summary: Manage container stored access policies.
"""
helps['storage copy'] = """
type: command
short-summary: Copy files or directories to or from Azure storage.
examples:
- name: Upload a single file to Azure Blob using url.
text: az storage copy -s /path/to/file.txt -d https://[account].blob.core.windows.net/[container]/[path/to/blob]
- name: Upload a single file to Azure Blob using account name and container name.
text: az storage copy --source-local-path /path/to/file.txt --destination-account-name mystorageaccount --destination-container mycontainer
- name: Upload a single file to Azure Blob with MD5 hash of the file content and save it as the blob's Content-MD5 property.
text: az storage copy -s /path/to/file.txt -d https://[account].blob.core.windows.net/[container]/[path/to/blob] --put-md5
- name: Upload an entire directory to Azure Blob using url.
text: az storage copy -s /path/to/dir -d https://[account].blob.core.windows.net/[container]/[path/to/directory] --recursive
- name: Upload an entire directory to Azure Blob using account name and container name.
text: az storage copy --source-local-path /path/to/dir --destination-account-name mystorageaccount --destination-container mycontainer --recursive
- name: Upload a set of files to Azure Blob using wildcards with url.
text: az storage copy -s /path/*foo/*bar/*.pdf -d https://[account].blob.core.windows.net/[container]/[path/to/directory]
- name: Upload a set of files to Azure Blob using wildcards with account name and container name.
text: az storage copy --source-local-path /path/*foo/*bar/*.pdf --destination-account-name mystorageaccount --destination-container mycontainer
- name: Upload files and directories to Azure Blob using wildcards with url.
text: az storage copy -s /path/*foo/*bar* -d https://[account].blob.core.windows.net/[container]/[path/to/directory] --recursive
- name: Upload files and directories to Azure Blob using wildcards with account name and container name.
text: az storage copy --source-local-path /path/*foo/*bar* --destination-account-name mystorageaccount --destination-container mycontainer --recursive
- name: Download a single file from Azure Blob using url, and you can also specify your storage account and container information as above.
text: az storage copy -s https://[account].blob.core.windows.net/[container]/[path/to/blob] -d /path/to/file.txt
- name: Download an entire directory from Azure Blob, and you can also specify your storage account and container information as above.
text: az storage copy -s https://[account].blob.core.windows.net/[container]/[path/to/directory] -d /path/to/dir --recursive
- name: Download a subset of containers within a storage account by using a wildcard symbol (*) in the container name, and you can also specify your storage account and container information as above.
text: az storage copy -s https://[account].blob.core.windows.net/[container*name] -d /path/to/dir --recursive
- name: Download a subset of files from Azure Blob. (Only jpg files and file names don't start with test will be included.)
text: az storage copy -s https://[account].blob.core.windows.net/[container] --include-pattern "*.jpg" --exclude-pattern test* -d /path/to/dir --recursive
- name: Copy a single blob to another blob, and you can also specify the storage account and container information of source and destination as above.
text: az storage copy -s https://[srcaccount].blob.core.windows.net/[container]/[path/to/blob] -d https://[destaccount].blob.core.windows.net/[container]/[path/to/blob]
- name: Copy an entire account data from blob account to another blob account, and you can also specify the storage account and container information of source and destination as above.
text: az storage copy -s https://[srcaccount].blob.core.windows.net -d https://[destaccount].blob.core.windows.net --recursive
- name: Copy a single object from S3 with access key to blob, and you can also specify your storage account and container information as above.
text: az storage copy -s https://s3.amazonaws.com/[bucket]/[object] -d https://[destaccount].blob.core.windows.net/[container]/[path/to/blob]
- name: Copy an entire directory from S3 with access key to blob virtual directory, and you can also specify your storage account and container information as above.
text: az storage copy -s https://s3.amazonaws.com/[bucket]/[folder] -d https://[destaccount].blob.core.windows.net/[container]/[path/to/directory] --recursive
- name: Copy all buckets in S3 service with access key to blob account, and you can also specify your storage account information as above.
text: az storage copy -s https://s3.amazonaws.com/ -d https://[destaccount].blob.core.windows.net --recursive
- name: Copy all buckets in a S3 region with access key to blob account, and you can also specify your storage account information as above.
text: az storage copy -s https://s3-[region].amazonaws.com/ -d https://[destaccount].blob.core.windows.net --recursive
- name: Upload a single file to Azure File Share using url.
text: az storage copy -s /path/to/file.txt -d https://[account].file.core.windows.net/[share]/[path/to/file]
- name: Upload a single file to Azure File Share using account name and share name.
text: az storage copy --source-local-path /path/to/file.txt --destination-account-name mystorageaccount --destination-share myshare
- name: Upload an entire directory to Azure File Share using url.
text: az storage copy -s /path/to/dir -d https://[account].file.core.windows.net/[share]/[path/to/directory] --recursive
- name: Upload an entire directory to Azure File Share using account name and container name.
text: az storage copy --source-local-path /path/to/dir --destination-account-name mystorageaccount --destination-share myshare --recursive
- name: Upload a set of files to Azure File Share using wildcards with account name and share name.
text: az storage copy --source-local-path /path/*foo/*bar/*.pdf --destination-account-name mystorageaccount --destination-share myshare
- name: Upload files and directories to Azure File Share using wildcards with url.
text: az storage copy -s /path/*foo/*bar* -d https://[account].file.core.windows.net/[share]/[path/to/directory] --recursive
- name: Upload files and directories to Azure File Share using wildcards with account name and share name.
text: az storage copy --source-local-path /path/*foo/*bar* --destination-account-name mystorageaccount --destination-share myshare --recursive
- name: Download a single file from Azure File Share using url, and you can also specify your storage account and share information as above.
text: az storage copy -s https://[account].file.core.windows.net/[share]/[path/to/file] -d /path/to/file.txt
- name: Download an entire directory from Azure File Share, and you can also specify your storage account and share information as above.
text: az storage copy -s https://[account].file.core.windows.net/[share]/[path/to/directory] -d /path/to/dir --recursive
- name: Download a set of files from Azure File Share using wildcards, and you can also specify your storage account and share information as above.
text: az storage copy -s https://[account].file.core.windows.net/[share]/ --include-pattern foo* -d /path/to/dir --recursive
"""
# --- az storage help text: CORS, directory, table-entity, and file-copy groups ---
# Each assignment registers a YAML help document (type/short-summary/parameters/
# examples) for one CLI command or command group in the module-level `helps` dict.
helps['storage cors'] = """
type: group
short-summary: Manage storage service Cross-Origin Resource Sharing (CORS).
"""
helps['storage cors add'] = """
type: command
short-summary: Add a CORS rule to a storage account.
parameters:
- name: --services
short-summary: >
The storage service(s) to add rules to. Allowed options are: (b)lob, (f)ile,
(q)ueue, (t)able. Can be combined.
- name: --max-age
short-summary: The maximum number of seconds the client/browser should cache a preflight response.
- name: --origins
short-summary: Space-separated list of origin domains that will be allowed via CORS, or '*' to allow all domains.
- name: --methods
short-summary: Space-separated list of HTTP methods allowed to be executed by the origin.
- name: --allowed-headers
short-summary: Space-separated list of response headers allowed to be part of the cross-origin request.
- name: --exposed-headers
short-summary: Space-separated list of response headers to expose to CORS clients.
"""
helps['storage cors clear'] = """
type: command
short-summary: Remove all CORS rules from a storage account.
parameters:
- name: --services
short-summary: >
The storage service(s) to remove rules from. Allowed options are: (b)lob, (f)ile,
(q)ueue, (t)able. Can be combined.
examples:
- name: Remove all CORS rules from a storage account. (autogenerated)
text: |
az storage cors clear --account-name MyAccount --services bfqt
crafted: true
"""
helps['storage cors list'] = """
type: command
short-summary: List all CORS rules for a storage account.
parameters:
- name: --services
short-summary: >
The storage service(s) to list rules for. Allowed options are: (b)lob, (f)ile,
(q)ueue, (t)able. Can be combined.
examples:
- name: List all CORS rules for a storage account. (autogenerated)
text: |
az storage cors list --account-name MyAccount
crafted: true
"""
# File-share directory commands.
helps['storage directory'] = """
type: group
short-summary: Manage file storage directories.
"""
helps['storage directory exists'] = """
type: command
short-summary: Check for the existence of a storage directory.
examples:
- name: Check for the existence of a storage directory. (autogenerated)
text: |
az storage directory exists --account-key 00000000 --account-name MyAccount --name MyDirectory --share-name MyShare
crafted: true
"""
helps['storage directory list'] = """
type: command
short-summary: List directories in a share.
examples:
- name: List directories in a share. (autogenerated)
text: |
az storage directory list --account-name MyAccount --share-name MyShare
crafted: true
"""
helps['storage directory metadata'] = """
type: group
short-summary: Manage file storage directory metadata.
"""
# Table-storage entity commands.
helps['storage entity'] = """
type: group
short-summary: Manage table storage entities.
"""
helps['storage entity insert'] = """
type: command
short-summary: Insert an entity into a table.
parameters:
- name: --table-name -t
type: string
short-summary: The name of the table to insert the entity into.
- name: --entity -e
type: list
short-summary: Space-separated list of key=value pairs. Must contain a PartitionKey and a RowKey.
long-summary: The PartitionKey and RowKey must be unique within the table, and may be up to 64Kb in size. If using an integer value as a key, convert it to a fixed-width string which can be canonically sorted. For example, convert the integer value 1 to the string value "0000001" to ensure proper sorting.
- name: --if-exists
type: string
short-summary: Behavior when an entity already exists for the specified PartitionKey and RowKey.
- name: --timeout
short-summary: The server timeout, expressed in seconds.
examples:
- name: Insert an entity into a table. (autogenerated)
text: |
az storage entity insert --connection-string $connectionString --entity PartitionKey=AAA RowKey=BBB Content=ASDF2 --if-exists fail --table-name MyTable
crafted: true
"""
helps['storage entity query'] = """
type: command
short-summary: List entities which satisfy a query.
parameters:
- name: --marker
type: list
short-summary: Space-separated list of key=value pairs. Must contain a nextpartitionkey and a nextrowkey.
long-summary: This value can be retrieved from the next_marker field of a previous generator object if max_results was specified and that generator has finished enumerating results. If specified, this generator will begin returning results from the point where the previous generator stopped.
examples:
- name: List entities which satisfy a query. (autogenerated)
text: |
az storage entity query --table-name MyTable
crafted: true
"""
# SMB file-share commands.
helps['storage file'] = """
type: group
short-summary: Manage file shares that use the SMB 3.0 protocol.
"""
helps['storage file copy'] = """
type: group
short-summary: Manage file copy operations.
"""
# Help for `az storage file copy start` (async server-side file copy).
# Fix: the example placeholder "<destinaition-sas>" was misspelled; corrected
# to "<destination-sas>" in both SAS-token examples.
helps['storage file copy start'] = """
type: command
short-summary: Copy a file asynchronously.
examples:
- name: Copy a file asynchronously.
text: |
az storage file copy start --source-account-name srcaccount --source-account-key 00000000 --source-path <srcpath-to-file> --source-share srcshare --destination-path <destpath-to-file> --destination-share destshare --account-name destaccount --account-key 00000000
- name: Copy a file asynchronously from source uri to destination storage account with sas token.
text: |
az storage file copy start --source-uri "https://srcaccount.file.core.windows.net/myshare/mydir/myfile?<sastoken>" --destination-path <destpath-to-file> --destination-share destshare --account-name destaccount --sas-token <destination-sas>
- name: Copy a file asynchronously from file snapshot to destination storage account with sas token.
text: |
az storage file copy start --source-account-name srcaccount --source-account-key 00000000 --source-path <srcpath-to-file> --source-share srcshare --file-snapshot "2020-03-02T13:51:54.0000000Z" --destination-path <destpath-to-file> --destination-share destshare --account-name destaccount --sas-token <destination-sas>
"""
# Help for `az storage file copy start-batch` (bulk copy of files/blobs into a share).
# Fixes to the user-facing help text: duplicated word "the the" in the --pattern
# long-summary; "specifies a the source" in the --source-uri short-summary;
# stray period in "another storage account. with sas token."; and the
# "<destinaition-sas>" placeholder misspelling.
helps['storage file copy start-batch'] = """
type: command
short-summary: Copy multiple files or blobs to a file share.
parameters:
- name: --destination-share
type: string
short-summary: The file share where the source data is copied to.
- name: --destination-path
type: string
short-summary: The directory where the source data is copied to. If omitted, data is copied to the root directory.
- name: --pattern
type: string
short-summary: The pattern used for globbing files and blobs. The supported patterns are '*', '?', '[seq]', and '[!seq]'. For more information, please refer to https://docs.python.org/3.7/library/fnmatch.html.
long-summary: When you use '*' in --pattern, it will match any character including the directory separator '/'.
- name: --dryrun
type: bool
short-summary: List the files and blobs to be copied. No actual data transfer will occur.
- name: --source-account-name
type: string
short-summary: The source storage account to copy the data from. If omitted, the destination account is used.
- name: --source-account-key
type: string
short-summary: The account key for the source storage account. If omitted, the active login is used to determine the account key.
- name: --source-container
type: string
short-summary: The source container blobs are copied from.
- name: --source-share
type: string
short-summary: The source share files are copied from.
- name: --source-uri
type: string
short-summary: A URI that specifies the source file share or blob container.
long-summary: If the source is in another account, the source must either be public or authenticated via a shared access signature.
- name: --source-sas
type: string
short-summary: The shared access signature for the source storage account.
examples:
- name: Copy all files in a file share to another storage account.
text: |
az storage file copy start-batch --source-account-name srcaccount --source-account-key 00000000 --source-share srcshare --destination-path <destpath-to-directory> --destination-share destshare --account-name destaccount --account-key 00000000
- name: Copy all files in a file share to another storage account with sas token.
text: |
az storage file copy start-batch --source-uri "https://srcaccount.file.core.windows.net/myshare?<sastoken>" --destination-path <destpath-to-directory> --destination-share destshare --account-name destaccount --sas-token <destination-sas>
"""
# Help for `az storage file delete-batch` (bulk delete from a file share).
# Fix: duplicated word "the the" in the --pattern long-summary.
helps['storage file delete-batch'] = """
type: command
short-summary: Delete files from an Azure Storage File Share.
parameters:
- name: --source -s
type: string
short-summary: The source of the file delete operation. The source can be the file share URL or the share name.
- name: --pattern
type: string
short-summary: The pattern used for file globbing. The supported patterns are '*', '?', '[seq]', and '[!seq]'. For more information, please refer to https://docs.python.org/3.7/library/fnmatch.html.
long-summary: When you use '*' in --pattern, it will match any character including the directory separator '/'.
- name: --dryrun
type: bool
short-summary: List the files and blobs to be deleted. No actual data deletion will occur.
examples:
- name: Delete files from an Azure Storage File Share. (autogenerated)
text: |
az storage file delete-batch --account-key 00000000 --account-name MyAccount --source /path/to/file
crafted: true
- name: Delete files from an Azure Storage File Share. (autogenerated)
text: |
az storage file delete-batch --account-key 00000000 --account-name MyAccount --pattern *.py --source /path/to/file
crafted: true
"""
# Help for `az storage file download-batch` (bulk download from a file share).
# Fix: duplicated word "the the" in the --pattern long-summary.
helps['storage file download-batch'] = """
type: command
short-summary: Download files from an Azure Storage File Share to a local directory in a batch operation.
parameters:
- name: --source -s
type: string
short-summary: The source of the file download operation. The source can be the file share URL or the share name.
- name: --destination -d
type: string
short-summary: The local directory where the files are downloaded to. This directory must already exist.
- name: --pattern
type: string
short-summary: The pattern used for file globbing. The supported patterns are '*', '?', '[seq]', and '[!seq]'. For more information, please refer to https://docs.python.org/3.7/library/fnmatch.html.
long-summary: When you use '*' in --pattern, it will match any character including the directory separator '/'.
- name: --dryrun
type: bool
short-summary: List the files and blobs to be downloaded. No actual data transfer will occur.
- name: --max-connections
type: integer
short-summary: The maximum number of parallel connections to use. Default value is 1.
- name: --snapshot
type: string
short-summary: A string that represents the snapshot version, if applicable.
- name: --validate-content
type: bool
short-summary: If set, calculates an MD5 hash for each range of the file for validation.
long-summary: >
The storage service checks the hash of the content that has arrived is identical to the hash that was sent.
This is mostly valuable for detecting bitflips during transfer if using HTTP instead of HTTPS. This hash is not stored.
examples:
- name: Download files from an Azure Storage File Share to a local directory in a batch operation. (autogenerated)
text: |
az storage file download-batch --account-key 00000000 --account-name MyAccount --destination . --no-progress --source /path/to/file
crafted: true
- name: Download files from an Azure Storage File Share to a local directory in a batch operation. (autogenerated)
text: |
az storage file download-batch --destination . --pattern *.py --source /path/to/file
crafted: true
"""
# --- az storage file: exists / generate-sas / list / metadata / upload help text ---
helps['storage file exists'] = """
type: command
short-summary: Check for the existence of a file.
examples:
- name: Check for the existence of a file. (autogenerated)
text: |
az storage file exists --account-key 00000000 --account-name MyAccount --path path/file.txt --share-name MyShare
crafted: true
- name: Check for the existence of a file. (autogenerated)
text: |
az storage file exists --connection-string $connectionString --path path/file.txt --share-name MyShare
crafted: true
"""
helps['storage file generate-sas'] = """
type: command
examples:
- name: Generate a sas token for a file.
text: |
end=`date -u -d "30 minutes" '+%Y-%m-%dT%H:%MZ'`
az storage file generate-sas -p path/file.txt -s MyShare --account-name MyStorageAccount --permissions rcdw --https-only --expiry $end
- name: Generate a shared access signature for the file. (autogenerated)
text: |
az storage file generate-sas --account-name MyStorageAccount --expiry 2037-12-31T23:59:00Z --path path/file.txt --permissions rcdw --share-name MyShare --start 2019-01-01T12:20Z
crafted: true
"""
helps['storage file list'] = """
type: command
short-summary: List files and directories in a share.
parameters:
- name: --exclude-dir
type: bool
short-summary: List only files in the given share.
examples:
- name: List files and directories in a share. (autogenerated)
text: |
az storage file list --share-name MyShare
crafted: true
"""
helps['storage file metadata'] = """
type: group
short-summary: Manage file metadata.
"""
helps['storage file upload'] = """
type: command
short-summary: Upload a file to a share that uses the SMB 3.0 protocol.
long-summary: Creates or updates an Azure file from a source path with automatic chunking and progress notifications.
examples:
- name: Upload to a local file to a share.
text: az storage file upload -s MyShare --source /path/to/file
- name: Upload a file to a share that uses the SMB 3.0 protocol. (autogenerated)
text: |
az storage file upload --account-key 00000000 --account-name MyStorageAccount --path path/file.txt --share-name MyShare --source /path/to/file
crafted: true
"""
# Help for `az storage file upload-batch` (bulk upload of a local directory to a share).
# Fix: duplicated word "the the" in the --pattern long-summary.
helps['storage file upload-batch'] = """
type: command
short-summary: Upload files from a local directory to an Azure Storage File Share in a batch operation.
parameters:
- name: --source -s
type: string
short-summary: The directory to upload files from.
- name: --destination -d
type: string
short-summary: The destination of the upload operation.
long-summary: The destination can be the file share URL or the share name. When the destination is the share URL, the storage account name is parsed from the URL.
- name: --destination-path
type: string
short-summary: The directory where the source data is copied to. If omitted, data is copied to the root directory.
- name: --pattern
type: string
short-summary: The pattern used for file globbing. The supported patterns are '*', '?', '[seq]', and '[!seq]'. For more information, please refer to https://docs.python.org/3.7/library/fnmatch.html.
long-summary: When you use '*' in --pattern, it will match any character including the directory separator '/'.
- name: --dryrun
type: bool
short-summary: List the files and blobs to be uploaded. No actual data transfer will occur.
- name: --max-connections
type: integer
short-summary: The maximum number of parallel connections to use. Default value is 1.
- name: --validate-content
type: bool
short-summary: If set, calculates an MD5 hash for each range of the file for validation.
long-summary: >
The storage service checks the hash of the content that has arrived is identical to the hash that was sent.
This is mostly valuable for detecting bitflips during transfer if using HTTP instead of HTTPS. This hash is not stored.
examples:
- name: Upload files from a local directory to an Azure Storage File Share in a batch operation. (autogenerated)
text: |
az storage file upload-batch --account-key 00000000 --account-name MyAccount --destination . --source /path/to/file
crafted: true
"""
# --- az storage help text: file url, logging, message, metrics, queue, remove,
# share-rm (resource-provider shares), share (data-plane shares), and table ---
helps['storage file url'] = """
type: command
short-summary: Create the url to access a file.
examples:
- name: Create the url to access a file. (autogenerated)
text: |
az storage file url --account-name MyAccount --path path/file.txt --share-name MyShare
crafted: true
"""
# Storage analytics logging settings.
helps['storage logging'] = """
type: group
short-summary: Manage storage service logging information.
"""
helps['storage logging show'] = """
type: command
short-summary: Show logging settings for a storage account.
parameters:
- name: --services
short-summary: 'The storage services from which to retrieve logging info: (b)lob (q)ueue (t)able. Can be combined.'
examples:
- name: Show logging settings for a storage account. (autogenerated)
text: |
az storage logging show --account-name MyAccount --services qt
crafted: true
"""
helps['storage logging update'] = """
type: command
short-summary: Update logging settings for a storage account.
parameters:
- name: --services
short-summary: 'The storage service(s) for which to update logging info: (b)lob (q)ueue (t)able. Can be combined.'
- name: --log
short-summary: 'The operations for which to enable logging: (r)ead (w)rite (d)elete. Can be combined.'
- name: --retention
short-summary: Number of days for which to retain logs. 0 to disable.
- name: --version
short-summary: Version of the logging schema.
"""
helps['storage message'] = """
type: group
short-summary: Manage queue storage messages.
long-summary: >
Please specify one of the following authentication parameters for your commands: --auth-mode, --account-key,
--connection-string, --sas-token. You also can use corresponding environment variables to store your authentication
credentials, e.g. AZURE_STORAGE_KEY, AZURE_STORAGE_CONNECTION_STRING and AZURE_STORAGE_SAS_TOKEN.
"""
# Storage analytics metrics settings.
helps['storage metrics'] = """
type: group
short-summary: Manage storage service metrics.
"""
helps['storage metrics show'] = """
type: command
short-summary: Show metrics settings for a storage account.
parameters:
- name: --services
short-summary: 'The storage services from which to retrieve metrics info: (b)lob (q)ueue (t)able. Can be combined.'
- name: --interval
short-summary: Filter the set of metrics to retrieve by time interval
examples:
- name: Show metrics settings for a storage account. (autogenerated)
text: |
az storage metrics show --account-key 00000000 --account-name MyAccount
crafted: true
"""
helps['storage metrics update'] = """
type: command
short-summary: Update metrics settings for a storage account.
parameters:
- name: --services
short-summary: 'The storage services from which to retrieve metrics info: (b)lob (q)ueue (t)able. Can be combined.'
- name: --hour
short-summary: Update the hourly metrics
- name: --minute
short-summary: Update the by-minute metrics
- name: --api
short-summary: Specify whether to include API in metrics. Applies to both hour and minute metrics if both are specified. Must be specified if hour or minute metrics are enabled and being updated.
- name: --retention
short-summary: Number of days for which to retain metrics. 0 to disable. Applies to both hour and minute metrics if both are specified.
examples:
- name: Update metrics settings for a storage account. (autogenerated)
text: |
az storage metrics update --account-name MyAccount --api true --hour true --minute true --retention 10 --services bfqt
crafted: true
"""
# Queue commands. NOTE: helps['storage queue'] is re-assigned near the end of this
# file, so that later entry is the one that takes effect.
helps['storage queue'] = """
type: group
short-summary: Manage storage queues.
"""
helps['storage queue list'] = """
type: command
short-summary: List queues in a storage account.
"""
helps['storage queue metadata'] = """
type: group
short-summary: Manage the metadata for a storage queue.
"""
helps['storage queue policy'] = """
type: group
short-summary: Manage shared access policies for a storage queue.
"""
helps['storage remove'] = """
type: command
short-summary: Delete blobs or files from Azure Storage.
examples:
- name: Remove a single blob.
text: az storage remove -c MyContainer -n MyBlob
- name: Remove an entire virtual directory.
text: az storage remove -c MyContainer -n path/to/directory --recursive
- name: Remove only the top blobs inside a virtual directory but not its sub-directories.
text: az storage remove -c MyContainer --recursive
- name: Remove all the blobs in a Storage Container.
text: az storage remove -c MyContainer -n path/to/directory
- name: Remove a subset of blobs in a virtual directory (For example, only jpg and pdf files, or if the blob name is "exactName" and file names don't start with "test").
text: az storage remove -c MyContainer --include-path path/to/directory --include-pattern "*.jpg;*.pdf;exactName" --exclude-pattern "test*" --recursive
- name: Remove an entire virtual directory but exclude certain blobs from the scope (For example, every blob that starts with foo or ends with bar).
text: az storage remove -c MyContainer --include-path path/to/directory --exclude-pattern "foo*;*bar" --recursive
- name: Remove a single file.
text: az storage remove -s MyShare -p MyFile
- name: Remove an entire directory.
text: az storage remove -s MyShare -p path/to/directory --recursive
- name: Remove all the files in a Storage File Share.
text: az storage remove -s MyShare --recursive
"""
# Management-plane (Microsoft.Storage resource provider) file-share commands.
helps['storage share-rm'] = """
type: group
short-summary: Manage Azure file shares using the Microsoft.Storage resource provider.
"""
helps['storage share-rm create'] = """
type: command
short-summary: Create a new Azure file share under the specified storage account.
examples:
- name: Create a new Azure file share 'myfileshare' with metadata and quota as 10 GB under the storage account 'mystorageaccount'(account name) in resource group 'MyResourceGroup'.
text: az storage share-rm create -g MyResourceGroup --storage-account mystorageaccount --name myfileshare --quota 10 --metadata key1=value1 key2=value2
- name: Create a new Azure file share 'myfileshare' with metadata and quota as 6000 GB under the storage account 'mystorageaccount'(account name) which enables large file share in resource group 'MyResourceGroup'.
text: |
az storage account update -g MyResourceGroup --name mystorageaccount --enable-large-file-share
az storage share-rm create -g MyResourceGroup --storage-account mystorageaccount --name myfileshare --quota 6000 --metadata key1=value1 key2=value2
- name: Create a new Azure file share 'myfileshare' with metadata and quota as 10 GB under the storage account 'mystorageaccount' (account id).
text: az storage share-rm create --storage-account mystorageaccount --name myfileshare --quota 10 --metadata key1=value1 key2=value2
"""
helps['storage share-rm delete'] = """
type: command
short-summary: Delete the specified Azure file share.
examples:
- name: Delete an Azure file share 'myfileshare' under the storage account 'mystorageaccount' (account name) in resource group 'MyResourceGroup'.
text: az storage share-rm delete -g MyResourceGroup --storage-account mystorageaccount --name myfileshare
- name: Delete an Azure file share 'myfileshare' under the storage account 'mystorageaccount' (account id).
text: az storage share-rm delete --storage-account mystorageaccount --name myfileshare
- name: Delete an Azure file share by resource id.
text: az storage share-rm delete --ids file-share-id
"""
helps['storage share-rm exists'] = """
type: command
short-summary: Check for the existence of an Azure file share.
examples:
- name: Check for the existence of an Azure file share 'myfileshare' under the storage account 'mystorageaccount' (account name) in resource group 'MyResourceGroup'.
text: az storage share-rm exists -g MyResourceGroup --storage-account mystorageaccount --name myfileshare
- name: Check for the existence of an Azure file share 'myfileshare' under the storage account 'mystorageaccount' (account id).
text: az storage share-rm exists --storage-account mystorageaccount --name myfileshare
- name: Check for the existence of an Azure file share by resource id.
text: az storage share-rm exists --ids file-share-id
"""
helps['storage share-rm list'] = """
type: command
short-summary: List the Azure file shares under the specified storage account.
examples:
- name: List the Azure file shares under the storage account 'mystorageaccount' (account name) in resource group 'MyResourceGroup'.
text: az storage share-rm list -g MyResourceGroup --storage-account mystorageaccount
- name: List the Azure file shares under the storage account 'mystorageaccount' (account id).
text: az storage share-rm list --storage-account mystorageaccount
"""
helps['storage share-rm show'] = """
type: command
short-summary: Show the properties for a specified Azure file share.
examples:
- name: Show the properties for an Azure file share 'myfileshare' under the storage account 'mystorageaccount' (account name) in resource group 'MyResourceGroup'.
text: az storage share-rm show -g MyResourceGroup --storage-account mystorageaccount --name myfileshare
- name: Show the properties for an Azure file share 'myfileshare' under the storage account 'mystorageaccount' (account id).
text: az storage share-rm show --storage-account mystorageaccount --name myfileshare
- name: Show the properties of an Azure file shares by resource id.
text: az storage share-rm show --ids file-share-id
"""
helps['storage share-rm update'] = """
type: command
short-summary: Update the properties for an Azure file share.
examples:
- name: Update the properties for an Azure file share 'myfileshare' under the storage account 'mystorageaccount' (account name) in resource group 'MyResourceGroup'.
text: az storage share-rm update -g MyResourceGroup --storage-account mystorageaccount --name myfileshare --quota 3 --metadata key1=value1 key2=value2
- name: Update the properties for an Azure file share 'myfileshare' under the storage account 'mystorageaccount' (account id).
text: az storage share-rm update --storage-account mystorageaccount --name myfileshare --quota 3 --metadata key1=value1 key2=value2
- name: Update the properties for an Azure file shares by resource id.
text: az storage share-rm update --ids file-share-id --quota 3 --metadata key1=value1 key2=value2
"""
# Data-plane file-share commands.
helps['storage share'] = """
type: group
short-summary: Manage file shares.
"""
helps['storage share create'] = """
type: command
short-summary: Creates a new share under the specified account.
examples:
- name: Creates a new share under the specified account. (autogenerated)
text: |
az storage share create --account-name MyAccount --name MyFileShare
crafted: true
"""
helps['storage share exists'] = """
type: command
short-summary: Check for the existence of a file share.
examples:
- name: Check for the existence of a file share. (autogenerated)
text: |
az storage share exists --account-key 00000000 --account-name MyAccount --name MyFileShare
crafted: true
- name: Check for the existence of a file share (autogenerated)
text: |
az storage share exists --connection-string $connectionString --name MyFileShare
crafted: true
"""
helps['storage share generate-sas'] = """
type: command
examples:
- name: Generate a sas token for a fileshare and use it to upload a file.
text: |
end=`date -u -d "30 minutes" '+%Y-%m-%dT%H:%MZ'`
sas=`az storage share generate-sas -n MyShare --account-name MyStorageAccount --https-only --permissions dlrw --expiry $end -o tsv`
az storage file upload -s MyShare --account-name MyStorageAccount --source file.txt --sas-token $sas
- name: Generate a shared access signature for the share. (autogenerated)
text: |
az storage share generate-sas --account-key 00000000 --account-name MyStorageAccount --expiry 2037-12-31T23:59:00Z --name MyShare --permissions dlrw
crafted: true
- name: Generate a shared access signature for the share. (autogenerated)
text: |
az storage share generate-sas --connection-string $connectionString --expiry 2019-02-01T12:20Z --name MyShare --permissions dlrw
crafted: true
"""
helps['storage share list'] = """
type: command
short-summary: List the file shares in a storage account.
"""
helps['storage share metadata'] = """
type: group
short-summary: Manage the metadata of a file share.
"""
helps['storage share policy'] = """
type: group
short-summary: Manage shared access policies of a storage file share.
"""
helps['storage share url'] = """
type: command
short-summary: Create a URI to access a file share.
examples:
- name: Create a URI to access a file share. (autogenerated)
text: |
az storage share url --account-key 00000000 --account-name MyAccount --name MyFileShare
crafted: true
- name: Create a URI to access a file share. (autogenerated)
text: |
az storage share url --connection-string $connectionString --name MyFileShare
crafted: true
"""
# Table-storage commands.
helps['storage table'] = """
type: group
short-summary: Manage NoSQL key-value storage.
"""
helps['storage table list'] = """
type: command
short-summary: List tables in a storage account.
"""
helps['storage table policy'] = """
type: group
short-summary: Manage shared access policies of a storage table.
"""
# Re-assignment of helps['storage queue'] (this later entry overrides the earlier
# one above and is the text users actually see). Fix: the short-summary read
# "Manage shared access policies of a storage table." — a copy/paste error from
# 'storage table policy'. Corrected to describe storage queues, matching the
# earlier 'storage queue' entry.
helps['storage queue'] = """
type: group
short-summary: Manage storage queues.
long-summary: >
Please specify one of the following authentication parameters for your commands: --auth-mode, --account-key,
--connection-string, --sas-token. You also can use corresponding environment variables to store your authentication
credentials, e.g. AZURE_STORAGE_KEY, AZURE_STORAGE_CONNECTION_STRING and AZURE_STORAGE_SAS_TOKEN.
"""
| 51.343985 | 417 | 0.73326 |
685daef993301366508bd25931ffcc10c76e315a | 2,038 | py | Python | app/models/movie.py | SonGokussj4/fast-api-excercise | 15ae21fd2dba7cbf6bf42a2beee52a379e66c847 | [
"MIT"
] | null | null | null | app/models/movie.py | SonGokussj4/fast-api-excercise | 15ae21fd2dba7cbf6bf42a2beee52a379e66c847 | [
"MIT"
] | null | null | null | app/models/movie.py | SonGokussj4/fast-api-excercise | 15ae21fd2dba7cbf6bf42a2beee52a379e66c847 | [
"MIT"
] | null | null | null | from sqlalchemy import Integer, String, Column, Boolean, Text, Unicode, DateTime, JSON
from sqlalchemy.orm import relationship
from app.db.base_class import Base
class Movie(Base):
    """SQLAlchemy model for a movie/series entry.

    NOTE(review): column semantics below are inferred from the names where the
    code itself does not show them - confirm against the data producer.
    """

    # Surrogate primary key.
    Id = Column(Integer, primary_key=True)
    # Source URL; its uniqueness makes it the natural business key.
    Url = Column(Unicode(255), unique=True, nullable=False)
    Title = Column(Unicode(255), unique=False, nullable=True)
    # Entity kind discriminator (presumably e.g. movie vs. series) - TODO confirm.
    Type = Column(Unicode(32), unique=False, nullable=True)
    Year = Column(Unicode(16), unique=False, nullable=True)
    Duration = Column(Unicode(128), unique=False, nullable=True)
    Country = Column(Unicode(64), unique=False, nullable=True)
    Rating = Column(Integer, unique=False, nullable=True)
    RatingCount = Column(Integer, unique=False, nullable=True)
    FanclubCount = Column(Integer, unique=False, nullable=True)
    SeasonsCount = Column(Integer, unique=False, nullable=True)
    EpisodesCount = Column(Integer, unique=False, nullable=True)
    PosterUrl = Column(Unicode(255), unique=False, nullable=True)
    # Presumably link episodes/seasons back to their parent entities - confirm.
    SeriesId = Column(Integer, unique=False, nullable=True)
    SeasonId = Column(Integer, unique=False, nullable=True)
    LastUpdate = Column(DateTime, unique=False, nullable=False)
    # Raw JSON payloads kept alongside the flattened 'Genres' string column.
    GenresJson = Column(JSON)
    ChildrenJson = Column(JSON)
    Genres = Column(Unicode(255), unique=False, nullable=True)
    parentid = Column(Integer, unique=False, nullable=True)

    # One-to-many: Rating rows referencing this movie.
    Ratings = relationship("Rating", back_populates="Movie")  # lazy="dynamic"

    # Many-to-many
    # Genres = relationship('Genre', secondary=movie_genre, backref='Movies')

    # UserRatings = relationship(
    #     'UserRating',
    #     backref="Users"
    # )

    # # Many-to-many
    # Movies = relationship('Movie', secondary=user_movie, backref='Users')

    # @property
    # def serialize(self):
    #     """Return object data in easily serializable format"""
    #     return {
    #         "Id": self.Id,
    #         "Url": self.Url,
    #         "Username": self.Username,
    #         "Realname": self.Realname,
    #         "AvatarUrl": self.AvatarUrl
    #     }
22eb8dcc6741b8a0aa3b15760eede1a5383a4744 | 1,049 | py | Python | pycnal_toolbox/setup.py | ESMG/PyCNAL_legacy | a4f6547bce872068a5bb5751231017bc3e4a4503 | [
"BSD-3-Clause"
] | null | null | null | pycnal_toolbox/setup.py | ESMG/PyCNAL_legacy | a4f6547bce872068a5bb5751231017bc3e4a4503 | [
"BSD-3-Clause"
] | 3 | 2018-01-23T23:23:24.000Z | 2018-02-07T22:37:28.000Z | pycnal_toolbox/setup.py | ESMG/PyCNAL_legacy | a4f6547bce872068a5bb5751231017bc3e4a4503 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
"""
pycnal_toolbox is a suite of tools for working with ROMS.
Requires:
pycnal (https://github.com/ESMG/PyCNAL)
Contains:
many things...
"""
# First docstring line is the short description, the rest the long one.
doclines = __doc__.split("\n")


def configuration(parent_package='',top_path=None):
    """Build the numpy.distutils Configuration for this distribution.

    Args:
        parent_package: Name of the enclosing package ('' at the top level).
        top_path: Source-tree root path, as supplied by distutils.

    Returns:
        Configuration object with the 'pycnal_toolbox' subpackage registered.
    """
    from numpy.distutils.misc_util import Configuration
    config = Configuration(None,parent_package,top_path)
    config.set_options(ignore_setup_xxx_py=True,
                       assume_default_configuration=True,
                       delegate_options_to_subpackages=True)
                       # quiet=True)
    config.add_subpackage('pycnal_toolbox')
    return config


if __name__ == '__main__':
    from numpy.distutils.core import setup
    # setup() metadata is derived from the module docstring (see doclines).
    setup(name = "pycnal_toolbox",
          version = '0.1',
          description = doclines[0],
          long_description = "\n".join(doclines[2:]),
          author = "ESMG",
          url = 'https://github.com/ESMG/PyCNAL',
          license = 'BSD',
          platforms = ["any"],
          configuration=configuration,
          )
| 27.605263 | 60 | 0.621544 |
1cfbf45a24d0927b13fa08e409171b9e672d7c0e | 1,684 | py | Python | homeassistant/components/automation/template.py | beschouten/home-assistant | f50c30bbbad4d92e342c8547630c63c0c7882803 | [
"MIT"
] | 1 | 2016-07-14T05:20:54.000Z | 2016-07-14T05:20:54.000Z | homeassistant/components/automation/template.py | beschouten/home-assistant | f50c30bbbad4d92e342c8547630c63c0c7882803 | [
"MIT"
] | null | null | null | homeassistant/components/automation/template.py | beschouten/home-assistant | f50c30bbbad4d92e342c8547630c63c0c7882803 | [
"MIT"
] | 1 | 2018-11-22T13:55:23.000Z | 2018-11-22T13:55:23.000Z | """
Offer template automation rules.
For more details about this automation rule, please refer to the documentation
at https://home-assistant.io/components/automation/#template-trigger
"""
import logging
import voluptuous as vol
from homeassistant.const import (
CONF_VALUE_TEMPLATE, CONF_PLATFORM, MATCH_ALL)
from homeassistant.helpers import condition
from homeassistant.helpers.event import track_state_change
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)

# Trigger and condition variants share the same schema: the 'template'
# platform with one required value template.
TRIGGER_SCHEMA = IF_ACTION_SCHEMA = vol.Schema({
    vol.Required(CONF_PLATFORM): 'template',
    vol.Required(CONF_VALUE_TEMPLATE): cv.template,
})


def trigger(hass, config, action):
    """Listen for state changes based on configuration."""
    value_template = config.get(CONF_VALUE_TEMPLATE)

    # Local variable to keep track of if the action has already been triggered
    already_triggered = False

    def state_changed_listener(entity_id, from_s, to_s):
        """Listen for state changes and calls action."""
        # Closure state: run the action only on the False -> True edge of the
        # template result, then re-arm once the template evaluates False again.
        nonlocal already_triggered
        template_result = condition.template(hass, value_template)

        # Check to see if template returns true
        if template_result and not already_triggered:
            already_triggered = True
            action({
                'trigger': {
                    'platform': 'template',
                    'entity_id': entity_id,
                    'from_state': from_s,
                    'to_state': to_s,
                },
            })
        elif not template_result:
            already_triggered = False

    # Re-evaluate the template on every state change of any entity.
    track_state_change(hass, MATCH_ALL, state_changed_listener)
    return True
| 31.185185 | 78 | 0.685273 |
b90d3103dce5b8ebac6c941be7301ecfb360a91a | 8,571 | py | Python | containerregistry/client/docker_creds_.py | suomitekai/fairing | 9ca6a1138529b3f0b21979d62c7cb1f303bc52e0 | [
"Apache-2.0"
] | 334 | 2018-09-03T23:10:02.000Z | 2022-03-07T23:12:24.000Z | containerregistry/client/docker_creds_.py | suomitekai/fairing | 9ca6a1138529b3f0b21979d62c7cb1f303bc52e0 | [
"Apache-2.0"
] | 562 | 2018-09-03T21:33:42.000Z | 2022-03-29T12:47:43.000Z | containerregistry/client/docker_creds_.py | suomitekai/fairing | 9ca6a1138529b3f0b21979d62c7cb1f303bc52e0 | [
"Apache-2.0"
] | 160 | 2018-11-06T17:55:32.000Z | 2022-02-15T09:59:10.000Z | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This package exposes credentials for talking to a Docker registry."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import base64
import errno
import io
import json
import logging
import os
import subprocess
from containerregistry.client import docker_name
import httplib2
import six
class Provider(six.with_metaclass(abc.ABCMeta, object)):
  """Interface for providing User Credentials for use with a Docker Registry."""

  # pytype: disable=bad-return-type
  @abc.abstractmethod
  def Get(self):
    """Produces a value suitable for use in the Authorization header."""
  # pytype: enable=bad-return-type


class Anonymous(Provider):
  """Implementation for anonymous access."""

  def Get(self):
    """Implement anonymous authentication."""
    # Anonymous access carries no credential payload.
    return ''


class SchemeProvider(Provider):
  """Implementation for providing a challenge response credential."""

  def __init__(self, scheme):
    # Auth scheme name (e.g. 'Basic'), prepended to the scheme-specific suffix.
    self._scheme = scheme

  # pytype: disable=bad-return-type
  @property
  @abc.abstractmethod
  def suffix(self):
    """Returns the authentication payload to follow the auth scheme."""
  # pytype: enable=bad-return-type

  def Get(self):
    """Gets the credential in a form suitable for an Authorization header."""
    return u'%s %s' % (self._scheme, self.suffix)
class Basic(SchemeProvider):
  """Provider producing HTTP Basic credentials from a username/password pair."""

  def __init__(self, username, password):
    super(Basic, self).__init__('Basic')
    self._username = username
    self._password = password

  @property
  def username(self):
    """The username half of the credential."""
    return self._username

  @property
  def password(self):
    """The password half of the credential."""
    return self._password

  @property
  def suffix(self):
    # Go through the public properties rather than the private fields so
    # subclasses overriding them (e.g. OAuth2's dynamic password) still work.
    credentials = u'%s:%s' % (self.username, self.password)
    return base64.b64encode(credentials.encode('utf8')).decode('utf8')
# Sentinel username understood by GCR to mean "password is an access token".
_USERNAME = '_token'


class OAuth2(Basic):
  """Base class for turning OAuth2Credentials into suitable GCR credentials."""

  def __init__(self, creds,
               transport):
    """Constructor.

    Args:
      creds: the credentials from which to retrieve access tokens.
      transport: the http transport to use for token exchanges.
    """
    # The literal password is never used; `password` below overrides it with
    # a freshly fetched access token on every read.
    super(OAuth2, self).__init__(_USERNAME, 'does not matter')
    self._creds = creds
    self._transport = transport

  @property
  def password(self):
    # WORKAROUND...
    # The python oauth2client library only loads the credential from an
    # on-disk cache the first time 'refresh()' is called, and doesn't
    # actually 'Force a refresh of access_token' as advertised.
    # This call will load the credential, and the call below will refresh
    # it as needed.  If the credential is unexpired, the call below will
    # simply return a cache of this refresh.
    unused_at = self._creds.get_access_token(http=self._transport)

    # Most useful API ever:
    # https://www.googleapis.com/oauth2/v1/tokeninfo?access_token={at}
    return self._creds.get_access_token(http=self._transport).access_token
# Sentinel printed by docker-credential-* helpers when no entry exists.
_MAGIC_NOT_FOUND_MESSAGE = 'credentials not found in native keychain'


class Helper(Basic):
  """This provider wraps a particularly named credential helper."""

  def __init__(self, name, registry):
    """Constructor.

    Args:
      name: the name of the helper, as it appears in the Docker config.
      registry: the registry for which we're invoking the helper.
    """
    # The username/password placeholders are never used; Get() is overridden.
    super(Helper, self).__init__('does not matter', 'does not matter')
    self._name = name
    self._registry = registry.registry

  def Get(self):
    # Invokes:
    #   echo -n {self._registry} | docker-credential-{self._name} get
    # The resulting JSON blob will have 'Username' and 'Secret' fields.

    bin_name = 'docker-credential-{name}'.format(name=self._name)
    logging.info('Invoking %r to obtain Docker credentials.', bin_name)
    try:
      p = subprocess.Popen(
          [bin_name, 'get'],
          stdout=subprocess.PIPE,
          stdin=subprocess.PIPE,
          stderr=subprocess.STDOUT)
    except OSError as e:
      if e.errno == errno.ENOENT:
        raise Exception('executable not found: ' + bin_name)
      raise

    stdout = p.communicate(input=('https://' + self._registry).encode('utf8'))[0]
    # BUG FIX: under Python 3, communicate() returns bytes; comparing bytes
    # against the (str) sentinel was always False, so the anonymous fallback
    # never triggered. Decode once and use the text form everywhere below.
    output = stdout.decode('utf-8')

    if output.strip() == _MAGIC_NOT_FOUND_MESSAGE:
      # Use empty auth when no auth is found.
      logging.info('Credentials not found, falling back to anonymous auth.')
      return Anonymous().Get()

    if p.returncode != 0:
      raise Exception('Error fetching credential for %s, exit status: %d\n%s' %
                      (self._name, p.returncode, output))

    blob = json.loads(output)
    logging.info('Successfully obtained Docker credentials.')
    return Basic(blob['Username'], blob['Secret']).Get()
class Keychain(six.with_metaclass(abc.ABCMeta, object)):
  """Interface for resolving an image reference to a credential."""

  # pytype: disable=bad-return-type
  @abc.abstractmethod
  def Resolve(self, name):
    """Resolves the appropriate credential for the given registry.

    Args:
      name: the registry for which we need a credential.

    Returns:
      a Provider suitable for use with registry operations.
    """
  # pytype: enable=bad-return-type


# Registry spellings under which an entry may be stored in the Docker config
# file; tried in order when looking up a credential for a bare registry name.
_FORMATS = [
    # Allow naked domains
    '%s',
    # Allow scheme-prefixed.
    'https://%s',
    'http://%s',
    # Allow scheme-prefixes with version in url path.
    'https://%s/v1/',
    'http://%s/v1/',
    'https://%s/v2/',
    'http://%s/v2/',
]
def _GetUserHomeDir():
if os.name == 'nt':
# %HOME% has precedence over %USERPROFILE% for os.path.expanduser('~')
# The Docker config resides under %USERPROFILE% on Windows
return os.path.expandvars('%USERPROFILE%')
else:
return os.path.expanduser('~')
def _GetConfigDirectory():
# Return the value of $DOCKER_CONFIG, if it exists, otherwise ~/.docker
# see https://github.com/docker/docker/blob/master/cliconfig/config.go
if os.environ.get('DOCKER_CONFIG') is not None:
return os.environ.get('DOCKER_CONFIG')
else:
return os.path.join(_GetUserHomeDir(), '.docker')
class _DefaultKeychain(Keychain):
  """This implements the default docker credential resolution."""

  def Resolve(self, name):
    """Resolve a credential for `name` from the local Docker config file.

    Precedence mirrors the Docker client: per-registry credential helpers
    ('credHelpers'), then the global helper ('credsStore'), then inline
    basic-auth entries ('auths'); anonymous access when nothing matches.
    """
    # TODO(user): Consider supporting .dockercfg, which was used prior
    # to Docker 1.7 and consisted of just the contents of 'auths' below.
    logging.info('Loading Docker credentials for repository %r', str(name))
    config_file = os.path.join(_GetConfigDirectory(), 'config.json')
    try:
      with io.open(config_file, u'r', encoding='utf8') as reader:
        cfg = json.loads(reader.read())
    except IOError:
      # If the file doesn't exist, fallback on anonymous auth.
      return Anonymous()

    # Per-registry credential helpers take precedence.
    cred_store = cfg.get('credHelpers', {})
    for form in _FORMATS:
      if form % name.registry in cred_store:
        return Helper(cred_store[form % name.registry], name)

    # A global credential helper is next in precedence.
    if 'credsStore' in cfg:
      return Helper(cfg['credsStore'], name)

    # Lastly, the 'auths' section directly contains basic auth entries.
    auths = cfg.get('auths', {})
    for form in _FORMATS:
      if form % name.registry in auths:
        entry = auths[form % name.registry]
        if 'auth' in entry:
          # 'auth' holds base64("username:password").
          decoded = base64.b64decode(entry['auth']).decode('utf8')
          username, password = decoded.split(':', 1)
          return Basic(username, password)
        elif 'username' in entry and 'password' in entry:
          return Basic(entry['username'], entry['password'])
        else:
          # TODO(user): Support identitytoken
          # TODO(user): Support registrytoken
          raise Exception(
              'Unsupported entry in "auth" section of Docker config: ' +
              json.dumps(entry))

    return Anonymous()


# pylint: disable=invalid-name
# Module-level singleton implementing the default credential resolution.
DefaultKeychain = _DefaultKeychain()
| 31.054348 | 80 | 0.685101 |
b32004ed72f631ed8cd1b0ad08bf0e21f28ff0d0 | 17,205 | py | Python | gitlabfs/resolver.py | Overv/gitlabfs | 802f71abb671c8ac69bd6584800c4f151eea0bdf | [
"MIT"
] | 18 | 2019-07-14T22:06:39.000Z | 2022-01-22T16:52:38.000Z | gitlabfs/resolver.py | Overv/gitlabfs | 802f71abb671c8ac69bd6584800c4f151eea0bdf | [
"MIT"
] | 4 | 2020-03-24T17:40:50.000Z | 2021-06-02T00:09:25.000Z | gitlabfs/resolver.py | Overv/gitlabfs | 802f71abb671c8ac69bd6584800c4f151eea0bdf | [
"MIT"
] | 1 | 2021-03-24T17:10:56.000Z | 2021-03-24T17:10:56.000Z | # -*- coding: utf-8 -*-
"""File system abstraction of GitLab.
This module contains the implementation of abstracting GitLab as a file system
hierarchy.
"""
from enum import Enum
import os.path
import pathlib
import stat
import time
import gitlab
import iso8601
def create_file_attributes(permissions, time, size):
    """Build the FUSE attribute dictionary describing a regular file.

    Args:
        permissions (int): Permission bits for the file (e.g. 0o777).
        time (float): Unix timestamp of the last file modification.
        size (int): Size of the file in bytes.

    Returns:
        dict: Stat-style attributes consumed by FUSE.
    """
    attributes = {
        'st_mode': stat.S_IFREG | permissions,
        'st_size': size,
        'st_uid': os.getuid(),
        'st_gid': os.getgid(),
        'st_nlink': 1,
    }
    # A single timestamp is reported for creation, modification and access.
    attributes.update(st_ctime=time, st_mtime=time, st_atime=time)
    return attributes
def create_directory_attributes(time):
    """Build the FUSE attribute dictionary describing a read-only directory.

    Args:
        time (int): Unix timestamp of the last directory modification.

    Returns:
        dict: Stat-style attributes consumed by FUSE.
    """
    # Directories are always exposed read-only (r-xr-xr-x).
    attributes = {
        'st_mode': stat.S_IFDIR | 0o555,
        'st_uid': os.getuid(),
        'st_gid': os.getgid(),
        'st_nlink': 2,
    }
    for key in ('st_ctime', 'st_mtime', 'st_atime'):
        attributes[key] = time
    return attributes
class EntityType(Enum):
    """Types of entities in GitLab exposed as file system objects."""

    ROOT = 0             # The file-system root ('/').
    GROUP = 1            # A GitLab group.
    USER = 2             # A user namespace.
    PROJECT = 3          # A project (repository).
    REF_LEVEL = 4        # One path level inside a hierarchical ref name.
    REPOSITORY_FILE = 5  # A blob inside a repository tree.
    REPOSITORY_DIR = 6   # A tree (directory) inside a repository.
class Entity:
    """Class that represents an entity in GitLab for the file system.

    Attributes:
        type (EntityType): Type of GitLab entity.
        path (str): Full path to entity within the file system.
        attributes (dict): FUSE attributes.
        objects (dict): API objects associated with entity - if any.
    """

    def __init__(self, type, path, attributes, objects=None):
        """Initialize representation of GitLab entity.

        Args:
            type (EntityType): Type of GitLab entity.
            path (str): Full path to entity within the file system.
            attributes (dict): FUSE attributes.
            objects (dict): API objects associated with entity - if any.
        """
        self.type = type
        self.path = path
        self.attributes = attributes
        # BUG FIX: the previous `objects={}` default was a shared mutable
        # dict - mutating one entity's `objects` leaked into every other
        # entity constructed without an explicit argument.
        self.objects = objects if objects is not None else {}
class Resolver:
"""Class that resolves paths to objects within GitLab.
This class manages abstraction of representing objects within GitLab as a
file system hierarchy. This abstraction looks like the following:
/gitlab
/user
/project
/master
/README.md
/feature
/abc
/src
main.py
/group
/subgroup
/project
Attributes:
cache (gitlabfs.Cache): Cached API wrapper for GitLab.
userProjects (bool): Include user projects.
tagRefs (bool): Include tags in project refs.
commitTimes (bool): Better approximate repository file times using their
last commit time.
initTime (float): Instantiation time of the file system.
"""
def __init__(self, cache, userProjects, tagRefs, commitTimes):
    """Initialize the file system resolver.

    Args:
        cache (gitlabfs.cache.Cache): Cached GitLab API wrapper.
        userProjects (bool): Include user projects.
        tagRefs (bool): Include tags in project refs.
        commitTimes (bool): Better approximate repository file times using
            their last commit time.
    """
    self.cache = cache
    self.userProjects = userProjects
    self.tagRefs = tagRefs
    self.commitTimes = commitTimes
    # Mount time; used as the fallback timestamp for entities whose API
    # objects expose no creation time (root, groups, users).
    self.initTime = time.time()
def resolve_root(self, path):
    """Try to resolve a path as the root of GitLab.

    Args:
        path (str): Path into the file system.
    """
    # Only the literal file-system root maps to the GitLab root.
    if path != '/':
        return None
    return Entity(
        EntityType.ROOT,
        path,
        create_directory_attributes(self.initTime)
    )
def resolve_tree(self, path):
    """Try to resolve a path as the root of a project, group or user.

    Args:
        path (str): Path into the file system.
    """
    try:
        node = self.cache.get_tree(self.userProjects)[path]

        if type(node) is gitlab.v4.objects.Group:
            # Groups API does not return a creation time
            return Entity(
                EntityType.GROUP,
                path,
                create_directory_attributes(self.initTime),
                {'group': node}
            )
        elif type(node) is gitlab.v4.objects.User:
            # Users API does not return a creation time
            return Entity(
                EntityType.USER,
                path,
                create_directory_attributes(self.initTime),
                {'user': node}
            )
        elif type(node) is gitlab.v4.objects.Project:
            # Projects expose activity, so use it for the timestamps.
            projectTime = iso8601.parse_date(node.last_activity_at).timestamp()

            return Entity(
                EntityType.PROJECT,
                path,
                create_directory_attributes(projectTime),
                {'project': node}
            )
        else:
            return None
    except KeyError:
        # Path is not a node in the cached namespace tree.
        return None

def resolve_project_prefix(self, path):
    """Try to resolve a path as something within a project.

    Args:
        path (str): Path into the file system.

    Returns:
        Tuple with the project object and the path relative to that project,
        or None.
    """
    for nodePath, node in self.cache.get_tree(self.userProjects).items():
        if type(node) is gitlab.v4.objects.Project and path.startswith(nodePath):
            remainingPath = pathlib.Path(path).relative_to(pathlib.Path(nodePath))
            return node, remainingPath

    # NOTE(review): the failure value is actually the tuple (None, None).
    return None, None

def resolve_ref_prefix(self, path):
    """Try to resolve a path as something within a ref of a project.

    Args:
        path (str): Path into the file system.

    Returns:
        Tuple with the project object, ref object and a path relative to
        that project ref, or None.
    """
    project, remainingPath = self.resolve_project_prefix(path)
    if not project:
        return None, None, None

    for ref in self.cache.list_project_refs(project, self.tagRefs):
        try:
            # Succeeds only when the path starts with the full ref name,
            # which also handles hierarchical refs like 'feature/abc'.
            treePath = remainingPath.relative_to(pathlib.Path(ref.name))
            return project, ref, treePath
        except ValueError:
            continue

    return None, None, None

def resolve_partial_ref_prefix(self, path):
    """Try to resolve a path as a level within a hierarchical ref.

    Hierarchical refs are refs with path separators in the name, e.g.
    "feature/abc". These are represented as subdirectories.

    Args:
        path (str): Path into the file system.

    Returns:
        Tuple with the project object, the most recent matching ref object
        and the matched prefix, or None.
    """
    project, remainingPath = self.resolve_project_prefix(path)
    if not project:
        return None, None, None

    # Trailing slash ensures only refs strictly below this level match.
    refPrefix = remainingPath.as_posix() + '/'

    # Resolve to most recently created reference for accurate directory dates
    refs = self.cache.list_project_refs(project, self.tagRefs)
    refs = sorted(refs, key=lambda ref: -iso8601.parse_date(ref.commit['committed_date']).timestamp())
    for ref in refs:
        if ref.name.startswith(refPrefix):
            return project, ref, refPrefix

    return None, None, None
def resolve_ref(self, path):
    """Try to resolve a path as the root of a ref.

    Args:
        path (str): Path into the file system.
    """
    project, ref, remainingPath = self.resolve_ref_prefix(path)
    # A '.' relative path means the path ends exactly at the ref root.
    if not ref or remainingPath.as_posix() != '.':
        return None

    refTime = iso8601.parse_date(ref.commit['committed_date']).timestamp()

    return Entity(
        EntityType.REPOSITORY_DIR,
        path,
        create_directory_attributes(refTime),
        {'project': project, 'ref': ref}
    )

def resolve_ref_hierarchy(self, path):
    """Try to resolve a path as a level within a hierarchical ref.

    Args:
        path (str): Path into the file system.
    """
    project, ref, refPrefix = self.resolve_partial_ref_prefix(path)
    if not ref:
        return None

    # Timestamp of the most recent ref under this prefix (see
    # resolve_partial_ref_prefix).
    refTime = iso8601.parse_date(ref.commit['committed_date']).timestamp()

    return Entity(
        EntityType.REF_LEVEL,
        path,
        create_directory_attributes(refTime),
        {'project': project, 'ref': ref, 'refPrefix': refPrefix}
    )

def get_entry_properties(self, project, ref, path):
    """Look up the metadata of a file or directory within a repository.

    Note:
        Listing all entries in the parent directory is the most
        straightforward way to retrieve metadata from the GitLab API.
        Especially since there aren't any specific endpoints for looking up
        non-file objects like directories.

    Args:
        project (gitlab.v4.objects.Project): Project.
        ref (gitlab.v4.objects.ProjectBranch/ProjectTag): Ref in project.
        path (str): Path within a repository tree.

    Returns:
        The matching tree entry dict, or (implicitly) None when not found.
    """
    parentDir = os.path.dirname(path)
    targetEntry = os.path.basename(path)

    for entry in self.cache.get_repository_tree(project, ref, parentDir):
        if entry['name'] == targetEntry:
            return entry

def resolve_repository_entry(self, path):
    """Try to resolve a path as a file or directory within a repository.

    Args:
        path (str): Path into the file system.
    """
    project, ref, remainingPath = self.resolve_ref_prefix(path)
    if not ref or remainingPath.as_posix() == '.':
        return None

    # List parent directory to retrieve entry attributes
    entry = self.get_entry_properties(project, ref, remainingPath.as_posix())

    # Approximate entry age by last commit to containing ref
    refTime = iso8601.parse_date(ref.commit['committed_date']).timestamp()

    # NOTE(review): prefer 'entry is not None' here.
    if entry != None:
        if entry['type'] == 'blob':
            fileSize = self.cache.get_file_size(project, ref, remainingPath.as_posix())

            # Approximate file age more accurately by its last commit timestamp
            if self.commitTimes:
                entryTime = self.cache.get_file_commit_timestamp(project, ref, remainingPath.as_posix())
            else:
                entryTime = refTime

            # Convert mode and strip write bits
            permissions = int(entry['mode'][-3:], 8) & 0o555

            return Entity(
                EntityType.REPOSITORY_FILE,
                path,
                create_file_attributes(permissions, entryTime, fileSize),
                {'project': project, 'ref': ref, 'file': entry}
            )
        elif entry['type'] == 'tree':
            return Entity(
                EntityType.REPOSITORY_DIR,
                path,
                create_directory_attributes(refTime),
                {'project': project, 'ref': ref, 'directory': entry}
            )

    return None
def resolve_path(self, path):
"""Try to resolve path within the file system to an entity in GitLab.
Possible entities are the GitLab root, a user, a group, a project, a
ref, a level within a hierarchical ref, a file/directory within a
repository.
Args:
path (str): Path into the file system.
"""
return (
self.resolve_root(path) or
self.resolve_tree(path) or
self.resolve_ref(path) or
self.resolve_ref_hierarchy(path) or
self.resolve_repository_entry(path)
)
def list_group_members(self, entity):
    """List the contents of the GitLab root or a group.

    Args:
        entity (Entity): Entity of type ROOT, GROUP or USER.

    Returns:
        List of names of the members.
    """
    members = []

    for nodePath, node in self.cache.get_tree(self.userProjects).items():
        if nodePath.startswith(entity.path):
            # Check if node is a direct child
            distance = len(pathlib.Path(nodePath).relative_to(pathlib.Path(entity.path)).parts)
            if distance == 1:
                # Groups/projects are listed by path slug, users by username.
                if type(node) is gitlab.v4.objects.Group or type(node) is gitlab.v4.objects.Project:
                    members.append(node.path)
                elif type(node) is gitlab.v4.objects.User:
                    members.append(node.username)

    return members

def list_project_refs(self, entity):
    """List the first level of refs of a project.

    If the project contains hierarchical refs then only the first level
    of those is returned.

    For example, a repository containing the branches "master",
    "feature/abc" and "feature/def" will have this function return the list
    ["master", "feature"].

    Args:
        entity (Entity): Entity of type PROJECT.

    Returns:
        List of (partial) names of refs.
    """
    refs = []

    for ref in self.cache.list_project_refs(entity.objects['project'], self.tagRefs):
        # If ref name is hierarchical then only return first level
        if '/' in ref.name:
            refs.append(ref.name.split('/')[0])
        else:
            refs.append(ref.name)

    # Refs may contain duplicates if the same prefix occurs multiple times
    return list(set(refs))

def list_project_ref_hierarchy(self, entity):
    """List next level in a ref hierarchy.

    For example, if the repository has the branches "feature/abc" and
    "feature/foo/bar" and the entity represents the hierarchy "feature",
    then this function will return the list ["abc", "foo"].

    Args:
        entity (Entity): Entity of type REF_LEVEL.

    Returns:
        List of (partial) remaining names of refs.
    """
    refs = []

    for ref in self.cache.list_project_refs(entity.objects['project'], self.tagRefs):
        if ref.name.startswith(entity.objects['refPrefix']):
            # First component after stripping the matched prefix.
            remainingRefName = pathlib.Path(ref.name).relative_to(pathlib.Path(entity.objects['refPrefix'])).parts[0]
            refs.append(remainingRefName)

    return refs

def list_repository_directory(self, entity):
    """List the files and directories in a repository subdirectory.

    Args:
        entity (Entity): Entity of type REPOSITORY_DIR.

    Returns:
        List of file and directory names.
    """
    members = []

    # There is no directory object if this is the repository root
    path = ''
    if 'directory' in entity.objects:
        path = entity.objects['directory']['path']

    for entry in self.cache.get_repository_tree(entity.objects['project'], entity.objects['ref'], path):
        # Only blobs (files) and trees (directories) are exposed.
        if entry['type'] in ('blob', 'tree'):
            members.append(entry['name'])

    return members
def list_members(self, entity):
    """List the files and directories contained within an entity.

    Args:
        entity (Entity): Entity of any type except for REPOSITORY_FILE.

    Returns:
        List of file and directory names contained within.
    """
    # Dispatch table keyed on entity type; REPOSITORY_FILE (and anything
    # unrecognized) intentionally falls through to None.
    dispatch = {
        EntityType.ROOT: self.list_group_members,
        EntityType.GROUP: self.list_group_members,
        EntityType.USER: self.list_group_members,
        EntityType.PROJECT: self.list_project_refs,
        EntityType.REF_LEVEL: self.list_project_ref_hierarchy,
        EntityType.REPOSITORY_DIR: self.list_repository_directory,
    }
    handler = dispatch.get(entity.type)
    return handler(entity) if handler else None
def read_file(self, entity):
"""Read the contents of a file within a repository.
Note:
See `gitlabfs.Cache` for why this function does not support reading
a specific byte range.
Args:
entity (Entity): Entity of type REPOSITORY_FILE.
Returns:
Byte string of all file contents.
"""
return self.cache.read_file(
entity.objects['project'],
entity.objects['ref'],
entity.objects['file']['path']
) | 31.112116 | 121 | 0.57925 |
5ef83b137175b631cb58e4fea8bd023f395b5de3 | 12,226 | py | Python | forml/flow/graph/node.py | mpearmain/forml | 3d69d27b1a64a237c739d2795512b6f9296d3874 | [
"Apache-2.0"
] | null | null | null | forml/flow/graph/node.py | mpearmain/forml | 3d69d27b1a64a237c739d2795512b6f9296d3874 | [
"Apache-2.0"
] | null | null | null | forml/flow/graph/node.py | mpearmain/forml | 3d69d27b1a64a237c739d2795512b6f9296d3874 | [
"Apache-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Graph node entities.
Output ports:
* apply (multi-port)
Input ports:
* apply (multi-port)
* train
* label
Each port can have at most one publisher.
Apply and train input port subscriptions are exclusive.
Trained node cannot be copied.
"""
import abc
import collections
import typing
import uuid
from collections import abc as colabc
from forml.flow import task, error
from forml.flow.graph import port
class Visitor:
    """View visitor interface."""

    def visit_node(self, node: 'Atomic') -> None:
        """Node visit.

        Default implementation is a no-op; concrete visitors override it.

        Args:
            node: Visited node.
        """
class Port(colabc.Iterable):
    """Output port subscriptions as an ordered set.

    Backed by a mapping keyed by the subscriptions (values unused), which
    collapses duplicates while retaining insertion order.
    """

    def __init__(self):
        self._subscriptions: typing.Dict[port.Subscription, None] = collections.OrderedDict()

    def add(self, subscription: port.Subscription) -> None:
        """Add new subscription to this port.

        Args:
            subscription: Subscription to be registered.
        """
        self._subscriptions[subscription] = None

    def __iter__(self):
        # Iterating the mapping yields subscriptions in insertion order.
        yield from self._subscriptions
class Atomic(metaclass=abc.ABCMeta):
    """Abstract primitive task graph node."""

    def __init__(self, szin: int, szout: int):
        # A node must have non-negative arity and at least one port in total.
        if min(szin, szout) < 0 or szin == szout == 0:
            raise ValueError('Invalid node shape')
        self.szin: int = szin
        self.uid: uuid.UUID = uuid.uuid4()
        self._output: typing.Tuple[Port] = tuple(Port() for _ in range(szout))

    def __repr__(self):
        return f'{self.__class__.__name__}[uid={self.uid}]'

    def __getitem__(self, index) -> port.PubSub:
        """Semantical construct for creating PubSub port instance.

        Args:
            index: Input/output apply port index.

        Returns: Applicable instance
        """
        return port.PubSub(self, index)

    def __eq__(self, other: typing.Any) -> bool:
        """If each node is of different type the equality is based on the equality of their subscriptions. Otherwise the
        equality is based on object identity.

        Args:
            other: Object to compare with.

        Returns: True if equal.
        """
        if isinstance(other, Atomic) and other.__class__ is not self.__class__:
            # Cross-type comparison (e.g. a Future standing in for a Worker):
            # equal when shapes match and all output subscriptions agree.
            # NOTE(review): Port defines no __bool__/__len__, so
            # any(self._output) is True whenever szout > 0 - confirm this
            # matches the "has any subscriptions" intent.
            return (
                self.szout == other.szout
                and any(self._output)
                and all(s == o for s, o in zip(self.output, other.output))
            )
        return id(self) == id(other)

    def __hash__(self) -> int:
        """We need a Future node to appear identical to a Worker node of same shape and subscriptions (so that the
        Future can represent a placeholder for that Worker). From that reason we need to hash both of these instances
        into same hashcode and the only attributes can distinguish them in that case is the shape.

        Returns: Node hashcode.
        """
        return hash(self.szin) ^ hash(self.szout)

    def accept(self, visitor: Visitor) -> None:
        """Visitor entrypoint.

        Args:
            visitor: Accepted visitor.
        """
        visitor.visit_node(self)

    @property
    def szout(self) -> int:
        """Width of the output apply port.

        Returns: Output apply port width.
        """
        return len(self._output)

    @property
    def output(self) -> typing.Sequence[typing.Iterable[port.Subscription]]:
        """Get list of output subscriptions per each port.

        Returns: Output subscriptions.
        """
        return tuple(tuple(s) for s in self._output)

    def _publish(self, index: int, subscription: port.Subscription) -> None:
        """Publish an output port based on the given subscription.

        Args:
            index: Output port index to publish from.
            subscription: Subscriber node and port to publish to.
        """
        assert 0 <= index < self.szout, 'Invalid output index'
        # A node feeding its own input would create a trivial cycle.
        if self is subscription.node:
            raise error.Topology('Self subscription')
        self._output[index].add(subscription)

    @abc.abstractmethod
    def subscribed(self, publisher: 'Atomic') -> bool:
        """Checking we are on given node's subscription list.

        Args:
            publisher: Node to check for being it's subscriber,

        Returns: True if we are given node's subscriber.
        """

    @abc.abstractmethod
    def fork(self) -> 'Atomic':
        """Create new node with same shape and actor as self but without any subscriptions.

        Returns: Forked node.
        """
class Worker(Atomic):
    """Main primitive node type."""

    class Group(set):
        """Container for holding all forked workers."""

        def __init__(self, spec: task.Spec):
            super().__init__()
            self.spec: task.Spec = spec
            self.uid: uuid.UUID = uuid.uuid4()

        def __repr__(self):
            return f'{self.spec}[uid={self.uid}]'

    def __init__(self, meta: typing.Union[task.Spec, Group], szin: int, szout: int):
        super().__init__(szin, szout)
        # Join an existing fork group when forking, otherwise start a new one.
        fork_group = meta if isinstance(meta, Worker.Group) else self.Group(meta)
        self._group: Worker.Group = fork_group
        fork_group.add(self)

    def __repr__(self):
        return repr(self._group)

    @property
    def spec(self) -> task.Spec:
        """Task spec wrapped by this worker.

        Returns: Task spec.
        """
        return self._group.spec

    def _publish(self, index: int, subscription: port.Subscription) -> None:
        """Publish an output port based on the given subscription.

        A trained node is a training sink so it must not be publishing.

        Args:
            index: Output port index to publish from.
            subscription: Subscriber node and port to publish to.
        """
        if self.trained:
            raise error.Topology('Trained node publishing')
        super()._publish(index, subscription)

    @property
    def input(self) -> typing.Iterable[port.Type]:
        """Get subscribed input ports.

        Returns: Ports.
        """
        return port.Subscription.ports(self)

    @property
    def trained(self) -> bool:
        """Check whether this node is subscribed for training data.

        Returns: True if trained.
        """
        for subscribed_port in self.input:
            if isinstance(subscribed_port, (port.Train, port.Label)):
                return True
        return False

    @property
    def stateful(self) -> bool:
        """Check whether this worker's actor is stateful.

        Returns: True if stateful.
        """
        return self.spec.actor.is_stateful()

    @property
    def gid(self) -> uuid.UUID:
        """Group ID shared by all forks of this worker.

        Returns: Group ID.
        """
        return self._group.uid

    @property
    def group(self) -> typing.AbstractSet['Worker']:
        """Set of forked workers in the same fork group.

        Returns: Workers in same fork group.
        """
        return frozenset(self._group)

    def train(self, train: port.Publishable, label: port.Publishable) -> None:
        """Subscribe this node's train and label ports to the given publishers.

        Only a single fork in the group may be trained and only a stateful
        actor can consume training data.

        Args:
            train: Train port publisher.
            label: Label port publisher.
        """
        if any(worker.trained for worker in self._group):
            raise error.Topology('Fork train collision')
        if not self.stateful:
            raise error.Topology('Stateless node training')
        train.publish(self, port.Train())
        label.publish(self, port.Label())

    def subscribed(self, publisher: 'Atomic') -> bool:
        """Check whether we are on the given node's subscription list.

        Args:
            publisher: Node to check for being its subscriber.
        Returns: True if we are the given node's subscriber.
        """
        for subscriptions in publisher.output:
            for subscription in subscriptions:
                if subscription.node is self:
                    return True
        return False

    def fork(self) -> 'Worker':
        """Create a new node with the same shape and actor as self but
        without any subscriptions.

        Returns: Forked node.
        """
        return Worker(self._group, self.szin, self.szout)

    @classmethod
    def fgen(cls, spec: task.Spec, szin: int, szout: int) -> typing.Generator['Worker', None, None]:
        """Generator producing forks of the same node.

        Args:
            spec: Worker spec.
            szin: Worker input apply port size.
            szout: Worker output apply port size.
        Returns: Generator producing worker forks.
        """
        primary = cls(spec, szin, szout)
        yield primary
        while True:
            yield primary.fork()
class Future(Atomic):
    """Fake transparent apply port node acting as a lazy publisher/subscriber
    placeholder that disappears from the chain once it gets connected to
    another apply node(s).
    """

    class PubSub(port.PubSub):
        """Overridden implementation that does the proxied publishing/subscription."""

        def __init__(
            self,
            node: 'Future',
            index: int,
            register: typing.Callable[[port.Publishable], None],
            sync: typing.Callable[[], None],
        ):
            super().__init__(node, index)
            self._register: typing.Callable[[port.Publishable], None] = register
            self._sync: typing.Callable[[], None] = sync

        def subscribe(self, publisher: port.Publishable) -> None:
            """Register the publisher for future subscriptions.

            Args:
                publisher: Actual left side publisher to be used for all the
                    interim subscriptions.
            """
            self._register(publisher)
            self._sync()

    def __init__(self, szin: int = 1, szout: int = 1):
        super().__init__(szin, szout)
        # Mapping of registered left-side publishers to our output port index.
        self._proxy: typing.Dict[port.Publishable, int] = {}

    def __getitem__(self, index) -> port.PubSub:
        def remember(publisher: port.Publishable) -> None:
            """Callback for publisher proxy registration.

            Args:
                publisher: Left side publisher
            """
            if publisher in self._proxy:
                raise error.Topology('Publisher collision')
            self._proxy[publisher] = index

        return self.PubSub(self, index, remember, self._sync)

    def subscribed(self, publisher: 'Atomic') -> bool:
        """Overridden subscription checker - a Future node checks the
        subscriptions through its proxy registrations.

        Args:
            publisher: Node to check for being its subscriber.
        Returns: True if we are the given node's subscriber.
        """
        for publishable in self._proxy:
            if publishable._node.subscribed(publisher):  # pylint: disable=protected-access
                return True
        return False

    def _sync(self) -> None:
        """Callback interconnecting the proxied registrations with the
        current output subscriptions."""
        for publisher, index in self._proxy.items():
            for subscription in self._output[index]:
                publisher.republish(subscription)

    def _publish(self, index: int, subscription: port.Subscription) -> None:
        """Upstream publish followed by proxy synchronization.

        Args:
            index: Output port index to publish from.
            subscription: Subscriber node and port to publish to.
        """
        super()._publish(index, subscription)
        self._sync()

    def fork(self) -> 'Future':
        """There is nothing to copy on a Future node so just create a new one.

        Returns: New Future node.
        """
        return Future(self.szin, self.szout)
| 31.673575 | 120 | 0.62089 |
278200888f4e8f753b778826d4f531246c06576c | 596 | py | Python | Week_3/EcommerceProject/ecommerce/views.py | girisagar46/DjangoTrainingClass | 373f4151b2ee46ea8f76ffa344603014e87d9764 | [
"MIT"
] | null | null | null | Week_3/EcommerceProject/ecommerce/views.py | girisagar46/DjangoTrainingClass | 373f4151b2ee46ea8f76ffa344603014e87d9764 | [
"MIT"
] | null | null | null | Week_3/EcommerceProject/ecommerce/views.py | girisagar46/DjangoTrainingClass | 373f4151b2ee46ea8f76ffa344603014e87d9764 | [
"MIT"
] | null | null | null | from django.utils import timezone
from django.shortcuts import render
from django.views.generic import ListView
from .models import Product
# def index(request):
# products = Product.objects.all()
# ctx = {
# "products": products
# }
# return render(request, 'index.html', context=ctx)
class ProductListView(ListView):
    """List view over the product catalogue.

    Relies on the default ``ListView`` template/context conventions and
    additionally exposes the current time to the template as ``now``.
    """

    model = Product

    def get_context_data(self, **kwargs):
        """Extend the default context with the render timestamp.

        Returns:
            dict: Template context including ``now`` (timezone-aware).
        """
        context = super(ProductListView, self).get_context_data(**kwargs)
        context['now'] = timezone.now()
        # NOTE: removed leftover debug print(context) - it polluted stdout
        # on every request.
        return context
e87ac00b1c9872eea73d278e0b31aa2e99281609 | 51,981 | py | Python | kivymd/uix/textfield/textfield.py | KivyAcademy/KivyMD | ab3966914ff5f764b210d8ba44ec606b2db19490 | [
"MIT"
] | null | null | null | kivymd/uix/textfield/textfield.py | KivyAcademy/KivyMD | ab3966914ff5f764b210d8ba44ec606b2db19490 | [
"MIT"
] | null | null | null | kivymd/uix/textfield/textfield.py | KivyAcademy/KivyMD | ab3966914ff5f764b210d8ba44ec606b2db19490 | [
"MIT"
] | null | null | null | """
Components/TextField
====================
.. seealso::
`Material Design spec, Text fields <https://material.io/components/text-fields>`_
.. rubric:: Text fields let users enter and edit text.
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-fields.png
:align: center
`KivyMD` provides the following field classes for use:
- MDTextField_
- MDTextFieldRound_
- MDTextFieldRect_
.. Note:: :class:`~MDTextField` inherited from
:class:`~kivy.uix.textinput.TextInput`. Therefore, most parameters and all
events of the :class:`~kivy.uix.textinput.TextInput` class are also
available in the :class:`~MDTextField` class.
.. MDTextField:
MDTextField
-----------
:class:`~MDTextField` can be with helper text and without.
Without helper text mode
------------------------
.. code-block:: kv
MDTextField:
hint_text: "No helper text"
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-no-helper-mode.gif
:align: center
Helper text mode on ``on_focus`` event
--------------------------------------
.. code-block:: kv
MDTextField:
hint_text: "Helper text on focus"
helper_text: "This will disappear when you click off"
helper_text_mode: "on_focus"
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-helper-mode-on-focus.gif
:align: center
Persistent helper text mode
---------------------------
.. code-block:: kv
MDTextField:
hint_text: "Persistent helper text"
helper_text: "Text is always here"
helper_text_mode: "persistent"
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-helper-mode-persistent.gif
:align: center
Helper text mode `'on_error'`
-----------------------------
To display an error in a text field when using the
``helper_text_mode: "on_error"`` parameter, set the `"error"` text field
parameter to `True`:
.. code-block:: python
from kivy.lang import Builder
from kivymd.app import MDApp
KV = '''
BoxLayout:
padding: "10dp"
MDTextField:
id: text_field_error
hint_text: "Helper text on error (press 'Enter')"
helper_text: "There will always be a mistake"
helper_text_mode: "on_error"
pos_hint: {"center_y": .5}
'''
class Test(MDApp):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.screen = Builder.load_string(KV)
def build(self):
self.screen.ids.text_field_error.bind(
on_text_validate=self.set_error_message,
on_focus=self.set_error_message,
)
return self.screen
def set_error_message(self, instance_textfield):
self.screen.ids.text_field_error.error = True
Test().run()
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-helper-mode-on-error.gif
:align: center
Helper text mode `'on_error'` (with required)
---------------------------------------------
.. code-block:: kv
MDTextField:
hint_text: "required = True"
required: True
helper_text_mode: "on_error"
helper_text: "Enter text"
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-required.gif
:align: center
Text length control
-------------------
.. code-block:: kv
MDTextField:
hint_text: "Max text length = 5"
max_text_length: 5
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-text-length.gif
:align: center
Multi line text
---------------
.. code-block:: kv
MDTextField:
multiline: True
hint_text: "Multi-line text"
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-text-multi-line.gif
:align: center
Rectangle mode
--------------
.. code-block:: kv
MDTextField:
hint_text: "Rectangle mode"
mode: "rectangle"
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-rectangle-mode.gif
:align: center
Fill mode
---------
.. code-block:: kv
MDTextField:
hint_text: "Fill mode"
mode: "fill"
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-fill-mode.gif
:align: center
.. MDTextFieldRect:
MDTextFieldRect
---------------
.. Note:: :class:`~MDTextFieldRect` inherited from
:class:`~kivy.uix.textinput.TextInput`. You can use all parameters and
attributes of the :class:`~kivy.uix.textinput.TextInput` class in the
:class:`~MDTextFieldRect` class.
.. code-block:: kv
MDTextFieldRect:
size_hint: 1, None
height: "30dp"
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-rect.gif
:align: center
.. Warning:: Currently there is no way to change the color of the border.
.. MDTextFieldRound:
MDTextFieldRound
----------------
Without icon
------------
.. code-block:: kv
MDTextFieldRound:
hint_text: 'Empty field'
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-round.gif
:align: center
With left icon
--------------
.. Warning:: The icons in the :class:`~MDTextFieldRound` are static. You cannot
bind events to them.
.. code-block:: kv
MDTextFieldRound:
icon_left: "email"
hint_text: "Field with left icon"
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-round-left-icon.png
:align: center
With left and right icons
-------------------------
.. code-block:: kv
MDTextFieldRound:
icon_left: 'key-variant'
icon_right: 'eye-off'
hint_text: 'Field with left and right icons'
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-round-left-right-icon.png
:align: center
Control background color
------------------------
.. code-block:: kv
MDTextFieldRound:
icon_left: 'key-variant'
normal_color: app.theme_cls.accent_color
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-round-normal-color.gif
:align: center
.. code-block:: kv
MDTextFieldRound:
icon_left: 'key-variant'
normal_color: app.theme_cls.accent_color
color_active: 1, 0, 0, 1
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-round-active-color.gif
:align: center
Clickable icon for MDTextFieldRound
-----------------------------------
.. code-block:: python
from kivy.lang import Builder
from kivy.properties import StringProperty
from kivymd.app import MDApp
from kivymd.uix.relativelayout import MDRelativeLayout
KV = '''
<ClickableTextFieldRound>:
size_hint_y: None
height: text_field.height
MDTextFieldRound:
id: text_field
hint_text: root.hint_text
text: root.text
password: True
color_active: app.theme_cls.primary_light
icon_left: "key-variant"
padding:
self._lbl_icon_left.texture_size[1] + dp(10) if self.icon_left else dp(15), \
(self.height / 2) - (self.line_height / 2), \
self._lbl_icon_right.texture_size[1] + dp(20), \
0
MDIconButton:
icon: "eye-off"
ripple_scale: .5
pos_hint: {"center_y": .5}
pos: text_field.width - self.width + dp(8), 0
on_release:
self.icon = "eye" if self.icon == "eye-off" else "eye-off"
text_field.password = False if text_field.password is True else True
MDScreen:
ClickableTextFieldRound:
size_hint_x: None
width: "300dp"
hint_text: "Password"
pos_hint: {"center_x": .5, "center_y": .5}
'''
class ClickableTextFieldRound(MDRelativeLayout):
text = StringProperty()
hint_text = StringProperty()
# Here specify the required parameters for MDTextFieldRound:
# [...]
class Test(MDApp):
def build(self):
return Builder.load_string(KV)
Test().run()
.. seealso::
See more information in the :class:`~MDTextFieldRect` class.
"""
__all__ = ("MDTextField", "MDTextFieldRect", "MDTextFieldRound")
import os
import re
from typing import NoReturn, Union
from kivy.animation import Animation
from kivy.clock import Clock
from kivy.lang import Builder
from kivy.metrics import dp, sp
from kivy.properties import (
AliasProperty,
BooleanProperty,
ColorProperty,
DictProperty,
ListProperty,
NumericProperty,
ObjectProperty,
OptionProperty,
StringProperty,
)
from kivy.uix.label import Label
from kivy.uix.textinput import TextInput
from kivymd import uix_path
from kivymd.font_definitions import theme_font_styles
from kivymd.theming import ThemableBehavior
from kivymd.uix.label import MDIcon
# Load the accompanying kv rules (widget layout/graphics) for the text field
# classes declared below.
with open(
    os.path.join(uix_path, "textfield", "textfield.kv"), encoding="utf-8"
) as kv_file:
    Builder.load_string(kv_file.read())
class MDTextFieldRect(ThemableBehavior, TextInput):
    """Rectangular text field drawing an animated frame around itself."""

    line_anim = BooleanProperty(True)
    """
    If True, then text field shows animated line when on focus.

    :attr:`line_anim` is an :class:`~kivy.properties.BooleanProperty`
    and defaults to `True`.
    """

    def get_rect_instruction(self):
        return self.canvas.after.get_group("rectangle")[0]

    _rectangle = AliasProperty(get_rect_instruction, cache=True)
    """
    It is the :class:`~kivy.graphics.vertex_instructions.Line`
    instruction reference of the field rectangle.

    :attr:`_rectangle` is an :class:`~kivy.properties.AliasProperty`.
    """

    def get_color_instruction(self):
        return self.canvas.after.get_group("color")[0]

    _rectangle_color = AliasProperty(get_color_instruction, cache=True)
    """
    It is the :class:`~kivy.graphics.context_instructions.Color`
    instruction reference of the field rectangle.

    :attr:`_rectangle_color` is an :class:`~kivy.properties.AliasProperty`.
    """

    _primary_color = ColorProperty((0, 0, 0, 0))

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self._update_primary_color()
        self.theme_cls.bind(primary_color=self._update_primary_color)

    def anim_rect(self, points, alpha):
        """Animate the frame outline/color towards the given geometry/alpha."""
        # Slower easing when showing the frame, snappier when hiding it.
        line_duration, color_duration = (0.3, 0.4) if alpha == 1 else (0.05, 0.05)
        if not self.line_anim:
            line_duration = color_duration = 0
        Animation(points=points, d=line_duration, t="out_cubic").start(
            self._rectangle
        )
        Animation(a=alpha, d=color_duration).start(self._rectangle_color)

    def _update_primary_color(self, *args):
        # Take the current theme color but start fully transparent;
        # anim_rect later fades the alpha in.
        self._primary_color = self.theme_cls.primary_color
        self._primary_color[3] = 0
class TextfieldLabel(ThemableBehavior, Label):
    """Base texture for :class:`~MDTextField` class."""

    font_style = OptionProperty("Body1", options=theme_font_styles)
    # <kivymd.uix.textfield.MDTextField object>
    field = ObjectProperty()

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Resolve the pixel size of the configured font style from the theme.
        style_size = self.theme_cls.font_styles[self.font_style][1]
        self.font_size = sp(style_size)
class MDTextField(ThemableBehavior, TextInput):
helper_text = StringProperty()
"""
Text for ``helper_text`` mode.
:attr:`helper_text` is an :class:`~kivy.properties.StringProperty`
and defaults to `''`.
"""
helper_text_mode = OptionProperty(
"on_focus", options=["on_error", "persistent", "on_focus"]
)
"""
Helper text mode. Available options are: `'on_error'`, `'persistent'`,
`'on_focus'`.
:attr:`helper_text_mode` is an :class:`~kivy.properties.OptionProperty`
and defaults to `'none'`.
"""
max_text_length = NumericProperty(None)
"""
Maximum allowed value of characters in a text field.
:attr:`max_text_length` is an :class:`~kivy.properties.NumericProperty`
and defaults to `None`.
"""
required = BooleanProperty(False)
"""
Required text. If True then the text field requires text.
:attr:`required` is an :class:`~kivy.properties.BooleanProperty`
and defaults to `False`.
"""
color_mode = OptionProperty(
"primary", options=["primary", "accent", "custom"], deprecated=True
)
"""
Color text mode. Available options are: `'primary'`, `'accent'`,
`'custom'`.
.. deprecated:: 1.0.0
Don't use this attribute.
:attr:`color_mode` is an :class:`~kivy.properties.OptionProperty`
and defaults to `'primary'`.
"""
mode = OptionProperty("line", options=["rectangle", "fill", "line"])
"""
Text field mode. Available options are: `'line'`, `'rectangle'`, `'fill'`.
:attr:`mode` is an :class:`~kivy.properties.OptionProperty`
and defaults to `'line'`.
"""
line_color_normal = ColorProperty([0, 0, 0, 0])
"""
Line color normal (static underline line) in ``rgba`` format.
.. code-block:: kv
MDTextField:
hint_text: "line_color_normal"
line_color_normal: 1, 0, 1, 1
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-line-color-normal.gif
:align: center
:attr:`line_color_normal` is an :class:`~kivy.properties.ColorProperty`
and defaults to `[0, 0, 0, 0]`.
"""
line_color_focus = ColorProperty([0, 0, 0, 0])
"""
Line color focus (active underline line) in ``rgba`` format.
.. code-block:: kv
MDTextField:
hint_text: "line_color_focus"
line_color_focus: 0, 1, 0, 1
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-line-color-focus.gif
:align: center
:attr:`line_color_focus` is an :class:`~kivy.properties.ColorProperty`
and defaults to `[0, 0, 0, 0]`.
"""
line_anim = BooleanProperty(True)
"""
If True, then text field shows animated line when on focus.
:attr:`line_anim` is an :class:`~kivy.properties.BooleanProperty`
and defaults to `True`.
"""
error_color = ColorProperty([0, 0, 0, 0])
"""
Error color in ``rgba`` format for ``required = True``.
:attr:`error_color` is an :class:`~kivy.properties.ColorProperty`
and defaults to `[0, 0, 0, 0]`.
"""
fill_color = ColorProperty([0, 0, 0, 0], deprecated=True)
"""
The background color of the fill in rgba format when the ``mode`` parameter
is "fill".
.. deprecated:: 1.0.0
Use :attr:`fill_color_normal` and :attr:`fill_color_focus` instead.
:attr:`fill_color` is an :class:`~kivy.properties.ColorProperty`
and defaults to `[0, 0, 0, 0]`.
"""
fill_color_normal = ColorProperty([0, 0, 0, 0])
"""
Fill background color in 'fill' mode when text field is out of focus.
:attr:`fill_color_normal` is an :class:`~kivy.properties.ColorProperty`
and defaults to `[0, 0, 0, 0]`.
"""
fill_color_focus = ColorProperty([0, 0, 0, 0])
"""
Fill background color in 'fill' mode when the text field has focus.
:attr:`fill_color_focus` is an :class:`~kivy.properties.ColorProperty`
and defaults to `[0, 0, 0, 0]`.
"""
active_line = BooleanProperty(True)
"""
Show active line or not.
:attr:`active_line` is an :class:`~kivy.properties.BooleanProperty`
and defaults to `True`.
"""
error = BooleanProperty(False)
"""
If True, then the text field goes into ``error`` mode.
:attr:`error` is an :class:`~kivy.properties.BooleanProperty`
and defaults to `False`.
"""
current_hint_text_color = ColorProperty([0, 0, 0, 0], deprecated=True)
"""
Hint text color.
.. deprecated:: 1.0.0
Use :attr:`hint_text_color_normal` and :attr:`hint_text_color_focus` instead.
:attr:`current_hint_text_color` is an :class:`~kivy.properties.ColorProperty`
and defaults to `[0, 0, 0, 0]`.
"""
hint_text_color_normal = ColorProperty([0, 0, 0, 0])
"""
Hint text color when text field is out of focus.
.. versionadded:: 1.0.0
.. code-block:: kv
MDTextField:
hint_text: "hint_text_color_normal"
hint_text_color_normal: 0, 1, 0, 1
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-hint-text-color-normal.gif
:align: center
:attr:`hint_text_color_normal` is an :class:`~kivy.properties.ColorProperty`
and defaults to `[0, 0, 0, 0]`.
"""
hint_text_color_focus = ColorProperty([0, 0, 0, 0])
"""
Hint text color when the text field has focus.
.. versionadded:: 1.0.0
.. code-block:: kv
MDTextField:
hint_text: "hint_text_color_focus"
hint_text_color_focus: 0, 1, 0, 1
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-hint-text-color-focus.gif
:align: center
:attr:`hint_text_color_focus` is an :class:`~kivy.properties.ColorProperty`
and defaults to `[0, 0, 0, 0]`.
"""
helper_text_color_normal = ColorProperty([0, 0, 0, 0])
"""
Helper text color when text field is out of focus.
.. versionadded:: 1.0.0
.. code-block:: kv
MDTextField:
helper_text: "helper_text_color_normal"
helper_text_mode: "persistent"
helper_text_color_normal: 0, 1, 0, 1
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-helper-text-color-normal.png
:align: center
:attr:`helper_text_color_normal` is an :class:`~kivy.properties.ColorProperty`
and defaults to `[0, 0, 0, 0]`.
"""
helper_text_color_focus = ColorProperty([0, 0, 0, 0])
"""
Helper text color when the text field has focus.
.. versionadded:: 1.0.0
.. code-block:: kv
MDTextField:
helper_text: "helper_text_color_focus"
helper_text_mode: "persistent"
helper_text_color_focus: 0, 1, 0, 1
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-helper-text-color-focus.gif
:align: center
:attr:`helper_text_color_focus` is an :class:`~kivy.properties.ColorProperty`
and defaults to `[0, 0, 0, 0]`.
"""
icon_right_color_normal = ColorProperty([0, 0, 0, 0])
"""
Color of right icon when text field is out of focus.
.. versionadded:: 1.0.0
.. code-block:: kv
MDTextField:
icon_right: "language-python"
hint_text: "icon_right_color_normal"
icon_right_color_normal: 0, 1, 0, 1
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-icon-right-color-normal.gif
:align: center
:attr:`icon_right_color_normal` is an :class:`~kivy.properties.ColorProperty`
and defaults to `[0, 0, 0, 0]`.
"""
icon_right_color_focus = ColorProperty([0, 0, 0, 0])
"""
Color of right icon when the text field has focus.
.. versionadded:: 1.0.0
.. code-block:: kv
MDTextField:
icon_right: "language-python"
hint_text: "icon_right_color_focus"
icon_right_color_focus: 0, 1, 0, 1
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-icon-right-color-focus.gif
:align: center
:attr:`icon_right_color_focus` is an :class:`~kivy.properties.ColorProperty`
and defaults to `[0, 0, 0, 0]`.
"""
icon_left_color_normal = ColorProperty([0, 0, 0, 0])
"""
Color of right icon when text field is out of focus.
.. versionadded:: 1.0.0
.. code-block:: kv
MDTextField:
icon_right: "language-python"
hint_text: "icon_right_color_normal"
icon_left_color_normal: 0, 1, 0, 1
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-icon-right-color-normal.gif
:align: center
:attr:`icon_left_color_normal` is an :class:`~kivy.properties.ColorProperty`
and defaults to `[0, 0, 0, 0]`.
"""
icon_left_color_focus = ColorProperty([0, 0, 0, 0])
"""
Color of right icon when the text field has focus.
.. versionadded:: 1.0.0
.. code-block:: kv
MDTextField:
icon_right: "language-python"
hint_text: "icon_right_color_focus"
icon_right_color_focus: 0, 1, 0, 1
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-icon-right-color-focus.gif
:align: center
:attr:`icon_left_color_focus` is an :class:`~kivy.properties.ColorProperty`
and defaults to `[0, 0, 0, 0]`.
"""
max_length_text_color = ColorProperty([0, 0, 0, 0])
"""
Text color of the maximum length of characters to be input.
.. versionadded:: 1.0.0
.. code-block:: kv
MDTextField:
hint_text: "max_length_text_color"
max_length_text_color: 0, 1, 0, 1
max_text_length: 5
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-max-length-text-color.gif
:align: center
:attr:`max_length_text_color` is an :class:`~kivy.properties.ColorProperty`
and defaults to `[0, 0, 0, 0]`.
"""
icon_right = StringProperty()
"""
Right icon texture.
.. note:: It's just a texture. It has no press/touch events.
:attr:`icon_right` is an :class:`~kivy.properties.StringProperty`
and defaults to `''`.
"""
icon_left = StringProperty()
"""
Left icon texture.
.. versionadded:: 1.0.0
.. note:: It's just a texture. It has no press/touch events.
Also note that you cannot use the left and right icons at the same time yet.
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-left-icon.png
:align: center
:attr:`icon_left` is an :class:`~kivy.properties.StringProperty`
and defaults to `''`.
"""
icon_right_color = ColorProperty([0, 0, 0, 1], deprecated=True)
"""
Color of right icon in ``rgba`` format.
.. deprecated:: 1.0.0
Don't use this attribute.
:attr:`icon_right_color` is an :class:`~kivy.properties.ColorProperty`
and defaults to `[0, 0, 0, 1]`.
"""
text_color = ColorProperty([0, 0, 0, 0], deprecated=True)
"""
Text color in ``rgba`` format.
.. deprecated:: 1.0.0
Use :attr:`text_color_normal` and :attr:`text_color_focus` instead.
:attr:`text_color` is an :class:`~kivy.properties.ColorProperty`
and defaults to `[0, 0, 0, 0]`.
"""
text_color_normal = ColorProperty([0, 0, 0, 0])
"""
Text color in ``rgba`` format when text field is out of focus.
.. versionadded:: 1.0.0
.. code-block:: kv
MDTextField:
hint_text: "text_color_normal"
text_color_normal: 0, 1, 0, 1
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-text-color-normal.gif
:align: center
:attr:`text_color_normal` is an :class:`~kivy.properties.ColorProperty`
and defaults to `[0, 0, 0, 0]`.
"""
text_color_focus = ColorProperty([0, 0, 0, 0])
"""
Text color in ``rgba`` format when text field has focus.
.. versionadded:: 1.0.0
.. code-block:: kv
MDTextField:
hint_text: "text_color_focus"
text_color_focus: 0, 1, 0, 1
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-text-color-focus.gif
:align: center
:attr:`text_color_focus` is an :class:`~kivy.properties.ColorProperty`
and defaults to `[0, 0, 0, 0]`.
"""
font_size = NumericProperty("16sp")
"""
Font size of the text in pixels.
:attr:`font_size` is a :class:`~kivy.properties.NumericProperty` and
defaults to `'16sp'`.
"""
# TODO: Add minimum allowed height. Otherwise, if the value is,
# for example, 20, the text field will simply be lessened.
max_height = NumericProperty(0)
"""
Maximum height of the text box when `multiline = True`.
.. code-block:: kv
MDTextField:
size_hint_x: .5
hint_text: "multiline=True"
max_height: "200dp"
mode: "fill"
fill_color: 0, 0, 0, .4
multiline: True
pos_hint: {"center_x": .5, "center_y": .5}
.. image:: https://github.com/HeaTTheatR/KivyMD-data/raw/master/gallery/kivymddoc/text-field-fill-mode-multiline-max-height.gif
:align: center
:attr:`max_height` is a :class:`~kivy.properties.NumericProperty` and
defaults to `0`.
"""
radius = ListProperty([10, 10, 0, 0])
"""
The corner radius for a text field in `fill` mode.
:attr:`radius` is a :class:`~kivy.properties.ListProperty` and
defaults to `[10, 10, 0, 0]`.
"""
font_name_helper_text = StringProperty("Roboto")
"""
Font name for helper text.
:attr:`font_name_helper_text` is an :class:`~kivy.properties.StringProperty`
and defaults to `'Roboto'`.
"""
font_name_hint_text = StringProperty("Roboto")
"""
Font name for hint text.
:attr:`font_name_hint_text` is an :class:`~kivy.properties.StringProperty`
and defaults to `'Roboto'`.
"""
font_name_max_length = StringProperty("Roboto")
"""
Font name for max text length.
:attr:`font_name_max_length` is an :class:`~kivy.properties.StringProperty`
and defaults to `'Roboto'`.
"""
# The x-axis position of the hint text in the text field.
_hint_x = NumericProperty(0)
# The y-axis position of the hint text in the text field.
_hint_y = NumericProperty("38dp")
# Width of underline that animates when the focus of the text field.
_underline_width = NumericProperty(0)
# Font size for hint text.
_hint_text_font_size = NumericProperty(sp(16))
# Label object for `helper_text` parameter.
_helper_text_label = None
# Label object for `max_text_length` parameter.
_max_length_label = None
# Label object for `hint_text` parameter.
_hint_text_label = None
# `MDIcon` object for the icon on the right.
_icon_right_label = None
# `MDIcon` object for the icon on the left.
_icon_left_label = None
# The left and right coordinates of the text field in 'rectangle' mode.
#
# ┍──blank_space_left blank_space_right──────────────┑
# | |
# | |
# | |
# ┕──────────────────────────────────────────────────────┙
_line_blank_space_right_point = NumericProperty(0)
_line_blank_space_left_point = NumericProperty(0)
# The values of colors that are used in the KV file to display the color
# of the corresponding texture.
_fill_color = ColorProperty([0, 0, 0, 0])
_text_color_normal = ColorProperty([0, 0, 0, 0])
_hint_text_color = ColorProperty([0, 0, 0, 0])
_helper_text_color = ColorProperty([0, 0, 0, 0])
_max_length_text_color = ColorProperty([0, 0, 0, 0])
_icon_right_color = ColorProperty([0, 0, 0, 0])
_icon_left_color = ColorProperty([0, 0, 0, 0])
_cache_colors = DictProperty()
# List of color attribute names that should be updated when changing the
# application color palette.
_colors_to_updated = ListProperty()
    def __init__(self, **kwargs):
        # Build the auxiliary label/icon objects first so the `bind` calls
        # below have valid targets before TextInput.__init__ applies kwargs.
        self.set_objects_labels()
        # Color bookkeeping is deferred one frame - presumably so it runs
        # after kv rules and constructor kwargs have been applied (TODO confirm).
        Clock.schedule_once(self._set_attr_names_to_updated)
        Clock.schedule_once(self.set_colors_to_updated)
        Clock.schedule_once(self.set_default_colors)
        super().__init__(**kwargs)
        # Mirror the internal animated color/size values onto the child
        # labels/icons whenever they change.
        self.bind(
            _hint_text_font_size=self._hint_text_label.setter("font_size"),
            _icon_right_color=self._icon_right_label.setter("text_color"),
            _icon_left_color=self._icon_left_label.setter("text_color"),
            text=self.set_text,
        )
        # Re-derive default colors whenever the application palette changes.
        self.theme_cls.bind(
            primary_color=lambda x, y: self.set_default_colors(0, True),
            theme_style=lambda x, y: self.set_default_colors(0, True),
        )
        Clock.schedule_once(self.check_text)
    # TODO: Is this method necessary?
    #  During testing, a quick double-click on the text box does not stop
    #  the animation of the hint text height.
    def cancel_all_animations_on_double_click(self) -> NoReturn:
        """
        Cancels the animations of the text field when double-clicking on the
        text field.
        """
        # Cancel only when the hint sits at one of its two resting
        # positions: raised-empty (dp(38), no text) or lowered-filled
        # (dp(14), text present).
        # NOTE(review): relies on `and` binding tighter than `or`.
        if (
            self._hint_y == dp(38)
            and not self.text
            or self._hint_y == dp(14)
            and self.text
        ):
            Animation.cancel_all(
                self,
                "_underline_width",
                "_hint_y",
                "_hint_x",
                "_hint_text_font_size",
            )
def set_colors_to_updated(self, interval: Union[float, int]) -> NoReturn:
for attr_name in self._attr_names_to_updated.keys():
if getattr(self, attr_name) == [0, 0, 0, 0]:
self._colors_to_updated.append(attr_name)
    def set_default_colors(
        self, interval: Union[float, int], updated: bool = False
    ) -> NoReturn:
        """
        Sets the default text field colors when initializing a text field
        object. Also called when the application palette changes.

        :param interval: unused `Clock.schedule_once` delta.
        :param updated: If `True` - the color theme of the application has
            been changed. Updating the meanings of the colors.
        """
        # Refresh the name -> theme-default mapping, then push those
        # defaults into every color property still tracking the theme.
        self._set_attr_names_to_updated(0)
        for attr_name in self._attr_names_to_updated.keys():
            self._set_color(
                attr_name, self._attr_names_to_updated[attr_name], updated
            )
        if self.error_color == [0, 0, 0, 0] or updated:
            self.error_color = self.theme_cls.error_color
        if self.max_length_text_color == [0, 0, 0, 0] or updated:
            self.max_length_text_color = self.theme_cls.disabled_hint_text_color
        # Seed the private (animated) colors from the public ones.
        self._hint_text_color = self.hint_text_color_normal
        self._text_color_normal = self.text_color_normal
        self._fill_color = self.fill_color_normal
        self._icon_right_color = self.icon_right_color_normal
        self._icon_left_color = self.icon_left_color_normal
        # The length counter starts hidden (transparent).
        self._max_length_text_color = [0, 0, 0, 0]
        # Helper text starts hidden unless it is persistent.
        if self.helper_text_mode in ("on_focus", "on_error"):
            self._helper_text_color = [0, 0, 0, 0]
        elif self.helper_text_mode == "persistent":
            self._helper_text_color = self.helper_text_color_normal
        # Remember the underline colors so they can be restored after an
        # error state (see `on_focus` / `on_error`).
        self._cache_colors["line_color_normal"] = self.line_color_normal
        self._cache_colors["line_color_focus"] = self.line_color_focus
    def set_notch_rectangle(self, joining: bool = False) -> NoReturn:
        """
        Animates a notch for the hint text in the rectangle of the text field
        of type `rectangle`.

        :param joining: if `True` the notch closes (border joins up again);
            otherwise a gap wide enough for the hint label is opened.
        """
        def on_progress(*args):
            # The right edge follows the hint label's width on every tick,
            # because the label itself may still be resizing.
            self._line_blank_space_right_point = (
                self._hint_text_label.width + dp(5) if not joining else 0
            )

        if self.hint_text:
            animation = Animation(
                _line_blank_space_left_point=self._hint_text_label.x - dp(5)
                if not joining
                else 0,
                duration=0.2,
                t="out_quad",
            )
            animation.bind(on_progress=on_progress)
            animation.start(self)
def set_active_underline_width(self, width: Union[float, int]) -> NoReturn:
"""Animates the width of the active underline line."""
Animation(
_underline_width=width,
duration=(0.2 if self.line_anim else 0),
t="out_quad",
).start(self)
def set_static_underline_color(self, color: list) -> NoReturn:
"""Animates the color of a static underline line."""
Animation(
line_color_normal=color,
duration=(0.2 if self.line_anim else 0),
t="out_quad",
).start(self)
    def set_active_underline_color(self, color: list) -> NoReturn:
        """Animates the color of the focused (active) underline line."""
        Animation(line_color_focus=color, duration=0.2, t="out_quad").start(
            self
        )
    def set_fill_color(self, color: list) -> NoReturn:
        """Animates the container fill color (used in 'fill' mode)."""
        Animation(_fill_color=color, duration=0.2, t="out_quad").start(self)
    def set_helper_text_color(self, color: list) -> NoReturn:
        """Animates the color of the helper text."""
        Animation(_helper_text_color=color, duration=0.2, t="out_quad").start(
            self
        )
def set_max_length_text_color(self, color: list) -> NoReturn:
"""Animates the color of the max length text."""
Animation(
_max_length_text_color=color, duration=0.2, t="out_quad"
).start(self)
def set_icon_right_color(self, color: list) -> NoReturn:
"""Animates the color of the icon right."""
Animation(_icon_right_color=color, duration=0.2, t="out_quad").start(
self
)
def set_icon_left_color(self, color: list) -> NoReturn:
"""Animates the color of the icon left."""
Animation(_icon_left_color=color, duration=0.2, t="out_quad").start(
self
)
def set_hint_text_color(self, focus: bool, error: bool = False) -> NoReturn:
"""Animates the color of the hint text."""
Animation(
_hint_text_color=(
self.hint_text_color_normal
if not focus
else self.hint_text_color_focus
)
if not error
else self.error_color,
duration=0.2,
t="out_quad",
).start(self)
    def set_pos_hint_text(self, y: float, x: float = 0) -> NoReturn:
        """Animates the x-axis width and y-axis height of the hint text."""
        Animation(_hint_y=y, duration=0.2, t="out_quad").start(self)
        # The horizontal offset depends on the mode and on whether a left
        # icon shifts the text.  The dp() constants mirror each mode's KV
        # layout -- confirm against the corresponding KV rules.
        if self.mode == "rectangle":
            Animation(
                _hint_x=x if not self.icon_left else dp(-16),
                duration=0.2,
                t="out_quad",
            ).start(self)
        elif self.mode == "fill":
            Animation(
                _hint_x=dp(16) if not self.icon_left else dp(36),
                duration=0.2,
                t="out_quad",
            ).start(self)
        elif self.mode == "line":
            Animation(
                _hint_x=dp(0) if not self.icon_left else dp(36),
                duration=0.2,
                t="out_quad",
            ).start(self)
def set_hint_text_font_size(self, font_size: float) -> NoReturn:
"""Animates the font size of the hint text."""
Animation(
_hint_text_font_size=font_size, duration=0.2, t="out_quad"
).start(self)
def set_max_text_length(self) -> NoReturn:
"""Called when text is entered into a text field."""
if self.max_text_length:
self._max_length_label.text = (
f"{len(self.text)}/{self.max_text_length}"
)
    def check_text(self, interval: Union[float, int]) -> NoReturn:
        """Re-apply `set_text` once at start-up (scheduled in `__init__`) so
        text assigned before initialization gets validated and laid out.

        :param interval: unused `Clock.schedule_once` delta.
        """
        self.set_text(self, self.text)
    def set_text(self, instance_text_field, text: str) -> NoReturn:
        """Called when text is entered into a text field."""
        # Single-line fields must never contain newlines (e.g. pasted text).
        self.text = re.sub("\n", " ", text) if not self.multiline else text
        self.set_max_text_length()

        # Toggle the error state from the current validation result.
        # NOTE(review): `self.max_length_text_color` is used here as a plain
        # truthiness guard -- confirm this is intentional.
        if self.text and self.max_length_text_color and self._get_has_error():
            self.error = True
        if (
            self.text
            and self.max_length_text_color
            and not self._get_has_error()
        ):
            self.error = False
        # Start the appropriate texture animations when programmatically
        # pasting text into a text field.
        if len(self.text) != 0 and not self.focus:
            self.set_pos_hint_text(
                (dp(28) if self.mode != "line" else dp(18))
                if self.mode != "rectangle"
                else dp(10)
            )
            self.set_hint_text_font_size(sp(12))
            if self.mode == "rectangle":
                self.set_notch_rectangle()
        # Clearing the text returns the field to its unfocused resting look.
        if not self.text:
            self.on_focus(instance_text_field, False)
            self.focus = False
    def set_objects_labels(self) -> NoReturn:
        """
        Creates the label objects for the `helper_text`, `max_text_length`
        and `hint_text` parameters, plus the two optional icon widgets.
        """
        self._helper_text_label = TextfieldLabel(
            font_style="Caption",
            halign="left",
            valign="middle",
            field=self,
            font_name=self.font_name_helper_text,
        )
        self._max_length_label = TextfieldLabel(
            font_style="Caption",
            halign="right",
            valign="middle",
            text="",
            field=self,
        )
        self._hint_text_label = TextfieldLabel(
            font_style="Subtitle1", halign="left", valign="middle", field=self
        )
        # Icon colors are driven through `_icon_*_color`, hence "Custom".
        self._icon_right_label = MDIcon(theme_text_color="Custom")
        self._icon_left_label = MDIcon(theme_text_color="Custom")
    def on_helper_text(self, instance_text_field, helper_text: str) -> NoReturn:
        """Mirror the `helper_text` property into its label widget."""
        self._helper_text_label.text = helper_text
    def on_focus(self, instance_text_field, focus: bool) -> NoReturn:
        """Drive every enter/leave-focus animation of the field."""
        # TODO: See `cancel_all_animations_on_double_click` method.
        # self.cancel_all_animations_on_double_click()

        if focus:
            # Open the border notch ('rectangle') and hide the static line.
            if self.mode == "rectangle":
                self.set_notch_rectangle()
            self.set_static_underline_color([0, 0, 0, 0])
            if (
                self.helper_text_mode in ("on_focus", "persistent")
                and self.helper_text
            ):
                self.set_helper_text_color(self.helper_text_color_focus)
            if self.mode == "fill":
                self.set_fill_color(self.fill_color_focus)
            # Grow the active underline across the whole field and float the
            # hint text up; the y offset is mode-specific.
            self.set_active_underline_width(self.width)
            self.set_pos_hint_text(
                (dp(28) if self.mode != "line" else dp(18))
                if self.mode != "rectangle"
                else dp(10)
            )
            self.set_hint_text_color(focus)
            self.set_hint_text_font_size(sp(12))
            if self.max_text_length:
                self.set_max_length_text_color(self.max_length_text_color)
            if self.icon_right:
                self.set_icon_right_color(self.icon_right_color_focus)
            if self.icon_left:
                self.set_icon_left_color(self.icon_left_color_focus)
            # An existing error state overrides the focus palette.
            if self.error:
                if self.hint_text:
                    self.set_hint_text_color(focus, self.error)
                if self.helper_text:
                    self.set_helper_text_color(self.error_color)
                if self.max_text_length:
                    self.set_max_length_text_color(self.error_color)
                if self.icon_right:
                    self.set_icon_right_color(self.error_color)
                if self.icon_left:
                    self.set_icon_left_color(self.error_color)
        else:
            if self.helper_text_mode == "persistent" and self.helper_text:
                self.set_helper_text_color(self.helper_text_color_normal)
            # Close the notch only when no text keeps the hint raised.
            if self.mode == "rectangle" and not self.text:
                self.set_notch_rectangle(joining=True)
            if not self.text:
                # Drop the hint back into the field body.
                self.set_pos_hint_text(
                    dp(38)
                    if not self.icon_left or self.mode == "rectangle"
                    else (dp(34) if not self.mode == "fill" else dp(38))
                )
                self.set_hint_text_font_size(sp(16))
            if self.icon_right:
                self.set_icon_right_color(self.icon_right_color_normal)
            if self.icon_left:
                self.set_icon_left_color(self.icon_left_color_normal)
            if self.hint_text:
                self.set_hint_text_color(focus, self.error)
            # Collapse the active underline and hide the counter.
            self.set_active_underline_width(0)
            self.set_max_length_text_color([0, 0, 0, 0])
            if self.mode == "fill":
                self.set_fill_color(self.fill_color_normal)
            # Re-validate: a field can become erroneous on focus loss
            # (e.g. `required` and still empty).
            self.error = self._get_has_error() or self.error
            if self.error:
                self.set_static_underline_color(self.error_color)
            else:
                self.set_static_underline_color(
                    self._cache_colors["line_color_normal"]
                )
    def on_icon_left(self, instance_text_field, icon_name: str) -> NoReturn:
        """Forward the `icon_left` name to the left `MDIcon` widget."""
        self._icon_left_label.icon = icon_name
    def on_icon_right(self, instance_text_field, icon_name: str) -> NoReturn:
        """Forward the `icon_right` name to the right `MDIcon` widget."""
        self._icon_right_label.icon = icon_name
    def on_disabled(
        self, instance_text_field, disabled_value: bool
    ) -> NoReturn:
        """Fired when `disabled` changes; intentionally a no-op here
        (presumably the disabled look is handled by KV rules -- confirm)."""
        pass
    def on_error(self, instance_text_field, error: bool) -> NoReturn:
        """
        Changes the primary colors of the text box to match the `error` value
        (text field is in an error state or not).
        """
        if error:
            # Paint every decorated part (counter, underline, hint, helper,
            # icons) with the error color.
            self.set_max_length_text_color(self.error_color)
            self.set_active_underline_color(self.error_color)
            if self.hint_text:
                self.set_hint_text_color(self.focus, self.error)
            if self.helper_text:
                self.set_helper_text_color(self.error_color)
            if self.icon_right:
                self.set_icon_right_color(self.error_color)
            if self.icon_left:
                self.set_icon_left_color(self.error_color)
            if self.helper_text_mode == "on_error":
                self.set_helper_text_color(self.error_color)
        else:
            # Restore the regular focus palette.
            self.set_max_length_text_color(self.max_length_text_color)
            self.set_active_underline_color(
                self._cache_colors["line_color_focus"]
            )
            if self.hint_text:
                self.set_hint_text_color(self.focus)
            if self.helper_text:
                self.set_helper_text_color(self.helper_text_color_focus)
            if self.icon_right:
                self.set_icon_right_color(self.icon_right_color_focus)
            if self.icon_left:
                self.set_icon_left_color(self.icon_left_color_focus)
            if self.helper_text_mode in ("on_focus", "on_error"):
                # Helper text is hidden again outside its trigger state.
                self.set_helper_text_color([0, 0, 0, 0])
            elif self.helper_text_mode == "persistent":
                self.set_helper_text_color(self.helper_text_color_normal)
    def on_hint_text(self, instance_text_field, hint_text: str) -> NoReturn:
        """Mirror `hint_text` into its label and reset the resting size."""
        self._hint_text_label.text = hint_text
        self._hint_text_label.font_size = sp(16)
def on_width(self, instance_text_field, width: float) -> NoReturn:
"""Called when the application window is resized."""
if self.focus:
self._underline_width = self.width
def on_height(self, instance_text_field, value_height: float) -> NoReturn:
if value_height >= self.max_height and self.max_height:
self.height = self.max_height
    def on_hint_text_color_normal(self, instance_text_field, color: list):
        """Apply a new normal hint-text color immediately (no animation)."""
        self._hint_text_color = color
    def on_helper_text_color_normal(self, instance_text_field, color: list):
        """Apply a new normal helper-text color immediately (no animation)."""
        self._helper_text_color = color
    def on_icon_right_color_normal(self, instance_text_field, color: list):
        """Apply a new normal right-icon color immediately (no animation)."""
        self._icon_right_color = color
    def on_max_length_text_color(self, instance_text_field, color: list):
        """Apply a new counter-text color immediately (no animation)."""
        self._max_length_text_color = color
    def _set_color(self, attr_name: str, color: str, updated: bool) -> NoReturn:
        # Push a theme-default `color` into `attr_name`, but only for
        # attributes that opted in (still tracking the theme).
        # NOTE(review): the outer `or updated` looks redundant -- the inner
        # membership test gates the assignment either way.  Confirm whether
        # `updated=True` was meant to force the assignment.
        if attr_name in self._colors_to_updated or updated:
            if attr_name in self._colors_to_updated:
                setattr(self, attr_name, color)
    def _set_attr_names_to_updated(
        self, interval: Union[float, int]
    ) -> NoReturn:
        """
        Sets and update the default color dictionary for text field textures.

        Maps each public color property name to its theme-derived default;
        `set_default_colors` pushes these values into the properties that
        are still tracking the theme.

        :param interval: unused `Clock.schedule_once` delta.
        """
        self._attr_names_to_updated = {
            "line_color_normal": self.theme_cls.disabled_hint_text_color,
            "line_color_focus": self.theme_cls.primary_color,
            "hint_text_color_normal": self.theme_cls.disabled_hint_text_color,
            "hint_text_color_focus": self.theme_cls.primary_color,
            "helper_text_color_normal": self.theme_cls.disabled_hint_text_color,
            "helper_text_color_focus": self.theme_cls.disabled_hint_text_color,
            "text_color_normal": self.theme_cls.disabled_hint_text_color,
            "text_color_focus": self.theme_cls.primary_color,
            "fill_color_normal": self.theme_cls.bg_darkest,
            "fill_color_focus": self.theme_cls.bg_dark,
            "icon_right_color_normal": self.theme_cls.disabled_hint_text_color,
            "icon_right_color_focus": self.theme_cls.primary_color,
            "icon_left_color_normal": self.theme_cls.disabled_hint_text_color,
            "icon_left_color_focus": self.theme_cls.primary_color,
        }
def _get_has_error(self) -> bool:
"""
Returns `False` or `True` depending on the state of the text field,
for example when the allowed character limit has been exceeded or when
the :attr:`~MDTextField.required` parameter is set to `True`.
"""
if self.max_text_length and len(self.text) > self.max_text_length:
has_error = True
else:
if all((self.required, len(self.text) == 0)):
has_error = True
else:
has_error = False
return has_error
    def _refresh_hint_text(self):
        """Method override to avoid duplicate hint text texture."""
        # Intentionally empty: the base TextInput would render its own hint
        # texture on top of the animated `_hint_text_label`, doubling the
        # hint (presumed from the original docstring -- confirm against
        # kivy.uix.textinput.TextInput._refresh_hint_text).
class MDTextFieldRound(ThemableBehavior, TextInput):
    """
    Round text field: a themed `TextInput` with optional left/right icons
    whose colors follow the focus state.
    """

    icon_left = StringProperty()
    """
    Left icon.

    :attr:`icon_left` is an :class:`~kivy.properties.StringProperty`
    and defaults to `''`.
    """

    icon_left_color = ColorProperty((0, 0, 0, 1))
    """
    Color of left icon in ``rgba`` format.

    :attr:`icon_left_color` is an :class:`~kivy.properties.ColorProperty`
    and defaults to `(0, 0, 0, 1)`.
    """

    icon_right = StringProperty()
    """
    Right icon.

    :attr:`icon_right` is an :class:`~kivy.properties.StringProperty`
    and defaults to `''`.
    """

    icon_right_color = ColorProperty((0, 0, 0, 1))
    """
    Color of right icon.

    :attr:`icon_right_color` is an :class:`~kivy.properties.ColorProperty`
    and defaults to `(0, 0, 0, 1)`.
    """

    line_color = ColorProperty(None)
    """
    Field line color.

    :attr:`line_color` is an :class:`~kivy.properties.ColorProperty`
    and defaults to `None`.
    """

    normal_color = ColorProperty(None)
    """
    Field color if `focus` is `False`.

    :attr:`normal_color` is an :class:`~kivy.properties.ColorProperty`
    and defaults to `None`.
    """

    color_active = ColorProperty(None)
    """
    Field color if `focus` is `True`.

    :attr:`color_active` is an :class:`~kivy.properties.ColorProperty`
    and defaults to `None`.
    """

    # Color actually used while focused (alpha forced to 0.5 in
    # `on_color_active`).
    _color_active = ColorProperty(None)
    # First user-supplied (non-theme) icon colors; restored on focus loss.
    _icon_left_color_copy = ColorProperty(None)
    _icon_right_color_copy = ColorProperty(None)

    def __init__(self, **kwargs):
        # Icon labels must exist before super().__init__ fires the
        # `on_icon_*` property handlers below.
        self._lbl_icon_left = MDIcon(theme_text_color="Custom")
        self._lbl_icon_right = MDIcon(theme_text_color="Custom")
        super().__init__(**kwargs)
        self.cursor_color = self.theme_cls.primary_color
        self.icon_left_color = self.theme_cls.text_color
        self.icon_right_color = self.theme_cls.text_color
        # Theme-derived fallbacks when the user supplied no colors.
        if not self.normal_color:
            self.normal_color = self.theme_cls.primary_light
        if not self.line_color:
            self.line_color = self.theme_cls.primary_dark
        if not self.color_active:
            self._color_active = (0.5, 0.5, 0.5, 0.5)

    def on_focus(self, instance_text_field, focus_value: bool) -> NoReturn:
        # Icons adopt the primary color while focused and fall back to the
        # cached user color (or the theme text color) afterwards.
        if focus_value:
            self.icon_left_color = self.theme_cls.primary_color
            self.icon_right_color = self.theme_cls.primary_color
        else:
            self.icon_left_color = (
                self._icon_left_color_copy or self.theme_cls.text_color
            )
            self.icon_right_color = (
                self._icon_right_color_copy or self.theme_cls.text_color
            )

    def on_icon_left(self, instance_text_field, icon_name: str) -> NoReturn:
        self._lbl_icon_left.icon = icon_name

    def on_icon_left_color(self, instance_text_field, color: list) -> NoReturn:
        self._lbl_icon_left.text_color = color
        # Cache the first explicit user color: anything that is neither the
        # theme text color nor the focus/primary color.
        if (
            not self._icon_left_color_copy
            and color != self.theme_cls.text_color
            and color != self.theme_cls.primary_color
        ):
            self._icon_left_color_copy = color

    def on_icon_right(self, instance_text_field, icon_name: str) -> NoReturn:
        self._lbl_icon_right.icon = icon_name

    def on_icon_right_color(self, instance_text_field, color: list) -> NoReturn:
        self._lbl_icon_right.text_color = color
        if (
            not self._icon_right_color_copy
            and color != self.theme_cls.text_color
            and color != self.theme_cls.primary_color
        ):
            self._icon_right_color_copy = color

    def on_color_active(self, instance_text_field, color: list) -> NoReturn:
        if color != [0, 0, 0, 0.5]:
            self._color_active = color
            # NOTE(review): mutates the stored color's alpha in place (and
            # possibly the caller's list) -- confirm this is intended.
            self._color_active[-1] = 0.5
        else:
            self._color_active = color
if __name__ == "__main__":
    from kivy.lang import Builder
    from kivy.uix.textinput import TextInput

    from kivymd.app import MDApp

    # Demo layout: one MDTextField per supported mode ("rectangle", "fill"
    # and the default "line"), with and without a left icon.
    # Fixed typo in the user-facing helper text: "massage" -> "message".
    KV = """
MDScreen:

    MDBoxLayout:
        id: box
        orientation: "vertical"
        spacing: "28dp"
        adaptive_height: True
        size_hint_x: .8
        pos_hint: {"center_x": .5, "center_y": .5}

        MDTextField:
            hint_text: "Label"
            helper_text: "Error message"
            mode: "rectangle"

        MDTextField:
            icon_left: "git"
            hint_text: "Label"
            helper_text: "Error message"
            mode: "rectangle"

        MDTextField:
            icon_left: "git"
            hint_text: "Label"
            helper_text: "Error message"
            mode: "fill"

        MDTextField:
            hint_text: "Label"
            helper_text: "Error message"
            mode: "fill"

        MDTextField:
            hint_text: "Label"
            helper_text: "Error message"

        MDTextField:
            icon_left: "git"
            hint_text: "Label"
            helper_text: "Error message"

        MDFlatButton:
            text: "SET TEXT"
            pos_hint: {"center_x": .5}
            on_release: app.set_text()
"""

    class Test(MDApp):
        def build(self):
            return Builder.load_string(KV)

        def set_text(self):
            # Programmatic assignment must trigger the same hint/underline
            # animations as typing does.
            for widget in self.root.ids.box.children:
                if issubclass(widget.__class__, TextInput):
                    widget.text = "Input text"

    Test().run()
| 31.753818 | 131 | 0.618995 |
4b8b37e60f289027f69fd1dd6aebac3326fc93bd | 3,563 | py | Python | pvops/timeseries/models/iec.py | bfemery-sandia/pvOps | fcdf47443041b3deb70f675481a70e7cf0b3dc93 | [
"BSD-3-Clause"
] | 2 | 2021-04-21T23:42:36.000Z | 2021-05-06T16:18:48.000Z | pvops/timeseries/models/iec.py | bfemery-sandia/pvOps | fcdf47443041b3deb70f675481a70e7cf0b3dc93 | [
"BSD-3-Clause"
] | 13 | 2021-03-16T17:52:31.000Z | 2021-05-20T21:19:56.000Z | pvops/timeseries/models/iec.py | bfemery-sandia/pvOps | fcdf47443041b3deb70f675481a70e7cf0b3dc93 | [
"BSD-3-Clause"
] | 4 | 2021-05-26T13:49:21.000Z | 2021-12-17T16:35:06.000Z | import numpy as np
def iec_calc(prod_df, prod_col_dict, meta_df, meta_col_dict,
             gi_ref=1000.0):
    """
    Calculate the IEC expected (baseline) energy from measured irradiance:
    ``E = P_dc * GI * dt / GI_ref`` per row.

    Parameters
    ----------
    prod_df: DataFrame
        Production data; must contain the columns named in
        ``prod_col_dict`` (except the baseline/dcsize columns, which are
        created here).
    prod_col_dict: dict of {str : str}
        Column names for the production data:

        - **siteid** (*string*), site-ID column in ``prod_df``
        - **timestamp** (*string*), time-stamp column in ``prod_df``
        - **irradiance** (*string*), irradiance column [W/m^2]
        - **baseline** (*string*), output column that will hold the IEC
          expected energy
        - **dcsize** (*string*), scratch column name for site capacity

    meta_df: DataFrame
        Site metadata with at least the columns in ``meta_col_dict``.
    meta_col_dict: dict of {str : str}
        Column names for the metadata:

        - **siteid** (*string*), site-ID column
        - **dcsize** (*string*), site capacity column [kW]

    gi_ref: float
        Reference plane-of-array irradiance [W/m^2] at which the site
        capacity is rated (default 1000).

    Returns
    -------
    DataFrame
        Copy of ``prod_df`` with the new baseline column containing the
        IEC expected energy.
    """
    # Unpack the relevant column names once for readability.
    site_col = prod_col_dict["siteid"]
    ts_col = prod_col_dict["timestamp"]
    irr_col = prod_col_dict["irradiance"]
    iec_col = prod_col_dict["baseline"]
    size_col = prod_col_dict["dcsize"]

    meta_site_col = meta_col_dict["siteid"]
    meta_size_col = meta_col_dict["dcsize"]

    # Work on copies so the caller's frames are left untouched; index the
    # metadata by site for aligned capacity lookups.
    prod_df = prod_df.copy()
    meta_df = meta_df.copy().set_index(meta_site_col)

    # Attach each row's site capacity [kW].  The strict .loc lookup raises
    # KeyError for sites missing from the metadata.
    prod_df[size_col] = prod_df[site_col].apply(
        lambda site: meta_df.loc[site, meta_size_col]
    )

    for site in prod_df[site_col].unique():
        site_mask = prod_df[site_col] == site
        timestamps = prod_df.loc[site_mask, ts_col]
        # Sampling interval in hours, taken from the first two rows of the
        # site (assumes a regular cadence and at least two samples), so the
        # product below comes out in kWh.
        step_hours = (
            timestamps.iloc[1] - timestamps.iloc[0]
        ) / np.timedelta64(1, "h")
        prod_df.loc[site_mask, iec_col] = (
            prod_df.loc[site_mask, size_col]
            * prod_df.loc[site_mask, irr_col]
            * step_hours
            / gi_ref
        )

    # The capacity column was only needed as scratch space.
    prod_df.drop(columns=[size_col], inplace=True)
    return prod_df
| 32.688073 | 88 | 0.63542 |
70ee6c3441c5064484ae81fb171d8e2a9cc96ec5 | 519 | py | Python | eng/tox/mypy_hard_failure_packages.py | tzhanl/azure-sdk-for-python | 18cd03f4ab8fd76cc0498f03e80fbc99f217c96e | [
"MIT"
] | null | null | null | eng/tox/mypy_hard_failure_packages.py | tzhanl/azure-sdk-for-python | 18cd03f4ab8fd76cc0498f03e80fbc99f217c96e | [
"MIT"
] | null | null | null | eng/tox/mypy_hard_failure_packages.py | tzhanl/azure-sdk-for-python | 18cd03f4ab8fd76cc0498f03e80fbc99f217c96e | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Packages that have opted in to treating mypy errors as hard CI failures
# (instead of the advisory/soft-failure default).
MYPY_HARD_FAILURE_OPTED = [
    "azure-core",
    "azure-eventhub",
    "azure-eventhub-checkpointstoreblob",
    "azure-eventhub-checkpointstoreblob-aio",
]
| 37.071429 | 94 | 0.481696 |
9fd25b51fda9aee6ba12f3e8db5b2e2c87209282 | 965 | py | Python | relative_dominance.py | limanqing/crop_climate | bfc50fbf57ce3a96ba7d29de53a76fc7a1dc2d2f | [
"MIT"
] | null | null | null | relative_dominance.py | limanqing/crop_climate | bfc50fbf57ce3a96ba7d29de53a76fc7a1dc2d2f | [
"MIT"
] | null | null | null | relative_dominance.py | limanqing/crop_climate | bfc50fbf57ce3a96ba7d29de53a76fc7a1dc2d2f | [
"MIT"
] | null | null | null | #coding=utf-8
import pandas as pd
import glob
import numpy as np
if __name__ == '__main__':
    # Per-site detrended maize/climate series ("polyfit-additive", 360 scale).
    base_dir = r'F:\crop-climate\maize&cru_scale\360\polyfit-additive\*.csv'
    filelist = glob.glob(base_dir)
    # NOTE(review): sum1/sum2/sum3 are never used below.
    sum1=0
    sum2=0
    sum3=0
    num=0
    pre_list=[]
    tmp_list=[]
    maize_list=[]
    df2 = pd.read_csv(r'F:\crop-climate\regression_scale\polyfit-additive\360.csv',index_col=False)  # read the regression coefficients
    for filename in filelist:
        num+=1
        df1 = pd.read_csv(filename)
        # Coefficients for this site: column 2 -> precipitation, column 3 ->
        # temperature (rows presumed to align with glob order -- confirm).
        coef1=df2.iat[num-1,2]
        coef2=df2.iat[num-1,3]
        pre=coef1*df1['Pre']
        tmp=coef2*df1['Tmp']
        # Variance of each fitted climate contribution and of the yield series.
        pre_list.append(np.var(pre))
        tmp_list.append(np.var(tmp))
        maize_list.append(np.var(df1['Value']))
    # Relative dominance: mean (and spread) of each contribution's variance
    # normalised by the mean yield variance.
    d1=np.mean(pre_list)/np.mean(maize_list)
    d2=np.mean(tmp_list)/np.mean(maize_list)
    d3=np.std(pre_list)/np.mean(maize_list)
    d4=np.std(tmp_list)/np.mean(maize_list)
    print(d1,d3,d2,d4)
1efcf9e7d79ad4971afb2e12d158b799ff72de48 | 1,787 | py | Python | model_zoo/official/cv/lenet_quant/src/lenet.py | i4oolish/mindspore | dac3be31d0f2c0a3516200f47af30980e566601b | [
"Apache-2.0"
] | 2 | 2020-08-12T16:14:40.000Z | 2020-12-04T03:05:57.000Z | model_zoo/official/cv/lenet_quant/src/lenet.py | dilingsong/mindspore | 4276050f2494cfbf8682560a1647576f859991e8 | [
"Apache-2.0"
] | null | null | null | model_zoo/official/cv/lenet_quant/src/lenet.py | dilingsong/mindspore | 4276050f2494cfbf8682560a1647576f859991e8 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""LeNet."""
import mindspore.nn as nn
class LeNet5(nn.Cell):
    """
    Classic LeNet-5 convolutional network.

    Args:
        num_class (int): Number of output classes. Default: 10.
        channel (int): Number of input image channels. Default: 1.

    Returns:
        Tensor, the unnormalized class scores (logits).

    Examples:
        >>> LeNet5(num_class=10)
    """

    def __init__(self, num_class=10, channel=1):
        super(LeNet5, self).__init__()
        self.num_class = num_class
        # Feature extractor: two valid-padded 5x5 convolutions, each
        # followed by ReLU and 2x2/stride-2 max pooling.
        self.conv1 = nn.Conv2d(channel, 6, 5, pad_mode='valid')
        self.conv2 = nn.Conv2d(6, 16, 5, pad_mode='valid')
        # Classifier head: the 16 feature maps of 5x5 flatten to 400 inputs.
        self.fc1 = nn.Dense(16 * 5 * 5, 120)
        self.fc2 = nn.Dense(120, 84)
        self.fc3 = nn.Dense(84, self.num_class)
        # Shared stateless layers.
        self.relu = nn.ReLU()
        self.max_pool2d = nn.MaxPool2d(kernel_size=2, stride=2)
        self.flatten = nn.Flatten()

    def construct(self, x):
        # Two conv -> relu -> pool stages.
        x = self.max_pool2d(self.relu(self.conv1(x)))
        x = self.max_pool2d(self.relu(self.conv2(x)))
        # Two hidden Dense layers with ReLU, then the logits.
        x = self.flatten(x)
        x = self.relu(self.fc1(x))
        x = self.relu(self.fc2(x))
        return self.fc3(x)
| 29.295082 | 78 | 0.586458 |
a99d5d54a16ad61a626c279e7a76f4640b06ae15 | 927 | py | Python | general_tools/githubshell.py | PurpleGuitar/tools | 13152bb925b568ed4d4375065ed5a7dbdeee99de | [
"MIT"
] | 6 | 2015-07-27T21:50:39.000Z | 2020-06-25T14:32:35.000Z | general_tools/githubshell.py | PurpleGuitar/tools | 13152bb925b568ed4d4375065ed5a7dbdeee99de | [
"MIT"
] | 89 | 2015-06-24T09:35:40.000Z | 2022-02-13T14:40:31.000Z | general_tools/githubshell.py | PurpleGuitar/tools | 13152bb925b568ed4d4375065ed5a7dbdeee99de | [
"MIT"
] | 12 | 2015-07-13T17:31:04.000Z | 2021-08-06T06:50:21.000Z | #!/usr/bin/env python2
# -*- coding: utf8 -*-
#
# Copyright (c) 2014 unfoldingWord
# http://creativecommons.org/licenses/MIT/
# See LICENSE file for details.
#
# Contributors:
# Jesse Griffin <jesse@distantshores.org>
#
# Requires PyGithub.
'''
This provides easy access to the PyGithub API for testing and development.
'''
import sys
from general_tools.git_wrapper import *
sys.path.append('/var/www/vhosts/door43.org/tools/general_tools')
# Abort early with an install hint when PyGithub is missing.
try:
    from github import Github
    from github import GithubException
except:  # NOTE(review): bare except; `ImportError` would be narrower.
    print "Please install PyGithub with pip"
    sys.exit(1)
if __name__ == '__main__':
    # Log in to Github via API
    try:
        # The credential is read from a root-only file so it never appears
        # in the repository or on the command line.
        # NOTE(review): the file handle is never closed.
        pw = open('/root/.github_pass', 'r').read().strip()
        guser = githubLogin('dsm-git', pw)
        githuborg = getGithubOrg('door43', guser)
    except GithubException as e:
        print 'Problem logging into Github: {0}'.format(e)
        sys.exit(1)
6b8d9b7f2146c24bedb6e70c4ad09576d984bde9 | 4,758 | py | Python | python/pex/proxy.py | JiveHelix/pex | d3cbe0e437e803fb4af6fe153de0cf3f61a3a6d6 | [
"MIT"
] | null | null | null | python/pex/proxy.py | JiveHelix/pex | d3cbe0e437e803fb4af6fe153de0cf3f61a3a6d6 | [
"MIT"
] | null | null | null | python/pex/proxy.py | JiveHelix/pex | d3cbe0e437e803fb4af6fe153de0cf3f61a3a6d6 | [
"MIT"
] | null | null | null | ##
# @file proxy.py
#
# @brief Implements a proxy for callable references. While similar in
# functionality to weakref.proxy, there are some fundamental differences.
#
# Wraps either a function (with weakref.ref) or a method, with
# weakref.WeakMethod, and __eq__ and __hash__ are plumbed through to the
# underlying ref so that the proxies can be stored in a set or used as keys in
# a dict.
#
# The user must not call these proxies after the weakref has been finalized.
# Use the onFinalize callback to receive a notification.
#
# @author Jive Helix (jivehelix@gmail.com)
# @date 06 Jun 2020
# @copyright Jive Helix
# Licensed under the MIT license. See LICENSE file.
from __future__ import annotations
from typing import Optional, Callable, Any, Generic, TypeVar
from .reference import Reference, MakeReference
from .types import (
SignalCallback,
ValueCallback,
ValueType,
ReferenceType)
class SignalProxy:
    """Hashable stand-in for a weakly referenced zero-argument callback.

    Equality and hashing delegate to the wrapped reference, so proxies can
    live in sets or serve as dict keys.  The caller must not invoke the
    proxy after the underlying weakref has been finalized.
    """

    reference_: Reference[SignalCallback]

    def __init__(self, reference: Reference[SignalCallback]):
        self.reference_ = reference

    @classmethod
    def Create(
            class_,
            callback: SignalCallback,
            onFinalize: Optional[Callable[[Reference[SignalCallback]], Any]]) \
            -> SignalProxy:
        reference = MakeReference(callback, onFinalize)
        return class_(reference)

    def __call__(self) -> None:
        """Invoke the wrapped callback.

        The reference is assumed to still be alive; in optimized builds the
        None check disappears.
        """
        callback = self.reference_()
        assert callback is not None
        callback()

    def __hash__(self) -> int:
        return hash(self.reference_)

    def __eq__(self, other: object) -> bool:
        if isinstance(other, SignalProxy):
            return self.reference_ == other.reference_
        if isinstance(other, ReferenceType):
            return self.reference_ == other
        raise NotImplementedError("Cannot compare equal")

    def __repr__(self) -> str:
        return "SignalProxy({})".format(hash(self.reference_))
class ValueProxy(Generic[ValueType]):
    """Hashable stand-in for a weakly referenced one-argument callback.

    Equality and hashing delegate to the wrapped reference; the caller must
    not invoke the proxy after the weakref has been finalized.
    """

    reference_: Reference[ValueCallback[ValueType]]

    def __init__(self, reference: Reference[ValueCallback[ValueType]]) -> None:
        self.reference_ = reference

    @classmethod
    def Create(
            class_,
            callback: ValueCallback[ValueType],
            onFinalize: Callable[[Reference[ValueCallback[ValueType]]], Any]) \
            -> ValueProxy[ValueType]:
        reference = MakeReference(callback, onFinalize)
        return class_(reference)

    def __call__(self, value: ValueType) -> None:
        """Forward ``value`` to the wrapped callback (assumed alive)."""
        callback = self.reference_()
        assert callback is not None
        callback(value)

    def __hash__(self) -> int:
        try:
            return hash(self.reference_)
        except TypeError:
            # attrs-based references must be declared eq=False to remain
            # hashable; print a hint before re-raising.
            print(
                "Reference must be hashable. If created with attrs, "
                "be sure to use keyword eq=False")
            raise

    def __eq__(self, other: object) -> bool:
        if isinstance(other, ValueProxy):
            return self.reference_ == other.reference_
        if isinstance(other, ReferenceType):
            return self.reference_ == other
        raise NotImplementedError("Cannot compare equal")

    def __repr__(self) -> str:
        return "ValueProxy({})".format(hash(self.reference_))
Source = TypeVar("Source")
Target = TypeVar("Target")


class FilterProxy(Generic[Source, Target]):
    """Weak-reference proxy around a one-argument filter callable mapping a
    ``Source`` value to a ``Target`` value.

    The caller must not invoke the proxy after the underlying weakref has
    been finalized (use the ``onFinalize`` callback for notification).
    """

    reference_: Reference[Callable[[Source], Target]]

    def __init__(
            self,
            reference: Reference[Callable[[Source], Target]]) -> None:
        self.reference_ = reference

    @classmethod
    def Create(
            class_,
            callback: Callable[[Source], Target],
            onFinalize: Optional[
                Callable[[Reference[Callable[[Source], Target]]], Any]]) \
            -> FilterProxy[Source, Target]:
        reference = MakeReference(callback, onFinalize)
        return class_(reference)

    def __call__(self, value: Source) -> Target:
        """Apply the wrapped filter to ``value`` and return its result."""
        filter_callable = self.reference_()
        assert filter_callable is not None
        return filter_callable(value)
| 31.302632 | 79 | 0.643548 |
b9f03c41fdafb388b63cc71276ac46a3197da648 | 5,903 | py | Python | django-crm/accounts/views.py | MsMaddyMac/django-crm | a7a1085f1acba8c6bd10865d5c680c43adefffed | [
"MIT"
] | null | null | null | django-crm/accounts/views.py | MsMaddyMac/django-crm | a7a1085f1acba8c6bd10865d5c680c43adefffed | [
"MIT"
] | null | null | null | django-crm/accounts/views.py | MsMaddyMac/django-crm | a7a1085f1acba8c6bd10865d5c680c43adefffed | [
"MIT"
] | null | null | null | from django.shortcuts import render, redirect
from django.http import HttpResponse
from django.forms import inlineformset_factory
from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth import authenticate, login, logout
# flash message
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import Group
from .models import *
from .forms import *
from .filters import OrderFilter
from .decorators import unauthenticated_user, allowed_users, admin_only
# Create your views (pages) here.
# this is a view which will show the string passed into the HttpResponse() when someone visits that particular page(view)
# def home(request):
# return HttpResponse('home')
# this is what the def changes to if you have a template to render on that page(view)
# you point it to the file path of the template within the templates directory. In this case within the templates directory there is an accounts directory and within that a dashboard.html file
@unauthenticated_user
def registerPage(request):
    """Render the sign-up form; create the user and go to login on success."""
    form = CreateUserForm()
    if request.method == 'POST':
        form = CreateUserForm(request.POST)
        if form.is_valid():
            form.save()
            username = form.cleaned_data.get('username')
            messages.success(request, 'Account was created for ' + username)
            return redirect('login')
    return render(request, 'accounts/register.html', {'form': form})
@unauthenticated_user
def loginPage(request):
    """Show the login form; authenticate and start a session on POST."""
    if request.method == 'POST':
        user = authenticate(
            request,
            username=request.POST.get('username'),
            password=request.POST.get('password'))
        if user is not None:
            login(request, user)
            return redirect('home')
        # Deliberately vague message: do not reveal which field was wrong.
        messages.info(request, 'Username OR password is incorrect!')
    return render(request, 'accounts/login.html', {})
def logoutUser(request):
    """End the current session and send the visitor back to the login page."""
    logout(request)
    return redirect('login')
@login_required(login_url='login')
@admin_only
def home(request):
    """Admin dashboard: every customer and order, plus status counts."""
    orders = Order.objects.all()
    customers = Customer.objects.all()
    context = {
        'orders': orders,
        'customers': customers,
        'total_customers': customers.count(),
        'total_orders': orders.count(),
        'delivered': orders.filter(status='Delivered').count(),
        'pending': orders.filter(status='Pending').count(),
    }
    return render(request, 'accounts/dashboard.html', context)
@login_required(login_url='login')
@allowed_users(allowed_roles=['customer'])
def userPage(request):
    """Customer dashboard: only the logged-in user's own orders and counts."""
    orders = request.user.customer.order_set.all()
    context = {
        'orders': orders,
        'total_orders': orders.count(),
        'delivered': orders.filter(status='Delivered').count(),
        'pending': orders.filter(status='Pending').count(),
    }
    return render(request, 'accounts/user.html', context)
@login_required(login_url='login')
@allowed_users(allowed_roles=['customer'])
def accountSettings(request):
    """Let a customer view and edit their own profile (incl. uploaded files)."""
    customer = request.user.customer
    if request.method == 'POST':
        form = CustomerForm(request.POST, request.FILES, instance=customer)
        if form.is_valid():
            form.save()
        # NOTE(review): re-renders after a successful save instead of
        # redirecting (post/redirect/get) -- confirm this is intended.
    else:
        form = CustomerForm(instance=customer)
    return render(request, 'accounts/account_settings.html', {'form': form})
@login_required(login_url='login')
@allowed_users(allowed_roles=['admin'])
def products(request):
    """Admin-only listing of every product."""
    return render(request, 'accounts/products.html',
                  {'products': Product.objects.all()})
@login_required(login_url='login')
@allowed_users(allowed_roles=['admin'])
def customer(request, pk):
    """Admin detail page for one customer with a filterable order list."""
    customer = Customer.objects.get(id=pk)
    orders = customer.order_set.all()
    # Count before filtering: the total reflects all of the customer's orders.
    total_orders = orders.count()
    myFilter = OrderFilter(request.GET, queryset=orders)
    context = {
        'customer': customer,
        'orders': myFilter.qs,
        'total_orders': total_orders,
        'myFilter': myFilter,
    }
    return render(request, 'accounts/customer.html', context)
@login_required(login_url='login')
@allowed_users(allowed_roles=['admin'])
def createOrder(request, pk):
    """Create up to ten orders at once for the customer with primary key *pk*.

    Uses an inline formset bound to the customer so every saved order is
    automatically linked to them.  (Dead commented-out single-form code
    removed.)
    """
    OrderFormSet = inlineformset_factory(
        Customer, Order, fields=('product', 'status'), extra=10)
    customer = Customer.objects.get(id=pk)
    # Start from an empty queryset so existing orders are not offered for edit.
    formset = OrderFormSet(queryset=Order.objects.none(), instance=customer)
    if request.method == 'POST':
        formset = OrderFormSet(request.POST, instance=customer)
        if formset.is_valid():
            formset.save()
            return redirect('/')
    return render(request, 'accounts/order_form.html', {'formset': formset})
@login_required(login_url='login')
@allowed_users(allowed_roles=['admin'])
def updateOrder(request, pk):
    """Edit an existing order; redirect home after a valid save."""
    order = Order.objects.get(id=pk)
    if request.method == 'POST':
        form = OrderForm(request.POST, instance=order)
        if form.is_valid():
            form.save()
            return redirect('/')
    else:
        form = OrderForm(instance=order)
    return render(request, 'accounts/order_form.html', {'form': form})
@login_required(login_url='login')
@allowed_users(allowed_roles=['admin'])
def deleteOrder(request, pk):
    """Show a confirmation page on GET; delete the order and go home on POST."""
    order = Order.objects.get(id=pk)
    if request.method == 'POST':
        order.delete()
        return redirect('/')
    return render(request, 'accounts/delete.html', {'order': order})
# once you create your views import them into the urls.py file you created in this same directory
| 33.350282 | 192 | 0.720989 |
48c5b784bf497d4e68c71cef5851a760b7b559a9 | 8,177 | py | Python | allennlp/semparse/worlds/wikitables_variable_free_world.py | ljch2018/allennlp | 63ba3fb28897578d4798039d1713e2b7995eb753 | [
"Apache-2.0"
] | 3 | 2019-06-17T21:09:07.000Z | 2022-03-18T05:19:31.000Z | allennlp/semparse/worlds/wikitables_variable_free_world.py | ljch2018/allennlp | 63ba3fb28897578d4798039d1713e2b7995eb753 | [
"Apache-2.0"
] | null | null | null | allennlp/semparse/worlds/wikitables_variable_free_world.py | ljch2018/allennlp | 63ba3fb28897578d4798039d1713e2b7995eb753 | [
"Apache-2.0"
] | 1 | 2020-03-12T06:53:53.000Z | 2020-03-12T06:53:53.000Z | """
We store the information related to context sensitive execution of logical forms here.
We assume that the logical forms are written in the variable-free language described in the paper
'Memory Augmented Policy Optimization for Program Synthesis with Generalization' by Liang et al.
The language is the main difference between this class and `WikiTablesWorld`. Also, this class defines
an executor for the variable-free logical forms.
"""
# TODO(pradeep): Merge this class with the `WikiTablesWorld` class, and move all the
# language-specific functionality into type declarations.
from typing import Dict, List, Set
import re
from nltk.sem.logic import Type
from overrides import overrides
from allennlp.semparse.worlds.world import ParsingError, World
from allennlp.semparse.type_declarations import wikitables_variable_free as types
from allennlp.semparse.contexts import TableQuestionKnowledgeGraph
class WikiTablesVariableFreeWorld(World):
    """
    World representation for the WikitableQuestions domain with the variable-free language used in
    the paper from Liang et al. (2018).

    Parameters
    ----------
    table_graph : ``TableQuestionKnowledgeGraph``
        Context associated with this world.
    """
    # When we're converting from logical forms to action sequences, this set tells us which
    # functions in the logical form are curried functions, and how many arguments the function
    # actually takes. This is necessary because NLTK curries all multi-argument functions to a
    # series of one-argument function applications. See `world._get_transitions` for more info.
    curried_functions = {
        types.SELECT_TYPE: 2,
        types.ROW_FILTER_WITH_COLUMN: 2,
        types.ROW_NUM_OP: 2,
        types.ROW_FILTER_WITH_COLUMN_AND_NUMBER: 3,
        types.ROW_FILTER_WITH_COLUMN_AND_DATE: 3,
        types.ROW_FILTER_WITH_COLUMN_AND_STRING: 3,
        types.NUM_DIFF_WITH_COLUMN: 3,
    }

    def __init__(self, table_graph: TableQuestionKnowledgeGraph) -> None:
        # "string"/"num" prefixes mark constants produced by _map_name below.
        super().__init__(constant_type_prefixes={"string": types.STRING_TYPE,
                                                 "num": types.NUMBER_TYPE},
                         global_type_signatures=types.COMMON_TYPE_SIGNATURE,
                         global_name_mapping=types.COMMON_NAME_MAPPING)
        self.table_graph = table_graph

        # For every new Sempre column name seen, we update this counter to map it to a new NLTK name.
        self._column_counter = 0

        # This adds all of the cell and column names to our local name mapping.
        for entity in table_graph.entities:
            self._map_name(entity, keep_mapping=True)

        self._entity_set = set(table_graph.entities)

        # Terminal grammar productions, one per table entity and one per
        # global predicate: "<type signature> -> <name>".
        self.terminal_productions: Dict[str, str] = {}
        for entity in self._entity_set:
            mapped_name = self.local_name_mapping[entity]
            signature = self.local_type_signatures[mapped_name]
            self.terminal_productions[entity] = f"{signature} -> {entity}"

        for predicate, mapped_name in self.global_name_mapping.items():
            if mapped_name in self.global_type_signatures:
                signature = self.global_type_signatures[mapped_name]
                self.terminal_productions[predicate] = f"{signature} -> {predicate}"

        # We don't need to recompute this ever; let's just compute it once and cache it.
        # Starts as None and is presumably filled lazily elsewhere (not visible
        # here) -- logically Optional[Dict[str, List[str]]].
        self._valid_actions: Dict[str, List[str]] = None

    def is_table_entity(self, entity_name: str) -> bool:
        """
        Returns ``True`` if the given entity is one of the entities in the table.
        """
        return entity_name in self._entity_set

    @overrides
    def _get_curried_functions(self) -> Dict[Type, int]:
        # Class-level table shared by all instances; see comment above.
        return WikiTablesVariableFreeWorld.curried_functions

    @overrides
    def get_basic_types(self) -> Set[Type]:
        return types.BASIC_TYPES

    @overrides
    def get_valid_starting_types(self) -> Set[Type]:
        return types.STARTING_TYPES

    @overrides
    def _map_name(self, name: str, keep_mapping: bool = False) -> str:
        # Translate a Sempre-style name (fb:row.row.*, fb:cell.*, fb:part.*,
        # or a bare number) into an NLTK-safe name, registering the mapping
        # when keep_mapping is True.
        if name not in types.COMMON_NAME_MAPPING and name not in self.local_name_mapping:
            if not keep_mapping:
                raise ParsingError(f"Encountered un-mapped name: {name}")
            if name.startswith("fb:row.row"):
                # Column name: assigned sequential NLTK names C0, C1, ...
                translated_name = "C%d" % self._column_counter
                self._column_counter += 1
                self._add_name_mapping(name, translated_name, types.COLUMN_TYPE)
            elif name.startswith("fb:cell"):
                # Cell name: keep only the last dotted component.
                translated_name = "string:%s" % name.split(".")[-1]
                self._add_name_mapping(name, translated_name, types.STRING_TYPE)
            elif name.startswith("fb:part"):
                # part name
                translated_name = "string:%s" % name.split(".")[-1]
                self._add_name_mapping(name, translated_name, types.STRING_TYPE)
            else:
                # The only other unmapped names we should see are numbers.
                # NLTK throws an error if it sees a "." in constants, which will most likely happen
                # within numbers as a decimal point. We're changing those to underscores.
                translated_name = name.replace(".", "_")
                # re.match is anchored at the start, so this only fires for a
                # leading minus sign followed by digits/underscores.
                if re.match("-[0-9_]+", translated_name):
                    # The string is a negative number. This makes NLTK interpret this as a negated
                    # expression and force its type to be TRUTH_VALUE (t).
                    translated_name = translated_name.replace("-", "~")
                translated_name = f"num:{translated_name}"
                self._add_name_mapping(name, translated_name, types.NUMBER_TYPE)
        else:
            if name in types.COMMON_NAME_MAPPING:
                translated_name = types.COMMON_NAME_MAPPING[name]
            else:
                translated_name = self.local_name_mapping[name]
        return translated_name

    def get_agenda(self):
        # Heuristically build a list of terminal productions the parser should
        # try to use, from entity links plus keyword triggers in the question.
        agenda_items = self.table_graph.get_linked_agenda_items()
        # Global rules
        question_tokens = [token.text for token in self.table_graph.question_tokens]
        question = " ".join(question_tokens)
        for token in question_tokens:
            if token in ["next", "after", "below"]:
                agenda_items.append("next")
            if token in ["previous", "before", "above"]:
                agenda_items.append("previous")
            if token == "total":
                agenda_items.append("sum")
            if token == "difference":
                agenda_items.append("diff")
            if token == "average":
                agenda_items.append("average")
            if token in ["least", "top", "first", "smallest", "shortest", "lowest"]:
                # This condition is too brittle. But for most logical forms with "min", there are
                # semantically equivalent ones with "argmin". The exceptions are rare.
                if "what is the least" in question:
                    agenda_items.append("min")
                else:
                    agenda_items.append("argmin")
            if token in ["last", "most", "largest", "highest", "longest", "greatest"]:
                # This condition is too brittle. But for most logical forms with "max", there are
                # semantically equivalent ones with "argmax". The exceptions are rare.
                if "what is the most" in question:
                    agenda_items.append("max")
                else:
                    agenda_items.append("argmax")
        if "how many" in question or "number" in question:
            if "sum" not in agenda_items and "average" not in agenda_items:
                # The question probably just requires counting the rows. But this is not very
                # accurate. The question could also be asking for a value that is in the table.
                agenda_items.append("count")
        # Deduplicate and map each item to its grammar production.
        agenda = []
        for agenda_item in set(agenda_items):
            agenda.append(self.terminal_productions[agenda_item])
        return agenda
| 48.672619 | 102 | 0.635808 |
955d7b04d4d751cd95eac392f245cc47cade437f | 12,002 | py | Python | hmi_super/training/models_64_256.py | aasensio/DeepLearning | 71838115ce93e0ca96c8314cff3f07de1d64c235 | [
"MIT"
] | null | null | null | hmi_super/training/models_64_256.py | aasensio/DeepLearning | 71838115ce93e0ca96c8314cff3f07de1d64c235 | [
"MIT"
] | null | null | null | hmi_super/training/models_64_256.py | aasensio/DeepLearning | 71838115ce93e0ca96c8314cff3f07de1d64c235 | [
"MIT"
] | null | null | null | from keras.layers import Input, Conv2D, Activation, BatchNormalization, GaussianNoise, add, UpSampling2D
from keras.models import Model
from keras.regularizers import l2
import tensorflow as tf
from keras.engine.topology import Layer
from keras.engine import InputSpec
from keras.utils import conv_utils
def spatial_reflection_2d_padding(x, padding=((1, 1), (1, 1)), data_format=None):
    """Pads the 2nd and 3rd dimensions of a 4D tensor with reflected values.

    # Arguments
        x: Tensor or variable.
        padding: Tuple of 2 tuples, padding pattern.
        data_format: One of `channels_last` or `channels_first`.

    # Returns
        A padded 4D tensor.

    # Raises
        ValueError: if `data_format` is neither
            `channels_last` or `channels_first`.
    """
    assert len(padding) == 2
    assert len(padding[0]) == 2
    assert len(padding[1]) == 2
    if data_format is None:
        # Bug fix: `image_data_format()` was called without ever being
        # imported in this module, raising a NameError whenever the caller
        # omitted data_format.  Resolve it from the Keras backend.
        from keras import backend as K
        data_format = K.image_data_format()
    if data_format not in {'channels_first', 'channels_last'}:
        raise ValueError('Unknown data_format ' + str(data_format))

    # Never pad the batch dimension or the channel dimension; only the two
    # spatial axes get the requested (before, after) amounts.
    if data_format == 'channels_first':
        pattern = [[0, 0],
                   [0, 0],
                   list(padding[0]),
                   list(padding[1])]
    else:
        pattern = [[0, 0],
                   list(padding[0]), list(padding[1]),
                   [0, 0]]
    return tf.pad(x, pattern, "REFLECT")
class ReflectionPadding2D(Layer):
    """Reflection-padding layer for 2D input (e.g. picture).

    This layer can add reflected rows and columns
    at the top, bottom, left and right side of an image tensor.

    # Arguments
        padding: int, or tuple of 2 ints, or tuple of 2 tuples of 2 ints.
            - If int: the same symmetric padding
                is applied to width and height.
            - If tuple of 2 ints:
                interpreted as two different
                symmetric padding values for height and width:
                `(symmetric_height_pad, symmetric_width_pad)`.
            - If tuple of 2 tuples of 2 ints:
                interpreted as
                `((top_pad, bottom_pad), (left_pad, right_pad))`
        data_format: A string,
            one of `channels_last` (default) or `channels_first`.
            The ordering of the dimensions in the inputs.
            `channels_last` corresponds to inputs with shape
            `(batch, height, width, channels)` while `channels_first`
            corresponds to inputs with shape
            `(batch, channels, height, width)`.
            It defaults to the `image_data_format` value found in your
            Keras config file at `~/.keras/keras.json`.
            If you never set it, then it will be "channels_last".

    # Input shape
        4D tensor with shape:
        - If `data_format` is `"channels_last"`:
            `(batch, rows, cols, channels)`
        - If `data_format` is `"channels_first"`:
            `(batch, channels, rows, cols)`

    # Output shape
        4D tensor with shape:
        - If `data_format` is `"channels_last"`:
            `(batch, padded_rows, padded_cols, channels)`
        - If `data_format` is `"channels_first"`:
            `(batch, channels, padded_rows, padded_cols)`
    """

    def __init__(self,
                 padding=(1, 1),
                 data_format=None,
                 **kwargs):
        super(ReflectionPadding2D, self).__init__(**kwargs)
        self.data_format = conv_utils.normalize_data_format(data_format)
        # Normalize `padding` to the canonical form
        # ((top, bottom), (left, right)), whatever shape the caller used.
        if isinstance(padding, int):
            self.padding = ((padding, padding), (padding, padding))
        elif hasattr(padding, '__len__'):
            if len(padding) != 2:
                raise ValueError('`padding` should have two elements. '
                                 'Found: ' + str(padding))
            height_padding = conv_utils.normalize_tuple(padding[0], 2,
                                                        '1st entry of padding')
            width_padding = conv_utils.normalize_tuple(padding[1], 2,
                                                       '2nd entry of padding')
            self.padding = (height_padding, width_padding)
        else:
            raise ValueError('`padding` should be either an int, '
                             'a tuple of 2 ints '
                             '(symmetric_height_pad, symmetric_width_pad), '
                             'or a tuple of 2 tuples of 2 ints '
                             '((top_pad, bottom_pad), (left_pad, right_pad)). '
                             'Found: ' + str(padding))
        self.input_spec = InputSpec(ndim=4)

    def compute_output_shape(self, input_shape):
        # Spatial dims grow by (before + after) padding; None (unknown)
        # dimensions are propagated as None.
        if self.data_format == 'channels_first':
            if input_shape[2] is not None:
                rows = input_shape[2] + self.padding[0][0] + self.padding[0][1]
            else:
                rows = None
            if input_shape[3] is not None:
                cols = input_shape[3] + self.padding[1][0] + self.padding[1][1]
            else:
                cols = None
            return (input_shape[0],
                    input_shape[1],
                    rows,
                    cols)
        elif self.data_format == 'channels_last':
            if input_shape[1] is not None:
                rows = input_shape[1] + self.padding[0][0] + self.padding[0][1]
            else:
                rows = None
            if input_shape[2] is not None:
                cols = input_shape[2] + self.padding[1][0] + self.padding[1][1]
            else:
                cols = None
            return (input_shape[0],
                    rows,
                    cols,
                    input_shape[3])

    def call(self, inputs):
        # Delegate to the module-level tf.pad wrapper (REFLECT mode).
        return spatial_reflection_2d_padding(inputs,
                                             padding=self.padding,
                                             data_format=self.data_format)

    def get_config(self):
        # Serialize constructor args so the layer can be re-created from config.
        config = {'padding': self.padding,
                  'data_format': self.data_format}
        base_config = super(ReflectionPadding2D, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
def keepsize(nx, ny, noise, depth, activation='relu', n_filters=64, l2_reg=1e-7):
    """
    Deep residual network that keeps the size of the input throughout the whole network
    """
    def conv3x3(tensor, filters):
        # Reflection-padded 3x3 convolution: output has the same spatial size.
        padded = ReflectionPadding2D()(tensor)
        return Conv2D(filters, (3, 3), padding='valid',
                      kernel_initializer='he_normal',
                      kernel_regularizer=l2(l2_reg))(padded)

    def residual(tensor, filters):
        # conv-BN-act-conv-BN with an identity skip connection.
        branch = conv3x3(tensor, filters)
        branch = BatchNormalization()(branch)
        branch = Activation(activation)(branch)
        branch = conv3x3(branch, filters)
        branch = BatchNormalization()(branch)
        return add([branch, tensor])

    net_input = Input(shape=(nx, ny, 1))
    t = GaussianNoise(noise)(net_input)

    # Stem convolution; its activation feeds the long skip connection below.
    t0 = Activation(activation)(conv3x3(t, n_filters))

    t = t0
    for _ in range(depth):
        t = residual(t, n_filters)

    t = BatchNormalization()(conv3x3(t, n_filters))
    t = add([t, t0])

    # Upsampling for superresolution
    t = UpSampling2D()(t)
    t = Activation(activation)(conv3x3(t, 4 * n_filters))

    output = Conv2D(1, (1, 1), padding='same', kernel_initializer='he_normal',
                    kernel_regularizer=l2(l2_reg))(t)

    return Model(inputs=net_input, outputs=output)
def encdec(nx, ny, noise, depth, activation='relu', n_filters=64):
    """
    Deep residual network using an encoder-decoder approach. It uses reflection padding.

    Parameters: nx/ny input size, depth = number of residual blocks at the
    bottleneck, n_filters = base filter count.

    NOTE(review): the `noise` parameter is accepted but never used (unlike
    `keepsize`, no GaussianNoise layer is applied).  It is kept in the
    signature for interface compatibility -- confirm whether a noise layer
    was intended.  Dead code removed: unused `residual_down`/`residual_up`
    helpers and the commented-out extra encoder/decoder level.
    """
    def residual(inputs, n_filters):
        # conv-BN-act-conv-BN with an identity skip connection.
        x = ReflectionPadding2D()(inputs)
        x = Conv2D(n_filters, (3, 3), padding='valid', kernel_initializer='he_normal', kernel_regularizer=l2(1e-7))(x)
        x = BatchNormalization()(x)
        x = Activation(activation)(x)
        x = ReflectionPadding2D()(x)
        x = Conv2D(n_filters, (3, 3), padding='valid', kernel_initializer='he_normal', kernel_regularizer=l2(1e-7))(x)
        x = BatchNormalization()(x)
        x = add([x, inputs])
        return x

    inputs = Input(shape=(nx, ny, 1))

    # in: (nx,ny,1) -> out: (nx,ny,n_filters)
    x = ReflectionPadding2D()(inputs)
    x = Conv2D(n_filters, (3, 3), padding='valid', kernel_initializer='he_normal', kernel_regularizer=l2(1e-7))(x)
    x = BatchNormalization()(x)
    x = Activation(activation)(x)

    # Encoder. in: (nx,ny,n_filters) -> out: (nx/2,ny/2,2*n_filters)
    x = ReflectionPadding2D()(x)
    x = Conv2D(2*n_filters, (3, 3), strides=2, padding='valid', kernel_initializer='he_normal', kernel_regularizer=l2(1e-7))(x)
    x = BatchNormalization()(x)
    x = Activation(activation)(x)

    # Bottleneck residual blocks at (nx/2,ny/2,2*n_filters).
    for i in range(depth):
        x = residual(x, 2*n_filters)

    # Decoder. in: (nx/2,ny/2,2*n_filters) -> out: (nx,ny,n_filters)
    x = UpSampling2D(size=(2,2))(x)
    x = ReflectionPadding2D()(x)
    x = Conv2D(n_filters, (3, 3), padding='valid', kernel_initializer='he_normal', kernel_regularizer=l2(1e-7))(x)
    x = BatchNormalization()(x)
    x = Activation(activation)(x)

    # Superresolution step. in: (nx,ny,n_filters) -> out: (2*nx,2*ny,n_filters)
    x = UpSampling2D(size=(2,2))(x)
    x = ReflectionPadding2D()(x)
    x = Conv2D(n_filters, (3, 3), padding='valid', kernel_initializer='he_normal', kernel_regularizer=l2(1e-7))(x)
    x = BatchNormalization()(x)
    x = Activation(activation)(x)

    final = Conv2D(1, (1, 1), padding='same', kernel_initializer='he_normal', kernel_regularizer=l2(1e-7))(x)

    return Model(inputs=inputs, outputs=final)
e9cbadfef75af60fb7c8d46c3caf868853aba72e | 1,170 | py | Python | tests/storage/cases/test_KT1Cz7TyVFvHxXpxLS57RFePrhTGisUpPhvD_babylon.py | juztin/pytezos-1 | 7e608ff599d934bdcf129e47db43dbdb8fef9027 | [
"MIT"
] | 1 | 2020-08-11T02:31:24.000Z | 2020-08-11T02:31:24.000Z | tests/storage/cases/test_KT1Cz7TyVFvHxXpxLS57RFePrhTGisUpPhvD_babylon.py | juztin/pytezos-1 | 7e608ff599d934bdcf129e47db43dbdb8fef9027 | [
"MIT"
] | 1 | 2020-12-30T16:44:56.000Z | 2020-12-30T16:44:56.000Z | tests/storage/cases/test_KT1Cz7TyVFvHxXpxLS57RFePrhTGisUpPhvD_babylon.py | juztin/pytezos-1 | 7e608ff599d934bdcf129e47db43dbdb8fef9027 | [
"MIT"
] | 1 | 2022-03-20T19:01:00.000Z | 2022-03-20T19:01:00.000Z | from unittest import TestCase
from tests import get_data
from pytezos.michelson.converter import build_schema, decode_micheline, encode_micheline, micheline_to_michelson
class StorageTestKT1Cz7TyVFvHxXpxLS57RFePrhTGisUpPhvD_babylon(TestCase):
    """Round-trip and formatting checks for one mainnet contract's storage."""

    @classmethod
    def setUpClass(cls):
        cls.maxDiff = None
        cls.contract = get_data('storage/mainnet/KT1Cz7TyVFvHxXpxLS57RFePrhTGisUpPhvD_babylon.json')

    def test_storage_encoding_KT1Cz7TyVFvHxXpxLS57RFePrhTGisUpPhvD_babylon(self):
        """Decoding then re-encoding the storage must reproduce the original Micheline."""
        script = self.contract['script']
        type_expr = script['code'][1]
        val_expr = script['storage']
        schema = build_schema(type_expr)
        decoded = decode_micheline(val_expr, type_expr, schema)
        self.assertEqual(val_expr, encode_micheline(decoded, schema))

    def test_storage_schema_KT1Cz7TyVFvHxXpxLS57RFePrhTGisUpPhvD_babylon(self):
        """The parameter section of the script must build a schema without errors."""
        build_schema(self.contract['script']['code'][0])

    def test_storage_format_KT1Cz7TyVFvHxXpxLS57RFePrhTGisUpPhvD_babylon(self):
        """Both code and storage must convert to Michelson source without errors."""
        micheline_to_michelson(self.contract['script']['code'])
        micheline_to_michelson(self.contract['script']['storage'])
| 41.785714 | 112 | 0.757265 |
d1b3b7ab31e4cd87134da914d659376f7f3fdb0b | 12,708 | py | Python | packages/girder/girder/constants.py | ShenQianwithC/HistomicsTK | 4ad7e72a7ebdabbdfc879254fad04ce7ca47e320 | [
"Apache-2.0"
] | null | null | null | packages/girder/girder/constants.py | ShenQianwithC/HistomicsTK | 4ad7e72a7ebdabbdfc879254fad04ce7ca47e320 | [
"Apache-2.0"
] | null | null | null | packages/girder/girder/constants.py | ShenQianwithC/HistomicsTK | 4ad7e72a7ebdabbdfc879254fad04ce7ca47e320 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright 2013 Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
"""
Constants should be defined here.
"""
import os
import json
import sys
# Filesystem locations derived from where this package is installed.
PACKAGE_DIR = os.path.dirname(os.path.abspath(__file__))
ROOT_DIR = os.path.dirname(PACKAGE_DIR)

# Log files live under the invoking user's home directory.
LOG_ROOT = os.path.join(os.path.expanduser('~'), '.girder', 'logs')
MAX_LOG_SIZE = 1024 * 1024 * 10  # Size in bytes before logs are rotated.
LOG_BACKUP_COUNT = 5

# Registry of access flags; populated via registerAccessFlag() below.
ACCESS_FLAGS = {}

# Identifier for Girder's entry in the route table
GIRDER_ROUTE_ID = 'core_girder'
GIRDER_STATIC_ROUTE_ID = 'core_static_root'

# Threshold below which text search results will be sorted by their text score.
# Setting this too high causes mongodb to use too many resources for searches
# that yield lots of results.
TEXT_SCORE_SORT_MAX = 200

# Get the version information
VERSION = {  # Set defaults in case girder-version.json doesn't exist
    'git': False,
    'SHA': None,
    'shortSHA': None,
    'apiVersion': None,
    'date': None
}
try:
    with open(os.path.join(PACKAGE_DIR, 'girder-version.json')) as f:
        VERSION.update(json.load(f))
except IOError:
    # A missing version file is tolerated; the defaults above are kept.
    pass

#: The local directory containing the static content.
STATIC_PREFIX = os.path.join(sys.prefix, 'share', 'girder')
STATIC_ROOT_DIR = os.path.join(STATIC_PREFIX, 'static')
def registerAccessFlag(key, name, description=None, admin=False):
    """
    Register a new access flag in the set of ACCESS_FLAGS available
    on data in the hierarchy. These are boolean switches that can be used
    to control access to specific functionality on specific resources.

    :param key: The unique identifier for this access flag.
    :type key: str
    :param name: Human readable name for this permission (displayed in UI).
    :type name: str
    :param description: Human readable longer description for the flag.
    :type description: str
    :param admin: Set this to True to only allow site admin users to set
        this flag. If True, the flag will only appear in the list for
        site admins. This can be useful for flags with security
        considerations.
    """
    flag = {'name': name, 'description': description, 'admin': admin}
    ACCESS_FLAGS[key] = flag
class TerminalColor(object):
    """
    ANSI escape sequences for coloring text printed to the terminal.
    Each helper wraps its argument in the matching color code and a reset.
    """
    ERROR = '\033[1;91m'
    SUCCESS = '\033[32m'
    WARNING = '\033[1;33m'
    INFO = '\033[35m'
    ENDC = '\033[0m'

    @staticmethod
    def _color(tag, text):
        # Emit the color tag, the text, then the reset sequence.
        return tag + text + TerminalColor.ENDC

    @staticmethod
    def error(text):
        return TerminalColor._color(TerminalColor.ERROR, text)

    @staticmethod
    def success(text):
        return TerminalColor._color(TerminalColor.SUCCESS, text)

    @staticmethod
    def warning(text):
        return TerminalColor._color(TerminalColor.WARNING, text)

    @staticmethod
    def info(text):
        return TerminalColor._color(TerminalColor.INFO, text)
class AssetstoreType(object):
    """
    All possible assetstore implementation types.
    """
    # Local filesystem-backed storage.
    FILESYSTEM = 0
    # GridFS-backed storage.
    GRIDFS = 1
    # S3 (or S3-compatible) object storage.
    S3 = 2
class AccessType(object):
    """
    Represents the level of access granted to a user or group on an
    AccessControlledModel. Having a higher access level on a resource also
    confers all of the privileges of the lower levels.

    Semantically, READ access on a resource means that the user can see all
    the information pertaining to the resource, but cannot modify it.

    WRITE access usually means the user can modify aspects of the resource.

    ADMIN access confers total control; the user can delete the resource and
    also manage permissions for other users on it.
    """
    NONE = -1
    READ = 0
    WRITE = 1
    ADMIN = 2
    SITE_ADMIN = 100

    @classmethod
    def validate(cls, level):
        """Coerce *level* to int and return it; raise ValueError if unknown."""
        level = int(level)
        if level not in (cls.NONE, cls.READ, cls.WRITE, cls.ADMIN, cls.SITE_ADMIN):
            raise ValueError('Invalid AccessType: %d.' % level)
        return level
class SettingKey(object):
    """
    Core settings should be enumerated here by a set of constants corresponding
    to sensible strings.
    """
    # NOTE(review): the value spellings mix dot and underscore styles (e.g.
    # 'core.smtp_host' vs 'core.smtp.port').  These strings are persisted
    # setting identifiers -- do not normalize them.
    ADD_TO_GROUP_POLICY = 'core.add_to_group_policy'
    API_KEYS = 'core.api_keys'
    BANNER_COLOR = 'core.banner_color'
    BRAND_NAME = 'core.brand_name'
    COLLECTION_CREATE_POLICY = 'core.collection_create_policy'
    PRIVACY_NOTICE = 'core.privacy_notice'
    CONTACT_EMAIL_ADDRESS = 'core.contact_email_address'
    COOKIE_LIFETIME = 'core.cookie_lifetime'
    CORS_ALLOW_HEADERS = 'core.cors.allow_headers'
    CORS_ALLOW_METHODS = 'core.cors.allow_methods'
    CORS_ALLOW_ORIGIN = 'core.cors.allow_origin'
    CORS_EXPOSE_HEADERS = 'core.cors.expose_headers'
    EMAIL_FROM_ADDRESS = 'core.email_from_address'
    EMAIL_HOST = 'core.email_host'
    EMAIL_VERIFICATION = 'core.email_verification'
    ENABLE_PASSWORD_LOGIN = 'core.enable_password_login'
    GIRDER_MOUNT_INFORMATION = 'core.girder_mount_information'
    ENABLE_NOTIFICATION_STREAM = 'core.enable_notification_stream'
    PLUGINS_ENABLED = 'core.plugins_enabled'
    REGISTRATION_POLICY = 'core.registration_policy'
    ROUTE_TABLE = 'core.route_table'
    SECURE_COOKIE = 'core.secure_cookie'
    SERVER_ROOT = 'core.server_root'
    SMTP_ENCRYPTION = 'core.smtp.encryption'
    SMTP_HOST = 'core.smtp_host'
    SMTP_PASSWORD = 'core.smtp.password'
    SMTP_PORT = 'core.smtp.port'
    SMTP_USERNAME = 'core.smtp.username'
    UPLOAD_MINIMUM_CHUNK_SIZE = 'core.upload_minimum_chunk_size'
    USER_DEFAULT_FOLDERS = 'core.user_default_folders'
class SettingDefault(object):
    """
    Core settings that have a default should be enumerated here with the
    SettingKey.
    """
    # NOTE(review): mutable defaults below (dict/list values) are shared
    # objects; consumers should copy before mutating -- TODO confirm usage.
    defaults = {
        SettingKey.ADD_TO_GROUP_POLICY: 'never',
        SettingKey.API_KEYS: True,
        SettingKey.BANNER_COLOR: '#3F3B3B',
        SettingKey.BRAND_NAME: 'Girder',
        SettingKey.COLLECTION_CREATE_POLICY: {
            'open': False,
            'groups': [],
            'users': []
        },
        SettingKey.CONTACT_EMAIL_ADDRESS: 'kitware@kitware.com',
        SettingKey.PRIVACY_NOTICE: 'https://www.kitware.com/privacy',
        SettingKey.COOKIE_LIFETIME: 180,
        # These headers are necessary to allow the web server to work with just
        # changes to the CORS origin
        SettingKey.CORS_ALLOW_HEADERS:
            'Accept-Encoding, Authorization, Content-Disposition, '
            'Content-Type, Cookie, Girder-Authorization, Girder-OTP, Girder-Token',
        SettingKey.CORS_EXPOSE_HEADERS: 'Girder-Total-Count',
        # An apache server using reverse proxy would also need
        # X-Requested-With, X-Forwarded-Server, X-Forwarded-For,
        # X-Forwarded-Host, Remote-Addr
        SettingKey.EMAIL_VERIFICATION: 'disabled',
        SettingKey.EMAIL_FROM_ADDRESS: 'Girder <no-reply@girder.org>',
        SettingKey.ENABLE_PASSWORD_LOGIN: True,
        SettingKey.ENABLE_NOTIFICATION_STREAM: True,
        SettingKey.PLUGINS_ENABLED: [],
        SettingKey.REGISTRATION_POLICY: 'open',
        SettingKey.SMTP_HOST: 'localhost',
        SettingKey.SMTP_PORT: 25,
        SettingKey.SMTP_ENCRYPTION: 'none',
        SettingKey.UPLOAD_MINIMUM_CHUNK_SIZE: 1024 * 1024 * 5,
        SettingKey.USER_DEFAULT_FOLDERS: 'public_private'
    }
class SortDir(object):
    """Sort direction constants: 1 for ascending order, -1 for descending."""
    ASCENDING = 1
    DESCENDING = -1
class TokenScope(object):
    """
    Constants for core token scope strings. Token scopes must not contain
    spaces, since many services accept scope lists as a space-separated list
    of strings.
    """
    ANONYMOUS_SESSION = 'core.anonymous_session'
    USER_AUTH = 'core.user_auth'
    TEMPORARY_USER_AUTH = 'core.user_auth.temporary'
    EMAIL_VERIFICATION = 'core.email_verification'
    PLUGINS_ENABLED_READ = 'core.plugins.read'
    SETTINGS_READ = 'core.setting.read'
    ASSETSTORES_READ = 'core.assetstore.read'
    PARTIAL_UPLOAD_READ = 'core.partial_upload.read'
    PARTIAL_UPLOAD_CLEAN = 'core.partial_upload.clean'
    DATA_READ = 'core.data.read'
    DATA_WRITE = 'core.data.write'
    DATA_OWN = 'core.data.own'
    USER_INFO_READ = 'core.user_info.read'

    # Registered scope descriptions, split by whether they are admin-only.
    _customScopes = []
    _adminCustomScopes = []
    _scopeIds = set()
    _adminScopeIds = set()

    @classmethod
    def describeScope(cls, scopeId, name, description, admin=False):
        """
        Register a description of a scope.

        :param scopeId: The unique identifier string for the scope.
        :type scopeId: str
        :param name: A short human readable name for the scope.
        :type name: str
        :param description: A more complete description of the scope.
        :type description: str
        :param admin: If this scope only applies to admin users, set to True.
        :type admin: bool
        """
        entry = {
            'id': scopeId,
            'name': name,
            'description': description
        }
        # Route the registration into the admin or the regular registry.
        if admin:
            registry, ids = cls._adminCustomScopes, cls._adminScopeIds
        else:
            registry, ids = cls._customScopes, cls._scopeIds
        registry.append(entry)
        ids.add(scopeId)

    @classmethod
    def listScopes(cls):
        """Return all registered scope descriptions, keyed by category."""
        return dict(custom=cls._customScopes, adminCustom=cls._adminCustomScopes)

    @classmethod
    def scopeIds(cls, admin=False):
        """Return the set of registered scope ids; include admin-only ids if
        ``admin`` is True."""
        return (cls._scopeIds | cls._adminScopeIds) if admin else cls._scopeIds
# Register human-readable descriptions for each core token scope so that they
# can be presented to users (e.g. when authorizing a token or API key).
TokenScope.describeScope(
    TokenScope.USER_INFO_READ, 'Read your user information',
    'Allows clients to look up your user information, including private fields '
    'such as email address.')
TokenScope.describeScope(
    TokenScope.DATA_READ, 'Read data',
    'Allows clients to read all data that you have access to.')
TokenScope.describeScope(
    TokenScope.DATA_WRITE, 'Write data',
    'Allows clients to edit data in the hierarchy and create new data anywhere '
    'you have write access.')
TokenScope.describeScope(
    TokenScope.DATA_OWN, 'Data ownership', 'Allows administrative control '
    'on data you own, including setting access control and deletion.'
)
# The remaining scopes are admin-only.
TokenScope.describeScope(
    TokenScope.PLUGINS_ENABLED_READ, 'See enabled plugins', 'Allows clients '
    'to see the list of plugins enabled on the server.', admin=True)
TokenScope.describeScope(
    TokenScope.SETTINGS_READ, 'See system setting values', 'Allows clients to '
    'view the value of any system setting.', admin=True)
TokenScope.describeScope(
    TokenScope.ASSETSTORES_READ, 'View assetstores', 'Allows clients to see '
    'all assetstore information.', admin=True)
TokenScope.describeScope(
    TokenScope.PARTIAL_UPLOAD_READ, 'View unfinished uploads.',
    'Allows clients to see all partial uploads.', admin=True)
TokenScope.describeScope(
    TokenScope.PARTIAL_UPLOAD_CLEAN, 'Remove unfinished uploads.',
    'Allows clients to remove unfinished uploads.', admin=True)
class CoreEventHandler(object):
    """
    This enum represents handler identifier strings for core event handlers.
    If you wish to unbind a core event handler, use one of these as the
    ``handlerName`` argument. Unbinding core event handlers can be used to
    disable certain default functionalities.
    """
    # For removing deleted user/group references from AccessControlledModel
    ACCESS_CONTROL_CLEANUP = 'core.cleanupDeletedEntity'

    # For updating an item's size to include a new file.
    FILE_PROPAGATE_SIZE = 'core.propagateSizeToItem'

    # For adding a group's creator into its ACL at creation time.
    GROUP_CREATOR_ACCESS = 'core.grantCreatorAccess'

    # For creating the default Public and Private folders at user creation time.
    USER_DEFAULT_FOLDERS = 'core.addDefaultFolders'

    # For adding a user into its own ACL.
    USER_SELF_ACCESS = 'core.grantSelfAccess'

    # For updating the cached webroot HTML when settings change.
    WEBROOT_SETTING_CHANGE = 'core.updateWebrootSettings'
| 35.202216 | 83 | 0.687205 |
1b313bac3505d345f0d513d2599b7c70f24e130b | 488 | py | Python | 32_TrocoCompraEmNotas.py | JBCFurtado/Python | f9cdee54b8f5f1376693efeeef9b99fa2f78469b | [
"MIT"
] | null | null | null | 32_TrocoCompraEmNotas.py | JBCFurtado/Python | f9cdee54b8f5f1376693efeeef9b99fa2f78469b | [
"MIT"
] | null | null | null | 32_TrocoCompraEmNotas.py | JBCFurtado/Python | f9cdee54b8f5f1376693efeeef9b99fa2f78469b | [
"MIT"
] | null | null | null | cedula1 = 1
cedula2 = 10  # 10-real note denomination (cedula1 = 1 is defined above)
cedula3 = 50  # 50-real note denomination
valor_compra = int(input('Digite o valor da compra: '))
valor_dinheiro = int(input('Digite o valor do dinheiro: '))
# NOTE(review): abs() hides the case where the cash handed over is smaller
# than the purchase price -- presumably the inputs are expected to satisfy
# valor_dinheiro >= valor_compra; confirm before relying on this.
troco = abs(valor_compra - valor_dinheiro)
troco1 = troco // cedula3  # number of 50-real notes
troco_resto = troco % cedula3
troco2 = troco_resto // cedula2  # number of 10-real notes
troco3 = troco_resto % cedula2  # remainder is paid with 1-real notes
print('\nTroco: ', troco)
print('\nNúmeros de cédulas abaixo:')
print(troco1, 'Cédulas de 50 reais')
print(troco2, 'Cédulas de 10 reais')
print(troco3, 'Cédulas de 1 real')
f04a615362de17310d7cc009b4be5d46e9f64415 | 1,863 | py | Python | dc-mixer/__main__.py | paunin/docker-compose-mixer | 414e9519d38098980bb8597c2f8d74c6ddcb1c29 | [
"MIT"
] | 19 | 2016-01-14T13:55:10.000Z | 2021-01-18T12:26:21.000Z | dc-mixer/__main__.py | paunin/docker-compose-mixer | 414e9519d38098980bb8597c2f8d74c6ddcb1c29 | [
"MIT"
] | 6 | 2015-10-01T10:33:50.000Z | 2016-04-13T03:12:11.000Z | dc-mixer/__main__.py | paunin/docker-compose-mixer | 414e9519d38098980bb8597c2f8d74c6ddcb1c29 | [
"MIT"
] | 3 | 2016-02-04T15:09:24.000Z | 2016-12-20T13:16:31.000Z | import os
import sys
import getopt
import logging
from dc_mixer import DcMixer
from dc_mixer import ScopesContainer
def main(argv):
    """Entry point for the ``dc-mixer`` command line tool.

    Parses the command line options, falls back to conventional input/output
    file names in the current working directory, and runs the mixer.

    :param argv: command line arguments, without the program name
                 (i.e. ``sys.argv[1:]``).
    """

    def usage():
        # Printed for -h/--help and on option-parsing errors.
        # (Fix: the original text listed the `-h, --help` line twice.)
        print(
            'Compile docker-compose from several docker-compose.yml files\n\n'
            'Usage:\n'
            '  dc-mixer [options]\n\n'
            'Options:\n'
            '  -h, --help           Print help information\n'
            '  -i, --input-file     Input file (default `docker-compose-mixer.yml` in current directory)\n'
            '  -o, --output-file    Output file (default `docker-compose.yml` in current directory)\n'
            '  -v, --verbose        Enable verbose mode\n\n'
            'For more information read documentation: https://github.com/paunin/docker-compose-mixer'
        )

    input_file = None
    output_file = None

    try:
        opts, args = getopt.getopt(argv, "hvo:i:", ["help", "verbose", "output-file=", "input-file="])
    except getopt.GetoptError:
        # Unknown option or missing argument: show usage and exit non-zero.
        usage()
        sys.exit(2)

    for opt, arg in opts:
        if opt in ("-h", "--help"):
            usage()
            sys.exit(0)
        if opt in ("-v", "--verbose"):
            logging.basicConfig(level=logging.DEBUG)
        if opt in ("-i", "--input-file"):
            input_file = arg
        if opt in ("-o", "--output-file"):
            output_file = arg

    # Default to conventional file names in the current directory.
    if not input_file:
        input_file = os.getcwd() + '/docker-compose-mixer.yml'
    if not output_file:
        output_file = os.getcwd() + '/docker-compose.yml'

    mixer = DcMixer(input_file, output_file, ScopesContainer())
    mixer.process()


# ----------------------------------- #
# ------------ main call ------------ #
# ----------------------------------- #
if __name__ == '__main__':
    main(sys.argv[1:])
| 30.540984 | 112 | 0.520666 |
129bbc6723f775dfde08ff4c06c5418d85d4903a | 1,534 | py | Python | output/models/nist_data/atomic/base64_binary/schema_instance/nistschema_sv_iv_atomic_base64_binary_enumeration_5_xsd/nistschema_sv_iv_atomic_base64_binary_enumeration_5.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | 1 | 2021-08-14T17:59:21.000Z | 2021-08-14T17:59:21.000Z | output/models/nist_data/atomic/base64_binary/schema_instance/nistschema_sv_iv_atomic_base64_binary_enumeration_5_xsd/nistschema_sv_iv_atomic_base64_binary_enumeration_5.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | 4 | 2020-02-12T21:30:44.000Z | 2020-04-15T20:06:46.000Z | output/models/nist_data/atomic/base64_binary/schema_instance/nistschema_sv_iv_atomic_base64_binary_enumeration_5_xsd/nistschema_sv_iv_atomic_base64_binary_enumeration_5.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
] | null | null | null | from dataclasses import dataclass, field
from enum import Enum
from typing import Optional
__NAMESPACE__ = "NISTSchema-SV-IV-atomic-base64Binary-enumeration-5-NS"


class NistschemaSvIvAtomicBase64BinaryEnumeration5Type(Enum):
    # Generated binding: closed set of permitted base64Binary values for the
    # schema's enumeration facet. Members store the *decoded* byte strings.
    ZWNKBM_VKCN_JH_ZG9M_YMPWB3_JWC25IC3C = b"ecdnedrradofbjporpsnbsw"
    ZM_JK_Z3_FT_Y2H0D_XD5E_GRNB2_VZ_ZM_FOC25S_YWZTE_HZ4C_WNNC_WRZA_WXW_ZWNK_YMPTB_XRI_ZNC = b"fbdgqmchtuwyxdgoesfahsnlafmxvxqcgqdsilpecdbjmmtbfw"
    D_WTYAM_FVD_GTJB_W93B_XBP_ZWHXC_GFXB_HB1A2_ZR_ZW95E_HN1B2PVA_XVYAM_VRE_G9Z_Y2P2BMDYB_W10A_HV5A2LSC_W1T_Y2THBW = b"ukrjaotkcmowmpiehqpaqlpukfkeoyxsuojoiurjekxoscjvngrmmthuykilqmmckao"
    D_GDM_Y2DLBM_ZUDM14B_HF5_ZNLIE_XBRE_G9K_ZXV4CMXHAN_VJD_GDVB_XFQE_GLID_XNRE_W1UC_GJIA_GTKBN_B5_YWPSCW = b"tgfcgenfnvmxlqyfybypkxodeuxrlajuctgomqjxibuskymnpbbhkdnpyajls"
    E_HRHDN_FKA_XNX_ZQ = b"xtavqdisqe"
    AN_V0_YN_BV_Y2_JUE_XB0_YXBTC_HFYCN_FYB_WXVAN_FKE_XDT_Y3LLB3_N0BMDTBM_RX_YQ = b"jutbpocbnyptapmpqrrqrmlojqdywmcyeostngmndqa"
    C_GJ1B_GHKE_GZWC2HOA3_B3A_WTM_YWPQA_W5NB_GXKA_GLWANH0A_HLIA_W9QA_WNPDM_JPDM54C_Q = b"pbulhdxfpshhkpwikfajjinglldhipjxthybiojicivbivnxq"
@dataclass
class NistschemaSvIvAtomicBase64BinaryEnumeration5:
    # Generated root-element binding for the
    # "NISTSchema-SV-IV-atomic-base64Binary-enumeration-5" element: a single
    # required value restricted to NistschemaSvIvAtomicBase64BinaryEnumeration5Type.
    class Meta:
        name = "NISTSchema-SV-IV-atomic-base64Binary-enumeration-5"
        namespace = "NISTSchema-SV-IV-atomic-base64Binary-enumeration-5-NS"

    # Element content; serialized/parsed as base64 per the field metadata.
    value: Optional[NistschemaSvIvAtomicBase64BinaryEnumeration5Type] = field(
        default=None,
        metadata={
            "required": True,
            "format": "base64",
        }
    )
| 49.483871 | 186 | 0.837027 |
d98a61a995dca7b8aecaca047a41aa38fc3c0c09 | 15,442 | py | Python | slack/tests/test_sansio.py | autoferrit/slack-sansio | 3b4d25e0c96745657cffe13699bf74304c74b026 | [
"MIT"
] | null | null | null | slack/tests/test_sansio.py | autoferrit/slack-sansio | 3b4d25e0c96745657cffe13699bf74304c74b026 | [
"MIT"
] | null | null | null | slack/tests/test_sansio.py | autoferrit/slack-sansio | 3b4d25e0c96745657cffe13699bf74304c74b026 | [
"MIT"
] | null | null | null | import json
import time
import logging
import mock
import pytest
from slack import sansio, methods, exceptions
class TestRequest:
    """Tests for ``slack.sansio`` request preparation.

    ``token`` and ``message`` are pytest fixtures supplied by the test suite.
    JSON-capable endpoints (e.g. ``AUTH_TEST``) serialize the payload and set
    an Authorization header; non-JSON endpoints (e.g. ``AUTH_REVOKE``) keep a
    dict body with the token injected.
    """

    def test_prepare_request(self, token):
        url, body, headers = sansio.prepare_request(
            methods.AUTH_TEST, {}, {}, {}, token
        )
        assert url == "https://slack.com/api/auth.test"
        assert body == "{}"
        assert "Authorization" in headers
        assert "Content-type" in headers
        assert headers["Content-type"] == "application/json; charset=utf-8"

        url, body, headers = sansio.prepare_request(
            methods.AUTH_REVOKE, {}, {}, {}, token
        )
        assert url == "https://slack.com/api/auth.revoke"
        assert body == {"token": token}
        assert headers == {}

    # Method enum, bare method name and full URL must all resolve identically.
    @pytest.mark.parametrize(
        "url", (methods.AUTH_TEST, "auth.test", "https://slack.com/api/auth.test")
    )
    def test_prepare_request_urls(self, url):
        clean_url, _, _ = sansio.prepare_request(url, {}, {}, {}, "")
        assert clean_url == "https://slack.com/api/auth.test"

    def test_prepare_request_url_hook(self):
        # Incoming-webhook URLs must pass through untouched.
        clean_url, _, _ = sansio.prepare_request(
            "https://hooks.slack.com/T0000000/aczvrfver", {}, {}, {}, ""
        )
        assert clean_url == "https://hooks.slack.com/T0000000/aczvrfver"

    @pytest.mark.parametrize(
        "payload,result",
        (
            ({"foo": "bar"}, {"foo": "bar"}),
            (
                {"foo": "bar", "attachements": [{"a": "b"}]},
                {"foo": "bar", "attachements": [{"a": "b"}]},
            ),
        ),
    )
    def test_prepare_request_body(self, token, payload, result):
        _, body, headers = sansio.prepare_request(
            methods.AUTH_TEST, payload, {}, {}, token
        )
        assert isinstance(body, str)
        assert body == json.dumps(result)
        assert "Authorization" in headers
        assert "Content-type" in headers
        assert headers["Content-type"] == "application/json; charset=utf-8"

        _, body, headers = sansio.prepare_request(
            methods.AUTH_REVOKE, payload, {}, {}, token
        )
        # Non-JSON endpoint: token is merged into the dict body.
        result["token"] = token
        assert isinstance(body, dict)
        assert body == result

    @pytest.mark.parametrize(
        "payload,result",
        (
            ({"foo": "bar"}, '{"foo": "bar"}'),
            (
                {"foo": "bar", "attachements": [{"a": "b"}]},
                '{"foo": "bar", "attachements": [{"a": "b"}]}',
            ),
        ),
    )
    def test_prepare_request_body_hook(self, token, payload, result):
        _, body, headers = sansio.prepare_request(
            "https://hooks.slack.com/abcdefg", payload, {}, {}, token
        )
        assert body == result
        assert "Authorization" in headers
        assert "Content-type" in headers
        assert headers["Content-type"] == "application/json; charset=utf-8"

    def test_prepare_request_body_message(self, token, message):
        _, body, headers = sansio.prepare_request(
            methods.AUTH_TEST, message, {}, {}, token
        )
        assert isinstance(body, str)
        assert "Authorization" in headers
        assert "Content-type" in headers
        assert headers["Content-type"] == "application/json; charset=utf-8"

        _, body, headers = sansio.prepare_request(
            methods.AUTH_REVOKE, message, {}, {}, token
        )
        # Message attachments are JSON-serialized even inside a dict body.
        assert isinstance(body, dict)
        assert isinstance(body.get("attachments", ""), str)
        assert body["token"] == token

    def test_prepare_request_body_message_force_json(self, token, message):
        _, body, headers = sansio.prepare_request(
            methods.AUTH_REVOKE, message, {}, {}, token, as_json=True
        )
        assert isinstance(body, str)
        assert "Authorization" in headers
        assert "Content-type" in headers
        assert headers["Content-type"] == "application/json; charset=utf-8"

    def test_prepare_request_message_hook(self, token, message):
        _, body, headers = sansio.prepare_request(
            "https://hooks.slack.com/abcdefg", message, {}, {}, token
        )
        assert isinstance(body, str)
        data = json.loads(body)
        # For webhooks the attachments stay a list inside the JSON document.
        assert isinstance(data.get("attachments", []), list)
        assert "Authorization" in headers
        assert "Content-type" in headers
        assert headers["Content-type"] == "application/json; charset=utf-8"

    # Per-request headers win over global headers on key conflicts.
    @pytest.mark.parametrize(
        "headers,global_headers,result",
        (
            ({"foo": "bar", "py": "3.7"}, {}, {"foo": "bar", "py": "3.7"}),
            (
                {"foo": "bar", "py": "3.7"},
                {"sans": "I/O"},
                {"foo": "bar", "py": "3.7", "sans": "I/O"},
            ),
            (
                {"foo": "bar", "py": "3.7"},
                {"foo": "baz", "sans": "I/O"},
                {"foo": "bar", "py": "3.7", "sans": "I/O"},
            ),
        ),
    )
    def test_prepare_request_headers(self, headers, global_headers, result):
        _, _, headers = sansio.prepare_request("", {}, headers, global_headers, "")
        assert headers == result

    def test_find_iteration(self):
        # Method enum values carry (url, itermode, iterkey).
        itermode, iterkey = sansio.find_iteration(methods.CHANNELS_LIST)
        assert itermode == methods.CHANNELS_LIST.value[1]
        assert iterkey == methods.CHANNELS_LIST.value[2]

    def test_find_iteration_custom_itermode(self):
        itermode, iterkey = sansio.find_iteration(
            methods.CHANNELS_LIST, itermode="timeline"
        )
        assert itermode == "timeline"
        assert iterkey == methods.CHANNELS_LIST.value[2]

    def test_find_iteration_custom_iterkey(self):
        itermode, iterkey = sansio.find_iteration(
            methods.CHANNELS_LIST, iterkey="users"
        )
        assert itermode == methods.CHANNELS_LIST.value[1]
        assert iterkey == "users"

    def test_find_iteration_not_found(self):
        with pytest.raises(ValueError):
            _, _ = sansio.find_iteration("")

    def test_find_iteration_wrong_mode(self):
        with pytest.raises(ValueError):
            _, _ = sansio.find_iteration("", itermode="python", iterkey="users")

    def test_prepare_iter_request(self):
        data, iterkey, itermode = sansio.prepare_iter_request(methods.CHANNELS_LIST, {})
        assert data == {"limit": 200}
        assert itermode == methods.CHANNELS_LIST.value[1]
        assert iterkey == methods.CHANNELS_LIST.value[2]

    # NOTE(review): identical body to test_prepare_iter_request above --
    # presumably meant to omit/override iterkey; confirm intent.
    def test_prepare_iter_request_no_iterkey(self):
        data, iterkey, itermode = sansio.prepare_iter_request(methods.CHANNELS_LIST, {})
        assert data == {"limit": 200}
        assert itermode == methods.CHANNELS_LIST.value[1]
        assert iterkey == methods.CHANNELS_LIST.value[2]

    def test_prepare_iter_request_cursor(self):
        # Cursor pagination uses the 'limit'/'cursor' parameter names.
        data1, _, _ = sansio.prepare_iter_request(
            "", {}, itermode="cursor", iterkey="channels", itervalue="abcdefg"
        )
        assert data1 == {"limit": 200, "cursor": "abcdefg"}

        data2, _, _ = sansio.prepare_iter_request(
            "",
            {},
            itermode="cursor",
            itervalue="abcdefg",
            iterkey="channels",
            limit=300,
        )
        assert data2 == {"limit": 300, "cursor": "abcdefg"}

    def test_prepare_iter_request_page(self):
        # Page pagination uses the 'count'/'page' parameter names.
        data1, _, _ = sansio.prepare_iter_request(
            "", {}, itermode="page", iterkey="channels", itervalue="abcdefg"
        )
        assert data1 == {"count": 200, "page": "abcdefg"}

        data2, _, _ = sansio.prepare_iter_request(
            "", {}, itermode="page", itervalue="abcdefg", iterkey="channels", limit=300
        )
        assert data2 == {"count": 300, "page": "abcdefg"}

    def test_prepare_iter_request_timeline(self):
        # Timeline pagination uses the 'count'/'latest' parameter names.
        data1, _, _ = sansio.prepare_iter_request(
            "", {}, itermode="timeline", iterkey="channels", itervalue="abcdefg"
        )
        assert data1 == {"count": 200, "latest": "abcdefg"}

        data2, _, _ = sansio.prepare_iter_request(
            "",
            {},
            itermode="timeline",
            itervalue="abcdefg",
            iterkey="channels",
            limit=300,
        )
        assert data2 == {"count": 300, "latest": "abcdefg"}
class TestResponse:
    """Tests for ``slack.sansio`` response handling: HTTP status checks,
    Slack API error detection, body decoding, pagination-cursor decoding and
    request signature validation."""

    def test_raise_for_status_200(self):
        try:
            sansio.raise_for_status(200, {}, {})
        except Exception as exc:
            raise pytest.fail("RAISE {}".format(exc))

    def test_raise_for_status_400(self):
        with pytest.raises(exceptions.HTTPException):
            sansio.raise_for_status(400, {}, {})

    def test_raise_for_status_400_httpexception(self):
        # The raised exception must carry the status, headers and body.
        with pytest.raises(exceptions.HTTPException) as exc:
            sansio.raise_for_status(
                400, {"test-header": "hello"}, {"test-data": "world"}
            )
        assert exc.type == exceptions.HTTPException
        assert exc.value.status == 400
        assert exc.value.headers == {"test-header": "hello"}
        assert exc.value.data == {"test-data": "world"}

    def test_raise_for_status_429(self):
        # No Retry-After header: default retry delay of 1 second.
        with pytest.raises(exceptions.RateLimited) as exc:
            sansio.raise_for_status(429, {}, {})
        assert exc.type == exceptions.RateLimited
        assert exc.value.retry_after == 1

    def test_raise_for_status_429_headers(self):
        headers = {"Retry-After": "10"}
        with pytest.raises(exceptions.RateLimited) as exc:
            sansio.raise_for_status(429, headers, {})
        assert exc.type == exceptions.RateLimited
        assert exc.value.retry_after == 10

    def test_raise_for_status_429_wrong_headers(self):
        # An unparsable Retry-After falls back to the 1 second default.
        headers = {"Retry-After": "aa"}
        with pytest.raises(exceptions.RateLimited) as exc:
            sansio.raise_for_status(429, headers, {})
        assert exc.type == exceptions.RateLimited
        assert exc.value.retry_after == 1

    def test_raise_for_api_error_ok(self):
        try:
            sansio.raise_for_api_error({}, {"ok": True})
        except Exception as exc:
            raise pytest.fail("RAISE {}".format(exc))

    def test_raise_for_api_error_nok(self):
        # Missing 'error' key is reported as 'unknow_error'.
        data = {"ok": False}
        headers = {"test-header": "hello"}
        with pytest.raises(exceptions.SlackAPIError) as exc:
            sansio.raise_for_api_error(headers, data)
        assert exc.type == exceptions.SlackAPIError
        assert exc.value.headers == {"test-header": "hello"}
        assert exc.value.data == {"ok": False}
        assert exc.value.error == "unknow_error"

    def test_raise_for_api_error_nok_with_error(self):
        data = {"ok": False, "error": "test_error"}
        with pytest.raises(exceptions.SlackAPIError) as exc:
            sansio.raise_for_api_error({}, data)
        assert exc.type == exceptions.SlackAPIError
        assert exc.value.error == "test_error"

    def test_raise_for_api_error_warning(self, caplog):
        # API warnings do not raise; they are logged at WARNING level.
        caplog.set_level(logging.WARNING)
        data = {"ok": True, "warning": "test warning"}
        sansio.raise_for_api_error({}, data)
        assert len(caplog.records) == 1
        assert caplog.records[0].msg == "Slack API WARNING: %s"
        assert caplog.records[0].args == ("test warning",)

    def test_decode_body(self):
        body = b"hello world"
        decoded_body = sansio.decode_body({}, body)
        assert decoded_body == "hello world"

    def test_decode_body_json(self):
        body = b'{"test-string":"hello","test-bool":true}'
        headers = {"content-type": "application/json; charset=utf-8"}
        decoded_body = sansio.decode_body(headers, body)
        assert decoded_body == {"test-string": "hello", "test-bool": True}

    def test_decode_body_json_no_charset(self):
        body = b'{"test-string":"hello","test-bool":true}'
        headers = {"content-type": "application/json"}
        decoded_body = sansio.decode_body(headers, body)
        assert decoded_body == {"test-string": "hello", "test-bool": True}

    def test_decode_response(self):
        headers = {"content-type": "application/json; charset=utf-8"}
        data = b'{"ok": true, "hello": "world"}'
        try:
            data = sansio.decode_response(200, headers, data)
        except Exception as exc:
            pytest.fail("RAISE {}".format(exc))
        else:
            assert data == {"ok": True, "hello": "world"}

    def test_decode_iter_request_cursor(self):
        data = {"response_metadata": {"next_cursor": "abcdefg"}}
        cursor = sansio.decode_iter_request(data)
        assert cursor == "abcdefg"

    def test_decode_iter_request_paging(self):
        # Page-style pagination: the next page index is current page + 1.
        data = {"paging": {"page": 2, "pages": 4}}
        page = sansio.decode_iter_request(data)
        assert page == 3

    def test_decode_iter_request_timeline(self):
        # Timeline pagination: the next 'latest' is the oldest message ts.
        timestamp = time.time()
        latest = timestamp - 1000
        data = {"has_more": True, "latest": timestamp, "messages": [{"ts": latest}]}
        next_ = sansio.decode_iter_request(data)
        assert next_ == latest

    # time.time is frozen so the pre-computed HMAC signature stays valid.
    @mock.patch("time.time", mock.MagicMock(return_value=1534688291))
    def test_validate_request_signature_ok(self):
        headers = {
            "X-Slack-Request-Timestamp": "1534688291",
            "X-Slack-Signature": "v0=ac720e09cb1ecb0baa17bea5638fa3d11fc177576dd364e05475d6dbc620c696",
        }
        body = """{"token":"abcdefghijkl","team_id":"T000000","api_app_id":"A000000","event":{},"type":"event_callback","authed_teams":["T000000"],"event_id":"AAAAAAA","event_time":1111111111}"""

        sansio.validate_request_signature(
            body=body, headers=headers, signing_secret="mysupersecret"
        )

    @mock.patch("time.time", mock.MagicMock(return_value=1534688291))
    def test_validate_request_signature_nok(self):
        # Signature differs by the last hex digit from the valid one.
        headers = {
            "X-Slack-Request-Timestamp": "1534688291",
            "X-Slack-Signature": "v0=ac720e09cb1ecb0baa17bea5638fa3d11fc177576dd364e05475d6dbc620c697",
        }
        body = """{"token":"abcdefghijkl","team_id":"T000000","api_app_id":"A000000","event":{},"type":"event_callback","authed_teams":["T000000"],"event_id":"AAAAAAA","event_time":1111111111}"""

        with pytest.raises(exceptions.InvalidSlackSignature):
            sansio.validate_request_signature(
                body=body, headers=headers, signing_secret="mysupersecret"
            )

    def test_validate_request_signature_too_old(self):
        # time.time is NOT mocked here, so the 2018 timestamp is stale.
        headers = {
            "X-Slack-Request-Timestamp": "1534688291",
            "X-Slack-Signature": "v0=ac720e09cb1ecb0baa17bea5638fa3d11fc177576dd364e05475d6dbc620c696",
        }
        body = """{"token":"abcdefghijkl","team_id":"T000000","api_app_id":"A000000","event":{},"type":"event_callback","authed_teams":["T000000"],"event_id":"AAAAAAA","event_time":1111111111}"""

        with pytest.raises(exceptions.InvalidTimestamp):
            sansio.validate_request_signature(
                body=body, headers=headers, signing_secret="mysupersecret"
            )
class TestIncomingEvent:
    """Tests for incoming-event filtering; ``event`` is a pytest fixture
    (``indirect=True`` selects the fixture variant by name)."""

    # Events authored by the client's own bot id must be discarded.
    @pytest.mark.parametrize("event", ("bot", "bot_edit"), indirect=True)
    def test_discard_event(self, event):
        assert sansio.discard_event(event, "B0AAA0A00") is True

    def test_not_discard_event(self, event):
        assert sansio.discard_event(event, "B0AAA0A01") is False

    def test_no_need_reconnect(self, event):
        assert sansio.need_reconnect(event) is False
| 37.299517 | 195 | 0.6018 |
ebb06ff0f391f5031ef0a0cb866be0985325b93d | 430 | py | Python | usr/lib/lit/games/guess.py | KittenTechnologies/littleos | cb21ece4cfc131d780c83755816a95180b2f4d82 | [
"MIT"
] | 5 | 2021-04-09T09:21:29.000Z | 2021-08-29T17:50:28.000Z | usr/lib/lit/games/guess.py | KittenTechnologies/littleos | cb21ece4cfc131d780c83755816a95180b2f4d82 | [
"MIT"
] | null | null | null | usr/lib/lit/games/guess.py | KittenTechnologies/littleos | cb21ece4cfc131d780c83755816a95180b2f4d82 | [
"MIT"
] | null | null | null | #! /bin/python3
from random import randint

# Simple number-guessing game: pick a random target in [1, 20] and prompt
# until the player guesses it, counting the wrong attempts.
number = randint(1,20)
guesses = 0  # counts only wrong guesses (incremented after each miss)
print("Guess The Number!")
print("Made by Kitten Technologies in Python 3.")
guess = int(input("I'm thinking of a number from 1 to 20. What is it? "))
while number != guess:
    if guess > number:
        print("Too high!")
    else:
        print("Too small!")
    guess = int(input("Try again..."))
    guesses = guesses + 1
print("You win from", guesses, "wrong guesses!")
aeb0d00b448ac40894dd6b55e5760abd0b47a217 | 8,574 | py | Python | pyDRESCALk/data_io.py | lanl/pyDRESCALk | 25af3796442e5ec87fb54caa4344e8871a2abd15 | [
"BSD-3-Clause"
] | 2 | 2021-12-04T05:20:13.000Z | 2021-12-06T17:30:28.000Z | pyDRESCALk/data_io.py | lanl/pyDRESCALk | 25af3796442e5ec87fb54caa4344e8871a2abd15 | [
"BSD-3-Clause"
] | null | null | null | pyDRESCALk/data_io.py | lanl/pyDRESCALk | 25af3796442e5ec87fb54caa4344e8871a2abd15 | [
"BSD-3-Clause"
] | 2 | 2021-12-04T06:23:21.000Z | 2021-12-08T20:53:12.000Z | # @author: Manish Bhattarai
import glob
import os
import h5py
import pandas as pd
from scipy.io import loadmat
from .utils import *
import pickle
class data_read():
    r"""Class for reading data.

    Parameters:
        args (class): Class which comprises following attributes
        fpath (str): Directory path of file to be read
        pgrid (tuple): Cartesian grid configuration
        ftype (str): Type of data to read(mat/npy/csv/folder)
        fname (str): Name of the file to read
        comm (object): comm object for distributed read
    """

    @comm_timing()
    def __init__(self, args):
        self.args = args
        self.fpath = args.fpath
        self.pgrid = [args.p_r, args.p_c]
        self.ftype = args.ftype
        self.fname = args.fname
        self.comm = args.comm1
        self.rank = self.comm.rank
        self.data = 0
        # Optional .mat variable name; defaults to None (-> 'X') if unset.
        self.key = var_init(self.args, 'key', default=None)
        if self.ftype == 'folder':
            # Pre-split data: each rank reads its own '<fname><rank>.npy' file.
            self.file_path = self.fpath + self.fname + str(self.comm.rank) + '.npy'
        else:
            self.file_path = self.fpath + self.fname + '.' + self.ftype

    @comm_timing()
    def read(self):
        r"""Data read function"""
        return self.read_dat()

    @comm_timing()
    def read_file_npy(self):
        r"""Numpy data read function"""
        self.data = np.load(self.file_path)

    @comm_timing()
    def read_file_csv(self):
        r"""CSV data read function"""
        self.data = pd.read_csv(self.file_path, header=None).values

    @comm_timing()
    def read_file_pickle(self):
        r"""Pickle data read function"""
        with open(self.file_path, "rb") as fh:
            self.data = pickle.load(fh)

    @comm_timing()
    def read_file_mat(self):
        r"""mat file read function"""
        if self.key is not None:
            self.data = loadmat(self.file_path)[self.key]
        else:
            # Fix: the original indexed with the undefined name ``X`` (NameError).
            # Fall back to the conventional variable name 'X' in the .mat file.
            self.data = loadmat(self.file_path)['X']

    @comm_timing()
    def data_partition(self):
        r"""
        This function divides the input matrix into chunks as specified by grid configuration.

        Return n array of shape (nrows_i, ncols_i) where i is the index of each chunk.
        \Sum_i^n ( nrows_i * ncols_i ) = arr.size

        If arr is a 2D array, the returned array should look like n subblocks with
        each subblock preserving the "physical" layout of arr.
        """
        try:
            shape = self.data.shape
        except AttributeError:
            # self.data is a list/tuple of arrays; all share the first one's shape.
            shape = self.data[0].shape
        dtr_blk_shp = determine_block_params(self.rank, self.pgrid, shape)
        blk_indices = dtr_blk_shp.determine_block_index_range_asymm()
        (r0, c0), (r1, c1) = blk_indices
        try:
            self.data = self.data[r0:r1 + 1, c0:c1 + 1]
        except TypeError:
            # A list of arrays cannot be 2-D sliced directly: slice each member.
            self.data = [d[r0:r1 + 1, c0:c1 + 1] for d in self.data]

    @comm_timing()
    def save_data_to_file(self, fpath):
        r"""This function saves the splitted data to numpy array indexed with chunk number"""
        # Fix: the rank must be converted to str before concatenation
        # (the original raised TypeError: can only concatenate str to str).
        fname = fpath + 'A_' + str(self.comm.rank) + '.npy'
        np.save(fname, self.data)

    @comm_timing()
    def read_dat(self):
        r"""Function for reading the data and split into chunks to be read by each MPI rank"""
        if self.ftype == 'npy':
            self.read_file_npy()
            self.data_partition()
        elif self.ftype == 'csv' or self.ftype == 'txt':
            self.read_file_csv()
            self.data_partition()
        elif self.ftype == 'mat':
            self.read_file_mat()
            self.data_partition()
        elif self.ftype == 'folder':
            # Data is already split per rank on disk; no partitioning needed.
            self.read_file_npy()
        elif self.ftype == 'p' or self.ftype == 'pickle':
            self.read_file_pickle()
            self.data_partition()
        return self.data
class split_files_save():
    r"""Rank 0 based data read, split and save.

    Parameters:
        data (ndarray): full matrix to be split
        pgrid (tuple): Cartesian grid configuration (p_r, p_c)
        fpath (str): directory where the chunk files are written
    """

    @comm_timing()
    def __init__(self, data, pgrid, fpath):
        self.data = data
        self.pgrid = pgrid
        self.p_r = pgrid[0]
        self.p_c = pgrid[1]
        self.fpath = fpath

    @comm_timing()
    def split_files(self):
        r"""Compute the index range for each block, partition the data into
        per-rank chunks and return the list of chunks."""
        dtr_blk_idx = [determine_block_params(rank, self.pgrid, self.data.shape).determine_block_index_range_asymm() for
                       rank in range(np.product(self.pgrid))]
        self.split = [self.data[i[0][0]:i[1][0] + 1, i[0][1]:i[1][1] + 1] for i in dtr_blk_idx]
        # Fix: the original returned None, so callers assigning the result
        # (e.g. save_data_to_file) lost the computed chunks.
        return self.split

    @comm_timing()
    def save_data_to_file(self):
        r"""Save each chunk to 'A_<rank>.npy' under fpath.

        Fixes vs. the original: the chunks list is no longer clobbered with
        ``None``, each file now contains its own chunk instead of the full
        matrix, and the off-by-one index (``self.split[s - 1]``) is gone.
        """
        self.split_files()
        for rank, chunk in enumerate(self.split):
            np.save(self.fpath + 'A_' + str(rank) + '.npy', chunk)
class data_write():
    r"""Class for writing data/results.

    Parameters:
        args (class): class which comprises following attributes
        results_paths (str): Directory path of file to write
        pgrid (tuple): Cartesian grid configuration
        comm (object): comm object for distributed write
    """

    @comm_timing()
    def __init__(self, args):
        self.p_r, self.p_c = args.p_r, args.p_c
        self.pgrid = [self.p_r, self.p_c]
        self.comm = args.comm1
        self.params = args
        self.fpath = self.params.results_paths
        self.rank = self.comm.rank

    @comm_timing()
    def create_folder_dir(self, fpath):
        r"""Create the directory (and missing parents) if not present."""
        # Fix: replaces 'try: os.mkdir ... except: pass', which silently
        # swallowed real errors (e.g. permissions) besides "already exists".
        os.makedirs(fpath, exist_ok=True)

    @comm_timing()
    def save_factors(self, factors, reg=False):
        r"""Save the A and R factors (regularized variants when ``reg``).

        Each first-rank process of a grid row writes its A block; rank 0
        writes the (replicated) R factor.
        """
        self.create_folder_dir(self.fpath)
        if reg:
            a_factors_path = self.fpath + 'A_reg_factors/'
            r_factors_path = self.fpath + 'R_reg_factors/'
        else:
            a_factors_path = self.fpath + 'A_factors/'
            r_factors_path = self.fpath + 'R_factors/'
        self.create_folder_dir(a_factors_path)
        self.create_folder_dir(r_factors_path)
        if self.rank % self.p_r == 0:
            np.save(a_factors_path + 'A_' + str(self.rank // self.p_r) + '.npy', factors[0])
        if self.rank == 0:
            np.save(r_factors_path + 'R.npy', factors[1])

    @comm_timing()
    def save_cluster_results(self, params):
        r"""Save cluster results to a h5 file with rank 0"""
        if self.rank == 0:
            with h5py.File(self.fpath + 'results.h5', 'w') as hf:
                hf.create_dataset('clusterSilhouetteCoefficients', data=params['clusterSilhouetteCoefficients'])
                hf.create_dataset('avgSilhouetteCoefficients', data=params['avgSilhouetteCoefficients'])
                hf.create_dataset('L_errDist', data=params['L_errDist'])
                hf.create_dataset('avgErr', data=params['avgErr'])
                hf.create_dataset('ErrTol', data=params['recon_err'])
class read_factors():
    r"""Class for reading saved factors.

    Args:
        factors_path (str): Directory path of factors to read from
        pgrid (tuple): Cartesian grid configuration
    """

    @comm_timing()
    def __init__(self, factors_path, pgrid):
        self.factors_path = factors_path
        self.W_path = self.factors_path + 'A_factors/'
        self.H_path = self.factors_path + 'R_factors/'
        self.p_grid = pgrid
        self.load_factors()

    @comm_timing()
    def custom_read_npy(self, fpath):
        r"""Read a single numpy file"""
        data = np.load(fpath)
        return data

    @staticmethod
    def _chunk_sort_key(path):
        r"""Sort key ordering chunk files by their numeric rank suffix
        ('A_10.npy' after 'A_2.npy'); non-numeric names sort after, by name."""
        stem = os.path.splitext(os.path.basename(path))[0]
        suffix = stem.rsplit('_', 1)[-1]
        return (0, int(suffix)) if suffix.isdigit() else (1, suffix)

    @comm_timing()
    def read_factor(self, fpath):
        """Read factors as chunks (in rank order) and return (data, count)"""
        files = glob.glob(fpath + '/*')
        data = []
        if len(files) == 1:
            data = self.custom_read_npy(files[0])
        else:
            # Fix: np.sort ordered file names lexicographically, so for >= 10
            # ranks 'A_10.npy' was stacked before 'A_2.npy', corrupting the
            # reassembled factor. Sort numerically on the rank suffix instead.
            for file in sorted(files, key=self._chunk_sort_key):
                data.append(self.custom_read_npy(file))
        return data, len(files)

    @comm_timing()
    def load_factors(self):
        r"""Load the final stacked factors for visualization"""
        W_data, ct_W = self.read_factor(self.W_path)
        H_data, ct_H = self.read_factor(self.H_path)
        if ct_W > 1: W_data = np.vstack((W_data))
        return W_data, H_data
| 34.712551 | 150 | 0.597854 |
29a90de32ba84a853361d21ddaab1df690c820e9 | 144 | py | Python | stimuli/Python/one_file_per_item/jap/50_# math_if 14.py | ALFA-group/neural_program_comprehension | 0253911f376cf282af5a5627e38e0a591ad38860 | [
"MIT"
] | 6 | 2020-04-24T08:16:51.000Z | 2021-11-01T09:50:46.000Z | stimuli/Python/one_file_per_item/jap/50_# math_if 14.py | ALFA-group/neural_program_comprehension | 0253911f376cf282af5a5627e38e0a591ad38860 | [
"MIT"
] | null | null | null | stimuli/Python/one_file_per_item/jap/50_# math_if 14.py | ALFA-group/neural_program_comprehension | 0253911f376cf282af5a5627e38e0a591ad38860 | [
"MIT"
] | 4 | 2021-02-17T20:21:31.000Z | 2022-02-14T12:43:23.000Z | yosoku = 8
jissai = 10
gosa = 3
if (yosoku - jissai < gosa) and (yosoku - jissai > -1*gosa):
print(yosoku - jissai)
else:
print(gosa)
| 14.4 | 60 | 0.611111 |
daa18a4a285623458cf85b16493f1b4d6ecfe9f1 | 3,197 | py | Python | members/admin.py | leonrenkema/makerspaceleiden-crm | 36ea20f5b9e263e8f30b1831ae4a2b1d5b926d3c | [
"Apache-2.0"
] | 5 | 2019-03-12T21:38:32.000Z | 2021-11-06T15:26:56.000Z | members/admin.py | leonrenkema/makerspaceleiden-crm | 36ea20f5b9e263e8f30b1831ae4a2b1d5b926d3c | [
"Apache-2.0"
] | 33 | 2019-01-21T15:54:50.000Z | 2021-05-18T17:54:52.000Z | members/admin.py | leonrenkema/makerspaceleiden-crm | 36ea20f5b9e263e8f30b1831ae4a2b1d5b926d3c | [
"Apache-2.0"
] | 5 | 2019-01-21T15:47:26.000Z | 2021-09-22T07:14:34.000Z | from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.auth.forms import UserChangeForm
from search_admin_autocomplete.admin import SearchAutoCompleteAdmin
from import_export.admin import ImportExportModelAdmin
from import_export import resources
from import_export import resources
from simple_history.admin import SimpleHistoryAdmin
from django.utils.translation import ugettext_lazy as _
from .models import Tag, User, AuditRecord
class UserResource(resources.ModelResource):
    """django-import-export mapping for User rows.

    Rows are matched on (first_name, last_name, email) during import, so
    re-importing an existing member updates it rather than duplicating it.
    """
    class Meta:
        model = User
        fields = (
            "first_name",
            "last_name",
            "email",
            "is_active",
            "date_joined",
            "form_on_file",
            "email_confirmed",
        )
        # Composite natural key used to identify rows on import.
        import_id_fields = ["first_name", "last_name", "email"]
class TagResource(resources.ModelResource):
    """django-import-export mapping for Tag rows, keyed on (owner, tag)."""
    class Meta:
        model = Tag
        fields = ["owner", "tag"]
        import_id_fields = ["owner", "tag"]
class UserAdmin(
    ImportExportModelAdmin, SearchAutoCompleteAdmin, BaseUserAdmin, SimpleHistoryAdmin
):
    """Admin for the email-based User model.

    Combines import/export, search autocomplete and simple-history on top of
    Django's stock UserAdmin; the fieldsets are re-declared because this model
    authenticates by email rather than username.
    """
    resource_class = UserResource
    # Layout of the change form, grouped by concern.
    fieldsets = (
        (None, {"fields": ("email", "password")}),
        (
            _("Membership"),
            {
                "fields": (
                    "first_name",
                    "last_name",
                    "image",
                    "phone_number",
                    "form_on_file",
                    "email_confirmed",
                )
            },
        ),
        (_("BOTs"), {"fields": ("telegram_user_id", "uses_signal")}),
        (
            _("Permissions"),
            {
                "fields": (
                    "is_active",
                    "is_staff",
                    "is_superuser",
                    "groups",
                    "user_permissions",
                )
            },
        ),
        (_("Important dates"), {"fields": ("last_login", "date_joined")}),
    )
    # Layout of the "add user" form.
    add_fieldsets = (
        (
            None,
            {
                "classes": ("wide",),
                "fields": ("email", "password1", "password2"),
            },
        ),
    )
    list_display = (
        "email",
        "first_name",
        "last_name",
        "form_on_file",
        "last_login",
        "date_joined",
    )
    search_fields = ["email", "first_name", "last_name"]
    ordering = ("email", "first_name", "last_name")
    # NOTE(review): import_id_fields on the admin class looks unused by
    # django-import-export (it normally lives on the resource Meta) —
    # confirm before relying on it.
    import_id_fields = ()  # 'email', 'first_name', 'last_name', 'is_staff', 'form_on_file', 'last_login','date_joined')
# admin.site.register(User,ImportExportModelAdmin)
admin.site.register(User, UserAdmin)
class TagAdmin(ImportExportModelAdmin, SimpleHistoryAdmin, SearchAutoCompleteAdmin):
    """Admin for access tags with import/export, history and search support."""
    list_display = ("tag", "owner", "last_used", "description")
    resource_class = TagResource
    # Search by the tag value itself or by the owning member's identity.
    search_fields = ["tag", "owner__first_name", "owner__last_name", "owner__email"]
admin.site.register(Tag, TagAdmin)
class AuditRecordResource(resources.ModelResource):
    """django-import-export mapping for AuditRecord rows."""
    class Meta:
        model = AuditRecord


class AuditRecordAdmin(ImportExportModelAdmin, SimpleHistoryAdmin):
    """Admin for audit records with import/export and history support."""
    list_display = ("user", "action", "recorded")
    # Bug fix: resource_class must be a ModelResource subclass; it was
    # previously set to the AuditRecord *model* itself, which breaks
    # django-import-export's import/export views for this admin.
    resource_class = AuditRecordResource


admin.site.register(AuditRecord, AuditRecordAdmin)
| 28.292035 | 120 | 0.568971 |
295659ee5df4e1e04d759fdb44d1c2937ae4d927 | 829 | py | Python | chemml/wrapper/sklearn_skl/train_test_split_wrapper.py | iamchetry/DataChallenge-Fall2021 | fa7748c9ea2f3c0f6bde8d0b094fc75463e28f33 | [
"BSD-3-Clause"
] | 108 | 2018-03-23T20:06:03.000Z | 2022-01-06T19:32:46.000Z | chemml/wrapper/sklearn_skl/train_test_split_wrapper.py | hachmannlab/ChemML | 42b152579872a57c834884596f700c76b9320280 | [
"BSD-3-Clause"
] | 18 | 2019-08-09T21:16:14.000Z | 2022-02-14T21:52:06.000Z | chemml/wrapper/sklearn_skl/train_test_split_wrapper.py | hachmannlab/ChemML | 42b152579872a57c834884596f700c76b9320280 | [
"BSD-3-Clause"
] | 28 | 2018-04-28T17:07:33.000Z | 2022-02-28T07:22:56.000Z | """
A wrapper for the sklearn.model_selection.train_test_split
"""
from chemml.wrapper.interfaces import evaluate_inputs
def train_test_split(block, stack):
    """ChemML wrapper around :func:`sklearn.model_selection.train_test_split`.

    Evaluates the block's declared inputs against the stack, runs the
    sklearn splitter, and returns the splits keyed as
    ``train1, test1, train2, test2, ...`` — one train/test pair per
    positional input array.
    """
    inputs = evaluate_inputs(block['inputs'], stack)
    from sklearn.model_selection import train_test_split
    splits = train_test_split(*inputs['args'], **inputs['kwargs'])
    # sklearn returns exactly one (train, test) pair per input array.
    assert len(splits) == 2 * len(inputs['args'])
    output_dict = {}
    for position, split in enumerate(splits):
        label = 'train' if position % 2 == 0 else 'test'
        output_dict["%s%i" % (label, position // 2 + 1)] = split
    return output_dict
180a75c23c4afff27f24063adba0095ff9a8a1f9 | 724 | py | Python | test/test_change_group.py | Elissara/python_training | 91aecbd1b1442ba6a279a656fcee0e0b09c0cffb | [
"Apache-2.0"
] | null | null | null | test/test_change_group.py | Elissara/python_training | 91aecbd1b1442ba6a279a656fcee0e0b09c0cffb | [
"Apache-2.0"
] | null | null | null | test/test_change_group.py | Elissara/python_training | 91aecbd1b1442ba6a279a656fcee0e0b09c0cffb | [
"Apache-2.0"
] | null | null | null | from model.group import Group
import random
def test_change_some_group(app, db, check_ui):
    """Modify a random group and verify the change landed in the DB (and UI)."""
    # Ensure at least one group exists to modify.
    if len(db.get_group_list()) == 0:
        app.group.create(Group(name="test"))
    old_groups = db.get_group_list()
    target = random.choice(old_groups)
    group = Group(name="New_name", header="New_header", footer="New_footer")
    app.group.change_by_id(group, target.id)
    new_groups = db.get_group_list()
    # The total number of groups must be unchanged by an edit.
    assert len(old_groups) == app.group.count()
    # Bug fix: previously old_groups was re-read from the DB *after* the
    # edit, so the sorted-list assertion compared the post-change list with
    # itself and could never fail. Build the expected list from the
    # pre-change snapshot with the edited group substituted in instead.
    group.id = target.id
    expected = [group if g.id == target.id else g for g in old_groups]
    assert sorted(expected, key=Group.id_or_max) == sorted(new_groups, key=Group.id_or_max)
    if check_ui:
        assert sorted(new_groups, key=Group.id_or_max) == sorted(app.group.get_group_list(), key=Group.id_or_max)
| 38.105263 | 113 | 0.714088 |
eddbcf67726d6332249778522fe2ab284cd3e5a1 | 8,519 | py | Python | main.py | mfleader/doe | 2c3a0cec2710f4bcc68a1d326ef3989ce764a479 | [
"Apache-2.0"
] | null | null | null | main.py | mfleader/doe | 2c3a0cec2710f4bcc68a1d326ef3989ce764a479 | [
"Apache-2.0"
] | null | null | null | main.py | mfleader/doe | 2c3a0cec2710f4bcc68a1d326ef3989ce764a479 | [
"Apache-2.0"
] | null | null | null | import time
from pprint import pprint
import asyncio
from functools import partial
import datetime as dt
import pytz
import typer
from kubernetes import client, config, watch
from kubernetes.dynamic import DynamicClient
from kubernetes.client.models.v1_job_status import V1JobStatus
from kubernetes.client.api.batch_v1_api import BatchV1Api
from kubernetes.client.models.v1_env_var import V1EnvVar
from kubernetes.client.models.v1_volume_mount import V1VolumeMount
import ryaml
import anyio
import doe
# Typer CLI application; commands are registered via @app.command().
app = typer.Typer()
# Shared name for the Job created, patched and deleted by the helpers below.
JOB_NAME = "dnsperf-test"
# Inverse of V1JobStatus.attribute_map: maps serialized (camelCase) status
# keys back to python attribute names, used to rebuild a V1JobStatus from
# raw watch events.
k8s_job_attribute_map = {
    val: key for key, val in V1JobStatus.attribute_map.items()
}
def create_job_object(job_args, es, es_index, cluster_queries):
    """Build the V1Job that runs one snafu dnsperf trial.

    ``job_args`` are the serialized CLI arguments appended to run_snafu.py;
    ``es`` and ``es_index`` are exported into the container environment.
    NOTE(review): ``cluster_queries`` is unused here — the queries file is
    mounted from the 'dnsperf' ConfigMap instead; confirm the parameter can
    be dropped.
    """
    # Configure the pod template container that runs the benchmark.
    container = client.V1Container(
        name='container',
        image='quay.io/mleader/dnsdebug:latest',
        image_pull_policy = 'Always',
        command=["/bin/sh", "-c"],
        args=[' '.join(("python", "snafu/run_snafu.py", "-v", "--tool", "dnsperf", *job_args))],
        env=[V1EnvVar(name='es', value=es), V1EnvVar(name='es_index', value=es_index)],
        volume_mounts=[V1VolumeMount(name='config', mount_path='/opt/dns', read_only=True)]
    )
    # Pod template: never restart, mount the query list from the ConfigMap.
    template = client.V1PodTemplateSpec(
        # metadata=client.V1ObjectMeta(labels={"app": JOB_NAME}),
        spec=client.V1PodSpec(
            restart_policy="Never", containers=[container],
            volumes = [
                client.V1Volume(
                    name = 'config',
                    config_map = {
                        'name': 'dnsperf',
                    }
                )
            ]
        )
    )
    # Job spec: retry the pod up to 4 times before marking the job failed.
    spec = client.V1JobSpec(
        template=template,
        backoff_limit=4)
    # Instantiate the job object
    job = client.V1Job(
        api_version="batch/v1",
        kind="Job",
        metadata=client.V1ObjectMeta(name=JOB_NAME, namespace='dnsperf-test'),
        spec=spec)
    return job
def create_job(api_instance, job):
    """Submit ``job`` to the dnsperf-test namespace and log the result."""
    response = api_instance.create_namespaced_job(
        namespace="dnsperf-test",
        body=job)
    print("Job created. status='%s'" % str(response.status))
def update_job(api_instance, job):
    """Point the job's container at the latest image and patch it in place."""
    latest_image = 'quay.io/mleader/dnsdebug:latest'
    job.spec.template.spec.containers[0].image = latest_image
    response = api_instance.patch_namespaced_job(
        name=JOB_NAME,
        namespace="dnsperf-test",
        body=job)
    print("Job updated. status='%s'" % str(response.status))
def delete_job(api_instance):
    """Delete the dnsperf job, foreground-propagating deletion to its pods."""
    delete_opts = client.V1DeleteOptions(
        propagation_policy='Foreground',
        grace_period_seconds=5)
    response = api_instance.delete_namespaced_job(
        name=JOB_NAME,
        namespace="dnsperf-test",
        body=delete_opts)
    print("Job deleted. status='%s'" % str(response.status))
def wait_on_job(trial, api_client, es, es_index, sleep_t, cluster_queries):
    """Run one dnsperf trial as a k8s Job and block until it succeeds.

    Creates the Job, watches its status events until ``succeeded`` is set,
    then (in all cases, via finally) deletes the Job and sleeps ``sleep_t``
    seconds before returning.
    """
    trial_args = doe.serialize_command_args(trial)
    batch_v1 = BatchV1Api(api_client=api_client)
    job = create_job_object(trial_args, es=es, es_index=es_index, cluster_queries=cluster_queries)
    api_dynamic = DynamicClient(api_client)
    job_resources = api_dynamic.resources.get(api_version='v1', kind='Job')
    watcher = watch.Watch()
    # Log the full job manifest before submitting it.
    print(ryaml.dumps(api_client.sanitize_for_serialization(job)))
    create_job(batch_v1, job)
    # probably should be async
    try:
        for event in api_dynamic.watch(job_resources, namespace='dnsperf-test', watcher=watcher):
            # Rebuild a typed V1JobStatus from the raw (camelCase) event dict.
            j = V1JobStatus(**{k8s_job_attribute_map[key]: val for key,val in event['raw_object']['status'].items()})
            print('------------------------------------------------------')
            pprint(f'job condition: {j.conditions}')
            if j.succeeded:
                watcher.stop()
    finally:
        # probably should be async; cleanup happens even if the watch fails
        delete_job(batch_v1)
        time.sleep(sleep_t)
def cluster_queries(api_client):
    """Build a dnsperf query-file body with one A-record lookup per Service."""
    services = DynamicClient(api_client).resources.get(api_version='v1', kind='Service')
    query_lines = (
        f"{svc.metadata.name}.{svc.metadata.namespace}.svc.cluster.local A"
        for svc in services.get().items
    )
    return '\n'.join(query_lines)
def create_configmap_obj(cluster_queries):
    """Wrap the dnsperf query list in a ConfigMap object named 'dnsperf'."""
    return client.V1ConfigMap(
        api_version = 'v1',
        kind = "ConfigMap",
        metadata = {"name": "dnsperf"},
        # Mounted into the benchmark pod as /opt/dns/queries.txt.
        data = {"queries.txt": cluster_queries},
    )
def create_configmap(api_client, configmap):
    """Create the dnsperf ConfigMap in the dnsperf-test namespace."""
    cm_api = DynamicClient(api_client).resources.get(api_version='v1', kind='ConfigMap')
    result = cm_api.create(body=configmap, namespace='dnsperf-test')
    print(f'Configmap created {result.status}')
def delete_configmap(api_client):
    """Remove the dnsperf ConfigMap from the dnsperf-test namespace."""
    cm_api = DynamicClient(api_client).resources.get(api_version='v1', kind='ConfigMap')
    result = cm_api.delete(
        name='dnsperf',
        namespace='dnsperf-test')
    print(f"Configmap {result.status}")
async def _experiment(
        experiment_factor_levels_path: str,
        es: str,
        es_index: str,
        sdn_kubeconfig_path: str,
        # ovn_kubeconfig_path: str,
        sleep_t: int,
        block: int,
        replicate: int,
        measure_repetitions: int
):
    """Prepare the cluster and enumerate the trials of one experiment run.

    Builds the per-service DNS query list for the (SDN) cluster, publishes
    it as the 'dnsperf' ConfigMap, and loads the trial list from the factor
    levels file. NOTE(review): the trial-execution loop below is currently
    commented out, so trial_times/completed_trials are computed but unused
    and no jobs are actually launched — confirm this is intentional.
    """
    k8s_sdn_api = config.new_client_from_config(sdn_kubeconfig_path)
    # k8s_ovn_api = config.new_client_from_config(ovn_kubeconfig_path)
    # try:
    sdn_queries = cluster_queries(k8s_sdn_api)
    # ovn_queries = cluster_queries(k8s_ovn_api)
    sdn_cm = create_configmap_obj(sdn_queries)
    # ovn_cm = create_configmap_obj(ovn_queries)
    # cleanup old job and config
    # delete_configmap(k8s_sdn_api)
    # delete_configmap(k8s_ovn_api)
    # k8s_sdn_job_api = BatchV1Api(api_client=k8s_sdn_api)
    # delete_job(k8s_sdn_job_api)
    # k8s_ovn_job_api = BatchV1Api(api_client=k8s_ovn_api)
    # delete_job(k8s_ovn_job_api)
    create_configmap(k8s_sdn_api, sdn_cm)
    # create_configmap(k8s_ovn_api, ovn_cm)
    trial_times = []
    completed_trials = 0
    trials = [t for t in doe.main(factor_levels_filepath=experiment_factor_levels_path, block=block)]
    total_trials = len(trials)
    # for input_args in trials:
    #     input_args['repetitions'] = measure_repetitions
    #     input_args['replicate'] = replicate
    #     pprint(input_args)
    #     trial_start = dt.datetime.now()
    #     # if input_args['trial']['network_type'] == 'OpenShiftSDN':
    #     wait_on_job_api = partial(wait_on_job, api_client=k8s_sdn_api, cluster_queries=sdn_queries)
    #     # elif input_args['trial']['network_type'] == 'OVNKubernetes':
    #     #     wait_on_job_api = partial(wait_on_job, api_client=k8s_ovn_api, cluster_queries=ovn_queries)
    #     wait_on_job_api(input_args, es=es, es_index=es_index, sleep_t=sleep_t)
    #     trial_end = dt.datetime.now()
    #     completed_trials += 1
    #     trial_times.append((trial_end - trial_start))
    #     trial_time_mean = sum((trial_times), dt.timedelta()) / len(trial_times)
    #     remaining_expected_experiment_time = (total_trials - completed_trials) * trial_time_mean
    #     typer.echo(typer.style(f'Remaining expected experiment time: {remaining_expected_experiment_time}', fg=typer.colors.WHITE, bold=True))
    #     typer.echo(typer.style(f'Expected completion: {dt.datetime.now() + remaining_expected_experiment_time}', fg=typer.colors.BLUE))
    # delete_configmap(k8s_sdn_api)
    # delete_configmap(k8s_ovn_api)
@app.command()
def main(
        experiment_factor_levels_path: str = typer.Argument(...),
        es: str = typer.Option(..., envvar='ELASTICSEARCH_URL'),
        es_index: str = typer.Option('snafu-dnsperf'),
        sdn_kubeconfig_path: str = typer.Option(...),
        # ovn_kubeconfig_path: str = typer.Option(...),
        sleep_t: int = typer.Option(10),
        block: int = typer.Option(1),
        replicate: int = typer.Option(1, help="Experiment run index"),
        # NOTE(review): measure_repetitions has no int annotation, so typer
        # receives it untyped — confirm the intended coercion.
        measure_repetitions = typer.Option(1)
):
    # CLI entry point: delegate to the async experiment driver via anyio.
    anyio.run(
        _experiment,
        experiment_factor_levels_path,
        es,
        es_index,
        sdn_kubeconfig_path,
        # ovn_kubeconfig_path,
        sleep_t,
        block,
        replicate,
        measure_repetitions
    )
# Allow invoking the CLI by running this module directly.
if __name__ == '__main__':
    app()
| 33.940239 | 144 | 0.669327 |
c9c3b99508151de95afab6bc4448e129ac693d59 | 9,869 | py | Python | swift/test/unit/common/middleware/test_keystoneauth.py | DmitryMezhensky/Hadoop-and-Swift-integration | 00c2c9cf404f7480ec2c24f918c4b4c055b23800 | [
"Apache-1.1"
] | 5 | 2015-01-07T10:29:18.000Z | 2021-03-30T11:12:39.000Z | swift/test/unit/common/middleware/test_keystoneauth.py | steveloughran/Hadoop-and-Swift-integration | 667c39a3754a8bfeeb883dd9be497cac6e4ab852 | [
"Apache-1.1"
] | null | null | null | swift/test/unit/common/middleware/test_keystoneauth.py | steveloughran/Hadoop-and-Swift-integration | 667c39a3754a8bfeeb883dd9be497cac6e4ab852 | [
"Apache-1.1"
] | 5 | 2015-01-07T10:29:27.000Z | 2017-04-15T13:56:45.000Z | # Copyright (c) 2012 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import webob
from swift.common.middleware import keystoneauth
class FakeApp(object):
    """Minimal WSGI app stub that records calls and replays canned responses.

    ``status_headers_body_iter`` yields one (status, headers, body) triple per
    expected request; it defaults to a single 404 response.
    """
    def __init__(self, status_headers_body_iter=None):
        # Number of times the app has been invoked.
        self.calls = 0
        self.status_headers_body_iter = status_headers_body_iter
        if not self.status_headers_body_iter:
            self.status_headers_body_iter = iter([('404 Not Found', {}, '')])

    def __call__(self, env, start_response):
        self.calls += 1
        self.request = webob.Request.blank('', environ=env)
        # Honor an authorize callback installed by the auth middleware; if it
        # returns a response (e.g. 401/403), short-circuit with it.
        if 'swift.authorize' in env:
            resp = env['swift.authorize'](self.request)
            if resp:
                return resp(env, start_response)
        status, headers, body = self.status_headers_body_iter.next()
        return webob.Response(status=status, headers=headers,
                              body=body)(env, start_response)
class SwiftAuth(unittest.TestCase):
    """End-to-end request tests for the keystoneauth middleware."""
    def setUp(self):
        self.test_auth = keystoneauth.filter_factory({})(FakeApp())

    def _make_request(self, path=None, headers=None, **kwargs):
        # Default to an object path inside the reseller account for 'foo'.
        if not path:
            path = '/v1/%s/c/o' % self.test_auth._get_account_for_tenant('foo')
        return webob.Request.blank(path, headers=headers, **kwargs)

    def _get_identity_headers(self, status='Confirmed', tenant_id='1',
                              tenant_name='acct', user='usr', role=''):
        # Headers as the keystone auth_token middleware would set them.
        return dict(X_IDENTITY_STATUS=status,
                    X_TENANT_ID=tenant_id,
                    X_TENANT_NAME=tenant_name,
                    X_ROLES=role,
                    X_USER_NAME=user)

    def _get_successful_middleware(self):
        # Middleware whose wrapped app answers 200 OK to a single request.
        response_iter = iter([('200 OK', {}, '')])
        return keystoneauth.filter_factory({})(FakeApp(response_iter))

    def test_confirmed_identity_is_authorized(self):
        role = self.test_auth.reseller_admin_role
        headers = self._get_identity_headers(role=role)
        req = self._make_request('/v1/AUTH_acct/c', headers)
        resp = req.get_response(self._get_successful_middleware())
        self.assertEqual(resp.status_int, 200)

    def test_confirmed_identity_is_not_authorized(self):
        headers = self._get_identity_headers()
        req = self._make_request('/v1/AUTH_acct/c', headers)
        resp = req.get_response(self.test_auth)
        self.assertEqual(resp.status_int, 403)

    def test_anonymous_is_authorized_for_permitted_referrer(self):
        req = self._make_request(headers={'X_IDENTITY_STATUS': 'Invalid'})
        req.acl = '.r:*'
        resp = req.get_response(self._get_successful_middleware())
        self.assertEqual(resp.status_int, 200)

    def test_anonymous_is_not_authorized_for_unknown_reseller_prefix(self):
        req = self._make_request(path='/v1/BLAH_foo/c/o',
                                 headers={'X_IDENTITY_STATUS': 'Invalid'})
        resp = req.get_response(self.test_auth)
        self.assertEqual(resp.status_int, 401)

    def test_blank_reseller_prefix(self):
        conf = {'reseller_prefix': ''}
        test_auth = keystoneauth.filter_factory(conf)(FakeApp())
        account = tenant_id = 'foo'
        self.assertTrue(test_auth._reseller_check(account, tenant_id))

    def test_override_asked_for_but_not_allowed(self):
        conf = {'allow_overrides': 'false'}
        self.test_auth = keystoneauth.filter_factory(conf)(FakeApp())
        req = self._make_request('/v1/AUTH_account',
                                 environ={'swift.authorize_override': True})
        resp = req.get_response(self.test_auth)
        self.assertEquals(resp.status_int, 401)

    def test_override_asked_for_and_allowed(self):
        conf = {'allow_overrides': 'true'}
        self.test_auth = keystoneauth.filter_factory(conf)(FakeApp())
        req = self._make_request('/v1/AUTH_account',
                                 environ={'swift.authorize_override': True})
        resp = req.get_response(self.test_auth)
        self.assertEquals(resp.status_int, 404)

    def test_override_default_allowed(self):
        req = self._make_request('/v1/AUTH_account',
                                 environ={'swift.authorize_override': True})
        resp = req.get_response(self.test_auth)
        self.assertEquals(resp.status_int, 404)
class TestAuthorize(unittest.TestCase):
    """Unit tests for keystoneauth.authorize() decision logic."""
    def setUp(self):
        self.test_auth = keystoneauth.filter_factory({})(FakeApp())

    def _make_request(self, path, **kwargs):
        return webob.Request.blank(path, **kwargs)

    def _get_account(self, identity=None):
        if not identity:
            identity = self._get_identity()
        return self.test_auth._get_account_for_tenant(identity['tenant'][0])

    def _get_identity(self, tenant_id='tenant_id',
                      tenant_name='tenant_name', user='user', roles=None):
        # Identity dict shaped like env['keystone.identity'].
        if not roles:
            roles = []
        return dict(tenant=(tenant_id, tenant_name), user=user, roles=roles)

    def _check_authenticate(self, account=None, identity=None, headers=None,
                            exception=None, acl=None, env=None, path=None):
        # Build a request with the given identity/acl, run authorize(), and
        # assert it raises `exception` (or passes when exception is None).
        # Returns the request so callers can inspect the resulting environ.
        if not identity:
            identity = self._get_identity()
        if not account:
            account = self._get_account(identity)
        if not path:
            path = '/v1/%s/c' % account
        default_env = {'keystone.identity': identity,
                       'REMOTE_USER': identity['tenant']}
        if env:
            default_env.update(env)
        req = self._make_request(path, headers=headers, environ=default_env)
        req.acl = acl
        result = self.test_auth.authorize(req)
        if exception:
            self.assertTrue(isinstance(result, exception))
        else:
            self.assertTrue(result is None)
        return req

    def test_authorize_fails_for_unauthorized_user(self):
        self._check_authenticate(exception=webob.exc.HTTPForbidden)

    def test_authorize_fails_for_invalid_reseller_prefix(self):
        self._check_authenticate(account='BLAN_a',
                                 exception=webob.exc.HTTPForbidden)

    def test_authorize_succeeds_for_reseller_admin(self):
        roles = [self.test_auth.reseller_admin_role]
        identity = self._get_identity(roles=roles)
        req = self._check_authenticate(identity=identity)
        self.assertTrue(req.environ.get('swift_owner'))

    def test_authorize_succeeds_as_owner_for_operator_role(self):
        roles = self.test_auth.operator_roles.split(',')[0]
        identity = self._get_identity(roles=roles)
        req = self._check_authenticate(identity=identity)
        self.assertTrue(req.environ.get('swift_owner'))

    def _check_authorize_for_tenant_owner_match(self, exception=None):
        # A user whose name equals the tenant name is the tenant owner.
        identity = self._get_identity()
        identity['user'] = identity['tenant'][1]
        req = self._check_authenticate(identity=identity, exception=exception)
        expected = bool(exception is None)
        self.assertEqual(bool(req.environ.get('swift_owner')), expected)

    def test_authorize_succeeds_as_owner_for_tenant_owner_match(self):
        self.test_auth.is_admin = True
        self._check_authorize_for_tenant_owner_match()

    def test_authorize_fails_as_owner_for_tenant_owner_match(self):
        self.test_auth.is_admin = False
        self._check_authorize_for_tenant_owner_match(
            exception=webob.exc.HTTPForbidden)

    def test_authorize_succeeds_for_container_sync(self):
        env = {'swift_sync_key': 'foo', 'REMOTE_ADDR': '127.0.0.1'}
        headers = {'x-container-sync-key': 'foo', 'x-timestamp': None}
        self._check_authenticate(env=env, headers=headers)

    def test_authorize_fails_for_invalid_referrer(self):
        env = {'HTTP_REFERER': 'http://invalid.com/index.html'}
        self._check_authenticate(acl='.r:example.com', env=env,
                                 exception=webob.exc.HTTPForbidden)

    def test_authorize_fails_for_referrer_without_rlistings(self):
        env = {'HTTP_REFERER': 'http://example.com/index.html'}
        self._check_authenticate(acl='.r:example.com', env=env,
                                 exception=webob.exc.HTTPForbidden)

    def test_authorize_succeeds_for_referrer_with_rlistings(self):
        env = {'HTTP_REFERER': 'http://example.com/index.html'}
        self._check_authenticate(acl='.r:example.com,.rlistings', env=env)

    def test_authorize_succeeds_for_referrer_with_obj(self):
        path = '/v1/%s/c/o' % self._get_account()
        env = {'HTTP_REFERER': 'http://example.com/index.html'}
        self._check_authenticate(acl='.r:example.com', env=env, path=path)

    def test_authorize_succeeds_for_user_role_in_roles(self):
        acl = 'allowme'
        identity = self._get_identity(roles=[acl])
        self._check_authenticate(identity=identity, acl=acl)

    def test_authorize_succeeds_for_tenant_name_user_in_roles(self):
        identity = self._get_identity()
        acl = '%s:%s' % (identity['tenant'][1], identity['user'])
        self._check_authenticate(identity=identity, acl=acl)

    def test_authorize_succeeds_for_tenant_id_user_in_roles(self):
        identity = self._get_identity()
        acl = '%s:%s' % (identity['tenant'][0], identity['user'])
        self._check_authenticate(identity=identity, acl=acl)
# Run the test suite when executed directly.
if __name__ == '__main__':
    unittest.main()
| 42.722944 | 79 | 0.663897 |
46297209b695f44825fae6e8441abc9feb1d14ac | 745 | py | Python | 7.py | sinasiruosnejad/leetcode | 8fe5a400bc03a5e129835e380ff9fe72af681d8a | [
"MIT"
] | null | null | null | 7.py | sinasiruosnejad/leetcode | 8fe5a400bc03a5e129835e380ff9fe72af681d8a | [
"MIT"
] | null | null | null | 7.py | sinasiruosnejad/leetcode | 8fe5a400bc03a5e129835e380ff9fe72af681d8a | [
"MIT"
] | null | null | null | x=int(input("\nplease enter an integer "))
# Reverse the digits of the integer x (LeetCode #7 style), printing 0 when
# the reversed value falls outside the signed 32-bit range.
number=[]
x_sign="positive"
# Work with the absolute value; remember the sign for the end.
if x<0:
    x*=-1
    x_sign="negative"
# Collect the decimal digits of x, least significant first.
while x!=0:
    temp=x%10
    number.append(temp)
    x=x//10
# Intended to strip zeros from the most-significant end; since the leading
# digit of a non-zero x is never 0 (digits were appended LSD-first), this
# loop normally breaks on its first iteration.
for i in range(len(number)-1,-1,-1):
    if number[i]>0:
        break
    if number[i]==0:
        number.pop(i)
# Only ever inspects number[0] (j is always 0 because of range(1)): pops
# zeros that were the trailing zeros of x, i.e. leading zeros of the
# reversed value. NOTE(review): zero digits contribute nothing to the
# weighted sum below, so these pops do not change the printed result.
for i in range(len(number)):
    for j in range(1):
        if number[j]>0:
            break
        if number[j]==0:
            number.pop(j)
# Rebuild the integer from the digits in reversed order.
number_replace=[]
result=0
for i in range(len(number)-1,-1,-1):
    number_replace.append(number[i])
for i in range(len(number_replace)):
    result+=number_replace[i]*(10**i)
if x_sign=="negative":
    result*=-1
# Clamp to 0 when outside the signed 32-bit range [-2**31, 2**31 - 1].
r=2**31
if result>r-1 or result<-r:
    result=0
print(result)
| 13.070175 | 42 | 0.558389 |
03ef27e7bac84dee4fdce494541338797692f554 | 786 | py | Python | validation/observable/port.py | ukncsc/edge-mod | 95737e71945f4a8823f20a554e5efb9841183a26 | [
"Unlicense"
] | 2 | 2016-08-23T07:55:01.000Z | 2016-09-27T15:13:32.000Z | validation/observable/port.py | ukncsc/edge-mod | 95737e71945f4a8823f20a554e5efb9841183a26 | [
"Unlicense"
] | null | null | null | validation/observable/port.py | ukncsc/edge-mod | 95737e71945f4a8823f20a554e5efb9841183a26 | [
"Unlicense"
] | 2 | 2020-10-02T13:27:10.000Z | 2021-04-11T09:45:16.000Z |
from adapters.certuk_mod.validation import ValidationStatus, FieldValidationInfo
from observable import ObservableValidationInfo
class PortValidationInfo(ObservableValidationInfo):
    """Validation results for a Port observable."""

    TYPE = 'PortObjectType'

    def __init__(self, observable_data, **field_validation):
        super(PortValidationInfo, self).__init__(
            PortValidationInfo.TYPE, observable_data, **field_validation)
        self.port_value = field_validation.get('port_value')

    @classmethod
    def validate(cls, **observable_data):
        """Flag a missing or empty port value as an error."""
        port_error = None
        if not observable_data.get('port_value'):
            port_error = FieldValidationInfo(ValidationStatus.ERROR, 'Port value is missing')
        return cls(observable_data, port_value=port_error)
| 34.173913 | 110 | 0.740458 |
f6e4c66bc225bad7ad3f4ae95f2ebd251539f6a6 | 8,282 | py | Python | eli5/base.py | ophiry/eli5 | 5513bb93469e948790ee8e48364573e1103c6383 | [
"MIT"
] | 2 | 2020-06-09T14:20:27.000Z | 2020-10-14T05:24:36.000Z | eli5/base.py | mdjabc/eli5 | 4cd66ac5e0ffde0c002d8f46dc64d7091f49445f | [
"MIT"
] | null | null | null | eli5/base.py | mdjabc/eli5 | 4cd66ac5e0ffde0c002d8f46dc64d7091f49445f | [
"MIT"
] | 1 | 2020-04-19T05:45:11.000Z | 2020-04-19T05:45:11.000Z | # -*- coding: utf-8 -*-
from typing import Any, List, Tuple, Union, Optional
import numpy as np # type: ignore
from .base_utils import attrs
from .formatters.features import FormattedFeatureName
# @attrs decorator used in this file calls @attr.s(slots=True),
# creating attr.ib entries based on the signature of __init__.
@attrs
class Explanation(object):
    """ An explanation for classifier or regressor,
    it can either explain weights or a single prediction.
    Which of the optional attributes are filled in depends on the
    explanation method used; unset ones remain None.
    """
    def __init__(self,
                 estimator,  # type: str
                 description=None,  # type: Optional[str]
                 error=None,  # type: Optional[str]
                 method=None,  # type: Optional[str]
                 is_regression=False,  # type: bool
                 targets=None,  # type: Optional[List[TargetExplanation]]
                 feature_importances=None,  # type: Optional[FeatureImportances]
                 decision_tree=None,  # type: Optional[TreeInfo]
                 highlight_spaces=None,  # type: Optional[bool]
                 transition_features=None,  # type: Optional[TransitionFeatureWeights]
                 image=None, # type: Any
                 ):
        # type: (...) -> None
        self.estimator = estimator
        self.description = description
        self.error = error
        self.method = method
        self.is_regression = is_regression
        self.targets = targets
        self.feature_importances = feature_importances
        self.decision_tree = decision_tree
        self.highlight_spaces = highlight_spaces
        self.transition_features = transition_features
        self.image = image # if arg is not None, assume we are working with images
    def _repr_html_(self):
        """ HTML formatting for the notebook.
        """
        # Imported lazily here, presumably to avoid a circular import with
        # eli5.formatters — confirm before moving to module level.
        from eli5.formatters import fields
        from eli5.formatters.html import format_as_html
        return format_as_html(self, force_weights=False, show=fields.WEIGHTS)
@attrs
class FeatureImportances(object):
    """ Feature importances with number of remaining non-zero features.
    """
    def __init__(self, importances, remaining):
        # type: (...) -> None
        self.importances = importances  # type: List[FeatureWeight]
        self.remaining = remaining  # type: int
    @classmethod
    def from_names_values(cls, names, values, std=None, **kwargs):
        """Build importances from parallel name/value (and optional std) lists."""
        params = zip(names, values) if std is None else zip(names, values, std)
        importances = [FeatureWeight(*x) for x in params]  # type: ignore
        return cls(importances, **kwargs)
@attrs
class TargetExplanation(object):
    """ Explanation for a single target or class.
    Feature weights are stored in the :feature_weights: attribute,
    and features highlighted in text in the :weighted_spans: attribute.
    Spatial values are stored in the :heatmap: attribute.
    :proba: and :score: hold the predicted values for this target when the
    explanation method provides them.
    """
    def __init__(self,
                 target,  # type: Union[str, int]
                 feature_weights=None,  # type: Optional[FeatureWeights]
                 proba=None,  # type: Optional[float]
                 score=None,  # type: Optional[float]
                 weighted_spans=None,  # type: Optional[WeightedSpans]
                 heatmap=None,  # type: Optional[np.ndarray]
                 ):
        # type: (...) -> None
        self.target = target
        self.feature_weights = feature_weights
        self.proba = proba
        self.score = score
        self.weighted_spans = weighted_spans
        self.heatmap = heatmap
# Type alias for a feature identifier.
# List is currently used for unhashed features
Feature = Union[str, List, FormattedFeatureName]
@attrs
class FeatureWeights(object):
    """ Weights for top features, :pos: for positive and :neg: for negative,
    sorted by descending absolute value.
    Number of remaining positive and negative features are stored in
    :pos_remaining: and :neg_remaining: attributes.
    """
    def __init__(self,
                 pos,  # type: List[FeatureWeight]
                 neg,  # type: List[FeatureWeight]
                 pos_remaining=0,  # type: int
                 neg_remaining=0,  # type: int
                 ):
        # type: (...) -> None
        self.pos = pos
        self.neg = neg
        self.pos_remaining = pos_remaining
        self.neg_remaining = neg_remaining
@attrs
class FeatureWeight(object):
    """ A single feature together with its weight.
    :std: is an optional spread for the weight (as supplied by
    from_names_values), and :value: is an optional feature value.
    """
    def __init__(self,
                 feature,  # type: Feature
                 weight,  # type: float
                 std=None,  # type: float
                 value=None,  # type: Any
                 ):
        # type: (...) -> None
        self.feature = feature
        self.weight = weight
        self.std = std
        self.value = value
@attrs
class WeightedSpans(object):
    """ Holds highlighted spans for parts of document - a DocWeightedSpans
    object for each vectorizer, and other features not highlighted anywhere
    (stored in :other:).
    """
    def __init__(self,
                 docs_weighted_spans,  # type: List[DocWeightedSpans]
                 other=None,  # type: FeatureWeights
                 ):
        # type: (...) -> None
        self.docs_weighted_spans = docs_weighted_spans
        self.other = other
# Type alias: one highlighted feature with its character spans and weight.
WeightedSpan = Tuple[
    Feature,
    List[Tuple[int, int]],  # list of spans (start, end) for this feature
    float,  # feature weight
]
@attrs
class DocWeightedSpans(object):
    """ Features highlighted in text. :document: is a pre-processed document
    before applying the analyzer. :weighted_spans: holds a list of spans
    for features found in text (span indices correspond to
    :document:). :preserve_density: determines how features are colored
    when doing formatting - it is better set to True for char features
    and to False for word features. :vec_name: optionally names the
    vectorizer these spans came from.
    """
    def __init__(self,
                 document,  # type: str
                 spans,  # type: List[WeightedSpan]
                 preserve_density=None,  # type: bool
                 vec_name=None,  # type: str
                 ):
        # type: (...) -> None
        self.document = document
        self.spans = spans
        self.preserve_density = preserve_density
        self.vec_name = vec_name
@attrs
class TransitionFeatureWeights(object):
    """ Weights matrix for transition features.
    :coef: is indexed by the classes listed in :class_names:.
    """
    def __init__(self,
                 class_names,  # type: List[str]
                 coef,
                 ):
        # type: (...) -> None
        self.class_names = class_names
        self.coef = coef
@attrs
class TreeInfo(object):
    """ Information about the decision tree. :criterion: is the name of
    the function to measure the quality of a split, :tree: holds all nodes
    of the tree, and :graphviz: is the tree rendered in graphviz .dot format.
    :is_classification: distinguishes classifier trees from regressor trees.
    """
    def __init__(self,
                 criterion,  # type: str
                 tree,  # type: NodeInfo
                 graphviz,  # type: str
                 is_classification,  # type: bool
                 ):
        # type: (...) -> None
        self.criterion = criterion
        self.tree = tree
        self.graphviz = graphviz
        self.is_classification = is_classification
@attrs
class NodeInfo(object):
    """ A node in a binary tree.
    Pointers to left and right children are in :left: and :right: attributes.
    Leaf nodes have :is_leaf: set and no feature/threshold; split nodes
    carry the splitting feature and threshold.
    """
    def __init__(self,
                 id,  # type: int
                 is_leaf,  # type: bool
                 value,
                 value_ratio,
                 impurity,  # type: float
                 samples,  # type: int
                 sample_ratio,  # type: float
                 feature_name=None,  # type: str
                 feature_id=None,  # type: int
                 threshold=None,  # type: float
                 left=None,  # type: NodeInfo
                 right=None,  # type: NodeInfo
                 ):
        # type: (...) -> None
        self.id = id
        self.is_leaf = is_leaf
        self.value = value
        self.value_ratio = value_ratio
        self.impurity = impurity
        self.samples = samples
        self.sample_ratio = sample_ratio
        self.feature_name = feature_name
        self.feature_id = feature_id
        self.threshold = threshold
        self.left = left
        self.right = right
| 34.65272 | 86 | 0.585607 |
fcce04dc369a096372a2c9239072faf214858317 | 863 | py | Python | xunit-autolabeler-v2/ast_parser/core/test_data/parser/nested_tags/nested_tags.py | GoogleCloudPlatform/repo-automation-playground | a4c8f104c246ede002f6c18fcebfc0496c8abb94 | [
"Apache-2.0"
] | 5 | 2019-07-11T17:35:44.000Z | 2021-10-09T01:49:04.000Z | xunit-autolabeler-v2/ast_parser/core/test_data/parser/nested_tags/nested_tags.py | GoogleCloudPlatform/repo-automation-playground | a4c8f104c246ede002f6c18fcebfc0496c8abb94 | [
"Apache-2.0"
] | 36 | 2019-08-27T18:20:21.000Z | 2022-01-12T21:29:00.000Z | xunit-autolabeler-v2/ast_parser/core/test_data/parser/nested_tags/nested_tags.py | GoogleCloudPlatform/repo-automation-playground | a4c8f104c246ede002f6c18fcebfc0496c8abb94 | [
"Apache-2.0"
] | 13 | 2019-10-30T19:39:51.000Z | 2021-04-04T09:31:52.000Z | # Copyright 2020 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START root_tag]
# [START nested_tag]
def nested_method():
    """Sample function enclosed by both the root and nested region tags."""
    label = 'nested'
    return label
# [END nested_tag]
def root_method():
    """Sample function enclosed only by the root region tag."""
    label = 'root'
    return label
# [END root_tag]
# [START root_tag]
def another_root_method():
    """Second sample function using the (repeated) root region tag."""
    label = 'another root'
    return label
# [END root_tag]
# [START empty_tag]
# [END empty_tag]
| 24.657143 | 74 | 0.73117 |
ae12d66ef700de699763c2374cf2418cfac3fd1e | 2,747 | py | Python | backup/www/apis.py | monkeychen/python-tutorial | a24785da6b4d857200b819ad4d960885b1ef7a20 | [
"Apache-2.0"
] | null | null | null | backup/www/apis.py | monkeychen/python-tutorial | a24785da6b4d857200b819ad4d960885b1ef7a20 | [
"Apache-2.0"
] | null | null | null | backup/www/apis.py | monkeychen/python-tutorial | a24785da6b4d857200b819ad4d960885b1ef7a20 | [
"Apache-2.0"
] | null | null | null | class Page(object):
"""
Page object for display pages.
"""
def __init__(self, item_count, page_index=1, page_size=10):
"""
Init Pagination by item_count, page_index and page_size.
>>> p1 = Page(100, 1)
>>> p1.page_count
10
>>> p1.offset
0
>>> p1.limit
10
>>> p2 = Page(90, 9, 10)
>>> p2.page_count
9
>>> p2.offset
80
>>> p2.limit
10
>>> p3 = Page(91, 10, 10)
>>> p3.page_count
10
>>> p3.offset
90
>>> p3.limit
10
"""
self.item_count = item_count
self.page_size = page_size
self.page_count = item_count // page_size + (1 if item_count % page_size > 0 else 0)
if (item_count == 0) or (page_index > self.page_count):
self.offset = 0
self.limit = 0
self.page_index = 1
else:
self.page_index = page_index
self.offset = self.page_size * (page_index - 1)
self.limit = self.page_size
self.has_next = self.page_index < self.page_count
self.has_previous = self.page_index > 1
def __str__(self):
return 'item_count: %s, page_count: %s, page_index: %s, page_size: %s, offset: %s, limit: %s' % (
self.item_count, self.page_count, self.page_index, self.page_size, self.offset, self.limit)
__repr__ = __str__
class APIError(Exception):
    """Base API exception.

    Carries an error code (required) plus optional data and message; the
    message is also passed to ``Exception`` so ``str(e)`` returns it.
    """

    def __init__(self, error, data='', message=''):
        super(APIError, self).__init__(message)
        self.error, self.data, self.message = error, data, message
class APIValueError(APIError):
    """Indicate that an input value is invalid.

    ``data`` carries the name of the offending form field.
    """

    def __init__(self, field, message=''):
        APIError.__init__(self, 'value:invalid', field, message)
class APIResourceNotFoundError(APIError):
    """Indicate that the requested resource does not exist.

    ``data`` carries the name of the missing resource.
    """

    def __init__(self, field, message=''):
        super(APIResourceNotFoundError, self).__init__('value:not found', field, message)
class APIPermissionError(APIError):
    """Indicate that the caller lacks permission for this API."""

    def __init__(self, message=''):
        super(APIPermissionError, self).__init__('permission:forbidden', 'permission', message)
if __name__ == '__main__':
    # Self-test: run the doctests embedded in this module's docstrings.
    import doctest
    doctest.testmod()
| 26.413462 | 105 | 0.560976 |
047d3e5f9b977e118efe78905db4849a4157b033 | 902 | py | Python | python-example/elastic_search_query.py | scirag/elastic-search-hamming-distance-plugin | ba4ede2bbb2f9aef0f834236d806b356de0a9374 | [
"Apache-2.0"
] | 8 | 2017-02-21T04:34:57.000Z | 2021-07-12T09:22:00.000Z | python-example/elastic_search_query.py | scirag/elastic-search-hamming-distance-plugin | ba4ede2bbb2f9aef0f834236d806b356de0a9374 | [
"Apache-2.0"
] | 1 | 2021-04-21T12:11:03.000Z | 2021-04-21T12:11:03.000Z | python-example/elastic_search_query.py | scirag/elastic-search-hamming-distance-plugin | ba4ede2bbb2f9aef0f834236d806b356de0a9374 | [
"Apache-2.0"
] | 2 | 2017-11-22T12:03:11.000Z | 2018-06-20T10:02:43.000Z | from elasticsearch import Elasticsearch
# Index and document type queried by search_image() below.
ES_INDEX_NAME = 'tr.awakening'
ES_DOC_TYPE = 'image_info'
# Module-level Elasticsearch client created with default connection
# settings; shared by all queries in this script.
es = Elasticsearch()
def search_image(phash, min_score):
    """Search the image index for documents similar to ``phash``.

    Scoring is delegated to the native ``hamming_distance`` script plugin,
    which compares ``phash`` against each document's ``hash`` field; only
    hits scoring at least ``min_score`` are returned.
    """
    score_function = {
        "script_score": {
            "script": "hamming_distance",
            "lang": "native",
            "params": {
                "hash": phash,
                "field": "hash"
            }
        }
    }
    body = {
        "query": {
            "function_score": {
                "min_score": min_score,
                "query": {"match_all": {}},
                "functions": [score_function]
            }
        }
    }
    return es.search(index=ES_INDEX_NAME, doc_type=ES_DOC_TYPE, body=body)
| 26.529412 | 80 | 0.372506 |
f1af1d5d6217937424d3c7816c1765c24ec18c75 | 6,228 | py | Python | src/transformers/configuration_mbart.py | Liang813/transformers | 08f534d2da47875a4b7eb1c125cfa7f0f3b79642 | [
"Apache-2.0"
] | null | null | null | src/transformers/configuration_mbart.py | Liang813/transformers | 08f534d2da47875a4b7eb1c125cfa7f0f3b79642 | [
"Apache-2.0"
] | null | null | null | src/transformers/configuration_mbart.py | Liang813/transformers | 08f534d2da47875a4b7eb1c125cfa7f0f3b79642 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2020 The Fairseq Authors and The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" MBART configuration """
from .configuration_bart import BartConfig
from .utils import logging
logger = logging.get_logger(__name__)
# Map from pretrained model identifier to the URL of its hosted config file.
MBART_PRETRAINED_CONFIG_ARCHIVE_MAP = {
    "facebook/mbart-large-en-ro": "https://s3.amazonaws.com/models.huggingface.co/bert/facebook/mbart-large-en-ro/config.json",
    "facebook/mbart-large-cc25": "https://s3.amazonaws.com/models.huggingface.co/bert/facebook/mbart-large-cc25/config.json",
}
class MBartConfig(BartConfig):
    """
    This is the configuration class to store the configuration of a
    :class:`~transformers.MBartForConditionalGeneration`. It is used to instantiate a BART model according to the
    specified arguments, defining the model architecture.
    Configuration objects inherit from :class:`~transformers.PretrainedConfig` and can be used to control the model
    outputs. Read the documentation from :class:`~transformers.PretrainedConfig` for more information.
    Args:
        vocab_size (:obj:`int`, `optional`, defaults to 250027):
            Vocabulary size of the MBART model. Defines the number of different tokens that can be represented by the
            :obj:`inputs_ids` passed when calling :class:`~transformers.MBartForConditionalGeneration`.
        d_model (:obj:`int`, `optional`, defaults to 1024):
            Dimensionality of the layers and the pooler layer.
        encoder_layers (:obj:`int`, `optional`, defaults to 12):
            Number of encoder layers.
        decoder_layers (:obj:`int`, `optional`, defaults to 12):
            Number of decoder layers.
        encoder_attention_heads (:obj:`int`, `optional`, defaults to 16):
            Number of attention heads for each attention layer in the Transformer encoder.
        decoder_attention_heads (:obj:`int`, `optional`, defaults to 16):
            Number of attention heads for each attention layer in the Transformer decoder.
        decoder_ffn_dim (:obj:`int`, `optional`, defaults to 4096):
            Dimensionality of the "intermediate" (i.e., feed-forward) layer in decoder.
        encoder_ffn_dim (:obj:`int`, `optional`, defaults to 4096):
            Dimensionality of the "intermediate" (i.e., feed-forward) layer in encoder.
        activation_function (:obj:`str` or :obj:`function`, `optional`, defaults to :obj:`"gelu"`):
            The non-linear activation function (function or string) in the encoder and pooler. If string,
            :obj:`"gelu"`, :obj:`"relu"`, :obj:`"swish"` and :obj:`"gelu_new"` are supported.
        dropout (:obj:`float`, `optional`, defaults to 0.1):
            The dropout probability for all fully connected layers in the embeddings, encoder, and pooler.
        attention_dropout (:obj:`float`, `optional`, defaults to 0.0):
            The dropout ratio for the attention probabilities.
        activation_dropout (:obj:`float`, `optional`, defaults to 0.0):
            The dropout ratio for activations inside the fully connected layer.
        classifier_dropout (:obj:`float`, `optional`, defaults to 0.0):
            The dropout ratio for classifier.
        max_position_embeddings (:obj:`int`, `optional`, defaults to 1024):
            The maximum sequence length that this model might ever be used with. Typically set this to something large
            just in case (e.g., 512 or 1024 or 2048).
        init_std (:obj:`float`, `optional`, defaults to 0.02):
            The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
        add_bias_logits (:obj:`bool`, `optional`, defaults to :obj:`False`):
            This should be completed, specific to marian.
        normalize_before (:obj:`bool`, `optional`, defaults to :obj:`True`):
            Call layernorm before attention ops.
        normalize_embedding (:obj:`bool`, `optional`, defaults to :obj:`True`):
            Call layernorm after embeddings. Only True for Bart.
        static_position_embeddings (:obj:`bool`, `optional`, defaults to :obj:`False`):
            Don't learn positional embeddings, use sinusoidal.
        add_final_layer_norm (:obj:`bool`, `optional`, defaults to :obj:`True`):
            Why not add another layernorm?
        scale_embedding (:obj:`bool`, `optional`, defaults to :obj:`False`):
            Scale embeddings by dividing by sqrt(d_model).
        eos_token_id (:obj:`int`, `optional`, defaults to 2):
            End of stream token id.
        pad_token_id (:obj:`int`, `optional`, defaults to 1):
            Padding token id.
        bos_token_id (:obj:`int`, `optional`, defaults to 0):
            Beginning of stream token id.
        encoder_layerdrop: (:obj:`float`, `optional`, defaults to 0.0):
            The LayerDrop probability for the encoder. See the `LayerDrop paper <see
            https://arxiv.org/abs/1909.11556>`__ for more details.
        decoder_layerdrop: (:obj:`float`, `optional`, defaults to 0.0):
            The LayerDrop probability for the decoder. See the `LayerDrop paper <see
            https://arxiv.org/abs/1909.11556>`__ for more details.
        extra_pos_embeddings: (:obj:`int`, `optional`, defaults to 2):
            How many extra learned positional embeddings to use. Should be equal to :obj:`pad_token_id+1`.
        is_encoder_decoder (:obj:`bool`, `optional`, defaults to :obj:`True`):
            Whether this is an encoder/decoder model
        force_bos_token_to_be_generated (:obj:`bool`, `optional`, defaults to :obj:`False`):
            Whether or not to force BOS token to be generated at step 1 (after ``decoder_start_token_id``).
    """
    model_type = "mbart"
| 59.314286 | 127 | 0.679512 |
a3d43e30437db3376e0a1f198fc9eb2d5072e25b | 72 | py | Python | superset/run.py | neilchencn/incubator-superset | 5f9742afa215c0f906fec498e9f3910d74b85347 | [
"Apache-2.0"
] | null | null | null | superset/run.py | neilchencn/incubator-superset | 5f9742afa215c0f906fec498e9f3910d74b85347 | [
"Apache-2.0"
] | null | null | null | superset/run.py | neilchencn/incubator-superset | 5f9742afa215c0f906fec498e9f3910d74b85347 | [
"Apache-2.0"
] | 1 | 2019-11-07T13:23:03.000Z | 2019-11-07T13:23:03.000Z | from superset import app
app.run(debug=True, host='0.0.0.0', port=8088)
| 24 | 46 | 0.722222 |
a809f50faf5249e24daf29b0782c97a3cfe85940 | 32,017 | py | Python | python-shell/src/test/test_gaffer_functions.py | sw96411/gaffer-tools | 2dd4ff64cf6afa1dd3f9529977d7170370b11f58 | [
"Apache-2.0"
] | null | null | null | python-shell/src/test/test_gaffer_functions.py | sw96411/gaffer-tools | 2dd4ff64cf6afa1dd3f9529977d7170370b11f58 | [
"Apache-2.0"
] | null | null | null | python-shell/src/test/test_gaffer_functions.py | sw96411/gaffer-tools | 2dd4ff64cf6afa1dd3f9529977d7170370b11f58 | [
"Apache-2.0"
] | null | null | null | #
# Copyright 2016-2019 Crown Copyright
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import unittest
from gafferpy import gaffer as g
class GafferFunctionsTest(unittest.TestCase):
examples = [
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.Concat",
"separator" : "\u0020"
}
''',
g.Concat(
separator=" "
)
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.Divide"
}
''',
g.Divide()
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.DivideBy",
"by" : 3
}
''',
g.DivideBy(
by=3
)
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.ExtractKeys"
}
''',
g.ExtractKeys()
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.DictionaryLookup",
"dictionary": {
"One": 1,
"Two": 2,
"Three": 3
}
}
''',
g.DictionaryLookup(dictionary=dict(One=1, Two=2, Three=3))
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.ExtractValue",
"key" : "blueKey"
}
''',
g.ExtractValue(
key="blueKey"
)
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.ExtractValues"
}
''',
g.ExtractValues()
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.Identity"
}
''',
g.Identity()
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.IsEmpty"
}
''',
g.IsEmpty()
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.Longest"
}
''',
g.Longest()
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.IterableLongest"
}
''',
g.IterableLongest()
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.IterableFlatten",
"operator": {
"class": "uk.gov.gchq.koryphe.impl.binaryoperator.Max"
}
}
''',
g.IterableFlatten(operator=g.bop.Max())
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.IterableConcat"
}
''',
g.IterableConcat()
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.Multiply"
}
''',
g.Multiply()
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.MultiplyBy",
"by" : 4
}
''',
g.MultiplyBy(
by=4
)
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.MultiplyLongBy",
"by" : 4
}
''',
g.MultiplyLongBy(
by=4
)
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.Size"
}
''',
g.Size()
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.ToString"
}
''',
g.ToString()
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.ToString",
"charset": "UTF-16"
}
''',
g.ToString(charset="UTF-16")
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.StringReplace",
"searchString": "replaceme",
"replacement": "withthis"
}
''',
g.StringReplace(search_string="replaceme", replacement="withthis")
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.StringRegexReplace",
"regex": "repl.*me",
"replacement": "withthis"
}
''',
g.StringRegexReplace(regex="repl.*me", replacement="withthis")
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.StringSplit",
"delimiter": " "
}
''',
g.StringSplit(delimiter=" ")
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.StringRegexSplit",
"regex": "[ \\t]*"
}
''',
g.StringRegexSplit(regex="[ \t]*")
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.StringJoin",
"delimiter": " "
}
''',
g.StringJoin(delimiter=" ")
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.StringTrim"
}
''',
g.StringTrim()
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.StringAppend",
"suffix": "test"
}
''',
g.StringAppend(suffix="test")
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.StringPrepend",
"prefix": "test"
}
''',
g.StringPrepend(prefix="test")
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.StringTruncate",
"length": 20,
"ellipses": false
}
''',
g.StringTruncate(length=20, ellipses=False)
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.ReverseString"
}
''',
g.ReverseString()
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.DefaultIfNull",
"defaultValue": "test"
}
''',
g.DefaultIfNull(default_value="test")
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.DefaultIfEmpty",
"defaultValue": "test"
}
''',
g.DefaultIfEmpty(default_value="test")
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.function.ToEntityId"
}
''',
g.ToEntityId()
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.function.FromEntityId"
}
''',
g.FromEntityId()
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.function.ToElementId"
}
''',
g.ToElementId()
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.operation.function.FromElementId"
}
''',
g.FromElementId()
],
[
'''
{
"class": "uk.gov.gchq.gaffer.types.function.ToTypeValue"
}
''',
g.ToTypeValue()
],
[
'''
{
"class": "uk.gov.gchq.gaffer.types.function.ToTypeSubTypeValue"
}
''',
g.ToTypeSubTypeValue()
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.data.generator.MapGenerator",
"fields" : {
"GROUP" : "Group Label",
"VERTEX" : "Vertex Label",
"SOURCE" : "Source Label",
"count" : "Count Label"
},
"constants" : {
"A Constant" : "Some constant value"
}
}
''',
g.MapGenerator(
fields={
'VERTEX': 'Vertex Label',
'count': 'Count Label',
'GROUP': 'Group Label',
'SOURCE': 'Source Label'
},
constants={
'A Constant': 'Some constant value'
}
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.data.generator.CsvGenerator",
"fields" : {
"GROUP" : "Group Label",
"VERTEX" : "Vertex Label",
"SOURCE" : "Source Label",
"count" : "Count Label"
},
"constants" : {
"A Constant" : "Some constant value"
},
"quoted" : true,
"commaReplacement": "-"
}
''',
g.CsvGenerator(
fields={
'VERTEX': 'Vertex Label',
'count': 'Count Label',
'GROUP': 'Group Label',
'SOURCE': 'Source Label'
},
constants={
'A Constant': 'Some constant value'
},
quoted=True,
comma_replacement="-"
)
],
[
'''
{
"class": "uk.gov.gchq.gaffer.data.generator.JsonToElementGenerator"
}
''',
g.JsonToElementGenerator()
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.types.function.FreqMapExtractor",
"key" : "key1"
}
''',
g.FreqMapExtractor(key="key1")
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.function.FunctionMap",
"function" : {
"class" : "uk.gov.gchq.koryphe.impl.function.MultiplyBy",
"by" : 10
}
}
''',
g.FunctionMap(
function=g.MultiplyBy(by=10)
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.data.graph.function.walk.ExtractWalkEdges"
}
''',
g.ExtractWalkEdges()
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.data.graph.function.walk.ExtractWalkEdgesFromHop",
"hop" : 2
}
''',
g.ExtractWalkEdgesFromHop(
hop=2
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.data.graph.function.walk.ExtractWalkEntities"
}
''',
g.ExtractWalkEntities()
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.data.graph.function.walk.ExtractWalkEntitiesFromHop",
"hop" : 1
}
''',
g.ExtractWalkEntitiesFromHop(
hop=1
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.data.graph.function.walk.ExtractWalkVertex"
}
''',
g.ExtractWalkVertex()
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.Length",
"maxLength" : 100000
}
''',
g.Length(
max_length=100000
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.data.element.function.ExtractId",
"id" : "VERTEX"
}
''',
g.ExtractId(
id='VERTEX'
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.data.element.function.ExtractProperty",
"name" : "countByVehicleType"
}
''',
g.ExtractProperty(
name="countByVehicleType"
)
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.data.element.function.ExtractGroup"
}
''',
g.ExtractGroup()
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.CallMethod",
"method": "someMethod"
}
''',
g.CallMethod(method="someMethod")
],
[
'''
{
"class" : "uk.gov.gchq.gaffer.data.element.function.UnwrapEntityId"
}
''',
g.UnwrapEntityId()
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.SetValue",
"value": "value2"
}
''',
g.SetValue(value="value2")
],
[
'''
{
"class":"uk.gov.gchq.koryphe.impl.function.If",
"predicate":{"class":"uk.gov.gchq.koryphe.impl.predicate.IsA","type":"java.lang.Integer"},
"then":{"class":"uk.gov.gchq.koryphe.impl.function.SetValue","value":"value2"},
"otherwise":{"class":"uk.gov.gchq.koryphe.impl.function.SetValue","value":"value3"}
}
''',
g.func.If(
predicate=g.IsA(type="java.lang.Integer"),
then=g.SetValue(value="value2"),
otherwise=g.SetValue(value="value3")
)
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.ToArray"
}
''',
g.func.ToArray()
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.ToList"
}
''',
g.func.ToList()
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.ToSet"
}
''',
g.func.ToSet()
],
[
'''
{
"class": "uk.gov.gchq.gaffer.types.function.ToFreqMap"
}
''',
g.func.ToFreqMap()
],
[
'''
{
"class": "uk.gov.gchq.gaffer.types.function.FreqMapPredicator",
"predicate": {
"class": "uk.gov.gchq.koryphe.impl.predicate.IsA",
"type": "java.lang.String"
}
}
''',
g.FreqMapPredicator(
predicate=g.IsA(
type="java.lang.String"
)
)
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.IterableFilter",
"predicate": {
"class": "uk.gov.gchq.koryphe.impl.predicate.IsA",
"type": "java.lang.String"
}
}
''',
g.func.IterableFilter(
predicate=g.IsA(type="java.lang.String")
)
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.MapFilter"
}
''',
g.func.MapFilter()
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.MapFilter",
"keyPredicate": {
"class":"uk.gov.gchq.koryphe.impl.predicate.StringContains",
"value":"someValue",
"ignoreCase":false
}
}
''',
g.func.MapFilter(
key_predicate=g.pred.StringContains(
value="someValue",
ignore_case=False
)
)
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.FirstValid",
"predicate": {
"class":"uk.gov.gchq.koryphe.impl.predicate.StringContains",
"value":"someValue",
"ignoreCase":false
}
}
''',
g.func.FirstValid(
predicate=g.pred.StringContains(
value="someValue",
ignore_case=False
)
)
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.FirstValid",
"predicate": {
"class":"uk.gov.gchq.koryphe.impl.predicate.StringContains",
"value":"someValue",
"ignoreCase":false
}
}
''',
g.func.FirstValid(
predicate={
"class": "uk.gov.gchq.koryphe.impl.predicate.StringContains",
"value": "someValue",
"ignoreCase": False
}
)
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.MapFilter",
"keyPredicate": {
"class":"uk.gov.gchq.koryphe.impl.predicate.StringContains",
"value":"someValue",
"ignoreCase":false
},
"valuePredicate": {
"class" : "uk.gov.gchq.koryphe.impl.predicate.IsMoreThan",
"orEqualTo" : false,
"value" : 0
}
}
''',
g.func.MapFilter(
key_predicate=g.pred.StringContains(
value="someValue",
ignore_case=False
),
value_predicate=g.pred.IsMoreThan(
value=0,
or_equal_to=False
)
)
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.MapFilter",
"keyValuePredicate": {
"class": "uk.gov.gchq.koryphe.impl.predicate.AreEqual"
}
}
''',
g.func.MapFilter(
key_value_predicate=g.pred.AreEqual()
)
],
[
'''
{
"class" : "uk.gov.gchq.koryphe.impl.function.CreateObject",
"objectClass" : "java.lang.Long"
}
''',
g.func.CreateObject(
object_class="java.lang.Long"
)
],
[
'''
{
"class": "uk.gov.gchq.gaffer.time.function.MaskTimestampSetByTimeRange",
"startTime": {
"java.lang.Long": 15300000000000
},
"endTime": {
"java.lang.Long": 15400000000000
}
}
''',
g.func.MaskTimestampSetByTimeRange(
start_time=g.long(15300000000000),
end_time=g.long(15400000000000)
)
],
[
'''
{
"class": "uk.gov.gchq.gaffer.time.function.MaskTimestampSetByTimeRange",
"startTime": {
"java.lang.Long": 15300000000000
},
"endTime": {
"java.lang.Long": 15400000000000
},
"timeUnit": "SECOND"
}
''',
g.func.MaskTimestampSetByTimeRange(
start_time=g.long(15300000000000),
end_time=g.long(15400000000000),
time_unit=g.TimeUnit.SECOND
)
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.Base64Decode"
}
''',
g.func.Base64Decode()
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.CsvLinesToMaps",
"delimiter": "|",
"header": ["my", "csv", "file"],
"firstRow": 1,
"quoted": true,
"quoteChar": "'"
}
''',
g.func.CsvLinesToMaps(delimiter='|', header=["my", "csv", "file"], first_row=1, quoted=True,
quote_char='\'')
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.CsvToMaps",
"delimiter": "|",
"header": ["my", "csv", "file"],
"firstRow": 1,
"quoted": true,
"quoteChar": "'"
}
''',
g.func.CsvToMaps(delimiter='|', header=["my", "csv", "file"], first_row=1, quoted=True, quote_char='\'')
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.CurrentDate"
}
''',
g.func.CurrentDate()
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.CurrentTime"
}
''',
g.func.CurrentTime()
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.DeserialiseJson",
"outputClass": "uk.gov.gchq.gaffer.data.element.Edge"
}
''',
g.func.DeserialiseJson(output_class=g.Edge.CLASS)
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.DeserialiseJson"
}
''',
g.func.DeserialiseJson()
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.DeserialiseXml"
}
''',
g.func.DeserialiseXml()
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.Gunzip"
}
''',
g.func.Gunzip()
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.Increment",
"increment": {
"java.lang.Long": 1000000
}
}
''',
g.Increment(increment=g.long(1000000))
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.MapToTuple"
}
''',
g.func.MapToTuple()
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.ParseDate",
"timeZone": "BST",
"format": "DD-MM-YYYY"
}
''',
g.func.ParseDate(time_zone="BST", format="DD-MM-YYYY")
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.ParseTime",
"timeZone": "EST",
"format": "MM-DD-YYYY HH:mm:ss.SSS",
"timeUnit": "MICROSECOND"
}
''',
g.func.ParseTime(time_zone="EST", format="MM-DD-YYYY HH:mm:ss.SSS", time_unit=g.TimeUnit.MICROSECOND)
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.ToDateString",
"format": "YYYY-MMM-dd"
}
''',
g.func.ToDateString(format="YYYY-MMM-dd")
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.ToBytes",
"charset": "UTF-8"
}
''',
g.func.ToBytes(charset="UTF-8")
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.ApplyBiFunction",
"function": {
"class": "uk.gov.gchq.koryphe.impl.binaryoperator.Sum"
}
}
''',
g.func.ApplyBiFunction(function=g.gaffer_binaryoperators.Sum())
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.ApplyBiFunction",
"function": {
"class": "uk.gov.gchq.koryphe.impl.binaryoperator.Product"
}
}
''',
g.func.ApplyBiFunction(function={
"class": "uk.gov.gchq.koryphe.impl.binaryoperator.Product"
})
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.ToTuple"
}
''',
g.func.ToTuple()
],
[
'''
{
"class": "uk.gov.gchq.gaffer.data.element.function.ToPropertiesTuple"
}
''',
g.func.ToPropertiesTuple()
],
[
'''
{
"class": "uk.gov.gchq.gaffer.data.element.function.ToElementTuple"
}
''',
g.func.ToElementTuple()
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.FunctionChain",
"functions": [
{
"class": "uk.gov.gchq.koryphe.impl.function.Base64Decode"
},
{
"class": "uk.gov.gchq.koryphe.impl.function.CsvLinesToMaps",
"delimiter": "|",
"quoted": true
}
]
}
''',
g.FunctionChain(functions=[
g.Base64Decode(),
g.CsvLinesToMaps(delimiter="|", quoted=True)
])
],
[
'''
{
"class":"uk.gov.gchq.koryphe.tuple.function.TupleAdaptedFunction",
"selection":[0],
"function": {
"class": "uk.gov.gchq.gaffer.operation.function.ToEntityId"
},
"projection": [1]
}
''',
g.TupleAdaptedFunction(selection=[0], function=g.ToEntityId(), projection=[1])
],
[
'''
{
"class":"uk.gov.gchq.koryphe.impl.function.FunctionChain",
"functions": [
{
"class":"uk.gov.gchq.koryphe.tuple.function.TupleAdaptedFunction",
"selection":[0],
"function": {
"class": "uk.gov.gchq.koryphe.impl.function.ToUpperCase"
},
"projection": [1]
},
{
"class":"uk.gov.gchq.koryphe.tuple.function.TupleAdaptedFunction",
"selection": [1],
"function": {
"class": "uk.gov.gchq.koryphe.impl.function.ToSet"
},
"projection": [2]
}
]
}
''',
g.FunctionChain(functions=[
g.TupleAdaptedFunction(selection=[0], function=g.ToUpperCase(), projection=[1]),
g.TupleAdaptedFunction(selection=[1], function=g.gaffer_functions.ToSet(), projection=[2])
])
],
[
'''
{
"class": "uk.gov.gchq.koryphe.tuple.function.TupleAdaptedFunctionComposite",
"functions": [
{
"selection": [ "something" ],
"function": {
"class":"uk.gov.gchq.koryphe.impl.function.ToUpperCase"
},
"projection": [1]
}
]
}
''',
g.TupleAdaptedFunctionComposite(
functions=[g.FunctionContext(selection=["something"],
function=g.ToUpperCase(),
projection=[1]
)
]
),
],
[
'''
{
"class": "uk.gov.gchq.koryphe.impl.function.FunctionChain",
"functions": [
{
"class": "uk.gov.gchq.koryphe.tuple.function.TupleAdaptedFunctionComposite",
"functions": [
{
"selection": [0],
"function": {
"class":"uk.gov.gchq.koryphe.impl.function.ToUpperCase"
},
"projection": [1]
}
]
},
{
"class": "uk.gov.gchq.koryphe.tuple.function.TupleAdaptedFunctionComposite",
"functions": [
{
"selection": [1],
"function": {
"class":"uk.gov.gchq.koryphe.impl.function.ToSet"
},
"projection": [2]
}
]
}
]
}
''',
g.FunctionChain(functions=[
g.TupleAdaptedFunctionComposite(
functions=[g.FunctionContext(selection=[0], function=g.ToUpperCase(), projection=[1])]),
g.TupleAdaptedFunctionComposite(
functions=[g.FunctionContext(selection=[1], function=g.gaffer_functions.ToSet(), projection=[2])])
])
]
]
def test_examples(self):
for example in self.examples:
self.assertEqual(
json.loads(example[0]),
example[1].to_json(),
"json failed: \n" + example[0] + "\n"
+ g.JsonConverter.from_json(example[0]).to_code_string()
)
g.JsonConverter.from_json(example[0], validate=True)
if __name__ == "__main__":
unittest.main()
| 28.637746 | 118 | 0.361089 |
1e9317ac898de155060ef6acb321b2dd6808c57a | 2,477 | py | Python | consult/factory.py | guillaumepiot/cotidia-demo | 497177fa63942ee22288e93ed7d4867854110dd0 | [
"BSD-3-Clause"
] | null | null | null | consult/factory.py | guillaumepiot/cotidia-demo | 497177fa63942ee22288e93ed7d4867854110dd0 | [
"BSD-3-Clause"
] | 7 | 2020-02-11T23:47:40.000Z | 2022-03-11T23:42:02.000Z | consult/factory.py | guillaumepiot/cotidia-demo | 497177fa63942ee22288e93ed7d4867854110dd0 | [
"BSD-3-Clause"
] | null | null | null | import pytz
import factory
import factory.fuzzy
from faker import Faker
from faker.providers import profile, address, geo, python, date_time
from django.utils import timezone
from cotidia.team.models import Member
from consult.models import Customer, ServiceType
# Single GB-locale Faker instance shared by every factory in this module.
fake = Faker("en_GB")
# Register the provider groups the factories below draw values from.
fake.add_provider(profile)
fake.add_provider(address)
fake.add_provider(geo)
fake.add_provider(python)
fake.add_provider(date_time)
class CustomerFactory(factory.django.DjangoModelFactory):
    """Build ``consult.Customer`` rows filled with GB-locale fake data."""

    class Meta:
        model = "consult.customer"

    # Pick one of the model's declared title codes at random.
    title = factory.fuzzy.FuzzyChoice([c[0] for c in Customer.TITLE_CHOICES])
    first_name = factory.Faker("first_name", locale="en_GB")
    last_name = factory.Faker("last_name", locale="en_GB")
    email = factory.Faker("email", locale="en_GB")
    # Faker's profile() provider exposes a date of birth under "birthdate".
    dob = factory.LazyFunction(lambda: fake.profile()["birthdate"])
    address_line_1 = factory.LazyFunction(lambda: fake.street_address())
    address_city = factory.LazyFunction(lambda: fake.city())
    address_postcode = factory.LazyFunction(lambda: fake.postcode())
    address_country = "GB"

    @factory.post_generation
    def post(obj, create, extracted, **kwargs):
        # local_latlng() returns a tuple whose first two entries are the
        # latitude and longitude as strings; convert to floats and persist.
        location = fake.local_latlng(country_code="GB")
        obj.lat = float(location[0])
        obj.lng = float(location[1])
        obj.save()
class ServiceTypeFactory(factory.django.DjangoModelFactory):
    """Build ``consult.ServiceType`` rows named with a random title-cased word."""

    class Meta:
        model = "consult.servicetype"

    # NOTE(review): unique=True makes Faker refuse to repeat a word, so it
    # can run out if many instances are created in one process -- confirm
    # this is acceptable for the intended fixture sizes.
    name = factory.LazyFunction(
        lambda: fake.words(nb=1, ext_word_list=None, unique=True)[0].title()
    )
class BookingFactory(factory.django.DjangoModelFactory):
    """Build ``consult.Booking`` rows linked to random existing related rows."""

    class Meta:
        model = "consult.booking"

    # Placeholder value: overwritten in post() with a random datetime
    # spread across +/-150 days.
    datetime = factory.LazyFunction(lambda: timezone.now())
    # NOTE(review): costs are strings -- presumably the model field is a
    # CharField or has string choices; confirm against consult.models.
    cost = factory.fuzzy.FuzzyChoice(["20", "50", "80", "150"])
    # Random existing ServiceType row (None when the table is empty).
    service_type = factory.LazyFunction(
        lambda: ServiceType.objects.all().order_by("?").first()
    )
    first_visit = factory.LazyFunction(lambda: fake.pybool())
    notes = factory.LazyFunction(
        lambda: fake.text(max_nb_chars=200, ext_word_list=None)
    )
    # Random existing Member/Customer rows (None when the tables are empty).
    member = factory.LazyFunction(lambda: Member.objects.all().order_by("?").first())
    customer = factory.LazyFunction(
        lambda: Customer.objects.all().order_by("?").first()
    )

    @factory.post_generation
    def post(obj, create, extracted, **kwargs):
        # Replace the creation-time datetime with one between 150 days ago
        # and 150 days ahead (GMT), then persist the change.
        obj.datetime = fake.date_time_between(
            start_date="-150d", end_date="+150d", tzinfo=pytz.timezone("GMT")
        )
        obj.save()
| 31.35443 | 85 | 0.697214 |
5fd4f7c15a73051641acf21c82648794473f0249 | 57,771 | py | Python | mne/dipole.py | rylaw/mne-python | aa526c8ed7049046734ca28493d99e841672b0eb | [
"BSD-3-Clause"
] | 1 | 2022-01-04T21:37:36.000Z | 2022-01-04T21:37:36.000Z | mne/dipole.py | rylaw/mne-python | aa526c8ed7049046734ca28493d99e841672b0eb | [
"BSD-3-Clause"
] | null | null | null | mne/dipole.py | rylaw/mne-python | aa526c8ed7049046734ca28493d99e841672b0eb | [
"BSD-3-Clause"
] | 1 | 2021-04-01T15:56:39.000Z | 2021-04-01T15:56:39.000Z | # -*- coding: utf-8 -*-
"""Single-dipole functions and classes."""
# Authors: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Eric Larson <larson.eric.d@gmail.com>
#
# License: Simplified BSD
from copy import deepcopy
import functools
from functools import partial
import re
import numpy as np
from .cov import read_cov, compute_whitener
from .io.constants import FIFF
from .io.pick import pick_types
from .io.proj import make_projector, _needs_eeg_average_ref_proj
from .bem import _fit_sphere
from .evoked import _read_evoked, _aspect_rev, _write_evokeds
from .transforms import _print_coord_trans, _coord_frame_name, apply_trans
from .viz.evoked import _plot_evoked
from .forward._make_forward import (_get_trans, _setup_bem,
_prep_meg_channels, _prep_eeg_channels)
from .forward._compute_forward import (_compute_forwards_meeg,
_prep_field_computation)
from .surface import (transform_surface_to, _compute_nearest,
_points_outside_surface)
from .bem import _bem_find_surface, _bem_surf_name
from .source_space import _make_volume_source_space, SourceSpaces, head_to_mni
from .parallel import parallel_func
from .utils import (logger, verbose, _time_mask, warn, _check_fname,
check_fname, _pl, fill_doc, _check_option, ShiftTimeMixin,
_svd_lwork, _repeated_svd, _get_blas_funcs)
@fill_doc
class Dipole(object):
    u"""Dipole class for sequential dipole fits.
    .. note:: This class should usually not be instantiated directly,
              instead :func:`mne.read_dipole` should be used.
    Used to store positions, orientations, amplitudes, times, goodness of fit
    of dipoles, typically obtained with Neuromag/xfit, mne_dipole_fit
    or certain inverse solvers. Note that dipole position vectors are given in
    the head coordinate frame.
    Parameters
    ----------
    times : array, shape (n_dipoles,)
        The time instants at which each dipole was fitted (sec).
    pos : array, shape (n_dipoles, 3)
        The dipoles positions (m) in head coordinates.
    amplitude : array, shape (n_dipoles,)
        The amplitude of the dipoles (Am).
    ori : array, shape (n_dipoles, 3)
        The dipole orientations (normalized to unit length).
    gof : array, shape (n_dipoles,)
        The goodness of fit.
    name : str | None
        Name of the dipole.
    conf : dict
        Confidence limits in dipole orientation for "vol" in m^3 (volume),
        "depth" in m (along the depth axis), "long" in m (longitudinal axis),
        "trans" in m (transverse axis), "qlong" in Am, and "qtrans" in Am
        (currents). The current confidence limit in the depth direction is
        assumed to be zero (although it can be non-zero when a BEM is used).
        .. versionadded:: 0.15
    khi2 : array, shape (n_dipoles,)
        The χ^2 values for the fits.
        .. versionadded:: 0.15
    nfree : array, shape (n_dipoles,)
        The number of free parameters for each fit.
        .. versionadded:: 0.15
    %(verbose)s
    See Also
    --------
    fit_dipole
    DipoleFixed
    read_dipole
    Notes
    -----
    This class is for sequential dipole fits, where the position
    changes as a function of time. For fixed dipole fits, where the
    position is fixed as a function of time, use :class:`mne.DipoleFixed`.
    """
    @verbose
    def __init__(self, times, pos, amplitude, ori, gof,
                 name=None, conf=None, khi2=None, nfree=None,
                 verbose=None): # noqa: D102
        self.times = np.array(times)
        self.pos = np.array(pos)
        self.amplitude = np.array(amplitude)
        self.ori = np.array(ori)
        self.gof = np.array(gof)
        self.name = name
        # Copy conf key-by-key into fresh arrays so the caller's dict/arrays
        # are never aliased by this instance.
        self.conf = dict()
        if conf is not None:
            for key, value in conf.items():
                self.conf[key] = np.array(value)
        self.khi2 = np.array(khi2) if khi2 is not None else None
        self.nfree = np.array(nfree) if nfree is not None else None
        self.verbose = verbose
    def __repr__(self): # noqa: D105
        s = "n_times : %s" % len(self.times)
        s += ", tmin : %0.3f" % np.min(self.times)
        s += ", tmax : %0.3f" % np.max(self.times)
        return "<Dipole | %s>" % s
    @verbose
    def save(self, fname, overwrite=False, *, verbose=None):
        """Save dipole in a .dip or .bdip file.
        Parameters
        ----------
        fname : str
            The name of the .dip or .bdip file.
        %(overwrite)s
            .. versionadded:: 0.20
        %(verbose_meth)s
        Notes
        -----
        .. versionchanged:: 0.20
           Support for writing bdip (Xfit binary) files.
        """
        # obligatory fields
        fname = _check_fname(fname, overwrite=overwrite)
        # Format is chosen purely by extension: .bdip -> binary, else text.
        if fname.endswith('.bdip'):
            _write_dipole_bdip(fname, self)
        else:
            _write_dipole_text(fname, self)
    @fill_doc
    def crop(self, tmin=None, tmax=None, include_tmax=True):
        """Crop data to a given time interval.
        Parameters
        ----------
        tmin : float | None
            Start time of selection in seconds.
        tmax : float | None
            End time of selection in seconds.
        %(include_tmax)s
        Returns
        -------
        self : instance of Dipole
            The cropped instance.
        """
        # Estimate sfreq from the median sample spacing (irregular sampling
        # is possible here) so _time_mask can snap the endpoints.
        sfreq = None
        if len(self.times) > 1:
            sfreq = 1. / np.median(np.diff(self.times))
        mask = _time_mask(self.times, tmin, tmax, sfreq=sfreq,
                          include_tmax=include_tmax)
        for attr in ('times', 'pos', 'gof', 'amplitude', 'ori',
                     'khi2', 'nfree'):
            if getattr(self, attr) is not None:
                setattr(self, attr, getattr(self, attr)[mask])
        for key in self.conf.keys():
            self.conf[key] = self.conf[key][mask]
        return self
    def copy(self):
        """Copy the Dipoles object.
        Returns
        -------
        dip : instance of Dipole
            The copied dipole instance.
        """
        return deepcopy(self)
    @verbose
    def plot_locations(self, trans, subject, subjects_dir=None,
                       mode='orthoview', coord_frame='mri', idx='gof',
                       show_all=True, ax=None, block=False, show=True,
                       scale=5e-3, color=(1.0, 0.0, 0.0), fig=None,
                       verbose=None, title=None):
        """Plot dipole locations in 3d.
        Parameters
        ----------
        trans : dict
            The mri to head trans.
        subject : str
            The subject name corresponding to FreeSurfer environment
            variable SUBJECT.
        %(subjects_dir)s
        mode : str
            Can be ``'arrow'``, ``'sphere'`` or ``'orthoview'``.
            .. versionadded:: 0.14.0
        coord_frame : str
            Coordinate frame to use, 'head' or 'mri'. Defaults to 'mri'.
            .. versionadded:: 0.14.0
        idx : int | 'gof' | 'amplitude'
            Index of the initially plotted dipole. Can also be 'gof' to plot
            the dipole with highest goodness of fit value or 'amplitude' to
            plot the dipole with the highest amplitude. The dipoles can also be
            browsed through using up/down arrow keys or mouse scroll. Defaults
            to 'gof'. Only used if mode equals 'orthoview'.
            .. versionadded:: 0.14.0
        show_all : bool
            Whether to always plot all the dipoles. If True (default), the
            active dipole is plotted as a red dot and it's location determines
            the shown MRI slices. The the non-active dipoles are plotted as
            small blue dots. If False, only the active dipole is plotted.
            Only used if mode equals 'orthoview'.
            .. versionadded:: 0.14.0
        ax : instance of matplotlib Axes3D | None
            Axes to plot into. If None (default), axes will be created.
            Only used if mode equals 'orthoview'.
            .. versionadded:: 0.14.0
        block : bool
            Whether to halt program execution until the figure is closed.
            Defaults to False. Only used if mode equals 'orthoview'.
            .. versionadded:: 0.14.0
        show : bool
            Show figure if True. Defaults to True.
            Only used if mode equals 'orthoview'.
        scale : float
            The scale of the dipoles if ``mode`` is 'arrow' or 'sphere'.
        color : tuple
            The color of the dipoles if ``mode`` is 'arrow' or 'sphere'.
        fig : mayavi.mlab.Figure | None
            Mayavi Scene in which to plot the alignment.
            If ``None``, creates a new 600x600 pixel figure with black
            background.
            .. versionadded:: 0.14.0
        %(verbose_meth)s
        %(dipole_locs_fig_title)s
            .. versionadded:: 0.21.0
        Returns
        -------
        fig : instance of mayavi.mlab.Figure or matplotlib.figure.Figure
            The mayavi figure or matplotlib Figure.
        Notes
        -----
        .. versionadded:: 0.9.0
        """
        _check_option('mode', mode, [None, 'arrow', 'sphere', 'orthoview'])
        # Imported here to avoid a circular import with mne.viz.
        from .viz import plot_dipole_locations
        return plot_dipole_locations(
            self, trans, subject, subjects_dir, mode, coord_frame, idx,
            show_all, ax, block, show, scale=scale, color=color, fig=fig,
            title=title)
    @verbose
    def to_mni(self, subject, trans, subjects_dir=None,
               verbose=None):
        """Convert dipole location from head coordinate system to MNI coordinates.
        Parameters
        ----------
        %(subject)s
        %(trans_not_none)s
        %(subjects_dir)s
        %(verbose)s
        Returns
        -------
        pos_mni : array, shape (n_pos, 3)
            The MNI coordinates (in mm) of pos.
        """
        mri_head_t, trans = _get_trans(trans)
        return head_to_mni(self.pos, subject, mri_head_t,
                           subjects_dir=subjects_dir, verbose=verbose)
    def plot_amplitudes(self, color='k', show=True):
        """Plot the dipole amplitudes as a function of time.
        Parameters
        ----------
        color : matplotlib color
            Color to use for the trace.
        show : bool
            Show figure if True.
        Returns
        -------
        fig : matplotlib.figure.Figure
            The figure object containing the plot.
        """
        from .viz import plot_dipole_amplitudes
        return plot_dipole_amplitudes([self], [color], show)
    def __getitem__(self, item):
        """Get a time slice.
        Parameters
        ----------
        item : array-like or slice
            The slice of time points to use.
        Returns
        -------
        dip : instance of Dipole
            The sliced dipole.
        """
        if isinstance(item, int): # make sure attributes stay 2d
            item = [item]
        selected_times = self.times[item].copy()
        selected_pos = self.pos[item, :].copy()
        selected_amplitude = self.amplitude[item].copy()
        selected_ori = self.ori[item, :].copy()
        selected_gof = self.gof[item].copy()
        selected_name = self.name
        selected_conf = dict()
        for key in self.conf.keys():
            selected_conf[key] = self.conf[key][item]
        selected_khi2 = self.khi2[item] if self.khi2 is not None else None
        selected_nfree = self.nfree[item] if self.nfree is not None else None
        return Dipole(
            selected_times, selected_pos, selected_amplitude, selected_ori,
            selected_gof, selected_name, selected_conf, selected_khi2,
            selected_nfree)
    def __len__(self):
        """Return the number of dipoles.
        Returns
        -------
        len : int
            The number of dipoles.
        Examples
        --------
        This can be used as::
            >>> len(dipoles) # doctest: +SKIP
            10
        """
        return self.pos.shape[0]
def _read_dipole_fixed(fname):
    """Read a fixed dipole FIF file and wrap it in a DipoleFixed."""
    logger.info('Reading %s ...' % fname)
    # _read_evoked returns a 7-tuple; the trailing element is unused here.
    contents = _read_evoked(fname)
    info, nave, aspect_kind, comment, times, data = contents[:6]
    return DipoleFixed(info, data, times, nave, aspect_kind, comment=comment)
@fill_doc
class DipoleFixed(ShiftTimeMixin):
    """Dipole class for fixed-position dipole fits.
    .. note:: This class should usually not be instantiated directly,
              instead :func:`mne.read_dipole` should be used.
    Parameters
    ----------
    info : instance of Info
        The measurement info.
    data : array, shape (n_channels, n_times)
        The dipole data.
    times : array, shape (n_times,)
        The time points.
    nave : int
        Number of averages.
    aspect_kind : int
        The kind of data.
    comment : str
        The dipole comment.
    %(verbose)s
    See Also
    --------
    read_dipole
    Dipole
    fit_dipole
    Notes
    -----
    This class is for fixed-position dipole fits, where the position
    (and maybe orientation) is static over time. For sequential dipole fits,
    where the position can change a function of time, use :class:`mne.Dipole`.
    .. versionadded:: 0.12
    """
    @verbose
    def __init__(self, info, data, times, nave, aspect_kind,
                 comment='', verbose=None): # noqa: D102
        self.info = info
        self.nave = nave
        self._aspect_kind = aspect_kind
        # Map the FIF aspect constant to a human-readable kind string.
        self.kind = _aspect_rev.get(aspect_kind, 'unknown')
        self.comment = comment
        self.times = times
        self.data = data
        self.verbose = verbose
        # Data is always in memory; attribute kept for API parity with
        # other preloadable MNE containers.
        self.preload = True
        self._update_first_last()
    def __repr__(self): # noqa: D105
        s = "n_times : %s" % len(self.times)
        s += ", tmin : %s" % np.min(self.times)
        s += ", tmax : %s" % np.max(self.times)
        return "<DipoleFixed | %s>" % s
    def copy(self):
        """Copy the DipoleFixed object.
        Returns
        -------
        inst : instance of DipoleFixed
            The copy.
        Notes
        -----
        .. versionadded:: 0.16
        """
        return deepcopy(self)
    @property
    def ch_names(self):
        """Channel names."""
        return self.info['ch_names']
    @verbose
    def save(self, fname, verbose=None):
        """Save dipole in a .fif file.
        Parameters
        ----------
        fname : str
            The name of the .fif file. Must end with ``'.fif'`` or
            ``'.fif.gz'`` to make it explicit that the file contains
            dipole information in FIF format.
        %(verbose_meth)s
        """
        check_fname(fname, 'DipoleFixed', ('-dip.fif', '-dip.fif.gz',
                                           '_dip.fif', '_dip.fif.gz',),
                    ('.fif', '.fif.gz'))
        # Serialized via the Evoked writer; check=False skips Evoked-specific
        # filename checks already done above.
        _write_evokeds(fname, self, check=False)
    def plot(self, show=True, time_unit='s'):
        """Plot dipole data.
        Parameters
        ----------
        show : bool
            Call pyplot.show() at the end or not.
        time_unit : str
            The units for the time axis, can be "ms" or "s" (default).
            .. versionadded:: 0.16
        Returns
        -------
        fig : instance of matplotlib.figure.Figure
            The figure containing the time courses.
        """
        return _plot_evoked(self, picks=None, exclude=(), unit=True, show=show,
                            ylim=None, xlim='tight', proj=False, hline=None,
                            units=None, scalings=None, titles=None, axes=None,
                            gfp=False, window_title=None, spatial_colors=False,
                            plot_type="butterfly", selectable=False,
                            time_unit=time_unit)
# #############################################################################
# IO
@verbose
def read_dipole(fname, verbose=None):
    """Read .dip file from Neuromag/xfit or MNE.
    Parameters
    ----------
    fname : str
        The name of the .dip or .fif file.
    %(verbose)s
    Returns
    -------
    dipole : instance of Dipole or DipoleFixed
        The dipole.
    See Also
    --------
    Dipole
    DipoleFixed
    fit_dipole
    Notes
    -----
    .. versionchanged:: 0.20
       Support for reading bdip (Xfit binary) format.
    """
    fname = _check_fname(fname, overwrite='read', must_exist=True)
    # Dispatch on extension: FIF -> fixed-position reader, bdip -> Xfit
    # binary reader, anything else -> text (.dip) reader.
    if fname.endswith(('.fif', '.fif.gz')):
        return _read_dipole_fixed(fname)
    if fname.endswith('.bdip'):
        return _read_dipole_bdip(fname)
    return _read_dipole_text(fname)
def _read_dipole_text(fname):
    """Read a dipole text file.
    Parses the ``%``/``#`` comment header to discover the column layout,
    normalizes MNE- and Elekta-style field names to a common form, then
    loads the numeric rows and scales them to SI units (s, m, Am).
    """
    # Figure out the special fields
    need_header = True
    def_line = name = None
    # There is a bug in older np.loadtxt regarding skipping fields,
    # so just read the data ourselves (need to get name and header anyway)
    data = list()
    with open(fname, 'r') as fid:
        for line in fid:
            if not (line.startswith('%') or line.startswith('#')):
                need_header = False
                data.append(line.strip().split())
            else:
                if need_header:
                    def_line = line
                if line.startswith('##') or line.startswith('%%'):
                    m = re.search('Name "(.*) dipoles"', line)
                    if m:
                        name = m.group(1)
        del line
    data = np.atleast_2d(np.array(data, float))
    if def_line is None:
        raise IOError('Dipole text file is missing field definition '
                      'comment, cannot parse %s' % (fname,))
    # actually parse the fields
    def_line = def_line.lstrip('%').lstrip('#').strip()
    # MNE writes it out differently than Elekta, let's standardize them...
    fields = re.sub(r'([X|Y|Z] )\(mm\)', # "X (mm)", etc.
                    lambda match: match.group(1).strip() + '/mm', def_line)
    fields = re.sub(r'\((.*?)\)', # "Q(nAm)", etc.
                    lambda match: '/' + match.group(1), fields)
    fields = re.sub('(begin|end) ', # "begin" and "end" with no units
                    lambda match: match.group(1) + '/ms', fields)
    fields = fields.lower().split()
    required_fields = ('begin/ms',
                       'x/mm', 'y/mm', 'z/mm',
                       'q/nam', 'qx/nam', 'qy/nam', 'qz/nam',
                       'g/%')
    optional_fields = ('khi^2', 'free', # standard ones
                       # now the confidence fields (up to 5!)
                       'vol/mm^3', 'depth/mm', 'long/mm', 'trans/mm',
                       'qlong/nam', 'qtrans/nam')
    # Scale factors aligned with optional_fields[2:]: volumes/currents to
    # SI (nAm -> Am, mm -> m).
    conf_scales = [1e-9, 1e-3, 1e-3, 1e-3, 1e-9, 1e-9]
    missing_fields = sorted(set(required_fields) - set(fields))
    if len(missing_fields) > 0:
        raise RuntimeError('Could not find necessary fields in header: %s'
                           % (missing_fields,))
    handled_fields = set(required_fields) | set(optional_fields)
    assert len(handled_fields) == len(required_fields) + len(optional_fields)
    ignored_fields = sorted(set(fields) -
                            set(handled_fields) -
                            {'end/ms'})
    if len(ignored_fields) > 0:
        warn('Ignoring extra fields in dipole file: %s' % (ignored_fields,))
    if len(fields) != data.shape[1]:
        raise IOError('More data fields (%s) found than data columns (%s): %s'
                      % (len(fields), data.shape[1], fields))
    logger.info("%d dipole(s) found" % len(data))
    if 'end/ms' in fields:
        if np.diff(data[:, [fields.index('begin/ms'),
                            fields.index('end/ms')]], 1, -1).any():
            warn('begin and end fields differed, but only begin will be used '
                 'to store time values')
    # Find the correct column in our data array, then scale to proper units
    idx = [fields.index(field) for field in required_fields]
    assert len(idx) >= 9
    times = data[:, idx[0]] / 1000.
    pos = 1e-3 * data[:, idx[1:4]] # put data in meters
    amplitude = data[:, idx[4]]
    # Orientations are stored as moment components; normalize by amplitude
    # (guarding against zero) to get unit vectors.
    norm = amplitude.copy()
    amplitude /= 1e9
    norm[norm == 0] = 1
    ori = data[:, idx[5:8]] / norm[:, np.newaxis]
    gof = data[:, idx[8]]
    # Deal with optional fields
    optional = [None] * 2
    for fi, field in enumerate(optional_fields[:2]):
        if field in fields:
            optional[fi] = data[:, fields.index(field)]
    khi2, nfree = optional
    conf = dict()
    for field, scale in zip(optional_fields[2:], conf_scales): # confidence
        if field in fields:
            conf[field.split('/')[0]] = scale * data[:, fields.index(field)]
    return Dipole(times, pos, amplitude, ori, gof, name, conf, khi2, nfree)
def _write_dipole_text(fname, dip):
    """Write a Dipole to a text (.dip) file.
    Times are written in ms, positions in mm, currents in nAm — the inverse
    of the scalings applied by ``_read_dipole_text``. Optional khi^2/free
    and confidence columns are appended only when present on ``dip``.
    """
    fmt = ' %7.1f %7.1f %8.2f %8.2f %8.2f %8.3f %8.3f %8.3f %8.3f %6.2f'
    header = ('# begin end X (mm) Y (mm) Z (mm)'
              ' Q(nAm) Qx(nAm) Qy(nAm) Qz(nAm) g/%')
    t = dip.times[:, np.newaxis] * 1000.
    gof = dip.gof[:, np.newaxis]
    amp = 1e9 * dip.amplitude[:, np.newaxis]
    # begin and end columns are written identically (single time point).
    out = (t, t, dip.pos / 1e-3, amp, dip.ori * amp, gof)
    # optional fields
    fmts = dict(khi2=(' khi^2', ' %8.1f', 1.),
                nfree=(' free', ' %5d', 1),
                vol=(' vol/mm^3', ' %9.3f', 1e9),
                depth=(' depth/mm', ' %9.3f', 1e3),
                long=(' long/mm', ' %8.3f', 1e3),
                trans=(' trans/mm', ' %9.3f', 1e3),
                qlong=(' Qlong/nAm', ' %10.3f', 1e9),
                qtrans=(' Qtrans/nAm', ' %11.3f', 1e9),
                )
    for key in ('khi2', 'nfree'):
        data = getattr(dip, key)
        if data is not None:
            header += fmts[key][0]
            fmt += fmts[key][1]
            out += (data[:, np.newaxis] * fmts[key][2],)
    for key in ('vol', 'depth', 'long', 'trans', 'qlong', 'qtrans'):
        data = dip.conf.get(key)
        if data is not None:
            header += fmts[key][0]
            fmt += fmts[key][1]
            out += (data[:, np.newaxis] * fmts[key][2],)
    out = np.concatenate(out, axis=-1)
    # NB CoordinateSystem is hard-coded as Head here
    with open(fname, 'wb') as fid:
        fid.write('# CoordinateSystem "Head"\n'.encode('utf-8'))
        fid.write((header + '\n').encode('utf-8'))
        np.savetxt(fid, out, fmt=fmt)
        if dip.name is not None:
            fid.write(('## Name "%s dipoles" Style "Dipoles"'
                       % dip.name).encode('utf-8'))
# Order matters: matches the on-disk layout of the 5 error-limit floats.
_BDIP_ERROR_KEYS = ('depth', 'long', 'trans', 'qlong', 'qtrans')
def _read_dipole_bdip(fname):
    """Read a Dipole from an Xfit binary (.bdip) file.
    All scalars are big-endian; each fixed-size record describes one fitted
    time point. Unused record fields are skipped with bare ``fid.read``.
    """
    name = None
    nfree = None
    with open(fname, 'rb') as fid:
        # Which dipole in a multi-dipole set
        times = list()
        pos = list()
        amplitude = list()
        ori = list()
        gof = list()
        conf = dict(vol=list())
        khi2 = list()
        has_errors = None
        while True:
            # EOF detection: an empty read yields a length-0 array.
            num = np.frombuffer(fid.read(4), '>i4')
            if len(num) == 0:
                break
            times.append(np.frombuffer(fid.read(4), '>f4')[0])
            fid.read(4) # end
            fid.read(12) # r0
            pos.append(np.frombuffer(fid.read(12), '>f4'))
            Q = np.frombuffer(fid.read(12), '>f4')
            amplitude.append(np.linalg.norm(Q))
            ori.append(Q / amplitude[-1])
            gof.append(100 * np.frombuffer(fid.read(4), '>f4')[0])
            this_has_errors = bool(np.frombuffer(fid.read(4), '>i4')[0])
            if has_errors is None:
                # First record decides whether error columns exist; all
                # subsequent records must agree.
                has_errors = this_has_errors
                for key in _BDIP_ERROR_KEYS:
                    conf[key] = list()
            assert has_errors == this_has_errors
            fid.read(4) # Noise level used for error computations
            limits = np.frombuffer(fid.read(20), '>f4') # error limits
            for key, lim in zip(_BDIP_ERROR_KEYS, limits):
                conf[key].append(lim)
            fid.read(100) # (5, 5) fully describes the conf. ellipsoid
            conf['vol'].append(np.frombuffer(fid.read(4), '>f4')[0])
            khi2.append(np.frombuffer(fid.read(4), '>f4')[0])
            fid.read(4) # prob
            fid.read(4) # total noise estimate
    return Dipole(times, pos, amplitude, ori, gof, name, conf, khi2, nfree)
def _write_dipole_bdip(fname, dip):
    """Write a Dipole to an Xfit binary (.bdip) file.
    Mirrors the record layout read by ``_read_dipole_bdip``: one fixed-size
    big-endian record per time point, with zero padding for fields MNE does
    not track (r0, noise level, confidence ellipsoid, prob, noise estimate).
    """
    with open(fname, 'wb+') as fid:
        for ti, t in enumerate(dip.times):
            fid.write(np.zeros(1, '>i4').tobytes()) # int dipole
            # begin/end times; end is not tracked, written as 0.
            fid.write(np.array([t, 0]).astype('>f4').tobytes())
            fid.write(np.zeros(3, '>f4').tobytes()) # r0
            fid.write(dip.pos[ti].astype('>f4').tobytes()) # pos
            Q = dip.amplitude[ti] * dip.ori[ti]
            fid.write(Q.astype('>f4').tobytes())
            fid.write(np.array(dip.gof[ti] / 100., '>f4').tobytes())
            has_errors = int(bool(len(dip.conf)))
            fid.write(np.array(has_errors, '>i4').tobytes()) # has_errors
            fid.write(np.zeros(1, '>f4').tobytes()) # noise level
            for key in _BDIP_ERROR_KEYS:
                # BUG FIX: normalize through np.array first — the fallback
                # 0. is a Python float with no .shape, so the old
                # ``assert val.shape == ()`` raised AttributeError whenever
                # a confidence key was missing.
                val = np.array(dip.conf[key][ti] if key in dip.conf else 0.)
                assert val.shape == ()
                fid.write(val.astype('>f4').tobytes())
            fid.write(np.zeros(25, '>f4').tobytes())
            conf = dip.conf['vol'][ti] if 'vol' in dip.conf else 0.
            fid.write(np.array(conf, '>f4').tobytes())
            khi2 = dip.khi2[ti] if dip.khi2 is not None else 0
            fid.write(np.array(khi2, '>f4').tobytes())
            fid.write(np.zeros(1, '>f4').tobytes()) # prob
            fid.write(np.zeros(1, '>f4').tobytes()) # total noise est
# #############################################################################
# Fitting
def _dipole_forwards(fwd_data, whitener, rr, n_jobs=1):
    """Compute the forward solution and do other nice stuff.
    Returns the whitened gain matrix ``B`` (3 rows per position in ``rr``),
    the unwhitened original ``B_orig``, and per-component ``scales``
    (currently all ones; see the commented-out normalization below).
    """
    B = _compute_forwards_meeg(rr, fwd_data, n_jobs, silent=True)
    B = np.concatenate(B, axis=1)
    assert np.isfinite(B).all()
    B_orig = B.copy()
    # Apply projection and whiten (cov has projections already)
    _, _, dgemm = _get_ddot_dgemv_dgemm()
    B = dgemm(1., B, whitener.T)
    # column normalization doesn't affect our fitting, so skip for now
    # S = np.sum(B * B, axis=1) # across channels
    # scales = np.repeat(3. / np.sqrt(np.sum(np.reshape(S, (len(rr), 3)),
    # axis=1)), 3)
    # B *= scales[:, np.newaxis]
    scales = np.ones(3)
    return B, B_orig, scales
@verbose
def _make_guesses(surf, grid, exclude, mindist, n_jobs=1, verbose=None):
    """Make a guess space inside a sphere or BEM surface.
    ``surf`` is either a BEM surface dict (has 'rr') or a sphere model
    dict (has 'R'); a discrete volume source space on a ``grid``-spaced
    lattice is returned, wrapped in SourceSpaces.
    """
    if 'rr' in surf:
        logger.info('Guess surface (%s) is in %s coordinates'
                    % (_bem_surf_name[surf['id']],
                       _coord_frame_name(surf['coord_frame'])))
    else:
        logger.info('Making a spherical guess space with radius %7.1f mm...'
                    % (1000 * surf['R']))
    logger.info('Filtering (grid = %6.f mm)...' % (1000 * grid))
    # _make_volume_source_space expects mm for exclusion distances.
    src = _make_volume_source_space(surf, grid, exclude, 1000 * mindist,
                                    do_neighbors=False, n_jobs=n_jobs)[0]
    assert 'vertno' in src
    # simplify the result to make things easier later
    src = dict(rr=src['rr'][src['vertno']], nn=src['nn'][src['vertno']],
               nuse=src['nuse'], coord_frame=src['coord_frame'],
               vertno=np.arange(src['nuse']), type='discrete')
    return SourceSpaces([src])
def _fit_eval(rd, B, B2, fwd_svd=None, fwd_data=None, whitener=None,
              lwork=None):
    """Calculate the residual sum of squares (1 - GOF) for a position."""
    if fwd_svd is not None:
        # Precomputed SVD for a fixed guess position.
        uu, sing, vv = fwd_svd
    else:
        # Free position: compute the forward at rd and decompose it.
        fwd = _dipole_forwards(fwd_data, whitener, rd[np.newaxis, :])[0]
        uu, sing, vv = _repeated_svd(fwd, lwork, overwrite_a=True)
    # mne-c uses fitness=B2-Bm2, but ours (1-gof) is just a normalized version
    return 1. - _dipole_gof(uu, sing, vv, B, B2)[0]
@functools.lru_cache(None)
def _get_ddot_dgemv_dgemm():
    """Return (cached) double-precision BLAS dot, gemv and gemm functions."""
    return _get_blas_funcs(np.float64, ('dot', 'gemv', 'gemm'))
def _dipole_gof(uu, sing, vv, B, B2):
    """Calculate the goodness of fit from the forward SVD.
    Returns ``(gof, one)`` where ``one`` is the projection of the data onto
    the leading right-singular vectors of the 3 x n_channels forward.
    """
    ddot, dgemv, _ = _get_ddot_dgemv_dgemm()
    # Use all 3 components only when the forward is well-conditioned
    # (third singular value > 20% of the first); otherwise drop to 2.
    ncomp = 3 if sing[2] / (sing[0] if sing[0] > 0 else 1.) > 0.2 else 2
    one = dgemv(1., vv[:ncomp], B) # np.dot(vv[:ncomp], B)
    Bm2 = ddot(one, one) # np.sum(one * one)
    gof = Bm2 / B2
    return gof, one
def _fit_Q(fwd_data, whitener, B, B2, B_orig, rd, ori=None):
    """Fit the dipole moment once the location is known.
    Returns ``(Q, gof, B_residual_noproj, ncomp)``. When ``ori`` is given
    the moment is constrained to that orientation; otherwise it is solved
    via the SVD of the 3-component forward.
    """
    from scipy import linalg
    if 'fwd' in fwd_data:
        # should be a single precomputed "guess" (i.e., fixed position)
        assert rd is None
        fwd = fwd_data['fwd']
        assert fwd.shape[0] == 3
        fwd_orig = fwd_data['fwd_orig']
        assert fwd_orig.shape[0] == 3
        scales = fwd_data['scales']
        assert scales.shape == (3,)
        fwd_svd = fwd_data['fwd_svd'][0]
    else:
        fwd, fwd_orig, scales = _dipole_forwards(fwd_data, whitener,
                                                 rd[np.newaxis, :])
        fwd_svd = None
    if ori is None:
        if fwd_svd is None:
            fwd_svd = linalg.svd(fwd, full_matrices=False)
        uu, sing, vv = fwd_svd
        gof, one = _dipole_gof(uu, sing, vv, B, B2)
        ncomp = len(one)
        # Pseudo-inverse via the retained singular components.
        one /= sing[:ncomp]
        Q = np.dot(one, uu.T[:ncomp])
    else:
        # Fixed orientation: collapse the forward to a single component.
        fwd = np.dot(ori[np.newaxis], fwd)
        sing = np.linalg.norm(fwd)
        one = np.dot(fwd / sing, B)
        gof = (one * one)[0] / B2
        Q = ori * np.sum(one / sing)
        ncomp = 3
    # Counteract the effect of column normalization
    Q *= scales[0]
    B_residual_noproj = B_orig - np.dot(fwd_orig.T, Q)
    return Q, gof, B_residual_noproj, ncomp
def _fit_dipoles(fun, min_dist_to_inner_skull, data, times, guess_rrs,
                 guess_data, fwd_data, whitener, ori, n_jobs, rank):
    """Fit a single dipole to the given whitened, projected data.
    ``fun`` is one of ``_fit_dipole``/``_fit_dipole_fixed`` and must return
    an 8-tuple ``(pos, amp, ori, gof, conf, khi2, nfree, residual_noproj)``
    per time point — this function indexes results positionally (0-7).
    """
    from scipy.optimize import fmin_cobyla
    parallel, p_fun, _ = parallel_func(fun, n_jobs)
    # parallel over time points
    res = parallel(p_fun(min_dist_to_inner_skull, B, t, guess_rrs,
                         guess_data, fwd_data, whitener,
                         fmin_cobyla, ori, rank)
                   for B, t in zip(data.T, times))
    pos = np.array([r[0] for r in res])
    amp = np.array([r[1] for r in res])
    ori = np.array([r[2] for r in res])
    gof = np.array([r[3] for r in res]) * 100 # convert to percentage
    conf = None
    # conf is either None for every result or a length-6 vector per result;
    # the first result decides which.
    if res[0][4] is not None:
        conf = np.array([r[4] for r in res])
        keys = ['vol', 'depth', 'long', 'trans', 'qlong', 'qtrans']
        conf = {key: conf[:, ki] for ki, key in enumerate(keys)}
    khi2 = np.array([r[5] for r in res])
    nfree = np.array([r[6] for r in res])
    residual_noproj = np.array([r[7] for r in res]).T
    return pos, amp, ori, gof, conf, khi2, nfree, residual_noproj
'''Simplex code in case we ever want/need it for testing
def _make_tetra_simplex():
"""Make the initial tetrahedron"""
#
# For this definition of a regular tetrahedron, see
#
# http://mathworld.wolfram.com/Tetrahedron.html
#
x = np.sqrt(3.0) / 3.0
r = np.sqrt(6.0) / 12.0
R = 3 * r
d = x / 2.0
simplex = 1e-2 * np.array([[x, 0.0, -r],
[-d, 0.5, -r],
[-d, -0.5, -r],
[0., 0., R]])
return simplex
def try_(p, y, psum, ndim, fun, ihi, neval, fac):
"""Helper to try a value"""
ptry = np.empty(ndim)
fac1 = (1.0 - fac) / ndim
fac2 = fac1 - fac
ptry = psum * fac1 - p[ihi] * fac2
ytry = fun(ptry)
neval += 1
if ytry < y[ihi]:
y[ihi] = ytry
psum[:] += ptry - p[ihi]
p[ihi] = ptry
return ytry, neval
def _simplex_minimize(p, ftol, stol, fun, max_eval=1000):
"""Minimization with the simplex algorithm
Modified from Numerical recipes"""
y = np.array([fun(s) for s in p])
ndim = p.shape[1]
assert p.shape[0] == ndim + 1
mpts = ndim + 1
neval = 0
psum = p.sum(axis=0)
loop = 1
while(True):
ilo = 1
if y[1] > y[2]:
ihi = 1
inhi = 2
else:
ihi = 2
inhi = 1
for i in range(mpts):
if y[i] < y[ilo]:
ilo = i
if y[i] > y[ihi]:
inhi = ihi
ihi = i
elif y[i] > y[inhi]:
if i != ihi:
inhi = i
rtol = 2 * np.abs(y[ihi] - y[ilo]) / (np.abs(y[ihi]) + np.abs(y[ilo]))
if rtol < ftol:
break
if neval >= max_eval:
raise RuntimeError('Maximum number of evaluations exceeded.')
if stol > 0: # Has the simplex collapsed?
dsum = np.sqrt(np.sum((p[ilo] - p[ihi]) ** 2))
if loop > 5 and dsum < stol:
break
ytry, neval = try_(p, y, psum, ndim, fun, ihi, neval, -1.)
if ytry <= y[ilo]:
ytry, neval = try_(p, y, psum, ndim, fun, ihi, neval, 2.)
elif ytry >= y[inhi]:
ysave = y[ihi]
ytry, neval = try_(p, y, psum, ndim, fun, ihi, neval, 0.5)
if ytry >= ysave:
for i in range(mpts):
if i != ilo:
psum[:] = 0.5 * (p[i] + p[ilo])
p[i] = psum
y[i] = fun(psum)
neval += ndim
psum = p.sum(axis=0)
loop += 1
'''
def _fit_confidence(rd, Q, ori, whitener, fwd_data):
    # As described in the Xfit manual, confidence intervals can be calculated
    # by examining a linearization of model at the best-fitting location,
    # i.e. taking the Jacobian and using the whitener:
    #
    # J = [∂b/∂x ∂b/∂y ∂b/∂z ∂b/∂Qx ∂b/∂Qy ∂b/∂Qz]
    # C = (J.T C^-1 J)^-1
    #
    # And then the confidence interval is the diagonal of C, scaled by 1.96
    # (for 95% confidence).
    from scipy import linalg
    direction = np.empty((3, 3))
    # The coordinate system has the x axis aligned with the dipole orientation,
    direction[0] = ori
    # the z axis through the origin of the sphere model
    rvec = rd - fwd_data['inner_skull']['r0']
    direction[2] = rvec - ori * np.dot(ori, rvec) # orthogonalize
    direction[2] /= np.linalg.norm(direction[2])
    # and the y axis perpendicular with these forming a right-handed system.
    direction[1] = np.cross(direction[2], direction[0])
    assert np.allclose(np.dot(direction, direction.T), np.eye(3))
    # Get spatial deltas in dipole coordinate directions
    deltas = (-1e-4, 1e-4)
    J = np.empty((whitener.shape[0], 6))
    # Columns 0-2: central finite differences of the field w.r.t. position
    # along each dipole-frame axis.
    for ii in range(3):
        fwds = []
        for delta in deltas:
            this_r = rd[np.newaxis] + delta * direction[ii]
            fwds.append(
                np.dot(Q, _dipole_forwards(fwd_data, whitener, this_r)[0]))
        J[:, ii] = np.diff(fwds, axis=0)[0] / np.diff(deltas)[0]
    # Get current (Q) deltas in the dipole directions
    deltas = np.array([-0.01, 0.01]) * np.linalg.norm(Q)
    this_fwd = _dipole_forwards(fwd_data, whitener, rd[np.newaxis])[0]
    # Columns 3-5: finite differences w.r.t. the moment components.
    for ii in range(3):
        fwds = []
        for delta in deltas:
            fwds.append(np.dot(Q + delta * direction[ii], this_fwd))
        J[:, ii + 3] = np.diff(fwds, axis=0)[0] / np.diff(deltas)[0]
    # J is already whitened, so we don't need to do np.dot(whitener, J).
    # However, the units in the Jacobian are potentially quite different,
    # so we need to do some normalization during inversion, then revert.
    direction_norm = np.linalg.norm(J[:, :3])
    Q_norm = np.linalg.norm(J[:, 3:5]) # omit possible zero Z
    norm = np.array([direction_norm] * 3 + [Q_norm] * 3)
    J /= norm
    J = np.dot(J.T, J)
    C = linalg.pinvh(J, rcond=1e-14)
    C /= norm
    C /= norm[:, np.newaxis]
    conf = 1.96 * np.sqrt(np.diag(C))
    # The confidence volume of the dipole location is obtained by
    # taking the eigenvalues of the upper left submatrix and computing
    # v = 4π/3 √(c^3 λ1 λ2 λ3) with c = 7.81, or:
    vol_conf = 4 * np.pi / 3. * np.sqrt(
        476.379541 * np.prod(linalg.eigh(C[:3, :3], eigvals_only=True)))
    conf = np.concatenate([conf, [vol_conf]])
    # Now we reorder and subselect the proper columns:
    # vol, depth, long, trans, Qlong, Qtrans (discard Qdepth, assumed zero)
    conf = conf[[6, 2, 0, 1, 3, 4]]
    return conf
def _surface_constraint(rd, surf, min_dist_to_inner_skull):
    """Surface fitting constraint.
    Positive while ``rd`` is inside the inner-skull surface by at least
    ``min_dist_to_inner_skull`` (meters); negative outside, as required by
    COBYLA's inequality-constraint convention.
    """
    dist = _compute_nearest(surf['rr'], rd[np.newaxis, :],
                            return_dists=True)[1][0]
    if _points_outside_surface(rd[np.newaxis, :], surf, 1)[0]:
        dist *= -1.
    # Once we know the dipole is below the inner skull,
    # let's check if its distance to the inner skull is at least
    # min_dist_to_inner_skull. This can be enforced by adding a
    # constrain proportional to its distance.
    dist -= min_dist_to_inner_skull
    return dist
def _sphere_constraint(rd, r0, R_adj):
"""Sphere fitting constraint."""
return R_adj - np.sqrt(np.sum((rd - r0) ** 2))
def _fit_dipole(min_dist_to_inner_skull, B_orig, t, guess_rrs,
                guess_data, fwd_data, whitener, fmin_cobyla, ori, rank):
    """Fit a single bit of data.
    Returns the 8-tuple ``(rd_final, amp, ori, gof, conf, khi2, nfree,
    residual_noproj)`` consumed positionally (indices 0-7) by
    ``_fit_dipoles``.
    """
    B = np.dot(whitener, B_orig)
    # make constraint function to keep the solver within the inner skull
    if 'rr' in fwd_data['inner_skull']: # bem
        surf = fwd_data['inner_skull']
        constraint = partial(_surface_constraint, surf=surf,
                             min_dist_to_inner_skull=min_dist_to_inner_skull)
    else: # sphere
        surf = None
        constraint = partial(
            _sphere_constraint, r0=fwd_data['inner_skull']['r0'],
            R_adj=fwd_data['inner_skull']['R'] - min_dist_to_inner_skull)
    # Find a good starting point (find_best_guess in C)
    B2 = np.dot(B, B)
    if B2 == 0:
        warn('Zero field found for time %s' % t)
        # BUG FIX: the early return must have the same 8-element shape as
        # the normal return below — the previous 5-element tuple crashed
        # _fit_dipoles, which indexes r[4] through r[7] of every result.
        return (np.zeros(3), 0., np.zeros(3), 0., np.zeros(6), 0., rank,
                B_orig)
    idx = np.argmin([_fit_eval(guess_rrs[[fi], :], B, B2, fwd_svd)
                     for fi, fwd_svd in enumerate(guess_data['fwd_svd'])])
    x0 = guess_rrs[idx]
    lwork = _svd_lwork((3, B.shape[0]))
    fun = partial(_fit_eval, B=B, B2=B2, fwd_data=fwd_data, whitener=whitener,
                  lwork=lwork)
    # Tested minimizers:
    # Simplex, BFGS, CG, COBYLA, L-BFGS-B, Powell, SLSQP, TNC
    # Several were similar, but COBYLA won for having a handy constraint
    # function we can use to ensure we stay inside the inner skull /
    # smallest sphere
    rd_final = fmin_cobyla(fun, x0, (constraint,), consargs=(),
                           rhobeg=5e-2, rhoend=5e-5, disp=False)
    # simplex = _make_tetra_simplex() + x0
    # _simplex_minimize(simplex, 1e-4, 2e-4, fun)
    # rd_final = simplex[0]
    # Compute the dipole moment at the final point
    Q, gof, residual_noproj, n_comp = _fit_Q(
        fwd_data, whitener, B, B2, B_orig, rd_final, ori=ori)
    khi2 = (1 - gof) * B2
    nfree = rank - n_comp
    amp = np.sqrt(np.dot(Q, Q))
    # Guard against a zero-amplitude moment when normalizing orientation.
    norm = 1. if amp == 0. else amp
    ori = Q / norm
    conf = _fit_confidence(rd_final, Q, ori, whitener, fwd_data)
    msg = '---- Fitted : %7.1f ms' % (1000. * t)
    if surf is not None:
        dist_to_inner_skull = _compute_nearest(
            surf['rr'], rd_final[np.newaxis, :], return_dists=True)[1][0]
        msg += (", distance to inner skull : %2.4f mm"
                % (dist_to_inner_skull * 1000.))
    logger.info(msg)
    return rd_final, amp, ori, gof, conf, khi2, nfree, residual_noproj
def _fit_dipole_fixed(min_dist_to_inner_skull, B_orig, t, guess_rrs,
                      guess_data, fwd_data, whitener,
                      fmin_cobyla, ori, rank):
    """Fit dipole amplitude (and optionally orientation) at a fixed position.

    Several parameters (``min_dist_to_inner_skull``, ``fwd_data``,
    ``fmin_cobyla``, ``rank``) are unused here; the signature is kept
    identical to ``_fit_dipole`` so both can be dispatched interchangeably.
    Returns (rd_final, amp, ori, gof, conf, khi2, nfree, residual_noproj).
    """
    b_white = np.dot(whitener, B_orig)
    b_power = np.dot(b_white, b_white)
    if b_power == 0:
        warn('Zero field found for time %s' % t)
        return np.zeros(3), 0, np.zeros(3), 0, np.zeros(6)
    # Dipole moment at the (single, fixed) guess position.
    Q, gof, residual_noproj = _fit_Q(guess_data, whitener, b_white, b_power,
                                     B_orig, rd=None, ori=ori)[:3]
    if ori is None:
        # Free orientation: take it from the fitted moment itself.
        amp = np.sqrt(np.dot(Q, Q))
        ori = Q / (amp if amp != 0. else 1.)
    else:
        # Fixed orientation: amplitude is the projection onto it.
        amp = np.dot(Q, ori)
    rd_final = guess_rrs[0]
    # A confidence volume would be slow to compute and is unused here:
    # conf = _fit_confidence(rd_final, Q, ori, whitener, fwd_data)
    conf = khi2 = nfree = None
    # No logger message here because this path is meant to go *very* fast.
    return rd_final, amp, ori, gof, conf, khi2, nfree, residual_noproj
@verbose
def fit_dipole(evoked, cov, bem, trans=None, min_dist=5., n_jobs=1,
               pos=None, ori=None, rank=None, verbose=None):
    """Fit a dipole.

    Parameters
    ----------
    evoked : instance of Evoked
        The dataset to fit.
    cov : str | instance of Covariance
        The noise covariance.
    bem : str | instance of ConductorModel
        The BEM filename (str) or conductor model.
    trans : str | None
        The head<->MRI transform filename. Must be provided unless BEM
        is a sphere model.
    min_dist : float
        Minimum distance (in millimeters) from the dipole to the inner skull.
        Must be positive. Note that because this is a constraint passed to
        a solver it is not strict but close, i.e. for a ``min_dist=5.`` the
        fits could be 4.9 mm from the inner skull.
    %(n_jobs)s
        It is used in field computation and fitting.
    pos : ndarray, shape (3,) | None
        Position of the dipole to use. If None (default), sequential
        fitting (different position and orientation for each time instance)
        is performed. If a position (in head coords) is given as an array,
        the position is fixed during fitting.

        .. versionadded:: 0.12
    ori : ndarray, shape (3,) | None
        Orientation of the dipole to use. If None (default), the
        orientation is free to change as a function of time. If an
        orientation (in head coordinates) is given as an array, ``pos``
        must also be provided, and the routine computes the amplitude and
        goodness of fit of the dipole at the given position and orientation
        for each time instant.

        .. versionadded:: 0.12
    %(rank_None)s

        .. versionadded:: 0.20
    %(verbose)s

    Returns
    -------
    dip : instance of Dipole or DipoleFixed
        The dipole fits. A :class:`mne.DipoleFixed` is returned if
        ``pos`` and ``ori`` are both not None, otherwise a
        :class:`mne.Dipole` is returned.
    residual : instance of Evoked
        The M-EEG data channels with the fitted dipolar activity removed.

    See Also
    --------
    mne.beamformer.rap_music
    Dipole
    DipoleFixed
    read_dipole

    Notes
    -----
    .. versionadded:: 0.9.0
    """
    from scipy import linalg
    # This could eventually be adapted to work with other inputs, these
    # are what is needed:
    evoked = evoked.copy()
    # --- Input validation -------------------------------------------------
    # Determine if a list of projectors has an average EEG ref
    if _needs_eeg_average_ref_proj(evoked.info):
        raise ValueError('EEG average reference is mandatory for dipole '
                         'fitting.')
    if min_dist < 0:
        raise ValueError('min_dist should be positive. Got %s' % min_dist)
    if ori is not None and pos is None:
        raise ValueError('pos must be provided if ori is not None')
    data = evoked.data
    if not np.isfinite(data).all():
        raise ValueError('Evoked data must be finite')
    info = evoked.info
    times = evoked.times.copy()
    comment = evoked.comment
    # Convert the min_dist to meters
    min_dist_to_inner_skull = min_dist / 1000.
    del min_dist
    # --- Conductor model setup (BEM or sphere) ----------------------------
    # Figure out our inputs
    neeg = len(pick_types(info, meg=False, eeg=True, ref_meg=False,
                          exclude=[]))
    if isinstance(bem, str):
        bem_extra = bem
    else:
        bem_extra = repr(bem)
    logger.info('BEM               : %s' % bem_extra)
    mri_head_t, trans = _get_trans(trans)
    logger.info('MRI transform     : %s' % trans)
    bem = _setup_bem(bem, bem_extra, neeg, mri_head_t, verbose=False)
    if not bem['is_sphere']:
        # Find the best-fitting sphere
        inner_skull = _bem_find_surface(bem, 'inner_skull')
        inner_skull = inner_skull.copy()
        R, r0 = _fit_sphere(inner_skull['rr'], disp=False)
        # r0 back to head frame for logging
        r0 = apply_trans(mri_head_t['trans'], r0[np.newaxis, :])[0]
        inner_skull['r0'] = r0
        logger.info('Head origin       : '
                    '%6.1f %6.1f %6.1f mm rad = %6.1f mm.'
                    % (1000 * r0[0], 1000 * r0[1], 1000 * r0[2], 1000 * R))
        del R, r0
    else:
        r0 = bem['r0']
        if len(bem.get('layers', [])) > 0:
            R = bem['layers'][0]['rad']
            kind = 'rad'
        else:  # MEG-only
            # Use the minimum distance to the MEG sensors as the radius then
            R = np.dot(np.linalg.inv(info['dev_head_t']['trans']),
                       np.hstack([r0, [1.]]))[:3]  # r0 -> device
            R = R - [info['chs'][pick]['loc'][:3]
                     for pick in pick_types(info, meg=True, exclude=[])]
            if len(R) == 0:
                raise RuntimeError('No MEG channels found, but MEG-only '
                                   'sphere model used')
            R = np.min(np.sqrt(np.sum(R * R, axis=1)))  # use dist to sensors
            kind = 'max_rad'
        logger.info('Sphere model      : origin at (% 7.2f % 7.2f % 7.2f) mm, '
                    '%s = %6.1f mm'
                    % (1000 * r0[0], 1000 * r0[1], 1000 * r0[2], kind, R))
        inner_skull = dict(R=R, r0=r0)  # NB sphere model defined in head frame
        del R, r0
    accurate = False  # can be an option later (shouldn't make big diff)
    # --- Fixed-position (DipoleFixed) vs. sequential fitting setup --------
    # Deal with DipoleFixed cases here
    if pos is not None:
        fixed_position = True
        pos = np.array(pos, float)
        if pos.shape != (3,):
            raise ValueError('pos must be None or a 3-element array-like,'
                             ' got %s' % (pos,))
        logger.info('Fixed position    : %6.1f %6.1f %6.1f mm'
                    % tuple(1000 * pos))
        if ori is not None:
            ori = np.array(ori, float)
            if ori.shape != (3,):
                raise ValueError('oris must be None or a 3-element array-like,'
                                 ' got %s' % (ori,))
            norm = np.sqrt(np.sum(ori * ori))
            if not np.isclose(norm, 1):
                raise ValueError('ori must be a unit vector, got length %s'
                                 % (norm,))
            logger.info('Fixed orientation  : %6.4f %6.4f %6.4f mm'
                        % tuple(ori))
        else:
            logger.info('Free orientation   : <time-varying>')
        fit_n_jobs = 1  # only use 1 job to do the guess fitting
    else:
        fixed_position = False
        # Eventually these could be parameters, but they are just used for
        # the initial grid anyway
        guess_grid = 0.02  # MNE-C uses 0.01, but this is faster w/similar perf
        guess_mindist = max(0.005, min_dist_to_inner_skull)
        guess_exclude = 0.02

        logger.info('Guess grid        : %6.1f mm' % (1000 * guess_grid,))
        if guess_mindist > 0.0:
            logger.info('Guess mindist     : %6.1f mm'
                        % (1000 * guess_mindist,))
        if guess_exclude > 0:
            logger.info('Guess exclude     : %6.1f mm'
                        % (1000 * guess_exclude,))
        logger.info('Using %s MEG coil definitions.'
                    % ("accurate" if accurate else "standard"))
        fit_n_jobs = n_jobs
    if isinstance(cov, str):
        logger.info('Noise covariance  : %s' % (cov,))
        cov = read_cov(cov, verbose=False)
    logger.info('')

    _print_coord_trans(mri_head_t)
    _print_coord_trans(info['dev_head_t'])
    logger.info('%d bad channels total' % len(info['bads']))

    # --- Sensor/forward-model preparation ---------------------------------
    # Forward model setup (setup_forward_model from setup.c)
    ch_types = evoked.get_channel_types()

    megcoils, compcoils, megnames, meg_info = [], [], [], None
    eegels, eegnames = [], []
    if 'grad' in ch_types or 'mag' in ch_types:
        megcoils, compcoils, megnames, meg_info = \
            _prep_meg_channels(info, exclude='bads',
                               accurate=accurate, verbose=verbose)
    if 'eeg' in ch_types:
        eegels, eegnames = _prep_eeg_channels(info, exclude='bads',
                                              verbose=verbose)

    # Ensure that MEG and/or EEG channels are present
    if len(megcoils + eegels) == 0:
        raise RuntimeError('No MEG or EEG channels found.')

    # Whitener for the data
    logger.info('Decomposing the sensor noise covariance matrix...')
    picks = pick_types(info, meg=True, eeg=True, ref_meg=False)

    # In case we want to more closely match MNE-C for debugging:
    # from .io.pick import pick_info
    # from .cov import prepare_noise_cov
    # info_nb = pick_info(info, picks)
    # cov = prepare_noise_cov(cov, info_nb, info_nb['ch_names'], verbose=False)
    # nzero = (cov['eig'] > 0)
    # n_chan = len(info_nb['ch_names'])
    # whitener = np.zeros((n_chan, n_chan), dtype=np.float64)
    # whitener[nzero, nzero] = 1.0 / np.sqrt(cov['eig'][nzero])
    # whitener = np.dot(whitener, cov['eigvec'])

    whitener, _, rank = compute_whitener(cov, info, picks=picks,
                                         rank=rank, return_rank=True)

    # --- Guess-source setup (make_guess_data) -----------------------------
    # Proceed to computing the fits (make_guess_data)
    if fixed_position:
        guess_src = dict(nuse=1, rr=pos[np.newaxis], inuse=np.array([True]))
        logger.info('Compute forward for dipole location...')
    else:
        logger.info('\n---- Computing the forward solution for the guesses...')
        guess_src = _make_guesses(inner_skull, guess_grid, guess_exclude,
                                  guess_mindist, n_jobs=n_jobs)[0]
        # grid coordinates go from mri to head frame
        transform_surface_to(guess_src, 'head', mri_head_t)
        logger.info('Go through all guess source locations...')

    # inner_skull goes from mri to head frame
    if 'rr' in inner_skull:
        transform_surface_to(inner_skull, 'head', mri_head_t)
    if fixed_position:
        # Sanity-check the fixed position against the inner skull / sphere
        # using the same constraint functions the solver would use.
        if 'rr' in inner_skull:
            check = _surface_constraint(pos, inner_skull,
                                        min_dist_to_inner_skull)
        else:
            check = _sphere_constraint(
                pos, inner_skull['r0'],
                R_adj=inner_skull['R'] - min_dist_to_inner_skull)
        if check <= 0:
            raise ValueError('fixed position is %0.1fmm outside the inner '
                             'skull boundary' % (-1000 * check,))

    # C code computes guesses w/sphere model for speed, don't bother here
    fwd_data = dict(coils_list=[megcoils, eegels], infos=[meg_info, None],
                    ccoils_list=[compcoils, None], coil_types=['meg', 'eeg'],
                    inner_skull=inner_skull)
    # fwd_data['inner_skull'] in head frame, bem in mri, confusing...
    _prep_field_computation(guess_src['rr'], bem, fwd_data, n_jobs,
                            verbose=False)
    guess_fwd, guess_fwd_orig, guess_fwd_scales = _dipole_forwards(
        fwd_data, whitener, guess_src['rr'], n_jobs=fit_n_jobs)
    # decompose ahead of time
    guess_fwd_svd = [linalg.svd(fwd, full_matrices=False)
                     for fwd in np.array_split(guess_fwd,
                                               len(guess_src['rr']))]
    guess_data = dict(fwd=guess_fwd, fwd_svd=guess_fwd_svd,
                      fwd_orig=guess_fwd_orig, scales=guess_fwd_scales)
    del guess_fwd, guess_fwd_svd, guess_fwd_orig, guess_fwd_scales  # destroyed
    logger.info('[done %d source%s]' % (guess_src['nuse'],
                                        _pl(guess_src['nuse'])))

    # --- Actual fitting ---------------------------------------------------
    # Do actual fits
    data = data[picks]
    ch_names = [info['ch_names'][p] for p in picks]
    proj_op = make_projector(info['projs'], ch_names, info['bads'])[0]
    fun = _fit_dipole_fixed if fixed_position else _fit_dipole
    out = _fit_dipoles(
        fun, min_dist_to_inner_skull, data, times, guess_src['rr'],
        guess_data, fwd_data, whitener, ori, n_jobs, rank)
    assert len(out) == 8

    # --- Package results --------------------------------------------------
    if fixed_position and ori is not None:
        # DipoleFixed: amplitude + goodness-of-fit as two synthetic channels
        data = np.array([out[1], out[3]])
        out_info = deepcopy(info)
        loc = np.concatenate([pos, ori, np.zeros(6)])
        out_info['chs'] = [
            dict(ch_name='dip 01', loc=loc, kind=FIFF.FIFFV_DIPOLE_WAVE,
                 coord_frame=FIFF.FIFFV_COORD_UNKNOWN, unit=FIFF.FIFF_UNIT_AM,
                 coil_type=FIFF.FIFFV_COIL_DIPOLE,
                 unit_mul=0, range=1, cal=1., scanno=1, logno=1),
            dict(ch_name='goodness', loc=np.full(12, np.nan),
                 kind=FIFF.FIFFV_GOODNESS_FIT, unit=FIFF.FIFF_UNIT_AM,
                 coord_frame=FIFF.FIFFV_COORD_UNKNOWN,
                 coil_type=FIFF.FIFFV_COIL_NONE,
                 unit_mul=0, range=1., cal=1., scanno=2, logno=100)]
        for key in ['hpi_meas', 'hpi_results', 'projs']:
            out_info[key] = list()
        for key in ['acq_pars', 'acq_stim', 'description', 'dig',
                    'experimenter', 'hpi_subsystem', 'proj_id', 'proj_name',
                    'subject_info']:
            out_info[key] = None
        out_info['bads'] = []
        out_info._update_redundant()
        out_info._check_consistency()
        dipoles = DipoleFixed(out_info, data, times, evoked.nave,
                              evoked._aspect_kind, comment=comment)
    else:
        dipoles = Dipole(times, out[0], out[1], out[2], out[3], comment,
                         out[4], out[5], out[6])
    residual = evoked.copy().apply_proj()  # set the projs active
    residual.data[picks] = np.dot(proj_op, out[-1])
    logger.info('%d time points fitted' % len(dipoles.times))
    return dipoles, residual
def get_phantom_dipoles(kind='vectorview'):
    """Get standard phantom dipole locations and orientations.

    Parameters
    ----------
    kind : str
        Which phantom to return information for:

        ``vectorview`` (default)
            The Neuromag VectorView phantom.
        ``otaniemi``
            The older Neuromag phantom used at Otaniemi.

    Returns
    -------
    pos : ndarray, shape (n_dipoles, 3)
        The dipole positions.
    ori : ndarray, shape (n_dipoles, 3)
        The dipole orientations.

    Notes
    -----
    The Elekta phantoms have a radius of 79.5mm, and HPI coil locations
    in the XY-plane at the axis extrema (e.g., (79.5, 0), (0, -79.5), ...).
    """
    _check_option('kind', kind, ['vectorview', 'otaniemi'])
    if kind == 'vectorview':
        # Values pulled from a scanned image provided by Elekta folks.
        a = np.array([59.7, 48.6, 35.8, 24.8, 37.2, 27.5, 15.8, 7.9])
        b = np.array([46.1, 41.9, 38.3, 31.5, 13.9, 16.2, 20.0, 19.3])
        x = np.concatenate((a, [0] * 8, -b, [0] * 8))
        y = np.concatenate(([0] * 8, -a, [0] * 8, b))
        c = [22.9, 23.5, 25.5, 23.1, 52.0, 46.4, 41.0, 33.0]
        d = [44.4, 34.0, 21.6, 12.7, 62.4, 51.5, 39.1, 27.9]
        z = np.concatenate((c, c, d, d))
        signs = ([1, -1] * 4 + [-1, 1] * 4) * 2
    elif kind == 'otaniemi':
        # Values pulled from a Neuromag manual (NM20456A, 13.7.1999, p.65).
        a = np.array([56.3, 47.6, 39.0, 30.3])
        b = np.array([32.5, 27.5, 22.5, 17.5])
        c = np.zeros(4)
        x = np.concatenate((a, b, c, c, -a, -b, c, c))
        y = np.concatenate((c, c, -a, -b, c, c, b, a))
        z = np.concatenate((b, a, b, a, b, a, a, b))
        signs = [-1] * 8 + [1] * 16 + [-1] * 8
    pos = np.vstack((x, y, z)).T / 1000.
    # Every location lies in the XZ or YZ plane, and the orientation is
    # tangential within that same plane, so it can be derived directly by
    # swapping the two in-plane components and flipping one sign.
    ori = list()
    for sign, loc in zip(signs, pos):
        component = np.zeros(3)
        zero_axes = np.where(loc == 0)[0]
        live_axes = np.setdiff1d(np.arange(3), zero_axes[0])
        in_plane = loc[live_axes]
        component[live_axes] = sign * (in_plane[::-1] /
                                       np.linalg.norm(in_plane)) * [1, -1]
        # Sanity property (kept as a note, not asserted for speed):
        # np.dot(component, loc) / np.linalg.norm(loc) ~= 0 (tangential)
        ori.append(component)
    return pos, np.array(ori)
def _concatenate_dipoles(dipoles):
    """Concatenate a list of dipoles into a single Dipole instance."""
    # Stack each attribute across all input dipoles, in Dipole.__init__
    # positional order: times, pos, amplitude, ori, gof.
    attrs = ('times', 'pos', 'amplitude', 'ori', 'gof')
    stacked = [np.concatenate([getattr(dip, attr) for dip in dipoles])
               for attr in attrs]
    return Dipole(*stacked, name=None)
| 37.93237 | 82 | 0.561354 |
a5032e4771bf47da93952fc650a3495859fa7937 | 697 | py | Python | day5/day5.py | agnul/AdventOfCode2020 | d645ebc83757b75ddddf57d6ced1969b5a063708 | [
"Unlicense"
] | 2 | 2020-12-04T13:13:38.000Z | 2020-12-08T17:24:08.000Z | day5/day5.py | agnul/AdventOfCode2020 | d645ebc83757b75ddddf57d6ced1969b5a063708 | [
"Unlicense"
] | null | null | null | day5/day5.py | agnul/AdventOfCode2020 | d645ebc83757b75ddddf57d6ced1969b5a063708 | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python3
def find_seat_id(boarding_pass):
    """Decode a boarding pass into its seat ID via binary partitioning.

    Each of the 10 characters halves the remaining [lo, hi) range:
    'F' and 'L' keep the lower half, anything else ('B'/'R') keeps the
    upper half, so the pass is effectively a 10-bit binary number
    (row * 8 + column).

    Parameters
    ----------
    boarding_pass : str
        Ten characters drawn from 'F', 'B', 'L', 'R'.

    Returns
    -------
    int
        The seat ID in [0, 1023].
    """
    lo, hi = 0, 1024
    for c in boarding_pass:
        # Integer midpoint: use floor division instead of int(float / 2)
        # to stay in exact integer arithmetic.
        m = (lo + hi) // 2
        (lo, hi) = (lo, m) if c in 'FL' else (m, hi)
    return lo
def solve_part_1(boarding_passes):
    """Return the highest seat ID among all boarding passes."""
    return max(map(find_seat_id, boarding_passes))
def solve_part_2(boarding_passes):
    """Find our seat: the one empty seat with both neighbours occupied.

    Returns -1 if no such seat exists.
    """
    occupied = [0] * 1024
    for bp in boarding_passes:
        occupied[find_seat_id(bp)] = 1
    return next((seat for seat in range(1, 1023)
                 if occupied[seat - 1:seat + 2] == [1, 0, 1]), -1)
if __name__ == "__main__":
    # Read the puzzle input (one boarding pass per line, trailing
    # whitespace stripped) and print both answers on one line.
    passes = [l.rstrip() for l in open('input.txt').readlines()]
    print(f'{solve_part_1(passes)}, {solve_part_2(passes)}')
6ebe22811b764c7e255b79131e3fb6679fd8086c | 2,229 | py | Python | gammapy/scripts/image_coordinates.py | grburgess/gammapy | 609e460698caca7223afeef5e71826c7b32728d1 | [
"BSD-3-Clause"
] | 3 | 2019-01-28T12:21:14.000Z | 2019-02-10T19:58:07.000Z | gammapy/scripts/image_coordinates.py | grburgess/gammapy | 609e460698caca7223afeef5e71826c7b32728d1 | [
"BSD-3-Clause"
] | null | null | null | gammapy/scripts/image_coordinates.py | grburgess/gammapy | 609e460698caca7223afeef5e71826c7b32728d1 | [
"BSD-3-Clause"
] | null | null | null | # Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
from ..utils.scripts import get_parser
__all__ = ['image_coordinates']
log = logging.getLogger(__name__)
def image_coordinates_main(args=None):
    """Command-line entry point: parse arguments and run image_coordinates."""
    parser = get_parser(image_coordinates)
    parser.add_argument('infile', type=str,
                        help='Input FITS file name')
    parser.add_argument('outfile', type=str,
                        help='Output FITS file name')
    # Boolean flags, all defaulting to False.
    for flag, help_text in [
            ('--make_coordinate_maps', 'Create coordinate maps'),
            ('--make_distance_map', 'Create distance to mask map'),
            ('--overwrite', 'Overwrite existing output file?')]:
        parser.add_argument(flag, action='store_true', help=help_text)
    parsed = parser.parse_args(args)
    image_coordinates(**vars(parsed))
def image_coordinates(infile,
                      outfile,
                      make_coordinate_maps,
                      make_distance_map,
                      overwrite):
    """Make maps that can be used to create profiles.

    The following images can be created:

    * LON -- Longitude coordinate
    * LAT -- Latitude coordinate
    * DIST -- Distance to mask
    * SOLID_ANGLE -- Solid angle

    Parameters
    ----------
    infile : str
        Input FITS file name (first image HDU is used).
    outfile : str
        Output FITS file name.
    make_coordinate_maps : bool
        If True, append LON and LAT image HDUs to the output.
    make_distance_map : bool
        If True, append a DIST (distance-to-exclusion) image HDU.
    overwrite : bool
        Overwrite ``outfile`` if it already exists.
    """
    from astropy.io import fits
    from gammapy.utils.fits import get_hdu
    from gammapy.image import SkyImage, SkyMask
    log.info('Reading {0}'.format(infile))
    hdu = get_hdu(infile)
    # All output HDUs reuse the input header so WCS information is kept.
    out_hdus = fits.HDUList()
    if make_coordinate_maps:
        image = SkyImage.empty_like(hdu)
        log.info('Computing LON and LAT maps')
        lon, lat = image.coordinates()
        out_hdus.append(fits.ImageHDU(lon, hdu.header, 'LON'))
        out_hdus.append(fits.ImageHDU(lat, hdu.header, 'LAT'))
    if make_distance_map:
        # Treat the input image as an exclusion mask and compute per-pixel
        # distance to it.
        excl = SkyMask.from_image_hdu(hdu)
        log.info('Computing DIST map')
        dist = excl.exclusion_distance
        out_hdus.append(fits.ImageHDU(dist, hdu.header, 'DIST'))
    log.info('Writing {0}'.format(outfile))
    # NOTE(review): ``clobber`` is deprecated in newer astropy in favor of
    # ``overwrite`` -- confirm against the astropy version pinned here.
    out_hdus.writeto(outfile, clobber=overwrite)
ffd656b74ba0a8bdf1ecb54264f9b19a930331f4 | 21,615 | py | Python | electrum_sum/gui/qt/network_dialog.py | jakesum/electrum-sum | f34ac3c042cf73e90acd0771b87f60b750b6c799 | [
"MIT"
] | 3 | 2020-09-02T08:49:37.000Z | 2021-11-05T12:31:47.000Z | electrum_sum/gui/qt/network_dialog.py | jakesum/electrum-sum | f34ac3c042cf73e90acd0771b87f60b750b6c799 | [
"MIT"
] | 1 | 2021-05-04T13:47:57.000Z | 2021-05-04T13:47:57.000Z | electrum_sum/gui/qt/network_dialog.py | jakesum/electrum-sum | f34ac3c042cf73e90acd0771b87f60b750b6c799 | [
"MIT"
] | 3 | 2020-09-21T02:55:02.000Z | 2021-04-07T06:05:13.000Z | #!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2012 thomasv@gitorious
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import socket
import time
from enum import IntEnum
from typing import Tuple
from PyQt5.QtCore import Qt, pyqtSignal, QThread
from PyQt5.QtWidgets import (QTreeWidget, QTreeWidgetItem, QMenu, QGridLayout, QComboBox,
QLineEdit, QDialog, QVBoxLayout, QHeaderView, QCheckBox,
QTabWidget, QWidget, QLabel)
from PyQt5.QtGui import QFontMetrics
from electrum_sum.i18n import _
from electrum_sum import constants, blockchain
from electrum_sum.interface import serialize_server, deserialize_server
from electrum_sum.network import Network
from electrum_sum.logging import get_logger
from .util import Buttons, CloseButton, HelpButton, read_QIcon, char_width_in_lineedit
_logger = get_logger(__name__)
protocol_names = ['TCP', 'SSL']
protocol_letters = 'ts'
class NetworkDialog(QDialog):
    """Top-level network dialog: a NetworkChoiceLayout plus a Close button."""

    def __init__(self, network, config, network_updated_signal_obj):
        QDialog.__init__(self)
        self.setWindowTitle(_('Network'))
        self.setMinimumSize(500, 300)
        self.nlayout = NetworkChoiceLayout(network, config)
        self.network_updated_signal_obj = network_updated_signal_obj
        main_box = QVBoxLayout(self)
        main_box.addLayout(self.nlayout.layout())
        main_box.addLayout(Buttons(CloseButton(self)))
        # Re-emit network callbacks through a Qt signal so the GUI refresh
        # runs via Qt's signal/slot machinery.
        signal = self.network_updated_signal_obj.network_updated_signal
        signal.connect(self.on_update)
        network.register_callback(self.on_network, ['network_updated'])

    def on_network(self, event, *args):
        """Forward a network-layer callback into the Qt signal."""
        self.network_updated_signal_obj.network_updated_signal.emit(event, args)

    def on_update(self):
        """Refresh the embedded network layout."""
        self.nlayout.update()
class NodesListWidget(QTreeWidget):
    """Tree of connected nodes, grouped per blockchain branch on a fork.

    Item encoding (via Qt.UserRole): column 0 holds 1 for a branch row and
    0 for a server row; column 1 holds the chain id or the server string.
    """
    def __init__(self, parent):
        QTreeWidget.__init__(self)
        self.parent = parent
        self.setHeaderLabels([_('Connected node'), _('Height')])
        self.setContextMenuPolicy(Qt.CustomContextMenu)
        self.customContextMenuRequested.connect(self.create_menu)
    def create_menu(self, position):
        # Context menu: follow a branch (branch rows) or switch to a
        # specific server (server rows), dispatched via the UserRole flag.
        item = self.currentItem()
        if not item:
            return
        is_server = not bool(item.data(0, Qt.UserRole))
        menu = QMenu()
        if is_server:
            server = item.data(1, Qt.UserRole)
            menu.addAction(_("Use as server"), lambda: self.parent.follow_server(server))
        else:
            chain_id = item.data(1, Qt.UserRole)
            menu.addAction(_("Follow this branch"), lambda: self.parent.follow_branch(chain_id))
        menu.exec_(self.viewport().mapToGlobal(position))
    def keyPressEvent(self, event):
        # Make F2/Enter behave like a context-menu request on current item.
        if event.key() in [ Qt.Key_F2, Qt.Key_Return ]:
            self.on_activated(self.currentItem(), self.currentColumn())
        else:
            QTreeWidget.keyPressEvent(self, event)
    def on_activated(self, item, column):
        # on 'enter' we show the menu
        pt = self.visualItemRect(item).bottomLeft()
        pt.setX(50)
        self.customContextMenuRequested.emit(pt)
    def update(self, network: Network):
        """Rebuild the tree from the network's current chains/interfaces."""
        self.clear()
        # With a single chain, interface items are added directly at the
        # top level by aliasing addChild; with a fork each branch gets an
        # expandable parent item instead.
        self.addChild = self.addTopLevelItem
        chains = network.get_blockchains()
        n_chains = len(chains)
        for chain_id, interfaces in chains.items():
            b = blockchain.blockchains.get(chain_id)
            if b is None: continue
            name = b.get_name()
            if n_chains > 1:
                x = QTreeWidgetItem([name + '@%d'%b.get_max_forkpoint(), '%d'%b.height()])
                x.setData(0, Qt.UserRole, 1)
                x.setData(1, Qt.UserRole, b.get_id())
            else:
                x = self
            for i in interfaces:
                # The currently-used interface is marked with an asterisk.
                star = ' *' if i == network.interface else ''
                item = QTreeWidgetItem([i.host + star, '%d'%i.tip])
                item.setData(0, Qt.UserRole, 0)
                item.setData(1, Qt.UserRole, i.server)
                x.addChild(item)
            if n_chains > 1:
                self.addTopLevelItem(x)
                x.setExpanded(True)
        h = self.header()
        h.setStretchLastSection(False)
        h.setSectionResizeMode(0, QHeaderView.Stretch)
        h.setSectionResizeMode(1, QHeaderView.ResizeToContents)
        super().update()
class ServerListWidget(QTreeWidget):
    """Flat list of known servers (host + port for the current protocol)."""
    class Columns(IntEnum):
        HOST = 0
        PORT = 1
    # Custom data role storing the serialized "host:port:protocol" string.
    SERVER_STR_ROLE = Qt.UserRole + 100
    def __init__(self, parent):
        QTreeWidget.__init__(self)
        self.parent = parent
        self.setHeaderLabels([_('Host'), _('Port')])
        self.setContextMenuPolicy(Qt.CustomContextMenu)
        self.customContextMenuRequested.connect(self.create_menu)
    def create_menu(self, position):
        # Right-click menu offering to switch to the selected server.
        item = self.currentItem()
        if not item:
            return
        menu = QMenu()
        server = item.data(self.Columns.HOST, self.SERVER_STR_ROLE)
        menu.addAction(_("Use as server"), lambda: self.set_server(server))
        menu.exec_(self.viewport().mapToGlobal(position))
    def set_server(self, s):
        # Push the chosen server into the parent layout's widgets and apply.
        host, port, protocol = deserialize_server(s)
        self.parent.server_host.setText(host)
        self.parent.server_port.setText(port)
        self.parent.set_server()
    def keyPressEvent(self, event):
        # Make F2/Enter behave like a context-menu request on current item.
        if event.key() in [ Qt.Key_F2, Qt.Key_Return ]:
            self.on_activated(self.currentItem(), self.currentColumn())
        else:
            QTreeWidget.keyPressEvent(self, event)
    def on_activated(self, item, column):
        # on 'enter' we show the menu
        pt = self.visualItemRect(item).bottomLeft()
        pt.setX(50)
        self.customContextMenuRequested.emit(pt)
    def update(self, servers, protocol, use_tor):
        """Repopulate from ``servers``, keeping only hosts that offer
        ``protocol`` (and hiding .onion hosts unless Tor is enabled)."""
        self.clear()
        for _host, d in sorted(servers.items()):
            if _host.endswith('.onion') and not use_tor:
                continue
            port = d.get(protocol)
            if port:
                x = QTreeWidgetItem([_host, port])
                server = serialize_server(_host, port, protocol)
                x.setData(self.Columns.HOST, self.SERVER_STR_ROLE, server)
                self.addTopLevelItem(x)
        h = self.header()
        h.setStretchLastSection(False)
        h.setSectionResizeMode(self.Columns.HOST, QHeaderView.Stretch)
        h.setSectionResizeMode(self.Columns.PORT, QHeaderView.ResizeToContents)
        super().update()
class NetworkChoiceLayout(object):
    def __init__(self, network: Network, config, wizard=False):
        """Build the Overview / Server / Proxy tabs and wire their signals.

        ``wizard`` is accepted for interface compatibility; it is not used
        in the visible construction below.
        """
        self.network = network
        self.config = config
        self.protocol = None      # current protocol letter ('t' or 's')
        self.tor_proxy = None     # set later when a local Tor is detected
        self.tabs = tabs = QTabWidget()
        server_tab = QWidget()
        proxy_tab = QWidget()
        blockchain_tab = QWidget()
        tabs.addTab(blockchain_tab, _('Overview'))
        tabs.addTab(server_tab, _('Server'))
        tabs.addTab(proxy_tab, _('Proxy'))
        fixed_width_hostname = 24 * char_width_in_lineedit()
        fixed_width_port = 6 * char_width_in_lineedit()
        # --- server tab: manual host/port entry + server list -------------
        grid = QGridLayout(server_tab)
        grid.setSpacing(8)
        self.server_host = QLineEdit()
        self.server_host.setFixedWidth(fixed_width_hostname)
        self.server_port = QLineEdit()
        self.server_port.setFixedWidth(fixed_width_port)
        self.autoconnect_cb = QCheckBox(_('Select server automatically'))
        self.autoconnect_cb.setEnabled(self.config.is_modifiable('auto_connect'))
        self.server_host.editingFinished.connect(self.set_server)
        self.server_port.editingFinished.connect(self.set_server)
        self.autoconnect_cb.clicked.connect(self.set_server)
        self.autoconnect_cb.clicked.connect(self.update)
        msg = ' '.join([
            _("If auto-connect is enabled, Electrum will always use a server that is on the longest blockchain."),
            _("If it is disabled, you have to choose a server you want to use. Electrum will warn you if your server is lagging.")
        ])
        grid.addWidget(self.autoconnect_cb, 0, 0, 1, 3)
        grid.addWidget(HelpButton(msg), 0, 4)
        grid.addWidget(QLabel(_('Server') + ':'), 1, 0)
        grid.addWidget(self.server_host, 1, 1, 1, 2)
        grid.addWidget(self.server_port, 1, 3)
        label = _('Server peers') if network.is_connected() else _('Default Servers')
        grid.addWidget(QLabel(label), 2, 0, 1, 5)
        self.servers_list = ServerListWidget(self)
        grid.addWidget(self.servers_list, 3, 0, 1, 5)
        # --- Proxy tab ----------------------------------------------------
        grid = QGridLayout(proxy_tab)
        grid.setSpacing(8)
        # proxy setting
        self.proxy_cb = QCheckBox(_('Use proxy'))
        self.proxy_cb.clicked.connect(self.check_disable_proxy)
        self.proxy_cb.clicked.connect(self.set_proxy)
        self.proxy_mode = QComboBox()
        self.proxy_mode.addItems(['SOCKS4', 'SOCKS5'])
        self.proxy_host = QLineEdit()
        self.proxy_host.setFixedWidth(fixed_width_hostname)
        self.proxy_port = QLineEdit()
        self.proxy_port.setFixedWidth(fixed_width_port)
        self.proxy_user = QLineEdit()
        self.proxy_user.setPlaceholderText(_("Proxy user"))
        self.proxy_password = QLineEdit()
        self.proxy_password.setPlaceholderText(_("Password"))
        self.proxy_password.setEchoMode(QLineEdit.Password)
        self.proxy_password.setFixedWidth(fixed_width_port)
        # editingFinished applies the settings; textEdited only flags the
        # pending change (both connected below).
        self.proxy_mode.currentIndexChanged.connect(self.set_proxy)
        self.proxy_host.editingFinished.connect(self.set_proxy)
        self.proxy_port.editingFinished.connect(self.set_proxy)
        self.proxy_user.editingFinished.connect(self.set_proxy)
        self.proxy_password.editingFinished.connect(self.set_proxy)
        self.proxy_mode.currentIndexChanged.connect(self.proxy_settings_changed)
        self.proxy_host.textEdited.connect(self.proxy_settings_changed)
        self.proxy_port.textEdited.connect(self.proxy_settings_changed)
        self.proxy_user.textEdited.connect(self.proxy_settings_changed)
        self.proxy_password.textEdited.connect(self.proxy_settings_changed)
        self.tor_cb = QCheckBox(_("Use Tor Proxy"))
        self.tor_cb.setIcon(read_QIcon("tor_logo.png"))
        self.tor_cb.hide()  # shown only once a local Tor proxy is detected
        self.tor_cb.clicked.connect(self.use_tor_proxy)
        grid.addWidget(self.tor_cb, 1, 0, 1, 3)
        grid.addWidget(self.proxy_cb, 2, 0, 1, 3)
        grid.addWidget(HelpButton(_('Proxy settings apply to all connections: with Electrum servers, but also with third-party services.')), 2, 4)
        grid.addWidget(self.proxy_mode, 4, 1)
        grid.addWidget(self.proxy_host, 4, 2)
        grid.addWidget(self.proxy_port, 4, 3)
        grid.addWidget(self.proxy_user, 5, 2)
        grid.addWidget(self.proxy_password, 5, 3)
        grid.setRowStretch(7, 1)
        # --- Blockchain (Overview) tab ------------------------------------
        grid = QGridLayout(blockchain_tab)
        msg = ' '.join([
            _("Electrum connects to several nodes in order to download block headers and find out the longest blockchain."),
            _("This blockchain is used to verify the transactions sent by your transaction server.")
        ])
        self.status_label = QLabel('')
        grid.addWidget(QLabel(_('Status') + ':'), 0, 0)
        grid.addWidget(self.status_label, 0, 1, 1, 3)
        grid.addWidget(HelpButton(msg), 0, 4)
        self.server_label = QLabel('')
        msg = _("Electrum sends your wallet addresses to a single server, in order to receive your transaction history.")
        grid.addWidget(QLabel(_('Server') + ':'), 1, 0)
        grid.addWidget(self.server_label, 1, 1, 1, 3)
        grid.addWidget(HelpButton(msg), 1, 4)
        self.height_label = QLabel('')
        msg = _('This is the height of your local copy of the blockchain.')
        grid.addWidget(QLabel(_('Blockchain') + ':'), 2, 0)
        grid.addWidget(self.height_label, 2, 1)
        grid.addWidget(HelpButton(msg), 2, 4)
        self.split_label = QLabel('')
        grid.addWidget(self.split_label, 3, 0, 1, 3)
        self.nodes_list_widget = NodesListWidget(self)
        grid.addWidget(self.nodes_list_widget, 5, 0, 1, 5)
        vbox = QVBoxLayout()
        vbox.addWidget(tabs)
        self.layout_ = vbox
        # tor detector: background thread probing for a local Tor proxy
        self.td = td = TorDetector()
        td.found_proxy.connect(self.suggest_proxy)
        td.start()
        self.fill_in_proxy_settings()
        self.update()
def check_disable_proxy(self, b):
if not self.config.is_modifiable('proxy'):
b = False
for w in [self.proxy_mode, self.proxy_host, self.proxy_port, self.proxy_user, self.proxy_password]:
w.setEnabled(b)
def enable_set_server(self):
if self.config.is_modifiable('server'):
enabled = not self.autoconnect_cb.isChecked()
self.server_host.setEnabled(enabled)
self.server_port.setEnabled(enabled)
self.servers_list.setEnabled(enabled)
else:
for w in [self.autoconnect_cb, self.server_host, self.server_port, self.servers_list]:
w.setEnabled(False)
    def update(self):
        """Refresh every widget on all three tabs from the network state."""
        net_params = self.network.get_parameters()
        host, port, protocol = net_params.host, net_params.port, net_params.protocol
        proxy_config, auto_connect = net_params.proxy, net_params.auto_connect
        # Don't clobber the user's typing while a field has focus.
        if not self.server_host.hasFocus() and not self.server_port.hasFocus():
            self.server_host.setText(host)
            self.server_port.setText(str(port))
        self.autoconnect_cb.setChecked(auto_connect)
        interface = self.network.interface
        host = interface.host if interface else _('None')
        self.server_label.setText(host)
        self.set_protocol(protocol)
        self.servers = self.network.get_servers()
        self.servers_list.update(self.servers, self.protocol, self.tor_cb.isChecked())
        self.enable_set_server()
        height_str = "%d "%(self.network.get_local_height()) + _('blocks')
        self.height_label.setText(height_str)
        n = len(self.network.get_interfaces())
        status = _("Connected to {0} nodes.").format(n) if n else _("Not connected")
        self.status_label.setText(status)
        chains = self.network.get_blockchains()
        # When more than one chain is seen, report which fork branch we are
        # following (or which one our server is on, if not auto-connect).
        if len(chains) > 1:
            chain = self.network.blockchain()
            forkpoint = chain.get_max_forkpoint()
            name = chain.get_name()
            msg = _('Chain split detected at block {0}').format(forkpoint) + '\n'
            msg += (_('You are following branch') if auto_connect else _('Your server is on branch'))+ ' ' + name
            msg += ' (%d %s)' % (chain.get_branch_size(), _('blocks'))
        else:
            msg = ''
        self.split_label.setText(msg)
        self.nodes_list_widget.update(self.network)
def fill_in_proxy_settings(self):
proxy_config = self.network.get_parameters().proxy
if not proxy_config:
proxy_config = {"mode": "none", "host": "localhost", "port": "9050"}
b = proxy_config.get('mode') != "none"
self.check_disable_proxy(b)
if b:
self.proxy_cb.setChecked(True)
self.proxy_mode.setCurrentIndex(
self.proxy_mode.findText(str(proxy_config.get("mode").upper())))
self.proxy_host.setText(proxy_config.get("host"))
self.proxy_port.setText(proxy_config.get("port"))
self.proxy_user.setText(proxy_config.get("user", ""))
self.proxy_password.setText(proxy_config.get("password", ""))
def layout(self):
    """Return the top-level Qt layout built by this settings widget."""
    return self.layout_
def set_protocol(self, protocol):
    """Remember *protocol* as the currently selected one; no-op if unchanged."""
    if protocol == self.protocol:
        return
    self.protocol = protocol
def change_protocol(self, use_ssl):
    """Switch the entered server between SSL ('s') and TCP ('t').

    Falls back to whichever protocol the server actually offers when the
    requested one is not listed for it.
    """
    p = 's' if use_ssl else 't'
    host = self.server_host.text()
    pp = self.servers.get(host, constants.net.DEFAULT_PORTS)
    if p not in pp.keys():
        # Requested protocol unavailable for this server: take the first one.
        p = list(pp.keys())[0]
    port = pp[p]
    self.server_host.setText(host)
    self.server_port.setText(port)
    self.set_protocol(p)
    self.set_server()
def follow_branch(self, chain_id):
    """Make the network follow the chain identified by *chain_id*, then refresh."""
    coro = self.network.follow_chain_given_id(chain_id)
    self.network.run_from_another_thread(coro)
    self.update()
def follow_server(self, server):
    """Make the network follow the chain served by *server*, then refresh."""
    coro = self.network.follow_chain_given_server(server)
    self.network.run_from_another_thread(coro)
    self.update()
def server_changed(self, x):
    """Slot for the servers list: switch to the clicked item, if any."""
    if not x:
        return
    self.change_server(str(x.text(0)), self.protocol)
def change_server(self, host, protocol):
    """Fill the host/port fields for *host*.

    Uses *protocol* when the server offers it; otherwise prefers SSL ('s'),
    and finally whatever protocol the server lists first.
    """
    pp = self.servers.get(host, constants.net.DEFAULT_PORTS)
    if protocol and protocol not in protocol_letters:
        protocol = None
    if protocol:
        port = pp.get(protocol)
        if port is None:
            # Requested protocol not offered by this server.
            protocol = None
    if not protocol:
        if 's' in pp.keys():
            # Prefer SSL when available.
            protocol = 's'
            port = pp.get(protocol)
        else:
            protocol = list(pp.keys())[0]
            port = pp.get(protocol)
    self.server_host.setText(host)
    self.server_port.setText(port)
def accept(self):
    # NOTE(review): intentionally a no-op — settings appear to be applied
    # immediately by the individual slots rather than on dialog accept;
    # confirm against the dialog that embeds this layout.
    pass
def set_server(self):
    """Push host/port/auto-connect from the UI into the network parameters."""
    net_params = self.network.get_parameters()
    # NamedTuple-style parameters: build a modified copy via _replace.
    net_params = net_params._replace(host=str(self.server_host.text()),
                                     port=str(self.server_port.text()),
                                     auto_connect=self.autoconnect_cb.isChecked())
    self.network.run_from_another_thread(self.network.set_parameters(net_params))
def set_proxy(self):
    """Push the proxy settings from the UI into the network parameters."""
    net_params = self.network.get_parameters()
    if self.proxy_cb.isChecked():
        proxy = { 'mode':str(self.proxy_mode.currentText()).lower(),
                  'host':str(self.proxy_host.text()),
                  'port':str(self.proxy_port.text()),
                  'user':str(self.proxy_user.text()),
                  'password':str(self.proxy_password.text())}
    else:
        proxy = None
        # Disabling the proxy also unchecks the "use Tor" convenience box.
        self.tor_cb.setChecked(False)
    net_params = net_params._replace(proxy=proxy)
    self.network.run_from_another_thread(self.network.set_parameters(net_params))
def suggest_proxy(self, found_proxy):
    """Offer a detected local Tor daemon as proxy.

    *found_proxy* is a ``(host, port)`` tuple from TorDetector, or None when
    no Tor daemon was found (the checkbox is then hidden).
    """
    if found_proxy is None:
        self.tor_cb.hide()
        return
    self.tor_proxy = found_proxy
    self.tor_cb.setText("Use Tor proxy at port " + str(found_proxy[1]))
    # Pre-check the box if the current settings already point at this proxy.
    if (self.proxy_cb.isChecked()
            and self.proxy_mode.currentIndex() == self.proxy_mode.findText('SOCKS5')
            and self.proxy_host.text() == "127.0.0.1"
            and self.proxy_port.text() == str(found_proxy[1])):
        self.tor_cb.setChecked(True)
    self.tor_cb.show()
def use_tor_proxy(self, use_it):
    """Enable or disable routing through the previously detected Tor proxy."""
    if not use_it:
        self.proxy_cb.setChecked(False)
    else:
        socks5_mode_index = self.proxy_mode.findText('SOCKS5')
        if socks5_mode_index == -1:
            # Defensive: should not happen with the standard combo-box items.
            _logger.info("can't find proxy_mode 'SOCKS5'")
            return
        # Point the proxy settings at the detected local Tor SOCKS5 port.
        self.proxy_mode.setCurrentIndex(socks5_mode_index)
        self.proxy_host.setText("127.0.0.1")
        self.proxy_port.setText(str(self.tor_proxy[1]))
        self.proxy_user.setText("")
        self.proxy_password.setText("")
        self.tor_cb.setChecked(True)
        self.proxy_cb.setChecked(True)
    self.check_disable_proxy(use_it)
    self.set_proxy()
def proxy_settings_changed(self):
    """Any manual edit invalidates the 'use detected Tor proxy' checkbox."""
    self.tor_cb.setChecked(False)
class TorDetector(QThread):
    """Background thread that polls common local ports for a running Tor
    daemon and emits ``found_proxy`` with a ``(host, port)`` tuple, or None
    when no daemon answered."""

    found_proxy = pyqtSignal(object)

    def __init__(self):
        QThread.__init__(self)

    def run(self):
        # Probable ports for Tor to listen at
        ports = [9050, 9150]
        while True:
            for p in ports:
                net_addr = ("127.0.0.1", p)
                if TorDetector.is_tor_port(net_addr):
                    self.found_proxy.emit(net_addr)
                    break
            else:
                # No port responded like Tor.
                self.found_proxy.emit(None)
            time.sleep(10)

    @staticmethod
    def is_tor_port(net_addr: Tuple[str, int]) -> bool:
        """Return True if a Tor daemon appears to be listening at *net_addr*.

        Tor answers an HTTP-style request with a distinctive error message,
        which identifies it without speaking the control protocol.
        """
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            s.settimeout(0.1)
            s.connect(net_addr)
            # Tor responds uniquely to HTTP-like requests
            s.send(b"GET\n")
            if b"Tor is not an HTTP Proxy" in s.recv(1024):
                return True
        except socket.error:
            pass
        finally:
            # FIX: the socket was previously never closed, leaking one file
            # descriptor per probe (two every 10 seconds, forever).
            s.close()
        return False
| 39.228675 | 146 | 0.634097 |
60f63a5e948bde84d5c95f611fee6209c2f98832 | 517 | py | Python | curricula/templatetags/level_embed.py | code-dot-org/curriculumbuilder | e40330006145b8528f777a8aec2abff5b309d1c7 | [
"Apache-2.0"
] | 3 | 2019-10-22T20:21:15.000Z | 2022-01-12T19:38:48.000Z | curricula/templatetags/level_embed.py | code-dot-org/curriculumbuilder | e40330006145b8528f777a8aec2abff5b309d1c7 | [
"Apache-2.0"
] | 67 | 2019-09-27T17:04:52.000Z | 2022-03-21T22:16:23.000Z | curricula/templatetags/level_embed.py | code-dot-org/curriculumbuilder | e40330006145b8528f777a8aec2abff5b309d1c7 | [
"Apache-2.0"
] | 1 | 2019-10-18T16:06:31.000Z | 2019-10-18T16:06:31.000Z | import re
from django import template
register = template.Library()
URL_RE = r'^/(?P<sub>\w+)(?P<path>.+$)'


@register.filter(name='level_embed')
def level_embed(link):
    """Rewrite a site-relative link such as ``/studio/s/1`` into an absolute
    ``https://<sub>.code.org<path>`` URL.

    Returns *link* unchanged when it does not match URL_RE.
    """
    match = re.match(URL_RE, link)
    if match is None:
        return link
    # Both named groups are guaranteed to exist once the pattern matched, so
    # the previous try/except IndexError was unreachable — and its handler
    # itself crashed (undefined ``logger`` plus a placeholder-less %-format).
    return "https://%s.code.org%s" % (match.group('sub'), match.group('path'))
| 24.619048 | 59 | 0.572534 |
f06b8aaa49907df5ba0ca1b5526b5a06cc4301a6 | 1,974 | py | Python | {{ cookiecutter.namespace }}/setup.py | TheChymera/ndx-template | 44ee210181d5770d037106acff345fe1d30e0477 | [
"BSD-3-Clause-LBNL"
] | 3 | 2020-02-06T01:18:47.000Z | 2022-01-05T21:21:38.000Z | {{ cookiecutter.namespace }}/setup.py | TheChymera/ndx-template | 44ee210181d5770d037106acff345fe1d30e0477 | [
"BSD-3-Clause-LBNL"
] | 38 | 2019-05-15T19:07:12.000Z | 2022-01-27T01:26:57.000Z | {{ cookiecutter.namespace }}/setup.py | TheChymera/ndx-template | 44ee210181d5770d037106acff345fe1d30e0477 | [
"BSD-3-Clause-LBNL"
] | 4 | 2019-05-15T18:03:42.000Z | 2020-05-08T11:38:53.000Z | # -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
from shutil import copy2
# load README.md/README.rst file
readme = ""
# FIX: readme_type was previously only assigned inside the if/elif branches,
# so it was undefined when neither README existed (or reading failed), making
# the setup_args dict below raise NameError. Default it up front.
readme_type = 'text/plain; charset=UTF-8'
try:
    if os.path.exists('README.md'):
        with open('README.md', 'r') as fp:
            readme = fp.read()
        readme_type = 'text/markdown; charset=UTF-8'
    elif os.path.exists('README.rst'):
        with open('README.rst', 'r') as fp:
            readme = fp.read()
        readme_type = 'text/x-rst; charset=UTF-8'
except Exception:
    readme = ""

# Arguments handed to setuptools.setup() by the __main__ block at the bottom
# of this file.
setup_args = {
    'name': '{{ cookiecutter.namespace }}',
    'version': '{{ cookiecutter.version }}',
    'description': '{{ cookiecutter.description }}',
    'long_description': readme,
    'long_description_content_type': readme_type,
    'author': '{{ cookiecutter.author }}',
    'author_email': '{{ cookiecutter.email }}',
    'url': '',
    'license': '{{ cookiecutter.license }}',
    'install_requires': [
        'pynwb>=1.5.0',
        'hdmf>=2.5.6'
    ],
    'packages': find_packages('src/pynwb'),
    'package_dir': {'': 'src/pynwb'},
    'package_data': {'{{ cookiecutter.py_pkg_name }}': [
        'spec/{{ cookiecutter.namespace }}.namespace.yaml',
        'spec/{{ cookiecutter.namespace }}.extensions.yaml',
    ]},
    'classifiers': [
        "Intended Audience :: Developers",
        "Intended Audience :: Science/Research",
    ],
    'zip_safe': False
}
def _copy_spec_files(project_dir):
    """Copy the namespace/extension spec YAML files into the python package
    so they are shipped with the distribution."""
    spec_dir = os.path.join(project_dir, 'spec')
    dst_dir = os.path.join(project_dir, 'src', 'pynwb',
                           '{{ cookiecutter.py_pkg_name }}', 'spec')
    if not os.path.exists(dst_dir):
        os.mkdir(dst_dir)
    for fname in ('{{ cookiecutter.namespace }}.namespace.yaml',
                  '{{ cookiecutter.namespace }}.extensions.yaml'):
        copy2(os.path.join(spec_dir, fname), dst_dir)
if __name__ == '__main__':
    # Copy the spec YAML files into the package tree, then run setuptools.
    _copy_spec_files(os.path.dirname(__file__))
    setup(**setup_args)
| 29.909091 | 97 | 0.607396 |
c7e5e218473d8d278940cf5484d37c56bc0cdc5f | 226 | py | Python | mfr3dcore/math/__init__.py | cmoestl/3DCORE | 7c76b4bdd7e16dc87d876ff75e9441cb48061819 | [
"MIT"
] | 3 | 2020-04-28T09:10:22.000Z | 2021-01-26T15:15:36.000Z | mfr3dcore/math/__init__.py | cmoestl/3DCORE | 7c76b4bdd7e16dc87d876ff75e9441cb48061819 | [
"MIT"
] | null | null | null | mfr3dcore/math/__init__.py | cmoestl/3DCORE | 7c76b4bdd7e16dc87d876ff75e9441cb48061819 | [
"MIT"
] | 1 | 2020-06-18T04:35:58.000Z | 2020-06-18T04:35:58.000Z | # -*- coding: utf-8 -*-
from .csys_polar import csys_polar_to_xyz
from .csys_torus_v1 import csys_xyz_to_torusv1, csys_torusv1_to_xyz, csys_torusv1_to_xyz_jacobian
from .euler_rodrigues import errot, errot_compose, errot_get
| 37.666667 | 97 | 0.831858 |
3b4735a2a894fb9dae5673312488b950f04fbb36 | 7,643 | py | Python | IPython/utils/pickleutil.py | flexlee/ipython | 7528fbd76073c90262b9ac127de57c4c59b23a5c | [
"BSD-3-Clause-Clear"
] | 1 | 2017-02-09T20:01:11.000Z | 2017-02-09T20:01:11.000Z | IPython/utils/pickleutil.py | flexlee/ipython | 7528fbd76073c90262b9ac127de57c4c59b23a5c | [
"BSD-3-Clause-Clear"
] | null | null | null | IPython/utils/pickleutil.py | flexlee/ipython | 7528fbd76073c90262b9ac127de57c4c59b23a5c | [
"BSD-3-Clause-Clear"
] | null | null | null | # encoding: utf-8
"""Pickle related utilities. Perhaps this should be called 'can'."""
__docformat__ = "restructuredtext en"
#-------------------------------------------------------------------------------
# Copyright (C) 2008-2011 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-------------------------------------------------------------------------------
#-------------------------------------------------------------------------------
# Imports
#-------------------------------------------------------------------------------
import copy
import logging
import sys
from types import FunctionType
try:
import cPickle as pickle
except ImportError:
import pickle
try:
import numpy
except:
numpy = None
import codeutil
import py3compat
from importstring import import_item
from IPython.config import Application
if py3compat.PY3:
    # Python 3 has no builtin ``buffer``; alias it to memoryview so the
    # canning code and maps below can refer to ``buffer`` on both versions.
    buffer = memoryview
#-------------------------------------------------------------------------------
# Classes
#-------------------------------------------------------------------------------
class CannedObject(object):
    """Wrap an object for pickling, canning the attributes named in *keys*.

    A shallow copy of *obj* is stored; each attribute listed in *keys* is
    replaced on the copy by its canned form, and restored by get_object().
    """

    def __init__(self, obj, keys=()):
        # FIX: the default was the mutable ``keys=[]`` — replaced by an
        # immutable empty tuple (same iteration behavior, no shared state).
        self.keys = keys
        self.obj = copy.copy(obj)
        for key in keys:
            setattr(self.obj, key, can(getattr(obj, key)))

        self.buffers = []

    def get_object(self, g=None):
        """Uncan the stored attributes in namespace *g* and return the object."""
        if g is None:
            g = {}
        for key in self.keys:
            setattr(self.obj, key, uncan(getattr(self.obj, key), g))
        return self.obj
class Reference(CannedObject):
    """object for wrapping a remote reference by name."""
    def __init__(self, name):
        # ``basestring`` — this module targets Python 2 (py3compat shims
        # handle the Python 3 side elsewhere).
        if not isinstance(name, basestring):
            raise TypeError("illegal name: %r"%name)
        self.name = name
        self.buffers = []

    def __repr__(self):
        return "<Reference: %r>"%self.name

    def get_object(self, g=None):
        # Resolve the name by evaluating it in namespace *g*.
        if g is None:
            g = {}

        return eval(self.name, g)
class CannedFunction(CannedObject):
    """Can a plain Python function: its code object, canned defaults,
    defining module and name; get_object() rebuilds the function."""

    def __init__(self, f):
        self._check_type(f)
        self.code = f.func_code  # Python 2 attribute (``__code__`` on py3)
        if f.func_defaults:
            # Default values may themselves need canning.
            self.defaults = [ can(fd) for fd in f.func_defaults ]
        else:
            self.defaults = None
        self.module = f.__module__ or '__main__'
        self.__name__ = f.__name__
        self.buffers = []

    def _check_type(self, obj):
        assert isinstance(obj, FunctionType), "Not a function type"

    def get_object(self, g=None):
        # try to load function back into its module:
        if not self.module.startswith('__'):
            __import__(self.module)
            g = sys.modules[self.module].__dict__

        if g is None:
            g = {}
        if self.defaults:
            defaults = tuple(uncan(cfd, g) for cfd in self.defaults)
        else:
            defaults = None
        newFunc = FunctionType(self.code, g, self.__name__, defaults)
        return newFunc
class CannedArray(CannedObject):
    """Can a numpy ndarray as (shape, dtype, raw buffer)."""

    def __init__(self, obj):
        self.shape = obj.shape
        # Structured dtypes need the full descr; plain dtypes just the str.
        self.dtype = obj.dtype.descr if obj.dtype.fields else obj.dtype.str
        if sum(obj.shape) == 0:
            # just pickle it
            # NOTE(review): ``sum(shape) == 0`` covers 0-d arrays (shape ())
            # and shapes of all zeros, but e.g. (0, 5) sums to 5 and takes
            # the buffer path — apparently intentional; verify.
            self.buffers = [pickle.dumps(obj, -1)]
        else:
            # ensure contiguous
            obj = numpy.ascontiguousarray(obj, dtype=None)
            self.buffers = [buffer(obj)]

    def get_object(self, g=None):
        data = self.buffers[0]
        if sum(self.shape) == 0:
            # no shape, we just pickled it
            return pickle.loads(data)
        else:
            return numpy.frombuffer(data, dtype=self.dtype).reshape(self.shape)
class CannedBytes(CannedObject):
    """Can a bytes object by storing it directly in the buffers list."""

    wrap = bytes

    def __init__(self, obj):
        self.buffers = [obj]

    def get_object(self, g=None):
        data = self.buffers[0]
        return self.wrap(data)


class CannedBuffer(CannedBytes):
    """Like CannedBytes, but rewraps as buffer/memoryview.

    FIX: this was declared with ``def CannedBuffer(CannedBytes):`` — a
    function shadowing the base class as a parameter — so the subclass was
    never actually created and ``wrap = buffer`` was a dead local statement.
    """
    wrap = buffer
#-------------------------------------------------------------------------------
# Functions
#-------------------------------------------------------------------------------
def _logger():
    """get the logger for the current Application

    the root logger will be used if no Application is running
    """
    if not Application.initialized():
        root = logging.getLogger()
        if not root.handlers:
            logging.basicConfig()
        return root
    return Application.instance().log
def _import_mapping(mapping, original=None):
    """import any string-keys in a type mapping

    Replaces each importable dotted-name key in *mapping* (in place) with
    the class it names. Unimportable keys are dropped; an error is logged
    only for keys not present in *original* (i.e. user-added entries).
    """
    log = _logger()
    log.debug("Importing canning map")
    for key,value in mapping.items():
        if isinstance(key, basestring):
            try:
                cls = import_item(key)
            except Exception:
                if original and key not in original:
                    # only message on user-added classes
                    log.error("cannning class not importable: %r", key, exc_info=True)
                mapping.pop(key)
            else:
                mapping[cls] = mapping.pop(key)
def can(obj):
    """prepare an object for pickling

    Returns the canned form of *obj* if a canner is registered for its
    type, otherwise returns *obj* unchanged.
    """
    import_needed = False

    for cls,canner in can_map.iteritems():
        if isinstance(cls, basestring):
            # Map still holds lazy dotted-name keys; resolve them first.
            import_needed = True
            break
        elif isinstance(obj, cls):
            return canner(obj)

    if import_needed:
        # perform can_map imports, then try again
        # this will usually only happen once
        _import_mapping(can_map, _original_can_map)
        return can(obj)

    return obj
def can_dict(obj):
    """can the *values* of a dict"""
    if not isinstance(obj, dict):
        return obj
    return dict((k, can(v)) for k, v in obj.iteritems())
def can_sequence(obj):
    """can the elements of a sequence (list or tuple), preserving its type"""
    if not isinstance(obj, (list, tuple)):
        return obj
    return type(obj)(can(item) for item in obj)
def uncan(obj, g=None):
    """invert canning

    Returns the uncanned form of *obj* (resolved in namespace *g*) if an
    uncanner is registered for its type, otherwise returns *obj* unchanged.
    """
    import_needed = False
    for cls,uncanner in uncan_map.iteritems():
        if isinstance(cls, basestring):
            # Map still holds lazy dotted-name keys; resolve them first.
            import_needed = True
            break
        elif isinstance(obj, cls):
            return uncanner(obj, g)

    if import_needed:
        # perform uncan_map imports, then try again
        # this will usually only happen once
        _import_mapping(uncan_map, _original_uncan_map)
        return uncan(obj, g)

    return obj
def uncan_dict(obj, g=None):
    """inverse of can_dict: uncan the values of a dict in namespace *g*"""
    if not isinstance(obj, dict):
        return obj
    return dict((k, uncan(v, g)) for k, v in obj.iteritems())
def uncan_sequence(obj, g=None):
    """inverse of can_sequence, preserving the sequence type"""
    if not isinstance(obj, (list, tuple)):
        return obj
    return type(obj)(uncan(item, g) for item in obj)
#-------------------------------------------------------------------------------
# API dictionaries
#-------------------------------------------------------------------------------
# These dicts can be extended for custom serialization of new objects
# These dicts can be extended for custom serialization of new objects
can_map = {
    # Keys may be dotted import strings, resolved lazily by _import_mapping
    # so importing this module does not pull in heavy optional dependencies.
    'IPython.parallel.dependent' : lambda obj: CannedObject(obj, keys=('f','df')),
    'numpy.ndarray' : CannedArray,
    FunctionType : CannedFunction,
    bytes : CannedBytes,
    buffer : CannedBuffer,
}

uncan_map = {
    CannedObject : lambda obj, g: obj.get_object(g),
}

# for use in _import_mapping:
_original_can_map = can_map.copy()
_original_uncan_map = uncan_map.copy()
| 27.492806 | 86 | 0.533168 |
a513b6bef4a0629c9cba7ea183dd6d5810e20d3f | 13,453 | py | Python | pandapower/test/shortcircuit/test_all_currents.py | ZhengLiu1119/pandapower | 4356a2e8fdc6afc5a8449c903d09d90d8fee5530 | [
"BSD-3-Clause"
] | null | null | null | pandapower/test/shortcircuit/test_all_currents.py | ZhengLiu1119/pandapower | 4356a2e8fdc6afc5a8449c903d09d90d8fee5530 | [
"BSD-3-Clause"
] | null | null | null | pandapower/test/shortcircuit/test_all_currents.py | ZhengLiu1119/pandapower | 4356a2e8fdc6afc5a8449c903d09d90d8fee5530 | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright (c) 2016-2022 by University of Kassel and Fraunhofer Institute for Energy Economics
# and Energy System Technology (IEE), Kassel. All rights reserved.
import numpy as np
import pytest
import pandapower as pp
import pandapower.shortcircuit as sc
def three_bus_example():
    """Fixture: ext-grid -- line0 -- bus -- line1 -- bus, with a static
    generator on the middle bus and zero-sequence data so single-phase
    faults can be computed.

    Topology::

        eg--b1---l0---b2---l1---b3
                      |
                     sgen
    """
    net = pp.create_empty_network(sn_mva=56)
    b1 = pp.create_bus(net, 110)
    b2 = pp.create_bus(net, 110)
    b3 = pp.create_bus(net, 110)

    pp.create_ext_grid(net, b1, s_sc_max_mva=100., s_sc_min_mva=80., rx_min=0.4, rx_max=0.4)
    pp.create_line(net, b1, b2, std_type="305-AL1/39-ST1A 110.0" , length_km=20.)
    pp.create_line(net, b2, b3, std_type="N2XS(FL)2Y 1x185 RM/35 64/110 kV" , length_km=15.)
    net.line["endtemp_degree"] = 80
    pp.create_sgen(net, b2, sn_mva=2, p_mw=0, k=1.2)

    # Zero-sequence parameters (required for the 1ph fault calculation).
    net.ext_grid['x0x_min'] = 0.1
    net.ext_grid['r0x0_min'] = 0.1
    net.ext_grid['x0x_max'] = 0.1
    net.ext_grid['r0x0_max'] = 0.1

    net.line['r0_ohm_per_km'] = 0.1
    net.line['x0_ohm_per_km'] = 0.1
    net.line['c0_nf_per_km'] = 0.1
    net.line["endtemp_degree"] = 80
    return net
def three_bus_permuted_index():
    """Same topology as three_bus_example() but with non-sequential element
    indices (buses 4/3/0, lines 1/0) to verify index handling."""
    net = pp.create_empty_network(sn_mva=67)
    b1 = pp.create_bus(net, 110, index=4)
    b2 = pp.create_bus(net, 110, index=3)
    b3 = pp.create_bus(net, 110, index=0)

    pp.create_ext_grid(net, b1, s_sc_max_mva=100., s_sc_min_mva=80., rx_min=0.4, rx_max=0.4)
    pp.create_line(net, b1, b2, std_type="305-AL1/39-ST1A 110.0" , length_km=20., index=1)
    pp.create_line(net, b2, b3, std_type="N2XS(FL)2Y 1x185 RM/35 64/110 kV" , length_km=15., index=0)
    net.line["endtemp_degree"] = 80
    pp.create_sgen(net, b2, sn_mva=2, p_mw=0, k=1.2)
    return net
# def gen_three_bus_example():
# net = pp.create_empty_network(sn_mva=2)
# b1 = pp.create_bus(net, vn_kv=10.)
# b2 = pp.create_bus(net, vn_kv=10.)
# b3 = pp.create_bus(net, vn_kv=10.)
# #pp.create_bus(net, vn_kv=0.4, in_service=False)
# pp.create_gen(net, b2, vn_kv=10.5, xdss_pu=0.2, rdss_pu=0.001, cos_phi=0.8, p_mw=0.1, sn_mva=2.5)
# pp.create_line_from_parameters(net, b1, b2, length_km=1.0, max_i_ka=0.29,
# r_ohm_per_km=0.1548, x_ohm_per_km=0.0816814, c_nf_per_km=165)
# pp.create_line_from_parameters(net, b2, b3, length_km=1.0, max_i_ka=0.29,
# r_ohm_per_km=0.1548, x_ohm_per_km=0.0816814, c_nf_per_km=165)
# net.line["endtemp_degree"] = 165
# pp.create_ext_grid(net, b1, s_sc_max_mva=10., s_sc_min_mva=8., rx_min=0.4, rx_max=0.4)
# #pp.create_switch(net, b3, b1, et="b")
# return net
def net_transformer():
    """Fixture: ext-grid -- switch -- two parallel 10/0.4 kV transformers
    to a LV bus, plus one out-of-service bus and a shunt."""
    net = pp.create_empty_network(sn_mva=2)
    b1a = pp.create_bus(net, vn_kv=10.)
    b1b = pp.create_bus(net, vn_kv=10.)
    b2 = pp.create_bus(net, vn_kv=.4)
    pp.create_bus(net, vn_kv=0.4, in_service=False) #add out of service bus to test oos indexing
    pp.create_ext_grid(net, b1a, s_sc_max_mva=100., s_sc_min_mva=40., rx_min=0.1, rx_max=0.1)
    pp.create_switch(net, b1a, b1b, et="b")
    pp.create_transformer_from_parameters(net, b1b, b2, vn_hv_kv=11., vn_lv_kv=0.42, vk_percent=6.,
                                          vkr_percent=0.5, pfe_kw=14, shift_degree=0.0,
                                          tap_side="hv", tap_neutral=0, tap_min=-2, tap_max=2, tap_pos=2,
                                          tap_step_percent=2.5, parallel=2, sn_mva=0.4, i0_percent=0.5)
    pp.create_shunt(net, b2, q_mvar=0.050, p_mw=0.0500) #adding a shunt shouldn't change the result
    return net
def test_all_currents_sgen():
    """Branch currents (ikss/ip/ith) for faults at every bus, max and min
    case, against precomputed reference values."""
    #
    # eg--0---l0---1---l1---2
    #              |
    #              g
    #
    net = three_bus_example()
    sc.calc_sc(net, case="max", ip=True, ith=True, branch_results=True, return_all_currents=True)
    # Result index is (line, fault_bus); values are the reference currents.
    assert np.allclose(net.res_line_sc.ikss_ka.values,
                       np.array([0.01259673, 0.49593036, 0.48628848, 0., 0., 0.49888962]), atol=1e-5)
    assert np.allclose(net.res_line_sc.ip_ka.values,
                       np.array([0.01781447, 0.92787447, 0.90729584, 0., 0., 0.92511655]), atol=1e-5)
    assert np.allclose(net.res_line_sc.ith_ka.values,
                       np.array([0.01265116, 0.4981196, 0.48841266, 0., 0., 0.50106884]), atol=1e-5)

    sc.calc_sc(net, case="min", ip=True, ith=True, branch_results=True, return_all_currents=True)
    assert np.allclose(net.res_line_sc.ikss_ka.values,
                       np.array([0.01259673, 0.3989686, 0.39170662, 0., 0., 0.40431286]), atol=1e-5)
    assert np.allclose(net.res_line_sc.ip_ka.values,
                       np.array([0.01781447, 0.74438751, 0.72793774, 0., 0., 0.74576565]), atol=1e-5)
    assert np.allclose(net.res_line_sc.ith_ka.values,
                       np.array([0.01265116, 0.40071219, 0.39339323, 0., 0., 0.40605375]), atol=1e-5)
def test_all_currents_1ph_max():
    """1ph max case: check coherence between branch currents and bus
    currents by isolating the sgen contribution (with minus without)."""
    # Only check coherence between branch currents and bus currents
    #
    # eg--0---l0---1---l1---2
    #              |
    #              g
    #
    # With generator
    net = three_bus_example()
    sc.calc_sc(net, case="max", fault='1ph', branch_results=True, return_all_currents=True)
    i_bus_with_sgen = net.res_bus_sc.copy()
    i_line_with_gen = net.res_line_sc.copy()

    # Without generator
    net = three_bus_example()
    net.sgen.in_service = False
    sc.calc_sc(net, case="max", fault='1ph')
    i_bus_without_sgen = net.res_bus_sc.copy()

    # Isolate sgen contrib
    i_bus_only_sgen = i_bus_with_sgen - i_bus_without_sgen

    # (line, fault_bus) entries must be explainable from the bus currents.
    assert np.isclose(i_line_with_gen.ikss_ka.loc[(0, 0)], i_bus_only_sgen.ikss_ka.at[0], atol=1e-4)
    assert np.isclose(i_line_with_gen.ikss_ka.loc[(0, 1)], i_bus_without_sgen.ikss_ka.at[1], atol=1e-4)
    assert np.isclose(i_line_with_gen.ikss_ka.loc[(0, 2)], i_bus_without_sgen.ikss_ka.at[2] -
                      (i_bus_only_sgen.ikss_ka.at[1] - i_bus_only_sgen.ikss_ka.at[2]) , atol=1e-4)
    assert np.isclose(i_line_with_gen.ikss_ka.loc[(1, 0)], 0., atol=1e-4)
    assert np.isclose(i_line_with_gen.ikss_ka.loc[(1, 1)], 0., atol=1e-4)
    assert np.isclose(i_line_with_gen.ikss_ka.loc[(1, 2)], i_bus_with_sgen.ikss_ka.at[2], atol=1e-4)
def test_all_currents_1ph_min():
    """1ph min case: same coherence check as test_all_currents_1ph_max."""
    # Only check coherence between branch currents and bus currents
    #
    # eg--0---l0---1---l1---2
    #              |
    #              g
    #
    # With generator
    net = three_bus_example()
    sc.calc_sc(net, case="min", fault='1ph', branch_results=True, return_all_currents=True)
    i_bus_with_sgen = net.res_bus_sc.copy()
    i_line_with_gen = net.res_line_sc.copy()

    # Without generator
    net.sgen.in_service = False
    sc.calc_sc(net, case="min", fault='1ph', branch_results=True)
    i_bus_without_sgen = net.res_bus_sc.copy()

    # Isolate sgen contrib
    i_bus_only_sgen = i_bus_with_sgen - i_bus_without_sgen

    assert np.isclose(i_line_with_gen.ikss_ka.loc[(0, 0)], i_bus_only_sgen.ikss_ka.at[0], atol=1e-4)
    assert np.isclose(i_line_with_gen.ikss_ka.loc[(0, 1)], i_bus_without_sgen.ikss_ka.at[1], atol=1e-4)
    assert np.isclose(i_line_with_gen.ikss_ka.loc[(0, 2)], i_bus_without_sgen.ikss_ka.at[2] -
                      (i_bus_only_sgen.ikss_ka.at[1] - i_bus_only_sgen.ikss_ka.at[2]) , atol=1e-4)
    assert np.isclose(i_line_with_gen.ikss_ka.loc[(1, 0)], 0., atol=1e-4)
    assert np.isclose(i_line_with_gen.ikss_ka.loc[(1, 1)], 0., atol=1e-4)
    assert np.isclose(i_line_with_gen.ikss_ka.loc[(1, 2)], i_bus_with_sgen.ikss_ka.at[2], atol=1e-4)
def test_with_permuted_index():
    """Results must be consistent when element indices are permuted."""
    # Check that if element's index are permuted the results are still consistent
    #
    # eg--4---l1---3---l0---1
    #              |
    #              g
    #
    net = three_bus_permuted_index()
    sc.calc_sc(net, case="max", ip=True, ith=True, branch_results=True, return_all_currents=True)
    # Select results in the (line, fault_bus) order of the reference values.
    assert np.allclose(net.res_line_sc.ikss_ka.loc[[(1, 4), (1, 3), (1, 0), (0, 4), (0, 3), (0, 0)]].values,
                       np.array([0.01259673, 0.49593036, 0.48628848, 0., 0., 0.49888962]), atol=1e-5)

    sc.calc_sc(net, case="min", ip=True, ith=True, branch_results=True, return_all_currents=True)
    assert np.allclose(net.res_line_sc.ikss_ka.loc[[(1, 4), (1, 3), (1, 0), (0, 4), (0, 3), (0, 0)]].values,
                       np.array([0.01259673, 0.3989686, 0.39170662, 0., 0., 0.40431286]), atol=1e-5)
def test_all_currents_with_oos_elements():
    """Out-of-service bus/line must yield zero current for the affected
    (line, fault_bus) entries and unchanged currents elsewhere."""
    net = three_bus_example()
    # FIX: write through the DataFrame with .loc[row, col] instead of chained
    # indexing (net.bus.in_service.loc[2] = ...), which pandas may apply to a
    # temporary copy of the column (SettingWithCopyWarning).
    net.bus.loc[2, "in_service"] = False
    net.line.loc[1, "in_service"] = False
    sc.calc_sc(net, case="max", branch_results=True, return_all_currents=True)
    assert np.allclose(net.res_line_sc.ikss_ka.loc[[(0, 0), (0, 1)]].values,
                       np.array([0.01259673, 0.49593036]), atol=1e-5)
    assert np.allclose(net.res_line_sc.ikss_ka.loc[[(0, 2), (1, 0), (1, 1), (1, 2)]].values,
                       0, atol=1e-10)

    sc.calc_sc(net, case="min", branch_results=True, return_all_currents=True)
    assert np.allclose(net.res_line_sc.ikss_ka.loc[[(0, 0), (0, 1)]].values,
                       np.array([0.01259673, 0.3989686]), atol=1e-5)
    assert np.allclose(net.res_line_sc.ikss_ka.loc[[(0, 2), (1, 0), (1, 1), (1, 2)]].values,
                       0, atol=1e-10)
# TODO: This example should not work anymore
# def test_branch_all_currents_gen():
# net = gen_three_bus_example()
# sc.calc_sc(net, case="max", branch_results=True, return_all_currents=True)
# assert np.allclose(net.res_line_sc.ikss_ka.values,
# np.array([0.76204252, 0.57040645, 0.55786693, 0., 0., 1.28698045]))
# sc.calc_sc(net, case="min", branch_results=True, return_all_currents=True)
# assert np.allclose(net.res_line_sc.ikss_ka.values,
# np.array([0.69255026, 0.45574755, 0.44487882, 0., 0., 1.10747517]))
def test_branch_all_currents_trafo():
    """Transformer HV/LV branch currents for faults at each bus; only the
    LV-bus fault (index 2) drives current through the transformer."""
    net = net_transformer()
    sc.calc_sc(net, case='max', ip=True, ith=True, lv_tol_percent=10., branch_results=True, return_all_currents=True)

    assert (abs(net.res_trafo_sc.ikss_lv_ka.loc[(0,0)] - 0.) <1e-5)
    assert (abs(net.res_trafo_sc.ikss_lv_ka.loc[(0,1)] - 0.) <1e-5)
    assert (abs(net.res_trafo_sc.ikss_lv_ka.loc[(0,2)] - 16.992258758) <1e-5)

    assert (abs(net.res_trafo_sc.ikss_hv_ka.loc[(0,0)] - 0.) <1e-5)
    assert (abs(net.res_trafo_sc.ikss_hv_ka.loc[(0,1)] - 0.) <1e-5)
    assert (abs(net.res_trafo_sc.ikss_hv_ka.loc[(0,2)] - 0.648795) <1e-5)
def test_against_single_sc_results_line():
    """Multi-fault line results must equal per-bus single-fault results."""
    net = three_bus_permuted_index()

    sc.calc_sc(net, case="max", branch_results=True, return_all_currents=True)
    multi_results = net.res_line_sc.copy()

    for bus in net.bus.index:
        sc.calc_sc(net, bus=bus, case="max", branch_results=True, return_all_currents=True)
        line_bus_indices = [(line, bus) for line in net.line.index]
        single_result = net.res_line_sc.ikss_ka.values
        multi_result = multi_results.ikss_ka.loc[line_bus_indices].values
        assert np.allclose(single_result, multi_result)
def test_against_single_sc_results_trafo():
    """Multi-fault transformer results must equal per-bus single-fault
    results, on both the HV and LV side (in-service buses only)."""
    net = net_transformer()
    sc.calc_sc(net, case="max", branch_results=True, return_all_currents=True, inverse_y=False)
    multi_results = net.res_trafo_sc.copy()

    for bus in net.bus.index[net.bus.in_service]:
        sc.calc_sc(net, bus=bus, case="max", branch_results=True, return_all_currents=True, inverse_y=False)
        trafo_bus_indices = [(trafo, bus) for trafo in net.trafo.index]
        single_result_lv = net.res_trafo_sc.ikss_lv_ka.values
        multi_result_lv = multi_results.ikss_lv_ka.loc[trafo_bus_indices].values
        assert np.allclose(single_result_lv, multi_result_lv)

        single_result_hv = net.res_trafo_sc.ikss_hv_ka.values
        multi_result_hv = multi_results.ikss_hv_ka.loc[trafo_bus_indices].values
        assert np.allclose(single_result_hv, multi_result_hv)
def test_ward():
    """Short-circuit results with a ward equivalent, against reference values."""
    net = pp.create_empty_network(sn_mva=9)
    pp.create_buses(net, 2, 110)
    pp.create_ext_grid(net, 0, s_sc_max_mva=100, rx_max=0.1)
    pp.create_line_from_parameters(net, 0, 1, 1, 0.5, 0.5, 0, 1000)
    pp.create_ward(net, 1, 10, 5, 200, 100)
    sc.calc_sc(net)
    ikss_ka = [1.209707, 1.209818]
    rk_ohm = [57.719840, 57.678686]
    xk_ohm = [-1.834709, -2.740132]
    assert np.allclose(net.res_bus_sc.ikss_ka, ikss_ka, atol=1e-6, rtol=0)
    assert np.allclose(net.res_bus_sc.rk_ohm, rk_ohm, atol=1e-6, rtol=0)
    assert np.allclose(net.res_bus_sc.xk_ohm, xk_ohm, atol=1e-6, rtol=0)
def test_xward():
    """Short-circuit results with an extended ward (xward) equivalent;
    reference values match test_ward for this configuration."""
    net = pp.create_empty_network(sn_mva=4)
    pp.create_buses(net, 2, 110)
    pp.create_ext_grid(net, 0, s_sc_max_mva=100, rx_max=0.1)
    pp.create_line_from_parameters(net, 0, 1, 1, 0.5, 0.5, 0, 1000)
    pp.create_xward(net, 1, 10, 5, 200, 100, 3, 1, vm_pu=1.02)
    sc.calc_sc(net)
    ikss_ka = [1.209707, 1.209818]
    rk_ohm = [57.719840, 57.678686]
    xk_ohm = [-1.834709, -2.740132]
    assert np.allclose(net.res_bus_sc.ikss_ka, ikss_ka, atol=1e-6, rtol=0)
    assert np.allclose(net.res_bus_sc.rk_ohm, rk_ohm, atol=1e-6, rtol=0)
    assert np.allclose(net.res_bus_sc.xk_ohm, xk_ohm, atol=1e-6, rtol=0)
if __name__ == '__main__':
    # Allow running this test module directly.
    pytest.main([__file__])
| 45.60339 | 118 | 0.639634 |
f1e046d9e53c2a3a81d71e2654f6c65a1b20420c | 5,265 | py | Python | imcsdk/mometa/bios/BiosVfSgx.py | ecoen66/imcsdk | b10eaa926a5ee57cea7182ae0adc8dd1c818b0ab | [
"Apache-2.0"
] | 31 | 2016-06-14T07:23:59.000Z | 2021-09-12T17:17:26.000Z | imcsdk/mometa/bios/BiosVfSgx.py | sthagen/imcsdk | 1831eaecb5960ca03a8624b1579521749762b932 | [
"Apache-2.0"
] | 109 | 2016-05-25T03:56:56.000Z | 2021-10-18T02:58:12.000Z | imcsdk/mometa/bios/BiosVfSgx.py | sthagen/imcsdk | 1831eaecb5960ca03a8624b1579521749762b932 | [
"Apache-2.0"
] | 67 | 2016-05-17T05:53:56.000Z | 2022-03-24T15:52:53.000Z | """This module contains the general information for BiosVfSgx ManagedObject."""
from ...imcmo import ManagedObject
from ...imccoremeta import MoPropertyMeta, MoMeta
from ...imcmeta import VersionMeta
class BiosVfSgxConsts:
    """String constants for the BiosVfSgx managed object's properties.

    Each property accepts a capitalized spelling ("Enabled"), a lowercase
    spelling (exposed through the underscore-prefixed names) and the
    special value "platform-default".
    """
    VP_ENABLE_SGX_DISABLED = "Disabled"
    VP_ENABLE_SGX_ENABLED = "Enabled"
    _VP_ENABLE_SGX_DISABLED = "disabled"
    _VP_ENABLE_SGX_ENABLED = "enabled"
    VP_ENABLE_SGX_PLATFORM_DEFAULT = "platform-default"
    VP_SGX_AUTO_REGISTRATION_AGENT_DISABLED = "Disabled"
    VP_SGX_AUTO_REGISTRATION_AGENT_ENABLED = "Enabled"
    _VP_SGX_AUTO_REGISTRATION_AGENT_DISABLED = "disabled"
    _VP_SGX_AUTO_REGISTRATION_AGENT_ENABLED = "enabled"
    VP_SGX_AUTO_REGISTRATION_AGENT_PLATFORM_DEFAULT = "platform-default"
    VP_SGX_FACTORY_RESET_DISABLED = "Disabled"
    VP_SGX_FACTORY_RESET_ENABLED = "Enabled"
    _VP_SGX_FACTORY_RESET_DISABLED = "disabled"
    _VP_SGX_FACTORY_RESET_ENABLED = "enabled"
    VP_SGX_FACTORY_RESET_PLATFORM_DEFAULT = "platform-default"
    VP_SGX_LE_WR_DISABLED = "Disabled"
    VP_SGX_LE_WR_ENABLED = "Enabled"
    _VP_SGX_LE_WR_DISABLED = "disabled"
    _VP_SGX_LE_WR_ENABLED = "enabled"
    VP_SGX_LE_WR_PLATFORM_DEFAULT = "platform-default"
    VP_SGX_PACKAGE_INFO_IN_BAND_ACCESS_DISABLED = "Disabled"
    VP_SGX_PACKAGE_INFO_IN_BAND_ACCESS_ENABLED = "Enabled"
    _VP_SGX_PACKAGE_INFO_IN_BAND_ACCESS_DISABLED = "disabled"
    _VP_SGX_PACKAGE_INFO_IN_BAND_ACCESS_ENABLED = "enabled"
    VP_SGX_PACKAGE_INFO_IN_BAND_ACCESS_PLATFORM_DEFAULT = "platform-default"
    VP_SGX_QOS_DISABLED = "Disabled"
    VP_SGX_QOS_ENABLED = "Enabled"
    _VP_SGX_QOS_DISABLED = "disabled"
    _VP_SGX_QOS_ENABLED = "enabled"
    VP_SGX_QOS_PLATFORM_DEFAULT = "platform-default"
class BiosVfSgx(ManagedObject):
"""This is BiosVfSgx class."""
consts = BiosVfSgxConsts()
naming_props = set([])
mo_meta = {
"classic": MoMeta("BiosVfSgx", "biosVfSgx", "Enable-Sgx", VersionMeta.Version421a, "InputOutput", 0x3ff, [], ["admin"], ['biosPlatformDefaults', 'biosSettings'], [], [None]),
}
prop_meta = {
"classic": {
"child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version421a, MoPropertyMeta.INTERNAL, None, None, None, None, [], []),
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version421a, MoPropertyMeta.READ_WRITE, 0x2, 0, 255, None, [], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version421a, MoPropertyMeta.READ_WRITE, 0x4, 0, 255, None, [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version421a, MoPropertyMeta.READ_WRITE, 0x8, None, None, None, ["", "created", "deleted", "modified", "removed"], []),
"vp_enable_sgx": MoPropertyMeta("vp_enable_sgx", "vpEnableSgx", "string", VersionMeta.Version421a, MoPropertyMeta.READ_WRITE, 0x10, None, None, None, ["Disabled", "Enabled", "disabled", "enabled", "platform-default"], []),
"vp_sgx_auto_registration_agent": MoPropertyMeta("vp_sgx_auto_registration_agent", "vpSgxAutoRegistrationAgent", "string", VersionMeta.Version421a, MoPropertyMeta.READ_WRITE, 0x20, None, None, None, ["Disabled", "Enabled", "disabled", "enabled", "platform-default"], []),
"vp_sgx_factory_reset": MoPropertyMeta("vp_sgx_factory_reset", "vpSgxFactoryReset", "string", VersionMeta.Version421a, MoPropertyMeta.READ_WRITE, 0x40, None, None, None, ["Disabled", "Enabled", "disabled", "enabled", "platform-default"], []),
"vp_sgx_le_wr": MoPropertyMeta("vp_sgx_le_wr", "vpSgxLeWr", "string", VersionMeta.Version421a, MoPropertyMeta.READ_WRITE, 0x80, None, None, None, ["Disabled", "Enabled", "disabled", "enabled", "platform-default"], []),
"vp_sgx_package_info_in_band_access": MoPropertyMeta("vp_sgx_package_info_in_band_access", "vpSgxPackageInfoInBandAccess", "string", VersionMeta.Version421a, MoPropertyMeta.READ_WRITE, 0x100, None, None, None, ["Disabled", "Enabled", "disabled", "enabled", "platform-default"], []),
"vp_sgx_qos": MoPropertyMeta("vp_sgx_qos", "vpSgxQos", "string", VersionMeta.Version421a, MoPropertyMeta.READ_WRITE, 0x200, None, None, None, ["Disabled", "Enabled", "disabled", "enabled", "platform-default"], []),
},
}
prop_map = {
"classic": {
"childAction": "child_action",
"dn": "dn",
"rn": "rn",
"status": "status",
"vpEnableSgx": "vp_enable_sgx",
"vpSgxAutoRegistrationAgent": "vp_sgx_auto_registration_agent",
"vpSgxFactoryReset": "vp_sgx_factory_reset",
"vpSgxLeWr": "vp_sgx_le_wr",
"vpSgxPackageInfoInBandAccess": "vp_sgx_package_info_in_band_access",
"vpSgxQos": "vp_sgx_qos",
},
}
def __init__(self, parent_mo_or_dn, **kwargs):
    """Create a BiosVfSgx managed object under *parent_mo_or_dn*.

    All SGX-related attributes start as None; keyword arguments are
    forwarded to ManagedObject, which performs the actual assignment.
    """
    # Bitmask tracking which writable properties have been modified.
    self._dirty_mask = 0
    self.child_action = None
    self.status = None
    self.vp_enable_sgx = None
    self.vp_sgx_auto_registration_agent = None
    self.vp_sgx_factory_reset = None
    self.vp_sgx_le_wr = None
    self.vp_sgx_package_info_in_band_access = None
    self.vp_sgx_qos = None
    ManagedObject.__init__(self, "BiosVfSgx", parent_mo_or_dn, **kwargs)
| 53.181818 | 294 | 0.699525 |
065f5ed2b644efe80a23f0f101ff309d30af19d3 | 1,601 | py | Python | test/utils_tests/opencas-py-tests/test_casadm_01.py | CAS-Linux-Jenkins/open-cas-linux | 6ef7195950abf6aa239a21963cdd2eca4779bdac | [
"BSD-3-Clause-Clear"
] | 2 | 2021-08-13T14:44:45.000Z | 2022-01-10T07:41:40.000Z | test/utils_tests/opencas-py-tests/test_casadm_01.py | josehu07/open-cas-linux-mf | 5c6870be8bbb6816645955b6e479c9b5c7c0074d | [
"BSD-3-Clause-Clear"
] | null | null | null | test/utils_tests/opencas-py-tests/test_casadm_01.py | josehu07/open-cas-linux-mf | 5c6870be8bbb6816645955b6e479c9b5c7c0074d | [
"BSD-3-Clause-Clear"
] | null | null | null | #
# Copyright(c) 2012-2020 Intel Corporation
# SPDX-License-Identifier: BSD-3-Clause-Clear
#
import pytest
import subprocess
import unittest.mock as mock
from opencas import casadm
from helpers import get_process_mock
@mock.patch("subprocess.run")
def test_run_cmd_01(mock_run):
mock_run.return_value = get_process_mock(0, "successes", "errors")
result = casadm.run_cmd(["casadm", "-L"])
assert result.exit_code == 0
assert result.stdout == "successes"
assert result.stderr == "errors"
mock_run.assert_called_once_with(
["casadm", "-L"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=mock.ANY,
)
@mock.patch("subprocess.run")
def test_run_cmd_02(mock_run):
mock_run.return_value = get_process_mock(4, "successes", "errors")
with pytest.raises(casadm.CasadmError):
casadm.run_cmd(["casadm", "-L"])
@mock.patch("subprocess.run")
def test_get_version_01(mock_run):
mock_run.return_value = get_process_mock(0, "0.0.1", "errors")
result = casadm.get_version()
assert result.exit_code == 0
assert result.stdout == "0.0.1"
assert result.stderr == "errors"
mock_run.assert_called_once_with(
[casadm.casadm_path, "--version", "--output-format", "csv"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
universal_newlines=mock.ANY,
)
@mock.patch("subprocess.run")
def test_get_version_02(mock_run):
mock_run.return_value = get_process_mock(4, "successes", "errors")
with pytest.raises(casadm.CasadmError):
casadm.get_version()
| 27.603448 | 70 | 0.693317 |
dc333d22f9933b692c7be60f53987bb71d434ec5 | 1,599 | py | Python | utils.py | JueLin/textureSynthesis-stylegan2-pytorch | 60ecc5a5ae184a76ab63dc58b9d624e7895fdcef | [
"MIT"
] | 3 | 2022-03-25T02:46:16.000Z | 2022-03-28T17:31:27.000Z | utils.py | JueLin/textureSynthesis-stylegan2-pytorch | 60ecc5a5ae184a76ab63dc58b9d624e7895fdcef | [
"MIT"
] | 1 | 2022-03-29T17:02:29.000Z | 2022-03-30T08:05:36.000Z | utils.py | JueLin/textureSynthesis-stylegan2-pytorch | 60ecc5a5ae184a76ab63dc58b9d624e7895fdcef | [
"MIT"
] | null | null | null | import torch
import os
from PIL import Image
def load_image(filename, size=None, scale=None):
    """Load an image from *filename*, optionally resizing it.

    If *size* is given the image is resized to (size, size); otherwise, if
    *scale* is given, both dimensions are divided by *scale*.  *size* takes
    precedence when both are supplied.  Returns a PIL Image.
    """
    img = Image.open(filename)
    # Image.ANTIALIAS was removed in Pillow 10; Image.LANCZOS is the same filter.
    if size is not None:
        img = img.resize((size, size), Image.LANCZOS)
    elif scale is not None:
        img = img.resize((int(img.size[0] / scale), int(img.size[1] / scale)), Image.LANCZOS)
    return img
def mkdir(path=None):
    """Create directory *path* (including parents) if it does not exist.

    A no-op when the directory already exists.  Raises FileExistsError if
    *path* exists but is not a directory (the previous silent no-op there
    would have hidden a real problem).
    """
    # os.makedirs(exist_ok=True) already handles the "exists" case, so the
    # former os.path.exists() pre-check (a TOCTOU race) is unnecessary.
    os.makedirs(path, exist_ok=True)
def save_image(filename, data):
    """Write a CHW tensor *data* (values clamped to 0-255) to *filename*."""
    array = data.clone().clamp(0, 255).numpy()
    # HWC uint8 layout is what PIL expects.
    array = array.transpose(1, 2, 0).astype("uint8")
    Image.fromarray(array).save(filename)
def gram_matrix(y):
    """Return per-sample Gram matrices of feature maps *y* of shape (B, C, H, W).

    Each (C, C) entry is the inner product of two channels' flattened
    activations, scaled by 1 / (C * H * W).
    """
    batch, channels, height, width = y.size()
    flattened = y.view(batch, channels, height * width)
    return torch.bmm(flattened, flattened.transpose(1, 2)) / (channels * height * width)
def normalize_batch_vgg(batch):
    """Normalize *batch* with the ImageNet channel mean/std expected by VGG."""
    imagenet_mean = batch.new_tensor([0.485, 0.456, 0.406]).view(-1, 1, 1)
    imagenet_std = batch.new_tensor([0.229, 0.224, 0.225]).view(-1, 1, 1)
    return batch.sub(imagenet_mean).div(imagenet_std)
def denormalize_batch_vgg(batch):
    """Undo the ImageNet normalization applied by normalize_batch_vgg."""
    imagenet_mean = batch.new_tensor([0.485, 0.456, 0.406]).view(-1, 1, 1)
    imagenet_std = batch.new_tensor([0.229, 0.224, 0.225]).view(-1, 1, 1)
    return batch.mul(imagenet_std).add(imagenet_mean)
def deprocess_image(batch, in_high=1, in_low=-1, out_high=1, out_low=0):
    """Linearly remap *batch* from [in_low, in_high] to [out_low, out_high].

    :raises ValueError: if either range is empty or inverted.
    """
    # Explicit exceptions instead of `assert`, which is stripped under -O.
    if in_high <= in_low:
        raise ValueError("Invalid input range")
    if out_high <= out_low:
        raise ValueError("Invalid output range")
    # Map to [0, 1] first, then scale/shift into the output range.
    return (batch - in_low) / (in_high - in_low) * (out_high - out_low) + out_low
cf7c35c5336b91da4bd31940d15268cee621b0cb | 1,147 | py | Python | Mundo 3/ex113.py | adonaifariasdev/cursoemvideo-python3 | 1fd35e45b24c52013fa3bc98e723971db8e6b7d1 | [
"MIT"
] | null | null | null | Mundo 3/ex113.py | adonaifariasdev/cursoemvideo-python3 | 1fd35e45b24c52013fa3bc98e723971db8e6b7d1 | [
"MIT"
] | null | null | null | Mundo 3/ex113.py | adonaifariasdev/cursoemvideo-python3 | 1fd35e45b24c52013fa3bc98e723971db8e6b7d1 | [
"MIT"
] | null | null | null | # Rewrite the leiaInt() function from challenge 104, now also handling input of an
# invalid type. While at it, also create a leiaFloat() function with the same behavior.
def leiaInt(msg):
    """Prompt with *msg* until the user types a valid integer, and return it.

    Invalid input re-prompts; Ctrl+C (KeyboardInterrupt) returns 0 instead of
    aborting.  User-facing strings are intentionally kept in Portuguese.
    """
    while True:
        try:
            n = int(input(msg))
        except (ValueError, TypeError):
            # Not a parseable integer: show the error (in red) and ask again.
            print('\033[0;31mERRO: por favor, digite um nùmero inteiro válido.\033[m')
            continue
        except (KeyboardInterrupt):
            # Ctrl+C: treat as "user declined to answer" and return 0.
            print('\033[0;31mUsuário preferiu não digitar esse número.\033[m')
            return 0
        else:
            return n
def leiaFloat(msg):
    """Prompt with *msg* until the user types a valid float, and return it.

    Invalid input re-prompts; Ctrl+C (KeyboardInterrupt) returns 0 instead of
    aborting.  User-facing strings are intentionally kept in Portuguese.
    """
    while True:
        try:
            n = float(input(msg))
        except (ValueError, TypeError):
            # Not a parseable real number: show the error (in red) and ask again.
            print('\033[0;31mERRO: por favor, digite um nùmero real válido.\033[m')
            continue
        except (KeyboardInterrupt):
            # Ctrl+C: treat as "user declined to answer" and return 0.
            print('\033[0;31mUsuário preferiu não digitar esse número.\033[m')
            return 0
        else:
            return n
# Demo: read one integer and one real number, then echo both back.
n1 = leiaInt('Digite um Inteiro: ')
n2 = leiaFloat('Digite um Real: ')
print(f'O valor inteiro digitado foi {n1} e o real foi {n2}.')
| 33.735294 | 106 | 0.605057 |
d763304c9ef85dbe0239a64727999c72f6be353d | 8,237 | py | Python | pyod/models/mcd.py | paragon520/cascoKNN | 1de6ce6ca6615ef4cde4b6111727a55a26998594 | [
"BSD-2-Clause"
] | 2 | 2020-11-04T13:45:17.000Z | 2021-08-06T02:03:51.000Z | pyod/models/mcd.py | LiDan456/Pyod | 1bee06200c5e8c86977bbdec896b624af2c2e05a | [
"BSD-2-Clause"
] | null | null | null | pyod/models/mcd.py | LiDan456/Pyod | 1bee06200c5e8c86977bbdec896b624af2c2e05a | [
"BSD-2-Clause"
] | 1 | 2021-03-03T08:45:04.000Z | 2021-03-03T08:45:04.000Z | # -*- coding: utf-8 -*-
"""Outlier Detection with Minimum Covariance Determinant (MCD)
"""
# Author: Yue Zhao <yuezhao@cs.toronto.edu>
# License: BSD 2 clause
from __future__ import division
from __future__ import print_function
from sklearn.covariance import MinCovDet
from sklearn.utils.validation import check_is_fitted
from sklearn.utils.validation import check_array
from .base import BaseDetector
# Public API of this module.
__all__ = ['MCD']
class MCD(BaseDetector):
    """An object for detecting outliers in a Gaussian distributed dataset using
    Minimum Covariance Determinant (MCD): robust estimator of covariance.

    The Minimum Covariance Determinant covariance estimator is to be applied
    on Gaussian-distributed data, but could still be relevant on data
    drawn from a unimodal, symmetric distribution. It is not meant to be used
    with multi-modal data (the algorithm used to fit a MinCovDet object is
    likely to fail in such a case).
    One should consider projection pursuit methods to deal with multi-modal
    datasets.

    First fit a minimum covariance determinant model and then compute the
    Mahalanobis distance as the outlier degree of the data

    See :cite:`rousseeuw1999fast,hardin2004outlier` for details.

    :param contamination: The amount of contamination of the data set,
        i.e. the proportion of outliers in the data set. Used when fitting to
        define the threshold on the decision function.
    :type contamination: float in (0., 0.5), optional (default=0.1)

    :param store_precision: Specify if the estimated precision is stored.
    :type store_precision: bool, optional (default=True)

    :param assume_centered: If True, the support of the robust location and
        the covariance estimates is computed, and a covariance estimate is
        recomputed from it, without centering the data.
        Useful to work with data whose mean is significantly equal to
        zero but is not exactly zero.
        If False, the robust location and covariance are directly computed
        with the FastMCD algorithm without additional treatment.
    :type assume_centered: bool, optional (default=False)

    :param support_fraction: The proportion of points to be included in the
        support of the raw MCD estimate. Default is None, which implies that
        the minimum value of support_fraction will be used within the
        algorithm: [n_sample + n_features + 1] / 2
    :type support_fraction: float, optional (default=None)

    :param random_state: If int, random_state is the seed used by the random
        number generator; If RandomState instance, random_state is the random
        number generator; If None, the random number generator is the
        RandomState instance used by `np.random`.
    :type random_state: int, RandomState instance or None, optional
        (default=None)

    :var raw_location\_: The raw robust estimated location before correction
        and re-weighting.
    :vartype raw_location\_: array-like of shape (n_features,)

    :var raw_covariance\_: The raw robust estimated covariance before
        correction and re-weighting.
    :vartype raw_covariance\_: array-like of shape (n_features, n_features)

    :var raw_support\_: A mask of the observations that have been used to
        compute the raw robust estimates of location and shape, before
        correction and re-weighting.
    :vartype raw_support\_: array-like of shape (n_samples,)

    :var location\_: Estimated robust location
    :vartype location\_: array-like of shape (n_features,)

    :var covariance\_: Estimated robust covariance matrix
    :vartype covariance\_: array-like of shape (n_features, n_features)

    :var precision\_: Estimated pseudo inverse matrix.
        (stored only if store_precision is True)
    :vartype precision\_: array-like of shape (n_features, n_features)

    :var support\_: A mask of the observations that have been used to compute
        the robust estimates of location and shape.
    :vartype support\_: array-like of shape (n_samples,)

    :var decision_scores\_: The outlier scores of the training data.
        The higher, the more abnormal. Outliers tend to have higher
        scores. This value is available once the detector is
        fitted.
    :vartype decision_scores\_: numpy array of shape (n_samples,)

    :var threshold\_: The threshold is based on ``contamination``. It is the
        ``n_samples * contamination`` most abnormal samples in
        ``decision_scores_``. The threshold is calculated for generating
        binary outlier labels.
    :vartype threshold\_: float

    :var labels\_: The binary labels of the training data. 0 stands for inliers
        and 1 for outliers/anomalies. It is generated by applying
        ``threshold_`` on ``decision_scores_``.
    :vartype labels\_: int, either 0 or 1
    """

    def __init__(self, contamination=0.1, store_precision=True,
                 assume_centered=False, support_fraction=None,
                 random_state=None):
        """Store the estimator parameters; the MinCovDet detector itself is
        created lazily in :meth:`fit`."""
        super(MCD, self).__init__(contamination=contamination)
        self.store_precision = store_precision
        self.assume_centered = assume_centered
        self.support_fraction = support_fraction
        self.random_state = random_state

    # noinspection PyIncorrectDocstring
    def fit(self, X, y=None):
        """Fit the model using X as training data.

        :param X: Training data. If array or matrix,
            shape [n_samples, n_features],
            or [n_samples, n_samples] if metric='precomputed'.
        :type X: {array-like, sparse matrix, BallTree, KDTree}

        :return: self
        :rtype: object
        """
        # Validate inputs X and y (optional)
        X = check_array(X)
        self._set_n_classes(y)
        self.detector_ = MinCovDet(store_precision=self.store_precision,
                                   assume_centered=self.assume_centered,
                                   support_fraction=self.support_fraction,
                                   random_state=self.random_state)
        self.detector_.fit(X=X, y=y)

        # Use the Mahalanobis distance as the outlier score
        self.decision_scores_ = self.detector_.dist_
        self._process_decision_scores()
        return self

    def decision_function(self, X):
        """Predict raw outlier scores of X using the fitted detector.

        :param X: The input samples.
        :type X: array-like of shape (n_samples, n_features)

        :return: The outlier score (Mahalanobis distance) of each sample.
        :rtype: numpy array of shape (n_samples,)
        """
        check_is_fitted(self, ['decision_scores_', 'threshold_', 'labels_'])
        X = check_array(X)
        # Compute the Mahalanobis distance of the samples
        return self.detector_.mahalanobis(X)

    @property
    def raw_location_(self):
        """The raw robust estimated location before correction and
        re-weighting.

        Decorator for scikit-learn MinCovDet attributes.
        """
        return self.detector_.raw_location_

    @property
    def raw_covariance_(self):
        """The raw robust estimated covariance before correction and
        re-weighting.

        Decorator for scikit-learn MinCovDet attributes.
        """
        return self.detector_.raw_covariance_

    @property
    def raw_support_(self):
        """A mask of the observations that have been used to compute
        the raw robust estimates of location and shape, before correction
        and re-weighting.

        Decorator for scikit-learn MinCovDet attributes.
        """
        return self.detector_.raw_support_

    @property
    def location_(self):
        """Estimated robust location.

        Decorator for scikit-learn MinCovDet attributes.
        """
        return self.detector_.location_

    @property
    def covariance_(self):
        """Estimated robust covariance matrix.

        Decorator for scikit-learn MinCovDet attributes.
        """
        return self.detector_.covariance_

    @property
    def precision_(self):
        """ Estimated pseudo inverse matrix.
        (stored only if store_precision is True)

        Decorator for scikit-learn MinCovDet attributes.
        """
        return self.detector_.precision_

    @property
    def support_(self):
        """A mask of the observations that have been used to compute
        the robust estimates of location and shape.

        Decorator for scikit-learn MinCovDet attributes.
        """
        return self.detector_.support_
| 38.311628 | 79 | 0.690785 |
f9179f07a8248fe3e43971850615d21b4ded8df2 | 2,210 | py | Python | giftrocket/http.py | GiftRocket/giftrocket-python | dd761b869145b2a91faad344ac26147a2e2c3a97 | [
"MIT"
] | 3 | 2017-04-04T13:17:29.000Z | 2020-07-21T15:23:11.000Z | giftrocket/http.py | GiftRocket/giftrocket-python | dd761b869145b2a91faad344ac26147a2e2c3a97 | [
"MIT"
] | null | null | null | giftrocket/http.py | GiftRocket/giftrocket-python | dd761b869145b2a91faad344ac26147a2e2c3a97 | [
"MIT"
] | 1 | 2019-12-01T06:45:27.000Z | 2019-12-01T06:45:27.000Z | ##############################################################################
# Helper module that encapsulates the HTTPS request so that it can be used
# with multiple runtimes. PK Mar. 14
##############################################################################
import os
import json
try:
from urllib.parse import urlencode
except ImportError:
from urllib import urlencode
# Library version reported in the User-Agent header of every request.
__version__ = '2.0.0'

# HTTP verbs that map directly onto same-named `requests` module functions.
ALLOWED_METHODS = {'delete', 'get', 'patch', 'post', 'put'}
def _requests(url, method, data):
    """Issue an HTTP request through the `requests` library.

    *method* must (case-insensitively) be one of ALLOWED_METHODS; *data* is
    JSON-encoded into the request body.  Raises ValueError otherwise.
    """
    import requests
    normalized_method = method.lower()
    if normalized_method not in ALLOWED_METHODS:
        raise ValueError(
            'Invalid request method {}'.format(method)
        )
    handler = getattr(requests, normalized_method)
    return handler(url, data=json.dumps(data), headers={
        'User-Agent': 'GiftRocket Python v{}'.format(__version__),
        'Content-type': 'application/json', 'Accept': 'text/plain'
    })
# Google App Engine variant of the request helper.
def _urlfetch(url, method, data):
    """Issue an HTTP request via Google App Engine's urlfetch service.

    POST sends *data* form-encoded as the body; every other method appends it
    to the URL as a query string.  The result object is patched with `.ok`
    and `.text` so callers can treat it like a `requests` response.
    """
    from google.appengine.api import urlfetch

    method = method.upper()
    qs = urlencode(data)
    if method == 'POST':
        payload = qs
    else:
        payload = None
        url += '?' + qs

    headers = {
        'User-Agent': 'GiftRocket Python v{}'.format(__version__)
    }

    res = urlfetch.fetch(
        url,
        follow_redirects=True,
        method=method,
        payload=payload,
        headers=headers,
        deadline=60  # seconds
    )

    # Add consistent interface across requests library and urlfetch
    res.ok = res.status_code >= 200 and res.status_code < 300
    res.text = res.content
    return res
def to_requests(base_url, auth):
    """Build a request function bound to *base_url*, merging *auth* into every
    payload.

    The App Engine `urlfetch` backend is selected when the SERVER_SOFTWARE
    environment variable indicates GAE; otherwise plain `requests` is used.
    The detection runs once, when the closure is created.
    """
    server_software = os.environ.get('SERVER_SOFTWARE', '')
    on_appengine = server_software.split('/')[0] in ('Development', 'Google App Engine')
    backend = _urlfetch if on_appengine else _requests

    def inner(path, method, data=None):
        payload = dict(data or {}, **auth)
        return backend("{}/{}".format(base_url, path), method, payload)

    return inner
| 27.283951 | 89 | 0.577828 |
8e4ff8be01311544bc9f807da1bc252bc189b618 | 15,603 | py | Python | airbyte-cdk/python/airbyte_cdk/sources/singer/singer_helpers.py | OTRI-Unipd/OTRI-airbyte | 50eeeb773f75246e86c6e167b0cd7d2dda6efe0d | [
"MIT"
] | 2 | 2022-03-02T13:46:05.000Z | 2022-03-05T12:31:28.000Z | airbyte-cdk/python/airbyte_cdk/sources/singer/singer_helpers.py | OTRI-Unipd/OTRI-airbyte | 50eeeb773f75246e86c6e167b0cd7d2dda6efe0d | [
"MIT"
] | 5 | 2022-02-22T14:49:48.000Z | 2022-03-19T10:43:08.000Z | airbyte-cdk/python/airbyte_cdk/sources/singer/singer_helpers.py | OTRI-Unipd/OTRI-airbyte | 50eeeb773f75246e86c6e167b0cd7d2dda6efe0d | [
"MIT"
] | 1 | 2022-03-11T06:21:24.000Z | 2022-03-11T06:21:24.000Z | #
# Copyright (c) 2021 Airbyte, Inc., all rights reserved.
#
import json
import os
import selectors
import subprocess
from dataclasses import dataclass
from datetime import datetime
from io import TextIOWrapper
from typing import Any, DefaultDict, Dict, Iterator, List, Mapping, Optional, Tuple
from airbyte_cdk.logger import log_by_prefix
from airbyte_cdk.models import (
AirbyteCatalog,
AirbyteMessage,
AirbyteRecordMessage,
AirbyteStateMessage,
AirbyteStream,
ConfiguredAirbyteCatalog,
ConfiguredAirbyteStream,
SyncMode,
Type,
)
_INCREMENTAL = "INCREMENTAL"
_FULL_TABLE = "FULL_TABLE"
def to_json(string):
    """Parse *string* as JSON, returning False when it is not valid JSON.

    NOTE: the False sentinel is ambiguous with the JSON literal ``false``;
    callers comparing against None should be aware of this.
    """
    try:
        parsed = json.loads(string)
    except ValueError:
        return False
    return parsed
def is_field_metadata(metadata):
    """Return True when *metadata* describes a field (breadcrumb of length 2
    whose first element is not "property")."""
    breadcrumb = metadata.get("breadcrumb")
    if len(breadcrumb) != 2:
        return False
    return breadcrumb[0] != "property"
def configured_for_incremental(configured_stream: ConfiguredAirbyteStream):
    """True-ish when the configured stream requests incremental sync."""
    sync_mode = configured_stream.sync_mode
    return sync_mode and sync_mode == SyncMode.incremental
def get_stream_level_metadata(metadatas: List[Dict[str, Any]]) -> Optional[Dict[str, Any]]:
    """Return the first stream-level (non-field) "metadata" payload, if any."""
    for entry in metadatas:
        if not is_field_metadata(entry) and "metadata" in entry:
            return entry.get("metadata")
    return None
@dataclass
class Catalogs:
    """Bundle of a raw Singer catalog and its Airbyte translation."""

    # Raw Singer catalog (as loaded from the tap's discover output).
    singer_catalog: object
    # The same catalog converted to the Airbyte protocol representation.
    airbyte_catalog: AirbyteCatalog
@dataclass
class SyncModeInfo:
    """Optional per-stream overrides for the sync modes exposed in the catalog."""

    # Sync modes the stream should advertise; None means "do not override".
    supported_sync_modes: Optional[List[SyncMode]] = None
    # Whether the source defines the cursor itself; None means "do not override".
    source_defined_cursor: Optional[bool] = None
    # Cursor field(s) to use when the user does not pick one.
    default_cursor_field: Optional[List[str]] = None
def set_sync_modes_from_metadata(airbyte_stream: AirbyteStream, metadatas: List[Dict[str, Any]]):
    """Infer and set sync modes on *airbyte_stream* from Singer stream metadata.

    Declared replication keys (or forced-replication-method == INCREMENTAL)
    mark the stream incremental with a source-defined cursor; a forced
    FULL_TABLE stream is marked full-refresh only.  Streams without stream-
    level metadata are left untouched.
    """
    stream_metadata = get_stream_level_metadata(metadatas)
    if stream_metadata:
        # A stream is incremental if it declares replication keys or if forced-replication-method is set to incremental
        replication_keys = stream_metadata.get("valid-replication-keys", [])
        if len(replication_keys) > 0:
            airbyte_stream.source_defined_cursor = True
            airbyte_stream.supported_sync_modes = [SyncMode.incremental]
            # TODO if there are multiple replication keys, allow configuring which one is used. For now we deterministically take the first
            airbyte_stream.default_cursor_field = [sorted(replication_keys)[0]]
        elif "forced-replication-method" in stream_metadata:
            forced_replication_method = stream_metadata["forced-replication-method"]
            # Some taps nest the method under a "replication-method" key.
            if isinstance(forced_replication_method, dict):
                forced_replication_method = forced_replication_method.get("replication-method", "")
            if forced_replication_method.upper() == _INCREMENTAL:
                airbyte_stream.source_defined_cursor = True
                airbyte_stream.supported_sync_modes = [SyncMode.incremental]
            elif forced_replication_method.upper() == _FULL_TABLE:
                airbyte_stream.source_defined_cursor = False
                airbyte_stream.supported_sync_modes = [SyncMode.full_refresh]
def override_sync_modes(airbyte_stream: AirbyteStream, overrides: SyncModeInfo):
    """Apply explicit sync-mode *overrides* onto *airbyte_stream* in place."""
    cursor = overrides.source_defined_cursor
    airbyte_stream.source_defined_cursor = cursor if cursor else False
    modes = overrides.supported_sync_modes
    if modes:
        airbyte_stream.supported_sync_modes = modes
    default_field = overrides.default_cursor_field
    if default_field is not None:
        airbyte_stream.default_cursor_field = default_field
class SingerHelper:
    """Static helpers for driving a Singer tap: translating Singer catalogs to
    Airbyte catalogs, running the tap subprocess, and converting its stdout
    lines into Airbyte protocol messages."""

    @staticmethod
    def _transform_types(stream_properties: DefaultDict):
        # Normalizes the JSON-schema "type" entry of every property in place.
        # NOTE(review): SingerHelper._parse_type is not defined anywhere in this
        # file, so calling this method would raise AttributeError — confirm
        # where _parse_type was meant to come from.
        for field_name in stream_properties:
            field_object = stream_properties[field_name]
            # according to issue CDK: typing errors #9500, mypy raises error on this line
            # '"Type[SingerHelper]" has no attribute "_parse_type"', it's need to fix
            # ignored for now
            field_object["type"] = SingerHelper._parse_type(field_object["type"])  # type: ignore

    @staticmethod
    def singer_catalog_to_airbyte_catalog(
        singer_catalog: Dict[str, Any], sync_mode_overrides: Dict[str, SyncModeInfo], primary_key_overrides: Dict[str, List[str]]
    ) -> AirbyteCatalog:
        """Translate a Singer catalog into an Airbyte catalog.

        :param singer_catalog: the catalog as produced by the tap's discover.
        :param sync_mode_overrides: A dict from stream name to the sync modes it should use. Each stream in this dict must exist in the Singer catalog,
            but not every stream in the catalog should exist in this
        :param primary_key_overrides: A dict of stream name -> list of fields to be used as PKs.
        :return: Airbyte Catalog
        """
        airbyte_streams = []
        # according to issue CDK: typing errors #9500, mypy raises error on this line
        # 'Item "None" of "Optional[Any]" has no attribute "__iter__" (not iterable)'
        # It occurs because default value isn't set, and it's None
        # It's needed to set default value, ignored for now
        for stream in singer_catalog.get("streams"):  # type: ignore
            name = stream.get("stream")
            schema = stream.get("schema")
            airbyte_stream = AirbyteStream(name=name, json_schema=schema)
            # Explicit overrides win; otherwise infer sync modes from metadata.
            if name in sync_mode_overrides:
                override_sync_modes(airbyte_stream, sync_mode_overrides[name])
            else:
                set_sync_modes_from_metadata(airbyte_stream, stream.get("metadata", []))
            # Primary keys: explicit overrides win over the tap's key_properties.
            if name in primary_key_overrides:
                airbyte_stream.source_defined_primary_key = [[k] for k in primary_key_overrides[name]]
            elif stream.get("key_properties"):
                airbyte_stream.source_defined_primary_key = [[k] for k in stream["key_properties"]]
            airbyte_streams += [airbyte_stream]
        return AirbyteCatalog(streams=airbyte_streams)

    @staticmethod
    def _read_singer_catalog(logger, shell_command: str) -> Mapping[str, Any]:
        """Run *shell_command* and parse its stdout as the Singer catalog JSON.

        stderr lines are forwarded to *logger* via log_by_prefix (defaulting
        to ERROR level).
        """
        completed_process = subprocess.run(
            shell_command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True
        )
        for line in completed_process.stderr.splitlines():
            logger.log(*log_by_prefix(line, "ERROR"))

        return json.loads(completed_process.stdout)

    @staticmethod
    def get_catalogs(
        logger,
        shell_command: str,
        sync_mode_overrides: Dict[str, SyncModeInfo],
        primary_key_overrides: Dict[str, List[str]],
        excluded_streams: List,
    ) -> Catalogs:
        """Discover the Singer catalog via *shell_command* and return it
        together with its Airbyte translation, dropping streams named in
        *excluded_streams*."""
        singer_catalog = SingerHelper._read_singer_catalog(logger, shell_command)
        streams = singer_catalog.get("streams", [])
        if streams and excluded_streams:
            # according to issue CDK: typing errors #9500, mypy raises error on this line
            # 'Unsupported target for indexed assignment ("Mapping[str, Any]")'
            # _read_singer_catalog returns Mapping, to fix this error it should be changed to MutableMapping
            # ignored for now
            singer_catalog["streams"] = [stream for stream in streams if stream["stream"] not in excluded_streams]  # type: ignore

        # according to issue CDK: typing errors #9500, mypy raises error on this line
        # 'Argument 1 to "singer_catalog_to_airbyte_catalog" of "SingerHelper" has incompatible type "Mapping[str, Any]"; expected "Dict[str, Any]"'
        # singer_catalog is Mapping, because _read_singer_catalog returns Mapping, but singer_catalog_to_airbyte_catalog expects Dict
        # it's needed to check and fix, ignored for now
        airbyte_catalog = SingerHelper.singer_catalog_to_airbyte_catalog(singer_catalog, sync_mode_overrides, primary_key_overrides)  # type: ignore
        return Catalogs(singer_catalog=singer_catalog, airbyte_catalog=airbyte_catalog)

    @staticmethod
    def read(logger, shell_command, is_message=(lambda x: True)) -> Iterator[AirbyteMessage]:
        """Run *shell_command* and yield Airbyte messages parsed from its
        stdout; non-message stdout lines and all stderr lines are logged."""
        with subprocess.Popen(shell_command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, universal_newlines=True) as p:
            for line, text_wrapper in SingerHelper._read_lines(p):
                if text_wrapper is p.stdout:
                    out_json = to_json(line)
                    # NOTE(review): to_json returns False (not None) for invalid
                    # JSON, so this None check also lets False through to
                    # is_message — confirm that is intended.
                    if out_json is not None and is_message(out_json):
                        message_data = SingerHelper._airbyte_message_from_json(out_json)
                        if message_data is not None:
                            yield message_data
                    else:
                        logger.log(*log_by_prefix(line, "INFO"))
                else:
                    logger.log(*log_by_prefix(line, "ERROR"))

    @staticmethod
    def _read_lines(process: subprocess.Popen) -> Iterator[Tuple[str, TextIOWrapper]]:
        """Yield (line, stream) pairs from the process's stdout and stderr
        until both hit EOF, then wait for the process and raise if it hung or
        exited non-zero."""
        sel = selectors.DefaultSelector()
        # according to issue CDK: typing errors #9500, mypy raises error on this two lines
        # 'Argument 1 to "register" of "DefaultSelector" has incompatible type "Optional[IO[Any]]"; expected "Union[int, HasFileno]"'
        # 'Argument 1 to "register" of "DefaultSelector" has incompatible type "Optional[IO[Any]]"; expected "Union[int, HasFileno]"'
        # It's need to check, ignored for now
        sel.register(process.stdout, selectors.EVENT_READ)  # type: ignore
        sel.register(process.stderr, selectors.EVENT_READ)  # type: ignore
        eof = False
        while not eof:
            selects_list = sel.select()
            empty_line_counter = 0
            for key, _ in selects_list:
                # according to issue CDK: typing errors #9500, mypy raises two errors on these lines
                # 'Item "int" of "Union[int, HasFileno]" has no attribute "readline"'
                # 'Item "HasFileno" of "Union[int, HasFileno]" has no attribute "readline"'
                # It's need to check, ignored for now
                line = key.fileobj.readline()  # type: ignore
                if not line:
                    empty_line_counter += 1
                    # EOF only once every selected stream returned an empty read.
                    if empty_line_counter >= len(selects_list):
                        eof = True

                        try:
                            process.wait(timeout=60)
                        except subprocess.TimeoutExpired:
                            # according to issue CDK: typing errors #9500, mypy raises error on this line
                            # 'On Python 3 '{}'.format(b'abc') produces "b'abc'", not 'abc'; use '{!r}'.format(b'abc') if this is desired behavior'
                            # It's need to fix, ignored for now
                            raise Exception(f"Underlying command {process.args} is hanging")  # type: ignore

                        if process.returncode != 0:
                            # according to issue CDK: typing errors #9500, mypy raises error on this line
                            # 'On Python 3 '{}'.format(b'abc') produces "b'abc'", not 'abc'; use '{!r}'.format(b'abc') if this is desired behavior'
                            # It's need to fix, ignored for now
                            raise Exception(f"Underlying command {process.args} failed with exit code {process.returncode}")  # type: ignore
                else:
                    # according to issue CDK: typing errors #9500, mypy raises error on this line
                    # 'Incompatible types in "yield" (actual type "Tuple[Any, Union[int, HasFileno]]", expected type "Tuple[str, TextIOWrapper]")'
                    # It's need to fix, ignored for now
                    yield line, key.fileobj  # type: ignore

    @staticmethod
    def _airbyte_message_from_json(transformed_json: Mapping[str, Any]) -> Optional[AirbyteMessage]:
        """Convert one parsed Singer message into an AirbyteMessage.

        SCHEMA and ACTIVATE_VERSION messages have no Airbyte equivalent and
        yield None; STATE becomes a state message; anything else is treated
        as a record.
        """
        if transformed_json is None or transformed_json.get("type") == "SCHEMA" or transformed_json.get("type") == "ACTIVATE_VERSION":
            return None
        elif transformed_json.get("type") == "STATE":
            out_record = AirbyteStateMessage(data=transformed_json["value"])
            out_message = AirbyteMessage(type=Type.STATE, state=out_record)
        else:
            # todo: check that messages match the discovered schema
            stream_name = transformed_json["stream"]
            # according to issue CDK: typing errors #9500, mypy raises error on this line
            # 'Incompatible types in assignment (expression has type "AirbyteRecordMessage", variable has type "AirbyteStateMessage")'
            # type of out_record is first initialized as AirbyteStateMessage on the line 240
            # however AirbyteRecordMessage is assigned on the line below, it causes error
            # ignored
            out_record = AirbyteRecordMessage(  # type: ignore
                stream=stream_name,
                data=transformed_json["record"],
                emitted_at=int(datetime.now().timestamp()) * 1000,
            )
            out_message = AirbyteMessage(type=Type.RECORD, record=out_record)
        return out_message

    @staticmethod
    def create_singer_catalog_with_selection(masked_airbyte_catalog: ConfiguredAirbyteCatalog, discovered_singer_catalog: object) -> str:
        """Write a Singer catalog file that marks as selected only the streams
        and fields present in *masked_airbyte_catalog*, and return its path.

        Replication method/key metadata is derived from each configured
        stream's sync mode and cursor field.
        """
        combined_catalog_path = os.path.join("singer_rendered_catalog.json")
        masked_singer_streams = []
        stream_name_to_configured_stream = {
            configured_stream.stream.name: configured_stream for configured_stream in masked_airbyte_catalog.streams
        }
        # according to issue CDK: typing errors #9500, mypy raises error on this line
        # '"object" has no attribute "get"'
        # discovered_singer_catalog type is set to object on the line 259, need to check
        # ignored for now
        for singer_stream in discovered_singer_catalog.get("streams"):  # type: ignore
            stream_name = singer_stream.get("stream")
            if stream_name in stream_name_to_configured_stream:
                new_metadatas = []
                # support old style catalog.
                singer_stream["schema"]["selected"] = True
                if singer_stream.get("metadata"):
                    metadatas = singer_stream.get("metadata")
                    for metadata in metadatas:
                        new_metadata = metadata
                        new_metadata["metadata"]["selected"] = True
                        if not is_field_metadata(new_metadata):
                            # Stream-level metadata: record the replication method/key.
                            configured_stream = stream_name_to_configured_stream[stream_name]
                            if configured_for_incremental(configured_stream):
                                replication_method = _INCREMENTAL
                                if configured_stream.cursor_field:
                                    new_metadata["metadata"]["replication-key"] = configured_stream.cursor_field[0]
                            else:
                                replication_method = _FULL_TABLE
                            new_metadata["metadata"]["forced-replication-method"] = replication_method
                            new_metadata["metadata"]["replication-method"] = replication_method
                        else:
                            # Field-level metadata: deselect excluded fields.
                            if "fieldExclusions" in new_metadata["metadata"]:
                                new_metadata["metadata"]["selected"] = True if not new_metadata["metadata"]["fieldExclusions"] else False
                        new_metadatas += [new_metadata]
                    singer_stream["metadata"] = new_metadatas
                masked_singer_streams += [singer_stream]

        combined_catalog = {"streams": masked_singer_streams}
        with open(combined_catalog_path, "w") as fh:
            fh.write(json.dumps(combined_catalog))

        return combined_catalog_path
| 51.157377 | 151 | 0.648209 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.