id stringlengths 1 8 | text stringlengths 6 1.05M | dataset_id stringclasses 1
value |
|---|---|---|
8140443 | <gh_stars>10-100
"""
Entry point for a gunicorn server, serves at /api
"""
from benchmarkstt.cli.entrypoints.api import create_app # pragma: no cover
application = create_app('/api', with_explorer=True) # pragma: no cover
| StarcoderdataPython |
5081962 | <reponame>maumg1196/GearDesign
# Generated by Django 2.2.1 on 2019-06-04 03:31
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django schema migration: adds free-text material fields
    # to the ``design.Gear`` model -- ``materialg`` for the gear and
    # ``materialp`` for the pinion.  ``null=True`` keeps pre-existing rows valid.
    dependencies = [
        ('design', '0014_auto_20190603_2204'),
    ]
    operations = [
        migrations.AddField(
            model_name='gear',
            name='materialg',
            field=models.CharField(max_length=100, null=True, verbose_name='Material del engrane'),
        ),
        migrations.AddField(
            model_name='gear',
            name='materialp',
            field=models.CharField(max_length=100, null=True, verbose_name='Material del piñón'),
        ),
    ]
| StarcoderdataPython |
8035493 | from rest_framework import serializers
from rest_framework.relations import SlugRelatedField, StringRelatedField
from frontend.api.models import TweetCountCache, Tweet, Article, TweetClusterMembership, Cluster, \
TweetClusterAttributeValue
class TweetCountCacheSerializer(serializers.ModelSerializer):
    """Serializes a cached per-day tweet count as ``{count, day}``."""
    class Meta:
        model = TweetCountCache
        fields = ('count', 'day',)
# class TweetSerializer(serializers.ModelSerializer):
# id = serializers.SerializerMethodField('get_tweet_id')
#
# class Meta:
# model = Tweet
# fields = ('id',)
#
# def get_tweet_id(self, tweet):
# return str(tweet)
# class ArticleSerializer(serializers.ModelSerializer):
# id = serializers.SerializerMethodField('get_article_id')
#
# class Meta:
# model = Article
# fields = ('id',)
#
# def get_article_id(self, article):
# return str(article)
# class AttributeSerializer(serializers.ModelSerializer):
# class Meta:
# model = TweetClusterAttributes
# fields = ('name', )
class AttributeValueSerializer(serializers.ModelSerializer):
    """Serializes one attribute/value pair of a tweet-cluster membership."""
    # Render the related attribute by its ``name`` field instead of its pk.
    attribute = SlugRelatedField(slug_field='name', read_only=True)
    class Meta:
        model = TweetClusterAttributeValue
        fields = ('attribute', 'value')
class TweetClusterMembershipSerializer(serializers.ModelSerializer):
    """Serializes a tweet's membership in a cluster, with its attribute values."""
    # Tweet rendered via its ``__str__`` representation.
    tweet = StringRelatedField()
    # Reverse accessor on the through-model's attribute values.
    attributes = AttributeValueSerializer(source='tweetclusterattributevalue_set', many=True)
    class Meta:
        model = TweetClusterMembership
        fields = ('tweet', 'attributes')
class ClusterSerializer(serializers.ModelSerializer):
    """Serializes a cluster with its member tweets, source article and detail URL."""
    tweets = TweetClusterMembershipSerializer(source='tweetclustermembership_set', many=True)
    article = StringRelatedField()
    url = serializers.HyperlinkedIdentityField(view_name='cluster-detail')
    class Meta:
        model = Cluster
        # NOTE(review): 'rumor_ration' looks like a typo for 'rumor_ratio',
        # but it must match the Cluster model field name -- confirm against
        # the model before renaming.
        fields = ('id', 'tweets', 'article', 'url', 'rumor_ration', )
| StarcoderdataPython |
6547576 | <reponame>TheWebCrafters/PyCraft<gh_stars>1-10
from terrain import *
class block(Block):
    """Stone block: every one of the six faces shares the ``stone.png`` texture."""
    def __init__(self, renderer):
        super().__init__("stone", renderer)
        # Look the texture coordinates up once and reuse them for all faces,
        # exactly as the per-face lookups would (same object each time).
        stone = self.renderer.texture_manager.texture_coords["stone.png"]
        self.tex_coords = {
            face: stone
            for face in ("top", "bottom", "left", "right", "front", "back")
        }
6589815 | # -*- coding: utf-8 -*-
"""
actors exceptions module.
"""
from pyrin.core.exceptions import CoreException, CoreBusinessException
class ActorsException(CoreException):
    """
    actors exception.

    base class for every error raised by the actors package.
    """
    pass
class ActorsBusinessException(CoreBusinessException, ActorsException):
    """
    actors business exception.

    base class for actor errors that represent expected business failures.
    """
    pass
class ActorDoesNotExistError(ActorsBusinessException):
    """
    actor does not exist error.

    raised when a requested actor cannot be found.
    """
    pass
class InvalidActorHookTypeError(ActorsException):
    """
    invalid actor hook type error.

    raised when a registered actor hook is not of the expected type.
    """
    pass
| StarcoderdataPython |
8131793 | <reponame>Asap7772/railrl_evalsawyer<filename>experiments/vitchyr/rig/reset-free/pusher/relabeling_sac_state.py
import rlkit.misc.hyperparameter as hyp
from rlkit.launchers.experiments.vitchyr.multiworld import (
relabeling_tsac_experiment,
)
from rlkit.launchers.launcher_util import run_experiment
if __name__ == "__main__":
# noinspection PyTypeChecker
variant = dict(
algo_kwargs=dict(
base_kwargs=dict(
num_epochs=1000,
num_steps_per_epoch=1000,
num_steps_per_eval=5000,
max_path_length=500,
num_updates_per_env_step=1,
batch_size=128,
discount=0.99,
min_num_steps_before_training=1000,
reward_scale=1,
render=False,
),
her_kwargs=dict(
observation_key='state_observation',
desired_goal_key='state_desired_goal',
),
twin_sac_kwargs=dict(),
),
env_id='SawyerPushAndReachXYEnv-ResetFree-v0',
replay_buffer_kwargs=dict(
max_size=int(1E6),
fraction_goals_are_rollout_goals=0.2,
fraction_resampled_goals_are_env_goals=0.5,
),
qf_kwargs=dict(
hidden_sizes=[400, 300],
),
vf_kwargs=dict(
hidden_sizes=[400, 300],
),
policy_kwargs=dict(
hidden_sizes=[400, 300],
),
algorithm='HER-tSAC',
version='normal',
save_video_period=100,
do_state_exp=True,
save_video=False,
observation_key='state_observation',
desired_goal_key='state_desired_goal',
)
search_space = {
'env_id': [
'SawyerPushXYEnv-WithResets-v0',
'SawyerPushAndReachXYEnv-WithResets-v0',
# 'SawyerPushXYEnv-CompleteResetFree-v1',
# 'SawyerPushAndReachXYEnv-CompleteResetFree-v0',
],
# 'env_kwargs.num_resets_before_puck_reset': [int(1e6)],
# 'env_kwargs.num_resets_before_hand_reset': [20, int(1e6)],
# 'algo_kwargs.base_kwargs.max_path_length': [100, 500],
'replay_buffer_kwargs.fraction_goals_are_rollout_goals': [0.5, 0.2],
'algo_kwargs.base_kwargs.min_num_steps_before_training': [1000, 10000],
}
sweeper = hyp.DeterministicHyperparameterSweeper(
search_space, default_parameters=variant,
)
n_seeds = 1
mode = 'local'
exp_prefix = 'dev'
n_seeds = 3
mode = 'ec2'
exp_prefix = 'her-push-sweep'
for exp_id, variant in enumerate(sweeper.iterate_hyperparameters()):
for i in range(n_seeds):
run_experiment(
relabeling_tsac_experiment,
exp_prefix=exp_prefix,
mode=mode,
variant=variant,
time_in_mins=23*60,
snapshot_mode='gap_and_last',
snapshot_gap=100,
)
| StarcoderdataPython |
342525 | <gh_stars>10-100
from gpflow.actions import Action
class RunOpAction(Action):
    """GPflow optimization Action that executes a stored op each time it fires."""
    def __init__(self, op):
        # op: operation to run through the session (presumably a TensorFlow
        # op/tensor, given the gpflow context -- confirm at the call site).
        self.op = op
    def run(self, context):
        # ``context.session`` is supplied by gpflow's action loop.
        context.session.run(self.op)
| StarcoderdataPython |
def main():
    """Solve Advent of Code 2017 day 4 and print both answers.

    Sanity-checks both parts against the worked examples from the puzzle
    statement before printing the real answers.
    """
    # Use a context manager so the file handle is closed promptly
    # (the original left it open until interpreter shutdown).
    with open("python/day04.txt", "r") as f:
        puzzleInput = f.read()
    # Part 1
    assert(part1("aa bb cc dd ee") == 1)
    assert(part1("aa bb cc dd aa") == 0)
    assert(part1("aa bb cc dd aaa") == 1)
    print(part1(puzzleInput))
    # Part 2
    assert(part2("abcde fghij") == 1)
    assert(part2("abcde xyz ecdab") == 0)
    assert(part2("a ab abc abd abf abj") == 1)
    assert(part2("iiii oiii ooii oooi oooo") == 1)
    assert(part2("oiii ioii iioi iiio") == 0)
    print(part2(puzzleInput))
def part1(puzzleInput):
    """Count valid passphrases: lines containing no repeated word.

    :param puzzleInput: newline-separated passphrases; words are
        whitespace-separated within each line.
    :return: number of valid passphrases.
    """
    totalValid = 0
    for row in puzzleInput.split("\n"):
        words = row.split()
        # A passphrase is valid exactly when every word is unique.
        # (The original tracked this with a misnamed ``duplicate`` flag
        # that was True when there were NO duplicates.)
        if len(words) == len(set(words)):
            totalValid += 1
    return totalValid
def part2(puzzleInput):
    """Count valid passphrases: lines where no word is an anagram of another.

    Each word is normalized by sorting its letters, so any two anagrams
    collapse to the same key.

    :param puzzleInput: newline-separated passphrases.
    :return: number of valid passphrases.
    """
    totalValid = 0
    for row in puzzleInput.split("\n"):
        normalized = ["".join(sorted(word)) for word in row.split()]
        # Valid when no two normalized words collide.
        if len(normalized) == len(set(normalized)):
            totalValid += 1
    return totalValid
if __name__ == "__main__":
main() | StarcoderdataPython |
8103908 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'dw_inputs_fields.ui'
#
# Created by: PyQt5 UI code generator 5.14.1
#
# WARNING! All changes made in this file will be lost!
from qtpy import QtCore, QtGui, QtWidgets
class Ui_DockWidget(object):
def setupUi(self, DockWidget):
DockWidget.setObjectName("DockWidget")
DockWidget.resize(703, 557)
self.dockWidgetContents = QtWidgets.QWidget()
self.dockWidgetContents.setObjectName("dockWidgetContents")
self.gridLayout = QtWidgets.QGridLayout(self.dockWidgetContents)
self.gridLayout.setObjectName("gridLayout")
self.label = QtWidgets.QLabel(self.dockWidgetContents)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label.setFont(font)
self.label.setObjectName("label")
self.gridLayout.addWidget(self.label, 0, 1, 1, 1)
self.label_2 = QtWidgets.QLabel(self.dockWidgetContents)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_2.setFont(font)
self.label_2.setObjectName("label_2")
self.gridLayout.addWidget(self.label_2, 0, 2, 1, 1)
self.label_12 = QtWidgets.QLabel(self.dockWidgetContents)
self.label_12.setMinimumSize(QtCore.QSize(0, 0))
self.label_12.setMaximumSize(QtCore.QSize(16777215, 16777215))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_12.setFont(font)
self.label_12.setObjectName("label_12")
self.gridLayout.addWidget(self.label_12, 1, 0, 1, 1)
self.fontComboBox = QtWidgets.QFontComboBox(self.dockWidgetContents)
self.fontComboBox.setMinimumSize(QtCore.QSize(0, 0))
self.fontComboBox.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.fontComboBox.setObjectName("fontComboBox")
self.gridLayout.addWidget(self.fontComboBox, 1, 1, 1, 1)
self.fontComboBoxDis = QtWidgets.QFontComboBox(self.dockWidgetContents)
self.fontComboBoxDis.setEnabled(False)
self.fontComboBoxDis.setMinimumSize(QtCore.QSize(0, 0))
self.fontComboBoxDis.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.fontComboBoxDis.setObjectName("fontComboBoxDis")
self.gridLayout.addWidget(self.fontComboBoxDis, 1, 2, 1, 1)
self.label_3 = QtWidgets.QLabel(self.dockWidgetContents)
self.label_3.setObjectName("label_3")
self.gridLayout.addWidget(self.label_3, 2, 0, 1, 1)
self.comboBoxEdit = QtWidgets.QComboBox(self.dockWidgetContents)
self.comboBoxEdit.setEditable(True)
self.comboBoxEdit.setObjectName("comboBoxEdit")
self.comboBoxEdit.addItem("")
self.comboBoxEdit.addItem("")
self.comboBoxEdit.addItem("")
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/qss_icons/rc/window_undock.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.comboBoxEdit.addItem(icon, "")
icon1 = QtGui.QIcon()
icon1.addPixmap(QtGui.QPixmap(":/qss_icons/rc/window_undock_focus@2x.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.comboBoxEdit.addItem(icon1, "")
self.gridLayout.addWidget(self.comboBoxEdit, 2, 1, 1, 1)
self.comboBoxEditDis = QtWidgets.QComboBox(self.dockWidgetContents)
self.comboBoxEditDis.setEnabled(False)
self.comboBoxEditDis.setEditable(True)
self.comboBoxEditDis.setObjectName("comboBoxEditDis")
self.comboBoxEditDis.addItem("")
self.comboBoxEditDis.addItem("")
self.comboBoxEditDis.addItem("")
self.comboBoxEditDis.setItemText(2, "")
self.gridLayout.addWidget(self.comboBoxEditDis, 2, 2, 1, 1)
self.label_13 = QtWidgets.QLabel(self.dockWidgetContents)
self.label_13.setMinimumSize(QtCore.QSize(0, 0))
self.label_13.setMaximumSize(QtCore.QSize(16777215, 16777215))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_13.setFont(font)
self.label_13.setObjectName("label_13")
self.gridLayout.addWidget(self.label_13, 3, 0, 1, 1)
self.lineEdit = QtWidgets.QLineEdit(self.dockWidgetContents)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lineEdit.sizePolicy().hasHeightForWidth())
self.lineEdit.setSizePolicy(sizePolicy)
self.lineEdit.setMinimumSize(QtCore.QSize(0, 0))
self.lineEdit.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.lineEdit.setObjectName("lineEdit")
self.gridLayout.addWidget(self.lineEdit, 3, 1, 1, 1)
self.lineEditDis = QtWidgets.QLineEdit(self.dockWidgetContents)
self.lineEditDis.setEnabled(False)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.lineEditDis.sizePolicy().hasHeightForWidth())
self.lineEditDis.setSizePolicy(sizePolicy)
self.lineEditDis.setMinimumSize(QtCore.QSize(0, 0))
self.lineEditDis.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.lineEditDis.setObjectName("lineEditDis")
self.gridLayout.addWidget(self.lineEditDis, 3, 2, 1, 1)
self.label_14 = QtWidgets.QLabel(self.dockWidgetContents)
self.label_14.setMinimumSize(QtCore.QSize(0, 0))
self.label_14.setMaximumSize(QtCore.QSize(16777215, 16777215))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_14.setFont(font)
self.label_14.setObjectName("label_14")
self.gridLayout.addWidget(self.label_14, 4, 0, 1, 1)
self.textEdit = QtWidgets.QTextEdit(self.dockWidgetContents)
self.textEdit.setMinimumSize(QtCore.QSize(0, 0))
self.textEdit.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.textEdit.setObjectName("textEdit")
self.gridLayout.addWidget(self.textEdit, 4, 1, 1, 1)
self.textEditDis = QtWidgets.QTextEdit(self.dockWidgetContents)
self.textEditDis.setEnabled(False)
self.textEditDis.setMinimumSize(QtCore.QSize(0, 0))
self.textEditDis.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.textEditDis.setObjectName("textEditDis")
self.gridLayout.addWidget(self.textEditDis, 4, 2, 1, 1)
self.label_15 = QtWidgets.QLabel(self.dockWidgetContents)
self.label_15.setMinimumSize(QtCore.QSize(0, 0))
self.label_15.setMaximumSize(QtCore.QSize(16777215, 16777215))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_15.setFont(font)
self.label_15.setObjectName("label_15")
self.gridLayout.addWidget(self.label_15, 5, 0, 1, 1)
self.plainTextEdit = QtWidgets.QPlainTextEdit(self.dockWidgetContents)
self.plainTextEdit.setMinimumSize(QtCore.QSize(0, 0))
self.plainTextEdit.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.plainTextEdit.setObjectName("plainTextEdit")
self.gridLayout.addWidget(self.plainTextEdit, 5, 1, 1, 1)
self.plainTextEditDis = QtWidgets.QPlainTextEdit(self.dockWidgetContents)
self.plainTextEditDis.setEnabled(False)
self.plainTextEditDis.setMinimumSize(QtCore.QSize(0, 0))
self.plainTextEditDis.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.plainTextEditDis.setObjectName("plainTextEditDis")
self.gridLayout.addWidget(self.plainTextEditDis, 5, 2, 1, 1)
self.label_16 = QtWidgets.QLabel(self.dockWidgetContents)
self.label_16.setMinimumSize(QtCore.QSize(0, 0))
self.label_16.setMaximumSize(QtCore.QSize(16777215, 16777215))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_16.setFont(font)
self.label_16.setObjectName("label_16")
self.gridLayout.addWidget(self.label_16, 6, 0, 1, 1)
self.spinBox = QtWidgets.QSpinBox(self.dockWidgetContents)
self.spinBox.setMinimumSize(QtCore.QSize(0, 0))
self.spinBox.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.spinBox.setObjectName("spinBox")
self.gridLayout.addWidget(self.spinBox, 6, 1, 1, 1)
self.spinBoxDis = QtWidgets.QSpinBox(self.dockWidgetContents)
self.spinBoxDis.setEnabled(False)
self.spinBoxDis.setMinimumSize(QtCore.QSize(0, 0))
self.spinBoxDis.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.spinBoxDis.setObjectName("spinBoxDis")
self.gridLayout.addWidget(self.spinBoxDis, 6, 2, 1, 1)
self.label_17 = QtWidgets.QLabel(self.dockWidgetContents)
self.label_17.setMinimumSize(QtCore.QSize(0, 0))
self.label_17.setMaximumSize(QtCore.QSize(16777215, 16777215))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_17.setFont(font)
self.label_17.setObjectName("label_17")
self.gridLayout.addWidget(self.label_17, 7, 0, 1, 1)
self.doubleSpinBox = QtWidgets.QDoubleSpinBox(self.dockWidgetContents)
self.doubleSpinBox.setMinimumSize(QtCore.QSize(0, 0))
self.doubleSpinBox.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.doubleSpinBox.setObjectName("doubleSpinBox")
self.gridLayout.addWidget(self.doubleSpinBox, 7, 1, 1, 1)
self.doubleSpinBoxDis = QtWidgets.QDoubleSpinBox(self.dockWidgetContents)
self.doubleSpinBoxDis.setEnabled(False)
self.doubleSpinBoxDis.setMinimumSize(QtCore.QSize(0, 0))
self.doubleSpinBoxDis.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.doubleSpinBoxDis.setObjectName("doubleSpinBoxDis")
self.gridLayout.addWidget(self.doubleSpinBoxDis, 7, 2, 1, 1)
self.label_18 = QtWidgets.QLabel(self.dockWidgetContents)
self.label_18.setMinimumSize(QtCore.QSize(0, 0))
self.label_18.setMaximumSize(QtCore.QSize(16777215, 16777215))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_18.setFont(font)
self.label_18.setObjectName("label_18")
self.gridLayout.addWidget(self.label_18, 8, 0, 1, 1)
self.timeEdit = QtWidgets.QTimeEdit(self.dockWidgetContents)
self.timeEdit.setMinimumSize(QtCore.QSize(0, 0))
self.timeEdit.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.timeEdit.setObjectName("timeEdit")
self.gridLayout.addWidget(self.timeEdit, 8, 1, 1, 1)
self.timeEditDis = QtWidgets.QTimeEdit(self.dockWidgetContents)
self.timeEditDis.setEnabled(False)
self.timeEditDis.setMinimumSize(QtCore.QSize(0, 0))
self.timeEditDis.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.timeEditDis.setObjectName("timeEditDis")
self.gridLayout.addWidget(self.timeEditDis, 8, 2, 1, 1)
self.label_19 = QtWidgets.QLabel(self.dockWidgetContents)
self.label_19.setMinimumSize(QtCore.QSize(0, 0))
self.label_19.setMaximumSize(QtCore.QSize(16777215, 16777215))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_19.setFont(font)
self.label_19.setObjectName("label_19")
self.gridLayout.addWidget(self.label_19, 9, 0, 1, 1)
self.dateEdit = QtWidgets.QDateEdit(self.dockWidgetContents)
self.dateEdit.setMinimumSize(QtCore.QSize(0, 0))
self.dateEdit.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.dateEdit.setObjectName("dateEdit")
self.gridLayout.addWidget(self.dateEdit, 9, 1, 1, 1)
self.dateEditDis = QtWidgets.QDateEdit(self.dockWidgetContents)
self.dateEditDis.setEnabled(False)
self.dateEditDis.setMinimumSize(QtCore.QSize(0, 0))
self.dateEditDis.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.dateEditDis.setObjectName("dateEditDis")
self.gridLayout.addWidget(self.dateEditDis, 9, 2, 1, 1)
self.label_20 = QtWidgets.QLabel(self.dockWidgetContents)
self.label_20.setMinimumSize(QtCore.QSize(0, 0))
self.label_20.setMaximumSize(QtCore.QSize(16777215, 16777215))
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_20.setFont(font)
self.label_20.setObjectName("label_20")
self.gridLayout.addWidget(self.label_20, 10, 0, 1, 1)
self.dateTimeEdit = QtWidgets.QDateTimeEdit(self.dockWidgetContents)
self.dateTimeEdit.setMinimumSize(QtCore.QSize(0, 0))
self.dateTimeEdit.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.dateTimeEdit.setObjectName("dateTimeEdit")
self.gridLayout.addWidget(self.dateTimeEdit, 10, 1, 1, 1)
self.dateTimeEditDis = QtWidgets.QDateTimeEdit(self.dockWidgetContents)
self.dateTimeEditDis.setEnabled(False)
self.dateTimeEditDis.setMinimumSize(QtCore.QSize(0, 0))
self.dateTimeEditDis.setMaximumSize(QtCore.QSize(16777215, 16777215))
self.dateTimeEditDis.setObjectName("dateTimeEditDis")
self.gridLayout.addWidget(self.dateTimeEditDis, 10, 2, 1, 1)
spacerItem = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout.addItem(spacerItem, 11, 0, 1, 1)
self.label_51 = QtWidgets.QLabel(self.dockWidgetContents)
self.label_51.setAlignment(QtCore.Qt.AlignCenter)
self.label_51.setObjectName("label_51")
self.gridLayout.addWidget(self.label_51, 12, 0, 1, 3)
DockWidget.setWidget(self.dockWidgetContents)
self.retranslateUi(DockWidget)
self.fontComboBox.editTextChanged['QString'].connect(self.fontComboBoxDis.setEditText)
self.lineEdit.textEdited['QString'].connect(self.lineEditDis.setText)
self.spinBox.valueChanged['int'].connect(self.spinBoxDis.setValue)
self.doubleSpinBox.valueChanged['double'].connect(self.doubleSpinBoxDis.setValue)
self.timeEdit.timeChanged['QTime'].connect(self.timeEditDis.setTime)
self.dateEdit.dateTimeChanged['QDateTime'].connect(self.dateEditDis.setDateTime)
self.dateTimeEdit.dateTimeChanged['QDateTime'].connect(self.dateTimeEditDis.setDateTime)
QtCore.QMetaObject.connectSlotsByName(DockWidget)
def retranslateUi(self, DockWidget):
_translate = QtCore.QCoreApplication.translate
DockWidget.setWindowTitle(_translate("DockWidget", "Inputs - Fields"))
self.label.setText(_translate("DockWidget", "Enabled"))
self.label_2.setText(_translate("DockWidget", "Disabled"))
self.label_12.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.label_12.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.label_12.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.label_12.setText(_translate("DockWidget", "FontComboBox"))
self.fontComboBox.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.fontComboBox.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.fontComboBox.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.fontComboBoxDis.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.fontComboBoxDis.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.fontComboBoxDis.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.label_3.setText(_translate("DockWidget", "<html><head/><body><p><span style=\" font-weight:600;\">ComboBox</span></p></body></html>"))
self.comboBoxEdit.setItemText(0, _translate("DockWidget", "ComboBoxEditable"))
self.comboBoxEdit.setItemText(1, _translate("DockWidget", "Option 1 No Icon"))
self.comboBoxEdit.setItemText(2, _translate("DockWidget", "Option 2 No Icon"))
self.comboBoxEdit.setItemText(3, _translate("DockWidget", "Option 1 With Icon"))
self.comboBoxEdit.setItemText(4, _translate("DockWidget", "Option 2 With Icon"))
self.comboBoxEditDis.setItemText(0, _translate("DockWidget", "ComboBoxEditable"))
self.comboBoxEditDis.setItemText(1, _translate("DockWidget", "Second option"))
self.label_13.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.label_13.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.label_13.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.label_13.setText(_translate("DockWidget", "LineEdit"))
self.lineEdit.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.lineEdit.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.lineEdit.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.lineEdit.setText(_translate("DockWidget", "LineEdit"))
self.lineEditDis.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.lineEditDis.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.lineEditDis.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.lineEditDis.setText(_translate("DockWidget", "LineEdit"))
self.label_14.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.label_14.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.label_14.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.label_14.setText(_translate("DockWidget", "TextEdit"))
self.textEdit.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.textEdit.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.textEdit.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.textEdit.setHtml(_translate("DockWidget", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Sans Serif\'; font-size:9pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Cantarell\'; font-size:11pt;\">TextEdit</span></p></body></html>"))
self.textEditDis.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.textEditDis.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.textEditDis.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.textEditDis.setHtml(_translate("DockWidget", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Sans Serif\'; font-size:9pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'Cantarell\'; font-size:11pt;\">TextEdit</span></p></body></html>"))
self.label_15.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.label_15.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.label_15.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.label_15.setText(_translate("DockWidget", "PlainTextEdit"))
self.plainTextEdit.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.plainTextEdit.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.plainTextEdit.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.plainTextEdit.setPlainText(_translate("DockWidget", "PlainTextEdit"))
self.plainTextEditDis.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.plainTextEditDis.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.plainTextEditDis.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.plainTextEditDis.setPlainText(_translate("DockWidget", "PlainTextEdit"))
self.label_16.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.label_16.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.label_16.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.label_16.setText(_translate("DockWidget", "SpinBox"))
self.spinBox.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.spinBox.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.spinBox.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.spinBoxDis.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.spinBoxDis.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.spinBoxDis.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.label_17.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.label_17.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.label_17.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.label_17.setText(_translate("DockWidget", "DoubleSpinBox"))
self.doubleSpinBox.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.doubleSpinBox.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.doubleSpinBox.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.doubleSpinBoxDis.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.doubleSpinBoxDis.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.doubleSpinBoxDis.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.label_18.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.label_18.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.label_18.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.label_18.setText(_translate("DockWidget", "TimeEdit"))
self.timeEdit.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.timeEdit.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.timeEdit.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.timeEditDis.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.timeEditDis.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.timeEditDis.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.label_19.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.label_19.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.label_19.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.label_19.setText(_translate("DockWidget", "DateEdit"))
self.dateEdit.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.dateEdit.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.dateEdit.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.dateEditDis.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.dateEditDis.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.dateEditDis.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.label_20.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.label_20.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.label_20.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.label_20.setText(_translate("DockWidget", "TimeDateEdit"))
self.dateTimeEdit.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.dateTimeEdit.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.dateTimeEdit.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.dateTimeEditDis.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.dateTimeEditDis.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.dateTimeEditDis.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.label_51.setToolTip(_translate("DockWidget", "This is a tool tip"))
self.label_51.setStatusTip(_translate("DockWidget", "This is a status tip"))
self.label_51.setWhatsThis(_translate("DockWidget", "This is \"what is this\""))
self.label_51.setText(_translate("DockWidget", "Inside DockWidget"))
from qdarkstyle import style_rc
| StarcoderdataPython |
4934685 | #! python3
class GSuggestion:
    """A single autocomplete suggestion."""
    # Class-level defaults; __init__ always shadows them with instance values.
    word = ""
    annotation = ""
    matched_length = 0
    def __init__(self, word, annotation, matched_length):
        self.word = word
        self.annotation = annotation
        # Number of characters of the query that this suggestion matched.
        self.matched_length = matched_length
class GRequest:
    """State of one suggestion request and its paging progress."""
    # Class-level defaults, kept for backward compatibility with code that
    # reads them off the class itself.
    request = ""
    suggestions = []
    requested_pages = 0
    max_pages = 32
    requested_time = 0

    def __init__(self):
        # Give each instance its own list: the class-level ``[]`` above
        # would otherwise be shared (and mutated) across every GRequest.
        self.request = ""
        self.suggestions = []
        self.requested_pages = 0
        self.requested_time = 0
class GPage:
    """One page of suggestions for a word."""
    # Class-level defaults; every attribute is shadowed per instance
    # in __init__, so these only matter for class-level reads.
    word = ""
    page_num = 0
    suggestions = []

    def __init__(self, word, page_num, suggestions):
        self.word = word
        self.page_num = page_num
        self.suggestions = suggestions
3268824 | <filename>python/orp/orp/authority_ner/new_extractAuthorities.py
#
# Copyright (C) Analytics Engines 2021
# <NAME> (<EMAIL>)
# <NAME> (<EMAIL>)
# <NAME> (<EMAIL>)
#
from lxml import etree as ET
import re
def checkInnerHTML(element):
    """Return the whitespace-normalized text of an lxml element.

    If the element has children, concatenates ``element.text`` with the
    serialized markup of each child that itself carries text.
    NOTE(review): children whose ``.text`` is None are dropped entirely
    (including their tail text) -- confirm this is intended.
    """
    children = element.getchildren()
    if len(children) > 0:
        resultStr = element.text or ''
        for e in children:
            if e.text:
                resultStr += ET.tostring(e, encoding='unicode')
        finalStr = " ".join(resultStr.split())
    else:
        # The caller (get_htmlTextExtentMap) only calls this when
        # element.text is truthy, so this cannot be None here.
        finalStr = " ".join(element.text.split())
    return finalStr
def get_htmlTextExtentMap(html):
    """Parse *html* and return one {'raw_text', 'extent_path'} dict per
    element that carries non-empty, whitespace-normalized text."""
    tree = ET.HTML(html)
    etree = ET.ElementTree(tree)
    textExtents = []
    for element in etree.iter():
        textMap = {}
        if element.text:
            if element.tag == 'p':
                # Paragraphs may contain inline markup; include serialized children.
                text = checkInnerHTML(element)
            else:
                text = " ".join(element.text.split())
            if text != "":
                textMap['raw_text'] = text
                # Structural path within the tree (e.g. 'body/div/p[2]'),
                # used later to report where a match was found.
                path = etree.getelementpath(element)
                textMap['extent_path'] = path
                textExtents.append(textMap)
    return textExtents
def get_regexMatchLocation(regex, htmlTextExtentMap):
    """Find every match of *regex* in the extracted text extents.

    :param regex: compiled pattern or pattern string accepted by ``re.finditer``.
    :param htmlTextExtentMap: list of {'raw_text', 'extent_path'} dicts as
        produced by ``get_htmlTextExtentMap``.
    :return: list of ``(matched_text, extent_info)`` tuples, where
        extent_info is ``{"type": "html", "sections": [...]}`` and each
        section records the extent path plus character offsets of one match.
    """
    from collections import defaultdict

    # defaultdict removes the original membership-test-then-append dance
    # (and the local variable that shadowed the ``iter`` builtin).
    named_entities = defaultdict(list)
    for extent in htmlTextExtentMap:
        for m in re.finditer(regex, extent['raw_text']):
            named_entities[m.group()].append({
                'extent_start': extent['extent_path'],
                'extent_end': extent['extent_path'],
                'extent_char_start': m.start(0),
                'extent_char_end': m.end(0),
            })
    return [
        (entity, {"type": "html", "sections": sections})
        for entity, sections in named_entities.items()
    ]
def extract_entities_from_string(html, patterns: dict):
    """Run every named regex in *patterns* over *html* and return a flat list
    of ``(matched_text, extent_descriptor)`` tuples.

    *patterns* maps an entity-type name (e.g. ``"departments"``) to a regex
    string.

    Fix: the original rebuilt the HTML extent map once per pattern even
    though it only depends on *html*; it is now computed a single time.
    """
    html_extent_map = get_htmlTextExtentMap(html)
    all_entities = []
    for entity, pattern in patterns.items():
        matches = get_regexMatchLocation(re.compile(pattern), html_extent_map)
        all_entities.extend(matches)
    return all_entities
# html_file = "data/debug.html"
# html_2 = "python/orp/orp/authority_ner/test/ukla_1994.html"
# html_3 = "python/orp/orp/authority_ner/test/test.html"
# patterns = {"departments": r"the (Department|Ministry|Office) of(\sthe)? ([A-Z][a-z]+(?=(,)?(\s([A-Z]|and|[a-z]))))(?:(((,\s)|(\s)|(\sand\s))([A-Z][a-z]+)+)+)?(?![^<>]*>)",
# "authorities": r"the ([A-Z][a-z]+(?=\s[A-Z])((?:\s[A-Z][a-z]+)?)+) (Administration|Agency|Assembly|Authority|Board|Commission|Committee|Corporation|Council|Court|Executive|Institute|Office|Ombudsman|Parliament|Registry|Regulator|Service|Tribunal|Trust)(?=\sfor ([A-Z][a-z]+))?(?:\sfor(\s[A-Z][a-z]+)+)?(?![^<>]*>)"}
# with open(html_2, 'r') as file:
# html = file.read()
# test = extract_entities_from_string(html, patterns)
# print('hello')
| StarcoderdataPython |
11282702 | """
Wrapper for loading templates from "templates" directories in INSTALLED_APPS
packages.
"""
from django.template.utils import get_app_template_dirs
from .filesystem import Loader as FilesystemLoader
class Loader(FilesystemLoader):
    """Template loader that reuses the filesystem loader's machinery, but
    searches the ``templates`` subdirectory of every installed Django app."""

    def get_dirs(self):
        # Collect <app>/templates directories for all INSTALLED_APPS packages.
        return get_app_template_dirs("templates")
| StarcoderdataPython |
11397113 | # -*- coding: utf-8 -*-
# @Author : feier
# @File : calc.py
# 计算器
class Calculator:
    """Simple four-function arithmetic calculator."""

    def add(self, a, b):
        """Return the sum a + b."""
        return a + b

    def sub(self, a, b):
        """Return the difference a - b."""
        return a - b

    def mul(self, a, b):
        """Return the product a * b."""
        return a * b

    def div(self, a, b):
        """Return a / b (true division; raises ZeroDivisionError when b == 0)."""
        return a / b
| StarcoderdataPython |
8196694 | import multiprocessing
import os
import pickle
import numpy as np
import pandas as pd
import torch
from analysis import mig
from experiments import spec_util
from models import infogan, load_checkpoint
from morphomnist import io, measure
DATA_ROOT = "/vol/biomedic/users/dc315/mnist"       # root of the Morpho-MNIST datasets
CHECKPOINT_ROOT = "/data/morphomnist/checkpoints"   # trained InfoGAN checkpoints
MIG_ROOT = "/data/morphomnist/mig"                  # where MIG results are written
# Maps a '+'-joined perturbation spec to the dataset directory name under DATA_ROOT.
SPEC_TO_DATASET = {"plain": "plain",
                   "plain+thin+thic": "global",
                   "plain+swel+frac": "local"}
def encode(gan: infogan.InfoGAN, x):
    """Encode images *x* through the GAN's discriminator and recognition head.

    Returns ``(cat_logits, cont_mean, cont_logvar, bin_logit)`` — the latent
    code posteriors predicted by the recognition network. Runs under
    ``torch.no_grad()`` since this is inference only.
    """
    with torch.no_grad():
        _, hidden = gan.dis(x)
        cat_logits, cont_mean, cont_logvar, bin_logit = gan.rec(hidden)
    return cat_logits, cont_mean, cont_logvar, bin_logit
def interleave(arrays, which):
    """Merge each group of parallel sequences in *arrays* into one sequence.

    For each group, a copy of the first sequence is made and its rows are
    overwritten with rows from sequence ``which[row]``; the merged first
    sequences are returned. Callers' inputs are not mutated (beyond the
    ``group[0]`` rebinding inside the passed lists, as before).
    """
    for group in arrays:
        group[0] = group[0].copy()
    for source in range(1, max(which) + 1):
        mask = which == source
        for group in arrays:
            group[0][mask] = group[source][mask]
    return [group[0] for group in arrays]
def load_test_data(data_dirs, weights=None):
    """Load test-set metrics, images and labels from each directory.

    With a single directory, returns ``(metrics, images, labels, None)``.
    With several, rows are randomly interleaved across the sources (optionally
    with per-source *weights*) and the choice vector is returned as the fourth
    element.
    """
    def paths(filename):
        return [os.path.join(data_dir, filename) for data_dir in data_dirs]

    metrics = [pd.read_csv(p) for p in paths("t10k-morpho.csv")]
    images = [io.load_idx(p) for p in paths("t10k-images-idx3-ubyte.gz")]
    labels = [io.load_idx(p) for p in paths("t10k-labels-idx1-ubyte.gz")]
    if len(data_dirs) == 1:
        return metrics[0], images[0], labels[0], None
    if weights is not None:
        weights = np.array(weights) / np.sum(weights)
    which = np.random.choice(len(data_dirs), size=len(metrics[0]), p=weights)
    metrics, images, labels = interleave([metrics, images, labels], which)
    return metrics, images, labels, which
def compute_mig(gan: infogan.InfoGAN, images, metrics, cols):
    """Compute the Mutual Information Gap between the GAN's inferred latent
    codes for *images* and the ground-truth factors in ``metrics[cols]``.

    Returns ``(mig_score, mutual_information, entropy)``.
    """
    cat_logits, mean, logvar, bin_logits = encode(gan, images)
    # Categorical code: posterior class probabilities.
    phi = torch.softmax(cat_logits.cpu(), dim=1).numpy()
    mu = mean.cpu().numpy()
    # Binary code probability; empty (n, 0) matrix when the model has no binary code.
    gamma = torch.sigmoid(bin_logits.cpu()).numpy() \
        if bin_logits is not None else np.empty([metrics.shape[0], 0])
    phi_ = phi.argmax(1)   # hard categorical assignment
    gamma_ = gamma > .5    # hard binary assignment
    codes = np.column_stack([phi_, mu, gamma_])
    factors = metrics[cols].values
    # Only the continuous (mu) columns need discretisation for the MI estimate.
    discretize_codes = [False] + [True] * mu.shape[1] + [False] * gamma_.shape[1]
    mig_score, mi, entropy = mig.mig(codes, factors, discretize_codes=discretize_codes, bins='auto')
    print(mi / entropy)
    print("MIG:", mig_score)
    return mig_score, mi, entropy
def add_swel_frac(data_dir, metrics):
    """Add binary 'swel'/'frac' indicator columns to *metrics*, derived from
    the per-image perturbation labels stored alongside the test set
    (codes 3 = swelling, 4 = fracture)."""
    pert_path = os.path.join(data_dir, "t10k-pert-idx1-ubyte.gz")
    pert_labels = io.load_idx(pert_path)
    for column, code in (('swel', 3), ('frac', 4)):
        metrics[column] = (pert_labels == code).astype(int)
def process(gan: infogan.InfoGAN, data, metrics, cols, pcorr_dir, spec, label, hrule=None):
    """Compute MIG for *data* against ``metrics[cols]`` and pickle the result
    to ``<pcorr_dir>/<spec>_mig_<label>.pickle``."""
    mig_score, mi, entropy = compute_mig(gan, data, metrics, cols)
    payload = dict(cols=cols, hrule=hrule, mig=mig_score, mi=mi, entropy=entropy)
    path = os.path.join(pcorr_dir, f"{spec}_mig_{label}.pickle")
    print("Saving output to", path)
    with open(path, 'wb') as out:
        pickle.dump(payload, out, pickle.HIGHEST_PROTOCOL)
def main(checkpoint_dir, mig_dir=None):
    """Load the InfoGAN checkpoint named by *checkpoint_dir* and write MIG
    results for both the real test set and a fresh sample batch.

    Results go to *mig_dir* (defaults to the checkpoint directory).
    """
    # The directory name encodes the model spec, e.g. "InfoGAN-10c2g62n_plain".
    spec = os.path.split(checkpoint_dir)[-1]
    _, latent_dims, dataset_names = spec_util.parse_setup_spec(spec)
    device = torch.device('cuda')
    gan = infogan.InfoGAN(*latent_dims)
    trainer = infogan.Trainer(gan).to(device)
    load_checkpoint(trainer, checkpoint_dir)
    gan.eval()
    dataset_name = SPEC_TO_DATASET['+'.join(dataset_names)]
    data_dirs = [os.path.join(DATA_ROOT, dataset_name)]
    test_metrics, test_images, test_labels, test_which = load_test_data(data_dirs)
    print(test_metrics.head())
    # Shuffle the full test set; normalise pixel values to [0, 1].
    idx = np.random.permutation(10000)#[:1000]
    X = torch.from_numpy(test_images[idx]).float().unsqueeze(1).to(device) / 255.
    cols = ['length', 'thickness', 'slant', 'width', 'height']
    test_cols = cols[:]
    test_hrule = None
    if 'swel+frac' in spec:
        # Local-perturbation models also get binary swelling/fracture factors.
        add_swel_frac(data_dirs[0], test_metrics)
        test_cols += ['swel', 'frac']
        test_hrule = len(cols)
    if mig_dir is None:
        mig_dir = checkpoint_dir
    os.makedirs(mig_dir, exist_ok=True)
    process(gan, X, test_metrics.loc[idx], test_cols, mig_dir, spec, 'test', test_hrule)
    # Repeat on 10k generated samples, measuring their morphometrics on the fly.
    X_ = gan(10000).detach()
    with multiprocessing.Pool() as pool:
        sample_metrics = measure.measure_batch(X_.cpu().squeeze().numpy(), pool=pool)
    sample_hrule = None
    process(gan, X_, sample_metrics, cols, mig_dir, spec, 'sample', sample_hrule)
if __name__ == '__main__':
    # Evaluate MIG for the three trained model variants.
    specs = [
        "InfoGAN-10c2g62n_plain",
        "InfoGAN-10c3g62n_plain+thin+thic",
        "InfoGAN-10c2g2b62n_plain+swel+frac",
    ]
    np.set_printoptions(precision=2, linewidth=100)
    for spec in specs:
        checkpoint_dir = os.path.join(CHECKPOINT_ROOT, spec)
        main(checkpoint_dir, MIG_ROOT)
| StarcoderdataPython |
327924 | <filename>scripts/gcovr-3.3/doc/examples/test_examples.py<gh_stars>1-10
# Imports
import pyutilib.th as unittest
import glob
import os
from os.path import dirname, abspath, basename
import sys
import re
currdir = dirname(abspath(__file__))+os.sep
datadir = currdir
# "path:rest" compiler-style output lines (used to strip the path prefix).
compilerre = re.compile("^(?P<path>[^:]+)(?P<rest>:.*)$")
# Leading directory components (everything up to the last '/').
dirre = re.compile("^([^%s]*/)*" % re.escape(os.sep))
# Quoted strings that look like file paths (contain a '/').
xmlre = re.compile("\"(?P<path>[^\"]*/[^\"]*)\"")
# Volatile XML attributes to blank out before baseline comparison.
datere = re.compile("date=\"[^\"]*\"")
versionre = re.compile("version=\"[^\"]*\"")
timestampre = re.compile("timestamp=\"[^\"]*\"")
# Lines carrying a file="..." attribute (e.g. test-failure records).
failure = re.compile("^(?P<prefix>.+)file=\"(?P<path>[^\"]+)\"(?P<suffix>.*)$")
def filter(line):
    """Normalise one line of gcovr output for baseline comparison.

    Returns ``False`` for lines containing 'Running', ``True`` for lines
    containing 'IGNORE', otherwise the cleaned-up line.
    NOTE(review): the mixed bool/str return follows pyutilib's baseline-test
    filter protocol — presumably a truthy non-string return means "skip this
    line"; confirm against pyutilib.th documentation.
    """
    # for xml, remove prefixes from everything that looks like a
    # file path inside ""
    line = xmlre.sub(
        lambda match: '"'+re.sub("^[^/]+/", "", match.group(1))+'"',
        line
    )
    # Remove date info
    line = datere.sub( lambda match: 'date=""', line)
    # Remove version info
    line = versionre.sub( lambda match: 'version=""', line)
    # Remove timestamp info
    line = timestampre.sub( lambda match: 'timestamp=""', line)
    if 'Running' in line:
        return False
    if "IGNORE" in line:
        return True
    pathmatch = compilerre.match(line) # see if we can remove the basedir
    failmatch = failure.match(line) # see if we can remove the basedir
    #print "HERE", pathmatch, failmatch
    if failmatch:
        parts = failmatch.groupdict()
        #print "X", parts
        # Strip directory components from the file="..." path.
        line = "%s file=\"%s\" %s" % (parts['prefix'], dirre.sub("", parts['path']), parts['suffix'])
    elif pathmatch:
        parts = pathmatch.groupdict()
        #print "Y", parts
        # Strip directory components from the leading path of "path:rest" lines.
        line = dirre.sub("", parts['path']) + parts['rest']
    return line
# Declare an empty TestCase class; baseline tests are attached to it below.
class Test(unittest.TestCase): pass

# The example shell scripts don't run on Windows, so only register there.
if not sys.platform.startswith('win'):
    # Find all *.sh files, and use them to define baseline tests
    for file in glob.glob(datadir+'*.sh'):
        bname = basename(file)
        name=bname.split('.')[0]
        # Only scripts with a matching <name>.txt expected-output baseline.
        if os.path.exists(datadir+name+'.txt'):
            Test.add_baseline_test(cwd=datadir, cmd=file, baseline=datadir+name+'.txt', name=name, filter=filter)

# Execute the tests
if __name__ == '__main__':
    unittest.main()
| StarcoderdataPython |
# Some Basic Examples
#
# Fixed relative to the original snippet:
#   * the missing ``import re`` was added (the module was never imported)
#   * the Python-2 ``print match.group()`` statement was converted to a
#     Python-3 ``print(...)`` call
#   * the final example's subject string was restored so it actually matches
#     its documented result (' Theloderma asper'); the anonymised
#     placeholder made ``.group()`` fail on a non-match
import re

match = re.search(r'\d', "it takes 2 to tango")
print(match.group())  # prints 2

match = re.search(r'\s\w*\s', 'once upon a time')
match.group()  # ' upon '

match = re.search(r'\s\w{1,3}\s', 'once upon a time')
match.group()  # ' a '

match = re.search(r'\s\w*$', 'once upon a time')
match.group()  # ' time'

match = re.search(r'\w*\s\d.*\d', 'take 2 grams of H2O')
match.group()  # 'take 2 grams of H2'

match = re.search(r'^\w*.*\s', 'once upon a time')
match.group()  # 'once upon a '

## NOTE THAT *, +, and { } are all "greedy":
## They repeat the previous regex token as many times as possible
## As a result, they may match more text than you want
## To make it non-greedy, use ?:
match = re.search(r'^\w*.*?\s', 'once upon a time')
match.group()  # 'once '

## To further illustrate greediness, let's try matching an HTML tag:
match = re.search(r'<.+>', 'This is a <EM>first</EM> test')
match.group()  # '<EM>first</EM>'
## But we didn't want this: we wanted just <EM>
## It's because + is greedy!
## Instead, we can make + "lazy"!
match = re.search(r'<.+?>', 'This is a <EM>first</EM> test')
match.group()  # '<EM>'

## OK, moving on from greed and laziness
match = re.search(r'\d*\.?\d*', '1432.75+60.22i')  # note "\" before "."
match.group()  # '1432.75'
match = re.search(r'\d*\.?\d*', '1432+60.22i')
match.group()  # '1432'

match = re.search(r'[AGTC]+', 'the sequence ATTCGT')
match.group()  # 'ATTCGT'

re.search(r'\s+[A-Z]{1}\w+\s\w+', "The bird-shit frog's name is Theloderma asper").group()  # ' Theloderma asper'
## NOTE THAT I DIRECTLY RETURNED THE RESULT BY APPENDING .group()
6501182 | import tensorflow as tf
import tensorflow_quantum as tfq
import numpy as np
import cirq
import sympy
class ReUploadPQC(tf.keras.layers.Layer):
    """Data re-uploading parameterised quantum circuit as a Keras layer.

    Each of ``layers`` circuit layers applies RY/RZ/RY rotations per qubit
    (3 parameters per qubit per layer) plus ring-CNOT entanglement; classical
    inputs are "re-uploaded" by scaling them with trainable weights ``w`` and
    adding trainable offsets ``theta`` to form the circuit parameters.
    """

    def __init__(self, qubit, layers, obs) -> None:
        # qubit: list of cirq qubits; obs: observables measured by the PQC.
        super(ReUploadPQC, self).__init__()
        self.num_params = len(qubit) * 3 * layers
        self.layers = layers
        self.qubits = qubit
        # Trainable additive angles and multiplicative input weights, both (1, num_params).
        self.theta = tf.Variable(initial_value=np.random.uniform(0, 2 * np.pi, (1, self.num_params)), dtype="float32", trainable=True)
        self.w = tf.Variable(initial_value=np.random.uniform(0, 2 * np.pi, (1, self.num_params)), dtype="float32", trainable=True)
        self.params = sympy.symbols("params0:%d"%self.num_params)
        self.model = tfq.layers.ControlledPQC(self.make_circuit(layers, self.params), obs, differentiator=tfq.differentiators.Adjoint())
        # Empty input circuit — all state preparation happens via the PQC parameters.
        self.in_circuit = tfq.convert_to_tensor([cirq.Circuit()])

    def make_circuit(self, layers, params):
        """Build the full circuit by stacking `layers` parameterised layers,
        handing each layer its slice of the symbol list."""
        c = cirq.Circuit()
        for i in range(layers):
            c = self.layer(c, params[len(self.qubits) * i * 3: (i * 3 + 3) * len(self.qubits)])
        return c

    def layer(self, cir, params):
        """Append one RY-RZ-RY rotation block per qubit plus a CNOT ring
        (entanglement skipped for a single qubit)."""
        for i in range(len(self.qubits)):
            cir += cirq.ry(params[i*3]).on(self.qubits[i])
            cir += cirq.rz(params[i*3 + 1]).on(self.qubits[i])
            cir += cirq.ry(params[i*3 + 2]).on(self.qubits[i])
            if len(self.qubits) > 1:
                cir += cirq.CNOT(self.qubits[i], self.qubits[(i + 1) % len(self.qubits)])
        return cir

    # inputs = (batch, in_size)
    def call(self, inputs):
        # NOTE(review): tiling assumes num_params is an exact multiple of
        # in_size — confirm callers guarantee this.
        num_batch = tf.gather(tf.shape(inputs), 0)
        # (1, 1) -> (batch, 1)
        input_circuits = tf.repeat(self.in_circuit, repeats=num_batch)
        # (batch, in_size) -> (batch, num_params)
        inputs = tf.tile(inputs, [1, int(self.num_params/inputs.shape[1])])
        # (1, num_param) * (batch, num_params) -> (batch, num_params)
        w = tf.math.multiply(self.w, inputs)
        # (1, num_param) -> (batch, num_params)
        thetas = tf.tile(self.theta, [num_batch, 1])
        # (batch, num_params) + (batch, num_params) -> (batch, num_params)
        params = thetas + w
        return self.model([input_circuits, params])
| StarcoderdataPython |
11344446 | <filename>Projetos Python/pythonexercicios/aula20.py<gh_stars>0
def l():
    """Print a decorative '-=' separator line."""
    print(30 * '-=')
def soma(a, b):
    """Print the two operands and their sum (labels in Portuguese)."""
    print(f'A = {a} e B = {b}')
    print(f'A soma é {a + b}')
# Main program ("Programa Principal"): demo calls showing positional,
# keyword, and mixed argument passing to soma().
soma(4, 5)
soma(b=8, a=9)
soma(2, 1)
l()
def contador(*num):
    """Print the received values and how many there are (Portuguese labels)."""
    total = len(num)
    print(f'Recebi os valores {num} e são ao todo {total} números.')
# Demo: *args lets contador accept any number of values.
contador(2, 1, 7)
contador(8, 0)
contador(4, 4, 7, 6, 2)
l()
def dobra(lst):
    """Double every element of *lst* in place (the caller's list is mutated)."""
    for index, value in enumerate(lst):
        lst[index] = value * 2
# Demo: lists are passed by reference, so dobra() mutates `valores` in place.
valores = [6, 3, 9, 1, 0, 2]
dobra(valores)
print(valores)
l()
| StarcoderdataPython |
6692749 | import numpy as np
from sklearn.metrics import precision_recall_fscore_support, matthews_corrcoef
from deepbond import constants
from deepbond.models.utils import unroll, unmask
class BestValueEpoch:
    """Pairs a best-so-far metric value with the epoch at which it was seen.

    Improvement: a ``__repr__`` is added for debuggability (the original
    printed only the default ``<object at 0x...>`` form).
    """

    def __init__(self, value, epoch):
        self.value = value
        self.epoch = epoch

    def __repr__(self):
        return f"{type(self).__name__}(value={self.value!r}, epoch={self.epoch!r})"
class Stats(object):
    """
    Keep stats information during training and evaluation

    Accumulates per-batch predictions/losses via :meth:`update`, exposes
    metric getters, and tracks the best value (and its epoch) of each metric
    via :meth:`calc`.

    Args:
        tags_vocab (dict): vocab object for tags field
    """
    def __init__(self, tags_vocab):
        self.tags_vocab = tags_vocab
        # this attrs will be updated every time a new prediction is added
        self.pred_classes = []
        self.gold_classes = []
        self.loss_accum = 0
        # this attrs will be set when get_ methods are called
        self.loss = None
        self.prec_rec_f1 = None
        self.ser = None
        self.mcc = None
        # this attrs will be set when calc method is called
        self.best_prec_rec_f1 = BestValueEpoch(value=[0, 0, 0], epoch=1)
        self.best_ser = BestValueEpoch(value=float('inf'), epoch=1)
        self.best_mcc = BestValueEpoch(value=0, epoch=1)
        self.best_loss = BestValueEpoch(value=float('inf'), epoch=1)

    def reset(self):
        """Clear per-epoch accumulators (best-value trackers are kept)."""
        self.pred_classes.clear()
        self.gold_classes.clear()
        self.loss_accum = 0
        self.loss = None
        self.prec_rec_f1 = None
        self.ser = None
        self.mcc = None

    @property
    def nb_batches(self):
        # One entry is appended to gold_classes per update() call.
        return len(self.gold_classes)

    def update(self, loss, pred_classes, golds):
        """Accumulate one batch: add *loss* and store unmasked, flattened
        predictions and gold labels (padding positions are dropped)."""
        self.loss_accum += loss
        # unmask & flatten predictions and gold labels before storing them
        mask = golds != constants.TAGS_PAD_ID
        self.pred_classes.extend(unroll(unmask(pred_classes, mask)))
        self.gold_classes.extend(unroll(unmask(golds, mask)))

    def get_loss(self):
        """Mean loss over all batches seen so far."""
        return self.loss_accum / self.nb_batches

    def get_prec_rec_f1(self):
        """Binary precision/recall/F1 with the '.' tag as the positive class."""
        prec, rec, f1, _ = precision_recall_fscore_support(
            self.gold_classes,
            self.pred_classes,
            beta=1.0,
            pos_label=self.tags_vocab['.'],
            average='binary'
        )
        return prec, rec, f1

    def get_slot_error_rate(self):
        """Slot Error Rate: mismatches divided by the number of gold slots.
        NOTE(review): assumes gold labels are 0/1 so their sum counts slots."""
        slots = np.sum(self.gold_classes)
        errors = np.sum(np.not_equal(self.gold_classes, self.pred_classes))
        return errors / slots

    def get_mcc(self):
        """Matthews correlation coefficient between gold and predicted tags."""
        mcc = matthews_corrcoef(self.gold_classes, self.pred_classes)
        return mcc

    def calc(self, current_epoch):
        """Compute all metrics for the current epoch and update each
        best-so-far tracker (lower is better for loss/SER, higher for F1/MCC)."""
        self.loss = self.get_loss()
        self.prec_rec_f1 = self.get_prec_rec_f1()
        self.ser = self.get_slot_error_rate()
        self.mcc = self.get_mcc()
        if self.loss < self.best_loss.value:
            self.best_loss.value = self.loss
            self.best_loss.epoch = current_epoch
        if self.prec_rec_f1[2] > self.best_prec_rec_f1.value[2]:
            self.best_prec_rec_f1.value[0] = self.prec_rec_f1[0]
            self.best_prec_rec_f1.value[1] = self.prec_rec_f1[1]
            self.best_prec_rec_f1.value[2] = self.prec_rec_f1[2]
            self.best_prec_rec_f1.epoch = current_epoch
        if self.ser < self.best_ser.value:
            self.best_ser.value = self.ser
            self.best_ser.epoch = current_epoch
        if self.mcc > self.best_mcc.value:
            self.best_mcc.value = self.mcc
            self.best_mcc.epoch = current_epoch

    def to_dict(self):
        """Snapshot of current metrics and their best-so-far trackers."""
        return {
            'loss': self.loss,
            'prec_rec_f1': self.prec_rec_f1,
            'ser': self.ser,
            'mcc': self.mcc,
            'best_loss': self.best_loss,
            'best_prec_rec_f1': self.best_prec_rec_f1,
            'best_ser': self.best_ser,
            'best_mcc': self.best_mcc,
        }
| StarcoderdataPython |
3534690 | <gh_stars>0
'''
The detection code is partially derived and modified from app-2Class.py by <NAME>.
'''
from flask import Flask, request, render_template, redirect
import cv2
import numpy as np
import tensorflow as tf
from utils import label_map_util
from utils import visualization_utils as vis_util
from matplotlib import pyplot as plt
from werkzeug.utils import secure_filename
app = Flask(__name__, template_folder='templates')
from datetime import timedelta
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = timedelta(seconds=1) # avoid client-side caching, which prevents showing fresh detection/splash results
import os
import sys
import random
# Root directory of the project
ROOT_DIR = os.path.abspath("../../")
sys.path.append(ROOT_DIR) # To find local version of the library
# Hard-coded paths to the frozen model, its label map, and the upload folder.
CKPT_DIR = '/Users/hailieboomboom/Documents/GitHub/models/research/object_detection/data/faster_RCNN_melonstrawberry/frozen_inference_graph.pb'
LABEL_DIR = '/Users/hailieboomboom/Documents/GitHub/models/research/object_detection/data/faster_RCNN_melonstrawberry/fruit_labelmap.pbtxt'
UPLOAD_FOLDER = '/Users/hailieboomboom/Documents/GitHub/models/research/object_detection/image_uploaded'
ALLOWED_EXTENSIONS = set(['jpg'])
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
class TOD(object):
    """TensorFlow Object Detection wrapper: loads a frozen Faster-RCNN graph
    and its label map, and runs detection + box visualisation on images."""

    def __init__(self):
        self.PATH_TO_CKPT = CKPT_DIR
        self.PATH_TO_LABELS = LABEL_DIR
        self.NUM_CLASSES = 2
        self.detection_graph = self._load_model()
        self.category_index = self._load_label_map()

    # load the pre-trained model via the frozen inference graph
    def _load_model(self):
        detection_graph = tf.Graph()
        with detection_graph.as_default():
            od_graph_def = tf.GraphDef()
            with tf.gfile.GFile(self.PATH_TO_CKPT, 'rb') as fid:
                serialized_graph = fid.read()
                od_graph_def.ParseFromString(serialized_graph)
                tf.import_graph_def(od_graph_def, name='')
        return detection_graph

    # load the label map so that we know what object has been detected
    def _load_label_map(self):
        label_map = label_map_util.load_labelmap(self.PATH_TO_LABELS)
        categories = label_map_util.convert_label_map_to_categories(label_map,
                                                                    max_num_classes=self.NUM_CLASSES,
                                                                    use_display_name=True)
        category_index = label_map_util.create_category_index(categories)
        return category_index

    def detect(self, image):
        """Run detection on one BGR image, draw the boxes onto it, write the
        annotated image to static/result.jpg and return count_result.

        NOTE(review): ``count_result = len(scores)`` counts the first tensor
        dimension, which for the detection graph is presumably the batch size
        (always 1), not the number of detections — confirm; a detection count
        would need thresholding the scores.
        """
        count_result = 0
        with self.detection_graph.as_default():
            with tf.Session(graph=self.detection_graph) as sess:
                # Expand dimensions since the model expects images to have shape: [1, None, None, 3]
                image_np_expanded = np.expand_dims(image, axis=0)
                image_tensor = self.detection_graph.get_tensor_by_name('image_tensor:0')
                boxes = self.detection_graph.get_tensor_by_name('detection_boxes:0')
                scores = self.detection_graph.get_tensor_by_name('detection_scores:0')
                classes = self.detection_graph.get_tensor_by_name('detection_classes:0')
                num_detections = self.detection_graph.get_tensor_by_name('num_detections:0')
                # Actual detection.
                (boxes, scores, classes, num_detections) = sess.run(
                    [boxes, scores, classes, num_detections],
                    feed_dict={image_tensor: image_np_expanded})
                # Visualization of the results of a detection.
                vis_util.visualize_boxes_and_labels_on_image_array(
                    image,
                    np.squeeze(boxes),
                    np.squeeze(classes).astype(np.int32),
                    np.squeeze(scores),
                    self.category_index,
                    use_normalized_coordinates=True,
                    line_thickness=8)
                count_result = len(scores)
                # cv2.namedWindow("detection", cv2.WINDOW_NORMAL)
                cv2.imwrite('/Users/hailieboomboom/Documents/GitHub/models/research/object_detection/static/result.jpg',image)
                cv2.waitKey(0)
        return count_result
################################################################
def run_detection():
    """Pick a random file from the upload folder and run object detection on it."""
    filenames = next(os.walk(UPLOAD_FOLDER))[2]
    chosen = random.choice(filenames)
    image = cv2.imread(os.path.join(UPLOAD_FOLDER, chosen))
    print('\n-----------------', len([image]), '---------------\n')
    detector = TOD()
    detector.detect(image)
    print("detection done")
def create_new_folder(local_dir):
    """Ensure *local_dir* (and any missing parents) exists and return it.

    Fix: replaces the original check-then-create sequence, which was racy —
    the directory could be created by another process between the
    ``os.path.exists`` check and ``os.makedirs``; ``exist_ok=True`` makes the
    operation atomic from the caller's point of view.
    """
    os.makedirs(local_dir, exist_ok=True)
    return local_dir
@app.route('/')
def index():
    """Serve the landing page."""
    return render_template('hello.html')
@app.route('/upload', methods = ['POST'])
def upload():
    """Accept an uploaded image, save it, run detection, and render the
    result page with the detection count.

    NOTE(review): when the 'image' field is missing/falsy the function falls
    through and returns None, which Flask turns into a 500 — confirm whether
    an explicit error response is wanted.
    """
    if request.method == 'POST' and request.files['image']:
        print("enter!!!!!!!!!!!!!!")
        #app.logger.info(app.config['UPLOAD_FOLDER'])
        img = request.files['image']
        #img_string = request.form['image']
        # Sanitise the client-supplied filename before using it on disk.
        img_name = secure_filename(img.filename)
        print(img_name)
        #print(img_string)
        create_new_folder(app.config['UPLOAD_FOLDER'])
        saved_path = os.path.join(app.config['UPLOAD_FOLDER'], img_name)
        print("create upload dir success")
        app.logger.info("saving {}".format(saved_path))
        img.save(saved_path)
        image = cv2.imread(saved_path)
        print("image read successfully")
        # A fresh TOD (and hence model load) per request — expensive but as designed.
        detector = TOD()
        count_result = detector.detect(image)
        print("Counting result is ")
        print(count_result)
        return render_template('complete.html', count_result = count_result)
# No caching at all for API endpoints.
@app.after_request
def add_header(response):
    """Stamp every outgoing response with headers that disable client and
    proxy caching, then return it."""
    no_cache_headers = {
        'Cache-Control': 'no-store, no-cache, must-revalidate, post-check=0, pre-check=0, max-age=0',
        'Pragma': 'no-cache',
        'Expires': '-1',
    }
    for header_name, header_value in no_cache_headers.items():
        response.headers[header_name] = header_value
    return response
'''
Entry point: run the Flask development server.
NOTE(review): binds to 0.0.0.0:80 (all interfaces, privileged port) with the
debug server — confirm this is intended outside local development.
'''
if __name__ == '__main__':
    app.run(host='0.0.0.0',port=80)
| StarcoderdataPython |
5195458 | import pandas as pd
import numpy as np
import seaborn as sb
import networkx as nx
import os
# Base directory for data files (empty string = current working directory).
data_dir = ""
'''
sample usage for loading graph.npy:
from Graph_Helper import nodelist, delta
'''
'''
sample usage for loading graph.npy and converting the sparse arrays to numpy:
from Graph_Helper import nodelist, delta, convert_delta_to_np as convert
delta_as_numpy_arrays = convert(delta)
'''
# Raw CSV inputs kept for reference; uncomment as needed.
# df = pd.read_csv(data_dir + 'movie_industry.csv', encoding="windows-1252")
# df2 = pd.read_csv(data_dir + 'the_oscar_award.csv')
# genome_scores = pd.read_csv(data_dir + 'genome-scores.csv')
# genome_tags = pd.read_csv(data_dir + 'genome-tags.csv')
# ratings = pd.read_csv(data_dir + 'ratings.csv')
# tags = pd.read_csv(data_dir + 'tags.csv')
# movies = pd.read_csv(data_dir + 'movies.csv')
class Graph:
    """Bipartite director–actor graph for one year of movie data.

    Nodes are ``(name, is_director)`` tuples, so a person appearing as both
    director and actor yields two distinct nodes. Edges (weight 1) connect a
    director to the stars of their films released in *year*.
    """

    def __init__(self,
                 dta,
                 year,
                 director_column="director",
                 actor_column="star"):
        self.G = nx.Graph()
        directors = set(dta[director_column].values)
        actors = set(dta[actor_column].values)
        # Note: ALL directors/actors in `dta` become nodes, even if they have
        # no edge in this particular year — keeps the node set year-invariant.
        for director in directors:
            self.G.add_node((director, True))
        for actor in actors:
            self.G.add_node((actor, False))
        for director in directors:
            rows = dta[(dta["year"] == year) & (dta[director_column] == director)]
            for index in rows.index.values:
                self.G.add_edge((director, True), (rows.loc[index, actor_column], False),
                                weight=1)

    def to_numpy(self):
        """Dense adjacency matrix (via the sparse matrix's .A attribute)."""
        return nx.adjacency_matrix(self.G, nodelist=self.G.nodes()).A

    def to_sparse(self):
        """Sparse (scipy) adjacency matrix in the graph's node order."""
        return nx.adjacency_matrix(self.G, nodelist=self.G.nodes())

    def nodes(self):
        """The graph's node view (defines the adjacency-matrix ordering)."""
        return self.G.nodes()
def get_timesteps(dataset):
    """Build one adjacency matrix per year (first to last year present in
    *dataset*, assumed year-sorted) and return ``np.array([nodelist,
    matrices])`` — an object array pairing the node order with the stack."""
    years = dataset["year"].values
    first_year, last_year = years[0], years[-1]
    node_order = Graph(dataset, first_year).nodes()
    adjacency = np.array([Graph(dataset, year).to_numpy()
                          for year in range(first_year, last_year + 1)])
    return np.array([node_order, adjacency])
def save_timesteps(dataset, file_name):
    """Compute the per-year graph snapshots for *dataset* and save them as a
    .npy file under the module-level ``data_dir``."""
    timesteps = get_timesteps(dataset)
    np.save(data_dir + file_name, timesteps)
def convert_delta_to_np(delta):
    """Densify every sparse matrix in *delta* (via its ``.A`` attribute) and
    stack the results into a single numpy array."""
    dense_matrices = [sparse_matrix.A for sparse_matrix in delta]
    return np.array(dense_matrices)
def load_timesteps(file_name):
    """Load a previously saved [nodelist, delta] pair; allow_pickle is needed
    because the saved value is an object array."""
    loaded = np.load(file_name, allow_pickle=True)
    return loaded
# Import-time side effect: load the precomputed graph data shipped with the
# repo — `nodelist` is the ordered node list, `delta` the per-year adjacency data.
nodelist, delta = load_timesteps(os.path.join(os.pardir, "data", "graph.npy"))
| StarcoderdataPython |
9673718 | import sys
sys.path.append('../..')
import torch
import logging
from typing import Optional
from dataclasses import dataclass, field
from transformers.file_utils import cached_property, torch_required
from seqlbtoolkit.bert_ner.config import BertBaseConfig
logger = logging.getLogger(__name__)
@dataclass
class BertArguments:
    """
    Arguments regarding the training of Neural hidden Markov Model

    NOTE(review): the docstring above (kept from the original) mentions an
    HMM, but the fields below configure BERT/EM training — confirm intent.
    """
    # --- data files ---
    train_file: Optional[str] = field(
        default='', metadata={'help': 'training data name'}
    )
    valid_file: Optional[str] = field(
        default='', metadata={'help': 'development data name'}
    )
    test_file: Optional[str] = field(
        default='', metadata={'help': 'test data name'}
    )
    output_dir: Optional[str] = field(
        default='.',
        metadata={"help": "The output folder where the model predictions and checkpoints will be written."},
    )
    # --- optimisation schedule ---
    num_em_train_epochs: Optional[int] = field(
        default=15, metadata={'help': 'number of denoising model training epochs'}
    )
    num_em_valid_tolerance: Optional[int] = field(
        default=10, metadata={"help": "How many tolerance epochs before quiting training"}
    )
    learning_rate: Optional[float] = field(
        default=5e-5, metadata={'help': 'learning rate'}
    )
    # annotation fixed from Optional[int]: the default (0.2) is a float ratio
    warmup_ratio: Optional[float] = field(
        default=0.2, metadata={'help': 'ratio of warmup steps for learning rate scheduler'}
    )
    lr_scheduler_type: Optional[str] = field(
        default="linear", metadata={"help": "Default as `linear`. See the documentation of "
                                            "`transformers.SchedulerType` for all possible values"},
    )
    weight_decay: Optional[float] = field(
        default=0.01, metadata={'help': 'strength of weight decay'}
    )
    em_batch_size: Optional[int] = field(
        default=128, metadata={'help': 'denoising model training batch size'}
    )
    max_length: Optional[int] = field(
        default=512, metadata={'help': 'maximum sequence length'}
    )
    # --- model / environment ---
    bert_model_name_or_path: Optional[str] = field(
        default='', metadata={"help": "Path to pretrained BERT model or model identifier from huggingface.co/models; "
                                      "Used to construct BERT embeddings if not exist"}
    )
    no_cuda: Optional[bool] = field(default=False, metadata={"help": "Disable CUDA even when it is available"})
    log_dir: Optional[str] = field(
        default=None,
        metadata={"help": "the directory of the log file. Set to '' to disable logging"}
    )
    seed: Optional[int] = field(
        default=42, metadata={"help": "Random seed that will be set at the beginning of training."}
    )
    batch_gradient_descent: Optional[bool] = field(
        default=False, metadata={'help': 'whether use batch instead of mini-batch for gradient descent.'}
    )
    debug_mode: Optional[bool] = field(
        default=False, metadata={"help": "Debugging mode with fewer training data"}
    )

    # The following three functions are copied from transformers.training_args
    @cached_property
    @torch_required
    def _setup_devices(self) -> "torch.device":
        """Choose CPU or a single CUDA device and record the GPU count."""
        if self.no_cuda:
            device = torch.device("cpu")
            self._n_gpu = 0
        else:
            device = torch.device("cuda")
            self._n_gpu = 1
        return device

    @property
    @torch_required
    def device(self) -> "torch.device":
        """
        The device used by this process.
        """
        return self._setup_devices

    @property
    @torch_required
    def n_gpu(self) -> "int":
        """
        The number of GPUs used by this process.
        Note:
            This will only be greater than one when you have multiple GPUs available but are not using distributed
            training. For distributed training, it will always be 1.
        """
        # Make sure `self._n_gpu` is properly setup.
        _ = self._setup_devices
        return self._n_gpu
@dataclass
class BertConfig(BertArguments, BertBaseConfig):
    """Combined configuration: the training arguments above plus the base
    BERT-NER configuration from seqlbtoolkit."""
    pass
| StarcoderdataPython |
8164321 | from . import average_color, brightest_n_pixels, nucleus_detection
# Registry mapping each job-type name (the module's basename relative to this
# package, e.g. "average_color") to the module implementing it.
available_job_types = {
    module.__name__.replace(f"{module.__package__}.", ''): module
    for module in [
        average_color,
        brightest_n_pixels,
        nucleus_detection,
    ]
}
| StarcoderdataPython |
1600650 | <filename>Imu.py<gh_stars>1-10
import ctypes
class Imu():
    """Toy IMU sample holder with deterministic pseudo-"random" fill values."""

    def __init__(self):
        # Three-axis accelerometer and gyroscope readings.
        self.accel = []
        self.gyro = []

    def getRandom(self, factor):
        """Append one (factor+1)-scaled triple to each sensor list; returns
        self so calls can be chained."""
        scale = factor + 1
        self.accel.extend([scale * 11, scale * 12, scale * 13])
        self.gyro.extend([scale * 21, scale * 22, scale * 23])
        return self

    def __str__(self):
        return f"""
        Accelerometers = {self.accel}
        Gyroscopes = {self.gyro}"""
if __name__ == "__main__":
    # Quick demo: populate one IMU sample (factor 1) and print it.
    # Fix: the original's last line carried fused dataset-extraction residue
    # ("| StarcoderdataPython |"), which made it a syntax error.
    imu = Imu().getRandom(1)
    print(imu)
198697 | """
Copyright 2020 Inmanta
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Contact: <EMAIL>
"""
from typing import List, Set
import pytest
from compiler.dataflow.conftest import DataflowTestHelper, get_dataflow_node
from inmanta.ast import MultiException
from inmanta.execute.dataflow import AssignableNode, AssignableNodeReference, AttributeNode, DataflowGraph
from inmanta.execute.dataflow.root_cause import UnsetRootCauseAnalyzer
def get_attribute_node(graph: DataflowGraph, attr: str) -> AttributeNode:
node_ref: AssignableNodeReference = get_dataflow_node(graph, attr)
node: AssignableNode = next(node_ref.nodes())
assert isinstance(node, AttributeNode)
return node
@pytest.mark.parametrize("attribute_equivalence", [True, False])
@pytest.mark.parametrize("variable_equivalence", [True, False])
def test_dataflow_model_root_cause(
dataflow_test_helper: DataflowTestHelper, attribute_equivalence: bool, variable_equivalence: bool
) -> None:
dataflow_test_helper.compile(
"""
entity C:
number i
end
entity V:
number n
number i
end
index V(i)
entity U:
end
U.v [1] -- V
entity X:
number n
end
implement C using std::none
implement V using std::none
implement U using std::none
implement X using std::none
c = C()
%s
u = U()
x = X()
u.v = V(n = 42, i = c.i)
x.n = u.v.n
%s
"""
% (
"""
c.i = cc.i
cc = C(i = c.i)
"""
if attribute_equivalence
else "",
"""
c.i = i
i = c.i
"""
if variable_equivalence
else "",
),
MultiException,
)
graph: DataflowGraph = dataflow_test_helper.get_graph()
x_n: AttributeNode = get_attribute_node(graph, "x.n")
c_i: AttributeNode = get_attribute_node(graph, "c.i")
u_v: AttributeNode = get_attribute_node(graph, "u.v")
attributes: List[AttributeNode] = [x_n, c_i, u_v]
root_causes: Set[AttributeNode] = {c_i}
if attribute_equivalence:
cc_i: AttributeNode = get_attribute_node(graph, "cc.i")
attributes.append(cc_i)
root_causes.add(cc_i)
assert UnsetRootCauseAnalyzer(attributes).root_causes() == root_causes
def test_cyclic_model_a(dataflow_test_helper: DataflowTestHelper):
dataflow_test_helper.compile(
"""
entity A:
number n
end
implement A using std::none
x = A()
y = A()
z = A()
x.n = y.n
y.n = x.n
x.n = z.n
""",
MultiException,
)
graph: DataflowGraph = dataflow_test_helper.get_graph()
x_n: AttributeNode = get_attribute_node(graph, "x.n")
y_n: AttributeNode = get_attribute_node(graph, "y.n")
z_n: AttributeNode = get_attribute_node(graph, "z.n")
attributes: List[AttributeNode] = [x_n, y_n, z_n]
root_causes: Set[AttributeNode] = {z_n}
assert UnsetRootCauseAnalyzer(attributes).root_causes() == root_causes
def test_cyclic_model_b(dataflow_test_helper: DataflowTestHelper):
"""
This model has an equivalence that
1. is to be ignored as a root
2. cause two things (that now become roots)
"""
dataflow_test_helper.compile(
"""
entity A:
number n
end
implement A using std::none
x = A()
y = A()
y.n = n
x.n = n
n = m
m = n
""",
MultiException,
)
graph: DataflowGraph = dataflow_test_helper.get_graph()
x_n: AttributeNode = get_attribute_node(graph, "x.n")
y_n: AttributeNode = get_attribute_node(graph, "y.n")
attributes: List[AttributeNode] = [x_n, y_n]
root_causes: Set[AttributeNode] = {x_n, y_n}
assert UnsetRootCauseAnalyzer(attributes).root_causes() == root_causes
| StarcoderdataPython |
12821130 | import grok.tests.grokker.priority
# Trivial subclasses of the grokker-priority test base classes; they add no
# behaviour — presumably they exist so the test suite can check which grokker
# (by priority) picks up subclasses of Alpha/Beta/Gamma.
class AlphaSub(grok.tests.grokker.priority.Alpha):
    pass

class BetaSub(grok.tests.grokker.priority.Beta):
    pass

class GammaSub(grok.tests.grokker.priority.Gamma):
    pass
| StarcoderdataPython |
3317767 | <gh_stars>1-10
"""
project metadata
"""
try:
    import importlib.metadata as importlib_metadata
except ModuleNotFoundError:
    # Python < 3.8: fall back to the importlib_metadata backport package.
    import importlib_metadata

# Resolve this package's version from its installed distribution metadata.
__version__ = importlib_metadata.version(__name__)
| StarcoderdataPython |
5082071 | <gh_stars>1-10
"""
Orka Discord Bot
Copyright (c) 2017 <NAME>
"""
###########
# IMPORTS #
###########
import discord
import random
import markovify
from os import path, makedirs
from scripts import *
###################
# OTHER FUNCTIONS #
###################
def add_msg(channel, text, mode='a+'):
    """
    Append a message line to the channel's log file (channels/<channel>.txt).

    mode defaults to 'a+' (append); pass 'w+' to truncate the log first.
    Fix: the original re-formatted the already-string `mode` argument via
    '{0}'.format(mode), a pointless indirection now removed.
    """
    with open('channels/{0}.txt'.format(channel), mode, encoding="utf_8") as file:
        file.write('{0}\n'.format(text))
def make_markov_model(channel):
    """Build a Markov model from the channel's log file and publish it as
    the module-level ``model`` global (read by the ``!sentence`` command).
    """
    # BUG FIX: the ``global model`` declaration must come before the first
    # assignment to ``model``; the original declared it after the assignment,
    # which is a SyntaxError ("name assigned to before global declaration").
    global model
    with open('channels/{0}.txt'.format(channel), 'r', encoding="utf_8") as file:
        model = markovify.NewlineText(file)
#######
# BOT #
#######
class Orka(discord.Client):
    """Discord bot: mirrors channel messages to per-channel log files,
    answers simple chat commands, and can build/serve sentences from a
    Markov model of the channel history.

    NOTE(review): the methods reference the module-level ``client`` and
    ``read`` globals (defined after this class) instead of ``self`` — both
    name the same instance, but confirm this is intentional.
    """
    async def on_ready(self):
        # Startup hook: set presence, collect readable text channels, and
        # seed each channel's log file with up to 1000 recent messages.
        print('Logging in...')
        print('Logged in as {0}; ID #{1}'.format(client.user.name, client.user.id))
        print('Setting status...')
        await client.change_presence(game=discord.Game(name='https://github.com/rivermont/orka'))
        print('Gathering available text channels...')
        for server in client.servers:
            for channel in server.channels:
                if channel.type == discord.ChannelType.text:
                    if channel.permissions_for(server.me).read_messages:
                        print('Read access in: ' + server.name + '/' + channel.name)
                        read.append(channel)
        print('Downloading logs from readable text channels...')
        for channel in read:
            # mode='w+' truncates the log before re-downloading history.
            add_msg(channel, '', mode='w+')
            async for message in client.logs_from(channel, limit=1000):
                add_msg(channel, message.content, mode='a')
        print('Ready.')
    async def on_member_join(self, member):
        # Greet new members in the hard-coded #general channel.
        general = self.get_server("256600580837998592").get_channel("256600580837998592")
        await client.send_message(
            general,
            'Welcome, @{0}! Please familiarize yourself with our #rules, then go wild!'.format(member.name)
        )
    async def on_message(self, message):
        # Log every incoming message, then dispatch on the command prefix.
        print('Received message..')
        content = message.content
        channel = message.channel
        add_msg(channel, content)
        # General commands
        if message.content.startswith('!flip'):
            # Flips a coin on two choices. Defaults to Heads or Tails.
            print('Flipping coin...')
            if len(content.split()) == 1:
                choice_ = random.choice(['Heads', 'Tails'])
                await client.send_message(channel, choice_)
            elif len(content.split()) == 2:
                await client.send_message(channel, 'Only one option supplied. Must be two or none.')
            elif len(content.split()) == 3:
                options = content.split()[1:]
                flip = random.choice(options)
                await client.send_message(channel, flip)
            elif len(content.split()) > 3:
                await client.send_message(channel, 'Too many options supplied. Must be two or none.')
        elif content.startswith('!roll'):
            # Rolls a dice. Defaults to a d6.
            print('Rolling die...')
            if len(content.split()) == 1:
                roll = random.randint(1, 6)
                await client.send_message(channel, 'You rolled a {0}.'.format(roll))
            # NOTE(review): second branch is a separate ``if`` (not elif);
            # harmless since the message length can only match one branch.
            if len(content.split()) == 2:
                input_ = content.split()[1]
                roll = random.randint(1, int(input_))
                await client.send_message(channel, 'You rolled a {0}.'.format(roll))
        elif content.startswith('!convert'):
            # Converts Kelvin/Celsius/Fahrenheit
            input_ = content.split()
            try:
                amount = int(input_[1][:-1])
                unit_from = input_[1][-1]
                unit_to = input_[2]
                result = convert(amount, unit_from, unit_to)
                if result == "Error":
                    # Reuse IndexError so malformed input and failed
                    # conversion share one error path.
                    raise IndexError
                else:
                    await client.send_message(channel, 'Converted {0}{1} to {2}{3}.'.format(amount, unit_from, result, unit_to))
            except IndexError:
                print('Invalid input.')
                await client.send_message(channel, 'Invalid input. Must be in format `!convert 23U U`.')
        # Moderation commands
        elif content.startswith('@stop'):
            print('Stopping bot...')
            await client.logout()
        elif content.startswith('@logs'):
            # Append recent channel history to the log file.
            async for m in client.logs_from(channel):
                add_msg(channel, m.content)
        elif content.startswith('@generate'):
            # Rebuild the module-level Markov ``model`` from this channel's log.
            print('Generating markov model for channel {0}'.format(channel))
            make_markov_model(channel)
            await client.send_message(channel, 'Successfully generated markov model.')
        elif content.startswith('!sentence'):
            # Generates a single line from the current markov model
            # Under moderation b/c that's where @generate is
            sentence = ''
            try:
                sentence = model.make_sentence(tries=1000)
            except NameError:
                # ``model`` global not created yet (no @generate run).
                print('No available markov model.')
                await client.send_message(channel, 'No available markov model.')
            if not bool(sentence):
                await client.send_message(channel, 'No sentence generated.')
            else:
                await client.send_message(channel, sentence)
        elif content.startswith('@save'):
            # Persist the current Markov model to disk as JSON.
            with open('model.json', 'w+') as f:
                f.write(model.to_json())
        elif content.startswith('@test'):
            # Generic testing function
            pass
#######
# RUN #
#######
# Module-level bot instance; the Orka methods reference this global.
client = Orka()
# Channels the bot can read; populated in Orka.on_ready().
read = []
if __name__ == '__main__':
    # Windows-style relative path for the per-channel log directory.
    if not path.exists('channels\\'):
        makedirs('channels\\')
    # NOTE(review): discord.Client.run() normally takes the bot token;
    # none is supplied here — confirm how credentials are provided.
    client.run()
| StarcoderdataPython |
4896396 | """Tests the figures.backfill module
"""
from __future__ import absolute_import
from datetime import datetime
import pytest
from dateutil.relativedelta import relativedelta
from dateutil.rrule import rrule, MONTHLY
from six.moves import range
from six.moves import zip
from django.db import connection
from django.utils.timezone import utc
from figures.backfill import backfill_monthly_metrics_for_site
from figures.models import SiteMonthlyMetrics
from tests.factories import (
CourseOverviewFactory,
OrganizationFactory,
OrganizationCourseFactory,
StudentModuleFactory,
SiteFactory)
from tests.helpers import organizations_support_sites
if organizations_support_sites():
from tests.factories import UserOrganizationMappingFactory
@pytest.fixture
@pytest.mark.django_db
def backfill_test_data(db):
    """Create one course with StudentModule records spread over the last
    ``months_back`` months, plus the matching site/organization wiring.

    Returns a dict with the created objects and a ``count_check`` list of
    per-month expected counts for assertions.

    TODO: make counts different for each course per month
    """
    months_back = 6
    # Expected records per month: 10, 11, ... — NOTE(review): this list has
    # months_back+1 entries but only months_back months are iterated below,
    # so the last entry is unused; confirm intended.
    sm_per_month = [10+i for i in range(months_back+1)]
    site = SiteFactory()
    now = datetime.utcnow().replace(tzinfo=utc)
    first_month = now - relativedelta(months=months_back)
    last_month = now - relativedelta(months=1)
    course_overviews = [CourseOverviewFactory() for i in range(1)]
    count_check = []
    sm = []
    for i, dt in enumerate(rrule(freq=MONTHLY, dtstart=first_month, until=last_month)):
        for co in course_overviews:
            sm_count = sm_per_month[i]
            # The comprehension's ``i`` shadows the outer loop index; it has
            # its own scope in Python 3, so the outer ``i`` is unaffected.
            month_sm = [StudentModuleFactory(course_id=co.id,
                                             created=dt,
                                             modified=dt) for i in range(sm_count)]
            sm += month_sm
            count_check.append(dict(month=dt, month_sm=month_sm, sm_count=sm_count))
    if organizations_support_sites():
        # Multi-site mode: link the org to the site, courses and users.
        org = OrganizationFactory(sites=[site])
        for co in course_overviews:
            OrganizationCourseFactory(organization=org, course_id=str(co.id))
        for rec in sm:
            UserOrganizationMappingFactory(user=rec.student,
                                           organization=org)
    else:
        org = OrganizationFactory()
    return dict(
        site=site,
        organization=org,
        course_overview=course_overviews,
        student_modules=sm,
        first_month=first_month,
        now=now,
        months_back=months_back,
        sm_per_month=sm_per_month,
        count_check=count_check
    )
def patched__get_fill_month_raw_sql_for_month(site_ids, month_for):
    """Get SQL statement for fill_month's use_raw_sql option that works with
    SQLite in test (uses strftime() for month/year extraction).

    NOTE(review): implicitly returns ``None`` for any non-sqlite backend —
    confirm that callers tolerate that.
    """
    if (connection.vendor == 'sqlite'):
        # Zero-pad the month so it compares equal to strftime('%m', ...).
        month = str(month_for.month).zfill(2)
        year = month_for.year
        return """\
SELECT COUNT(DISTINCT student_id) from courseware_studentmodule
where id in {}
and strftime('%m', datetime(modified)) = '{}'
and strftime('%Y', datetime(modified)) = '{}'
""".format(site_ids, month, year)
@pytest.mark.freeze_time('2019-09-01 12:00:00')
@pytest.mark.parametrize('use_raw_sql', (True, False))
def test_backfill_monthly_metrics_for_site(monkeypatch, backfill_test_data, use_raw_sql):
    """Simple coverage and data validation check for the function under test

    Example backfilled results

    [(<SiteMonthlyMetrics: id:1, month_for:2019-09-01, site:site-0.example.com>,
      True),
     (<SiteMonthlyMetrics: id:2, month_for:2019-10-01, site:site-0.example.com>,
      True),
     (<SiteMonthlyMetrics: id:3, month_for:2019-11-01, site:site-0.example.com>,
      True),
     (<SiteMonthlyMetrics: id:4, month_for:2019-12-01, site:site-0.example.com>,
      True),
     (<SiteMonthlyMetrics: id:5, month_for:2020-01-01, site:site-0.example.com>,
      True),
     (<SiteMonthlyMetrics: id:6, month_for:2020-02-01, site:site-0.example.com>,
      True),
     (<SiteMonthlyMetrics: id:7, month_for:2020-03-01, site:site-0.example.com>,
      True),
     (<SiteMonthlyMetrics: id:8, month_for:2020-04-01, site:site-0.example.com>,
      True)]

    TODO: Update test data and test to have the created and modified dates different
    and make sure that `modified` dates are used in the production code and not
    `created` dates
    """
    # Replace the production raw-SQL builder with one that works on SQLite.
    monkeypatch.setattr(
        "figures.pipeline.site_monthly_metrics._get_fill_month_raw_sql_for_month",
        patched__get_fill_month_raw_sql_for_month
    )
    site = backfill_test_data['site']
    count_check = backfill_test_data['count_check']
    # Sanity: start from an empty metrics table so counts below are exact.
    assert not SiteMonthlyMetrics.objects.count()
    backfilled = backfill_monthly_metrics_for_site(site=site, overwrite=True, use_raw_sql=use_raw_sql)
    assert len(backfilled) == backfill_test_data['months_back']
    assert len(backfilled) == SiteMonthlyMetrics.objects.count()
    assert len(backfilled) == len(count_check)
    # Backfilled records come back in month order, matching count_check.
    for rec, check_rec in zip(backfilled, count_check):
        assert rec['obj'].active_user_count == check_rec['sm_count']
        assert rec['obj'].month_for.year == check_rec['month'].year
        assert rec['obj'].month_for.month == check_rec['month'].month
| StarcoderdataPython |
4948763 | from itertools import zip_longest
from elecsim.role.plants.costs.plant_cost_calculation import PlantCostCalculations
"""
File name: non_fuel_lcoe_calculation
Date created: 18/12/2018
Feature: # Calculates the costs of non fuel plants such as LCOE and marginal cost.
"""
__author__ = "<NAME>"
__copyright__ = "Copyright 2018, <NAME>"
__license__ = "MIT"
__email__ = "<EMAIL>"
class NonFuelCostCalculation(PlantCostCalculations):
    """Cost calculations (LCOE, short-run marginal cost) for power plants
    that burn no fuel: marginal cost reduces to variable O&M, and LCOE is
    discounted lifetime cost divided by discounted lifetime generation.
    """
    def __init__(self, construction_year, capacity_mw, average_load_factor, pre_dev_period, construction_period, operating_period, pre_dev_spend_years, construction_spend_years, pre_dev_cost_per_mw, construction_cost_per_mw, infrastructure, fixed_o_and_m_per_mw, variable_o_and_m_per_mwh, insurance_cost_per_mw, connection_cost_per_mw, efficiency):
        """
        Power plant of plant_type that does not use plant_type.
        """
        super().__init__(capacity_mw, construction_year, average_load_factor, pre_dev_period, construction_period, operating_period, pre_dev_spend_years, construction_spend_years, pre_dev_cost_per_mw, construction_cost_per_mw, infrastructure, fixed_o_and_m_per_mw, variable_o_and_m_per_mwh, insurance_cost_per_mw, connection_cost_per_mw)
        # BUG FIX: ``efficiency`` was accepted but silently discarded; keep
        # it on the instance so callers/subclasses can read it.
        self.efficiency = efficiency
    def calculate_lcoe(self, discount_rate):
        """
        Calculate the levelised cost of electricity for this plant instance.

        :param discount_rate: rate used to discount future costs and output.
        :return: LCOE value for power plant
        """
        elec_gen, total_costs = self.calculate_total_costs()
        # Discount both series, then divide total cost by total generation.
        discounted_costs_sum = sum(self._discount_data(total_costs, discount_rate))
        discounted_electricity_sum = sum(self._discount_data(elec_gen, discount_rate))
        return discounted_costs_sum / discounted_electricity_sum
    def calculate_total_costs(self):
        """Return (electricity generated, total costs) series for the plant."""
        capex = self._capex()
        opex = self._opex_cost()
        elec_gen = self._electricity_generated()
        total_costs = self._total_costs(capex, opex)
        return elec_gen, total_costs
    def calculate_short_run_marginal_cost(self, model):
        """Short-run marginal cost: only variable O&M (no fuel, no carbon).

        ``model`` is unused here but kept for interface compatibility with
        fuel-based plant cost classes.
        """
        return self.variable_o_and_m_per_mwh
    def _total_costs(self, capex, opex):
        """
        Element-wise sum of capital and operating expenses, padding the
        shorter series with zeros.

        :return: Total costs over lifetime of plant
        """
        return [x + y for x, y in zip_longest(capex, opex, fillvalue=0)]
| StarcoderdataPython |
3206089 | """
combine_expression_files.py
Combine expression files
"""
import sys
sys.path.append('./volumetric_analysis')
import argparse
from collections import defaultdict
import aux
if __name__=="__main__":
    parser = argparse.ArgumentParser(description=__doc__,
                formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('exp_file',
                        action = 'store',
                        help = 'Path to file with list of how to combine files')
    parser.add_argument('duration',
                        action = 'store',
                        help = 'total time covered')
    # NOTE(review): "Path ot output file" is a typo in user-visible help
    # text; left unchanged here because it is runtime behavior.
    parser.add_argument('fout',
                        action = 'store',
                        help = 'Path ot output file')
    params = parser.parse_args()
    # exp_file rows: (expression_file, weight) pairs.
    exp = aux.read.into_list2(params.exp_file)
    dur = float(params.duration)
    # data[gene][cell] accumulates the duration-weighted expression sum.
    data = defaultdict(lambda: defaultdict(lambda:0))
    for (f,weight) in exp:
        # Normalize each file's weight by the total time covered.
        c = float(weight) / dur
        for [gene,cell,w] in aux.read.into_list2(f):
            data[gene][cell] += c*float(w)
    # Emit "gene,cell,value" rows sorted by gene then cell.
    with open(params.fout,'w') as fout:
        for g in sorted(data):
            for c in sorted(data[g]):
                tmp = ','.join([g,c,str(data[g][c])+'\n'])
                fout.write(tmp)
| StarcoderdataPython |
4802842 | <reponame>step21/videohash
from shutil import which
from subprocess import Popen, PIPE
from .utils import does_path_exists, get_list_of_all_files_in_dir
from .exceptions import DownloadOutPutDirDoesNotExist, DownloadFailed
# Python module to download the video from the input URL.
# Uses yt-dlp to download the video.
class Download:
    """
    Class that downloads the video prior to frames extraction.

    Tries to download the lowest quality video possible.
    Uses yt-dlp to download the videos.
    """

    def __init__(
        self,
        url: str,
        output_dir: str,
        worst: bool = True,
    ) -> None:
        """
        :param url: The URL of the video. The video will be
                    downloaded from this url. Must be a string.

        :param output_dir: The directory where the downloaded video will be stored.
                           Must be a string and path must be absolute.

        :param worst: The quality of video downloaded by yt-dlp.
                      True for worst quality and False for the default settings
                      of the downloader. Default value for worst is True.

        :return: None

        :rtype: NoneType
        """
        self.url = url
        self.output_dir = output_dir
        self.worst = worst

        if not does_path_exists(self.output_dir):
            raise DownloadOutPutDirDoesNotExist(
                f"No directory found at '{self.output_dir}' for storing the downloaded video. Can not download the video."
            )

        self.yt_dlp_path = str(which("yt-dlp"))

        self.download_video()

    def download_video(self) -> None:
        """Download the video from URL.

        :return: None

        :rtype: NoneType
        """
        # SECURITY FIX: the original interpolated the (untrusted) URL into a
        # shell string and ran it with shell=True, which allows command
        # injection via a crafted URL. Passing an argument list with the
        # default shell=False invokes yt-dlp with identical arguments safely.
        command = [self.yt_dlp_path]
        if self.worst:
            command.extend(["-f", "worst"])
        # Same output template the original built by string concatenation
        # (output_dir is expected to end with a path separator).
        command.extend([self.url, "-o", self.output_dir + "video_file.%(ext)s"])

        process = Popen(command, stdout=PIPE, stderr=PIPE)
        output, error = process.communicate()
        yt_dlp_output = output.decode()
        yt_dlp_error = error.decode()

        if len(get_list_of_all_files_in_dir(self.output_dir)) == 0:
            raise DownloadFailed(
                f"'{self.yt_dlp_path}' failed to download the video at"
                + f" '{self.url}'.\n{yt_dlp_output}\n{yt_dlp_error}"
            )
| StarcoderdataPython |
5100987 |
import os
import angr
from nose.tools import assert_true
test_location = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', 'binaries', 'tests')
def test_thumb_firmware_cfg():
    """End-to-end CFG recovery check on an ARM Thumb firmware sample."""
    # Test an ARM firmware sample.
    #
    # This tests CFG, but also the Gym (the ThumbSpotter, etc)
    # Also requires proper relocs support, or You're Gonna Have a Bad Time(tm)
    # In short, a very comprehensive high level test
    path = os.path.join(test_location, "armel", "i2c_master_read-nucleol152re.elf")
    p = angr.Project(path, auto_load_libs=False)
    # This is the canonical way to carve up a nasty firmware thing.
    cfg = p.analyses.CFGFast(resolve_indirect_jumps=True, force_complete_scan=False, normalize=True)
    # vfprintf should return; this function has a weird C++ thing that gets compiled as a tail-call.
    # The function itself must return, and _NOT_ contain its callee.
    vfprintf = cfg.kb.functions[p.loader.find_symbol('vfprintf').rebased_addr]
    assert_true(vfprintf.returning)
    assert_true(len(list(vfprintf.blocks)) == 1)
    # The function should have one "transition"
    block = list(vfprintf.endpoints_with_type['transition'])[0]
    assert_true(len(block.successors()) == 1)
    succ = list(block.successors())[0]
    # The tail-called target must be resolved to _vfprintf_r at this address.
    assert_true(succ.addr == 0x080081dd)
    f2 = p.kb.functions[succ.addr]
    assert_true(f2.name == '_vfprintf_r')
    assert_true(f2.returning)
if __name__ == "__main__":
    # Allow running this test module directly, outside of a test runner.
    test_thumb_firmware_cfg()
| StarcoderdataPython |
6524475 | #!C:\Users\denis\AppData\Local\Programs\Python\Python36\python
import psycopg2
from datetime import date
class DataBaseInteraction():
    """Thin PostgreSQL helper for the ProjectManagement schema.

    Connection settings are hard-coded on the instance; each query opens
    and closes its own connection.
    """
    def __init__(self):
        self.user = 'postgres'
        # NOTE(review): the checked-in password was redacted upstream; a
        # real credential must be supplied for this class to work.
        self.password = '<PASSWORD>'
        self.host = 'localhost'
        self.port = '5432'
        self.database = 'ProjectManagement'
        self.today = date.today()

    def dbConnect(self, query):
        """Execute *query*: return all rows for SELECT, commit for INSERT.

        Bug fixes vs. the original:
        * ``password=<PASSWORD>`` was an unquoted placeholder (SyntaxError);
          the instance attribute is used instead.
        * the passed ``query`` parameter is now used instead of ``self.query``
          (callers always passed ``self.query``, so behavior is unchanged).
        * ``connection``/``cursor`` are pre-initialized and guarded in
          ``finally`` so a failed connect no longer raises UnboundLocalError.
        """
        connection = None
        cursor = None
        try:
            connection = psycopg2.connect(user=self.user,
                                          password=self.password,
                                          host=self.host,
                                          port=self.port,
                                          database=self.database)
            cursor = connection.cursor()
            if query.startswith('SELECT'):
                cursor.execute(query)
                data = cursor.fetchall()
                return data
            if query.startswith('INSERT'):
                cursor.execute(query)
                connection.commit()
        except (Exception, psycopg2.Error) as error:
            print("Error while connecting to PostgreSQL:", error)
        finally:
            if cursor:
                cursor.close()
            if connection:
                connection.close()

    def selectFrom(self, table):
        """Print all rows of *table* ('projects' or 'tasks') as a crude table."""
        self.query = 'SELECT * FROM %s' % table
        data = self.dbConnect(self.query)
        if table == 'projects':
            # Print the header once (the original re-printed it on every row).
            print('Project Code' + ' ' + 'Project Name' + ' ' + 'Description'\
                + ' ' + 'Project Manager' + ' ' + 'Start Date' + ' ' + 'Count Task')
            print('-----------------------------------------------------------------------------------------------\n')
            for d in data:
                print(d[0], '\t\t', d[1], '\t\t ', d[2], '\t\t ',
                      d[3], '\t\t ', d[4], ' ', d[5])
        if table == 'tasks':
            print('Project Code' + ' ' + 'Task Code' + ' ' + 'Task Name' + ' ' + 'Description'\
                + ' ' + 'Project Performer' + ' ' + 'Start Date' + ' ' + 'Time Spent'\
                + ' ' + 'Estimated Closed' + ' ' + 'Used Software' + ' ' + 'Status')
            print('-----------------------------------------------------------------------------------------------\n')
            for d in data:
                print(d[0], '\t\t', d[1], '\t\t ', d[2], '\t\t ',
                      d[3], '\t\t ', d[4], ' ', d[5],
                      d[6], '\t\t ', d[7], ' ', d[8], '\t\t ', d[9])

    def projectInsert(self, table, pname, desc, mangPerf):
        """Insert a project row.

        NOTE(review): values are interpolated straight into the SQL string,
        which is vulnerable to SQL injection; switch to psycopg2
        parameterized queries. The hard-coded ProjectCode of 1 also looks
        suspicious — confirm intended.
        """
        self.query = 'INSERT INTO {0}("ProjectCode", "Project Name", "Description", "ManagerId", "Start Date", "Count Task")\
            VALUES(1, \'{1}\', \'{2}\', \'{3}\', \'{4}\', (SELECT COUNT(\'tasks.TaskCode\') FROM tasks))'.format(table, pname, desc, mangPerf, self.today)
        self.dbConnect(self.query)

    def taskInsert(self, table, pcode, tname, desc, mangPerf, estClosed, usSoft, statusId):
        """Insert a task row (same SQL-injection caveat as projectInsert)."""
        self.query = 'INSERT INTO {0} VALUES(\'{1}\', 1, \'{2}\', \'{3}\', \'{4}\',\
            \'{5}\', \'100\', \'{6}\', \'{7}\', \'{8}\')'.format(table, pcode, tname, desc,
                                                                 mangPerf, self.today, estClosed, usSoft, statusId)
        self.dbConnect(self.query)

    def changeTo(self):
        # TODO: UPDATE support not implemented yet.
        pass

    def deleteFrom(self):
        # TODO: DELETE support not implemented yet.
        pass
1650529 | <filename>setup.py<gh_stars>1-10
from setuptools import setup, find_packages
import codecs
import pathlib
import re
here = pathlib.Path(__file__).parent.resolve()
def read(*parts):
    """Return the UTF-8-decoded contents of the file at ``here``/*parts*."""
    target = pathlib.PurePath(here, *parts)
    with codecs.open(target, "rb", "utf-8") as handle:
        return handle.read()
def find_version(*file_paths):
    """Read the file at *file_paths* and extract its ``__version__`` string.

    Raises RuntimeError when no version assignment is found.
    """
    contents = read(*file_paths)
    match = re.search(
        r"^__version__ = ['\"]([^'\"]*)['\"]", contents, re.M
    )
    if not match:
        raise RuntimeError("Unable to find version string.")
    return match.group(1)
# Version is parsed out of the package __init__ rather than imported, so
# setup.py works before the package's dependencies are installed.
meta_path = pathlib.PurePath('src', 'nspyre', '__init__.py')
version = find_version(meta_path)

# The PyPI long description is the project README, rendered as Markdown.
long_description = (here / 'README.md').read_text(encoding='utf-8')

setup(
    name='nspyre',
    version=version,
    license='BSD 3-Clause License',
    description='Networked Scientific Python Research Environment',
    long_description=long_description,
    long_description_content_type='text/markdown',
    url='https://github.com/nspyre-org/nspyre',
    author='<NAME>',
    author_email='<EMAIL>',
    maintainer='<NAME>',
    maintainer_email='<EMAIL>',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Framework :: IPython',
        'Framework :: Jupyter',
        'Intended Audience :: Developers',
        'Intended Audience :: Science/Research',
        'License :: OSI Approved :: BSD License',
        'Natural Language :: English',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: POSIX',
        'Operating System :: Unix',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.8',
        'Programming Language :: Python :: 3 :: Only',
        'Programming Language :: Python :: Implementation :: CPython',
        'Topic :: Scientific/Engineering',
        'Topic :: Scientific/Engineering :: Physics',
        'Topic :: Scientific/Engineering :: Visualization',
        'Topic :: Software Development :: Libraries',
        'Topic :: Software Development :: Libraries :: Application Frameworks',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Topic :: Software Development :: User Interfaces',
        'Topic :: System :: Distributed Computing',
        'Topic :: System :: Logging',
    ],
    keywords='nspyre, measurement toolkit, experimentation platform, physics, science, research',
    package_dir={'': 'src'},
    packages=find_packages(where='src'),
    zip_safe=False,
    python_requires='>=3.8, <4',
    install_requires=[
        # SciPy
        'numpy>=1.19.1',
        'scipy>=1.5.2',
        'pandas>=1.1.2',
        # MongoDB
        'pymongo>=3.10.1',
        # Qt
        'pyqt5>=5.12.3',
        'pyqtgraph>=0.11.0',
        'qscintilla>=2.11.2',
        # VISA
        'pyvisa>=1.10.1',
        # Lantz
        'pint>=0.15',
        'pimpmyclass>=0.4.3',
        'lantzdev>=0.5.2',
        # Utilities
        'parse>=1.18.0',
        'tqdm>=4.49.0',
        'rpyc>=4.1.5',
    ],
    extras_require={
        'dev': [
            'pytest>=6.1.2',
            'pytest-cov',
            'psutil>=5.7.3',
        ]
    },
    # NOTE(review): setuptools' (deprecated) keyword is ``tests_require``;
    # ``test_requires`` is silently ignored — confirm whether this is used.
    test_requires=[
        'pytest>=6.1.2',
        'pytest-cov',
        'psutil>=5.7.3',
    ],
    test_suite='tests',
    entry_points={
        'console_scripts': [
            'nspyre=nspyre.gui:main',
            'nspyre-config=nspyre.config.config_cli:main',
            'nspyre-mongodb=nspyre.mongodb:main',
            'nspyre-inserv=nspyre.inserv:main',
        ],
    },
    project_urls={
        'Bug Reports': 'https://github.com/nspyre-org/nspyre/issues',
        'Source': 'https://github.com/nspyre-org/nspyre/',
    },
    include_package_data=True,
    options={'bdist_wheel': {'universal': '1'}},
)
| StarcoderdataPython |
6566459 | <gh_stars>1-10
import pytest
import cv2
import numpy as np
from skimage import img_as_ubyte
from plantcv.plantcv import image_fusion, Spectral_data
def test_image_fusion(test_data):
    """Fuse a 16-bit and an 8-bit image and expect a Spectral_data result."""
    # Read in test data
    # 16-bit image (flag -1 loads the image unchanged, preserving bit depth)
    img1 = cv2.imread(test_data.fmax, -1)
    img2 = cv2.imread(test_data.fmin)
    # 8-bit image
    img2 = img_as_ubyte(img2)
    # One wavelength for the first image, three for the second.
    fused_img = image_fusion(img1, img2, [480.0], [550.0, 640.0, 800.0])
    assert isinstance(fused_img, Spectral_data)
def test_image_fusion_size_diff(test_data):
    """Fusing images with mismatched dimensions must raise RuntimeError."""
    img1 = cv2.imread(test_data.small_bin_img, 0)
    img2 = np.copy(img1)
    # Crop the copy so the two images no longer have the same shape.
    img2 = img2[0:10, 0:10]
    with pytest.raises(RuntimeError):
        _ = image_fusion(img1, img2, [480.0, 550.0, 670.0], [480.0, 550.0, 670.0])
| StarcoderdataPython |
45598 | # -*- coding: utf-8 -*-
from django.db import models
from datetime import date
from django.utils import timezone
from user.models import Person,Customer
from .price_category import PriceCategory
from core.models import Address
from core.mixins import TimeStampedMixin,PartComposMixin,ThumbnailMixin
from core.utils import combine_datetime_pk
#from .relationship import Record
# Create your models here.
class Sku(models.Model):
    """Style number ("款号") shared by merchandise of the same design."""
    sku = models.CharField("款号",max_length = 20)
    description = models.CharField("描述",max_length = 50,blank=True)
    def __str__(self):
        return self.sku
    class Meta:
        verbose_name = "款式"
        verbose_name_plural = verbose_name
class Merchandise( TimeStampedMixin,
                   ThumbnailMixin,
                   PartComposMixin,
                   models.Model):
    """Base model for anything sellable: jewels, loose stones, accessories.

    Mixins provide timestamps (``created``), thumbnails and part
    composition; subclasses (Jewel, Gem, ...) add type-specific fields.
    """
    description = models.CharField("描述",max_length = 50)
    legacy_id = models.CharField("旧条码",max_length = 50,blank=True)
    net_weight = models.FloatField("净重(g)",blank=True)
    def carat(self):
        # 1 carat == 0.2 g, so carats = grams / 0.2.
        return self.net_weight/0.2
    # Admin list-display metadata for the derived carat column.
    carat.short_description = '克拉(Ct)'
    carat.admin_order_field = 'net_weight'
    sku = models.ForeignKey(
        Sku,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        related_name='merchandise',
        verbose_name='款式',
    )
    # Warehouses holding this item, through the MerchandiseDepot join table.
    depots = models.ManyToManyField(
        'Depot',
        through = 'MerchandiseDepot',
        related_name = 'merchandise',
    )
    position = models.CharField("库柜",max_length=20,blank=True)
    price_category = models.ForeignKey(
        PriceCategory,
        on_delete=models.CASCADE,
        verbose_name="价格类别",
    )
    price = models.DecimalField("标价",default = 0,max_digits = 10,decimal_places = 2)
    margin = models.DecimalField("价格浮动",default = 0,max_digits = 10,decimal_places = 2)
    manufacture = models.CharField("产地",max_length=10,blank=True)
    # Transaction/history records, through the MerchandiseRecord join table.
    records = models.ManyToManyField(
        'Record',
        through='MerchandiseRecord',
        related_name='merchandises',
    )
    # Merchandise type codes (jewel / accessory / pearl / diamond / colored gem).
    MT_JEWEL = 'JE'
    MT_ACCESSORY = 'AC'
    MT_PEARL = 'PE'
    MT_DIAMOND = 'DM'
    MT_COLORED_GEM = 'CG'
    MT_OTHER = ''
    MERCHANDISE_TYPE = (
        (MT_JEWEL,'成品'),
        (MT_ACCESSORY,'配件'),
        (MT_PEARL,'裸珠'),
        (MT_DIAMOND,'钻石'),
        (MT_COLORED_GEM,'彩宝'),
        (MT_OTHER,'其它')
    )
    merchandise_type = models.CharField("类型",max_length=4,choices=MERCHANDISE_TYPE,blank=True)
    def __str__(self):
        return self.description
    def serialId(self):
        # 12-character serial combining the creation timestamp and the PK.
        return combine_datetime_pk(self.id,12,self.created)
    def get_merchandise_params(self):
        # Hook for subclasses to expose type-specific display parameters.
        return {}
    class Meta:
        verbose_name = "商品"
        verbose_name_plural = verbose_name
        ordering = ['-id']
# chain or ring have size
class Jewel(Merchandise):
    """Finished jewelry piece ("成品"): ring, necklace, earrings, etc.

    Adds jewelry-type/metal choices, size (length or ring size) and the
    main/accent stones to the Merchandise base.
    """
    # Grouped choices: top-level entries plus "necklace" and "earring" groups.
    JEWEL_TYPE = (
        ("R","戒指"),
        ("项链",(
            ("P","项坠"),
            ("N","珠链"),
        )),
        ("耳饰",(
            ("D","耳钉"),
            ("G","耳钩"),
            ("X","耳线"),
            ("J","耳夹"),
        )),
        ("W","手链"),
        ("B","胸针"),
        ("H","头饰"),
        ("","其它"),
    )
    jewel_type = models.CharField('类别',max_length=5,choices=JEWEL_TYPE,default="")
    size = models.DecimalField('长度/手寸',default = 0,max_digits = 5,decimal_places = 2)
    major_gem = models.CharField('主石',max_length=20,blank=True,default='')
    minor_gem = models.CharField('配石',max_length=20,blank=True,default='')
    METAL_TYPE = (
        ("PT","铂金"),
        ("24KG","24K金"),
        ("18KY","18K黄"),
        ("18KW","18K白"),
        ("18KR","18K红"),
        ("14KY","14K黄"),
        ("14KW","14K白"),
        ("14KR","14K红"),
        # BUG FIX: the 10K codes were labeled "14K黄/14K白/14K红"
        # (copy-paste error); labels now match the 10K codes.
        ("10KY","10K黄"),
        ("10KW","10K白"),
        ("10KR","10K红"),
        ("SILV","纯银"),
        ("S925","S925"),
        ("GONB","铜镀金"),
        ("ALLO","合金"),
        ("","其它")
    )
    metal_type = models.CharField('金属',max_length=4,choices = METAL_TYPE,default="")
    metal_weight = models.FloatField("金重(g)",blank=True,null=True)
    def __str__(self):
        return "成品"+self.description
    class Meta:
        verbose_name = "成品"
        verbose_name_plural = verbose_name
class Gem(Merchandise):
    """Marker base class for loose stones (Pearl, Diamond, ColoredGem)."""
    pass
'''
size, 直径
shape, 形状
color, 颜色
luster, 光泽
surface, 表皮
nacre,珠层
'''
# Grading notes: size, luster, blemishes, color
# Round pearls: roundness (perfectly round, near-round, oblate)
# Drop-shaped pearls
# Mabe (blister) pearls
# Baroque / keshi (non-nucleated) pearls
class Pearl(Gem):
    """Loose pearl ("珍珠") with type, size range and grading fields."""
    # Grouped choices: saltwater ("海水珍珠") and freshwater ("淡水珍珠") types.
    PEARL_TYPE = (
        ("","其它"),
        ("海水珍珠",(
            ("AWHT","南洋白珍珠"),
            ("SSGD","南洋金珍珠"),
            ("TBLK","大溪地黑珍珠"),
            ("AKOY","AKOYA"),
            ("MABE","马贝珠"),
            ("KESH","KESHI"),
            ("CONC","海螺珠"),
            )
        ),
        ("淡水珍珠",(
            ("FRWT","淡水珍珠"),
            ("BARQ","巴洛克珍珠"),
            ("EDSN","爱迪生珍珠"),
            )
        )
    )
    pearl_type = models.CharField(max_length=4,choices=PEARL_TYPE,default="")
    min_size = models.DecimalField("最小直径(mm)",default = 0,max_digits = 5,decimal_places = 2)
    max_size = models.DecimalField("最大直径(mm)",default = 0,max_digits = 5,decimal_places = 2)
    # Color: body color plus overtone, and an iridescence grade.
    body_color = models.CharField("体色",max_length=10)
    overtone = models.CharField("伴色",max_length=10)
    IRIDESCENCE = (("","N/A"),("A","强"),("B","明显"),("C","一般"))
    iridescence = models.CharField("晕彩",max_length=1,choices=IRIDESCENCE,default="")
    # Single-letter grading scales (best to worst) for the fields below.
    LUSTER = (("","N/A"),("A","极强"),("B","强"),("C","中"),("D","弱"))
    luster = models.CharField("光泽",max_length=1,choices=LUSTER)
    SURFACE = (("","N/A"),("A","无瑕"),("B","微瑕"),("C","小瑕"),("D","瑕疵"),("E","重瑕"))
    surface = models.CharField("表皮",max_length=1,choices=SURFACE)
    NACRE = (("","N/A"),("A","特厚"),("B","厚"),("C","中"),("D","薄"),("E","极薄"))
    nacre = models.CharField("珠层",max_length=1,choices=NACRE)
    def __str__(self):
        return "珍珠"
    class Meta:
        verbose_name = "珍珠"
        verbose_name_plural = verbose_name
class Diamond(Gem):
    """Loose diamond ("钻石") graded by color, clarity and cut (GIA-style scales)."""
    # Color grades D (colorless) through K; "" means other/ungraded.
    COLOR = (
        ("","其它"),
        ("D","D"),
        ("E","E"),
        ("F","F"),
        ("G","G"),
        ("H","H"),
        ("I","I"),
        ("J","J"),
        ("K","K"),
    )
    color = models.CharField("颜色",max_length=1,choices=COLOR,default="")
    # Clarity grades FL (flawless) through SI2.
    CLARITY = (
        ("","其它"),
        ("FL","FL"),
        ("IF","IF"),
        ("VVS1","VVS1"),
        ("VVS2","VVS2"),
        ("VS1","VS1"),
        ("VS2","VS2"),
        ("SI1","SI1"),
        ("SI2","SI2"),
    )
    clarity = models.CharField("净度",max_length=4,choices=CLARITY,default="")
    # Cut grades: excellent / very good / good.
    CUT = (
        ("","其它"),
        ("EX","EX"),
        ("VG","VG"),
        ("G","G"),
    )
    cut = models.CharField("切工",max_length=2,choices=CUT,default="")
    def __str__(self):
        # Carat weight derived from grams (1 ct == 0.2 g).
        return "钻石"+"{:.2f}".format(self.net_weight/0.2)+"ct"
    class Meta:
        verbose_name = "钻石"
        verbose_name_plural = verbose_name
class ColoredGem(Gem):
    """Colored gemstone ("彩宝"); a proxy of Gem with no extra fields."""
    def __str__(self):
        return "彩宝"
    class Meta:
        verbose_name = "彩宝"
        verbose_name_plural = verbose_name
        # Proxy model: shares Gem's table; only Python-level behavior differs.
        proxy = True
| StarcoderdataPython |
5162288 | <filename>kNN/kNN.py
from numpy import *
import operator
import matplotlib
import matplotlib.pyplot as plt
def createDataSet():
    """Return the toy 2-D training set and its class labels."""
    samples = array([[1.0, 1.1], [1.0, 1.0], [0, 0], [0, 0.1]])
    sample_labels = ['A', 'A', 'B', 'B']
    return samples, sample_labels
# Smoke-check the toy data set at import time.
group, labels = createDataSet()
print(group)
print(labels)
def classify0(inX, dataSet, labels, k):
    """k-nearest-neighbour vote: return the majority label among the k
    training samples closest (Euclidean distance) to inX."""
    num_samples = dataSet.shape[0]
    # Euclidean distance from inX to every training sample.
    deltas = tile(inX, (num_samples, 1)) - dataSet
    distances = ((deltas ** 2).sum(axis=1)) ** 0.5
    nearest = distances.argsort()
    # Tally votes from the k closest samples.
    votes = {}
    for rank in range(k):
        label = labels[nearest[rank]]
        votes[label] = votes.get(label, 0) + 1
    ranked = sorted(votes.items(), key=operator.itemgetter(1), reverse=True)
    return ranked[0][0]
def file2Matrix(fileName):
    """Parse a tab-separated dating data file.

    Each line holds three numeric features followed by an integer class
    label. Returns (feature matrix of shape (n, 3), int32 label vector).
    """
    # BUG FIX: the original opened the file twice (once just to count lines)
    # and never closed either handle; a single buffered read with a context
    # manager does the same work safely.
    with open(fileName) as fr:
        lines = fr.readlines()
    returnMat = zeros((len(lines), 3))
    classLabelVector = []
    for index, line in enumerate(lines):
        listFromLine = line.strip().split('\t')
        returnMat[index, :] = listFromLine[0:3]
        # Last column is the class label (stringly-typed until the cast below).
        classLabelVector.append(listFromLine[-1])
    return returnMat, array(classLabelVector, dtype=int32)
# print(classify0([0,0], group, labels, 3))
# print(classify0([0,1], group, labels, 3))
# print(classify0([1,0], group, labels, 3))
# print(classify0([1,1], group, labels, 3))
# print(file2Matrix('datingTestSet.txt'))
# Figure/axes prepared for the (currently disabled) scatter plot below.
fig = plt.figure()
ax = fig.add_subplot(111)
# Preview the chart (Korean comment translated): uncomment to visualize.
# ax.scatter(datingDataMat[:, 1], datingDataMat[:, 2], 20.0*array(datingLabels), 20.0*array(datingLabels))
# plt.show()
def autoNorm(dataSet):
    """Min-max normalise every column of dataSet into [0, 1].

    Returns (normalised matrix, per-column ranges, per-column minima).
    """
    minVals = dataSet.min(0)
    maxVals = dataSet.max(0)
    ranges = maxVals - minVals
    rows = dataSet.shape[0]
    # Shift each column to start at zero, then scale by its range.
    shifted = dataSet - tile(minVals, (rows, 1))
    normDataSet = shifted / tile(ranges, (rows, 1))
    return normDataSet, ranges, minVals
#데이터 정규화 하기
# normMat, ranges, minVals = autoNorm(datingDataMat)
def datingClassTest():
    """Hold out the first 10% of the normalised data as a test set and
    print the kNN error rate on it.

    NOTE(review): relies on the module-level normMat/datingLabels globals
    being assigned before this is called (they are, near the file's end).
    """
    hoRatio = 0.10
    # datingDataMat, datingLabels = file2Matrix('datingTestSet2.txt')
    # normMat, ranges, minVals = autoNorm(datingDataMat)
    m = normMat.shape[0]
    numTestVecs = int(m*hoRatio)
    errorCount = 0.0
    for i in range(numTestVecs):
        # Classify each held-out row against the remaining 90%.
        classifierResult = classify0(normMat[i, :], normMat[numTestVecs:m, :], datingLabels[numTestVecs:m], 3)
        # print("계산 값 - %d 실제 값- %d" % (classifierResult, datingLabels[i]))  # (Korean: predicted vs. actual)
        if (classifierResult != datingLabels[i]):
            errorCount += 1.0
    print("error count %f" % (errorCount / float(numTestVecs)))
def classifyPerson():
    """Interactively classify a person (prompts are Korean): asks for gaming
    time, yearly flight mileage and yearly ice-cream consumption, then
    prints the predicted liking level."""
    # Result labels (Korean): "not really" / "a little...?" / "really like".
    resultList = ['별로임', '살짝...?', '개좋아']
    percentTats = float(input("비디오 게임 얼마나 하니?"))
    ffMiles = float(input("1년에 비행기 어느정도 타?"))
    iceCream = float(input("1년에 아이스크림을 몇리터나 먹어?"))
    inArr = array([ffMiles, percentTats, iceCream])
    # Normalise the input with the module-level ranges/minVals before kNN.
    classifierResult = classify0((inArr - minVals)/ranges, normMat, datingLabels, 3)
    print("넌 아마 이 사람을...", resultList[int(classifierResult) - 1])
# Script entry flow: load and normalise the dating data, evaluate the
# classifier on a hold-out split, then run the interactive predictor.
datingDataMat, datingLabels = file2Matrix('datingTestSet2.txt')
normMat, ranges, minVals = autoNorm(datingDataMat)
datingClassTest()
classifyPerson()
print(datingDataMat)
print(autoNorm(datingDataMat))
3487748 | <filename>lain_cli/imagecheck.py
# -*- coding: utf-8 -*-
from argh.decorators import arg
import lain_sdk.mydocker as docker
from lain_cli.utils import check_phase, get_domain, lain_yaml
from lain_sdk.util import error, info
def _check_phase_tag(phase):
    """Verify that both the meta- and release-tagged images for the current
    meta version exist in the phase's registry.

    Returns True when both tags exist, False when either is missing, and
    None when there is no meta version (uncommitted working tree).
    """
    yml = lain_yaml(ignore_prepare=True)
    meta_version = yml.meta_version
    if meta_version is None:
        error("please git commit.")
        return None
    registry = "registry.%s" % get_domain(phase)
    tag_list = docker.get_tag_list_in_registry(registry, yml.appname)
    tag_ok = True
    # Check the meta tag first, then the release tag, reporting each.
    for tag in ("meta-%s" % meta_version, "release-%s" % meta_version):
        if tag not in tag_list:
            tag_ok = False
            error("%s/%s:%s not exist." % (registry, yml.appname, tag))
        else:
            info("%s/%s:%s exist." % (registry, yml.appname, tag))
    return tag_ok
@arg('phase', help="lain phase, can be added by lain config save")
def check(phase):
    """
    Check current version of release and meta images in the remote registry
    """
    check_phase(phase)
    if _check_phase_tag(phase):
        info("Image Tag OK in registry")
    else:
        error("Image Tag not OK in registry")
| StarcoderdataPython |
227987 | from datetime import datetime
class User(object):
    """A basic site user with a username, a password and a default role.

    Subclasses override ``role`` after construction (see Admin/Moderator).
    """

    def __init__(self, username, password):
        self.username = username
        # NOTE(review): the password is stored in plain text here — confirm
        # hashing happens elsewhere before this reaches persistent storage.
        self.password = password
        self.role = "normal"

    def __repr__(self):
        # Bug fix: the original template was '<User {}' and never closed
        # the angle bracket.
        return '<User {}>'.format(self.username)
class Admin(User):
    """A User whose role is fixed to ``"admin"``."""

    def __init__(self, username, password):
        # Let the base class set username/password, then override the role.
        super().__init__(username, password)
        self.role = "admin"
class Moderator(User):
    """A User whose role is fixed to ``"moderator"``."""

    def __init__(self, username, password):
        # Let the base class set username/password, then override the role.
        super().__init__(username, password)
        self.role = "moderator"
class Comment(object):
    """A user-visible message: its text, its author, and a UTC timestamp."""

    def __init__(self, message, author):
        self.message = message
        self.author = author
        # Stamp the creation time (UTC) at construction.
        self.timestamp = datetime.utcnow()
class Reply(Comment):
    """A Comment that is attached as a reply to another comment's message."""

    def __init__(self, message, author, reply_msg):
        # Bug fix: the original called ``super.__init__(...)`` on the builtin
        # ``super`` *type* itself, which raises a TypeError at runtime; it
        # must be ``super().__init__(...)``.
        super().__init__(message, author)
        # Bug fix: the attribute was misspelled ``repy_msg``. The corrected
        # name is used, and the old misspelling is kept as an alias for
        # backward compatibility with any existing callers.
        self.reply_msg = reply_msg
        self.repy_msg = reply_msg
| StarcoderdataPython |
3331703 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Test example app."""
import os
import signal
import subprocess
import time
import pytest
def _create_example_app(app_name):
    """Generator used by the fixtures below: set up, run and tear down an
    example Flask app.

    Yields the running ``flask run`` subprocess; on resume (after the test)
    it kills the server's process group and runs the teardown script.
    """
    current_dir = os.getcwd()
    # go to the example directory (../examples relative to this test file)
    project_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    exampleappdir = os.path.join(project_dir, 'examples')
    os.chdir(exampleappdir)
    # setup example via its shell script; abort the fixture if it fails
    cmd = 'FLASK_APP={0} ./app-setup.sh'.format(app_name)
    exit_status = subprocess.call(cmd, shell=True)
    assert exit_status == 0
    # Start the example web app in its own process group (os.setsid) so the
    # whole group can be killed at teardown.
    cmd = 'FLASK_APP={0} flask run --debugger -p 5000'.format(app_name)
    webapp = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                              preexec_fn=os.setsid, shell=True)
    # Give the dev server time to come up before the test hits it.
    time.sleep(10)
    # hand the running process to the test
    yield webapp
    # stop server: SIGTERM the whole process group started above
    os.killpg(webapp.pid, signal.SIGTERM)
    # tear down the example app
    cmd = 'FLASK_APP={0} ./app-teardown.sh'.format(app_name)
    subprocess.call(cmd, shell=True)
    # return to the original working directory
    os.chdir(current_dir)
@pytest.yield_fixture
def example_app():
    # Delegate to the shared generator so setup/teardown wrap the test.
    for i in _create_example_app('app.py'):
        yield i
@pytest.yield_fixture
def perms_app():
    # Same as example_app, but runs the permissions-enabled variant.
    for i in _create_example_app('permsapp.py'):
        yield i
def test_example_app(example_app):
    """Test example app."""
    # load fixtures into the running app
    cmd = 'FLASK_APP={0} ./app-fixtures.sh'.format('app.py')
    exit_status = subprocess.call(cmd, shell=True)
    assert exit_status == 0
    # without permissions enabled, both records are readable anonymously
    cmd = 'curl http://localhost:5000/records/1'
    output = subprocess.check_output(cmd, shell=True).decode('utf-8')
    assert '<NAME>' in output
    cmd = 'curl http://localhost:5000/records/2'
    output = subprocess.check_output(cmd, shell=True).decode('utf-8')
    assert '<NAME>' in output
def test_example_permsapp(perms_app):
    """Test example permsapp."""
    # load fixtures into the running app
    cmd = 'FLASK_APP={0} ./app-fixtures.sh'.format('permsapp.py')
    exit_status = subprocess.call(cmd, shell=True)
    assert exit_status == 0
    # record 1 is public ...
    cmd = 'curl http://localhost:5000/records/1'
    output = subprocess.check_output(cmd, shell=True).decode('utf-8')
    assert '<NAME>' in output
    # ... while record 2 is protected and redirects the anonymous client
    cmd = 'curl http://localhost:5000/records/2'
    output = subprocess.check_output(cmd, shell=True).decode('utf-8')
    assert 'Redirect' in output
| StarcoderdataPython |
192836 | import tensorflow as tf
import tensorflow.keras.backend as K
from tensorflow.keras.layers import Layer
class RoiPoolingConv(Layer):
    """ROI Pooling layer for 2D feature maps (Faster R-CNN style).

    Takes ``[feature_map, rois]`` and produces one fixed-size pooled crop
    per ROI, stacked into a
    ``(1, num_rois, pool_size, pool_size, channels)`` tensor (permuted for
    channels-first layouts).

    Arguments:
        pool_size: int, output side length of each pooled region.
        num_rois: int, number of regions of interest per image.

    Input:
        list of two 4D tensors ``[X_img, X_roi]`` where ``X_roi`` has shape
        ``(1, num_rois, 4)`` with each ROI ordered as ``(x, y, w, h)``.
    """

    def __init__(self, pool_size, num_rois, **kwargs):
        self.image_data_format = K.image_data_format()
        assert self.image_data_format in {'channels_last',
                                          'channels_first'}, 'image_data_format must be in {channels_last, channels_first}'
        self.pool_size = pool_size
        self.num_rois = num_rois
        self.nb_channels = None  # resolved in build() from the input shape
        super(RoiPoolingConv, self).__init__(**kwargs)

    def build(self, input_shape):
        # The channel axis position depends on the backend data format.
        if self.image_data_format == 'channels_first':
            # input_shape = (num_rois, 512, 7, 7)
            self.nb_channels = input_shape[0][1]
        elif self.image_data_format == 'channels_last':
            # input_shape = (num_rois, 7, 7, 512)
            self.nb_channels = input_shape[0][3]
        super(RoiPoolingConv, self).build(input_shape)

    def compute_output_shape(self, input_shape):
        if self.image_data_format == 'channels_first':
            return None, self.num_rois, self.nb_channels, self.pool_size, self.pool_size
        else:
            return None, self.num_rois, self.pool_size, self.pool_size, self.nb_channels

    def call(self, x, mask=None):
        assert (len(x) == 2)
        # x[0] is the image feature map; x[1] the ROIs, shape (1, num_rois, 4)
        # with ordering (x, y, w, h).
        img = x[0]
        rois = x[1]
        input_shape = tf.shape(img)
        outputs = []
        for roi_idx in range(self.num_rois):
            x = rois[0, roi_idx, 0]
            y = rois[0, roi_idx, 1]
            w = rois[0, roi_idx, 2]
            h = rois[0, roi_idx, 3]
            row_length = w / float(self.pool_size)
            col_length = h / float(self.pool_size)
            num_pool_regions = self.pool_size
            if self.image_data_format == 'channels_first':
                # Explicit max pooling over a pool_size x pool_size bin grid.
                for jy in range(num_pool_regions):
                    for ix in range(num_pool_regions):
                        x1 = x + ix * row_length
                        x2 = x1 + row_length
                        y1 = y + jy * col_length
                        y2 = y1 + col_length
                        x1 = tf.cast(x1, tf.int32)
                        x2 = tf.cast(x2, tf.int32)
                        y1 = tf.cast(y1, tf.int32)
                        y2 = tf.cast(y2, tf.int32)
                        # Guarantee each bin is at least 1 pixel wide/tall.
                        x2 = x1 + tf.maximum(1, x2 - x1)
                        y2 = y1 + tf.maximum(1, y2 - y1)
                        new_shape = [input_shape[0], input_shape[1], y2 - y1, x2 - x1]
                        x_crop = img[:, :, y1:y2, x1:x2]
                        xm = tf.reshape(x_crop, new_shape)
                        # Bug fix: tf.math.maximum is an element-wise *binary*
                        # op and has no ``axis`` argument; reducing the two
                        # spatial axes requires tf.math.reduce_max.
                        pooled_val = tf.math.reduce_max(xm, axis=(2, 3))
                        outputs.append(pooled_val)
            elif self.image_data_format == 'channels_last':
                # Approximate ROI pooling by resizing the crop to the target
                # pool size (common in Keras Faster R-CNN implementations).
                x = tf.cast(x, tf.int32)
                y = tf.cast(y, tf.int32)
                w = tf.cast(w, tf.int32)
                h = tf.cast(h, tf.int32)
                rs = tf.image.resize(img[:, y:y + h, x:x + w, :], (self.pool_size, self.pool_size))
                outputs.append(rs)
        final_output = tf.concat(outputs, axis=0)
        final_output = tf.reshape(final_output, (1, self.num_rois, self.pool_size, self.pool_size, self.nb_channels))
        if self.image_data_format == 'channels_first':
            final_output = K.permute_dimensions(final_output, (0, 1, 4, 2, 3))
        else:
            # Identity permutation — kept for symmetry with the branch above.
            final_output = K.permute_dimensions(final_output, (0, 1, 2, 3, 4))
        return final_output

    def get_config(self):
        # Serialize constructor arguments so the layer can be re-created.
        config = {'pool_size': self.pool_size,
                  'num_rois': self.num_rois}
        base_config = super(RoiPoolingConv, self).get_config()
        return dict(list(base_config.items()) + list(config.items()))
| StarcoderdataPython |
6524050 | #!/usr/bin/python
# HockeyBox
# by <NAME>, <EMAIL>
# Based on HockeyBox3.py by <NAME>
#
# Use 4-space tabs for indentation
# vim :set ts=4 sw=4 sts=4 et:
HOCKEYBOX_VERSION = "201811.1"
import RPi.GPIO as GPIO
from time import sleep
import os, random, vlc
from collections import deque
print "--------------------------------------------"
print "HockeyBox %s" % HOCKEYBOX_VERSION
print "by <NAME>, <EMAIL>"
print "Based on HockeyBox3.py (2016) by <NAME>"
print "--------------------------------------------"
print "RPI %s" % GPIO.RPI_INFO
print "RPi.GPIO %s" % GPIO.VERSION
print "--------------------------------------------"
BASE_MP3_DIR = "/media/pi/HOCKEYBOX"
GOAL_MP3_DIR = BASE_MP3_DIR + "/goal"
WARMUP_MP3_DIR = BASE_MP3_DIR + "/warmup"
BTW_MP3_DIR = BASE_MP3_DIR + "/btw"
INTERMISSION_MP3_DIR = BASE_MP3_DIR + "/intermission"
PENALTY_MP3_DIR = BASE_MP3_DIR + "/penalty"
POWERPLAY_MP3_DIR = BASE_MP3_DIR + "/powerplay"
USANTHEM_MP3_DIR = BASE_MP3_DIR + "/usanthem"
CDNANTHEM_MP3_DIR = BASE_MP3_DIR + "/cdnanthem"
# Track which songs have been played
btw_played_songs = deque([])
BTW_REPEAT_THRESHOLD = 25
intermission_num_played = 0
intermission_played_songs = deque([])
INTERMISSION_REPEAT_THRESHOLD = 5
goal_played_songs = deque([])
GOAL_REPEAT_THRESHOLD = 4
penalty_played_songs = deque([])
PENALTY_REPEAT_THRESHOLD = 4
powerplay_played_songs = deque([])
POWERPLAY_REPEAT_THRESHOLD = 4
#
# GPIO Setup
#
# Set GPIO to BCM mode
GPIO.setmode (GPIO.BCM)
inputs = []
outputs = []
# Setup input channels
INPUT_WARMUP=25
inputs.append(INPUT_WARMUP)
INPUT_BTW=21
inputs.append(INPUT_BTW)
INPUT_INTERMISSION=12
inputs.append(INPUT_INTERMISSION)
INPUT_GOAL=20
inputs.append(INPUT_GOAL)
INPUT_PENALTY=23
inputs.append(INPUT_PENALTY)
INPUT_POWERPLAY=16
inputs.append(INPUT_POWERPLAY)
INPUT_USANTHEM=7
inputs.append(INPUT_USANTHEM)
INPUT_CDNANTHEM=8
inputs.append(INPUT_CDNANTHEM)
INPUT_STOP=24
inputs.append(INPUT_STOP)
GPIO.setup(inputs, GPIO.IN)
# Setup output channels
OUTPUT_WARMUP=27
outputs.append(OUTPUT_WARMUP)
OUTPUT_BTW=26
outputs.append(OUTPUT_BTW)
OUTPUT_INTERMISSION=22
outputs.append(OUTPUT_INTERMISSION)
OUTPUT_GOAL=17
outputs.append(OUTPUT_GOAL)
OUTPUT_PENALTY=19
outputs.append(OUTPUT_PENALTY)
OUTPUT_POWERPLAY=6
outputs.append(OUTPUT_POWERPLAY)
OUTPUT_USANTHEM=5
outputs.append(OUTPUT_USANTHEM)
OUTPUT_CDNANTHEM=4
outputs.append(OUTPUT_CDNANTHEM)
OUTPUT_STOP=13
outputs.append(OUTPUT_STOP)
GPIO.setup(outputs, GPIO.OUT)
#
# VLC Player Setup
#
# Define our VLC object
instance = vlc.Instance()
player = instance.media_player_new()
list_player = instance.media_list_player_new()
list_events = list_player.event_manager()
def intermission_item_played(event):
    # VLC event callback: count intermission tracks that actually started
    # playing (consumed by play_intermission's reclaim logic).
    global intermission_num_played
    intermission_num_played += 1
    print "Items Played: %d" % intermission_num_played
    #sleep(1)
list_events.event_attach(vlc.EventType.MediaListPlayerNextItemSet, intermission_item_played)
#
# Function Definitions
#
#
# change_lights_after_input
# Handle button light changes after a button is pushed
#
def change_lights_after_input(p_output):
    # Turn all button lights off, then light only STOP and the button that
    # was pressed. (For this wiring GPIO.HIGH = light off, GPIO.LOW = on.)
    GPIO.output(outputs, GPIO.HIGH)
    sleep(0.2)
    # Turn on the STOP light and the pressed button's light
    GPIO.output(OUTPUT_STOP, GPIO.LOW)
    GPIO.output(p_output, GPIO.LOW)
#
# pick_random_song
# Picking random MP3 from specified directory
#
def pick_random_song(p_mp3_dir):
    """Return the full path of a randomly chosen .mp3 file in p_mp3_dir.

    Dotfiles and non-mp3 entries are skipped by re-drawing until an
    acceptable file name comes up.
    """
    while True:
        candidate = random.choice(os.listdir(p_mp3_dir))
        if candidate.startswith("."):
            continue
        if candidate.endswith(".mp3"):
            return p_mp3_dir + "/" + candidate
#
# play_song
# Play specified song (mp3 file path) through VLC MediaPlayer instance
#
def play_song(p_song):
    # Play the given mp3 path on the single shared VLC MediaPlayer instance,
    # stopping whatever is currently playing first.
    if player.is_playing():
        player.stop()
    print "Playing %s" % p_song
    player.set_media(instance.media_new(p_song))
    player.play()
#
# GOAL
#
def play_goal(channel):
    # GPIO callback: play a random goal song, re-drawing to avoid the last
    # GOAL_REPEAT_THRESHOLD songs already played.
    print "GOAL"
    change_lights_after_input(OUTPUT_GOAL)
    new_song = ""
    while True:
        new_song = pick_random_song(GOAL_MP3_DIR)
        if new_song in goal_played_songs:
            print "Song %s has already been played, skipping." % new_song
        else:
            goal_played_songs.append(new_song)
            break;
    # Keep the played-songs deque capped at GOAL_REPEAT_THRESHOLD by
    # dropping the oldest entry.
    if len(goal_played_songs) > GOAL_REPEAT_THRESHOLD:
        print "Removing %s from goal_played_songs list" % goal_played_songs[0]
        goal_played_songs.popleft()
    play_song(new_song)
#
# WARM-UP
#
def play_warmup(channel):
    # GPIO callback: warmup music has no repeat tracking — any song goes.
    print "WARMUP"
    change_lights_after_input(OUTPUT_WARMUP)
    play_song(pick_random_song(WARMUP_MP3_DIR))
#
# US ANTHEM
#
def play_usanthem(channel):
    # GPIO callback: play a random US anthem track (no repeat tracking).
    print "USANTHEM"
    change_lights_after_input(OUTPUT_USANTHEM)
    play_song(pick_random_song(USANTHEM_MP3_DIR))
#
# CDN ANTHEM
#
def play_cdnanthem(channel):
    # GPIO callback: play a random Canadian anthem track (no repeat tracking).
    print "CDNANTHEM"
    change_lights_after_input(OUTPUT_CDNANTHEM)
    play_song(pick_random_song(CDNANTHEM_MP3_DIR))
#
# PENALTY
#
def play_penalty(channel):
    # GPIO callback: play a random penalty song, avoiding the last
    # PENALTY_REPEAT_THRESHOLD songs already played.
    print "PENALTY"
    change_lights_after_input(OUTPUT_PENALTY)
    new_song = ""
    while True:
        new_song = pick_random_song(PENALTY_MP3_DIR)
        if new_song in penalty_played_songs:
            print "Song %s has already been played, skipping." % new_song
        else:
            penalty_played_songs.append(new_song)
            break;
    # Cap the played-songs deque by dropping the oldest entry.
    if len(penalty_played_songs) > PENALTY_REPEAT_THRESHOLD:
        print "Removing %s from penalty_played_songs list" % penalty_played_songs[0]
        penalty_played_songs.popleft()
    play_song(new_song)
#
# POWERPLAY
#
def play_powerplay(channel):
    # GPIO callback: play a random power-play song, avoiding the last
    # POWERPLAY_REPEAT_THRESHOLD songs already played.
    print "POWERPLAY"
    change_lights_after_input(OUTPUT_POWERPLAY)
    new_song = ""
    while True:
        new_song = pick_random_song(POWERPLAY_MP3_DIR)
        if new_song in powerplay_played_songs:
            print "Song %s has already been played, skipping." % new_song
        else:
            powerplay_played_songs.append(new_song)
            break;
    # Cap the played-songs deque by dropping the oldest entry.
    if len(powerplay_played_songs) > POWERPLAY_REPEAT_THRESHOLD:
        print "Removing %s from powerplay_played_songs list" % powerplay_played_songs[0]
        powerplay_played_songs.popleft()
    play_song(new_song)
#
# INTERMISSION
#
def play_intermission(channel):
    # GPIO callback: queue INTERMISSION_REPEAT_THRESHOLD random tracks into
    # the VLC list player, avoiding tracks queued by previous intermissions.
    print "INTERMISSION"
    change_lights_after_input(OUTPUT_INTERMISSION)
    # If we queue N songs but only play P, we should remove the last N-P songs from the played list
    # (those were queued but never actually heard, so they may be re-used).
    global intermission_num_played
    if intermission_num_played > 0:
        reclaim_count = INTERMISSION_REPEAT_THRESHOLD - intermission_num_played
        print "Taking back %d songs from the already-played list." % reclaim_count
        for i in range(reclaim_count):
            print "Reclaiming %s from intermission_played_songs list" % intermission_played_songs[-1]
            intermission_played_songs.pop()
    # Now remove any others over the threshold.
    # NOTE(review): this pops the *newest* entries, unlike the other handlers
    # which popleft() the oldest — confirm that is intended.
    while len(intermission_played_songs) > INTERMISSION_REPEAT_THRESHOLD:
        print "Removing %s from intermission_played_songs list" % intermission_played_songs[-1]
        intermission_played_songs.pop()
    # Build the song list for this intermission.
    intermission_num_played = 0
    intermission_playlist = instance.media_list_new()
    new_song = ""
    while True:
        new_song = pick_random_song(INTERMISSION_MP3_DIR)
        if new_song in intermission_played_songs:
            print "Song %s has already been added to the playlist, skipping." % new_song
        else:
            print "Adding song %s to intermission play list." % new_song
            intermission_played_songs.append(new_song)
            intermission_playlist.add_media(instance.media_new(new_song))
        if intermission_playlist.count() >= INTERMISSION_REPEAT_THRESHOLD:
            break;
    list_player.set_media_list(intermission_playlist)
    list_player.play()
#
# BTW
#
def play_btw(channel):
    # GPIO callback: play a random between-whistles song, avoiding the last
    # BTW_REPEAT_THRESHOLD songs already played.
    print "BTW"
    change_lights_after_input(OUTPUT_BTW)
    new_song = ""
    while True:
        new_song = pick_random_song(BTW_MP3_DIR)
        if new_song in btw_played_songs:
            print "Song %s has already been played, skipping." % new_song
        else:
            btw_played_songs.append(new_song)
            break;
    # Cap the played-songs deque by dropping the oldest entry.
    if len(btw_played_songs) > BTW_REPEAT_THRESHOLD:
        print "Removing %s from btw_played_songs list" % btw_played_songs[0]
        btw_played_songs.popleft()
    play_song(new_song)
#
# STOP
#
def stop_playback(channel):
    # GPIO callback: stop both VLC players and restore the idle light state
    # (all buttons lit except STOP).
    print "STOP"
    sleep(0.3)
    if player.is_playing():
        print "Stopping player"
        player.stop()
    if list_player.is_playing():
        print "Stopping list player"
        list_player.stop()
    GPIO.output(outputs, GPIO.HIGH)
    print "Music Stopped"
    for output in outputs:
        # GPIO.LOW turns the button lights on
        GPIO.output(output, GPIO.LOW)
        sleep(0.05)
    GPIO.output(OUTPUT_STOP, GPIO.HIGH)
# Define event detections and their callbacks: a rising edge on each input
# pin fires the matching play_* handler; the 1s debounce avoids
# double-triggers from switch bounce.
GPIO.add_event_detect(INPUT_GOAL, GPIO.RISING, callback=play_goal, bouncetime=1000)
GPIO.add_event_detect(INPUT_WARMUP, GPIO.RISING, callback=play_warmup, bouncetime=1000)
GPIO.add_event_detect(INPUT_USANTHEM, GPIO.RISING, callback=play_usanthem, bouncetime=1000)
GPIO.add_event_detect(INPUT_CDNANTHEM, GPIO.RISING, callback=play_cdnanthem, bouncetime=1000)
GPIO.add_event_detect(INPUT_PENALTY, GPIO.RISING, callback=play_penalty, bouncetime=1000)
GPIO.add_event_detect(INPUT_POWERPLAY, GPIO.RISING, callback=play_powerplay, bouncetime=1000)
GPIO.add_event_detect(INPUT_INTERMISSION, GPIO.RISING, callback=play_intermission, bouncetime=1000)
GPIO.add_event_detect(INPUT_BTW, GPIO.RISING, callback=play_btw, bouncetime=1000)
GPIO.add_event_detect(INPUT_STOP, GPIO.RISING, callback=stop_playback, bouncetime=1000)
# Flicker the lights as a visible startup self-test.
print "Light 'em up."
for output in outputs:
    # GPIO.HIGH turns the button lights off
    GPIO.output(output, GPIO.HIGH)
    sleep(0.1)
for output in outputs:
    # GPIO.LOW turns the button lights on
    GPIO.output(output, GPIO.LOW)
    sleep(0.1)
GPIO.output(OUTPUT_STOP, GPIO.HIGH)
print "***********************************"
print "HockeyBox ready, waiting for input."
print "***********************************"
# Begin main loop; all real work happens in the GPIO event callbacks.
while True:
    # Event detection should be running during this loop
    sleep(0.02)
    # Wonder if we should put a wait_for_edge on INPUT_STOP in here?
# NOTE(review): unreachable — the while True above never exits.
GPIO.cleanup()
| StarcoderdataPython |
3580764 | from .evpn import *
from .evi import *
from .vni import *
from .esi import *
| StarcoderdataPython |
112790 | from __future__ import with_statement, print_function
import pytest
try:
from unittest import mock
except ImportError:
import mock
from k2catalogue import proposal_urls
@pytest.fixture
def campaign():
    # Campaign number used throughout these tests.
    return 1
@pytest.fixture
def mapper(campaign):
    # A BuildCampaignMapping wired to the test campaign number.
    return proposal_urls.BuildCampaignMapping(campaign)
def create_mock_row(proposal_id, pi, title, url):
    """Build a fake BeautifulSoup table row whose ``find_all`` yields four
    cell mocks: proposal id, PI name, title, and a link cell."""
    def _cells(*_args):
        # Fresh mocks per call, mirroring how a real row would be re-parsed.
        return [
            mock.Mock(string=proposal_id),
            mock.Mock(string=pi),
            mock.Mock(string=title),
            mock.Mock(a={'href': url}),
        ]
    return mock.Mock(find_all=_cells)
@pytest.fixture
def mock_row():
    # A realistic sample row matching the live K2 proposal table.
    return create_mock_row('GO1001', 'Giampapa',
        'Characterizing the Variability of the Nearby Late-Type Dwarf Stars',
        'docs/Campaigns/C1/GO1001_Giampapa.pdf')
def test_build_mapping(mapper, mock_row):
    # Patch the table_rows property so no network fetch happens, then check
    # that the parsed mapping expands the relative pdf path to a full URL.
    with mock.patch('k2catalogue.proposal_urls.BuildCampaignMapping.table_rows',
                    new_callable=mock.PropertyMock) as mock_table_rows:
        mock_table_rows.return_value = [mock_row, ]
        mapping = mapper.create()
    assert mapping['GO1001'] == {
        'pi': 'Giampapa',
        'title': ('Characterizing the Variability of the Nearby '
                  'Late-Type Dwarf Stars'),
        'url': 'http://keplerscience.arc.nasa.gov/K2/docs/Campaigns/C1/GO1001_Giampapa.pdf'}
def test_build_url(mapper):
    # Campaign 1 should be zero-padded to C01 in the generated URL.
    assert 'C01' in mapper.url
def test_response(mapper):
    # NOTE(review): performs a live HTTP request; requires network access.
    assert mapper.response.status_code == 200
def test_soup(mapper):
    # Duck-typed check that soup looks like a BeautifulSoup object.
    assert hasattr(mapper.soup, 'find_all')
def test_find_table(mapper):
    # The proposals table should be located in the fetched page.
    assert mapper.table
def test_extract_contents(mapper, mock_row):
    # The relative pdf path from the row should be expanded to a full URL.
    result = mapper.extract_contents(mock_row)
    assert result == ('GO1001', 'Giampapa',
        'Characterizing the Variability of the Nearby Late-Type Dwarf Stars',
        'http://keplerscience.arc.nasa.gov/K2/docs/Campaigns/C1/GO1001_Giampapa.pdf')
def test_invalid_html(mapper):
    # Rows with missing cells (None) should be rejected as a whole.
    entries = (
        mock.Mock(string='proposal_id'),
        None,
        None,
        None,
    )
    row = mock.Mock(find_all=lambda *args: entries)
    result = mapper.extract_contents(row)
    assert result is None
| StarcoderdataPython |
1891465 | # #####################################################################################################################
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. #
# #
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance #
# with the License. A copy of the License is located at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# or in the 'license' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES #
# OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions #
# and limitations under the License. #
# #####################################################################################################################
from tenacity import retry, retry_if_exception_type, stop_after_attempt
from util.helpers import get_quicksight_client
from util.logging import get_logger
from util.quicksight_resource import QuickSightFailure, QuickSightResource
from util.source_entity import SourceEntity
logger = get_logger(__name__)
class Analysis(QuickSightResource):
    """A QuickSight analysis resource built from a template SourceEntity."""

    def __init__(
        self, quicksight_application=None, data_sets=None, quicksight_template_arn=None, data_source=None, props=None
    ):
        super().__init__(quicksight_application=quicksight_application, type="analysis", props=props)
        self.use_props(props)
        self.data_sets = data_sets
        self.data_source = data_source
        self.quicksight_template_arn = quicksight_template_arn
        self.config_data = dict()
        self._load_config(self.type, ["main"], self.config_data)
        # SourceEntity wraps the template ARN + data sets for create calls.
        self.source_entity = SourceEntity(
            data_sets, quicksight_template_arn, self.config_data, source_entity_type="SourceTemplate"
        )

    @retry(retry=retry_if_exception_type(QuickSightFailure), stop=stop_after_attempt(3))
    def create(self):
        """Create the analysis; idempotent (an existing one is described
        instead) and retried up to 3 times on QuickSightFailure."""
        logger.info(f"requesting quicksight create_analysis: {self.id}")
        quicksight_client = get_quicksight_client()
        try:
            response = quicksight_client.create_analysis(
                AwsAccountId=self.aws_account_id,
                AnalysisId=self.id,
                Name=self.name,
                Permissions=self._get_permissions(),
                SourceEntity=self._get_source_entity(),
            )
            logger.info(f"finished quicksight create_analysis for id:{self.id} " f"response: {response}")
        except quicksight_client.exceptions.ResourceExistsException:
            # Already exists: describe it so the ARN can still be recorded.
            response = quicksight_client.describe_analysis(AwsAccountId=self.aws_account_id, AnalysisId=self.id)
            response = response["Analysis"]
        except quicksight_client.exceptions.InvalidParameterValueException as exc:
            # Translate into the retryable failure type handled by @retry.
            logger.error(str(exc))
            raise QuickSightFailure()
        self.arn = response["Arn"]
        return response

    def delete(self):
        """Delete this analysis in the configured account."""
        logger.info(f"requesting quicksight delete_analysis id:{self.id}")
        quicksight_client = get_quicksight_client()
        response = quicksight_client.delete_analysis(AwsAccountId=self.aws_account_id, AnalysisId=self.id)
        logger.info(f"finished quicksight delete_analysis for id:{self.id} " f"response: {response}")
        return response

    def _get_permissions(self):
        # The principal is the owner of the resource and create the resources and is given full actions for the type
        permissions = [
            {
                "Principal": self.principal_arn,
                "Actions": [
                    "quicksight:RestoreAnalysis",
                    "quicksight:UpdateAnalysisPermissions",
                    "quicksight:DeleteAnalysis",
                    "quicksight:QueryAnalysis",
                    "quicksight:DescribeAnalysisPermissions",
                    "quicksight:DescribeAnalysis",
                    "quicksight:UpdateAnalysis"
                ],
            }
        ]
        return permissions

    def _get_source_entity(self):
        # Delegate to the SourceEntity built in __init__.
        return self.source_entity.get_source_entity()
| StarcoderdataPython |
112707 | <reponame>truekonrads/mirv-metasploit
#!/usr/bin/env python
#
# this program is used to find source code that includes linux kernel headers directly
# (e.g. with #include <linux/...> or #include <asm/...>)
#
# then it lists
import sys, cpp, glob, os, re, getopt, kernel
from utils import *
from defaults import *
program_dir = find_program_dir()
wanted_archs = kernel_archs
wanted_include = os.path.normpath(program_dir + '/../original')
wanted_config = os.path.normpath(program_dir + '/../original/config')
def usage():
    # Print the command-line help text and terminate with a non-zero exit
    # code; called for any argument-parsing problem.
    print """\
usage: find_headers.py [options] (file|directory|@listfile)+

   options:
      -d <include-dir>     specify alternate kernel headers
                           'include' directory
                           ('%s' by default)

      -c <file>            specify alternate .config file
                           ('%s' by default)

      -a <archs>           used to specify an alternative list
                           of architectures to support
                           ('%s' by default)

      -v                   enable verbose mode

   this program is used to find all the kernel headers that are used
   by a set of source files or directories containing them. the search
   is recursive to find *all* required files.

""" % ( wanted_include, wanted_config, string.join(kernel_archs,",") )
    sys.exit(1)
try:
optlist, args = getopt.getopt( sys.argv[1:], 'vc:d:a:' )
except:
# unrecognized option
print "error: unrecognized option"
usage()
for opt, arg in optlist:
if opt == '-a':
wanted_archs = string.split(arg,',')
elif opt == '-d':
wanted_include = arg
elif opt == '-c':
wanted_config = arg
elif opt == '-v':
kernel.verboseSearch = 1
kernel.verboseFind = 1
verbose = 1
else:
usage()
if len(args) < 1:
usage()
kernel_root = wanted_include
if not os.path.exists(kernel_root):
sys.stderr.write( "error: directory '%s' does not exist\n" % kernel_root )
sys.exit(1)
if not os.path.isdir(kernel_root):
sys.stderr.write( "error: '%s' is not a directory\n" % kernel_root )
sys.exit(1)
if not os.path.isdir(kernel_root+"/linux"):
sys.stderr.write( "error: '%s' does not have a 'linux' directory\n" % kernel_root )
sys.exit(1)
if not os.path.exists(wanted_config):
sys.stderr.write( "error: file '%s' does not exist\n" % wanted_config )
sys.exit(1)
if not os.path.isfile(wanted_config):
sys.stderr.write( "error: '%s' is not a file\n" % wanted_config )
sys.exit(1)
# find all architectures in the kernel tree
re_asm_ = re.compile(r"asm-(\w+)")
archs = []
for dir in os.listdir(kernel_root):
m = re_asm_.match(dir)
if m:
if verbose: print ">> found kernel arch '%s'" % m.group(1)
archs.append(m.group(1))
# if we're using the 'kernel_headers' directory, there is only asm/
# and no other asm-<arch> directories (arm is assumed, which sucks)
#
in_kernel_headers = False
if len(archs) == 0:
# this can happen when we're using the 'kernel_headers' directory
if os.path.isdir(kernel_root+"/asm"):
in_kernel_headers = True
archs = [ "arm" ]
# if the user has specified some architectures with -a <archs> ensure that
# all those he wants are available from the kernel include tree
if wanted_archs != None:
if in_kernel_headers and wanted_archs != [ "arm" ]:
sys.stderr.write( "error: when parsing kernel_headers, 'arm' architecture only is supported at the moment\n" )
sys.exit(1)
missing = []
for arch in wanted_archs:
if arch not in archs:
missing.append(arch)
if len(missing) > 0:
sys.stderr.write( "error: the following requested architectures are not in the kernel tree: " )
for a in missing:
sys.stderr.write( " %s" % a )
sys.stderr.write( "\n" )
sys.exit(1)
archs = wanted_archs
# helper function used to walk the user files
def parse_file(path, parser):
    """Callback for walk_source_files(): feed a single file to *parser*."""
    parser.parseFile(path)
# remove previous destination directory
#destdir = "/tmp/bionic-kernel-headers/"
#cleanup_dir(destdir)
# try to read the config file
try:
cparser = kernel.ConfigParser()
cparser.parseFile( wanted_config )
except:
sys.stderr.write( "error: can't parse '%s'" % wanted_config )
sys.exit(1)
kernel_config = cparser.getDefinitions()
# first, obtain the list of kernel files used by our clients
fparser = kernel.HeaderScanner()
walk_source_files( args, parse_file, fparser, excludes=["kernel_headers"] )
headers = fparser.getHeaders()
files = fparser.getFiles()
# now recursively scan the kernel headers for additionnal sub-included headers
hparser = kernel.KernelHeaderFinder(headers,archs,kernel_root,kernel_config)
headers = hparser.scanForAllArchs()
if 0: # just for debugging
dumpHeaderUsers = False
print "the following %d headers:" % len(headers)
for h in sorted(headers):
if dumpHeaderUsers:
print " %s (%s)" % (h, repr(hparser.getHeaderUsers(h)))
else:
print " %s" % h
print "are used by the following %d files:" % len(files)
for f in sorted(files):
print " %s" % f
sys.exit(0)
for h in sorted(headers):
print h
sys.exit(0)
| StarcoderdataPython |
6683848 | __authors__ = "<NAME> (1813064), <NAME> (1713179), <NAME> (1626034)"
# maintainer = who fixes buggs?
__maintainer = __authors__
__date__ = "2020-05-01"
__version__ = "1.0"
__status__ = "Ready"
# kernel imports
import numpy as np
# own data imports
from constants import activationFunction
class neuron:
    """A single network unit.

    Stores the raw ``input`` value and exposes :meth:`getOutput`, which
    applies the activation function unless the neuron is a bias neuron
    (fixed output of 1) or an input neuron (raw input passed through).
    """

    def __init__(self, layerName, layerNeuronNumber, input = 0, isBiasNeuron = False, isInputNeuron = False, isOutputNeuron=False, activationFunc = activationFunction):
        # Role flags (exactly one of bias/input/output is expected, but this
        # is not enforced here).
        self.isBiasNeuron = isBiasNeuron
        self.isInputNeuron = isInputNeuron
        self.isOutputNeuron = isOutputNeuron
        self.input = input
        self.activationFunc = activationFunc
        self.layerName = layerName
        self.layerNeuronNumber = layerNeuronNumber
        # delta holds the backpropagated error term for this neuron.
        self.delta = 0.0
        # Name the neuron after its role and position; bias neurons also get
        # a fixed input of 1. (The dead ``pass`` statements of the original
        # implementation have been removed; behavior is unchanged.)
        if isBiasNeuron:
            self.neuronName = "Bias" + str(self.layerNeuronNumber)
            self.input = 1
        else:
            self.neuronName = "Neuron" + str(self.layerNeuronNumber)

    def getOutput(self):
        """Return the neuron's output according to its role."""
        if self.isBiasNeuron:
            return 1
        if self.isInputNeuron:
            return self.input
        return self.activationFunc(self.input)

    def __str__(self):
        return self.neuronName + ": " + str(self.getOutput())

    def setInput(self, newInput):
        self.input = newInput

    def getInput(self):
        return self.input

    def setDelta(self, newDeltaValue):
        self.delta = newDeltaValue

    def getDelta(self):
        return self.delta
| StarcoderdataPython |
3316676 | <filename>web/admin.py
from django.contrib import admin
from django.template import loader
from django.utils.translation import ugettext_lazy as _
from web.models import Origin, DemoUser
@admin.register(Origin)
class OriginAdmin(admin.ModelAdmin):
    """Admin for referral origins: per-origin signup counts plus a
    shareable referral URL on the change form."""

    def changeform_view(
        self,
        request,
        object_id=None,
        form_url='',
        extra_context=None
    ):
        # Expose the owning user's id/name to the custom change-form template.
        extra_context = extra_context or {}
        if object_id:
            origin_obj = Origin.objects.filter(id=object_id).first()
            if origin_obj:
                extra_context['user_id'] = origin_obj.user_id
                extra_context['username'] = origin_obj.user.username if origin_obj.user else ''
        return super(OriginAdmin, self).changeform_view(
            request, object_id, form_url, extra_context
        )

    def user_count(self, obj):
        # Number of DemoUsers registered through this origin.
        ggacUserRegisterCounts = DemoUser.objects.filter(origin=obj.id).values('origin').count()
        return ggacUserRegisterCounts
    user_count.short_description = '注册用户'

    def origin_url(self, obj):
        # NOTE(review): the host is hard-coded to localhost — presumably meant
        # to be replaced per environment; confirm before deploying.
        if obj.id is not None:
            return 'http://localhost/?origin=' + str(obj.id)
        else:
            return '(保存后可见)'
    origin_url.short_description = '渠道链接'

    readonly_fields = ('origin_url', )
    fields = ('origin_name', 'user', 'origin_url')
    list_display = ('origin_name', 'user_count', 'user')
    search_fields = ('user__username', 'user__mobile')
    change_form_template = loader.get_template('web/admin/change_form_origin.html')
@admin.register(DemoUser)
class DemoUserAdmin(admin.ModelAdmin):
    """Admin for demo users, filterable by referral origin and staff flags."""

    list_display = ('username', 'origin', 'is_active',)
    list_filter = ('origin__origin_name', 'groups', 'is_staff', 'is_active')
    filter_horizontal = ('groups', 'user_permissions',)
    fieldsets = (
        (None, {'fields': (
            'username', 'password', 'email',)}),
        # (_('Personal info'), {'fields': ('first_name', 'last_name', 'email')}),
        (_('Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser',
                                       'groups', 'user_permissions')}),
        (_('Important dates'), {'fields': ('last_login', 'date_joined')}),
    )
| StarcoderdataPython |
1750657 | # input for cross-testing of test LAR files with a Python environment
from larlib import *
import networkx as nx
# Load the 1-based vertex list V and edge list EV from the CSV fixtures.
lines = tuple(open("test/csv/test1.V", 'r'))
V = [list(eval(line)) for line in lines]  # NOTE(review): eval on file data — trusted fixtures only
lines = tuple(open("test/csv/test1.EV", 'r'))
EV = [list(eval(line)) for line in lines]
# Show the raw 1-complex (edge indices shifted to 0-based for MKPOLS).
VIEW(STRUCT(MKPOLS((V,[[u-1,v-1] for u,v in EV]))))
# Build a graph and keep biconnected components with more than one edge.
G = nx.Graph()
G.add_nodes_from(range(1,len(V)))
G.add_edges_from(EV)
bcc = [sub.edges() for sub in nx.biconnected_component_subgraphs(G, copy=True)
       if len(sub.edges())>1]
VIEW(MKPOL([V,CAT(bcc),1]))
VIEW(EXPLODE(1.2,1.2,1)(MKPOLS((V,[[u-1,v-1] for u,v in CAT(bcc) ]))))
| StarcoderdataPython |
5036077 | import mmap
import argparse
import logging
import os
from os import path
import random
import string
import time
# Module-level logging and CLI configuration; ``args`` is consumed by main().
FORMAT = '%(asctime)-15s %(message)s'
logging.basicConfig(format=FORMAT, level=logging.INFO)
parser = argparse.ArgumentParser(description="Parse Search query log")
parser.add_argument('-n', type=int, default=1, help='file count')
parser.add_argument('-s', type=int, default=100, help='file size in mb')
parser.add_argument('--loop', type=int, default=1000, help='loop count')
parser.add_argument('--dir', type=str, default="/home/feng/mmap_tmp", help='test dir')
args = parser.parse_args()
def rand_chars(n):
    """Return a string of one randomly picked ASCII letter repeated n times
    (a cheap filler payload for the write benchmark)."""
    letters = string.ascii_letters
    idx = int(random.random() * len(letters))
    return letters[idx] * n
def get_stats(times):
    """Format latency percentiles (50/80/95/99), average and max of *times*
    (milliseconds) into a one-line report string."""
    ordered = sorted(times)

    def pct(p):
        # Index-based percentile on the sorted sample.
        return ordered[int(len(ordered) * p)]

    avg = sum(ordered) / len(ordered)
    return "50%%=%.4fms, avg=%.4fms 80%%=%.4fms, 95%%=%.4fms, 99%%=%.4fms, max=%.4fms" % (
        pct(0.50), avg, pct(0.80), pct(0.95), pct(0.99), ordered[-1])
def main():
    """Create, mmap, and randomly read/write ``args.n`` files of ``args.s`` MB.

    Measures mmap() setup latency per file, then performs ``args.loop``
    passes of random 16 KB accesses (~95% reads, ~5% writes) over every map.
    """
    if not path.exists(args.dir):
        logging.info('create dir: %s', args.dir)
        os.makedirs(args.dir)
    read_size = 1024 * 16  # read/write 16k a time
    times, mmaps, size = [], [], args.s * 1024 * 1024  # file size in M
    for i in range(args.n):  # make sure file is the right size
        # BUG FIX: the original left these file objects open (fd leak);
        # truncation only needs the handle briefly.
        with open('%s/file-%s' % (args.dir, i), 'a+b') as f:
            os.ftruncate(f.fileno(), size)
    for i in range(args.n):  # mmap, record the time
        start = time.time()
        f = open('%s/file-%s' % (args.dir, i), 'a+b')
        mmaps.append(mmap.mmap(f.fileno(), 0))
        times.append((time.time() - start) * 1000)  # in ms
    logging.info('mmap %d files, each %dM: %s', args.n, args.s, get_stats(times))
    # BUG FIX: was xrange, a NameError on Python 3 (the rest of the file
    # already uses range).
    for _ in range(args.loop):
        for m in mmaps:
            loc = int(random.random() * size) - read_size
            if loc < 0:
                loc = 0
            if loc % 20:  # ~95% read
                _ = len(m[loc:loc + read_size])
            else:  # ~5% write
                # BUG FIX: mmap slice assignment requires bytes on Python 3;
                # .encode('ascii') is a no-op byte-wise on Python 2 str.
                m[loc:loc + read_size] = rand_chars(read_size).encode('ascii')


if __name__ == "__main__":
    main()
386773 | # PROJECT : django-easy-validator
# TIME : 18-1-2 上午9:44
# AUTHOR : <NAME>
# EMAIL : <EMAIL>
# CELL : 13811754531
# WECHAT : 13811754531
# https://github.com/youngershen/
from io import BytesIO
from django.test import TestCase
from django.core.files.uploadedfile import InMemoryUploadedFile
from validator import Validator, BaseRule
class AlphaNumber(Validator):
    # 'code' must contain only letters and digits (rule: alpha_number).
    code = 'alpha_number'
    # Per-field, per-rule error message; {VALUE} is interpolated by the library.
    message = {
        'code': {
            'alpha_number': '{VALUE} is not a alpha number type series.'
        }
    }
class Array(Validator):
    # 'ids' must be a comma-separated series (rule: array).
    ids = 'array'
    # Error message rendered when the 'array' rule fails for 'ids'.
    message = {
        'ids': {
            'array': '{VALUE} is not a array type series.'
        }
    }
class Between(Validator):
    # 'age' must fall within the inclusive-looking range 10..20 —
    # exact bound semantics are defined by the library's 'between' rule.
    age = 'between:10,20'
    message = {
        'age': {
            'between': '{VALUE} is not between 10 to 20'
        }
    }
class Boolean(Validator):
    # 'remember_me' must parse as a boolean-like value (rule: boolean).
    remember_me = 'boolean'
    message = {
        'remember_me': {
            'boolean': '{VALUE} is not a boolean type value.'
        }
    }
class TestRule(BaseRule):
    """Minimal custom rule for the extra_rules hook: passes only for 'test'."""
    name = 'test_rule'
    message = 'test custom rule failed'
    description = 'just for custom rule test'

    def check_value(self):
        # The comparison already yields a bool — the original's
        # 'True if ... else False' ternary was redundant.
        self.status = self.field_value == 'test'

    def check_null(self):
        # Null values are deliberately ignored by this rule.
        pass
class TestRuleValidator(Validator):
    # Applies the custom 'test_rule' (registered via extra_rules) to 'name'.
    name = 'test_rule'
class Required(Validator):
    # 'username' must be present and non-empty (rule: required).
    username = 'required'
    message = {
        'username': {
            'required': 'username is required'
        }
    }
class Accepted(Validator):
    # 'remember' must be one of the library's default accepted tokens
    # ({FLAGS} is interpolated with that list in the message).
    remember = 'accepted'
    message = {
        'remember': {
            'accepted': 'input of {VALUE} is not accepted in {FLAGS}'
        }
    }
class AcceptedCustom(Validator):
remember = 'accepted:shi,fou'
message = {
'remember': {
'accepted': 'you just input {VALUE}'
}
}
class Date(Validator):
birthday = 'date'
message = {
'birthday': {
'date': 'date format is invalid'
}
}
class DateCustom(Validator):
birthday = 'date:%Y'
message = {
'birthday': {
'date': 'date format is not ok'
}
}
class DateBefore(Validator):
expired_at = 'date_before:1990-12-12'
message = {
'expired_at': {
'date_before': 'date is not before 1990-12-12'
}
}
class DateBeforeCustom(Validator):
expired_at = 'date_before:1990,%Y,%Y'
message = {
'expired_at': {
'date_before': 'date is not before 1990'
}
}
class DateAfter(Validator):
due_at = 'date_after:1990-12-12'
message = {
'due_at': {
'date_after': 'date is not after 1990-12-12'
}
}
class DateAfterCustom(Validator):
due_at = 'date_after:1990,%Y,%Y'
message = {
'due_at': {
'date_after': 'date is not after 1990'
}
}
class DateRange(Validator):
period = 'date_range:1990-12-12, 1991-12-12'
message = {
'period': {
'date_range': 'date is not in range of {BEGIN} to {END}'
}
}
class Datetime(Validator):
now = 'datetime'
message = {
'now': {
'datetime': 'it is not a datetime format string'
}
}
class DatetimeBefore(Validator):
due_at = 'datetime_before:1990-12-12 15:31:10'
message = {
'due_at': {
'datetime_before': 'the input is not before {DATETIME}'
}
}
class DatetimeAfter(Validator):
after_at = 'datetime_after:1990-12-12 15:31:10'
message = {
'after_at': {
'datetime_after': 'the input is not after {DATETIME}'
}
}
class DatetimeRange(Validator):
range_at = 'datetime_range:1990-12-12 15:31:10,1991-12-12 15:31:10'
message = {
'range_at': {
'datetime_range': 'the input is not after {BEGIN} to {END}'
}
}
class ActiveUrl(Validator):
url = 'active_url'
message = {
'url': {
'active_url': 'it is not a active url'
}
}
class Numberic(Validator):
number = 'numberic'
message = {
'number': {
'numberic': '{VALUE} of number is not numberic'
}
}
class Digits(Validator):
card = 'digits'
message = {
'card': {
'digits': '{VALUE} of card is not digits'
}
}
class Regex(Validator):
    # parse_args=False: the rule string after 'regex:' is taken verbatim,
    # so the comma/brace characters in the pattern are not split as args.
    parse_args = False
    # 'identity' must match 3-5 lowercase alphanumerics.
    identity = 'regex:[0-9a-z]{3,5}'
    message = {
        'identity': {
            'regex': '{VALUE} of identity is not match the pattern {REGEX}'
        }
    }
class Email(Validator):
email = 'email'
message = {
'email': {
'email': '{VALUE} is not an email address'
}
}
class MinLength(Validator):
username = 'min_length:4'
message = {
'username': {
'min_length': '{VALUE} of username is shotter than 4'
}
}
class MaxLength(Validator):
username = 'max_length:7'
message = {
'username': {
'max_length': '{VALUE} of username is longger than 7'
}
}
class IDS(Validator):
ids = 'ids'
message = {
'ids': {
'ids': '{VALUE} of ids is not a id series'
}
}
class Cellphone(Validator):
cellphone = 'cellphone'
message = {
'cellphone': {
'cellphone': '{VALUE} is not a cellphone number'
}
}
class Alphabet(Validator):
alphabet = 'alphabet'
message = {
'alphabet': {
'alphabet': '{VALUE} of alphabet is not alphabet'
}
}
class Switch(Validator):
accepted = 'switch:ok,good,awesome'
message = {
'accepted': {
'switch': '{VALUE} of accepted is not in [{SWITCH}]'
}
}
class Unique(Validator):
user_id = 'unique:AUTH_USER_MODEL,id'
message = {
'user_id': {
'unique': '{VALUE} of {MODEL} with id is not unique'
}
}
class Size(Validator):
    # 'size' rule takes a kind (string/number/array/file) and an exact size:
    # string -> character count, number -> numeric value, array -> element
    # count, file -> size in KB (here 13.903 KB, matching the test fixture).
    username = 'size:string,5'
    number = 'size:number,5'
    profile = 'size:array,2'
    avatar = 'size:file,13.903'
    message = {
        'username': {
            'size': 'size of username is not equals to 5'
        },
        'number': {
            'size': 'size of number is not equals to 5'
        },
        'profile': {
            'size': 'size of profile is not equals to 2'
        },
        'avatar': {
            'size': 'size of avatar is not equals to 13.903KB'
        }
    }
class Min(Validator):
age = 'min:number,15'
message = {
'age': {
'min': 'sorry we do not support service to people who is under 15.'
}
}
class Max(Validator):
age = 'max:number,50'
message = {
'age': {
'max': 'sorry we do not support service to people who is older than 50.'
}
}
class File(Validator):
file = 'file:png,jpeg,zip,rar'
message = {
'file': {
'file': 'file is not allowed to upload'
}
}
class AlphaDash(Validator):
username = 'alpha_dash'
message = {
'username': {
'alpha_dash': 'username should only includes alphabet and dash characters.'
}
}
class Username(Validator):
username = 'username'
message = {
'username': {
'username': 'the input {VALUE} is not a proper username.'
}
}
class PasswordLow(Validator):
password = '<PASSWORD>'
message = {
'password': {
'password': 'the input is not a proper password.'
}
}
class PasswordMiddle(Validator):
password = '<PASSWORD>'
message = {
'password': {
'password': 'the input is not a proper password.'
}
}
class PasswordHigh(Validator):
password = '<PASSWORD>'
message = {
'password': {
'password': 'the input is not a proper password.'
}
}
class ASCII(Validator):
seq = 'ascii'
class Same(Validator):
password = '<PASSWORD>'
password_confirm = '<PASSWORD>'
class Decimal(Validator):
price = 'required|decimal'
class Exist(Validator):
uid = 'required|exist:AUTH_USER_MODEL,id'
class UniqueAgainst(Validator):
username = 'required|unique_against:AUTH_USER_MODEL, username, youngershen'
class PrintableASCII(Validator):
    # 'username' must consist of printable ASCII characters (rule: pascii).
    username = 'pascii'
    # The message is a runtime string and intentionally kept in Chinese
    # ("username must not be blank"); tests assert on the exact bytes.
    message = {
        'username': {
            'pascii': '用户名不能为空'
        }
    }
class PrintableASCIINoBlank(Validator):
username = 'pascii:true'
message = {
'username': {
'pascii': '用户名不能为空'
}
}
class Unblank(Validator):
msg = 'unblank'
message = {
'msg': {
'unblank': 'msg is not be able to be blank'
}
}
class Integer(Validator):
age = 'integer'
message = {
'age': {
'integer': 'this it not a integer'
}
}
class PosInteger(Validator):
age = 'pos_integer'
message = {
'age': {
'pos_integer': 'this it not a pos integer'
}
}
class NegInteger(Validator):
neg = 'neg_integer'
message = {
'neg': {
'neg_integer': 'this is not a neg integer'
}
}
class Percentage(Validator):
discount = 'percentage'
message = {
'discount': {
'percentage': 'this is not a precentage value'
}
}
class IPAddress(Validator):
ip = 'ip_address'
message = {
'ip': {
'ip_address': 'this is not an ip address'
}
}
# ======================================================================================================================
class IPAddressTestCase(TestCase):
    """The 'ip_address' rule: accepts IPv4 and IPv6 literals, rejects other text."""

    def setUp(self) -> None:
        self.validator = IPAddress
        self.valid_data = {
            'ip': '127.0.0.1'
        }
        self.valid_data2 = {
            'ip': '2001:0db8:85a3:0000:0000:8a2e:0370:7334'
        }
        self.invalid_data = {
            'ip': '-10'
        }

    def test_valid(self):
        # FIX: the original called validate() twice per case; once is enough.
        validator = self.validator(self.valid_data)
        self.assertTrue(validator.validate())
        validator = self.validator(self.valid_data2)
        self.assertTrue(validator.validate())

    def test_invalid(self):
        validator = self.validator(self.invalid_data)
        self.assertFalse(validator.validate())
class PercentageTestCase(TestCase):
    """The 'percentage' rule: non-negative percentage values pass, negatives fail."""

    def setUp(self) -> None:
        self.validator = Percentage
        self.valid_data = {
            'discount': '10'
        }
        self.invalid_data = {
            'discount': '-10'
        }

    def test_valid(self):
        # FIX: the original called validate() twice; a single call suffices.
        validator = self.validator(self.valid_data)
        self.assertTrue(validator.validate())

    def test_invalid(self):
        validator = self.validator(self.invalid_data)
        self.assertFalse(validator.validate())
class NegIntegerTestCase(TestCase):
def setUp(self) -> None:
self.validator = NegInteger
self.valid_data = {
'neg': '-1'
}
self.valid_data2 = {
'neg': -2
}
self.invalid_data = {
'neg': '2'
}
self.invalid_data2 = {
'neg': 2
}
def test_valid(self):
validator = self.validator(self.valid_data)
validator.validate()
self.assertTrue(validator.validate())
validator = self.validator(self.valid_data2)
validator.validate()
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(self.invalid_data)
validator.validate()
self.assertFalse(validator.validate())
validator = self.validator(self.invalid_data2)
validator.validate()
self.assertFalse(validator.validate())
class PosIntegerTestCase(TestCase):
def setUp(self) -> None:
self.validator = PosInteger
self.valid_data = {
'age': 32
}
self.valid_data2 = {
'age': '+32'
}
self.invalid_data = {
'age': '-32'
}
self.invalid_data2 = {
'age': '-1'
}
def test_valid(self):
validator = self.validator(self.valid_data)
validator.validate()
self.assertTrue(validator.validate())
validator = self.validator(self.valid_data2)
validator.validate()
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(self.invalid_data)
validator.validate()
self.assertFalse(validator.validate())
validator = self.validator(self.invalid_data2)
validator.validate()
self.assertFalse(validator.validate())
class IntegerTestCase(TestCase):
    """The 'integer' rule: integer strings pass, non-numeric strings fail.

    An empty string also passes: 'integer' is not 'required', so an absent
    value is skipped by the rule.
    """

    def setUp(self):
        self.validator = Integer
        self.valid_data = {
            'age': '10'
        }
        self.valid_data2 = {
            'age': ''
        }
        self.invalid_data = {
            'age': 'aa'
        }
        self.invalid_data2 = {
            'age': '-b'
        }

    def test_valid(self):
        # FIX: the original called validate() twice per case; once is enough.
        validator = self.validator(self.valid_data)
        self.assertTrue(validator.validate())
        validator = self.validator(self.valid_data2)
        self.assertTrue(validator.validate())

    def test_invalid(self):
        validator = self.validator(self.invalid_data)
        self.assertFalse(validator.validate())
        validator = self.validator(self.invalid_data2)
        self.assertFalse(validator.validate())
class UnblankTestCase(TestCase):
    # Exercises the 'unblank' rule: whitespace-only input must fail, while
    # an entirely absent value appears to pass.
    def setUp(self):
        self.validator = Unblank
        self.valid_data = {
            'msg': 'hello'
        }
        self.invalid_data = {
            'msg': ''
        }
        self.invalid_data2 = {
            'msg': '\r\n\r\n'
        }
        self.invalid_data3 = {
            'msg2': 'test'  # 'msg' key missing entirely
        }

    def test_valid(self):
        validator = self.validator(self.valid_data)
        validator.validate()
        self.assertTrue(validator.validate())

    def test_invalid(self):
        # NOTE(review): asserts True for the empty-string case — presumably
        # 'unblank' skips empty/absent values (it is not 'required'); confirm
        # against the library's null handling.
        validator = self.validator(self.invalid_data)
        validator.validate()
        self.assertTrue(validator.get_status())
        # Whitespace-only content is the case 'unblank' rejects.
        validator = self.validator(self.invalid_data2)
        validator.validate()
        self.assertFalse(validator.get_status())
        # A missing field passes as well.
        validator = self.validator(self.invalid_data3)
        validator.validate()
        self.assertTrue(validator.get_status())
class RequiredTestCase(TestCase):
    """The 'required' rule: empty input fails with the configured message."""

    def setUp(self):
        self.validator = Required
        self.valid_data = {'username': 'test'}
        self.invalid_data = {'username': ''}
        self.message = {'username': {'required': 'username is required'}}

    def test_valid(self):
        checker = self.validator(self.valid_data)
        self.assertTrue(checker.validate())

    def test_invalid(self):
        checker = self.validator(self.invalid_data)
        self.assertFalse(checker.validate())
        self.assertDictEqual(checker.get_message(), self.message)
message = validator.get_message()
self.assertDictEqual(message, self.message)
class AcceptedTestCase(TestCase):
def setUp(self):
self.validator = Accepted
self.valid_data = {
'remember': 'yes'
}
self.invalid_data = {
'remember': 'none'
}
self.message = {
'remember': {
'accepted': 'input of none is not accepted in yes, no, true, false, 0, 1'
}
}
def test_valid(self):
validator = self.validator(self.valid_data)
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(self.invalid_data)
self.assertFalse(validator.validate())
message = validator.get_message()
self.assertDictEqual(message, self.message)
class AcceptedCustomTestCase(TestCase):
def setUp(self):
self.validator = AcceptedCustom
self.valid_data = {
'remember': 'shi'
}
self.invalid_data = {
'remember': 'bushi'
}
self.message = {
'remember': {
'accepted': 'you just input bushi'
}
}
def test_valid(self):
validator = self.validator(self.valid_data)
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(self.invalid_data)
self.assertFalse(validator.validate())
self.assertDictEqual(validator.get_message(), self.message)
class DateTestCase(TestCase):
    """The 'date' rule with its default format (e.g. 1990-12-12)."""

    def setUp(self):
        self.validator = Date
        self.valid_data = {
            'birthday': '1990-12-12'
        }
        self.invalid_data = {
            'birthday': 'not a date'
        }
        self.message = {
            'birthday': {
                'date': 'date format is invalid'
            }
        }

    def test_valid(self):
        # BUG FIX: was misspelled 'test_vald', so the unittest runner never
        # collected or ran this test.
        validator = self.validator(self.valid_data)
        self.assertTrue(validator.validate())

    def test_invalid(self):
        validator = self.validator(self.invalid_data)
        self.assertFalse(validator.validate())
        self.assertDictEqual(validator.get_message(), self.message)
class DateCustomTestCase(TestCase):
def setUp(self):
self.validator = DateCustom
self.valid_data = {
'birthday': '1990'
}
self.invalid_data = {
'birthday': 'not a date'
}
self.message = {
'birthday': {
'date': 'date format is not ok'
}
}
def test_valid(self):
validator = self.validator(self.valid_data)
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(self.invalid_data)
self.assertFalse(validator.validate())
self.assertDictEqual(validator.get_message(), self.message)
class DateBeforeTestCase(TestCase):
def setUp(self):
self.validator = DateBefore
self.valid_data = {
'expired_at': '1982-11-30'
}
self.invalid_data = {
'expired_at': '1991-04-25'
}
self.message = {
'expired_at': {
'date_before': 'date is not before 1990-12-12'
}
}
def test_valid(self):
validator = self.validator(self.valid_data)
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(self.invalid_data)
self.assertFalse(validator.validate())
self.assertDictEqual(self.message, validator.get_message())
class DateBeforeCustomTestCase(TestCase):
def setUp(self):
self.validator = DateBeforeCustom
self.valid_data = {
'expired_at': '1989'
}
self.invalid_data = {
'expired_at': '1991'
}
self.message = {
'expired_at': {
'date_before': 'date is not before 1990'
}
}
def test_valid(self):
validator = self.validator(self.valid_data)
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(self.invalid_data)
self.assertFalse(validator.validate())
self.assertDictEqual(self.message, validator.get_message())
class DateAfterTestCase(TestCase):
def setUp(self):
self.validator = DateAfter
self.valid_data = {
'due_at': '1991-04-25'
}
self.invalid_data = {
'due_at': '1982-11-30'
}
self.message = {
'due_at': {
'date_after': 'date is not after 1990-12-12'
}
}
def test_valid(self):
validator = self.validator(self.valid_data)
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(self.invalid_data)
self.assertFalse(validator.validate())
self.assertDictEqual(validator.get_message(), self.message)
class DateAfterCustomTestCase(TestCase):
def setUp(self):
self.validator = DateAfterCustom
self.valid_data = {
'due_at': '1991'
}
self.invalid_data = {
'due_at': '1989'
}
self.message = {
'due_at': {
'date_after': 'date is not after 1990'
}
}
def test_valid(self):
validator = self.validator(self.valid_data)
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(self.invalid_data)
self.assertFalse(validator.validate())
self.assertDictEqual(validator.get_message(), self.message)
class DateRangeTestCase(TestCase):
def setUp(self):
self.validator = DateRange
self.valid_data = {
'period': '1991-01-01'
}
self.invalid_data = {
'period': '1992-12-12'
}
self.message = {
'period': {
'date_range': 'date is not in range of 1990-12-12 to 1991-12-12'
}
}
def test_valid(self):
validator = self.validator(self.valid_data)
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(self.invalid_data)
self.assertFalse(validator.validate())
message = validator.get_message()
self.assertDictEqual(self.message, message)
class DatetimeTestCase(TestCase):
def setUp(self):
self.validator = Datetime
self.valid_data = {
'now': '1987-10-5 12:55:01'
}
self.invalid_data = {
'now': 'not a datetime string'
}
self.message = {
'now': {
'datetime': 'it is not a datetime format string'
}
}
def test_valid(self):
validator = self.validator(self.valid_data)
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(self.invalid_data)
self.assertFalse(validator.validate())
message = validator.get_message()
self.assertDictEqual(message, self.message)
class DatetimeBeforeTestCase(TestCase):
def setUp(self):
self.validator = DatetimeBefore
self.valid_data = {
'due_at': '1989-11-11 12:12:00'
}
self.invalid_data = {
'due_at': '2018-06-01 12:55:01'
}
self.message = {
'due_at': {
'datetime_before': 'the input is not before 1990-12-12 15:31:10'
}
}
def test_valid(self):
validator = self.validator(self.valid_data)
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(self.invalid_data)
self.assertFalse(validator.validate())
message = validator.get_message()
self.assertDictEqual(message, self.message)
class DatetimeAfterTestCase(TestCase):
def setUp(self):
self.validator = DatetimeAfter
self.valid_data = {
'after_at': '2011-11-11 12:12:00'
}
self.invalid_data = {
'after_at': '1955-11-11 12:12:00'
}
self.message = {
'after_at': {
'datetime_after': 'the input is not after 1990-12-12 15:31:10'
}
}
def test_valid(self):
validator = self.validator(self.valid_data)
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(self.invalid_data)
self.assertFalse(validator.validate())
message = validator.get_message()
self.assertDictEqual(message, self.message)
class DatetimeRangeTestCase(TestCase):
def setUp(self):
self.validator = DatetimeRange
self.valid_data = {
'range_at': '1991-01-12 15:31:10'
}
self.invalid_data = {
'range_at': '1988-01-12 15:31:10'
}
self.message = {
'range_at': {
'datetime_range': 'the input is not after 1990-12-12 15:31:10 to 1991-12-12 15:31:10'
}
}
def test_valid(self):
validator = self.validator(self.valid_data)
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(self.invalid_data)
self.assertFalse(validator.validate())
message = validator.get_message()
self.assertDictEqual(message, self.message)
class ActiveUrlTestCase(TestCase):
def setUp(self):
self.validator = ActiveUrl
self.valid_data = {
'url': 'baidu.com'
}
self.invalid_data = {
'url': 'www.sfsdf.sdffs'
}
self.message = {
'url': {
'active_url': 'it is not a active url'
}
}
def test_valid(self):
validator = self.validator(self.valid_data)
self.assertTrue(validator.validate())
class NumberciTestCase(TestCase):
    """The 'numberic' rule: numeric strings pass, alphabetic strings fail.

    (Class name keeps the original 'Numberci' spelling — it is the public
    identifier of this test case.)
    """

    def setUp(self):
        self.validator = Numberic
        self.valid_data = {
            'number': '123'
        }
        self.invalid_data = {
            'number': 'abcdef'
        }
        self.message = {
            'number': {
                'numberic': 'abcdef of number is not numberic'
            }
        }

    def test_valid(self):
        validator = self.validator(self.valid_data)
        self.assertTrue(validator.validate())

    def test_invalid(self):
        # BUG FIX: was misspelled 'tst_invalid', so the failure path was
        # never exercised by the test runner.
        validator = self.validator(self.invalid_data)
        self.assertFalse(validator.validate())
        message = validator.get_message()
        self.assertDictEqual(message, self.message)
class DigitsTestCase(TestCase):
def setUp(self):
self.validator = Digits
self.valid_data = {
'card': '12345'
}
self.invalid_data = {
'card': 'abcdef'
}
self.message = {
'card': {
'digits': 'abcdef of card is not digits'
}
}
def test_valid(self):
validator = self.validator(self.valid_data)
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(self.invalid_data)
self.assertFalse(validator.validate())
message = validator.get_message()
self.assertDictEqual(message, self.message)
class RegexTestCase(TestCase):
def setUp(self):
self.validator = Regex
self.valid_data = {
'identity': 'ab12'
}
self.invalid_data = {
'identity': '1'
}
self.message = {
'identity': {
'regex': '1 of identity is not match the pattern [0-9a-z]{3,5}'
}
}
def test_valid(self):
validator = self.validator(self.valid_data)
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(self.invalid_data)
self.assertFalse(validator.validate())
message = validator.get_message()
self.assertDictEqual(message, self.message)
class EmailTestCase(TestCase):
def setUp(self):
self.validator = Email
self.valid_data = {
'email': '<EMAIL>'
}
self.invalid_data = {
'email': 'i am a little bear'
}
self.message = {
'email': {
'email': 'i am a little bear is not an email address'
}
}
def test_valid(self):
validator = self.validator(self.valid_data)
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(self.invalid_data)
self.assertFalse(validator.validate())
message = validator.get_message()
self.assertDictEqual(message, self.message)
class MinLengthTestCase(TestCase):
def setUp(self):
self.validator = MinLength
self.valid_data = {
'username': 'abacdef'
}
self.invalid_data = {
'username': 'a'
}
self.message = {
'username': {
'min_length': 'a of username is shotter than 4'
}
}
def test_valid(self):
validator = self.validator(self.valid_data)
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(self.invalid_data)
self.assertFalse(validator.validate())
message = validator.get_message()
self.assertDictEqual(message, self.message)
class MaxLengthTestCase(TestCase):
def setUp(self):
self.validator = MaxLength
self.valid_data = {
'username': 'abacde'
}
self.invalid_data = {
'username': 'abcdefgh'
}
self.message = {
'username': {
'max_length': 'abcdefgh of username is longger than 7'
}
}
def test_valid(self):
validator = self.validator(self.valid_data)
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(self.invalid_data)
self.assertFalse(validator.validate())
message = validator.get_message()
self.assertDictEqual(message, self.message)
class IDSTestCase(TestCase):
def setUp(self):
self.validator = IDS
self.valid_data = {
'ids': '1,2,3,4'
}
self.invalid_data = {
'ids': 'a,b,c,d'
}
self.message = {
'ids': {
'ids': 'a,b,c,d of ids is not a id series'
}
}
def test_valid(self):
validator = IDS(self.valid_data)
self.assertTrue(validator.validate())
def test_invalid(self):
validator = IDS(self.invalid_data)
self.assertFalse(validator.validate())
message = validator.get_message()
self.assertDictEqual(message, self.message)
class CellphoneTestCase(TestCase):
    """The 'cellphone' rule: a Chinese 11-digit mobile number passes."""

    def setUp(self):
        self.validator = Cellphone
        self.valid_data = {
            'cellphone': '13811754531'
        }
        # BUG FIX: this was a *set* literal ({'123...'}), not a dict, so the
        # validator received no 'cellphone' field at all.
        self.invalid_data = {
            'cellphone': '123456789123456789'
        }
        self.message = {
            'cellphone': {
                'cellphone': '123456789123456789 is not a cellphone number'
            }
        }

    def test_valid(self):
        validator = self.validator(self.valid_data)
        self.assertTrue(validator.validate())

    def test_invalid(self):
        # BUG FIX: was misspelled 'tst_invalid' — never collected by the
        # runner, which hid the broken invalid_data above.
        validator = self.validator(self.invalid_data)
        self.assertFalse(validator.validate())
        message = validator.get_message()
        self.assertDictEqual(message, self.message)
class AlphabetTestCase(TestCase):
def setUp(self):
self.validator = Alphabet
self.valid_data = {
'alphabet': 'abcdef'
}
self.invalid_data = {
'alphabet': '123456'
}
self.message = {
'alphabet': {
'alphabet': '123456 of alphabet is not alphabet'
}
}
def test_valid(self):
validator = self.validator(self.valid_data)
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(self.invalid_data)
self.assertFalse(validator.validate())
message = validator.get_message()
self.assertDictEqual(message, self.message)
class SwitchTestCase(TestCase):
def setUp(self):
self.validator = Switch
self.valid_data = {
'accepted': 'ok'
}
self.invalid_data = {
'accepted': 'bad'
}
self.message = {
'accepted': {
'switch': 'bad of accepted is not in [ok,good,awesome]'
}
}
def test_valid(self):
validator = self.validator(self.valid_data)
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(self.invalid_data)
self.assertFalse(validator.validate())
message = validator.get_message()
self.assertDictEqual(message, self.message)
class UniqueTestCase(TestCase):
def setUp(self):
from django.contrib.auth.models import User
User.objects.create_user('test', 'test')
self.validator = Unique
self.valid_data = {
'user_id': '2'
}
self.invalid_data = {
'user_id': '1'
}
self.message = {
'user_id': {
'unique': '1 of AUTH_USER_MODEL with id is not unique'
}
}
def test_valid(self):
validator = self.validator(self.valid_data)
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(self.invalid_data)
self.assertFalse(validator.validate())
message = validator.get_message()
self.assertDictEqual(message, self.message)
class SizeTestCase(TestCase):
def setUp(self):
self.avatar = self.get_avatar()
self.validator = Size
self.valid_data = {
'username': 'abcde',
'number': '5',
'profile': 'age,12',
'avatar': self.avatar
}
self.invalid_data = {
'username': '',
'number': '',
'profile': '',
'avatar': ''
}
def test_valid(self):
validator = self.validator(self.valid_data)
self.assertTrue(validator.validate())
def get_avatar(self):
buffer = BytesIO()
self.get_temp_file(buffer)
avatar = InMemoryUploadedFile(
file=buffer,
field_name='avatar',
name='avatar',
size=len(buffer.getvalue()),
charset=None,
content_type='image/jpeg'
)
self.assertTrue(avatar.content_type)
return avatar
@staticmethod
def get_temp_file(buffer):
with open('tests/assets/linux.jpeg', mode='rb') as f:
buffer.write(f.read())
class MinTestCase(TestCase):
def setUp(self):
self.validator = Min
self.valid_data = {
'age': 20
}
self.invalid_data = {
'age': 10
}
self.message = {
'age': {
'min': 'sorry we do not support service to people who is under 15.'
}
}
def test_valid(self):
validator = self.validator(self.valid_data)
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(self.invalid_data)
self.assertFalse(validator.validate())
message = validator.get_message()
self.assertDictEqual(message, self.message)
class AlphaDashTestCase(TestCase):
def setUp(self):
self.validator = AlphaDash
self.valid_data = {
'username': 'abc_def'
}
self.invalid_data = {
'username': '#%#@'
}
self.message = {
'username': {
'alpha_dash': 'username should only includes alphabet and dash characters.'
}
}
def test_valid(self):
validator = self.validator(self.valid_data)
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(self.invalid_data)
self.assertFalse(validator.validate())
message = validator.get_message()
self.assertDictEqual(message, self.message)
class MaxTestCase(TestCase):
def setUp(self):
self.validator = Max
self.valid_data = {
'age': 15
}
self.invalid_data = {
'age': 55
}
self.message = {
'age': {
'max': 'sorry we do not support service to people who is older than 50.'
}
}
def test_valid(self):
validator = self.validator(self.valid_data)
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(self.invalid_data)
self.assertFalse(validator.validate())
message = validator.get_message()
self.assertDictEqual(message, self.message)
class FileTestCase(TestCase):
def setUp(self):
self.validator = File
self.valid_data = {
'file': self.get_file()
}
self.invalid_data = {
'file': self.get_file('tgz')
}
self.message = {
'file': {
'file': 'file is not allowed to upload'
}
}
def test_valid(self):
validator = self.validator(self.valid_data)
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(self.invalid_data)
self.assertFalse(validator.validate())
message = validator.get_message()
self.assertDictEqual(message, self.message)
@staticmethod
def get_file(_type='jpeg'):
buffer = BytesIO()
with open('tests/assets/linux.' + _type, mode='rb') as f:
buffer.write(f.read())
file = InMemoryUploadedFile(
file=buffer,
field_name='file',
name='file.' + _type,
size=len(buffer.getvalue()),
charset=None,
content_type='image/jpeg'
)
return file
class CustomRuleTestCase(TestCase):
def setUp(self):
self.extra_rules = {
TestRule.get_name(): TestRule
}
self.validator = TestRuleValidator
self.message = {
'name': {
'test_rule': 'test custom rule failed'
}
}
self.valid_data = {
'name': 'test',
}
self.invalid_data = {
'name': 'toast'
}
def test_valid(self):
validator = self.validator(extra_rules=self.extra_rules, data=self.valid_data)
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(extra_rules=self.extra_rules, data=self.invalid_data)
self.assertFalse(validator.validate())
message = validator.get_message()
self.assertDictEqual(message, self.message)
class UsernameTestCase(TestCase):
    """The 'username' rule: lowercase letters/digits pass, mixed case fails."""

    def setUp(self):
        self.validator = Username
        self.valid_data = {'username': 'abc8848cba'}
        self.invalid_data = {'username': '123ABCdef'}
        self.message = {
            'username': {
                'username': 'the input 123ABCdef is not a proper username.'
            }
        }

    def test_valid(self):
        validator = self.validator(self.valid_data)
        self.assertTrue(validator.validate())

    def test_invalid(self):
        # (local variable was misspelled 'valiadtor' in the original)
        validator = self.validator(self.invalid_data)
        self.assertFalse(validator.validate())
        self.assertDictEqual(validator.get_message(), self.message)
class PasswordTestCase(TestCase):
def setUp(self):
self.validator1 = PasswordLow
self.validator2 = PasswordMiddle
self.validator3 = PasswordHigh
self.valid_data1 = {
'password': '<PASSWORD>'
}
self.valid_data2 = {
'password': '<PASSWORD>'
}
self.valid_data3 = {
'password': '<PASSWORD>!@#'
}
self.invalid_data1 = {
'password': '<PASSWORD>'
}
self.invalid_data2 = {
'password': '<PASSWORD>'
}
self.invalid_data3 = {
'password': '<PASSWORD>'
}
def test_low(self):
validator = self.validator1(self.valid_data1)
self.assertTrue(validator.validate())
validator = self.validator1(self.invalid_data1)
self.assertFalse(validator.validate())
def test_middle(self):
validator = self.validator2(self.valid_data2)
self.assertTrue(validator.validate())
validator = self.validator2(self.invalid_data2)
self.assertFalse(validator.validate())
def test_high(self):
validator = self.validator3(self.valid_data3)
self.assertTrue(validator.validate())
validator = self.validator3(self.invalid_data3)
self.assertFalse(validator.validate())
class ASCIITestCase(TestCase):
def setUp(self):
self.validator = ASCII
self.valid_data = {
'seq': 'a '
}
self.invalid_data = {
'seq': '你好世界'
}
self.message = {
'seq': {
'ascii': 'the input 你好世界 value is not a proper ASCII character.'
}
}
def test_valid(self):
validator = self.validator(self.valid_data)
self.assertTrue(validator.validate())
def test_invalid(self):
validator = self.validator(self.invalid_data)
self.assertFalse(validator.validate())
message = validator.get_message()
self.assertDictEqual(message, self.message)
class BooleanTestCase(TestCase):
    """Boolean validator: 'true' passes, arbitrary text is rejected."""

    def setUp(self):
        self.validator = Boolean
        self.valid_data = {'remember_me': 'true'}
        self.invalid_data = {'remember_me': 'haha'}
        self.message = {
            'remember_me': {'boolean': 'haha is not a boolean type value.'}
        }

    def test_valid(self):
        self.assertTrue(self.validator(self.valid_data).validate())

    def test_invalid(self):
        v = self.validator(self.invalid_data)
        self.assertFalse(v.validate())
        self.assertDictEqual(v.get_message(), self.message)
class BetweenTestCase(TestCase):
    """Between validator: 15 lies in the 10-20 range, 25 does not."""

    def setUp(self):
        self.validator = Between
        self.valid_data = {'age': 15}
        self.invalid_data = {'age': 25}
        self.message = {
            'age': {'between': '25 is not between 10 to 20'}
        }

    def test_valid(self):
        self.assertTrue(self.validator(self.valid_data).validate())

    def test_invalid(self):
        v = self.validator(self.invalid_data)
        self.assertFalse(v.validate())
        self.assertDictEqual(v.get_message(), self.message)
class ArrayTestCase(TestCase):
    """Array validator: a comma-separated sequence passes, plain text fails."""

    def setUp(self):
        self.validator = Array
        self.valid_data = {'ids': '1, 2, 3, 4'}
        self.invalid_data = {'ids': 'abcdef'}
        self.message = {
            'ids': {'array': 'abcdef is not a array type series.'}
        }

    def test_valid(self):
        self.assertTrue(self.validator(self.valid_data).validate())

    def test_invalid(self):
        v = self.validator(self.invalid_data)
        self.assertFalse(v.validate())
        self.assertDictEqual(v.get_message(), self.message)
class AlphaNumberTest(TestCase):
    """AlphaNumber validator: ASCII letters/digits pass, CJK text fails."""

    def setUp(self):
        self.validator = AlphaNumber
        self.valid_data = {'code': 'abc123'}
        self.invalid_data = {'code': '密码'}
        self.message = {
            'code': {'alpha_number': '密码 is not a alpha number type series.'}
        }

    def test_valid(self):
        self.assertTrue(self.validator(self.valid_data).validate())

    def test_invalid(self):
        v = self.validator(self.invalid_data)
        self.assertFalse(v.validate())
        self.assertDictEqual(v.get_message(), self.message)
class SameTestCase(TestCase):
    """Same validator: a matching field pair validates."""

    def setUp(self):
        self.validator = Same
        self.valid_data = {
            'password': '<PASSWORD>',
            'password_confirm': '<PASSWORD>'
        }
        # NOTE(review): fixture defined but never exercised -- there is no
        # test_invalid in this case; consider adding one.
        self.invalid_data = {
            'password': '<PASSWORD>',
            'password_confirm': '<PASSWORD>'
        }

    def test_valid(self):
        self.assertTrue(self.validator(self.valid_data).validate())
class DecimalTestCase(TestCase):
    """Decimal validator: signed decimal strings and floats pass, text fails."""

    def setUp(self):
        self.validator = Decimal
        self.valid_data = {'price': '-123.456'}
        self.valid_data1 = {'price': '+123.456'}
        self.valid_data2 = {'price': 123.456}
        self.valid_data3 = {'price': -123.456}
        self.invalid_data = {'price': 'abcdef'}

    def test_valid(self):
        # Every valid fixture must pass, the textual one must fail.
        for data in (self.valid_data, self.valid_data1,
                     self.valid_data2, self.valid_data3):
            self.assertTrue(self.validator(data).validate())
        self.assertFalse(self.validator(self.invalid_data).validate())
class ExistTestCase(TestCase):
    """Exist validator: a stored user's pk validates, an unknown key fails."""

    def setUp(self):
        self.setup_users()
        self.validator = Exist
        self.valid_data = {'uid': '1'}
        self.invalid_data = {'uid': 'test'}

    def test_valid(self):
        self.assertTrue(self.validator(self.valid_data).validate())
        self.assertFalse(self.validator(self.invalid_data).validate())

    @staticmethod
    def setup_users():
        # Local import keeps Django model loading out of module import time.
        from django.contrib.auth.models import User
        User.objects.create_user(username='youngershen',
                                 email='<EMAIL>',
                                 password='<PASSWORD>')
class UniqueAgainstTestCase(TestCase):
    """UniqueAgainst validator behaviour against two pre-created users."""

    def setUp(self):
        self.setup_users()
        self.validator = UniqueAgainst
        self.valid_data = {'username': 'youngershen'}
        self.invalid_data = {'username': 'bear'}

    def test_valid(self):
        self.assertTrue(self.validator(self.valid_data).validate())
        self.assertFalse(self.validator(self.invalid_data).validate())

    @staticmethod
    def setup_users():
        # Local import keeps Django model loading out of module import time.
        from django.contrib.auth.models import User
        User.objects.create_user(username='youngershen',
                                 email='<EMAIL>',
                                 password='<PASSWORD>')
        User.objects.create_user(username='bear',
                                 email='<EMAIL>',
                                 password='<PASSWORD>')
class PrintableASCIITestCase(TestCase):
    """PrintableASCII validator: printable ASCII (including blanks) passes."""

    def setUp(self):
        self.validator = PrintableASCII
        self.valid_data = {'username': 'abcdef@123456'}
        self.invalid_data = {'username': chr(555)}  # non-ASCII code point
        self.valid_data_blank = {'username': ' '}
        self.message = {
            'username': {'pascii': '用户名不能为空'}
        }

    def test_valid(self):
        self.assertTrue(self.validator(self.valid_data).validate())
        self.assertTrue(self.validator(self.valid_data_blank).validate())

    def test_invalid(self):
        v = self.validator(self.invalid_data)
        self.assertFalse(v.validate())
        self.assertDictEqual(v.get_message(), self.message)
        # A lone blank is still printable ASCII, so it remains valid here.
        self.assertTrue(self.validator(self.valid_data_blank).validate())
class PrintableASCIINoBlankTestCase(TestCase):
    """PrintableASCIINoBlank validator: like PrintableASCII but blanks fail."""

    def setUp(self):
        self.validator = PrintableASCIINoBlank
        self.valid_data = {'username': 'abcdef@<PASSWORD>'}
        self.invalid_data = {'username': chr(555)}  # non-ASCII code point
        self.invalid_data_blank = {'username': ' '}
        self.message = {
            'username': {'pascii': '用户名不能为空'}
        }

    def test_valid(self):
        self.assertTrue(self.validator(self.valid_data).validate())

    def test_invalid(self):
        v = self.validator(self.invalid_data)
        self.assertFalse(v.validate())
        self.assertDictEqual(v.get_message(), self.message)
        # Blanks are rejected by the no-blank variant.
        v = self.validator(self.invalid_data_blank)
        self.assertFalse(v.validate())
        self.assertDictEqual(v.get_message(), self.message)
| StarcoderdataPython |
8115365 | from django.db import models
class Coordinate(models.Model):
    """A geographic point stored as a latitude/longitude pair."""

    latitude = models.FloatField()
    longitude = models.FloatField()

    def __str__(self):
        # Render as "[lat, lon]" for admin/list displays.
        return "[{}, {}]".format(self.latitude, self.longitude)
| StarcoderdataPython |
6633145 | <filename>tests/inlineasm/asmblbx.py
# test bl and bx instructions
# Inline Thumb assembly: net effect is r0 + 2 - 1, i.e. return r0 + 1.
@micropython.asm_thumb
def f(r0):
    # jump over the internal functions
    b(entry)
    label(func1)
    add(r0, 2)  # func1: r0 += 2, return to caller via bx lr
    bx(lr)
    label(func2)
    sub(r0, 1)  # func2: r0 -= 1
    bx(lr)
    label(entry)
    bl(func1)  # bl stores the return address in lr before branching
    bl(func2)
# f adds 2 then subtracts 1, so these print 1 and 2.
print(f(0))
print(f(1))
| StarcoderdataPython |
29043 | <reponame>neriphy/numeros_primos
#Evaludador de numero primo
#Created by @neriphy
def es_primo(numero):
    """Return True if *numero* is prime, using trial division from numero-1 down.

    Numbers below 2 (0, 1, negatives) are not prime; the original script
    wrongly reported 1 as prime because its loop never executed.
    """
    if numero < 2:
        return False
    divisor = numero - 1
    while divisor > 1:
        if numero % divisor == 0:
            # Found an exact divisor: not prime.
            return False
        print("Evaluando")
        divisor = divisor - 1
    return True


if __name__ == "__main__":
    # Bug fix: input() returns str in Python 3, so the original
    # "numero - 1" raised TypeError; convert to int explicitly.
    numero = int(input("Ingrese el numero a evaluar: "))
    if es_primo(numero):
        print(numero, "es un numero primo")
    else:
        print(numero, "no es un numero primo")
| StarcoderdataPython |
1845294 | <filename>environment.py
import logging
logging.basicConfig(level=logging.DEBUG)
def before_all(context):
    """Behave hook: stash the target mobile platform on the context.

    Reads the 'mobile_platform' key from behave's -D userdata, defaulting
    to 'ios' when the key is absent.
    """
    platform = context.config.userdata.get('mobile_platform', 'ios')
    context.mobile_platform = platform
| StarcoderdataPython |
1676910 | <reponame>DaveeFTW/infinity
from ecdsa.ellipticcurve import CurveFp, Point
import hashlib
from ecdsa.numbertheory import inverse_mod
from ecdsa import SigningKey
from ecdsa.curves import Curve
import psptool.kirk as kirk
from .common import expand_seed, prx_header, set_kirk_cmd_1
from Crypto.Util.strxor import strxor as xor
from Crypto.Hash import SHA1
class prx_header_9(object):
    """Byte-offset view over a type-9 PSP PRX header.

    The constructor re-packs the generic prx_header fields into a single
    contiguous buffer; the accessors below slice fixed offsets out of it.
    """
    def __init__(self, header):
        prx = prx_header(header)
        # Concatenate the fields in their on-disk order so that the fixed
        # slice offsets used by the accessors line up.
        self.header = prx.personalisation() + prx.btcnf_id() + prx.sha1_hash() + prx.kirk_aes_key() + prx.kirk_cmac_key() + \
            prx.kirk_cmac_header_hash() + prx.kirk_cmac_data_hash() + \
            prx.kirk_metadata() + prx.elf_info()
    def tag(self):
        # 4-byte tag at the start of the header.
        return self.header[:0x4]
    def btcnf_id(self):
        return self.header[0x5C:0x6C]
    def sha1_hash(self):
        return self.header[0x6C:0x80]
    def personalisation(self):
        return self.header[:0x5C]
    def prx_ecdsa(self):
        # ECDSA signature region inside the personalisation block.
        return self.header[0x34:0x5C]
    def kirk_aes_key(self):
        return self.header[0x80:0x90]
    def kirk_cmac_key(self):
        return self.header[0x90:0xA0]
    def kirk_cmac_header_hash(self):
        return self.header[0xA0:0xB0]
    def kirk_cmac_data_hash(self):
        return self.header[0xB0:0xC0]
    def kirk_metadata(self):
        return self.header[0xC0:0xD0]
    def kirk_block(self):
        # AES key + CMAC key + both CMAC hashes (0x80..0xC0) as one block.
        return self.header[0x80:0xC0]
    def elf_info(self):
        return self.header[0xD0:]
    def decrypt_header(self, key):
        # Decrypt the 0x60-byte region starting at btcnf_id in place via KIRK
        # command 7; the surrounding bytes are left untouched.
        self.header = self.header[:0x5C] + kirk.kirk7(
            self.header[0x5C:0x5C+0x60], key) + self.header[0x5C+0x60:]
def decrypt(prx, meta, **kwargs):
    """Decrypt a type-9 PRX image.

    prx  -- raw PRX bytes.
    meta -- dict with 'seed', 'key' and 'pubkey' entries for this tag.
    Returns the KIRK-1 decrypted payload, or False when a sanity check
    (zero-fill region or header SHA1) fails.
    """
    xorbuf = expand_seed(meta['seed'], meta['key'])
    # check if range contains nonzero
    if any(x != 0 for x in prx[0xD4:0xD4+0x30]):
        return False
    p = prx_header_9(prx)
    # NOTE(review): debug prints left in; consider removing for library use.
    print(meta['pubkey'])
    print(p.prx_ecdsa().hex())
    # check ECDSA signature
    # kirk.kirk11(bytes.fromhex(meta['pubkey']), p.prx_ecdsa(
    # ), prx[4:0x104] + b'\x00'*0x28 + prx[0x12C:])
    h2 = SHA1.new()
    h2.update(prx[4:0x104] + b'\x00'*0x28 + prx[0x12C:])
    print(h2.hexdigest())
    # decrypt the header information
    p.decrypt_header(meta['key'])
    # calculate SHA1 of header (fields hashed in on-disk order, with the
    # signature region replaced by 0x58 zero bytes)
    h = SHA1.new()
    h.update(p.tag())
    h.update(xorbuf[:0x10])
    h.update(b'\x00'*0x58)
    h.update(p.btcnf_id())
    h.update(p.kirk_aes_key())
    h.update(p.kirk_cmac_key())
    h.update(p.kirk_cmac_header_hash())
    h.update(p.kirk_cmac_data_hash())
    h.update(p.kirk_metadata())
    h.update(p.elf_info())
    # sanity check that our SHA1 actually matches
    if h.digest() != p.sha1_hash():
        return False
    # decrypt the kirk block: XOR-whiten, KIRK7-decrypt, XOR-unwhiten
    header = xor(p.kirk_block(), xorbuf[0x10:0x50])
    header = kirk.kirk7(header, meta['key'])
    header = xor(header, xorbuf[0x50:])
    # prepare the kirk block for the final KIRK command 1 decryption
    block = header + b'\x00'*0x30
    block = set_kirk_cmd_1(block)
    block = block + p.kirk_metadata() + b'\x00'*0x10 + \
        p.elf_info() + prx[0x150:]
    return kirk.kirk1(block)
| StarcoderdataPython |
12825911 | import os
import time
import math
import random
import numpy as np
import h5py
import matplotlib.pyplot as plt
import torch
import torch.backends.cudnn as cudnn
import torch.nn as nn
import torch.nn.functional as F
from torch import optim
from torch.autograd import Variable
from pytorch3d.io import save_ply, save_obj, load_objs_as_meshes, load_obj, load_ply
from pytorch3d.structures import Meshes
from pytorch3d.renderer import (
look_at_view_transform,
FoVPerspectiveCameras,
PointLights,
DirectionalLights,
Materials,
RasterizationSettings,
MeshRenderer,
MeshRasterizer,
SoftPhongShader,
TexturesUV,
Textures,
TexturesVertex
)
import cv2
import mcubes
from typing import List
from ..preprocessing.utils import shapenet_cam_params
from .ShapeNetRendering import ShapeNetRendering
from .utils import *
from .modelSVR import IM_SVR
class IM_SVR_DD(IM_SVR):
    """DeepDream-style extension of the single-view-reconstruction model IM_SVR.

    Gradient-ascends an input view towards the feature statistics of a target
    shape's encoder activations, periodically "annealing" by re-rendering the
    current reconstructed mesh and feeding the render back in as the new view.
    """
    def __init__(self, config):
        # Reuse the full IM_SVR setup; additionally keep the ShapeNet camera
        # parameter table used when re-rendering annealing views.
        super().__init__(config)
        self.shapenet_cam_params = shapenet_cam_params

    def load_data(self, config):
        '''
        Overrides the base class method to load only the two views needed for
        deep dreaming (the base model config.interpol_z1 and the target model
        config.interpol_z2), instead of the whole dataset.
        :param config: run configuration with interpol_z1/interpol_z2 set
        :return: None (populates self.data_pixels); exits on missing file
        '''
        # get config values
        z_base = int(config.interpol_z1)
        z_target = int(config.interpol_z2)
        self.crop_edge = self.view_size - self.crop_size
        data_hdf5_name = self.data_dir + '/' + self.dataset_load + '.hdf5'
        if os.path.exists(data_hdf5_name):
            data_dict = h5py.File(data_hdf5_name, 'r')
            offset_x = int(self.crop_edge / 2)
            offset_y = int(self.crop_edge / 2)
            # reshape to NCHW
            # get the shape of the first two cropped pictures
            cropped_shape = np.reshape(
                data_dict['pixels'][0:2, :, offset_y:offset_y + self.crop_size, offset_x:offset_x + self.crop_size],
                [-1, self.view_num, 1, self.crop_size, self.crop_size]).shape
            self.data_pixels = np.empty(shape=cropped_shape)
            # now grab only the data that is needed. This must be done iteratively or hdf5 can throw and error
            # (selection indices must be of increasing order only)
            for ind, z in enumerate([z_base, z_target]):
                self.data_pixels[ind, ...] = np.reshape(
                    data_dict['pixels'][z, :, offset_y:offset_y + self.crop_size, offset_x:offset_x + self.crop_size],
                    [self.view_num, 1, self.crop_size, self.crop_size])
        else:
            print("error: cannot load " + data_hdf5_name)
            exit(0)

    def get_activation(self, output_list):
        '''
        Build a forward-hook callback that stores a layer's output into
        output_list[0] on every forward pass.
        :param output_list: single-element list used as a mutable output slot
        :return: the hook function to register
        '''
        def hook(model, input, output):
            output_list[0] = output
        return hook

    def get_zvec(self, z_num):
        # Encode view self.test_idx of model z_num into a latent z vector
        # (numpy array). Prints a warning and returns None for out-of-range
        # indices.
        if z_num < len(self.data_pixels):
            batch_view = self.data_pixels[z_num:z_num + 1, self.test_idx].astype(np.float32) / 255.0
            batch_view = torch.from_numpy(batch_view)
            batch_view = batch_view.to(self.device)
            z_vec_, _ = self.im_network(batch_view, None, None, is_training=False)
            z_vec = z_vec_.detach().cpu().numpy()
            return (z_vec)
        else:
            print("z_num not a valid number")

    def interpolate_z(self, config):
        '''
        Create meshes from latent z vectors linearly interpolated between the
        encodings of models config.interpol_z1 and config.interpol_z2, writing
        one .ply file per interpolation step into config.interpol_directory.
        :param config: run configuration
        :return: None (side effect: .ply files on disk)
        '''
        # TODO: uncomment load data
        super().load_data(config=config)
        # TODO: load previous checkpoint
        self.load_checkpoint()
        z1 = int(config.interpol_z1)
        z2 = int(config.interpol_z2)
        interpol_steps = int(config.interpol_steps)
        result_base_directory = config.interpol_directory
        self.result_dir_name = 'interpol_' + str(z1) + '_' + str(z2)
        self.result_dir = result_base_directory + '/' + self.result_dir_name
        print(self.result_dir)
        # Create output directory
        if not os.path.isdir(self.result_dir):
            os.mkdir(self.result_dir)
            print('creating directory ' + self.result_dir)
        # get the z vectors via forward pass through encoder
        z1_vec = self.get_zvec(z1)
        print(z1_vec)
        z2_vec = self.get_zvec(z2)
        print(z2_vec)
        # compute linear interpolation between vectors
        fraction = np.linspace(0, 1, interpol_steps)
        interpolated_z = np.multiply.outer(np.ones_like(fraction), z1_vec) + np.multiply.outer(fraction,
                                                                                              z2_vec - z1_vec)
        interpolated_z = interpolated_z.astype(np.float64)
        self.out_filenames = []
        for z_index in np.arange(interpol_steps):
            self.out_filenames.append(self.result_dir + "/" + "out_{:.2f}.ply".format(fraction[z_index]))
        for z_index in np.arange(interpol_steps):
            start_time = time.time()
            model_z = interpolated_z[z_index:z_index + 1].astype(np.float64)
            # print('current latent vector:')
            # print(model_z.shape)
            model_z = torch.from_numpy(model_z).float()
            model_z = model_z.to(self.device)
            self.im_network.eval()
            model_float = self.z2voxel(model_z)
            vertices, triangles = mcubes.marching_cubes(model_float, self.sampling_threshold)
            vertices = (vertices.astype(np.float32) - 0.5) / self.real_size - 0.5
            # vertices = self.optimize_mesh(vertices,model_z)
            write_ply_triangle(self.result_dir + "/" + "out_{:.2f}.ply".format(fraction[z_index]), vertices, triangles)
            end_time = time.time() - start_time
            print("computed interpolation {} in {} seconds".format(z_index, end_time))

    def create_saved_images(self, images, name):
        # Tile the (N, 1, H, W) grayscale image stack into a 3-column grid
        # and save it as <result_dir>/<name> via matplotlib.
        num_images = int(images.shape[0])
        cols = 3
        rows = -int(-num_images // cols)  # ceiling division
        # convert back to grayscale
        rescale_images = images
        print(images.max())
        print(images.min())
        fig, axs = plt.subplots(nrows=rows,
                                ncols=cols,
                                sharex='all',
                                sharey='all',
                                figsize=(cols * 2, rows * 2),
                                gridspec_kw={'wspace': 0, 'hspace': 0}
                                )
        for ax, im in zip(axs.flatten(), range(num_images)):
            ax.imshow(rescale_images[im, 0, :, :], cmap='gray', vmin=0, vmax=1)
            ax.axis('off')
        plt.savefig(self.result_dir + '/' + name)

    # output shape as ply
    def create_model_mesh(self, batch_view, num, config):
        # Encode batch_view, decode to a voxel grid, run marching cubes and
        # write the mesh to <result_dir>/<num>_vox.ply. Returns the file path.
        # TODO: uncomment load checkpoint
        # load previous checkpoint
        self.load_checkpoint()
        self.im_network.eval()
        model_z, _ = self.im_network(batch_view, None, None, is_training=False)
        model_float = self.z2voxel(model_z)
        print('model_float shape')
        print(model_float.shape)
        # This transform necessary to accommodate the coordinate transform induced by marching cubes
        model_float = np.flip(np.transpose(model_float, (2, 1, 0)), 0)
        vertices, triangles = mcubes.marching_cubes(model_float, self.sampling_threshold)
        vertices = (vertices.astype(np.float32) - 0.5) / self.real_size - 0.5
        # vertices = self.optimize_mesh(vertices,model_z)
        full_path = self.result_dir + "/" + str(num) + "_vox.ply"
        write_ply_triangle(full_path, vertices, triangles)
        print("created .ply for image {}".format(num))
        return full_path

    def cv2_image_transform(self, img):
        '''
        Convert a rendered BGRA float image (values in [0, 1]) to the
        grayscale 1x128x128 float32 format the IM_SVR encoder expects.
        :param img: HxWx4 float array -- presumably BGRA in [0, 1]; confirm
        :return: (1, 128, 128) float32 array
        '''
        '''
        imgo = img[:, :, :3] * 255
        imgo = cv2.cvtColor(imgo, cv2.COLOR_BGR2GRAY)
        imga = (img[:, :, 3])
        img_out = imgo * imga + 255.0 * (1 - imga)
        img_out = np.round(img_out).astype(np.uint8)
        '''
        img[:, :, :3] = img[:, :, :3] * 255
        img_out = cv2.cvtColor(img[:, :, :], cv2.COLOR_BGRA2GRAY) / 255
        # img_out = np.round(img_out).astype(np.uint8)
        # print(img_out.shape)
        img_out = cv2.resize(img_out, dsize=(128, 128))
        img_out = img_out[np.newaxis, :, :].astype(np.float32)
        return img_out

    def annealing_view(self, ply_path):
        # Render the mesh at ply_path with a hard-coded ShapeNet camera
        # (param_num = 7) and return it as a 1x1x128x128 float32 tensor
        # suitable as encoder input.
        # param_num = self.test_idx
        param_num = 7
        # get image transform
        R, T = look_at_view_transform(
            dist=shapenet_cam_params["distance"][param_num] * 3,
            elev=shapenet_cam_params["elevation"][param_num],
            azim=shapenet_cam_params["azimuth"][param_num])
        cameras = FoVPerspectiveCameras(device=self.device,
                                        R=R,
                                        T=T,
                                        fov=shapenet_cam_params["field_of_view"][param_num]
                                        )
        raster_settings = RasterizationSettings(
            image_size=128,
            blur_radius=0.0,
            faces_per_pixel=1,
        )
        lights = PointLights(device=self.device, location=[[0.0, 0.0, -3.0]])
        renderer = MeshRenderer(
            rasterizer=MeshRasterizer(
                cameras=cameras,
                raster_settings=raster_settings
            ),
            shader=SoftPhongShader(
                device=self.device,
                cameras=cameras,
                lights=lights
            )
        )
        verts = []
        faces = []
        verts_rgb = []
        titles = []
        vert, face = load_ply(ply_path)
        verts.append(vert.to(self.device))
        faces.append(face.to(self.device))
        verts_rgb.append(torch.ones_like(vert).to(self.device))
        textures = Textures(verts_rgb=verts_rgb)
        interpol_mesh = Meshes(verts, faces, textures)
        image = renderer(interpol_mesh).cpu().numpy()
        print(image.shape)
        reformatted_image = self.cv2_image_transform(image[0])
        print(reformatted_image.min())
        out = torch.from_numpy(reformatted_image).unsqueeze(0).type(torch.float32).to(self.device)
        # print(out)
        return out

    def annealing_view_pytorch3d(self, ply_paths: List[str]):
        # Render the meshes in ply_paths through the ShapeNetRendering helper
        # (set up in deep_dream) and return the first render as a
        # 1x1x128x128 float32 encoder-input tensor.
        verts = []
        faces = []
        verts_rgb = []
        for ply_path in ply_paths:
            vert, face = load_ply(ply_path)
            verts.append(vert.to(self.device))
            faces.append(face.to(self.device))
            verts_rgb.append(torch.ones_like(vert).to(self.device))
            # verts_rgb.append(torch.rand(size=vert.size()).to(self.device))
        textures = Textures(verts_rgb=verts_rgb)
        interpol_mesh = Meshes(verts, faces, textures)
        # print(interpol_mesh.isempty())
        # print(interpol_mesh.num_verts_per_mesh())
        image = self.shapenet_render.render(model_ids=[0],
                                            meshes=interpol_mesh,
                                            device=self.device
                                            ).cpu().numpy()
        # print(image.shape)
        reformatted_image = self.cv2_image_transform(image[0])
        out = torch.from_numpy(reformatted_image).unsqueeze(0).type(torch.float32).to(self.device)
        return out

    def latent_gradient(self, base_batch_view, target_batch_view, step, config):
        # Backpropagate a style-matching objective (each base feature map is
        # pulled towards its best-matching map in self.style_activation) and
        # return the resulting gradient on the input view.
        style_activation = self.style_activation.clone()
        # zero gradients
        self.im_network.zero_grad()
        # re-register forward hook on each forward pass.
        # self.target_layer.register_forward_hook(self.get_activation(self.target_activation))
        z_vec_, _ = self.im_network(base_batch_view, None, None, is_training=False)
        base_activation = self.target_activation[0]
        # compute best feature maps
        features, width, height = style_activation.shape
        style_activation = style_activation.view(features, -1)
        comp_base_activation = base_activation.squeeze().view(features, -1)
        # Matrix of best matching feature maps.
        A = torch.matmul(torch.transpose(comp_base_activation, 0, 1), style_activation)
        # A = comp_base_activation.T.dot(style_activation)
        loss = comp_base_activation[:, torch.argmax(A, 1)].view(features, width, height).detach()
        # run the graph in reverse
        base_activation.backward(loss.unsqueeze(0))
        return base_batch_view.grad

    def deep_dream(self, config):
        """Run the deep-dream loop between a base model view and a target model.

        Repeatedly nudges the base view along the style-matching gradient,
        masks image borders, and every config.annealing_rate steps saves a
        mesh and replaces the view with a fresh render of that mesh.
        Side effects: .ply meshes and image grids written to self.result_dir.
        """
        # TODO: uncomment load data
        super().load_data(config)
        # TODO: uncomment checkpoint load
        # load previous checkpoint
        self.load_checkpoint()
        # get config values
        z_base = int(config.interpol_z1)
        base_im_num = int(config.z1_im_view)
        z_target = int(config.interpol_z2)
        # NOTE(review): target view also reads z1_im_view -- z2_im_view may
        # have been intended here; confirm.
        target_im_num = int(config.z1_im_view)
        # instantiate camera rendering class
        self.shapenet_render = ShapeNetRendering(model_nums=[z_base, z_target],
                                                 R2N2_dir=config.R2N2_dir,
                                                 model_views=[[base_im_num], [target_im_num]],
                                                 splitfile=config.splitfile
                                                 )
        # set the dreaming rate and boundary size
        self.dream_rate = config.dream_rate
        annealing_step = config.annealing_rate
        # Set up forward hook to pull values
        self.layer_num = config.layer_num
        # list index includes as zero entry the generator module itself.
        # 2 layers up front should not be used
        num_model_layers = len(list(self.im_network.img_encoder.named_children())) - 2
        if self.layer_num < 2 or self.layer_num >= num_model_layers:
            print('Layer number is too large: select layer numbers from 2 to {}'.format(num_model_layers))
            exit(0)
        # Get target layer
        # self.target_layer = list(list(self.im_network.img_encoder.children())[self.layer_num].children())[-1]
        self.target_layer = list(self.im_network.img_encoder.children())[self.layer_num]
        self.target_activation = [None]
        # register forward hook
        self.target_layer.register_forward_hook(self.get_activation(self.target_activation))
        interpol_steps = int(config.interpol_steps)
        result_base_directory = config.interpol_directory
        result_dir_name = 'DeepDream_SVR' + str(z_base) + '_' + str(z_target) + '_layer_' + str(self.layer_num)
        self.result_dir = result_base_directory + '/' + result_dir_name
        # Create output directory
        # TODO: re-create directory
        if not os.path.isdir(self.result_dir):
            os.mkdir(self.result_dir)
            print('creating directory ' + self.result_dir)
        # store images
        num_images = interpol_steps // annealing_step
        annealing_images = np.empty(shape=(num_images + 2, 1, 128, 128))
        deepdream_images = np.empty(shape=(num_images + 2, 1, 128, 128))
        # TODO: remove dummy data
        # batch_view = np.random.random(size=(1, 1, 128, 128))
        batch_view = self.data_pixels[z_base:z_base + 1, base_im_num, ...].astype(np.float32) / 255.0
        base_batch_view_ = torch.from_numpy(batch_view).type(torch.float32).to(self.device)
        base_batch_view = torch.autograd.Variable(base_batch_view_, requires_grad=True)
        deepdream_images[0, ...] = batch_view[0, ...]
        # TODO: uncomment mesh save
        self.create_model_mesh(base_batch_view, 'base', config)
        # TODO: remove dummy data
        # batch_view = np.random.random(size=(1, 1, 128, 128))
        batch_view = self.data_pixels[z_target:z_target + 1, target_im_num, ...].astype(np.float32) / 255.0
        target_batch_view = torch.from_numpy(batch_view).type(torch.float32).to(self.device)
        deepdream_images[1, ...] = batch_view[0, ...]
        # TODO: uncomment mesh save
        self.create_model_mesh(target_batch_view, 'target', config)
        # get target activation
        z_vec_, _ = self.im_network(target_batch_view, None, None, is_training=False)
        self.style_activation = self.target_activation[0].data.clone().detach().squeeze()
        for step in range(interpol_steps):
            start_time = time.perf_counter()
            # mask zero valued areas
            mask = base_batch_view < 1.99e5
            grad = self.latent_gradient(base_batch_view, target_batch_view, step, config)
            grad = grad[mask]
            # print(grad.shape)
            # mask low value fluctuations, one standard deviation below mean
            grad_mean = grad.mean()
            # print(grad_mean)
            grad_var = torch.pow(torch.mean(torch.pow(grad - grad_mean, 2)), .5)
            # print(grad_var)
            # grad[grad < grad_mean - grad_var] = 0
            grad_step = grad * self.dream_rate / torch.abs(grad_mean)
            # grad_step = self.dream_rate * (grad - grad_mean) / grad_var
            # print(grad_step.shape)
            # print(torch.max(grad_step))
            # clamp output to min,max input values.
            # base_batch_view.data = torch.clamp(base_batch_view.data - grad_step, min=0., max=1.)
            with torch.no_grad():
                base_batch_view.data[mask] += grad_step
                base_batch_view.clamp_(min=0, max=1)
            print(base_batch_view.shape)
            # apply a mask to remove border artifacts
            # NOTE(review): the in-place border writes below happen outside
            # no_grad on a leaf tensor that requires grad -- confirm this does
            # not raise at runtime / whether they belong inside the no_grad.
            border = 8
            # right border
            base_batch_view.data[..., :, 0:border] = 1
            # left border
            base_batch_view[..., :, -border:] = 1
            # top border
            base_batch_view[..., 0:border, :] = 1
            # bottom border
            base_batch_view[..., -border:, :] = 1
            # print(torch.max(grad))
            # Make sure gradients flow on the update
            # base_batch_view.requires_grad = True
            # create ply models
            if (step) % annealing_step == 0:
                if step != 0:
                    # TODO: uncomment mesh save
                    # save model
                    ply_path = self.create_model_mesh(base_batch_view, step, config)
                    # save image
                    deepdream_images[step // annealing_step + 1, ...] = base_batch_view.clone().detach().cpu().numpy()[
                        0, ...]
                    # get a new annealing model image
                    with torch.no_grad():
                        # base_batch_view.data = self.annealing_view(ply_path=ply_path)
                        base_batch_view.data = self.annealing_view_pytorch3d(ply_paths=[ply_path])
                    # save image
                    annealing_images[step // annealing_step + 1, ...] = base_batch_view.clone().detach().cpu().numpy()[
                        0, ...]
            end_time = time.perf_counter()
            print('Completed dream {} in {} seconds'.format(step, end_time - start_time))
        self.create_model_mesh(base_batch_view, step, config)
        self.create_saved_images(deepdream_images, 'deepdream_images')
        self.create_saved_images(annealing_images, 'annealing_images')
        print('Done Dreaming..')
| StarcoderdataPython |
3300286 | import sys
from operator import itemgetter
import cv2
import matplotlib.pyplot as plt
import numpy as np
def calculateScales(img):
    """Build the image-pyramid scale factors for the PNet stage.

    The image is first normalised towards 500 px (short side pinned down for
    large images, long side pinned up for small ones), then the pyramid
    shrinks by a factor of 0.709 per level until the smallest side would drop
    below the 12 px PNet input size.
    """
    base_scale = 1.0
    h, w, _ = img.shape
    if min(w, h) > 500:
        base_scale = 500.0 / min(h, w)
        w = int(w * base_scale)
        h = int(h * base_scale)
    elif max(w, h) < 500:
        base_scale = 500.0 / max(h, w)
        w = int(w * base_scale)
        h = int(h * base_scale)

    # One scale per pyramid level: base_scale * 0.709**level.
    factor = 0.709
    scales = []
    level = 0
    shortest = min(h, w)
    while shortest >= 12:
        scales.append(base_scale * pow(factor, level))
        shortest *= factor
        level += 1
    return scales
def rect2square(rectangles):
    """Expand each box row [x1, y1, x2, y2, ...] in place into a square.

    The square keeps the box centre and uses the longer side as its edge.
    Mutates and returns the same array.
    """
    widths = rectangles[:, 2] - rectangles[:, 0]
    heights = rectangles[:, 3] - rectangles[:, 1]
    sides = np.maximum(widths, heights).T
    # Shift the top-left corner so the centre stays fixed, then rebuild the
    # bottom-right corner from the new side length.
    rectangles[:, 0] = rectangles[:, 0] + widths * 0.5 - sides * 0.5
    rectangles[:, 1] = rectangles[:, 1] + heights * 0.5 - sides * 0.5
    rectangles[:, 2:4] = rectangles[:, 0:2] + np.repeat([sides], 2, axis=0).T
    return rectangles
def NMS(rectangles, threshold):
    """Greedy non-maximum suppression.

    rectangles -- sequence of rows [x1, y1, x2, y2, score, ...].
    Boxes whose IoU with the currently best-scoring box exceeds *threshold*
    are discarded. Returns surviving rows as a list of lists (the input
    itself when it is empty).
    """
    if len(rectangles) == 0:
        return rectangles
    boxes = np.array(rectangles)
    x1, y1, x2, y2, scores = (boxes[:, i] for i in range(5))
    areas = np.multiply(x2 - x1 + 1, y2 - y1 + 1)
    order = np.array(scores.argsort())  # ascending: best box is last
    keep = []
    while len(order) > 0:
        best, rest = order[-1], order[0:-1]
        # Intersection of the best box with every remaining box.
        ix1 = np.maximum(x1[best], x1[rest])
        iy1 = np.maximum(y1[best], y1[rest])
        ix2 = np.minimum(x2[best], x2[rest])
        iy2 = np.minimum(y2[best], y2[rest])
        iw = np.maximum(0.0, ix2 - ix1 + 1)
        ih = np.maximum(0.0, iy2 - iy1 + 1)
        inter = iw * ih
        iou = inter / (areas[best] + areas[rest] - inter)
        keep.append(best)
        order = rest[np.where(iou <= threshold)[0]]
    return boxes[keep].tolist()
def detect_face_12net(cls_prob, roi, out_side, scale, width, height, threshold):
    """Decode PNet outputs into square candidate boxes in original-image coords.

    cls_prob      -- 2-D face-probability map.
    roi           -- per-cell box regression offsets, shape (H, W, 4).
    out_side      -- feature-map side length, used to recover the cell stride.
    scale         -- pyramid scale mapping back to the original image.
    width, height -- original image size, used for clipping.
    threshold     -- minimum face probability for a cell to be kept.
    """
    # Stride between receptive-field centres; degenerate 1x1 maps have none.
    stride = 0
    if out_side != 1:
        stride = float(2 * out_side - 1) / (out_side - 1)

    # Cells whose face score clears the threshold, plus their scores.
    (y, x) = np.where(cls_prob >= threshold)
    score = np.expand_dims(cls_prob[y, x], -1)

    # Map each cell to its 12x12 anchor on the scaled image, apply the
    # regression offsets, then rescale to original-image coordinates.
    cells = np.concatenate([np.expand_dims(x, -1), np.expand_dims(y, -1)], axis=-1)
    top_left = np.fix(stride * cells + 0)
    bottom_right = np.fix(stride * cells + 11)
    boxes = np.concatenate((top_left, bottom_right), axis=-1)
    boxes = (boxes + roi[y, x] * 12.0) * scale

    # Stack [box, score], square the boxes and clip to the image bounds.
    rectangles = np.concatenate((boxes, score), axis=-1)
    rectangles = rect2square(rectangles)
    rectangles[:, [1, 3]] = np.clip(rectangles[:, [1, 3]], 0, height)
    rectangles[:, [0, 2]] = np.clip(rectangles[:, [0, 2]], 0, width)
    return rectangles
def filter_face_24net(cls_prob, roi, rectangles, width, height, threshold):
    """Refine PNet candidates with RNet scores and box regression.

    Keeps rows whose face probability (cls_prob[:, 1]) clears the threshold,
    shifts them by the regression offsets, re-squares, clips to the image and
    finishes with NMS at IoU 0.7.
    """
    keep = cls_prob[:, 1] >= threshold
    score = cls_prob[keep, 1:2]
    rectangles = rectangles[keep, :4]
    roi = roi[keep, :]

    w = np.expand_dims(rectangles[:, 2] - rectangles[:, 0], -1)
    h = np.expand_dims(rectangles[:, 3] - rectangles[:, 1], -1)
    # NOTE(review): the y offsets are scaled by w, not h. Incoming boxes are
    # squares (rect2square upstream) so w == h here, but confirm before
    # reusing this on non-square boxes; h is kept for symmetry.
    rectangles[:, [0, 2]] = rectangles[:, [0, 2]] + roi[:, [0, 2]] * w
    rectangles[:, [1, 3]] = rectangles[:, [1, 3]] + roi[:, [1, 3]] * w

    rectangles = np.concatenate((rectangles, score), axis=-1)
    rectangles = rect2square(rectangles)
    rectangles[:, [1, 3]] = np.clip(rectangles[:, [1, 3]], 0, height)
    rectangles[:, [0, 2]] = np.clip(rectangles[:, [0, 2]], 0, width)
    return np.array(NMS(rectangles, 0.7))
def filter_face_48net(cls_prob, roi, pts, rectangles, width, height, threshold):
    """Final ONet filtering: threshold, decode landmarks, regress boxes, NMS.

    Returns rows of [x1, y1, x2, y2, score, 5 landmark x/y pairs] after NMS
    at IoU 0.3.
    """
    keep = cls_prob[:, 1] >= threshold
    score = cls_prob[keep, 1:2]
    rectangles = rectangles[keep, :4]
    pts = pts[keep, :]
    roi = roi[keep, :]

    w = np.expand_dims(rectangles[:, 2] - rectangles[:, 0], -1)
    h = np.expand_dims(rectangles[:, 3] - rectangles[:, 1], -1)

    # Landmarks arrive normalised to the box: the first five values are x
    # coordinates, the last five are y coordinates.
    face_marks = np.zeros_like(pts)
    face_marks[:, [0, 2, 4, 6, 8]] = w * pts[:, [0, 1, 2, 3, 4]] + rectangles[:, 0:1]
    face_marks[:, [1, 3, 5, 7, 9]] = h * pts[:, [5, 6, 7, 8, 9]] + rectangles[:, 1:2]
    # NOTE(review): y offsets scaled by w (inputs are squares upstream, so
    # w == h); confirm before reuse on non-square boxes.
    rectangles[:, [0, 2]] = rectangles[:, [0, 2]] + roi[:, [0, 2]] * w
    rectangles[:, [1, 3]] = rectangles[:, [1, 3]] + roi[:, [1, 3]] * w

    rectangles = np.concatenate((rectangles, score, face_marks), axis=-1)
    rectangles[:, [1, 3]] = np.clip(rectangles[:, [1, 3]], 0, height)
    rectangles[:, [0, 2]] = np.clip(rectangles[:, [0, 2]], 0, width)
    return np.array(NMS(rectangles, 0.3))
| StarcoderdataPython |
1680893 | """
In this file it's been declered the function to create an instance of the Flask class
using the configuration avialable in the setting python file.
the function recived a string as a configuration name passing to the dict imported
the function also return a Flask instance.
"""
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_bootstrap import Bootstrap
from setting import config
from flask_login import LoginManager
# Extension singletons, created unbound here and attached to the application
# inside create_app() (the standard Flask extension factory pattern).
bootstrap = Bootstrap()
db = SQLAlchemy()
login_manager = LoginManager()
# Endpoint users are redirected to when a login-protected view is accessed.
login_manager.login_view = "main.show_requesters"
# Application factory.
def create_app(config_name: str) -> Flask:
    """Build, configure and return a Flask application for *config_name*."""
    app = Flask(__name__)
    cfg = config[config_name]
    app.config.from_object(cfg)
    cfg.init_app(app)
    # Bind the module-level extension singletons to this app instance.
    for extension in (bootstrap, db, login_manager):
        extension.init_app(app)
    # Blueprints are imported lazily to avoid circular imports.
    from app.main import main as main_blueprint
    app.register_blueprint(main_blueprint)
    from app.auth import auth as auth_blueprint
    app.register_blueprint(auth_blueprint, url_prefix="/auth")
    return app
6616640 | # -*- coding: utf-8 -*-
# src/app.py
"""
API
------------------------------------------------------------------------
Create app
------------------------------------------------------------------------
"""
from flask import Flask, Response
from flask_cors import CORS
from flask_swagger_ui import get_swaggerui_blueprint
from .config import app_config
from .controllers.VelhaController import velha_api as velha_blueprint
def create_app(env_name):
    """Build and return the Flask app for the given environment name.

    Args:
        env_name: key into ``app_config`` selecting the configuration object.

    Returns:
        A Flask app with CORS enabled on all routes, the Swagger UI mounted
        at ``/apidocs`` and the game API mounted at ``/v1/api/game``.
    """
    # App initialization.
    app = Flask(__name__)
    # CORS is enabled for every route (the commented line restricted it to /api/*).
    #CORS(app, resources={r"/api/*": {"origins": "*"}})
    CORS(app)
    app.config.from_object(app_config[env_name])
    ### swagger specific ###
    swagger_url = '/apidocs'
    api_url = '/static/api/api.yml'
    swagger_blueprint = get_swaggerui_blueprint(
        swagger_url,
        api_url,
        config={
            'app_name': "API Jogo da Velha",
            'layout': "BaseLayout",
            'filter': True
        }
    )
    app.register_blueprint(swagger_blueprint, url_prefix=swagger_url)
    ### end swagger specific ###
    app.register_blueprint(velha_blueprint, url_prefix='/v1/api/game')
    @app.route('/', methods=['GET'])
    def index():
        """Root route: welcome message pointing at the API docs.

        NOTE(review): ``response`` is given a *set* literal (braces around a
        single string build a set, not JSON), and the body is not valid JSON
        despite ``mimetype`` claiming application/json — looks unintended;
        confirm before changing.
        """
        return Response(
            mimetype="application/json",
            response={"Bem vindo ao Jogo da Velha - Documentação: /apidocs"},
            status=200
        )
    return app
| StarcoderdataPython |
3315795 | {
'targets': [
{
'target_name': 'dmp',
'sources': [
'src/diff_match_patch.cpp',
'src/dmp.cc'
],
'cflags': [ '-std=c++11' ],
'cflags!': [ '-fno-exceptions' ],
'cflags_cc!': [ '-fno-exceptions'],
'conditions': [
['OS=="mac"', {
'include_dirs': [
'/usr/local/opt/qt/include',
'/usr/local/opt/qt/include/QtCore'
],
'libraries': [
'/usr/local/opt/qt/lib/QtCore.framework/QtCore'
],
'xcode_settings': {
'GCC_ENABLE_CPP_EXCEPTIONS': 'YES',
'MACOSX_DEPLOYMENT_TARGET': '10.12',
'OTHER_CPLUSPLUSFLAGS': [ '-std=c++11', '-stdlib=libc++' ],
'OTHER_LDFLAGS': [ '-stdlib=libc++' ]
}
}],
['OS=="linux"', {
'include_dirs': [
'/usr/local/include',
'/usr/local/include/QtCore'
],
'cflags': [
'<!@(pkg-config --cflags Qt5Core)'
],
'ldflags': [
'<!@(pkg-config --libs-only-L --libs-only-other Qt5Core)'
],
'libraries': [
'<!@(pkg-config --libs-only-l Qt5Core)'
]
}]
]
},
{
'target_name': 'dmp-test',
'type': 'executable',
'sources': [
'src/diff_match_patch_test.cpp',
'src/diff_match_patch.cpp'
],
'cflags': [ '-std=c++11' ],
'cflags!': [ '-fno-exceptions' ],
'cflags_cc!': [ '-fno-exceptions'],
'conditions': [
['OS=="mac"', {
'include_dirs': [
'/usr/local/opt/qt/include',
'/usr/local/opt/qt/include/QtCore'
],
'libraries': [
'/usr/local/opt/qt/lib/QtCore.framework/QtCore'
],
'xcode_settings': {
'GCC_ENABLE_CPP_EXCEPTIONS': 'YES',
'MACOSX_DEPLOYMENT_TARGET': '10.12',
'OTHER_CPLUSPLUSFLAGS': [ '-std=c++11', '-stdlib=libc++' ],
'OTHER_LDFLAGS': [ '-stdlib=libc++' ]
}
}],
['OS=="linux"', {
'include_dirs': [
'/usr/local/include',
'/usr/local/include/QtCore'
],
'cflags': [
'<!@(pkg-config --cflags Qt5Core)'
],
'ldflags': [
'<!@(pkg-config --libs-only-L --libs-only-other Qt5Core)'
],
'libraries': [
'<!@(pkg-config --libs-only-l Qt5Core)'
]
}]
]
}
]
}
| StarcoderdataPython |
3446348 | <gh_stars>1-10
from tkinter import *
from PIL import ImageTk, Image
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from time import sleep
from selenium.webdriver.chrome.options import Options
# Selenium settings: run Chrome headless for the scraping calls below.
chrome_options = Options()
chrome_options.headless = True
# NOTE: the bundled chromedriver.exe targets Chrome version 88.
driver = webdriver.Chrome(executable_path="chromedriver.exe",options=chrome_options)
# Main Tk window setup.
app = Tk()
app.geometry("930x640")
app.resizable(False,False)
app.title("Mentor")
app.iconphoto(False,PhotoImage(file='icon.png'))
# Full-window canvas carrying the background image.
C = Canvas(app,bg="blue", height=1080, width=1920)
img = Image.open("background.jpg")
img = img.resize((1920,1080))
img = ImageTk.PhotoImage(img)
C.create_image(960,530,image=img)
C.pack(side='top', fill='both', expand='yes')
# x: "I was placed in a programme last year" checkbox; value 1 halves the
# diploma score inside hesapla().
x = IntVar()
x.set(0)
# bolum_variable: selected score type (1=SAY, 2=EA, 3=SOZ, 4=DIL radio buttons).
bolum_variable = IntVar()
# Set to True once hesapla() has computed a score successfully.
hesaplandi = False
def sifirla():
    """Clear every subject-score entry field on the form."""
    entry_widgets = (
        turkce_dogru, turkce_yanlis, mat_dogru, mat_yanlis, sos_dogru,
        sos_yanlis, fen_dogru, fen_yanlis, mat2_dogru, mat2_yanlis,
        fizik_dogru, fizik_yanlis, kimya_dogru, kimya_yanlis,
        biyoloji_dogru, biyoloji_yanlis, edebiyat_dogru, edebiyat_yanlis,
        tarih1_dogru, tarih1_yanlis, cografya1_dogru, cografya1_yanlis,
        tarih2_dogru, tarih2_yanlis, cografya2_dogru, cografya2_yanlis,
        felsefe_dogru, felsefe_yanlis, din_dogru, din_yanlis,
        dil_dogru, dil_yanlis,
    )
    for widget in entry_widgets:
        widget.delete(0, END)
def hedefAra():
    """Look up the target programme's cutoff score on YOK Atlas and, if a
    score was already computed, show how close the user is to it.

    Reads the target university/department from the hedef_uni/hedef_bolum
    entries, googles them together with "yök atlas" and scrapes the cutoff
    score from the first YOK Atlas result.
    """
    driver.get("https://www.google.com/search?q="+hedef_uni.get()+" "+hedef_bolum.get()+" "+"yök atlas")
    try:
        driver.find_element_by_xpath('//a[starts-with(@href,"https://yokatlas.yok.gov.tr/")]').click()
        sleep(.5)
        driver.find_element_by_xpath('//*[@id="headingOne"]/a/h4').click()
        sleep(.5)
        hedef_puan = driver.find_element_by_xpath('/html/body/div[2]/div[1]/div[7]/div/div[1]/div[2]/div/div/table[3]/tbody/tr[1]/td[2]').get_attribute("innerHTML")
        hedef_puan_label["text"] = "Hedef Puan: "+ hedef_puan
    except Exception:
        # Scraping failed: show the error message and clear the evaluation.
        hedef_puan_label["text"]="Hata! Tekrar Deneyin."
        degerlendirme_label["text"] = ""
    else:
        # Bugfix: this comparison used to live in a `finally` block, which
        # crashed with a NameError on the error path (hedef_puan is unbound
        # there). It now runs only when scraping succeeded.
        if hesaplandi == True:
            if int(hedef_puan[:3])-int(yks_puan_dinamik["text"][:3]) <= 0:
                degerlendirme_label["fg"] = "green"
                degerlendirme_label["text"] = "Tebrikler, Hedefine Ulaştın!\nAynen Devam!"
            elif 0< int(hedef_puan[:3])-int(yks_puan_dinamik["text"][:3]) <= 30:
                degerlendirme_label["fg"] = "#07588f"
                degerlendirme_label["text"] = "Ha Gayret!\nHedefine Yaklaşıyorsun!"
            else:
                degerlendirme_label["fg"] = "red"
                degerlendirme_label["text"] ="Daha Çok Çalışman Lazım!\nBırakmak Yok!"
def hesapla():
    """Compute TYT and YKS scores/rankings by feeding the entered answers
    into basarisiralamalari.com via Selenium.

    Reads every subject entry, submits both forms, scrapes the score and
    ranking for the selected score type (SAY/EA/SOZ/DIL radio buttons) and
    writes them into the result labels. Sets the module-level flag
    `hesaplandi` so hedefAra() can compare against the target score.
    """
    global hesaplandi
    try:
        driver.get('https://www.basarisiralamalari.com/tyt-yks-puan-hesaplama/')
        diploma_puan = float(diploma_entry.get())
        # Students already placed in a programme only get half the diploma bonus.
        if x.get() == 1:
            diploma_puan = diploma_puan/2
    except Exception:
        # Bugfix: the original fell through after this message and crashed
        # with a NameError on diploma_puan; abort cleanly instead.
        hesap_hata["text"] = "Bir Hata Oluştu!"
        return
    # TYT section
    driver.find_element_by_xpath('//*[@id="diploma-notu"]').send_keys(str(diploma_puan))
    driver.find_element_by_xpath('//*[@id="tyt-tr-d"]').send_keys(turkce_dogru.get())
    driver.find_element_by_xpath('//*[@id="tyt-tr-y"]').send_keys(turkce_yanlis.get())
    driver.find_element_by_xpath('//*[@id="tyt-mat-d"]').send_keys(mat_dogru.get())
    driver.find_element_by_xpath('//*[@id="tyt-mat-y"]').send_keys(mat_yanlis.get())
    driver.find_element_by_xpath('//*[@id="tyt-sos-d"]').send_keys(sos_dogru.get())
    driver.find_element_by_xpath('//*[@id="tyt-sos-y"]').send_keys(sos_yanlis.get())
    # Bugfix: the TYT science fields were fed from the AYT biology entries
    # (biyoloji_dogru/yanlis); they must come from fen_dogru/fen_yanlis,
    # which were otherwise never read.
    driver.find_element_by_xpath('//*[@id="tyt-fen-d"]').send_keys(fen_dogru.get())
    driver.find_element_by_xpath('//*[@id="tyt-fen-y"]').send_keys(fen_yanlis.get())
    driver.find_element_by_xpath('//*[@id="btn_tyt"]').click()
    tytpuan =driver.find_element_by_css_selector('#tyt-puan-yer').get_attribute('value')
    tytsiralama = driver.find_element_by_css_selector('#tyt-siralama-yer').get_attribute('value')
    # AYT section
    driver.find_element_by_xpath('//*[@id="yks-mat-d"]').send_keys(mat2_dogru.get())
    driver.find_element_by_xpath('//*[@id="yks-mat-y"]').send_keys(mat2_yanlis.get())
    driver.find_element_by_xpath('//*[@id="yks-fiz-d"]').send_keys(fizik_dogru.get())
    driver.find_element_by_xpath('//*[@id="yks-fiz-y"]').send_keys(fizik_yanlis.get())
    driver.find_element_by_xpath('//*[@id="yks-kim-d"]').send_keys(kimya_dogru.get())
    driver.find_element_by_xpath('//*[@id="yks-kim-y"]').send_keys(kimya_yanlis.get())
    driver.find_element_by_xpath('//*[@id="yks-biy-d"]').send_keys(biyoloji_dogru.get())
    driver.find_element_by_xpath('//*[@id="yks-biy-y"]').send_keys(biyoloji_yanlis.get())
    driver.find_element_by_xpath('//*[@id="yks-ede-d"]').send_keys(edebiyat_dogru.get())
    driver.find_element_by_xpath('//*[@id="yks-ede-y"]').send_keys(edebiyat_yanlis.get())
    # Bugfix: restored the corrupted ".<EMAIL>()" call to ".get()".
    driver.find_element_by_xpath('//*[@id="yks-tar-1-d"]').send_keys(tarih1_dogru.get())
    driver.find_element_by_xpath('//*[@id="yks-tar-1-y"]').send_keys(tarih1_yanlis.get())
    driver.find_element_by_xpath('//*[@id="yks-cog-1-d"]').send_keys(cografya1_dogru.get())
    driver.find_element_by_xpath('//*[@id="yks-cog-1-y"]').send_keys(cografya1_yanlis.get())
    driver.find_element_by_xpath('//*[@id="yks-tar-2-d"]').send_keys(tarih2_dogru.get())
    driver.find_element_by_xpath('//*[@id="yks-tar-2-y"]').send_keys(tarih2_yanlis.get())
    driver.find_element_by_xpath('//*[@id="yks-cog-2-d"]').send_keys(cografya2_dogru.get())
    driver.find_element_by_xpath('//*[@id="yks-cog-2-y"]').send_keys(cografya2_yanlis.get())
    driver.find_element_by_xpath('//*[@id="yks-fel-d"]').send_keys(felsefe_dogru.get())
    driver.find_element_by_xpath('//*[@id="yks-fel-y"]').send_keys(felsefe_yanlis.get())
    driver.find_element_by_xpath('//*[@id="yks-din-d"]').send_keys(din_dogru.get())
    driver.find_element_by_xpath('//*[@id="yks-din-y"]').send_keys(din_yanlis.get())
    driver.find_element_by_xpath('//*[@id="yks-dil-d"]').send_keys(dil_dogru.get())
    driver.find_element_by_xpath('//*[@id="yks-dil-y"]').send_keys(dil_yanlis.get())
    # Result section: submit and scrape the score matching the radio choice.
    driver.find_element_by_xpath('//*[@id="singleContent"]/div[6]/div[1]/button').click()
    if bolum_variable.get()==1:
        ykspuan = driver.find_element_by_css_selector('#yks-sayisal-puan-yer').get_attribute('value')
        ykssiralama = driver.find_element_by_css_selector('#yks-sayisal-siralama-yer').get_attribute('value')
    elif bolum_variable.get()==2:
        ykspuan = driver.find_element_by_css_selector('#yks-esit-agirlik-puan-yer').get_attribute('value')
        ykssiralama = driver.find_element_by_css_selector('#yks-esit-agirlik-siralama-yer').get_attribute('value')
    elif bolum_variable.get()==3:
        ykspuan = driver.find_element_by_css_selector('#yks-sozel-puan-yer').get_attribute('value')
        ykssiralama = driver.find_element_by_css_selector('#yks-sozel-siralama-yer').get_attribute('value')
    elif bolum_variable.get()==4:
        ykspuan = driver.find_element_by_css_selector('#yks-dil-puan-yer').get_attribute('value')
        ykssiralama = driver.find_element_by_css_selector('#yks-dil-siralama-yer').get_attribute('value')
    yks_puan_dinamik["text"] = ykspuan
    yks_siralama_label["text"] = "   "+ ykssiralama
    tyt_puan_dinamik["text"] = tytpuan
    tyt_siralama_label["text"] = "   "+ tytsiralama
    hesaplandi =True
# TARGET SECTION: university/department inputs, search button and the
# labels hedefAra() writes its results into.
hedef_uni_label = Label(app, text='Hedef Üniversite:',font="Lucida 10 bold").place(x=10,y=130)
hedef_uni = Entry(app,width=20,fg="black")
hedef_uni.place(x=10,y=160)
hedef_bolum_label = Label(app, text='Hedef Bölüm:',font="Lucida 10 bold").place(x=10,y=190)
hedef_bolum = Entry(app,width=20,fg="black")
hedef_bolum.place(x=10,y=220)
ara_button = Button(app,text="Ara",font="Lucida 12 bold",command=hedefAra,bg="#808080",fg="#e6e6e6",width=9).place(x=20,y=250)
# Filled by hedefAra(): scraped cutoff score and qualitative evaluation.
hedef_puan_label = Label(app,font="Lucida 10 bold")
hedef_puan_label.place(x=0,y=330)
degerlendirme_label = Label(app,font="Lucida 10 bold")
degerlendirme_label.place(x=0,y=390)
# GENERAL INFO SECTION: "placed last year" checkbox (drives IntVar x) and
# diploma score entry read by hesapla().
gecensene_label = Label(app, text='Geçen sene bir bölüme yerleştim',font="Lucida 10 bold",bg="#f4f4f4").place(x=285,y=70)
gecensene_check = Checkbutton(app,variable=x,bg="#ececec").place(x=260,y=70)
diploma_label = Label(app, text='Diploma Notu:',font="Lucida 10 bold").place(x=260,y=25)
diploma_entry= Entry(app,width=21,bg="#808080",fg="white",justify="center")
diploma_entry.place(x=370,y=25)
# TYT SECTION: one correct/wrong entry pair per TYT subject.
turkce_label = Label(app, text='Türkçe',font="Lucida 10 bold").place(x=310,y=150)
turkce_dogru =Entry(app,width=20,bg="#808080",fg="white",justify="center")
turkce_dogru.place(x=270,y=200)
turkce_yanlis =Entry(app,width=20,bg="#808080",fg="white",justify="center")
turkce_yanlis.place(x=270,y=250)
mat_label = Label(app, text='Matematik',font="Lucida 10 bold").place(x=455,y=150)
mat_dogru =Entry(app,width=20,bg="#808080",fg="white",justify="center")
mat_dogru.place(x=430,y=200)
mat_yanlis =Entry(app,width=20,bg="#808080",fg="white",justify="center")
mat_yanlis.place(x=430,y=250)
sos_label = Label(app, text='<NAME>',font="Lucida 10 bold",bg="#f9f9f9").place(x=600,y=150)
sos_dogru =Entry(app,width=20,bg="#808080",fg="white",justify="center")
sos_dogru.place(x=590,y=200)
sos_yanlis =Entry(app,width=20,bg="#808080",fg="white",justify="center")
sos_yanlis.place(x=590,y=250)
fen_label = Label(app, text="F<NAME>imleri",font="Lucida 10 bold").place(x=765,y=150)
fen_dogru =Entry(app,width=20,bg="#808080",fg="white",justify="center")
fen_dogru.place(x=750,y=200)
fen_yanlis =Entry(app,width=20,bg="#808080",fg="white",justify="center")
fen_yanlis.place(x=750,y=250)
# Row labels for the correct/wrong entry rows above.
tyt_dogru_label = Label(app, text='Doğru:',font="Lucida 10 bold",bg="#e6e6e6").place(x=190,y=200)
tyt_yanlis_label = Label(app, text='Yanlış:',font="Lucida 10 bold",bg="#e0e0e0").place(x=190,y=250)
# AYT SECTION: one correct/wrong entry pair per AYT subject, laid out in
# two columns of label + two Entry widgets.
mat2_label = Label(app, text='Matematik',font="Lucida 10 bold",bg="#e5e5e5").place(x=190,y=350)
mat2_dogru =Entry(app,width=15,bg="#808080",fg="white",justify="center")
mat2_dogru.place(x=270,y=350)
mat2_yanlis =Entry(app,width=15,bg="#808080",fg="white",justify="center")
mat2_yanlis.place(x=400,y=350)
fizik_label = Label(app, text='Fizik',font="Lucida 10 bold",bg="#e4e4e4").place(x=190,y=385)
fizik_dogru =Entry(app,width=15,bg="#808080",fg="white",justify="center")
fizik_dogru.place(x=270,y=385)
fizik_yanlis =Entry(app,width=15,bg="#808080",fg="white",justify="center")
fizik_yanlis.place(x=400,y=385)
kimya_label = Label(app, text='Kimya',font="Lucida 10 bold").place(x=190,y=420)
kimya_dogru =Entry(app,width=15,bg="#808080",fg="white",justify="center")
kimya_dogru.place(x=270,y=420)
kimya_yanlis =Entry(app,width=15,bg="#808080",fg="white",justify="center")
kimya_yanlis.place(x=400,y=420)
biyoloji_label = Label(app, text='Biyoloji',font="Lucida 10 bold",bg="#fafafa").place(x=190,y=455)
biyoloji_dogru =Entry(app,width=15,bg="#808080",fg="white",justify="center")
biyoloji_dogru.place(x=270,y=455)
biyoloji_yanlis =Entry(app,width=15,bg="#808080",fg="white",justify="center")
biyoloji_yanlis.place(x=400,y=455)
edebiyat_label = Label(app, text='Edebiyat',font="Lucida 10 bold",bg="#f9f9f9").place(x=190,y=490)
edebiyat_dogru =Entry(app,width=15,bg="#808080",fg="white",justify="center")
edebiyat_dogru.place(x=270,y=490)
edebiyat_yanlis =Entry(app,width=15,bg="#808080",fg="white",justify="center")
edebiyat_yanlis.place(x=400,y=490)
tarih1_label = Label(app, text='Tarih-1',font="Lucida 10 bold").place(x=190,y=525)
tarih1_dogru =Entry(app,width=15,bg="#808080",fg="white",justify="center")
tarih1_dogru.place(x=270,y=525)
tarih1_yanlis =Entry(app,width=15,bg="#808080",fg="white",justify="center")
tarih1_yanlis.place(x=400,y=525)
cografya1_label = Label(app, text='Coğrafya-1',font="Lucida 10 bold").place(x=510,y=350)
cografya1_dogru =Entry(app,width=15,bg="#808080",fg="white",justify="center")
cografya1_dogru.place(x=619,y=350)
cografya1_yanlis =Entry(app,width=15,bg="#808080",fg="white",justify="center")
cografya1_yanlis.place(x=749,y=350)
tarih2_label = Label(app, text='Tarih-2',font="Lucida 10 bold").place(x=510,y=385)
tarih2_dogru =Entry(app,width=15,bg="#808080",fg="white",justify="center")
tarih2_dogru.place(x=619,y=385)
tarih2_yanlis =Entry(app,width=15,bg="#808080",fg="white",justify="center")
tarih2_yanlis.place(x=749,y=385)
cografya2_label = Label(app, text='Coğrafya-2',font="Lucida 10 bold").place(x=510,y=420)
cografya2_dogru =Entry(app,width=15,bg="#808080",fg="white",justify="center")
cografya2_dogru.place(x=619,y=420)
cografya2_yanlis =Entry(app,width=15,bg="#808080",fg="white",justify="center")
cografya2_yanlis.place(x=749,y=420)
felsefe_label = Label(app, text='Felsefe',font="Lucida 10 bold").place(x=510,y=455)
felsefe_dogru =Entry(app,width=15,bg="#808080",fg="white",justify="center")
felsefe_dogru.place(x=619,y=455)
felsefe_yanlis =Entry(app,width=15,bg="#808080",fg="white",justify="center")
felsefe_yanlis.place(x=749,y=455)
din_label = Label(app, text='Din Kültürü',font="Lucida 10 bold").place(x=510,y=490)
din_dogru =Entry(app,width=15,bg="#808080",fg="white",justify="center")
din_dogru.place(x=619,y=490)
din_yanlis =Entry(app,width=15,bg="#808080",fg="white",justify="center")
din_yanlis.place(x=749,y=490)
dil_label = Label(app, text='Yabancı Dil',font="Lucida 10 bold").place(x=510,y=525)
dil_dogru =Entry(app,width=15,bg="#808080",fg="white",justify="center")
dil_dogru.place(x=619,y=525)
dil_yanlis =Entry(app,width=15,bg="#808080",fg="white",justify="center")
dil_yanlis.place(x=749,y=525)
# Column headers ("Correct"/"Wrong") for the two AYT columns.
ayt_dogru_label = Label(app, text='Doğru',font="Lucida 10 bold",bg="#f3f3f3").place(x=295,y=300)
ayt_yanlis_label = Label(app, text='Yanlış',font="Lucida 10 bold",bg="#fcfcfc").place(x=425,y=300)
ayt_dogru_label1 = Label(app, text='Doğru',font="Lucida 10 bold",bg="#fdfdfd").place(x=645,y=300)
ayt_yanlis_label1 = Label(app, text='Yanlış',font="Lucida 10 bold",bg="#fefefe").place(x=775,y=300)
# RESULT SECTION: score/ranking labels filled by hesapla(), the score-type
# radio buttons, and the calculate/reset buttons.
puan_label = Label(app, text='PUAN',font="Lucida 12 bold",bg="#fbfbfb").place(x=657,y=10)
siralama_label = Label(app, text='SIRALAMA',font="Lucida 12 bold",bg="#f5f5f5").place(x=780,y=10)
tyt_puan_label = Label(app, text='TYT -->',font="Lucida 12 bold",bg="#fdfdfd").place(x=550,y=40)
tyt_puan_dinamik = Label(app,font="Lucida 12 bold",bg="#f5f5f5",justify="center")
tyt_puan_dinamik.place(x=650,y=40)
tyt_siralama_label = Label(app,font="Lucida 12 bold",bg="#fbfbfb",justify="center")
tyt_siralama_label.place(x=790,y=40)
yks_puan_label = Label(app, text='YKS -->',font="Lucida 12 bold",bg="#f7f7f7").place(x=550,y=70)
yks_puan_dinamik = Label(app,font="Lucida 12 bold",bg="#f5f5f5",justify="center")
yks_puan_dinamik.place(x=650,y=70)
yks_siralama_label = Label(app,font="Lucida 12 bold",bg="#fbfbfb",justify="center")
yks_siralama_label.place(x=790,y=70)
# Score-type radio buttons sharing bolum_variable (1=SAY .. 4=DIL).
say_radio = Radiobutton(app,text="SAY",font="Lucida 9 bold",variable=bolum_variable,value=1)
say_radio.place(x=290,y=110)
say_radio.select() # SAY is selected by default.
ea_radio = Radiobutton(app,text="EA",font="Lucida 9 bold",variable=bolum_variable,value=2)
ea_radio.place(x=340,y=110)
soz_radio = Radiobutton(app,text="SÖZ",font="Lucida 9 bold",variable=bolum_variable,value=3)
soz_radio.place(x=380,y=110)
dil_radio = Radiobutton(app,text="DİL",font="Lucida 9 bold",variable=bolum_variable,value=4)
dil_radio.place(x=430,y=110)
hesapla_button = Button(app,text="Hesapla",font="Lucida 12 bold",command=hesapla,bg="#808080",fg="#e6e6e6").place(x=500,y=580)
sıfırla_button = Button(app,text="X",font="Lucida 12 bold",bg="#808080",fg="#e6e6e6",command=sifirla).place(x=580,y=580)
# Error label written by hesapla() on failure.
hesap_hata = Label(app,font="Lucida 11 bold",bg="#e8e8e8")
hesap_hata.place(x=380,y=585)
# Start the Tk event loop.
app.mainloop() | StarcoderdataPython |
9675575 | from protorpc import messages
from protorpc import remote
from protorpc.wsgi import service
from team import Team
import logging
package = 'SaintsSchedule'
# Request message: carries the id of the team whose schedule is requested.
class ScheduleRequest(messages.Message):
    team_id = messages.StringField(1, required=True)
# Response message: the team's schedule serialized as a single string.
class ScheduleResponse(messages.Message):
    schedule = messages.StringField(1, required=True)
# RPC service exposing the team-schedule lookup.
class ScheduleService(remote.Service):
    @remote.method(ScheduleRequest, ScheduleResponse)
    def schedule(self, request):
        """Return the games for the requested team wrapped in a ScheduleResponse."""
        games = Team().getGames(request.team_id)
        return ScheduleResponse(schedule=games)
# Map the RPC service and path (/schedule)
app = service.service_mappings([('/schedule.*', ScheduleService)]) | StarcoderdataPython |
3358576 | <filename>tests/commands/run/test_scheduler.py
import subprocess
import json
import pytest
from unittest import mock
from BALSAMIC.commands.run.scheduler import SbatchScheduler
from BALSAMIC.commands.run.scheduler import QsubScheduler
from BALSAMIC.commands.run.scheduler import submit_job
from BALSAMIC.commands.run.scheduler import read_sample_config
from BALSAMIC.commands.run.scheduler import write_sacct_file
from BALSAMIC.commands.run.scheduler import submit_job
from BALSAMIC.commands.run.scheduler import main as scheduler_main
from BALSAMIC.utils.cli import get_schedulerpy
from BALSAMIC.utils.cli import createDir
def test_scheduler_slurm_py(snakemake_job_script, tumor_normal_config, tmpdir,
                            capsys):
    """scheduler_main with the slurm profile should submit the jobscript,
    record the returned job id in the sacct file and print it."""
    # GIVEN a jobscript, dependency job ids, an sbatch-style stdout and a sample config
    test_jobid = '999999999999'
    test_return_value = 'Submitted batch job ' + test_jobid
    scheduler_args = [
        '9000', '9001', '9002', snakemake_job_script['snakescript']
    ]
    scheduler_profile_slurm = 'slurm'
    with open(tumor_normal_config, 'r') as input_config:
        sample_config = json.load(input_config)
    # Create directories for log and script output
    script_dir = createDir(sample_config['analysis']['script'])
    log_dir = createDir(sample_config['analysis']['log'])
    # Construct the scheduler's command line (dependencies + jobscript last)
    scheduler_cmd = [
        "--sample-config", tumor_normal_config, "--profile",
        scheduler_profile_slurm, "--qos", "low", "--account", "development",
        "--log-dir", log_dir, "--script-dir", script_dir, "--result-dir",
        sample_config['analysis']['result']
    ]
    scheduler_cmd.extend(scheduler_args)
    # WHEN calling scheduler_main with subprocess.run mocked to return sbatch output
    with mock.patch.object(subprocess, 'run') as mocked:
        mocked.return_value.stdout = test_return_value.encode('utf-8')
        scheduler_main(scheduler_cmd)
    # THEN the sacct file should be written with the job id(s)
    with open(log_dir + '/sample_tumor_normal.sacct', 'r') as fin:
        assert fin.read() == test_jobid + "\n"
    # THEN the captured stdout is the job id
    captured = capsys.readouterr()
    assert captured.out == test_jobid + "\n"
def test_scheduler_qsub_py(snakemake_job_script, tumor_normal_config, tmpdir,
                            capsys):
    """scheduler_main with the qsub profile should submit the jobscript,
    record the returned job name in the sacct file and print it."""
    # GIVEN a jobscript, dependency job ids, a qsub-style stdout and a sample config
    test_jobname = 'script.sh'
    test_return_value = f'Your job 31415 ("{test_jobname}") has been submitted'
    scheduler_args = [
        '1000', '1001', '1002', snakemake_job_script['snakescript']
    ]
    scheduler_profile_qsub = 'qsub'
    with open(tumor_normal_config, 'r') as input_config:
        sample_config = json.load(input_config)
    # Create directories for log and script output
    script_dir = createDir(sample_config['analysis']['script'])
    log_dir = createDir(sample_config['analysis']['log'])
    # Construct the scheduler's command line (dependencies + jobscript last)
    scheduler_cmd = [
        "--sample-config", tumor_normal_config, "--profile",
        scheduler_profile_qsub, "--qos", "low", "--account", "development",
        "--log-dir", log_dir, "--script-dir", script_dir, "--result-dir",
        sample_config['analysis']['result']
    ]
    scheduler_cmd.extend(scheduler_args)
    # WHEN calling scheduler_main with subprocess.run mocked to return qsub output
    with mock.patch.object(subprocess, 'run') as mocked:
        mocked.return_value.stdout = test_return_value.encode('utf-8')
        scheduler_main(scheduler_cmd)
    # THEN the sacct file should be written with the job name(s)
    with open(log_dir + '/sample_tumor_normal.sacct', 'r') as fin:
        assert fin.read() == test_jobname + "\n"
    # THEN the captured stdout is the job name
    captured = capsys.readouterr()
    assert captured.out == test_jobname + "\n"
def test_submit_job_slurm(snakemake_job_script):
    """submit_job should parse the job id out of sbatch's stdout."""
    # GIVEN slurm's submission output for a known job id
    expected_jobid = '1234'
    sbatch_stdout = 'Submitted batch job ' + expected_jobid
    # WHEN submitting with a mocked subprocess.run returning that output
    with mock.patch.object(subprocess, 'run') as mocked_run:
        mocked_run.return_value.stdout = sbatch_stdout.encode('utf-8')
        parsed_jobid = submit_job(['random_command'], 'slurm')
    # THEN the parsed job id matches the expected one
    assert parsed_jobid == expected_jobid
def test_submit_job_qsub(snakemake_job_script):
    """submit_job should parse the job name out of qsub's stdout."""
    # GIVEN qsub's submission output for a known job name
    expected_jobname = 'script.sh'
    qsub_stdout = f'Your job 31415 ("{expected_jobname}") has been submitted'
    # WHEN submitting with a mocked subprocess.run returning that output
    with mock.patch.object(subprocess, 'run') as mocked_run:
        mocked_run.return_value.stdout = qsub_stdout.encode('utf-8')
        parsed_jobname = submit_job(['random_command'], 'qsub')
    # THEN the parsed job name matches the expected one
    assert parsed_jobname == expected_jobname
def test_SbatchScheduler():
    """build_cmd should assemble the complete sbatch command string."""
    # GIVEN a fully populated SbatchScheduler
    scheduler = SbatchScheduler()
    scheduler.account = "development"
    scheduler.dependency = "afterok:12345"
    scheduler.error = "test_job.err"
    scheduler.output = "test_job.out"
    scheduler.mail_type = "FAIL"
    scheduler.mail_user = "<EMAIL>"
    scheduler.ntasks = "2"
    scheduler.qos = "low"
    scheduler.time = "01:00:00"
    scheduler.script = "example_script.sh"
    # WHEN the sbatch command is built
    command = scheduler.build_cmd()
    # THEN a single sbatch command string is produced
    assert isinstance(command, str)
    assert command == (
        'sbatch --account "development" --dependency "afterok:12345" --error "test_job.err" '
        '--output "test_job.out" --mail-type "FAIL" --mail-user "<EMAIL>" '
        '--ntasks "2" --qos "low" --time "01:00:00" example_script.sh')
def test_qsub_scheduler():
    """build_cmd should assemble the complete qsub command string."""
    # GIVEN a fully populated QsubScheduler
    scheduler = QsubScheduler()
    scheduler.account = "development"
    scheduler.dependency = ['test_jobname.sh']
    scheduler.error = "test_job.err"
    scheduler.output = "test_job.out"
    scheduler.mail_type = "FAIL"
    scheduler.mail_user = "<EMAIL>"
    scheduler.ntasks = "2"
    scheduler.qos = "low"
    scheduler.time = "01:00:00"
    scheduler.script = "example_script.sh"
    # WHEN the qsub command is built
    command = scheduler.build_cmd()
    # THEN a single qsub command string is produced
    assert isinstance(command, str)
    assert command == (
        'qsub -V -S /bin/bash -q development -e test_job.err -o test_job.out -m s -M '
        '<EMAIL> -p low -l excl=1 -pe mpi 2 -hold_jid test_jobname.sh example_script.sh '
    )
def test_read_sample_config_err(config_files):
    """read_sample_config should raise when handed a non-JSON file."""
    with pytest.raises(Exception):
        # GIVEN a bed file instead of a json file
        panel_bed = config_files['panel_bed_file']
        # WHEN calling read_sample_config
        # THEN it should raise
        assert read_sample_config(panel_bed)
def test_write_sacct_file_err():
    """write_sacct_file should fail for a path in a missing directory."""
    with pytest.raises(FileNotFoundError):
        # GIVEN a path inside a directory that does not exist, and a job id
        missing_path = "dummy/dummy_fname"
        jobid = "12345"
        # WHEN writing the sacct file
        # THEN FileNotFoundError is raised
        assert write_sacct_file(missing_path, jobid)
def test_submit_job_err():
    """submit_job should propagate CalledProcessError for a bad command."""
    with pytest.raises(subprocess.CalledProcessError):
        # GIVEN a misspelled scheduler command
        bad_cmd = "SBATCH jobscript.sh"
        scheduler_profile = 'slurm'
        # WHEN submitting it via the slurm profile
        # THEN subprocess raises CalledProcessError
        assert submit_job(bad_cmd, scheduler_profile)
| StarcoderdataPython |
1984977 | import sys
import re
def password_check(password, rules_dict):
    """Return True when *password* satisfies every rule in *rules_dict*.

    rules_dict keys: 'length' (minimum length), 'must_have_numbers' and
    'must_have_caps' (booleans enabling the digit / uppercase checks).
    """
    satisfied = [len(password) >= rules_dict['length']]
    if rules_dict['must_have_numbers']:
        satisfied.append(bool(re.search(r'\d', password)))
    if rules_dict['must_have_caps']:
        satisfied.append(bool(re.search(r'[A-Z]+', password)))
    return all(satisfied)
def _rules_dictionary(length=10, must_have_numbers=True, must_have_caps=True):
return {'length': length,
'must_have_numbers': must_have_numbers,
'must_have_caps': must_have_caps}
if __name__ == "__main__":
    # CLI entry point: the first argument is the password to check.
    # Extra arguments are ignored; no further input validation is done.
    try:
        candidate = sys.argv[1]
        print(password_check(candidate, _rules_dictionary()))
    except IndexError as e:
        raise Exception('Provide password as argument!') from e
| StarcoderdataPython |
281812 | import pytest
from src.vk_scheduler import VkPost
from src.models import IdentifiedRedditPost
from src.postgres import get_approved_anime_posts, connect_to_db
# pytest -n auto
# OR
# pytest -x ./tests/test_vk_scheduler.py
# Each case: raw reddit-post dicts, the expected visible VK post message,
# and the expected list of per-image hidden messages (tag + source link).
@pytest.mark.parametrize(
    "anime_posts, result_display_message, result_hidden_messages",
    [
        (
            [
                {
                    "post_id": "k4sfyd",
                    "sub_name": "awwnime",
                    "source_link": "https://www.pixiv.net/en/artworks/86035852",
                    "visible_tags": ["Hololive", "Minato_Aqua"],
                    "invisible_tags": None,
                    "phash": "e1d86596c8c69bc5",
                },
                {
                    "post_id": "ha64th",
                    "sub_name": "awwnime",
                    "source_link": "https://www.pixiv.net/en/artworks/82372378",
                    "visible_tags": ["Hololive", "Inugami_Korone"],
                    "invisible_tags": None,
                    "phash": "e2b19852a7d2692f",
                },
                {
                    "post_id": "hex3jb",
                    "sub_name": "awwnime",
                    "source_link": "https://twitter.com/frengchiano2/status/1275694907261321216?s=19",
                    "visible_tags": ["Hololive", "Uruha_Rushia"],
                    "invisible_tags": None,
                    "phash": "abd095f30d9266cc",
                },
            ],
            "#Hololive@kotanima_arts",
            [
                "#MinatoAqua@kotanima_arts\n https://www.pixiv.net/en/artworks/86035852",
                "#InugamiKorone@kotanima_arts\n https://www.pixiv.net/en/artworks/82372378",
                "#UruhaRushia@kotanima_arts\n https://twitter.com/frengchiano2/status/1275694907261321216?s=19",
            ],
        ),
        (
            [
                {
                    "post_id": "k4sfyd",
                    "sub_name": "awwnime",
                    "source_link": "https://www.pixiv.net/en/artworks/86035852",
                    "visible_tags": ["Hololive", "Minato_Aqua"],
                    "invisible_tags": None,
                    "phash": "e1d86596c8c69bc5",
                },
                {
                    "post_id": "ha64th",
                    "sub_name": "awwnime",
                    "source_link": "https://www.pixiv.net/en/artworks/82372378",
                    "visible_tags": ["Hololive", "Inugami_Korone"],
                    "invisible_tags": None,
                    "phash": "e2b19852a7d2692f",
                },
            ],
            "#Hololive@kotanima_arts",
            [
                "#MinatoAqua@kotanima_arts\n https://www.pixiv.net/en/artworks/86035852",
                "#InugamiKorone@kotanima_arts\n https://www.pixiv.net/en/artworks/82372378",
            ],
        ),
        (
            [
                {
                    "post_id": "k4sfyd",
                    "sub_name": "awwnime",
                    "source_link": "https://www.pixiv.net/en/artworks/86035852",
                    "visible_tags": ["Hololive", "Minato_Aqua"],
                    "invisible_tags": None,
                    "phash": "e1d86596c8c69bc5",
                },
            ],
            "#Hololive@kotanima_arts\n#MinatoAqua@kot<PASSWORD>",
            ["\n https://www.pixiv.net/en/artworks/86035852"],
        ),
    ],
)
def test_VkPost(anime_posts, result_display_message, result_hidden_messages):
    """VkPost should split post tags into one visible message plus one
    hidden per-image message each containing the remaining tag and link."""
    anime_posts = [IdentifiedRedditPost.from_dict(post) for post in anime_posts]
    # print(anime_posts)
    vk_post = VkPost(owner_id=0, last_post_date=-1, reddit_posts=anime_posts)
    display_message = vk_post._get_main_post_message(anime_posts)
    assert display_message == result_display_message
    print(display_message)
    hidden_messages = vk_post._get_list_of_hidden_messages(anime_posts)
    assert hidden_messages == result_hidden_messages
    print(hidden_messages)
| StarcoderdataPython |
8110787 | import os
import sys
import json
import pytest
import subprocess
import time
from kat.harness import Query, is_ingress_class_compatible
from abstract_tests import AmbassadorTest, HTTP, ServiceType
from kat.utils import namespace_manifest
from tests.utils import KUBESTATUS_PATH
from ambassador.utils import parse_bool
class IngressStatusTest1(AmbassadorTest):
    """Verify Ambassador propagates a Service load-balancer status update
    onto the annotated Ingress resource, and that the Ingress routes."""
    # Status pushed to the Service via kubestatus; expected back on the Ingress.
    status_update = {
        "loadBalancer": {
            "ingress": [{
                "ip": "172.16.17.32"
            }]
        }
    }
    def init(self):
        # Backend service the Ingress routes to.
        self.target = HTTP()
    def manifests(self) -> str:
        # Ingress annotated with the ambassador ingress class and this
        # test's ambassador id, routing /{self.name}/ to the target service.
        return """
---
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  annotations:
    kubernetes.io/ingress.class: ambassador
    getambassador.io/ambassador-id: {self.ambassador_id}
  name: {self.name.k8s}
spec:
  rules:
  - http:
      paths:
      - backend:
          service:
            name: {self.target.path.k8s}
            port:
              number: 80
        path: /{self.name}/
        pathType: Prefix
""" + super().manifests()
    def queries(self):
        # Push the status update to the Service before querying.
        if True or sys.platform != 'darwin':
            text = json.dumps(self.status_update)
            update_cmd = [KUBESTATUS_PATH, 'Service', '-n', 'default', '-f', f'metadata.name={self.name.k8s}', '-u', '/dev/fd/0']
            subprocess.run(update_cmd, input=text.encode('utf-8'), timeout=10)
            # If you run these tests individually, the time between running kubestatus
            # and the ingress resource actually getting updated is longer than the
            # time spent waiting for resources to be ready, so this test will fail (most of the time)
            time.sleep(1)
        yield Query(self.url(self.name + "/"))
        yield Query(self.url(f'need-normalization/../{self.name}/'))
    def check(self):
        if not parse_bool(os.environ.get("AMBASSADOR_PYTEST_INGRESS_TEST", "false")):
            pytest.xfail('AMBASSADOR_PYTEST_INGRESS_TEST not set, xfailing...')
        if False and sys.platform == 'darwin':
            pytest.xfail('not supported on Darwin')
        # Both queries (normalized and not) must hit the target backend
        # with the normalized original path.
        for r in self.results:
            if r.backend:
                assert r.backend.name == self.target.path.k8s, (r.backend.name, self.target.path.k8s)
                assert r.backend.request.headers['x-envoy-original-path'][0] == f'/{self.name}/'
        # Check that the Ingress picked up the pushed load-balancer IP.
        ingress_cmd = ["tools/bin/kubectl", "get", "-n", "default", "-o", "json", "ingress", self.path.k8s]
        ingress_run = subprocess.Popen(ingress_cmd, stdout=subprocess.PIPE)
        ingress_out, _ = ingress_run.communicate()
        ingress_json = json.loads(ingress_out)
        assert ingress_json['status'] == self.status_update, f"Expected Ingress status to be {self.status_update}, got {ingress_json['status']} instead"
class IngressStatusTest2(AmbassadorTest):
    """Second copy of the basic Ingress status test (distinct status IP) to
    confirm status propagation works for more than one Ingress at a time.
    """

    # Status blob pushed onto the Ambassador Service via kubestatus.
    status_update = {
        "loadBalancer": {
            "ingress": [{
                "ip": "192.168.127.12"
            }]
        }
    }

    def init(self):
        # Backend HTTP service the Ingress routes to.
        self.target = HTTP()

    def manifests(self) -> str:
        return """
---
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  annotations:
    kubernetes.io/ingress.class: ambassador
    getambassador.io/ambassador-id: {self.ambassador_id}
  name: {self.name.k8s}
spec:
  rules:
  - http:
      paths:
      - backend:
          service:
            name: {self.target.path.k8s}
            port:
              number: 80
        path: /{self.name}/
        pathType: Prefix
""" + super().manifests()

    def queries(self):
        # NOTE(review): 'True or ...' disables the Darwin skip.
        if True or sys.platform != 'darwin':
            text = json.dumps(self.status_update)
            update_cmd = [KUBESTATUS_PATH, 'Service', '-n', 'default', '-f', f'metadata.name={self.name.k8s}', '-u', '/dev/fd/0']
            subprocess.run(update_cmd, input=text.encode('utf-8'), timeout=10)
            # If you run these tests individually, the time between running kubestatus
            # and the ingress resource actually getting updated is longer than the
            # time spent waiting for resources to be ready, so this test will fail (most of the time)
            time.sleep(1)
        yield Query(self.url(self.name + "/"))
        # Path-normalization probe: must route to the same backend.
        yield Query(self.url(f'need-normalization/../{self.name}/'))

    def check(self):
        if not parse_bool(os.environ.get("AMBASSADOR_PYTEST_INGRESS_TEST", "false")):
            pytest.xfail('AMBASSADOR_PYTEST_INGRESS_TEST not set, xfailing...')
        # NOTE(review): never true; Darwin xfail disabled.
        if False and sys.platform == 'darwin':
            pytest.xfail('not supported on Darwin')
        for r in self.results:
            if r.backend:
                assert r.backend.name == self.target.path.k8s, (r.backend.name, self.target.path.k8s)
                assert r.backend.request.headers['x-envoy-original-path'][0] == f'/{self.name}/'
        # check for Ingress IP here
        ingress_cmd = ["tools/bin/kubectl", "get", "-n", "default", "-o", "json", "ingress", self.path.k8s]
        ingress_run = subprocess.Popen(ingress_cmd, stdout=subprocess.PIPE)
        ingress_out, _ = ingress_run.communicate()
        ingress_json = json.loads(ingress_out)
        assert ingress_json['status'] == self.status_update, f"Expected Ingress status to be {self.status_update}, got {ingress_json['status']} instead"
class IngressStatusTestAcrossNamespaces(AmbassadorTest):
    """Verify status propagation onto an Ingress living in a different
    namespace (alt-namespace) from the Ambassador Service.
    """

    # Status blob pushed onto the Ambassador Service via kubestatus.
    status_update = {
        "loadBalancer": {
            "ingress": [{
                "ip": "172.16.31.10"
            }]
        }
    }

    def init(self):
        # Backend service deliberately placed in the alternate namespace.
        self.target = HTTP(namespace="alt-namespace")

    def manifests(self) -> str:
        return namespace_manifest("alt-namespace") + """
---
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  annotations:
    kubernetes.io/ingress.class: ambassador
    getambassador.io/ambassador-id: {self.ambassador_id}
  name: {self.name.k8s}
  namespace: alt-namespace
spec:
  rules:
  - http:
      paths:
      - backend:
          service:
            name: {self.target.path.k8s}
            port:
              number: 80
        path: /{self.name}/
        pathType: Prefix
""" + super().manifests()

    def queries(self):
        # NOTE(review): 'True or ...' disables the Darwin skip.
        if True or sys.platform != 'darwin':
            text = json.dumps(self.status_update)
            update_cmd = [KUBESTATUS_PATH, 'Service', '-n', 'default', '-f', f'metadata.name={self.name.k8s}', '-u', '/dev/fd/0']
            subprocess.run(update_cmd, input=text.encode('utf-8'), timeout=10)
            # If you run these tests individually, the time between running kubestatus
            # and the ingress resource actually getting updated is longer than the
            # time spent waiting for resources to be ready, so this test will fail (most of the time)
            time.sleep(1)
        yield Query(self.url(self.name + "/"))
        # Path-normalization probe: must route to the same backend.
        yield Query(self.url(f'need-normalization/../{self.name}/'))

    def check(self):
        if not parse_bool(os.environ.get("AMBASSADOR_PYTEST_INGRESS_TEST", "false")):
            pytest.xfail('AMBASSADOR_PYTEST_INGRESS_TEST not set, xfailing...')
        # NOTE(review): never true; Darwin xfail disabled.
        if False and sys.platform == 'darwin':
            pytest.xfail('not supported on Darwin')
        for r in self.results:
            if r.backend:
                assert r.backend.name == self.target.path.k8s, (r.backend.name, self.target.path.k8s)
                assert r.backend.request.headers['x-envoy-original-path'][0] == f'/{self.name}/'
        # check for Ingress IP here (the Ingress lives in alt-namespace)
        ingress_cmd = ["tools/bin/kubectl", "get", "-o", "json", "ingress", self.path.k8s, "-n", "alt-namespace"]
        ingress_run = subprocess.Popen(ingress_cmd, stdout=subprocess.PIPE)
        ingress_out, _ = ingress_run.communicate()
        ingress_json = json.loads(ingress_out)
        assert ingress_json['status'] == self.status_update, f"Expected Ingress status to be {self.status_update}, got {ingress_json['status']} instead"
class IngressStatusTestWithAnnotations(AmbassadorTest):
    """Verify status propagation for an Ingress that ALSO carries an inline
    getambassador.io/config annotation (a nested Mapping); both the Ingress
    rule and the annotated Mapping must route.
    """

    # Status blob pushed onto the Ambassador Service via kubestatus.
    status_update = {
        "loadBalancer": {
            "ingress": [{
                "ip": "192.168.127.12"
            }]
        }
    }

    def init(self):
        # Backend HTTP service shared by the Ingress rule and the Mapping.
        self.target = HTTP()

    def manifests(self) -> str:
        return """
---
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  annotations:
    getambassador.io/config: |
      ---
      apiVersion: getambassador.io/v3alpha1
      kind: Mapping
      name: {self.name}-nested
      hostname: "*"
      prefix: /{self.name}-nested/
      service: http://{self.target.path.fqdn}
      ambassador_id: [{self.ambassador_id}]
    kubernetes.io/ingress.class: ambassador
    getambassador.io/ambassador-id: {self.ambassador_id}
  name: {self.name.k8s}
spec:
  rules:
  - http:
      paths:
      - backend:
          service:
            name: {self.target.path.k8s}
            port:
              number: 80
        path: /{self.name}/
        pathType: Prefix
""" + super().manifests()

    def queries(self):
        # Push the fake load-balancer status onto the Ambassador Service.
        text = json.dumps(self.status_update)
        update_cmd = [KUBESTATUS_PATH, 'Service', '-n', 'default', '-f', f'metadata.name={self.name.k8s}', '-u', '/dev/fd/0']
        subprocess.run(update_cmd, input=text.encode('utf-8'), timeout=10)
        # If you run these tests individually, the time between running kubestatus
        # and the ingress resource actually getting updated is longer than the
        # time spent waiting for resources to be ready, so this test will fail (most of the time)
        time.sleep(1)
        yield Query(self.url(self.name + "/"))
        # Route defined by the annotated (nested) Mapping.
        yield Query(self.url(self.name + "-nested/"))
        # Path-normalization probe.
        yield Query(self.url(f'need-normalization/../{self.name}/'))

    def check(self):
        if not parse_bool(os.environ.get("AMBASSADOR_PYTEST_INGRESS_TEST", "false")):
            pytest.xfail('AMBASSADOR_PYTEST_INGRESS_TEST not set, xfailing...')
        # check for Ingress IP here
        ingress_cmd = ["tools/bin/kubectl", "get", "-n", "default", "-o", "json", "ingress", self.path.k8s]
        ingress_run = subprocess.Popen(ingress_cmd, stdout=subprocess.PIPE)
        ingress_out, _ = ingress_run.communicate()
        ingress_json = json.loads(ingress_out)
        assert ingress_json['status'] == self.status_update, f"Expected Ingress status to be {self.status_update}, got {ingress_json['status']} instead"
class SameIngressMultipleNamespaces(AmbassadorTest):
    """Two Ingresses sharing one name in different namespaces must both be
    reconciled, and BOTH copies must receive the load-balancer status.
    """

    # Status blob pushed onto the Ambassador Service via kubestatus; both
    # namespaced Ingress copies are expected to carry it afterwards.
    status_update = {
        "loadBalancer": {
            "ingress": [{
                "ip": "172.16.31.10"
            }]
        }
    }

    def init(self):
        self.target = HTTP()
        # One backend per namespace so each Ingress routes somewhere distinct.
        self.target1 = HTTP(name="target1", namespace="same-ingress-1")
        self.target2 = HTTP(name="target2", namespace="same-ingress-2")

    def manifests(self) -> str:
        return namespace_manifest("same-ingress-1") + """
---
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  annotations:
    kubernetes.io/ingress.class: ambassador
    getambassador.io/ambassador-id: {self.ambassador_id}
  name: {self.name.k8s}
  namespace: same-ingress-1
spec:
  rules:
  - http:
      paths:
      - backend:
          service:
            name: {self.target.path.k8s}-target1
            port:
              number: 80
        path: /{self.name}-target1/
        pathType: Prefix
""" + namespace_manifest("same-ingress-2") + """
---
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  annotations:
    kubernetes.io/ingress.class: ambassador
    getambassador.io/ambassador-id: {self.ambassador_id}
  name: {self.name.k8s}
  namespace: same-ingress-2
spec:
  rules:
  - http:
      paths:
      - backend:
          service:
            name: {self.target.path.k8s}-target2
            port:
              number: 80
        path: /{self.name}-target2/
        pathType: Prefix
""" + super().manifests()

    def queries(self):
        # NOTE: 'True or ...' deliberately disables the Darwin skip.
        if True or sys.platform != 'darwin':
            text = json.dumps(self.status_update)
            update_cmd = [KUBESTATUS_PATH, 'Service', '-n', 'default', '-f', f'metadata.name={self.name.k8s}', '-u', '/dev/fd/0']
            subprocess.run(update_cmd, input=text.encode('utf-8'), timeout=10)
            # If you run these tests individually, the time between running kubestatus
            # and the ingress resource actually getting updated is longer than the
            # time spent waiting for resources to be ready, so this test will fail (most of the time)
            time.sleep(1)
        yield Query(self.url(self.name + "-target1/"))
        yield Query(self.url(self.name + "-target2/"))

    def check(self):
        if not parse_bool(os.environ.get("AMBASSADOR_PYTEST_INGRESS_TEST", "false")):
            pytest.xfail('AMBASSADOR_PYTEST_INGRESS_TEST not set, xfailing...')
        if False and sys.platform == 'darwin':
            pytest.xfail('not supported on Darwin')
        for namespace in ['same-ingress-1', 'same-ingress-2']:
            # Check for the Ingress IP in each namespace.
            # Fix: the command previously passed BOTH '-n default' and
            # '-n <namespace>', relying on kubectl taking the last flag;
            # pass a single explicit namespace instead.
            ingress_cmd = ["tools/bin/kubectl", "get", "-o", "json", "ingress", self.path.k8s, "-n", namespace]
            ingress_run = subprocess.Popen(ingress_cmd, stdout=subprocess.PIPE)
            ingress_out, _ = ingress_run.communicate()
            ingress_json = json.loads(ingress_out)
            assert ingress_json['status'] == self.status_update, f"Expected Ingress status to be {self.status_update}, got {ingress_json['status']} instead"
class IngressStatusTestWithIngressClass(AmbassadorTest):
    """Verify status propagation for an Ingress selected via spec.ingressClassName
    and a dedicated IngressClass (instead of the legacy annotation), including
    the extra RBAC Ambassador needs to watch IngressClass resources.
    """

    # Status blob pushed onto the Ambassador Service via kubestatus.
    status_update = {
        "loadBalancer": {
            "ingress": [{
                "ip": "172.16.17.32"
            }]
        }
    }

    def init(self):
        self.target = HTTP()
        # Skip on clusters whose API doesn't serve IngressClass.
        if not is_ingress_class_compatible():
            self.xfail = 'IngressClass is not supported in this cluster'

    def manifests(self) -> str:
        return """
---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
  name: {self.name.k8s}-ext
rules:
- apiGroups: ["networking.k8s.io"]
  resources: ["ingressclasses"]
  verbs: ["get", "list", "watch"]
---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
  name: {self.name.k8s}-ext
roleRef:
  apiGroup: rbac.authorization.k8s.io
  kind: ClusterRole
  name: {self.name.k8s}-ext
subjects:
- kind: ServiceAccount
  name: {self.path.k8s}
  namespace: {self.namespace}
---
apiVersion: networking.k8s.io/v1
kind: IngressClass
metadata:
  annotations:
    getambassador.io/ambassador-id: {self.ambassador_id}
  name: {self.name.k8s}
spec:
  controller: getambassador.io/ingress-controller
---
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  annotations:
    getambassador.io/ambassador-id: {self.ambassador_id}
  name: {self.name.k8s}
spec:
  ingressClassName: {self.name.k8s}
  rules:
  - http:
      paths:
      - backend:
          service:
            name: {self.target.path.k8s}
            port:
              number: 80
        path: /{self.name}/
        pathType: Prefix
""" + super().manifests()

    def queries(self):
        # NOTE(review): 'True or ...' disables the Darwin skip.
        if True or sys.platform != 'darwin':
            text = json.dumps(self.status_update)
            update_cmd = [KUBESTATUS_PATH, 'Service', '-n', 'default', '-f', f'metadata.name={self.name.k8s}', '-u', '/dev/fd/0']
            subprocess.run(update_cmd, input=text.encode('utf-8'), timeout=10)
            # If you run these tests individually, the time between running kubestatus
            # and the ingress resource actually getting updated is longer than the
            # time spent waiting for resources to be ready, so this test will fail (most of the time)
            time.sleep(1)
        yield Query(self.url(self.name + "/"))
        # Path-normalization probe.
        yield Query(self.url(f'need-normalization/../{self.name}/'))

    def check(self):
        if not parse_bool(os.environ.get("AMBASSADOR_PYTEST_INGRESS_TEST", "false")):
            pytest.xfail('AMBASSADOR_PYTEST_INGRESS_TEST not set, xfailing...')
        # NOTE(review): never true; Darwin xfail disabled.
        if False and sys.platform == 'darwin':
            pytest.xfail('not supported on Darwin')
        for r in self.results:
            if r.backend:
                assert r.backend.name == self.target.path.k8s, (r.backend.name, self.target.path.k8s)
                assert r.backend.request.headers['x-envoy-original-path'][0] == f'/{self.name}/'
        # check for Ingress IP here
        ingress_cmd = ["tools/bin/kubectl", "get", "-n", "default", "-o", "json", "ingress", self.path.k8s]
        ingress_run = subprocess.Popen(ingress_cmd, stdout=subprocess.PIPE)
        ingress_out, _ = ingress_run.communicate()
        ingress_json = json.loads(ingress_out)
        assert ingress_json['status'] == self.status_update, f"Expected Ingress status to be {self.status_update}, got {ingress_json['status']} instead"
| StarcoderdataPython |
1922231 | <filename>wordsearch.py
#!/usr/bin/env python
DEFAULT_MIN_LENGTH = 3  # Shortest word length reported by default.


class InvalidInput(ValueError):
    """Raised when the puzzle grid is malformed (ragged, too small, ...)."""
    pass


class Puzzle(object):
    """A rectangular grid of letters searchable for dictionary words.

    Words are matched against a trie built by build_dictionary(): nested
    dicts keyed by letter, with Puzzle.SENTINEL marking word ends.
    """

    # Trie key meaning "a complete word ends at this node".
    SENTINEL = object()

    def __init__(self, data):
        """Validate and store the grid (a sequence of equal-length strings).

        Raises InvalidInput for fewer than two rows, ragged rows, or cells
        that are not single characters.  Coordinates in error messages are
        1-based for the user's benefit.
        """
        if len(data) < 2:
            raise InvalidInput("Must have more than one row")
        width = len(data[0])
        for i, row in enumerate(data):
            if len(row) != width:
                raise InvalidInput("Row %i is not %i long" % (i + 1, width))
            for j, c in enumerate(row):
                if len(c) != 1:
                    raise InvalidInput("(%i,%i) not 1 character %r" % (
                        j + 1, i + 1, c,
                    ))
        self.data = data

    @classmethod
    def build_dictionary(cls, f):
        """Build a letter trie from a word list.

        *f* may be a filename or an open iterable of lines.  Only
        all-lowercase entries are kept, which drops proper nouns and
        possessives from system word lists.  Each complete word's final
        trie node gains a cls.SENTINEL key.
        """
        from_string = isinstance(f, str)
        if from_string:
            f = open(f)
        try:
            results = {}
            for s in f:
                s = s.strip()
                if not s.islower():
                    continue
                node = results
                for letter in s:  # s is already stripped above
                    node = node.setdefault(letter, {})
                node[cls.SENTINEL] = True
            return results
        finally:
            # Only close handles we opened ourselves.
            if from_string:
                f.close()

    @staticmethod
    def dir_to_desc(dir_row, dir_col):
        """Map a (row, col) direction increment to a compass name.

        Rows grow southward and columns eastward, so (1, 1) -> 'southeast'.
        """
        if dir_row > 0:
            direction = "south"
        elif dir_row < 0:
            direction = "north"
        else:
            direction = ""
        if dir_col > 0:
            direction += "east"
        elif dir_col < 0:
            direction += "west"
        return direction

    @staticmethod
    def parse_file(f):
        """Read a puzzle (filename or open file object) into a list of
        lowercased row strings."""
        from_string = isinstance(f, str)
        if from_string:
            f = open(f)
        try:
            return [row.strip().lower() for row in f]
        finally:
            # Only close handles we opened ourselves.
            if from_string:
                f.close()

    def find_words(self, dictionary,
                   allow_diagonals=True,
                   allow_reverse=True,
                   min_length=DEFAULT_MIN_LENGTH,
                   ):
        """Yield (word, (row, col, d_row, d_col)) for every dictionary word.

        Every cell is tried as a start in each allowed direction; start
        coordinates and direction increments are 0-based.
        """
        directions = [
            (a, b)
            for a in (-1, 0, 1)
            for b in (-1, 0, 1)
            if not (a == 0 and b == 0)
        ]
        if not allow_diagonals:
            directions = [
                (a, b) for (a, b) in directions
                if a == 0 or b == 0
            ]
        if not allow_reverse:
            # Keep only directions moving south and/or east.
            directions = [
                (a, b) for (a, b) in directions
                if not (a < 0 or b < 0)
            ]
        data = self.data
        max_row = len(data)
        max_col = len(data[0])
        for i, row in enumerate(data):
            for j, c in enumerate(row):
                if c not in dictionary:
                    continue
                for inc_i, inc_j in directions:
                    d = dictionary  # trie node reached by the letters in so_far
                    i2, j2, c2 = i, j, c
                    so_far = []
                    while True:
                        # A SENTINEL here means so_far spells a complete word.
                        if len(so_far) >= min_length and self.SENTINEL in d:
                            yield ''.join(so_far), (i, j, inc_i, inc_j)
                        if not (0 <= i2 < max_row and 0 <= j2 < max_col):
                            break
                        c2 = data[i2][j2]
                        so_far.append(c2)
                        if c2 in d:
                            d = d[c2]
                        else:
                            break
                        i2 += inc_i
                        j2 += inc_j
if __name__ == "__main__":
    # Command-line driver: parse options, load the puzzle and dictionary,
    # then print every word found with its 1-based position and direction.
    # NOTE(review): 'argv' is imported but unused; OptionParser reads
    # sys.argv itself.
    from sys import argv, exit, stderr
    import os
    from optparse import OptionParser, OptionGroup
    parser = OptionParser(
        usage="Usage: %prog [options] puzzle.txt",
    )
    parser.add_option("-d", "--dictionary",
        help="Specify an alternate dictionary (one word per line)",
        action="store",
        dest="dictionary",
        default="/usr/share/dict/words",
    )
    search_options = OptionGroup(parser, "Search options")
    search_options.add_option("--no-diagonals",
        help="Disallow diagonals",
        action="store_false",
        dest="allow_diagonals",
        default=True,
    )
    search_options.add_option("--no-reverse", "--no-backwards",
        help="Disallow reverse matches",
        action="store_false",
        dest="allow_reverse",
        default=True,
    )
    search_options.add_option("-l", "--min-length",
        action="store",
        type="int",
        dest="min_length",
        metavar="LENGTH",
        help="Set the minimum length of interest (default=%i)" %
            DEFAULT_MIN_LENGTH,
        default=DEFAULT_MIN_LENGTH,
    )
    parser.add_option_group(search_options)
    options, args = parser.parse_args()
    # Exactly one positional argument: the puzzle file.
    if len(args) != 1:
        parser.print_help()
        exit(1)
    fname = args[0]
    data = Puzzle.parse_file(fname)
    try:
        puzzle = Puzzle(data)
    except InvalidInput as e:
        stderr.write("Malformed puzzle: %s\n" % e)
        exit(1)
    # Allow '~' in the dictionary path.
    dictionary = os.path.expanduser(options.dictionary)
    for answer in puzzle.find_words(
            Puzzle.build_dictionary(dictionary),
            allow_reverse=options.allow_reverse,
            allow_diagonals=options.allow_diagonals,
            min_length=options.min_length
            ):
        word, (row, col, dir_row, dir_col) = answer
        direction = Puzzle.dir_to_desc(dir_row, dir_col)
        # Report 1-based coordinates to the user.
        print("%s at row %i, col %i going %s" % (
            word,
            row + 1,
            col + 1,
            direction
        ))
| StarcoderdataPython |
8100516 | # flip half input images and steering angles np.fliplr(image), -steering_angle
# use l and r camera images by pretending they are in center, and adding/subtracting correction.
# if l image, add correction, if r image, subtract correction
# start with 160x320x3 image into network by reading using cv2 (W, H), output steering angle
# clip top 50 pix, and bottom 20 pix using
# model.add(Cropping2D(cropping=((50,20), (0,0)), input_shape=(160,320,3)))
# normalize pixels using Lambda(lambda x: (x / 255.0) - 0.5)
import os
import csv
import cv2
import numpy as np
import sklearn
from enum import Enum
from keras.optimizers import Adam
from keras.models import load_model, save_model
from keras.models import Sequential
from keras.layers import Lambda, Cropping2D, Conv2D, Dense, Flatten, MaxPool2D, Dropout
from sklearn.model_selection import train_test_split
shuffle = sklearn.utils.shuffle
ceil = np.ceil
join = os.path.join
class ImagePos(Enum):
    # Column index of each camera's image path within a driving_log.csv row.
    center=0
    left=1
    right=2
class BehavioralCloning(object):
    """Train a steering-angle regression CNN from simulator driving logs.

    Each log row contributes four training images: the center frame, its
    horizontal flip (with negated angle), and the left/right frames with a
    fixed steering correction applied.
    """

    def __init__(self):
        self.batch_size = 32
        # Pixels cropped from the top (sky) and bottom (car hood) of frames.
        self.crop_up, self.crop_down = 50, 20
        # Raw simulator frame shape (H, W, C).
        self.orig_dims = (160, 320, 3)
        self.model_name = 'my_model_5.h5'

    def get_train_val_data(self):
        """Read driving-log CSVs and split rows 80/20 into train/validation.

        Each sample is [csv_row, file_index]; the index selects the image
        directory matching the CSV the row came from.
        """
        samples = []
        file_names = [
            r'.\driving_logs\driving_log_train2.csv']
        for i, file_name in enumerate(file_names):
            with open(file_name) as csvfile:
                reader = csv.reader(csvfile)
                for line in reader:
                    samples.append([line, i])
        train_samples, validation_samples = train_test_split(samples, test_size=0.2)
        return train_samples, validation_samples

    def generator(self, samples, batch_size=32):
        """Yield (X, y) batches forever for Keras fit_generator.

        Note each sample expands to 4 images, so a yielded batch holds up
        to 4 * batch_size frames.
        """
        def img_name(dir_name, img_details, imagePos: ImagePos):
            # Logs recorded on Linux use '/' separators, Windows logs use
            # '\\'; keep only the basename either way.
            if dir_name.startswith("/opt/carnd_p3/data/"):
                return join(dir_name, img_details[imagePos.value].split("/")[-1])
            else:
                return join(dir_name, img_details[imagePos.value].split("\\")[-1])

        num_samples = len(samples)
        # Steering offset letting left/right frames stand in for the center.
        correction = .15
        dir_names = [
            r'.\IMG_folders\IMG_train2']
        while 1:  # Loop forever so the generator never terminates.
            shuffle(samples)
            for offset in range(0, num_samples, batch_size):
                batch_samples = samples[offset:offset + batch_size]
                images = []
                angles = []
                for batch_sample in batch_samples:
                    dir_index = batch_sample[1]
                    dir_name = dir_names[dir_index]
                    img_details = batch_sample[0]
                    center_image = cv2.imread(img_name(dir_name, img_details, ImagePos.center))
                    left_image = cv2.imread(img_name(dir_name, img_details, ImagePos.left))
                    right_image = cv2.imread(img_name(dir_name, img_details, ImagePos.right))
                    # Fix: skip the whole sample when ANY of the three frames
                    # is missing.  Previously only the center image was
                    # checked, so a missing side image put a None into the
                    # batch and produced a ragged np.array.
                    if center_image is None or left_image is None or right_image is None:
                        print("Image doesn't exist")
                        continue
                    center_angle = float(img_details[3])
                    center_flipped_image = np.fliplr(center_image)
                    center_flipped_angle = -center_angle
                    left_angle = center_angle + correction
                    right_angle = center_angle - correction
                    images.extend([center_image, center_flipped_image, left_image, right_image])
                    angles.extend([center_angle, center_flipped_angle, left_angle, right_angle])
                X_train = np.array(images)
                y_train = np.array(angles)
                yield X_train, y_train

    def create_model(self):
        """Build the network: crop -> normalize -> 5 conv blocks -> dropout
        -> 4 dense layers ending in a single steering-angle output."""
        model = Sequential()
        model.add(Cropping2D(cropping=((self.crop_up, self.crop_down), (0, 0)), input_shape=self.orig_dims))
        # Shape after cropping: (90, 320, 3).
        dims_1 = (self.orig_dims[0] - self.crop_up - self.crop_down, self.orig_dims[1], self.orig_dims[2])
        print(dims_1)
        # Normalize pixel values to [-0.5, 0.5].
        model.add(Lambda(lambda x: (x / 255.0) - 0.5))
        filters, kernel_size, stride = 24, 5, (1, 1)
        model.add(Conv2D(filters=filters, kernel_size=kernel_size, strides=stride,
                         padding='valid', activation='relu', input_shape=dims_1))
        model.add(MaxPool2D((2, 2)))
        filters, kernel_size, stride = 36, 5, (1, 1)
        model.add(Conv2D(filters=filters, kernel_size=kernel_size, strides=stride,
                         padding='valid', activation='relu'))
        model.add(MaxPool2D((2, 2)))
        filters, kernel_size, stride = 48, 3, (1, 1)
        model.add(Conv2D(filters=filters, kernel_size=kernel_size, strides=stride,
                         padding='valid', activation='relu'))
        model.add(MaxPool2D((1, 2)))
        filters, kernel_size, stride = 64, 3, (1, 1)
        model.add(Conv2D(filters=filters, kernel_size=kernel_size, strides=stride,
                         padding='valid', activation='relu'))
        model.add(MaxPool2D((2, 2)))
        filters, kernel_size, stride = 64, 3, (1, 1)
        model.add(Conv2D(filters=filters, kernel_size=kernel_size, strides=stride,
                         padding='valid', activation='relu'))
        model.add(Dropout(.3))
        model.add(Flatten())
        model.add(Dense(100))
        model.add(Dense(50))
        model.add(Dense(10))
        model.add(Dense(1))
        return model

    def load_my_model(self):
        """Load the previous checkpoint (model number one less than the one
        encoded in self.model_name) from the working directory."""
        num = int(self.model_name.split('_')[-1].split('.h5')[0]) - 1
        return load_model(join(os.getcwd(), "my_model_{}.h5".format(num)))

    def train_model(self):
        """Train for 2 epochs using the batch generators, then save the
        model as self.model_name in the working directory."""
        train_samples, validation_samples = self.get_train_val_data()
        # Compile and train the model using the generator function.
        train_generator = self.generator(train_samples, batch_size=self.batch_size)
        validation_generator = self.generator(validation_samples, batch_size=self.batch_size)
        model = self.create_model()
        optimizer = Adam(lr=.0005)
        model.compile(loss='mse', optimizer=optimizer)
        model.fit_generator(train_generator, steps_per_epoch=ceil(len(train_samples) / self.batch_size),
                            validation_data=validation_generator,
                            validation_steps=ceil(len(validation_samples) / self.batch_size),
                            epochs=2, verbose=1)
        save_model(model, join(os.getcwd(), self.model_name))
if __name__ == '__main__':
    # Entry point: train the steering model and save the checkpoint.
    inst = BehavioralCloning()
    inst.train_model()
| StarcoderdataPython |
3256416 | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"Test shared vpc resources in root module."
import pytest
@pytest.fixture(scope='module')
def mod(plan):
    """Shared-VPC service-access module extracted from the Terraform plan."""
    module_key = 'module.net-svpc-access'
    return plan.modules[module_key]
def test_host_vpc(plan):
    "Test that the vpc project is set as shared vpc host."
    host_mod = plan.modules['module.net-vpc-host']
    host_resources = [
        res['values']
        for res in host_mod.resources.values()
        if res['type'] == 'google_compute_shared_vpc_host_project'
    ]
    assert host_resources[0]['project'] == plan.outputs['host_project_id']
def test_service_projects(plan, mod):
    "Test that service projects are registered with the shared vpc."
    attachments = [
        res['values']
        for res in mod.resources.values()
        if res['type'] == 'google_compute_shared_vpc_service_project'
    ]
    assert len(attachments) == 2
    host_id = plan.outputs['host_project_id']
    assert {a['host_project'] for a in attachments} == {host_id}
    expected = sorted(plan.outputs['service_project_ids'].values())
    assert sorted(a['service_project'] for a in attachments) == expected
def test_subnet_users(plan, mod):
    "Test that the network user role is assigned on subnets."
    bindings = [
        res['values']
        for res in mod.resources.values()
        if res['type'] == 'google_compute_subnetwork_iam_binding'
    ]
    assert len(bindings) == 2
    host_id = plan.outputs['host_project_id']
    assert {b['project'] for b in bindings} == {host_id}
    assert sorted(b['subnetwork'] for b in bindings) == ['gce', 'gke']
def test_service_agent(plan, mod):
    "Test that the service agent role is assigned for gke only."
    bindings = [
        res['values']
        for res in mod.resources.values()
        if res['type'] == 'google_project_iam_binding'
    ]
    assert bindings[0] == {
        'project': plan.outputs['host_project_id'],
        'role': 'roles/container.hostServiceAgentUser'
    }
| StarcoderdataPython |
11201342 | # Copyright (c) 2020 BombDash
from __future__ import annotations
from typing import TYPE_CHECKING
import random
import ba
from bastd.actor import bomb as stdbomb
from bastd.actor.bomb import BombFactory, ExplodeMessage
from bastd.gameutils import SharedObjects
from ._redefine import redefine_class_methods, redefine_flag, RedefineFlag
if TYPE_CHECKING:
from typing import Callable, List, Type, Optional, Dict, Sequence, Any
_bombs: List[MeBomb] = []
_blasts: Dict[str, Callable] = {}
def add_bomb(mebomb: MeBomb):
    """Register a MeBomb definition in the module-level bomb registry."""
    _bombs.append(mebomb)
def bomb(bomb_type: str, arm_time: float = None, fuse_time: float = None,
         blast_coefficient: float = 1, sticky: bool = False, impact: bool = False,
         is_mine: bool = False):
    """Class decorator: instantiate a MeBomb subclass with these settings
    and register it as the handler for *bomb_type*.
    """
    def decorator(cls: Type[MeBomb]):
        add_bomb(cls(bomb_type=bomb_type, arm_time=arm_time, fuse_time=fuse_time,
                     blast_coefficient=blast_coefficient, sticky=sticky, impact=impact,
                     is_mine=is_mine))
        # Fix: return the class so the decorated name keeps referring to it;
        # previously the decorator returned None and rebound the name.
        return cls
    return decorator
def add_blast(blast_type: str, callback: Callable):
    """Register *callback* as the custom blast handler for *blast_type*."""
    _blasts[blast_type] = callback
def blast(blast_type: str):
    """Decorator: register the wrapped function as the handler for
    *blast_type* explosions.
    """
    def decorator(function: Callable):
        # The pointless 'nonlocal blast_type' was removed: blast_type is
        # only read here, so the closure already captures it.
        add_blast(blast_type, function)
        # Fix: return the function so the decorated name stays callable;
        # previously the decorator returned None and rebound the name.
        return function
    return decorator
class MeBomb:
    """Describes a custom bomb type: timing, blast size, flags and hooks.

    Subclasses override the hook methods (all no-ops by default):
      init(actor, position, velocity, materials) -- build the bomb node(s)
      arm(actor)                -- arm impact-like bombs or mines
      explode(actor)            -- custom explosion behavior
      on_impact(actor)          -- impact handling
      on_drop(actor)            -- drop handling
      handlemessage(actor, msg) -- intercept messages; a truthy return
                                   suppresses the stock handler
    """

    def __init__(self, bomb_type: str, arm_time: float = None, fuse_time: float = None,
                 blast_coefficient: float = 1, sticky: bool = False, impact: bool = False,
                 is_mine: bool = False):
        # Identity and timing settings.
        self.bomb_type = bomb_type
        self.arm_time = arm_time
        self.fuse_time = fuse_time
        self.blast_coefficient = blast_coefficient
        # Behavior flags.
        self.sticky = sticky
        self.is_impact = impact
        self.is_mine = is_mine

    def init(self, actor, position, velocity, materials):
        pass

    def arm(self, actor):
        pass

    def explode(self, actor):
        pass

    def on_impact(self, actor):
        pass

    def on_drop(self, actor):
        pass

    def handlemessage(self, actor, msg):
        pass
def get_mebomb(bomb_type: str) -> MeBomb:
    """Return the registered MeBomb for *bomb_type*, or None if unknown."""
    return next((candidate for candidate in _bombs
                 if candidate.bomb_type == bomb_type), None)
@redefine_class_methods(stdbomb.Bomb)
class Bomb(ba.Actor):
    """Monkey-patch shim over bastd's Bomb: each listed method is swapped in
    via redefine_class_methods, and DECORATE_ADVANCED methods receive the
    original implementation as *old_function* so stock bomb types can fall
    through unchanged while registered MeBomb types take the custom path.
    """

    # Methods of stdbomb.Bomb replaced by this class.
    _redefine_methods = ('__init__', '_handle_hit', 'arm', '_handle_impact', '_handle_dropped', 'handlemessage')

    @redefine_flag(RedefineFlag.DECORATE_ADVANCED)
    def __init__(self, old_function: Callable,
                 position=(0.0, 1.0, 0.0),
                 velocity=(0.0, 0.0, 0.0),
                 bomb_type: str = 'normal',
                 blast_radius: float = 2.0,
                 source_player: ba.Player = None,
                 owner: ba.Node = None):
        """Create a new Bomb.

        bomb_type can be standard or one declared with the @bomb decorator.
        Note that for impact or land_mine bombs you have to call arm()
        before they will go off.
        """
        # Stock bomb types keep the original constructor entirely.
        mebomb: MeBomb = get_mebomb(bomb_type)
        if mebomb is None:
            old_function(self, position, velocity, bomb_type, blast_radius, source_player, owner)
            return
        ba.Actor.__init__(self)
        factory = BombFactory.get()
        shared = SharedObjects.get()
        self.bomb_type = bomb_type
        self._exploded = False
        self.texture_sequence = None
        self.blast_radius = blast_radius
        self._explode_callbacks = []
        # the player this came from
        self._source_player = source_player
        # by default our hit type/subtype is our own, but we pick up types of
        # whoever sets us off so we know what caused a chain reaction
        self.hit_type = 'explosion'
        self.hit_subtype = self.bomb_type
        # if no owner was provided, use an unconnected node ref
        # (nevermind; trying to use None in these type cases instead)
        # if owner is None:
        #     owner = ba.Node(None)
        # the node this came from
        self.owner = owner
        # adding footing-materials to things can screw up jumping and flying
        # since players carrying those things
        # and thus touching footing objects will think they're on solid
        # ground.. perhaps we don't wanna add this even in the tnt case?..
        materials: tuple
        materials = (factory.bomb_material,
                     shared.object_material)
        if mebomb.is_impact:
            materials = materials + (factory.impact_blast_material,)
        elif mebomb.is_mine:
            materials = materials + (factory.land_mine_no_explode_material,)
        # TODO: add custom materials (now you may add they in mebomb.init)
        fuse_time = None
        # NOTE(review): mebomb is re-fetched although it was looked up above;
        # both lookups use the same key, so this is redundant but harmless.
        mebomb = get_mebomb(self.bomb_type)
        fuse_time = mebomb.fuse_time
        self.blast_radius *= mebomb.blast_coefficient
        if mebomb.sticky:
            materials = materials + (factory.sticky_material,)
        else:
            materials = materials + (factory.normal_sound_material,)
        # NOTE(review): the impact/mine materials were already appended in the
        # if/elif above, so they end up in the tuple twice here — confirm
        # whether duplicate materials are intended/harmless in the engine.
        if mebomb.is_impact:
            materials = materials + (factory.impact_blast_material,)
        if mebomb.is_mine:
            materials = materials + (factory.land_mine_no_explode_material,)
        # The custom type builds its node(s) and may extend the materials.
        mebomb.init(self, position, velocity, materials)
        # Light the fuse!!!
        if fuse_time is not None:
            ba.timer(fuse_time,
                     ba.WeakCall(self.handlemessage, ExplodeMessage()))
        # Pop-in scale animation for the freshly spawned bomb.
        ba.animate(self.node, "model_scale", {0: 0, 0.2: 1.3, 0.26: 1})

    @redefine_flag(RedefineFlag.DECORATE_ADVANCED)
    def arm(self, old_function: Callable):
        """Arm the bomb.

        These types of bombs will not explode until they have been armed.
        """
        if not self.node:
            return
        factory = BombFactory.get()
        mebomb: Optional[MeBomb] = get_mebomb(self.bomb_type)
        if mebomb is None:
            old_function(self)
            return
        mebomb.arm(self)
        ba.playsound(factory.activate_sound, 0.5, position=self.node.position)

    @redefine_flag(RedefineFlag.REDEFINE)
    def _handle_hit(self, msg: ba.HitMessage):
        ispunch = (msg.srcnode and msg.srcnode.getnodetype() == 'spaz')
        # Normal bombs are triggered by non-punch impacts;
        # impact-bombs by all impacts.
        mebomb = get_mebomb(self.bomb_type)
        # NOTE(review): 'not A and not B or C' parses as '(not A and not B)
        # or C' — for mine/impact custom bombs the branch runs even when
        # self._exploded is set; confirm this is the intended precedence.
        if (not self._exploded and not ispunch
                or (mebomb is not None and (mebomb.is_mine or mebomb.is_impact))):
            # Also lets change the owner of the bomb to whoever is setting
            # us off. (this way points for big chain reactions go to the
            # person causing them).
            # NOTE(review): 'not in [None]' is effectively '!= None'; the
            # idiomatic form would be 'is not None'. Also this writes
            # self.source_player while __init__ sets self._source_player —
            # presumably a property on the stock class; verify.
            if msg._source_player not in [None]:
                self.source_player = msg._source_player
            # Also inherit the hit type (if a landmine sets off by a bomb,
            # the credit should go to the mine)
            # the exception is TNT. TNT always gets credit.
            if self.bomb_type != 'tnt':
                self.hit_type = msg.hit_type
                self.hit_subtype = msg.hit_subtype
            # Random 100-199 ms delay staggers chain-reaction explosions.
            ba.timer(100 + int(random.random() * 100),
                     ba.WeakCall(self.handlemessage, ExplodeMessage()),
                     timeformat=ba.TimeFormat.MILLISECONDS)
        assert self.node
        # Apply the physical knockback impulse from the hit.
        self.node.handlemessage('impulse', msg.pos[0], msg.pos[1], msg.pos[2],
                                msg.velocity[0], msg.velocity[1],
                                msg.velocity[2], msg.magnitude,
                                msg.velocity_magnitude, msg.radius, 0,
                                msg.velocity[0], msg.velocity[1],
                                msg.velocity[2])
        # NOTE(review): dead branch — kept from the original source.
        if msg.srcnode:
            pass

    @redefine_flag(RedefineFlag.DECORATE_ADVANCED)
    def _handle_dropped(self, old_function: Callable):
        mebomb = get_mebomb(self.bomb_type)
        if mebomb is None:
            return old_function(self)
        # Arm half a second after being dropped, then run the custom hook.
        self.arm_timer = \
            ba.Timer(0.5, ba.WeakCall(self.handlemessage, stdbomb.ArmMessage()))
        mebomb.on_drop(self)

    @redefine_flag(RedefineFlag.DECORATE_ADVANCED)
    def _handle_impact(self, old_function):
        mebomb = get_mebomb(self.bomb_type)
        if mebomb is None:
            old_function(self)
            return
        mebomb.on_impact(self)
        node = ba.getcollision().opposingnode
        # if we're an impact bomb and we came from this node, don't explode...
        # alternately if we're hitting another impact-bomb from the same
        # source, don't explode...
        try:
            node_delegate = node.getdelegate(stdbomb.Bomb)
        except Exception:
            node_delegate = None
        if node:
            if (mebomb.is_impact and
                    (node is self.owner or
                     (isinstance(node_delegate, stdbomb.Bomb)
                      and get_mebomb(node_delegate.bomb_type) is not None
                      and get_mebomb(node_delegate.bomb_type).is_impact
                      and node_delegate.owner is self.owner))):
                return
            self.handlemessage(ExplodeMessage())

    @redefine_flag(RedefineFlag.DECORATE_ADVANCED)
    def handlemessage(self, msg: Any, old_function: Callable) -> Any:
        # Give the custom type first crack at the message; a truthy return
        # from MeBomb.handlemessage suppresses the stock handler.
        mebomb = get_mebomb(self.bomb_type)
        if not (mebomb is not None and mebomb.handlemessage(self, msg)):
            old_function(self, msg)
@redefine_class_methods(stdbomb.Blast)
class Blast(ba.Actor):
    """Patched stdbomb.Blast that dispatches registered custom blast types."""
    _redefine_methods = ('__init__',)
    @redefine_flag(RedefineFlag.DECORATE_ADVANCED)
    def __init__(self, old_function: Callable,
                 position: Sequence[float] = (0.0, 1.0, 0.0),
                 velocity: Sequence[float] = (0.0, 0.0, 0.0),
                 blast_radius: float = 2.0,
                 blast_type: str = 'normal',
                 source_player: ba.Player = None,
                 hit_type: str = 'explosion',
                 hit_subtype: str = 'normal'):
        """Instantiate with given values."""
        meblast = _blasts.get(blast_type)
        if meblast is None:
            # Not a custom blast type: defer entirely to the stock constructor.
            old_function(self, position=position, velocity=velocity,
                         blast_radius=blast_radius, blast_type=blast_type,
                         source_player=source_player, hit_type=hit_type,
                         hit_subtype=hit_subtype)
            return
        ba.Actor.__init__(self)
        # Retained even though unused below: BombFactory.get() may have
        # lazy-initialization side effects — confirm before removing.
        factory = BombFactory.get()
        self.blast_type = blast_type
        self._source_player = source_player
        self.hit_type = hit_type
        self.hit_subtype = hit_subtype
        self.radius = blast_radius
        ba.camerashake(intensity=5.0 if blast_type == 'tnt' else 1.0)
        # Hand the actual visuals/physics off to the custom blast handler.
        meblast(self,
                position=position,
                velocity=velocity,
                blast_radius=blast_radius,
                hit_type=hit_type,
                hit_subtype=hit_subtype)
| StarcoderdataPython |
4861929 | <gh_stars>0
import urllib.request
import validators
from flask import render_template, request, flash
from bs4 import BeautifulSoup
from app import app
from app.models import Site
def parsing_url(url):
    """Fetch *url* and extract its title plus meta keywords/description.

    Returns a dict with keys 'url', 'title', 'keywords', 'description';
    each value is None when the page does not provide it.
    """
    page = urllib.request.urlopen(url)
    html = BeautifulSoup(page.read(), "html.parser")
    keywords, description = None, None
    # Guard: a page without a <title> tag would otherwise raise
    # AttributeError on ``html.title.string``.
    title = html.title.string if html.title is not None else None
    for tags in html.find_all('meta'):
        if tags.get('name') == 'keywords':
            keywords = tags.get('content')
        if tags.get('name') == 'description':
            description = tags.get('content')
    meta = {'url': url,
            'title': title,
            'keywords': keywords,
            'description': description
            }
    return meta
@app.route('/')
def index():
    """Render the main page listing every previously parsed site."""
    site = Site(1, 2, 3, 4)
    parsed_sites = site.list_of_full()
    context = {
        'title': "Main",
        'sites': parsed_sites,
        'count_sites': len(parsed_sites),
    }
    return render_template("index.html", **context)
@app.route('/parsing/')
def parsing():
    """Show the URL-submission form."""
    context = {'title': "Parsing"}
    return render_template('parsing.html', **context)
@app.route('/parsed/', methods=['POST'])
def parsed():
    """Validate the submitted URL, parse it and store it when it is new.

    Flashes and renders any validation/duplication error.
    """
    error = None
    parsed_url = None
    url = request.form['url']
    # Validate BEFORE fetching: the original called parsing_url() first,
    # so an invalid URL crashed inside urlopen before validation ran.
    if validators.url(url) is not True:
        error = "Not a valid URL"
    else:
        parsed_url = parsing_url(url)
        site = Site(
            parsed_url['url'],
            parsed_url['title'],
            parsed_url['keywords'],
            parsed_url['description']
        )
        if site.search(url) is False:
            error = 'The URL is already in the database'
            parsed_url = None
        else:
            site.add_url()
    flash(error)
    return render_template('parsed.html',
                           url=url,
                           parsed_url=parsed_url,
                           title="Parsed",
                           error=error
                           )
1633377 | __author__ = '<NAME>'
'''
https://codeforces.com/problemset/problem/34/B
Solution: As long as the prices are negative, Bob would be interested to buy. Hence we sort the prices and select
the prices that are negative. That sum is what he needs to have (multiplied by -1).
'''
def solve(n, m, prices):
    """Return the money Bob needs: the absolute sum of the negative prices
    among the cheapest min(n, m) items (he only 'buys' items he is paid
    to take, i.e. with negative price).

    Note: sorts *prices* in place.
    """
    prices.sort()
    total = 0
    # `range` instead of the Python-2-only `xrange` (works on 2 and 3).
    for i in range(min(n, m)):
        if prices[i] >= 0:
            # Sorted ascending: everything from here on is non-negative.
            break
        total += prices[i]
    return -total
if __name__ == "__main__":
    # NOTE: Python 2 syntax (raw_input / print statement), matching the
    # original Codeforces submission environment. map() returns a list in
    # Python 2, so solve() may sort it in place.
    n, m = map(int, raw_input().split(" "))
    prices = map(int, raw_input().split(" "))
    print solve(n, m, prices)
| StarcoderdataPython |
4902413 | #!/usr/bin/env python
"""
Sign daemon process that maintains the count displayed on the sign.
"""
import datetime
import logging
import optparse
import signal
import sys
import time
from sign_controller import SignController
from sign_util import CURSOR_HOME, CURSOR_MAGIC_1, CURSOR_MAGIC_2, ESCAPE
from sign_util import create_serial_connection, seconds_into_year
RATE_LIMIT = 1.0
SECONDS_PER_YEAR = 365 * 86400
class MockConnection(object):
    """Fake connection used for standalone testing without a serial device."""

    def __init__(self):
        # The very first read must deliver the cursor-sync escape sequence
        # so the controller believes the cursor reached home position.
        self.first = True

    def read(self, size):
        if not self.first:
            # Subsequent reads: emulate a quiet line.
            time.sleep(1.0)
            return ''
        self.first = False
        return ESCAPE + CURSOR_MAGIC_1 + CURSOR_MAGIC_2 + CURSOR_HOME

    def write(self, buf):
        pass

    def flush(self):
        pass
def run_fixed(controller, count):
    """Run in fixed mode, maintaining a constant number on the sign.

    Args:
        controller: Controller used to communicate with the sign.
        count: Count to display on the sign.
    """
    value = int(count)
    while controller.is_alive():
        controller.set_count(value)
        time.sleep(RATE_LIMIT)
def run_target(controller, target):
    """Run in target mode, counting up to an annual target.

    The displayed count grows linearly through the year so that it reaches
    *target* exactly at year end.

    Args:
        controller: Controller used to communicate with the sign.
        target: Target to count to by the end of the year.
    """
    per_second = float(target) / SECONDS_PER_YEAR
    while controller.is_alive():
        elapsed = seconds_into_year(datetime.datetime.now())
        controller.set_count(int(elapsed * per_second))
        time.sleep(RATE_LIMIT)
def go():
    """Main daemon function: parse options, sync with the sign, run test
    patterns, then enter fixed or target counting mode until interrupted."""
    logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
                        level=logging.DEBUG)
    parser = optparse.OptionParser()
    parser.add_option('-f', '--fixed', dest='fixed', default=None,
                      help='Set a fixed count on the sign')
    parser.add_option('-p', '--port', dest='port', default='/dev/ttyUSB0',
                      help='Serial port device')
    parser.add_option('-m', '--mock', dest='mock', action='store_true',
                      help='Use a mock sign connection for local testing')
    parser.add_option('-t', '--target', dest='target', default=443000,
                      help='Annual target to automatically count up to')
    (options, args) = parser.parse_args()
    # Choose the transport: fake connection for local testing, real serial
    # device otherwise.
    if options.mock:
        connection = MockConnection()
    else:
        connection = create_serial_connection(options.port)
    controller = SignController(connection)
    controller.start()
    # SIGINT handler: ask the controller thread to exit cleanly.
    # (The parameter named `signal` shadows the module inside the handler,
    # but the module is not used there, so this is harmless.)
    def cleanup(signal, frame):
        logging.info('Interrupted, cleaning up...')
        controller.request_exit()
    signal.signal(signal.SIGINT, cleanup)
    # Block until the sign reports its cursor back at home position.
    logging.info('Waiting for cursor sync...')
    controller.ping()
    while controller.get_cursor() != 0:
        logging.debug(controller.get_cursor())
        time.sleep(1.0)
    # Briefly light all segments, then clear, as a visual self-test.
    logging.info('Running test patterns...')
    controller.set_count(888888)
    time.sleep(2.0)
    controller.set_count(0)
    time.sleep(2.0)
    # -f wins over -t: hold a constant number; otherwise count toward the
    # annual target.
    if options.fixed is not None:
        count = int(options.fixed)
        logging.info('Running fixed mode for %d...', count)
        run_fixed(controller, count)
    else:
        target = int(options.target)
        logging.info('Running target mode for %d...', target)
        run_target(controller, target)
    controller.request_exit()
    controller.join()
| StarcoderdataPython |
4991802 | __all__ = ['Chats']
from .conversation import Conversation
from .send_read_acknowledge import SendReadAcknowledge
class Chats(Conversation, SendReadAcknowledge):
    """methods.chats

    Aggregating mixin: combines the ``Conversation`` and
    ``SendReadAcknowledge`` method mixins into a single class.
    """
11312006 | <gh_stars>0
# Copyright (c) 2021 <NAME>, <NAME>.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.
import pytest
from inators import imp as inators_imp
@pytest.mark.parametrize('name, object', [
    ('inators.imp.import_object', inators_imp.import_object)
])
def test_import_object(name, object):
    # import_object should resolve a dotted path to the exact object it
    # names — exercised here on itself (resolving its own dotted path).
    # (The parameter name `object` shadows the builtin, but it is pinned by
    # the parametrize argnames string above.)
    assert inators_imp.import_object(name) == object
| StarcoderdataPython |
3342589 | <filename>src/masonite/contrib/essentials/helpers/__init__.py<gh_stars>0
from .views.hashid import hashid
| StarcoderdataPython |
209195 | <filename>rambo/resources/views.py
from utils import *
from resources.api import *
from django.contrib.auth.decorators import login_required
try:
import json
except:
import simplejson as json
@login_required
def get_resources(request, user=None):
    # Return all resources for *user* as a JSON-style response; any failure
    # (including serialization) is reported via error() rather than a 500.
    try:
        return response(do_get_resources(user))
    except Exception as e:
        return error(str(e))
@login_required
def get_resource(request, user, resource):
    # Return one named resource belonging to *user*; failures become an
    # error() response instead of a 500.
    try:
        return response(do_get_resource(user, resource))
    except Exception as e:
        return error(str(e))
@login_required
def add_resource(request):
    """Create a resource from the 'name' request parameter (optional 'icon')."""
    name = request.REQUEST.get('name')
    icon = request.REQUEST.get('icon')
    try:
        return response(do_add_resource(request.user.username, name, icon))
    except Exception as e:
        return error(str(e))
@login_required
def add_resource_template(request):
    # Return the blank resource template used by the creation form.
    try:
        return response(get_template())
    except Exception as e:
        return error(str(e))
@login_required
def remove_resource(request, user, resource):
    """Remove *resource* belonging to *user*.

    Fixes a NameError: the original passed an undefined name ``data``
    where the ``resource`` parameter was clearly intended.
    """
    try:
        return response(do_remove_resource(request.user, user, resource))
    except Exception as e:
        return error(str(e))
@login_required
def op_category(request, op):
    """Dispatch a category operation: op == 'add' creates, op == 'rm' removes.

    Requires a 'name' request parameter in both cases.
    """
    name = request.REQUEST.get('name', None)
    if name is None:
        return error("name must be set")
    if op == "add":
        parent = request.REQUEST.get('parent', None)
        # Re-enabled the try/except (left commented out in the original)
        # for consistency with every other view in this module.
        try:
            return response(do_add_category(request.user, name, parent))
        except Exception as e:
            return error(str(e))
    elif op == "rm":
        try:
            return response(do_remove_category(request.user, name))
        except Exception as e:
            return error(str(e))
    # Previously an unknown op fell through and returned None (an invalid
    # view response); report it explicitly instead.
    return error("unknown op: %s" % op)
@login_required
def get_categories(request):
    # Return every category; failures become an error() response.
    try:
        return response(do_get_categories())
    except Exception as e:
        return error(str(e))
@login_required
def share_resource(request, user, resource):
    """Share *resource* with the user named in the 'with' request parameter.

    The optional 'transparent' parameter (string "true"/"false") controls
    transparent sharing.
    """
    share_with = request.REQUEST.get('with', None)
    transparent = request.REQUEST.get('transparent', "false")
    transparent = transparent == "true"
    if not share_with:
        # Previously a missing/empty 'with' fell through and returned None
        # (an invalid view response); report the missing parameter instead.
        return error("with must be set")
    try:
        return response(do_share_resource(request.user.username, share_with,
                                          resource, transparent))
    except Exception as e:
        return error(str(e))
@login_required
def get_shared_resources(request, user):
    # Return the resources shared with the requesting user. Note the `user`
    # URL parameter is ignored; request.user.username is used instead.
    try:
        return response(do_get_shared_resources(request.user.username))
    except Exception as e:
        return error(str(e))
| StarcoderdataPython |
11283882 | <filename>utils/file_handler.py<gh_stars>1-10
# -*- coding: utf-8 -*-
"""
--------------------------------------
@File : file_handler.py
@Author : maixiaochai
@Email : <EMAIL>
@CreatedOn : 2020/8/13 23:53
--------------------------------------
"""
from os import makedirs
from os.path import exists
import aiohttp
class FileHandler:
    """Static path/file helpers plus an async downloader that saves an
    image to disk and records it via a database object."""

    @staticmethod
    def trans_dir(dir_path: str):
        # Normalize a directory path and guarantee a trailing '/'.
        # NOTE(review): backslashes are replaced with the EMPTY string, not
        # with '/', so Windows-style paths collapse into one component —
        # presumably replace with '/' was intended; confirm with callers.
        dir_path = dir_path.replace('\\', '') if '\\' in dir_path else dir_path
        dir_path = dir_path if dir_path.endswith("/") else dir_path + '/'
        return dir_path

    @staticmethod
    def make_dirs(dir_path: str):
        # Create the directory when it does not exist yet.
        if not exists(dir_path):
            makedirs(dir_path)

    @staticmethod
    def save_file(file_path: str, content):
        # Write binary *content* to *file_path*, overwriting any existing file.
        with open(file_path, 'wb') as f:
            f.write(content)

    @staticmethod
    async def __get_content(link):
        # Fetch *link* with a throwaway aiohttp session; return the raw bytes.
        # NOTE(review): one ClientSession per request is wasteful; consider
        # sharing a session — confirm call volume before changing.
        async with aiohttp.ClientSession() as session:
            response = await session.get(link)
            content = await response.read()
            return content

    async def download_img(self, db, insert_data):
        # Download the picture described by *insert_data*, save it, then
        # insert the record via *db*. Keys read here: 'pic_url',
        # 'file_path', 'girl_name'.
        url, file_path, girl_name = insert_data.get('pic_url'), insert_data.get('file_path'), insert_data.get('girl_name')
        content = await self.__get_content(url)
        self.save_file(file_path, content)
        db.insert(**insert_data)
| StarcoderdataPython |
8045917 | <gh_stars>1-10
#!/usr/bin/env python
#
# Licensed under the BSD license. See full license in LICENSE file.
# http://www.lightshowpi.com/
#
# Author: <NAME>
# Author: <NAME> (<EMAIL>)
"""Empty wrapper module for wiringpi
This module is a place holder for virtual hardware to run a simulated lightshow
an a pc. This module is not yet functional.
"""
# Setup
def wiringPiSetup(*args):
    """No-op stand-in for wiringpi.wiringPiSetup."""
    pass
def wiringPiSetupSys(*args):
    """No-op stand-in for wiringpi.wiringPiSetupSys."""
    pass
def pinMode(*args):
    """No-op stand-in for wiringpi.pinMode."""
    pass
# Pin Writes
def softPwmCreate(*args):
    """No-op stand-in for wiringpi.softPwmCreate."""
    pass
def softPwmWrite(*args):
    """No-op stand-in for wiringpi.softPwmWrite."""
    pass
def digitalWrite(*args):
    """No-op stand-in for wiringpi.digitalWrite."""
    pass
# Devices
def mcp23017Setup(*args):
    """No-op stand-in for the MCP23017 port-expander setup."""
    pass
def mcp23s17Setup(*args):
    """No-op stand-in for the MCP23S17 port-expander setup."""
    pass
def mcp23016Setup(*args):
    """No-op stand-in for the MCP23016 port-expander setup."""
    pass
def mcp23008Setup(*args):
    """No-op stand-in for the MCP23008 port-expander setup."""
    pass
def mcp23s08Setup(*args):
    """No-op stand-in for the MCP23S08 port-expander setup."""
    pass
def sr595Setup(*args):
    """No-op stand-in for the 74x595 shift-register setup."""
    pass
def pcf8574Setup(*args):
    """No-op stand-in for the PCF8574 port-expander setup."""
    pass
| StarcoderdataPython |
6480173 | '''
Created on Oct 14, 2018
@author: <NAME>
'''
import os, datetime, threading
from classes import tkinter_app
def init(datalog_path):
    """Initialise the datalogger with a file path or a directory.

    NOTE(review): this module looks like a class that was mechanically
    converted to module functions. Names such as ``datalog_filepath`` are
    assigned as LOCALS here but read as globals elsewhere, so the state
    never actually propagates — confirm intended state handling.
    """
    #Initialize the class with Datalog file path
    max_file_size = 0  # NOTE(review): assigned but never used
    #Check if datalog path is a file or a directory
    if not os.path.isdir(datalog_path):
        datalog_filepath = datalog_path #If datalog_path is file assign it to datalog_filepath variable
        datalog_dir = os.path.split(datalog_filepath)[0] #Extract datalog directory from datalog filepath
        check_if_folder_exists(datalog_dir) #Create datalog directory if it doesn't exists
    else:
        datalog_dir = datalog_path
        check_if_folder_exists(datalog_dir) #Create datalog directory if it doesn't exists
        datalog_filepath = os.path.join(datalog_dir,('log_'+get_time()+".csv"))
    print(get_time())
    open()
    return
def check_if_folder_exists(folder_path):
    """Ensure *folder_path* exists, creating it (and parents) if needed."""
    folder_missing = not os.path.exists(folder_path)
    if folder_missing:
        os.makedirs(folder_path)
    return
def get_file_size():
    """Return the datalog file size in bytes.

    NOTE(review): relies on a global ``datalog_filepath`` that is only ever
    assigned as a local inside ``init`` — raises NameError as written.
    """
    #Get Datalog File size in Bytes
    return os.path.getsize(datalog_filepath)
def get_time():
    """Return the current local time formatted as MM-DD-YYYY-HH.MM.SS."""
    return datetime.datetime.now().strftime("%m-%d-%Y-%H.%M.%S")
def get_log(length):
    """Return lines from the datalog.

    NOTE(review): despite the stated intent, ``length`` is ignored and ALL
    remaining lines are returned; also depends on a global
    ``datalog_fileref`` that is never set at module scope.
    """
    #Returns N lines from Datalog, where N is Specified by Variable 'length'
    line = datalog_fileref.readlines()
    return line
def set_log(category, sub_category, log_string):
    """Append one timestamped CSV log entry.

    Fixes a SyntaxError: the original signature began with a stray comma
    (``def set_log(, category, ...)``) — apparently a leftover from
    stripping ``self`` when converting a class method to a function.

    NOTE(review): depends on a global ``datalog_fileref`` that is never set
    at module scope — confirm intended state handling.
    """
    #Logs the incoming entires (Category, Subcategory, String) with timestamp
    #Category = calss_name = __class__.__name__
    #Subcategory = inspect.getframeinfo(inspect.currentframe()).function
    timestamp = get_time()
    line = category + ',' + sub_category + ',' + log_string + "," + timestamp +'\n'
    datalog_fileref.writelines(line)
    return
def log_execution_time():
    """Placeholder: intended to return the module's execution time for
    logging, currently unimplemented (returns None)."""
    #Returns the execution time on the module for logging
    return
def open():
    """Open the datalog file for appending and write the CSV header row.

    The original body called plain ``open(...)`` — but this module-level
    function shadows the builtin ``open``, so the call recursed into itself
    forever.  The builtin is now reached explicitly via the builtins module.

    NOTE(review): ``datalog_fileref`` is a local here and ``datalog_filepath``
    is never set at module scope — confirm intended state handling.
    """
    try:
        import builtins as _builtins  # Python 3
    except ImportError:
        import __builtin__ as _builtins  # Python 2
    datalog_fileref = _builtins.open(datalog_filepath, 'a+')
    line = 'category' + ',' + 'sub_category' + ',' + 'log_string' + "," + 'timestamp' +'\n'
    datalog_fileref.writelines(line)
    return
def close():
    """Close the datalog file.

    NOTE(review): ``datalog_fileref`` is never assigned at module scope, so
    this raises NameError as written.
    """
    datalog_fileref.close()
    return
def show_logger():
    """Launch the Tk progress-bar GUI on a background thread."""
    #Separate thread to display & use queue to refresh datalog
    logger_gui = tkinter_app.tkinter_app()
    window_title = 'Datalogger'
    threading.Thread(target=logger_gui.progressbar_app, args=(window_title,)).start()
    return
| StarcoderdataPython |
1624488 | <reponame>bartekpacia/informatyka<gh_stars>1-10
def jest_anagram(a: str, b: str) -> bool:
    """Return True when *a* and *b* are anagrams of each other.

    Replaces two hand-rolled character-frequency dicts with
    collections.Counter (same comparison semantics, far less code).
    """
    from collections import Counter
    if a == b:
        # the fast way — identical strings are trivially anagrams
        return True
    return Counter(a) == Counter(b)
def jest_jednolity(a: str) -> bool:
    """Return True when *a* consists of exactly one distinct character.

    Empty string -> False, matching the original set-based check.
    """
    return len(set(a)) == 1
| StarcoderdataPython |
1667989 | # Generated by Django 2.2.4 on 2019-08-31 16:17
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated (Django 2.2.4): adds a ``fontawesome_icon`` CharField
    # (max_length=250, default='') to the ``opportunitycategory`` model.
    # Left byte-identical otherwise: editing applied migrations breaks
    # Django's migration-state consistency.
    dependencies = [
        ('wajiha', '0012_opportunitycategory_is_featured'),
    ]
    operations = [
        migrations.AddField(
            model_name='opportunitycategory',
            name='fontawesome_icon',
            field=models.CharField(default='', max_length=250),
        ),
    ]
| StarcoderdataPython |
294111 | <gh_stars>100-1000
# Copyright 2021 D-Wave Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
from dimod.binary.binary_quadratic_model import BinaryQuadraticModel
from dimod.typing import Variable
from dimod.vartypes import Vartype
__all__ = ['binary_encoding']
def binary_encoding(v: Variable, upper_bound: int) -> BinaryQuadraticModel:
    """Return a binary quadratic model encoding an integer.

    Args:
        v: The integer variable label.

        upper_bound: The upper bound on the integer value (inclusive).

    Returns:
        A binary quadratic model. The variables in the BQM will be labelled
        with tuples of length two or three. The first value of the tuple will
        be the variable label ``v`` provided. The second value will be the
        coefficient in the integer encoding. One of the variables will
        have a third value in the tuple, ``'msb'``. This is the variable
        occupying the position of the most significant bit. Though it may
        actually be a smaller number in order to enforce the ``upper_bound``.

    Example:

        >>> bqm = dimod.generators.binary_encoding('i', 6)
        >>> bqm
        BinaryQuadraticModel({('i', 1): 1.0, ('i', 2): 2.0, ('i', 3, 'msb'): 3.0}, {}, 0.0, 'BINARY')

        We can use a sample to restore the original integer value.

        >>> sample = {('i', 1): 1, ('i', 2): 0, ('i', 3, 'msb'): 1}
        >>> bqm.energy(sample)
        4.0
        >>> sum(v[1]*val for v, val in sample.items()) + bqm.offset
        4.0

        If you wish to encode integers with a lower bound, you can use the
        binary quadratic model's :attr:`~BinaryQuadraticModel.offset` attribute.

        >>> i = dimod.generators.binary_encoding('i', 10) + 5  # integer in [5, 15]

    References:
        [1]: <NAME>, <NAME> (2017), Practical Integer-to-Binary
        Mapping for Quantum Annealers. arxiv.org:1706.01945.

    """
    # note: the paper above also gives a nice way to handle bounded coefficients
    # if we want to do that in the future.

    if upper_bound <= 1:
        # Message fixed: the check rejects any value <= 1, so the bound must
        # be strictly greater than 1 (the old text said ">= 1").
        raise ValueError("upper_bound must be greater than 1, "
                         f"received {upper_bound}")
    upper_bound = math.floor(upper_bound)

    bqm = BinaryQuadraticModel(Vartype.BINARY)

    max_pow = math.floor(math.log2(upper_bound))
    # Standard binary digit weights 1, 2, 4, ... below the most significant bit.
    for exp in range(max_pow):
        val = 1 << exp
        bqm.set_linear((v, val), val)
    # Most significant coefficient, clipped so the encoded total cannot
    # exceed upper_bound. (The original put this in a ``for/else``; with no
    # ``break`` in the loop the ``else`` always ran, so it is plain
    # sequential code.)
    val = upper_bound - ((1 << max_pow) - 1)
    bqm.set_linear((v, val, 'msb'), val)

    return bqm
| StarcoderdataPython |
6526929 | import unittest
from typing import List
class Solution:
    def twoCitySchedCost(self, costs: List[List[int]]) -> int:
        """Minimum total cost to fly exactly half the people to city A and
        half to city B.

        Greedy: sort by (costA - costB); the half with the smallest
        difference (relatively cheapest for A) flies to A, the rest to B.
        Replaces the original [diff, index] bookkeeping list with an
        idiomatic ``sorted`` + key. Ties in the difference do not change
        the total. O(n log n) time.
        """
        ordered = sorted(costs, key=lambda c: c[0] - c[1])
        half = len(costs) // 2
        return (sum(c[0] for c in ordered[:half]) +
                sum(c[1] for c in ordered[half:]))
class TestTwoCitySchedCost(unittest.TestCase):
    """Unit tests covering distinct, tied and fully-equal cost pairs."""
    def setUp(self):
        # Fresh Solution instance per test.
        self.sol = Solution()
    def test_two_city_sched_cost_easy(self):
        # All (A-B) differences distinct.
        costs = [[10, 20], [30, 200], [400, 50], [30, 20]]
        expected_total_min = 110
        actual_total_min = self.sol.twoCitySchedCost(costs)
        self.assertEqual(actual_total_min, expected_total_min)
    def test_two_city_sched_cost_two_equal(self):
        # One pair has equal cost for both cities (zero difference).
        costs = [[10, 20], [30, 200], [400, 50], [30, 30]]
        expected_total_min = 120
        actual_total_min = self.sol.twoCitySchedCost(costs)
        self.assertEqual(actual_total_min, expected_total_min)
    def test_two_city_sched_cost_all_equal(self):
        # Every pair identical: any split yields the same total.
        costs = [[10, 20], [10, 20], [10, 20], [10, 20]]
        expected_total_min = 60
        actual_total_min = self.sol.twoCitySchedCost(costs)
        self.assertEqual(actual_total_min, expected_total_min)
# Manual invocation at import time: runs a single test case without the
# unittest runner (setUp is called by hand).
test_ = TestTwoCitySchedCost()
test_.setUp()
test_.test_two_city_sched_cost_easy()
| StarcoderdataPython |
5192874 | from copy import copy
from rest_framework.compat import urlparse
from rest_framework.schemas import SchemaGenerator as BaseSchemaGenerator
import coreapi
from drf_swagger_extras.hacks import monkey_patch
monkey_patch()
class SchemaGenerator(BaseSchemaGenerator):
    """Schema generator that augments coreapi links with response and
    content-type metadata gathered from ``_responses`` annotations."""

    def get_link(self, path, method, callback, view):
        """
        Return a `coreapi.Link` instance for the given endpoint.
        """
        fields = self.get_path_fields(path, method, callback, view)
        fields += self.get_serializer_fields(path, method, callback, view)
        fields += self.get_pagination_fields(path, method, callback, view)
        fields += self.get_filter_fields(path, method, callback, view)
        # Only form/body fields need an encoding.
        if fields and any([field.location in ('form', 'body')
                           for field in fields]):
            encoding = self.get_encoding(path, method, callback, view)
        else:
            encoding = None
        description = self.get_description(path, method, callback, view)
        link = coreapi.Link(
            url=urlparse.urljoin(self.url, path),
            action=method.lower(),
            encoding=encoding,
            description=description,
            fields=fields,
            transform=None,  # Not handled, but here for future reference
        )
        # Stash extra swagger metadata on the link (consumed downstream).
        link._responses = self.get_responses(path, method, callback, view)
        link._produces = self.get_produces(path, method, callback, view)
        return link

    def _get_actual_view(self, method, callback, view, default=True):
        """Return the viewset action bound to *method*, or *view* itself
        (or None when ``default`` is falsy) for plain views."""
        if hasattr(callback, 'actions'):
            action_name = callback.actions[method.lower()]
            return getattr(view, action_name)
        return view if default else None

    def get_responses(self, path, method, callback, view):
        """Merge view-level ``_responses`` with action-level overrides.

        Returns None when neither defines any responses.
        """
        responses = {}
        if hasattr(view, '_responses'):
            responses = copy(view._responses)
        action = self._get_actual_view(method, callback, view, default=False)
        if action and hasattr(action, '_responses'):
            responses.update(action._responses)
        return responses or None

    def get_produces(self, path, method, callback, view):
        # Fixed set for now; could be made configurable per view.
        return ["application/json", "application/xml"]

    def get_description(self, path, method, callback, view):
        """Build the link description from the action and/or view docstring."""
        action = self._get_actual_view(method, callback, view, default=False)
        if action and action.__doc__:
            return self._get_description(view, action)
        return self._get_description(view, None)

    def _get_description(self, view, action=None):
        generic = view.__doc__
        # Guard against action=None: ``None.__doc__`` is NoneType's own
        # docstring, which previously leaked into generated descriptions.
        specific = action.__doc__ if action is not None else None
        return description_format(generic, specific)
def description_format(generic=None, specific=None):
    """Combine an action docstring (*specific*) with a view docstring
    (*generic*) into one description, specific part first.

    Each part has per-line whitespace stripped. Returns None when both
    parts are empty.
    """
    def dedent(text):
        if not text:
            return ''
        return "\n".join(line.strip() for line in text.splitlines())

    if not (generic or specific):
        return None
    if specific:
        # Blank line between the specific and generic parts.
        specific += "\n\n"
    return dedent(specific) + dedent(generic)
| StarcoderdataPython |
1864062 | <filename>analysis/sentiment-time-graph.py<gh_stars>0
import json
import os
import matplotlib.pyplot as plt
import commonfunctions as cf
# NOTE: Python 2 script (xrange, Axes.set_axis_bgcolor — removed in newer
# matplotlib).
# Resolve the repository root; `directory` is computed for parity with the
# other analysis scripts but is not used below.
root_directory = os.path.abspath(os.path.dirname(os.path.abspath(os.curdir)))
directory = os.path.join(root_directory, cf.working_directory)
# Pre-computed per-year sentiment proportions produced by an earlier step.
with open('sentiment-time.json', 'r') as f:
    data = json.load(f)
uniqueYears = data['uniqueYears']
uniquenegativewords = data['uniquenegativewords']
uniquepositivewords = data['uniquepositivewords']
# Diverging palette: brown for negative, teal for positive.
colors = ['#d8b365', '#5ab4ac']
plt.style.use('ggplot')
fig = plt.figure(0)
ax = fig.gca()
ax.grid(b=False)
ax.set_axis_bgcolor('white')
# Set x axis labels so they align with campaign years
ax.set_xlim([1956, 2020])
ax.set_xticks(xrange(1960, 2020, 8))
ax.plot(uniqueYears, uniquepositivewords, label='positive', color=colors[1], lw=2.5)
ax.plot(uniqueYears, uniquenegativewords, label='negative', color=colors[0], lw=2.5)
ax.legend()
ax.set_xlabel('Year')
ax.set_ylabel('Proportion of words in negative/positive dictionaries')
ax.set_title('Sentiment over time in US presidential election debates', y=1.05)
# Save as SVG into the repo's images directory.
plt.savefig(os.path.join(root_directory, 'images', 'analysis-sentiment-time.svg'), format='svg')
| StarcoderdataPython |
11281512 | # -*- coding: utf-8 -*-
#
# Copyright 2012-2014 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import cgi
import HTMLParser
import time
import re
import requests
import types
import unidecode
from BeautifulSoup import BeautifulSoup
from blessings import Terminal as _Terminal
from datetime import datetime
from lxml import etree
from webapp2 import cached_property
VALID_HTML_TAGS = ['br']
# default unicode character mapping (you may not see some chars, leave as is )
char_map = {u'À': 'A', u'Á': 'A', u'Â': 'A', u'Ã': 'A', u'Ä': 'Ae', u'Å': 'A', u'Æ': 'A', u'Ā': 'A', u'Ą': 'A',
u'Ă': 'A', u'Ç': 'C', u'Ć': 'C', u'Č': 'C', u'Ĉ': 'C', u'Ċ': 'C', u'Ď': 'D', u'Đ': 'D', u'È': 'E',
u'É': 'E', u'Ê': 'E', u'Ë': 'E', u'Ē': 'E', u'Ę': 'E', u'Ě': 'E', u'Ĕ': 'E', u'Ė': 'E', u'Ĝ': 'G',
u'Ğ': 'G', u'Ġ': 'G', u'Ģ': 'G', u'Ĥ': 'H', u'Ħ': 'H', u'Ì': 'I', u'Í': 'I', u'Î': 'I', u'Ï': 'I',
u'Ī': 'I', u'Ĩ': 'I', u'Ĭ': 'I', u'Į': 'I', u'İ': 'I', u'IJ': 'IJ', u'Ĵ': 'J', u'Ķ': 'K', u'Ľ': 'K',
u'Ĺ': 'K', u'Ļ': 'K', u'Ŀ': 'K', u'Ł': 'L', u'Ñ': 'N', u'Ń': 'N', u'Ň': 'N', u'Ņ': 'N', u'Ŋ': 'N',
u'Ò': 'O', u'Ó': 'O', u'Ô': 'O', u'Õ': 'O', u'Ö': 'Oe', u'Ø': 'O', u'Ō': 'O', u'Ő': 'O', u'Ŏ': 'O',
u'Œ': 'OE', u'Ŕ': 'R', u'Ř': 'R', u'Ŗ': 'R', u'Ś': 'S', u'Ş': 'S', u'Ŝ': 'S', u'Ș': 'S', u'Š': 'S',
u'Ť': 'T', u'Ţ': 'T', u'Ŧ': 'T', u'Ț': 'T', u'Ù': 'U', u'Ú': 'U', u'Û': 'U', u'Ü': 'Ue', u'Ū': 'U',
u'Ů': 'U', u'Ű': 'U', u'Ŭ': 'U', u'Ũ': 'U', u'Ų': 'U', u'Ŵ': 'W', u'Ŷ': 'Y', u'Ÿ': 'Y', u'Ý': 'Y',
u'Ź': 'Z', u'Ż': 'Z', u'Ž': 'Z', u'à': 'a', u'á': 'a', u'â': 'a', u'ã': 'a', u'ä': 'ae', u'ā': 'a',
u'ą': 'a', u'ă': 'a', u'å': 'a', u'æ': 'ae', u'ç': 'c', u'ć': 'c', u'č': 'c', u'ĉ': 'c', u'ċ': 'c',
u'ď': 'd', u'đ': 'd', u'è': 'e', u'é': 'e', u'ê': 'e', u'ë': 'e', u'ē': 'e', u'ę': 'e', u'ě': 'e',
u'ĕ': 'e', u'ė': 'e', u'ƒ': 'f', u'ĝ': 'g', u'ğ': 'g', u'ġ': 'g', u'ģ': 'g', u'ĥ': 'h', u'ħ': 'h',
u'ì': 'i', u'í': 'i', u'î': 'i', u'ï': 'i', u'ī': 'i', u'ĩ': 'i', u'ĭ': 'i', u'į': 'i', u'ı': 'i',
u'ij': 'ij', u'ĵ': 'j', u'ķ': 'k', u'ĸ': 'k', u'ł': 'l', u'ľ': 'l', u'ĺ': 'l', u'ļ': 'l', u'ŀ': 'l',
u'ñ': 'n', u'ń': 'n', u'ň': 'n', u'ņ': 'n', u'ʼn': 'n', u'ŋ': 'n', u'ò': 'o', u'ó': 'o', u'ô': 'o',
u'õ': 'o', u'ö': 'oe', u'ø': 'o', u'ō': 'o', u'ő': 'o', u'ŏ': 'o', u'œ': 'oe', u'ŕ': 'r', u'ř': 'r',
u'ŗ': 'r', u'ś': 's', u'š': 's', u'ť': 't', u'ù': 'u', u'ú': 'u', u'û': 'u', u'ü': 'ue', u'ū': 'u',
u'ů': 'u', u'ű': 'u', u'ŭ': 'u', u'ũ': 'u', u'ų': 'u', u'ŵ': 'w', u'ÿ': 'y', u'ý': 'y', u'ŷ': 'y',
u'ż': 'z', u'ź': 'z', u'ž': 'z', u'ß': 'ss', u'ſ': 'ss', u'Α': 'A', u'Ά': 'A', u'Ἀ': 'A', u'Ἁ': 'A',
u'Ἂ': 'A', u'Ἃ': 'A', u'Ἄ': 'A', u'Ἅ': 'A', u'Ἆ': 'A', u'Ἇ': 'A', u'ᾈ': 'A', u'ᾉ': 'A', u'ᾊ': 'A',
u'ᾋ': 'A', u'ᾌ': 'A', u'ᾍ': 'A', u'ᾎ': 'A', u'ᾏ': 'A', u'Ᾰ': 'A', u'Ᾱ': 'A', u'Ὰ': 'A', u'Ά': 'A',
u'ᾼ': 'A', u'Β': 'B', u'Γ': 'G', u'Δ': 'D', u'Ε': 'E', u'Έ': 'E', u'Ἐ': 'E', u'Ἑ': 'E', u'Ἒ': 'E',
u'Ἓ': 'E', u'Ἔ': 'E', u'Ἕ': 'E', u'Έ': 'E', u'Ὲ': 'E', u'Ζ': 'Z', u'Η': 'I', u'Ή': 'I', u'Ἠ': 'I',
u'Ἡ': 'I', u'Ἢ': 'I', u'Ἣ': 'I', u'Ἤ': 'I', u'Ἥ': 'I', u'Ἦ': 'I', u'Ἧ': 'I', u'ᾘ': 'I', u'ᾙ': 'I',
u'ᾚ': 'I', u'ᾛ': 'I', u'ᾜ': 'I', u'ᾝ': 'I', u'ᾞ': 'I', u'ᾟ': 'I', u'Ὴ': 'I', u'Ή': 'I', u'ῌ': 'I',
u'Θ': 'TH', u'Ι': 'I', u'Ί': 'I', u'Ϊ': 'I', u'Ἰ': 'I', u'Ἱ': 'I', u'Ἲ': 'I', u'Ἳ': 'I', u'Ἴ': 'I',
u'Ἵ': 'I', u'Ἶ': 'I', u'Ἷ': 'I', u'Ῐ': 'I', u'Ῑ': 'I', u'Ὶ': 'I', u'Ί': 'I', u'Κ': 'K', u'Λ': 'L',
u'Μ': 'M', u'Ν': 'N', u'Ξ': 'KS', u'Ο': 'O', u'Ό': 'O', u'Ὀ': 'O', u'Ὁ': 'O', u'Ὂ': 'O', u'Ὃ': 'O',
u'Ὄ': 'O', u'Ὅ': 'O', u'Ὸ': 'O', u'Ό': 'O', u'Π': 'P', u'Ρ': 'R', u'Ῥ': 'R', u'Σ': 'S', u'Τ': 'T',
u'Υ': 'Y', u'Ύ': 'Y', u'Ϋ': 'Y', u'Ὑ': 'Y', u'Ὓ': 'Y', u'Ὕ': 'Y', u'Ὗ': 'Y', u'Ῠ': 'Y', u'Ῡ': 'Y',
u'Ὺ': 'Y', u'Ύ': 'Y', u'Φ': 'F', u'Χ': 'X', u'Ψ': 'PS', u'Ω': 'O', u'Ώ': 'O', u'Ὠ': 'O', u'Ὡ': 'O',
u'Ὢ': 'O', u'Ὣ': 'O', u'Ὤ': 'O', u'Ὥ': 'O', u'Ὦ': 'O', u'Ὧ': 'O', u'ᾨ': 'O', u'ᾩ': 'O', u'ᾪ': 'O',
u'ᾫ': 'O', u'ᾬ': 'O', u'ᾭ': 'O', u'ᾮ': 'O', u'ᾯ': 'O', u'Ὼ': 'O', u'Ώ': 'O', u'ῼ': 'O', u'α': 'a',
u'ά': 'a', u'ἀ': 'a', u'ἁ': 'a', u'ἂ': 'a', u'ἃ': 'a', u'ἄ': 'a', u'ἅ': 'a', u'ἆ': 'a', u'ἇ': 'a',
u'ᾀ': 'a', u'ᾁ': 'a', u'ᾂ': 'a', u'ᾃ': 'a', u'ᾄ': 'a', u'ᾅ': 'a', u'ᾆ': 'a', u'ᾇ': 'a', u'ὰ': 'a',
u'ά': 'a', u'ᾰ': 'a', u'ᾱ': 'a', u'ᾲ': 'a', u'ᾳ': 'a', u'ᾴ': 'a', u'ᾶ': 'a', u'ᾷ': 'a', u'β': 'b',
u'γ': 'g', u'δ': 'd', u'ε': 'e', u'έ': 'e', u'ἐ': 'e', u'ἑ': 'e', u'ἒ': 'e', u'ἓ': 'e', u'ἔ': 'e',
u'ἕ': 'e', u'ὲ': 'e', u'έ': 'e', u'ζ': 'z', u'η': 'i', u'ή': 'i', u'ἠ': 'i', u'ἡ': 'i', u'ἢ': 'i',
u'ἣ': 'i', u'ἤ': 'i', u'ἥ': 'i', u'ἦ': 'i', u'ἧ': 'i', u'ᾐ': 'i', u'ᾑ': 'i', u'ᾒ': 'i', u'ᾓ': 'i',
u'ᾔ': 'i', u'ᾕ': 'i', u'ᾖ': 'i', u'ᾗ': 'i', u'ὴ': 'i', u'ή': 'i', u'ῂ': 'i', u'ῃ': 'i', u'ῄ': 'i',
u'ῆ': 'i', u'ῇ': 'i', u'θ': 'th', u'ι': 'i', u'ί': 'i', u'ϊ': 'i', u'ΐ': 'i', u'ἰ': 'i', u'ἱ': 'i',
u'ἲ': 'i', u'ἳ': 'i', u'ἴ': 'i', u'ἵ': 'i', u'ἶ': 'i', u'ἷ': 'i', u'ὶ': 'i', u'ί': 'i', u'ῐ': 'i',
u'ῑ': 'i', u'ῒ': 'i', u'ΐ': 'i', u'ῖ': 'i', u'ῗ': 'i', u'κ': 'k', u'λ': 'l', u'μ': 'm', u'ν': 'n',
u'ξ': 'ks', u'ο': 'o', u'ό': 'o', u'ὀ': 'o', u'ὁ': 'o', u'ὂ': 'o', u'ὃ': 'o', u'ὄ': 'o', u'ὅ': 'o',
u'ὸ': 'o', u'ό': 'o', u'π': 'p', u'ρ': 'r', u'ῤ': 'r', u'ῥ': 'r', u'σ': 's', u'ς': 's', u'τ': 't',
u'υ': 'y', u'ύ': 'y', u'ϋ': 'y', u'ΰ': 'y', u'ὐ': 'y', u'ὑ': 'y', u'ὒ': 'y', u'ὓ': 'y', u'ὔ': 'y',
u'ὕ': 'y', u'ὖ': 'y', u'ὗ': 'y', u'ὺ': 'y', u'ύ': 'y', u'ῠ': 'y', u'ῡ': 'y', u'ῢ': 'y', u'ΰ': 'y',
u'ῦ': 'y', u'ῧ': 'y', u'φ': 'f', u'χ': 'x', u'ψ': 'ps', u'ω': 'o', u'ώ': 'o', u'ὠ': 'o', u'ὡ': 'o',
u'ὢ': 'o', u'ὣ': 'o', u'ὤ': 'o', u'ὥ': 'o', u'ὦ': 'o', u'ὧ': 'o', u'ᾠ': 'o', u'ᾡ': 'o', u'ᾢ': 'o',
u'ᾣ': 'o', u'ᾤ': 'o', u'ᾥ': 'o', u'ᾦ': 'o', u'ᾧ': 'o', u'ὼ': 'o', u'ώ': 'o', u'ῲ': 'o', u'ῳ': 'o',
u'ῴ': 'o', u'ῶ': 'o', u'ῷ': 'o', u'¨': '', u'΅': '', u'᾿': '', u'῾': '', u'῍': '', u'῝': '', u'῎': '',
u'῞': '', u'῏': '', u'῟': '', u'῀': '', u'῁': '', u'΄': '', u'΅': '', u'`': '', u'῭': '', u'ͺ': '',
u'᾽': '', u'А': 'A', u'Б': 'B', u'В': 'V', u'Г': 'G', u'Д': 'D', u'Е': 'E', u'Ё': 'E', u'Ж': 'ZH',
u'З': 'Z', u'И': 'I', u'Й': 'I', u'К': 'K', u'Л': 'L', u'М': 'M', u'Н': 'N', u'О': 'O', u'П': 'P',
u'Р': 'R', u'С': 'S', u'Т': 'T', u'У': 'U', u'Ф': 'F', u'Х': 'KH', u'Ц': 'TS', u'Ч': 'CH', u'Ш': 'SH',
u'Щ': 'SHCH', u'Ы': 'Y', u'Э': 'E', u'Ю': 'YU', u'Я': 'YA', u'а': 'A', u'б': 'B', u'в': 'V', u'г': 'G',
u'д': 'D', u'е': 'E', u'ё': 'E', u'ж': 'ZH', u'з': 'Z', u'и': 'I', u'й': 'I', u'к': 'K', u'л': 'L',
u'м': 'M', u'н': 'N', u'о': 'O', u'п': 'P', u'р': 'R', u'с': 'S', u'т': 'T', u'у': 'U', u'ф': 'F',
u'х': 'KH', u'ц': 'TS', u'ч': 'CH', u'ш': 'SH', u'щ': 'SHCH', u'ы': 'Y', u'э': 'E', u'ю': 'YU', u'я': 'YA',
u'Ъ': '', u'ъ': '', u'Ь': '', u'ь': '', u'ð': 'd', u'Ð': 'D', u'þ': 'th', u'Þ': 'TH',
u'ა': 'a', u'ბ': 'b', u'გ': 'g', u'დ': 'd', u'ე': 'e', u'ვ': 'v', u'ზ': 'z', u'თ': 't', u'ი': 'i',
u'კ': 'k', u'ლ': 'l', u'მ': 'm', u'ნ': 'n', u'ო': 'o', u'პ': 'p', u'ჟ': 'zh', u'რ': 'r', u'ს': 's',
u'ტ': 't', u'უ': 'u', u'ფ': 'p', u'ქ': 'k', u'ღ': 'gh', u'ყ': 'q', u'შ': 'sh', u'ჩ': 'ch', u'ც': 'ts',
u'ძ': 'dz', u'წ': 'ts', u'ჭ': 'ch', u'ხ': 'kh', u'ჯ': 'j', u'ჰ': 'h'}
def slugify(s, strip=False):
    u"""
    Simple slug filter, that has no knowledge of diacritics. Prefer slughifi (see below) to this method for good slugs,
    even if for simple languages like english this may be enough (and probably faster).

        >>> text = u"C'est déjà l'été."
        >>> slugify(text)
        'c-est-deja-l-ete-'
    """
    # Local renamed: the original bound the result to ``str``, shadowing
    # the builtin of the same name.
    slug = re.sub(r'\W+', '-', unidecode.unidecode(s).lower())
    if strip:
        # Trim leading/trailing dashes when requested.
        slug = re.sub('(^-+|-+$)', '', slug)
    return slug
def replace_char(m):
    """re.sub callback: map the single matched character through char_map,
    leaving unmapped characters untouched.

    Uses ``dict.get`` instead of ``dict.has_key`` (removed in Python 3;
    ``get`` works identically on Python 2).
    """
    char = m.group()
    return char_map.get(char, char)
def unaccent(value):
    """
    Replace diacritics with their ascii counterparts.

    NOTE: Python 2 only (``types.UnicodeType`` / ``unicode`` do not exist
    on Python 3). Returns an ascii byte string with unmapped non-ascii
    characters dropped.
    """
    # unicodification
    if type(value) != types.UnicodeType:
        value = unicode(value, 'utf-8', 'ignore')
    # try to replace chars: every single non [a-zA-Z0-9 space dash]
    # character goes through char_map via replace_char()
    value = re.sub('[^a-zA-Z0-9\\s\\-]{1}', replace_char, value)
    return value.encode('ascii', 'ignore')
def slughifi(value, do_slugify=True, overwrite_char_map=None, strip=False):
    u"""
    High Fidelity slugify - slughifi.py, v 0.1
    This was found somewhere on internet, and slightly adapted for our needs.

    :param value: input string or unicode (Python 2 only).
    :param do_slugify: when True, also run the result through slugify().
    :param overwrite_char_map: extra character mappings to apply.
    :param strip: forwarded to slugify() to trim leading/trailing dashes.

    Examples :
    >>> text = u"C'est déjà l\'été."
    >>> slughifi(text)
    'c-est-deja-l-ete-'
    >>> slughifi(text, overwrite_char_map={"'": '-',})
    'c-est-deja-l-ete-'
    >>> slughifi(text, do_slugify=False)
    'C-est deja l-ete.'
    """
    # unicodification
    if type(value) != types.UnicodeType:
        value = unicode(value, 'utf-8', 'ignore')
    # overwrite character mapping
    # NOTE(review): this mutates the module-level char_map permanently, so an
    # overwrite leaks into every later call -- TODO confirm that is intended.
    if overwrite_char_map:
        char_map.update(overwrite_char_map)
    # try to replace chars via replace_char/char_map
    value = re.sub('[^a-zA-Z0-9\\s\\-]{1}', replace_char, value)
    # apply ascii slugify
    if do_slugify:
        value = slugify(value, strip=strip)
    return value.encode('ascii', 'ignore')
def filter_html(value):
    """
    Simple filter that removes all html found and replace HTML line breaks by a simple line feed character.

    Tags not in VALID_HTML_TAGS are hidden (their text is kept); ``<br />``
    becomes ``\\n``.  Returns unicode, or None for None input.
    """
    if value is None:
        return None
    soup = BeautifulSoup(value)
    tags = soup.findAll(True)
    for tag in tags:
        if tag.name not in VALID_HTML_TAGS:
            # hidden=True keeps the tag's contents but drops the tag itself
            tag.hidden = True
    if tags:
        # NOTE(review): the first .replace() argument is presumably a literal
        # non-breaking space (U+00A0) -- confirm against the original file.
        value = soup.renderContents().replace(' ', ' ').replace('\n', '').replace('<br />', '\n')
    else:
        value = soup.renderContents().replace(' ', ' ')
    if value:
        value = html_unescape(unicode(value, 'utf-8'))
    return value
class Timer(object):
    """
    Context manager used to time execution of stuff.

    Usage::

        with Timer() as t:
            do_work()
        print(t.duration)

    ``duration`` is only valid after the ``with`` block has exited.
    """
    def __enter__(self):
        self.__start = time.time()
        # BUGFIX: return self so ``with Timer() as t`` binds the timer
        # (previously it bound None).
        return self

    def __exit__(self, type=None, value=None, traceback=None):
        # Record the end time; exceptions are not suppressed (returns None).
        self.__finish = time.time()

    @property
    def duration(self):
        """Elapsed seconds between __enter__ and __exit__."""
        return self.__finish - self.__start

    def __str__(self):
        # Millisecond-truncated rendering, e.g. '0.123s'.
        return str(int(self.duration * 1000) / 1000.0) + 's'
def create_http_reader(url):
    """
    Simple reader for an HTTP resource.

    Returns a zero-argument callable that GETs *url* (via requests) and
    returns the raw response body as bytes.
    """
    def http_reader():
        return requests.get(url).content
    return http_reader
def create_ftp_reader(url):
    """
    Simple reader for an FTP resource.

    Returns a zero-argument callable that downloads ``url`` (which must
    carry host/user/password in its netloc) and returns the file content
    as a single string.  NOTE: Python 2 only (``urlparse`` module).
    """
    import urlparse, ftplib
    parsed_url = urlparse.urlparse(url)
    def ftp_reader():
        # chunks are accumulated by the retrbinary callback below
        ftp_file_content = []
        def handle_binary(data):
            ftp_file_content.append(data)
        ftp = ftplib.FTP(host=parsed_url.hostname,
                         user=parsed_url.username,
                         passwd=parsed_url.password)
        ftp.retrbinary(cmd='RETR {0}'.format(parsed_url.path),
                       callback=handle_binary)
        return ''.join(ftp_file_content)
    return ftp_reader
def create_file_reader(path):
    """
    Simple reader for a local filesystem resource.

    Returns a zero-argument callable that reads the whole file.
    NOTE(review): mode 'rU' (universal newlines) is a Python 2 idiom and was
    removed in Python 3.11 -- confirm target interpreter before porting.
    """
    def file_reader():
        with open(path, 'rU') as f:
            return f.read()
    return file_reader
def sfloat(mixed, default=None):
    """Safe float cast.

    Returns ``float(mixed)``, or *default* when the value cannot be
    converted.  Narrowed from a bare ``except:`` so SystemExit and
    KeyboardInterrupt are no longer swallowed.
    """
    try:
        return float(mixed)
    except (TypeError, ValueError):
        return default
def sint(mixed, default=None):
    """Safe int cast.

    Returns ``int(mixed)``, or *default* when the value cannot be
    converted.  Narrowed from a bare ``except:`` so SystemExit and
    KeyboardInterrupt are no longer swallowed.
    """
    try:
        return int(mixed)
    except (TypeError, ValueError):
        return default
def sbool(mixed, default=None):
    """Safe boolean cast.

    Returns ``bool(mixed)``, or *default* if the object's truth evaluation
    itself raises (e.g. a __bool__/__len__ that throws).  Narrowed from a
    bare ``except:`` so SystemExit/KeyboardInterrupt propagate.
    """
    try:
        return bool(mixed)
    except Exception:
        return default
# Exports
# Build a real terminal when possible, otherwise a no-op stand-in so callers
# can use ``terminal`` unconditionally (e.g. in pipes / non-tty contexts).
try:
    terminal = _Terminal()
except:
    class FakeTerminal(object):
        # Capability strings degrade to empty.
        clear_eol = ''
        move_up = ''
        is_a_tty = False
        def __call__(self, *args):
            # NOTE(review): ''.join(*args) only works when called with a
            # single iterable argument; multiple args raise TypeError --
            # ''.join(args) was probably intended. TODO confirm call sites.
            return ''.join(*args)
        def __getattr__(self, item):
            # Any unknown capability lookup returns the terminal itself,
            # which renders as a callable no-op.
            return self
    terminal = FakeTerminal()
# Alias for HTML escaping (cgi.escape; this module targets Python 2 --
# cgi.escape was removed in Python 3.8).
html_escape = cgi.escape
def html_unescape(txt):
    """Unescape HTML entities in *txt*, coercing byte strings to unicode
    first.  NOTE: Python 2 only (``unicode``, ``print`` statement,
    ``HTMLParser`` module)."""
    if not isinstance(txt, unicode):
        try:
            txt = txt.decode('raw_unicode_escape')
        except:
            # best-effort debug output; the undecoded value is still passed on
            print txt
    return HTMLParser.HTMLParser().unescape(txt)
# Convenience aliases re-exported by this module.
now = datetime.now
cached_property = cached_property
etree = etree
| StarcoderdataPython |
3535658 | <reponame>giangbui/fence<gh_stars>0
from boto3 import client
from boto3.exceptions import Boto3Error
from fence.errors import UserError, InternalError, UnavailableError
import uuid
class BotoManager(object):
    """Thin wrapper around boto3 STS/S3/IAM clients used by fence.

    Python 3 compatibility fixes applied throughout: ``dict.has_key`` ->
    ``in`` and ``Exception.message`` -> ``str(ex)`` (both removed in
    Python 3; the replacements behave the same on Python 2).
    """

    def __init__(self, config, logger):
        self.sts_client = client("sts", **config)
        self.s3_client = client("s3", **config)
        self.logger = logger
        # Lazily-assigned clients; some methods assume self.iam is set.
        self.ec2 = None
        self.iam = None

    def assume_role(self, role_arn, duration_seconds, config=None):
        """Assume *role_arn* for *duration_seconds*, optionally with
        explicit credentials in *config*.  Returns the raw STS response.

        Raises InternalError on boto3 errors, UnavailableError otherwise.
        """
        try:
            # Re-create the client only when explicit credentials are given.
            if config and "aws_access_key_id" in config:
                self.sts_client = client("sts", **config)
            # Random postfix keeps concurrent session names unique.
            session_name_postfix = uuid.uuid4()
            return self.sts_client.assume_role(
                RoleArn=role_arn,
                DurationSeconds=duration_seconds,
                RoleSessionName="{}-{}".format("gen3", session_name_postfix),
            )
        except Boto3Error as ex:
            self.logger.exception(ex)
            raise InternalError("Fail to assume role: {}".format(ex))
        except Exception as ex:
            self.logger.exception(ex)
            raise UnavailableError("Fail to reach AWS: {}".format(ex))

    def presigned_url(self, bucket, key, expires, config, method="get_object"):
        """Generate a presigned S3 URL for GET or PUT.

        *expires* defaults to 1800s and is capped at 24h; uploads force
        AES256 server-side encryption.  Raises UserError for other methods.
        """
        if "aws_access_key_id" in config:
            self.s3_client = client("s3", **config)
        if method not in ["get_object", "put_object"]:
            raise UserError("method {} not allowed".format(method))
        if expires is None:
            expires = 1800
        elif expires > 3600 * 24:
            expires = 3600 * 24
        if method == "get_object":
            params = {"Bucket": bucket, "Key": key}
        else:
            params = {"Bucket": bucket, "Key": key, "ServerSideEncryption": "AES256"}
        url = self.s3_client.generate_presigned_url(
            ClientMethod=method,
            Params=params,
            ExpiresIn=expires,
        )
        return url

    def get_bucket_region(self, bucket, config):
        """Return the AWS region of *bucket* ('us-east-1' when AWS reports
        a null LocationConstraint, which is its legacy encoding)."""
        try:
            if "aws_access_key_id" in config:
                self.s3_client = client("s3", **config)
            response = self.s3_client.get_bucket_location(Bucket=bucket)
            region = response.get("LocationConstraint")
        except Boto3Error as ex:
            self.logger.exception(ex)
            raise InternalError("Fail to get bucket region: {}".format(ex))
        except Exception as ex:
            self.logger.exception(ex)
            raise UnavailableError("Fail to reach AWS: {}".format(ex))
        if region is None:
            return "us-east-1"
        return region

    def get_all_groups(self, list_group_name):
        """
        Get all group listed in the list_group_name.
        If group does not exist, add as new group and include in the return list
        :param list_group_name:
        :return:
        """
        try:
            groups = self.get_user_group(list_group_name)
            if len(groups) < len(list_group_name):
                for group_name in list_group_name:
                    if group_name not in groups:
                        groups[group_name] = self.create_user_group(group_name)
        except Exception as ex:
            self.logger.exception(ex)
            raise UserError("Fail to create list of groups: {}".format(ex))
        return groups

    def add_user_to_group(self, groups, username):
        """
        Add user to the list of group which have association membership.
        :param groups: dict of group-name -> IAM group descriptor
        :param username:
        :return:
        """
        try:
            for group in groups.values():
                self.iam.add_user_to_group(
                    GroupName=group["GroupName"], UserName=username
                )
        except Exception as ex:
            self.logger.exception(ex)
            raise UserError("Fail to add user to group: {}".format(ex))

    def get_user_group(self, group_names):
        """Return the subset of IAM groups whose names are in *group_names*,
        keyed by group name."""
        try:
            groups = self.iam.list_groups()["Groups"]
            res = {}
            for group in groups:
                if group["GroupName"] in group_names:
                    res[group["GroupName"]] = group
        except Exception as ex:
            self.logger.exception(ex)
            raise UserError(
                "Fail to get list of groups {}: {}".format(group_names, ex)
            )
        return res

    def create_user_group(self, group_name, path=None):
        """Create an IAM group plus its policy.  NOTE: *path* is currently
        unused; kept for interface compatibility."""
        try:
            group = self.iam.create_group(GroupName=group_name)["Group"]
            self.__create_policy__(
                group_name, self.__get_policy_document_by_group_name__(group_name)
            )
        except Exception as ex:
            self.logger.exception(ex)
            raise UserError(
                "Fail to create group {}: {}".format(group_name, ex)
            )
        return group

    def __get_policy_document_by_group_name__(self, group_name):
        """
        Getting policy document from config file and replace with actual value (same as project name)
        :param group_name:
        :return:
        """
        pass

    def __fill_with_new_value__(self, document, value):
        # Placeholder for templating the policy document; not implemented.
        pass

    def __create_policy__(
        self, policy_name, policy_document, path=None, description=None
    ):
        """
        Create policy with name and policies specified in policy_document.
        :param policy_name: Name of policy in AWS.
        :param policy_document: Document specified the policy rule.
        :param path:
        :param description:
        :return:
        """
        try:
            # Only pass Path/Description when actually provided.
            aws_kwargs = dict(Path=path, Description=description)
            aws_kwargs = {k: v for k, v in aws_kwargs.items() if v is not None}
            policy = self.iam.create_policy(
                PolicyName=policy_name, PolicyDocument=policy_document, **aws_kwargs
            )
            self.iam.attach_group_policy(
                GroupName=policy_name, PolicyArn=policy["Policy"]["Arn"]
            )
        except Exception as ex:
            self.logger.exception(ex)
            raise UserError("Fail to create policy: {}".format(ex))
        return policy
| StarcoderdataPython |
8055102 | <reponame>tkemps/mklaren
import numpy as np
from itertools import product, combinations
# Kernel constants: registry keys for string_kernel_dict below.
# The numeric prefixes presumably force a stable sort order of the modes --
# TODO confirm.
SPECTRUM = "1spectrum"
SPECTRUM_MISMATCH = "2spectrum_mismatch"
WD = "3weighted_degree_kernel"
WD_PI = "4weighted_degree_kernel_pos_inv"
EXPONENTIAL_SPECTRUM = "5exponential_spectrum"
# Assume object are sequences
# or set of sequences
def spectrum_kernel(x1, x2, K=4, beacon=None, bin=None):
    """
    Spectrum kernel: dot product of K-mer count vectors of two sequences.

    :param x1:
        Sequence of characters.
    :param x2:
        Sequence of characters.
    :param K:
        K-mers to be scanned.
    :param beacon:
        Beacon sequence (tuple of characters).
        If set, K is equal to beacon length and only beacons are counted.
    :param bin:
        tuple (bin, number of all bins)
        Run kernel only in specified bin.
        Make sure sequences are of equal length!
    :return:
        Kernel value.
    """
    if isinstance(beacon, str):
        beacon = tuple(beacon)
    K = len(beacon) if beacon else K
    # list() so the k-mer sequences support len(), slicing and .count()
    # (zip returns a one-shot iterator on Python 3).
    kmers_i = list(zip(*[x1[k:] for k in range(K)]))
    kmers_j = list(zip(*[x2[k:] for k in range(K)]))
    if bin:
        assert len(x1) == len(x2)
        b, b_all = bin
        start = int(float(b) / b_all * len(kmers_i))
        end = int(float(b + 1) / b_all * len(kmers_j))
        kmers_i = kmers_i[start:end]
        kmers_j = kmers_j[start:end]
    bin_norm = float(len(kmers_i)) if bin else 1
    if beacon is None:
        return np.sum([kmers_i.count(kmer) * kmers_j.count(kmer)
                       for kmer in set(kmers_i) & set(kmers_j)]) / bin_norm
    else:
        return kmers_i.count(beacon) * kmers_j.count(beacon) / bin_norm
def spectrum_mismatch(x1, x2, K=4, m=1, bin=None):
    """
    Spectrum kernel with mismatches: counts k-mer pairs differing in fewer
    than 2*m positions.

    :param x1:
        Sequence of characters.
    :param x2:
        Sequence of characters.
    :param K:
        K-mers to be scanned.
    :param m:
        Allowed mismatches (pairs with < 2*m differing positions count).
    :param bin:
        tuple (bin, number of all bins)
        Run kernel only in specified bin.
        Make sure sequences are of equal length!
    :return:
        Kernel value.
    """
    # Number of positions where two k-mers disagree.
    no_mismatches = lambda ki, kj: sum([not k1 == k2 for k1, k2 in zip(ki, kj)])
    # list() so slicing/len() work and the sequences can be iterated twice
    # (zip is a one-shot iterator on Python 3).
    kmers_i = list(zip(*[x1[k:] for k in range(K)]))
    kmers_j = list(zip(*[x2[k:] for k in range(K)]))
    if bin:
        assert len(x1) == len(x2)
        b, b_all = bin
        start = int(float(b) / b_all * len(kmers_i))
        end = int(float(b + 1) / b_all * len(kmers_j))
        kmers_i = kmers_i[start:end]
        kmers_j = kmers_j[start:end]
    bin_norm = float(len(kmers_i)) if bin else 1
    return np.sum([no_mismatches(ki, kj) < 2 * m
                   for ki, kj in product(kmers_i, kmers_j)]) / bin_norm
def weighted_degree_kernel(x1, x2, K=4, bin=None, beta=None, minK=2):
    """
    Weighted degree kernel: weighted sum over Kt in [minK, K] of positional
    k-mer matches.

    :param x1:
        Sequence of characters.
    :param x2:
        Sequence of characters.
    :param K:
        Maximum K-mer length scanned.
    :param beta:
        Weight for different pairs of matches; when None the standard WD
        weight 2*(K-Kt+1)/(Kt*(Kt+1)) is used for each Kt.
    :param bin:
        tuple (bin, number of all bins)
        Run kernel only in specified bin.
        Make sure sequences are of equal length!
    :param minK:
        Smallest K-mer length scanned.
    :return:
        Kernel value.
    """
    G = 0
    if bin:
        assert len(x1) == len(x2)
        b, b_all = bin
    for Kt in range(minK, K + 1):
        # list() so slicing/len() work (zip is a one-shot iterator on Py3).
        kmers_i = list(zip(*[x1[k:] for k in range(Kt)]))
        kmers_j = list(zip(*[x2[k:] for k in range(Kt)]))
        if bin:
            start = int(float(b) / b_all * len(kmers_i))
            end = int(float(b + 1) / b_all * len(kmers_j))
            kmers_i = kmers_i[start:end]
            kmers_j = kmers_j[start:end]
        bin_norm = float(len(kmers_i)) if bin else 1
        g = np.sum([ki == kj for ki, kj in zip(kmers_i, kmers_j)]) / bin_norm
        # BUGFIX: recompute the weight for every Kt when no explicit beta is
        # given; previously the first iteration's value latched and was
        # reused for all subsequent Kt.
        beta_t = beta if beta is not None else 2.0 * (K - Kt + 1) / (Kt * (Kt + 1))
        G += beta_t * g
    return G
def weighted_degree_kernel_pos_inv(x1, x2, K=4, var=8, beacon=None, bin=None):
    """
    Weighted degree kernel with positional invariance: matches are also
    counted at shifts s in [0, var), weighted by 1/(2*(s+1)).

    :param x1:
        Sequence of characters.
    :param x2:
        Sequence of characters.
    :param K:
        K-mers to be scanned.
    :param var:
        Maximum positional shift considered.
    :param beacon:
        Beacon sequence (tuple of characters).
        If set, K is equal to beacon length and only beacons are counted.
    :param bin:
        tuple (bin, number of all bins)
        Run kernel only in specified bin.
        Make sure sequences are of equal length!
    :return:
        Kernel value.
    """
    G = 0
    if bin:
        assert len(x1) == len(x2)
        b, b_all = bin
    if beacon is not None:
        K = len(beacon)
        if isinstance(beacon, str):
            beacon = tuple(beacon)
    for Kt in range(2, K + 1):
        g = 0
        # list() is required: the k-mer sequences are re-iterated for every
        # shift s below, which breaks with one-shot zip iterators on Py3.
        kmers_i = list(zip(*[x1[k:] for k in range(Kt)]))
        kmers_j = list(zip(*[x2[k:] for k in range(Kt)]))
        if bin:
            start = int(float(b) / b_all * len(kmers_i))
            end = int(float(b + 1) / b_all * len(kmers_j))
            kmers_i = kmers_i[start:end]
            kmers_j = kmers_j[start:end]
        bin_norm = float(len(kmers_i)) if bin else 1
        for s in range(var):
            delta = 1.0 / (2 * (s + 1))
            if beacon is None:
                # Matches of x1 against x2 shifted by s, and vice versa.
                mu_i = np.sum([ki == kj for ki, kj in zip(kmers_i, kmers_j[s:])])
                mu_j = np.sum([ki == kj for ki, kj in zip(kmers_j, kmers_i[s:])])
                g += delta * (mu_i + mu_j)
            else:
                # Beacon mode only contributes at the beacon's own length.
                if Kt != len(beacon):
                    continue
                else:
                    mu_i = np.sum([beacon == ki == kj for ki, kj in zip(kmers_i, kmers_j[s:])])
                    mu_j = np.sum([beacon == ki == kj for ki, kj in zip(kmers_j, kmers_i[s:])])
                    g += delta * (mu_i + mu_j)
        beta = 2.0 * (K - Kt + 1) / (Kt * (Kt + 1.0)) / bin_norm
        G += beta * g
    return G
# Assume object are sequences
# or set of sequences
def exponential_spectrum(x1, x2, K=4, l=1):
"""
Exponential string kernel. Applicable to strings of same length.
:param x1:
Sequence of characters.
:param x2:
Sequence of characters.
:param K:
K-mers to be scanned.
:param l:
Lengthscale parameter.
:return:
Kernel value.
"""
# import matplotlib.pyplot as plt
assert len(x1) == len(x2)
# K-mer content
krow1 = list(enumerate(zip(*[x1[i:] for i in range(K)])))
krow2 = list(enumerate(zip(*[x2[i:] for i in range(K)])))
# K-mer sets
kset1 = set(map(lambda t: t[1], krow1))
kset2 = set(map(lambda t: t[1], krow2))
kint = kset1 & kset2
kdata1 = dict()
kdata2 = dict()
# Compute kmer probability distributions
N = len(krow1)
for krow, kdata in (krow1, kdata1), (krow2, kdata2):
for i, kmer in krow:
if kmer not in kint: continue
if kmer not in kdata:
kdata[kmer] = np.zeros((N, ))
t = np.arange(0, N) - i
vec = kdata[kmer]
vec += np.exp(-t**2 / float(l))
vec = vec / vec.sum()
kdata[kmer] = vec
# Compute correlation between probability distributions
k = 0
for ky in kdata1.iterkeys():
vec1 = kdata1[ky]
vec2 = kdata2[ky]
k += vec1.dot(vec2)
return k
# Registry mapping mode names to kernel functions; consumed by
# string_kernel() below.  Keys match the module-level constants above.
string_kernel_dict = {
    "1spectrum": spectrum_kernel,
    "2spectrum_mismatch": spectrum_mismatch,
    "3weighted_degree_kernel": weighted_degree_kernel,
    "4weighted_degree_kernel_pos_inv": weighted_degree_kernel_pos_inv,
    "5exponential_spectrum": exponential_spectrum,
}
# General wrapper
def string_kernel(X1, X2, mode="1spectrum", **kwargs):
    """Build the Gram matrix between two collections of strings using the
    kernel registered under *mode*; extra kwargs go to the kernel."""
    if isinstance(X1, str):
        X1 = [X1]
    if isinstance(X2, str):
        X2 = [X2]
    kernel_fn = string_kernel_dict[mode]
    gram = np.zeros((len(X1), len(X2)))
    if X1 is X2:
        # Same object: exploit symmetry -- fill both triangles, then the
        # diagonal.
        for (i, xi), (j, xj) in combinations(enumerate(X1), 2):
            gram[j, i] = gram[i, j] = kernel_fn(xi, xj, **kwargs)
        for i, xi in enumerate(X1):
            gram[i, i] = kernel_fn(xi, xi, **kwargs)
    else:
        for (i, xi), (j, xj) in product(enumerate(X1), enumerate(X2)):
            gram[i, j] = kernel_fn(xi, xj, **kwargs)
    return gram
94205 | <reponame>THUCSTHanxu13/BMInf
from .base import Layer
from ..parameter import Parameter
from ..allocator import Allocator
import cupy
import numpy as np
from ..functions.scale_copy import elementwise_copy
class Embedding(Layer):
def __init__(self, num_embeddings, embedding_dim):
self.embedding_dim = embedding_dim
self.weight = Parameter((num_embeddings, embedding_dim), dtype=cupy.float16)
def forward(self, allocator : Allocator, x):
if isinstance(x, list):
x = np.array(x).astype(np.int64)
assert isinstance(x, np.ndarray)
out = allocator.alloc_array( x.shape + (self.embedding_dim,), dtype=self.weight.dtype )
cupy.take(self.weight.value, x, axis=0, out=out)
out_fp16 = allocator.alloc_array(out.shape, dtype=cupy.float16)
elementwise_copy(out, out_fp16)
del out
return out_fp16
| StarcoderdataPython |
4934562 | # Tesseract 3.02 Font Trainer
# V0.01 - 3/04/2013
'''
Edited by <NAME> 17/04/2020
- support for Windows file system
- added working directory, your original files will not be affected
- Input and Output folder for better organisation of files
'''
# Complete the documentation
import os
import subprocess
# --- Configuration and working-directory setup (Windows shell commands) ---
fontname = 'hsbc'
language = 'eng'
cwd=os.getcwd()
cwdInput=cwd+'\\Input\\'
workingDir=cwd+'\\workDir\\'
os.system(f'cd {cwd}')
os.system(f'mkdir workDir')
print('Tesseract Font Builder - assumes training TIFFs and boxfiles already created')
#print('Note: Only up to 32 .tiff files are supported for training purposes')
#Delete previous output file so a stale traineddata never survives a run
os.system(f'del {cwd}\\Output\\{fontname}.traineddata')
dFcount=0
#Empty WorkDir so originals in Input\ are never modified
for file in os.listdir('WorkDir\\'):
    delete=f'del {cwd}\\WorkDir\\{file}'
    #print(delete)
    os.system(delete)
    dFcount+=1
print(f'Deleted {dFcount} files in {cwd}\WorkDir')
# Counters used to number the renamed training files below.
tifCount = 0
boxCount = 0
#Copy all files into working directory, renaming to tesseract's expected
#<lang>.<font>.exp<N>.{tif,box} convention.
for file in os.listdir('Input\\'):
    copy =f'copy {cwd}\\Input\\{file} workDir\\{file} >NUL'
    #print(copy)
    os.system(copy)
    if file.endswith('.tiff'):
        #rename = 'mv '+files+' '+language+'.'+fontname+'.exp'+str(count)+'.tif'
        rename =f'rename {cwd}\\workDir\\{file} {language}.{fontname}.exp{tifCount}.tif'
        #print(rename)
        os.system(rename)
        tifCount+=1
    if file.endswith('.box'):
        #command='tesseract eng.'+fontname+'.exp'+str(count)+'.tif eng.'+fontname+'.exp'+str(count)+' nobatch box.train.stderr'
        rename =f'rename {cwd}\\workDir\\{file} {language}.{fontname}.exp{boxCount}.box'
        #print(rename)
        os.system(rename)
        boxCount+=1
#Train each tif with its box data, producing one .tr file per image.
for files in os.listdir(workingDir):
    if files.endswith(".tif"):
        fData=files.split('.')
        #comand = subprocess.Popen(f'tesseract {cwd}\\workDir\\{files} {files[:-4]} nobatch box.train.stderr',stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
        #comand= subprocess.run(f'tesseract {cwd}\\workDir\\{files} {files[:-4]} nobatch box.train.stderr',stdout=subprocess.PIPE)
        #o=comand.stdout
        command=f'tesseract {cwd}\\workDir\\{files} {files[:-4]} nobatch box.train.stderr'
        #print(command)
        os.system(command)
#Move created .tr files (written to cwd by tesseract) into the working directory
for file in os.listdir(cwd):
    if file.endswith('.tr'):
        move =f'move {cwd}\\{file} {cwd}\\WorkDir\\{file}'
        #print(move)
        os.system(move)
#Collect space-separated .tr/.box file lists for the training tools below.
trfilelist = ''
boxfilelist = ''
font_properties = ''
for files in os.listdir(workingDir):
    if files.endswith(".tr"):
        trfilelist = f'{trfilelist} {cwd}\\WorkDir\\{files}'
        font_properties = fontname+' 0 0 0 0 0'
    if files.endswith(".box"):
        boxfilelist =f'{boxfilelist} {cwd}\\WorkDir\\{files}'
#Build the Unicharset File from all box files
command2 = f'unicharset_extractor {boxfilelist} '
print(command2)
subprocess.run(command2)
#os.system(command2)
#Move unicharset into working Directory
os.system(f'move {cwd}\\unicharset {cwd}\\WorkDir')
#Build the font properties file ("<font> 0 0 0 0 0": no style attributes set)
fontpropertiesfile = open(f'{cwd}\\WorkDir\\font_properties', 'a+') # saving into a file
fontpropertiesfile.write(font_properties)
print('Wrote font_properties file')
fontpropertiesfile.close()
#Clustering: shapeclustering then mftraining over all .tr files
command3 = f'shapeclustering -F {cwd}\\WorkDir\\font_properties -U {cwd}\\WorkDir\\unicharset ' + trfilelist
#command3 = 'shapeclustering '
print(command3)
#os.system(command3)
subprocess.run(command3)
#move shapetable (written to cwd) into the working directory
os.system(f'move {cwd}\\shapetable {cwd}\\WorkDir')
mftraining = f'mftraining -F {cwd}\\WorkDir\\font_properties -U {cwd}\\WorkDir\\unicharset -O {cwd}\\WorkDir\\'+fontname+'.charset '+trfilelist
print (mftraining)
subprocess.run(mftraining)
#os.system(mftraining)
#move mftraining outputs: inttemp pffmtable shapetable
os.system(f'move {cwd}\\inttemp {cwd}\\WorkDir')
os.system(f'move {cwd}\\pffmtable {cwd}\\WorkDir')
os.system(f'move {cwd}\\shapetable {cwd}\\WorkDir')
#CNTraining: character normalization training over all .tr files
command4 = 'cntraining ' + trfilelist
print(command4)
subprocess.run(command4)
#os.system(command4)
#Move normproto into the working directory
os.system(f'move {cwd}\\normproto {cwd}\\WorkDir\\')
#Prefix the five data files with the font name, as combine_tessdata expects
os.system(f'rename {cwd}\\WorkDir\\unicharset '+fontname+'.unicharset')
os.system(f'rename {cwd}\\WorkDir\\shapetable '+fontname+'.shapetable')
os.system(f'rename {cwd}\\WorkDir\\normproto '+fontname+'.normproto')
os.system(f'rename {cwd}\\WorkDir\\pffmtable '+fontname+'.pffmtable')
os.system(f'rename {cwd}\\WorkDir\\inttemp '+fontname+'.inttemp')
##Put it all together into <fontname>.traineddata
command5 = f'combine_tessdata {cwd}\\WorkDir\\'+fontname+'.'
os.system(command5)
#Move tessData into Output and install it into the Tesseract tessdata folder
os.system(f'mkdir Output')
os.system(f'move {cwd}\\WorkDir\\{fontname}.traineddata {cwd}\\Output')
print(f'Your {fontname}.traineddata have been saved into {cwd}\Output')
# NOTE(review): the two hard-coded "D:\Program Files\..." paths below are
# unquoted and contain a space, so cmd.exe will mis-parse them -- TODO fix.
os.system(f'del D:\\Program Files\\Tesseract-OCR\\tessdata\\hsbc.traineddata')
os.system(f'copy {cwd}\\Output\\hsbc.traineddata D:\\Program Files\\Tesseract-OCR\\tessdata\\')
# Keep the console window open until the user presses Enter.
input()
9255 | <gh_stars>1-10
"""Fixes for CESM2 model."""
from ..fix import Fix
from ..shared import (add_scalar_depth_coord, add_scalar_height_coord,
add_scalar_typeland_coord, add_scalar_typesea_coord)
class Fgco2(Fix):
    """Fixes for fgco2."""

    def fix_metadata(self, cubes):
        """Add depth (0m) coordinate.

        Parameters
        ----------
        cubes : iris.cube.CubeList
            Input cubes; the fgco2 cube is modified in place.

        Returns
        -------
        iris.cube.CubeList
            The (modified) input cube list.
        """
        cube = self.get_cube_from_list(cubes)
        add_scalar_depth_coord(cube)
        return cubes
class Tas(Fix):
    """Fixes for tas."""

    def fix_metadata(self, cubes):
        """Add height (2m) coordinate.

        Parameters
        ----------
        cubes : iris.cube.CubeList
            Input cubes; the tas cube is modified in place.

        Returns
        -------
        iris.cube.CubeList
            The (modified) input cube list.
        """
        cube = self.get_cube_from_list(cubes)
        add_scalar_height_coord(cube)
        return cubes
class Sftlf(Fix):
    """Fixes for sftlf."""

    def fix_metadata(self, cubes):
        """Add typeland coordinate.

        Parameters
        ----------
        cubes : iris.cube.CubeList
            Input cubes; the sftlf cube is modified in place.

        Returns
        -------
        iris.cube.CubeList
            The (modified) input cube list.
        """
        cube = self.get_cube_from_list(cubes)
        add_scalar_typeland_coord(cube)
        return cubes
class Sftof(Fix):
    """Fixes for sftof."""

    def fix_metadata(self, cubes):
        """Add typesea coordinate.

        Parameters
        ----------
        cubes : iris.cube.CubeList
            Input cubes; the sftof cube is modified in place.

        Returns
        -------
        iris.cube.CubeList
            The (modified) input cube list.
        """
        cube = self.get_cube_from_list(cubes)
        add_scalar_typesea_coord(cube)
        return cubes
| StarcoderdataPython |
1815208 | from easilyb.urlselector import UrlSelector
from easilyb.net.requestqueue import Requester
from lxml.html import fromstring
import logging
logger = logging.getLogger(__name__)
def _crawler_callback(resp, index=None):
url,counter, depth, crawler = index
crawler._parse_response(url, depth, resp)
class LxmlXpathLinkExtractor:
    """Extracts anchor hrefs from an HTML document using lxml + XPath."""

    def __init__(self):
        pass

    def extract_links(self, baseurl, response):
        """Return all <a href> values in *response* (an HTML string),
        resolved to absolute URLs against *baseurl*."""
        html = fromstring(response, base_url=baseurl)
        html.make_links_absolute()
        return html.xpath('//a/@href')
class Crawler:
    """Breadth-limited web crawler: seeds *baseurls* into a threaded
    Requester queue and invokes *callback(url, response)* per fetched page,
    following extracted links up to *maxdepth*."""

    def __init__(self, baseurls, callback, urlselector=None, urlextractor=None, maxdepth=3, maxurls=100, threads=4):
        self.baseurls = baseurls
        self.callback = callback
        self._urlselector = urlselector or UrlSelector()
        self._urlextractor = urlextractor or LxmlXpathLinkExtractor()
        self.maxdepth = maxdepth
        self.maxurls = maxurls #TODO: can't with the current workerqueue or urlselector implementation
        self._urlselector.limit = maxurls #TODO: may be dangerous
        self._threads = threads
        self._requester = Requester(threads=self._threads, queue=self._urlselector)
        # monotonically increasing id attached to every scheduled request
        self._counter = 0
        for u in self.baseurls:
            # index tuple consumed by _crawler_callback: (url, counter, depth, crawler)
            index = (u,self._counter, 0, self,)
            self._requester.get(_crawler_callback, u, index)
            self._counter+=1

    def _parse_response(self, url, depth, response):
        """Deliver *response* to the user callback, then schedule the page's
        links at depth+1 while still under maxdepth."""
        self.callback(url, response)
        if depth + 1 < self.maxdepth:
            logger.debug('Parsing response for url=%s', url)
            links = self._urlextractor.extract_links(url, response.text)
            for u in links:
                logger.debug("Found link: %s", u)
                index = (u,self._counter, depth + 1, self,)
                self._requester.get( _crawler_callback, u, index)
                self._counter+=1

    def join(self):
        """Block until all queued requests have been processed."""
        self._requester.join()
| StarcoderdataPython |
8026986 | from spyd.game.player.player import Player
from spyd.permissions.functionality import Functionality
from spyd.registry_manager import register
@register('gep_message_handler')
class SpydGetPlayerInfoMessageHandler(object):
    """GEP message handler: answers 'get_player_info' requests with a
    snapshot of a player's identity and in-game state."""

    msgtype = 'get_player_info'
    # permission gate for this message type
    execute = Functionality(msgtype)

    @classmethod
    def handle_message(cls, spyd_server, gep_client, message):
        """Look up the player by UUID from *message* and reply on
        *gep_client* with a 'player_info' message (echoing 'reqid')."""
        player_uuid = message['player']
        player = Player.instances_by_uuid.get(player_uuid, None)
        if player is None:
            raise Exception("Unknown player.")
        state = player.state
        # Flat snapshot of the mutable per-round state.
        player_game_state = {
            'is_spectator': state.is_spectator,
            'is_alive': state.is_alive,
            'has_quad': state.has_quad,
            'frags': state.frags,
            'deaths': state.deaths,
            'suicides': state.suicides,
            'teamkills': state.teamkills,
            'damage_dealt': state.damage_dealt,
            'damage_spent': state.damage_spent,
            'flags': state.flags,
            'flag_returns': state.flag_returns,
            'health': state.health,
            'maxhealth': state.maxhealth,
            'armour': state.armour,
            'armourtype': state.armourtype,
            'gunselect': state.gunselect,
            'ammo': state.ammo
        }
        # Identity / connection info plus the nested game state.
        player_info = {
            'cn': player.cn,
            'name': player.name,
            'team': player.team_name,
            'room': player.room.name,
            'host': player.client.host,
            'model': player.playermodel,
            'isai': player.isai,
            'groups': tuple(player.client.get_group_names()),
            'game_state': player_game_state
        }
        gep_client.send({'msgtype': 'player_info', 'player': player.uuid, 'player_info': player_info}, message.get('reqid'))
| StarcoderdataPython |
6584303 | <reponame>MisterAI/AutoTeSG
#!/usr/bin/python
import sys
import astor
from ast import parse
if sys.version_info[0] < 3:
from io import BytesIO
else:
from io import StringIO
def doRun(codeTree):
    """Convert *codeTree* (an AST) back to source via astor and execute it
    in the current scope."""
    source = astor.to_source(codeTree)
    exec(compile(source, filename="<ast>", mode="exec"))
def runCode(codeTree):
    """
    Run code and return output, generated on stdout.

    *codeTree* is an AST; it is round-tripped to source via astor, compiled,
    and executed in a fresh namespace while stdout is captured.
    Exceptions raised by the executed code propagate to the caller.
    """
    # BUGFIX: compile before redirecting stdout, so a failure in
    # astor.to_source()/compile() cannot leave sys.stdout redirected.
    compiled_code = compile(astor.to_source(codeTree), filename="<ast>",
                            mode="exec")
    # Temporarily redirect output to own IO
    old_stdout = sys.stdout
    if sys.version_info[0] < 3:
        my_output = sys.stdout = BytesIO()
    else:
        my_output = sys.stdout = StringIO()
    try:
        # use the same dictionary for local and global variables to prevent
        # scope issues
        dictionary = {}
        exec(compiled_code, dictionary, dictionary)
    finally:
        # BUGFIX: always restore stdout, whether execution succeeded or
        # raised (the original only restored it on the exception path
        # before re-raising).
        sys.stdout = old_stdout
    return my_output.getvalue()
| StarcoderdataPython |
8058763 | from .base_metric_loss_function import BaseMetricLossFunction
from ..utils import loss_and_miner_utils as lmu
import math
import torch
import torch.nn.functional as F
###### modified from https://github.com/idstcv/SoftTriple/blob/master/loss/SoftTriple.py ######
###### Original code is Copyright@Alibaba Group ######
###### ICCV'19: "SoftTriple Loss: Deep Metric Learning Without Triplet Sampling" ######
class SoftTripleLoss(BaseMetricLossFunction):
    """SoftTriple loss (ICCV'19): each class owns *centers_per_class*
    learned centers; similarity to a class is a softmax-weighted mix over
    its centers, trained with a margin cross-entropy plus a regularizer
    pulling same-class centers together."""

    def __init__(self, embedding_size, num_classes, centers_per_class, la=20, gamma=0.1, reg_weight=0.2, margin=0.01, **kwargs):
        super().__init__(**kwargs)
        # la: scale applied to logits; gamma stored inverted for the softmax
        self.la = la
        self.gamma = 1./gamma
        self.reg_weight = reg_weight
        self.margin = margin
        self.num_classes = num_classes
        self.centers_per_class = centers_per_class
        self.total_num_centers = num_classes * centers_per_class
        # center matrix: (embedding_size, num_classes * centers_per_class)
        self.fc = torch.nn.Parameter(torch.Tensor(embedding_size, self.total_num_centers))
        self.set_class_masks(num_classes, centers_per_class)
        torch.nn.init.kaiming_uniform_(self.fc, a=math.sqrt(5))
        self.add_to_recordable_attributes(list_of_names=["same_class_center_similarity", "diff_class_center_similarity"])

    def compute_loss(self, embeddings, labels, indices_tuple):
        """Margin cross-entropy over per-class soft similarities, weighted
        by any miner output, plus the center-regularization term."""
        miner_weights = lmu.convert_to_weights(indices_tuple, labels)
        centers = F.normalize(self.fc, p=2, dim=0) if self.normalize_embeddings else self.fc
        # (batch, num_classes, centers_per_class) similarities
        sim_to_centers = torch.matmul(embeddings, centers)
        sim_to_centers = sim_to_centers.view(-1, self.num_classes, self.centers_per_class)
        # soft assignment over each class's centers
        prob = F.softmax(sim_to_centers*self.gamma, dim=2)
        sim_to_classes = torch.sum(prob*sim_to_centers, dim=2)
        # subtract the margin only from each sample's true-class logit
        margin = torch.zeros(sim_to_classes.shape).to(embeddings.device)
        margin[torch.arange(0, margin.shape[0]), labels] = self.margin
        loss = F.cross_entropy(self.la*(sim_to_classes-margin), labels, reduction='none')
        loss = torch.mean(loss*miner_weights)
        #regularization which encourages the centers of a class to be close to each other
        reg = 0
        center_similarities = centers.t().matmul(centers)
        if self.reg_weight > 0 and self.centers_per_class > 1:
            reg = torch.sum(torch.sqrt(2.0+1e-5-2.*center_similarities[self.same_class_mask]))/(torch.sum(self.same_class_mask))
        self.set_stats(center_similarities)
        return loss+self.reg_weight*reg

    def set_class_masks(self, num_classes, centers_per_class):
        """Precompute boolean masks over the center-similarity matrix:
        same_class_mask marks the strict upper triangle of each class's
        block (only built when centers_per_class > 1); diff_class_mask
        marks everything outside the class blocks."""
        self.diff_class_mask = torch.ones(self.total_num_centers, self.total_num_centers, dtype=torch.bool)
        if centers_per_class > 1:
            self.same_class_mask = torch.zeros(self.total_num_centers, self.total_num_centers, dtype=torch.bool)
        for i in range(num_classes):
            s, e = i*centers_per_class, (i+1)*centers_per_class
            if centers_per_class > 1:
                curr_block = torch.ones(centers_per_class, centers_per_class)
                curr_block = torch.triu(curr_block, diagonal=1)
                self.same_class_mask[s:e, s:e] = curr_block
            self.diff_class_mask[s:e, s:e] = 0

    def set_stats(self, center_similarities):
        """Record mean same-/different-class center similarities for the
        recordable-attributes mechanism of the base class."""
        if self.centers_per_class > 1:
            self.same_class_center_similarity = torch.mean(center_similarities[self.same_class_mask])
        self.diff_class_center_similarity = torch.mean(center_similarities[self.diff_class_mask])
| StarcoderdataPython |
1788991 | from dataclasses import dataclass
from uuid import uuid4
from sqlalchemy import Boolean, Column, DateTime
from sqlalchemy.dialects.postgresql import UUID
from app.configs.database import db
@dataclass
class Session(db.Model):
    """SQLAlchemy model for a work/usage session ("sessions" table)."""

    # dataclass annotations control what gets serialized
    id: str
    start: str
    end: str
    finished: bool

    __tablename__ = "sessions"

    # primary key: random UUID4 generated on insert
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid4)
    start = Column(DateTime , nullable=False)
    end = Column(DateTime , nullable=False)
    finished = Column(Boolean , default=False)
| StarcoderdataPython |
1672441 | <reponame>epedropaulo/MyPython<filename>02 - Estruturas de controle/ex038.py
num1 = float(input('Digite o primeiro valor: '))
num2 = float(input('Digite o segundo valor: '))
if num1 > num2:
print('O primeiro valor é maior!')
elif num2 > num1:
print('O segundo valor é maior!')
else:
print('Os valores são iguais!')
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.