index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
def singleNonDuplicate(self, nums: List[int]) -> int:
    """Return the single element in a sorted list where every other value
    appears exactly twice, using an O(log n) binary search.

    Invariant: the lone element always lies in the half whose length is odd
    once the pair at `mid` is accounted for.
    """
    lo, hi = 0, len(nums) - 1
    while lo < hi:
        mid = (lo + hi) // 2
        # Number of elements strictly right of mid is even?
        right_half_even = (hi - mid) % 2 == 0
        if nums[mid] == nums[mid + 1]:
            # mid's partner sits to its right
            if right_half_even:
                lo = mid + 2
            else:
                hi = mid - 1
        elif nums[mid] == nums[mid - 1]:
            # mid's partner sits to its left
            if right_half_even:
                hi = mid - 2
            else:
                lo = mid + 1
        else:
            # mid matches neither neighbour: it is the unique element
            return nums[mid]
    # Search space collapsed onto the unique element.
    return nums[lo]
|
991,701 | 1c63b9ae647c875d74e932532d5bd78a0d92cb8c | #from urllib2 import Request, urlopen, URLError
import requests
#import gdax
# get buy volume increase in percent
# get sell volume increase in percent
# get price of Sell/buy order
class PublicClient(object):
    """Read-only client for the public GDAX (Coinbase Pro) REST API."""

    def __init__(self, api_url='https://api.gdax.com/'):
        # Normalise the base URL so endpoint paths can be appended safely.
        self.url = api_url.rstrip('/')

    def get_products(self):
        """Return the list of tradable products as parsed JSON."""
        response = requests.get(self.url + '/products', timeout=30)
        return response.json()

    def get_product_order_book(self, product_id, level=1):
        """Return the order book for *product_id*.

        Args:
            product_id (str): Product identifier, e.g. "BTC-EUR".
            level (int): Order book detail level (1, 2 or 3); default 1.

        Returns:
            dict: Order book with "sequence", "bids" and "asks" entries,
            each side a list of [price, size, num-orders] rows.
        """
        response = requests.get(
            self.url + '/products/{}/book'.format(product_id),
            params={'level': level},
            timeout=30,
        )
        # NOTE(review): HTTP errors are not raised; callers receive whatever
        # JSON body came back.
        return response.json()

    def get_product_ticker(self, product_id):
        """Return a snapshot of the last trade, best bid/ask and 24h volume.

        **Caution**: polling is discouraged in favour of the websocket feed.
        """
        response = requests.get(
            self.url + '/products/{}/ticker'.format(product_id), timeout=30)
        return response.json()

    def get_product_trades(self, product_id):
        """Return the latest trades for *product_id* as a list of dicts,
        each with "time", "trade_id", "price", "size" and "side"."""
        response = requests.get(
            self.url + '/products/{}/trades'.format(product_id), timeout=30)
        return response.json()


if __name__ == '__main__':
    client = PublicClient()
    print(client.get_product_ticker("BTC-EUR"))
991,702 | a625d27c899a7734384693a5247154bf631136af | import sys
input = lambda:sys.stdin.readline().strip()
def func(cur, bit, matrix=None):
    """Maximum total score assigning rows cur..10 to distinct columns 0..10.

    `bit` is a bitmask of the columns already taken.  `matrix` defaults to
    the module-level `arr` read by the driver loop, so existing `func(0, 0)`
    callers are unchanged.

    Bug fix: the original tested `bit ^ 1 << i` (truthy for almost every
    mask) and recursed with XOR, which could *clear* a column already in
    use; the correct tests are `not bit & (1 << i)` to check availability
    and `bit | (1 << i)` to mark the column taken.
    """
    if matrix is None:
        matrix = arr  # board populated by the driver loop below
    if cur == 11:
        return 0
    best = 0
    for i in range(11):
        # Only columns with a positive score that are still free.
        if matrix[cur][i] and not bit & (1 << i):
            best = max(best, func(cur + 1, bit | (1 << i), matrix) + matrix[cur][i])
    return best
# Driver: for each test case read an 11x11 score matrix from stdin and
# print the best assignment score found by func().
for c in range(int(input())):
    arr = [[] for i in range(11)]
    for i in range(11):
        arr[i] = list(map(int, input().split()))
    print(func(0, 0))
|
991,703 | 3909df336678900024a6dbdfccff6d9f58e17249 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
import os
import contextlib
import shutil
import tempfile
import time
@contextlib.contextmanager
def file_locker(path_in, path_repl, locker_path = None):
in_path = os.path.split(path_repl)
#~ mask_file = os.path.join(path_repl, in_path[-1])
mask_file = path_repl
if os.path.exists(mask_file):
print '\n\tcopy {0} to locker'.format(mask_file)
if locker_path is not None:
tmp_dir = locker_path
else:
tmp_dir = tempfile.mkdtemp()
tmp_file = os.path.join(tmp_dir, in_path[-1])
shutil.copy2(mask_file, tmp_file)
else:
tmp_file = None
print '\tcopy {0} to {1}\n'.format(path_in, mask_file)
shutil.copy2(path_in, mask_file)
yield path_in
if tmp_file is not None:
print 'replace {0} with locker copy'.format(mask_file)
shutil.copy2(tmp_file, mask_file)
shutil.rmtree(tmp_dir)
else:
print 'remove {0}'.format(mask_file)
os.unlink(mask_file)
def main():
    # CLI entry point: usage  script.py <path_repl> <path_in>
    import sys
    path_repl, path_in = sys.argv[1:3]
    # Hold the replacement in place until the user presses <enter>.
    with file_locker(path_in, path_repl) as myfl:
        minput = raw_input("press <enter> to quit")
    print "Done!"


if __name__ =='__main__':
    main()
|
class CisneiNegro:
    """Demo of the len() protocol: any object defining __len__ works with len()."""

    def __len__(self):
        # Fixed answer, purely to show len() delegating to __len__.
        return 42


Livro = CisneiNegro()
print(len(Livro))

Nome = 'Dikson'
listas = [12, 34, 40, 55]
DICT = {"Dikson": 34, "Luciene": 43, "Vó": 83}

# len() on the built-in containers, printed with the same labels as before.
for rotulo, valor in (("Nome", Nome), ("List", listas), ("Dicio", DICT)):
    print(rotulo, len(valor))
|
class Solution:
    def lengthLongestPath(self, input: str) -> int:
        """Return the length of the longest absolute path to a *file* in a
        serialized filesystem string (entries separated by '\n', nesting
        depth given by leading '\t' characters), or 0 if there is no file.

        Fix: removed the leftover debug ``print(path)`` statement.
        """
        path = []  # directory/file names along the current branch
        ans = 0
        for name in input.split("\n"):
            # Depth = number of leading tabs.
            depth = len(name) - len(name.lstrip("\t"))
            # Discard stack entries at or below this entry's depth.
            del path[depth:]
            path.append(name.lstrip("\t"))
            if "." in name:  # files are recognised by an extension dot
                # Joined length = sum of component lengths + one '/' per join.
                ans = max(ans, sum(len(p) for p in path) + len(path) - 1)
        return ans
|
991,706 | 72b20fd98b26a86aaaacf719a10fb5698f60952e | from app import db
class User(db.Model):
    """SQLAlchemy model for an application user account."""

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String())
    # Mobile number and e-mail address must each be unique per account.
    mobile = db.Column(db.String(), unique=True)
    email_address = db.Column(db.String(), unique=True)
    # NOTE(review): presumably a password *hash* — confirm hashing happens upstream.
    password = db.Column(db.TEXT())
    is_active = db.Column(db.Boolean(), default=True)
    # Set by the database at insert time.
    created_at = db.Column(db.DateTime, server_default=db.func.now())
    jwt_token = db.Column(db.String())
    address = db.Column(db.String())
|
991,707 | 4ad47bda7b603fe768d6f3730b0d130cda13a4f9 | FILENAME_BUILDNO = 'VERSION'
FILENAME_VERSION_H = 'src/version.h'
version = '0.1.'
import datetime
from pathlib import Path
def get_active_branch_name():
    """Return the checked-out git branch name, parsed from .git/HEAD.

    Returns None when HEAD contains no "ref:" line (detached HEAD).
    """
    head_path = Path(".") / ".git" / "HEAD"
    with head_path.open("r") as handle:
        lines = handle.read().splitlines()
    for entry in lines:
        if entry.startswith("ref:"):
            # "ref: refs/heads/<branch>" -> "<branch>"
            return entry.partition("refs/heads/")[2]
def get_commit_ref():
    """Return the first 8 characters of the hash stored in .git/ORIG_HEAD."""
    orig_head = Path(".") / ".git" / "ORIG_HEAD"
    with orig_head.open("r") as handle:
        lines = handle.read().splitlines()
    # Only the first line matters; the loop returns immediately.
    for entry in lines:
        return entry[:8]
# --- Bump the persisted build number ---------------------------------------
build_no = 0
try:
    with open(FILENAME_BUILDNO) as f:
        build_no = int(f.readline()) + 1
except (IOError, OSError, ValueError):
    # Counter file missing or unreadable: start counting from 1.
    # Fix: the original bare `except:` also swallowed SystemExit and
    # KeyboardInterrupt; catch only the expected failures.
    print('Starting build number from 1..')
    build_no = 1
with open(FILENAME_BUILDNO, 'w+') as f:
    f.write(str(build_no))
print('Build number: {}'.format(build_no))

# --- Emit the C header with version/build macros ---------------------------
hf = """
#ifndef BUILD_NUMBER
#define BUILD_NUMBER "{}"
#endif
#ifndef __VERSION
#define __VERSION "{}-{}-{}"
#endif
#ifndef VERSION_SHORT
#define VERSION_SHORT "{}"
#endif
#ifndef VERSION
#ifdef DEBUG
#define VERSION __VERSION "-debug"
#else
#define VERSION __VERSION
#endif
#endif
""".format(build_no, version+str(build_no), get_commit_ref(), get_active_branch_name(), version+str(build_no))
with open(FILENAME_VERSION_H, 'w+') as f:
    f.write(hf)
991,708 | b9dcc23814c7000a55e67eaf5ce395de64fd3ef5 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: creates/drops the ``sso_session`` table."""

    def forwards(self, orm):
        # Adding model 'SSOSession'
        db.create_table('sso_session', (
            ('sso_session_key', self.gf('django.db.models.fields.CharField')(max_length=40, primary_key=True)),
            ('django_session_key', self.gf('django.db.models.fields.CharField')(max_length=40)),
        ))
        db.send_create_signal('ssosp', ['SSOSession'])

    def backwards(self, orm):
        # Deleting model 'SSOSession'
        db.delete_table('sso_session')

    # Frozen ORM state South uses to reconstruct models at migration time.
    models = {
        'ssosp.ssosession': {
            'Meta': {'object_name': 'SSOSession', 'db_table': "'sso_session'"},
            'django_session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
            'sso_session_key': ('django.db.models.fields.CharField', [], {'max_length': '40', 'primary_key': 'True'})
        }
    }

    complete_apps = ['ssosp']
991,709 | e6b550a0a469ff8ccca8a9e517fe730f2cb8552a | # -*- coding: utf-8 -*-
# @Time : 2019/4/10 22:21
# @Author : tuihou
# @File : serializers.py
from rest_framework import serializers
from .models import Project, Step, Case, Variables, API, Category, Config, Classify, TestSuite, Report, \
ReportCase, ReportDetail
from drf_writable_nested import WritableNestedModelSerializer
from rest_framework.validators import UniqueValidator
from django.db.models import Q
class ProjectSerializer(serializers.ModelSerializer):
    """
    Full project detail serializer.
    """
    # Related attributes for the project could be attached here.
    class Meta:
        model = Project
        fields = "__all__"


class ApiSerializer(serializers.ModelSerializer):
    """
    API (interface) serializer.
    """
    # Category is mandatory and referenced by primary key.
    category = serializers.PrimaryKeyRelatedField(queryset=Category.objects.all(), required=True)
    # name = serializers.CharField(label="接口名称", help_text="接口名", required=True, allow_blank=False,
    # validators=[UniqueValidator(queryset=API.objects.all(), message="已经存在")])
    class Meta:
        model = API
        fields = "__all__"


class ApiSelectorSerializer(serializers.ModelSerializer):
    """
    Slim serializer for API picker widgets (id/name/category only).
    """
    class Meta:
        model = API
        fields = ['id', 'name', 'category']


class StepSerializer(serializers.ModelSerializer):
    """
    Test-step serializer.
    """
    class Meta:
        model = Step
        fields = "__all__"
        # depth = 1
    # api = ApiSerializer(read_only=True)


class CaseCopySerializer(serializers.ModelSerializer):
    """
    Serializer used when duplicating a case together with its steps.
    """
    step = StepSerializer(many=True)
    class Meta:
        model = Case
        fields = "__all__"


class CaseSerializer(serializers.ModelSerializer):
    """
    Test-case serializer.
    """
    class Meta:
        model = Case
        fields = "__all__"
    # step = StepSerializer(many=True)
    # api = serializers.PrimaryKeyRelatedField(queryset=API.objects.all())
    create_user = serializers.CharField()
    update_user = serializers.CharField()
    # Timestamps are rendered read-only in a fixed human-friendly format.
    create_time = serializers.DateTimeField(format="%Y-%m-%d %H:%M:%S", required=False, read_only=True)
    update_time = serializers.DateTimeField(format="%Y-%m-%d %H:%M:%S", required=False, read_only=True)
    # def get_create_user(self, obj):
    #
    #     return


class CaseSelectorSerializer(serializers.ModelSerializer):
    """
    Slim serializer for case picker widgets (id/name only).
    """
    class Meta:
        model = Case
        fields = ['id', 'name']
class VariablesSerializer(serializers.ModelSerializer):
    """
    Variable serializer.
    """
    class Meta:
        model = Variables
        fields = "__all__"


class ConfigSerializer(serializers.ModelSerializer):
    """
    Config serializer.
    """
    class Meta:
        model = Config
        fields = "__all__"


class CategoryApiSerializer3(serializers.ModelSerializer):
    """
    Third-level category serializer with its APIs.
    """
    api = ApiSelectorSerializer(many=True)
    class Meta:
        model = Category
        fields = ['id', 'label', 'api']


class CategoryApiSerializer2(serializers.ModelSerializer):
    """
    Second-level category serializer; nests third-level children.
    """
    children = CategoryApiSerializer3(many=True)
    class Meta:
        model = Category
        fields = ['id', 'label', 'children']


class CategoryApiSerializer(serializers.ModelSerializer):
    """
    Top-level category serializer; nests second-level children.
    """
    children = CategoryApiSerializer2(many=True)
    class Meta:
        model = Category
        fields = ['id', 'label', 'children']


class CategoryCaseSerializer3(serializers.ModelSerializer):
    """
    Third-level category serializer with its cases (names only).
    """
    case = CaseSelectorSerializer(many=True)
    class Meta:
        model = Category
        fields = ['id', 'label', 'case']


class CategoryCaseSerializer2(serializers.ModelSerializer):
    """
    Second-level category serializer; nests third-level case children.
    """
    children = CategoryCaseSerializer3(many=True)
    class Meta:
        model = Category
        fields = ['id', 'label', 'children']


class CategoryCaseSerializer(serializers.ModelSerializer):
    """
    Top-level category serializer; nests second-level case children.
    """
    children = CategoryCaseSerializer2(many=True)
    class Meta:
        model = Category
        fields = ['id', 'label', 'children']


class ProjectSerializer2(serializers.ModelSerializer):
    """
    Alternate project detail serializer.
    """
    class Meta:
        model = Project
        fields = "__all__"


class SuiteDetailSerializer(serializers.ModelSerializer):
    """
    Suite serializer including full case details.
    """
    case = CaseSerializer(many=True)
    class Meta:
        model = TestSuite
        fields = "__all__"


class SuiteSerializer(serializers.ModelSerializer):
    """
    Test-suite serializer.
    """
    class Meta:
        model = TestSuite
        fields = "__all__"


class ClassifySerializer(serializers.ModelSerializer):
    """
    Classification serializer.
    """
    class Meta:
        model = Classify
        fields = "__all__"
class TestSerializer(serializers.Serializer):
    """Hand-rolled serializer for creating Case objects (experimental).

    Fix: the field validator hook was named ``validated_name``; DRF only
    invokes per-field validators named ``validate_<field_name>``, so the
    check never ran.  Renamed to ``validate_name``.
    """
    api = serializers.PrimaryKeyRelatedField(queryset=API.objects.all())
    name = serializers.CharField(max_length=50)
    runFlag = serializers.IntegerField()
    # Stamp the requesting user without exposing these fields for input.
    create_user = serializers.HiddenField(default=serializers.CurrentUserDefault())
    update_user = serializers.HiddenField(default=serializers.CurrentUserDefault())
    category = serializers.PrimaryKeyRelatedField(queryset=Category.objects.all())

    class Meta:
        model = Case
        # fields = '__all__'

    def validate_name(self, value):
        # Demo validation: require the marker substring 'tttt' in the name.
        if 'tttt' not in value.lower():
            raise serializers.ValidationError('this is test for validated')
        return value

    def create(self, validated_data):
        # user = self.context["request"].user
        return Case.objects.create(**validated_data)

    # def update(self, instance, validated_data):
    #     instance.name = validated_data.get('name', instance.name)
    #     instance.runFlag = validated_data.get('runFlag', instance.runFlag)
    #     instance.save()
    #     return instance
class ReportSerializer(serializers.ModelSerializer):
    """
    Report summary serializer.
    """
    class Meta:
        model = Report
        fields = '__all__'


class ReportDetailSerializer(serializers.ModelSerializer):
    """
    Per-step result serializer.
    """
    class Meta:
        model = ReportDetail
        fields = '__all__'


class ReportCaseSerializer(serializers.ModelSerializer):
    """
    Per-case result serializer.
    """
    # detail = ReportDetailSerializer(many=True)
    class Meta:
        model = ReportCase
        fields = '__all__'
991,710 | 7f6c6a2471f5874fc472430cef77cd5eb99fe11f | from rc4 import rc4
# Demo: RC4 is symmetric, so encrypting the ciphertext again with the same
# key recovers the original plaintext.
chave = input("Chave: ")
texto = input("Texto: ")
texto_enc = rc4(texto, chave)
print("Resultado da encriptação: ", texto_enc)
print("-------------")
print("Resultado da decriptação: ", rc4(texto_enc, chave))
|
991,711 | bd6a341a4eae0d0309ca1ef73954b2e74463c8f4 | from unittest import TestCase
class TestRecursion(TestCase):
    """Unit tests for the recursion katas in ``kata.katas``."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_reverse_a_list(self):
        # reverse_a_list must reverse the top level only, not nested lists.
        from kata.katas import reverse_a_list
        self.assertEqual(reverse_a_list([1,2,3,4,5]), [5,4,3,2,1])
        self.assertEqual(reverse_a_list([[1,2],[3,4],[5,6]]), [[5,6],[3,4],[1,2]])

    def test_compress_a_sequence(self):
        # compress_a_sequence collapses *consecutive* duplicates only.
        from kata.katas import compress_a_sequence
        self.assertEqual(compress_a_sequence("Leeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeerrrroyyyyyyyy"), "Leroy")
        self.assertEqual(compress_a_sequence([1,1,2,2,3,3,2,2,3]), [1,2,3,2,3])
        self.assertEqual(compress_a_sequence([[1,2],[1,2],[3,4],[1,2]]), [[1,2],[3,4],[1,2]])
|
991,712 | cf01aa52f04a01df4a3de439ea58a95b773abfd9 | """
Name: problem1.py
Purpose: This problem requires a celsius to fahrenheit converter to help the user
understand the amount of degrees celsius to fahrenheit
Author: Tuczynski.S
Date: 07/12/2020
"""
# Greet the user and read the temperature to convert.
print("--- Welcome to the online celsius to fahrenheit converter ---")
c = float(input("Degrees Celsius: "))

# Standard conversion formula: F = C * 9/5 + 32.
f = c * (9 / 5) + 32

# Report the converted value and sign off.
print(c, "degrees celsius equals", f, "degrees fahrenheit.")
print("--- Thank you for using the online celsius to fahrenheit converter! ---")
|
991,713 | d669c7beb2016954085249c9844123c54d5c7886 | # ! /usr/bin/env python
from random import randint
SCRABBLES_SCORES = [(1, "E A O I N R T L S U"), (2, "D G"), (3, "B C M P"),
(4, "F H V W Y"), (5, "K"), (8, "J X"), (10, "Q Z")]
ACTION_ONE_WORD = 1
ACTION_MAX = 2
ACTION_WORD_FROM_FILE = 3
class Scrabble:
    """Score words with Scrabble letter values and query word files by score."""

    def __init__(self, scrabbles_scores):
        # Flatten [(score, "A B C"), ...] into a {letter: score} lookup.
        self.letter_scores = {}
        for score, letters in scrabbles_scores:
            for letter in letters.split():
                self.letter_scores[letter] = score

    def _get_file_content(self, file_path):
        """Read a dictionary file and return its words, one per line."""
        with open(file_path, 'r') as dictionary_file:
            return dictionary_file.read().strip().split('\n')

    def count_score(self, word):
        """Return the total Scrabble score of *word* (case-insensitive).

        Raises KeyError for characters without a letter score.
        """
        total = 0
        for character in word:
            try:
                total += self.letter_scores[character.upper()]
            except KeyError:
                raise KeyError('Wrong input value')
        return total

    def _count_many(self, word_list):
        """Group words by score: {score: [words...]}."""
        by_score = {}
        for word in word_list:
            by_score.setdefault(self.count_score(word), []).append(word)
        return by_score

    def _get_scores_from_file(self, file_path):
        """Return the {score: words} mapping for the words in *file_path*."""
        return self._count_many(self._get_file_content(file_path))

    def highest_score(self, file_path):
        """Return the highest score achieved by any word in the file."""
        return max(self._get_scores_from_file(file_path))

    def get_word_by_score(self, score, file_path):
        """Return a word from the file worth exactly *score* (random pick on
        ties), or None when no word matches."""
        candidates = self._get_scores_from_file(file_path).get(score)
        if candidates is None:
            return None
        if len(candidates) > 1:
            return candidates[randint(0, len(candidates) - 1)]
        return candidates[0]
def run():
    """Interactive CLI: score a word, find a file's maximum score, or look
    up a word by score.

    Raises ValueError on a non-integer menu choice or when no word in the
    file matches the requested score.
    """
    scrabble = Scrabble(SCRABBLES_SCORES)
    print('Available options:\n 1 count score for word\n 2 return maximum score in file\n 3 display word for given score\n')
    try:
        action = int(input())
    except ValueError:
        raise ValueError('Wrong input value. Correct value should be integer between 1 and 3')
    if action == ACTION_ONE_WORD:
        input_word = input('Please enter word:')
        print('Your word score is: {}'.format(scrabble.count_score(input_word)))
    elif action == ACTION_MAX:
        file_path = input('Enter file path')
        print('The highest score in file is: {}'.format(scrabble.highest_score(file_path)))
    elif action == ACTION_WORD_FROM_FILE:
        file_path = input('Enter file path')
        score = int(input('Enter score'))
        result = scrabble.get_word_by_score(score, file_path)
        if result:
            print(result)
        else:
            # No word in the file is worth exactly the requested score.
            raise ValueError('Value out of range.')


if __name__ == '__main__':
    run()
|
991,714 | 49ee0c44d5ee3fc381cbf745fe67678f29bbe53c | # Generated by Django 3.0.6 on 2020-12-22 15:49
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: orders TipoDeEvento by name and adds the
    discount-related columns to Descuento."""

    dependencies = [
        ('mi_CandyMuebles', '0006_pedido_tipoevento_idtipodeevento'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='tipodeevento',
            options={'managed': True, 'ordering': ['nombre']},
        ),
        migrations.AddField(
            model_name='descuento',
            name='Monto',
            field=models.IntegerField(blank=True, db_column='Monto', default=0, null=True),
        ),
        migrations.AddField(
            model_name='descuento',
            name='codigoreferido',
            field=models.CharField(blank=True, db_column='CodigoReferido', max_length=50, null=True),
        ),
        migrations.AddField(
            model_name='descuento',
            name='descuento',
            field=models.IntegerField(blank=True, db_column='Descuento', null=True),
        ),
        migrations.AddField(
            model_name='descuento',
            name='fecha_fin',
            field=models.DateField(blank=True, db_column='Fecha Fin', null=True),
        ),
        migrations.AddField(
            model_name='descuento',
            name='fecha_ini',
            field=models.DateField(blank=True, db_column='Fecha Inicio', null=True),
        ),
        migrations.AddField(
            model_name='descuento',
            name='status',
            field=models.BooleanField(db_column='Status', default=True),
        ),
    ]
|
991,715 | 9e3ef0f92a36edb4751ea41a5ddf95aa0e25d64a | #coding:utf8
#只是切割掉 图片的底端空白
#切割掉 左右两端 对称的空白
#吸血鬼行走文件夹名字不对
#泰坦图片左侧存在1像素的白线 需要先切割掉 再crop
import os
import Image
import sys
import ImageOps
import json
#读取士兵图片的边沿 根据士兵 图片 切割 特征色图片的边沿
# Load the per-unit crop boundaries (precomputed from the soldier sprites)
# and read the begin/end/count CLI arguments for the id range to process.
fi = open('../mb.txt', 'r')
fi = json.loads(fi.read())
# mId aId
print sys.argv
b = int(sys.argv[1])
e = int(sys.argv[2])
n = int(sys.argv[3])
def cropBound(id):
    # Crop the 8 attack frames ("a" prefix) of unit `id` to its stored box.
    maxBoundary = fi['a%d' % id]
    for k in range(0, 8):
        im = Image.open('ssa'+str(id)+'f'+str(k)+'.png')
        nim = im.crop(maxBoundary)
        nim.save('ss'+str(id)+'fa'+str(k)+'.png')
    # Then the 7 movement frames ("m" prefix) with their own bounding box.
    maxBoundary = fi['m%d' % id]
    for k in range(0, 7):
        im = Image.open('ssm'+str(id)+'f'+str(k)+'.png')
        nim = im.crop(maxBoundary)
        nim.save('ss'+str(id)+'fm'+str(k)+'.png')
def main():
    # Process `n` consecutive unit ids starting at every 10th id in [b, e).
    for i in range(b, e, 10):
        for j in range(0, n):
            cropBound(i+j)
    # Remove the now-cropped source frames.
    os.system('rm ssa*')
    os.system('rm ssm*')


main()
|
991,716 | eabc1496ee9c1ca75b20758ebbfaf8fb64b73480 | from output.models.nist_data.list_pkg.short.schema_instance.nistschema_sv_iv_list_short_length_2_xsd.nistschema_sv_iv_list_short_length_2 import NistschemaSvIvListShortLength2
# Sample instance for the generated NIST "list of short" schema type.
# NOTE(review): the type name says length 2 but six values are supplied —
# presumably intentional for this generated sample; confirm against the XSD.
obj = NistschemaSvIvListShortLength2(
    value=[
        31514,
        31646,
        31122,
        31422,
        31331,
        31643,
    ]
)
|
991,717 | 51361ae6ab76a15e2db7fbc27f3e1ca7bca99f6b | import seaborn as sns
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
# from pylab import rcParams
import scipy.stats as stats
from scipy.stats import chi2
import time,tqdm
from imblearn.over_sampling import SMOTE
import lightgbm as lgb
from sklearn.model_selection import StratifiedKFold
from sklearn.metrics import roc_auc_score
from sklearn.decomposition import PCA
from sklearn.cluster import KMeans
class Eda():
    '''
    Exploratory-data-analysis helpers; inputs are dataframes.
    '''
    def __init__(self, predictors, target):
        # predictors: feature dataframe; target: label series/column.
        self.predictors = predictors
        self.target = target

    @staticmethod
    def normalization(data):
        '''
        Min-max scale each column (result lies in [0, 1]).
        Alternative: from sklearn.preprocessing import MinMaxScaler
        '''
        data = np.array(data)
        max_predictors = []
        min_predictors = []
        normalized_predictors= np.empty(shape=data.shape)
        # Per-column extremes.
        for i in range(data.shape[1]):
            max_predictors.append(max([_[i] for _ in data]))
            min_predictors.append(min([_[i] for _ in data]))
        for index, arr_predictors in enumerate(data):
            predictors = [(e - mi)/(ma - mi) for e,mi,ma in list(zip(arr_predictors,min_predictors,max_predictors))]
            normalized_predictors[index] = predictors
        return normalized_predictors

    @staticmethod
    def standardization(data):
        '''
        Z-score each column (zero mean, unit variance).
        Alternative: from sklearn.preprocessing import StandardScaler
        '''
        data = np.array(data)
        mean_predictors = []
        std_predictors = []
        standardized_predictors = np.empty(shape=data.shape)
        for i in range(data.shape[1]):
            mean_predictors.append(np.mean([_[i] for _ in data]))
            std_predictors.append(np.std([_[i] for _ in data]))
        for index, arr_predictors in enumerate(data):
            predictors = [(e - mean)/std for e,mean,std in list(zip(arr_predictors,mean_predictors,std_predictors))]
            standardized_predictors[index] = predictors
        return standardized_predictors

    def pca(self, n_pca:int):
        '''
        Return an array of new pca components as well as, per component,
        the most important original feature (by absolute loading).
        '''
        scaled_data = self.standardization(self.predictors)
        n_pcs = n_pca
        pca = PCA(n_components=n_pcs)
        pca.fit(scaled_data)
        x_pca = pca.transform(scaled_data)
        # get the index of the most important feature on EACH component
        most_important = [np.abs(pca.components_[i]).argmax() for i in range(n_pcs)]
        initial_feature_names = self.predictors.columns
        # get the feature names
        most_important_features = [initial_feature_names[most_important[i]] for i in range(n_pcs)]
        # build the dataframe
        dic = {f'PC{i}': most_important_features[i] for i in range(n_pcs)}
        df = pd.DataFrame(dic.items())
        return x_pca, df

    def kmeans(self, data, n_cluster:int):
        '''
        Plot cluster assignments over the first two columns of `data`.
        Maximum cluster count is 10. It's recommended to run the pca
        function first before clustering.
        '''
        km = KMeans(n_clusters=n_cluster)
        pred = km.fit_predict(data)
        # NOTE(review): 'green' can be both a popped cluster colour and the
        # centroid colour, so the two may clash on the plot — confirm intended.
        color_list = ['green','navy','yellow','orange','brown','pink','red', 'purple','black','blue']
        for i in range(n_cluster):
            idx = np.where(pred==i)
            color = color_list.pop()
            plt.scatter(data[idx][:,0], data[idx][:,1], color=str(color), label=f"cluster {i}")
        plt.scatter(km.cluster_centers_[:,0], km.cluster_centers_[:,1], color='green', marker='*', label='centroid')
        plt.xlabel('mean radius')
        plt.ylabel('mean texture')
        plt.legend()

    @staticmethod
    def kmeans_elbow(data):
        # Plot SSE (inertia) for k = 1..9 to pick k via the elbow heuristic.
        parameters = range(1,10)
        sse = []
        for k in parameters:
            km = KMeans(n_clusters=k)
            km.fit(data)
            sse.append(km.inertia_)
        plt.xlabel('k number')
        plt.ylabel('sse')
        plt.plot(parameters, sse)

    def woe_iv_continuous(self):
        """
        Weight of evidence and information value for each (binned)
        continuous predictor, for binary classification tasks.
        Returns ({col: woe dataframe}, {col: IV}).
        """
        df = self.predictors.copy()
        df['target'] = self.target.copy()
        IV_dict = {}
        woe_dict = {}
        for col in self.predictors.columns:
            # binning values
            bins = np.linspace(df[col].min()-0.1, df[col].max()+0.1, int(0.05* self.predictors.shape[0])) # each bin should have at least 5% of the observation
            groups = df.groupby(np.digitize(df[col], bins))
            df[col] = pd.cut(df[col], bins)
            # getting class counts for each bin
            count_series = df.groupby([col, 'target']).size()
            new_df = count_series.to_frame(name = 'size').reset_index()
            # +0.5 smoothing keeps the log/division finite for empty cells.
            new_df['size'] = new_df['size'] + 0.5
            df1 = new_df[new_df['target']==0].reset_index(drop=True)
            df2 = new_df[new_df['target']==1].reset_index(drop=True)
            df1['size1'] = df2['size']
            new_df = df1.drop(columns=['target'])
            sum_ = new_df['size'].sum()
            sum1 = new_df['size1'].sum()
            # Calculate woe and IV
            new_df['woe'] = np.log((new_df['size']/sum_)/(new_df['size1']/sum1))
            new_df['IV'] = ((new_df['size']/sum_) - (new_df['size1']/sum1)) * new_df['woe']
            new_df = new_df.replace([np.inf, -np.inf], np.nan)
            new_df.dropna(inplace=True)
            woe_dict[col] = new_df.drop(columns=['size','size1'])
            IV_dict[col] = new_df['IV'].sum()
        return woe_dict, IV_dict

    def woe_iv_categ(self):
        """
        Same WOE/IV computation as woe_iv_continuous, but binned by the
        number of distinct values (for categorical/ordinal predictors).
        """
        df = self.predictors.copy()
        df['target'] = self.target.copy()
        IV_dict = {}
        woe_dict = {}
        for col in self.predictors.columns:
            # binning values: one bin edge per distinct value
            bins = np.linspace(df[col].min()-0.1, df[col].max()+0.1, len(set(df[col])))
            groups = df.groupby(np.digitize(df[col], bins))
            df[col] = pd.cut(df[col], bins)
            # getting class counts for each bin
            count_series = df.groupby([col, 'target']).size()
            new_df = count_series.to_frame(name = 'size').reset_index()
            # +0.5 smoothing keeps the log/division finite for empty cells.
            new_df['size'] = new_df['size'] + 0.5
            df1 = new_df[new_df['target']==0].reset_index(drop=True)
            df2 = new_df[new_df['target']==1].reset_index(drop=True)
            df1['size1'] = df2['size']
            new_df = df1.drop(columns=['target'])
            sum_ = new_df['size'].sum()
            sum1 = new_df['size1'].sum()
            # Calculate woe and IV
            new_df['woe'] = np.log((new_df['size']/sum_)/(new_df['size1']/sum1))
            new_df['IV'] = ((new_df['size']/sum_) - (new_df['size1']/sum1)) * new_df['woe']
            new_df = new_df.replace([np.inf, -np.inf], np.nan)
            new_df.dropna(inplace=True)
            woe_dict[col] = new_df.drop(columns=['size','size1'])
            IV_dict[col] = new_df['IV'].sum()
        return woe_dict, IV_dict

    @staticmethod
    def barchart_dict(iv_dict:dict):
        # Bar chart of a {feature: IV} dict, sorted ascending by value.
        d = dict(sorted(iv_dict.items(), key=lambda item: item[1]))
        # rcParams['figure.figsize'] = 20, 10
        plt.bar(range(len(d)), d.values(), align='center')
        plt.xticks(range(len(d)), list(d.keys()))
        plt.xticks(rotation=90)
        plt.show()

    def heatmap(self):
        # Correlation heatmap of predictors plus the target column.
        df = self.predictors.copy()
        df['target'] = self.target.copy()
        corrmat = df.corr()
        top_corr_features = corrmat.index
        plt.figure(figsize=(20,20))
        g=sns.heatmap(df[top_corr_features].corr(),annot=True,cmap="RdYlGn")

    def chi_square(self, alpha:float):
        """
        Chi-square test of independence between every predictor and the
        target; returns a summary dataframe with one row per predictor.
        """
        result = pd.DataFrame(columns=['Independent_Variable','Alpha','Degree_of_Freedom', 'Chi_Square','P_value','Conclusion'])
        for col in self.predictors.columns:
            table = pd.crosstab(self.target,self.predictors[col])
            print(f"Null hypothesis: there's no relationship between {col} and the response variable")
            observed_freq = table.values
            val = stats.chi2_contingency(observed_freq)
            expected_freq = val[3]
            dof = (table.shape[0]-1) * (table.shape[1]-1)
            chi_square = sum([(o-e)**2/e for o,e in zip(observed_freq,expected_freq)])
            # NOTE(review): summing only the first two components assumes the
            # crosstab has exactly two columns — confirm for wider tables.
            chi_square_statistic = chi_square[0] + chi_square[1]
            p_value = 1-chi2.cdf(x=chi_square_statistic,df=dof)
            if p_value <= alpha:
                print(f"Test result rejects the null hypothesis. There is a relationship between the {col} and the response variable")
                conclusion = "There's a relationship"
            else:
                print(f"Test result fails to reject the null hypothesis. There is no evidence to prove there's a relationship between {col} and the response variable")
                conclusion = "There's no relationship"
            result = result.append(pd.DataFrame([[col,alpha, dof,chi_square_statistic, p_value,conclusion]],columns=result.columns))
        return result
class ML_models():
    """Base class bundling the train/test split plus shared model utilities."""

    def __init__(self, train_inputs, train_targets, test_inputs, test_targets):
        self.train_inputs = train_inputs
        self.train_targets = train_targets
        self.test_inputs = test_inputs
        self.test_targets = test_targets

    def plot_cost_function(self,error,hyperparameters):
        # Simple error-vs-hyperparameter curve.
        plt.plot(hyperparameters,error)
        plt.ylabel('error')
        plt.xlabel('parameter')
        plt.show()

    def concept_drift(self):
        '''
        Evaluate concept drift between train and test data and return
        sample_weights usable when training the downstream model.

        Adversarial-validation approach: label train rows 1 and test rows 0,
        then train a classifier to tell them apart; a high AUC means the two
        distributions differ.
        '''
        trn, tst = self.train_inputs.copy(), self.test_inputs.copy()
        # Append the train(1)/test(0) indicator as an extra column.
        trn = np.concatenate((trn,np.ones((trn.shape[0],1))),1)
        tst = np.concatenate((tst,np.zeros((tst.shape[0],1))),1)
        merged_array = np.vstack((trn,tst))
        X_ = np.asarray([e[:-1] for e in list(merged_array)])
        y_ = np.asarray([e[-1] for e in list(merged_array)])
        predictions = np.zeros(y_.shape)
        np.random.seed(123)
        lgb_model = lgb.LGBMClassifier(n_jobs=-1, max_depth=-1, n_estimators=500, learning_rate=0.1, num_leaves=30, colsample_bytree=0.28, objective='binary')
        skf = StratifiedKFold(n_splits=20, shuffle=True, random_state=123)
        # NOTE(review): `predictions` is overwritten every fold, so only the
        # final fold's model feeds the reported score — confirm intended.
        for fold, (train_idx, test_idx) in enumerate(skf.split(X_, y_)):
            X_train, X_test = X_[train_idx], X_[test_idx]
            y_train, y_test = y_[train_idx], y_[test_idx]
            lgb_model.fit(X_train, y_train, eval_metric='auc', eval_set = [(X_test, y_test)], verbose=False)
            probs_train = lgb_model.predict_proba(self.train_inputs)[:, 1]
            probs_test = lgb_model.predict_proba(self.test_inputs)[:, 1]
            predictions = np.append(probs_train, probs_test)
        score = round(roc_auc_score(y_, predictions),2)
        if score >= 0.7:
            print(f"The model can differentiate between train and test data with an AUC of {score}. There seem to exist a significant level of concept drift between the test and train data")
        else:
            print(f"The model cannot strongly differentiate between train and test data with as its AUC is only {score}. There doesn't seem to exist a significant level of concept drift between the test and train data")
        sns.kdeplot(probs_train, label='train', legend=True)
        sns.kdeplot(probs_test, label='test', legend=True)
        predictions_train = predictions[:len(self.train_inputs)] # getting the training array
        # Rows that look most like test data receive the largest weights.
        sample_weights = (1 - predictions_train) / predictions_train
        sample_weights /= np.mean(sample_weights) # Normalizing the weights
        return sample_weights
class Gaussian_process(ML_models):
measurement_variance = 1
def __init__(self, train_inputs, train_targets, test_inputs, test_targets, k_folds, hyperparameters):
super().__init__(train_inputs, train_targets, test_inputs, test_targets)
self.k_folds = k_folds
self.hyperparameters = hyperparameters
def __str__(self):
return(f'train_input size is {self.train_inputs.shape}, test_input size is {self.test_inputs.shape},train_target size is {self.train_targets.shape}, test_target size is {self.test_targets.shape}')
__repr__ = __str__
@staticmethod
def predict_gaussian_process(inputs, posterior):
mean,variance = posterior(inputs)
return mean, variance
def eval_gaussian_process(self,inputs, posterior, targets):
mean, variance = self.predict_gaussian_process(inputs,posterior)
errors = mean - targets
mean_squared_error = np.sum(errors ** 2) / len(targets)
return mean_squared_error
def train_gaussian_process(self,train_inputs, train_targets, param):
K = self.gaussian_kernel(train_inputs,train_inputs, param)
regularized_gram_matrix = K + self.measurement_variance * np.identity(K.shape[0])
inverse_regularized_gram_matrix = np.linalg.inv(regularized_gram_matrix)
def posterior(inputs, train_inputs=train_inputs, train_targets=train_targets, inverse_regularized_gram_matrix=inverse_regularized_gram_matrix):
mean = np.matmul(np.matmul(self.gaussian_kernel(inputs,train_inputs,param),inverse_regularized_gram_matrix),train_targets)
variance = np.diag(self.gaussian_kernel(inputs,inputs,param) + self.measurement_variance * np.identity(inputs.shape[0]) - np.matmul(self.gaussian_kernel(inputs,train_inputs,param),inverse_regularized_gram_matrix).dot(self.gaussian_kernel(train_inputs,inputs,param)))
return mean, variance
return posterior
def gaussian_kernel(self,inputs1,inputs2,width):
euclidean_distance = np.sum(np.square(inputs1),1)[:,None] - 2 * np.matmul(inputs1,inputs2.transpose()) + np.sum(np.square(inputs2),1).transpose()
gram_matrix = np.exp(-euclidean_distance / (2 * np.square(width)))
return gram_matrix
def identity_kernel(self, dummy_param=None):
gram_matrix = np.matmul(self.train_inputs,self.train_inputs.transpose())
return gram_matrix
def cross_validation_gaussian_process(self):
    """k-fold cross-validation over self.hyperparameters (kernel widths).

    Returns (best_hyperparam, best_mean_squared_error, mean_squared_errors)
    where mean_squared_errors[i] is the average *validation* MSE of
    self.hyperparameters[i] across the k folds.
    """
    fold_size = len(self.train_targets) / self.k_folds
    mean_squared_errors = np.zeros(len(self.hyperparameters))
    for idx, hyperparam in enumerate(self.hyperparameters):
        for fold in tqdm.tqdm(range(self.k_folds)):
            time.sleep(0.00001)
            lo = int(round(fold * fold_size))
            hi = int(round((fold + 1) * fold_size))
            validation_inputs = self.train_inputs[lo:hi]
            validation_targets = self.train_targets[lo:hi]
            train_inputs = np.concatenate((self.train_inputs[:lo],
                                           self.train_inputs[hi:]))
            train_targets = np.concatenate((self.train_targets[:lo],
                                            self.train_targets[hi:]))
            posterior = self.train_gaussian_process(train_inputs, train_targets, hyperparam)
            # BUG FIX: score on the held-out fold.  The original evaluated
            # self.train_inputs / self.train_targets (the full training
            # set, including the fold's own training data), which defeats
            # the purpose of cross-validation.
            mean_squared_errors[idx] += self.eval_gaussian_process(validation_inputs, posterior, validation_targets)
    mean_squared_errors /= self.k_folds
    best_mean_squared_error = np.min(mean_squared_errors)
    best_hyperparam = self.hyperparameters[np.argmin(mean_squared_errors)]
    return best_hyperparam, best_mean_squared_error, mean_squared_errors
def training_gaussian(self):
    """Select a kernel width by CV, refit on all training data, score on test.

    Returns (posterior, test_mse, per_hyperparameter_cv_errors).
    """
    best_width, best_mean_squared_error, errors = self.cross_validation_gaussian_process()
    posterior = self.train_gaussian_process(self.train_inputs, self.train_targets, best_width)
    mse = self.eval_gaussian_process(self.test_inputs, posterior, self.test_targets)
    return posterior, mse, errors
class LogisticReg(ML_models):
    """Binary logistic regression with L2 regularization.

    The penalty lambda is chosen by k-fold cross-validation.  When
    self.imbalanced is True the validation score is F1 (higher is better);
    otherwise it is the cross-entropy loss (lower is better).
    """
    def __init__(self, train_inputs, train_targets, test_inputs, test_targets, k_folds, hyperparameters, imbalanced=False, upsampling=False, optimizer='newton'):
        """Store data plus CV / optimization settings.

        optimizer: 'newton' (IRLS, default) or 'gd' (conjugate gradient on
            the normal equations -- see train_logistic_regression).
        upsampling: SMOTE-oversample the training set before CV.
        """
        super().__init__(train_inputs, train_targets, test_inputs, test_targets)
        self.k_folds = k_folds
        self.hyperparameters = hyperparameters
        self.imbalanced = imbalanced
        self.upsampling = upsampling
        self.optimizer = optimizer

    def __str__(self):
        # Human-readable summary of the dataset shapes.
        return(f'train_input size is {self.train_inputs.shape}, test_input size is {self.test_inputs.shape},train_target size is {self.train_targets.shape}, test_target size is {self.test_targets.shape}')
    __repr__ = __str__

    @staticmethod
    def conjGrad(A, x0, b, tol, maxit):
        """Conjugate-gradient solve of A x = b (A symmetric positive definite).

        Iterates until the residual norm drops below tol relative to the
        initial residual, or maxit iterations elapse.  Returns the
        approximate solution as a flat array.
        """
        rold = b.reshape(-1) - np.asarray(np.matmul(A, x0)).reshape(-1)
        rold = rold.reshape(len(rold), 1)
        pold = rold
        steps = 0
        X = x0
        res = []
        init_normR0 = norm(rold)
        tol_check = 1
        for k in range(maxit):
            rt_old = np.transpose(rold)
            pt_old = np.transpose(pold)
            mult1 = np.matmul(rt_old, rold)
            mult2 = np.matmul(np.matmul(pt_old, A), pold)
            alpha = mult1 / mult2
            # Step along the current conjugate direction.
            alpha_pold = alpha * pold.reshape(-1)
            X = X.reshape(-1) + alpha_pold
            rnew = rold - alpha * np.matmul(A, pold)
            rt_new = np.transpose(rnew)
            mult3 = np.matmul(rt_new, rnew)
            beta = mult3 / mult1
            pnew = rnew + beta * pold
            ResCal = norm(rnew)
            res.append(ResCal)
            tol_check = ResCal / init_normR0
            # Update search direction and residual for the next iteration.
            pold = pnew
            rold = rnew
            steps = steps + 1
            if (tol_check < tol):
                break
        return X

    @staticmethod
    def identity_kernel(train_inputs):
        """Linear (identity) Gram matrix X X^T."""
        gram_matrix = np.matmul(train_inputs, train_inputs.transpose())
        return gram_matrix

    @staticmethod
    def sigmoid(input):
        """Elementwise logistic function 1 / (1 + e^-x)."""
        output = 1 / (1 + np.exp(-input))
        return output

    def predict_logistic_regression(self, inputs, weights):
        """Return per-row class probabilities as columns [P(y=0), P(y=1)]."""
        logits = np.matmul(inputs, weights)
        sigma = self.sigmoid(logits)
        predicted_probabilities = np.column_stack((1 - sigma, sigma))
        return predicted_probabilities

    @staticmethod
    def calculate_f1(labels, prediction):
        """F1 score of class 0 from a 2x2 confusion matrix.

        One dummy example of each class is appended to both vectors so the
        crosstab always has all four cells (this slightly biases the
        counts, as in the original implementation).
        """
        labels = np.append(labels, 0)
        labels = np.append(labels, 1)
        prediction = np.append(prediction, 0)
        prediction = np.append(prediction, 1)
        df_confusion = pd.crosstab(labels, prediction)
        TN = df_confusion.iloc[0, 0]
        FN = df_confusion.iloc[1, 0]
        TP = df_confusion.iloc[1, 1]
        FP = df_confusion.iloc[0, 1]
        recall_0 = TN / (TN + FP)
        recall_1 = TP / (TP + FN)
        # BUG FIX: the precisions lacked parentheses; `TN / TN + FN`
        # evaluated to 1 + FN instead of TN / (TN + FN).
        precision_0 = TN / (TN + FN)
        precision_1 = TP / (TP + FP)
        f1_class0 = (2 * recall_0 * precision_0) / (recall_0 + precision_0)
        f1_class1 = (2 * recall_1 * precision_1) / (recall_1 + precision_1)
        # ave_f1 = round((f1_class0 + f1_class1) / 2,2)
        return f1_class0

    def eval_logistic_regression(self, inputs, weights, labels):
        """Score predictions: F1 when imbalanced, else cross-entropy loss."""
        predicted_probabilities = self.predict_logistic_regression(inputs, weights)
        prediction = np.argmax(predicted_probabilities, axis=1)
        if self.imbalanced == True:
            score = self.calculate_f1(labels, prediction)
        else:
            # Negative log-likelihood (cross-entropy) of the labels.
            score = -sum(labels * np.log(self.sigmoid(inputs.dot(weights))) + (1 - labels) * np.log(1 - self.sigmoid(inputs.dot(weights))))
        return score

    @staticmethod
    def initialize_weights(n_weights):
        """Deterministic small random weights in [-0.05, 0.05)."""
        np.random.seed(123)
        random_weights = np.random.rand(n_weights) / 10 - 0.05
        return random_weights

    def train_logistic_regression(self, lambda_hyperparam, train_inputs=None, train_targets=None):
        """Fit weights with L2 penalty *lambda_hyperparam*.

        Defaults to the stored training set when no explicit fold data is
        supplied.  Returns the learned weight vector.
        """
        if train_inputs is None:
            train_inputs = self.train_inputs
            train_targets = self.train_targets
        weights = self.initialize_weights(train_inputs.shape[1])
        if self.optimizer == 'gd':
            # Conjugate gradient on the normal equations X^T X w = X^T y.
            # NOTE(review): this solves a least-squares problem rather than
            # the logistic likelihood -- presumably a deliberate
            # approximation; confirm before relying on it.
            transformed_train_inputs = np.matmul(train_inputs.T, train_inputs)
            transformed_train_targets = np.matmul(train_inputs.T, train_targets)
            tol = 10 ** (-10)
            maxit = 500
            weights = self.conjGrad(transformed_train_inputs, weights, transformed_train_targets, tol, maxit).reshape(-1)
        else:
            # Newton's method (IRLS).
            R = np.identity(train_inputs.shape[0])
            max_change = 1
            while max_change > 0.001:
                # R is the diagonal matrix of sigma(x_i.w) * (1 - sigma(x_i.w)).
                for i in range(train_inputs.shape[0]):
                    R[i][i] = self.sigmoid(train_inputs[i].dot(weights)) * (1 - self.sigmoid(train_inputs[i].dot(weights)))
                # Regularized Hessian.
                H = train_inputs.T.dot(R).dot(train_inputs) + lambda_hyperparam * np.identity(train_inputs.shape[1])
                inverse_H = np.linalg.inv(H)
                # Gradient of the regularized negative log-likelihood.
                gradient_L = train_inputs.T.dot(self.sigmoid(train_inputs.dot(weights)) - train_targets) + lambda_hyperparam * weights
                delta = inverse_H.dot(gradient_L)
                weights = weights - delta
                # BUG FIX: converge on the largest |update|.  The original
                # used max(delta), which ignores large *negative* updates
                # and can report convergence while weights are still moving.
                max_change = np.max(np.abs(delta))
        return weights

    def cross_validation_logistic_regression(self):
        """k-fold CV over self.hyperparameters (lambda values).

        Returns (best_hyperparam, best_score, scores).  Scores are
        maximized when imbalanced (F1), minimized otherwise (loss).
        """
        fold_size = len(self.train_targets) / self.k_folds
        scores = np.zeros(len(self.hyperparameters))
        # Oversample the under-represented class before splitting folds.
        if self.upsampling == True:
            os = SMOTE(random_state=123)
            os_data_X, os_data_y = os.fit_sample(self.train_inputs, self.train_targets)
            self.train_inputs = np.asarray(os_data_X)
            self.train_targets = np.asarray(os_data_y)
        for idx, hyperparam in enumerate(self.hyperparameters):
            for fold in range(self.k_folds):
                lo = int(round(fold * fold_size))
                hi = int(round((fold + 1) * fold_size))
                validation_inputs = self.train_inputs[lo:hi]
                validation_targets = self.train_targets[lo:hi]
                train_inputs = np.concatenate((self.train_inputs[:lo], self.train_inputs[hi:]))
                train_targets = np.concatenate((self.train_targets[:lo], self.train_targets[hi:]))
                weights = self.train_logistic_regression(hyperparam, train_inputs, train_targets)
                score = self.eval_logistic_regression(validation_inputs, weights, validation_targets)
                scores[idx] += score
        scores /= self.k_folds
        if self.imbalanced == True:
            best_score = np.max(scores)
            best_hyperparam = self.hyperparameters[np.argmax(scores)]
            return best_hyperparam, best_score, scores
        else:
            best_score = np.min(scores)
            best_hyperparam = self.hyperparameters[np.argmin(scores)]
            return best_hyperparam, best_score, scores
class Stack():
    """LIFO stack backed by a Python list (top of stack = end of list)."""
    def __init__(self):
        self.stack = list()
    def __str__(self):
        return str(self.stack)
    __repr__ = __str__
    def push(self, n):
        """Place *n* on top of the stack."""
        self.stack.append(n)
    def pop(self):
        """Remove and return the top item, or None when empty."""
        if not self.stack:
            return None
        return self.stack.pop()
    def peek(self):
        """Return the top item without removing it (IndexError when empty)."""
        return self.stack[-1]
class Queue():
    """FIFO queue backed by a Python list (dequeue from the front is O(n))."""
    def __init__(self):
        self.queue = list()
    def __str__(self):
        return str(self.queue)
    __repr__ = __str__
    def enqueue(self, n):
        """Append *n* to the back of the queue."""
        self.queue.append(n)
    def dequeue(self):
        """Remove and return the front item, or None when empty."""
        if not self.queue:
            return None
        return self.queue.pop(0)
    def peek(self):
        """Return the front item without removing it (IndexError when empty)."""
        return self.queue[0]
class MaxHeap():
    """Binary max-heap with 1-based indexing (index 0 is a sentinel)."""
    def __init__(self, items=()):
        """Build a heap by pushing each element of *items* in turn.

        FIX: the default was a mutable list ([]), a classic shared-default
        hazard; an empty tuple is equivalent and safe.
        """
        self.heap = [0]
        for item in items:
            self.heap.append(item)
            self.__floatUp(len(self.heap) - 1)
    def __str__(self):
        return str(self.heap)
    __repr__ = __str__
    def push(self, data):
        """Insert *data*, restoring the heap property."""
        self.heap.append(data)
        self.__floatUp(len(self.heap) - 1)
    def peek(self):
        """Return the maximum without removing it, or None when empty."""
        if len(self.heap) > 1:
            return self.heap[1]
        else:
            return None
    def pop(self):
        """Remove and return the maximum, or None when empty."""
        # Renamed the local from `max` (shadowed the builtin) to `top`.
        if len(self.heap) > 2:
            self.__swap(1, len(self.heap) - 1)
            top = self.heap.pop()
            self.__bubbleDown(1)
        elif len(self.heap) == 2:
            top = self.heap.pop()
        else:
            top = None
        return top
    def __swap(self, i, j):
        self.heap[i], self.heap[j] = self.heap[j], self.heap[i]
    def __floatUp(self, index):
        # Sift a new element up while it is larger than its parent.
        parent_index = index // 2
        if index <= 1:
            return
        elif self.heap[index] > self.heap[parent_index]:
            self.__swap(index, parent_index)
            self.__floatUp(parent_index)
    def __bubbleDown(self, index):
        # Sift the root element down to its correct position.
        left_sub = index * 2
        right_sub = left_sub + 1
        largest = index
        if len(self.heap) > left_sub and self.heap[left_sub] > self.heap[largest]:
            largest = left_sub
        if len(self.heap) > right_sub and self.heap[right_sub] > self.heap[largest]:
            largest = right_sub
        if largest != index:
            self.__swap(index, largest)
            self.__bubbleDown(largest)
class MinHeap():
    """Binary min-heap with 1-based indexing (index 0 is a sentinel)."""
    def __init__(self, items=()):
        """Build a heap by pushing each element of *items* in turn.

        FIX: the default was a mutable list ([]), a classic shared-default
        hazard; an empty tuple is equivalent and safe.
        """
        self.heap = [0]
        for item in items:
            self.heap.append(item)
            self.__floatUp(len(self.heap) - 1)
    def __str__(self):
        return str(self.heap)
    __repr__ = __str__
    def push(self, data):
        """Insert *data*, restoring the heap property."""
        self.heap.append(data)
        self.__floatUp(len(self.heap) - 1)
    def peek(self):
        """Return the minimum without removing it, or None when empty."""
        if len(self.heap) > 1:
            return self.heap[1]
        else:
            return None
    def pop(self):
        """Remove and return the minimum, or None when empty."""
        # FIX: the local was named `max` -- wrong for a min-heap and it
        # shadowed the builtin.  Renamed to `top`.
        if len(self.heap) > 2:
            self.__swap(1, len(self.heap) - 1)
            top = self.heap.pop()
            self.__bubbleDown(1)
        elif len(self.heap) == 2:
            top = self.heap.pop()
        else:
            top = None
        return top
    def __swap(self, i, j):
        self.heap[i], self.heap[j] = self.heap[j], self.heap[i]
    def __floatUp(self, index):
        # Sift a new element up while it is smaller than its parent.
        parent_index = index // 2
        if index <= 1:
            return
        elif self.heap[index] < self.heap[parent_index]:
            self.__swap(index, parent_index)
            self.__floatUp(parent_index)
    def __bubbleDown(self, index):
        # Sift the root element down to its correct position.
        left_sub = index * 2
        right_sub = left_sub + 1
        smallest = index
        if len(self.heap) > left_sub and self.heap[left_sub] < self.heap[smallest]:
            smallest = left_sub
        if len(self.heap) > right_sub and self.heap[right_sub] < self.heap[smallest]:
            smallest = right_sub
        if smallest != index:
            self.__swap(index, smallest)
            self.__bubbleDown(smallest)
class Node:
    """Linked-list node holding data plus next/prev references."""
    def __init__(self, d, n=None, p=None):
        self.data = d
        self.next = n
        self.prev = p

class LinkedList():
    """Singly linked list with O(1) insertion at the head."""
    def __init__(self, r=None):
        self.root = r
        self.size = 0
    def __str__(self):
        # BUG FIX: the original printed the list and implicitly returned
        # None, which makes str()/repr() raise TypeError.  Build and
        # return the same textual representation instead.
        parts = []
        this_node = self.root
        while this_node is not None:
            parts.append(f'{this_node.data} -> ')
            this_node = this_node.next
        parts.append('end of list')
        return ''.join(parts)
    __repr__ = __str__
    def add(self, d):
        """Prepend *d* at the head of the list."""
        new_node = Node(d, self.root)
        self.root = new_node
        self.size += 1
    def find(self, d):
        """Return a message saying whether *d* occurs in the list."""
        this_node = self.root
        while this_node is not None:
            if this_node.data == d:
                return(f'Item found')
            else:
                this_node = this_node.next
        return(f"this item doesn't exist in the list")
    def remove(self, d):
        """Unlink the first node holding *d*, if any."""
        this_node = self.root
        prev_node = None
        while this_node is not None:
            if this_node.data == d:
                if prev_node is None:
                    self.root = this_node.next
                else:
                    prev_node.next = this_node.next
                self.size -= 1
                return(f'item removed')
            else:
                # BUG FIX: the original advanced this_node first and then
                # set prev_node to the *new* node, so prev_node was never
                # the true predecessor and mid-list removal silently
                # relinked a node to itself (no-op).
                prev_node = this_node
                this_node = this_node.next
        return(f'no item in the list')
|
991,718 | aa9e38ad3e2964924a8d1beabdd9a18fe28586a7 | import sys
import re
from package import func_wiki
args = sys.argv
args.append('ch03/ch03/jawiki-country.json.gz')
def main():
pattern = r'\[\[ファイル:(.+?)\|'
result = '\n'.join(re.findall(pattern, func_wiki.read_wiki(args[1], 'イギリス')))
print(result)
if __name__ == '__main__':
main()
|
991,719 | 5e97637819494ac1924c84f29066997dad5b1876 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import glob
import os.path
if __name__ == '__main__':
suite = unittest.TestSuite()
for i in glob.glob('test_*.py'):
module = __import__(os.path.splitext(i)[0])
if hasattr(module, 'get_suite'):
suite.addTest(module.get_suite())
unittest.TextTestRunner(verbosity=2).run(suite) |
991,720 | 75cae99c8a6aac965247d961b182c3b6114bd171 | import numpy as np
import scipy.signal as signal
def gauss_kern(sigma, h):
    """Return an h-by-h 2D Gaussian kernel normalized to sum to 1."""
    rows, cols = np.mgrid[0:h, 0:h]
    # Center the coordinate grid on the kernel midpoint.
    rows = rows - h / 2
    cols = cols - h / 2
    kernel = np.exp(-(rows ** 2 + cols ** 2) / (2 * sigma ** 2))
    return kernel / kernel.sum()
def border(img,sigma=10.0,h=8):
    """Edge-stopping function g = 1 / (1 + |grad(G_sigma * img)|^2).

    Smooths *img* with an h-by-h Gaussian of width *sigma*, then maps the
    gradient magnitude so values near 0 mark strong edges.
    (Original docstring was Portuguese: "Funcao de parada das bordas".)
    """
    g = gauss_kern(sigma,h)
    img_smooth = signal.convolve(img, g, mode='same')
    Iy, Ix = np.gradient(img_smooth)
    absGradI=np.sqrt(Ix**2+Iy**2)
    return 1 / (1+absGradI**2)
class Levelset(object):
    """
    Traditional levelset implementation
    """
    def __init__(self, borderFunc, step=1, max_iter=150, v=1):
        """
        Create traditional levelset solver
        Parameters
        ----------
        :borderFunc: edge-stopping function g (e.g. output of border())
        :step: explicit Euler step size
        :max_iter: max number of iterations for contour evolution
        :v: balloon force (constant outward/inward speed)
        """
        self._u = None  # contour C => the level-set embedding function
        self.data = borderFunc  # edge-stopping function
        self.step_size = step
        self.max_iter = max_iter
        self.name = "Traditional Levelset"
        self.v = v
    def set_levelset(self, u):
        # Binarize the initial embedding: inside -> 1, outside -> 0.
        self._u = np.double(u)
        self._u[u>0] = 1
        self._u[u<=0] = 0
    levelset = property(lambda self: self._u,
                        set_levelset,
                        doc="The level set embedding function (u).")
    def step(self):
        """Advance phi by one explicit Euler step of
        d(phi)/dt = g*K*|grad phi| + g*v*|grad phi| + grad g . grad phi."""
        phi = self._u  # contour embedding
        g = self.data  # edge-stopping function
        gy, gx = np.gradient(g)
        dt = self.step_size
        vBalloon = self.v
        if phi is None:
            raise ValueError("levelset not set")
        # gradient of phi
        gradPhiY, gradPhiX = np.gradient(phi)
        # magnitude of gradient of phi
        absGradPhi=np.sqrt(gradPhiX**2+gradPhiY**2)
        # normalized gradient of phi - eliminating singularities
        # (the (absGradPhi==0) term avoids division by zero)
        normGradPhiX=gradPhiX/(absGradPhi+(absGradPhi==0))
        normGradPhiY=gradPhiY/(absGradPhi+(absGradPhi==0))
        divYnormGradPhiX, divXnormGradPhiX=np.gradient(normGradPhiX)
        divYnormGradPhiY, divXnormGradPhiY=np.gradient(normGradPhiY)
        # curvature is the divergence of normalized gradient of phi
        K = divXnormGradPhiX + divYnormGradPhiY
        tmp1 = g * K * absGradPhi          # curvature (smoothing) term
        tmp2 = g * absGradPhi * vBalloon   # balloon-force term
        tmp3 = gx * gradPhiX + gy * gradPhiY  # edge-attraction term
        dPhiBydT =tmp1 + tmp2 + tmp3
        #curve evolution
        phi = phi + (dt * dPhiBydT)
        self._u = phi
|
991,721 | 8852cc2a2261fe854ef4b7784f63b77f598a0526 | # Main program.
from . import task_generator
from . import dispatcher
from . import scheduler
from . import net_graph
# Create a network graph.
ntg = net_graph.NetGraph()
# Create a scheduler.
# Create a task generator.
# Create a dispatcher.
# Main loop
|
991,722 | 958ca864f28f9e8f9c91a0ddf34891cf27b99530 | import collections
# Print the first lowercase letter that does not occur in the input string,
# or 'None' when the input contains every letter (a pangram).
S = str(input())
cc = collections.Counter(S)
# BUG FIX: the original list contained 'm' twice and omitted 'n', so 'n'
# could never be reported and a string missing only 'n' printed 'None'.
alphabet = ['a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z']
for i in range(26):
    if cc[alphabet[i]] == 0:
        print(alphabet[i])
        exit()
    if i == 25:
        print('None')
        exit()
|
991,723 | 08f0f11a1d4aea0fda0ac25b3e54677dcfb6594a | # Generate an ANN structure for a given data set.
import os
import cProfile
import argparse
import logging
import sys
import config
import annfab.storage
import annfab.hasher
import annfab.utils
try:
import annfab.batch_hasher
disable_batch = False
except ImportError as e:
logging.error("Cannot import annfab.batch_hasher: %s" % e)
logging.error("Disabling batch mode")
disable_batch = True
def create_arg_parser():
    """Build the command-line parser for the ANN filter-engine generator."""
    parser = argparse.ArgumentParser(
        description="Generate a nearest neighbor filter engine")
    parser.add_argument("--outfile", default=None,
                        help="The filename to use to save the output.")
    parser.add_argument("-L", type=int, default=550,
                        help="The number of hashes to use.")
    parser.add_argument("-m", type=int, default=8,
                        help="The dimension of each hash")
    parser.add_argument("--batch-size", type=int, default=1)
    parser.add_argument("--gpu", action="store_true")
    return parser
def check_args(args):
    """Validate CLI argument combinations; return False on an invalid combo.

    Depends on the module-level `disable_batch` flag set at import time
    (True when annfab.batch_hasher failed to import).
    """
    if args.batch_size > 1 and disable_batch:
        logging.error("Batching is disabled. Please select a batch size of 1")
        return False
    if args.batch_size == 1 and args.gpu:
        logging.error("GPU mode is only supported for batched operation.")
        return False
    return True
def generate_model():
parser = create_arg_parser()
args = annfab.utils.parse_command_line(parser)
if not check_args(args):
sys.exit()
# Open the LMDB data base.
db_name = os.path.basename(args.data)
env = annfab.utils.open_database(args.data)
# Create the LMDB storage backend
storage = annfab.storage.LmdbStorage(
env, annfab.utils.normalized_image_vector)
if disable_batch and args.batch_size != 1:
logging.info("Batch mode disabled. \
Using a batch size of 1 and not %d" % args.batch_size)
args.batch_size = 1
if args.batch_size == 1:
# Create an image hasher.
hasher = annfab.hasher.Hasher(
storage, db_name, args.L, args.m)
else:
hasher = annfab.batch_hasher.BatchHasher(
storage, db_name, args.L, args.m, args.gpu)
hasher.set_batch_size(args.batch_size)
if args.profile:
pr = cProfile.Profile()
pr.enable()
with env.begin() as txn:
cursor = txn.cursor()
iter(cursor)
for key, value in cursor:
hasher.add_image(key, annfab.utils.value_to_image(value))
if args.batch_size != 1:
hasher.flush()
hasher.save_to_file(args.outfile)
if args.profile:
pr.create_stats()
pr.print_stats()
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO)
generate_model()
|
991,724 | 5fbe6ea85039a217739ed36b01c738d8dfa03b62 | # Perform substitutions
while NOUN_PLACEHOLDER in story:
new_word = random.choice(NOUNS)
story = story.replace(NOUN_PLACEHOLDER, new_word, 1)
while ADJECTIVE_PLACEHOLDER in story:
new_word = random.choice(ADJECTIVES)
story = story.replace(ADJECTIVE_PLACEHOLDER, new_word, 1)
while VERB_PLACEHOLDER in story:
new_word = random.choice(VERBS)
story = story.replace(VERB_PLACEHOLDER, new_word, 1)
# These are the new ones that James verbalizes as potential added sections
while ADVERB_PLACEHOLDER in story:
new_word = random.choice(ADVERBS)
story = story.replace(ADVERB_PLACEHOLDER, new_word, 1)
while EXCLAMATION_PLACEHOLDER in story:
new_word = random.choice(EXCLAMATIONS)
story = story.replace(EXCLAMATION_PLACEHOLDER, new_word, 1)
while PLACE_PLACEHOLDER in story:
new_word = random.choice(PLACES)
story = story.replace(PLACE_PLACEHOLDER, new_word, 1)
|
991,725 | c4f2103cb606f539e57ae2912f9714e881711ce9 | #!/usr/bin/env python3
"""
Example of how to use function_exec_manager.py
"""
import time
from pybullet_ros.function_exec_manager import FuncExecManager
class Plugin1:
def __init__(self, pybullet, robot, **kargs):
print('executing plugin 1 constructor')
def execute(self):
print('calling plugin1 execute function')
time.sleep(1)
class Plugin2:
def __init__(self, pybullet, robot, **kargs):
print('executing plugin 2 constructor')
def execute(self):
print('calling plugin2 execute function')
time.sleep(3)
class Plugin3:
def __init__(self, pybullet, robot, **kargs):
print('executing plugin 3 constructor')
def execute(self):
print('calling plugin3 execute function')
time.sleep(7)
class TestClass:
    """Helper providing the callbacks FuncExecManager expects."""
    def __init__(self):
        self.i = 0
    def count_to_five(self):
        """Stop condition: True from the sixth call onward."""
        self.i += 1
        return self.i > 5
    def exec_after_each_loop(self):
        """Hook run after each loop iteration; nothing to do here."""
        pass
    def pause_execution(self):
        """Never request a pause."""
        return False
if __name__ == '__main__':
list_of_classes = [Plugin1, Plugin2, Plugin3]
# make list of objects
list_of_objects = []
for c in list_of_classes:
list_of_objects.append(c('pb', 'robot'))
# create stop condition
hc = TestClass()
obj = FuncExecManager(list_of_objects, hc.count_to_five, hc.exec_after_each_loop, hc.pause_execution)
# start parallel execution of all "execute" class methods in a synchronous way
obj.start_synchronous_execution(loop_rate=0.25)
print('bye, bye!')
|
991,726 | 0762438871c64e340c7db50f054785f58c1fdd25 |
# interfile - Interfile read and write
# Stefano Pedemonte
# Aalto University, School of Science, Helsinki
# Oct 2013, Helsinki
from __future__ import absolute_import, print_function
import unittest
from .. import Interfile
class TestInterfile(unittest.TestCase):
"""Sequence of tests for module interfile. """
def setUp(self):
pass
def test_simple_parse(self):
"""Parse a simple interfile. """
pass
if __name__=="__main__":
unittest.main() |
991,727 | ec7843a8a78eaac9b8955bc7fbe80266d7f0d06e | from django.utils.translation import ugettext_lazy as _
from feincms.content.richtext.models import RichTextContent
from feincms.module.page.models import Page
Page.register_extensions(
'pagepermissions.extension'
)
Page.register_templates({
'title': _('Standard template'),
'path': 'base.html',
'regions': (
('main', _('Main content area')),
),
})
Page.create_content_type(RichTextContent)
|
991,728 | 067823bf7ec5e25121d8b237a5f51f0313c44f79 |
# coding: utf-8
# # Q3_P1
#
# 1.Use ‘cricket_matches’ data set.
#
# 2.Calculate the average score for each team which host the game and win the game.
#
# 3.Remember that if a team hosts a game and wins the game, their score can be innings_1 runs or innings_2 runs. You have to check if the host team won the game, check which innings they played in (innings_1 or innings_2), and take the runs scored in that innings. The final answer is the average score of each team satisfying the above condition.
#
# 4.Display a few rows of the outputuse df.head()
# In[7]:
import csv, sys
from pandas import Series, DataFrame
import pandas as pd
import numpy as np

# Load the cricket matches data set.
df = pd.read_csv('E:/Python/Data_Assignment_3/cricket_matches.csv')
df.head()

# Keep only the columns needed for the host-win average-score question.
df1 = df[['home','winner','innings1','innings1_runs','innings2','innings2_runs']]
df1.head()

# Flag games the host team won ('1') vs lost ('0').
df1['HOME'] = np.where(df1['home']==df1['winner'],'1','0')
df1.head()

# BUG FIX: df2 was referenced before ever being defined (NameError at run
# time -- the notebook cell creating it was missing).  Per the task, only
# games the host both hosted AND won contribute to the average.
df2 = df1[df1['HOME'] == '1'].copy()

# The host's score comes from whichever innings the host team batted in.
df2['score'] = np.where(df2['home']==df2['innings1'], df2['innings1_runs'], df2['innings2_runs'])
df2.head()

# Average score per host team.
df3 = df2[['home','score']]
df3['score'] = df3.groupby(['home'])['score'].transform('mean')
df3.head()

df3.to_csv('Q3_P1_output.csv')
|
991,729 | a8718ebd62900eb342c7c46b80ac7619ee450e50 |
# William Kavanagh, August 2019
# Extended CSG - strategy generator for 5c-3at RPGLite
full_name = {"K":"Knight","A":"Archer","W":"Wizard","R":"Rogue","H":"Healer"}
chars = ["K","A","W","R","H"]
def choice_available(state_desc):
    """Return True when the acting player has more than one viable action.

    state_desc holds the PRISM state variables as strings in the order
    (turn, five healths, stun, five healths, stun) -- see var_descriptors
    in run().
    """
    actors_dead = state_desc[1:6].count("0")
    opponents_dead = state_desc[7:12].count("0")
    if opponents_dead == 5:
        return False  # every opponent is dead
    if actors_dead == 5:
        return False  # every actor is dead
    if actors_dead == 4 and opponents_dead == 4:
        return False  # exactly one actor and one target alive: one move
    # Two actors alive against a single opponent, with a stun in play ...
    if actors_dead == 3 and opponents_dead == 4 and int(state_desc[6]) > 0:
        # ... and the stunned character is one of the living actors.
        if int(state_desc[int(state_desc[6])]) > 0:
            return False
    return True
def run(output, trip):
# takes two files for .sta and .tra and generates a strategy, printed to <pair>_strategy_<i>.txt
# also returns the total number of transitions
transitions = {}
total_actions = 0
voided_actions = 0
file_to_write = output + "/" + trip + "_base_strategy.txt"
f = open(file_to_write, "w")
f.write(open("forced_p2_moves.txt","r").read()) # print forced actions
f.write("// Generated strategy for " + trip + "\n")
for line in open(output+"/tmp.tra","r").readlines()[1:]: # for every transition
if line.split(" ")[4][:2] == "p1": # if the line is relevant
total_actions +=1
transitions[line.split(" ")[0]] = line.split(" ")[4][3:-1]
# Transitions = {state, action_string} for every relevant action.
var_descriptors = ["turn","p1K","p1A","p1W","p1R","p1H","p1_stun","p2K","p2A","p2W","p2R","p2H","p2_stun"]
for line in open(output+"/tmp.sta","r").readlines()[1:]: # for every state
if line.split(":(")[0] in transitions.keys(): # if it's relevant
single_s = line.split(":(")[0] # the state is
state_desc = line.split(":(")[1][:-2].split(",")
if choice_available(state_desc): # if a choice is needed.
act = transitions[line.split(":(")[0]]
guard = guard = "\t[p2_" + act + "] turn = 2 & p1K = "
guard += state_desc[7] + " & p1A = " + state_desc[8] + " & p1W = " + state_desc[9]
guard += " & p1R = " + state_desc[10] + " & p1H = " + state_desc[11]
guard += " & p1_stun = " + state_desc[12] + " &\n\t\t\tp2K = " + state_desc[1]
guard += " & p2A = " + state_desc[2] + " & p2W = " + state_desc[3] + " & p2R = " + state_desc[4]
guard += " & p2H = " + state_desc[5] + " & p2_stun = " + state_desc[6] + " ->\n"
f.write(guard)
p = 2
if "skip" in act:
result = "(turn' = " + str(3-p) + ") & (p" + str(p) + "_stun' = 0)"
action = "\t\t" + result + ";"
elif act[0] != "A" or len(act) < 4:
if "e" in act:
damage = "(p" + str(3-p) + act[2] + "' = 0) & "
else:
damage = "(p" + str(3-p) + act[2] + "' = max(0, p" + str(3-p) + act[2] + " - " + full_name[act[0]] + "_damage)) & "
if act[0] == "W": # add wizard stun to damage
damage += "(p" + str(3-p) + "_stun' = " + str(chars.index(act[2])+1) + ") & "
if act[0] == "H": # add healer heal to damage
damage += "(p" + str(p) + act[3] + "' = min(" + full_name[act[3]] + "_health, p" + str(p) + act[3] + " + Healer_heal)) & "
result = "(turn' = " + str(3-p) + ") & (p" + str(p) + "_stun' = 0)"
action = "\t\t(" + full_name[act[0]] + "_accuracy) : " + damage + result + " + "
action += "\n\t\t(1 - " + full_name[act[0]] + "_accuracy) : " + result + ";"
else:
result = "(turn' = " + str(3-p) + ") & (p" + str(p) + "_stun' = 0)"
targ_1 = "(p" + str(3-p) + act[2] + "' = max(0, p" + str(3-p) + act[2] + " - Archer_damage))"
targ_2 = "(p" + str(3-p) + act[3] + "' = max(0, p" + str(3-p) + act[3] + " - Archer_damage))"
action = "\t\t(pow(Archer_accuracy,2)) : " + targ_1 + " & " + targ_2 + " & " + result + " + \n"
action += "\t\t(Archer_accuracy * (1 - Archer_accuracy)) : " + targ_1 + " & " + result + " + \n"
action += "\t\t(Archer_accuracy * (1 - Archer_accuracy)) : " + targ_2 + " & " + result + " + \n"
action += "\t\t(pow( (1 - Archer_accuracy),2)) : " + result + ";"
f.write(action + "\n")
#run("output", "WRH")
|
991,730 | 9bd50c2e8e95addbd3009fcc3a9a0093065be924 | '''
Created on Dec 2, 2012
@author: adewinter
App meant to create a generic multiplier of two numbers. Using home brewed Genetic Algorithm creator thing.
'''
from gene import *
import logging
logger = logging.getLogger()
logger.setLevel(logging.INFO)
def evaluate_candidate(gene):
    """Score a candidate gene for the "multiply 3 by 7" task.

    Each of the gene's two outputs earns one point apiece for being
    divisible by 3, divisible by 7, and exactly equal to 21.

    NOTE(review): the original docstring claimed the HIGHEST of the two
    output scores is kept, but the expression below
    (`s1 if s1 <= s2 else s2`) keeps the LOWEST -- confirm which is
    intended before changing either.
    """
    def get_score(output):
        # Treat a missing output as 0.
        if output == None:
            output = 0
        s = 0;
        if output % 3 == 0:
            s += 1
        if output % 7 == 0:
            s += 1
        if output == 21:
            s += 1
        return s
    out1,out2 = gene.run(3,7)
    s1 = get_score(out1)
    s2 = get_score(out2)
    # A candidate producing no output at all scores 0 on both.
    if out1 == out2 == None: # or out1 == out2 == 0:
        s1 = 0
        s2 = 0
    gene.score = s1 if s1 <= s2 else s2
    return gene.score
def go():
    """
    Main init: run the GA and report whether a candidate was found.
    (Print statements below mean this file targets Python 2.)
    """
    # Population of 200, capped at 200 rounds, scored by the module-level
    # evaluate_candidate fitness function.
    ga = TestRunCoordinator(200)
    ga.MAX_RUNS = 200
    ga.evaluate_candidate = evaluate_candidate
    candidate = ga.run_rounds()
    if not candidate:
        print "###################"
        print "No candidate found!"
        print "###################"
    else:
        print "##############################"
        print "Succesfully found a candidate!"
        print "DNA: %s" % candidate
        print "##############################"
if __name__ == "__main__":
    go()
991,731 | 70b6b562b02999643f0f248aea5a91ca474c6000 | import requests
from bs4 import BeautifulSoup
import smtplib
import time
import sys
#URL = 'https://www.amazon.de/720%C2%B0DGREE-Trinkflasche-uberBottle-Wasserflasche-Auslaufsicher/dp/B07H7VGFSR?pf_rd_p=93ca5e1f-c180-59f3-a38f-564b8302b2de&pf_rd_r=TFG7VS6T3HD1SBN6EXMP&pd_rd_wg=mgYUE&ref_=pd_gw_ri&pd_rd_w=SNr3w&pd_rd_r=2f60cd90-3694-46aa-ac9b-262eeec28e58'
URL = sys.argv[1]
wished_price = sys.argv[2]
headers = {"User-Agent": 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.142 Safari/537.36'}
def check_price():
    """Fetch the product page and email an alert when the price has fallen.

    NOTE(review): float(price[1:3]) only reads the two characters after
    the currency symbol, so prices of 100+ (or multi-character currency
    symbols) parse incorrectly -- kept for compatibility, but worth
    replacing with a real number parser.
    """
    page = requests.get(URL, headers=headers)
    soup = BeautifulSoup(page.content, 'html.parser')
    soup2 = BeautifulSoup(soup.prettify(), "html.parser")
    title = soup2.find(id="productTitle").get_text()
    price = soup2.find(id="priceblock_ourprice").get_text()
    converted_price = float(price[1:3])
    # BUG FIX: the original sent the alert when the price was ABOVE the
    # wished price; the script's purpose (the "Price fell down!" email)
    # is the opposite -- notify when it drops to or below the target.
    if converted_price <= float(wished_price):
        send_email()
    print(converted_price)
    print(title.strip())
def send_email():
    """Send the price-drop alert to the owner's address via Gmail SMTP.

    SECURITY(review): the account and app password are hard-coded in
    source (and therefore public) -- move them to environment variables
    and revoke this password.
    """
    server = smtplib.SMTP('smtp.gmail.com', 587)
    server.ehlo()
    # Upgrade the connection to TLS before authenticating.
    server.starttls()
    server.ehlo()
    server.login('nouamaneazzouzi19@gmail.com', 'mntudmjngnyfamdj')
    subject = 'Price fell down !'
    body = 'Check the Amazon link : ' + URL
    msg = f"Subject: {subject}\n\n{body}"
    server.sendmail(
        'nouamaneazzouzi19@gmail.com',
        'nouamaneazzouzi19@gmail.com',
        msg
    )
    print('HEY EMAIL HAS BEEN SENT')
    server.quit()
while(True):
check_price()
time.sleep(60 * 60 * 24) |
991,732 | d0f66f7130346b9e333e4a82b7c69b873fedc5d4 | from flask import Flask, render_template
# Minimal two-page Flask site.
app=Flask(__name__)
@app.route('/')
def home():
    # Landing page.
    return render_template('site.html')
@app.route('/mysite2.html')
def about():
    # Secondary page, served under its template's own filename.
    return render_template('mysite2.html')
991,733 | ba371e6b60127ba78b3f32727263cfe4bdeea04b | # Enter your code here. Read input from STDIN. Print output to STDOUT
import numpy
n = map(int, raw_input().split())
print numpy.zeros(n, int), "\n", numpy.ones(n, int)
|
991,734 | 3329e11822ed577e6c83d676bc4ecc64c92d230e | """
Django settings for django_channels2 project.
"""
SECRET_KEY = "0%1c709jhmggqhk&=tci06iy+%jedfxpcoai69jd8wjzm+k2f0"
DEBUG = True
INSTALLED_APPS = ["channels", "graphql_ws.django", "graphene_django"]
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
]
},
}
]
MIDDLEWARE = [
'django.middleware.common.CommonMiddleware',
]
ROOT_URLCONF = "django_channels2.urls"
ASGI_APPLICATION = "graphql_ws.django.routing.application"
CHANNEL_LAYERS = {"default": {"BACKEND": "channels.layers.InMemoryChannelLayer"}}
GRAPHENE = {"MIDDLEWARE": [], "SCHEMA": "django_channels2.schema.schema"}
|
991,735 | 2e0078398ae72a5cfa92c60b9b4e10b03688c58a | __author__ = 'Kami'
|
991,736 | 27610b01613335975586de1b2e041a2b91434f39 | import re
# Advent-of-Code-style guard-sleep puzzle: find the guard who is most
# frequently asleep on the same minute.
data = [line.strip() for line in open("./input.txt").readlines()]
# Sorting the timestamped log lines puts all records in chronological order.
data.sort()
slept = {}
for line in data :
    if '#' in line :
        # Shift-change line: remember which guard is now on duty.
        # (NOTE: `id` shadows the builtin of the same name.)
        id = int(re.findall(r"\#(\d+)", line)[0])
        if id not in slept :
            slept[id] = [0] * 60
    elif "falls asleep" in line :
        start = int(re.findall(r"\d:(\d+)", line)[0])
    else :
        # "wakes up": credit each minute of the nap to the current guard.
        end = int(re.findall(r"\d:(\d+)", line)[0])
        for i in range(start, end):
            slept[id][i] += 1
# For each guard: (id, sleep count at their sleepiest minute, that minute).
infos = []
for id in slept :
    infos.append((id, max(slept[id]), slept[id].index(max(slept[id]))))
# Pick the guard with the highest single-minute sleep count.
res = max(infos, key=lambda item:item[1])
print(res)
print(res[0] * res[2])
# max(lis,key=lambda item:item[1])
# tot_slept = { id: sum(slept[id]) for id in slept }
# id = max(tot_slept, key=tot_slept.get)
# min = slept[id].index(max(slept[id]))
# print("id:" + str(id), "min:" + str(min))
# print("res:" + str(id*min))
991,737 | 13f9c5a83986714b1b646258fb0d9a1117eb8f1c | from django import forms
from django.contrib.auth.forms import UserCreationForm, AuthenticationForm
from django.utils.translation import ugettext, ugettext_lazy as _
from apps.user.models import CustomUser
from apps.opt.models import BusinessTypes
class LoginForm(AuthenticationForm):
pass
class UserForm(UserCreationForm):
    """Registration form for :class:`CustomUser`.

    Field labels are suppressed and per-field ``data-*`` widget attributes
    are injected for the site's client-side validation scripts.  All
    user-facing strings are Russian and are kept verbatim.
    """
    password1 = forms.CharField(label='Пароль', widget=forms.PasswordInput)
    password2 = forms.CharField(label='Подтверждение пароля', widget=forms.PasswordInput)
    business_type = forms.ModelMultipleChoiceField(
        queryset=BusinessTypes.objects.all(),
        widget=forms.CheckboxSelectMultiple,
        required=True
    )

    class Meta:
        model = CustomUser
        fields = [
            'email', 'phone', 'password1', 'password2', 'first_name',
            'last_name', 'father_name', 'city', 'is_whoosaler', 'business_type',
        ]

    # Presentation config applied in __init__: (required-override, widget attrs)
    # per field.  A required-override of None keeps the field's own default.
    _FIELD_UI = {
        'email': (True, {
            'data-type': 'email',
            'data-empty': 'Введите Ваш Email',
            'data-error': 'Email имеет не верный формат',
            'placeholder': 'Email *',
            'id': 'user_registration_email',
        }),
        'phone': (True, {
            'data-type': 'text',
            'data-empty': 'Введите Номер телефона',
            'placeholder': 'Номер телефона *',
            'id': 'user_registration_phone',
        }),
        'password1': (None, {
            'placeholder': 'Придумайте пароль',
            'data-type': 'password',
            'data-empty': 'Введите пароль',
            'data-error': 'от 6-ти символов латиницы и содержать цифры',
            'id': 'user_registration_password1',
        }),
        'password2': (None, {
            'placeholder': 'Пароль еще раз',
            'data-type': 'password2',
            'data-parent': 'user_registration_password1',
            # NOTE(review): typo kept verbatim — client-side JS may match on it.
            'data-empty': 'Поаторите проль',
            'data-error': 'Пароли не совпадают',
            'id': 'user_registration_password2',
        }),
        'first_name': (True, {
            'data-type': 'text',
            'data-empty': 'Введите Ваше имя',
            'placeholder': 'Имя *',
            'id': 'user_registration_first_name',
        }),
        'last_name': (True, {
            'data-type': 'text',
            'data-empty': 'Введите Вашу фамилию',
            'placeholder': 'Фамилия *',
            'id': 'user_registration_last_name',
        }),
        'city': (False, {
            'data-type': 'text',
            'data-empty': 'Введите Ваш город проживания',
            'placeholder': 'Город',
            'id': 'user_registration_city',
        }),
        'is_whoosaler': (False, {
            'name': 'whoosale_client',
            'type': 'hidden',
            'class': 'none',
            'id': 'user_registration_is_whoosaler',
        }),
    }

    def __init__(self, language='ru', *args, **kwargs):
        """Apply the per-field UI metadata above to the bound form fields."""
        super(UserForm, self).__init__(*args, **kwargs)
        for name, (required, attrs) in self._FIELD_UI.items():
            field = self.fields[name]
            field.label = ''
            if required is not None:
                field.required = required
            field.widget.attrs.update(attrs)
        # The wholesale flag is a hidden input that defaults to checked.
        self.fields['is_whoosaler'].initial = True

    def clean(self):
        """Reject registration when the e-mail is already taken.

        Uses ``cleaned_data.get('email')`` because the key is absent when
        the email field itself failed validation — plain indexing raised
        KeyError in that case.
        """
        cd = super().clean()
        errors = []
        email = cd.get('email')
        if email and CustomUser.objects.filter(email=email).exists():
            errors.append("Такой пользователь уже зарегестрирован")
        if errors:
            raise forms.ValidationError(errors)
        return cd
|
991,738 | 5c421813a057f70141b0cd0898490fd158dd8b82 | #!/usr/bin/env python
import json
import os
import csv
import random
import string
# Build tripti.json: for every CSV row containing "Total Population",
# record {district, population} pairs.  NOTE: Python 2 syntax throughout
# (print statements, 'rU' open mode).
currentdirpath = os.getcwd()
filename = 'choices.csv'
file_path = os.path.join(os.getcwd(), filename)
def get_file_path(filename):
    """Return the absolute path of *filename* under the current directory."""
    # BUG(review): missing call parens — binds the function object, not the
    # cwd string; harmless only because this local is never used.
    currentdirpath = os.getcwd
    file_path = os.path.join(os.getcwd(),filename)
    print file_path
    return file_path
path = get_file_path('choices.csv')
# Accumulator shared with read_csv.  NOTE(review): one trailing empty dict is
# always appended after the last match, so the output JSON ends with {}.
info=[{}]
def read_csv(filepath):
    """Collect (district, population) from rows mentioning 'Total Population'."""
    with open(filepath, 'rU') as csvfile:
        reader = csv.reader(csvfile)
        i=0
        for row in reader:
            if "Total Population"in row:
                print row[0],row[2]
                info[i]["district"]=row[0]      # column 0: district name
                info[i]["population"]=row[2]    # column 2: population value
                info.append({})
                i+=1
    # print info
    return info
a=json.dumps(read_csv(path))
# print a
f= open("tripti.json","w")
f.write(a)
f.close()
|
991,739 | ba8653a1e4a996dbec36ccf0388f9e69c5385859 | #!/usr/bin/env python3
# Power digit sum
# =================
# Problem 16
# :math:`2^{15} = 32768` and the sum of its digits is 3 + 2 + 7 + 6 + 8 = 26.
#
# What is the sum of the digits of the number :math:`2^{1000}`?
# .. rubric:: Solution
# .. py:module:: euler16
# :synopsis: Power digit sum
# .. py:function:: pow2_digits( n )
#
# Compute :math:`2^n` as a sequence of digits.
# Naive implementation.
#
# :param n: power of 2
# :returns: sequence of digits for 2**n
#
# This is, of course, horribly slow.
def pow2_digits(n):
    """Compute :math:`2^n` by doubling a digit list n times.

    Digits are kept least-significant-first internally and reversed on
    return.  Deliberately naive and slow.

    >>> from euler16 import pow2_digits
    >>> pow2_digits(15)
    [3, 2, 7, 6, 8]
    >>> sum(pow2_digits(15))
    26
    """
    little_endian = [1]
    for _ in range(n):
        carry = 0
        for pos, digit in enumerate(little_endian):
            carry, little_endian[pos] = divmod(2 * digit + carry, 10)
        if carry:
            little_endian.append(carry)
    return little_endian[::-1]
# .. py:function:: pow2_digits2( n )
#
# Compute :math:`2^n` as a sequence of digits.
# Using a fast exponentiation algorithm,
# we can reduce the number of multiplications from 1000 to
# :math:`\log_2 1000 \approx 11`.
#
# :param n: power of 2
# :returns: sequence of digits for 2**n
#
# This actually relies on Python's internal long
# conversions in the internal val() function, so it
# uses long integer math and is not the same kind
# of digit-by-digit multiplication as :py:func:`pow2_digits`.
def pow2_digits2(n):
    """Compute :math:`2^n` via binary (fast) exponentiation on digit lists.

    Needs only about :math:`\\log_2 n` multiplications.  The multiplier is
    converted to a plain int first, so this leans on Python's long
    arithmetic rather than pure digit-by-digit multiplication.

    >>> from euler16 import pow2_digits2
    >>> pow2_digits2(15)
    [3, 2, 7, 6, 8]
    >>> sum(pow2_digits2(15))
    26
    """
    def as_int(le_digits):
        # Value of a least-significant-first digit list (may be a big int).
        total, place = 0, 1
        for d in le_digits:
            total += d * place
            place *= 10
        return total

    def times(le_digits, factor):
        # le_digits * factor for an int factor, propagating carries.
        out = [0] * len(le_digits)
        carry = 0
        for pos, d in enumerate(le_digits):
            carry, out[pos] = divmod(factor * d + carry, 10)
        while carry:
            carry, low = divmod(carry, 10)
            out.append(low)
        return out

    def power(base, exp):
        # Classic square-and-multiply recursion on digit lists.
        if exp == 0:
            return [1]
        if exp % 2:
            return times(power(base, exp - 1), as_int(base))
        half = power(base, exp // 2)
        return times(half, as_int(half))

    return list(reversed(power([2], n)))
# .. py:function:: pow2_digits3( n )
#
# Compute :math:`2^n` as a sequence of digits.
# Use Python's ordinary built-in 2**x.
#
# :param n: power of 2
# :returns: sequence of digits for 2**n
from euler04 import digits
def pow2_digits3(n):
    """Compute :math:`2^n` with native big-integer arithmetic, then split
    the result into digits via :func:`euler04.digits`.

    >>> from euler16 import pow2_digits3
    >>> pow2_digits3(15)
    [3, 2, 7, 6, 8]
    >>> sum(pow2_digits3(15))
    26
    """
    power_of_two = 2 ** n
    return digits(power_of_two)
# Test the module's components.
def test():
    """Run this module's doctests silently (no output on success)."""
    import doctest
    doctest.testmod(verbose=0)
# Compute the answer.
def answer():
    """Return the digit sum of 2**1000, using the fast-exponentiation variant."""
    return sum(pow2_digits2(1000))
# Confirm the answer.
def confirm(ans):
    """Raise AssertionError unless *ans* is the known-correct value 1366."""
    expected = 1366
    assert ans == expected, "{0!r} Incorrect".format(ans)
# Compare performance of :py:func:`pow2_digits` and :py:func:`pow2_digits2`.
def compare_timing():
    """Benchmark the three implementations with timeit (100 calls each).

    Imports this module by the name 'euler16' inside the timed setup, so
    it only works when the file is importable under that name.
    """
    import timeit
    p= timeit.timeit( "pow2_digits(1000)", "from euler16 import pow2_digits", number=100 )
    print( "pow2_digits", p )
    p2= timeit.timeit( "pow2_digits2(1000)", "from euler16 import pow2_digits2", number=100 )
    print( "pow2_digits2", p2 )
    p3= timeit.timeit( "pow2_digits3(1000)", "from euler16 import pow2_digits3", number=100 )
    print( "pow2_digits3", p3 )
# Create some output.
if __name__ == "__main__":
    test()           # run doctests first; silent on success
    ans= answer()
    confirm(ans)     # hard-stop if the computed answer regressed
    print( "The sum of the digits of the number 2**1000:", ans )
    #compare_timing()
991,740 | b86043bb70db47fbe6478fdaf63e45b71e244893 | from flask import Blueprint
from sys_app.api import AppAPI, AccessAPI
# Blueprint grouping the app/auth endpoints into one registrable unit.
app_app = Blueprint('app_app', __name__)
# POST /apps/ -> AppAPI (class-based view).
app_view = AppAPI.as_view('app_api')
app_app.add_url_rule('/apps/',view_func=app_view, methods=['POST',])
# POST /apps/access_token/ -> AccessAPI.
access_view = AccessAPI.as_view('access_api')
app_app.add_url_rule('/apps/access_token/', view_func=access_view, methods=['POST',])
991,741 | 60131206a8647c93f88a0ee2aec3585103a93a52 | import heapq
def solution(scoville, K):
    """Return the minimum number of mixes until every scoville value >= K.

    Each mix removes the two lowest values a <= b and inserts a + 2*b.
    Returns -1 when the target cannot be reached (including empty input).

    :param scoville: iterable of non-negative scoville values
    :param K: target minimum scoville value
    """
    heap = list(scoville)
    heapq.heapify(heap)  # O(n) build instead of n individual pushes
    mixes = 0
    while heap:
        if heap[0] >= K:
            return mixes
        weakest = heapq.heappop(heap)
        # Only mix when a second ingredient exists; otherwise the loop
        # ends and we report failure below.
        if heap:
            heapq.heappush(heap, weakest + heapq.heappop(heap) * 2)
        mixes += 1
    return -1
|
991,742 | 934cd2523fca35c070a1c753dec84163b74a373e | from typing import TypedDict
class PullRequest(TypedDict):
    """Typed-dict schema for a pull-request payload; no keys declared yet."""
    pass
|
991,743 | 22be56a727deae2433d56b38bd8ad77585876c77 | caso1 = {'25u8sBP': {'estado': 'Fuera de Servicio',
'modeloCajero': 101,
'transacciones': [{'fechaMovimiento': '17-05-2021',
'monto': 50000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '15-12-2020',
'monto': 200000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '07-01-2021',
'monto': 300000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '27-04-2021',
'monto': 1000000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '15-10-2020',
'monto': 100000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '21-05-2021',
'monto': 100000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'}],
'zona': 6},
'2dJeDFI': {'estado': 'Fuera de Servicio',
'modeloCajero': 2017,
'transacciones': [{'fechaMovimiento': '24-08-2020',
'monto': 1000000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '18-03-2021',
'monto': 200000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '14-02-2021',
'monto': 500000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '06-08-2020',
'monto': 500000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '14-07-2020',
'monto': 20000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '05-07-2020',
'monto': 300000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '28-02-2021',
'monto': 50000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'}],
'zona': 1},
'53ZPtBP': {'estado': 'Cerrado',
'modeloCajero': 101,
'transacciones': [{'fechaMovimiento': '22-03-2021',
'monto': 100000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '28-04-2021',
'monto': 500000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '01-12-2020',
'monto': 500000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '12-06-2020',
'monto': 500000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '22-02-2021',
'monto': 100000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'retiro'}],
'zona': 6},
'5E2U6FI': {'estado': 'Operando',
'modeloCajero': 101,
'transacciones': [{'fechaMovimiento': '27-08-2020',
'monto': 1000000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '25-07-2020',
'monto': 300000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'}],
'zona': 2},
'67vr2FI': {'estado': 'Cerrado',
'modeloCajero': 101,
'transacciones': [{'fechaMovimiento': '02-04-2021',
'monto': 100000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '08-08-2020',
'monto': 100000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '22-12-2020',
'monto': 100000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'}],
'zona': 6},
'8KCC9BP': {'estado': 'Operando',
'modeloCajero': 2017,
'transacciones': [{'fechaMovimiento': '13-04-2021',
'monto': 100000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '16-04-2021',
'monto': 1000000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '05-09-2020',
'monto': 20000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '09-10-2020',
'monto': 100000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '09-08-2020',
'monto': 50000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '08-05-2021',
'monto': 20000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '09-03-2021',
'monto': 20000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '28-07-2020',
'monto': 1000000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '01-04-2021',
'monto': 500000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '01-12-2020',
'monto': 1000000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '04-11-2020',
'monto': 100000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '23-03-2021',
'monto': 50000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '17-07-2020',
'monto': 100000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'}],
'zona': 2},
'8u577BP': {'estado': 'Fuera de Servicio',
'modeloCajero': 100,
'transacciones': [{'fechaMovimiento': '11-07-2020',
'monto': 200000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '25-10-2020',
'monto': 300000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '20-12-2020',
'monto': 200000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '24-11-2020',
'monto': 500000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'}],
'zona': 7},
'9Pu8BFI': {'estado': 'Fuera de Servicio',
'modeloCajero': 100,
'transacciones': [{'fechaMovimiento': '17-05-2021',
'monto': 1000000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '18-01-2021',
'monto': 100000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '06-10-2020',
'monto': 50000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '14-03-2021',
'monto': 200000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '09-07-2020',
'monto': 200000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '26-08-2020',
'monto': 100000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '06-02-2021',
'monto': 300000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '17-09-2020',
'monto': 100000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '06-07-2020',
'monto': 300000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '25-11-2020',
'monto': 20000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '27-02-2021',
'monto': 1000000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '11-05-2021',
'monto': 200000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '15-07-2020',
'monto': 300000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '25-08-2020',
'monto': 300000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'retiro'}],
'zona': 7},
'M1m29FI': {'estado': 'Operando',
'modeloCajero': 2020,
'transacciones': [{'fechaMovimiento': '04-03-2021',
'monto': 20000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'}],
'zona': 3},
'e573hBP': {'estado': 'Cerrado',
'modeloCajero': 2017,
'transacciones': [{'fechaMovimiento': '28-04-2021',
'monto': 1000000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '23-03-2021',
'monto': 100000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '16-08-2020',
'monto': 200000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '04-10-2020',
'monto': 20000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '15-08-2020',
'monto': 500000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '12-09-2020',
'monto': 500000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '20-05-2021',
'monto': 100000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'}],
'zona': 5},
'h3RW9FI': {'estado': 'Fuera de Servicio',
'modeloCajero': 101,
'transacciones': [{'fechaMovimiento': '05-02-2021',
'monto': 300000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '02-06-2020',
'monto': 200000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '05-05-2021',
'monto': 200000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '26-12-2020',
'monto': 20000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '24-02-2021',
'monto': 1000000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '18-08-2020',
'monto': 20000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '24-08-2020',
'monto': 20000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '09-03-2021',
'monto': 300000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '04-09-2020',
'monto': 300000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '27-12-2020',
'monto': 300000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '10-05-2021',
'monto': 100000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '27-10-2020',
'monto': 1000000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'}],
'zona': 6},
'hV6CdBP': {'estado': 'Fuera de Servicio',
'modeloCajero': 2020,
'transacciones': [{'fechaMovimiento': '02-03-2021',
'monto': 20000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '06-02-2021',
'monto': 20000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '04-11-2020',
'monto': 500000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '28-04-2021',
'monto': 500000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '24-08-2020',
'monto': 50000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '05-10-2020',
'monto': 500000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '20-03-2021',
'monto': 500000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '24-12-2020',
'monto': 100000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '16-10-2020',
'monto': 500000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '15-08-2020',
'monto': 1000000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '02-09-2020',
'monto': 300000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '18-05-2021',
'monto': 100000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '18-12-2020',
'monto': 300000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'}],
'zona': 2},
'lU236FI': {'estado': 'Cerrado',
'modeloCajero': 2020,
'transacciones': [{'fechaMovimiento': '21-01-2021',
'monto': 50000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '19-11-2020',
'monto': 500000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '11-08-2020',
'monto': 50000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '27-01-2021',
'monto': 100000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '25-07-2020',
'monto': 200000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '01-03-2021',
'monto': 100000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '07-08-2020',
'monto': 1000000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '04-09-2020',
'monto': 20000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '25-01-2021',
'monto': 20000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '08-05-2021',
'monto': 200000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '26-10-2020',
'monto': 50000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '14-11-2020',
'monto': 300000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '12-12-2020',
'monto': 50000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'retiro'}],
'zona': 1},
'tGSVPFI': {'estado': 'Cerrado',
'modeloCajero': 2017,
'transacciones': [{'fechaMovimiento': '12-01-2021',
'monto': 100000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '15-08-2020',
'monto': 200000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '25-04-2021',
'monto': 200000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '09-03-2021',
'monto': 100000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '17-12-2020',
'monto': 500000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '08-09-2020',
'monto': 100000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'}],
'zona': 3},
'un43eFI': {'estado': 'Cerrado',
'modeloCajero': 100,
'transacciones': [{'fechaMovimiento': '06-11-2020',
'monto': 1000000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '16-04-2021',
'monto': 20000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '04-09-2020',
'monto': 20000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '02-01-2021',
'monto': 300000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '25-04-2021',
'monto': 100000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '14-04-2021',
'monto': 1000000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '13-02-2021',
'monto': 1000000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'}],
'zona': 6}}
caso2 = {'14407BP': {'estado': 'Operando',
'modeloCajero': 2017,
'transacciones': [{'fechaMovimiento': '20-04-2021',
'monto': 300000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '10-04-2021',
'monto': 1000000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '22-03-2021',
'monto': 50000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '05-07-2020',
'monto': 1000000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '06-04-2021',
'monto': 500000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '24-01-2021',
'monto': 100000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '22-05-2021',
'monto': 500000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '06-03-2021',
'monto': 50000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '16-02-2021',
'monto': 100000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '24-03-2021',
'monto': 20000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'}],
'zona': 3},
'35wwsFI': {'estado': 'Fuera de Servicio',
'modeloCajero': 2020,
'transacciones': [{'fechaMovimiento': '14-01-2021',
'monto': 1000000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '06-05-2021',
'monto': 500000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '10-05-2021',
'monto': 300000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '12-10-2020',
'monto': 500000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '11-03-2021',
'monto': 1000000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '06-03-2021',
'monto': 50000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '17-04-2021',
'monto': 300000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '13-02-2021',
'monto': 1000000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '22-04-2021',
'monto': 300000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '26-01-2021',
'monto': 1000000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '16-06-2020',
'monto': 200000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '17-02-2021',
'monto': 500000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'retiro'}],
'zona': 4},
'38fb4BP': {'estado': 'Cerrado',
'modeloCajero': 2017,
'transacciones': [{'fechaMovimiento': '01-07-2020',
'monto': 500000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '17-06-2020',
'monto': 100000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '20-06-2020',
'monto': 500000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '24-05-2021',
'monto': 200000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '26-08-2020',
'monto': 200000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '28-07-2020',
'monto': 1000000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '06-07-2020',
'monto': 100000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '15-02-2021',
'monto': 1000000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '16-12-2020',
'monto': 20000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '03-10-2020',
'monto': 20000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'retiro'}],
'zona': 4},
'86r18FI': {'estado': 'Cerrado',
'modeloCajero': 2017,
'transacciones': [{'fechaMovimiento': '10-02-2021',
'monto': 500000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '11-01-2021',
'monto': 1000000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '09-08-2020',
'monto': 500000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '27-06-2020',
'monto': 300000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '12-05-2021',
'monto': 100000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '10-06-2020',
'monto': 50000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '18-07-2020',
'monto': 100000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '05-06-2020',
'monto': 300000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '06-07-2020',
'monto': 200000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '20-10-2020',
'monto': 100000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'}],
'zona': 6},
'HzDdnFI': {'estado': 'Fuera de Servicio',
'modeloCajero': 101,
'transacciones': [{'fechaMovimiento': '19-10-2020',
'monto': 300000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '01-05-2021',
'monto': 500000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '24-09-2020',
'monto': 20000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '06-08-2020',
'monto': 200000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '18-04-2021',
'monto': 200000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'}],
'zona': 4},
'RDI42BP': {'estado': 'Operando',
'modeloCajero': 101,
'transacciones': [{'fechaMovimiento': '07-05-2021',
'monto': 20000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '19-04-2021',
'monto': 200000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '13-05-2021',
'monto': 300000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'}],
'zona': 1},
'Rn46WFI': {'estado': 'Fuera de Servicio',
'modeloCajero': 101,
'transacciones': [{'fechaMovimiento': '13-04-2021',
'monto': 50000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '20-11-2020',
'monto': 300000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '25-05-2021',
'monto': 20000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '26-12-2020',
'monto': 1000000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '28-12-2020',
'monto': 20000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '28-10-2020',
'monto': 50000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '10-06-2020',
'monto': 100000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '28-08-2020',
'monto': 20000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '05-07-2020',
'monto': 20000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '24-05-2021',
'monto': 100000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '01-03-2021',
'monto': 100000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '05-03-2021',
'monto': 50000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '07-02-2021',
'monto': 50000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '15-11-2020',
'monto': 20000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '06-02-2021',
'monto': 1000000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'}],
'zona': 4},
'dp272BP': {'estado': 'Cerrado',
'modeloCajero': 100,
'transacciones': [{'fechaMovimiento': '25-08-2020',
'monto': 100000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '13-01-2021',
'monto': 100000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '19-04-2021',
'monto': 1000000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '12-09-2020',
'monto': 20000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '12-06-2020',
'monto': 20000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '01-03-2021',
'monto': 20000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '13-12-2020',
'monto': 50000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '25-04-2021',
'monto': 200000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '12-07-2020',
'monto': 20000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '20-03-2021',
'monto': 500000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '10-10-2020',
'monto': 500000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '14-07-2020',
'monto': 300000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '08-02-2021',
'monto': 1000000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '09-08-2020',
'monto': 50000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'}],
'zona': 4},
'ye677BP': {'estado': 'Cerrado',
'modeloCajero': 101,
'transacciones': [{'fechaMovimiento': '04-05-2021',
'monto': 1000000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '08-09-2020',
'monto': 200000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '17-02-2021',
'monto': 300000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '09-08-2020',
'monto': 200000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'}],
'zona': 2}}
caso3 = {'79273FI': {'estado': 'Operando',
'modeloCajero': 2017,
'transacciones': [{'fechaMovimiento': '22-07-2020',
'monto': 500000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '20-02-2021',
'monto': 100000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '20-08-2020',
'monto': 100000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '17-06-2020',
'monto': 500000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '04-03-2021',
'monto': 200000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '23-07-2020',
'monto': 200000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '06-11-2020',
'monto': 50000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '05-03-2021',
'monto': 300000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '13-03-2021',
'monto': 50000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '14-04-2021',
'monto': 50000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '15-08-2020',
'monto': 50000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '24-04-2021',
'monto': 300000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '18-11-2020',
'monto': 1000000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '24-02-2021',
'monto': 50000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '09-06-2020',
'monto': 20000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'retiro'}],
'zona': 4},
'ah397BP': {'estado': 'Cerrado',
'modeloCajero': 101,
'transacciones': [{'fechaMovimiento': '02-07-2020',
'monto': 100000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '22-04-2021',
'monto': 300000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '10-10-2020',
'monto': 200000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '14-02-2021',
'monto': 100000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '06-06-2020',
'monto': 200000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '20-06-2020',
'monto': 500000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '11-10-2020',
'monto': 300000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '19-10-2020',
'monto': 500000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '18-12-2020',
'monto': 50000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '13-03-2021',
'monto': 100000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '22-12-2020',
'monto': 50000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'}],
'zona': 3},
'fRYcuBP': {'estado': 'Operando',
'modeloCajero': 2020,
'transacciones': [{'fechaMovimiento': '28-10-2020',
'monto': 1000000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '09-04-2021',
'monto': 20000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '26-05-2021',
'monto': 500000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '24-02-2021',
'monto': 50000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '02-05-2021',
'monto': 200000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '05-11-2020',
'monto': 20000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '09-08-2020',
'monto': 50000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '06-02-2021',
'monto': 20000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'}],
'zona': 6}}
caso4 = {'1n50rFI': {'estado': 'Operando',
'modeloCajero': 2020,
'transacciones': [{'fechaMovimiento': '13-04-2021',
'monto': 300000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '20-04-2021',
'monto': 50000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '16-01-2021',
'monto': 20000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '23-09-2020',
'monto': 200000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '24-03-2021',
'monto': 1000000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '15-04-2021',
'monto': 300000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '19-01-2021',
'monto': 200000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '25-02-2021',
'monto': 1000000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '15-11-2020',
'monto': 200000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '01-05-2021',
'monto': 100000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '25-04-2021',
'monto': 200000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '20-06-2020',
'monto': 50000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '08-06-2020',
'monto': 500000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'}],
'zona': 3},
'2Q0AUBP': {'estado': 'Operando',
'modeloCajero': 101,
'transacciones': [{'fechaMovimiento': '18-04-2021',
'monto': 50000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '12-07-2020',
'monto': 200000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '28-07-2020',
'monto': 20000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '10-04-2021',
'monto': 500000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '19-09-2020',
'monto': 200000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '05-03-2021',
'monto': 300000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '14-12-2020',
'monto': 300000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '05-12-2020',
'monto': 20000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '16-03-2021',
'monto': 200000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '18-12-2020',
'monto': 500000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '06-06-2020',
'monto': 1000000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '25-05-2021',
'monto': 100000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'retiro'}],
'zona': 5},
'3I0iPBP': {'estado': 'Operando',
'modeloCajero': 101,
'transacciones': [{'fechaMovimiento': '27-05-2021',
'monto': 1000000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '22-06-2020',
'monto': 1000000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'retiro'}],
'zona': 2},
'58u00BP': {'estado': 'Operando',
'modeloCajero': 2017,
'transacciones': [{'fechaMovimiento': '17-02-2021',
'monto': 100000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '25-06-2020',
'monto': 50000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '25-04-2021',
'monto': 1000000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '13-12-2020',
'monto': 100000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '01-10-2020',
'monto': 200000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '22-12-2020',
'monto': 200000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '21-04-2021',
'monto': 20000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '14-05-2021',
'monto': 500000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '10-02-2021',
'monto': 20000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '05-12-2020',
'monto': 100000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '11-01-2021',
'monto': 100000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '06-09-2020',
'monto': 20000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '26-02-2021',
'monto': 200000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '21-04-2021',
'monto': 1000000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'}],
'zona': 6},
'PiGasFI': {'estado': 'Operando',
'modeloCajero': 2020,
'transacciones': [{'fechaMovimiento': '19-01-2021',
'monto': 200000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '12-12-2020',
'monto': 500000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '03-01-2021',
'monto': 200000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '07-08-2020',
'monto': 200000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'retiro'},
{'fechaMovimiento': '01-03-2021',
'monto': 300000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '18-10-2020',
'monto': 300000,
'tipoCuenta': 'corriente',
'tipoMovimiento': 'retiro'}],
'zona': 1},
'x7665FI': {'estado': 'Fuera de Servicio',
'modeloCajero': 2017,
'transacciones': [{'fechaMovimiento': '08-07-2020',
'monto': 1000000,
'tipoCuenta': 'cuentaVirtual',
'tipoMovimiento': 'consignacion'},
{'fechaMovimiento': '03-04-2021',
'monto': 20000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'transferencia'},
{'fechaMovimiento': '26-08-2020',
'monto': 200000,
'tipoCuenta': 'ahorros',
'tipoMovimiento': 'consignacion'}],
'zona': 1}} |
991,744 | 64a6f0b1ee14d0d5b5b4baa58c2aa25f3552aafe | import requests
siteList = [
    "https://google.com",
    "http://store.nike.com/us/en_us/",
    "http://www.adidas.com/us/yeezy",
    "http://www.adidas.com/us",
    "http://www.supremenewyork.com/shop/all"
]
proxies = [
    "123.123.123.123:1234"
]
userAgent = {"User-Agent":"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/56.0.2924.87 Safari/537.36"}

# Probe each site through each proxy and report whether the request succeeds.
for site in siteList:
    for proxy in proxies:
        try:
            r = requests.get(site,proxies={"http":proxy,"https":proxy},timeout=5,headers=userAgent)
            print(site + " - Status Code: " + str(r.status_code))
        # Was a bare `except:`, which also swallowed KeyboardInterrupt/SystemExit
        # and any programming error. Only network/HTTP failures mean "banned".
        except requests.exceptions.RequestException:
            print(site + " - " + "Banned!")
|
991,745 | c8a37647ef5924b61c022e5beff51a78aa6cc1d4 | # This program support the convertion of object location from camera by rotating it in 3D environment
# It needs the coorporation of robot hand so control robot function will be applied
# Robot position will be changed in config file
from cv2 import sqrt
import kuka_communicate as robot
import configparser
from ast import literal_eval
import cicle_detection
import setup_camera as cam
import matplot_show as mat
import time
import fit_skspatial as fit
import geometry_calculation as cal
import numpy as np
import cam_rotation_eye_base as rot
import random
import math
import utils.angle_error as err
import find_hand_move_range as robot_range
import json
class eye_hand_cali():
    """Eye-to-hand calibration for a camera working with a KUKA robot.

    Constructing an instance connects to the robot controller and the camera
    and immediately runs the full calibration pipeline (main_function): sweep
    the tool along X and Y while detecting a chessboard target, estimate the
    camera rotation and per-axis error-correction polynomials, persist them to
    utils/configuration.cfg, then command the robot to the detected target.
    Requires live hardware; nothing here is usable offline.
    """
    def __init__(self, robot_type, number_steps, angle_config):
        # robot_type: config-section suffix (e.g. 'kr10').
        # number_steps: number of sample poses per axis sweep.
        # angle_config: fixed tool orientation angles used during the sweeps.
        self.robot_type = robot_type
        self.num_step = number_steps
        self.angle_config = angle_config
        self.pos_list_x = []        # camera detections recorded during the X sweep
        self.pos_list_y = []        # camera detections recorded during the Y sweep
        self.robo_pos_list_x = []   # commanded robot poses during the X sweep
        self.robo_pos_list_y = []   # commanded robot poses during the Y sweep
        self.camera_data = cam.setup_cam()
        self.conn = robot.connect_kuka()
        # The controller's first packet is the current pose as a CRLF-terminated
        # comma-separated string, e.g. b'x,y,z,...\r\n'.
        self.robot_origin_pos = self.conn.sock.recv(1024)
        self.robot_origin_pos = self.robot_origin_pos.replace(b'\r\n',b'')
        self.robot_origin_pos = [float(v) for v in self.robot_origin_pos.decode("utf-8").split(',')]
        self.get_config_data()
        # time.sleep(4)
        # NOTE: the whole calibration runs as a constructor side effect.
        self.main_function()
    def get_config_data(self):
        """(Re)load utils/configuration.cfg into self.config."""
        self.config = configparser.ConfigParser()
        self.config.read("utils/configuration.cfg", encoding="utf-8")
    def set_config_data(self, section_name, key, value):
        """Store key=value under section_name and rewrite the whole config file."""
        value = str(value)
        if not self.config.has_section(section_name):
            self.config.add_section(section_name)
        self.config.set(section_name, key, value)
        with open("utils/configuration.cfg", 'w') as configfile:
            self.config.write(configfile)
    def axis_movement(self, axis):
        """Sweep the tool along one axis ('x' or 'y') in num_step increments.

        Start/end poses come from the config; at each stop the commanded pose
        and the detected chessboard centroid are recorded.
        NOTE(review): the 'x' sweep varies pose index 1 and the 'y' sweep
        varies index 0 — presumably camera/robot axes are swapped; confirm
        against the rig before changing.
        """
        section_name = axis + '_axis_eye_' + self.robot_type
        start_pos = literal_eval(self.config[section_name]['start_pos'])
        end_pos = literal_eval(self.config[section_name]['end_pos'])
        if axis == 'x':
            print('xaxis_confirmation')
            step_range = (end_pos[1] - start_pos[1])/ self.num_step
            for i in range(self.num_step):
                # mul_value = random.randint(0, self.num_step)
                mul_value = i
                print(mul_value)
                new_pos = [start_pos[0],start_pos[1] + mul_value*step_range,start_pos[2]]
                self.robo_pos_list_x.append(new_pos)
                self.conn.send_binary([[new_pos[0],new_pos[1] ,new_pos[2], self.angle_config[0], self.angle_config[1], self.angle_config[2]]])
                self.conn.sock.recv(1024)
                #get object position
                centroid_pos = cicle_detection.chessboard_detection(self.camera_data,5, False)
                if len(centroid_pos) > 0:
                    self.pos_list_x.append(centroid_pos)
        elif axis == 'y':
            print('yaxis_confirmation')
            step_range = (end_pos[0] - start_pos[0])/ self.num_step
            for i in range(self.num_step):
                # mul_value = random.randint(0, self.num_step)
                mul_value = i
                print(mul_value)
                new_pos = [start_pos[0] + mul_value*step_range,start_pos[1],start_pos[2]]
                self.robo_pos_list_y.append(new_pos)
                self.conn.send_binary([[new_pos[0],new_pos[1],new_pos[2], self.angle_config[0], self.angle_config[1], self.angle_config[2]]])
                self.conn.sock.recv(1024)
                #get object position
                centroid_pos = cicle_detection.chessboard_detection(self.camera_data,5, False)
                if len(centroid_pos) > 0:
                    self.pos_list_y.append(centroid_pos)
    def eyes_xy_to_tool(self, rz, rx, ry):
        """Estimate the in-plane offset between the camera eye and the tool.

        Observes the target at two tool orientations (A rotated 180 degrees
        apart), rotates both detections by the estimated camera rotation, and
        averages them: the midpoint is the eye-to-tool offset.
        Returns [abs(y), abs(x)] scaled to millimetres.
        """
        self.conn.send_binary([[self.robot_origin_pos[0],
                                self.robot_origin_pos[1],
                                self.robot_origin_pos[2],
                                180,0,180]])
        self.conn.sock.recv(1024)
        centroid_pos_rotation0 = cicle_detection.chessboard_detection(self.camera_data,100, False)
        centroid_pos_rotation0 = centroid_pos_rotation0*cal.Rz(math.radians(rz))*cal.Rx(math.radians(rx))*cal.Ry(math.radians(ry))
        self.conn.send_binary([[self.robot_origin_pos[0],
                                self.robot_origin_pos[1],
                                self.robot_origin_pos[2],
                                0, 0, 180]])
        self.conn.sock.recv(1024)
        centroid_pos_rotation1 = cicle_detection.chessboard_detection(self.camera_data,100, False)
        centroid_pos_rotation1 = centroid_pos_rotation1*cal.Rz(math.radians(rz))*cal.Rx(math.radians(rx))*cal.Ry(math.radians(ry))
        eyes2tool = (np.array(centroid_pos_rotation0) + np.array(centroid_pos_rotation1))/2
        # Detection result shape apparently differs between camera models.
        if len(eyes2tool) == 1:
            eyes2tool = np.array(eyes2tool[0])*1000 # for D435
        else:
            eyes2tool = np.array(eyes2tool)*1000 # for L515
        print(eyes2tool)
        return [abs(eyes2tool[1]), abs(eyes2tool[0])]
    def eyes_z_to_tool(self, rz, rx, ry, eyes2tool_xy_plane_dist):
        """Estimate the Z offset between the camera eye and the tool.

        Measures the target distance at three tool C angles (180/170/160 deg)
        and feeds the ratio of consecutive distance changes, together with the
        known XY-plane offset, into rot.find_z_eyes_tool.
        """
        self.conn.send_binary([[self.robot_origin_pos[0], self.robot_origin_pos[1], self.robot_origin_pos[2], 180,0,180]])
        self.conn.sock.recv(1024)
        centroid_pos_rotation0 = cicle_detection.chessboard_detection(self.camera_data,100, False)
        centroid_pos_rotation0 = (centroid_pos_rotation0*cal.Rz(math.radians(rz))*cal.Rx(math.radians(rx))*cal.Ry(math.radians(ry)))[0].tolist()
        centroid_pos_rotation0 = cal.get_distance_two_point_3d([0,0,0],centroid_pos_rotation0[0])
        self.conn.send_binary([[self.robot_origin_pos[0],
                                self.robot_origin_pos[1],
                                self.robot_origin_pos[2],
                                180,0,170]])
        self.conn.sock.recv(1024)
        centroid_pos_rotation1 = cicle_detection.chessboard_detection(self.camera_data,100, False)
        centroid_pos_rotation1 = (centroid_pos_rotation1*cal.Rz(math.radians(rz))*cal.Rx(math.radians(rx))*cal.Ry(math.radians(ry)))[0].tolist()
        centroid_pos_rotation1 = cal.get_distance_two_point_3d([0,0,0],centroid_pos_rotation1[0])
        self.conn.send_binary([[self.robot_origin_pos[0],
                                self.robot_origin_pos[1],
                                self.robot_origin_pos[2],
                                180,0,160]])
        self.conn.sock.recv(1024)
        centroid_pos_rotation2 = cicle_detection.chessboard_detection(self.camera_data,100, False)
        centroid_pos_rotation2 = (centroid_pos_rotation2*cal.Rz(math.radians(rz))*cal.Rx(math.radians(rx))*cal.Ry(math.radians(ry)))[0].tolist()
        centroid_pos_rotation2 = cal.get_distance_two_point_3d([0,0,0],centroid_pos_rotation2[0])
        eyes2tool = rot.find_z_eyes_tool((centroid_pos_rotation2-centroid_pos_rotation1)/(centroid_pos_rotation1-centroid_pos_rotation0), eyes2tool_xy_plane_dist)
        return eyes2tool
    def mapping_eyes_robot(self,rz, rx, ry,eyes_tool_x, eyes_tool_y,eyes_tool_z, err_cubic_equation_x, err_cubic_equation_y):
        """Detect the target, correct it with the fitted error polynomials,
        convert it into world coordinates and command the robot there.

        Returns the target position in world coordinates.
        """
        # self.robot_origin_pos = [400,-1176,1470]
        self.conn.send_binary([[self.robot_origin_pos[0],
                                self.robot_origin_pos[1],
                                self.robot_origin_pos[2],
                                180,0,180]])
        self.conn.sock.recv(1024)
        centroid_pos_rotation1 = cicle_detection.chessboard_detection(self.camera_data,100, False)
        centroid_pos_rotation1 = centroid_pos_rotation1*cal.Rz(math.radians(rz))*cal.Rx(math.radians(rx))*cal.Ry(math.radians(ry))
        if len(centroid_pos_rotation1) == 1:
            centroid_pos_rotation1 = np.array(centroid_pos_rotation1[0])*1000 # for D435
        else:
            centroid_pos_rotation1 = np.array(centroid_pos_rotation1)*1000 # for L515
        centroid_pos_rotation1 = centroid_pos_rotation1[0]
        # Decrease error
        print('Before centroid_pos_rotation1 = ', centroid_pos_rotation1)
        # print(err.objective_5(centroid_pos_rotation1[0], err_cubic_equation_x[0], err_cubic_equation_x[1],
        #                       err_cubic_equation_x[2], err_cubic_equation_x[3], err_cubic_equation_x[4],
        #                       err_cubic_equation_x[5]))
        # centroid_pos_rotation1[0] = centroid_pos_rotation1[0] - err.objective_5(centroid_pos_rotation1[0], err_cubic_equation_x[0],
        #                                                                         err_cubic_equation_x[1], err_cubic_equation_x[2],
        #                                                                         err_cubic_equation_x[3], err_cubic_equation_x[4],
        #                                                                         err_cubic_equation_x[5])
        # centroid_pos_rotation1[1] = centroid_pos_rotation1[1] - err.objective_5(centroid_pos_rotation1[1], err_cubic_equation_y[0],
        #                                                                         err_cubic_equation_y[1], err_cubic_equation_y[2],
        #                                                                         err_cubic_equation_y[3], err_cubic_equation_y[4],
        #                                                                         err_cubic_equation_y[5])
        print(err.objective_3(centroid_pos_rotation1[0], err_cubic_equation_x[0], err_cubic_equation_x[1],
                              err_cubic_equation_x[2], err_cubic_equation_x[3]))
        centroid_pos_rotation1[0] = centroid_pos_rotation1[0] - err.objective_3(centroid_pos_rotation1[0], err_cubic_equation_x[0],
                                                                                err_cubic_equation_x[1], err_cubic_equation_x[2],
                                                                                err_cubic_equation_x[3])
        centroid_pos_rotation1[1] = centroid_pos_rotation1[1] - err.objective_3(centroid_pos_rotation1[1], err_cubic_equation_y[0],
                                                                                err_cubic_equation_y[1], err_cubic_equation_y[2],
                                                                                err_cubic_equation_y[3])
        print('After centroid_pos_rotation1 = ', centroid_pos_rotation1)
        calibration_value = [eyes_tool_x, eyes_tool_y, eyes_tool_z] # trial run
        # calibration_value = [-273.88, 51.17, 400] # real environment
        point_in_world = np.array(calibration_value) + np.array([self.robot_origin_pos[0],
                                                                 self.robot_origin_pos[1],
                                                                 self.robot_origin_pos[2]])
        # NOTE(review): camera x/y are swapped when mapped into world axes —
        # presumably intentional (matches axis_movement); confirm on the rig.
        point_in_world = [point_in_world[0] - centroid_pos_rotation1[1],
                          point_in_world[1] - centroid_pos_rotation1[0],
                          point_in_world[2] - centroid_pos_rotation1[2]]# code may change here
        self.conn.send_binary([[point_in_world[0],
                                point_in_world[1],
                                point_in_world[2],
                                180,0,180]])
        return point_in_world
    def main_function(self):
        """Full pipeline: sweep both axes, fit the camera rotation and the
        degree-5 error polynomials, persist everything to the config, then
        read the config back and drive the robot to the detected target."""
        section_name = 'mapping_' + self.robot_type
        # self.set_config_data(section_name, 'robot_origin_pos',self.robot_origin_pos)
        # limit_value = json.loads(self.config[section_name]['limit_value'])
        # step_range = literal_eval(self.config[section_name]['step_range'])
        # robot_range_cl = robot_range.find_robot_range(self.robot_origin_pos, self.camera_data, limit_value, step_range, self.conn)
        # robot_range_data = robot_range_cl.find_range()
        # config_value = self.robot_origin_pos.copy()
        # config_value[0] = robot_range_data[0]
        # self.set_config_data('y_axis_eye_' + self.robot_type, 'start_pos',config_value)
        # config_value = self.robot_origin_pos.copy()
        # config_value[0] = robot_range_data[1]
        # self.set_config_data('y_axis_eye_' + self.robot_type, 'end_pos',config_value)
        # config_value = self.robot_origin_pos.copy()
        # config_value[1] = robot_range_data[2]
        # self.set_config_data('x_axis_eye_' + self.robot_type, 'start_pos',config_value)
        # config_value = self.robot_origin_pos.copy()
        # config_value[1] = robot_range_data[3]
        # self.set_config_data('x_axis_eye_' + self.robot_type, 'end_pos',config_value)
        self.axis_movement('x')
        self.axis_movement('y')
        rz, rx, ry = rot.find_camera_rotation(self.pos_list_x, self.pos_list_y)
        self.set_config_data(section_name, 'rz',rz)
        self.set_config_data(section_name, 'rx',rx)
        self.set_config_data(section_name, 'ry',ry)
        # Fit and persist the degree-5 error polynomial for the x axis.
        err_cubic_equation_x = err.find_error_equation_5(rz, rx, ry, self.pos_list_x, 0, self.robo_pos_list_x, 1)
        self.set_config_data(section_name, 'xa',err_cubic_equation_x[0])
        self.set_config_data(section_name, 'xb',err_cubic_equation_x[1])
        self.set_config_data(section_name, 'xc',err_cubic_equation_x[2])
        self.set_config_data(section_name, 'xd',err_cubic_equation_x[3])
        self.set_config_data(section_name, 'xe',err_cubic_equation_x[4])
        self.set_config_data(section_name, 'xf',err_cubic_equation_x[5])
        # Same for the y axis.
        err_cubic_equation_y = err.find_error_equation_5(rz, rx, ry, self.pos_list_y, 1, self.robo_pos_list_y, 0)
        self.set_config_data(section_name, 'ya',err_cubic_equation_y[0])
        self.set_config_data(section_name, 'yb',err_cubic_equation_y[1])
        self.set_config_data(section_name, 'yc',err_cubic_equation_y[2])
        self.set_config_data(section_name, 'yd',err_cubic_equation_y[3])
        self.set_config_data(section_name, 'ye',err_cubic_equation_y[4])
        self.set_config_data(section_name, 'yf',err_cubic_equation_y[5])
        # err_cubic_equation_x = err.find_error_equation_3(rz, rx, ry, self.pos_list_x, 0, self.robo_pos_list_x, 1)
        # self.set_config_data(section_name, 'xa',err_cubic_equation_x[0])
        # self.set_config_data(section_name, 'xb',err_cubic_equation_x[1])
        # self.set_config_data(section_name, 'xc',err_cubic_equation_x[2])
        # self.set_config_data(section_name, 'xd',err_cubic_equation_x[3])
        # err_cubic_equation_y = err.find_error_equation_3(rz, rx, ry, self.pos_list_y, 1, self.robo_pos_list_y, 0)
        # self.set_config_data(section_name, 'ya',err_cubic_equation_y[0])
        # self.set_config_data(section_name, 'yb',err_cubic_equation_y[1])
        # self.set_config_data(section_name, 'yc',err_cubic_equation_y[2])
        # self.set_config_data(section_name, 'yd',err_cubic_equation_y[3])
        # eyes_tool_x, eyes_tool_y = self.eyes_xy_to_tool(rz, rx, ry)
        # eyes2tool_xy_plane_dist = math.sqrt(pow(eyes_tool_x,2) + pow(eyes_tool_y,2))
        # eyes_tool_z = self.eyes_z_to_tool(rz, rx, ry, eyes2tool_xy_plane_dist)
        # self.set_config_data(section_name, 'eyes_tool_x',eyes_tool_x)
        # self.set_config_data(section_name, 'eyes_tool_y',eyes_tool_y)
        # self.set_config_data(section_name, 'eyes_tool_z',eyes_tool_z)
        # Read data from config
        err_cubic_equation_x = [literal_eval(self.config[section_name]['xa']),
                                literal_eval(self.config[section_name]['xb']),
                                literal_eval(self.config[section_name]['xc']),
                                literal_eval(self.config[section_name]['xd']),
                                literal_eval(self.config[section_name]['xe']),
                                literal_eval(self.config[section_name]['xf'])]
        err_cubic_equation_y = [literal_eval(self.config[section_name]['ya']),
                                literal_eval(self.config[section_name]['yb']),
                                literal_eval(self.config[section_name]['yc']),
                                literal_eval(self.config[section_name]['yd']),
                                literal_eval(self.config[section_name]['ye']),
                                literal_eval(self.config[section_name]['yf'])]
        rz = literal_eval(self.config[section_name]['rz'])
        rx = literal_eval(self.config[section_name]['rx'])
        ry = literal_eval(self.config[section_name]['ry'])
        eyes_tool_x = literal_eval(self.config[section_name]['eyes_tool_x'])
        eyes_tool_y = literal_eval(self.config[section_name]['eyes_tool_y'])
        eyes_tool_z = literal_eval(self.config[section_name]['eyes_tool_z'])
        self.mapping_eyes_robot(rz, rx, ry,eyes_tool_x, eyes_tool_y,eyes_tool_z, err_cubic_equation_x, err_cubic_equation_y)
        print(rz, rx, ry)
if __name__ == '__main__':
    # Guarded so importing this module does not start moving the robot:
    # constructing eye_hand_cali runs the whole calibration as a side effect.
    # 'kr10' robot, 30 sampling steps per axis, tool angles [0, 90, 0].
    run = eye_hand_cali('kr10', 30, [0, 90, 0])
# rz, rx, ry = [-9.065, -1.912, -0.894]
# eyes_tool_x, eyes_tool_y, eyes_tool_z = [75.78, -180.46, -20.82]
# def mapping_eyes_robot(rz, rx, ry,eyes_tool_x, eyes_tool_y,eyes_tool_z):
# robot_origin_pos = [1170,0,1470]
# centroid_pos_rotation1 = cicle_detection.chessboard_detection(cam.setup_cam())
# centroid_pos_rotation1 = centroid_pos_rotation1*cal.Rz(math.radians(rz))*cal.Rx(math.radians(rx))*cal.Ry(math.radians(ry))
# if len(centroid_pos_rotation1) == 1:
# centroid_pos_rotation1 = np.array(centroid_pos_rotation1[0])*1000 # for D435
# else:
# centroid_pos_rotation1 = np.array(centroid_pos_rotation1)*1000 # for L515
# # calibration_value = [eyes_tool_x, -eyes_tool_y, eyes_tool_z] # trial run
# # calibration_value = [-273.88, 51.17, 400] # real environment
# # point_in_world = np.array(calibration_value) + np.array([robot_origin_pos[0],
# # robot_origin_pos[1],
# # robot_origin_pos[2]])
# centroid_pos_rotation1 = centroid_pos_rotation1[0]
# point_in_world = [robot_origin_pos[0] - eyes_tool_x - centroid_pos_rotation1[1],
# robot_origin_pos[1] - eyes_tool_y - centroid_pos_rotation1[0],
# robot_origin_pos[2] - eyes_tool_z - centroid_pos_rotation1[2]]# code may change here
# return point_in_world
# mapping_eyes_robot(rz, rx, ry,eyes_tool_x, eyes_tool_y,eyes_tool_z) |
991,746 | f4e7902ca2dffd81714c0d57c261d76223c6db2a | #!/bin/python3
import math
import os
import random
import re
import sys
# All 8 magic squares of order 3 are symmetries (rotations/reflections) of
# this single seed square; they are generated into `squares` further below.
squares = []
seed = (
    (8, 1, 6),
    (3, 5, 7),
    (4, 9, 2)
)
def reflect(s):
    '''
    Returns a new square that is the reflected version of s.
    Reflects across the identity diagonal (transpose). Any sort of reflect
    works since we also rotate.
    >>> reflect(((1, 2, 3),
    ...          (4, 5, 6),
    ...          (7, 8, 9)))
    ((1, 4, 7), (2, 5, 8), (3, 6, 9))
    '''
    return tuple(zip(*s))
def rotate(s):
    '''
    Return a new square equal to s rotated 90 degrees clockwise.

    Reversing the row order and transposing performs the rotation.
    >>> rotate(((1, 2, 3),
    ...         (4, 5, 6),
    ...         (7, 8, 9)))
    ((7, 4, 1), (8, 5, 2), (9, 6, 3))
    '''
    flipped = list(s)
    flipped.reverse()
    return tuple(zip(*flipped))
# Build all 8 distinct order-3 magic squares: the four rotations of the seed,
# followed by the four rotations of its reflection.
for _ in range(4):
    seed = rotate(seed)
    squares.append(seed)
seed = reflect(seed)
for _ in range(4):
    seed = rotate(seed)
    squares.append(seed)
def squareDiff(a, b):
    '''
    Returns the sum of the absolute values of the element-wise differences
    between the 3x3 squares a and b (the "cost" of turning a into b).
    '''
    total = 0
    for i in range(3):
        for x, y in zip(a[i], b[i]):
            total += abs(x - y)
    return total
# Complete the formingMagicSquare function below.
def formingMagicSquare(s):
    '''
    Return the minimal total cost of converting s into a magic square.

    Inelegant but effective: all eight 3x3 magic squares are precomputed in
    `squares`, so just take the cheapest conversion among them.
    '''
    best = None
    for candidate in squares:
        cost = squareDiff(candidate, s)
        if best is None or cost < best:
            best = cost
    return best
if __name__ == '__main__':
    # HackerRank harness: read a 3x3 grid of ints from stdin (one row per
    # line) and write the minimal conversion cost to the OUTPUT_PATH file.
    fptr = open(os.environ['OUTPUT_PATH'], 'w')
    s = []
    for _ in range(3):
        s.append(list(map(int, input().rstrip().split())))
    result = formingMagicSquare(s)
    fptr.write(str(result) + '\n')
    fptr.close()
991,747 | 1a82b98af2dd3c9dc08ea2fff9cb710b93807407 | from __future__ import print_function
import argparse
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
import torch.optim as optim
from torch.optim.lr_scheduler import StepLR
from model_conv import Net
def train(model, device, train_loader, optimizer, epoch):
    """Run one training epoch over train_loader, logging the loss per batch.

    model is trained in place with NLL loss; epoch is only used for logging.
    """
    model.train()
    report_every = 1  # log every batch
    total_examples = len(train_loader.dataset)
    n_batches = len(train_loader)
    for step, (inputs, labels) in enumerate(train_loader):
        inputs = inputs.to(device)
        labels = labels.to(device)
        optimizer.zero_grad()
        preds = model(inputs)
        batch_loss = F.nll_loss(preds.float(), labels)
        batch_loss.backward()
        optimizer.step()
        if step % report_every == 0:
            print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
                epoch, step * len(inputs), total_examples,
                100. * step / n_batches, batch_loss.item()))
def test(model, device, test_loader):
    """Evaluate model on test_loader; print average NLL loss and accuracy.

    Runs under no_grad with the model in eval mode; mutates nothing.
    """
    model.eval()
    test_loss = 0
    correct = 0
    with torch.no_grad():
        for data, target in test_loader:
            data, target = data.to(device), target.to(device)
            output = model(data)
            # Accumulate the *summed* per-sample loss so dividing by the dataset
            # size below yields the true mean (the old code summed per-batch
            # means, skewing the average by a factor of batch_size); .item()
            # keeps a plain float instead of accumulating tensors.
            test_loss += F.nll_loss(output.float(), target, reduction='sum').item()
            pred = output.argmax(dim=1, keepdim=True)  # index of the max log-probability
            correct += pred.eq(target.view_as(pred)).sum().item()
    test_loss /= len(test_loader.dataset)
    print('\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.0f}%)\n'.format(
        test_loss, correct, len(test_loader.dataset),
        100. * correct / len(test_loader.dataset)))
def load_data(training_set=True, device="cuda"):
    """Load the flat-file dataset and return a list of (state, label) pairs.

    Each record is 1951 float64 values: 1950 features followed by one integer
    label. Rows are split 80/20 into training and evaluation sets.

    training_set: True for the first 80% of rows, False for the last 20%.
    device: torch device for the state tensors (default "cuda" preserves the
        original behavior; pass "cpu" to run without a GPU).
    """
    byte_data = np.fromfile('data\\dataset.dat', dtype='float')
    # np.fromfile returns an *element* count, not bytes: 1951 floats per row.
    num_values = len(byte_data)
    rows = int(num_values / 1951)
    chart_data = byte_data.reshape(rows, 1951)
    data = []
    if training_set:
        start_idx = 1  # NOTE(review): row 0 is skipped — confirm it is a header/sentinel row
        end_idx = int(rows*0.8) + 1
    else:
        start_idx = int(rows * 0.8) + 2  # NOTE(review): row int(rows*0.8)+1 is skipped too
        end_idx = rows
    for i in range(start_idx, end_idx):
        state = chart_data[i][:-1]
        state = torch.tensor(state, dtype=torch.float).unsqueeze(0).to(device)
        target = int(chart_data[i][-1])
        data.append((state, target))
    return data
def main():
    """Entry point: train the convolutional trader model for one epoch on the
    pre-generated dataset, evaluate it, and save a checkpoint."""
    # Training settings
    device = torch.device("cuda")  # NOTE(review): hard-coded CUDA; fails without a GPU
    train_kwargs = {'batch_size': 2000}
    test_kwargs = {'batch_size': 500}
    dataset1 = load_data(training_set=True)
    dataset2 = load_data(training_set=False)
    train_loader = torch.utils.data.DataLoader(dataset1, **train_kwargs)
    test_loader = torch.utils.data.DataLoader(dataset2, **test_kwargs)
    model = Net().to(device)
    optimizer = optim.Adadelta(model.parameters(), lr=1)
    # optimizer = optim.Adam(model.parameters(), lr=0.001)
    for epoch in range(1, 2):
        train(model, device, train_loader, optimizer, epoch)
        test(model, device, test_loader)
    torch.save(model.state_dict(), "checkpoints\\trader.pt")
if __name__ == '__main__':
    # Run training only when executed as a script.
    main()
|
class Solution:
    def calculate(self, s):
        """
        Evaluate a basic arithmetic expression of non-negative integers and
        the operators '+', '-', '*', '/' (integer division), possibly
        containing spaces.

        :type s: str
        :rtype: int
        """
        # Tokenize into alternating number strings and operator characters,
        # skipping blanks.
        tokens = []
        digits = ''
        for ch in s:
            if ch.isdigit():
                digits += ch
            elif ch != ' ':
                tokens.append(digits)
                tokens.append(ch)
                digits = ''
        tokens.append(digits)
        # First pass: collapse '*' and '/' (higher precedence) left to right.
        reduced = [tokens[0]]
        k = 1
        while k < len(tokens):
            op, rhs = tokens[k], tokens[k + 1]
            if op in '*/':
                lhs = int(reduced.pop())
                value = int(rhs)
                reduced.append(str(lhs * value if op == '*' else lhs // value))
            else:
                reduced.append(op)
                reduced.append(rhs)
            k += 2
        # Second pass: fold the remaining '+'/'-' chain left to right.
        total = int(reduced[0])
        for k in range(1, len(reduced), 2):
            operand = int(reduced[k + 1])
            total = total + operand if reduced[k] == '+' else total - operand
        return total
991,749 | 4f35855e0af79363297580f6f169029bc673a924 | #!/usr/bin/env python3
import datetime
from datetime import date
class S1_Utils():
    """Bookkeeping for Sentinel-1 tracks: frames per track, reference
    acquisition dates, and the 6-day acquisition calendar."""
    def __init__(self):
        tracks = [37, 52, 169, 65, 7, 50, 64, 49]
        # Number of frames covering the area of interest, per track.
        self.numberOfFrames = {37: 5, 52: 5, 169: 4, 65: 4,
                               7: 5, 50: 4, 64: 4, 49: 3}
        # Reference acquisition date -> track(s) observed on that date.
        ref_track = {
            date(2018, 2, 13): [37],
            date(2018, 2, 20): [52],
            date(2018, 2, 22): [169],
            date(2018, 2, 21): [65],
            date(2018, 2, 11): [7],
            date(2015, 6, 6): [50],
            date(2019, 5, 29): [64],
            date(2019, 5, 28): [49],
        }
        self.ref_track = ref_track
        # Invert the mapping: track -> its reference date (iterate tracks
        # first so the resulting dict keeps the track-list order).
        ref_date = {}
        for track in tracks:
            for ref_day, crsp_tracks in ref_track.items():
                if track in crsp_tracks:
                    ref_date[track] = ref_day
        self.ref_date = ref_date
        print("reference date for tracks: ", ref_date)
        # Default search window for track2date().
        self.startdate = date(2014, 5, 1)
        self.enddate = date(2024, 5, 1)
    def track2date(self, track, first=None, last=None):
        """Return every acquisition date for `track` within [first, last],
        spaced 6 days apart and phase-aligned with the track's reference
        date. Defaults to the instance-wide startdate/enddate window."""
        first = self.startdate if first is None else first
        last = self.enddate if last is None else last
        six_days = datetime.timedelta(days=6)
        cursor = first
        # Advance day by day to the first date aligned with the 6-day cycle.
        while cursor < last and (cursor - self.ref_date[track]).days % 6 != 0:
            cursor = cursor + datetime.timedelta(days=1)
        dates = [cursor]
        cursor = cursor + six_days
        while cursor <= last:
            dates.append(cursor)
            cursor = cursor + six_days
        return dates
if __name__=="__main__":
S1_Utils()
|
991,750 | 9a53466a6b0c79693ff202865119bb3992c2ee70 | import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Conv2D, MaxPooling2D, Flatten, Dropout
import matplotlib.pyplot as plt
import numpy as np
# Load MNIST and scale pixel values from [0, 255] to [0, 1].
mnist = keras.datasets.mnist
(img_train, lab_train), (img_test, lab_test) = mnist.load_data()
img_train, img_test = img_train / 255.0, img_test / 255.0
# Fully-connected classifier: flatten 28x28 images, three ReLU hidden layers,
# dropout for regularisation, softmax over the 10 digit classes.
model = Sequential([
    Flatten(input_shape=(28, 28)),
    Dense(512, activation='relu'),
    Dense(256, activation='relu'),
    Dense(128, activation='relu'),
    Dropout(0.2),
    Dense(10, activation='softmax')
])
model.compile(optimizer='adam',
loss='sparse_categorical_crossentropy',
metrics=['accuracy'])
#model.fit(img_train, lab_train, epochs=5)
model.summary()
print('Evaluating...')
# NOTE(review): fit() is commented out, so evaluate() reports the accuracy of
# untrained (random) weights — confirm this is intentional.
model.evaluate(img_test, lab_test)
# print('Saving...')
# model.save('saved_model_basic.h5') |
991,751 | bd07ec32a003fdd32798fedee0029a4fc67a82ec | from django.db import models
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.utils import timezone
from datetime import datetime
class Question(models.Model):
    """A quiz question with its answer, an illustration image, and the round
    it belongs to."""
    question=models.CharField(max_length=500)
    # NOTE(review): default=None on a CharField without null=True will fail at
    # the database level when no value is supplied — confirm intended.
    ans=models.CharField(max_length=500,default=None)
    image = models.ImageField(upload_to='images',default='/images/default.png')
    round=models.IntegerField(default=1)
    def __str__(self):
        # Admin/shell display uses the question text itself.
        return self.question
|
991,752 | c51318117177f7d64fda876d7a0fe64cfc7aa98b |
class GroupChat:
    """A group chat: the participating users plus the message history."""

    def __init__(self, users, messages):
        """
        :param users: Set of users in the chat
        :param messages: List of messages sorted by time, newest first and
            oldest last
        """
        self.users = users
        self.messages = messages
        # TODO: add a per-user index mapping each user to their messages.
def match(s1, s2):
    """Return the window of s1 (of length len(s2)) with the fewest character
    mismatches against s2; on ties the last such window wins.

    Returns "" when s2 is empty or longer than s1.

    Fixes over the previous version: the best-count variable name was
    misspelled at initialisation (NameError on first use), dict.key() was not
    a method (should have been min over the keys), and the slice
    s1[:-length] dropped the final window from the scan.
    """
    length = len(s2)
    if length == 0 or length > len(s1):
        return ""
    result_missmatch_count = length  # worst case: every position differs
    seqdict = {}  # mismatch count -> last window seen with that count
    for index in range(len(s1) - length + 1):
        window = s1[index:index + length]
        missmatch = sum(1 for a, b in zip(window, s2) if a != b)
        if missmatch <= result_missmatch_count:
            seqdict[missmatch] = window
            result_missmatch_count = missmatch
    return seqdict[min(seqdict)]
991,754 | bd4c8d2362c6fc2652831ecb410ca3f50454bd64 | # Python module file
"""Definitions of camera models used by the Curiosity Mars rover."""
|
991,755 | f3f7affcd0a9d7cb2c40911e86292285c3dcbead | from __future__ import print_function
import random
import textwrap
import sys
if sys.version_info < (3, 0):
print("此代码需要 Python 3.x 并使用 3.5.x版测试 ")
print("正在使用的Python版本是"
": %d.%d " % (sys.version_info[0], sys.version_info[1]))
print("退出")
sys.exit(1)
def show_theme_message(width=76):
    """Print the game theme banner, wrapped to `width` columns."""
    print_dotted_line()
    print_bold("兽人天灾 v0.0.5:")
    story = (
        "人类将要与他们的敌人——兽人展开争夺萨林卡大陆霸权的命运之战"
        "一位勇敢的骑士:阿克蒙德-李维离开了他的家乡前往西部兽人聚集地——黑暗之林."
        "在途中,他发现了一个小小的孤立的定居点.当他走近村庄时,他看到了五个小屋,他决定进入.."
    )
    print(textwrap.fill(story, width=width))
def show_game_mission():
    """Print the game mission and a warning tip."""
    for bold_text, plain_text in (
        ("任务:", "\t选择李维可以休息的小屋..."),
        ("TIP:", "保持警惕,周围有敌人!"),
    ):
        print_bold(bold_text)
        print(plain_text)
    print_dotted_line()
def reveal_occupants(idx, huts):
    """Print every hut's occupant, highlighting the chosen hut in ANSI bold.

    :param idx: 1-based number of the hut the player entered
    :param huts: list of occupant strings ('enemy'/'friend'/'unoccupied')
    """
    msg = ""
    print("展示小屋内部情况...")
    # enumerate(huts, 1) replaces the index-into-list loop.
    for hut_number, occupant in enumerate(huts, start=1):
        occupant_info = "<%d:%s>" % (hut_number, occupant)
        if hut_number == idx:
            occupant_info = "\033[1m" + occupant_info + "\033[0m"
        msg += occupant_info + " "
    print("\t" + msg)
    print_dotted_line()
def occupy_huts():
    """Return a list of 5 huts, each independently and uniformly assigned
    'enemy', 'friend' or 'unoccupied'."""
    occupants = ['enemy', 'friend', 'unoccupied']
    # One random draw per hut; same draw sequence as the old while-append loop.
    return [random.choice(occupants) for _ in range(5)]
def process_user_choice():
    """Prompt the player for a hut number and return it as an int.

    NOTE(review): non-numeric input raises ValueError and out-of-range values
    (outside 1-5) are not rejected here — the caller indexes huts with the
    result; consider validating.
    """
    msg = "\033[1m" + "选择一个小屋进去,请输入 (1-5): " + "\033[0m"
    user_choice = input("\n" + msg)
    idx = int(user_choice)
    return idx
def show_health(health_meter, bold=False):
    """Print the remaining hit points of the player and the enemy."""
    msg = ("Health: 阿克蒙德——李维: %d, 敌人: %d"
           % (health_meter['player'], health_meter['enemy']))
    if bold:
        print_bold(msg)
        return
    print(msg)
def reset_health_meter(health_meter):
    """Restore both combatants to their starting hit points (40 / 30)."""
    health_meter.update(player=40, enemy=30)
def print_bold(msg, end='\n'):
    """Print msg wrapped in ANSI bold escape codes."""
    print("".join(["\033[1m", msg, "\033[0m"]), end=end)
def print_dotted_line(width=72):
    """Print a dashed separator line of the given width."""
    separator = "-" * width
    print(separator)
def attack(health_meter):
    """Randomly wound one combatant by 10-15 points and print the new health.

    The weighted list gives the enemy a 60% chance of being the one hit.
    """
    hit_list = ['player'] * 4 + ['enemy'] * 6
    injured_unit = random.choice(hit_list)
    injury = random.randint(10, 15)
    remaining = health_meter[injured_unit] - injury
    health_meter[injured_unit] = max(remaining, 0)  # never below zero
    print("ATTACK! ", end='')
    show_health(health_meter)
def play_game(health_meter):
    """Run one round: pick a hut, then fight interactively if it holds an enemy.

    :param health_meter: dict with 'player'/'enemy' hit points; mutated in
        place by attack().
    """
    huts = occupy_huts()
    idx = process_user_choice()
    reveal_occupants(idx, huts)
    if huts[idx - 1] != 'enemy':
        print_bold("恭喜! 你赢了!!!")
    else:
        print_bold('发现敌人! ', end='')
        show_health(health_meter, bold=True)
        continue_attack = True
        # Loop that actually runs the combat if user wants to attack
        while continue_attack:
            # NOTE(review): the bool is overwritten by the raw input string —
            # any non-empty answer except 'n' keeps fighting, and just
            # pressing Enter ('') ends the loop silently.
            continue_attack = input(".......继续战斗? (y/n): ")
            if continue_attack == 'n':
                print_bold("敌我状态如下...")
                show_health(health_meter, bold=True)
                print_bold("GAME OVER!")
                break
            attack(health_meter)
            # Check if either one of the opponents is defeated
            if health_meter['enemy'] <= 0:
                print_bold("幸运的家伙,你赢的了胜利女神的光顾!")
                break
            if health_meter['player'] <= 0:
                print_bold("你输了,快逃,下次继续吧")
                break
def run_application():
    """Top-level loop: show intro and mission once, then replay on demand."""
    show_theme_message()
    keep_playing = 'y'
    health_meter = {}
    # NOTE(review): redundant — the meter is reset again at the top of the loop.
    reset_health_meter(health_meter)
    show_game_mission()
    while keep_playing == 'y':
        reset_health_meter(health_meter)
        play_game(health_meter)
        keep_playing = input("\nPlay again? Yes(y)/No(n): ")
if __name__ == '__main__':
run_application() |
991,756 | 0608fed03d27ed0c3a74dc0fc88ca831726eb1eb |
class Set(object):
def __init__(self):
self.screen_width = 800
self.screen_height = 600
self.bg_color = (0, 0, 0)
self.air_speed = 1.5
self.bullet_set_speed = 1
self.bullet_w = 15
self.bullet_h = 3
self.bullet_set_color = 255, 255, 255
|
991,757 | 0602eb51ede3d8b50f94e254d9f8f3440e414870 |
import pandas as pd
import geopandas as gpd
import networkx as nx
import matplotlib.pyplot as plt
from networkx.readwrite import adjacency_graph
from random import randint
from autographs.faces import HalfEdge
"""
Imports a seeded plan and makes a pretty map of it.
"""
# Load the district assignment into pandas.
df = pd.read_csv("./data/seeds/MS_2.csv")
# Load the shapefile and make a blank column.
shp = gpd.read_file("./data/MS_BLKGRP_DEM_ADJ/MS_BLKGRP_DEM_ADJ.shp")
shp["CD"] = [0 for i in range(len(shp))]
# Assign the congressional districts!
# NOTE(review): the first 9 chars of GEOID are stripped before matching —
# presumably a fixed prefix; confirm against the CSV format.
for index, row in df.iterrows():
    cd = row["CD"]
    geoid = row["GEOID"][9:]
    shp.loc[shp["GEOID"] == geoid, "CD"] = cd
# Save the shapefile.
shp.to_file("./map_shapefiles/seeded.shp")
# Find faces!
# Create a few color maps.
blue_green = {
    0: "light blue",
    1: "blue",
    2: "yellow green",
    3: "aqua"
}
pastel_hues = {
    0: "pink",
    1: "magenta",
    2: "dark blue",
    3: "cyan"
}
underwater = {
    0: "plum",
    1: "aquamarine",
    2: "blue",
    3: "coral"
}
color_maps = [blue_green, pastel_hues, underwater]
# Create a new HalfEdge data structure to find faces.
he = HalfEdge(f"./map_shapefiles/seeded.shp")
# Create a mapping from index -> geoid.
# NOTE(review): this rebinds `df` (previously the CSV) to the saved shapefile.
df = gpd.read_file(f"./map_shapefiles/seeded.shp")
# Count the number of big faces.
count = 0
# Pick a color scheme.
color_map = color_maps[randint(0, 2)]
for face in he.faces:
    # Get list(s) of the bounding centroids' coordinates.
    x = [edge.head.x for edge in face]
    y = [edge.head.y for edge in face]
    # For each edge in the face, get the congressional district assignment.
    cds = set()
    # Go over the edges and add representation stuff.
    for edge in face:
        head = edge.head
        tail = edge.tail
        index = head.label
        # Have to do a weird thing here because some subdivisions weren't given
        # a district assignment.
        cd = df["CD"][index]
        if cd > 0:
            cds.add(cd - 1)
        # Check whether the edge connects vertices in the same district.
        if cd > 0 and df["CD"][head.label] == df["CD"][tail.label]:
            color = color_map[df["CD"][head.label] - 1]
            plt.plot([head.x, tail.x], [head.y, tail.y], c=f"xkcd:{color}", linewidth=1)
    cds = list(cds)
    # If the face is interior to the component, color it. Also, color its edges.
    if len(cds) == 1:
        plt.fill(x, y, c=f"xkcd:{color_map[cds[0]]}", linewidth=2, edgecolor="w", closed=True)
    # Large single-district faces are filled again with a legend label so they
    # appear once in the legend.
    if len(face) > 10 and len(cds) == 1:
        count += 1
        cd = cds[0]
        plt.fill(x, y, c=f"xkcd:{color_map[cd]}", linewidth=2, edgecolor="w", closed=True, label=cd)
plt.axis("off")
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3, ncol=count, mode="expand", borderaxespad=0.)
# NOTE(review): dpi=100000 looks accidental; dpi is mostly irrelevant for SVG
# output anyway — confirm.
plt.savefig(f"./maps/seeded.svg", dpi=100000)
plt.close()
991,758 | 0480089f416e4e132030aee50a92014ee5df4b10 | #Mark Griffiths
#Date: 11/10/2019
#CSC121
#File Python Dictionaries and file writing
import mbox
def writeMboxReport(mydict):
    """Write one '<day> <count>' line per entry of mydict to report.txt.

    :param mydict: mapping of day name -> message count (insertion order is
        preserved in the output)
    """
    # Build all lines first, then write once; the context manager guarantees
    # the file is closed even if the write fails.
    lines = ["{} {}\n".format(day, count) for day, count in mydict.items()]
    with open('report.txt', 'w') as fhand:
        fhand.write("".join(lines))
def main():
    """Prompt for an mbox file, count 'From ' lines per weekday, print the
    tallies, and write them to report.txt via writeMboxReport."""
    days = {};
    fname = input('Enter the name of the file: ');
    try:
        fhand = open(fname);
    except:
        # NOTE(review): bare except also swallows KeyboardInterrupt; quit()
        # depends on the site module — sys.exit() would be more robust.
        print('File cannot be opened:', fname);
        quit();
    for line in fhand:
        clean_line = line.rstrip();
        # mbox message separators start with 'From ' (trailing space matters).
        if clean_line.startswith('From '):
            day = mbox.get_mbox_day(clean_line);
            days[day] = days.get(day, 0) + 1;
    for key in days:
        print(key, days[key]);
    writeMboxReport(days);
main();
991,759 | df9487f92f620731d74443a4255060d3fcc767a5 | # -*- coding: utf-8 -*-
import unittest, socket, logging, os
from ..util import TestClient, Msg
class TestChannel(TestClient, unittest.TestCase):
    """Wire-level tests for opening/closing channels over a TCP circuit.

    The numeric `cmd` values appear to be EPICS Channel Access opcodes
    (0 version exchange, 20/21 client user/host, 18 create channel, 22 access
    rights, 12 clear channel, 23 echo, 26/11 create-failure replies) —
    NOTE(review): confirm against the CA protocol specification.
    """
    # Credentials sent during circuit authentication.
    user = 'foo'
    host = socket.gethostname()
    def openCircuit(self, auth=True):
        'Open TCP connection and sent auth info'
        self.connectTCP()
        self.sendTCP([
            Msg(cmd=0, dcnt=13),
        ])
        if auth:
            self.sendTCP([
                Msg(cmd=20, body=self.user),
                Msg(cmd=21, body=self.host),
            ])
        rep = self.recvTCP()
        self.assertCAEqual(rep, cmd=0)
        self.assertGreater(rep.dcnt, 6) # server version must be post Base 3.12
        # Remember the negotiated server version for later assertions.
        self.sver = rep.dcnt
    def test_echo(self):
        # An echo request must be answered with an identical echo reply.
        self.openCircuit()
        self.sendTCP([
            Msg(cmd=23, dtype=12, dcnt=23, p1=5678, p2=9101112),
        ])
        rep = self.recvTCP()
        self.assertCAEqual(rep, cmd=23)
    def test_channel_create(self):
        'Create and close a channel'
        self.openCircuit()
        cid, sid = 156, None
        self.sendTCP([
            Msg(cmd=18, p1=cid, p2=13, body='ival'),
        ])
        # Expect an access-rights message first, then the create confirmation
        # carrying the server-assigned id in p2.
        rep = self.recvTCP()
        self.assertCAEqual(rep, cmd=22, p1=cid, p2=3)
        rep = self.recvTCP()
        self.assertCAEqual(rep, cmd=18, dtype=5, dcnt=1, p1=cid)
        sid = rep.p2
        self.sendTCP([
            Msg(cmd=12, p1=sid, p2=cid),
        ])
        rep = self.recvTCP()
        self.assertCAEqual(rep, cmd=12, p1=sid, p2=cid)
    def test_channel_bad(self):
        'Attempt to open a channel to a non-existant PV'
        self.openCircuit()
        cid = 156
        self.sendTCP([
            Msg(cmd=18, p1=cid, p2=13, body='invalid'),
        ])
        rep = self.recvTCP()
        # Newer servers reply with a dedicated create-failure message.
        if self.sver>=6:
            self.assertCAEqual(rep, cmd=26, p1=cid)
        else:
            self.assertCAEqual(rep, cmd=11, p1=cid)
if __name__=='__main__':
if 'LOGLEVEL' in os.environ:
logging.basicConfig(level=logging.getLevelName(os.environ['LOGLEVEL']))
unittest.main()
|
991,760 | 16c79d75cbecc43956a3a57519b3a12e549a0f5f | from .util import *
def get_connection(db_name):
conn = sqlite3.connect('data/{}.db'.format(db_name))
conn.text_factory = str
return conn
def query(firm, cmd):
    """Run SQL `cmd` against the firm's database and return a DataFrame.

    Column names come from the cursor description; if the result set is empty
    (so the column assignment fails) an empty DataFrame is returned instead.
    """
    with get_connection(firm) as conn:
        cur = conn.cursor()
        cur = cur.execute(cmd)
        field_names = [i[0] for i in cur.description]
        results = cur.fetchall()
        try:
            df = pd.DataFrame(list(results))
            df.columns = field_names
        except Exception:
            # Narrowed from a bare `except:` (which also swallowed
            # KeyboardInterrupt/SystemExit); typically hit when `results`
            # is empty and the column count does not match.
            #print(cmd)
            df = pd.DataFrame([])
    return df
def _fetch_posts(firm, TABLE_NAME, _min, _max, do_filter=False):
    """Return a DataFrame of posts from TABLE_NAME ('tweets' or 'twits')
    posted in the half-open interval [_min, _max); None for other table names.

    _min/_max are UTC timestamp strings produced by fetchPostGroup.
    NOTE(review): `do_filter` is accepted but never used here; the
    `with get_connection(...)` block opens a connection that query() never
    uses (query opens its own) — presumably vestigial.  The timestamps are
    interpolated into the SQL text; they are internally generated, but
    parameterising would be safer.
    """
    #print(firm, TABLE_NAME, _min, _max)
    if TABLE_NAME=='tweets':
        with get_connection(firm) as conn:
            df = query(firm, '''
                SELECT time, id, user, body, symbols, urls, mentioned_users, source, hashtags,
                       in_reply_to_status_id_str, in_reply_to_user_id_str, retweeted
                FROM {}
                WHERE time >= "{}" AND time < "{}"
            '''.format(TABLE_NAME, _min, _max))
    elif TABLE_NAME=='twits':
        with get_connection(firm) as conn:
            df = query(firm, '''
                SELECT time, id, user, body, symbols, urls, mentioned_users, source,
                       liked_users, sentiment
                FROM {}
                WHERE time >= "{}" AND time < "{}"
            '''.format(TABLE_NAME, _min, _max))
    else:
        return None
    return df
# trading hours: UTC 14:30~21:00 (Local 9:30~16:00)
def fetchPostGroup(firm, TABLE_NAME='tweets', date='2018-08-10', do_filter=False):
    """Fetch posts made outside trading hours for `date`: from that day's
    market close (16:00 America/New_York) to the next day's open (09:30),
    with both bounds converted to UTC timestamp strings for the SQL query."""
    _min = datetime.strptime(date, '%Y-%m-%d')
    _max = _min + relativedelta(days=1)
    timezone = pytz.timezone("America/New_York")
    # localize() (rather than tzinfo=) handles DST correctly for pytz zones.
    _min = timezone.localize(datetime(_min.year, _min.month, _min.day, 16, 0, 0))
    _max = timezone.localize(datetime(_max.year, _max.month, _max.day, 9, 30, 0))
    _min = _min.astimezone(pytz.utc).strftime("%Y-%m-%d %H:%M:%S+00:00")
    _max = _max.astimezone(pytz.utc).strftime("%Y-%m-%d %H:%M:%S+00:00")
    df = _fetch_posts(firm, TABLE_NAME, _min, _max, do_filter)
    return df
def get_text_info_per_day(ROOT, firm, start='2018-07-04', end='2018-11-30'):
    """For every day in [start, end], collect tweet/twit bodies for `firm`,
    write one CSV per day under ROOT/text_data/<firm>/, and finally dump a
    JSON summary mapping date -> post count."""
    text_data_info = {}
    dr = [str(d.date()) for d in pd.date_range(start=start, end=end)]
    for date in tqdm(dr, total=len(dr), desc=firm):
        data = []
        for TABLE_NAME in ['tweets', 'twits']:
            df = fetchPostGroup(firm, TABLE_NAME, date=date, do_filter=False)
            if len(df) > 0:
                # Normalise Windows line endings so each post stays one CSV row.
                df['body'] = [_.replace('\r', '\n') for _ in df['body']]
                data.append(df[['time', 'body']])
        if not data:
            # BUG FIX: pd.concat([]) raises ValueError ("No objects to
            # concatenate") on days with no posts in either table.
            continue
        data = pd.concat(data)
        if len(data) != 0:
            text_data_info[date] = len(data)
            ensure_dir('{}/text_data/{}'.format(ROOT, firm))
            data.sort_values('time').to_csv('{}/text_data/{}/{}.csv'.format(ROOT, firm, date), index=False)
    with open('{}/text_data/{}.json'.format(ROOT, firm), 'w') as f:
        json.dump(text_data_info, f)
if __name__ == '__main__':
for firm in companyList:
get_text_info_per_day(ROOT, firm, start=date_range['train'][0], end=date_range['test'][1]) |
991,761 | d62f6fed42223514d3f0707e98d1b90da4082b92 | #!/usr/bin/env python
"""
.. module:: convert
:synopsis: used to create info.txt and the <txname>.txt files.
"""
import sys
import os
import argparse
import types
# Command-line setup: optional paths to the smodels_utils and smodels packages.
argparser = argparse.ArgumentParser(description =
'create info.txt, txname.txt, twiki.txt and sms.py')
argparser.add_argument ('-utilsPath', '--utilsPath',
help = 'path to the package smodels_utils',\
type = str)
argparser.add_argument ('-smodelsPath', '--smodelsPath',
help = 'path to the package smodels_utils',\
type = str)
args = argparser.parse_args()
if args.utilsPath:
    utilsPath = args.utilsPath
else:
    # Fall back to the utilsPath module declared at the database root,
    # three directories above this script.
    databaseRoot = '../../../'
    sys.path.append(os.path.abspath(databaseRoot))
    from utilsPath import utilsPath
    utilsPath = databaseRoot + utilsPath
if args.smodelsPath:
    sys.path.append(os.path.abspath(args.smodelsPath))
sys.path.append(os.path.abspath(utilsPath))
from smodels_utils.dataPreparation.inputObjects import MetaInfoInput,DataSetInput
from smodels_utils.dataPreparation.databaseCreation import databaseCreator
from smodels_utils.dataPreparation.massPlaneObjects import x, y, z
#+++++++ global info block ++++++++++++++
info = MetaInfoInput('CMS-SUS-13-006')
info.url = 'https://twiki.cern.ch/twiki/bin/view/CMSPublic/PhysicsResultsSUS13006'
info.sqrts = 8
info.lumi = 19.5
info.prettyName = 'EW productions with decays to leptons, W, Z, and Higgs'
info.private = False
info.arxiv = 'http://arxiv.org/abs/1405.7570'
info.contact = ''
info.publication = 'http://link.springer.com/article/10.1140%2Fepjc%2Fs10052-014-3036-7'
info.comment = 'Using single lepton analysis EM'
info.supersedes = 'CMS-PAS-SUS-12-022'

# Exclusion-curve sources shared by every signal region:
# (source name, TGraph index inside the 'canvas' object of the ROOT file).
EXCLUSION_SOURCES = [
    ('obsExclusion', 9),
    ('obsExclusionM1', 7),
    ('obsExclusionP1', 6),
    ('expExclusion', 8),
    ('expExclusionM1', 4),
    ('expExclusionP1', 5),
]

# One entry per signal region, replacing four copy-pasted 20-line blocks:
# (dataId, observedN, expectedBG, bgError, upperLimit, expectedUpperLimit,
#  efficiency-map histogram name, dataUrl).  Note MET_100 points at the
# twiki page rather than the ROOT file, as in the original.
DATASETS = [
    ('MET_150', 3, 3.8, 1.0, '2.752E-01*fb', '2.698E-01*fb', 'h_eff_met150',
     'https://twiki.cern.ch/twiki/pub/CMSPublic/PhysicsResultsSUS13006/ss_eff_map.root'),
    ('MET_175', 3, 2.3, 0.6, '3.036E-01*fb', '2.469E-01*fb', 'h_eff_met175',
     'https://twiki.cern.ch/twiki/pub/CMSPublic/PhysicsResultsSUS13006/ss_eff_map.root'),
    ('MET_100', 7, 7.7, 1.9, '3.867E-01*fb', '3.883E-01*fb', 'h_eff_met100',
     'https://twiki.cern.ch/twiki/bin/view/CMSPublic/PhysicsResultsSUS13006'),
    ('MET_125', 6, 5.4, 1.3, '3.887E-01*fb', '3.359E-01*fb', 'h_eff_met125',
     'https://twiki.cern.ch/twiki/pub/CMSPublic/PhysicsResultsSUS13006/ss_eff_map.root'),
]

for (data_id, observed_n, expected_bg, bg_error, upper_limit,
     expected_upper_limit, eff_histogram, data_url) in DATASETS:
    #+++++++ dataset block ++++++++++++++
    dataset = DataSetInput(data_id)
    dataset.setInfo(dataType='efficiencyMap', dataId=data_id,
                    observedN=observed_n, expectedBG=expected_bg,
                    bgError=bg_error, upperLimit=upper_limit,
                    expectedUpperLimit=expected_upper_limit)
    #+++++++ next txName block ++++++++++++++
    TChiWH = dataset.addTxName('TChiWH')
    TChiWH.constraint = "[[['W']],[['higgs']]]"
    TChiWH.conditionDescription = None
    TChiWH.condition = None
    TChiWH.source = 'CMS'
    #+++++++ next mass plane block ++++++++++++++
    TChiWH_1 = TChiWH.addMassPlane([[x, y]]*2)
    TChiWH_1.figure = 'Fig. 16(right)'
    TChiWH_1.figureUrl = 'https://twiki.cern.ch/twiki/pub/CMSPublic/PhysicsResultsSUS13006/Fig16_exclusion_TChiWH.png'
    for source_name, graph_index in EXCLUSION_SOURCES:
        TChiWH_1.addSource(source_name, "orig/exclusion_TChiWH.root", "canvas",
                           objectName="interpret", index=graph_index)
    TChiWH_1.addSource('efficiencyMap', 'orig/singlelep_results.root', 'root',
                       objectName=eff_histogram, index=None, scale=0.01)
    TChiWH_1.dataUrl = data_url

databaseCreator.create()
|
991,762 | 5bc5a057bfc97833f54e29dca67b785b8ed4fa93 | def reverse(text):
return text[::-1]
def is_polidrom(text):
    """True when text reads the same forwards and backwards."""
    return text == text[::-1]
def filter(text):
    """Strip punctuation characters from text.

    Note: this deliberately shadows the builtin filter() in this script.
    """
    forbidden = ('.', '?', '!', ':', ';', '-', '—', '(', ')', '[', ']', '...', '’', '“', '”', '/', ',')
    return ''.join(ch for ch in text if ch not in forbidden)
# Read a line, lowercase it, strip punctuation, and report whether the
# remaining text is a palindrome.
usertext = input('Input text please:')
# usertext = 'Poop ana poop?'
usertext = usertext.lower()
usertext = filter(usertext)
if is_polidrom(usertext):
    print('Yes, {0} is a polidrome'.format(usertext))
else:
    print('No, {0} is not a polidrome'.format(usertext))
|
991,763 | 366b0c6c2f07942ca1c5efade5881e443dbcd71a | import Image, re, os, subprocess
from vdfparser import VDF
from subprocess import PIPE, Popen
import wikitools
from uploadFile import *
# Enable lossless PNG recompression only when the pngcrush Windows binary is
# present in the working directory.
if os.path.isfile('pngcrush.exe'):
    pngcrush = True
else:
    pngcrush = False
def get_file_list(folder):
    """ Returns list of .png files in folder. """
    return [name for name in os.listdir(folder) if name.endswith('.png')]
def crop_image(inputimage, folder, newimgname, xtop=0, ytop=64, xbottom=512, ybottom=448):
    """ Crops input image and writes to newimgname.

    Defaults crop a 512x384 region out of a 512x512 icon (64px trimmed from
    top and bottom).  Both input and output live inside `folder`; the result
    is always written as PNG.
    """
    img = Image.open(folder + os.sep + inputimage)
    img = img.crop((xtop, ytop, xbottom, ybottom))
    img.save(folder + os.sep + newimgname, 'PNG')
def get_item_from_inventory(allitems, imgname):
    """ Returns item with matching image_inventory image name, or None. """
    for key in allitems:
        item = allitems[key]
        if 'image_inventory' not in item:
            continue
        if os.path.split(item['image_inventory'])[1] == imgname:
            return item
    return None
def upload_item_icons(wikiUsername, wikiPassword, folder, wikiAddress = r'http://wiki.tf2.com/w/', wikiApi = r'http://wiki.tf2.com/w/api.php'):
    """ Crops and uploads item icons to wiki.

    For every '*_large.png' in `folder`: look up the schema item whose
    image_inventory basename matches, crop it to the standard icon region,
    optionally recompress with pngcrush, and upload with up to 5 retries.
    Unmatched files are appended to faileditemiconuploads.txt.

    Python 2 code (print statements); requires PIL, wikitools and pngcrush.
    """
    uploader = wikiUpload.wikiUploader(wikiUsername, wikiPassword, wikiAddress)
    wiki = wikitools.wiki.Wiki(wikiApi)
    wiki.login(wikiUsername, wikiPassword)
    schema = VDF()
    fails = False
    allitems = schema.get_items()
    for file in get_file_list(folder):
        # Strip the '_large' suffix to recover the schema image name.
        imgname = re.sub(r'_large\.png', '', file)
        print imgname
        item = get_item_from_inventory(allitems, imgname)
        if item is None:
            # NOTE(review): file handle from open() is never closed here.
            f = open('faileditemiconuploads.txt', 'ab').write(file + '\n')
            fails = True
            continue
        itemname = schema.get_localized_item_name(item['item_name']).encode('utf8')
        newfilename = r'Item icon {0}.png'.format(itemname)
        crop_image(file, folder, newfilename)
        if pngcrush:
            # Lossless recompression; strip colour-profile chunks the wiki
            # does not need, then swap the temp file into place.
            process = Popen(['pngcrush', '-rem', 'gAMA', '-rem', 'cHRM', '-rem', 'iCCP', '-rem', 'sRGB', '-brute', folder + os.sep + newfilename, folder + os.sep + newfilename + 'temp'], stdout = subprocess.PIPE).communicate()[0]
            os.remove(folder + os.sep + newfilename)
            os.rename(folder + os.sep + newfilename + 'temp', folder + os.sep + newfilename)
        success = False
        n = 0
        # Retry the upload up to 5 times; NOTE(review): the bare except hides
        # the real failure reason.
        while n < 5 and not success:
            try:
                uploader.upload(folder + os.sep + newfilename, 'File:' + newfilename, 'Uploaded new TF2B icon', '', overwrite=True)
                success = True
            except:
                n += 1
        if not success:
            print 'Could not upload', newfilename
    if fails:
        print 'Some files could not be uploaded. Please see faileditemiconuploads.txt'
upload_item_icons('wiki_username', 'wiki_password', r'image_folder_path') |
991,764 | 1b96a71fc924744fe72c61dfba74b2643d9cc090 | import sqlite3
from Bio import SeqIO
import random
import re
from jsonschema import validate
class CodonDatabase:
def __init__(self,database=None,default_organism=None,rare_cutoff=None):
if database != None:
self.conn = sqlite3.connect(database)
else:
self.conn = sqlite3.connect(':memory:')
self.default_organism=default_organism
if rare_cutoff is None:
self.rare_cutoff = .1
else:
self.rare_cutoff=rare_cutoff
self.build_database()
codon_table_validator = {
"$schema": "http://json-schema.org/draft-07/schema#",
#"$id": "http://example.com/product.schema.json",
"title": "Codon Table Schema",
"description": "A JSON validator schema for Codon Tables",
"type": "object",
"properties": {
"id": {"description": "Unique identifier for the table. Usually a GenBank ID.",
"type": "string"},
"organism": {"description": "Name of organism for the table. Human readable.", "type": "string"},
"description": {"description": "Description of the table or of the organism used to build the table. Human readable."},
"transl_table": {"description": "Translation table that the codon table uses", "type": "integer"},
"codons": {"description": "Each individual codon in the table", "type": "array", "items": {
"type": "object",
"description": "A single codon in a table",
"properties": {"codon": {"description": "Uppercase 3 letter non-degenerative DNA code", "pattern": "^[ATGC]*$", "type": "string",
"maxLength":3,"minLength":3},
"amino_acid": {"description": "The amino acid coded by the codon. Uppercase one latter code.", "pattern":"^[ARNDBCEQZGHILKMFPSTWYV*X]$",
"type":"string", "maxLength":1,"minLength":1},
"codon_count": {"description": "Count of codon occurrence in all genes in given organism or GenBank file.", "type": "integer"}
}
}}
}
}
def build_database(self):
# Database schema of the Codon Database
CREATE_DB ="""
CREATE TABLE IF NOT EXISTS 'amino_acids' (
'amino_acid' TEXT PRIMARY KEY
);
CREATE TABLE IF NOT EXISTS 'organisms' (
'id' TEXT PRIMARY KEY,
'description' TEXT NOT NULL,
'organism' TEXT NOT NULL
);
CREATE TABLE IF NOT EXISTS 'transl_tables' (
'transl_table' INT PRIMARY KEY
);
CREATE TABLE IF NOT EXISTS 'codons' (
'codon' TEXT NOT NULL,
'codon_count' INTEGER NOT NULL DEFAULT 0,
'amino_acid' TEXT NOT NULL,
'transl_table' INTEGER NOT NULL,
'organism' TEXT NOT NULL,
FOREIGN KEY('amino_acid') REFERENCES 'amino_acids'('amino_acid'),
FOREIGN KEY('transl_table') REFERENCES 'transl_tables'('transl_table'),
FOREIGN KEY('organism') REFERENCES 'organisms'('id')
PRIMARY KEY('codon', 'amino_acid', 'organism')
);
"""
for x in CREATE_DB.split(';'):
self.conn.cursor().execute(x)
amino_acids = 'ARNDBCEQZGHILKMFPSTWYV*X'
self.conn.cursor().executemany('INSERT OR IGNORE INTO amino_acids(amino_acid) VALUES(?)', [(x,) for x in amino_acids])
self.conn.commit()
# transl_tables are for 3NF standardization
def choose_codon(self,aa,organism_id=None,banned=[],gc_bias=None,rare_cutoff=None):
if organism_id is None:
organism_id=self.default_organism
if self.conn.cursor().execute('SELECT * FROM organisms WHERE id = ?',(organism_id,)).fetchone() == None:
raise ValueError('Organism_id {} not available'.format(organism_id))
if rare_cutoff is None:
rare_cutoff=self.rare_cutoff
# Check that the amino acid is valid
valid_aa = 'ARNDBCEQZGHILKMFPSTWYV*'
if aa not in valid_aa:
raise ValueError('{} not valid amino acid ({})'.format(aa,valid_aa))
# Check that all codons aren't banned
codons_per_aa = self.conn.cursor().execute('SELECT COUNT(*) FROM codons WHERE amino_acid = ? AND organism = ?', (aa,organism_id)).fetchone()[0]
if len(banned) >= codons_per_aa:
raise ValueError('Too many banned codons')
# 1. Build a table with a certain amino acid and a certain codon without any of the banned codons
# 2. Take cumulative probability and, using a random number input from python, select a codon
random_select = """
WITH codons_selective AS ( WITH codon_gc AS
(
SELECT codon, -- 2. Selects a table with desired amino acid, organism, and no codons from banned list. Also gives gc_content of each codon
amino_acid,
organism,
codon_count,
( ( Length(Replace(( Replace(codon, 'T', '') ), 'A', '')) ) * 1.0 ) / Length( codon) AS gc_content
FROM (
SELECT * -- 1. Removes rare codons that occur fewer than 10% of the time, though this is configurable.
FROM codons
WHERE ((
codon_count /
(
SELECT Sum(codon_count)*1.0
FROM codons
GROUP BY amino_acid)) > ?)) AS common_codons
WHERE amino_acid = ?
AND organism = ?
AND codon NOT IN {})
SELECT * -- 3. Selects for either high GC or low GC codons
FROM codon_gc {})
SELECT codon, -- Calculates cumulative probability of each codon based on occurrence, then chooses one, given a random number 0-1
Cast(
(
SELECT Sum(codon_count)
FROM codons_selective AS c
WHERE c.codon <= codons_selective.codon) AS FLOAT) /
(
SELECT Sum(codon_count)
FROM codons_selective) AS probability
FROM codons_selective
ORDER BY Abs(probability - Abs(Random() / 18446744073709551616 + 0.5)) -- Number comes from https://www.sqlite.org/lang_corefunc.html#random
ASC limit 1
""" # https://stackoverflow.com/questions/50534961/sqlite-how-to-select-rows-based-on-probability-via-integer-value
if gc_bias == 'GC':
gc_bias_string = 'WHERE gc_content = (SELECT Max(gc_content) FROM codon_gc)'
if gc_bias == 'AT':
gc_bias_string = 'WHERE gc_content = (SELECT Min(gc_content) FROM codon_gc)'
else:
gc_bias_string = ''
try:
# Select a codon. First, add in a variable number of ? for banned codons, then run above statement. Fetch first codon of first result
r = self.conn.cursor().execute(random_select.format('(' + ','.join(["?" for _ in banned]) + ')',gc_bias_string), (rare_cutoff,aa,organism_id,)+tuple(banned)).fetchone()[0]
except Exception as e:
# If there is a value error, it is likely that the organism is not in the database
raise ValueError('Organism or amino acid not found in database')
return r
def optimize_sequence(self, protein_seq, organism_id):
    """Back-translate a protein sequence into codon-optimized DNA.

    Each residue of *protein_seq* is replaced by a codon chosen for
    *organism_id* (falling back to the table's default organism).
    """
    if organism_id is None:
        organism_id = self.default_organism
    chosen = (self.choose_codon(residue, organism_id=organism_id)
              for residue in protein_seq)
    return ''.join(chosen)
def build_from_record(self,record):
    """Populate the codon-usage tables from one Biopython SeqRecord.

    Registers the organism, then for every feature carrying a
    'translation' qualifier counts each codon occurrence.  The record's
    id becomes the new default organism.  Commits once at the end.
    """
    c = self.conn.cursor()
    c.execute('INSERT OR IGNORE INTO organisms(id,description,organism) VALUES(?,?,?)', (record.id, record.description,record.annotations['organism']))
    # The most recently loaded record becomes the default organism.
    self.default_organism = record.id
    for feature in record.features:
        if 'translation' in feature.qualifiers:
            # '*' is appended so the stop codon is counted like any residue.
            translation = feature.qualifiers['translation'][0] + '*'
            seq = feature.extract(record).seq
            if 'transl_table' in feature.qualifiers:
                transl_table = int(feature.qualifiers['transl_table'][0])
            else:
                transl_table = 0
            c.execute('INSERT OR IGNORE INTO transl_tables(transl_table) VALUES(?)', (transl_table,))
            # Two passes: first ensure a row exists for every (aa, codon)
            # pair, then bump its occurrence counter.  Order matters.
            c.executemany('INSERT OR IGNORE INTO codons(amino_acid,codon,organism,transl_table) VALUES(?,?,?,?)', [(aa,str(seq[i*3:i*3+3]),record.id,transl_table) for i,aa in enumerate(translation)])
            c.executemany('UPDATE codons SET codon_count = codon_count + 1 WHERE amino_acid = ? AND codon = ? AND organism = ?', [(aa,str(seq[i*3:i*3+3]),record.id) for i,aa in enumerate(translation)])
    self.conn.commit()
def build_from_genbank(self, genbank_file):
    """Load every record of a GenBank file into the codon database.

    Returns self so calls can be chained.
    """
    records = SeqIO.parse(genbank_file, "genbank")
    for rec in records:
        self.build_from_record(rec)
    return self
def codon_to_aa(self, codon, organism_id=None):
    """Return the amino acid encoded by *codon* in the given organism."""
    if organism_id is None:
        organism_id = self.default_organism
    cursor = self.conn.cursor()
    row = cursor.execute(
        'SELECT amino_acid FROM codons WHERE codon = ? AND organism = ? ORDER BY codon_count DESC',
        (codon, organism_id),
    ).fetchone()
    return row[0]
def dump_codon_database(self):
    """Return the whole codon database as one SQL dump string."""
    # str.join accepts the iterdump() iterator directly; no interim list.
    return '\n'.join(self.conn.iterdump())
def available_codons(self, aa, banned, organism_id=None, rare_cutoff=None):
    """Return the usable codons for amino acid *aa*, excluding *banned*.

    Each entry is {'codon': str, 'weight': float}, where weight is the
    codon's share of all codons observed for *aa*; codons at or below
    *rare_cutoff* are dropped.
    """
    if organism_id is None:
        organism_id = self.default_organism
    if rare_cutoff is None:
        rare_cutoff = self.rare_cutoff
    # BUG FIX: the placeholders were wrapped in parentheses twice, yielding
    # "NOT IN ((?,?))" which SQLite treats as a row value (and "IN (())" for
    # an empty ban list is a syntax error).  Interpolate only the
    # comma-separated placeholders; SQLite accepts an empty IN () list.
    placeholders = ','.join('?' for _ in banned)
    query = """SELECT codon, 1.0*codon_count/(SELECT Sum(codon_count) FROM codons WHERE amino_acid = ?) as percent FROM codons WHERE amino_acid = ? AND organism = ? AND percent > ? AND codon NOT IN ({}) """.format(placeholders)
    rows = self.conn.cursor().execute(
        query, (aa, aa, organism_id, rare_cutoff) + tuple(banned)).fetchall()
    return [{'codon': row[0], 'weight': row[1]} for row in rows]
def export_table(self, organism_id=None):
    """Export one organism's codon table as a JSON-serializable dict.

    Raises ValueError when no organism id is available or the organism
    has no rows in the database.
    """
    if organism_id is None:  # idiom fix: was `== None`
        organism_id = self.default_organism
    if organism_id is None:
        raise ValueError('No organism_id given')
    rows = self.conn.cursor().execute("SELECT o.id,o.description,o.organism,c.codon,c.codon_count,c.amino_acid,c.transl_table FROM organisms as o JOIN codons as c on c.organism=o.id WHERE id = ?",(organism_id,)).fetchall()
    if not rows:
        # Previously an unknown organism surfaced as an opaque IndexError.
        raise ValueError('Organism not found in database')
    organism_json = {'id': rows[0][0], 'description': rows[0][1],
                     'organism': rows[0][2], 'transl_table': rows[0][6]}
    organism_json['codons'] = [
        {'codon': row[3], 'codon_count': row[4], 'amino_acid': row[5]}
        for row in rows
    ]
    return organism_json
def import_table(self, json_table):
    """Import a codon table dict (the shape produced by export_table).

    The table is validated against the instance's JSON schema before any
    rows are written; everything is committed in one transaction.
    """
    validate(instance=json_table, schema=self.codon_table_validator)
    transl_table = json_table['transl_table']
    organism = json_table['id']
    cursor = self.conn.cursor()
    cursor.execute('INSERT OR IGNORE INTO organisms(id,description,organism) VALUES(?,?,?)',
                   (organism, json_table['description'], json_table['organism']))
    cursor.execute('INSERT OR IGNORE INTO transl_tables(transl_table) VALUES(?)', (transl_table,))
    codon_rows = [
        (codon['amino_acid'], codon['codon'], codon['codon_count'], organism, transl_table)
        for codon in json_table['codons']
    ]
    cursor.executemany('INSERT OR IGNORE INTO codons(amino_acid,codon,codon_count,organism,transl_table) VALUES(?,?,?,?,?)', codon_rows)
    self.conn.commit()
class Triplet:
    """One codon position of a coding sequence.

    Tracks the current codon, the previous one, and every codon already
    tried (banned) so alternatives are never repeated.
    """
    def __init__(self, table, codon, organism_id=None, rare_cutoff=None):
        self.table = table
        self.organism_id = organism_id
        self.rare_cutoff = rare_cutoff
        self.codon = codon.upper()
        self.last_codon = self.codon
        # The amino acid lookup and the ban list use the codon as given.
        self.aa = self.table.codon_to_aa(codon, organism_id)
        self.banned = [codon]
    def change_codon(self, gc_bias=None):
        """Swap in a new synonymous codon and remember the old one."""
        replacement = self.table.choose_codon(self.aa, self.organism_id, self.banned, gc_bias=gc_bias)
        self.last_codon, self.codon = self.codon, replacement
        self.banned.append(replacement)
        return replacement
    def available_codons(self):
        """Codons still selectable for this position."""
        return self.table.available_codons(self.aa, self.banned, self.organism_id, self.rare_cutoff)
    def __str__(self):
        return self.codon
class CodingSequence:
    """A DNA coding sequence modelled as a list of Triplet objects."""
    def __init__(self, table, sequence, organism_id=None):
        # Split into consecutive 3-character codons; any trailing partial
        # codon is silently dropped (regex '...' matches whole triplets only).
        # TODO(review): no validation of the sequence alphabet yet.
        codons = re.findall('...', sequence)
        self.triplets = [Triplet(table, c, organism_id) for c in codons]
    def __str__(self):
        return ''.join(str(t) for t in self.triplets)
    def aa_seq(self):
        """The amino-acid sequence implied by the current codons."""
        return ''.join(t.aa for t in self.triplets)
|
991,765 | 6f908df70e20039599e28971e4dc9d268ac3126d | # Вариант №3
# 3. Дан файл. Определите сколько в нем букв (латинского алфавита),
# слов, строк. Выведите три найденных числа в формате, приведенном в примере.
# Пример входного файла:
# Beautiful is better than ugly.
# Explicit is better than implicit.
# Simple is better than complex.
# Complex is better than complicated.
# Пример выходного файла:
# Input file contains:
# 108 letters
# 20 words
# 4 lines
import re
# Ask the user where to read the text from and where to write the stats.
path_input = input('Путь к входному файлу: ')
path_output = input('Путь к выходному файлу: ')
def read_file(path):
    """Return the full contents of *path*, or None if it cannot be read."""
    try:
        with open(path) as handle:
            return handle.read()
    except IOError:
        return None
def count_letters(text):
    """Return '<n> letters', counting only Latin alphabet characters."""
    # One match per single Latin letter, upper- or lower-case.
    matches = re.findall(r"[a-zA-Z]", text)
    return '%d letters' % len(matches)
def count_words(text):
    """Return '<n> words', a word being any run of word characters."""
    return '%d words' % len(re.findall(r"\w+", text))
def count_lines(text):
    """Return '<n> lines' using universal newline splitting."""
    return '%d lines' % len(text.splitlines())
def print_in_file(path):
    """Write letter/word/line statistics of the input file to *path*.

    The text is read from the module-level ``path_input``; failures are
    reported on stdout, matching the original behaviour.
    """
    text = read_file(path_input)
    if text is None:
        print("Could not read file")
        return
    try:
        with open(path, 'w') as output_file:
            stats = [count_letters(text), count_words(text), count_lines(text)]
            for stat in stats:
                output_file.write("%s\n" % stat)
    except IOError:
        print("No such file or directory: %s" % path)
print_in_file(path_output)
|
991,766 | f1640625af5d38f7f6eaed57e81865546da1e253 | #!/usr/bin/env python
# -*-coding: utf-8 -*-
# out_file = 'data/logs/auth_mix.log'
# in_file_list = ['data/logs/auth_normal.log', 'data/logs/auth_hydra.log']
out_file = 'data/logs/jslab_logs/jslab_access_mix.log'
# Rotated access logs .1 through .14, in the same order as before.
# (Replaces fourteen hand-written entries with one comprehension.)
in_file_list = ['data/logs/jslab_logs/jslab_access.log.%d' % i for i in range(1, 15)]
if __name__ == '__main__':
    # Stream each input log into the combined output file.
    # shutil.copyfileobj copies in fixed-size chunks instead of loading each
    # whole file into memory (the previous readlines()/writelines() did).
    import shutil
    with open(out_file, 'w') as outf:
        for in_file in in_file_list:
            with open(in_file, 'r') as inf:
                shutil.copyfileobj(inf, outf)
|
991,767 | 240f5ea5159d6e8323073844f2fca785a59387d4 | import logging
import scrapy
import proxy
from ..items import BeautyItem
# print('start generate proxy ip')
# proxy.get_proxy()
# Module-level logger for this spider module.
logger = logging.getLogger(__name__)
class BeautySpider(scrapy.Spider):
    """Scrapy spider for picture categories on www.27270.com."""
    # NOTE: these two statements run at class-definition (import) time,
    # exactly as in the original code.
    print('start generate proxy ip')
    proxy.get_proxy()
    name = 'siwa'
    allowed_domains = ['www.27270.com']
    start_urls = [
        'http://www.27270.com/tag/384.html',
        'http://www.27270.com/tag/513.html',
        'http://www.27270.com/tag/782.html',
        'http://www.27270.com/tag/35.html',
        'http://www.27270.com/tag/288.html',
        'http://www.27270.com/tag/441.html',
    ]
    num = 1
    items_per_page = 0

    def parse(self, response):
        """Parse a category page: yield one request per picture, then the
        next category page."""
        logger.info('Parse function called on %s', response.url)
        if not response.xpath('//ul[@id="Tag_list"]'):
            # Page not rendered / blocked: retry it and stop here.
            # BUG FIX: previously execution fell through after the retry.
            yield scrapy.Request(url=response.url, dont_filter=True)
            return
        for li in response.xpath('//ul[@id="Tag_list"]'):
            titles = li.xpath('li/a/@title').extract()
            imgs = li.xpath('li/a/img/@src').extract()
            pages = li.xpath('li/a/@href').extract()
            logger.info('%s %s %s', titles, imgs, pages)
            for title, page in zip(titles, pages):
                self.logger.info('%s %s', title, page)
                yield scrapy.Request(page, callback=self.parse_beauty)
            self.logger.debug('callback "parse": got response %s', response)
        logger.info('we have done with this page %s', response.url)
        # BUG FIX: was `while total <= total:` (always true -> endless
        # duplicate requests) calling `response.xpathfo` (AttributeError).
        # Follow the "next page" link once; scrapy recurses via the callback.
        page_links = response.xpath('//div[@class="TagPage"]/ul/li/a/@href').extract()
        if len(page_links) >= 2:
            url = 'http://www.27270.com' + page_links[-2]
            self.logger.info('Crawl for next page now %s', url)
            yield scrapy.Request(url, callback=self.parse)

    def parse_beauty(self, response):
        """Parse one picture page: yield the image item, then the next page
        of the same gallery."""
        if not response.xpath('//div[@id="picBody"]'):
            # BUG FIX: return after scheduling the retry; the original fell
            # through and crashed on extract()[0] of the empty selector.
            yield scrapy.Request(url=response.url, dont_filter=True)
            return
        if response.status != 200:
            self.logger.info('request url %s returned error', response.url)
            self.logger.debug('callback "parse": got response %r' % response)
            return
        detail = response.xpath('//div[@id="picBody"]')
        title = detail.xpath('p/a/img/@alt').extract()[0]
        image_url = detail.xpath('p/a/img/@src').extract()[0]
        self.logger.info('%s %s', title, image_url)
        beauty = BeautyItem()
        beauty['image_urls'] = [image_url]
        pages = response.xpath('//ul[@class="articleV4Page l"]')
        total_pages = pages.xpath('li[@class="hide"]/@pageinfo').extract()[0]
        current_page = pages.xpath('li[@class="thisclass"]/a[@href="#"]/text()').extract()[0]
        next_page = pages.xpath('li[@id="nl"]/a/@href').extract()[0]
        beauty['title'] = title + current_page
        self.logger.info('Starting crawl for next page of %s', beauty['title'])
        yield beauty
        self.logger.debug('callback "parse": got response %r' % response)
        next_page_url = '/'.join(response.url.split('/')[0:-1]) + '/' + next_page
        self.logger.info('There are %s pages out there.scrath for next page %s', total_pages, next_page_url)
        yield scrapy.Request(next_page_url, callback=self.parse_beauty)
|
991,768 | 15384ba35b489540d5968a4493ecd20aa33d4ff5 | # encoding: UTF-8
import json
import tushare as ts
import numpy as np
import pandas as pd
from enum import Enum
from collections import deque
import traceback
import os
from datetime import datetime
from datetime import timedelta
from time import time, sleep
from pymongo import MongoClient, ASCENDING, DESCENDING
from vnpy.app.cta_strategy import (
BarData,
BarGenerator,
ArrayManager
)
from vnpy.trader.constant import Exchange
from vnpy.tools.convert_utils import string_to_datetime, time_to_str
from vnpy.tools.logger import Logger
# Load configuration from config.json located next to this module.
# config = open('C:/vnstudio/Lib/site-packages/vnpy/trade_stock_digu/config.json')
path_config_file = os.path.dirname(os.path.abspath(__file__))
# NOTE(review): this file handle is never closed; `with open(...)` would be safer.
config = open(path_config_file + '/config.json')
setting = json.load(config)
# MongoDB connection settings.
MONGO_HOST = setting['MONGO_HOST']
MONGO_PORT = setting['MONGO_PORT']
MONGO_USER = setting['MONGO_USER']
MONGO_PASSWORD = setting['MONGO_PASSWORD']
# Database and collection names used throughout the service.
STOCK_DB_NAME = setting['DB_NAME']
STOCK_DB_NAME_VNPY = setting['DB_NAME_VNPY']
CL_STOCK_K_DATA_VNPY = setting['CL_STOCK_K_DATA_VNPY']
CL_STOCK_DATE = setting['CL_STOCK_DATE']
CL_STOCK_BASIC = setting['CL_STOCK_BASIC']
CL_TRADE_CAL = setting['CL_TRADE_CAL']
DATA_BEGIN_DATE = setting['DATA_BEGIN_DATE']
CL_INDEXS = setting['CL_INDEXS']
CL_TOP10_HOLDER = setting['CL_TOP10_HOLDER']
CL_TOP10_FLOADHOLDER = setting['CL_TOP10_FLOADHOLDER']
CL_PLEDGE_STAT = setting['CL_PLEDGE_STAT']
CL_REPURCHASE = setting['CL_REPURCHASE']
CL_STK_HOLDERNUMBER = setting['CL_STK_HOLDERNUMBER']
CL_STK_HOLDERTRADE = setting['CL_STK_HOLDERTRADE']
CL_STK_POOL_DAILY = setting['CL_STK_POOL_DAILY']
CL_STK_POOL_CUR = setting['CL_STK_POOL_CUR']
CL_STK_TOP_LIST = setting['CL_STK_TOP_LIST']
# Shared module logger.
LOG = Logger().getlog()
class IndexCode:
    """Well-known market index codes in tushare style ('.' replaced by '_')."""
    INDEX_SH = '000001_SH'
    INDEX_SZ = '399001_SZ'
    INDEX_ZX = '399005_SZ'
    INDEX_CY = '399006_SZ'
    _NAMES_TO_VALUES = {
        "INDEX_SH": INDEX_SH,
        "INDEX_SZ": INDEX_SZ,
        "INDEX_ZX": INDEX_ZX,
        "INDEX_CY": INDEX_CY,
    }
    # Reverse lookup derived from the forward map, so both stay in sync.
    _VALUES_TO_NAMES = {value: name for name, value in _NAMES_TO_VALUES.items()}
class TsCodeType(Enum):
    """Kind of a ts_code: 1 = individual stock, 2 = market index."""
    STOCK = 1
    INDEX = 2
class DataServiceTushare(object):
    """Mirror raw tushare market data into MongoDB.

    ts_code convention: stored as '000001_SZ'; converted back to tushare's
    '000001.SZ' form only when calling the tushare API.
    """
    # Shared MongoDB connection and the two databases (raw data / vnpy bars).
    mc = MongoClient(MONGO_HOST, MONGO_PORT, username=MONGO_USER, password=MONGO_PASSWORD)  # Mongo connection
    # mc = MongoClient("mongodb://124.70.183.208:27017/", username='root', password='qiuqiu78')
    db = mc[STOCK_DB_NAME]            # raw stock database
    db_vnpy = mc[STOCK_DB_NAME_VNPY]  # vnpy bar database
    count_max_retry = 10              # max retries per tushare call
    second_sleep = 60                 # seconds to sleep between retries
    # Indexes that are also mirrored into the vnpy database.
    index_lst = ['000001_SH', '399001_SZ', '399005_SZ', '399006_SZ']

    def __init__(self):
        """Load the last-update date from the DB and connect to tushare."""
        cl_stock_db_date = self.db[CL_STOCK_DATE]
        db_date = cl_stock_db_date.find_one({}, {"_id": 0, "db_date": 1})
        self.db_date = DATA_BEGIN_DATE if db_date is None else db_date['db_date']
        self.date_now = time_to_str(datetime.now(), '%Y%m%d')
        # NOTE(review): hard-coded API token; consider moving it to config.json.
        ts.set_token('4c1d16a895e4c954adc8d2a436f2b21dd4ccc514f0c5a192edaa953b')
        self.pro = ts.pro_api()

    def get_stock_list(self):
        """Return every ts_code present in the stock-basics collection."""
        cl_stock_basic = self.db[CL_STOCK_BASIC]
        stock_basic_lst = list(cl_stock_basic.find(
            {}, {'_id': 0}).sort("ts_code", ASCENDING))
        return [d['ts_code'] for d in stock_basic_lst]

    def get_trade_date(self, trade_date):
        """Return the trading day nearest to *trade_date*, looking backward.

        If *trade_date* is itself a trading day it is returned unchanged,
        otherwise the most recent earlier trading day is returned.
        """
        cl_cal = self.db[CL_TRADE_CAL]
        trade_cal = list(cl_cal.find(
            {'cal_date': {"$lte": trade_date}, 'is_open': 1},
            {'_id': 0}).sort("cal_date"))
        return trade_cal[-1]['cal_date']

    def _is_in_vnpy_db(self, ts_code, update=True):
        """True when *ts_code* is one of the indexes mirrored into vnpy."""
        return ts_code in self.index_lst

    def _build_db_vnpy(self, d):
        """Upsert one daily bar dict *d* into the vnpy bar database."""
        if d['trade_date'] > self.db_date:
            d_db_vnpy = dict()
            if d['ts_code'][-2:] == 'SH':
                exchange = 'SSE'
            else:
                exchange = 'SZSE'
            d_db_vnpy['symbol'] = d['ts_code']
            d_db_vnpy['exchange'] = exchange
            d_db_vnpy['datetime'] = string_to_datetime(d['trade_date'])
            d_db_vnpy['interval'] = 'd'
            d_db_vnpy['volume'] = d['vol']
            d_db_vnpy['open_interest'] = d['pre_close']
            d_db_vnpy['open_price'] = d['open']
            d_db_vnpy['high_price'] = d['high']
            d_db_vnpy['low_price'] = d['low']
            d_db_vnpy['close_price'] = d['close']
            # BUG FIX: the filter keys were 'exchange:' / 'interval:' (stray
            # colons inside the strings) and 'datetime' was matched against
            # the raw date string while the stored value is a datetime, so
            # replace_one never matched an existing document.
            flt_vnpy = {'symbol': d['ts_code'], 'datetime': d_db_vnpy['datetime'],
                        'exchange': exchange, 'interval': 'd'}
            cl_stock_code_vnpy = self.db_vnpy[CL_STOCK_K_DATA_VNPY]
            cl_stock_code_vnpy.create_index([('symbol', ASCENDING), ('exchange', ASCENDING), ('interval', ASCENDING), ('datetime', ASCENDING)], unique=True)
            cl_stock_code_vnpy.replace_one(flt_vnpy, d_db_vnpy, upsert=True)

    def _init_k_data(self, code, k_data):
        """Initialise the per-code daily collection from scratch.

        All DB documents and lists are kept sorted by date ascending.
        Adds 5-day volume/amount ratios, moving averages and rolling
        high/low extremes to every row.
        """
        if len(k_data) != 0:
            last_5_vol = deque([0.0] * 5)
            last_5_amount = deque([0.0] * 5)
            k_data = k_data.sort_values(by='trade_date')
            cl_stock_code = self.db[code]
            cl_stock_code.create_index([('trade_date', ASCENDING)], unique=True)
            am = ArrayManager(size=600)
            for ix, row in k_data.iterrows():
                d = row.to_dict()
                d['ts_code'] = d['ts_code'].replace('.', '_')
                # Ratios only once a full 5-day window is available.
                if 0.0 not in last_5_vol:
                    d['vol_rate'] = d['vol'] / (sum(last_5_vol) / 5.0)
                    d['amount_rate'] = d['amount'] / (sum(last_5_amount) / 5.0)
                else:
                    d['vol_rate'] = 0.0
                    d['amount_rate'] = 0.0
                last_5_vol.popleft()
                last_5_amount.popleft()
                last_5_vol.append(d['vol'])
                last_5_amount.append(d['amount'])
                if self._is_in_vnpy_db(d['ts_code'], update=False):
                    # Mirror the bar into the vnpy database as well.
                    self._build_db_vnpy(d)
                if d['ts_code'][-3:] == '_SH':
                    exchange = Exchange.SSE
                    d['exchange'] = 'SSE'
                else:
                    exchange = Exchange.SZSE
                    d['exchange'] = 'SZSE'
                bar = BarData(
                    gateway_name='ctp', symbol=d['ts_code'],
                    exchange=exchange,
                    datetime=string_to_datetime(d['trade_date']))
                bar.symbol = d['ts_code']
                bar.volume = d['vol']
                bar.open_price = d['open']
                bar.high_price = d['high']
                bar.low_price = d['low']
                bar.close_price = d['close']
                am.update_bar(bar)
                try:
                    d['ma_5'] = am.sma(5)
                except Exception:  # was a bare except
                    traceback.print_exc()
                    LOG.error('************************')
                    LOG.error(d['ts_code'])
                    LOG.error(d['trade_date'])
                    LOG.error(bar)
                d['ma_10'] = am.sma(10)
                d['ma_20'] = am.sma(20)
                d['ma_30'] = am.sma(30)
                d['ma_60'] = am.sma(60)
                d['ma_120'] = am.sma(120)
                d['ma_250'] = am.sma(250)
                d['ma_500'] = am.sma(500)
                d['high_5'] = np.max(am.high[-5:])
                d['high_10'] = np.max(am.high[-10:])
                d['high_20'] = np.max(am.high[-20:])
                d['high_30'] = np.max(am.high[-30:])
                d['high_60'] = np.max(am.high[-60:])
                d['high_120'] = np.max(am.high[-120:])
                d['high_250'] = np.max(am.high[-250:])
                d['high_500'] = np.max(am.high[-500:])
                d['low_5'] = np.min(am.low[-5:])
                d['low_10'] = np.min(am.low[-10:])
                d['low_20'] = np.min(am.low[-20:])
                d['low_30'] = np.min(am.low[-30:])
                d['low_60'] = np.min(am.low[-60:])
                d['low_120'] = np.min(am.low[-120:])
                d['low_250'] = np.min(am.low[-250:])
                d['low_500'] = np.min(am.low[-500:])
                flt = {'trade_date': d['trade_date']}
                cl_stock_code.replace_one(flt, d, upsert=True)

    def _update_k_data(self, code, k_data):
        """Incrementally update one code's daily collection.

        1. Upsert the new daily bars.
        2. Re-read the latest 500+22 rows and recompute the derived
           columns for rows on/after the last update date (the update must
           therefore run at least every 22 days).
        """
        if len(k_data) != 0:
            k_data = k_data.sort_values(by='trade_date')
            cl_stock_code = self.db[code]
            cl_stock_code.create_index([('trade_date', ASCENDING)], unique=True)
            for ix, row in k_data.iterrows():
                d = row.to_dict()
                d['ts_code'] = d['ts_code'].replace('.', '_')
                if self._is_in_vnpy_db(d['ts_code'], update=True):
                    # Keep the vnpy mirror in sync.
                    self._build_db_vnpy(d)
                flt = {'trade_date': d['trade_date']}
                cl_stock_code.replace_one(flt, d, upsert=True)
            rec = list(cl_stock_code.find({}).sort("trade_date", DESCENDING).limit(522))
            rec.reverse()
            am = ArrayManager(size=600)
            last_5_vol = deque([0.0] * 5)
            last_5_amount = deque([0.0] * 5)
            for d in rec:
                if 0.0 not in last_5_vol:
                    d['vol_rate'] = d['vol'] / (sum(last_5_vol) / 5.0)
                    d['amount_rate'] = d['amount'] / (sum(last_5_amount) / 5.0)
                else:
                    d['vol_rate'] = 0.0
                    d['amount_rate'] = 0.0
                last_5_vol.popleft()
                last_5_amount.popleft()
                last_5_vol.append(d['vol'])
                last_5_amount.append(d['amount'])
                if d['ts_code'][-3:] == '_SH':
                    exchange = Exchange.SSE
                    d['exchange'] = 'SSE'
                else:
                    exchange = Exchange.SZSE
                    d['exchange'] = 'SZSE'
                bar = BarData(
                    gateway_name='ctp', symbol=d['ts_code'],
                    exchange=exchange,
                    datetime=string_to_datetime(d['trade_date']))
                bar.symbol = d['ts_code']
                bar.volume = d['vol']
                bar.open_price = d['open']
                bar.high_price = d['high']
                bar.low_price = d['low']
                bar.close_price = d['close']
                am.update_bar(bar)
                if d['trade_date'] >= self.db_date:
                    d['ma_5'] = am.sma(5)
                    d['ma_10'] = am.sma(10)
                    d['ma_20'] = am.sma(20)
                    d['ma_30'] = am.sma(30)
                    d['ma_60'] = am.sma(60)
                    d['ma_120'] = am.sma(120)
                    d['ma_250'] = am.sma(250)
                    d['ma_500'] = am.sma(500)
                    d['high_5'] = np.max(am.high[-5:])
                    d['high_10'] = np.max(am.high[-10:])
                    d['high_20'] = np.max(am.high[-20:])
                    d['high_30'] = np.max(am.high[-30:])
                    d['high_60'] = np.max(am.high[-60:])
                    d['high_120'] = np.max(am.high[-120:])
                    d['high_250'] = np.max(am.high[-250:])
                    d['high_500'] = np.max(am.high[-500:])
                    d['low_5'] = np.min(am.low[-5:])
                    d['low_10'] = np.min(am.low[-10:])
                    d['low_20'] = np.min(am.low[-20:])
                    d['low_30'] = np.min(am.low[-30:])
                    d['low_60'] = np.min(am.low[-60:])
                    d['low_120'] = np.min(am.low[-120:])
                    d['low_250'] = np.min(am.low[-250:])
                    d['low_500'] = np.min(am.low[-500:])
                    flt = {'trade_date': d['trade_date']}
                    cl_stock_code.replace_one(flt, d, upsert=True)

    def _build_trade_cal(self):
        """Build/refresh the trade-calendar collection from tushare."""
        LOG.info('构建交易日日历数据')
        df_trade_cal = self.pro.trade_cal(
            exchange='', start_date=DATA_BEGIN_DATE, end_date=self.date_now)
        cl_trade_cal = self.db[CL_TRADE_CAL]
        cl_trade_cal.create_index([('cal_date', ASCENDING)], unique=True)
        for ix, row in df_trade_cal.iterrows():
            d = row.to_dict()
            flt = {'cal_date': d['cal_date']}
            cl_trade_cal.replace_one(flt, d, upsert=True)
        LOG.info('构建交易日日历数据完成')

    def build_stock_data(self, update=True):
        """Full pipeline: calendar, basics, indexes, top list, then every
        stock's merged daily K-data; finally record the update date."""
        self._build_trade_cal()
        self._build_basic()
        self._build_index(update)
        self._build_top_list()
        LOG.info('构建股票日K线数据')
        start = time()
        cl_stock_basic = self.db[CL_STOCK_BASIC]
        stock_basic_lst = list(cl_stock_basic.find(
            {}, {'_id': 0}).sort("ts_code", ASCENDING))
        for d in stock_basic_lst:
            df_stock_k_data = self._get_daily_k_data_from_ts(d['ts_code'].replace('_', '.'), update)
            df_stock_daily_basic = self._get_daily_basic_from_ts(d['ts_code'].replace('_', '.'), update)
            if df_stock_k_data.empty is False and df_stock_daily_basic.empty is False:
                # Drop duplicated columns before merging on trade_date.
                del df_stock_daily_basic['ts_code']
                del df_stock_daily_basic['close']
                df_stock_info = pd.merge(df_stock_k_data, df_stock_daily_basic, on='trade_date')
                if d['list_date'] < self.date_now:
                    if update is True:
                        self._update_k_data(d['ts_code'], df_stock_info)
                    else:
                        self._init_k_data(d['ts_code'], df_stock_info)
        # Persist the new database update date.
        cl_stock_db_date = self.db[CL_STOCK_DATE]
        db_date = {'db_date': self.date_now}
        flt_date = {'db_date': self.db_date}
        cl_stock_db_date.replace_one(flt_date, db_date, upsert=True)
        end = time()
        cost = (end - start)/3600
        LOG.info('构建股票日K线数据完成,耗时%s小时' % cost)

    def _build_index(self, update=True):
        """Build/refresh the K-data of the tracked market indexes."""
        LOG.info('构建指数K线数据')
        for code_db in self.index_lst:
            code = code_db.replace('_', '.')
            df_index = self._get_index_daily_k_data_from_ts(code, update)
            if df_index.empty is False:
                if update is True:
                    self._update_k_data(code_db, df_index)
                else:
                    self._init_k_data(code_db, df_index)
        LOG.info('构建指数K线数据完成')

    def _build_top_list(self):
        """Build the dragon-tiger (top list) collection.

        Starts one trading day before the last update so a day whose top
        list was not yet published on tushare gets refreshed; data exists
        only from 2005 onward.
        """
        LOG.info('构建龙虎榜数据')
        date_top_list = self.get_pre_trade_date(self.db_date) if DATA_BEGIN_DATE != self.db_date else self.db_date
        begin_date = '20050101' if date_top_list < '20050101' else date_top_list
        trade_lst = self.get_trade_cal(begin_date)
        for item_date in trade_lst:
            df_top_list = self.pro.top_list(trade_date=item_date)
            sleep(1)  # stay under tushare's rate limit
            if df_top_list.size != 0:
                for ix_top_list, row_top_list in df_top_list.iterrows():
                    d_top_list = row_top_list.to_dict()
                    d_top_list['ts_code'] = d_top_list['ts_code'].replace('.', '_')
                    cl_stk_top_list = self.db[CL_STK_TOP_LIST]
                    flt_top_list = {'trade_date': item_date, 'ts_code': d_top_list['ts_code']}
                    cl_stk_top_list.replace_one(flt_top_list, d_top_list, upsert=True)
        LOG.info('构建龙虎榜数据完成')

    def _build_basic(self):
        """Build/refresh the listed-stock basics collection."""
        LOG.info('构建股票基础信息')
        data = self.pro.stock_basic(
            exchange='', list_status='L',
            fields='ts_code,symbol,name,area,industry,market,list_date')
        cl_stock_basic = self.db[CL_STOCK_BASIC]
        cl_stock_basic.create_index([('ts_code', ASCENDING)], unique=True)
        for ix, row in data.iterrows():
            d = row.to_dict()
            d['ts_code'] = d['ts_code'].replace('.', '_')
            flt = {'ts_code': d['ts_code']}
            cl_stock_basic.replace_one(flt, d, upsert=True)
        LOG.info('构建股票基础信息完成')

    def _get_daily_basic_from_ts(self, code, update=True):
        """Fetch daily basics for *code* from tushare with retries.

        Returns an empty DataFrame when tushare keeps failing or returns
        no data; NaNs are filled with 0.0.
        """
        start_date = DATA_BEGIN_DATE
        if update is True:
            start_date = self.db_date
        count = 0
        df_daily_basic = None  # BUG FIX: was unbound if every attempt raised
        while True:
            try:
                df_daily_basic = self.pro.daily_basic(
                    ts_code=code, start_date=start_date, end_date=self.date_now)
                if df_daily_basic is not None:
                    break
                else:
                    LOG.info('(%s)调用tushare pro.daily_basic失败,空数据' % (code))
                    break
            except Exception:  # was a bare except
                count += 1
                LOG.info('(%s)调用tushare pro.daily_basic失败,重试次数:%s' % (code, count))
                if count > self.count_max_retry:
                    break
                sleep(self.second_sleep)
        if df_daily_basic is None:
            df_daily_basic = pd.DataFrame()
        df_daily_basic.fillna(0.0, inplace=True)
        return df_daily_basic

    def _get_daily_k_data_from_ts(self, code, update=True):
        """Fetch forward-adjusted daily K-data for *code* with retries."""
        start_date = DATA_BEGIN_DATE
        if update is True:
            start_date = self.db_date
        count = 0
        df_k_data = None  # BUG FIX: was unbound if every attempt raised
        while True:
            try:
                df_k_data = ts.pro_bar(
                    ts_code=code, adj='qfq', start_date=start_date,
                    end_date=self.date_now)
                if df_k_data is not None:
                    break
                else:
                    LOG.info('(%s)调用tushare ts.pro_bar失败,空数据' % (code))
                    break
            except Exception:  # was a bare except
                count += 1
                LOG.info('(%s)调用tushare ts.pro_bar失败,重试次数:%s' % (code, count))
                if count > self.count_max_retry:
                    break
                sleep(self.second_sleep)
        if df_k_data is None:
            df_k_data = pd.DataFrame()
        df_k_data.fillna(0.0, inplace=True)
        return df_k_data

    def _get_index_daily_k_data_from_ts(self, code, update=True):
        """Fetch daily K-data for index *code* with retries."""
        start_date = DATA_BEGIN_DATE
        if update is True:
            start_date = self.db_date
        count = 0
        df_index_k_data = None  # BUG FIX: was unbound if every attempt raised
        while True:
            try:
                # BUG FIX: start_date was hard-coded to self.db_date, so a
                # full (update=False) build never fetched history from
                # DATA_BEGIN_DATE.
                df_index_k_data = self.pro.index_daily(
                    ts_code=code, start_date=start_date,
                    end_date=self.date_now)
                if df_index_k_data is not None:
                    break
                else:
                    LOG.info('(%s)调用tushare pro.index_daily失败,空数据' %(code))
                    break
            except Exception:  # was a bare except
                count += 1
                LOG.info('(%s)调用tushare pro.index_daily失败,重试次数:%s' % (code, count))
                if count > self.count_max_retry:
                    break
                sleep(self.second_sleep)
        if df_index_k_data is None:
            df_index_k_data = pd.DataFrame()
        df_index_k_data.fillna(0.0, inplace=True)
        return df_index_k_data

    def get_stock_price_info(self, code, date):
        """Return the stored daily row of *code* at *date*, or None."""
        cl_stock_code = self.db[code]
        stock_price_info = cl_stock_code.find_one(
            {'trade_date': date}, {'_id': 0})
        return stock_price_info

    def get_stock_price_info_last(self, code):
        """Return the most recent stored row of *code* (handles halts)."""
        cl_stock_code = self.db[code]
        stock_price_info = cl_stock_code.find_one(sort=[('_id', -1)])
        return stock_price_info

    def get_stock_price_lst(self, code, begin_date, end_date):
        """Daily rows of *code* within [begin_date, end_date], ascending."""
        cl_stock_code = self.db[code]
        return list(cl_stock_code.find(
            {'trade_date': {"$gte": begin_date, '$lte': end_date}}, {'_id': 0}).sort("trade_date"))

    def get_stock_basic_info(self, code):
        """Return the stock-basics document for *code*, or None."""
        cl_stock_basic = self.db[CL_STOCK_BASIC]
        stock_basic_info = cl_stock_basic.find_one(
            {'ts_code': code}, {'_id': 0})
        return stock_basic_info

    def get_trade_cal(self, begin_date, end_date=None):
        """Trading days from *begin_date* (to *end_date* if given), ascending."""
        cl_cal = self.db[CL_TRADE_CAL]
        if end_date is None:
            trade_cal = list(cl_cal.find(
                {'cal_date': {"$gte": begin_date}, 'is_open': 1},
                {'_id': 0}).sort("cal_date"))
        else:
            trade_cal = list(cl_cal.find(
                {'cal_date': {"$gte": begin_date, '$lte': end_date},
                 'is_open': 1}, {'_id': 0}).sort("cal_date"))
        return [item['cal_date'] for item in trade_cal]

    def get_next_trade_date(self, trade_date, n=1):
        """Return the n-th trading day after *trade_date*."""
        cl_cal = self.db[CL_TRADE_CAL]
        trade_cal = list(cl_cal.find(
            {'cal_date': {"$gt": trade_date}, 'is_open': 1},
            {'_id': 0}).sort("cal_date"))
        return trade_cal[n-1]['cal_date']

    def get_pre_trade_date(self, trade_date, n=1):
        """Return the n-th trading day before *trade_date*."""
        cl_cal = self.db[CL_TRADE_CAL]
        trade_cal = list(cl_cal.find(
            {'cal_date': {"$lt": trade_date}, 'is_open': 1},
            {'_id': 0}).sort("cal_date", DESCENDING))
        return trade_cal[n-1]['cal_date']

    def get_pre_n_trade_date(self, trade_date, days):
        """Latest *days* trading days up to and including *trade_date*,
        newest first."""
        cl_cal = self.db[CL_TRADE_CAL]
        trade_cal = list(cl_cal.find(
            {'cal_date': {"$lte": trade_date}, 'is_open': 1},
            {'_id': 0}).sort("cal_date", DESCENDING).limit(days))
        return [item['cal_date'] for item in trade_cal]

    def get_stock_top_lst(self, date):
        """All dragon-tiger entries recorded for *date*."""
        cl_stock_top_list = self.db[CL_STK_TOP_LIST]
        return list(cl_stock_top_list.find(
            {'trade_date': date}, {'_id': 0}))

    def daily_stock_pool_in_db(self, code_lst, date):
        """Record *code_lst* as bought into the daily stock pool at *date*."""
        LOG.info('每日股票池数据入库')
        cl_stk_pool_daily = self.db[CL_STK_POOL_DAILY]
        # BUG FIX: the index was created on 'date', but documents use
        # 'date_buy'; the index now matches the stored field.
        cl_stk_pool_daily.create_index([('date_buy', ASCENDING), ('ts_code', ASCENDING)])
        for code in code_lst:
            d = {'date_buy': date, 'ts_code': code, 'date_sell': None}
            flt = {'date_buy': date, 'ts_code': code}
            cl_stk_pool_daily.replace_one(flt, d, upsert=True)
        LOG.info('每日股票池数据入库完成')

    def cur_stock_pool_in_db(self, code_lst, date):
        """Store the current pool at *date*: union of yesterday's pool and
        *code_lst*."""
        LOG.info('当前股票池数据入库')
        cl_stk_pool_cur = self.db[CL_STK_POOL_CUR]
        cl_stk_pool_cur.create_index([('date', ASCENDING), ('ts_code', ASCENDING)])
        lst_code_pre = self.get_cur_stock_pool_code_lst(self.get_pre_trade_date(date))
        lst_union = list(set(lst_code_pre).union(set(code_lst)))
        for code in lst_union:
            d = {'date': date, 'ts_code': code}
            flt = {'date': date, 'ts_code': code}
            cl_stk_pool_cur.replace_one(flt, d, upsert=True)
        LOG.info('当前股票池数据入库完成')

    def get_cur_stock_pool(self, date):
        """Current-pool documents stored for *date*."""
        cl_stk_pool_cur = self.db[CL_STK_POOL_CUR]
        return list(cl_stk_pool_cur.find(
            {'date': date}, {'_id': 0}))

    def get_cur_stock_pool_code_lst(self, date):
        """ts_codes of the current pool stored for *date*."""
        cl_stk_pool_cur = self.db[CL_STK_POOL_CUR]
        ret = list(cl_stk_pool_cur.find(
            {'date': date}, {'_id': 0}))
        return [item['ts_code'] for item in ret]

    def del_cur_stock_pool(self, lst_code, date):
        """Remove the given codes from the current pool at *date*."""
        cl_stk_pool_cur = self.db[CL_STK_POOL_CUR]
        for code in lst_code:
            del_query = {"ts_code": code, 'date': date}
            cl_stk_pool_cur.delete_one(del_query)

    def set_daily_stock_pool(self, lst_code, date):
        """Mark still-open daily-pool positions of *lst_code* as sold at
        *date*."""
        cl_stk_pool_daily = self.db[CL_STK_POOL_DAILY]
        for code in lst_code:
            set_query = {"ts_code": code, "date_sell": None}
            cl_stk_pool_daily.update_one(set_query, {"$set": {'date_sell': date}})

    def get_cur_stock_pool_date_lst(self):
        """All distinct dates present in the current-pool collection,
        ascending."""
        cl_stk_pool_cur = self.db[CL_STK_POOL_CUR]
        ret = list(cl_stk_pool_cur.find().sort("date", ASCENDING))
        lst_date = list()
        for item in ret:
            if item['date'] not in lst_date:
                lst_date.append(item['date'])
        return lst_date

    def get_curve_date(self):
        """Return (first pool date, last trading day); both equal the last
        trading day when the pool is empty."""
        cl_stk_pool_cur = self.db[CL_STK_POOL_CUR]
        ret = cl_stk_pool_cur.find_one(sort=[('_id', 1)])
        date_end = self.get_trade_date(self.db_date)
        if ret is not None:
            return ret['date'], date_end
        else:
            return date_end, date_end

    def get_daily_stock_pool(self, date):
        """Daily-pool documents bought at *date*."""
        cl_stk_pool_daily = self.db[CL_STK_POOL_DAILY]
        return list(cl_stk_pool_daily.find({'date_buy': date}, {'_id': 0}))

    def get_daily_stock(self, code):
        """First daily-pool document for *code*, or None."""
        cl_stk_pool_daily = self.db[CL_STK_POOL_DAILY]
        return cl_stk_pool_daily.find_one({'ts_code': code}, {'_id': 0})
# Manual entry point: rebuild the whole stock database from scratch
# (update=False triggers a full historical initialisation).
if __name__ == "__main__":
    ds_tushare = DataServiceTushare()
    ds_tushare.build_stock_data(update=False)
|
991,769 | e1e1d5b08fd4ea038f33134b969133b4120e1de7 | import numpy as np
from typing import Optional, List
class Galaxy:
    def __init__(
        self,
        redshift: float,
        light_profiles: Optional[List] = None,
        mass_profiles: Optional[List] = None,
    ):
        """
        A galaxy, which contains light and mass profiles at a specified redshift.

        Parameters
        ----------
        redshift
            The redshift of the galaxy.
        light_profiles
            A list of the galaxy's light profiles.
        mass_profiles
            A list of the galaxy's mass profiles.
        """
        self.redshift = redshift
        self.light_profiles = light_profiles
        self.mass_profiles = mass_profiles

    def image_from_grid(self, grid: np.ndarray) -> np.ndarray:
        """
        Returns the summed 2D image of all of the galaxy's light profiles using
        an input grid of Cartesian (y,x) coordinates.

        If the galaxy has no light profiles, a grid of zeros is returned.

        Parameters
        ----------
        grid
            The (y, x) coordinates in the original reference frame of the grid.
        """
        # BUG FIX: `is not None` let an *empty* profile list reach
        # sum([]) == 0, returning a scalar int instead of an array.
        # Truthiness treats None and [] uniformly.
        if self.light_profiles:
            return sum(p.image_from_grid(grid=grid) for p in self.light_profiles)
        return np.zeros((grid.shape[0],))

    def deflections_from_grid(self, grid: np.ndarray) -> np.ndarray:
        """
        Returns the summed (y,x) deflection angles of the galaxy's mass
        profiles using a grid of Cartesian (y,x) coordinates.

        If the galaxy has no mass profiles, a grid of zeros is returned.

        Parameters
        ----------
        grid
            The (y, x) coordinates in the original reference frame of the grid.
        """
        # Same empty-list fix as image_from_grid.
        if self.mass_profiles:
            return sum(p.deflections_from_grid(grid=grid) for p in self.mass_profiles)
        return np.zeros((grid.shape[0], 2))
class Redshift(float):
    """A float subclass representing a cosmological redshift."""
    def __new__(cls, redshift):
        # noinspection PyArgumentList
        return float.__new__(cls, redshift)
    # BUG FIX: the old __init__ called float.__init__(redshift) — i.e. on the
    # *argument*, not on self — a silent no-op.  float is fully initialised in
    # __new__, so no __init__ is needed at all.
|
991,770 | 268308182d08ecba3757b8a26a7f6ac14009062e | # from Model import Model
import matplotlib.pyplot as plt
import mpl_toolkits.mplot3d.axes3d as p3
import numpy as np
from copy import deepcopy
# import aerosonde_parameters as MAV
# import vaporlite_parameters as MAV
from tools import normalize
from tools import Quaternion2Euler
import pdb
import warnings
def R(q):
    """Rotation matrix for the unit quaternion q = [w, x, y, z]."""
    w, x, y, z = q[0], q[1], q[2], q[3]
    wx, wy, wz = w * x, w * y, w * z
    xx, xy, xz = x * x, x * y, x * z
    yy, yz, zz = y * y, y * z, z * z
    return np.array([
        [1.0 - 2.0 * yy - 2.0 * zz, 2.0 * xy + 2.0 * wz, 2.0 * xz - 2.0 * wy],
        [2.0 * xy - 2.0 * wz, 1.0 - 2.0 * xx - 2.0 * zz, 2.0 * yz + 2.0 * wx],
        [2.0 * xz + 2.0 * wy, 2.0 * yz - 2.0 * wx, 1.0 - 2.0 * xx - 2.0 * yy],
    ])
class FixedWing():
    def __init__(self):
        """Fixed-wing UAV rigid-body model (13 states, 4 control inputs).

        State vector: [pn, pe, pd, e0, e1, e2, e3, u, v, w, p, q, r]^T —
        NED position, attitude quaternion, body-frame velocities, body rates.
        Inputs (see _forces_moments): [delta_a, delta_e, delta_t, delta_r].
        """
        self.numStates = 13
        self.numInputs = 4
        # Initial state: at the origin, 3 m/s forward body velocity.
        # NOTE(review): the quaternion entries (3)-(6) start all-zero, which is
        # not a valid unit quaternion — presumably callers set the attitude (or
        # normalize) before simulating; confirm.
        self._state = np.array([[0], # (0)  pn: north position
                                [0], # (1)  pe: east position
                                [0], # (2)  pd: down position
                                [0], # (3)  e0: quaternion scalar
                                [0], # (4)  e1
                                [0], # (5)  e2
                                [0], # (6)  e3
                                [3], # (7)  u: body x velocity
                                [0], # (8)  v: body y velocity
                                [0], # (9)  w: body z velocity
                                [0], # (10) p: roll rate
                                [0], # (11) q: pitch rate
                                [0]]) # (12) r: yaw rate
        # Per-state magnitude bounds (same ordering as _state).
        self.state_max = np.array([[5], # (0)
                                [5], # (1)
                                [5], # (2)
                                [1], # (3)
                                [1], # (4)
                                [1], # (5)
                                [1], # (6)
                                [6], # (7)
                                [1], # (8)
                                [1], # (9)
                                [np.pi/1], # (10)
                                [np.pi/1], # (11)
                                [np.pi/1]]) # (12)
        # Control-surface and throttle limits.
        self.uMax = 1.0
        self.uTMax = 1.0
        # self.uMax = 0.9
        # self.uTMax = 0.9
        self.uTMin = 0
        # Cached airspeed / angle of attack / sideslip (updated in _forces_moments).
        self._Va = 3
        self._alpha = 0
        self._beta = 0
        self.plotObjects = []
        # Wireframe of the aircraft for visualization, columns are 3D points
        # in the body NED frame.
        self.plane = np.array([[0,0,0],
                               [0.5,0,0],
                               [0.1,0,0],
                               [0,0.5,-0.1], #left wing
                               [0.1,0,0],
                               [0,-0.5,-0.1], #right wing
                               [0.1,0,0],
                               [-0.5,0,0],
                               [-0.5,0,-0.25],
                               [-0.5,0.1,-0.25],
                               [-0.5,-0.1,-0.25]]).T
        # Start with all 32 aerodynamic coefficients zeroed.
        self.create_coeficients(np.zeros((32,1)))
    def create_coeficients(self, coefList):
        """Install physical constants and aerodynamic coefficients.

        coefList is a (32, 1) array of identifiable aero coefficients (the
        inline comments give nominal values for this airframe); the fixed
        physical parameters (mass, inertias, geometry) are hard-coded here.
        Also precomputes the gamma inertia terms and the combined roll/yaw
        coefficients used by _derivatives and _forces_moments.
        """
        # print("Coef List", coefList)
        ######################################################################################
        # Physical Parameters
        ######################################################################################
        self.mass = 0.015 #kg
        Jx = 0.00002 #kg m^2
        self.Jy = 0.0001
        Jz = 0.00011
        Jxz = 0.00001
        self.S_wing = 0.043
        self.b = 0.3429
        self.c = 0.1397
        self.S_prop = 0.1143**2*np.pi/4.0
        self.rho = 1.2682
        self.e = 1
        self.AR = (self.b**2) / self.S_wing
        self.gravity = 9.8
        ######################################################################################
        # Longitudinal Coefficients
        ######################################################################################
        self.C_L_0 = coefList[0] #0.1762
        self.C_L_alpha = coefList[1] #3.4329
        self.C_L_q = coefList[2] #4.8975
        self.C_L_delta_e = coefList[3] #0.000026274
        # NOTE(review): C_D_0 / C_D_alpha / M / epsilon are assigned but never
        # used in this class — dead locals kept for documentation value.
        C_D_0 = coefList[4] #0.07
        C_D_alpha = coefList[5] #0.04
        self.C_D_q = coefList[6] #0.0
        self.C_D_p = coefList[7] #0.0
        self.C_D_delta_e = coefList[8] #0.0
        self.C_m_0 = coefList[9] #0.0
        self.C_m_alpha = coefList[10] #-0.26771
        self.C_m_q = coefList[11] #-2.3065
        self.C_m_delta_e = coefList[12] #-0.71205
        M = 50.0
        # Stall angle of attack (rad) used by the sigmoid blend in _forces_moments.
        self.alpha0 = 0.47
        epsilon = 0.16
        ######################################################################################
        # Lateral Coefficients
        ######################################################################################
        self.C_Y_0 = coefList[13] #0.0
        self.C_Y_beta = coefList[14] #-0.19942
        self.C_Y_p = coefList[15] #-0.193
        self.C_Y_r = coefList[16] #0.27923
        self.C_Y_delta_a = coefList[17] #0.0
        self.C_Y_delta_r = coefList[18] #0.182201
        self.C_ell_0 = coefList[19] #0.0
        self.C_ell_beta = coefList[20] #-0.077793
        self.C_ell_p = coefList[21] #-0.27114
        self.C_ell_r = coefList[22] #0.14918
        self.C_ell_delta_a = coefList[23] #0.0
        self.C_ell_delta_r = coefList[24] #0.016043
        self.C_n_0 = coefList[25] #0.0
        self.C_n_beta = coefList[26] #0.074453
        self.C_n_p = coefList[27] #-0.07788
        self.C_n_r = coefList[28] #-0.098776
        self.C_n_delta_a = coefList[29] #0.0
        self.C_n_delta_r = coefList[30] # -0.090527
        ######################################################################################
        # Propeller thrust / torque parameters (see addendum by McLain)
        ######################################################################################
        self.C_prop = 1.0
        self.S_prop = 0.1143**2*np.pi/4.0
        self.k_motor = coefList[31]#6
        self.kTp = 0.
        self.kOmega = 0.
        # self.C_prop = coefList[31] #1.0
        # self.S_prop = coefList[32] #0.1143**2*np.pi/4.0
        # self.k_motor = coefList[33]#6
        # self.kTp = coefList[34] #0.
        # self.kOmega = coefList[35] #0.
        ######################################################################################
        # Calculation Variables
        ######################################################################################
        # gamma parameters pulled from page 36 (dynamics)
        gamma = Jx * Jz - (Jxz**2)
        self.gamma1 = (Jxz * (Jx - self.Jy + Jz)) / gamma
        self.gamma2 = (Jz * (Jz - self.Jy) + (Jxz**2)) / gamma
        self.gamma3 = Jz / gamma
        self.gamma4 = Jxz / gamma
        self.gamma5 = (Jz - Jx) / self.Jy
        self.gamma6 = Jxz / self.Jy
        self.gamma7 = ((Jx - self.Jy) * Jx + (Jxz**2)) / gamma
        self.gamma8 = Jx / gamma
        # Combined roll (C_p_*) and yaw (C_r_*) moment coefficients, page 62.
        self.C_p_0 = self.gamma3 * self.C_ell_0 + self.gamma4 * self.C_n_0
        self.C_p_beta = self.gamma3 * self.C_ell_beta + self.gamma4 * self.C_n_beta
        self.C_p_p = self.gamma3 * self.C_ell_p + self.gamma4 * self.C_n_p
        self.C_p_r = self.gamma3 * self.C_ell_r + self.gamma4 * self.C_n_r
        self.C_p_delta_a = self.gamma3 * self.C_ell_delta_a + self.gamma4 * self.C_n_delta_a
        self.C_p_delta_r = self.gamma3 * self.C_ell_delta_r + self.gamma4 * self.C_n_delta_r
        self.C_r_0 = self.gamma4 * self.C_ell_0 + self.gamma8 * self.C_n_0
        self.C_r_beta = self.gamma4 * self.C_ell_beta + self.gamma8 * self.C_n_beta
        self.C_r_p = self.gamma4 * self.C_ell_p + self.gamma8 * self.C_n_p
        self.C_r_r = self.gamma4 * self.C_ell_r + self.gamma8 * self.C_n_r
        self.C_r_delta_a = self.gamma4 * self.C_ell_delta_a + self.gamma8 * self.C_n_delta_a
        self.C_r_delta_r = self.gamma4 * self.C_ell_delta_r + self.gamma8 * self.C_n_delta_r
def draw_plane_nwu(self, plane_in):
R = np.array([[1,0,0],
[0,-1,0],
[0,0,-1]])
p = R.dot(plane_in)
return p[0,:], p[1,:], p[2,:]
    def forward_simulate_dt(self,x,u,coefList,dt=.01):
        """Advance the state x by one RK4 step of length dt under input u.

        coefList re-parameterizes the aero model each call (useful for system
        identification). Forces/moments are computed once from (x, u) and held
        constant across the four RK4 stages. Intermediate and final states are
        clamped to +/-1e10 (inputs to +/-1e100) to keep a diverging rollout
        from producing inf/NaN, and the quaternion is re-normalized.
        Returns a deep copy of the new state; also updates self._state.
        """
        self.create_coeficients(coefList)
        self._state = deepcopy(x)
        # u = deepcopy(u.clip(-self.uMax,self.uMax))
        u = deepcopy(u)
        # x = x.reshape([self.numStates,-1])
        # xdot = np.zeros(x.shape)
        # Forces/moments evaluated once at the step start (not per RK4 stage).
        forces_moments = self._forces_moments(u)
        # xdot = self._derivatives(x, forces_moments).reshape((-1,13)).T
        # xdot[6:10] = normalize(xdot[6:10])
        # xdot[1,:] = x[0,:]
        # x = x + xdot*dt
        # Zero out absurd values so a blown-up rollout stays finite.
        self._state[self._state<-1e100]=0
        self._state[self._state>1e100]=0
        k1 = self._derivatives(self._state, forces_moments)
        k1[k1<-1e10]=0
        k1[k1>1e10]=0
        k2 = self._derivatives(self._state + dt/2.0*k1, forces_moments)
        k2[k2<-1e10]=0
        k2[k2>1e10]=0
        k3 = self._derivatives(self._state + dt/2.0*k2, forces_moments)
        k3[k3<-1e10]=0
        k3[k3>1e10]=0
        k4 = self._derivatives(self._state + dt*k3, forces_moments)
        k4[k4<-1e10]=0
        k4[k4>1e10]=0
        # Classic RK4 combination.
        self._state += dt/6.0 * (k1 + 2.0*k2 + 2.0*k3 + k4)
        # print(k1,k2,k3,k4)
        # print(dt)
        # pdb.set_trace()
        self._state[self._state<-1e10]=0
        self._state[self._state>1e10]=0
        # pdb.set_trace()
        # Quaternion drifts off unit length during integration; re-normalize.
        self._state[3:7] = normalize(self._state[3:7])
        x = deepcopy(self._state)
        # print('u',u)
        # print('x',x)
        # print('xdot',xdot)
        # if wrapAngle==True:
        #     x[1,:] = (x[1,:] + np.pi) % (2*np.pi) - np.pi
        # pdb.set_trace()
        return x
# def calc_discrete_A_B_w(self,x,u,dt=.01):
# x = deepcopy(x)
# u = deepcopy(u)
# x = x.reshape([self.numStates,-1])
# A = np.matrix([[-self.b/self.I, 0],
# [1.0, 0]])
# B = np.matrix([[1.0/self.I],
# [0.0]])
# w = np.matrix([self.m*self.g*np.sin(x[1,:])/self.I,
# [0.0]])
#
# [Ad,Bd] = self.discretize_A_and_B(A,B,dt)
# wd = w*dt
#
# return Ad,Bd,wd
    def visualize(self,x,ax,color='red'):
        """Draw the aircraft wireframe at state x on the 3D axes `ax`.

        Removes the previously drawn artists (tracked in self.plotObjects),
        rotates/translates self.plane by the Euler angles recovered from the
        state quaternion, converts NED to plot axes, and refreshes the figure
        interactively. Side effects only — nothing is returned.
        """
        # CoM = [-0.5*np.sin(x[1]),0.5*np.cos(x[1])]
        # theta = x[1]
        #
        # x = [CoM[0] + self.l/2.0*np.sin(theta),CoM[0] - self.l/2.0*np.sin(theta)]
        # y = [CoM[1] - self.l/2.0*np.cos(theta),CoM[1] + self.l/2.0*np.cos(theta)]
        #
        # massX = CoM[0] - self.l/2.0*np.sin(theta)
        # massY = CoM[1] + self.l/2.0*np.cos(theta)
        # Clear artists from the previous frame.
        for plot in self.plotObjects:
            plot[0].remove()
        self.plotObjects = []
        # # self.plotObjects.append(ax.scatter(x[0], x[1], -x[2], 'bo', c='blue'))
        # self.plotObjects.append(ax.plot(*self.draw_plane_nwu(self.plane), linewidth=2, color='red'))
        phi, theta, psi = Quaternion2Euler(x[3:7])
        # Elementary rotations about body x (roll), y (pitch), z (yaw).
        Rphi = np.array([[1,0,0],
                         [0,np.cos(phi),np.sin(phi)],
                         [0,-np.sin(phi),np.cos(phi)]])
        Rtheta = np.array([[np.cos(theta),0,-np.sin(theta)],
                           [0,1,0],
                           [np.sin(theta),0,np.cos(theta)]])
        Rpsi = np.array([[np.cos(psi),np.sin(psi),0],
                         [-np.sin(psi),np.cos(psi),0],
                         [0,0,1]])
        # Translation of the wireframe; east/down are negated for plotting.
        T = np.array([x[0],-x[1],-x[2]])
        Rot = Rphi.dot(Rtheta).dot(Rpsi)
        # print(Rot)
        # print(np.squeeze(R(x[3:7])))
        # pdb.set_trace()
        # Rot = np.squeeze(R(x[3:7]))
        # plt.clf()
        # Scale the wireframe 2.5x so it is visible at these axis limits.
        xs, ys, zs = self.draw_plane_nwu(Rot.dot(2.5*self.plane)+T)
        # xs, ys, zs = Rot.dot(1.5*self.plane)+T
        self.plotObjects.append(ax.plot(xs, ys, zs, linewidth=2, color=color))
        # plt.draw()
        # plt.plot(x[0],x[1], 'bo')
        # # ax.scatter(x[0], x[1], x[2], 'bo')
        # # plt.scatter(massX,massY,50,'r')
        # plt.axis([-20,20,-20,20])
        ax.set_xlim3d([-6, 6])
        ax.set_ylim3d([-6, 6])
        ax.set_zlim3d([-10, 20])
        ax.set_xlabel('x')
        ax.set_ylabel('y')
        ax.set_zlabel('z')
        plt.ion()
        plt.show()
        plt.pause(.0000001)
    def _derivatives(self, state, forces_moments):
        """
        For the dynamics xdot = f(x, u), return f(x, u).

        Implements the standard quaternion rigid-body equations of motion:
        position kinematics (body velocity rotated to inertial), velocity
        dynamics (Coriolis terms + applied force / mass), quaternion
        kinematics, and rotational dynamics using the precomputed gamma
        inertia terms. `forces_moments` is [fx, fy, fz, l, m, n] from
        _forces_moments.
        """
        # extract the states
        pn = state[0]
        pe = state[1]
        pd = state[2]
        e0 = state[3]
        e1 = state[4]
        e2 = state[5]
        e3 = state[6]
        u = state[7]
        v = state[8]
        w = state[9]
        # state[6:10] = normalize(state[6:10])
        p = state[10]
        q = state[11]
        r = state[12]
        # extract forces/moments
        fx = forces_moments[0]
        fy = forces_moments[1]
        fz = forces_moments[2]
        l = forces_moments[3]
        m = forces_moments[4]
        n = forces_moments[5]
        # with warnings.catch_warnings():
        #     warnings.filterwarnings('error')
        #     try:
        #         # position kinematics
        #     except Warning as e:
        #         pdb.set_trace()
        #         print(e)
        # Position kinematics: body velocities rotated into the inertial frame
        # by the quaternion rotation matrix (written out element-wise).
        pn_dot = (e1**2+e0**2-e2**2-e3**2)*u + 2*(e1*e2-e3*e0)*v + 2*(e1*e3+e2*e0)*w
        pe_dot = 2*(e1*e2+e3*e0)*u + (e2**2+e0**2-e1**2-e3**2)*v + 2*(e2*e3-e1*e0)*w
        pd_dot = 2*(e1*e3-e2*e0)*u + 2*(e2*e3+e1*e0)*v + (e3**2+e0**2-e1**2-e2**2)*w
        # pn_dot = (e0**2+e1**2-e2**2-e3**2)*u + 2*(e1*e2+e3*e0)*v + 2*(e1*e3-e2*e0)*w
        # pe_dot = 2*(e1*e2-e3*e0)*u + (e0**2-e1**2+e2**2-e3**2)*v + 2*(e2*e3+e1*e0)*w
        # pd_dot = 2*(e1*e3+e2*e0)*u + 2*(e2*e3-e1*e0)*v + (e0**2-e1**2-e2**2+e3**2)*w
        # pdb.set_trace()
        # Velocity dynamics: Coriolis coupling plus specific force.
        mass = self.mass
        u_dot = (r*v-q*w)+fx/mass
        v_dot = (p*w-r*u)+fy/mass
        w_dot = (q*u-p*v)+fz/mass
        # Quaternion kinematics (0.5 * Omega(p,q,r) * e).
        e0_dot = 0.5*(-p*e1-q*e2-r*e3)
        e1_dot = 0.5*(p*e0+r*e2-q*e3)
        e2_dot = 0.5*(q*e0-r*e1+p*e3)
        e3_dot = 0.5*(r*e0+q*e1-p*e2)
        # Rotational dynamics with the gamma inertia terms from create_coeficients.
        p_dot = self.gamma1*p*q - self.gamma2*q*r + self.gamma3*l + self.gamma4*n
        q_dot = self.gamma5*p*r - self.gamma6*(p**2-r**2) + m/self.Jy
        r_dot = self.gamma7*p*q - self.gamma1*q*r + self.gamma4*l + self.gamma8*n
        # collect the derivative of the states
        x_dot = np.array([pn_dot, pe_dot, pd_dot, e0_dot, e1_dot, e2_dot, e3_dot,
                          u_dot, v_dot, w_dot, p_dot, q_dot, r_dot])
        # pdb.set_trace()
        # print(x_dot)
        return x_dot
    def _forces_moments(self, delta):
        """
        Return the body-frame forces and moments acting on the UAV.

        :param delta: control input [delta_a, delta_e, delta_t, delta_r]
                      (aileron, elevator, throttle, rudder)
        :return: np.array([fx, fy, fz, Mx, My, Mz])

        Combines gravity (rotated into body frame via the quaternion),
        aerodynamic forces/moments (with a sigmoid post-stall lift blend),
        and propeller thrust/torque. Also caches self._Va, self._alpha,
        self._beta as a side effect.

        NOTE(review): `dt` below is the *throttle* delta_t, not a timestep.
        NOTE(review): alpha uses arctan(w/u) and beta divides by Va — both
        blow up when u == 0 or Va == 0; presumably never reached in practice.
        """
        # assert delta.shape == (4,1)
        da = delta[0]
        de = delta[1]
        dt = delta[2]
        dr = delta[3]
        e0 = self._state[3]
        e1 = self._state[4]
        e2 = self._state[5]
        e3 = self._state[6]
        u = self._state[7]
        v = self._state[8]
        w = self._state[9]
        p = self._state[10]
        q = self._state[11]
        r = self._state[12]
        # Airspeed, angle of attack, sideslip from body velocities (no wind model).
        self._Va = np.sqrt(u**2 + v**2 + w**2)
        self._alpha = np.arctan(1.0*w/u)
        self._beta = np.arcsin(1.0*v/self._Va)
        # Gravity rotated into the body frame (third column of R(q)^T).
        Fg = self.mass*self.gravity*np.array([2*(e1*e3-e2*e0),
                                              2*(e2*e3 + e1*e0),
                                              e3**2 + e0**2 - e1**2 - e2**2,
                                              ])
        # Fg = self.mass*self.gravity*np.array([2*(e1*e3 - e2*e0),
        #                                       2*(e2*e3 + e1*e0),
        #                                       e3**2 + e0**2 - e1**2 - e2**2,
        #                                       ])
        # Sigmoid blending between the linear lift model and the flat-plate
        # post-stall model, centered at +/- alpha0.
        M_e = 25
        sig = lambda a: (1+np.exp(-M_e*(a-self.alpha0))+np.exp(M_e*(a+self.alpha0)))/((1+np.exp(-M_e*(a-self.alpha0)))*(1+np.exp(M_e*(a+self.alpha0))))
        cla = lambda a: (1-sig(a))*(self.C_L_0+self.C_L_alpha*a)+sig(a)*(2*np.sign(a)*np.sin(a)**2*np.cos(a))
        cda = lambda a: self.C_D_p + (self.C_L_0+self.C_L_alpha*a)**2/(np.pi*self.e*self.AR)
        # Rotate lift/drag from the stability frame into body x/z coefficients.
        cxa = lambda a: -(cda(a)) * np.cos(a) + (cla(a)) * np.sin(a)
        cxq = lambda a: -self.C_D_q * np.cos(a) +self.C_L_q * np.sin(a)
        cxde = lambda a: -self.C_D_delta_e * np.cos(a) + self.C_L_delta_e * np.sin(a)
        cza = lambda a: -(cda(a)) * np.sin(a) - (cla(a)) * np.cos(a)
        czq = lambda a: -self.C_D_q * np.sin(a) - self.C_L_q * np.cos(a)
        czde = lambda a: -self.C_D_delta_e * np.sin(a) - self.C_L_delta_e * np.cos(a)
        # Nondimensionalizing factors for the rate terms.
        c = self.c/(2.0*self._Va)
        b = self.b/(2.0*self._Va)
        # Dynamic pressure times wing area.
        one = 0.5*self.rho*self._Va**2*self.S_wing
        # two = np.array([[1,0,0],[0,1,0],[0,0,1]])
        three = np.array([[cxa(self._alpha)+cxq(self._alpha)*c*q+cxde(self._alpha)*de],
                          [self.C_Y_0+self.C_Y_beta*self._beta+self.C_Y_p*b*p+self.C_Y_r*b*r+self.C_Y_delta_a*da+self.C_Y_delta_r*dr],
                          [cza(self._alpha)+czq(self._alpha)*c*q+czde(self._alpha)*de]])
        Fa = np.squeeze(three) * one
        # pdb.set_trace()
        Fa = Fa.reshape((3,-1))
        F = Fg + Fa
        #
        # print("Fa:",Fa)
        # Propeller thrust along body x (simple momentum model).
        Fp = 0.5*self.rho*self.S_prop*self.C_prop*((self.k_motor*dt)**2-self._Va**2)
        # print("FP:", Fp)
        fx = F[0] + Fp
        # + 0.5*MAV.rho*self._Va**2*MAV.S_wing*(\
        #                 +cxa(self._alpha)\
        #                 + cxq(self._alpha)*c*q\
        #                 + cxde(self._alpha)*de
        #                 )
        fy = F[1]
        fz = F[2]
        # Aerodynamic moments (roll, pitch, yaw), same dynamic-pressure scaling.
        one = 0.5*self.rho*self._Va**2*self.S_wing
        two = np.array([\
            [self.b*(self.C_ell_0+self.C_ell_beta*self._beta+self.C_ell_p*b*p+self.C_ell_r*b*r+self.C_ell_delta_a*da+self.C_ell_delta_r*dr)],
            [self.c*(self.C_m_0+(self.C_m_alpha*self._alpha)+(self.C_m_q*c*q)+(self.C_m_delta_e*de))],
            [self.b*(self.C_n_0+(self.C_n_beta*self._beta)+(self.C_n_p*b*p)+(self.C_n_r*b*r)+(self.C_n_delta_a*da)+(self.C_n_delta_r*dr))]
            ])
        Ma = one * np.squeeze(two)
        # print("\nMa:", Ma)
        # pdb.set_trace()
        Ma = Ma.reshape((3,-1))
        size = Ma.shape[1]
        # Propeller reaction torque about body x.
        Mp = np.block([[np.ones(size)*-self.kTp*(self.kOmega*dt)**2],
                       [np.zeros(size)],
                       [np.zeros(size)]
                       ])
        M = Mp + Ma
        Mx = M[0]
        My = M[1]
        Mz = M[2]
        # self._forces[0] = fx
        # self._forces[1] = fy
        # self._forces[2] = fz
        # pdb.set_trace()
        # print(fx, fy, fz, Mx, My, Mz)
        return np.array([fx, fy, fz, Mx, My, Mz])
|
991,771 | da58fd3dd6dcf065ad37ec18b8e6cd341314547a | AlfaItemList=[]
#AlfaItemList.ItemList+= ["ALFA_HitCollection#*"]
#AlfaItemList.ItemList+= ["ALFA_ODHitCollection#*"]
#AlfaItemList.ItemList+= ["ALFA_DigitCollection#*"]
#AlfaItemList.ItemList+= ["ALFA_ODDigitCollection#*"]
#AlfaItemList.ItemList+= ["ALFA_RawDataContainer#*"]
AlfaItemList.append("ALFA_DigitCollection#ALFA_DigitCollection")
AlfaItemList.append("ALFA_ODDigitCollection#ALFA_ODDigitCollection")
#AlfaItemList.append("ALFA_RawDataContainer#ALFA_RawDataContainer")
AlfaItemList.append("ALFA_RawDataContainer#ALFA_RawData")
AlfaItemList.append("ALFA_LocRecEvCollection#ALFA_LocRecEvCollection")
AlfaItemList.append("ALFA_LocRecODEvCollection#ALFA_LocRecODEvCollection")
AlfaItemList.append("ALFA_LocRecCorrEvCollection#ALFA_LocRecCorrEvCollection")
AlfaItemList.append("ALFA_LocRecCorrODEvCollection#ALFA_LocRecCorrODEvCollection")
AlfaItemList.append("ALFA_CLinkEvent#ALFA_CLinkEvent")
AlfaItemList.append("xAOD::ALFADataContainer#ALFADataContainer")
AlfaItemList.append("xAOD::ALFADataAuxContainer#ALFADataContainerAux.")
|
991,772 | 5be825c3f0f3e7d2bfae25394832e247cc262818 | import pandas as pd
import geopandas as gpd
import osmnx as ox
import networkx as nx
import json
import ast
from fiona.crs import from_epsg
from shapely.geometry import Point, LineString, MultiLineString, box
import utils.exposures as exps
import utils.geometry as geom_utils
import utils.utils as utils
def get_walkable_network(extent_poly_wgs=None):
    """Download the walkable street network inside a WGS84 polygon via OSMnx,
    convert it to an undirected graph and project it to EPSG:3879."""
    # define filter for acquiring walkable street network
    cust_filter = '["area"!~"yes"]["highway"!~"trunk_link|motor|proposed|construction|abandoned|platform|raceway"]["foot"!~"no"]["service"!~"private"]["access"!~"private"]'
    # query graph
    g = ox.graph_from_polygon(extent_poly_wgs, custom_filter=cust_filter)
    print('loaded graph of', g.number_of_edges(), 'edges')
    # convert graph to undirected graph
    g_u = ox.get_undirected(g)
    print('converted graph to undirected graph of', g_u.number_of_edges(), 'edges')
    # project graph
    g_u_proj = ox.project_graph(g_u, from_epsg(3879))
    return g_u_proj
def get_unwalkable_network(extent_poly_wgs=None):
    """Download service roads on negative layers (tunnels) — i.e. the network
    considered unwalkable — undirected and projected to EPSG:3879."""
    cust_filter_no_tunnels = '["area"!~"yes"]["highway"!~"trunk_link|motor|proposed|construction|abandoned|platform|raceway"]["foot"!~"no"]["service"!~"private"]["access"!~"private"]["highway"~"service"]["layer"~"-1|-2|-3|-4|-5|-6|-7"]'
    # query graph; retain_all keeps disconnected components
    g = ox.graph_from_polygon(extent_poly_wgs, custom_filter=cust_filter_no_tunnels, retain_all=True)
    print('loaded graph of', g.number_of_edges(), 'edges')
    # convert graph to undirected graph
    g_u = ox.get_undirected(g)
    print('converted graph to undirected graph of', g_u.number_of_edges(), 'edges')
    # project graph
    g_u_proj = ox.project_graph(g_u, from_epsg(3879))
    return g_u_proj
def delete_unused_edge_attrs(graph, save_attrs=['uvkey', 'length', 'geometry', 'noises']):
    """Delete in place every edge attribute not listed in save_attrs.

    NOTE(review): the mutable default list is shared across calls; safe here
    only because it is never mutated.
    """
    for node_from in list(graph.nodes):
        nodes_to = graph[node_from]
        for node_to in nodes_to.keys():
            edges = graph[node_from][node_to]
            for edge_k in edges.keys():
                edge = graph[node_from][node_to][edge_k]
                edge_attrs = list(edge.keys())
                for attr in edge_attrs:
                    if (attr not in save_attrs):
                        del edge[attr]
def get_missing_edge_geometries(graph, edge_dict):
    """Return {'uvkey', 'geometry', 'length'} for one edge, synthesizing a
    straight-line geometry between the end nodes when none is present."""
    edge_d = {}
    edge_d['uvkey'] = edge_dict['uvkey']
    if ('geometry' not in edge_dict):
        node_from = edge_dict['uvkey'][0]
        node_to = edge_dict['uvkey'][1]
        # interpolate missing geometry as straigth line between nodes
        edge_geom = get_edge_geom_from_node_pair(graph, node_from, node_to)
        edge_d['geometry'] = edge_geom
    else:
        edge_d['geometry'] = edge_dict['geometry']
    edge_d['length'] = round(edge_d['geometry'].length, 3)
    return edge_d
def add_missing_edge_geometries(graph, edge_dicts):
    """Fill in 'geometry' and 'length' attributes on the graph in place for
    every edge dict lacking a geometry, printing progress as it goes."""
    edge_count = len(edge_dicts)
    for idx, edge_d in enumerate(edge_dicts):
        if ('geometry' not in edge_d):
            node_from = edge_d['uvkey'][0]
            node_to = edge_d['uvkey'][1]
            # interpolate missing geometry as straigth line between nodes
            edge_geom = get_edge_geom_from_node_pair(graph, node_from, node_to)
            # set geometry attribute of the edge
            nx.set_edge_attributes(graph, { edge_d['uvkey']: {'geometry': edge_geom} })
        # set length attribute
        nx.set_edge_attributes(graph, { edge_d['uvkey']: {'length': round(edge_d['geometry'].length, 3)} })
        utils.print_progress(idx+1, edge_count, percentages=True)
    print('\nEdge geometries & lengths set.')
def osmid_to_string(osmid):
    """Build a deterministic string key from an OSM id or a list of ids.

    A list is sorted and each id is followed by an underscore, so [3, 1]
    yields '1_3_' (trailing underscore preserved for key compatibility);
    a scalar id is simply str()'d. Replaces the original quadratic '+='
    concatenation loop with a single join.
    """
    if isinstance(osmid, list):
        return ''.join(str(osm_id) + '_' for osm_id in sorted(osmid))
    return str(osmid)
def export_nodes_edges_to_files(graph):
    """Write the graph's nodes and edges to layers of data/networks.gpkg."""
    nodes, edges = ox.graph_to_gdfs(graph, nodes=True, edges=True, node_geometry=True, fill_edge_geometry=True)
    edges = edges[['geometry', 'u', 'v', 'length']]
    edges.to_file('data/networks.gpkg', layer='koskela_edges', driver="GPKG")
    nodes.to_file('data/networks.gpkg', layer='koskela_nodes', driver="GPKG")
def get_node_gdf(graph):
    """Return a GeoDataFrame of node geometries only."""
    node_gdf = ox.graph_to_gdfs(graph, nodes=True, edges=False, node_geometry=True, fill_edge_geometry=False)
    return node_gdf[['geometry']]
def get_node_geom(graph, node):
    """Return the shapely Point of a node from its x/y attributes."""
    node_d = graph.node[node]
    return Point(node_d['x'], node_d['y'])
def get_edge_geom_from_node_pair(graph, node_1, node_2):
    """Return a straight LineString between two nodes of the graph."""
    node_1_geom = geom_utils.get_point_from_xy(graph.nodes[node_1])
    node_2_geom = geom_utils.get_point_from_xy(graph.nodes[node_2])
    edge_line = LineString([node_1_geom, node_2_geom])
    return edge_line
def get_new_node_id(graph):
    """Return an unused node id (max existing id + 1; assumes int node ids)."""
    graph_nodes = graph.nodes
    return max(graph_nodes)+1
def get_new_node_attrs(graph, point):
    """Return attrs for a new node at `point` (projected CRS): id, x/y and
    lat/lon (from reprojecting the point to WGS84)."""
    new_node_id = get_new_node_id(graph)
    wgs_point = geom_utils.project_to_wgs(point)
    geom_attrs = {**geom_utils.get_xy_from_geom(point), **geom_utils.get_lat_lon_from_geom(wgs_point)}
    return { 'id': new_node_id, **geom_attrs }
def add_new_node_to_graph(graph, point, logging=True):
    """Add a node at `point` to the graph in place and return its new id."""
    attrs = get_new_node_attrs(graph, point)
    if (logging == True):
        print('add new node:', attrs['id'])
    graph.add_node(attrs['id'], ref='', x=attrs['x'], y=attrs['y'], lon=attrs['lon'], lat=attrs['lat'])
    return attrs['id']
def interpolate_link_noises(link_geom, edge_geom, edge_noises):
    """Scale an edge's per-dB noise exposure lengths by the ratio of the
    link's length to the full edge's length (assumes uniform exposure)."""
    link_noises = {}
    link_len_ratio = link_geom.length / edge_geom.length
    for db in edge_noises.keys():
        link_noises[db] = round(edge_noises[db] * link_len_ratio, 3)
    return link_noises
def get_edge_noise_cost_attrs(nts, db_costs, edge_d, link_geom):
    """Build noise attrs for a link split from edge_d: interpolated 'noises'
    plus one 'nc_<nt>' cost (length + weighted noise cost) per tolerance."""
    cost_attrs = {}
    # estimate link noises based on link length - edge length -ratio and edge noises
    cost_attrs['noises'] = interpolate_link_noises(link_geom, edge_d['geometry'], edge_d['noises'])
    # calculate noise tolerance specific noise costs
    for nt in nts:
        noise_cost = exps.get_noise_cost(noises=cost_attrs['noises'], db_costs=db_costs, nt=nt)
        cost_attrs['nc_'+str(nt)] = round(noise_cost + link_geom.length, 2)
    # sanity check: summed per-dB exposure lengths should match the link length
    noises_sum_len = exps.get_total_noises_len(cost_attrs['noises'])
    if ((noises_sum_len - link_geom.length) > 0.1):
        print('link length unmatch:', noises_sum_len, link_geom.length)
    return cost_attrs
def add_linking_edges_for_new_node(graph, new_node, split_point, edge, nts, db_costs, logging=False):
    """Split `edge` at `split_point` and connect `new_node` to both original
    end nodes with new edges (added in both directions) carrying interpolated
    noise-cost attributes. Returns a dict describing the created links."""
    edge_geom = edge['geometry']
    # split edge at new node to two line geometries
    split_lines = geom_utils.split_line_at_point(edge_geom, split_point)
    node_from = edge['uvkey'][0]
    node_to = edge['uvkey'][1]
    node_from_p = get_node_geom(graph, node_from)
    node_to_p = get_node_geom(graph, node_to)
    edge_first_p = Point(edge_geom.coords[0])
    # orient the two halves: link1 must touch node_from, link2 node_to
    if(edge_first_p.distance(node_from_p) < edge_first_p.distance(node_to_p)):
        link1 = split_lines[0]
        link2 = split_lines[1]
    else:
        link1 = split_lines[1]
        link2 = split_lines[0]
    if (logging == True):
        print('add linking edges between:', node_from, new_node, node_to)
    # interpolate noise cost attributes for new linking edges so that they work in quiet path routing
    link1_noise_costs = get_edge_noise_cost_attrs(nts, db_costs, edge, link1)
    link2_noise_costs = get_edge_noise_cost_attrs(nts, db_costs, edge, link2)
    # combine link attributes to prepare adding them as new edges
    link1_attrs = { 'geometry': link1, 'length' : round(link1.length, 3), **link1_noise_costs }
    link2_attrs = { 'geometry': link2, 'length' : round(link2.length, 3), **link2_noise_costs }
    # add linking edges with noice cost attributes to graph
    graph.add_edges_from([ (node_from, new_node, { 'uvkey': (node_from, new_node), **link1_attrs }) ])
    graph.add_edges_from([ (new_node, node_from, { 'uvkey': (new_node, node_from), **link1_attrs }) ])
    graph.add_edges_from([ (node_to, new_node, { 'uvkey': (node_to, new_node), **link2_attrs }) ])
    graph.add_edges_from([ (new_node, node_to, { 'uvkey': (new_node, node_to), **link2_attrs }) ])
    link1_d = { 'uvkey': (new_node, node_from), **link1_attrs }
    link2_d = { 'uvkey': (node_to, new_node), **link2_attrs }
    return { 'node_from': node_from, 'new_node': new_node, 'node_to': node_to, 'link1': link1_d, 'link2': link2_d }
def remove_new_node_and_link_edges(graph, new_node_d):
    """Undo add_linking_edges_for_new_node: remove the four linking edges and
    the temporary node, ignoring anything already missing."""
    if ('link_edges' in new_node_d.keys()):
        link_edges = new_node_d['link_edges']
        edges = [
            (link_edges['node_from'], link_edges['new_node']),
            (link_edges['new_node'], link_edges['node_from']),
            (link_edges['new_node'], link_edges['node_to']),
            (link_edges['node_to'], link_edges['new_node'])
            ]
        for edge in edges:
            try:
                graph.remove_edge(*edge)
            except Exception:
                continue
        try:
            graph.remove_node(link_edges['new_node'])
        except Exception:
            pass
def get_shortest_edge(edges, weight):
    """Return the edge-attribute dict with the smallest `weight` value.

    With a single edge it is returned unconditionally. An edge missing the
    weight attribute can never displace the current candidate; if the first
    edge lacks it, the first edge is returned unchanged.
    """
    candidates = edges.values()
    best = next(iter(candidates))
    if len(edges) == 1:
        return best
    for contender in candidates:
        if weight in contender and weight in best:
            if contender[weight] < best[weight]:
                best = contender
    return best
def get_edge_line_coords(graph, node_from, edge_d):
    """Return the edge geometry's coordinates oriented to start at node_from,
    reversing them when the line's first point is the far end."""
    from_point = geom_utils.get_point_from_xy(graph.nodes[node_from])
    edge_line = edge_d['geometry']
    edge_coords = edge_line.coords
    first_point = Point(edge_coords[0])
    last_point = Point(edge_coords[len(edge_coords)-1])
    if(from_point.distance(first_point) > from_point.distance(last_point)):
        return edge_coords[::-1]
    return edge_coords
def aggregate_path_geoms_attrs(graph, path, weight='length', geom=True, noises=False):
    """Aggregate attributes along a node path into one result dict.

    For each consecutive node pair the cheapest parallel edge (by `weight`)
    is used. With geom=True the result gets a merged 'geometry' LineString
    and 'total_length'; with noises=True an aggregated 'noises' exposure
    dict. Prints diagnostics for suspect edges but never raises.
    """
    result = {}
    edge_lengths = []
    path_coords = []
    edge_exps = []
    for idx in range(0, len(path)):
        if (idx == len(path)-1):
            break
        node_1 = path[idx]
        node_2 = path[idx+1]
        edges = graph[node_1][node_2]
        edge_d = get_shortest_edge(edges, weight)
        if geom:
            if ('nc_0.1') not in edge_d:
                print('missing noise cost attr')
            if ('geometry' in edge_d):
                edge_lengths.append(edge_d['length'])
                edge_coords = get_edge_line_coords(graph, node_1, edge_d)
            else:
                # fall back to a straight line when the edge has no geometry
                edge_line = get_edge_geom_from_node_pair(graph, node_1, node_2)
                edge_lengths.append(edge_line.length)
                edge_coords = edge_line.coords
            path_coords += edge_coords
            # sanity check: per-dB exposure lengths should not exceed edge length
            edge_noise_len_diff = (edge_d['length'] - exps.get_total_noises_len(edge_d['noises']))
            if (edge_noise_len_diff < -0.05):
                print('idx:', idx, 'from:', node_1, 'to:', node_2)
                print(' problems with edge:', edge_d['uvkey'], edge_d['noises'])
                print(' edge lens vs noise lens:', edge_d['length'], exps.get_total_noises_len(edge_d['noises']))
        if noises:
            if ('noises' in edge_d):
                edge_exps.append(edge_d['noises'])
    if geom:
        path_line = LineString(path_coords)
        total_length = round(sum(edge_lengths),2)
        result['geometry'] = path_line
        result['total_length'] = total_length
    if noises:
        result['noises'] = exps.aggregate_exposures(edge_exps)
    return result
def get_all_edge_dicts(graph, attrs=None, by_nodes=True):
    """Collect every edge of the (multi)graph as a dict with a 'uvkey'
    (u, v, k) identifier.

    With attrs given, only those attributes are copied; otherwise the edge's
    own attribute dict is used (by_nodes=True mutates it by inserting
    'uvkey'; by_nodes=False copies first). The two traversal modes can yield
    edges in different orders.
    """
    edge_dicts = []
    if (by_nodes == True):
        for node_from in list(graph.nodes):
            nodes_to = graph[node_from]
            for node_to in nodes_to.keys():
                # all edges between node-from and node-to as dict (usually)
                edges = graph[node_from][node_to]
                # usually only one edge is found between each origin-to-destination-node -pair
                # edge_k is unique identifier for edge between two nodes, integer (etc. 0 or 1)
                for edge_k in edges.keys():
                    # combine unique identifier for the edge
                    edge_uvkey = (node_from, node_to, edge_k)
                    ed = { 'uvkey': edge_uvkey }
                    # if attribute list is provided, get only the specified edge attributes
                    if (isinstance(attrs, list)):
                        for attr in attrs:
                            ed[attr] = edges[edge_k][attr]
                    else:
                        ed = edges[edge_k]
                        ed['uvkey'] = edge_uvkey
                    edge_dicts.append(ed)
        return edge_dicts
    else:
        for u, v, k, data in graph.edges(keys=True, data=True):
            edge_uvkey = (u, v, k)
            # edge dict contains all edge attributes
            ed = { 'uvkey': edge_uvkey }
            # if attribute list is provided, get only the specified edge attributes
            if (isinstance(attrs, list)):
                for attr in attrs:
                    ed[attr] = data[attr]
            else:
                ed = data.copy()
                ed['uvkey'] = edge_uvkey
            edge_dicts.append(ed)
        return edge_dicts
def get_edge_gdf(graph, attrs=None, by_nodes=True, subset=None, dicts=False):
    """Return the graph's edges as a GeoDataFrame (EPSG:3879), optionally
    truncated to the first `subset` rows; with dicts=True also return the
    underlying edge dicts."""
    edge_dicts = get_all_edge_dicts(graph, attrs=attrs, by_nodes=by_nodes)
    gdf = gpd.GeoDataFrame(edge_dicts, crs=from_epsg(3879))
    if (subset is not None):
        gdf = gdf[:subset]
    if (dicts == True):
        return gdf, edge_dicts
    else:
        return gdf
def update_edge_noises_to_graph(edge_gdf, graph):
    """Copy each row's 'noises' dict back onto its graph edge (keyed by 'uvkey')."""
    for edge in edge_gdf.itertuples():
        nx.set_edge_attributes(graph, { getattr(edge, 'uvkey'): { 'noises': getattr(edge, 'noises')}})
def update_edge_costs_to_graph(edge_gdf, graph, nt):
    """Copy each row's 'tot_cost' back onto its graph edge as attribute 'nc_<nt>'."""
    cost_attr = 'nc_'+str(nt)
    for edge in edge_gdf.itertuples():
        nx.set_edge_attributes(graph, { getattr(edge, 'uvkey'): { cost_attr: getattr(edge, 'tot_cost')}})
def set_graph_noise_costs(graph, edge_gdf, db_costs=None, nts=None):
    """For every noise tolerance in nts, compute length + weighted noise cost
    per edge and write it to the graph as 'nc_<nt>'."""
    edge_nc_gdf = edge_gdf.copy()
    for nt in nts:
        edge_nc_gdf['noise_cost'] = [exps.get_noise_cost(noises=noises, db_costs=db_costs, nt=nt) for noises in edge_nc_gdf['noises']]
        edge_nc_gdf['tot_cost'] = edge_nc_gdf.apply(lambda row: round(row['length'] + row['noise_cost'], 2), axis=1)
        update_edge_costs_to_graph(edge_nc_gdf, graph, nt)
991,773 | e7e06f5b3f145bccb4452650174660b8fb85ce45 | # Generated by Django 2.2.5 on 2021-02-21 17:16
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: redefine tech_support.user as a
    CharField(max_length=255). Generated code — do not hand-edit logic."""

    dependencies = [
        ('website', '0008_tech_support'),
    ]
    operations = [
        migrations.AlterField(
            model_name='tech_support',
            name='user',
            field=models.CharField(max_length=255),
        ),
    ]
|
991,774 | e38008f8a51fa74d56e4bd7d7825692bd57fac14 | #
# Copyright (C) 2019 Databricks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pandas as pd
import pandas.testing as mt
import numpy as np
from databricks import koalas
from databricks.koalas.testing.utils import ReusedSQLTestCase, SQLTestUtils
class SeriesStringTest(ReusedSQLTestCase, SQLTestUtils):
    """Tests that koalas Series.str operations match pandas behavior."""

    @property
    def pds1(self):
        # Fixture covering case variants, digits, empty string, leading /
        # trailing whitespace, and both missing-value flavors (None, NaN).
        return pd.Series(['apples', 'Bananas', 'carrots', '1', '100', '',
                          '\nleading-whitespace', 'trailing-whitespace \t',
                          None, np.NaN])
    def check_func(self, func):
        """Apply func to the fixture as both a koalas and a pandas Series and
        assert the results are equal (names ignored)."""
        ks1 = koalas.from_pandas(self.pds1)
        mt.assert_series_equal(
            func(ks1).toPandas(),
            func(self.pds1),
            check_names=False
        )
    def test_string_add_str_num(self):
        # str + int must raise, matching pandas.
        pdf = pd.DataFrame(dict(col1=['a'], col2=[1]))
        ds = koalas.from_pandas(pdf)
        with self.assertRaises(TypeError):
            ds['col1'] + ds['col2']
    def test_string_add_assign(self):
        pdf = pd.DataFrame(dict(col1=['a', 'b', 'c'], col2=['1', '2', '3']))
        ds = koalas.from_pandas(pdf)
        ds['col1'] += ds['col2']
        pdf['col1'] += pdf['col2']
        self.assert_eq((ds['col1']).to_pandas(), pdf['col1'])
    def test_string_add_str_str(self):
        pdf = pd.DataFrame(dict(col1=['a', 'b', 'c'], col2=['1', '2', '3']))
        ds = koalas.from_pandas(pdf)
        self.assert_eq((ds['col1'] + ds['col2']).to_pandas(), pdf['col1'] + pdf['col2'])
        self.assert_eq((ds['col2'] + ds['col1']).to_pandas(), pdf['col2'] + pdf['col1'])
    def test_string_add_str_lit(self):
        # string-literal concatenation on both sides
        pdf = pd.DataFrame(dict(col1=['a', 'b', 'c']))
        ds = koalas.from_pandas(pdf)
        self.assert_eq((ds['col1'] + '_lit').to_pandas(), pdf['col1'] + '_lit')
        self.assert_eq(('_lit' + ds['col1']).to_pandas(), '_lit' + pdf['col1'])
    def test_string_capitalize(self):
        self.check_func(lambda x: x.str.capitalize())
    def test_string_lower(self):
        self.check_func(lambda x: x.str.lower())
    def test_string_upper(self):
        self.check_func(lambda x: x.str.upper())
    def test_string_swapcase(self):
        self.check_func(lambda x: x.str.swapcase())
    def test_string_startswith(self):
        pattern = 'car'
        self.check_func(lambda x: x.str.startswith(pattern))
        self.check_func(lambda x: x.str.startswith(pattern, na=False))
    def test_string_endswith(self):
        pattern = 's'
        self.check_func(lambda x: x.str.endswith(pattern))
        self.check_func(lambda x: x.str.endswith(pattern, na=False))
    def test_string_strip(self):
        self.check_func(lambda x: x.str.strip())
        self.check_func(lambda x: x.str.strip('es\t'))
        self.check_func(lambda x: x.str.strip('1'))
    def test_string_lstrip(self):
        self.check_func(lambda x: x.str.lstrip())
        self.check_func(lambda x: x.str.lstrip('\n1le'))
        self.check_func(lambda x: x.str.lstrip('s'))
    def test_string_rstrip(self):
        self.check_func(lambda x: x.str.rstrip())
        self.check_func(lambda x: x.str.rstrip('\t ec'))
        self.check_func(lambda x: x.str.rstrip('0'))
    def test_string_get(self):
        self.check_func(lambda x: x.str.get(6))
        self.check_func(lambda x: x.str.get(-1))
    def test_string_isalnum(self):
        self.check_func(lambda x: x.str.isalnum())
    def test_string_isalpha(self):
        self.check_func(lambda x: x.str.isalpha())
    def test_string_isdigit(self):
        self.check_func(lambda x: x.str.isdigit())
    def test_string_isspace(self):
        self.check_func(lambda x: x.str.isspace())
    def test_string_islower(self):
        self.check_func(lambda x: x.str.islower())
    def test_string_isupper(self):
        self.check_func(lambda x: x.str.isupper())
    def test_string_istitle(self):
        self.check_func(lambda x: x.str.istitle())
    def test_string_isnumeric(self):
        self.check_func(lambda x: x.str.isnumeric())
    def test_string_isdecimal(self):
        self.check_func(lambda x: x.str.isdecimal())
|
991,775 | c81ddc73aca4123e4667141a822194c54f3b9006 | # -*- coding: utf-8 -*-
import time
from wechat.client import WeChat
w = WeChat()
w.login()
print(w.client.login_info)
while True:
time.sleep(100)
|
991,776 | f40d971a99cd46e8993ca606d6716a5eac615165 | import collections
reductions = [
# Simplifications
[ 'n', 'se', 'ne'],
[ 'n', 'sw', 'nw'],
[ 's', 'ne', 'se'],
[ 's', 'nw', 'sw'],
['sw', 'se', 's'],
['nw', 'ne', 'n'],
# Opposites
[ 'n', 's', '--'],
['sw', 'ne', '--'],
['nw', 'se', '--'],
]
def distance(steps):
counts = collections.defaultdict(lambda: 0, collections.Counter(steps))
for a, b, result in reductions * 2:
count = min(counts[a], counts[b])
counts[a] -= count
counts[b] -= count
counts[result] += count
return sum(counts.values()) - counts['--']
# Puzzle input: one comma-separated line of hex steps.
with open('data/11.txt', 'r') as file:
    steps = file.readline().strip().split(',')

print(f'Part one: {distance(steps)}')
# Part two wants the furthest distance ever reached, i.e. over every prefix
# INCLUDING the complete walk — the original range(len(steps)) stopped one
# prefix short and could miss the true maximum.
print(f'Part two: {max(distance(steps[:i]) for i in range(len(steps) + 1))}')
991,777 | 13b3b84e6d55890d70eedf527ebc83ace9984896 |
import sys, os
from glbase3 import *
sys.path.append("../sam_annotations/")
import sam_map
# Render figures as PDF rather than the default backend.
config.draw_mode = "pdf"
# Load expression data, harmonise sample names, log2-transform with a 0.1
# pseudocount, then draw a sample tree and a sample-correlation heatmap.
expn = glload("../te_counts/genes_ntc_expression.glb")
sam_map.remap_expn_sample_names(expn)
expn.log(2, 0.1)
expn.tree(filename="tree.png", color_threshold=0.0, label_size=4, size=(5,14))
expn.correlation_heatmap(filename="corr_heatmap.png",
    bracket=(0.0,1.0),
    size=(14,10),
    heat_wid=0.57,
    heat_hei=0.925,
    row_font_size=4)
|
991,778 | 6495711650a7ae4f032920cfa6f3ac079ec39ffa | from SmartDjango import models, E, Hc
@E.register(id_processor=E.idp_cls_prefix())
class ConfigError:
    """Error catalogue for Config operations (SmartDjango E instances)."""
    CREATE = E("更新配置错误", hc=Hc.InternalServerError)  # "failed to update config"
    NOT_FOUND = E("不存在的配置", hc=Hc.NotFound)  # "config key does not exist"
class Config(models.Model):
    """Simple key/value configuration store backed by one database table."""

    # Natural key; uniqueness enforced at the DB level.
    key = models.CharField(
        max_length=100,
        unique=True,
    )
    value = models.CharField(
        max_length=255,
    )

    @classmethod
    def get_config_by_key(cls, key):
        """Return the Config row for ``key``; raise ConfigError.NOT_FOUND if absent."""
        cls.validator(locals())
        try:
            return cls.objects.get(key=key)
        except cls.DoesNotExist as err:
            raise ConfigError.NOT_FOUND(debug_message=err)

    @classmethod
    def get_value_by_key(cls, key, default=None):
        """Return the stored value for ``key``, or ``default`` on any failure."""
        try:
            return cls.get_config_by_key(key).value
        except Exception:
            return default

    @classmethod
    def update_value(cls, key, value):
        """Upsert ``key`` to ``value``; raise ConfigError.CREATE on failure."""
        cls.validator(locals())
        try:
            config = cls.get_config_by_key(key)
            config.value = value
            config.save()
        except E as e:
            # NOT_FOUND means the row does not exist yet -> create it.
            if e == ConfigError.NOT_FOUND:
                try:
                    config = cls(
                        key=key,
                        value=value,
                    )
                    config.save()
                except Exception as err:
                    raise ConfigError.CREATE(debug_message=err)
            else:
                raise e
        except Exception as err:
            raise ConfigError.CREATE(debug_message=err)
class ConfigInstance:
    """Well-known configuration key names."""
    JWT_ENCODE_ALGO = 'JWT_ENCODE_ALGO'
    PROJECT_SECRET_KEY = 'PROJECT_SECRET_KEY'
    UPDATE_DATE = 'UPDATE_DATE'

# Short alias for importers.
CI = ConfigInstance
|
def sum_of_divisors(number):
    """Return the sum of the proper divisors of ``number``.

    Walks candidate divisors up to sqrt(number); 1 and an exact square root
    are added once, every other divisor d adds both d and number // d.
    Integer division keeps the result an int (the original's ``/`` produced
    floats under Python 3). Note: preserves the original quirk that
    sum_of_divisors(1) == 1.
    """
    x = 0
    for a in range(int(0.5 + (number ** 0.5))):
        d = a + 1
        if d * d == number or d == 1:
            # 1 and a perfect-square root are counted exactly once.
            x += d
        elif number % d == 0:
            x += d
            x += number // d  # the paired divisor above sqrt(number)
    return x
# Project Euler 23 work-in-progress (Python 2: bare print statements).
# Collect abundant numbers below 49, then mark numbers expressible as a sum
# of two abundants; the final summation is still commented out.
abundants = []
for a in range(1,49):
    b = sum_of_divisors(a)
    if b>a:
        abundants.append(a)
        print(b,a)
print abundants
sums = []
for c in range(1,60):
    if c % 100 == 0 :
        print(c)
    for d in abundants:
        if d > c:
            print([c,'no'])
            break
        if c - d in abundants:
            sums.append(c)
            print([c,'yes'])
            break
'''
print(sums)
f = 0
for e in range(1,28124):
    if e not in sums:
        f+=e
        print e
print f'''
991,780 | be25c4831533e712a29ac4e39ca13c814e1c32c7 | from flask import Flask
from flask_testing import TestCase
# from flask.ext.testing import TestCase
import unittest
# Prefer an installed flask_discoverer; fall back to the in-repo copy one
# directory up when the tests run from a source checkout.
try:
    from flask_discoverer import Discoverer, advertise
except ImportError:
    import sys
    sys.path.append('..')
    from flask_discoverer import Discoverer, advertise
class TestConfigOptions(TestCase):
    """Exercise Discoverer config: custom publish endpoint and self-publish flag."""

    def create_app(self):
        # Minimal app with one advertised route for the discoverer to publish.
        app = Flask(__name__, static_folder=None)
        @app.route('/foo')
        @advertise(thisattr='bar')
        def foo():
            '''foo docstring'''
            return "foo route"
        return app

    def test_resource_publish_endpoint1(self):
        # Endpoint override via constructor kwargs: default path must 404,
        # the overridden path must serve the advertised resources.
        discoverer = Discoverer(self.app, DISCOVERER_PUBLISH_ENDPOINT='/non-default-resources')
        self.assertEqual(self.app.config['DISCOVERER_PUBLISH_ENDPOINT'], '/non-default-resources')
        r = self.client.get('/resources')
        self.assertStatus(r, 404)
        r = self.client.get('/non-default-resources')
        self.assertStatus(r, 200)
        self.assertIn('/foo', r.json)

    def test_resource_publish_endpoint2(self):
        # Same override, but through the deferred init_app() pattern.
        discoverer = Discoverer()
        discoverer.init_app(self.app, DISCOVERER_PUBLISH_ENDPOINT='/non-default-resources2')
        self.assertEqual(self.app.config['DISCOVERER_PUBLISH_ENDPOINT'], '/non-default-resources2')
        r = self.client.get('/resources')
        self.assertStatus(r, 404)
        r = self.client.get('/non-default-resources2')
        self.assertStatus(r, 200)
        self.assertIn('/foo', r.json)

    def test_selfpublish_true(self):
        # When self-publishing, the publish endpoint lists itself.
        discoverer = Discoverer(self.app, DISCOVERER_SELF_PUBLISH=True)
        r = self.client.get(self.app.config['DISCOVERER_PUBLISH_ENDPOINT'])
        self.assertStatus(r, 200)
        self.assertIn(self.app.config['DISCOVERER_PUBLISH_ENDPOINT'], r.json)

    def test_selfpublish_false(self):
        # Without self-publishing, the endpoint must not list itself.
        discoverer = Discoverer(self.app, DISCOVERER_SELF_PUBLISH=False)
        r = self.client.get(self.app.config['DISCOVERER_PUBLISH_ENDPOINT'])
        self.assertStatus(r, 200)
        self.assertNotIn(self.app.config['DISCOVERER_PUBLISH_ENDPOINT'], r.json)
# Allow running this module directly, outside a pytest/nose runner.
if __name__ == '__main__':
    unittest.main(verbosity=2)
|
991,781 | 939aa4c17d16edcc202dac10323ec6245693aabd | from django.shortcuts import render, get_object_or_404
from .models import Product, CartDetail
from django.http import HttpResponse
import json, uuid
def index(request):
    """Render the home page with the full product catalogue."""
    return render(
        request,
        'webapp/index.html',
        {'latest_product_list': Product.objects.all()},
    )
def detail(request, product_id):
    """Render the detail page for one product; 404 when it does not exist."""
    found = get_object_or_404(Product, pk=product_id)
    return render(request, 'webapp/detail.html', {'product': found})
def sign_in(request):
    """Render the static sign-in page."""
    return render(request, 'webapp/sign_in.html')
def cart(request):
    """Render the cart page with the session's cart items and grand total.

    Fix: the original also ran ``CartDetail.objects.filter(...).values()``
    into an unused local ``cds`` -- a second, discarded database query.
    """
    cart_id = request.session.get('cart_id')
    latest_cart_list = CartDetail.objects.filter(cart_id=cart_id)
    total = 0
    for cd in latest_cart_list:
        cd_total = cd.quantity * cd.product_id.price  # line total for one product
        total += cd_total
        print('cd_total:', cd_total)
    context = {
        'latest_cart_list': latest_cart_list,
        'total': total,
    }
    return render(request, 'webapp/cart.html', context)
def menu(request):
    """Render the menu page listing every product."""
    products = Product.objects.all()
    return render(request, 'webapp/menu.html', {'latest_product_list': products})
def alter_cd(quantity, product, cart_id):
    """Create or update the CartDetail row for (cart_id, product).

    NOTE(review): quantity arrives as a POST string, so the falsiness guard
    rejects None/'' but NOT the string '0' -- confirm that is intended.
    """
    if quantity and product and cart_id:
        c_latest = CartDetail(quantity=quantity, product_id=product, cart_id=cart_id)
        if CartDetail.objects.filter(cart_id=cart_id, product_id=product):
            # Row already exists: update in place instead of inserting a duplicate.
            CartDetail.objects.filter(cart_id=cart_id, product_id=product).update(
                product_id=product, quantity=quantity)
            print('successfully updated')
        else:
            c_latest.save()
            print('successfully saved a new cart detail')
    else:
        print('error, null exists')
def add_count(request):
    """AJAX handler: raise a product's cart quantity to the posted count."""
    quantity = request.POST.get('nowCount')
    name = request.POST.get('name')
    product = Product.objects.get(name=name)
    cart_id = request.POST.get('cart_id')
    alter_cd(quantity, product, cart_id)
    # Remember the cart between requests.
    request.session['cart_id'] = cart_id
    print('add_count: ', quantity)
    print('add_name: ', name)
    print('cart_id: ', cart_id)
    data = {'result': 'true'}
    return HttpResponse(json.dumps(data))
def min_count(request):
    """AJAX handler: lower a product's cart quantity to the posted count.

    NOTE(review): unlike add_count, this does not store cart_id in the
    session -- confirm that asymmetry is intentional.
    """
    quantity = request.POST.get('nowCount')
    name = request.POST.get('name')
    product = Product.objects.get(name=name)
    cart_id = request.POST.get('cart_id')
    alter_cd(quantity, product, cart_id)
    print('min_count: ', quantity)
    print('min_name: ', name)
    print('cart_id: ', cart_id)
    data = {'result': 'true'}
    return HttpResponse(json.dumps(data))
|
991,782 | d6f0ec860f7267cdc19cc7320da6677f4224cee1 | #!/usr/bin/env python
import base64
import hashlib
import hmac
import json
import sys
import time
def base64_url_decode(input):
    """Decode URL-safe base64, restoring the '=' padding Facebook strips.

    Python 2 only: concatenating a str pad onto the .encode('ascii') result
    would raise TypeError under Python 3.
    """
    input = input.encode(u'ascii')
    input += '=' * (4 - (len(input) % 4))
    return base64.urlsafe_b64decode(input)
def parse_signed_request(input, secret, max_age=3600):
    """Validate and decode a Facebook ``signed_request``.

    Checks the declared algorithm, rejects envelopes older than ``max_age``
    seconds, and verifies the HMAC-SHA256 signature before returning the
    decoded envelope dict. Raises Exception on any validation failure.

    Fix: the signature is now compared with hmac.compare_digest (available
    since Python 2.7.7), which is constant-time; the original ``!=`` compare
    leaked timing information about the expected digest.
    """
    encoded_sig, encoded_envelope = input.split('.', 1)
    envelope = json.loads(base64_url_decode(encoded_envelope))
    algorithm = envelope['algorithm']
    if algorithm != 'HMAC-SHA256':
        raise Exception('Invalid request. (Unsupported algorithm.)')
    if envelope['issued_at'] < time.time() - max_age:
        raise Exception('Invalid request. (Too old.)')
    expected = hmac.new(
        secret, msg=encoded_envelope, digestmod=hashlib.sha256).digest()
    if not hmac.compare_digest(base64_url_decode(encoded_sig), expected):
        raise Exception('Invalid request. (Invalid signature.)')
    return envelope
# process from stdin
# Python 2 script body: read the raw signed_request from stdin and print the
# decoded envelope. SECURITY(review): the app secret is hard-coded in source.
input = sys.stdin.read()
secret = '13750c9911fec5865d01f3bd00bdf4db'
print json.dumps(parse_signed_request(input, secret)),
|
def computepay(h, r):
    """Return gross pay for h hours at rate r, paying time-and-a-half for
    any hours beyond 40."""
    if h <= 40:
        return h * r
    overtime_hours = h - 40
    overtime_rate = r * 1.5
    return 40 * r + overtime_hours * overtime_rate
# Prompt for hours and hourly rate, then print the computed pay.
hrs = input("Enter Hours:")
h = float(hrs)
rate = input("Enter Rate:")
r = float(rate)
p = computepay(h, r)
print("Pay", p)
|
991,784 | fe2690dab9b419a223ffa0edea51118dd9060df9 | from collections import Counter
from typing import List
def mostCommonWord(paragraph: str, banned: List[str]) -> str:
    """Return the most frequent word of *paragraph* that is not in *banned*.

    Words are lower-cased and split on every non-alphanumeric character.
    Ties resolve to the word that first reaches the winning count.
    """
    cleaned = ''.join(ch if ch.isalnum() else ' ' for ch in paragraph.lower())
    words = cleaned.split()
    freq = Counter(words)
    print(cleaned, words, freq)
    best_count = 0
    best_word = ""
    for word in words:
        if word not in banned and freq[word] > best_count:
            best_count = freq[word]
            best_word = word
    return best_word
# Smoke test. NOTE(review): banned is passed as the *string* "[]", not a
# list, so membership tests check the characters '[' and ']' -- likely a
# mistake in the ad-hoc call, harmless for this input.
print(mostCommonWord("Bob","[]"))
# Abandoned earlier attempt, deliberately parked inside a string literal:
"""paragraph = "Bob. hIt, baLl"
paragraph=paragraph.lower()
print("para",paragraph)
banned=["bob", "hit"]
i=0
p=[]
s=""
if len(banned)<1:
print(paragraph.strip("!?',;."))
while i<len(paragraph):
if paragraph[i].isspace()==False and paragraph[i] not in ("!?',;.") and i !=len(paragraph)-1:
s+=paragraph[i]
print(s,i,len(paragraph))
i+=1
else:
print(s,"insideelse")
if s not in banned:
print(s,"ss")
p.append(s)
print(p,len(p))
s = ""
i+=1
words=
print(words,"w")
c=Counter(p)
currmax=0
s1=""
for i in p:
print(i,c,"chk")
if currmax<c[i]:
currmax=c[i]
s1=i
#print(c)
"""
991,785 | b0a58e69ff8e70a0ee4c912de9dc9f236c9267bf | M,N = map(int,raw_input().split())
nums = [43, 88, 250, 290, 319, 312, 501, 177, 358, 24, 221, 30, 98, 591, 4, 66, 76, 37, 131, 6, 450, 188, 384, 241, 85, 291, 12, 505, 523, 480, 33, 183, 504, 419, 454, 44, 272, 104, 374, 133, 172, 427, 81, 190, 225, 458, 349, 418, 337, 266, 592, 444, 170, 306, 175, 296, 571, 542, 204, 362, 245, 150, 38, 431, 344, 451, 268, 564, 479, 158, 478, 534, 235, 205, 74, 28, 495, 403, 477, 402, 259, 262, 550, 34, 101, 73, 206, 132, 322, 224, 538, 410, 239, 153, 380, 200, 77, 369, 256, 559, 544, 125, 216, 128, 367, 575, 243, 353, 141, 305, 387, 393, 308, 541, 159, 396, 371, 144, 63, 255, 274, 377, 232, 90, 8, 270, 519, 149, 409, 439, 460, 594, 109, 389, 146, 332, 529, 96, 147, 512, 123, 57, 124, 187, 506, 459, 134, 449, 531, 487, 540, 569, 502, 244, 483, 82, 434, 372, 412, 385, 110, 563, 360, 152, 265, 189, 258, 32, 490, 67, 467, 155, 493, 27, 340, 545, 248, 113, 554, 597, 596, 279, 489, 31, 435, 168, 50, 593, 433, 48, 333, 193, 240, 75, 1, 103, 261, 581, 185, 560, 311, 357, 298, 548, 466, 328, 211, 482, 26, 330, 236, 271, 423, 356, 518, 51, 196, 421, 395, 578, 513, 58, 532, 135, 589, 442, 492, 60, 375, 565, 509, 192, 269, 417, 71, 530, 440, 378, 47, 416, 199, 587, 568, 137, 457, 408, 222, 64, 413, 447, 208, 576, 515, 230, 425, 61, 166, 180, 84, 511, 108, 346, 15, 195, 10, 286, 562, 320, 19, 105, 486, 405, 181, 359, 585, 201, 586, 257, 315, 100, 83, 580, 65, 106, 336, 35, 198, 173, 324, 283, 462, 234, 223, 91, 553, 294, 2, 182, 157, 426]
def findPrimes(n):
    # Sieve of Eratosthenes over a 2/3 wheel (Python 2: xrange, integer "/").
    # Returns the list of primes below n.
    n, correction = n-n%6+6, 2-(n%6>1)
    sieve = [True] * (n/3)
    for i in xrange(1,int(n**0.5)/3+1):
        if sieve[i]:
            # k is the wheel candidate represented by sieve index i.
            k=3*i+1|1
            sieve[ k*k/3 ::2*k] = [False] * ((n/6-k*k/6-1)/k+1)
            sieve[k*(k-2*(i&1)+4)/3::2*k] = [False] * ((n/6-k*(k-2*(i&1)+4)/6-1)/k+1)
    return [2,3] + [3*i+1|1 for i in xrange(1,n/3-correction) if sieve[i]]
# Primes up to 2*M, used for the pair-sum primality tests below.
# NOTE(review): list membership makes each "in primeList" test O(n); a set
# would be much faster.
primeList = findPrimes(2*M)
def findEdges():
    # Build (even, odd) pairs from nums whose sum is prime.
    # NOTE(review): appears unused in this file -- confirm before relying on it.
    pairs = []
    for i in nums:
        for j in nums:
            if (i != j) and ((i + j) in primeList):
                if i % 2 == 0:
                    pairs.append((i,j))
                else:
                    pairs.append((j,i))
    return pairs
# Get adjList-- it's a bipartite graph. S1 = odd vertices, S2 = even vertices
def findPrimePairs():
    # Adjacency dict: each even number maps to the odd numbers it forms a
    # prime sum with. Sentinels: -1 is the source (linked to every even key),
    # -2 the sink (every odd number links to it).
    pairs = {}
    for i in nums:
        for j in nums:
            if (i != j) and ((i + j) in primeList) and (i % 2 == 0):
                if i not in pairs:
                    pairs[i] = [j]
                    # -2 is end vertex
                    if j not in pairs:
                        pairs[j] = [-2]
                else:
                    pairs[i].append(j)
                    # -2 is end vertex
                    if j not in pairs:
                        pairs[j] = [-2]
    # -1 is start vertex
    pairs[-1] = []
    for i in pairs:
        if i % 2 == 0:
            pairs[-1].append(i)
    return pairs
# Build the bipartite graph once; cBfs/bfs/FF below all operate on it.
G = findPrimePairs()
#print E
def getEdges(path):
    # Convert a vertex path [v0, v1, ...] into edge tuples [(v0,v1), ...].
    # NOTE(review): appears unused in this file.
    tuplePath = []
    n = len(path)
    for i in range(0,n-1):
        tuplePath.append((path[i],path[i+1]))
    return tuplePath
def cBfs():
    # Cheap reachability BFS: is the sink (-2) reachable from the source (-1)?
    # Used to short-circuit when no matching can exist at all.
    vis = {}
    Q = []
    Q.append(-1)
    vis[-1] = 1
    while len(Q) > 0:
        u = Q.pop(0)
        if u in G:
            for v in G[u]:
                if v not in vis:
                    vis[v] = 1
                    Q.append(v)
                    if v == -2:
                        return True
    return False
# BFS from -1 to -2
def bfs():
    # Breadth-first search returning one simple path (vertex list) from the
    # source -1 to the sink -2, or [] when none exists.
    Q = []
    Q.append([-1])
    while len(Q) > 0:
        path = Q.pop(0)
        u = path[-1]
        if u == -2:
            return path
        for v in G[u]:
            newPath = list(path)
            if v not in newPath:
                newPath.append(v)
                Q.append(newPath)
    return []
#print G
# Ford Fulkerson on G to find max-flow, which is also maximum matching
def FF():
    # Augment along BFS paths; every edge has unit capacity, so max flow
    # equals the maximum bipartite matching size.
    # NOTE(review): saturated edges are removed from G directly instead of
    # being handled through residual reverse edges -- confirm intent.
    maxFlow = 0
    f = {}
    for u in G:
        for v in G[u]:
            f[(u,v)] = 0
            f[(v,u)] = 0
    path = bfs()
    while path and path != []:
        l = len(path)
        # Residual capacities (capacity 1 minus current flow).
        d = {}
        for e in f:
            d[e] = 1 - f[e]
        pathD = {}
        for i in range(l-1):
            e = (path[i],path[i+1])
            pathD[e] = d[e]
        # Bottleneck edge along the augmenting path.
        m = min(pathD,key=pathD.get)
        maxFlow += d[m]
        for i in range(l-1):
            e = (path[i],path[i+1])
            f[e] += d[m]
            f[(path[i+1],path[i])] -= d[m]
        # Find residual graph
        for u in G:
            for v in G[u]:
                if (1 - f[(u,v)] <= 0):
                    if v in G[u]:
                        G[u].remove(v)
        path = bfs()
    return maxFlow
# Python 2 prints: emit 0 when the sink is unreachable (no matching at all),
# otherwise the maximum matching size from Ford-Fulkerson.
found = False
if not cBfs():
    print 0
    found = True
if not found:
    print FF()
|
991,786 | 87efec1debb62562acc9c7bcb57ece143e054e6f | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from pyexplog.configuration import ParameterSpace, Configuration, ConfCollection
from pyexplog.manager import ExpManager, DummyExperiment, repeated_experiment
from pyexplog.log import ExpLog
import pandas as pd
from tables.nodes import filenode
import io
# Path to the results file.
results_file = 'results.h5'
# Parameter grid: the cross product of the two "fluchta" spaces, plus
# "machta" appended as its own branch.
p1 = ParameterSpace("fluchta", range(3))
p2 = ParameterSpace("fluchta 2", [2, 3, 4])
p3 = ParameterSpace("machta", [2, 3, 4])
s = p1 * p2 + p3
# WE STILL NEED TO HANDLE DEFAULTS, NaNs and all that SOMEHOW
# We need to decide how to handle updates to the structure of the logged
# confs. Should they be implicit or explicit, for an instance?
bb1 = {'burgina': 7}
bb2 = {'burgina': 11, 'zemiaky': 8}
base = Configuration({'mana': 17}, dende=15, fluchta=7, derived_from=[bb2, bb1])
col = ConfCollection(base, s)
explog = ExpLog(results_file)
manager = ExpManager(explog)
#file = filenode.new_node(explog.hdfstore._handle, where="/", name="machta2")
#file2 = filenode.open_node(explog.hdfstore.get_node("/machta2"), mode='a+')
#file2 = filenode.open_node(explog.hdfstore.get_node("/machta2"), mode='r')
#ff = io.TextIOWrapper(file, encoding='utf8')
# add filters to get_keys(), get_children(), result_keys() to only see
# dirs vs. only files.
# for result_keys(), we should return directories only by default
# also, when adding results, '' key should probably not be allowed
# Append a small DataFrame under the '' result key for one configuration.
explog.add_results(
    "EvoExperiment", {'param1': 111, 'param2': 222},
    {'': pd.DataFrame([[111, 222], [222, 111]],
                      columns=['cc1', 'cc2'])},
    mode='append'
)
#f = explog.open_results_file("EvoExperiment", {'param1': 111, 'param2': 222}, "test", mode='w', useFirstConf=True)
#f = explog.open_results_file("EvoExperiment", {'param1': 111, 'param2': 222}, "test", mode='r', useFirstConf=True)
print(explog.result_keys('EvoExperiment', [9]))
print(explog.select_results('EvoExperiment', [9]))
#explog.hdfstore.get()
#exp = DummyExperiment()
#manager.runExperiment(lambda c: repeated_experiment(exp, c, 2, n_jobs=1), col, "DummyExp")
#explog = ExpLog(results_file)
#res = explog.select_results('EvoExperiment', [0, 1])
#res = explog.select_results('EvoExperiment', [0, 1], [['in_metrics', 'out_metrics'],['in_metrics']])
#res = explog.select_results('EvoExperiment', [0, 1])
#explog.remove_results('EvoExperiment', [0], ['in_metrics'])
#print(explog.select_results('EvoExperiment', [0])[0].keys())
#explog.add_results('EvoExperiment', [0], res[0])
#import pandas as pd
#
#
#tt = explog.select_results("EvoExperiment", [0], result_key='in_metrics')
#print(tt)
#
#explog.add_results("EvoExperiment", 0, {'in_metrics': pd.DataFrame([[11, 22], [22, 11]], columns=['cc1', 'cc2'])}, mode='replace')
#
#tt = explog.select_results("EvoExperiment", 0, result_key='in_metrics')
#print(tt)
#
#explog.add_results("EvoExperiment", 0, {'in_metrics': pd.DataFrame([[111, 222], [222, 111]], columns=['cc1', 'cc2'])}, mode='append')
#
#tt = explog.select_results("EvoExperiment", 0, result_key='in_metrics')
#print(tt)
#explog.remove_results("EvoExperiment", 0, result_key='in_metrics')
#
#tt = explog.select_results("EvoExperiment", 0, result_key='in_metrics')
#print(tt)
|
991,787 | 98c865ee956880823ad312a2a920e68e597b1488 | import sys
from hangman import hangman
class MyInput:
    """Scripted stand-in for sys.stdin that replays canned lines in order."""

    def __init__(self, input_values):
        # Keep a reference (not a copy): the caller's list is consumed in place.
        self.__pending = input_values

    def readline(self):
        # Hand out the next scripted line; IndexError once exhausted.
        return self.__pending.pop(0)
def test_hangman(capsys):
    """Drive two hangman games off one scripted stdin.

    NOTE(review): indentation reconstructed from a whitespace-mangled source.
    The inner functions close over ``capsys`` and are invoked at the bottom,
    so they must be nested here; "abeolhxyznmo" feeds exactly both games
    (a,b,e,o,l,h for the win; x,y,z,n,m for the loss).
    """
    sys.stdin = MyInput(list("abeolhxyznmo"))

    def test_hangman_hello_won_p():
        # Two misses (a, b) then four hits (e, o, l, h) -> win.
        hangman.hangman(["hello"])
        out, err = capsys.readouterr()
        assert "You won!" in out
        expected = "".join(["Guess a letter:\n",
                            "Missed, mistake 1 out of 5\n",
                            "The word: *****\n",
                            "Guess a letter:\n",
                            "Missed, mistake 2 out of 5\n",
                            "The word: *****\n",
                            "Guess a letter:\n",
                            "Hit!\n",
                            "The word: *e***\n",
                            "Guess a letter:\n",
                            "Hit!\n",
                            "The word: *e**o\n",
                            "Guess a letter:\n",
                            "Hit!\n",
                            "The word: *ello\n",
                            "Guess a letter:\n",
                            "Hit!\n",
                            "The word: hello\n",
                            "You won!\n"])
        assert out == expected
        assert err == ''

    def test_hangman_lost_n():
        # Five straight misses (x, y, z, n, m) -> loss.
        hangman.hangman(["hello"])
        out, err = capsys.readouterr()
        assert "You lost!" in out
        expected = "".join(["Guess a letter:\n",
                            "Missed, mistake 1 out of 5\n",
                            "The word: *****\n",
                            "Guess a letter:\n",
                            "Missed, mistake 2 out of 5\n",
                            "The word: *****\n",
                            "Guess a letter:\n",
                            "Missed, mistake 3 out of 5\n",
                            "The word: *****\n",
                            "Guess a letter:\n",
                            "Missed, mistake 4 out of 5\n",
                            "The word: *****\n",
                            "Guess a letter:\n",
                            "Missed, mistake 5 out of 5\n",
                            "The word: *****\n",
                            "You lost!\n"])
        assert out == expected
        assert err == ''

    test_hangman_hello_won_p()
    test_hangman_lost_n()
|
# Read N test cases of "a b c" from the 'sum_of_digits' file; for each,
# compute a*b + c and print the sum of that result's decimal digits,
# space-separated on one line.
result_arr = []
# Fix: the original opened the file without ever closing it; the context
# manager guarantees the handle is released.
with open('sum_of_digits') as f:
    N = int(f.readline())
    for i in range(N):
        sum_digits = 0
        arr = f.readline().split()
        sum_number = int(arr[0]) * int(arr[1]) + int(arr[2])
        for ch in str(sum_number):
            sum_digits += int(ch)
        result_arr.append(sum_digits)
for ch in result_arr:
    print(ch, end=' ')
|
991,789 | df8006efbb2153b98ec5bb26415aa68d330c8bae | from abc import ABC, abstractmethod
import datetime as dt
import matplotlib.pyplot as plt
from sqlalchemy import create_engine
import pandas as pd
import simpleaudio as sa
import numpy as np
import court
class AbstractBall(ABC):
    """Interface for a pong ball: position, heading, speed and update hooks."""

    def __init__(self, xy, angle, speed):
        self._xy = xy  # (x, y) pair for center of ball; type: tuple(float, float)
        self._angle = angle  # angle, in radians, indicating the direction of the ball; type: float
        self._speed = speed  # speed, in units/second; type: float
        super().__init__()

    @abstractmethod
    def reset(self, xy, angle, speed):
        """Re-initialise position, heading and speed."""
        pass

    # dtime := time elapsed; type: dt.timedelta
    # returns: xy; type: tuple(float, float)
    @abstractmethod
    def update_location(self, dtime):
        """Advance the ball by dtime and return its new (x, y)."""
        pass

    @abstractmethod
    def get_artist(self):
        """Return the drawable (matplotlib artist) for this ball."""
        pass
class LinearSquare(AbstractBall):
    """Square pong ball moving in straight lines.

    Bounces off the top/bottom walls always; off the left/right sides only
    when training, or when the matching paddle actually covers the impact
    point. Side bounces accelerate the ball and play a click sound.
    """
    width = 0.04 * court.COURT_HEIGHT
    height = width
    acceleration = 1.05  # speed multiplier applied on every side bounce
    path_columns = ['TimeStamp','X','Y','Angle','Speed','Final_Y']

    def __init__(self, xy, angle, speed, left_paddle, right_paddle, training = True, ball_color='w'):
        super().__init__(xy, angle, speed)
        self._engine = create_engine('sqlite:///pong.db')
        self._artist = plt.Rectangle(self._get_lower_left(), LinearSquare.width, LinearSquare.height, color=ball_color)
        self._path_trace = pd.DataFrame(columns=LinearSquare.path_columns)
        self._path_start = dt.datetime.now()
        self._bounce_sound = sa.WaveObject.from_wave_file('click_x.wav')
        self._left_paddle = left_paddle
        self._right_paddle = right_paddle
        self._training = training

    def __del__(self):
        pass
        #print('Destructor called, LinearSquare Ball deleted.')

    def reset(self, xy, angle, speed):
        """Re-seed position/heading/speed (e.g. after a point)."""
        self._xy = xy  # (x, y) pair for center of ball; type: tuple(float, float)
        self._angle = angle  # angle, in radians, indicating the direction of the ball; type: float
        self._speed = speed  # speed, in units/second; type: float

    def _get_lower_left(self):
        # Matplotlib rectangles are anchored at their lower-left corner.
        return (self._xy[0] - 0.5 * LinearSquare.width, self._xy[1] - 0.5 * LinearSquare.height)

    def update_location(self, dtime):
        """Advance the ball by dtime, reflecting off walls/paddles; return new (x, y)."""
        dx = self._speed * dtime.total_seconds() * np.cos(self._angle)
        dy = self._speed * dtime.total_seconds() * np.sin(self._angle)
        #print(dx, dy, dtime, dtime.total_seconds())
        new_x = self._xy[0] + dx
        new_y = self._xy[1] + dy
        #print(f'Ball: {new_x}, {new_y}')
        if new_x < -court.COURT_WIDTH/2:
            # Left side: bounce unless a live game is on and the paddle misses.
            bounce = True
            if self._training == False:
                # Compute Final Y: interpolate where the ball crosses the wall.
                old_x = self._xy[0]
                ratio_before_bounce = (-0.5*court.COURT_WIDTH - old_x)/dx
                final_y = self._xy[1] + ratio_before_bounce * dy
                #print(f'Final Y {final_y}, ratio {ratio_before_bounce}, old y {self._xy[1]}')
                # Compare Final Y to paddle
                (paddle_bottom, paddle_top) = self._left_paddle.get_span()
                if final_y <= paddle_bottom or final_y >= paddle_top:
                    bounce = False
                #print(f'Left Paddle Span: ({paddle_bottom}, {paddle_top}), ball: {final_y}, bounce: {bounce}')
            if bounce:
                self._bounce_sound.play()
                # Mirror the overshoot back inside and reflect the heading.
                new_x = -court.COURT_WIDTH - new_x
                self._angle = np.pi - self._angle
                if self._angle < 0:
                    self._angle += 2 * np.pi
                self._speed *= LinearSquare.acceleration
        if new_x > court.COURT_WIDTH/2:
            # Right side: symmetric handling with the right paddle.
            bounce = True
            if self._training == False:
                # Compute Final Y
                old_x = self._xy[0]
                ratio_before_bounce = (0.5*court.COURT_WIDTH - old_x)/dx
                final_y = self._xy[1] + ratio_before_bounce * dy
                # Compare Final Y to paddle
                (paddle_bottom, paddle_top) = self._right_paddle.get_span()
                if final_y <= paddle_bottom or final_y >= paddle_top:
                    bounce = False
                #print(f'Right Paddle Span: ({paddle_bottom}, {paddle_top}), ball: {final_y}, bounce: {bounce}')
            if bounce:
                self._bounce_sound.play()
                new_x = court.COURT_WIDTH - new_x
                self._angle = np.pi - self._angle
                if self._angle < 0:
                    self._angle += 2 * np.pi
                self._speed *= LinearSquare.acceleration
        if new_y > court.COURT_HEIGHT/2:
            # Top wall: mirror vertically, no acceleration.
            self._bounce_sound.play()
            new_y = court.COURT_HEIGHT - new_y
            self._angle = -self._angle
        if new_y < -court.COURT_HEIGHT/2:
            # Bottom wall.
            self._bounce_sound.play()
            new_y = -court.COURT_HEIGHT - new_y
            self._angle = -self._angle
        self._xy = (new_x, new_y)
        #print(f'New XY: {self._xy}')
        self._artist.set_xy(self._get_lower_left())
        # new_row = {'TimeStamp':(dt.datetime.now() - self._path_start).total_seconds(),
        # 'X':self._xy[0],
        # 'Y':self._xy[1],
        # 'Angle':self._angle,
        # 'Speed':self._speed,
        # 'Final_Y':0.0}
        # self._path_trace.loc[self._path_trace.shape[0]] = new_row
        return (new_x, new_y)

    def get_artist(self):
        """Return the matplotlib Rectangle drawn for this ball."""
        return self._artist
|
991,790 | 670404546b047f4ef06f7abad8b3e6f6a1ccbb9b | from pymongo import MongoClient
from flask import Flask, jsonify, request
import json
import requests
import json
import bson
from datetime import datetime
from flask import send_file
import qrcodegen
import pymongo
import re
from flask_restplus import Api, Resource
from flask_swagger import swagger
from flask_cors import CORS
import qrcode
import random
from algo import assignDrones
import start
import ast
# import ssl
# SECURITY(review): Atlas credentials are hard-coded in source -- move them
# to environment variables / a secrets store and rotate the password.
client = MongoClient('mongodb+srv://ai-drone:oOIUq8IGcTVKy7JV@cluster0-igbga.mongodb.net/test?retryWrites=true&w=majority',27017)
# client=MongoClient('localhost',27017)
db=client.test
# Collection handles for the stakeholder app.
col=db.user
col1=db.drone
col2=db.inventory
col3=db.order
col4=db.mission
col5=db.userorders
col6=db.dronemissions
# Uniqueness constraints on the natural keys.
col.create_index([('email', pymongo.ASCENDING)], unique=True)
col1.create_index([('name', pymongo.ASCENDING)], unique=True)
col2.create_index([('name', pymongo.ASCENDING)], unique=True)
# col4.create_index([('orderid', pymongo.ASCENDING)], unique=True)
# context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
# context.use_privatekey_file('server.key')
# context.use_certificate_file('server.crt')
# FOR USER APP
db1=client.droneusers
usercol=db1.user
app = Flask(__name__)
CORS(app)
#WEB PAGE
@app.route('/', methods = ["GET"])
def getPage():
    """Landing / health-check route."""
    return "AI DRONE BACKEND"
# STAKEHOLDER APP APIs
@app.route('/signup', methods = ["POST"])
def signup():
    """Register a stakeholder account from JSON {name, email, password}.

    Returns JSON true on success, false on any validation failure or a
    duplicate e-mail. Fixes: the original compared ``re.search(...) ==
    False`` -- re.search returns a match object or None, never False, so the
    e-mail check could never reject anything; the debug loop that printed
    every stored user (including plain-text passwords) is removed.
    """
    data = request.json
    regex = '^\w+([\.-]?\w+)*@\w+([\.-]?\w+)*(\.\w{2,3})+$'
    name = str(data['name'])
    email = str(data['email'])
    password = str(data['password'])
    # Name validation
    if len(name) > 25:
        return json.dumps(False)
    # Email validation: test truthiness of the match object.
    if not re.search(regex, email):
        return json.dumps(False)
    # Password validation: >=8 chars, with lower, upper, digit, one of _@$,
    # and no whitespace.
    if (len(password) < 8
            or not re.search("[a-z]", password)
            or not re.search("[A-Z]", password)
            or not re.search("[0-9]", password)
            or not re.search("[_@$]", password)
            or re.search("\s", password)):
        return json.dumps(False)
    try:
        # SECURITY(review): password is stored in plain text -- hash it.
        col.insert({ "name": name, "email":email, "password":password},check_keys=False)
    except pymongo.errors.DuplicateKeyError as e:
        print(e)
        return json.dumps(False)
    return json.dumps(True)
@app.route('/validateUser', methods = ["POST"])
def validateUser():
    """Return JSON true iff posted {email, password} match a stored user.

    NOTE(review): loads the whole collection and compares plain-text
    passwords in Python; a filtered find_one plus hashed passwords would be
    both faster and safer.
    """
    data=request.json
    email=str(data['email'])
    password=str(data['password'])
    response = []
    documents=col.find()
    for document in documents:
        document['_id'] = str(document['_id'])
        response.append(document)
    flag=0
    for i in range(0,len(response)):
        emailreg=response[i]["email"]
        passwordreg=response[i]["password"]
        if(password==passwordreg and email==emailreg):
            flag=1
            break
    if(flag==1):
        return json.dumps(True)
    else:
        return json.dumps(False)
@app.route('/adddrone', methods = ["POST"])
def adddrone():
    """Register a drone; JSON false on duplicate name, true otherwise."""
    data=request.json
    name=str(data['name'])
    capacity=data['capacity']
    availability=str(data['availability'])
    image=str(data['image'])
    try:
        col1.insert({ "name": name, "capacity":capacity, "availability":availability,"image":image},check_keys=False)
    except pymongo.errors.DuplicateKeyError as e:
        print(e)
        return json.dumps(False)
    return json.dumps(True)
@app.route('/getdrones', methods = ["GET"])
def getdrones():
    """List every registered drone as JSON, ObjectId rendered as a string."""
    def _serializable(doc):
        doc['_id'] = str(doc['_id'])
        return doc
    return json.dumps([_serializable(doc) for doc in col1.find()])
@app.route('/readDronebyId', methods = ["POST"])
def readdronebyid():
    """Return the drone document(s) matching the posted ``_id`` as JSON.

    Fix: the original called json.dumps on the pymongo Cursor itself, which
    raises TypeError (a Cursor is not JSON serializable, and ObjectId fields
    must be stringified). Materialise the results like the sibling
    endpoints do.
    """
    data = request.json
    id = bson.ObjectId(data['_id'])
    response = []
    myquery = { "_id": id }
    for document in col1.find(myquery):
        document['_id'] = str(document['_id'])
        response.append(document)
    return json.dumps(response)
def readdronesbyid(id):
    """Internal helper: fetch one drone by id with its ``_id`` stringified.

    NOTE(review): returns the *last* matching document, and raises NameError
    (``document`` unbound) when nothing matches -- confirm callers always
    pass valid ids.
    """
    # data=request.json
    id=bson.ObjectId(id)
    response = []
    myquery = { "_id": id }
    documents=col1.find(myquery)
    for document in documents:
        document['_id'] = str(document['_id'])
        # response.append(document)
    return document
@app.route('/updateavailability', methods = ["PUT"])
def updateavailability():
    """Set a drone's availability flag, addressed by drone name."""
    data=request.json
    name=data['name']
    availability=data['availability']
    myquery = { "name": name }
    newvalues = { "$set": { "availability": availability } }
    col1.update_one(myquery, newvalues)
    # The find_one only distinguishes "drone exists" from "no such drone".
    if(col1.find_one({"name":name})):
        return "Updated"
    else:
        return "No such Drone"
@app.route('/addinventory', methods = ["POST"])
def addinventory():
    """Add an inventory item; availability is derived from the unit count."""
    data=request.json
    #id=data['id']
    name=str(data['name'])
    units=data['units']
    if(units>0):
        availability=True
    else:
        availability=False
    weight=data['weight']
    image=data['image']
    #
    try:
        col2.insert({ "name": name, "units":units, "availability":availability,"weight":weight,"image":image},check_keys=False)
    except pymongo.errors.DuplicateKeyError as e:
        print(e)
        return json.dumps(False)
    return json.dumps(True)
@app.route('/readInventoryItembyId', methods = ["POST"])
def readinventoryitembyid():
    """Return the inventory document(s) matching the posted ``_id`` as JSON.

    Fix: the original json.dumps'd the pymongo Cursor, which raises
    TypeError; materialise the results and stringify ObjectIds, mirroring
    the list endpoints.
    """
    data = request.json
    id = bson.ObjectId(data['_id'])
    response = []
    myquery = { "_id": id }
    for document in col2.find(myquery):
        document['_id'] = str(document['_id'])
        response.append(document)
    return json.dumps(response)
def readinventoryitemsbyid(id):
    """Internal helper: fetch one inventory item by id, ``_id`` stringified.

    NOTE(review): returns the last matching document and raises NameError
    when nothing matches -- confirm callers only pass valid ids.
    """
    id=bson.ObjectId(id)
    response = []
    myquery = { "_id": id }
    documents=col2.find(myquery)
    for document in documents:
        document['_id'] = str(document['_id'])
        # response.append(document)
    return document
@app.route('/fetchinventory', methods = ["GET"])
def fetchinventory():
    """Return every inventory document as JSON with stringified ObjectIds."""
    def _serializable(doc):
        doc['_id'] = str(doc['_id'])
        return doc
    return json.dumps([_serializable(doc) for doc in col2.find()])
@app.route('/updateunits', methods = ["PUT"])
def updateunits():
    """Set an inventory item's unit count and recompute its availability.

    Fix: the original issued two or three separate update_one round trips
    (one for units, one for availability); availability is simply
    ``units != 0`` (matching the original's units==0 -> False, else True),
    so a single $set covers both fields.
    """
    data = request.json
    name = data['name']
    units = data['units']
    col2.update_one(
        { "name": name },
        { "$set": { "units": units, "availability": units != 0 } },
    )
    # Distinguish "item exists" from "no such item" for the response text.
    if col2.find_one({"name": name}):
        return "Updated units"
    else:
        return "No such item found"
@app.route('/addorder', methods = ["POST"])
def addorder():
    """Create an order from the posted drone assignments; returns {orderId}."""
    data=request.json
    assigneddrones=data['AssignedDrones']
    dateTimeObj = datetime.now()
    timestamp=dateTimeObj.strftime("%d-%b-%Y (%H:%M:%S.%f)")
    # Trim the microseconds to a single digit, restoring the closing paren.
    timestamp=timestamp[:-5]
    timestamp+=")"
    odid=col3.insert({ "AssignedDrones": assigneddrones, "timestamp":timestamp},check_keys=False)
    ans={}
    ans['orderId']=str(odid)
    return json.dumps(ans)
    # return json.dumps(True)
@app.route('/readOrdersById', methods = ["POST"])
def readordersbyid():
    """Return one order with drone and inventory documents inlined.

    Each AssignedDrones entry has its drone_id/inventory_id references
    replaced by the fetched documents. NOTE(review): json.dumps(document)
    returns only the *last* matched order and raises NameError when nothing
    matches -- confirm ids are always valid.
    """
    data=request.json
    id=bson.ObjectId(data['_id'])
    response = []
    myquery = { "_id": id }
    documents=col3.find(myquery)
    for document in documents:
        document['_id'] = str(document['_id'])
        document['AssignedDrones']=(document['AssignedDrones'])
        print(document)
        print(document['_id'])
        print(document['AssignedDrones'])
        for x in document['AssignedDrones']:
            # Swap the drone reference for the full drone document.
            droneId = x['drone_id']
            x['drone'] = readdronesbyid(droneId)
            del x['drone_id']
            for y in x['inventoryItems']:
                # Inline the inventory document and fold quantity into it.
                inventoryId = y['inventory_id']
                y['inventory'] = readinventoryitemsbyid(inventoryId)
                del y['inventory_id']
                qty=y['quantity']
                y['inventory']['quantity']=qty
                y['inventory_item']=y['inventory']
                del y['inventory']
                del y['quantity']
        response.append(document)
    print("Response is")
    print(response)
    print(response)
    return json.dumps(document)
def readallordersbyid(id):
    """Internal helper: load the order ``id`` with drones/inventory inlined.

    Same expansion as readordersbyid, but returns the list of matched order
    dicts rather than a JSON string. NOTE(review): indentation reconstructed
    from a whitespace-mangled source; the many bare prints are debug traces.
    """
    print("Hello i am here")
    id=bson.ObjectId(id)
    response = []
    myquery = { "_id": id }
    documents=col3.find(myquery)
    answer=documents
    for document in documents:
        print("DOCUMENT")
        print(document)
        document['_id'] = str(document['_id'])
        document['AssignedDrones']=(document['AssignedDrones'])
        print("In here")
        print("Assigned Drones are :",document['AssignedDrones'])
        for x in document['AssignedDrones']:
            print("A")
            droneId = x['drone_id']
            print("B")
            x['drone'] = readdronesbyid(droneId)
            print("C")
            del x['drone_id']
            print("D")
            for y in x['inventoryItems']:
                print("Y is :",y)
                inventoryId = y['inventory_id']
                y['inventory'] = readinventoryitemsbyid(inventoryId)
                del y['inventory_id']
                qty=y['quantity']
                y['inventory']['quantity']=qty
                y['inventory_item']=y['inventory']
                del y['inventory']
                del y['quantity']
                # print(inventoryId)
            print("E")
        print("Out here")
        # newlist.append(x[0])
        # print(type(document['AssignedDrones']))
        # document['AssignedDrones']=newlist
        print("Document is :",document)
        response.append(document)
    # print("Response is")
    # print(response)
    print("Hello From Orders")
    # print("Document is :",document)
    print("Response is :",response)
    return response
@app.route('/fetchorders', methods = ["GET"])
def fetchorders():
    """Return every order as JSON with drone and inventory docs inlined."""
    response = []
    documents=col3.find()
    for document in documents:
        for x in document['AssignedDrones']:
            droneId = x['drone_id']
            x['drone'] = readdronesbyid(droneId)
            del x['drone_id']
            for y in x['inventoryItems']:
                inventoryId = y['inventory_id']
                y['inventory'] = readinventoryitemsbyid(inventoryId)
                print(inventoryId)
                del y['inventory_id']
        document['_id'] = str(document['_id'])
        response.append(document)
    return json.dumps(response)
@app.route('/createmission', methods = ["POST"])
def createmission():
    """Create a mission for an order.

    Asks the external waypoint service for a route between the posted source
    and destination coordinates, brackets the route with the endpoints,
    stores it in the mission collection and returns {mission_id}.
    NOTE(review): the waypoint service URL is a hard-coded plain-HTTP IP.
    """
    data=request.json
    #id=data['id']
    orderid=data['order_id']
    # dateOfMission=data["dateOfMission"]
    # timeOfDeparture=data["timeOfDeparture"]
    # timeOfDelivery=data["timeOfDelivery"]
    # timeOfArrival=data["timeOfArrival"]
    # distanceTravelled=data["distanceTravelled"]
    From=data["from"]
    To=data["to"]
    src_lat=data['src_lat']
    src_lon=data['src_lon']
    dest_lat=data['dest_lat']
    dest_lon=data['dest_lon']
    # clientPhotograph=data["clientPhotograph"]
    # waypoints=data["waypoints"]
    params = {
        "src":{
            "lat":src_lat,
            "lon":src_lon
        },
        "des":{
            "lat":dest_lat,
            "lon":dest_lon
        }
    }
    headers = {'content-type': 'application/json'}
    response = requests.post(
        'http://13.234.119.101/generate-waypoints',
        data=json.dumps(params),headers=headers)
    wp=response.json()
    print("WP :",wp)
    # Assemble the full path: source, service waypoints, destination.
    waypoints=[]
    temp={}
    temp['lat']=src_lat
    temp['lng']=src_lon
    waypoints.append(temp)
    for i in range(0,len(wp)):
        waypoints.append(wp[i]["waypoint"])
    print("Waypoints are :",waypoints)
    for i in range(0,len(waypoints)):
        print("Lat :"+str(waypoints[i]['lat'])+"Lon :"+str(waypoints[i]['lng']))
    temp={}
    temp['lat']=dest_lat
    temp['lng']=dest_lon
    waypoints.append(temp)
    try:
        # col4.insert({"orderid": orderid, "dateOfMission":dateOfMission,
        # "timeOfDeparture":timeOfDeparture,"timeOfDelivery":timeOfDelivery,"timeOfArrival":timeOfArrival,
        # "distanceTravelled":distanceTravelled,"From":From,"To":To,
        # "clientPhotograph":clientPhotograph,"waypoints":waypoints},check_keys=False)
        mid=col4.insert({"order_id": orderid,"from":From,"to":To,"waypoints":waypoints},check_keys=False)
    except pymongo.errors.DuplicateKeyError as e:
        print(e)
        return json.dumps(False)
    ans={}
    ans['mission_id']=str(mid)
    print("Answer is :",ans)
    print(type(ans))
    return json.dumps(ans)
@app.route('/readmissions', methods = ["GET"])
def readmissions():
    """Return every mission as JSON, each with its referenced order expanded
    via readallordersbyid and ``order_id`` removed.
    """
    print("Read Missions")
    response = []
    documents=col4.find()
    print("Hello")
    for document in documents:
        orderid=bson.ObjectId(document['order_id'])
        print("Hello")
        document['order']=readallordersbyid(orderid)
        print("World")
        del document['order_id']
        # Stringify the ObjectId so the result is JSON-serialisable.
        document['_id'] = str(document['_id'])
        response.append(document)
    print("Response",response)
    return json.dumps(response)
@app.route('/readMissionById', methods = ["POST"])
def readmissionbyid():
    """Return a single mission (JSON) by its ``_id``, with its order expanded.

    BUG FIX: the previous version returned the loop variable ``document``
    after the loop, which raised UnboundLocalError when nothing matched.
    Now returns JSON null for a missing id.
    """
    data = request.json
    id = bson.ObjectId(data['_id'])
    response = []
    for document in col4.find({"_id": id}):
        document['order'] = readallordersbyid(bson.ObjectId(document['order_id']))
        del document['order_id']
        document['_id'] = str(document['_id'])
        response.append(document)
    print("Response", response)
    # _id is unique, so there is at most one match.
    return json.dumps(response[0] if response else None)
# return json.dumps(document)
@app.route('/deleteMissionById', methods = ["DELETE"])
def deletemissionbyid():
    """Delete the mission with the posted ``_id``; respond with JSON true/false.

    Fixes two defects: the id was matched as a raw string (it never equals
    the ObjectId stored by createmission, so nothing was ever deleted), and
    the handler returned bare Python booleans, which Flask cannot turn into
    a response.
    """
    data = request.json
    myquery = {"_id": bson.ObjectId(data['_id'])}
    x = col4.delete_many(myquery)
    return json.dumps(x.deleted_count > 0)
@app.route('/get_image')
def get_image():
    """Generate a fresh QR code image and return it.

    The previous version branched on the ``type`` query arg but produced the
    same file in both branches, and declared the JPEG as ``image/gif``; the
    mimetype now matches the actual file format.
    """
    qrcodegen.genqrcode()
    return send_file('code.jpg', mimetype='image/jpeg')
@app.route('/assigndrone', methods = ["POST"])
def assigndrone():
    """Pack the posted products onto the available drones.

    Expands multi-unit products into single units, sorts drones by capacity,
    delegates the bin-packing to assignDrones(), then re-aggregates equal
    inventory ids into quantities per drone. Returns a JSON list of
    {drone_*, inventoryItems} dicts.
    """
    response = []
    dronesavailable=[]
    documents=col1.find()
    for document in documents:
        document['_id'] = str(document['_id'])
        response.append(document)
    data=request.json
    print("Response is :",response)
    # Availability is stored as the string 'True', not a boolean.
    for item in response:
        print("Item is :",item)
        if(item['availability']=='True'):
            dronesavailable.append([item['_id'],item['name'],item['capacity'],item['availability'],item['image']])
    print("Available drones are :")
    print(dronesavailable)
    # Parallel per-product lists, kept index-aligned throughout.
    id=[product['_id'] for product in data['product']]
    weight=[product['weight'] for product in data['product']]
    units=[product['units'] for product in data['product']]
    name=[product['name'] for product in data['product']]
    availability=[product['availability'] for product in data['product']]
    image=[product['image'] for product in data['product']]
    # Expand products with units > 1 into repeated single-unit entries.
    # NOTE(review): range(0,len(units)) is evaluated once while the lists are
    # grown by insert(); with more than one multi-unit product the later ones
    # appear not to be expanded — confirm with a multi-product order.
    for i in range(0,len(units)):
        if(units[i]>1):
            for j in range(1,units[i]):
                id.insert(i+1,id[i])
                weight.insert(i+1,weight[i])
                name.insert(i+1,name[i])
                availability.insert(i+1,availability[i])
                image.insert(i+1,image[i])
            units[i]=1
            units.insert(i+1,1)
    # Exchange sort: dronesavailable ascending by capacity (index 2).
    for i in range(0,len(dronesavailable)-1):
        for j in range(i+1,len(dronesavailable)):
            if(dronesavailable[i][2]>dronesavailable[j][2]):
                dronesavailable[i],dronesavailable[j]=dronesavailable[j],dronesavailable[i]
    print("Drones after arranging in decreasing order of their weight carrying capacity",dronesavailable)
    print("Id's are :",id)
    print("Weights are :",weight)
    visited=[]
    for i in range(0,len(weight)):
        visited.append(0)
    dronespace=[]
    dronesinfo=[]
    for i in range(0,len(dronesavailable)):
        dronespace.append(dronesavailable[i][2])
    for i in range(0,len(dronesavailable)):
        dronesinfo.append(dronesavailable[i])
    # Both derived lists re-sorted descending by capacity so that index i in
    # dronespace corresponds to index i in dronesinfo.
    for i in range(0,len(dronesinfo)-1):
        for j in range(i+1,len(dronesinfo)):
            if(dronesinfo[i][2]<dronesinfo[j][2]):
                dronesinfo[i],dronesinfo[j]=dronesinfo[j],dronesinfo[i]
    for i in range(0,len(dronespace)-1):
        for j in range(i+1,len(dronespace)):
            if(dronespace[i]<dronespace[j]):
                dronespace[i],dronespace[j]=dronespace[j],dronespace[i]
    print("Drone Space is : ",dronespace)
    # assignDrones returns, per drone, the list of item weights it carries
    # (0 for an unused drone) — inferred from the loop below; confirm.
    assigneddronesvalue=assignDrones(dronespace,weight)
    print("Assigned Drones are :",assigneddronesvalue)
    finalans=[]
    for i in range(0,len(assigneddronesvalue)):
        if(assigneddronesvalue[i]!=0):
            val=[]
            droneval={}
            # NOTE(review): inventoryval is created once per drone but
            # mutated and appended on every j iteration, so all entries in
            # val reference the same dict — later assignments overwrite
            # earlier items. Looks like it should be created inside the loop.
            inventoryval={}
            for j in range(0,len(assigneddronesvalue[i])):
                # Map the packed weight back to a product index, then mark
                # that index consumed so duplicates resolve to fresh slots.
                ind=weight.index(assigneddronesvalue[i][j])
                inventoryval['inventory_id']=id[ind]
                inventoryval['inventory_name']=name[ind]
                inventoryval['inventory_weight']=weight[ind]
                inventoryval['quantity']=1
                inventoryval['inventory_image']=image[ind]
                val.append(inventoryval)
                weight[ind]=-1
            key=str(dronesinfo[i])
            # Merge entries with the same inventory_id into one with quantity.
            for j in range(0,len(val)-1):
                if(val[j]!=0):
                    for k in range(j+1,len(val)):
                        if(val[j]['inventory_id']==val[k]['inventory_id']):
                            val[j]['quantity']+=1
                            val[k]=0
            val=list(filter(lambda a: a != 0, val))
            droneval['drone_id']=dronesinfo[i][0]
            droneval['drone_name']=dronesinfo[i][1]
            droneval['drone_capacity']=dronesinfo[i][2]
            droneval['drone_availability']=dronesinfo[i][3]
            droneval['drone_image']=dronesinfo[i][4]
            droneval['inventoryItems']=val
            finalans.append(droneval)
    print("Final Answer is :",finalans)
    return json.dumps(finalans)
#USER APP APIS START HERE
# Module-level handshake state for the QR pickup flow: the last generated
# code and the last scanned code. NOTE(review): module globals do not
# survive multiple worker processes — confirm single-process deployment.
qrcodenumber=-1
qrscannumber=-1
@app.route('/usersignup', methods = ["POST"])
def usersignup():
    """Register a new user from posted JSON {name, email, password}.

    Responds JSON true on success, false when validation fails or the email
    already exists (duplicate key).

    BUG FIX: the old email check was ``re.search(regex, email) == False``;
    re.search returns a Match or None, never False, so invalid emails were
    never rejected. The while-True/flag construction for the password rules
    is replaced by one boolean expression with the same conditions.
    """
    data = json.loads(request.data.decode('utf8'))
    print("Data=", data)
    # Raw string so the backslash classes reach the regex engine unmangled.
    regex = r'^\w+([\.-]?\w+)*@\w+([\.-]?\w+)*(\.\w{2,3})+$'
    name = str(data['name'])
    email = str(data['email'])
    password = str(data['password'])
    # Name validation: at most 25 characters.
    if len(name) > 25:
        return json.dumps(False)
    # Email validation.
    if re.search(regex, email) is None:
        return json.dumps(False)
    # Password rules: >= 8 chars, a lowercase, an uppercase, a digit,
    # one of _@$, and no whitespace.
    if (len(password) < 8
            or not re.search(r"[a-z]", password)
            or not re.search(r"[A-Z]", password)
            or not re.search(r"[0-9]", password)
            or not re.search(r"[_@$]", password)
            or re.search(r"\s", password)):
        return json.dumps(False)
    # NOTE(review): the password is stored in plaintext; hash it (e.g. with
    # werkzeug.security.generate_password_hash) before inserting.
    try:
        usercol.insert({"name": name, "email": email, "password": password}, check_keys=False)
    except pymongo.errors.DuplicateKeyError as e:
        print(e)
        return json.dumps(False)
    return json.dumps(True)
@app.route('/userValidation', methods = ["POST"])
def UserValidation():
    """Check posted email/password against stored users.

    Returns the matching user's ``_id`` as a JSON string, or JSON false.

    NOTE(review): credentials are compared in plaintext — storage appears to
    be unhashed; confirm and migrate to hashed passwords.
    """
    data=request.data
    data = json.loads(data.decode('utf8'))
    print(data)
    email=str(data['email'])
    password=str(data['password'])
    print(email)
    print(password)
    response = []
    documents=usercol.find()
    for document in documents:
        document['_id'] = str(document['_id'])
        response.append(document)
    # Linear scan over all users; flag records whether a match was found.
    flag=0
    for i in range(0,len(response)):
        emailreg=response[i]["email"]
        passwordreg=response[i]["password"]
        userid=response[i]['_id']
        if(password==passwordreg and email==emailreg):
            flag=1
            break
    if(flag==1):
        return json.dumps(userid)
    else:
        return json.dumps(False)
@app.route('/placeOrder', methods = ["POST"])
def placeOrder():
    """Store a user's order (user_id, status, timestamp, order items) in col5.

    Returns JSON true on success, false on a duplicate key.
    """
    data=request.data
    data = json.loads(data.decode('utf8'))
    userid=str(data['user_id'])
    status=data['status']
    order=data['order']
    timestamp=data['timestamp']
    print(userid)
    try:
        col5.insert({"user_id":userid,"status":status,"timestamp":timestamp,"order":order},check_keys=False)
    except pymongo.errors.DuplicateKeyError as e:
        print(e)
        return json.dumps(False)
    return json.dumps(True)
@app.route('/getQrCode', methods = ["POST"])
def getQrCode():
    """Generate the number to be encoded in the pickup QR code and remember
    it for the later /checkQrCode comparison.

    NOTE(review): random.randint is not cryptographically secure and a
    product of five draws has a biased distribution; use the ``secrets``
    module if this value gates package hand-over.
    """
    data=request.data
    data = json.loads(data.decode('utf8'))
    number=random.randint(1,100)
    number=number*random.randint(1,100)
    number=number*random.randint(1,100)
    number=number*random.randint(1,100)
    number=number*random.randint(1,100)
    print(number)
    # Stash the expected value in module state for /checkQrCode.
    global qrcodenumber
    qrcodenumber=number
    return json.dumps(number)
@app.route('/setQrScanNumber', methods = ["POST"])
def setQrScanNumber():
    """Record the number read by the scanner (form field 'qrscannumber')
    in module state for the later /checkQrCode comparison."""
    data=request.form['qrscannumber']
    print("Data is :",data)
    global qrscannumber
    qrscannumber=data
    print(qrscannumber)
    return json.dumps(True)
@app.route('/checkQrCode', methods = ["POST"])
def checkQrCode():
    """Compare the last generated QR number with the last scanned one.

    Returns JSON true (and resets the scanned value) on a match, else false.
    """
    data=request.data
    data = json.loads(data.decode('utf8'))
    print(data)
    global qrcodenumber
    global qrscannumber
    print("Hello")
    print("QRCODE SCAN NUMBER :",qrscannumber)
    print("QRCODE GEN NUMBER :",qrcodenumber)
    # The scanned value arrives as a form string; compare both as strings.
    if(str(qrcodenumber)==str(qrscannumber)):
        qrscannumber=-1
        return json.dumps(True)
    else:
        return json.dumps(False)
# DRONE BASED APIS
# Base port for spawned drone-control processes; bumped per /coordinates call.
portno=5750
#COORDINATE API
@app.route('/coordinates', methods=['POST'])
def givelocation():
    """Launch a drone flight between the posted src/des coordinates.

    Request JSON: src{lat,lon}, des{lat,lon}, user_id, mission_id. Each
    launch gets a fresh port from the module-level counter and is handed to
    start.execute(); responds with an echo of the coordinates.
    """
    try:
        _json = request.json
        print("Data is :",_json)
        src=_json['src']
        des=_json['des']
        print("SRC is :"+str(src)+"DES is"+str(des))
        src_lat = src['lat']
        src_lon = src['lon']
        des_lat = des['lat']
        des_lon = des['lon']
        print("Coordinates are :",src_lat,src_lon,des_lat,des_lon)
        user_id = _json['user_id']
        mission_id = _json['mission_id']
        # Each flight-controller instance listens on its own port.
        global portno
        portno+=10
        print("Called Start")
        start.execute(src_lat,src_lon,des_lat,des_lon,portno,user_id,mission_id)
        print("Came from Start")
        time.sleep(5)
        return "SRC Latitude is :"+str(src_lat)+"SRC Longitude is :"+str(src_lon)+"DES Latitude is :"+str(des_lat)+"DEST Longitude is :"+str(des_lon)
    except Exception as e:
        print(e)
        # NOTE(review): any failure is masked as a 200 "Hello World" body.
        return "Hello World"
@app.route('/pushCoordinates', methods=['POST'])
def pushCoordinates():
    """Upsert the latest telemetry for a user's active drone mission.

    Form fields carry position, speeds, distances/times to client and
    warehouse, and vehicle status. One telemetry row is kept per user:
    inserted on first sight, overwritten ($set) afterwards. Returns JSON
    true on success; on any error logs it and returns the legacy
    "Hello World" body (kept for client compatibility).

    Improvement: the 23-field record dict was previously duplicated verbatim
    between the insert and update branches; it is now built once. Existence
    is checked with find_one instead of materialising the whole cursor.
    """
    try:
        form = request.form
        userid = form['userid']
        dateTimeObj = datetime.now()
        timestamp = dateTimeObj.strftime("%d-%b-%Y (%H:%M:%S.%f)")
        # Trim the last five microsecond digits and re-close the parenthesis.
        timestamp = timestamp[:-5] + ")"
        # Single source of truth for the stored record.
        record = {"user_id": userid,
                  "mission_id": form['missionid'],
                  "latitude": form['latitude'],
                  "longitude": form['longitude'],
                  "altitude": form['altitude'],
                  "velocity": form['velocity'],
                  "speed": form['speed'],
                  "client_distance": form['clientdistance'],
                  "warehouse_distance": form['warehousedistance'],
                  "vicinity": form['vicinity'],
                  "client_time": form['clienttime'],
                  "warehouse_time": form['warehousetime'],
                  "gimbal_status": form['gimbalStatus'],
                  "battery": form['battery'],
                  "last_heart_beat": form['lastHeartBeat'],
                  "is_armable": form['isArmable'],
                  "system_status": form['systemStatus'],
                  "ground_speed": form['groundSpeed'],
                  "air_speed": form['airSpeed'],
                  "mode": form['mode'],
                  "armed": form['armed'],
                  "next_waypoint": form['next_waypoint'],
                  "distance_to_next_waypoint": form['distance_to_next_waypoint'],
                  "timestamp": timestamp}
        myquery = {"user_id": userid}
        if col6.find_one(myquery) is None:
            col6.insert(record, check_keys=False)
        else:
            col6.update_one(myquery, {"$set": record})
        return json.dumps(True)
    except Exception as e:
        print(e)
        # NOTE(review): any failure is masked as a 200 "Hello World" body.
        return "Hello World"
@app.route('/readCoordinatesByUserId', methods = ["POST"])
def readcoordinatesbyuserid():
    """Return every telemetry record stored for the posted ``user_id``."""
    requested_user = request.json['user_id']
    records = []
    for record in col6.find({"user_id": requested_user}):
        # ObjectId is not JSON-serialisable; stringify before dumping.
        record['_id'] = str(record['_id'])
        records.append(record)
    print("Response is :", records)
    return json.dumps(records)
@app.route('/readCoordinatesByMissionId', methods = ["POST"])
def readcoordinatesbymissionid():
    """Return every telemetry record stored for the posted ``mission_id``."""
    mission = request.json['mission_id']
    cursor = col6.find({"mission_id": mission})
    # Stringify each ObjectId so the documents are JSON-serialisable.
    response = [dict(doc, _id=str(doc['_id'])) for doc in cursor]
    print("Response is :", response)
    return json.dumps(response)
if __name__ == '__main__':
    # app.run(host='0.0.0.0',port=443,debug = True,ssl_context=('cert.pem', 'key.pem'))
    # app.run(host='127.0.0.1',port=5000,debug = True,ssl_context='adhoc')
    # NOTE(review): debug=True exposes the Werkzeug debugger on a public
    # interface (0.0.0.0:80) — disable debug for any non-local deployment.
    app.run(host='0.0.0.0',port=80,debug = True)
|
991,791 | 2301733dd739c486b6f19d54037920fa77df30c0 | from distutils.core import setup
# Packaging metadata for sleepylyze.
setup(
    name='sleepylyze',
    version='0.1dev',
    author='J. Gottshall',
    author_email='jackie.gottshall@gmail.com',
    packages=['sleepylyze', ],
    url='https://github.com/jag2037/sleepylyze',
    license='',
    # BUG FIX: the comma after description was missing, making
    # "description=... long_description=..." a SyntaxError.
    description='Python analysis of EEG sleep architecture',
    long_description=open('README.txt').read(),
    # Only real PyPI distributions belong here. Standard-library modules
    # (datetime, io, json, math, os, re, glob, statistics, warnings) were
    # removed: pip would try — and fail — to install them from PyPI.
    install_requires=[
        'numpy',
        'pandas',
        'psycopg2',
        'sqlalchemy',
        'xlsxwriter',
        'mne',
        'scipy'
    ],
)
991,792 | ed38321193f4a2593ce81d9e72b027924ab0127f | from hashlib import md5
secret = "bgvyzdsv"

def find_index(secret, prefix, start=0):
    """Return the smallest integer i > start such that
    md5(secret + str(i)) has a hex digest starting with ``prefix``.
    """
    i = start
    while True:
        i += 1
        if md5((secret + str(i)).encode()).hexdigest().startswith(prefix):
            return i

# Guarded so importing this module does not run the (long) brute-force
# search; previously both loops executed at import time.
if __name__ == "__main__":
    # part 1: five leading zeros
    i = find_index(secret, "00000")
    print("part 1:\ti: {}, hash: {}".format(
        i, md5((secret + str(i)).encode()).hexdigest()))
    # part 2: keep counting from part 1's answer for six leading zeros
    # (assumes part 1's own hash does not already have six zeros).
    i = find_index(secret, "000000", start=i)
    print("part 2:\ti: {}, hash: {}".format(
        i, md5((secret + str(i)).encode()).hexdigest()))
|
991,793 | 8f2f7037219045cb7d825333034150b130bf4c19 | r"""Models of galaxy luminosities.
Models
======
.. autosummary::
:nosignatures:
:toctree: ../api/
herbel_luminosities
"""
import numpy as np
import skypy.utils.astronomy as astro
from skypy.utils.random import schechter
def herbel_luminosities(redshift, alpha, a_m, b_m, size=None,
                        x_min=0.00305,
                        x_max=1100.0, resolution=100):
    r"""Model of Herbel et al (2017)
    Luminosities following the Schechter luminosity function following the
    Herbel et al. [1]_ model.
    Parameters
    ----------
    redshift : (nz,) array-like
        The redshift values at which to sample luminosities.
    alpha : float or int
        The alpha parameter in the Schechter luminosity function.
    a_m, b_m : float or int
        Factors parameterising the characteristic absolute magnitude M_* as
        a linear function of redshift according to Equation 3.3 in [1]_.
    size: int, optional
        Output shape of luminosity samples. If size is None and redshift
        is a scalar, a single sample is returned. If size is None and
        redshift is an array, an array of samples is returned with the same
        shape as redshift.
    x_min, x_max : float or int, optional
        Lower and upper luminosity bounds in units of L*.
    resolution : int, optional
        Resolution of the inverse transform sampling spline. Default is 100.
    Returns
    -------
    luminosity : array_like
        Drawn luminosities from the Schechter luminosity function.
    Notes
    -----
    The Schechter luminosity function is given as
    .. math::
        \Phi(L, z) = \frac{\Phi_\star(z)}{L_\star(z)}
            \left(\frac{L}{L_\star(z)}\right)^\alpha
            \exp\left(-\frac{L}{L_\star(z)}\right) \;.
    Here the luminosity is defined as
    .. math::
        L = 10^{-0.4M} \;,
    with absolute magnitude :math:`M`. Furthermore, Herbel et al. [1]_
    introduced
    .. math::
        \Phi_\star(z) = b_\phi \exp(a_\phi z) \;,
        M_\star(z) = a_M z + b_M \;.
    Now we have to rescale the Schechter function by the comoving element and
    get
    .. math::
        \phi(L,z) = \frac{d_H d_M^2}{E(z)}  \Phi(L,z)\;.
    References
    ----------
    .. [1] Herbel J., Kacprzak T., Amara A. et al., 2017, Journal of Cosmology
           and Astroparticle Physics, Issue 08, article id. 035 (2017)
    Examples
    --------
    >>> import skypy.galaxy.luminosity as lum
    Sample 100 luminosity values at redshift z = 1.0 with
    a_m = -0.9408582, b_m = -20.40492365, alpha = -1.3.
    >>> luminosities = lum.herbel_luminosities(1.0, -1.3, -0.9408582,
    ...                                        -20.40492365, size=100)
    Sample a luminosity value for every redshift in an array z with
    a_m = -0.9408582, b_m = -20.40492365, alpha = -1.3.
    >>> z = np.linspace(0,2, 100)
    >>> luminosities = lum.herbel_luminosities(z, -1.3, -0.9408582,
    ...                                        -20.40492365)
    """
    # Broadcast: one sample per redshift entry when size is not given.
    if size is None and np.shape(redshift):
        size = np.shape(redshift)
    luminosity_star = _calculate_luminosity_star(redshift, a_m, b_m)
    # Draw x = L/L* from the Schechter distribution, then rescale by L*(z).
    x_sample = schechter(alpha, x_min, x_max, resolution=resolution, size=size)
    return luminosity_star * x_sample
def _calculate_luminosity_star(redshift, a_m, b_m):
    """Characteristic luminosity L*(z) from the linear M*(z) = a_m*z + b_m."""
    m_star = a_m * redshift + b_m
    return astro.luminosity_from_absolute_magnitude(m_star)
|
991,794 | 14735a60820f780205cf6b142f0f6921bacdd63b | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from distutils.core import setup
# Packaging metadata for STJ_PV.
DESCRIPTION = "STJ_PV: SubTropical Jet Finding via PV Gradient Method"
LONG_DESCRIPTION = """STJ_PV Provides a framework for testing metrics of the subtopical
jet against one another."""
AUTHOR = 'Penelope Maher, Michael Kelleher'
setup(
    name='STJ_PV',
    version='1.0.0',
    author=AUTHOR,
    author_email='p.maher@exeter.ac.uk, kelleherme@ornl.gov',
    packages=['STJ_PV', ],
    license='BSD',
    description=DESCRIPTION,
    long_description=LONG_DESCRIPTION,
    # Third-party requirements with minimum versions pinned.
    install_requires=[
        "basemap>=1.1.0",
        "dask>=2.0.0",
        "matplotlib>=2.1.0",
        "netCDF4>=1.2.4",
        "numpy>=1.11.3",
        "pandas>=0.20.0",
        "psutil>=5.0.1",
        "PyYAML>=3.12",
        "scipy>=0.19.0",
        "seaborn>=0.9.0",
        "xarray>=0.9.0",
    ],
)
|
991,795 | aec4c9f99cc6d4ba90c008c4ce9544e7108c7169 | #!/usr/bin/env python
"""Unittests for testing.py"""
import sys
import testing
class TestCase(testing.TestCase):
    """Self-tests for the helpers provided by the local testing module.

    NOTE(review): Python 2 source (bare ``print`` statement); keep running
    under a Python 2 interpreter or port before upgrading.
    """
    def testBasic(self):
        """The most basic test."""
        self.assertEqual(True, True)
    def test_assert_iterator(self):
        """Test the test_iterator() method"""
        iterator = iter([1,2,3,4])
        # Should pass
        self.assert_iterator(iterator,
                             count=4,
                             assert_item_function=lambda i: i>0)
    def test_stdout_to_pipe(self):
        """Test the stdout_to_pipe() context manager"""
        original_stdout = sys.stdout
        with self.stdout_to_pipe() as output:
            self.assertNotEqual(original_stdout, sys.stdout)
            print "Hello world!"
            self.assertEqual(output.readline(), "Hello world!\n")
            # Line without CR should be readable after closing
            sys.stdout.write("Goodbye")
        self.assertEqual(original_stdout, sys.stdout)
        # Now that writing side is closed, we should be able to read
        # up to EOF.
        self.assertEqual(output.readline(), "Goodbye")
    def test_pipe_to_stdin(self):
        """Test the pipe_to_stdin() context manager"""
        original_stdin = sys.stdin
        with self.pipe_to_stdin() as input:
            self.assertNotEqual(original_stdin, sys.stdin)
            input.write("Hello world!\n")
            self.assertEqual(sys.stdin.readline(), "Hello world!\n")
        self.assertEqual(original_stdin, sys.stdin)
# Run the suite when executed directly.
if __name__ == "__main__":
    testing.main()
|
991,796 | 153a7be9e6eb14d3e6952eac2dc3c794304f9872 | from django.contrib import admin
from productdetails.models import product
# Expose the product model in the Django admin interface.
admin.site.register(product)
|
991,797 | 87cce7135900ebf0e819977e1654b4451a2facf6 | from unittest import TestCase
from time import sleep
from datetime import datetime
from ctparse.ctparse import _timeout, ctparse, _seq_match, _match_rule
from ctparse.types import Time
class TestCTParse(TestCase):
    """Unit tests for ctparse's parse entry point and internal matchers."""
    def test_timeout(self):
        # The sentinel raises once the 0.5s budget is exceeded by sleep(1);
        # a 0 budget means no timeout at all.
        t_fun = _timeout(0.5)
        with self.assertRaises(Exception):
            sleep(1)
            t_fun()
        t_fun = _timeout(0)
        t_fun()  # all good
    def test_ctparse(self):
        txt = '12.12.2020'
        res = ctparse(txt)
        self.assertEqual(res.resolution, Time(year=2020, month=12, day=12))
        self.assertIsNotNone(str(res))
        self.assertIsNotNone(repr(res))
        # non sense gives no result
        self.assertIsNone(ctparse('gargelbabel'))
        # Year-less date resolves relative to the reference timestamp ts.
        txt = '12.12.'
        res = ctparse(txt, ts=datetime(2020, 12, 1))
        self.assertEqual(res.resolution, Time(year=2020, month=12, day=12))
        # debug=True yields an iterator of candidate parses.
        res = ctparse(txt, ts=datetime(2020, 12, 1), debug=True)
        self.assertEqual(next(res).resolution, Time(year=2020, month=12, day=12))
    def test_ctparse_timeout(self):
        # timeout in ctparse: should rather mock the logger and see
        # whether the timeout was hit, but cannot get it mocked
        txt = 'tomorrow 8 yesterday Sep 9 9 12 2023 1923'
        ctparse(txt, timeout=0.0001)
    def test_seq_match(self):
        # make_rm(i) builds a "regex matcher" predicate matching exactly i.
        def make_rm(i):
            def _regex_match(s):
                return s == i
            return _regex_match
        # empty sequence, empty pattern: matches on a single empty sequence
        self.assertEqual(list(_seq_match([], [])), [[]])
        # non empty sequence, empty pattern matches on an empty sequence
        self.assertEqual(list(_seq_match(['a', 'b'], [])), [[]])
        # non empty sequence, non empty pattern that does not apper: no match
        self.assertEqual(list(_seq_match(['a', 'b'], [make_rm(1)])), [])
        # empty sequence, non empty pattern: no match
        self.assertEqual(list(_seq_match([], [make_rm(1)])), [])
        # sequence shorter than pattern: no match
        self.assertEqual(list(_seq_match(['a'], [make_rm(1), make_rm(2)])), [])
        # seq = pat
        self.assertEqual(list(_seq_match([1], [make_rm(1)])), [[0]])
        self.assertEqual(list(_seq_match([1, 2, 3], [make_rm(1)])), [[0]])
        self.assertEqual(list(_seq_match([1, 2, 3], [make_rm(2)])), [[1]])
        self.assertEqual(list(_seq_match([1, 2, 3], [make_rm(3)])), [[2]])
        self.assertEqual(list(_seq_match([1, 2, 'a'], [make_rm(1), make_rm(2)])), [[0, 1]])
        self.assertEqual(list(_seq_match([1, 'a', 3], [make_rm(1), lambda x: x, make_rm(3)])),
                         [[0, 2]])
        self.assertEqual(list(_seq_match(['a', 2, 3], [make_rm(2), make_rm(3)])),
                         [[1, 2]])
        # starts with non regex
        self.assertEqual(list(_seq_match([1, 2], [lambda x: x, make_rm(1), make_rm(2)])), [])
        self.assertEqual(list(_seq_match(['a', 1, 2], [lambda x: x, make_rm(1), make_rm(2)])),
                         [[1, 2]])
        # ends with non regex
        self.assertEqual(list(_seq_match([1, 2], [make_rm(1), make_rm(2), lambda x: x])), [])
        self.assertEqual(list(_seq_match([1, 2, 'a'], [make_rm(1), make_rm(2), lambda x: x])),
                         [[0, 1]])
        # repeated pattern
        self.assertEqual(list(_seq_match([1, 2, 1, 2, 2], [make_rm(1), make_rm(2)])),
                         [[0, 1], [0, 3], [0, 4], [2, 3], [2, 4]])
        self.assertEqual(list(_seq_match([1, 2, 1, 2, 2], [make_rm(1), lambda x: x, make_rm(2)])),
                         [[0, 3], [0, 4], [2, 4]])
        self.assertEqual(list(_seq_match([1, 2, 1, 2, 2], [lambda x: x, make_rm(1), make_rm(2)])),
                         [[2, 3], [2, 4]])
        self.assertEqual(list(_seq_match([1, 2, 1, 2, 2], [make_rm(1), make_rm(2), lambda x: x])),
                         [[0, 1], [0, 3], [2, 3]])
        self.assertEqual(list(_seq_match(
            [1, 2, 1, 2, 2],
            [lambda x: x, make_rm(1), lambda x: x, make_rm(2), lambda x: x])),
            [])
        self.assertEqual(list(_seq_match(
            [1, 2, 1, 2, 2, 3],
            [lambda x: x, make_rm(1), lambda x: x, make_rm(2), lambda x: x])),
            [[2, 4]])
    def test_match_rule(self):
        # An empty sequence or an empty rule never matches.
        self.assertEqual(list(_match_rule([], ['not empty'])), [])
        self.assertEqual(list(_match_rule(['not empty'], [])), [])
|
991,798 | 6b00861b847ba9b6ac9315b6b77737e9c3375bd1 | #!/usr/bin/env python3
# import boto3
import Inventory_Modules
from Inventory_Modules import display_results
from ArgumentsClass import CommonArguments
from account_class import aws_acct_access
from colorama import init, Fore
from time import time
from botocore.exceptions import ClientError
import logging
init()
__version__ = "2023.05.10"
# Common multi-profile/multi-region CLI plumbing plus the two
# retention-specific options this script adds.
parser = CommonArguments()
parser.multiprofile()
parser.multiregion()
parser.rootOnly()
parser.verbosity()
parser.timing()
parser.version(__version__)
parser.my_parser.add_argument(
	'+R', "--ReplaceRetention",
	help="The retention you want to update to on all groups that match.",
	default=None,
	metavar="retention days",
	type=int,
	choices=[0, 1, 3, 5, 7, 14, 30, 60, 90, 120, 150, 180, 365, 400, 545, 731, 1827, 2192, 2557, 2922, 3288, 3653],
	dest="pRetentionDays")
parser.my_parser.add_argument(
	'-o', "--OldRetention",
	help="The retention you want to change on all groups that match. Use '0' for 'Never'",
	default=None,
	metavar="retention days",
	type=int,
	choices=[0, 1, 3, 5, 7, 14, 30, 60, 90, 120, 150, 180, 365, 400, 545, 731, 1827, 2192, 2557, 2922, 3288, 3653],
	dest="pOldRetentionDays")
args = parser.my_parser.parse_args()
pProfiles = args.Profiles
pRegionList = args.Regions
pRetentionDays = args.pRetentionDays
pOldRetentionDays = args.pOldRetentionDays
pRootOnly = args.RootOnly
pTiming = args.Time
verbose = args.loglevel
logging.basicConfig(level=args.loglevel, format="[%(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s")
##################
# ANSI escape that clears the current terminal line (used with end='\r').
ERASE_LINE = '\x1b[2K'
logging.info(f"Profiles: {pProfiles}")
account_number_format = "12s"
if pTiming:
	begin_time = time()
##################
def check_cw_groups_retention(faws_acct, fRegionList=None):
	"""Collect CloudWatch log groups (with retention info) across the child
	accounts of ``faws_acct`` and the given regions.

	Each returned dict is the raw describe-log-groups entry augmented with
	Retention/Name/Size aliases, the temporary credentials used (so a later
	update step can reconnect), and MgmtAccount/AccountId/Region/
	ParentProfile bookkeeping fields.

	:param faws_acct: aws_acct_access object for the management account
	:param fRegionList: regions to scan (defaults to ['us-east-1'])
	:return: flat list of log-group dicts
	"""
	ChildAccounts = faws_acct.ChildAccounts
	AllCWLogGroups = []
	account_credentials = {'Role': 'unset'}
	if fRegionList is None:
		fRegionList = ['us-east-1']
	for account in ChildAccounts:
		# Honour --RootOnly: skip children when only the root is wanted.
		if account['MgmtAccount'] != account['AccountId'] and pRootOnly:
			continue
		logging.info(f"Connecting to account {account['AccountId']}")
		try:
			account_credentials = Inventory_Modules.get_child_access3(faws_acct, account['AccountId'])
			logging.info(f"Connected to account {account['AccountId']} using role {account_credentials['Role']}")
		except ClientError as my_Error:
			if str(my_Error).find("AuthFailure") > 0:
				logging.error(
					f"{account['AccountId']}: Authorization failure using role: {account_credentials['Role']}")
				logging.warning(my_Error)
			elif str(my_Error).find("AccessDenied") > 0:
				logging.error(
					f"{account['AccountId']}: Access Denied failure using role: {account_credentials['Role']}")
				logging.warning(my_Error)
			else:
				logging.error(
					f"{account['AccountId']}: Other kind of failure using role: {account_credentials['Role']}")
				logging.warning(my_Error)
			continue
		for region in fRegionList:
			CW_Groups = dict()
			try:
				print(f"{ERASE_LINE}Checking account {account['AccountId']} in region {region}", end='\r')
				# TODO: Will eventually support a filter for string fragments, and retention periods
				CW_Groups = Inventory_Modules.find_cw_groups_retention2(account_credentials, region)
				logging.info(
					f"Root Account: {faws_acct.acct_number} Account: {account['AccountId']} Region: {region} | Found {len(CW_Groups['logGroups'])} groups")
			except ClientError as my_Error:
				if str(my_Error).find("AuthFailure") > 0:
					logging.error(f"Authorization Failure accessing account {account['AccountId']} in {region} region")
					logging.warning(f"It's possible that the region {region} hasn't been opted-into")
					pass
			if 'logGroups' in CW_Groups.keys():
				for y in range(len(CW_Groups['logGroups'])):
					# 'retentionInDays' is absent when retention is unset.
					if 'retentionInDays' in CW_Groups['logGroups'][y].keys():
						CW_Groups['logGroups'][y]['Retention'] = Retention = CW_Groups['logGroups'][y]['retentionInDays']
					else:
						CW_Groups['logGroups'][y]['Retention'] = Retention = "Never"
					CW_Groups['logGroups'][y]['Name'] = Name = CW_Groups['logGroups'][y]['logGroupName']
					CW_Groups['logGroups'][y]['Size'] = Size = CW_Groups['logGroups'][y]['storedBytes']
					# Carry the credentials so the update pass can reconnect.
					CW_Groups['logGroups'][y]['AccessKeyId'] = account_credentials['AccessKeyId']
					CW_Groups['logGroups'][y]['SecretAccessKey'] = account_credentials['SecretAccessKey']
					CW_Groups['logGroups'][y]['SessionToken'] = account_credentials['SessionToken']
					CW_Groups['logGroups'][y]['ParentProfile'] = faws_acct.credentials['Profile'] if faws_acct.credentials['Profile'] is not None else 'default'
					CW_Groups['logGroups'][y]['MgmtAccount'] = faws_acct.MgmtAccount
					CW_Groups['logGroups'][y]['AccountId'] = account_credentials['AccountId']
					CW_Groups['logGroups'][y]['Region'] = region
				AllCWLogGroups.extend(CW_Groups['logGroups'])
	return (AllCWLogGroups)
def update_cw_groups_retention(fCWGroups=None, fOldRetentionDays=None, fRetentionDays=None):
	"""Set retention to ``fRetentionDays`` on every log group in
	``fCWGroups`` whose current retention matches ``fOldRetentionDays``.

	Each entry carries its own temporary credentials, so a fresh boto3
	session is created per log group.

	:param fCWGroups: list of log-group dicts from check_cw_groups_retention
	:param fOldRetentionDays: retention (days) to match; 0 means "never set"
	:param fRetentionDays: new retention period in days
	:return: True if every update succeeded, False if any call failed
	"""
	import boto3
	if fCWGroups is None:
		fCWGroups = []
	if fOldRetentionDays is None:
		fOldRetentionDays = 0
	Success = True
	for item in fCWGroups:
		cw_session = boto3.Session(aws_access_key_id=item['AccessKeyId'],
		                           aws_secret_access_key=item['SecretAccessKey'],
		                           aws_session_token=item['SessionToken'],
		                           region_name=item['Region'])
		cw_client = cw_session.client('logs')
		logging.info(f"Connecting to account {item['AccountId']}")
		try:
			print(f"{ERASE_LINE}Updating log group {item['logGroupName']} account {item['AccountId']} in region {item['Region']}", end='\r')
			retentionPeriod = item.get('retentionInDays', 'Never')
			# Match either "never set" (old retention 0) or the exact value.
			if (fOldRetentionDays == 0 and 'retentionInDays' not in item.keys()) or retentionPeriod == fOldRetentionDays:
				cw_client.put_retention_policy(
					logGroupName=item['logGroupName'],
					retentionInDays=fRetentionDays
				)
				print(f"Account: {item['AccountId']} in Region: {item['Region']} updated {item['logGroupName']} from {retentionPeriod} to {fRetentionDays} days")
			else:
				logging.info(f"Skipped {item['logGroupName']} in account: {item['AccountId']} in Region: {item['Region']} as it didn't match criteria")
		except ClientError as my_Error:
			# BUG FIX: the old version returned from inside the loop (so only
			# the first group was ever processed on that path) and reset
			# Success to True on each successful iteration, masking earlier
			# failures. Now every group is attempted and any failure makes
			# the overall result False.
			logging.error(my_Error)
			Success = False
	return Success
##################
# Main driver: scan all requested profiles/regions, display the log groups,
# summarise storage cost, then optionally update retention.
print()
print(f"Checking for CW Log Groups... ")
print()
print()
# Column layout for display_results: field -> order/heading.
display_dict = {'ParentProfile': {'DisplayOrder': 1, 'Heading': 'Parent Profile'},
                'MgmtAccount'  : {'DisplayOrder': 2, 'Heading': 'Mgmt Acct'},
                'AccountId'    : {'DisplayOrder': 3, 'Heading': 'Acct Number'},
                'Region'       : {'DisplayOrder': 4, 'Heading': 'Region'},
                'Retention'    : {'DisplayOrder': 5, 'Heading': 'Days Retention', 'Condition': ['Never']},
                'Name'         : {'DisplayOrder': 7, 'Heading': 'CW Log Name'},
                'Size'         : {'DisplayOrder': 6, 'Heading': 'Size (Bytes)'}}
fmt = f'%-12s %-{account_number_format} %-15s %-10s %-15s %-50s'
print(fmt % ("Root Acct #", "Account #", "Region", "Retention", "Size", "Name"))
print(fmt % ("-----------", "---------", "------", "---------", "----", "----"))
CWGroups = []
AllChildAccounts = []
RegionList = []
if pProfiles is None:  # Default use case from the classes
	logging.info("Using whatever the default profile is")
	aws_acct = aws_acct_access()
	RegionList = Inventory_Modules.get_regions3(aws_acct, pRegionList)
	logging.warning(f"Default profile will be used")
	CWGroups.extend(check_cw_groups_retention(aws_acct, RegionList))
else:
	logging.warning(f"These profiles are being checked {pProfiles}.")
	ProfileList = Inventory_Modules.get_profiles(fprofiles=pProfiles, fSkipProfiles="skipplus")
	logging.warning(ProfileList)
	for profile in ProfileList:
		aws_acct = aws_acct_access(profile)
		logging.warning(f"Looking at {profile} account now... ")
		RegionList = Inventory_Modules.get_regions3(aws_acct, pRegionList)
		CWGroups.extend(check_cw_groups_retention(aws_acct, RegionList))
# Unique (mgmt, child) pairs actually seen in the results.
AllChildAccounts = list(set([(x['MgmtAccount'], x['AccountId']) for x in CWGroups]))
display_results(CWGroups, display_dict, None)
print(ERASE_LINE)
totalspace = 0
for i in CWGroups:
	totalspace += i['storedBytes']
print(f"Found {len(CWGroups)} log groups across {len(AllChildAccounts)} accounts across {len(RegionList)} regions, representing {totalspace / 1024 / 1024 / 1024:,.3f} GB")
print(f"To give you a small idea - in us-east-1 - it costs $0.03 per GB per month to store (after 5GB).")
if totalspace / 1024 / 1024 / 1024 <= 5.0:
	print("Which means this is essentially free for you...")
else:
	print(f"This means you're paying about ${((totalspace / 1024 / 1024 / 1024) - 5) * 0.03:,.2f} per month in CW storage charges")
# Only mutate retention when +R was supplied, and only after confirmation.
if pRetentionDays is not None:
	print(f"As per your request - updating ALL retention periods to {pRetentionDays} days")
	print(f"")
	UpdateAllRetention = input(f"This is definitely an intrusive command, so please confirm you want to do this (y/n): ") in ['Y', 'y']
	if UpdateAllRetention:
		print(f"Updating all log groups to have a {pRetentionDays} retention period")
		update_cw_groups_retention(CWGroups, pOldRetentionDays, pRetentionDays)
	else:
		print(f"No changes made")
print()
if pTiming:
	print(ERASE_LINE)
	print(f"{Fore.GREEN}This script took {time() - begin_time:.2f} seconds{Fore.RESET}")
print()
print("Thank you for using this script")
print()
|
991,799 | 6e8ad7acd7843cf856552137fac09946d22ddf39 | # Definition for singly-linked list.
class ListNode(object):
    """Singly-linked list node: a value plus a pointer to the next node."""

    def __init__(self, x):
        self.val = x
        self.next = None

    def InitListNode(self, matrix):
        """Build a chain of nodes from ``matrix`` and return its head.

        The node this is called on only stores the head in ``self.head``;
        it is not itself part of the resulting list.
        """
        self.head = ListNode(matrix[0])
        tail = self.head
        for value in matrix[1:]:
            tail.next = ListNode(value)
            tail = tail.next
        return self.head
class Solution(object):
    def removeElements(self, head, val):
        """Drop every node whose value equals ``val`` and return the new head.

        :type head: ListNode
        :type val: int
        :rtype: ListNode
        """
        # A dummy head makes removing leading matches uniform with the rest.
        dummy = ListNode(0)
        tail = dummy
        node = head
        while node:
            if node.val != val:
                tail.next = node
                tail = tail.next
            node = node.next
        tail.next = None
        return dummy.next
# Smoke test: strip the value 1 from the list [1, 2, 1, 1].
l=ListNode(None).InitListNode([1,2,1,1])
Solution().removeElements(l,1)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.