index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
13,900 | 75981e44eaca2378dec5a5e928e51602616eda8d | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
import logging
import aiohttp
import asyncio
from bs4 import BeautifulSoup
from urllib.parse import urljoin, urlparse
from operator import itemgetter
from collections import OrderedDict
def extract_chapters(chapters_url, html):
    """
    通用解析小说目录 (generic novel table-of-contents parser).

    :param chapters_url: 小说目录页url, used as the base for resolving relative hrefs
    :param html: 当前页面html
    :return: list of {'chapter_url', 'chapter_name', 'index'} dicts sorted by
             numeric index in descending order (earliest chapters last).
    """
    # 参考https://greasyfork.org/zh-CN/scripts/292-my-novel-reader
    # Matches <a> tags whose text looks like a chapter heading (第x章 ...).
    chapters_reg = r'(<a\s+.*?>.*第?\s*[一二两三四五六七八九十○零百千万亿0-91234567890]{1,6}\s*[章回卷节折篇幕集].*?</a>)'
    # 这里不能保证获取的章节分得很清楚,但能保证这一串str是章节目录。可以利用bs安心提取a
    chapters_res = re.findall(chapters_reg, str(html), re.I)
    chapters_res_soup = BeautifulSoup('\n'.join(chapters_res), 'html5lib')
    all_chapters = {}
    for link in chapters_res_soup.find_all('a'):
        url = urljoin(chapters_url, link.get('href'))
        name = link.text or ''
        # Derive a sortable numeric index from the last path component
        # (e.g. ".../123.html" -> 123).  Bug fix: skip links whose path is
        # not numeric (e.g. "/index.html") instead of raising ValueError
        # and aborting the whole parse.
        try:
            index = int(urlparse(url).path.split('.')[0].split('/')[-1])
        except ValueError:
            continue
        # Keyed by index so duplicate links to the same chapter collapse.
        all_chapters[index] = {
            'chapter_url': url,
            'chapter_name': name,
            'index': index,
        }
    return sorted(all_chapters.values(), reverse=True, key=itemgetter('index'))
def extract_content(html, chapter_url=None):
    """
    从小说章节页面提取 小说内容以及 上下一章的地址

    :param html: 小说章节页面内容
    :param chapter_url: 小说章节页面地址, used to resolve prev/next links
    :return: dict with keys 'content', 'next_chapter', 'title', or None when
             no content node was found on the page.
    """
    soup = BeautifulSoup(html, 'html5lib')
    # Site-specific content selector; 'id' wins over 'class', 'class' over a
    # bare tag name.
    selector = {'id': 'content'}
    if selector.get('id', None):
        content = soup.find_all(id=selector['id'])
    elif selector.get('class', None):
        content = soup.find_all(class_=selector['class'])
    else:
        content = soup.find_all(selector.get('tag'))
    if not content:
        return None
    # 提取出真正的章节标题
    title_reg = r'(第?\s*[一二两三四五六七八九十○零百千万亿0-91234567890]{1,6}\s*[章回卷节折篇幕集]\s*.*?)[_,-]'
    # Bug fix: soup.title (or its string) can be None; guard before regex.
    title = soup.title.string if (soup.title and soup.title.string) else ''
    extract_title = re.findall(title_reg, title, re.I)
    if extract_title:
        title = extract_title[0]
    else:
        # Bug fix: fall back to the first <h1> only if one exists; the
        # original indexed soup.select('h1')[0] unconditionally.
        h1 = soup.select('h1')
        if h1:
            title = h1[0].get_text()
    if not title and soup.title:
        title = soup.title.string
    if chapter_url:
        next_chapter = extract_pre_next_chapter(chapter_url=chapter_url, html=str(soup))
    else:
        next_chapter = OrderedDict()
    return {
        'content': ''.join(str(i) for i in content),
        'next_chapter': next_chapter,
        'title': title,
    }
def extract_pre_next_chapter(chapter_url, html):
    """
    Extract the prev/next chapter navigation links from a chapter page.

    :param chapter_url: url of the current chapter page, used to resolve
        relative hrefs.
    :param html: raw html of the chapter page.
    :return: OrderedDict mapping the first 5 characters of the link text to
        the absolute url; empty when nothing matched or parsing failed.
    """
    next_chapter = OrderedDict()
    try:
        # See https://greasyfork.org/zh-CN/scripts/292-my-novel-reader
        # Anchors whose text looks like prev/next navigation (上一页/下一章/...).
        next_reg = r'(<a\s+.*?>.*[第上前下后][一]?[0-9]{0,6}?[页张个篇章节步].*?</a>)'
        judge_reg = r'[第上前下后][一]?[0-9]{0,6}?[页张个篇章节步]'
        # Strip "<<"/">>" arrow decorations, then re-parse the matched
        # fragments with BeautifulSoup so hrefs are extracted reliably.
        next_res = re.findall(next_reg, html.replace('<<', '').replace('>>', ''), re.I)
        str_next_res = '\n'.join(next_res)
        next_res_soup = BeautifulSoup(str_next_res, 'html5lib')
        for link in next_res_soup.find_all('a'):
            text = link.text or ''
            text = text.replace(' ', '')
            # Skip blacklisted texts (e.g. "后一个") that merely resemble
            # navigation links.
            if novels_list(text):
                is_next = re.search(judge_reg, text)
                # is_ok = is_chapter(text)
                if is_next:
                    url = urljoin(chapter_url, link.get('href')) or ''
                    # Key on a short text prefix such as "下一章".
                    next_chapter[text[:5]] = url
        # nextDic = [{v[0]: v[1]} for v in sorted(next_chapter.items(), key=lambda d: d[1])]
        return next_chapter
    except Exception as e:
        # Best-effort: log and return whatever was collected so far.
        logging.exception(e)
        return next_chapter
def novels_list(text):
    """Return True unless *text* contains a blacklisted phrase."""
    blacklist = ('后一个', '天上掉下个')
    return not any(phrase in text for phrase in blacklist)
async def target_fetch(url):
    """Fetch *url* asynchronously and return the decoded response body."""
    # A fresh ClientSession per call keeps the example simple; for many
    # requests a single shared session would be more efficient.
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            return await response.text()
async def main():
    """Demo driver: fetch a TOC page and print its three earliest chapters."""
    url = "https://www.xbiquge6.com/81_81273/"
    html = await target_fetch(url)
    chapters = extract_chapters(url, html)
    # extract_chapters sorts by index descending, so [-3:] yields the three
    # earliest chapters.
    for chapter in chapters[-3:]:
        chapter_url = chapter['chapter_url']
        html = await target_fetch(chapter_url)
        data = extract_content(html, chapter_url)
        # Bug fix: extract_content returns None when no content node was
        # found; guard before subscripting.
        if data:
            print(data['content'])
# Script entry point: drive the async scraper to completion.
# (Stray dataset delimiter removed from the end of the file.)
loop = asyncio.get_event_loop()
loop.run_until_complete(main())
13,901 | dbced9badaf5fe104f6b1552731d8e852aa05fc4 | import sys
from Data_Processing_Unit.models import *
from Public_Data_Acquisition_Unit.mongo_models import *
from dotmap import DotMap
import json
from bson import ObjectId
import datetime
class Mongo_Serializer(object):
    """Serializes supported mongoengine documents into {"fields": ...} rows."""

    # Document models this serializer knows how to handle.
    SUPPORTED_MODELS = [Keybase_Response_TMS, Trends]

    def __init__(self):
        pass

    def is_supported(self, model):
        """Return True when *model* is one of SUPPORTED_MODELS."""
        return model in Mongo_Serializer.SUPPORTED_MODELS

    def find_model_of_object(self, object):
        """Resolve the model class of a document instance from its repr."""
        return self.str_to_class(self.parse_model_name(str(object.__class__)))

    def str_to_class(self, class_name):
        """Look up *class_name* in this module's namespace."""
        return getattr(sys.modules[__name__], class_name)

    def parse_model_name(self, model_name):
        """Extract the bare class name from a repr like "<class 'a.b.C'>"."""
        return model_name.strip("><'").split('.')[-1]

    def keybase_responce_tms_serializer(self, objects_list):
        """Flatten each document's 'data' entries into {"fields": {...}} rows.

        Prints each row and the final count as it goes (kept for parity
        with the original behaviour).
        """
        data = []
        if objects_list and self.is_supported(self.find_model_of_object(objects_list[0])):
            for document in objects_list:
                mongo_doc = document.to_mongo()
                for entry in mongo_doc['data']:
                    fields = {key: value for key, value in entry.items()}
                    print({"fields": fields})
                    data.append({"fields": fields})
        print(len(data))
        return data

    def keybase_responce_tms_serializer_nested(self, objects_list):
        """Serialize each document whole, JSON-encoding Mongo-specific types."""
        data = []
        if objects_list and self.is_supported(self.find_model_of_object(objects_list[0])):
            for document in objects_list:
                encoded = JSONEncoder().encode(document.to_mongo())
                data.append({"fields": encoded})
        print(len(data))
        return data
class JSONEncoder(json.JSONEncoder):
    """json.JSONEncoder that also understands bson ObjectId and datetime."""

    def default(self, o):
        # Both Mongo ObjectIds and datetimes serialize via their str() form;
        # anything else falls through to the base class (which raises
        # TypeError for unknown types).
        if isinstance(o, (ObjectId, datetime.datetime)):
            return str(o)
        return super(JSONEncoder, self).default(o)
13,902 | 0793d29eddf863f13768df45dc473b42c5a9998c | import functools
import time
import code
import warnings
import math
def timethis(func=None, *, n_iter=100):
    """Decorator printing the best-of-``n_iter`` wall time of each call.

    Usable bare (``@timethis``) or with arguments (``@timethis(n_iter=10)``).
    The wrapped call runs the function ``n_iter`` times and returns the
    result of the final iteration.
    """
    if func is None:
        # Called with arguments only: return a decorator awaiting the func.
        return lambda f: timethis(f, n_iter=n_iter)

    @functools.wraps(func)
    def inner(*args, **kwargs):
        print(func.__name__, end=" ... ")
        best = float('inf')
        for _ in range(n_iter):
            start = time.perf_counter()
            result = func(*args, **kwargs)
            best = min(best, time.perf_counter() - start)
        print(best)
        return result
    return inner
# Demo: time summing a million ints (note: runs at import time).
result = timethis(sum)(range(10 ** 6))
# ###################################################################################
def profiled(func):
    """Decorator counting calls to *func* via the ``ncalls`` attribute."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        wrapper.ncalls += 1
        return func(*args, **kwargs)
    # Counter lives on the wrapper itself so it is visible to callers.
    wrapper.ncalls = 0
    return wrapper
@profiled
def identity(x):
    """Return *x* unchanged (demo target for @profiled)."""
    return x
# print(identity(42)) # 42
# print(identity(42)) # 42
# print(identity.ncalls) # 2
# ###################################################################################
def once(func):
    """Decorator: run *func* on the first call only; later calls are no-ops.

    The wrapper always returns None, matching the original behaviour.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        if not wrapper.called:
            func(*args, **kwargs)
            wrapper.called = True  # mark as already run
    wrapper.called = False  # not run yet at decoration time
    return wrapper
@once
def initialize_setting():
    """Print a message; @once guarantees it happens at most one time."""
    print("Setting initialized")
# initialize_setting()
# initialize_setting()
# ###################################################################################
def memoized(func):
    """Decorator caching results keyed on positional + sorted keyword args.

    Arguments must be hashable; the cache grows without bound.
    """
    cache = {}

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        key = args + tuple(sorted(kwargs.items()))
        try:
            return cache[key]
        except KeyError:
            value = cache[key] = func(*args, **kwargs)
            return value
    return wrapper
# ###################################################################################
def deprecated(func):
    """Decorator emitting a DeprecationWarning pointing at *func*'s definition."""
    # Renamed from ``code`` so we don't shadow the ``code`` module imported
    # at file level.
    func_code = func.__code__
    warnings.warn_explicit(
        # Bug fix: the original message lacked a space ("fois deprecated").
        func.__name__ + ' is deprecated',
        category=DeprecationWarning,
        filename=func_code.co_filename,
        lineno=func_code.co_firstlineno + 1)
    return func
@deprecated
def identity(x):
    """Return *x* unchanged (re-defined here to demo @deprecated)."""
    return x
# ###################################################################################
def pre(cond, massage):
    """Decorator factory asserting precondition *cond* over the call args.

    ``massage`` (sic - parameter name kept for backward compatibility) is
    the AssertionError message shown when the predicate fails.
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            assert cond(*args, **kwargs), massage
            return func(*args, **kwargs)
        return wrapper
    return decorator
# Bug fix: the predicate referenced an undefined name ``r`` (NameError on
# every call); it must test the function's own argument ``x``.
@pre(lambda x: x >= 0, 'negative argument')
def checked_log(x):
    """Return math.log(x); @pre rejects negative input (log(0) still raises)."""
    return math.log(x)
# checked_log(-42)
|
13,903 | 840f213a75f01912feae8fb8fcd2e3f00b7e1408 | from datetime import datetime
from bleuio_lib.bleuio_funcs import BleuIo
# from serial import SerialException
from time import sleep
# Connect to the BleuIO dongle and start its background reader thread.
my_dongle = BleuIo()
my_dongle.start_daemon()
print(
    "Connected to dongle\n\n"
    "Welcome to the Bluetooth device Scan and Store example!\n\n"
)
# Set the dongle in central role
my_dongle.at_central()
user_input = input(
    "Enter something such as a Manufacturer Specific (MFS) ID to scan for "
    "and store in a file or just leave it blank to scan all:\n"
)
# Scans with a specific id or all devices if none is provided
my_dongle.at_findscandata(user_input)
log = ""
# Keep scanning until the user types "stop" (case-insensitive).
while user_input.casefold() != "stop":
    user_input = input('Enter "STOP" to stop scanning\n')
    # If the user stops the scan log reformat and log the response
    if user_input.casefold() == "stop":
        # Stop the scan
        my_dongle.stop_scan()
        # Fetch the result
        log = my_dongle.rx_scanning_results
        # Saves the log to scan_log.txt
        with open("scan_log.txt", "w") as scan_log:
            for line in log:
                scan_log.write(line)
|
13,904 | af0079a4dc4bcbe1294a472df50d7712585eeca9 | import numpy as np
from load_data import load_data
from veltman_format import teams
class PlayCallGame():
    """Interactive guessing game: predict NFL play calls from game situations.

    NOTE(review): this is Python 2 code (``raw_input``, ``+ 0.`` float
    division idiom) - do not run under Python 3 without porting.
    """

    def __init__(self):
        self.start()

    def start(self):
        """Load the data set, show the intro prompt and run the game loop."""
        data = load_data('formatted_veltman_pbp_small.pkl', False)
        self.train_set_x, self.train_set_y = data[0]
        self.test_set_x, self.test_set_y = data[1]
        # Opening prompt
        print('\nTry your luck as an NFL coach! Guess the play call based on each '
              '(admittedly simple) game situation.')
        inpt = raw_input('Type \'q\' at any time to stop. '
                         'Press enter to begin...\n')
        n_correct = 0
        n_incorrect = 0
        # Game loop
        if inpt != 'q':
            response = ''
            while response != 'q':
                response, answer = self.ask_question()
                if response == 'q':
                    self.end_game(n_correct, n_incorrect)
                    continue
                # Answers are entered 1-based; labels are 0-based.
                response = int(response) - 1
                if response == answer:
                    print('Good call, coach!\n')
                    n_correct += 1
                else:
                    action = self.format_action(answer)
                    print('Whoops, that\'s not what your NFL counterpart decided.'
                          ' He {0}.\n'.format(action))
                    n_incorrect += 1
        else:
            self.end_game(n_correct, n_incorrect)

    def ask_question(self):
        """Pick a random test row, describe the situation, return (input, label)."""
        row, answer = self.select_random()
        off_team, def_team = self.extract_teams(row)
        # Columns 69/70 hold the offense/defense scores.
        score = self.format_score(off_team, def_team, int(row[69]), int(row[70]))
        situation = 'You are coaching {0}. {1} with {2} in the {3}.'.format(
            off_team, score, self.format_time(row), self.format_quarter(row))
        position = self.format_position(row, def_team)
        question = situation + ' ' + position + (' Will you [1] run the ball, [2] '
            'pass, [3] punt, or [4] kick a field goal? ')
        return raw_input(question), answer

    def format_score(self, off_team, def_team, off_score, def_score):
        """Return a human-readable score line, leader first (or a tie)."""
        order = ()
        if off_score > def_score:
            order = (off_team, def_team, off_score, def_score)
        elif off_score < def_score:
            order = (def_team, off_team, def_score, off_score)
        else:
            return '{0} and {1} are tied at {2}'.format(off_team, def_team, off_score)
        return '{0} leads {1} {2}-{3}'.format(*order)

    def format_time(self, row):
        """Convert the row's clock columns to a M:SS string.

        row[1] appears to count seconds remaining in the whole game, so the
        remaining full quarters after this one are subtracted - TODO confirm
        against the data format.
        """
        quarter = row[0]
        seconds = row[1] - (4 - quarter) * 60 * 15
        minutes = int(seconds / 60)
        seconds = int(seconds % 60)
        return '{0}:{1:02d}'.format(minutes, seconds)

    def format_quarter(self, row):
        """Return e.g. '3rd quarter' for the row's quarter column."""
        quarter = int(row[0])
        return '{0}{1} quarter'.format(quarter, self.suffix(quarter))

    def format_position(self, row, def_team):
        """Describe down, distance and field position from the feature row."""
        down = int(row[2])
        togo = int(row[3])
        yardline = int(row[4])
        # Yardline > 50 is presumably the offense's own half - TODO confirm
        # encoding against the data set.
        if yardline > 50:
            yardline -= 50
            yardline_str = 'your own {0} yard line'.format(yardline)
        else:
            yardline_str = '{0}\'s {1} yard line'.format(def_team, yardline)
        # Inside the 10 the distance reads "and goal".
        if yardline <= 10: togo = 'goal'
        return 'It\'s {0}{1} and {2} on {3}.'.format(down,
            self.suffix(down), togo, yardline_str)

    def format_action(self, answer):
        """Map a 0-3 label to the coach's verb phrase."""
        actions = ['ran the ball', 'threw the ball', 'punted the ball', 'kicked a '
            'field goal']
        return actions[answer]

    def select_random(self):
        """Return a random (features, label) pair from the test split."""
        row = np.random.randint(1, len(self.test_set_x))
        return self.test_set_x[row], self.test_set_y[row]

    def extract_teams(self, row):
        """Decode the one-hot offense/defense team columns into team names."""
        off_one_hot = row[5:37]
        def_one_hot = row[37:69]
        off_team = teams[int(np.argmax(off_one_hot))]
        def_team = teams[int(np.argmax(def_one_hot))]
        return off_team, def_team

    def suffix(self, index):
        """Ordinal suffix for 1-4 ('st', 'nd', 'rd', 'th')."""
        suffixes = ['st', 'nd', 'rd', 'th']
        return suffixes[index - 1]

    def end_game(self, n_correct, n_incorrect):
        """Print the final accuracy summary."""
        n_total = n_correct + n_incorrect
        # ``+ 0.`` forces float division under Python 2.
        accuracy = n_correct / (n_total + 0.) * 100 if n_total > 0 else 0.
        print('\nThanks for playing, coach. You correctly guessed {0} out of {1} '
              'play calls for an accuracy of {2:.1f}%.'.format(n_correct,
              n_total, accuracy))
|
13,905 | 83ae465e5e0013ecf6b4e3b57fe6ee9c6d0a2900 | import configparser
import difflib
import logging
import os
from pathlib import Path
import pytoml as toml
from .validate import validate_config
from .vendorized.readme.rst import render
import io
log = logging.getLogger(__name__)
class ConfigError(ValueError):
    """Raised when flit configuration is missing, malformed or invalid."""
    pass
# Metadata keys whose values are lists of strings (one item per line in the
# old ini format).
metadata_list_fields = {
    'classifiers',
    'requires',
    'dev-requires'
}

# Every key accepted in [metadata] / [tool.flit.metadata].
metadata_allowed_fields = {
    'module',
    'author',
    'author-email',
    'maintainer',
    'maintainer-email',
    'home-page',
    'license',
    'keywords',
    'requires-python',
    'dist-name',
    'entry-points-file',
    'description-file',
    'requires-extra',
} | metadata_list_fields

# Keys that must be present for a valid configuration.
metadata_required_fields = {
    'module',
    'author',
    'author-email',
}
def read_pkg_ini(path: Path):
    """Read and check the ``pyproject.toml`` or ``flit.ini`` file with data
    about the package.
    """
    if path.suffix == '.toml':
        with path.open() as f:
            parsed = toml.load(f)
        res = prep_toml_config(parsed, path)
    else:
        # Treat all other extensions as the older flit.ini format.
        res = _validate_config(_read_pkg_ini(path), path)

    # validate_config reports problems with a truthy return value.
    if validate_config(res):
        if os.environ.get('FLIT_ALLOW_INVALID'):
            log.warning("Allowing invalid data (FLIT_ALLOW_INVALID set). Uploads may still fail.")
        else:
            raise ConfigError("Invalid config values (see log)")
    return res
class EntryPointsConflict(ConfigError):
    """Raised when console scripts are configured in both supported places."""
    def __str__(self):
        return ('Please specify console_scripts entry points, or [scripts] in '
            'flit config, not both.')
def prep_toml_config(d, path):
    """Validate config loaded from pyproject.toml and prepare common metadata.

    Returns a dictionary with keys: module, metadata, reqs_by_extra,
    scripts, entrypoints, raw_config.
    """
    if ('tool' not in d) or ('flit' not in d['tool']) \
            or (not isinstance(d['tool']['flit'], dict)):
        raise ConfigError("TOML file missing [tool.flit] table.")
    d = d['tool']['flit']

    # Anything other than the known tables is allowed only with an x- prefix.
    known_sections = {'metadata', 'scripts', 'entrypoints'}
    unknown_sections = [s for s in set(d) - known_sections
                        if not s.lower().startswith('x-')]
    if unknown_sections:
        raise ConfigError('Unknown sections: ' + ', '.join(unknown_sections))
    if 'metadata' not in d:
        raise ConfigError('[tool.flit.metadata] section is required')

    md_dict, module, reqs_by_extra = _prep_metadata(d['metadata'], path)
    scripts_dict = dict(d.get('scripts', {}))
    entrypoints = flatten_entrypoints(d['entrypoints']) if 'entrypoints' in d else {}
    _add_scripts_to_entrypoints(entrypoints, scripts_dict)

    return {
        'module': module,
        'metadata': md_dict,
        'reqs_by_extra': reqs_by_extra,
        'scripts': scripts_dict,
        'entrypoints': entrypoints,
        'raw_config': d,
    }
def flatten_entrypoints(ep):
    """Flatten nested entrypoints dicts.

    Entry points group names can include dots, but dots in TOML make nested
    dictionaries:

        [entrypoints.a.b]   # {'entrypoints': {'a': {'b': {}}}}

    The proper way to avoid this is ``[entrypoints."a.b"]``, but since there
    isn't a need for arbitrarily nested mappings in entrypoints, flit also
    accepts the former; this flattens the nested dictionaries from loading
    pyproject.toml.
    """
    def walk(mapping, prefix):
        # Separate leaf entries from nested sub-groups at this level.
        leaves = {}
        for key, value in mapping.items():
            if isinstance(value, dict):
                for flattened_pair in walk(value, prefix + '.' + key):
                    yield flattened_pair
            else:
                leaves[key] = value
        if leaves:
            yield prefix, leaves

    result = {}
    for group, sub in ep.items():
        result.update(walk(sub, group))
    return result
def _add_scripts_to_entrypoints(entrypoints, scripts_dict):
if scripts_dict:
if 'console_scripts' in entrypoints:
raise EntryPointsConflict
else:
entrypoints['console_scripts'] = scripts_dict
def _read_pkg_ini(path):
"""Reads old-style flit.ini
"""
cp = configparser.ConfigParser()
with path.open(encoding='utf-8') as f:
cp.read_file(f)
return cp
# Map description-file extensions to Description-Content-Type values.
readme_ext_to_content_type = {
    '.rst': 'text/x-rst',
    '.md': 'text/markdown',
    '.txt': 'text/plain',
}
def _prep_metadata(md_sect, path):
    """Process & verify the metadata from a config file

    - Pull out the module name we're packaging.
    - Read description-file and check that it's valid rst
    - Convert dashes in key names to underscores
      (e.g. home-page in config -> home_page in metadata)

    Returns (md_dict, module, reqs_by_extra).
    """
    if not set(md_sect).issuperset(metadata_required_fields):
        missing = metadata_required_fields - set(md_sect)
        raise ConfigError("Required fields missing: " + '\n'.join(missing))

    module = md_sect.get('module')
    if not module.isidentifier():
        raise ConfigError("Module name %r is not a valid identifier" % module)

    md_dict = {}

    # Description file: read it, guess its content type from the extension,
    # and sanity-check rst so PyPI won't fall back to plain text silently.
    if 'description-file' in md_sect:
        description_file = path.parent / md_sect.get('description-file')
        try:
            with description_file.open(encoding='utf-8') as f:
                raw_desc = f.read()
        except FileNotFoundError:
            raise ConfigError(
                "Description file {} does not exist".format(description_file)
            )
        ext = description_file.suffix
        try:
            mimetype = readme_ext_to_content_type[ext]
        except KeyError:
            log.warning("Unknown extension %r for description file.", ext)
            log.warning(" Recognised extensions: %s",
                        " ".join(readme_ext_to_content_type))
            mimetype = None
        if mimetype == 'text/x-rst':
            # rst check
            stream = io.StringIO()
            res = render(raw_desc, stream)
            if not res:
                log.warning("The file description seems not to be valid rst for PyPI;"
                            " it will be interpreted as plain text")
                log.warning(stream.getvalue())
        md_dict['description'] = raw_desc
        md_dict['description_content_type'] = mimetype

    # [metadata.urls] becomes a list of "label, url" strings.
    if 'urls' in md_sect:
        project_urls = md_dict['project_urls'] = []
        for label, url in sorted(md_sect.pop('urls').items()):
            project_urls.append("{}, {}".format(label, url))

    # Copy the remaining keys across, validating value types and converting
    # dashed key names to underscores.
    for key, value in md_sect.items():
        if key in {'description-file', 'module'}:
            continue
        if key not in metadata_allowed_fields:
            # Suggest a close match for likely typos.
            closest = difflib.get_close_matches(key, metadata_allowed_fields,
                                                n=1, cutoff=0.7)
            msg = "Unrecognised metadata key: {!r}".format(key)
            if closest:
                msg += " (did you mean {!r}?)".format(closest[0])
            raise ConfigError(msg)

        k2 = key.replace('-', '_')
        md_dict[k2] = value
        if key in metadata_list_fields:
            if not isinstance(value, list):
                raise ConfigError('Expected a list for {} field, found {!r}'
                                  .format(key, value))
            if not all(isinstance(a, str) for a in value):
                raise ConfigError('Expected a list of strings for {} field'
                                  .format(key))
        elif key == 'requires-extra':
            if not isinstance(value, dict):
                raise ConfigError('Expected a dict for requires-extra field, found {!r}'
                                  .format(value))
            if not all(isinstance(e, list) for e in value.values()):
                raise ConfigError('Expected a dict of lists for requires-extra field')
            for e, reqs in value.items():
                if not all(isinstance(a, str) for a in reqs):
                    raise ConfigError('Expected a string list for requires-extra. (extra {})'
                                      .format(e))
        else:
            if not isinstance(value, str):
                raise ConfigError('Expected a string for {} field, found {!r}'
                                  .format(key, value))

    # What we call requires in the ini file is technically requires_dist in
    # the metadata.
    if 'requires' in md_dict:
        md_dict['requires_dist'] = md_dict.pop('requires')

    # And what we call dist-name is name in the metadata
    if 'dist_name' in md_dict:
        md_dict['name'] = md_dict.pop('dist_name')

    # Move dev-requires into requires-extra
    reqs_noextra = md_dict.pop('requires_dist', [])
    reqs_by_extra = md_dict.pop('requires_extra', {})
    dev_requires = md_dict.pop('dev_requires', None)
    if dev_requires is not None:
        if 'dev' in reqs_by_extra:
            raise ConfigError(
                'dev-requires occurs together with its replacement requires-extra.dev.')
        else:
            log.warning(
                '“dev-requires = ...” is obsolete. Use “requires-extra = {"dev" = ...}” instead.')
            reqs_by_extra['dev'] = dev_requires

    # Add requires-extra requirements into requires_dist
    md_dict['requires_dist'] = \
        reqs_noextra + list(_expand_requires_extra(reqs_by_extra))

    md_dict['provides_extra'] = sorted(reqs_by_extra.keys())

    # For internal use, record the main requirements as a '.none' extra.
    reqs_by_extra['.none'] = reqs_noextra

    return md_dict, module, reqs_by_extra
def _expand_requires_extra(re):
for extra, reqs in sorted(re.items()):
for req in reqs:
if ';' in req:
name, envmark = req.split(';', 1)
yield '{}; extra == "{}" and ({})'.format(name, extra, envmark)
else:
yield '{}; extra == "{}"'.format(req, extra)
def _validate_config(cp, path):
    """Validate and process config loaded from a flit.ini file.

    Returns a dict with keys: module, metadata, reqs_by_extra, scripts,
    entrypoints, raw_config.
    """
    unknown_sections = set(cp.sections()) - {'metadata', 'scripts'}
    # Sections prefixed x- are reserved for user extensions and ignored.
    unknown_sections = [s for s in unknown_sections if not s.lower().startswith('x-')]
    if unknown_sections:
        raise ConfigError('Unknown sections: ' + ', '.join(unknown_sections))
    if not cp.has_section('metadata'):
        raise ConfigError('[metadata] section is required')

    md_sect = {}
    for k, v in cp['metadata'].items():
        # List-valued fields are written one item per line in the ini format.
        if k in metadata_list_fields:
            md_sect[k] = [l for l in v.splitlines() if l.strip()]
        else:
            md_sect[k] = v

    # Entry points may live in a separate file.  An explicitly configured
    # file must exist; the default 'entry_points.txt' is optional.
    if 'entry-points-file' in md_sect:
        entry_points_file = path.parent / md_sect.pop('entry-points-file')
        if not entry_points_file.is_file():
            raise FileNotFoundError(entry_points_file)
    else:
        entry_points_file = path.parent / 'entry_points.txt'
        if not entry_points_file.is_file():
            entry_points_file = None

    if entry_points_file:
        ep_cp = configparser.ConfigParser()
        with entry_points_file.open() as f:
            ep_cp.read_file(f)
        # Convert to regular dict
        entrypoints = {k: dict(v) for k, v in ep_cp.items()}
    else:
        entrypoints = {}

    md_dict, module, reqs_by_extra = _prep_metadata(md_sect, path)

    # Scripts ---------------
    if cp.has_section('scripts'):
        scripts_dict = dict(cp['scripts'])
    else:
        scripts_dict = {}

    _add_scripts_to_entrypoints(entrypoints, scripts_dict)

    return {
        'module': module,
        'metadata': md_dict,
        'reqs_by_extra': reqs_by_extra,
        'scripts': scripts_dict,
        'entrypoints': entrypoints,
        'raw_config': cp,
    }
|
13,906 | b8e531662f9b5c43832b9a721505d2b8cff0357d | # Generated by Django 2.0.4 on 2018-05-11 15:30
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: alter ag.policy_ID to DecimalField(max_digits=15,
    decimal_places=1)."""

    dependencies = [
        ('agents', '0002_auto_20180511_1256'),
    ]

    operations = [
        migrations.AlterField(
            model_name='ag',
            name='policy_ID',
            field=models.DecimalField(decimal_places=1, max_digits=15),
        ),
    ]
|
13,907 | dccd8c8fba715e9f0d4602563ba4f3a7b6750148 | import os
import json
import boto3
import uuid
# Interactive demo: simulate an unauthorised DynamoDB write and let the user
# verify the automated rollback.  All AWS access goes through the locally
# configured AWS CLI via os.system.
# NOTE(review): the table/patient/insurance ids the user types are spliced
# directly into shell command strings - acceptable for a self-run local demo,
# but a shell-injection hazard if ever fed untrusted input; consider
# subprocess.run([...]) with an argument list instead.
print("PLEASE CONFIGURE A TEST USER WITH setup_user.py")
print("This script simulates an illegal write into a DynamoDB table caused by a faulty group policy")
print("Incident: Admin user attempts to write into patient table")
print("Response: Updated record rollbacks and an email is sent to admin groups' email")
print("\nPlease ensure your AWS CLI config is in json format")
print("\nPress y/n to proceed:")
decision = input()
if decision == 'y' or decision == 'Y':
    print("\nInput the patient table name in your account:")
    table_name = input()
    print("Input the id of the patient record:")
    patient_id = input()
    print("Verifying values... (Please take note of the value of insurance_id at this point)")
    # Show the record before the simulated attack for later comparison.
    os.system("aws dynamodb get-item --table-name "
              + table_name
              + " --key '{ \"id\": {\"S\": \""
              + patient_id
              + "\"} }'")
    print("The patient table is inaccessible to users in 'adminusers' IAM group.")
    print("However for demo purposes, the generated user will have read access")
    print("Continue on? (y/n)")
    user_input = input()
    if user_input == 'y' or user_input == 'Y':
        print("Input a malicious insurance id for the patient record:")
        insurance_id = input()
        print("\nAttempting to write new random ("+insurance_id+") insurance_id for patient id: " + patient_id)
        # The "illegal" update that the monitoring/rollback should revert.
        os.system("aws dynamodb update-item --table-name "
                  + table_name
                  + " --key '{ \"id\": {\"S\": \""
                  + patient_id
                  + "\"} }'"
                  + " --update-expression 'SET insurance_id = :q' "
                  + "--expression-attribute-values '{ \":q\": {\"S\": \""
                  + insurance_id
                  + "\"} }' "
                  + "--return-values ALL_NEW")
        print("Continue on? (y/n)")
        user_input = input()
        if user_input == 'y' or user_input == 'Y':
            print("Check if rollback occurred...")
            # Re-read the record; insurance_id should be back to its
            # original value if the rollback fired.
            os.system("aws dynamodb get-item --table-name "
                      + table_name
                      + " --key '{ \"id\": {\"S\": \""
                      + patient_id
                      + "\"} }'")
else:
    print("Exiting...")
13,908 | abb01e70838d7a318a657e60e610a86a2dcc7584 | from typing import List
from fastapi import Depends, FastAPI, HTTPException
from starlette.responses import RedirectResponse
from sqlalchemy.orm import Session
import crud, models, schemas
from database import SessionLocal, engine
# creates database tables
models.Base.metadata.create_all(bind=engine)
app = FastAPI()
# create a dependency to make a new session per request and close it after the request is completed
def get_db():
    """FastAPI dependency: yield a database session, always closing it.

    Bug fix: the original wrapped ``SessionLocal()`` itself in the try
    block, so a failed session construction reached ``finally`` with ``db``
    unbound and raised a masking NameError.  Acquire first, then guard.
    """
    db = SessionLocal()
    try:
        yield db
    finally:
        db.close()
@app.get("/")
def main():
    ''' default function to fetch the backend documentation using starlette redirect response'''
    # Swagger UI lives at /docs/; redirect the bare root there.
    return RedirectResponse(url="/docs/")
@app.post("/customers/", response_model=schemas.Customer)
def reserve_room(customer: schemas.Customer, db: Session = Depends(get_db)):
    ''' make a function call to reserve room'''
    # Reject duplicate registrations keyed on email with a 400.
    db_customer = crud.get_customer_by_email(db, email=customer.email)
    if db_customer:
        raise HTTPException(status_code=400, detail="Email already registered")
    return crud.reserve_room(db=db, customer=customer)
@app.get("/customer/{customer_id}", response_model=schemas.Customer)
def get_customer_by_id(customer_id: int, db: Session = Depends(get_db)):
    ''' fetch the customer by customer id; 404s when the id is unknown'''
    db_customer = crud.get_customer_by_id(db, customer_id=customer_id)
    if db_customer is None:
        raise HTTPException(status_code=404, detail="User not found")
    return db_customer
@app.get("/rooms/", response_model=List[schemas.Room])
def get_rooms(db: Session = Depends(get_db)):
    ''' fetch rooms from db which is created by the admin for the customer to reserve'''
    rooms = crud.get_rooms(db)
    return rooms
@app.get("/room/{room_id}", response_model=schemas.Room)
def get_room_by_id(room_id: int, db: Session = Depends(get_db)):
    '''fetch room by id to get more details about it separately; 404s when missing'''
    db_room = crud.get_room_by_room_no(db, room_id=room_id)
    if db_room is None:
        raise HTTPException(status_code=404, detail="Room not found")
    return db_room
@app.put("/pay_amount/", response_model=schemas.Customer)
def pay_amount_to_book_rooms(customer_id: int, account_no: int, mpin: int, db: Session = Depends(get_db)):
    '''make payment to book the room once it is selected in reserve_room function'''
    # Payment validation (account/mpin checks) is delegated to crud.pay_amount.
    pay = crud.pay_amount(customer_id, account_no, mpin, db=db)
    return pay
@app.delete("/customer/{customer_id}")
def cancel_room(customer_id: int, db: Session = Depends(get_db)):
    '''cancels the booking if customer dont want that room or need to reserve again '''
    db_customer = crud.get_customer_by_id(db, customer_id=customer_id)
    # Bug fix / consistency: 404 on unknown ids (matching get_customer_by_id)
    # instead of passing None into crud.cancel_room.
    if db_customer is None:
        raise HTTPException(status_code=404, detail="User not found")
    return crud.cancel_room(db, db_customer)
|
13,909 | fa703992ea8db534856c1d623875148be9a369ca | # Write a function called make_shirt() that accepts a size and the
# text of a message that should be printed on the shirt. The function should print
# a sentence summarizing the size of the shirt and the message printed on it.
# Call the function once using positional arguments to make a shirt. Call the
# function a second time using keyword arguments.
# Modify the make_shirt() function so that shirts are large
# by default with a message that reads I love Python.
def make_shirt(size='large', message='I love Python'):
    """Summarize a shirt order: title-cased size plus the upper-cased slogan."""
    summary = f'The shirt made is size {size.title()} with "{message.upper()}" printed on it'
    print(summary)
# Make a large shirt and a medium shirt with the default message, and a
# shirt of any size with a different message.
make_shirt('large')                         # default message
make_shirt('medium')                        # default message
make_shirt('small', 'buy this man a beer')  # custom message
# Write a function called describe_city() that accepts the name of
# a city and its country. The function should print a simple sentence, such as
# Reykjavik is in Iceland. Give the parameter for the country a default value.
# Call your function for three different cities, at least one of which is not in the
# default country.
print("\n")
def describe_city(city, country='north america'):
    """Print '<City> is in <Country>', with a default country."""
    sentence = "{0} is in {1}".format(city.title(), country.title())
    print(sentence)
describe_city('san francisco')                 # default country
describe_city(city='venice', country='italy')  # keyword arguments
describe_city('tokyo', 'japan')                # positional arguments
|
13,910 | cd80cccc843df8b58c47299845096b812568ce3a | # -*- coding: utf-8 -*-
from apt_pkg import __init__
from cwr.validation.common import Validation, ValidationStatus, ASValidationStatus
"""
Base classes for implementing validation rules.
"""
__author__ = 'Yaroslav O. Golub'
__license__ = 'MIT'
__status__ = 'Development'
class ValidationTransaction(Validation):
    """Transaction-level validation rule; currently a permissive stub."""
    # Configuration object driving the validation (set in __init__).
    config = None

    def __init__(self, config):
        self.config = config

    def validate(self, transaction):
        # Placeholder implementation: accepts every transaction.
        return ASValidationStatus()
|
13,911 | 2306fdd0864a3342918988e279cc190e2eb85899 | import Parser.ConditionParser as condParser
import Parser.ParserUtils as utils
from Lexers.Lexer import Lexer
from Lexers.tokens import *
from Model.Rule import Rule
from Model.Action import Action
def parse_from_lexer(lexer, symbol_table, engine, passed_token):
    """Parse a stream of ``rule`` definitions from *lexer* into *engine*.

    :param passed_token: a token the caller has already consumed that should
        be processed first, or None to read from the lexer immediately.
    """
    rotated = False
    while True:
        # Use the caller-supplied token exactly once, then read normally.
        if passed_token is None or rotated:
            token = utils.get_token_skipping_whitespace(lexer)
        else:
            token = passed_token
            rotated = True
        if token.token_type == TokenType.eof:
            return
        if token.token_type != TokenType.keyword or token.token_value != 'rule':
            raise ValueError("This error should not have occurred. rule keyword is expected, Sir")
        token = utils.get_token_skipping_whitespace(lexer)
        if token.token_type == TokenType.given_name:
            # 'rule <filename>': recursively parse rules from another file.
            parsed_file = open(token.token_value)
            new_lexer = Lexer(parsed_file)
            parse_from_lexer(new_lexer, symbol_table, engine, None)
        elif token.token_type == TokenType.structure_operator_start:
            # Inline rule body: { id; priority; condition; actions }
            rid = get_id(lexer, symbol_table)
            prio = get_priority(lexer)
            condition = condParser.get_condition(lexer, symbol_table, engine)
            actions = get_actions(lexer, symbol_table, engine)
            utils.expect_otherchar(lexer, TokenType.structure_operator_end, '}')
            rule = Rule(rid, prio, condition, actions)
            symbol_table.add_rule_id(rid)
            engine.rules[rid] = rule
        else:
            raise ValueError("Either a given_name of a file containing a rule or { are expected, Sir")
        rotated = True
def get_id(lexer, symbol_table):
    """Parse ``id : <number> ;`` and return the (not yet taken) rule id."""
    token = utils.get_token_skipping_whitespace(lexer)
    if token.token_type != TokenType.keyword or token.token_value != 'id':
        raise ValueError('Expected id keyword, Sir. Found ' + str(token.token_value))
    token = utils.get_token_skipping_whitespace(lexer)
    if token.token_type != TokenType.definition_operator:
        # Bug fix: the original passed the message as two ValueError args
        # (a stray comma instead of string concatenation).
        raise ValueError('Expected : ,found ' + str(token.token_value) + " Sir.")
    token = utils.get_token_skipping_whitespace(lexer)
    if token.token_type != TokenType.number:
        raise ValueError('Rule id must be a number, found ' + str(token.token_value))
    rule_id = token.token_value
    if symbol_table.is_rule_id_busy(rule_id):
        # Bug fix: original message ran the words together ("Id5for rule...").
        raise ValueError('Id ' + str(rule_id) + ' for rule is already taken, Sir')
    token = utils.get_token_skipping_whitespace(lexer)
    if token.token_type != TokenType.instr_end:
        raise ValueError('Expected ; found ' + str(token.token_value) + " ,Sir.")
    return rule_id
def get_priority(lexer):
    """Parse ``priority : <number> ;`` and return the priority value."""
    token = utils.get_token_skipping_whitespace(lexer)
    if token.token_type != TokenType.keyword or token.token_value != 'priority':
        raise ValueError('Expected priority keyword, Sir. Found ' + str(token.token_value))
    token = utils.get_token_skipping_whitespace(lexer)
    if token.token_type != TokenType.definition_operator:
        # Bug fix: the original passed the message as two ValueError args
        # (a stray comma instead of string concatenation).
        raise ValueError('Expected : ,found ' + str(token.token_value) + " Sir.")
    token = utils.get_token_skipping_whitespace(lexer)
    if token.token_type != TokenType.number:
        # Bug fix: this message was copy-pasted from get_id and wrongly
        # complained about the rule id while parsing the priority.
        raise ValueError('Rule priority must be a number, found ' + str(token.token_value))
    # Local renamed from the copy-pasted ``rule_id``: this is the priority.
    priority = token.token_value
    token = utils.get_token_skipping_whitespace(lexer)
    if token.token_type != TokenType.instr_end:
        raise ValueError('Expected ; found ' + str(token.token_value) + " ,Sir.")
    return priority
def get_actions(lexer, symbol_table, engine):
    """Parse an `actions : <action> (, <action>)* ;` clause.

    Returns:
        The list of parsed Action objects, in source order.

    Raises:
        ValueError: if the clause header is malformed or an unexpected token
            appears between actions.
    """
    actions = []
    utils.expect_keyword(lexer, 'actions')
    utils.expect_otherchar(lexer, TokenType.definition_operator, ':')
    expecting_separator = False
    while True:
        if expecting_separator:
            token = utils.get_token_skipping_whitespace(lexer)
            if token.token_type == TokenType.list_separator:
                expecting_separator = False
                continue
            if token.token_type == TokenType.instr_end:
                break
            # Bug fix: tokens that were neither ',' nor ';' used to be
            # silently discarded here, hiding malformed rule files.
            raise ValueError('Expected , or ; after an action, found ' + str(token.token_value) + " ,Sir.")
        else:
            actions.append(parse_action(lexer, symbol_table, engine))
            expecting_separator = True
    return actions
def parse_action(lexer, symbol_table, engine):
    """Parse one action clause and build the corresponding Action.

    Grammar:
        action := ('buy' | 'sell') ('stock' | 'currency') . <name> <details>

    Raises:
        ValueError: on any grammar violation or invalid amount.
    """
    token = utils.get_token_skipping_whitespace(lexer)
    # Bug fix: the original used `and` here, so a keyword token with any
    # value other than buy/sell slipped through validation unnoticed.
    if token.token_type != TokenType.keyword or token.token_value not in ['buy', 'sell']:
        raise ValueError('Buy or sell expected to start action, found something else, Sir')
    is_buy = token.token_value == 'buy'
    token = utils.get_token_skipping_whitespace(lexer)
    # Bug fix: same `and` -> `or` correction as above.
    if token.token_type != TokenType.keyword or token.token_value not in ['stock', 'currency']:
        raise ValueError('An action requires you to choose either stock or currency, nothing else, Sir')
    is_stock = token.token_value == 'stock'
    utils.expect_access_operator(lexer)
    symbol_name = utils.expect_given_name(lexer)
    if is_stock:
        symbol_id = symbol_table.get_stock(symbol_name)
    else:
        symbol_id = symbol_table.get_currency(symbol_name)
    if is_buy:
        return _parse_buy_details(lexer, symbol_table, engine, is_stock, symbol_id)
    return _parse_sell_details(lexer, engine, is_stock, symbol_id)
def _parse_sell_details(lexer, engine, is_stock, symbol_id):
    # Parse the tail of a sell action: `amount <n>|ALL`, `part <1-100>`,
    # or `for <price>`.
    token = utils.get_token_skipping_whitespace(lexer)
    if token.token_type == TokenType.keyword and token.token_value == 'amount':
        token = utils.get_token_skipping_whitespace(lexer)
        if token.token_type == TokenType.number:
            if token.token_value < 0:
                raise ValueError("You can only sell a positive amount, Sir")
            return build_action(engine, False, is_stock, symbol_id, amount=token.token_value)
        elif token.token_type == TokenType.keyword and token.token_value == 'ALL':
            return build_action(engine, False, is_stock, symbol_id, amount='ALL')
        else:
            raise ValueError("Expecting a number or ALL keyword, found neither of them, Sir")
    elif token.token_type == TokenType.keyword and token.token_value == 'part':
        amount = utils.expect_number(lexer)
        if not 1 <= amount <= 100:
            raise ValueError("Part must be 1 to 100 no more no less, Sir")
        return build_action(engine, False, is_stock, symbol_id, part=amount)
    elif token.token_type == TokenType.keyword and token.token_value == 'for':
        curr_amount = utils.expect_number(lexer)
        if curr_amount < 0:
            raise ValueError("You can only sell for a positive price, Sir")
        return build_action(engine, False, is_stock, symbol_id, curr_amount=curr_amount)
    else:
        raise ValueError("Expecting either amount, part, for, none of those were found, Sir")
def _parse_buy_details(lexer, symbol_table, engine, is_stock, symbol_id):
    # Parse the tail of a buy action: `amount <n>|MAX for OWN|ANY|currency.<name>`.
    token = utils.get_token_skipping_whitespace(lexer)
    if token.token_type != TokenType.keyword or token.token_value != 'amount':
        raise ValueError("Expecting keyword amount, Sir")
    token = utils.get_token_skipping_whitespace(lexer)
    buy_amount = 0
    # Bug fix: the original used `or`, so ANY keyword token (whatever its
    # value) was silently treated as MAX.
    if token.token_type == TokenType.keyword and token.token_value == 'MAX':
        buy_amount = 'MAX'
    elif token.token_type == TokenType.number:
        buy_amount = token.token_value
        if buy_amount < 0:
            raise ValueError("You can only buy a positive amount, Sir")
    else:
        raise ValueError("Unexpected Token, Sir")
    utils.expect_keyword(lexer, 'for')
    token = utils.get_token_skipping_whitespace(lexer)
    if token.token_type == TokenType.keyword and token.token_value == 'OWN':
        currency_used_id = 'OWN'
    elif token.token_type == TokenType.keyword and token.token_value == 'ANY':
        currency_used_id = 'ANY'
    elif token.token_type == TokenType.keyword and token.token_value == 'currency':
        utils.expect_access_operator(lexer)
        currency_name = utils.expect_given_name(lexer)
        currency_used_id = symbol_table.get_currency(currency_name)
    else:
        raise ValueError("Unexpected token, Sir")
    return build_action(engine, True, is_stock, symbol_id, buy_amount=buy_amount, curr_used=currency_used_id)
def build_action(engine, is_buy, is_stock, symbol_id, amount=None, part=None, curr_amount=None, buy_amount=None,
                 curr_used=None):
    """Construct the Action matching the parsed buy/sell clause.

    Exactly one of amount/part/curr_amount must be set for sells; buys use
    buy_amount plus curr_used. Raises ValueError for sell-currency or when
    no sell quantity was provided.
    """
    if is_buy:
        # Buying: dispatch on the asset kind, same argument shape either way.
        handler = engine.buy_stock_amount if is_stock else engine.buy_currency_amount
        return Action(handler, symbol_id, buy_amount, curr_used)
    # Selling: only stocks may be sold.
    if not is_stock:
        raise ValueError("Currency can not be sold, Sir")
    if amount is not None:
        return Action(engine.sell_stock_amount, symbol_id, amount)
    if part is not None:
        return Action(engine.sell_stock_part, symbol_id, part)
    if curr_amount is not None:
        return Action(engine.sell_stock_for_currency, symbol_id, curr_amount)
    raise ValueError("Critical error, during construction, Sir")
|
13,912 | a562cfe06a0d08ea14ed13150e7d9db5cf23694d | # -*- coding: utf-8 -*-
"""
Created on Mon May 7 14:28:31 2018
@author: Administrator
"""
from copy import deepcopy
from sklearn import datasets
from FeatureEvaluate import CartGini
from FeatureEvaluate import gini
from FeatureEvaluate import SE
from FeatureEvaluate import SETwo
from sklearn.preprocessing import LabelEncoder
import numpy as np
import pandas as pd
from sklearn.cross_validation import train_test_split
class Node:
    """A node of the binary CART tree.

    label is the majority class at the node; splitAttr/splitPoint describe
    the split used by internal nodes; lNode/rNode are the two children;
    isLeaf marks whether the node currently acts as a leaf (pruning toggles
    this); numInstances/selfGini/childWGinis/numChildren cache statistics
    used by the pruning pass.
    """
    def __init__(self, label=None, splitAttr=None, splitPoint=None, isLeaf=True, numInstances=None,
                 selfGini=None, childWGinis=None, lNode=None, rNode=None, numChildren=None):
        self.label = label
        self.splitAttr = splitAttr
        self.splitPoint = splitPoint
        self.lNode = lNode
        self.rNode = rNode
        self.isLeaf = isLeaf
        self.numInstances = numInstances
        self.selfGini = selfGini
        self.childWGinis = childWGinis
        self.numChildren = numChildren
class CARTClassifier:
    """CART decision-tree classifier for mixed discrete/continuous attributes.

    Grows a binary tree on a training split using the Gini index, then
    post-prunes it by repeatedly collapsing the weakest internal node and
    keeping the subtree with the best accuracy on a held-out validation
    split. ``isDisc`` is one boolean per column: True marks a discrete
    (categorical) attribute, False a continuous one.
    """
    def __init__(self,isDisc):
        # x/y: encoded training data (set in fit). attrNumValues: category
        # count per discrete column (None for continuous). xLabens/yLaben:
        # fitted LabelEncoders. __mina/__cutNode: scratch state for pruning.
        self.x,self.y,self.attrNumValues,self.isDisc=None,None,[],np.array(isDisc)
        self.xLabens,self.yLaben=[],None
        self.__mina,self.__cutNode=None,None
    def __LabelEncoder(self):
        # Label-encode every discrete column of self.x and the target self.y.
        newX=np.empty(shape=self.x.shape)
        for i in range(self.x.shape[1]):# for each column
            if(self.isDisc[i]):# discrete: encode and record its cardinality
                laben=LabelEncoder().fit(self.x[:,i])
                self.xLabens.append(laben)
                self.attrNumValues.append(len(laben.classes_))
                newX[:,i]=laben.transform(self.x[:,i])
            else:# continuous: copy through unchanged
                self.xLabens.append(None)
                self.attrNumValues.append(None)
                newX[:,i]=self.x[:,i]
        self.x=newX
        self.yLaben=LabelEncoder().fit(self.y)
        self.y=self.yLaben.transform(self.y)
    def __labelCount(self,y):
        # Return a {label: occurrence count} dict for the label vector y.
        di=dict()
        for i in list(y):
            if (i in di.keys()):
                di[i]+=1
            else:
                di[i]=1
        return di
    def __splitData(self,ins,attrIndex,splitPoint):
        """
        Return the pair of row-index lists produced by the split.
        Convention: for a discrete attribute the first list holds rows whose
        value equals the split point and the second the rest; for a
        continuous attribute the lists are <= and > the split point.
        """
        res=[[],[]]
        if(self.isDisc[attrIndex]):# discrete attribute
            for i in ins:
                if(self.x[i,attrIndex]==splitPoint):
                    res[0].append(i)
                else:
                    res[1].append(i)
        else:# continuous attribute
            for i in ins:
                if(self.x[i,attrIndex]<=splitPoint):
                    res[0].append(i)
                else:
                    res[1].append(i)
        return res
    def __CWAG(self,node):
        """
        Compute, bottom-up, each node's instance-weighted average Gini of
        its children and cache it on node.childWGinis.
        """
        if(node.isLeaf):# leaf: its own Gini is the subtree Gini
            node.childWGinis=node.selfGini
        else:
            node.childWGinis=(self.__CWAG(node.rNode)*node.rNode.numInstances+self.__CWAG(node.lNode)*node.lNode.numInstances)/node.numInstances
        return node.childWGinis
    def __numChildren(self,node):
        # Count the leaves in the subtree, caching the count on internal nodes.
        # node=Node()
        if(node.isLeaf):
            return 1
        else:
            node.numChildren=self.__numChildren(node.lNode)+self.__numChildren(node.rNode)
            return node.numChildren
    def dfs(self,node):
        # Debug helper: pre-order dump of the tree (labels printed in Chinese).
        print("是叶节点?:",node.isLeaf," 自身基尼:",node.selfGini," 孩子基尼加权平均:",node.childWGinis,
              " 分裂属性:",node.splitAttr," 分裂点:",node.splitPoint," 叶节点个数:",node.numChildren)
        if(node.isLeaf ==False):
            self.dfs(node.lNode)
            self.dfs(node.rNode)
    def __build(self,ins,attrIns):
        # Recursively grow the tree over row indices `ins`, choosing splits
        # from the candidate attribute indices `attrIns`.
        di=self.__labelCount(self.y[ins])
        # print(di)
        if(len(di)==1):# the subset is pure: a single class remains
            # leaf node
            # print("只有一种类别",di)
            return Node(label=list(di.keys())[0],isLeaf=True,numInstances=len(ins),selfGini=0,childWGinis=0)
        elif(len(attrIns) ==0):# attributes exhausted: majority-vote leaf
            # print("属性用尽",di)
            # print(len(ins))
            t=gini(self.y[ins])
            # print("gini",t)
            return Node(label=max(di,key=di.get),isLeaf=True,numInstances=len(ins),selfGini=t,childWGinis=t)
        else:# internal node
            # Find the best split attribute by minimum Gini.
            ginis=np.array(CartGini(self.x[ins,:][:,attrIns],self.y[ins],self.isDisc[attrIns]))
            minGiniIndex=ginis[:,0].argmin()# attrIns[minGiniIndex] is the best split attribute
            splitAttr=attrIns[minGiniIndex]
            splitPoint=ginis[minGiniIndex,1]
            # minGini=ginis[minGiniIndex,0]
            # Partition the rows according to the chosen split.
            splitedIns=self.__splitData(ins,splitAttr,splitPoint)
            # Remove the chosen attribute from the candidate list.
            attrIns.remove(splitAttr)
            # Build a child node for each partition.
            childNodes=[]# childNodes[i] is the child for the i-th partition
            for i in splitedIns:
                # newNode=node()
                if(len(i)==0):# empty partition: make a majority-class leaf
                    # print("子节点无数据",ginis)
                    childNodes.append(Node(label=max(di,key=di.get),isLeaf=True,numInstances=0,selfGini=0,childWGinis=0))
                else:
                    childNodes.append(self.__build(i,deepcopy(attrIns)))
            return Node(label=max(di,key=di.get),isLeaf=False,splitAttr=splitAttr,splitPoint=splitPoint,
                        lNode=childNodes[0],rNode=childNodes[1],numInstances=len(ins),selfGini=gini(self.y[ins]))
    def __prune(self,X,y):
        """
        Post-pruning: repeatedly collapse the internal node with the smallest
        (selfGini - childWGinis)/(leaves - 1) ratio, then keep the member of
        the resulting subtree sequence with the best accuracy on the
        validation set (X, y).
        """
        # Encode the validation features with the encoders fitted on training data.
        x=np.array(X)
        newX=np.empty(shape=x.shape)
        for i in range(x.shape[1]):
            if(self.isDisc[i]):
                newX[:,i]=self.xLabens[i].transform(x[:,i])
            else:
                newX[:,i]=x[:,i]
        cutNodes,bestSubTree,bestGini=[],-1,float("-inf")
        while(True):
            self.__mina=float("inf")
            self.__getACutNode(self.root)
            cutNodes.append(self.__cutNode)
            # Collapsing is just flipping the isLeaf flag; children stay attached.
            self.__cutNode.isLeaf=True
            self.__CWAG(self.root)
            # t=self.__validationGini(newX,y)
            t=self.__accuracy(newX,y)
            # print("t:",t)
            if(t>bestGini):
                bestSubTree=len(cutNodes)-1
                bestGini=t
            if(self.__cutNode is self.root):# pruning finished: root reached
                break
        # Undo every cut made after the best-scoring subtree.
        for i in range(bestSubTree+1,len(cutNodes)):
            cutNodes[i].isLeaf=False
        print(len(cutNodes),bestSubTree)
        self.__CWAG(self.root)
    def __getACutNode(self,node):
        # Find the internal node with the minimal cost-complexity ratio and
        # stash it in self.__cutNode (self.__mina tracks the running minimum).
        # node=Node()
        # global mina,cutNode
        if(node.isLeaf == False):
            t=(node.selfGini-node.childWGinis)/(node.numChildren-1)
            if(t<self.__mina):
                self.__mina=t
                self.__cutNode=node
            self.__getACutNode(node.lNode)
            self.__getACutNode(node.rNode)
    def __accuracy(self,X,y):
        # Fraction of rows in (X, y) the current tree classifies correctly.
        res=[]
        for i in range(len(X)):
            res.append(self.__predict(self.root,X[i,:]))
        res=list( self.yLaben.inverse_transform(res))
        y=list(y)
        tru=0
        for i in range(len(y)):
            if(res[i]==y[i]):
                tru+=1
        return tru/len(y)
    def __validationGini(self,X,y):
        # Alternative pruning criterion (currently unused): weighted average
        # Gini of the true labels grouped by predicted label.
        res=[]
        for i in range(len(X)):
            res.append(self.__predict(self.root,X[i,:]))
        res=list( res)
        ins=dict()
        y=np.array(y)
        for i in range(len(res)):
            if(res[i] not in ins.keys()):
                ins[res[i]]=[]
                ins[res[i]].append(i)
            else:
                ins[res[i]].append(i)
        t=0.0
        for i in ins.keys():
            t+=len(ins[i])*gini(y[ins[i]])
        return t/len(res)
    def fit(self,X,y):
        # Split off an internal validation set, grow the full tree on the
        # training part, then prune against the validation part.
        # NOTE(review): sklearn.cross_validation was removed in sklearn 0.20;
        # the import at the top of the file should move to model_selection.
        x_train, x_test, y_train, y_test = train_test_split(X, y, test_size=0.33, random_state=10,stratify=y)
        self.x,self.y=np.array(x_train),np.array(y_train)
        self.__LabelEncoder()
        self.root=self.__build(ins=list(range(len(self.x))),
                               attrIns=list(range(self.x.shape[1])))
        self.__CWAG(self.root)
        self.__numChildren(self.root)
        # self.dfs(self.root)
        self.__prune(x_test,y_test)
        # print("减值后")
        # self.dfs(self.root)
    def __predict(self,node,x):
        # Walk the tree for a single (already encoded) feature row.
        # node=Node()
        if(node.isLeaf):
            return node.label
        else:
            if(self.isDisc[node.splitAttr]):
                if(x[node.splitAttr] == node.splitPoint):
                    return self.__predict(node.lNode,x)
                else:
                    return self.__predict(node.rNode,x)
            else:# continuous attribute
                if(x[node.splitAttr]<= node.splitPoint):
                    return self.__predict(node.lNode,x)
                else:
                    return self.__predict(node.rNode,x)
    def predict(self,X):
        """Predict labels for raw (unencoded) samples X, returned in the
        original label space."""
        res=[]
        x=np.array(X)
        newX=np.empty(shape=x.shape)
        for i in range(x.shape[1]):
            if(self.isDisc[i]):
                newX[:,i]=self.xLabens[i].transform(x[:,i])
            else:
                newX[:,i]=x[:,i]
        for i in range(len(newX)):
            res.append(self.__predict(self.root,newX[i,:]))
        return self.yLaben.inverse_transform(res)
|
13,913 | f46230a123b5f91517fb530063de7b9eeb54e4f6 | import sys
from tkinter import *
from docxtpl import DocxTemplate
from test import *
from PyQt5 import QtCore, QtGui, QtWidgets
from test import Ui_MainWindow
class MyWin(QtWidgets.QMainWindow):
    """Main window: collects form fields and renders the selected docx
    templates via docxtpl, saving the results next to the script."""
    def __init__(self, parent=None):
        QtWidgets.QWidget.__init__(self, parent)
        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)
        self.ui.pushButton_go.clicked.connect(self._generate)
        # Exit button
        self.ui.pushButton_exit.clicked.connect(self.close_window)
    def close_window(self):
        # Bug fix: `self` was missing from the signature, so clicking the
        # exit button raised TypeError instead of closing the window.
        # NOTE(review): QWidget.close() is the conventional way to close a
        # window; destroy() is kept here to preserve existing behavior.
        self.destroy()
    # "Generate" button
    def _generate(self):
        """Read every form field, derive the signatory titles, and render
        each template whose checkbox is ticked."""
        number = self.ui.lineEdit_number.text()
        chkoap = self.ui.lineEdit_chkoap.text()
        stkoap = self.ui.lineEdit_stkoap.text()
        dd1 = self.ui.lineEdit_dd1.text()
        dd2 = self.ui.lineEdit_dd2.text()
        dd3 = self.ui.lineEdit_dd3.text()
        dd4 = self.ui.lineEdit_dd4.text()
        dd5 = self.ui.lineEdit_dd5.text()
        month1 = self.ui.comboBox_month1.currentText()
        month2 = self.ui.comboBox_month2.currentText()
        month3 = self.ui.comboBox_month3.currentText()
        month4 = self.ui.comboBox_month4.currentText()
        month5 = self.ui.comboBox_month5.currentText()
        ul = self.ui.lineEdit_ul.text()
        zp_ip = self.ui.lineEdit_zp_ip.text()
        zp_rp = self.ui.lineEdit_zp_rp.text()
        zp_dp = self.ui.lineEdit_zp_dp.text()
        ogrn = self.ui.lineEdit_ogrn.text()
        inn = self.ui.lineEdit_inn.text()
        kpp = self.ui.lineEdit_kpp.text()
        ad_index = self.ui.lineEdit_ad_index.text()
        subrf = self.ui.lineEdit_subrf.text()
        naspunkt = self.ui.lineEdit_naspunkt.text()
        ulitsadom = self.ui.lineEdit_ulitsadom.text()
        dateul = self.ui.lineEdit_dateul.text()
        fio1 = self.ui.comboBox_perar.currentText()
        fio2 = self.ui.comboBox_zapros.currentText()
        fio3 = self.ui.comboBox_prinyal.currentText()
        fio4 = self.ui.comboBox_otlozh.currentText()
        fio5 = self.ui.comboBox_rassm.currentText()
        # Derive job titles / declined name forms for each signatory slot.
        if fio1 == "В.М. Мамонтов":
            znt1 = "Заместитель"
            znt1_1 = "заместителя"
            fio1_1 = "В.М. Мамонтова"
        else:
            znt1 = "И.о. заместителя"
            znt1_1 = "и.о. заместителя"
            fio1_1 = "Н.П. Алексеевой"
        if fio2 == "В.М. Мамонтов":
            znt2 = "Заместитель"
        else:
            znt2 = "И.о. заместителя"
        if fio3 == "В.М. Мамонтов":
            znt3 = "Заместитель"
            znt3_1 = "Заместителю"
            fio3_1 = "В.М. Мамонтову"
        else:
            znt3 = "И.о. заместителя"
            znt3_1 = znt3
            fio3_1 = "Н.П. Алексеевой"
        if fio4 == "В.М. Мамонтов":
            znt4 = "Заместитель"
        else:
            znt4 = "И.о. заместителя"
        if fio5 == "В.М. Мамонтов":
            znt5 = "Заместитель"
        else:
            znt5 = "И.о. заместителя"
        # If file 1 is selected (DONE)
        if self.ui.checkBox_01.isChecked():
            doc1 = DocxTemplate("templates/1.docx")
            # NOTE(review): the key 'orgn' looks like a typo for 'ogrn', but
            # it must match the placeholder in templates/1.docx — confirm
            # against the template before renaming.
            context1 = {
                'dd2': dd2, 'month2': month2, 'number': number,
                'ul': ul, 'orgn': ogrn, 'inn': inn, 'kpp': kpp,
                'ad_index': ad_index, 'subrf': subrf,
                'naspunkt': naspunkt, 'ulitsadom': ulitsadom,
                'dateul': dateul, 'chkoap': chkoap, 'stkoap': stkoap,
                'znt1': znt1, 'fio1': fio1
            }
            doc1.render(context1)
            doc1.save(number + "__ 2.1 РЕШЕНИЕ о передаче дела для проведения АР.docx")
        # If file 2 is selected (DONE)
        if self.ui.checkBox_02.isChecked():
            doc2 = DocxTemplate("templates/2.docx")
            context2 = {
                'dd2': dd2, 'month2': month2, 'number': number,
                'ul': ul, 'orgn': ogrn, 'inn': inn, 'kpp': kpp,
                'ad_index': ad_index, 'subrf': subrf,
                'naspunkt': naspunkt, 'ulitsadom': ulitsadom,
                'dateul': dateul, 'znt1_1': znt1_1, 'fio1_1': fio1_1
            }
            doc2.render(context2)
            doc2.save(number + "__ 2.2 Определение о принятии дела к своему производству.docx")
        # If file 8 is selected
        if self.ui.checkBox_08.isChecked():
            doc8 = DocxTemplate("templates/8.docx")
            context8 = {'number': number}
            doc8.render(context8)
            doc8.save(number + "__ 2.8 запрос в ГИБДД.docx")
        # If file 9 is selected
        if self.ui.checkBox_09.isChecked():
            doc9 = DocxTemplate("templates/9.docx")
            context9 = {'number': number}
            doc9.render(context9)
            doc9.save(number + "__ 2.9 запрос в Росреестр.docx")
        # If file 10 is selected
        if self.ui.checkBox_10.isChecked():
            doc10 = DocxTemplate("templates/10.docx")
            context10 = {
                'number': number
            }
            doc10.render(context10)
            doc10.save(number + "__ 3.1 рапорт.docx")
        # If file 11 is selected
        if self.ui.checkBox_11.isChecked():
            doc11 = DocxTemplate("templates/11.docx")
            context11 = {
                'number': number, 'dd4': dd4,
                'znt3': znt3, 'fio3': fio3
            }
            doc11.render(context11)
            doc11.save(number + "__ 3.2 справка об издержках.docx")
if __name__ == "__main__":
    # Standard PyQt bootstrap: create the application, show the main window,
    # and hand control to the Qt event loop until the window closes.
    app = QtWidgets.QApplication(sys.argv)
    myapp = MyWin()
    myapp.show()
    sys.exit(app.exec_())
13,914 | 780f100b81554f845d984010937089489cc21fcf | '''
Crie um programa que leia o ano de nascimento de sete pessoas. No final, mostre quantas pessoas ainda não
atingiram a maioridade e quantas já são maiores (considerar 21 anos para maioridade).
'''
# Read the birth year of seven people and report how many are of age
# (21 or older) and how many are still minors.
print("=========== desafio 054============")
import datetime
maiores = menores = 0
for pessoa in range(1, 8):
    print(pessoa, end=' - ')
    ano = int(input('Informe o ano de nascimento: '))
    idade = datetime.date.today().year - ano
    print(idade)
    if idade > 20:
        maiores += 1
    else:
        menores += 1
print('{} maiores de idade e {} menores de idade'.format(maiores, menores))
13,915 | 690f2cfccd80742cd0dec5f2926bf825e08ac037 |
from subprocess import check_call
import sys
import os
# Resolve the directory containing this file so the script can be launched
# from any working directory.
base_dir = os.path.dirname(os.path.abspath(__file__))
manage_py = os.path.join(base_dir, "manage.py")
# Run Django's development server with the dev settings module, using the
# same interpreter that started this script.
check_call([sys.executable, manage_py, "runserver", "--settings=wwustc.dev-settings"])
13,916 | f3c8e321abb1b9478cf7774807ff3fadfbcada84 | import socket
import os
import json
from dotenv import load_dotenv
from pynput import mouse
from signal import signal, SIGINT
from sys import exit
from multiprocessing import Process
load_dotenv()
size = width, height = 1280, 800
MAX = 32500
def handler(signal_received, frame):
    """SIGINT handler: flip the module-level run flag so the main loop can
    shut down and clean up instead of the process dying abruptly."""
    global run_code
    print('SIGINT or CTRL-C detected. Exiting gracefully')
    run_code = False
def Main(s):
    """Consume JSON-encoded touch events from UDP socket `s` and drive the
    local mouse pointer via pynput.

    Event schema (assumed from usage — TODO confirm against the sender):
      code 0: X-axis value, scaled by screen width and offset by `num`;
      code 1: Y-axis value, which triggers the pointer move;
      code 330 with type 1 and value 0: touch release -> left click.
    """
    controller = mouse.Controller()
    # Bug fix: posX/posY were unbound until the first matching event, so an
    # initial Y-axis (code 1) packet raised NameError at the position update.
    posX = posY = 0.0
    while True:
        data, addr = s.recvfrom(1024)
        data = data.decode('utf-8')
        #pos = type, posX, posY = tuple(map(int, data.split(',')))
        print("Raw data", data)
        event = json.loads(data)
        print("Message from: " + str(addr))
        print("From connected client: " + str(event))
        if event.get("code") == 0:
            posX = event.get("value") / MAX * width + width * event.get("num")
        if event.get("code") == 1:
            posY = event.get("value") / MAX * height
            print("set position {:f} x {:f}".format(posX, posY))
            controller.position = (posX, posY)
        if event.get("code") == 330 and event.get("type") == 1 and event.get("value") == 0:
            # Need to send two clicks to get one. Is it a bug?
            controller.click(mouse.Button.left, 1)
        # TODO: add double click support
        # TODO: add finger press and move support for painting and marking
        # TODO: filter out ghost clicks
if __name__=='__main__':
    run_code = True
    # Bind address/port come from the environment (.env loaded via load_dotenv).
    host = os.environ.get("HOST_IP", '') #Server ip
    port = int(os.environ.get("HOST_PORT", 4000))
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    s.bind((host, port))
    # The receive loop runs in a child process so the parent can watch for SIGINT.
    program = Process(target=Main, args=(s,))
    # Tell Python to run the handler() function when SIGINT is received
    signal(SIGINT, handler)
    print('Running. Press CTRL-C to exit.')
    # Start Main
    program.start()
    print("Server start listening on {0} and port {1}".format(host, port))
    # NOTE(review): this busy-wait spins a full CPU core; a program.join()
    # or an Event wait would be gentler — confirm before changing.
    while run_code:
        # run forever
        pass
    # Clean up
    program.terminate()
    s.close()
13,917 | 5432886d87b2aade8528f6c472ce71696cb040f1 | from django.contrib.sites.models import _simple_domain_name_validator
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import pgettext_lazy
from . import AuthenticationBackends
from decimal import Decimal
from django.core.validators import MinValueValidator
from datetime import datetime
import datetime as t
@python_2_unicode_compatible
class SiteSettings(models.Model):
    """Site-wide configuration: branding, contact details, business hours,
    loyalty/credit settings and SMS gateway credentials."""
    domain = models.CharField(
        pgettext_lazy('Site field', 'domain'), max_length=100,
        validators=[_simple_domain_name_validator], blank=True, null=True,default='')
    name = models.CharField(pgettext_lazy('Site field', 'name'), max_length=50,blank=True, null=True)
    email = models.EmailField(pgettext_lazy('Site field', 'email'), max_length=50,blank=True, null=True)
    header_text = models.CharField(
        pgettext_lazy('Site field', 'header text'), max_length=200, blank=True)
    wholesale_name = models.CharField(
        pgettext_lazy('Site field', 'wholesale field name'), max_length=200, default='Wholesale', blank=True)
    description = models.CharField(
        pgettext_lazy('Site field', 'site description'), max_length=500,
        blank=True)
    show_transfer = models.BooleanField(default=True, blank=False)
    loyalty_point_equiv = models.DecimalField(
        pgettext_lazy('Site field', 'loyalty points'), default=Decimal(0), max_digits=100, decimal_places=2)
    # NOTE(review): unique=True on an integer field defaulting to 0 looks
    # unintended (only one row could hold each value) — confirm.
    max_credit_date = models.IntegerField(pgettext_lazy('Site field', 'Maximum credit sale expiration in days'),
                                          validators=[MinValueValidator(0)], unique=True, default=Decimal(0))
    opening_time = models.TimeField(pgettext_lazy('Site field', 'opening time'),
                                    default=t.time(6, 00))
    closing_time = models.TimeField(pgettext_lazy('Site field', 'closing time'),
                                    default=t.time(21, 00))
    sms_gateway_username = models.CharField(
        pgettext_lazy('Site field', 'sms gateway username'), max_length=500,
        blank=True)
    sms_gateway_apikey = models.CharField(
        pgettext_lazy('Site field', 'sms gateway api key'), max_length=500,
        blank=True)
    image = models.ImageField(upload_to='employee', null=True, blank=True)
    def __str__(self):
        # Bug fix: `name` is nullable, and __str__ returning None raises
        # TypeError; coerce to str for consistency with the other models here.
        return str(self.name)
    def available_backends(self):
        """Names of the authentication backends configured for this site."""
        return self.authorizationkey_set.values_list('name', flat=True)
@python_2_unicode_compatible
class AuthorizationKey(models.Model):
    """Key/secret credentials for one authentication backend of a site."""
    site_settings = models.ForeignKey(SiteSettings)
    name = models.CharField(
        pgettext_lazy('Authentication field', 'name'), max_length=20,
        choices=AuthenticationBackends.BACKENDS)
    key = models.TextField(pgettext_lazy('Authentication field', 'key'))
    password = models.TextField(
        pgettext_lazy('Authentication field', 'password'))
    class Meta:
        # At most one entry per backend name per site.
        unique_together = (('site_settings', 'name'),)
    def __str__(self):
        return self.name
    def key_and_secret(self):
        # Convenience accessor returning the (key, secret) pair.
        return self.key, self.password
class Bank(models.Model):
    """A bank, referenced by BankBranch."""
    name = models.CharField(max_length=100, null=True, blank=True)
    def __str__(self):
        return str(self.name)
class BankBranch(models.Model):
    """A branch belonging to a Bank."""
    name = models.CharField(max_length=100, null=True, blank=True)
    # NOTE(review): max_length on a ForeignKey is ignored by Django — likely
    # a copy-paste leftover from a CharField; harmless but confirm.
    bank = models.ForeignKey(Bank, related_name='branch', max_length=100, null=True, blank=True)
    def __str__(self):
        return str(self.name)
class Department(models.Model):
    """An organizational department."""
    name = models.CharField(max_length=100, null=True, blank=True)
    def __str__(self):
        return str(self.name)
class UserRole(models.Model):
    """A named role assignable to users."""
    name = models.CharField(max_length=100, null=True, blank=True)
    def __str__(self):
        return str(self.name)
class Files(models.Model):
    """A stored file reference with creation/modification timestamps."""
    # NOTE(review): `file` is a TextField (presumably a path or URL rather
    # than an upload) and `check` looks like a checksum — confirm with callers.
    file = models.TextField(null=True, blank=True)
    check = models.CharField(max_length=256, null=True, blank=True)
    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)
|
13,918 | 96f7e5cafce64cafd9393bbf32aec739a9927b63 | # Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Convert a TestRecord into a mfg_event proto for upload to mfg inspector.
Also includes utilities to handle multi-dim conversion into an attachment
and the reverse.
A decision had to be made on how to handle phases, measurements and attachments
with non-unique names. Approach taken is to append a _X to the names.
"""
import collections
import dataclasses
import datetime
import itertools
import json
import logging
import numbers
import os
import sys
from typing import Mapping, Optional, Tuple
from openhtf.core import measurements
from openhtf.core import test_record as htf_test_record
from openhtf.output.proto import mfg_event_pb2
from openhtf.output.proto import test_runs_converter
from openhtf.output.proto import test_runs_pb2
from openhtf.util import data as htf_data
from openhtf.util import units
from openhtf.util import validators
# Name under which the raw JSON dump of the TestRecord is attached.
TEST_RECORD_ATTACHMENT_NAME = 'OpenHTF_record.json'
# To be lazy loaded by _LazyLoadUnitsByCode when needed.
UNITS_BY_CODE = {}
# Map test run Status (proto) name to measurement Outcome (python) enum's and
# the reverse. Note: there is data lost in converting an UNSET/PARTIALLY_SET to
# an ERROR so we can't completely reverse the transformation.
MEASUREMENT_OUTCOME_TO_TEST_RUN_STATUS_NAME = {
    measurements.Outcome.PASS: 'PASS',
    measurements.Outcome.FAIL: 'FAIL',
    measurements.Outcome.UNSET: 'ERROR',
    measurements.Outcome.PARTIALLY_SET: 'ERROR',
}
TEST_RUN_STATUS_NAME_TO_MEASUREMENT_OUTCOME = {
    'PASS': measurements.Outcome.PASS,
    'MARGINAL_PASS': measurements.Outcome.PASS,
    'FAIL': measurements.Outcome.FAIL,
    'ERROR': measurements.Outcome.UNSET
}
_GIBI_BYTE_TO_BASE = 1 << 30
# Total attachment budget per event: 1.9 GiB — presumably to stay safely
# under the ~2 GiB protobuf message ceiling; TODO confirm.
MAX_TOTAL_ATTACHMENT_BYTES = int(1.9 * _GIBI_BYTE_TO_BASE)
_LOGGER = logging.getLogger(__name__)
@dataclasses.dataclass(eq=True, frozen=True)  # Ensures __hash__ is generated.
class AttachmentCacheKey:
  """Identifies an already-converted/uploaded attachment by name and byte size."""
  name: str
  size: int
# Lookup from cache key to the previously built EventAttachment proto.
AttachmentCacheT = Mapping[AttachmentCacheKey, mfg_event_pb2.EventAttachment]
def _measurement_outcome_to_test_run_status_name(outcome: measurements.Outcome,
                                                 marginal: bool) -> str:
  """Return the test run status name for an outcome, honoring marginality."""
  if marginal:
    return 'MARGINAL_PASS'
  return MEASUREMENT_OUTCOME_TO_TEST_RUN_STATUS_NAME[outcome]
def _test_run_status_name_to_measurement_outcome_and_marginal(
    name: str) -> Tuple[measurements.Outcome, bool]:
  """Return (outcome, marginal) for a test run status name."""
  outcome = TEST_RUN_STATUS_NAME_TO_MEASUREMENT_OUTCOME[name]
  return outcome, 'MARGINAL' in name
def _lazy_load_units_by_code():
  """Populate UNITS_BY_CODE from the unit registry on first use (no-op after)."""
  if not UNITS_BY_CODE:
    for unit in units.UNITS_BY_NAME.values():
      UNITS_BY_CODE[unit.code] = unit
def mfg_event_from_test_record(
    record: htf_test_record.TestRecord,
    attachment_cache: Optional[AttachmentCacheT] = None,
) -> mfg_event_pb2.MfgEvent:
  """Convert an OpenHTF TestRecord to an MfgEvent proto.

  Most fields are copied over directly and some are pulled out of metadata
  (listed below). Multi-dimensional measurements are stored only in the JSON
  dump of the record.

  Important Note: This function mutates the test_record so any output callbacks
  called after this callback will operate on the mutated record.

  Metadata fields:
    test_name: The name field from the test's TestOptions.
    config: The OpenHTF config, as a dictionary.
    assembly_events: List of AssemblyEvent protos.
      (see proto/assembly_event.proto).
    operator_name: Name of the test operator.

  Args:
    record: An OpenHTF TestRecord.
    attachment_cache: Provides a lookup to get EventAttachment protos for
      already uploaded (or converted) attachments.

  Returns:
    An MfgEvent proto representing the given test record.
  """
  mfg_event = mfg_event_pb2.MfgEvent()
  _populate_basic_data(mfg_event, record)
  # Attach the full JSON record before name mangling so it stays unmangled.
  _attach_record_as_json(mfg_event, record)
  _attach_argv(mfg_event)
  _attach_config(mfg_event, record)
  # Only include assembly events if the test passed.
  if ('assembly_events' in record.metadata and
      mfg_event.test_status == test_runs_pb2.PASS):
    for assembly_event in record.metadata['assembly_events']:
      mfg_event.assembly_events.add().CopyFrom(assembly_event)
  # Flatten multi-dim measurements into attachments, dedupe names, then copy
  # the (now uniquely named) measurements and attachments onto the event.
  convert_multidim_measurements(record.phases)
  phase_copier = PhaseCopier(phase_uniquizer(record.phases), attachment_cache)
  phase_copier.copy_measurements(mfg_event)
  if not phase_copier.copy_attachments(mfg_event):
    # Some attachments could not be copied — presumably the size budget was
    # exceeded (see MAX_TOTAL_ATTACHMENT_BYTES); confirm in PhaseCopier.
    mfg_event.test_run_type = mfg_event_pb2.TEST_RUN_PARTIAL
  return mfg_event
def _populate_basic_data(mfg_event: mfg_event_pb2.MfgEvent,
                         record: htf_test_record.TestRecord) -> None:
  """Copies data from the OpenHTF TestRecord to the MfgEvent proto."""
  # TODO(openhtf-team):
  #  * Missing in proto: set run name from metadata.
  #  * `part_tags` field on proto is unused
  #  * `timings` field on proto is unused.
  #  * Handle arbitrary units as uom_code/uom_suffix.

  # Populate non-repeated fields.
  mfg_event.dut_serial = record.dut_id
  mfg_event.start_time_ms = record.start_time_millis
  mfg_event.end_time_ms = record.end_time_millis
  mfg_event.tester_name = record.station_id
  # Fall back to the station id when metadata carries no test name.
  mfg_event.test_name = record.metadata.get('test_name') or record.station_id
  mfg_event.operator_name = record.metadata.get('operator_name', '')
  mfg_event.test_version = str(record.metadata.get('test_version', ''))
  mfg_event.test_description = record.metadata.get('test_description', '')
  # Marginal runs override the regular outcome-to-status mapping.
  mfg_event.test_status = (
      test_runs_pb2.MARGINAL_PASS
      if record.marginal else test_runs_converter.OUTCOME_MAP[record.outcome])

  # Populate part_tags.
  mfg_event.part_tags.extend(record.metadata.get('part_tags', []))

  # Populate phases.
  for phase in record.phases:
    mfg_phase = mfg_event.phases.add()
    mfg_phase.name = phase.name
    mfg_phase.description = phase.codeinfo.sourcecode
    mfg_phase.timing.start_time_millis = phase.start_time_millis
    mfg_phase.timing.end_time_millis = phase.end_time_millis

  # Populate failure codes.
  for details in record.outcome_details:
    failure_code = mfg_event.failure_codes.add()
    failure_code.code = details.code
    failure_code.details = details.description

  # Populate test logs, translating stdlib log levels to the proto enum.
  for log_record in record.log_records:
    test_log = mfg_event.test_logs.add()
    test_log.timestamp_millis = log_record.timestamp_millis
    test_log.log_message = log_record.message
    test_log.logger_name = log_record.logger_name
    test_log.levelno = log_record.level
    if log_record.level <= logging.DEBUG:
      test_log.level = test_runs_pb2.TestRunLogMessage.DEBUG
    elif log_record.level <= logging.INFO:
      test_log.level = test_runs_pb2.TestRunLogMessage.INFO
    elif log_record.level <= logging.WARNING:
      test_log.level = test_runs_pb2.TestRunLogMessage.WARNING
    elif log_record.level <= logging.ERROR:
      test_log.level = test_runs_pb2.TestRunLogMessage.ERROR
    elif log_record.level <= logging.CRITICAL:
      test_log.level = test_runs_pb2.TestRunLogMessage.CRITICAL
    test_log.log_source = log_record.source
    test_log.lineno = log_record.lineno
def _attach_record_as_json(mfg_event, record):
  """Attach an unmodified JSON dump of the record to the event."""
  record_dict = htf_data.convert_to_base_types(record)
  attachment = mfg_event.attachment.add()
  attachment.name = TEST_RECORD_ATTACHMENT_NAME
  attachment.type = test_runs_pb2.TEXT_UTF8
  attachment.value_binary = _convert_object_to_json(record_dict)
def _convert_object_to_json(obj): # pylint: disable=missing-function-docstring
# Since there will be parts of this that may have unicode, either as
# measurement or in the logs, we have to be careful and convert everything
# to unicode, merge, then encode to UTF-8 to put it into the proto.
def unsupported_type_handler(o):
# For bytes, JSONEncoder will fallback to this function to convert to str.
if isinstance(o, bytes):
return o.decode(encoding='utf-8', errors='replace')
elif isinstance(o, (datetime.date, datetime.datetime)):
return o.isoformat()
else:
raise TypeError(repr(o) + ' is not JSON serializable')
json_encoder = json.JSONEncoder(
sort_keys=True,
indent=2,
ensure_ascii=False,
default=unsupported_type_handler)
return json_encoder.encode(obj).encode('utf-8', errors='replace')
def _attach_config(mfg_event, record):
  """Attach the OpenHTF config (when present in metadata) as JSON."""
  if 'config' not in record.metadata:
    return
  attachment = mfg_event.attachment.add()
  attachment.name = 'config'
  attachment.type = test_runs_pb2.TEXT_UTF8
  attachment.value_binary = _convert_object_to_json(record.metadata['config'])
def _attach_argv(mfg_event):
  """Attach the process command line (resolved script path + args) as JSON."""
  command_line = [os.path.realpath(sys.argv[0])] + sys.argv[1:]
  attachment = mfg_event.attachment.add()
  attachment.name = 'argv'
  attachment.value_binary = _convert_object_to_json(command_line)
  attachment.type = test_runs_pb2.TEXT_UTF8
class UniqueNameMaker(object):
  """Hands out unique names for phases/attachments that share a base name.

  Duplicated names receive an ``_<i>`` suffix inserted before the file
  extension, numbered in request order; names that occur once pass through
  untouched.
  """

  def __init__(self, all_names):
    # Pre-count every name so make_unique can tell duplicates from uniques.
    self._counts = collections.Counter(all_names)
    self._seen = collections.Counter()

  def make_unique(self, name):
    """Return ``name`` unchanged if unique, else the next suffixed variant."""
    total = self._counts[name]
    assert total >= 1, 'Seeing a new name that was not given to the constructor'
    if total == 1:
      # Unique already — skip the bookkeeping.
      return name
    self._seen[name] += 1
    stem, extension = os.path.splitext(name)
    return f'{stem}_{self._seen[name] - 1}{extension}'
def phase_uniquizer(all_phases):
  """Makes the names of phase measurement and attachments unique.

  This function will make the names of measurements and attachments unique.
  It modifies the input all_phases.

  Args:
    all_phases: the phases to make unique

  Returns:
    the phases now modified.
  """
  measurement_name_maker = UniqueNameMaker(
      itertools.chain.from_iterable(
          phase.measurements.keys() for phase in all_phases
          if phase.measurements))
  attachment_names = list(itertools.chain.from_iterable(
      phase.attachments.keys() for phase in all_phases))
  # Multi-dim measurements later become attachments named 'multidim_<name>',
  # so reserve those names in the attachment namespace as well.
  attachment_names.extend(itertools.chain.from_iterable([
      'multidim_' + name for name, meas in phase.measurements.items()
      if meas.dimensions is not None
  ] for phase in all_phases if phase.measurements))
  attachment_name_maker = UniqueNameMaker(attachment_names)
  for phase in all_phases:
    # Make measurements unique.
    # sorted() materializes the items first, so mutating the dict below is safe.
    for name, _ in sorted(phase.measurements.items()):
      old_name = name
      name = measurement_name_maker.make_unique(name)
      # Rename both the measurement object itself and its dict key.
      phase.measurements[old_name].name = name
      phase.measurements[name] = phase.measurements.pop(old_name)
    # Make attachments unique.
    for name, _ in sorted(phase.attachments.items()):
      old_name = name
      name = attachment_name_maker.make_unique(name)
      phase.attachments[name] = phase.attachments.pop(old_name)
  return all_phases
def multidim_measurement_to_attachment(name, measurement):
  """Convert a multi-dim measurement to an `openhtf.test_record.Attachment`.

  Args:
    name: name for the created attachment.
    measurement: the multi-dimensional measurement to serialize.

  Returns:
    An `openhtf.test_record.Attachment` holding the JSON-encoded measurement.
  """
  dimensions = list(measurement.dimensions)
  if measurement.units:
    # The value's unit is encoded as one extra trailing dimension.
    dimensions.append(
        measurements.Dimension.from_unit_descriptor(measurement.units))
  dims = []
  for d in dimensions:
    if d.suffix is None:
      suffix = u''
    else:
      suffix = d.suffix
    dims.append({
        'uom_suffix': suffix,
        'uom_code': d.code,
        'name': d.name,
    })
  # Refer to the module docstring for the expected schema.
  dimensioned_measured_value = measurement.measured_value
  # Sort rows by the first coordinate so the serialized output is deterministic.
  value = (
      sorted(dimensioned_measured_value.value, key=lambda x: x[0])
      if dimensioned_measured_value.is_value_set else None)
  outcome_str = _measurement_outcome_to_test_run_status_name(
      measurement.outcome, measurement.marginal)
  data = _convert_object_to_json({
      'outcome': outcome_str,
      'name': name,
      'dimensions': dims,
      'value': value,
  })
  attachment = htf_test_record.Attachment(data, test_runs_pb2.MULTIDIM_JSON)  # pytype: disable=wrong-arg-types  # gen-stub-imports
  return attachment
def convert_multidim_measurements(all_phases):
  """Converts each multidim measurement into an attachment for all phases.

  Modifies the input phases in place: every measurement with dimensions is
  removed and replaced by a JSON attachment named 'multidim_<name>'.

  Args:
    all_phases: the phases whose multi-dim measurements should be converted.

  Returns:
    the phases now modified.
  """
  # Combine actual attachments with attachments we make from multi-dim
  # measurements so the name maker sees every name that will exist.
  attachment_names = list(itertools.chain.from_iterable(
      phase.attachments.keys() for phase in all_phases))
  attachment_names.extend(itertools.chain.from_iterable([
      'multidim_' + name for name, meas in phase.measurements.items()
      if meas.dimensions is not None
  ] for phase in all_phases if phase.measurements))
  attachment_name_maker = UniqueNameMaker(attachment_names)
  for phase in all_phases:
    # Process multi-dim measurements into unique attachments.
    # sorted() materializes the items, so popping from the dict below is safe.
    for name, measurement in sorted(phase.measurements.items()):
      if measurement.dimensions:
        old_name = name
        name = attachment_name_maker.make_unique('multidim_%s' % name)
        attachment = multidim_measurement_to_attachment(name, measurement)
        # Replace the measurement with the attachment built from it.
        phase.attachments[name] = attachment
        phase.measurements.pop(old_name)
  return all_phases
class PhaseCopier(object):
  """Copies measurements and attachments to an MfgEvent."""

  def __init__(self,
               all_phases,
               attachment_cache: Optional[AttachmentCacheT] = None):
    self._phases = all_phases
    # Partial uploads are enabled iff a cache of previously-uploaded
    # attachments was provided.
    self._using_partial_uploads = attachment_cache is not None
    self._attachment_cache = (
        attachment_cache if self._using_partial_uploads else {})

  def copy_measurements(self, mfg_event):
    """Copies every phase's uni-dimensional measurements onto mfg_event."""
    for phase in self._phases:
      for name, measurement in sorted(phase.measurements.items()):
        # Multi-dim measurements should already have been removed.
        assert measurement.dimensions is None
        self._copy_unidimensional_measurement(phase, name, measurement,
                                              mfg_event)

  def _copy_unidimensional_measurement(self, phase, name, measurement,
                                       mfg_event):
    """Copy uni-dimensional measurements to the MfgEvent."""
    mfg_measurement = mfg_event.measurement.add()

    # Copy basic measurement fields.
    mfg_measurement.name = name
    if measurement.docstring:
      mfg_measurement.description = measurement.docstring
    mfg_measurement.parameter_tag.append(phase.name)
    if (measurement.units and
        measurement.units.code in test_runs_converter.UOM_CODE_MAP):
      mfg_measurement.unit_code = (
          test_runs_converter.UOM_CODE_MAP[measurement.units.code])

    # Copy failed measurements as failure_codes. This happens early to include
    # unset measurements.
    if (measurement.outcome != measurements.Outcome.PASS and
        phase.outcome != htf_test_record.PhaseOutcome.SKIP):
      failure_code = mfg_event.failure_codes.add()
      failure_code.code = name
      failure_code.details = '\n'.join(str(v) for v in measurement.validators)

    # Copy measurement value.
    measured_value = measurement.measured_value
    status_str = _measurement_outcome_to_test_run_status_name(
        measurement.outcome, measurement.marginal)
    mfg_measurement.status = test_runs_pb2.Status.Value(status_str)
    if not measured_value.is_value_set:
      # Unset measurements carry only the status copied above.
      return
    value = measured_value.value

    if isinstance(value, numbers.Number):
      mfg_measurement.numeric_value = float(value)
    elif isinstance(value, bytes):
      mfg_measurement.text_value = value.decode(errors='replace')
    else:
      # Coercing to string.
      mfg_measurement.text_value = str(value)

    # Copy measurement validators.
    for validator in measurement.validators:
      if isinstance(validator, validators.RangeValidatorBase):
        if validator.minimum is not None:
          mfg_measurement.numeric_minimum = float(validator.minimum)
        if validator.maximum is not None:
          mfg_measurement.numeric_maximum = float(validator.maximum)
        if validator.marginal_minimum is not None:
          mfg_measurement.numeric_marginal_minimum = float(
              validator.marginal_minimum)
        if validator.marginal_maximum is not None:
          mfg_measurement.numeric_marginal_maximum = float(
              validator.marginal_maximum)
      elif isinstance(validator, validators.RegexMatcher):
        mfg_measurement.expected_text = validator.regex
      else:
        # Fallback: record unrecognized validator types in the description.
        mfg_measurement.description += '\nValidator: ' + str(validator)

  def copy_attachments(self, mfg_event: mfg_event_pb2.MfgEvent) -> bool:
    """Copies attachments into the MfgEvent from the configured phases.

    If partial uploads are in use (indicated by configuring this class instance
    with an Attachments cache), this function will exit early if the total
    attachment data size exceeds a reasonable threshold to avoid the 2 GB
    serialized proto limit.

    Args:
      mfg_event: The MfgEvent to copy into.

    Returns:
      True if all attachments are copied and False if only some attachments
      were copied (only possible when partial uploads are being used).
    """
    value_copied_attachment_sizes = []
    skipped_attachment_names = []
    for phase in self._phases:
      for name, attachment in sorted(phase.attachments.items()):
        size = attachment.size
        attachment_cache_key = AttachmentCacheKey(name, size)
        if attachment_cache_key in self._attachment_cache:
          # Already uploaded: reuse the cached proto instead of the raw data.
          mfg_event.attachment.append(
              self._attachment_cache[attachment_cache_key])
        else:
          # Only skip for size once at least one attachment has been copied,
          # so every cycle makes forward progress.
          at_least_one_attachment_for_partial_uploads = (
              self._using_partial_uploads and value_copied_attachment_sizes)
          if at_least_one_attachment_for_partial_uploads and (
              sum(value_copied_attachment_sizes) + size >
              MAX_TOTAL_ATTACHMENT_BYTES):
            skipped_attachment_names.append(name)
          else:
            value_copied_attachment_sizes.append(size)
            self._copy_attachment(name, attachment.data, attachment.mimetype,
                                  mfg_event)
    if skipped_attachment_names:
      _LOGGER.info(
          'Skipping upload of %r attachments for this cycle. '
          'To avoid max proto size issues.', skipped_attachment_names)
      return False
    return True

  def _copy_attachment(self, name, data, mimetype, mfg_event):
    """Copies an attachment to mfg_event."""
    attachment = mfg_event.attachment.add()
    attachment.name = name
    attachment.value_binary = data
    if mimetype in test_runs_converter.MIMETYPE_MAP:
      attachment.type = test_runs_converter.MIMETYPE_MAP[mimetype]
    elif mimetype == test_runs_pb2.MULTIDIM_JSON:
      attachment.type = mimetype
    else:
      # Unknown mimetypes fall back to raw binary.
      attachment.type = test_runs_pb2.BINARY
def test_record_from_mfg_event(mfg_event):
  """Extract the original test_record saved as an attachment on a mfg_event."""
  matching = (a for a in mfg_event.attachment
              if a.name == TEST_RECORD_ATTACHMENT_NAME)
  for attachment in matching:
    # First match wins; the record is stored as JSON bytes.
    return json.loads(attachment.value_binary)
  raise ValueError('Could not find test record JSON in the given MfgEvent.')
def attachment_to_multidim_measurement(attachment, name=None):
  """Convert an OpenHTF test record attachment to a multi-dim measurement.

  This is a best effort attempt to reverse, as some data is lost in converting
  from a multidim to an attachment.

  Args:
    attachment: an `openhtf.test_record.Attachment` from a multi-dim.
    name: an optional name for the measurement. If not provided will use the
      name included in the attachment.

  Returns:
    An multi-dim `openhtf.Measurement`.
  """
  data = json.loads(attachment.data)
  name = name or data.get('name')
  # attachment_dims are a list of dicts with keys 'uom_suffix' and 'uom_code'
  attachment_dims = data.get('dimensions', [])
  # attachment_value is a list of lists [[t1, x1, y1, f1], [t2, x2, y2, f2]]
  attachment_values = data.get('value')

  attachment_outcome_str = data.get('outcome')
  if attachment_outcome_str not in TEST_RUN_STATUS_NAME_TO_MEASUREMENT_OUTCOME:
    # For backward compatibility with saved data we'll convert integers to str
    try:
      attachment_outcome_str = test_runs_pb2.Status.Name(
          int(attachment_outcome_str))
    except ValueError:
      attachment_outcome_str = None

  # Convert test status outcome str to measurement outcome
  if attachment_outcome_str:
    outcome, marginal = (
        _test_run_status_name_to_measurement_outcome_and_marginal(
            attachment_outcome_str))
  else:
    outcome = None
    marginal = False

  # convert dimensions into htf.Dimensions
  _lazy_load_units_by_code()
  dims = []
  for d in attachment_dims:
    # Try to convert into htf.Dimension including backwards compatibility.
    unit = UNITS_BY_CODE.get(d.get('uom_code'), units.NONE)
    description = d.get('name', '')
    dims.append(measurements.Dimension(description=description, unit=unit))

  # Attempt to determine if units are included: when serialized, the value's
  # unit was appended as one extra trailing dimension.
  if attachment_values and len(dims) == len(attachment_values[0]):
    # units provided
    units_ = dims[-1].unit
    dimensions = dims[:-1]
  else:
    units_ = None
    dimensions = dims

  # created dimensioned_measured_value and populate with values.
  measured_value = measurements.DimensionedMeasuredValue(
      name=name, num_dimensions=len(dimensions))
  # Each row is all coordinates followed by the measured value itself.
  for row in attachment_values:
    coordinates = tuple(row[:-1])
    val = row[-1]
    measured_value[coordinates] = val

  measurement = measurements.Measurement(
      name=name,
      units=units_,
      dimensions=tuple(dimensions),
      measured_value=measured_value,
      outcome=outcome,
      marginal=marginal)
  return measurement
|
13,919 | 2940fe4d84c3fab1559922640842652ac2be6f7e | import mock
import time
from unittest import TestCase
from Alarm.Alarm import Alarm
from Alarm.AlarmState.AlarmState import AlarmState
from Alarm.AlarmState.Triggered import Triggered
from Alarm.AlarmState.Idle import Idle
class TestTriggered(TestCase):
    """Unit tests for the Triggered alarm state transitions."""

    def setUp(self):
        # A fresh alarm placed in the Triggered state with the door open.
        self.alarm = Alarm([], None, None)
        self.triggered = Triggered(self.alarm, [], AlarmState.DOOR_OPEN)
        self.alarm.set_alarm_state(self.triggered)

    def test_door_opened(self):
        # Closing then re-opening the door keeps the alarm Triggered.
        self.triggered.door_closed()
        self.assertEqual(self.triggered.get_door_state(), AlarmState.DOOR_CLOSED)
        self.triggered.door_opened()
        self.assertEqual(self.triggered.get_door_state(), AlarmState.DOOR_OPEN)
        self.assertIsInstance(self.alarm.get_alarm_state(), Triggered)
        self.assertTrue(self.alarm.get_alarm_state().is_door_open())

    def test_door_closed(self):
        # Closing the door alone does not leave the Triggered state.
        self.assertEqual(self.triggered.get_door_state(), AlarmState.DOOR_OPEN)
        self.triggered.door_closed()
        self.assertEqual(self.triggered.get_door_state(), AlarmState.DOOR_CLOSED)
        self.assertIsInstance(self.alarm.get_alarm_state(), Triggered)
        self.assertFalse(self.alarm.get_alarm_state().is_door_open())

    def test_alarm_deactivated_door_opened(self):
        # Deactivating while the door is open transitions to Idle, door open.
        self.assertEqual(self.triggered.get_door_state(), AlarmState.DOOR_OPEN)
        self.triggered.alarm_deactivated()
        self.assertIsInstance(self.alarm.get_alarm_state(), Idle)
        self.assertTrue(self.alarm.get_alarm_state().is_door_open())

    def test_alarm_deactivated_door_closed(self):
        # Deactivating while the door is closed transitions to Idle, door closed.
        self.triggered.door_closed()
        self.assertEqual(self.triggered.get_door_state(), AlarmState.DOOR_CLOSED)
        self.triggered.alarm_deactivated()
        self.assertIsInstance(self.alarm.get_alarm_state(), Idle)
        self.assertFalse(self.alarm.get_alarm_state().is_door_open())

    @mock.patch('time.time')
    def test_perform_action_time_not_exceeded(self, mock_time):
        # 20 mocked seconds after triggering: perform_action keeps Triggered.
        mock_time.return_value = 0
        alarm = Alarm([], None, None)
        triggered = Triggered(alarm, [], AlarmState.DOOR_OPEN)
        alarm.set_alarm_state(triggered)
        mock_time.return_value = 20
        triggered.perform_action()
        self.assertIsInstance(alarm.get_alarm_state(), Triggered)

    @mock.patch('time.time')
    def test_perform_action_time_exceeded(self, mock_time):
        # 40 mocked seconds after triggering: perform_action falls back to Idle.
        mock_time.return_value = 0
        alarm = Alarm([], None, None)
        triggered = Triggered(alarm, [], AlarmState.DOOR_OPEN)
        alarm.set_alarm_state(triggered)
        self.assertEqual(triggered.get_trigger_time(), 0)
        mock_time.return_value = 40
        self.assertEqual(time.time(), 40)
        triggered.perform_action()
        self.assertIsInstance(alarm.get_alarm_state(), Idle)
        self.assertTrue(alarm.get_alarm_state().is_door_open())
|
13,920 | c313d3df532b0e9a000c6c0c666fdcebe13c2a80 | # -*- coding: utf-8 -*-
""" 实现线性搜索 """
# 算法核心思想:从表的一端开始,顺序扫描数组,依次将扫描到的数组元素和目标值相比较,若当前数组元素与目标值相等,则查找成功;
# 若扫描结束后,仍未找到数组元素等于目标值的结点,则查找失败
# 算法介绍网站:https://zh.wikipedia.org/wiki/%E7%BA%BF%E6%80%A7%E6%90%9C%E7%B4%A2
def linear_search(lists, target):
    """Linear search.

    Scan the array from the front and compare each element with the target;
    stop at the first match.
    Reference: https://zh.wikipedia.org/wiki/%E7%BA%BF%E6%80%A7%E6%90%9C%E7%B4%A2

    Args:
        lists: list
            the array to search
        target: int
            the element to look for

    Returns:
        result: int, default = -1
            index of the target element in the array, or -1 when absent
    """
    for index, value in enumerate(lists):
        if value == target:
            return index
    return -1
if __name__ == '__main__':
    # Demo run (Python 2 print syntax): search a sample array for a target.
    print u'线性搜索示例:\n'
    array = [9, 1, 2, 5, 7, 4, 8, 6, 3, 5]
    print u'待搜索的数组:'
    print array
    target = 7
    print u'\n指定搜索元素: ' + str(target)
    result = linear_search(array, target)
    # Prints the found index, or -1 when the target is absent.
    print u'\n搜索结果: ' + str(result)
|
13,921 | 26b23e0bc083f5ae04912972a7b2c521d95e7633 | #!/usr/bin/env python3
#IMPORTING LIBRARIES
import urllib.request as ur
from bs4 import BeautifulSoup
from nltk.tokenize import sent_tokenize
import numpy as np
# WEBSITE TO SCRAPE
website="http://www.cricbuzz.com/cricket-match/live-scores"
# GETTING SOURCE CODE
web_data=ur.urlopen(website)
source_code=web_data.read()
# USING BEAUTIFUL-SOUP TO ALIGN THE TEXT
data=BeautifulSoup(source_code,'html5lib')
#sent_split=sent_tokenize(final_data)
#print(np.shape(sent_split))
# Prettified HTML string that get_score() below parses section by section.
good_data=data.prettify()
# FUNCTION TO EXTRACT SCORE DATA FROM SOURCE CODE
def get_score(good_data):
    """Recursively print the cleaned score text between successive <h2> tags.

    `good_data` is the prettified HTML of the live-scores page as a string.
    Each call handles the first <h2>..<h2> section, prints its sentences,
    then recurses on the remainder of the string.
    """
    first_header=good_data.find('<h2')
    next_header=good_data.find('<h2',first_header+1)
    if first_header==-1 or next_header==-1:
        # Fewer than two headers remain: no complete section left to parse.
        return
    req_score_data=good_data[first_header:next_header]
    score_bs4=BeautifulSoup(req_score_data,'html5lib')
    score_instance=score_bs4.get_text(strip=True)
    split_instance=sent_tokenize(score_instance)
    copy_instance=split_instance[0]
    # INSTANCE CLEANING
    # Trim the first sentence at the word 'Live'; keep the discarded tail.
    live=split_instance[0].find('Live')
    split_instance[0]=split_instance[0][:live]
    check_discarded=copy_instance[live:]
    #print('@@\n'+check_discarded+'\n@@\n')
    '''
    if len(check_discarded)>38:
        while len(check_discarded)>38:
            news=check_discarded.find('News')
            live=check_discarded.find('Live',news)
            patch=check_discarded[news+4:live]
            split_instance.append(patch)
            check_discarded=check_discarded[live+1:]
    '''
    print(split_instance)
    print('\n')
    # Recurse on the rest of the page, starting at the next header.
    good_data=good_data[next_header:]
    get_score(good_data)

# Kick off extraction over the downloaded page.
get_score(good_data)
|
13,922 | 616cd62921f2772e5390c6185ab39f2ff74ca718 | import unittest
from path import path
import clld
class Tests(unittest.TestCase):
    def test_parsed_args(self):
        """Smoke test: parsed_args accepts a config-file path argument."""
        from clld.scripts.util import parsed_args
        # Uses the test.ini shipped alongside the clld package.
        parsed_args(args=[path(clld.__file__).dirname().joinpath('tests', 'test.ini')])
|
13,923 | ce91c3470b1aa825783875baba4ad62848902b64 | ##
# Represents the physical view of the DFS.
##
import os.path
import shutil
import dfs_state
import error as err
from base import Base
from lock import Lock
# Root folder under which each peer keeps its shared files.
shareFolderPath = os.path.expanduser("~/Share/")
# Name of the hidden file used to persist serialized DFS state.
saveStateName = '.state'
class PhysicalView(Base):
    """On-disk view of the DFS for one peer (Python 2 code).

    All file operations are serialized through self.lock_ and resolve paths
    relative to this peer's base folder.  Methods return codes from `error`.
    """

    def __init__(self, dfs):
        Base.__init__(self, dfs)
        self.lock_ = Lock(dfs)
        self.createBaseFolder()

    def read(self, fileName, buf, offset, bufsize):
        """Read bufsize bytes at offset from fileName into buf; return a status code."""
        # TODO add thread safetly
        filePath = os.path.join(self.getBasePath(), fileName)
        size = self.getFileSize(fileName)
        if offset + bufsize > size:
            self.log_.w('tried to read ' + fileName + ' but size is ' + str(size) +
                        ' and bufsize + offset = ' + str(offset + bufsize))
            return err.InvalidBufferSize
        self.lock_.acquire()
        try:
            f = open(filePath, "r")
        except Exception, ex:
            self.log_.e('error opening file in read mode ' + filePath + ': ' + str(ex))
            self.lock_.release()
            return err.FileNotFound
        status = err.OK
        f.seek(offset)
        try:
            data = f.read(bufsize)
            # Copy byte-by-byte into the caller-supplied mutable buffer.
            for i, d in enumerate(data):
                buf[i] = d
        except Exception, ex:
            self.log_.e('failed to read ' + filePath + ' from ' + str(offset) + ' to ' + str(offset + bufsize) + ': ' + str(ex))
            status = err.CannotReadFile
        f.close()
        self.lock_.release()
        return status

    def write(self, fileName, buf, offset, bufsize):
        """Write bufsize bytes from buf at offset into fileName; return a status code."""
        status = err.OK
        size = self.getFileSize(fileName)
        self.lock_.acquire()
        f = open(os.path.join(self.getBasePath(), fileName), "r+")
        if(offset > size):
            # Pad with spaces so the seek target exists before writing.
            f.seek(0, 2)
            f.write(' ' * (offset - size))
        f.seek(offset)
        try:
            for i in range(bufsize):
                f.write(buf[i])
        except Exception, ex:
            self.log_.e('failed to write ' + fileName + ' from ' + str(offset) + ' to ' + str(offset + bufsize) + ': ' + str(ex))
            status = err.CannotReadFile
        f.close()
        self.lock_.release()
        return status

    def getChunk(self, fileName, chunk):
        """Return the bytes of chunk number `chunk`, or None if the file cannot be opened."""
        filePath = os.path.join(self.getBasePath(), fileName)
        self.lock_.acquire()
        try:
            f = open(filePath, 'r')
        except Exception, ex:
            # NOTE(review): this early return leaves self.lock_ held — the
            # release below is skipped.  Confirm whether this is intentional.
            self.log_.e('getChunk - failed to open ' + filePath)
            return None
        data = None
        try:
            f.seek(chunk * dfs_state.CHUNK_SIZE)
            data = f.read(dfs_state.CHUNK_SIZE)
        except Exception, ex:
            self.log_.e('getChunk - failed to seek to chunk ' + chunk + ' in ' + filePath)
        f.close()
        self.lock_.release()
        return data

    def writeChunk(self, fileName, chunkNum, data):
        """Write `data` at chunk position chunkNum, growing the file if needed."""
        # Assume data is no longer than size CHUNK_SIZE
        filePath = os.path.join(self.getBasePath(), fileName)
        size = self.getFileSize(fileName)
        if size < chunkNum * dfs_state.CHUNK_SIZE:
            # Grow the file with filler spaces so the chunk offset exists.
            buf = [' '] * (chunkNum * dfs_state.CHUNK_SIZE - size)
            self.write(fileName, buf, size, len(buf))
        self.lock_.acquire()
        f = open(filePath, "r+")
        f.seek(chunkNum * dfs_state.CHUNK_SIZE)
        f.write(data)
        # Resize the file b/c the last chunk may be smaller then CHUNK_SIZE
        if len(data) < dfs_state.CHUNK_SIZE:  # is this the last chunk?
            f.truncate()
        f.close()
        self.lock_.release()

    def trim(self, fileName, size):
        """Truncate fileName down to `size` bytes."""
        self.lock_.acquire()
        filePath = os.path.join(self.getBasePath(), fileName)
        f = open(filePath, 'r+')
        f.truncate(size)
        f.close()
        self.lock_.release()

    def getNumChunks(self, fileName):
        """Return the number of chunks in fileName (always at least 1)."""
        self.lock_.acquire()
        size = self.getFileSize(fileName)
        self.lock_.release()
        return int(size / dfs_state.CHUNK_SIZE) + 1

    def copyFile(self, src, des):
        """Copy file src to des, both relative to this peer's base folder."""
        self.lock_.acquire()
        shutil.copyfile(os.path.join(self.getBasePath(), src), os.path.join(self.getBasePath(), des))
        self.lock_.release()

    def deleteFile(self, fileName):
        """Remove fileName from this peer's base folder."""
        self.lock_.acquire()
        os.remove(os.path.join(self.getBasePath(), fileName))
        self.lock_.release()

    def writeState(self, serializedState):
        """Persist the serialized DFS state to the hidden state file."""
        path = os.path.join(self.getBasePath(), saveStateName)
        self.lock_.acquire()
        f = open(path, 'w')
        f.write(serializedState)
        f.close()
        self.lock_.release()

    def readState(self):
        """Return the persisted DFS state string, or None if no state file exists."""
        path = os.path.join(self.getBasePath(), saveStateName)
        if not os.path.exists(path):
            return None
        self.lock_.acquire()
        f = open(path, 'r')
        state = f.read()
        f.close()
        self.lock_.release()
        return state

    ##
    # Private methods
    ##
    def createBaseFolder(self):
        """Create the shared folder and this peer's subfolder if missing."""
        if not os.path.isdir(shareFolderPath):
            os.mkdir(shareFolderPath)
        if not os.path.isdir(self.getBasePath()):
            os.mkdir(self.getBasePath())

    def exists(self, fileName):
        """True when fileName exists in this peer's base folder."""
        return os.path.exists(os.path.join(self.getBasePath(), fileName))

    def getBasePath(self):
        """Return this peer's folder path, keyed by its DFS id."""
        dirName = "peer_" + str(self.dfs_.id)
        return shareFolderPath + dirName + "/"

    def fillEmptyFile(self, fileName, size):
        """Create fileName pre-filled with `size` spaces if it does not exist."""
        self.lock_.acquire()
        path = os.path.join(self.getBasePath(), fileName)
        if not os.path.isfile(path):
            w = open(path, 'w')
            w.write(' ' * size)  # fill the file with empty space
            w.close()
        self.lock_.release()

    def getFileSize(self, fileName):
        """Return the size of fileName in bytes."""
        return os.path.getsize(os.path.join(self.getBasePath(), fileName))
|
13,924 | a6fad21a27b81ccec33fd3f5ee500d41d4ce3f1c | from behave import *
use_step_matcher("parse")

@given('Exists a user "{username}" with password "{password}"')
def step_impl(context, username, password):
    """Create a Django user that later login steps can authenticate as."""
    from django.contrib.auth.models import User
    User.objects.create_user(username=username, email='user@example.com', password=password)

@given('I login as user "{username}" with password "{password}"')
def step_impl(context, username, password):
    """Fill and submit the login form through the browser."""
    context.browser.visit(context.get_url('/accounts/login/'))
    form = context.browser.find_by_tag('form').first
    context.browser.fill('username', username)
    context.browser.fill('password', password)
    form.find_by_css('button.btn-post').first.click()

@then('I\'m viewing user "{username}" workspace')
def step_impl(context, username):
    """Assert the workspace page shows the logged-in username."""
    assert context.browser.is_text_present(username)
|
13,925 | 053df4fc50440077a054ccb81b44f8016b24d02b | # Спортсмен занимается ежедневными пробежками. В первый день его результат составил a километров.
# Каждый день спортсмен увеличивал результат на 10 % относительно предыдущего.
# Требуется определить номер дня, на который общий результат спортсмена составить не менее b километров.
# Программа должна принимать значения параметров a и b и выводить одно натуральное число — номер дня.
a=input("Сколько пробежал в первый день?\n")  # distance run on day 1
b=input("А цель какая?\n")  # target distance in km
a=int(a)
b=int(b)
c=1  # day counter, day 1 already counted
# NOTE(review): the loop compares the *daily* distance `a` against the goal,
# while the task statement above speaks of the cumulative total ("общий
# результат") — confirm which interpretation is intended.
while a<b:
    a=a*1.1  # each day improves the previous day's result by 10%
    c+=1
print(f"Спортсмен пробежит свою цель на {c}-й день.")
13,926 | e2e2d5b1c3f3554ae9cb77efff6410403c28d0c6 | from collections import Counter
import numpy as np
import plotly.graph_objects as go
from model_operations import extract_ids
def summary_report(shape_values, feature_names, class_names, merge=False):
    """
    Summary report

    Writes a heatmap of per-class feature importances to an HTML file under
    figure/.  Importance is the sum of absolute SHAP values over all samples.

    :param class_names: class labels, one per entry in shape_values (rows)
    :param shape_values: list of 2-D SHAP value arrays, one per class
    :param feature_names: feature labels for the heatmap columns
    :param merge: when True, each sample row is assumed to hold 3 stacked
        copies of the feature vector, which are summed before aggregation
    :return: None; the figure is written to figure/summary_*.html
    """
    if merge:
        importances = []
        for shap_value in shape_values:
            # Collapse the 3 stacked feature blocks into one value per feature.
            shap_value = shap_value.reshape((shap_value.shape[0], 3, -1))
            importance = np.sum(shap_value, axis=1)
            # Aggregate over samples via the sum of absolute contributions.
            importance = np.sum(np.abs(importance), axis=0)
            importances.append(importance.tolist())
        fig = go.Figure(data=go.Heatmap(
            z = importances,
            x = feature_names,
            y = class_names
        ))
        fig.write_html("figure/summary_merge.html")
    else:
        importances = []
        for shap_value in shape_values:
            # Aggregate over samples without merging the stacked blocks.
            importance = np.sum(np.abs(shap_value), axis=0)
            importances.append(importance.tolist())
        fig = go.Figure(data=go.Heatmap(
            z = importances,
            x = feature_names,
            y = class_names
        ))
        fig.write_html("figure/summary_without_merge.html")
def dependency_report(feature_name, class_name, shap_values, data, feature_names, class_names, relative=True):
    """Write a dependence scatter plot for one feature/class pair to HTML.

    Each data row is assumed to hold 3 stacked copies of the feature vector
    (periods 1..3); the plot shows period-1 values on x, the summed SHAP
    contribution on y, period-2 deltas as color and period-3 deltas as size.

    NOTE(review): the `relative` parameter is currently unused — confirm
    whether it was meant to switch between absolute and relative deltas.
    """
    feature_index, class_index = feature_names.index(feature_name), class_names.index(class_name)
    shap_value = shap_values[class_index]
    # Sum the SHAP contributions of the three stacked copies of the feature.
    shap_value = [sum([value[feature_index], value[feature_index + len(feature_names)],
                       value[feature_index + 2 * len(feature_names)]]) for value in shap_value]
    initial_value = [d[feature_index] for d in data]
    second_value = [d[feature_index + len(feature_names)] - d[feature_index] for d in data]
    third_value = [d[feature_index + 2 * len(feature_names)] - d[feature_index + len(feature_names)] for d in data]
    # Triangle up/down marks whether the period-3 value rose or fell.
    symbols = ["star-triangle-up" if d[feature_index + 2*len(feature_names)] - d[feature_index + len(feature_names)] > 0 else "star-triangle-down" for d in data]
    fig = go.Figure(data=[go.Scatter(
        x = initial_value,
        y = shap_value,
        marker_symbol=symbols,
        mode='markers',
        marker= dict(
            size=[abs(v) for v in third_value],
            color=second_value,
            colorscale='Viridis',
            colorbar=dict(
                title=f"2-{feature_name} - 1-{feature_name}",
            ),
            sizemode='area',
            sizeref=2. * max(second_value) / (20. ** 2),
            sizemin=4,
            showscale=True
        )
    )])
    fig.update_layout(
        xaxis=dict(
            title=f"1-{feature_name} value",
            gridcolor='white',
            gridwidth=2,
        ),
        yaxis=dict(
            title=f"shap value for {feature_name} (sum)",
            gridcolor='white',
            gridwidth=2,
        ),
    )
    fig.write_html(f"figure/dependency_{feature_name}_{class_name}.html")
def evolution_event_distribution_report(timestamps, meta_community_network):
    """Write a grouped bar chart of evolution-event counts per timestamp."""
    bar_data = {}
    for node in meta_community_network.nodes():
        sid, _ = extract_ids(node)
        # 'pre' events are attributed to the previous timestamp.
        pre_event = meta_community_network.nodes[node]['pre']
        if pre_event != "None":
            bar_data[timestamps[sid-1]] = bar_data.get(timestamps[sid-1], [])
            bar_data[timestamps[sid-1]].append(pre_event)
        # 'nex' events are attributed to the node's own timestamp.
        nex_event = meta_community_network.nodes[node]['nex']
        if nex_event != "None":
            bar_data[timestamps[sid]] = bar_data.get(timestamps[sid], [])
            bar_data[timestamps[sid]].append(nex_event)
    data = []
    ne_count = ["#forming", "#continuing", "#growing", "#shrinking", "#splitting", "#merging", "#dissolving"]
    for timestamp in timestamps[:-1]:
        counter = Counter(bar_data[timestamp])
        # ne[1:] strips the leading '#' to match the stored event names.
        data.append(go.Bar(name=timestamp, x=ne_count, y=[counter.get(ne[1:], 0) for ne in ne_count]))
    fig = go.Figure(data=data)
    fig.update_layout(barmode="group")
    fig.write_html("figure/evolution_event_distribution.html")
13,927 | 38f61009fd83cb3ed73421a9617cbc52fbcfccfe | from game.adventure.campaign import Campaign
from game.adventure.room import Room
from game.adventure.item import Item
from game.adventure.character import Character
from game.adventure.generators.rooms import random_room
class DeepDungeon(Campaign):
    """A campaign whose rooms are procedurally generated on demand."""

    name = "The Deep Dungeon"

    def __init__(self, console):
        super().__init__(console)
        self.rooms = {}           # room name -> generated Room
        self.current_room = None  # set by next_room() below
        self.next_room()          # start the campaign in a fresh room

    def next_room(self):
        """Generate a new room, register it, and make it the current room."""
        procedurally_generated_room = random_room(self.console)
        room_name = procedurally_generated_room.name
        self.rooms[room_name] = procedurally_generated_room
        self.current_room = self.rooms[room_name]
        print(f"Current room name is {room_name}")
        return None
|
13,928 | 2919c52c9922a862e197c9c34c9757f3f8cbdc51 | '''
Constant string values used for home environment setup
'''
# Dotfiles and config files managed by the home-env repository.
MANAGED_FILES = [ ".bash_aliases", ".gdbinit", ".gitignore", ".gitconfig", ".vimrc", "Documents/colors.modal.ls", "Documents/colors.template", ".ssh/config", ".config/git"]
# Per-user local install locations.
LOCAL = ".local"
LOCO_BIN = "/".join([LOCAL, "bin"])
# Repository checkout details.
CLONE_URI = "ssh://git@github.com/redorca/home-env.git"
GIT_DIR = "bin/home-env/.git"
# Template lines for a Samba share entry and the [global] section.
SAMBA_ENTRY = [ "[]", "path = /home//", "read only = no", "browseable = yes", "guest ok = yes" ]
SAMBA_GLOBAL = [ "guest account = USER" ]
# NOTE(review): prints the raw 'path = /home//' template at import time —
# looks like leftover debug output; confirm before removing.
print(SAMBA_ENTRY[1])
|
13,929 | b34e02f404756bc982644e4879accd0cd455e607 | #Create a function that returns True if the given string has any of the following: Only letters and no numbers.
#Only numbers and no letters.
#If a string has both numbers and letters or contains characters that don't fit into any category, return False.
def csAlphanumericRestriction(input_str):
    """Return True when the string is all letters or all numeric characters.

    Mixed strings, empty strings, and strings containing other characters
    yield False.
    """
    if input_str.isalpha():
        return True
    return input_str.isnumeric()
#Write a function that takes a string as input and returns that string in reverse order,
#with the opposite casing for each character within the string.
def csOppositeReverse(txt):
    """Return *txt* reversed, with every character's case swapped."""
    # Reversal and per-character case swapping commute, so order is free.
    return ''.join(reversed(txt)).swapcase()
#Create a function that given an integer, returns an integer where every digit in the input integer is squared.
def csSquareAllDigits(n):
    """Return the integer formed by squaring every digit of *n* in place.

    Generalized over the original: negative integers are now accepted — the
    sign is preserved and the digits of the absolute value are squared
    (the original raised ValueError on the '-' character).

    Args:
        n: an integer, e.g. 9119.

    Returns:
        int: e.g. 811181 for 9119 (9->81, 1->1, 1->1, 9->81).
    """
    squared = ''.join(str(int(digit) ** 2) for digit in str(abs(n)))
    return int(squared) if n >= 0 else -int(squared)
#Given a string, return a new string with all the vowels removed.
def csRemoveTheVowels(input_str):
    """Return *input_str* with all vowels (a, e, i, o, u, any case) removed.

    Uses str.join over a generator instead of repeated string concatenation,
    which is the idiomatic (and linear-time) way to build the result.
    """
    return ''.join(c for c in input_str if c.lower() not in "aeiou")
|
13,930 | 8e38d4dad1b433b684ccff507ac0db607d48e45c | import numpy as np
from thimbles.thimblesdb import Base, ThimblesTable
from sqlalchemy import create_engine, ForeignKey
from sqlalchemy import Column, Date, Integer, String, Float
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.declarative import declared_attr
from sqlalchemy.orm import relationship, backref
class FlagSpace(object):
    """enables efficient translation of dictionaries of true and false values
    paired with specific sets of keys to and from individual integers, for
    efficient storage and checking of many boolean values at once.

    Fixes over the previous revision: membership is tested with `name in`
    instead of the `not ... .get(name) is None` anti-idiom, bit numbers are
    collected into a set for O(1) lookups, and redundant list() copies of
    dict views are removed.  Behavior is unchanged.
    """

    def __init__(self):
        # keep an internal dictionary of our flag names and corresponding integers
        self.flag_bits = {}     # flag name --> bit number
        self.flag_masks = {}    # flag name --> 2**(bit number)
        self.default_dict = {}  # flag name --> default truth value

    def add_dimension(self, name, bit_index=None, default=False):
        """add a flag corresponding to the integer 2**bit_index

        When bit_index is None the lowest currently unused bit is assigned.
        Raises ValueError for duplicate names or already-taken bit indexes.
        """
        if name in self.flag_masks:
            raise ValueError("the name %s is already in this flag space" % name)
        bit_nums = set(self.flag_bits.values())
        if bit_index is None:
            # assign the lowest currently unused bit number
            bit_index = 0
            while bit_index in bit_nums:
                bit_index += 1
        if bit_index in bit_nums:
            raise ValueError("bit_index %d is already taken" % bit_index)
        self.flag_bits[name] = bit_index
        self.flag_masks[name] = 2 ** bit_index
        self.default_dict[name] = default

    def int_to_dict(self, flag_int):
        """decomposes a flag integer into a dictionary of the form {name:bool, ...}"""
        return {name: bool(mask & flag_int)
                for name, mask in self.flag_masks.items()}

    def dict_to_int(self, flag_dict):
        """converts a flag dictionary into a corresponding integer in flag space"""
        int_out = 0
        for key, truth in flag_dict.items():
            fmask = self.flag_masks.get(key)
            if fmask is None:
                raise ValueError("key %s does not belong to this flag space" % key)
            if truth:
                int_out += fmask
        return int_out

    def __getitem__(self, flag_name):
        # Mask (2**bit) for the named flag.
        return self.flag_masks[flag_name]

    def __len__(self):
        return len(self.flag_masks)

    @property
    def flag_names(self):
        return self.flag_bits.keys()

    @property
    def default_int(self):
        # Integer encoding of the per-flag default truth values.
        return self.dict_to_int(self.default_dict)
class Flags(object):
    """Mutable view of a set of boolean flags packed into one integer.

    Lookups and assignments are delegated to a FlagSpace, which maps each
    flag name to its single-bit mask.
    """

    def __init__(self, flag_space, flag_int=0):
        self.flag_space = flag_space
        self.flag_int = flag_int

    def __getitem__(self, flag_name):
        mask = self.flag_space[flag_name]
        return bool(mask & self.flag_int)

    def __setitem__(self, flag_name, new_flag_val):
        mask = self.flag_space[flag_name]
        if new_flag_val:
            self.flag_int |= mask
        else:
            # Clear only this flag's bit, leaving the others untouched.
            self.flag_int &= ~mask

    def update(self, **kwargs):
        """Set several flags at once from keyword arguments."""
        for flag_name, truth in kwargs.items():
            self[flag_name] = truth

    def asdict(self):
        """Return the {name: bool} expansion of the current flag integer."""
        return self.flag_space.int_to_dict(self.flag_int)

    def __repr__(self):
        return repr(self.asdict())
# Flag space for per-feature boolean state; bit 0 marks fiducial features.
feature_flag_space = FlagSpace()
feature_flag_space.add_dimension("fiducial", bit_index=0, default=True)
class FeatureFlags(Flags, Base, ThimblesTable):
    # Packed flag bits persisted as a single integer column.
    flag_int = Column(Integer)
    flag_space = feature_flag_space

    def __init__(self, flag_int=None):
        """Create feature flags; None selects the flag space's defaults."""
        if flag_int is None:
            flag_int = self.flag_space.default_int
        self.flag_int = flag_int
# Flag space describing the processing state and origin of a spectrum.
spectrum_flag_space = FlagSpace()
spectrum_flag_space.add_dimension("normalized", bit_index=0)
spectrum_flag_space.add_dimension("fluxed", bit_index=1)
spectrum_flag_space.add_dimension("observed", bit_index=2, default=True)
spectrum_flag_space.add_dimension("telluric", bit_index=3)
spectrum_flag_space.add_dimension("sky", bit_index=4)
class SpectrumFlags(Flags, ThimblesTable, Base):
    # Packed flag bits persisted as a single integer column.
    flag_int = Column(Integer)
    flag_space = spectrum_flag_space

    def __init__(self, flag_int=None):
        """Create spectrum flags; None selects the flag space's defaults."""
        if flag_int is None:
            flag_int = self.flag_space.default_int
        self.flag_int = flag_int
# Flag space categorizing how a distribution arose.
distribution_flag_space = FlagSpace()
distribution_flag_space.add_dimension("observation", bit_index=0)
distribution_flag_space.add_dimension("prior", bit_index=1)
distribution_flag_space.add_dimension("convergence", bit_index=2)
class DistributionFlags(Flags, ThimblesTable, Base):
    # Packed flag bits persisted as a single integer column.
    flag_int = Column(Integer)
    flag_space = distribution_flag_space

    def __init__(self, flag_int=None):
        """Create distribution flags; None selects the flag space's defaults."""
        if flag_int is None:
            flag_int = self.flag_space.default_int
        self.flag_int = flag_int
13,931 | 884471eafad56b84b2d8fe97572f33d7a2ceb79a | from .ensemble import *
from .plots import *
|
13,932 | 7dd087cb91a6423d4f4a11708f9c0a4c31339315 | class AccessControlList():
accessControlList = None
def __init__(self, accessControlList):
self.accessControlList = {}
def add(self, ivleid, level):
self.accessControlList[ivleid] = level
def remove(self, ivleid):
if ivleid in self.accessControlList:
del self.accessControlList[ivleid]
def reset(self):
self.accessControlList = {}
def get(self, ivleid):
if ivleid in self.accessControlList:
return self.accessControlList[ivleid]
else:
return 0
def toObject(self):
return self.accessControlList |
13,933 | af49a7c88e714fcb2e9ab30c9b3ca39b7388e3bc | from .ocean import *
from .city import *
from .country import *
from .division import *
from .subdivision import *
from .altname import *
|
13,934 | b7d73c913e9ada7ff21a72fcf508b71aa108c86a | from django.apps import AppConfig
class CanteenAppConfig(AppConfig):
    """Django application configuration for the canteen_app package."""
    name = 'canteen_app'
13,935 | f696bf92ab9a00211fa6f61edbb5b0c844ea657e | from os import listdir
import random
import numpy as np
# Percentage of each class's review files reserved for the test split.
TEST_DATA_PERCENTAGE = 30


class DataGenerator:
    """Yields (features, labels) batches of pre-vectorized reviews stored
    as .npy files, split into train/test sets per sentiment class.

    NOTE(review): os.listdir order is platform-dependent and the split is
    taken before shuffling, so train/test membership is not reproducible
    across runs/filesystems -- confirm whether that matters.
    """

    def __init__(self, neg_reviews_dir_path, pos_reviews_dir_path):
        self.neg_reviews_dir_path = neg_reviews_dir_path
        self.pos_reviews_dir_path = pos_reviews_dir_path
        # (file_path, label) pairs; label 0 = negative, 1 = positive.
        self.neg_reviews_files = [(neg_reviews_dir_path + "/" + f, 0) for f in listdir(self.neg_reviews_dir_path)]
        self.pos_reviews_files = [(pos_reviews_dir_path + "/" + f, 1) for f in listdir(self.pos_reviews_dir_path)]
        # Per-class counts for a (100 - TEST_DATA_PERCENTAGE)% train split.
        self.neg_train_samples_count = int(round(len(self.neg_reviews_files) * (100 - TEST_DATA_PERCENTAGE) / 100, 0))
        self.pos_train_samples_count = int(round(len(self.pos_reviews_files) * (100 - TEST_DATA_PERCENTAGE) / 100, 0))
        self.neg_test_samples_count = len(self.neg_reviews_files) - self.neg_train_samples_count
        self.pos_test_samples_count = len(self.pos_reviews_files) - self.pos_train_samples_count
        self.train_samples_count = self.neg_train_samples_count + self.pos_train_samples_count
        self.test_samples_count = self.neg_test_samples_count + self.pos_test_samples_count
        # Interleave the two classes by shuffling after concatenation.
        self.train_files = self.neg_reviews_files[:self.neg_train_samples_count]
        self.train_files += self.pos_reviews_files[:self.pos_train_samples_count]
        random.shuffle(self.train_files)
        self.test_files = self.neg_reviews_files[self.neg_train_samples_count:]
        self.test_files += self.pos_reviews_files[self.pos_train_samples_count:]
        random.shuffle(self.test_files)

    def get_train_samples_count(self):
        return self.train_samples_count

    def get_test_samples_count(self):
        return self.test_samples_count

    def get_train_generator(self, batch_size):
        """Infinite generator of (x, y) arrays over the shuffled training
        files; wraps around when the file list is exhausted."""
        idx = 0
        while True:
            x = []
            y = []
            counter = 0
            while counter < batch_size:
                x.append(np.load(self.train_files[idx][0]))
                y.append(np.array(self.train_files[idx][1]))  # positive (1) or negative (0)
                idx = (idx + 1) % self.train_samples_count
                counter += 1
            yield (np.array(x), np.array(y))

    def get_test_generator(self, batch_size):
        """Infinite generator over the test files; same wrap-around scheme."""
        idx = 0
        while True:
            x = []
            y = []
            counter = 0
            while counter < batch_size:
                x.append(np.load(self.test_files[idx][0]))
                y.append(np.array(self.test_files[idx][1]))  # positive (1) or negative (0)
                idx = (idx + 1) % self.test_samples_count
                counter += 1
            yield (np.array(x), np.array(y))
13,936 | 4c40c1cab37766e58082cbe82977c566cf2e1699 | # ##### BEGIN MIT LICENSE BLOCK #####
#
# Copyright (c) 2012 Matt Ebb
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
#
# ##### END MIT LICENSE BLOCK #####
import parameter
from parameter import Histogram
import pyglet
from pyglet.gl import *
from pyglet.window import key
from shader import Shader
from ui2ddraw import *
import euclid
import colorsys
import numpy as np
class uiGroup(pyglet.graphics.OrderedGroup):
    """Ordered render group that switches GL into a 2D pixel-space
    orthographic projection for drawing UI widgets."""
    def __init__(self, order, window, **kwargs):
        super(uiGroup, self).__init__(order, **kwargs)
        self.window = window
    def set_state(self):
        # Project 1 GL unit == 1 window pixel, origin at bottom-left.
        width, height = self.window.get_size()
        glMatrixMode(GL_PROJECTION)
        glLoadIdentity()
        glOrtho(0.0, width, 0.0, height, -1, 1)
        glMatrixMode(GL_MODELVIEW)
        glLoadIdentity()
        glDisable(GL_DEPTH_TEST)
    def unset_state(self):
        glEnable(GL_DEPTH_TEST)
        pass
class uiBlendGroup(uiGroup):
    """uiGroup drawn with standard alpha blending."""
    def set_state(self):
        glEnable(GL_BLEND)
        glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
    def unset_state(self):
        glDisable(GL_BLEND)
class uiAdditiveGroup(uiGroup):
    """uiGroup drawn with additive blending (glow-style accumulation)."""
    def set_state(self):
        glEnable(GL_BLEND)
        #glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
        glBlendFunc(GL_SRC_ALPHA, GL_ONE)
    def unset_state(self):
        glDisable(GL_BLEND)
class uiShaderGroup(uiGroup):
    """uiGroup that binds a GLSL shader while its children are drawn."""
    def __init__(self, order, window, vertex_shader, fragment_shader, **kwargs):
        super(uiShaderGroup, self).__init__(order, window, **kwargs)
        self.shader = Shader(vertex_shader, fragment_shader)
    def set_state(self):
        self.shader.bind()
        #self.shader.uniform_matrixf('projection', camera.matrix * m)
    def unset_state(self):
        self.shader.unbind()
class UiControls(object):
    """Namespace of control-type ids, value subtypes and shared label style."""
    TOGGLE = 1
    SLIDER = 2
    ACTION = 3
    ANGLE = 1  # value subtype: edited/displayed in degrees, stored in radians
    font_style = {'color': (0, 0, 0, 255),
                  'font_size': 8,
                  'font_name': 'Bitstream Vera Sans',
                  }
# XXX convert each control to handle itself?
class UiEventHandler(object):
    """Window event handler that fans pyglet events out to the Ui's
    controls (hit-tested for mouse events, `active` for keyboard/drag)."""

    def __init__(self, window, ui):
        self.window = window
        self.ui = ui

    def on_draw(self):
        # Only clear when the Ui owns the whole window.
        if not self.ui.overlay:
            glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)

        def needs_redraw(ctrl):
            # Idiom fix: truth-test the flag rather than comparing '== True'.
            return hasattr(ctrl, "param") and ctrl.param is not None and ctrl.param.needs_redraw

        for control in [c for c in self.ui.controls if needs_redraw(c)]:
            control.update()
            control.param.needs_redraw = False
        self.ui.batch.draw()
        self.ui.fps_display.draw()

    def on_expose(self):
        self.on_draw()

    def on_mouse_press(self, x, y, buttons, modifiers):
        for control in [c for c in self.ui.controls if c.point_inside(x, y)]:
            control.on_mouse_press(x, y, buttons, modifiers)

    def on_mouse_release(self, x, y, buttons, modifiers):
        for control in [c for c in self.ui.controls if c.point_inside(x, y)]:
            control.on_mouse_release(x, y, buttons, modifiers)
        # Controls that were active but released outside their bounds.
        for control in [c for c in self.ui.controls if c.active
                        and not c.point_inside(x, y)]:
            control.release_outside(x, y, buttons, modifiers)

    def on_mouse_drag(self, x, y, dx, dy, buttons, modifiers):
        for control in [c for c in self.ui.controls if c.active]:
            control.on_mouse_drag(x, y, dx, dy, buttons, modifiers)

    def on_text(self, text):
        # NOTE(review): returns after the first active control -- assumes
        # at most one control is active at a time.
        for control in [c for c in self.ui.controls if c.active]:
            return control.on_text(text)

    def on_key_press(self, symbol, modifiers):
        for control in [c for c in self.ui.controls if c.active]:
            return control.on_key_press(symbol, modifiers)

    def on_resize(self, width, height):
        # The root layout is anchored at the top of the window.
        self.ui.layout.w = width
        self.ui.layout.y = height
        self.ui.layout.layout()
def attr_len(attr):
    """Return len(attr) for subscriptable values; scalars count as 1."""
    return len(attr) if hasattr(attr, "__getitem__") else 1
class Ui(object):
    """Top-level UI manager: owns the controls, the draw batch, the render
    groups and the root layout; installs a UiEventHandler on the window."""
    def __init__(self, window, overlay=True, layoutw=0.5):
        self.window = window
        self.controls = []
        self.batch = pyglet.graphics.Batch()
        self.overlay = overlay  # True: drawn over an existing scene (no clear)
        # Render groups ordered so labels/outlines draw above control bodies.
        self.groups = {}
        self.groups['control'] = uiGroup(3, window)
        self.groups['label'] = uiGroup(10, window, parent=self.groups['control'])
        self.groups['outline'] = uiBlendGroup(5, window, parent=self.groups['control'])
        self.groups['additive'] = uiAdditiveGroup(5, window, parent=self.groups['control'])
        self.groups['wheel'] = uiShaderGroup(3, window, ColorWheel.vertex_shader, ColorWheel.fragment_shader, parent=self.groups['control'])
        # Python types handled by each family of widgets.
        # NOTE(review): 'color' is empty, so ColorSwatch is only reachable
        # when a ctype is passed explicitly -- confirm intended.
        self.control_types = {}
        self.control_types['numeric'] = [float, int]
        self.control_types['color'] = []
        self.control_types['toggle'] = [bool,]
        self.fps_display = pyglet.window.FPSDisplay(window)
        ww, wh = self.window.get_size()
        self.layout = UiLayout(self, y=wh, w=ww, wf=layoutw, pad=10)
        self.handler = UiEventHandler(window, self)
        window.push_handlers( self.handler )
    def update(self):
        # Force every control to rebuild its geometry.
        for control in self.controls:
            control.update()
def set_from_kwargs(ob, args, kwargs):
    """Copy onto `ob` every entry of `kwargs` whose key is listed in `args`."""
    for name, value in kwargs.items():
        if name in args:
            setattr(ob, name, value)
class UiControl(object):
    """Base class for all widgets: geometry bookkeeping, a text label, and
    default mouse/keyboard handlers that manage the `active` state."""
    LABELSIZE = 0.4  # fraction of the width reserved for the label
    HEIGHT = 16      # default control height in pixels

    def __init__(self, ui, x=0, y=0, w=1, h=0, title=''):
        self.x = x
        self.y = y
        self.w = w
        self.h = self.HEIGHT if h == 0 else h
        self.active = False
        self.ui = ui
        self.vertex_lists = {}  # shape-geo id -> batched vertex list
        self.title = '' if title is None else title
        self.label = pyglet.text.Label(self.title,
                        batch=ui.batch,
                        group=ui.groups['label'],
                        x=0, y=0, width=self.w, height=self.h,
                        **UiControls.font_style )

    def add_shape_geo(self, shapegeo):
        """Create or refresh the batched vertex list for one shape dict
        (keys: id, mode, len, vertices, colors, optional tex_coords)."""
        id = shapegeo['id']
        # add z dimension to 2d controls
        vertices = np.array(shapegeo['vertices']).reshape(-1,2)
        vertices = np.column_stack( [vertices, np.array([0.0]*len(vertices)) ] )
        shapegeo['vertices'] = list(vertices.flat)
        if id in self.vertex_lists.keys():
            # Reuse the existing list, resizing if the vertex count changed.
            if self.vertex_lists[id].get_size() != shapegeo['len']:
                self.vertex_lists[id].resize(shapegeo['len'])
            self.vertex_lists[id].vertices = shapegeo['vertices']
            self.vertex_lists[id].colors = shapegeo['colors']
            if 'tex_coords' in shapegeo.keys():
                self.vertex_lists[id].tex_coords = shapegeo['tex_coords']
        else:
            # Route the geometry to a render group based on its id.
            if 'outline' in shapegeo['id']:
                group = self.ui.groups['outline']
            elif 'wheel' in shapegeo['id']:
                group = self.ui.groups['wheel']
            elif 'additive' in shapegeo['id']:
                group = self.ui.groups['additive']
            else:
                group = self.ui.groups['control']
            data_update = shapegeo['data_update'] if 'data_update' in shapegeo.keys() else 'static'
            attributes = [('v3f/%s' % data_update, shapegeo['vertices']), \
                          ('c4f/%s' % data_update, shapegeo['colors'])]
            if 'tex_coords' in shapegeo.keys():
                attributes.append( ('t2f/static', shapegeo['tex_coords']) )
            self.vertex_lists[id] = self.ui.batch.add( shapegeo['len'],
                                        shapegeo['mode'],
                                        group,
                                        *attributes)

    def del_shape_geo(self, id):
        """Delete every vertex list whose key contains `id`.
        NOTE(review): the loop target shadows the `id` parameter; this is
        safe only because the match list is built before the loop runs --
        worth renaming."""
        for id, vertex_list in [(k,v) for k,v in self.vertex_lists.items() if id in k]:
            vertex_list.delete()
            del self.vertex_lists[id]

    def point_inside(self, x, y):
        """True when (x, y) falls inside this control's bounding box."""
        if x < self.x: return False
        if x > self.x+self.w: return False
        if y < self.y: return False
        if y > self.y+self.h: return False
        return True

    def delete(self):
        # Free all GPU-side geometry owned by this control.
        for v in self.vertex_lists.values():
            v.delete()

    # override in subclasses
    def height(self):
        return self.h

    def position_label(self):
        # Place the label at the control's left edge, baseline-aligned.
        self.label.anchor_y = 'baseline'
        self.label.x = self.x
        self.label.y = self.y+4
        self.label.width = self.w
        self.label.text = self.title
        #if self.w > 10 and len(self.title) > 1:
        #    while self.label.content_width > self.w - 14:
        #        self.label.text = self.label.text[1:]

    def reposition(self):
        self.position_label()
        self.update()

    def update(self):
        # Rebuild geometry; subclasses override.
        pass

    def on_mouse_press(self, *args, **kwargs):
        self.activate()

    def on_mouse_release(self, *args, **kwargs):
        self.deactivate()

    def on_mouse_drag(self, *args, **kwargs):
        pass

    def on_text(self, text):
        pass

    def on_key_press(self, symbol, modifiers):
        pass

    def release_outside(self, x, y, buttons, modifiers):
        self.deactivate()

    def activate(self):
        self.active = True
        self.update()

    def deactivate(self):
        self.active = False
        self.update()
class UiLayout(UiControl):
    """Container that stacks child controls/layouts vertically (top-down),
    assigning their x/y/w; optionally draws a rounded background."""
    VERTICAL = 1
    HORIZONTAL = 2
    CONTROL = 3
    HEIGHT = 16
    PADDING = 2  # vertical gap between stacked items

    def __init__(self, ui, x=0, y=3, w=0, wf=1.0, pad=0, bg=False, style=VERTICAL, **kwargs):
        super(UiLayout, self).__init__( ui, **kwargs )
        self.style = style
        self.items = []
        self.x = x
        # XXX: Check me
        #self.y = y
        #self.w = w
        set_from_kwargs(self, ('y', 'w'), kwargs)
        #print(self.y)
        self.h = 0
        self.pad = pad
        self.wf = wf  # fraction of the available width this layout occupies
        self.title = 'uiLayout'
        self.bg = bg

    def reposition(self):
        for item in self.items:
            item.reposition()

    def print_r(self, indent=0):
        # Debug helper: print the layout tree with geometry.
        print('- '*indent + self.title +' - x: %d y: %d w: %d h: %d' % (self.x,self.y,self.w,self.h))
        for item in self.items:
            if type(item) == UiLayout:
                item.print_r(indent=indent+1)
            else:
                print(' - '*indent + item.title.ljust(20) +' x: %d y: %d w: %d h: %d' % (item.x,item.y,item.w,item.h))

    def layout(self):
        """Recompute children's geometry, stacking downward from self.y.

        NOTE(review): this mutates self.w (*= wf) and moves self.y to the
        bottom of the laid-out stack on EVERY call, so repeated calls
        without the owner first resetting w/y (as UiEventHandler.on_resize
        does) shrink and slide the layout cumulatively -- confirm intended.
        """
        self.h = 0
        y = self.y - self.pad
        x = self.x + self.pad
        self.w *= self.wf
        w = self.w
        for item in self.items:
            item.w = w
            if type(item) == UiLayout:
                # Nested layouts need their own y before recursing.
                item.y = y
                item.layout()
            y -= item.height() + self.PADDING
            item.y = y
            item.x = x
        self.h = self.y - y
        self.y = y
        self.update()
        self.reposition()

    def update(self):
        # Draw the optional rounded background behind the stack.
        if self.bg == False: return
        col = [0.4,0.4,0.4,0.7]
        outline_col = [.2,.2,.2, 0.3]
        x = self.x+self.pad - 4
        w = self.w + 8
        y = self.y-self.pad - 4
        h = self.h + 8
        self.add_shape_geo( roundbase(x, y, w, h, 3, col, col) )
        self.add_shape_geo( roundoutline(x, y, w, h, 3, outline_col) )

    def addLayout(self, **kwargs):
        """Create a nested UiLayout, append it and re-run layout."""
        layout = UiLayout(self.ui, **kwargs)
        self.items.append(layout)
        self.layout()
        return layout

    def addParameter(self, ui, param, ctype=None, **kwargs):
        """Add a control bound to a Parameter, picking the widget class from
        the parameter's python type unless `ctype` overrides it."""
        if ctype is not None:
            controltype = ctype
        elif param.enum is not None:
            controltype = EnumControl
        elif param.type in ui.control_types['numeric']:
            controltype = NumericControl
        elif param.type in ui.control_types['toggle']:
            controltype = ToggleControl
        elif param.type in ui.control_types['color']:
            controltype = ColorSwatch
        elif param.type == Histogram:
            controltype = HistogramControl
        control = controltype(ui, param=param, vmin=param.min, vmax=param.max, **kwargs)
        self.items.append(control)
        ui.controls.append(control)
        self.layout()

    def addLabel(self, ui, **kwargs):
        control = LabelControl(ui, **kwargs)
        self.items.append(control)
        ui.controls.append(control)
        self.layout()

    def addControl(self, ui, **kwargs):
        # detect ui control type and length
        # attribute controls
        if 'object' in kwargs.keys() and 'attr' in kwargs.keys():
            attr = getattr( kwargs['object'], kwargs['attr'])
            if type(attr) in ui.control_types['numeric']:
                controltype = NumericControl
            elif type(attr) in ui.control_types['toggle']:
                controltype = ToggleControl
            control = controltype(ui, **kwargs)
            self.items.append(control)
            ui.controls.append(control)
        # action control
        elif 'func' in kwargs.keys():
            control = ActionControl(ui, **kwargs)
            self.items.append(control)
            ui.controls.append(control)
        self.layout()
class UiAttrControl(UiControl):
    """Control bound either to a Parameter object or directly to an
    (object, attr) pair; supports scalar and vector values."""

    def __init__(self, ui, param=None, object=None, attr='', vmin=0, vmax=100, subtype=None, **kwargs):
        super(UiAttrControl, self).__init__( ui, **kwargs )
        self.param = self.object = None
        self.attr = ''
        self.subtype = subtype
        if param is not None:
            # Parameter interface: length/title/subtype come from the param.
            self.param = param
            self.len = param.len
            self.title = param.title if self.title == '' else self.title
            self.subtype = param.subtype
        elif hasattr(object, attr):
            # Direct attribute binding.
            self.object = object
            self.attr = attr
            self.len = attr_len(getattr(object, attr))
            self.title = self.attr.capitalize() if self.title == '' else self.title
        else:
            pass
            #raise ValueError("Invalid attribute provided: %s" % attr)
        self.min = vmin
        self.max = vmax
        self.label.text = self.title

    def limited(self, val, newval):
        """Clamp `newval` into [min, max] when the current value is numeric.

        Bug fix: the original compared type(val) against the *strings*
        'float'/'int', which is never true, so clamping silently never
        happened.  Compare against the type objects instead; bool is
        deliberately excluded (type(True) is bool) so toggles keep
        their boolean values.
        """
        if type(val) in (float, int):
            return min(self.max, max(self.min, newval))
        else:
            return newval

    @property
    def value(self):
        return self.getval()

    @value.setter
    def value(self, value):
        self.setval(value)

    def getval(self, sub=None):
        """Read the bound value (component `sub` for vector values)."""
        # Parameter interface
        if self.param is not None:
            if sub is None:
                return self.param.value
            else:
                return self.param.values[sub]
        # or read attribute values directly
        attr = getattr(self.object, self.attr)
        if self.len > 1 and sub is not None:
            return attr[sub]
        else:
            return attr

    def setval(self, newval, sub=None):
        """Write the bound value, clamping direct attribute writes."""
        # Parameter interface
        if self.param is not None:
            if sub is None:
                self.param.value = newval
            else:
                self.param.values[sub] = newval
            return
        # or modify attribute values directly
        attr = getattr(self.object, self.attr)
        if self.len > 1 and sub is not None:
            attr[sub] = self.limited( attr[sub], newval )
        else:
            attr = self.limited(attr, newval)
            setattr(self.object, self.attr, attr)

    def subw(self):
        """Pixel width of one vector component's sub-field."""
        return (self.w*(1-self.LABELSIZE)) / float(self.len)

    def subx(self, i):
        """Left edge of component i's sub-field."""
        return int( self.x + self.LABELSIZE*self.w + i*self.subw() )

    def point_inside_sub(self, x, y):
        """Return the component index under (x, y), or None when outside."""
        w = (1-self.LABELSIZE)*self.w / float(self.len)
        offsetx = self.x + self.LABELSIZE*self.w
        for i in range(self.len):
            x1 = offsetx + i*w
            x2 = offsetx + (i+1)*w
            if x1 < x < x2 and self.y < y < self.y+self.h:
                return i
        return None
class UiTextEditControl(UiAttrControl):
    """Attribute control whose value(s) can be edited as text, with one
    document/layout/caret triple per vector component.

    NOTE(review): uses `math`, which is not imported directly in this
    module -- assumed to arrive via `from ui2ddraw import *`; confirm.
    """
    NUM_VALUE_WIDTH = 56

    def __init__(self, ui, **kwargs):
        super(UiTextEditControl, self).__init__( ui, **kwargs)
        self.textediting = None  # index of the component being edited, or None
        self.carets = []
        self.documents = []
        self.layouts = []
        for i in range(self.len):
            doc = pyglet.text.document.UnformattedDocument( '' )
            doc.set_style(0, len(doc.text), UiControls.font_style)
            layout = pyglet.text.layout.IncrementalTextLayout(
                            doc, 20, 20, multiline=False,
                            batch=ui.batch,
                            group=ui.groups['label'],
                            )
            caret = pyglet.text.caret.Caret(layout)
            caret.visible = False
            self.documents.append(doc)
            self.layouts.append(layout)
            self.carets.append(caret)
        self.text_from_val()

    def position_label(self):
        super(UiTextEditControl, self).position_label()
        for i, layout in enumerate(self.layouts):
            w = self.w*(1-self.LABELSIZE) / float(len(self.layouts))
            layout.anchor_y = 'baseline'
            layout.anchor_x = 'left'
            layout.x = int( self.x + self.LABELSIZE*self.w + i*w + 6)
            layout.y = self.y + 4
            layout.width = int( w )
            layout.height = self.h
        self.label.width = self.w
        self.label.anchor_x = 'left'
        self.label.x = self.x

    def val_from_text(self):
        """Parse each component's text back into its value (best-effort)."""
        for i, doc in enumerate(self.documents):
            try:
                val = float(doc.text)
                if self.subtype == UiControls.ANGLE:
                    val = math.radians(val)
                self.setval(val, sub=i)
            except (ValueError, TypeError):
                # Bug fix: was a bare `except:` which also swallowed
                # KeyboardInterrupt/SystemExit.  Only parse failures are
                # ignored (invalid text keeps the previous value).
                pass

    def text_from_val(self):
        """Refresh the displayed text from the current value(s)."""
        for i, doc in enumerate(self.documents):
            val = self.getval(sub=i)
            if self.subtype == UiControls.ANGLE:
                doc.text = u"%.2f\xB0" % math.degrees(val)
            else:
                doc.text = "%.2f" % val

    def textedit_begin(self, s=0):
        """Start editing component s: show its caret and select all text."""
        self.activate()
        self.textediting = s
        self.carets[s].visible = True
        self.carets[s].mark = 0
        self.carets[s].position = len(self.documents[s].text)
        self.update()

    def textedit_update(self, text):
        self.carets[self.textediting].on_text(text)

    def textedit_end(self):
        """Stop editing: hide all carets and clear their selections."""
        self.deactivate()
        self.textediting = None
        for i in range(self.len):
            self.carets[i].visible = False
            self.carets[i].mark = self.carets[i].position = 0
        self.update()

    def textedit_confirm(self):
        if self.textediting is None: return
        self.val_from_text()
        self.text_from_val() # update the displayed text with consistent decimal rounding etc.
        self.textedit_end()

    def textedit_cancel(self):
        if self.textediting is None: return
        self.textedit_end()

    def release_outside(self, x, y, buttons, modifiers):
        # Clicking away commits any pending edit.
        self.textedit_confirm()
        super(UiTextEditControl, self).release_outside(x, y, buttons, modifiers)
class ToggleControl(UiAttrControl):
    """Checkbox widget bound to a boolean value."""
    CHECKBOX_W = 10
    CHECKBOX_H = 10

    def position_label(self):
        # Shift the label right to make room for the checkbox.
        self.label.width = self.w - (self.CHECKBOX_W + 8)
        super(ToggleControl, self).position_label()
        self.label.x = self.x + self.CHECKBOX_W + 8

    def update(self):
        # Colour scheme depends on the checked state.
        if self.value:
            col1 = [0.35]*3 + [1.0]
            col2 = [0.30]*3 + [1.0]
            coltext = [255]*4
            outline_col = [.2,.2,.2, 1.0]
            checkmark_col = [1,1,1,1.0]
        else:
            col2 = [0.5]*3 + [1.0]
            col1 = [0.6]*3 + [1.0]
            coltext = [0,0,0,255]
            outline_col = [.25,.25,.25, 1.0]
            checkmark_col = [0,0,0,0.0]  # fully transparent when unchecked
        cbw = self.CHECKBOX_W
        cbh = self.CHECKBOX_H
        cbx = self.x + 2
        cby = self.y + (self.h - cbh)*0.5
        self.add_shape_geo( roundbase(cbx, cby, cbw, cbh, 2, col1, col2) )
        self.add_shape_geo( roundoutline(cbx, cby, cbw, cbh, 2, outline_col) )
        self.add_shape_geo( checkmark(cbx, cby, cbw, cbh, checkmark_col) )
        self.label.color = coltext

    def on_mouse_press(self, x, y, buttons, modifiers):
        if buttons & pyglet.window.mouse.LEFT:
            self.activate()
            self.toggle()

    def on_mouse_release(self, x, y, buttons, modifiers):
        if buttons & pyglet.window.mouse.LEFT:
            self.deactivate()

    def toggle(self):
        # Invert the bound boolean and redraw.
        self.setval( not self.value )
        self.update()
class EnumControl(UiAttrControl):
    """Row of mutually-exclusive buttons, one per (label, value) enum item."""

    def __init__(self, ui, *args, **kwargs):
        super(EnumControl, self).__init__(ui, *args, **kwargs)
        if self.title == '':
            self.LABELSIZE = 0.0  # no title: use the full width for buttons
        self.len = len(self.param.enum)
        self.itemlabels = []
        for item in self.param.enum:
            itemlabel = pyglet.text.Label(item[0],
                            batch=ui.batch,
                            group=ui.groups['label'],
                            x=0, y=0, width=self.subw(), height=self.h,
                            **UiControls.font_style )
            self.itemlabels.append(itemlabel)

    def position_label(self):
        super(EnumControl, self).position_label()
        for i, itemlabel in enumerate(self.itemlabels):
            item = self.param.enum[i]
            itemlabel.text = item[0]
            itemlabel.width = self.subw()
            itemlabel.anchor_y = 'baseline'
            itemlabel.anchor_x = 'center'
            itemlabel.x = int(self.subx(i) + itemlabel.width/2.0)
            itemlabel.y = self.y+4

    def update(self):
        for i, item in enumerate(self.param.enum):
            w = self.subw()
            selected = (item[1] == self.value)
            if selected:
                col1 = [0.35]*3 + [1.0]
                col2 = [0.30]*3 + [1.0]
                outline_col = [.2,.2,.2, 1.0]
                coltext = [255]*4
            else:
                col1 = [0.5]*3 + [1.0]
                col2 = [0.6]*3 + [1.0]
                outline_col = [.25,.25,.25, 1.0]
                coltext = [0,0,0,255]
            x = self.subx(i)
            # Round only the outer corners of the first and last buttons.
            if i == 0:
                corners = '03'
            elif i == len(self.param.enum)-1:
                corners = '12'
            else:
                corners = ''
            self.add_shape_geo( roundbase(x, self.y, w, self.h, 3, col1, col2, index=i, corners=corners) )
            self.add_shape_geo( roundoutline(x, self.y, w, self.h, 3, outline_col, index=i, corners=corners) )
            self.itemlabels[i].color = coltext

    def on_mouse_press(self, x, y, buttons, modifiers):
        s = self.point_inside_sub(x, y)
        # Idiom fix: `is not None` (PEP 8) instead of `!= None`.
        if s is not None:
            if buttons & pyglet.window.mouse.LEFT:
                self.setval( self.param.enum[s][1] )
class PickerWindow(pyglet.window.Window):
    """Pop-up colour picker: a ColorWheel plus numeric fields bound to the
    same Parameter as the swatch that opened it."""

    def __init__(self, parentcontrol, *args, **kwargs):
        super(PickerWindow, self).__init__(*args, **kwargs)
        self.parentcontrol = parentcontrol
        self.param = self.parentcontrol.param
        self.parentui = self.parentcontrol.ui
        self.ui = Ui(self, overlay=False)
        self.ui.layout.x = 0
        self.ui.layout.wf = 1.0
        # Bug fix: addParameter's widget-class keyword is `ctype`; passing
        # `type=` fell through **kwargs into the control constructor
        # (unexpected keyword argument) and never selected the widget class.
        self.ui.layout.addParameter(self.ui, self.param, ctype=ColorWheel)
        self.ui.layout.addParameter(self.ui, self.param, ctype=NumericControl)
        glClearColor(0.4, 0.4, 0.4, 1.0)

    def on_resize(self, width, height):
        glViewport(0, 0, width, height)
        # The base Window fires on_resize during __init__, before self.ui
        # exists -- guard against that.
        if hasattr(self, "ui"):
            self.ui.layout.w = width
            self.ui.layout.y = height
            self.ui.layout.layout()

    def update_picker(self):
        """Redraw both this window's controls and the parent swatch."""
        self.ui.update()
        self.parentcontrol.update()

    def on_mouse_drag(self, x, y, dx, dy, buttons, modifiers):
        self.update_picker()

    def on_mouse_press(self, x, y, buttons, modifiers):
        self.update_picker()
class ColorWheel(UiAttrControl):
    """Hue/saturation wheel rendered with a GLSL shader; brightness (value)
    comes from the currently bound colour."""

    # Shared by all ColorWheel instances.
    # Fix: read the GLSL helper with a context manager so the file handle
    # is closed deterministically (was open('util.glsl').readlines(),
    # which leaked the handle).
    with open('util.glsl') as _f:
        util = _f.read()
    del _f

    vertex_shader = '''
    void main(void) {
        gl_TexCoord[0] = gl_MultiTexCoord0;
        gl_FrontColor = gl_Color;
        gl_Position = ftransform();
    }
    '''

    fragment_shader = util + '''
    #define pi 3.141592653589793238462643383279
    vec3 hsvrgb(float h,float s,float v) { return mix(vec3(1.),clamp((abs(fract(h+vec3(3.,2.,1.)/3.)*6.-3.)-1.),0.,1.),s)*v; }
    void main(void) {
        float u = gl_TexCoord[0].s*2.0 - 1.0;
        float v = gl_TexCoord[0].t*2.0 - 1.0;
        float theta = atan(v, u);
        float h = (theta/pi)*0.5 + 0.5;
        float s = sqrt(u*u + v*v);
        float val = gl_Color.x;
        vec4 hsv = vec4(h, s, val, 1.0);
        vec4 rgb = linearrgb_to_srgb( hsv_to_rgb(hsv) );
        gl_FragColor = rgb;
    }
    '''

    def __init__(self, *args, **kwargs):
        super(ColorWheel, self).__init__(*args, **kwargs)
        self.h = 128  # wheel is drawn 128 px tall

    def update(self):
        col = self.value[:]
        h, s, v = colorsys.rgb_to_hsv(*col)
        self.add_shape_geo( colorwheel(self.x, self.y, self.w, self.h, v) )

    def set_color(self, x, y):
        """Convert a click position into hue/saturation, keeping the
        current brightness, and write the colour back.

        NOTE(review): relies on `math` arriving via `from ui2ddraw import *`
        -- it is not imported directly in this module; confirm.
        """
        col = self.value[:]
        hue, sat, val = colorsys.rgb_to_hsv(*col)
        r = float(self.h * 0.5)
        cx = self.x + self.w*0.5
        cy = self.y + self.h*0.5
        u = (x - cx) / r
        v = (y - cy) / r
        theta = math.atan2(v, u)
        h = (theta/math.pi)*0.5 + 0.5
        s = math.sqrt(u*u + v*v)
        rgb = parameter.Color3(*colorsys.hsv_to_rgb(h, s, val))
        self.setval(rgb)

    def on_mouse_drag(self, x, y, dx, dy, buttons, modifiers):
        if buttons & pyglet.window.mouse.LEFT:
            self.set_color(x, y)

    def on_mouse_press(self, x, y, buttons, modifiers):
        if buttons & pyglet.window.mouse.LEFT:
            self.activate()
            self.set_color(x, y)
class ColorSwatch(UiAttrControl):
    """Flat colour preview; clicking it opens a PickerWindow."""
    def update(self):
        col = list(self.value)
        col = col + [1.0]  # append full alpha
        outline_col = [.2,.2,.2, 1.0]
        w = self.w*(1-self.LABELSIZE)
        x = self.x + self.w*self.LABELSIZE
        self.add_shape_geo( roundbase(x, self.y, w, self.h, 6, col, col) )
        self.add_shape_geo( roundoutline(x, self.y, w, self.h, 6, outline_col) )
    def open_picker(self, x, y):
        # Spawn a small picker window positioned near the click.
        wx, wy = self.ui.window.get_location()
        sx, sy = self.ui.window.get_size()
        import platform
        # Dialog style on Linux; tool style elsewhere.
        if platform.system() == 'Linux':
            style = pyglet.window.Window.WINDOW_STYLE_DIALOG
        else:
            style = pyglet.window.Window.WINDOW_STYLE_TOOL
        window = PickerWindow(self, 200, 200, \
                    resizable=True, caption='color', style=style)
        window.set_location(wx+x, wy+(sy-y))
    def on_mouse_press(self, x, y, buttons, modifiers):
        if buttons & pyglet.window.mouse.LEFT:
            self.open_picker(x, y)
class NumericControl(UiTextEditControl):
    """Numeric field(s): left-click to edit as text, middle-drag to slide."""

    def __init__(self, *args, **kwargs):
        super(NumericControl, self).__init__(*args, **kwargs)
        self.sliding = None  # component index being dragged, or None
        if self.title == '':
            self.LABELSIZE = 0.0

    def update(self):
        self.text_from_val()
        w = (self.w*(1-self.LABELSIZE)) / float(self.len)
        for i in range(self.len):
            # Highlight the component currently edited or slid.
            if self.active and (self.textediting == i or self.sliding == i):
                col2 = [0.3,0.3,0.3, 1.0]
                col1 = [0.4,0.4,0.4, 1.0]
                outline_col = [.2,.2,.2, 1.0]
                coltext = [255]*4
            else:
                col2 = [0.5,0.5,0.5, 1.0]
                col1 = [0.6,0.6,0.6, 1.0]
                outline_col = [.2,.2,.2, 1.0]
                coltext = [0,0,0,255]
            x = self.x + self.LABELSIZE*self.w + i*w
            x = int(x)
            self.add_shape_geo( roundbase(x, self.y, w, self.h, 6, col1, col2, index=i) )
            self.add_shape_geo( roundoutline(x, self.y, w, self.h, 6, outline_col, index=i) )
            self.documents[i].set_style(0, len(self.documents[i].text), {'color': coltext})

    def on_mouse_press(self, x, y, buttons, modifiers):
        s = self.point_inside_sub(x, y)
        # Idiom fix throughout this class: compare against None with
        # `is`/`is not` (PEP 8) rather than ==/!=.
        if s is not None:
            if buttons & pyglet.window.mouse.LEFT:
                if self.textediting is None:
                    self.textedit_begin(s=s)
                elif self.textediting == s:
                    self.carets[s].on_mouse_press(x, y, buttons, modifiers)
                    return pyglet.event.EVENT_HANDLED
                else:
                    self.textedit_end()
            elif buttons & pyglet.window.mouse.MIDDLE:
                self.sliding = s
                self.activate()

    def on_mouse_release(self, x, y, buttons, modifiers):
        if buttons & pyglet.window.mouse.MIDDLE:
            self.deactivate()

    def on_text(self, text):
        if self.textediting is not None:
            self.textedit_update(text)

    def on_key_press(self, symbol, modifiers):
        if self.textediting is not None:
            if symbol in (key.ENTER, key.RETURN, key.NUM_ENTER):
                self.textedit_confirm()
                return pyglet.event.EVENT_HANDLED
            elif symbol == key.ESCAPE:
                self.textedit_cancel()
                return pyglet.event.EVENT_HANDLED

    def on_mouse_drag_setval(self, dx):
        # Drag sensitivity: the full [min, max] range maps to 500 px.
        sensitivity = (self.max - self.min) / 500.0
        self.setval( self.getval(sub=self.sliding) + sensitivity*dx, sub=self.sliding )
        self.text_from_val()

    def on_mouse_drag(self, x, y, dx, dy, buttons, modifiers):
        s = self.point_inside_sub(x, y)
        if s is not None:
            if buttons & pyglet.window.mouse.LEFT:
                if self.textediting == s:
                    self.carets[s].on_mouse_drag(x, y, dx, dy, buttons, modifiers)
                    return pyglet.event.EVENT_HANDLED
            if buttons & pyglet.window.mouse.MIDDLE:
                if self.sliding is not None:
                    self.on_mouse_drag_setval(dx)
                    self.update()
class ActionControl(UiControl):
    """Push-button that invokes `func(*argslist, **kwargsdict)` on release."""

    def __init__(self, ui, object=None, attr='', func=None, argslist=None, kwargsdict=None, **kwargs):
        super(ActionControl, self).__init__( ui, **kwargs )
        if func is None:
            raise ValueError('Invalid function')
        self.func = func
        # Bug fix: the defaults were the mutable literals [] and {}, which
        # Python shares across every instance; default to fresh containers.
        self.argslist = [] if argslist is None else argslist
        self.kwargsdict = {} if kwargsdict is None else kwargsdict
        if self.title == '':
            self.title = self.func.__name__.capitalize()
            self.label.text = self.title

    def position_label(self):
        # Centre the label on the button.
        super(ActionControl, self).position_label()
        self.label.anchor_x = 'center'
        self.label.x = self.x + self.w//2

    def update(self):
        # Pressed buttons render darker with inverted text.
        if self.active:
            col1 = [0.35]*3 + [1.0]
            col2 = [0.30]*3 + [1.0]
            outline_col = [.2,.2,.2, 1.0]
            coltext = [255]*4
        else:
            col1 = [0.5]*3 + [1.0]
            col2 = [0.6]*3 + [1.0]
            outline_col = [.25,.25,.25, 1.0]
            coltext = [0,0,0,255]
        self.add_shape_geo( roundbase(self.x, self.y, self.w, self.h, 6, col1, col2) )
        self.add_shape_geo( roundoutline(self.x, self.y, self.w, self.h, 6, outline_col) )
        self.label.color = coltext

    def on_mouse_press(self, x, y, buttons, modifiers):
        if buttons & pyglet.window.mouse.LEFT:
            self.activate()

    def on_mouse_release(self, x, y, buttons, modifiers):
        # Fire the action on release inside the button.
        if buttons & pyglet.window.mouse.LEFT:
            self.func(*self.argslist, **self.kwargsdict)
            self.deactivate()
class LabelControl(UiAttrControl):
    """Multi-line text label; optionally appends a Parameter's value."""
    def calc_label(self):
        super(LabelControl, self).position_label()
        self.label.width = self.w
        self.label.multiline = True
        self.label.anchor_y = 'baseline'
        if self.param is not None:
            self.label.text = self.title + str(self.param.value)
        else:
            self.label.text = self.title
        # Snap height up to a whole number of control rows.
        # NOTE(review): `math` is assumed in scope via `from ui2ddraw import *`
        # -- it is not imported directly in this module; confirm.
        self.h = math.ceil(self.label.content_height / float(UiControl.HEIGHT)) * UiControl.HEIGHT
        self.label.y += self.h - UiControl.HEIGHT
    def height(self):
        # update height based on given width
        self.calc_label()
        return self.h
    def update(self):
        self.calc_label()
class HistogramControl(UiAttrControl):
    """Read-only histogram display for up to three channel arrays (RGB)."""
    def height(self):
        self.h = 64  # fixed display height
        return self.h
    def update(self):
        colors = [[1,0,0,0.7],[0,1,0,0.7],[0,0,1,0.7]]
        hist = self.value
        # Rebuild from scratch when the number of channels changed.
        if hasattr(self, 'prev_arrays'):
            if self.prev_arrays != len(hist.arrays):
                self.del_shape_geo('histogram')
        self.prev_arrays = len(hist.arrays)
        for i, hchannel in enumerate(hist.arrays):
            geo = histogram(self.x, self.y, self.w, self.h, hchannel, colors[i], i)
            self.add_shape_geo( geo )
    def activate(self):
        # Static display: never takes focus.
        pass
    def deactivate(self):
        pass
13,937 | 5ab097add7af5ffccd4bb62c83d24df94111b52c | from pydantic import BaseModel
class IUser(BaseModel):
    """Public user profile as exposed by the API.

    NOTE(review): fields are annotated `int = None` / `str = None`;
    pydantic v1 treats these as implicitly Optional, but pydantic v2
    rejects a None default on a non-Optional field -- migrate to
    `Optional[...]` annotations when upgrading.
    """
    uid: int = None       # user id
    name: str = None
    dp: str = None        # presumably a department code -- TODO confirm
    dpName: str = None
    pos: int = None       # presumably a position code -- TODO confirm
    posName: str = None
    role: int = None
    roleName: str = None


class IUserInDB(IUser):
    """User record extended with the stored password (server-side only)."""
    password: str


# Module-level in-memory cache of user info records.
DICUSERINFO = dict()
13,938 | 884817d2ea92b10e30b61b5ae747185e6aab1a17 | from django.shortcuts import render, redirect, get_object_or_404
from django.views.decorators.http import require_POST
from products.models import Book
from .cart import Cart
from .forms import CartAddProductForm
from discount.forms import CouponApplyForm
from django.contrib import messages
from django.urls import reverse
from orders.models import Order
# ,CartAddProductForm_func
@require_POST
def cart_add(request,book_id):
    """
    Add a book to the session cart (POST only).

    The book is added only when its discounted price is positive, the
    form validates, and there is enough inventory; on success the user is
    sent to the cart page, otherwise back to the book's detail page.
    """
    cart = Cart(request)
    book = get_object_or_404(Book, id=book_id)
    form = CartAddProductForm(request.POST)
    if book.get_discounted_price()>0 :
        if form.is_valid():
            cd = form.cleaned_data
            if book.has_inventory(cd['quantity']):
                cart.add(book=book,
                         quantity=cd['quantity'],
                         override_quantity=cd['override'])
                return redirect('cart_detail')
    return redirect('book_detail',pk = book.id)
@require_POST
def cart_remove(request, book_id):
    """
    Remove the given book from the session cart (POST only), then
    redirect back to the cart page.
    """
    cart = Cart(request)
    book = get_object_or_404(Book, id=book_id)
    cart.remove(book)
    return redirect('cart_detail')
def cart_detail(request):
    """
    Render the cart page: each item gets a pre-filled quantity-update
    form, plus a coupon-apply form and a discount-validity flag.
    """
    cart = Cart(request)
    for item in cart:
        item['update_quantity_form'] = CartAddProductForm(
            initial={'quantity': item['quantity'], 'override': True})
    coupon_apply_form = CouponApplyForm()
    # Cleanup: the previous `a = 1; if a == -1:` scaffolding could never
    # enable the flag -- it always evaluated to False.
    isvalid_discount = False  # TODO: wire up real discount validation
    return render(request, 'cart/cart_detail.html',
                  {'cart': cart,
                   'coupon_apply_form': coupon_apply_form,
                   'isvalid_discount': isvalid_discount})
|
13,939 | dc571ed0d58bfc0d18c91070c8a120d84ef430b6 |
def Actors_In_View(actorsinview, livingactors, position, viewdistance, role):
    """Append to `actorsinview` every actor inside a square window of
    half-width `viewdistance` centred on `position`.

    Predators never see plants.  Returns the (mutated) `actorsinview`.
    """
    px, py = position[0], position[1]
    for other in livingactors:
        # Predators cannot see plants at all.
        if role == 'predator' and other.role == 'plant':
            continue
        dx = other.position[0] - px
        dy = other.position[1] - py
        # Inside the square view window (strict inequality on both axes).
        if abs(dx) < viewdistance and abs(dy) < viewdistance:
            actorsinview.append(other)
    return actorsinview
13,940 | 14718985e75317991848ce9b757bc77982b1d97c | import math #brings in the basic python math used in the rest of the program
yFunction = str(input("f(x)="))  # function of x to find a root of, e.g. "x**3-2*x-5"
xValue = int(input("where does your approximation start? "))  # initial guess x0
rePeat = int(input("how many times should I approximate? "))  # number of Newton iterations
print("")  # blank line so the output reads nicer
print("The approximations of y=0 (in order from first to last):")


def funcTion():
    """Evaluate the entered function at the current global xValue.

    Bug fix: the original computed the value inside the function but never
    returned it, so the main loop crashed with a NameError on
    `yFunctionNew` (and then pointlessly re-eval'ed numeric results).
    """
    expr = ""
    for ch in yFunction:
        # Parenthesise the substituted number so a negative guess keeps
        # correct precedence (e.g. "(-2)**2" rather than "-2**2").
        expr += "(" + str(xValue) + ")" if ch == "x" else ch
    return eval(expr)


def functionRight():
    """Evaluate the function slightly to the right of xValue (x + 0.001)."""
    expr = ""
    for ch in yFunction:
        expr += "(" + str(xValue + .001) + ")" if ch == "x" else ch
    return eval(expr)


counTer = 0
while counTer < rePeat:
    # Newton's method, with the derivative estimated by a forward difference.
    yHere = funcTion()
    yRight = functionRight()
    deriVative = (yRight - yHere) / .001
    newGuess = xValue - (yHere / deriVative)
    print(newGuess)
    xValue = newGuess
    counTer += 1
13,941 | 7dd663caf1dde61ca1a75e7362300f9f9c1fe4b9 | # -*- coding: utf-8 -*-
"""
Created on Tue Dec 5 16:03:26 2017
https://www.python.org/doc/essays/graphs/
The problem with this implementation is that it always goes A->B-> and so on while there could be A->C connection directly
This is DEFINITELY not find the shortest path
@author: User
"""
# Adjacency list for the demo graph (directed edges).
graph = {
    'A': ['B', 'C'],
    'B': ['C', 'D'],
    'C': ['D', 'F', 'E'],
    'D': ['C'],
    'E': ['F'],
    'F': ['C', 'E'],
}
def find_path(graph, start, end, path=None):
    """Depth-first search for *a* path from start to end.

    Returns the path as a list of nodes, or None when no path exists.
    Note: this returns the first path found, not necessarily the shortest.
    """
    # Fix: avoid the mutable-default-argument pitfall; build a fresh list
    # per call instead of sharing `[]` across calls.
    path = (path or []) + [start]
    if start == end:
        return path
    if start not in graph:  # dead-end node not present in the adjacency map
        return None
    for node in graph[start]:
        if node not in path:  # skip visited nodes to avoid cycles
            newpath = find_path(graph, node, end, path)
            if newpath:
                return newpath
    return None
print(find_path(graph, 'A', 'F')) |
13,942 | ca446e098975aa361e21c6e9b69e6c2749897a7a | # encoding: utf-8
# module PyQt4.QtGui
# from /usr/lib64/python2.6/site-packages/PyQt4/QtGui.so
# by generator 1.136
# no doc
# imports
import PyQt4.QtCore as __PyQt4_QtCore
class QTextOption(): # skipped bases: <type 'sip.simplewrapper'>
    """Auto-generated stub for PyQt4.QtGui.QTextOption.

    Only the method names are known (real signatures unavailable); every
    body is a placeholder. The class-level integers mirror the Qt enum
    values for tab types, flags and wrap modes.
    """
    def alignment(self, *args, **kwargs): # real signature unknown
        pass
    def flags(self, *args, **kwargs): # real signature unknown
        pass
    def setAlignment(self, *args, **kwargs): # real signature unknown
        pass
    def setFlags(self, *args, **kwargs): # real signature unknown
        pass
    def setTabArray(self, *args, **kwargs): # real signature unknown
        pass
    def setTabs(self, *args, **kwargs): # real signature unknown
        pass
    def setTabStop(self, *args, **kwargs): # real signature unknown
        pass
    def setTextDirection(self, *args, **kwargs): # real signature unknown
        pass
    def setUseDesignMetrics(self, *args, **kwargs): # real signature unknown
        pass
    def setWrapMode(self, *args, **kwargs): # real signature unknown
        pass
    def tabArray(self, *args, **kwargs): # real signature unknown
        pass
    def tabs(self, *args, **kwargs): # real signature unknown
        pass
    def tabStop(self, *args, **kwargs): # real signature unknown
        pass
    def textDirection(self, *args, **kwargs): # real signature unknown
        pass
    def useDesignMetrics(self, *args, **kwargs): # real signature unknown
        pass
    def wrapMode(self, *args, **kwargs): # real signature unknown
        pass
    def __init__(self, *args, **kwargs): # real signature unknown
        pass
    __weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None)  # default
    """list of weak references to the object (if defined)"""
    # Qt enum values (TabType, Flag, WrapMode) captured by the generator.
    AddSpaceForLineAndParagraphSeparators = 4
    CenterTab = 2
    DelimiterTab = 3
    IncludeTrailingSpaces = -2147483648
    LeftTab = 0
    ManualWrap = 2
    NoWrap = 0
    RightTab = 1
    ShowLineAndParagraphSeparators = 2
    ShowTabsAndSpaces = 1
    SuppressColors = 8
    WordWrap = 1
    WrapAnywhere = 3
    WrapAtWordBoundaryOrAnywhere = 4
|
13,943 | 29ee13b0be6582088694cdcdd600fdc235488f33 | # -*- coding: utf-8 -*-
"""This module defines functions for fetching and parsing files in the PDB
for the Chemical Compound Dictionary (CCD_).
.. _CCD: https://www.wwpdb.org/data/ccd
"""
from prody import LOGGER, PY3K
from prody.utilities import openURL, isListLike
from prody.proteins.starfile import parseSTARLines, StarDict
__all__ = ['parseCCD']
def parseCCD(ids):
    """Fetch and parse Chemical Component Dictionary entries from Ligand Expo.

    :arg ids: a single CCD identifier (e.g. ``'ATP'``) or a list of them
    :returns: one parsed StarDict entry (or None on failure) when a single
        id is given, otherwise a list with one entry (or None) per id
    """
    if isListLike(ids):
        n_ids = len(ids)
    else:
        ids = [ids]
        n_ids = 1

    ret = []
    for ccd_id in ids:  # renamed from `id`, which shadowed the builtin
        id_url = 'http://ligand-expo.rcsb.org/reports/{0}/{1}/{1}.cif'.format(ccd_id[0],
                                                                              ccd_id)
        try:
            handle = openURL(id_url)
        except Exception as err:
            # Bug fix: the original format string used index {1} with a single
            # argument, so logging the failure itself raised IndexError.
            # Also append None so results stay aligned with the input ids.
            LOGGER.warn('download failed ({0}).'.format(str(err)))
            ret.append(None)
        else:
            data = handle.read()
            if len(data):
                if PY3K:
                    data = data.decode()
                parsingDict, prog = parseSTARLines(data.split('\n'), shlex=True)
                star_dict = StarDict(parsingDict, prog, ccd_id)
                ret.append(star_dict[ccd_id])
            else:
                ret.append(None)
                LOGGER.warn('Could not parse CCD data for {0}'.format(ccd_id))

    if n_ids == 1:
        return ret[0]
    return ret
|
13,944 | bbe7f021fbcd0da0dc5836d59b979d977d5cb32b | #!/usr/bin/python
import unittest
import string
import time
import psutil
import datetime as dt
import heapq
import collections
'''
Given a list of scores of different students, return the average score of each student's top five scores in the order of each student's id.
Each entry items[i] has items[i][0] the student's id, and items[i][1] the student's score. The average score is calculated using integer division.
Example 1:
Input: [[1,91],[1,92],[2,93],[2,97],[1,60],[2,77],[1,65],[1,87],[1,100],[2,100],[2,76]]
Output: [[1,87],[2,88]]
Explanation:
The average of the student with id = 1 is 87.
The average of the student with id = 2 is 88.6. But with integer division their average converts to 88.
Note:
1 <= items.length <= 1000
items[i].length == 2
The IDs of the students is between 1 to 1000
The score of the students is between 1 to 100
For each student, there are at least 5 scores
'''
items = [[1,91],[1,92],[2,93],[2,97],[1,60],[2,77],[1,65],[1,87],[1,100],[2,100],[2,76]]
'''
student_info ={}
def highFive(items):
for val in items:
id = val[0]
score = val[1]
if id in student_info :
student_info[id].append(score)
else:
student_info[id]= [score]
student_average =[]
for key in student_info.keys():
ll = heapq.nlargest(5, student_info[key])
avg = sum(ll)/len(ll)
student_average.append([key,avg])
return student_average
'''
def highFive(items):
    """Return ``[id, average-of-top-five-scores]`` per student, in first-seen
    id order.

    Bug fix: the original sliced the heap's backing list
    (``min_heap[id][:5]``) — a binary heap's array order is *not* sorted,
    so that slice was not guaranteed to be the five largest scores.
    ``heapq.nlargest`` gives the actual top five.
    """
    scores = collections.defaultdict(list)
    for student_id, score in items:
        scores[student_id].append(score)
    return [[student_id, sum(heapq.nlargest(5, vals)) / 5]
            for student_id, vals in scores.items()]
class Test(unittest.TestCase):
    """Smoke-test highFive, reporting wall time and memory usage as it runs."""
    # (input, expected) pairs; averages are floats with integer division NOT applied.
    data = [([[1,91],[1,92],[2,93],[2,97],[1,60],[2,77],[1,65],[1,87],[1,100],[2,100],[2,76]],[[1, 87.0], [2, 88.6]])]
    def test_highFive(self):
        print(self.data)
        for input,expected in self.data:
            n1=dt.datetime.now()
            actual = highFive(input)
            n2=dt.datetime.now()
            # NOTE(review): .microseconds is only the microsecond component,
            # and the unit printed is "ms" -- confirm the intended reporting.
            print('Execution Time ',(n2-n1).microseconds,' ms' )
            print('memory % used:', psutil.virtual_memory()[2],'%')
            self.assertEqual(actual,expected)
# Run the unittest suite when executed as a script.
if __name__ =="__main__":
    unittest.main()
|
13,945 | 1f94ca734b62c9144b4b78593bd14cea1cf278c7 | # Generated by Django 3.1.7 on 2021-03-05 04:06
import datetime
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: adds the Submissions model (an uploaded
    file tied to an assignment and the submitting user). Do not hand-edit
    field definitions — regenerate with makemigrations instead."""
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('assignmentsApp', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Submissions',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('file', models.FileField(upload_to='Assignments/')),
                ('date', models.DateTimeField(blank=True, default=datetime.datetime.now)),
                ('assignment', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='assignmentsApp.assignments')),
                ('submitted_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
13,946 | 227dd1f9a8eb39bc3a8237f90b71da34e99fa8d7 | #!/usr/bin/env python3
from math import ceil
from .data_conversion import IntConverter
class ByteData:
    """A mutable byte buffer with XOR / padding helpers for crypto exercises.

    Supports concatenation, repetition, byte-wise XOR, repeating-key XOR,
    PKCS#7 padding/unpadding and bitwise Hamming distance.
    """

    def __init__(self, data=b'', converter=None):
        """Wrap raw bytes; optionally decode `data` with `converter` first."""
        if converter is None:
            self._bytes = bytearray(data)
        else:
            self._bytes = converter.decode(data)

    def __eq__(self, other):
        # Delegates to bytearray equality; works against bytes, bytearray
        # and (via reflected comparison) other ByteData instances.
        return self._bytes == other

    def __add__(self, other):
        return ByteData(self._bytes + other._bytes)

    def __mul__(self, integer):
        # Fixed local name (was "interger"); callers use the * operator.
        return ByteData(self._bytes * integer)

    def __len__(self):
        # _bytes may hold a bare int when a converter returned one.
        if isinstance(self._bytes, int):
            return 1
        return len(self._bytes)

    def __iter__(self):
        return iter(self._bytes)

    def __getitem__(self, index):
        data = self._bytes[index]
        if isinstance(data, int):
            # Single-byte access: route through IntConverter so the result
            # is still a ByteData rather than a bare int.
            return ByteData(self._bytes[index], IntConverter())
        return ByteData(self._bytes[index])

    def __setitem__(self, index, value):
        if isinstance(value, int):
            self._bytes[index] = value
        else:
            self._bytes[index] = int.from_bytes(value, 'big')

    def __xor__(self, other):
        """Byte-wise XOR; both operands must have the same length."""
        if len(self) != len(other):
            raise ValueError('Inputs not same size')
        byte_data = bytearray()
        for byte1, byte2 in zip(self, other):
            byte_data.append(byte1 ^ byte2)
        return ByteData(byte_data)

    def get_data(self):
        """Return the buffer as an immutable bytes object."""
        return bytes(self._bytes)

    def encode(self, converter):
        return converter.encode(self._bytes)

    def repeating_key_xor(self, key):
        """XOR the data against `key` cycled/truncated to the data length."""
        key_size = len(key)
        data_size = len(self)
        multiplier = ceil(data_size / key_size)
        return self ^ (key * multiplier)[:data_size]

    def pkcs7_pad(self, block_size):
        """Append PKCS#7 padding; a whole extra block when already aligned."""
        padding_length = (block_size - len(self) % block_size) % block_size
        if padding_length == 0:
            padding = ByteData(bytes([block_size] * block_size))
        else:
            padding = ByteData(bytes([padding_length]) * padding_length)
        return self + padding

    def pkcs7_pad_remove(self):
        """Strip PKCS#7 padding, raising if the trailing bytes are invalid."""
        # NOTE: bytes indexing always yields int on Python 3, so this branch
        # is effectively always taken for non-empty data.
        if isinstance(self.get_data()[-1], int):
            padding_length = self.get_data()[-1]
            padding = self[-1 * padding_length:]
            if padding == ByteData(bytes([padding_length]) * padding_length):
                return self[:-1 * padding_length]
            raise Exception('Data does not have valid pkcs7 padding: {}'.format(padding.get_data()))
        return self

    def hamming_distance(self, data):
        """Number of differing bits between this buffer and `data`."""
        edit_distance = 0
        for byte1, byte2 in zip(self._bytes, data):
            bin1 = "{0:08b}".format(byte1)
            bin2 = "{0:08b}".format(byte2)
            for bit1, bit2 in zip(bin1, bin2):
                if bit1 != bit2:
                    edit_distance += 1
        return edit_distance
|
13,947 | fa752545a47b46aafb4e3bf27bfa8fd2a548a498 | import simplejson
import urllib
import openerp
from openerp import http
from openerp.http import request
import openerp.addons.web.controllers.main as webmain
from openerp.addons.web.http import SessionExpiredException
from werkzeug.exceptions import BadRequest
import werkzeug.utils
import requests
# Where to redirect the browser after the OAuth flow completes.
url_return=''
# Azure AD resource the tokens are requested for, and the Outlook REST base URL.
resource_id='https://outlook.office365.com/'
api_endpoint='https://outlook.office365.com/api/v1.0'
class outlook_auth(http.Controller):
    """OpenERP (Python 2) HTTP controller driving the Outlook OAuth2 flow."""
    @http.route('/ep_ol_calendar/sync',type='json',auth='user')
    def sync_data(self, arch, fields, model, **kw):
        """Tell the client whether a refresh token exists and where to authorize."""
        users_obj=request.registry['res.users']
        is_rtoken=users_obj.is_rtoken_active(request.cr,request.uid,kw['local_context']['uid'])
        if not is_rtoken:
            return {
                'status':'need_rtoken',
                'url':'https://login.microsoftonline.com/common/oauth2/authorize?client_id=&redirect_uri=http://test.net/ep_ol_calendar/authorize&response_type=code'
            }
        # NOTE(review): the two branches are exhaustive, so the trailing
        # `return True` below is unreachable -- confirm intent.
        if is_rtoken:
            return {
                'status':'rtoken_present',
                'url':'https://login.microsoftonline.com/common/oauth2/authorize?client_id=redirect_uri=http://test.net/ep_ol_calendar/authorize&response_type=code'
            }
        return True
    @http.route('/ep_ol_calendar/authorize', type='http', auth="none")
    def authorize(self, **kw):
        """OAuth2 redirect endpoint: exchange the code for tokens, fetch events."""
        redirect_uri = 'http://test.net/ep_ol_calendar/authorize'
        # registry = openerp.modules.registry.RegistryManager.get('test')
        registry=request.registry
        cr=request.cr
        uid=request.session.uid
        context=request.context
        print "\nCurrent User %s \n" %uid
        rtoken=registry.get('res.users').is_rtoken_active(cr,uid,uid)
        print "DB refresh token %s" %rtoken
        access_token_json=False
        if not rtoken:
            # First-time authorization: exchange the auth code for tokens.
            refresh_token_dict=registry.get('outlook.service').send_request(cr,uid,kw.get('code'),redirect_uri)
            refresh_token=refresh_token_dict.get('refresh_token')
            rtoken=refresh_token.encode('utf-8','ignore')
            access_token=registry.get('outlook.service').get_access_token_from_refresh_token(cr,uid,rtoken,resource_id)
            access_token_json=access_token.json()
            registry.get('outlook.service').set_all_tokens(cr,uid,access_token_json,context)
        else:
            # Refresh path: mint a new access token from the stored refresh token.
            # NOTE(review): the bare except leaves `access_token` unbound on
            # failure, so the status_code check below would raise NameError.
            try:
                access_token=registry.get('outlook.service').get_access_token_from_refresh_token(cr,uid,rtoken,resource_id)
            except:
                print "Could not process request"
            if access_token.status_code != 200:
                print "Refresh token may have expired",rtoken
            else:
                access_token_json=access_token.json()
                registry.get('outlook.service').set_access_token(cr,uid,access_token_json,context)
        if access_token_json:
            token=access_token_json['access_token']
            # Sample event payload kept for the commented-out create-event call.
            a={
                "Subject": "Created through API",
                "Body": {
                    "ContentType": "HTML",
                    "Content": "I think it will meet our requirements!"
                },
                "Start": "2016-02-02T18:00:00-08:00",
                "StartTimeZone": "Pacific Standard Time",
                "End": "2016-02-02T19:00:00-08:00",
                "EndTimeZone": "Pacific Standard Time",
                # "Attendees": [
                #     {
                #         "EmailAddress": {
                #             "Address": "shawn@test.com",
                #             "Name": "Shawn 123"
                #         },
                #         "Type": "Required"
                #     }
                # ]
            }
            calendar=registry.get('outlook.service').get_events(cr,uid,api_endpoint,token,'startdatetime=2016-02-01T20:00:00.000Z&enddatetime=2016-02-29T20:00:00.000Z')
            # calendar=registry.get('outlook.service').get_events(cr,uid,api_endpoint,token,str(a))
            print "Calendar \n %s" %calendar
        else:
            print "Access token missing"
        return werkzeug.utils.redirect(url_return)
13,948 | fb2114239ffa16578cdbfb61eac7c3d5a500beae | """
70. 単語ベクトルの和による特徴量Permalink
問題50で構築した学習データ,検証データ,評価データを行列・ベクトルに変換したい.
例えば,学習データについて,すべての事例xiの特徴ベクトルxiを並べた行列Xと,正解ラベルを並べた行列(ベクトル)Yを作成したい.
X=⎛⎝⎜⎜⎜⎜x1x2…xn⎞⎠⎟⎟⎟⎟∈ℝn×d,Y=⎛⎝⎜⎜⎜⎜y1y2…yn⎞⎠⎟⎟⎟⎟∈ℕn
ここで,nは学習データの事例数であり,xi∈ℝdとyi∈ℕはそれぞれ,i∈{1,…,n}番目の事例の特徴量ベクトルと正解ラベルを表す.
なお,今回は「ビジネス」「科学技術」「エンターテイメント」「健康」の4カテゴリ分類である.
ℕ<4で4未満の自然数(0を含む)を表すことにすれば,任意の事例の正解ラベルyiはyi∈ℕ<4で表現できる.
以降では,ラベルの種類数をLで表す(今回の分類タスクではL=4である).
i番目の事例の特徴ベクトルxiは,次式で求める.
xi=1Ti∑t=1Tiemb(wi,t)
ここで,i番目の事例はTi個の(記事見出しの)単語列(wi,1,wi,2,…,wi,Ti)から構成され,
emb(w)∈ℝdは単語wに対応する単語ベクトル(次元数はd)である.
すなわち,i番目の事例の記事見出しを,その見出しに含まれる単語のベクトルの平均で表現したものがxiである.
今回は単語ベクトルとして,問題60でダウンロードしたものを用いればよい.
300次元の単語ベクトルを用いたので,d=300である.
i番目の事例のラベルyiは,次のように定義する.
yi=⎧⎩⎨⎪⎪0123(記事xiが「ビジネス」カテゴリの場合)(記事xiが「科学技術」カテゴリの場合)(記事xiが「エンターテイメント」カテゴリの場合)(記事xiが「健康」カテゴリの場合)
なお,カテゴリ名とラベルの番号が一対一で対応付いていれば,上式の通りの対応付けでなくてもよい.
以上の仕様に基づき,以下の行列・ベクトルを作成し,ファイルに保存せよ.
学習データの特徴量行列: Xtrain∈ℝNt×d
学習データのラベルベクトル: Ytrain∈ℕNt
検証データの特徴量行列: Xvalid∈ℝNv×d
検証データのラベルベクトル: Yvalid∈ℕNv
評価データの特徴量行列: Xtest∈ℝNe×d
評価データのラベルベクトル: Ytest∈ℕNe
なお,Nt,Nv,Neはそれぞれ,学習データの事例数,検証データの事例数,評価データの事例数である.
"""
import csv
import pickle
import re
import string
import numpy as np
from nltk.tokenize import word_tokenize
# TSV splits produced in chapter06 and the .npy output file stems.
train_file = "../chapter06/train.csv"
valid_file = "../chapter06/valid.csv"
test_file = "../chapter06/test.csv"
X_train_file = "X_train"
Y_train_file = "Y_train"
X_valid_file = "X_valid"
Y_valid_file = "Y_valid"
X_test_file = "X_test"
Y_test_file = "Y_test"
# Pre-trained word2vec model pickled in chapter07 (loaded at import time).
model_file = "../chapter07/model.sav"
with open(model_file, "rb") as file_model:
    model = pickle.load(file_model)
def preprocessing(text):
    """Normalise a headline and split it into tokens.

    Punctuation is mapped to spaces, everything is lower-cased, each run
    of digits collapses to "0", and the result is tokenised with NLTK.
    """
    punct = string.punctuation
    # str.maketrans requires equal-length from/to strings for replacement.
    table = str.maketrans(punct, ' ' * len(punct))
    text = text.translate(table).lower()
    text = re.sub("[0-9]+", "0", text)
    return word_tokenize(text)
def to_vec(file_name, X_file, Y_file):
    """Build the averaged-word-vector feature matrix and label vector.

    Reads the TSV at `file_name` (header row skipped; column 0 is the
    headline, column 1 the category letter), averages the word2vec vectors
    of each headline's in-vocabulary tokens, and saves X and Y via np.save.
    """
    with open(file_name) as f:
        reader = csv.reader(f, delimiter="\t")
        rows = [row for row in reader]
    rows = rows[1:]  # drop the header line
    category = ["b", "t", "e", "m"]  # label order: business, tech, entertainment, health
    x = []
    y = []
    for row in rows:
        y.append(category.index(row[1]))
        words = preprocessing(row[0])
        # Renamed from `sum`, which shadowed the builtin.
        total = 0
        t = 0
        for word in words:
            try:
                total += model[word]
                t += 1
            except KeyError:
                # Skip words absent from the word2vec vocabulary.
                continue
        # NOTE(review): raises ZeroDivisionError when no token is in the
        # model vocabulary -- unchanged from the original; confirm inputs.
        x.append(total / t)
    print(len(x))
    np.set_printoptions(precision=10, suppress=True, linewidth=5000)
    np.save(X_file, x)
    np.save(Y_file, y)
if __name__ == "__main__":
    # Vectorise all three splits.
    to_vec(train_file, X_train_file, Y_train_file)
    to_vec(valid_file, X_valid_file, Y_valid_file)
    to_vec(test_file, X_test_file, Y_test_file)
13,949 | 1563ca18bc4f5b5425e3806ec9b720c2325351af | import random
import hashlib
import time
import socket
def tHeader(tgt):
    """Search for a sha256 hex digest whose integer value is below `tgt`.

    NOTE(review): `start` is recorded but never used. For tgt = 10**10
    against a 256-bit hash the expected number of attempts is astronomical,
    so in practice this loop does not terminate -- confirm the intended
    target magnitude.
    """
    start = time.time()
    # Seed with the hash of a random 64-bit number.
    t = str(random.getrandbits(64)).encode('utf-8')
    val = hashlib.sha256(t).hexdigest()
    nonce = 0
    # Re-hash (previous digest value + nonce) until it drops below target.
    while int(val,16)>int(tgt,16):
        temp = int(val,16) + nonce
        val = hashlib.sha256((hex(temp)).encode('utf-8')).hexdigest()
        nonce+=1
    return val
# Proof-of-work target as a hex string.
tgt = hex(10**10)

# Serve the computed digest to each client that connects.
s = socket.socket()
host = socket.gethostname()
port = 5005
s.bind((host, port))
s.listen(5)
while True:
    c, addr = s.accept()
    val = tHeader(tgt)
    # Bug fix: socket.send() requires bytes; passing an int raised
    # TypeError. Send the hex digest encoded as UTF-8 text instead.
    c.send(val.encode('utf-8'))
    c.close()
13,950 | b6eaff509c685fc390f3ba50b4856e9916df8c2a | import pygame
pygame.init()
class Adjacent:
    """Wrapper for a neighbouring square; caches its A* objective distance."""
    def __init__(self,square):
        self.square = square
        self.astar_distance = square.objective_distance
13,951 | 3b28b5bfe5b12747fa91bc3752e72db3ff882d99 | from . import all_et
from . import all_el
|
13,952 | 7619c67841e3186ce311624e01301e53be20ad16 | import cnn
from PIL import Image
def main():
    """Classify one demo image with the bundled CNN and print the result."""
    # Photo to analyse:
    path = 'pics/cat_man.jpg'
    img = Image.open(path)
    # Load the neural network:
    ml = cnn.load_cnn_model()
    # Run the prediction on the photo:
    result = cnn.img_analyze(img, ml)
    print(result)
if __name__ == '__main__':
    main()
|
13,953 | a6e91f97f77919bb5486898adba79321997b78fb | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class StudyAccountInfo(object):
    """Value object for a study account: Alipay card number, card type, study id.

    Mirrors the Alipay SDK convention: private backing fields exposed via
    properties, plus to/from dict converters for the wire format.
    """
    # Keys serialised by to_alipay_dict / from_alipay_dict, in wire order.
    _FIELDS = ('alipay_card_no', 'card_type', 'study_id')

    def __init__(self):
        self._alipay_card_no = None
        self._card_type = None
        self._study_id = None

    @property
    def alipay_card_no(self):
        return self._alipay_card_no

    @alipay_card_no.setter
    def alipay_card_no(self, value):
        self._alipay_card_no = value

    @property
    def card_type(self):
        return self._card_type

    @card_type.setter
    def card_type(self, value):
        self._card_type = value

    @property
    def study_id(self):
        return self._study_id

    @study_id.setter
    def study_id(self, value):
        self._study_id = value

    def to_alipay_dict(self):
        """Serialise the truthy attributes into a plain dict."""
        params = dict()
        for key in self._FIELDS:
            value = getattr(self, key)
            if value:
                # Nested SDK objects serialise themselves.
                if hasattr(value, 'to_alipay_dict'):
                    params[key] = value.to_alipay_dict()
                else:
                    params[key] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Rebuild a StudyAccountInfo from a dict; returns None for falsy input."""
        if not d:
            return None
        o = StudyAccountInfo()
        for key in StudyAccountInfo._FIELDS:
            if key in d:
                setattr(o, key, d[key])
        return o
|
13,954 | 9cb2c4ca2bc5955cfc390a43b4fdb08082f18785 | from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class BasicDetails(models.Model):
    """Primary household/citizen record, keyed by a 12-character Auth_Id.

    NOTE(review): max_length on a TextField is not enforced at the DB
    level -- confirm whether CharField(max_length=12) was intended for
    Auth_Id. The `User` field name also shadows the imported User class
    inside this class body.
    """
    Auth_Id = models.TextField(primary_key=True,max_length=12)
    User = models.OneToOneField(User,on_delete=models.CASCADE,null=True)
    Asha_Worker = models.CharField(max_length=50,null=True)
    Name = models.CharField(max_length=50)
    Gender = models.CharField(max_length=5,null=True)
    Address = models.CharField(max_length=100)
    State = models.CharField(max_length=50)
    Distrct = models.CharField(max_length=50)
    Pan_Mun = models.CharField(max_length=50)
    Ward = models.CharField(max_length=50)
    Phone = models.CharField(max_length=15)
    Email = models.CharField(max_length=50)
    Status = models.CharField(max_length=5,default='Tobit')
    Created_Date = models.DateField()
class FamilyMembers(models.Model):
    """A member of the household identified by Auth_Id."""
    Auth_Id = models.ForeignKey(BasicDetails,on_delete=models.CASCADE)
    Name = models.CharField(max_length=50)
    Gender = models.CharField(max_length=5)
    Relationship = models.CharField(max_length=15)
    Marital = models.CharField(max_length=10,null=True)
    Phone = models.CharField(max_length=15)
    Status = models.CharField(max_length=10,null=True)
class Baby(models.Model):
    """A child registered under the household's Auth_Id."""
    Auth_Id = models.ForeignKey(BasicDetails,on_delete=models.CASCADE,null=True)
    Name = models.CharField(max_length=50)
    Gender = models.CharField(max_length=5)
    DOB = models.DateField()
class User_Pregnancy(models.Model):
    """Pregnancy registration for a household member (LMP = last menstrual period)."""
    Auth_Id = models.ForeignKey(BasicDetails, on_delete=models.CASCADE)
    Name = models.CharField(max_length=50)
    LMP = models.DateField()
    Weeks = models.CharField(max_length=5)
    Blood = models.CharField(max_length=5)
    Weight = models.CharField(max_length=10)
    Health = models.CharField(max_length=30)
    Hospital = models.CharField(max_length=25)
    Doctor = models.CharField(max_length=30)
    Mobile = models.CharField(max_length=15)
    Register_Date = models.DateField()
class User_Palliative(models.Model):
    """Palliative-care registration for a household member."""
    Auth_Id = models.ForeignKey(BasicDetails, on_delete=models.CASCADE)
    Name = models.CharField(max_length=50)
    DOB = models.DateField()
    Mobile = models.CharField(max_length=15)
    Issue = models.CharField(max_length=20)
    Comments = models.CharField(max_length=35)
    Register_Date = models.DateField()
class User_Asha_Notifications(models.Model):
    """Notification sent by an ASHA worker (`Owner`) to users."""
    Owner = models.CharField(max_length=20)
    Title = models.CharField(max_length=20)
    Message = models.TextField()
    Date = models.DateTimeField()
13,955 | 27e8ad56a7a416b1e1bd2c9c120f7a8243e9f9fb | zbior = set()
# Collect integers from the user until "w" (wyjdz/exit) is entered.
while True:
    komenda = input("Podaj liczbę, ewentualnie [w]yjdz")
    if komenda == "w":
        break
    zbior.add(int(komenda))

# Keep only the even numbers not exceeding 100.
zbior2 = {liczba for liczba in zbior if liczba % 2 == 0 and liczba < 101}

# zbior2 is a subset of zbior, so the intersection equals zbior2.
zbior3 = zbior & zbior2
print(zbior3)
print(len(zbior3))
13,956 | ac94d8e9023e8245c6d0d83b39fe99dacb1dd39c | #find largest among 3 numbers
a=int(input("enter the number a:"))
b=int(input("enter the number b:"))
c=int(input("enter the number c:"))
if(a>b):
print("a is largest number")
elif(b>c):
print("b is largest number")
else:
print("c is largest number") |
13,957 | 8fc9f145ef20ca90a6364612c108bd4fe25d93ee | from setuptools import setup
setup(
    name='pythonml',
    version='1.0',
    description='Learning ML along with Python',
    author='Alex Kolomiytsev',
    author_email='alexander.kolomiytsev@gmail.com',
    # Bug fix: `packages` must be a list of package names; a bare string
    # is iterated character-by-character by setuptools/distutils.
    packages=['pythonml'],
    install_requires=['numpy', 'sklearn', 'scipy']
)
13,958 | ceb8d66188d758a57a465f2122e02487ecac0f1e | #/usr/bin/python
import time
from sms_reader import SmsReader
reader = SmsReader()
# NOTE(review): `global` at module level is a no-op; `messages` is only
# ever rebound inside the loop below -- confirm nothing imports it.
global messages
if __name__ == "__main__":
    # Poll the modem once a second and echo any newly received SMS.
    while True:
        reader.update()
        if len(reader.new_sms) > 0:
            messages = reader.new_sms
            print messages
            # Clear the inbox buffer so messages are only reported once.
            reader.new_sms = []
        time.sleep(1)
13,959 | c3b92252cf3d7a9cc015915d3cad36730ea4c14e | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
from alipay.aop.api.domain.ShopSummaryQueryResponse import ShopSummaryQueryResponse
class AlipayOfflineMarketShopSummaryBatchqueryResponse(AlipayResponse):
    """SDK response wrapper for the paged offline-market shop summary query.

    Exposes pagination metadata plus a list of ShopSummaryQueryResponse
    entries parsed from the raw response payload.
    """
    def __init__(self):
        super(AlipayOfflineMarketShopSummaryBatchqueryResponse, self).__init__()
        self._current_page_no = None
        self._page_size = None
        self._shop_summary_infos = None
        self._total_items = None
        self._total_page_no = None
    @property
    def current_page_no(self):
        return self._current_page_no
    @current_page_no.setter
    def current_page_no(self, value):
        self._current_page_no = value
    @property
    def page_size(self):
        return self._page_size
    @page_size.setter
    def page_size(self, value):
        self._page_size = value
    @property
    def shop_summary_infos(self):
        return self._shop_summary_infos
    @shop_summary_infos.setter
    def shop_summary_infos(self, value):
        # Accepts a list of ShopSummaryQueryResponse objects or raw dicts;
        # dicts are converted via from_alipay_dict. Non-list input is ignored.
        if isinstance(value, list):
            self._shop_summary_infos = list()
            for i in value:
                if isinstance(i, ShopSummaryQueryResponse):
                    self._shop_summary_infos.append(i)
                else:
                    self._shop_summary_infos.append(ShopSummaryQueryResponse.from_alipay_dict(i))
    @property
    def total_items(self):
        return self._total_items
    @total_items.setter
    def total_items(self, value):
        self._total_items = value
    @property
    def total_page_no(self):
        return self._total_page_no
    @total_page_no.setter
    def total_page_no(self, value):
        self._total_page_no = value
    def parse_response_content(self, response_content):
        """Populate attributes from the parsed response dict (keys optional)."""
        response = super(AlipayOfflineMarketShopSummaryBatchqueryResponse, self).parse_response_content(response_content)
        if 'current_page_no' in response:
            self.current_page_no = response['current_page_no']
        if 'page_size' in response:
            self.page_size = response['page_size']
        if 'shop_summary_infos' in response:
            self.shop_summary_infos = response['shop_summary_infos']
        if 'total_items' in response:
            self.total_items = response['total_items']
        if 'total_page_no' in response:
            self.total_page_no = response['total_page_no']
|
13,960 | 8c519d15b9079ae84cd8757de2a9e1d0b921378f | """
Evaluate the value of an arithmetic expression in Reverse Polish Notation.
Valid operators are +, -, *, /. Each operand may be an integer or another expression.
Note:
Division between two integers should truncate toward zero.
The given RPN expression is always valid. That means the expression would always evaluate to a result and there won't be any divide by zero operation.
Algorithm:
1) Keep ading operands to stack until a operator exists
2) secondNum is first popped and firstNum is the next popped
3) Compute using operator and return back into stack
4) At end, should have one num, return that num
Result:
Runtime: 84 ms, faster than 30.68% of Python3 online submissions for Evaluate Reverse Polish Notation.
Memory Usage: 14.2 MB, less than 26.76% of Python3 online submissions for Evaluate Reverse Polish Notation.
"""
class Solution:
    def evalRPN(self, tokens: List[str]) -> int:
        """Evaluate an arithmetic expression in Reverse Polish Notation.

        Operators are +, -, *, /; division truncates toward zero, as the
        problem statement requires. Returns 0 for an empty token list.
        """
        if not tokens:
            return 0

        operands = []

        def compute(first: int, second: int, op: str) -> int:
            # Apply one binary operator. For "/", int() on the true-division
            # result truncates toward zero directly — the original
            # re-implemented this with floor-division sign fix-ups.
            if op == "+":
                return first + second
            if op == "-":
                return first - second
            if op == "*":
                return first * second
            return int(first / second)

        for token in tokens:
            try:
                # Operand (handles negatives like "-11").
                operands.append(int(token))
            except ValueError:
                # Operator: pop the two most recent operands, push result.
                second = operands.pop()
                first = operands.pop()
                operands.append(compute(first, second, token))
        return operands.pop()
13,961 | 82d9c32f5fc9e176a2b1d51f335cb8fe2e0c49a7 | #!/usr/bin/env python3
import colors as c
# Clear the screen, print a green greeting, then a red sample line.
print(c.clear)
print(c.green + 'Welcome to Blakes Note Pad!')
print(c.red + '#!/usr/bin/env python3')
|
13,962 | 54762fdb35cb386ce6eef0d902219de4fb53447c | #!/usr/bin/python3
# https://practice.geeksforgeeks.org/problems/right-view-of-binary-tree/1
class Node:
    """A binary-tree node holding `data` plus optional left/right children."""
    def __init__(self, data):
        self.data = data
        self.left = None
        self.right = None
def inOrder(root):
    """Print the tree's values in in-order (left, root, right) on one line."""
    if root == None:
        return
    inOrder(root.left)
    print(root.data, end=" ")
    inOrder(root.right)
def getRightViewSum(root):
    """Sum of the node values visible in the right view of the binary tree."""
    level_to_node = {}
    getSum(root, 0, level_to_node)
    return sum(level_to_node.values())


def getSum(root, level, h):
    """
    Record, for every level, the value of the last node visited on that
    level. Since the right subtree is traversed after the left, each map
    entry ends up holding the rightmost node of its level.
    O(n) time; O(height) extra space for the recursion and the map.
    """
    if root is None:
        return
    h[level] = root.data
    getSum(root.left, level + 1, h)
    getSum(root.right, level + 1, h)
def getSum2(root, level=0, maxLevel=None, sum=None):
    """
    Right-view sum without the per-level map: `maxLevel` and `sum` are
    single-element lists used as mutable references shared across the
    recursion. The right child is visited first, so the first node reached
    on each new level is that level's rightmost node, and its value is
    added exactly once. Returns the total only at the top call (level 0);
    recursive calls return None.

    NOTE(review): the parameter `sum` shadows the builtin of the same
    name inside this function.
    """
    if root == None:
        return 0
    if maxLevel == None:
        # Top-level call: create the shared accumulators.
        maxLevel = [-1]
        sum = [0]
    if maxLevel[0] < level:
        # First visit to this level == rightmost node of the level.
        sum[0] += root.data
        maxLevel[0] = level
    getSum2(root.right, level+1, maxLevel, sum)
    getSum2(root.left , level+1, maxLevel, sum)
    if level == 0:
        return sum[0]
# Build the sample tree: node with value v lives at nodes[v] (1-indexed).
nodes = [None]
for v in range(1, 9):
    nodes.append(Node(v))
nodes[1].left = nodes[2]
nodes[1].right = nodes[3]
nodes[2].left = nodes[4]
nodes[2].right = nodes[5]
nodes[3].left = nodes[6]
nodes[3].right = nodes[7]
nodes[4].right = nodes[8]
#inOrder(nodes[1])
# Both approaches should print the same right-view sum.
print(getRightViewSum(nodes[1]))
print(getSum2(nodes[1]))
13,963 | 4396b971a0c5c3fdee54d5b60977b83bb5f9c6b6 |
''' Purpose: print a requested phrase a requested number of times
'''
# get phrase to be repeated
reply = input( "Enter phrase to be printed: " )
phrase = reply.strip()
# get number of times to repeat the phrase
reply = input( "Enter number of times to print the phrase: " )
nbr_of_times = int( reply )
# print the phrase the right number of times
for i in range( 0 , nbr_of_times):
print(phrase)
|
13,964 | fa0cd8f25090122a968f0e43dcc2c66dd79c6d0c | #!/usr/bin/env python
# -*- coding:utf-8 -*-
# coding by xiaoming
'''
当你的才华还撑不起你的野心时,那你就应该静下心来学习
当你的能力还驾驭不了你的目标时,那就应该沉下心来历练
'''
'''
random()
获取0~1之间的随机小数包含0不包含1
格式:random.random()
返回值:浮点数
choice()
随机获取列表中的值
格式:random.choice(序列)
返回值:序列中的某个值
shuffle()
随机打乱序列
格式:random.shuffle(序列)
返回值:打乱顺序的序列
randrange()
    获取指定范围内指定间隔的随机整数
格式:random.randrange(开始值,结束值[,间隔值])
返回值:整数
uniform()
随机获取指定范围内的所有数值包括小数
格式:random.uniform(开始值,结束值)
返回值:随机返回范围内的所有数值(浮点型)
'''
|
13,965 | 7f09c6dbf0716d3df7e2a9a9f634b38fd7b7e120 | from itertools import permutations
def char_has_twin(index: int, string: str) -> bool:
    """True when the character at *index* equals its right-hand neighbour."""
    return index < len(string) - 1 and string[index] == string[index + 1]


def str_has_twins(string: str) -> bool:
    """True when the sequence contains any adjacent identical characters."""
    return any(char_has_twin(i, string) for i in range(len(string)))


def perm_alone(string: str) -> int:
    """Count permutations of *string* having no adjacent repeated characters.

    Repeated input letters produce duplicate permutations, which are all
    counted individually (freeCodeCamp "No Repeats Please" semantics).
    """
    return sum(1 for perm in permutations(string) if not str_has_twins(perm))
|
13,966 | 8b2bdaabae9703abddf94969328cc143d25b8145 | from selenium import webdriver
from selenium.webdriver.common.keys import Keys
# Open the 2048 game and send key presses to the page root.
browser = webdriver.Firefox()
browser.get('https://gabrielecirulli.github.io/2048/')
htmlElem = browser.find_element_by_tag_name('html')

while True:
    # Naive strategy: cycle the four moves until the game ends.
    htmlElem.send_keys(Keys.UP)
    htmlElem.send_keys(Keys.RIGHT)
    htmlElem.send_keys(Keys.DOWN)
    htmlElem.send_keys(Keys.LEFT)
    try:
        gameOverElem = browser.find_element_by_class_name('game-over')
        score = browser.find_element_by_class_name('score-container').text
        #score string is multi line, we want the first line (the score)
        print("Score: %s" % score.partition('\n')[0])
        browser.quit()
        break
    except Exception:
        # Bug fix: the bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt, making the script impossible to Ctrl-C.
        # Until the game-over overlay exists, the lookup raises and we
        # simply keep playing.
        continue
13,967 | e6412ab9efcf524c13eb92caacded28bc8af8ba4 | from rest_framework import serializers
from .models import AdvertPost, Services, StudioProfile, CreativeProfile, Review, Booking, User
from django import forms
from django.contrib.auth import authenticate
# Studio
class UserSerializer(serializers.ModelSerializer):
    """User representation shared by the studio/creative serializers.

    NOTE(review): a second UserSerializer is declared later in this module
    and shadows this one -- confirm which definition is intended.
    NOTE(review): forms.EmailField is a Django *form* field, not a DRF
    serializer field, so DRF does not use it -- was serializers.EmailField
    intended?
    """
    email = forms.EmailField(max_length=254, help_text="Required. Enter a valid email address.")
    class Meta:
        model = User
        fields = ["email", "username", "password", "user_type", "token"]
class AdvertPostSerializer(serializers.ModelSerializer):
    """Serialises AdvertPost rows with every model field included.

    NOTE(review): this assigns the serializer *class*, not an instance
    (`UserSerializer()`), so DRF does not treat studio_id as a nested
    field -- confirm intent.
    """
    studio_id = UserSerializer
    class Meta:
        model = AdvertPost
        fields = "__all__"
class ServicesSerializer(serializers.ModelSerializer):
    """Serialises Services rows with every model field included."""
    class Meta:
        model = Services
        fields = "__all__"
class StudioProfileSerializer(serializers.ModelSerializer):
    """Serialises StudioProfile rows with every model field included.

    NOTE(review): the three class-level names below assign serializer
    *classes*, not instances, so DRF ignores them as nested fields --
    confirm whether `UserSerializer()` etc. was intended.
    """
    studio_id = UserSerializer
    service_provided = ServicesSerializer
    advert_photos = AdvertPostSerializer
    class Meta:
        model = StudioProfile
        fields = "__all__"
# Creatives
# serializer
class CreativeProfileSerializer(serializers.ModelSerializer):
    """Serialiser for creatives.

    NOTE(review): Meta.model is User although the class name (and the
    imported CreativeProfile model) suggest model = CreativeProfile was
    intended -- confirm.
    """
    creative_id = UserSerializer
    class Meta:
        model = User
        fields = "__all__"
class ReviewSerializer(serializers.ModelSerializer):
    """Serialises Review rows with every model field included."""
    creative_id = UserSerializer
    class Meta:
        model = Review
        fields = "__all__"
class BookingSerializer(serializers.ModelSerializer):
    """Serialises Booking rows with every model field included."""
    creative_id = UserSerializer
    class Meta:
        model = Booking
        fields = "__all__"
class RegistrationSerializer(serializers.ModelSerializer):
    """Serializers registration requests and creates a new user."""
    # Ensure passwords are at least 8 characters long, no longer than 128
    # characters, and can not be read by the client.
    password = serializers.CharField(max_length=128, min_length=8, write_only=True)
    # The client should not be able to send a token along with a registration
    # request. Making `token` read-only handles that for us.
    token = serializers.CharField(max_length=255, read_only=True)
    class Meta:
        model = User
        # List all of the fields that could possibly be included in a request
        # or response, including fields specified explicitly above.
        fields = ["email", "username", "password", "user_type", "token"]
    def create(self, validated_data):
        # Delegate to the custom manager's create_user so password hashing
        # and defaults are applied consistently.
        return User.objects.create_user(**validated_data)
class LoginSerializer(serializers.Serializer):
    """Validates an email/password pair and returns the auth payload
    (email, username, user_type, token) for the matching active user."""
    email = serializers.CharField(max_length=255)
    username = serializers.CharField(max_length=255, read_only=True)
    password = serializers.CharField(max_length=128, write_only=True)
    token = serializers.CharField(max_length=255, read_only=True)
    user_type = serializers.IntegerField(read_only=True)

    def validate(self, data):
        # The `validate` method is where we make sure that the current
        # instance of `LoginSerializer` has "valid". In the case of logging a
        # user in, this means validating that they've provided an email
        # and password and that this combination matches one of the users in
        # our database.
        email = data.get("email", None)
        password = data.get("password", None)
        # NOTE(review): `user_type` is declared read_only above, so it is
        # never present in `data` and this is always None -- confirm the
        # authenticate backend tolerates user_type=None.
        user_type = data.get("user_type", None)
        if email is None:
            raise serializers.ValidationError("An email address is required to log in.")
        if password is None:
            raise serializers.ValidationError("A password is required to log in.")
        # The email is passed as `username`; the auth backend is expected to
        # look users up by email.
        user = authenticate(username=email, password=password, user_type=user_type)
        if user is None:
            raise serializers.ValidationError("A user with this email and password was not found.")
        # Django provides a flag on our `User` model called `is_active` to check if user is deactivated.
        if not user.is_active:
            raise serializers.ValidationError("This user has been deactivated.")
        return {"email": user.email, "username": user.username, "user_type": user.user_type, "token": user.token}
class UserSerializer(serializers.ModelSerializer):
    """Handles serialization of User objects.

    NOTE(review): this redefines the `UserSerializer` declared earlier in
    this module and shadows it for any later import -- confirm which
    definition the nested serializers above are meant to use.
    """
    password = serializers.CharField(max_length=128, min_length=8, write_only=True)

    class Meta:
        model = User
        fields = (
            "email",
            "username",
            "password",
            "token",
        )
        read_only_fields = ("token",)

    def update(self, instance, validated_data):
        """Performs an update on a User."""
        # Pull the password out first so it goes through set_password
        # (hashing) instead of being written to the model as plain text.
        password = validated_data.pop("password", None)
        for (key, value) in validated_data.items():
            setattr(instance, key, value)
        if password is not None:
            instance.set_password(password)
        instance.save()
        return instance
|
13,968 | 4a7e81f4acd67b33a024e8d1553c8890c1958bd5 | import math
def raizCuadrada(listaNumeros):
    """
    Return a list with the square root of every numeric element
    of the list passed as a parameter.

    >>> lista=[]
    >>> for i in [4, 9, 16]:
    ...     lista.append(i)
    >>> raizCuadrada(lista)
    [2.0, 3.0, 4.0]
    """
    return list(map(math.sqrt, listaNumeros))
#print(raizCuadrada([9,16,25,36]))
import doctest
doctest.testmod() |
13,969 | 56da46b4deaaafc1e6385cad6af11ac4c42f3e02 | # AST443 Lab 0 Data Analysis for part 4.3
# September 7, 2018
# Imports
import numpy as np
import matplotlib
matplotlib.use('TkAgg')
import matplotlib.pyplot as plt
from astropy.io import fits
from scipy import stats
from scipy.stats import norm
# Load Lab constants
# inputs.txt holds comma-separated key,value pairs; '#' lines are comments.
info = {}
for line in open('inputs.txt'):
    li = line.strip()
    if not li.startswith("#"):
        data = [x.strip() for x in line.split(',')]
        info[data[0]] = data[1]
# Open Files
# names.txt lists one flat-field FITS filename per line.
flat_dir = info['dataDir'] + info['flatSubdir']
files = open(flat_dir + 'names.txt', 'r')
flats = []
for line in files:
    flats.append(fits.open(flat_dir + line.strip('\n')))
flats_data = []
for flat in flats:
    flats_data.append(flat[0].data)
# Average the flats and remove the bias
# Pixel-wise median across the stack rejects outliers/cosmic rays.
flat_avg = np.median(flats_data, axis=0)
flat_avg_mode = stats.mode(flat_avg.flatten())[0][0]
flat_avg_shape = flat_avg.shape
flat_avgnorm = np.zeros(flat_avg_shape)
# Normalize each pixel by the modal value of the median flat.
for column in range(flat_avg_shape[1]):
    for row in range(flat_avg_shape[0]):
        flat_avgnorm[row][column] = flat_avg[row][column] / flat_avg_mode
# Save the Master flat
master_write = fits.PrimaryHDU(flat_avgnorm)
master_write.writeto(info['fitsSubdir'] + info['masterFlat'])
# Histogram of counts in the master flat field
flat_avgnorm_flat = flat_avgnorm.flatten()
flat_avgnorm_mean = np.mean(flat_avgnorm_flat)
flat_avgnorm_median = np.median(flat_avgnorm_flat)
flat_avgnorm_mode = stats.mode(flat_avgnorm_flat)[0][0]
flat_avgnorm_stddev = np.std(flat_avgnorm_flat)
fig, ax = plt.subplots()
flat_avgnorm_bins = 100
flat_avgnorm_min = min(flat_avgnorm_flat)
flat_avgnorm_max = max(flat_avgnorm_flat)
# Scale factor so the Gaussian overlay matches the histogram counts
# (bin width times the number of samples).
flat_avgnorm_norm = ((flat_avgnorm_max - flat_avgnorm_min)
                     / flat_avgnorm_bins * len(flat_avgnorm_flat))
xgauss = np.linspace(flat_avgnorm_min, flat_avgnorm_max,
                     10 * flat_avgnorm_bins)
ygauss = flat_avgnorm_norm * norm.pdf(xgauss, loc=flat_avgnorm_mean, scale=flat_avgnorm_stddev)
# Vertical line marking the mode of the normalized flat.
xmode = [flat_avgnorm_mode] * 100
ymode = np.linspace(0, max(ygauss), len(xmode))
textstr = '\n'.join((
    'Mean=%.2f' % (flat_avgnorm_mean, ),
    'Median=%.2f' % (flat_avgnorm_median, ),
    'Mode=%.2f' % (flat_avgnorm_mode, ),
    r'$\sigma=%.2f$' % (flat_avgnorm_stddev, )))
ax.hist(flat_avgnorm_flat, bins=100, color='black')
props = dict(boxstyle='round', facecolor='white', alpha=0.5)
ax.text(0.05, 0.95, textstr, transform=ax.transAxes, fontsize=14,
        verticalalignment='top', bbox=props)
ax.set_title("Raw Data")
ax.set_yscale("log", nonposy='clip')
ax.set_xlabel('Number of Counts in a Bin')
ax.set_ylabel('Number of Bins')
ax.set_ylim([0.1, 1e6])
gauss = norm.pdf(xgauss, loc=flat_avgnorm_mean, scale=flat_avgnorm_stddev)
plt.plot(xgauss, ygauss, color="red", linewidth=1.0)
plt.plot(xmode, ymode, color="yellow", linewidth=1.0)
plt.savefig(info['images'] + 'masterFlat.pdf', ppi=300)
plt.clf()
files.close()
|
13,970 | 4cfe584bc57992df1d684de0e7c6289e914fca24 | import src.config.waypoints as wp
from src.track.track import Track
from src.ui.main_view import MainView
class App:
    """Top-level application: builds the known tracks and, unless running
    headless, opens the Tk main view."""

    def __init__(self, headless_mode=False):
        self.headless_mode = headless_mode
        # Init tracks
        self.reinvent_2018 = Track(name="Reinvent 2018", waypoints=wp.REINVENT_2018, track_width=0.5, config_name="REINVENT_2018")
        self.fumiaki_loop_2020 = Track(name="Fumiaki Loop 2020", waypoints=wp.FUMIAKI_LOOP_2020, track_width=0.5,
                                       config_name="FUMIAKI_LOOP_2020")
        self.pentagon = Track(name="Pentagon", waypoints=wp.PENTAGON, track_width=0.5, config_name="PENTAGON")
        self.default_track = self.reinvent_2018  # Default track
        # Init UI
        if not headless_mode:
            # NOTE(review): the view is opened on fumiaki_loop_2020, not on
            # self.default_track -- confirm which track is intended.
            self.view = MainView(self, self.fumiaki_loop_2020.model)
            self.view.update()
            self.view.mainloop()  # blocks until the window is closed
        # renderer = TrackRenderer(reinvent_2018)
        # renderer.render()


if __name__ == "__main__":
    app = App()
|
13,971 | 06f1ad02a1ab34fc944dd3aeb4345e7eef5bad5d | from influxdb import InfluxDBClient
from influxdb.client import InfluxDBClientError
import sched
import time
import random
import sys
def parse_args():
    """Parse command-line options for the writer.

    Returns:
        (opts, args): the optparse options object and positional arguments.
    """
    import optparse
    usage = "%prog [options] ts_relay_file_name.\nNote: requires exactly one filename"
    parser = optparse.OptionParser(usage=usage)
    parser.add_option('-H', '--HOST', type="string", default="localhost",
                      help="Database connection host")
    parser.add_option('-P', '--PORT', type="int", default=8094,
                      help="Database connection port number")
    parser.add_option('-u', '--user', type="string", default="root",
                      help="User name for writing to the database")
    parser.add_option('-p', '--password', type="string", default="root",
                      help="User password for writing to the database")
    parser.add_option('-d', '--dbname', type="string", default="thedb",
                      help="database name")
    parser.add_option('--time_operations', default=False, action="store_true",
                      help="record and periodically summarize and emit timing data")
    opts, args = parser.parse_args()
    return opts, args
def get_client(opts):
    """Build an InfluxDB client from parsed options and make sure the
    target database exists.

    Returns:
        InfluxDBClient connected per `opts`.
    """
    client = InfluxDBClient(opts.HOST, opts.PORT,
                            opts.user, opts.password,
                            opts.dbname)
    try:
        client.create_database(opts.dbname)
    except InfluxDBClientError:
        # Fixed: `print >> sys.stderr` is Python 2-only syntax (a
        # SyntaxError under Python 3); writing to stderr directly works
        # in both.
        sys.stderr.write("debug: database %s exists. Continuing\n" % opts.dbname)
    return client
def scheduled_write():
    """Write one random measurement point using the module-level `client`.

    Picks a random series name and writes (time, value) with microsecond
    precision.
    """
    t = int(time.time())
    value = float(random.randint(0, 100000))
    possible_kws = ["ixltrade:ixl", "pool:barton", "margin:average",
                    "size:alot", "type:profit"]
    # Fixed: the original indexed with randint(0, len(...)) - 1, which maps
    # both -1 and 4 onto the last element (a biased pick); random.choice is
    # uniform and idiomatic.
    kws = random.choice(possible_kws)
    ts_point = (t, value)
    json_point = [{"name": kws,
                   "columns": ["time", "value"],
                   "points": [ts_point]}]
    client.write_points_with_precision(json_point, "u")
if __name__ == '__main__':
    opts, args = parse_args()
    client = get_client(opts)
    s = sched.scheduler(time.time, time.sleep)
    # Schedule one write per second, forever: each loop iteration enters a
    # single event with a 1-second delay and run() blocks until it fires.
    while True:
        s.enter(1, 1, scheduled_write, ())
        s.run()
|
13,972 | 6a27464f24615df8679a39e810b9332e789d2e60 | import serial
# Open the serial port once; baud rate etc. are pyserial defaults.
ser = serial.Serial("COM21")
while True:
    a = input("please type your cmd here, q for quit, e for remote quit.")
    print(a)
    if a == 'q':
        # Local quit: leave the loop without notifying the device.
        break
    # Everything else (including 'e' for remote quit) is sent to the device.
    ser.write(a.encode())
|
13,973 | 191ef34ce0a07167678d0d20e1498ee11c77ffe8 | import yaml
import json
import sys
# Convert a .yaml file given on the command line to a .json file next to it.
INPUT_NAME = sys.argv[1]
if not INPUT_NAME.endswith(".yaml"):
    # Fixed typo in the message ("FIle" -> "File").
    print("File is not yaml")
    sys.exit(1)
OUTPUT_NAME = INPUT_NAME[:-5] + ".json"
print("Outputing to %s" % OUTPUT_NAME)
# Fixed: use yaml.safe_load (plain yaml.load can construct arbitrary
# objects from untrusted input) and rename the file handles so they no
# longer shadow the `input` builtin.
with open(INPUT_NAME) as src:
    with open(OUTPUT_NAME, 'w') as dst:
        o = yaml.safe_load(src)
        json.dump(o, dst, indent=4, sort_keys=True)
|
13,974 | 3623d3fdef2b1907c2de5975b4394b4a7f98194b |
from typing import get_args
class Node:
    """A single trie node: 26 child slots (a-z) and an end-of-word flag."""

    def __init__(self):
        self.nodes = [None] * 26
        self.isEndOfWord = False


class Trie:
    """Lowercase a-z trie supporting insert and exact-word search."""

    def __init__(self):
        self.root = Node()

    def get_index_of_letter(self, value):
        # Map 'a'..'z' (case-insensitively) onto 0..25.
        return ord(value.lower()) - ord('a')

    @staticmethod
    def getNode():
        # Fixed: the original declared this as an instance method without
        # `self`, so calling it on a Trie raised TypeError.  Kept the name
        # for compatibility and made it a factory staticmethod.
        return Node()

    def insert(self, value):
        """Insert `value` into the trie, creating nodes as needed."""
        # Walk letter by letter, materialising missing children.
        temp_node = self.root
        for ch in value:
            index = self.get_index_of_letter(ch)
            if temp_node.nodes[index] is None:
                temp_node.nodes[index] = Node()
            temp_node = temp_node.nodes[index]
        temp_node.isEndOfWord = True

    def search(self, value):
        """Return True iff `value` was inserted as a complete word
        (prefixes of inserted words return False)."""
        temp_node = self.root
        for ch in value:
            index = self.get_index_of_letter(ch)
            if temp_node.nodes[index] is None:
                return False
            temp_node = temp_node.nodes[index]
        return temp_node.isEndOfWord
# Demo: build a small trie; note 'glas' is only a prefix, not a word.
trie = Trie()
trie.insert(value='glasses')
trie.insert(value='glass')
trie.insert(value='chashma')
print(trie.search('glas')) |
13,975 | e37b75537a3c3ba4e82ff9279c6679f738c6bc5a | """
Robert Graham (rpgraham84@gmail.com)
Project Euler
Multiples of 3 and 5
Problem 1
If we list all the natural numbers below 10 that are multiples of 3 or 5, we get 3, 5, 6 and 9. The sum of these multiples is 23.
Find the sum of all the multiples of 3 or 5 below 1000.
Answer:
233168
"""
def problem_1():
    """Return the sum of all natural numbers below 1000 divisible by 3 or 5."""
    return sum(x for x in range(1000) if x % 3 == 0 or x % 5 == 0)
if __name__ == '__main__':
    # Print the Project Euler problem 1 answer (233168, per the docstring).
    print(problem_1())
|
13,976 | cf502f6036c3639314f62b56571e35f928fe45af | # FPL Model -- shortened
import pandas as pd
import requests, json
# requests is a python library that provides useful methods for api requests and webscraping
# Pull raw player data from the (legacy) FPL bootstrap-static endpoint.
r = requests.get('https://fantasy.premierleague.com/drf/bootstrap-static')
data = r.json()['elements']
pd.set_option('display.max_columns', 60)
pd.options.display.max_rows = 999
df = pd.DataFrame(data)
# Exploratory peeks at individual players / top scorers (REPL-style).
df[df['web_name'] == 'Firmino']
df.tail()
df[df['second_name'] == 'Alonso']
df.sort_values(by='total_points', ascending=False)[0:20]
df.columns
columns_to_keep = ['web_name', 'total_points', 'goals_scored', 'assists', 'points_per_game', 'value_form', 'form',
                   'value_season', 'now_cost', 'bonus', 'bps', 'minutes', 'selected_by_percent',
                   'code', 'ict_index', 'creativity', 'influence', 'threat', 'yellow_cards',
                   ]
df = df[columns_to_keep]
df.dtypes
# The API returns these numeric columns as strings; coerce them to numbers.
objects = ['points_per_game', 'value_form', 'form', 'value_season', 'selected_by_percent', 'ict_index',
           'creativity', 'influence', 'threat']
for i in objects:
    df[i] = pd.to_numeric(df[i])
df.dtypes
df.sort_values(by='total_points', ascending=False)
df.sort_values(by='value_form', ascending=False)
df[df['web_name'].isin(['De Bruyne', 'David Silva'])]
#################################################################################
# Adding coefficient and z-score
from scipy import stats
coef_df = df.copy(deep=True)
coef_df.describe()
# Creating coefficient weights here
# Each stat is standardized (z-score) then weighted into a composite score.
coef_df['goal_z-score'] = stats.zscore(coef_df['goals_scored'])
coef_df['goal_coef'] = coef_df['goal_z-score'] * 0.35
coef_df['assist_z-score'] = stats.zscore(coef_df['assists'])
coef_df['assist_coef'] = coef_df['assist_z-score'] * 0.20
coef_df['value-form_z-score'] = stats.zscore(coef_df['value_form'])
coef_df['value-form_coef'] = coef_df['value-form_z-score'] * 0.05
coef_df['form_z-score'] = stats.zscore(coef_df['form'])
# NOTE(review): the next lines z-score columns that are already z-scores
# (double standardization), unlike goals/assists above -- confirm intended.
coef_df['form_coef'] = stats.zscore(coef_df['form_z-score']) * 0.10
coef_df['bps_z-score'] = stats.zscore(coef_df['bps'])
coef_df['bps_coef'] = stats.zscore(coef_df['bps_z-score']) * 0.05
coef_df['ict_z-score'] = stats.zscore(coef_df['ict_index'])
coef_df['ict_coef'] = stats.zscore(coef_df['ict_z-score']) * 0.15
coef_df['total_coef'] = coef_df['goal_coef'] + coef_df['assist_coef'] + coef_df['value-form_coef'] + \
    coef_df['form_coef'] + coef_df['bps_coef'] + coef_df['ict_coef']
# With all the data
coef_df.sort_values(by='total_coef', ascending=False)
coef_df['points_per_min'] = coef_df['total_points'] / coef_df['minutes']
coef_df[coef_df['minutes'] > 90].sort_values(by='points_per_min', ascending=False)
# Consolidated view
coef_df[['web_name', 'ict_index', 'now_cost', 'total_coef', 'minutes',
         'points_per_min']].sort_values(by='total_coef', ascending=False)
coef_df['ppm/now_cost'] = coef_df['points_per_min'] / coef_df['now_cost']
coef_df[['web_name', 'ict_index', 'now_cost', 'total_coef', 'minutes',
         'points_per_min', 'ppm/now_cost']][coef_df['minutes'] > 90].sort_values(by='ppm/now_cost', ascending=False)[0:40]
# NOTE(review): the chained boolean indexing below relies on pandas index
# alignment; a single combined mask would be cleaner -- behavior unchanged.
coef_df[['web_name', 'ict_index', 'now_cost', 'total_coef', 'minutes',
         'points_per_min', 'ppm/now_cost']][coef_df['minutes'] > 90][coef_df['now_cost'] > 70]\
    .sort_values(by='ppm/now_cost', ascending=False)[0:40]
# [coef_df['minutes']>90][coef_df['now_cost']>70].sort_values(by='ppm/now_cost', ascending=False)[0:40]
|
13,977 | d7c684ff41c1b4dfeab48e42a9a3951cc1661378 | import miscut
from .scheduleimport import ScheduleImport
|
13,978 | 77037096ed17579026e14c9c3e91091baf527a13 |
import sys
sys.path.append("/netpool/work/gpu-3/users/malyalasa/New_folder")
from tensorflow.keras import layers
from tensorflow.keras import models
import tensorflow as tf
from tensorflow.keras import Model
from tensorflow.keras.regularizers import l1,l1_l2,l2
from tensorflow.python.keras.regularizers import L1
from rosbag2numpy.config import params
from rosbag2numpy.models.encoder import encoder_nw
from rosbag2numpy.losses import loss_wrapper
#from ..config import params
import os
#print(tf.__version__)
# Restrict TensorFlow to GPU index 3 on the shared machine.
os.environ['CUDA_VISIBLE_DEVICES'] = "3"
def _get_optimizer(opt_name: str = "nadam", lr: float = 0.02):
    """Return a tf.keras optimizer instance for `opt_name`.

    Args:
        opt_name: one of adam/sgd/rmsprop/adagrad/adadelta/adamax/nadam.
        lr: learning rate passed to the optimizer.

    Returns:
        A constructed optimizer; unknown names fall back to Nadam,
        matching the original if/elif ladder's default branch.
    """
    # Dict dispatch replaces the repetitive if/elif chain.
    optimizers = {
        "adam": tf.keras.optimizers.Adam,
        "sgd": tf.keras.optimizers.SGD,
        "rmsprop": tf.keras.optimizers.RMSprop,
        "adagrad": tf.keras.optimizers.Adagrad,
        "adadelta": tf.keras.optimizers.Adadelta,
        "adamax": tf.keras.optimizers.Adamax,
        "nadam": tf.keras.optimizers.Nadam,
    }
    return optimizers.get(opt_name, tf.keras.optimizers.Nadam)(learning_rate=lr)
#@tf.keras.utils.register_keras_serializable()
class CustomMaskLayer(layers.Layer):
    """Layer that masks tensor at specific locations as mentioned in binary tensor

    With the default mask only the first and last of the 25 (x, y) rows
    survive; all intermediate rows are zeroed.

    Args:
        layers (layers.Layer): keras.layers baseclass
    """

    def __init__(self, list_mask=[[1., 1.],
                                  [0., 0.],
                                  [0., 0.],
                                  [0., 0.],
                                  [0., 0.],
                                  [0., 0.],
                                  [0., 0.],
                                  [0., 0.],
                                  [0., 0.],
                                  [0., 0.],
                                  [0., 0.],
                                  [0., 0.],
                                  [0., 0.],
                                  [0., 0.],
                                  [0., 0.],
                                  [0., 0.],
                                  [0., 0.],
                                  [0., 0.],
                                  [0., 0.],
                                  [0., 0.],
                                  [0., 0.],
                                  [0., 0.],
                                  [0., 0.],
                                  [0., 0.],
                                  [1., 1.]],
                 name=None, **kwargs):
        # NOTE(review): a mutable default argument is shared across calls;
        # it is never mutated here, so behavior is unaffected.
        self.list_mask = list_mask
        super(CustomMaskLayer, self).__init__(name=name, **kwargs)

    def call(self, inputs):
        temp = inputs
        mask = tf.constant(self.list_mask, dtype=tf.float32)
        # masking with first and last co-ordinate (element-wise multiply).
        first_last_skip_conn = tf.math.multiply(mask, temp)
        output = first_last_skip_conn
        return output

    def get_config(self):
        # Expose list_mask so the layer survives model save/load.
        config = super(CustomMaskLayer, self).get_config()
        config.update({
            "list_mask": self.list_mask,
        })
        return config
def load_encoder() -> Model:
    """Load the pre-trained grid-map encoder from disk and freeze it.

    Returns:
        Model: the encoder with `trainable=False` on every layer, so its
        weights are not updated when the surrounding network trains.
    """
    path = '/netpool/work/gpu-3/users/malyalasa/New_folder/rosbag2numpy/models/encoder_nw.h5'
    loaded_encoder = tf.keras.models.load_model(filepath=path)
    for layer in loaded_encoder.layers:
        layer.trainable = False
    return loaded_encoder
def nn(full_skip=False, params=None):
    """Build the path-planning network.

    A frozen encoder turns a 1536x1536 occupancy grid into 25 (x, y)
    points, which are concatenated with track boundaries, grid
    origin/resolution, car odometry and an initial path, passed through
    two LSTMs, combined with a skip connection from the initial path, and
    reshaped into a 25x2 output path.

    Args:
        full_skip: True adds the whole initial path as a residual;
            False adds only its first/last points via CustomMaskLayer.
        params: config mapping; `lastlayer_activation` is read from it
            (must not be None).

    Returns:
        keras Model mapping the seven inputs to (encoder points, path).
    """
    # Grid Map input
    ip_gridmap = layers.Input(shape=(1536, 1536, 1))
    encoder = load_encoder()  # frozen -- see load_encoder()
    x_A = encoder(ip_gridmap)
    reshape_x_A = layers.Reshape((25, 2))(x_A)
    # Other inputs
    ip_grid_org_res = layers.Input(shape=(3,), name="Grid_origin_res")
    ip_left_bnd = layers.Input(shape=(25, 2), name="Left_boundary")
    ip_right_bnd = layers.Input(shape=(25, 2), name="Right_boundary")
    ip_car_odo = layers.Input(shape=(3,), name="Car_loc")
    ip_init_path = layers.Input(shape=(25, 2), name="Initial_path")
    ip_file_name = layers.Input(shape=(1,), name="File_name", dtype=tf.string)
    # reshaping paths to flat 50-vectors
    reshape_init_path = layers.Reshape((50,))(ip_init_path)
    reshape_left_bnd = layers.Reshape((50,))(ip_left_bnd)
    reshape_right_bnd = layers.Reshape((50,))(ip_right_bnd)
    # concatenate feature
    concat_feat = layers.concatenate([x_A, reshape_left_bnd, reshape_right_bnd, ip_grid_org_res, ip_car_odo, reshape_init_path])
    # tf.print(type(concat_feat.shape))
    # tf.print((concat_feat.shape.concatenate(1).as_list()[1:]))
    # Append a trailing axis so the feature vector becomes an LSTM sequence.
    output = layers.Reshape(target_shape=(concat_feat.shape.concatenate(1).as_list()[1:]))(concat_feat)
    output = layers.LSTM(units=25, return_sequences=True)(output)
    output = layers.LSTM(units=50)(output)
    # output = layers.Dense(50,activation='relu')(concat_feat)
    if full_skip:
        # Block 6-fs: residual connection with the whole initial path.
        output = layers.add([output, reshape_init_path])
        # output = layers.Reshape(target_shape=(output.shape.concatenate(1).as_list()[1:]))(output)
        # output = layers.LSTM(units=25,return_sequences=True)(output)
        # output = layers.LSTM(units=50)(output)
        output = layers.Dense(50, activation=params.get("lastlayer_activation"))(output)
    else:
        """
        #Implementation without CustomMaskLayer
        first_last_skip_conn = tf.constant(list_mask,dtype=tf.float32)
        # masking with first and last co-ordinate
        first_last_skip_conn = tf.math.multiply(first_last_skip_conn,ip_init_path)
        """
        # Block 6-endpoints_condition
        if full_skip == False:
            first_last_skip_conn = CustomMaskLayer()(ip_init_path)
            reshape_first_last_skip = layers.Reshape((50,))(first_last_skip_conn)
            output = layers.add([output, reshape_first_last_skip])
            output = layers.Dense(50, activation=params.get("lastlayer_activation"))(output)
        # only first point skip connection(use full_skip=none)
        else:
            # NOTE(review): `fp_list_mask` is not defined anywhere in this
            # module, so this branch (full_skip falsy but not == False,
            # e.g. None) raises NameError -- confirm / define the mask.
            first_last_skip_conn = tf.constant(fp_list_mask, dtype=tf.float32)
            # masking with first
            first_last_skip_conn = tf.math.multiply(first_last_skip_conn, ip_init_path)
            reshape_first_last_skip = layers.Reshape((50,))(first_last_skip_conn)
            output = layers.add([output, reshape_first_last_skip])
            output = layers.Dense(50, activation=params.get("lastlayer_activation"))(output)
    # output
    output = layers.Reshape((25, 2))(output)
    # concat_op_enc_cm = layers.concatenate([output,reshape_x_A],axis=1)
    nn_fun = models.Model(inputs=[ip_gridmap, ip_grid_org_res, ip_left_bnd, ip_right_bnd, ip_car_odo, ip_init_path, ip_file_name], outputs=[reshape_x_A, output])
    nn_fun.summary(line_length=120)
    # opt = _get_optimizer(params.get("optimizer"), lr=params.get("lr"))
    """
    nn_fun.compile(
        optimizer=opt,
        loss=loss_wrapper(reshape_x_A),
        loss_weights=[1]
    )
    """
    return nn_fun
#nn(full_skip=True,params=params)
if __name__ == '__main__':
    # Fixed: the original compared the *string* '__name__' to '__main__',
    # which is always False, so this block never ran.  Also pass the
    # module-level `params` so nn() can read `lastlayer_activation`.
    model = nn(full_skip=False, params=params)
    model.summary()
|
13,979 | 943abcfe93b4340db238a1f0cd7d83c89096056c | from __future__ import absolute_import
from __future__ import print_function
import numpy as np
import os
import os.path as osp
import shutil
from PIL import Image
import cv2
from .iotools import mkdir_if_missing
def read_labels(label_path, index_file):
    """Return the label character at 1-based position `index_file` from a
    16-character label file, as an int.

    Args:
        label_path: path to the label file.
        index_file: 1-based character index into the file.

    Raises:
        IOError: if `label_path` does not exist.
    """
    if not osp.exists(label_path):
        raise IOError("{} does not exist".format(label_path))
    # Fixed: the original opened the file inside a pointless while loop and
    # never closed the handle; a context manager reads once and releases it.
    with open(label_path, 'r') as f:
        labels = f.read(16)
    return int(labels[index_file - 1])
def _cp_img_to(src, dst, rank, prefix):
"""
- src: image path or tuple (for vidreid)
- dst: target directory
- rank: int, denoting ranked position, starting from 1
- prefix: string
"""
if isinstance(src, tuple) or isinstance(src, list):
dst = osp.join(dst, prefix + '_top' + str(rank).zfill(3))
mkdir_if_missing(dst)
for img_path in src:
shutil.copy(img_path, dst)
else:
dst = osp.join(dst, prefix + '_top' + str(rank).zfill(3) + '_name_' + osp.basename(src))
shutil.copy(src, dst)
def read_im(im_path):
    """Load an image, resize it to 384x128 (H, W) if needed, and return it
    as a [3, H, W] numpy array (channels first)."""
    target_h_w = (384, 128)
    arr = np.asarray(Image.open(im_path))  # shape [H, W, 3]
    if (arr.shape[0], arr.shape[1]) != target_h_w:
        # cv2.resize expects (width, height), hence the reversed tuple.
        arr = cv2.resize(arr, target_h_w[::-1], interpolation=cv2.INTER_LINEAR)
    # HWC -> CHW
    return arr.transpose(2, 0, 1)
def make_im_grid(ims, n_rows, n_cols, space, pad_val):
    """Make a grid of images with space in between.

    Args:
        ims: a list of [3, im_h, im_w] images
        n_rows: num of rows
        n_cols: num of columns
        space: the num of pixels between two images
        pad_val: scalar, or numpy array with shape [3]; the color of the space
    Returns:
        a numpy array with shape [3, H, W]
    """
    assert (ims[0].ndim == 3) and (ims[0].shape[0] == 3)
    assert len(ims) <= n_rows * n_cols
    h, w = ims[0].shape[1:]
    H = h * n_rows + space * (n_rows - 1)
    W = w * n_cols + space * (n_cols - 1)
    if isinstance(pad_val, np.ndarray):
        # reshape to [3, 1, 1] so it broadcasts over the canvas
        pad_val = pad_val.flatten()[:, np.newaxis, np.newaxis]
    canvas = (np.ones([3, H, W]) * pad_val).astype(ims[0].dtype)
    for idx, im in enumerate(ims):
        row, col = divmod(idx, n_cols)
        top = row * (h + space)
        left = col * (w + space)
        canvas[:, top:top + h, left:left + w] = im
    return canvas
def save_im(im, save_path, i):
    """Save a [3, H, W] image to `save_path + i + '.jpg'`.

    NOTE(review): the destination is built by plain string concatenation,
    so `save_path` is expected to end with a path separator (the caller in
    this module passes one) -- confirm for new call sites.
    """
    mkdir_if_missing(save_path)
    # PIL expects HWC, so move channels last before saving.
    im = im.transpose(1, 2, 0)
    Image.fromarray(im).save(save_path + i + '.jpg')
def add_border(im, border_width, value):
    """Add color border around an image. The resulting image size is not changed.

    Args:
        im: numpy array with shape [3, im_h, im_w]
        border_width: scalar, measured in pixel
        value: scalar, or numpy array with shape [3]; the color of the border
    Returns:
        a new numpy array with shape [3, im_h, im_w] (input is untouched)
    """
    assert (im.ndim == 3) and (im.shape[0] == 3)
    out = np.copy(im)
    if isinstance(value, np.ndarray):
        # reshape to [3, 1, 1] so it broadcasts over each border strip
        value = value.flatten()[:, np.newaxis, np.newaxis]
    bw = border_width
    # Paint the four border strips: top, bottom, left, right.
    for strip in (np.s_[:, :bw, :], np.s_[:, -bw:, :],
                  np.s_[:, :, :bw], np.s_[:, :, -bw:]):
        out[strip] = value
    return out
def visualize_ranked_results(q_pids, g_pids, q_camids, g_camids, q_img_path, g_img_path, root_rank, root, distmat, dataset, save_dir='log/ranked_results', topk=20):
    """Save per-query rank-list images for qualitative inspection.

    For each query, gallery images are sorted by distance and drawn side by
    side with a colored border: green = correct id, red = wrong id, blue =
    correct id found only beyond `topk`.  Queries whose rank-1 match is
    wrong are written out (grid image plus a text file listing the paths)
    under `root_rank`/all_ranks_<topk>/.
    """
    # number of query and gallery images
    num_q, num_g = distmat.shape
    print("Visualizing top-{} ranks in '{}' ...".format(topk, save_dir))
    print("# query: {}. # gallery {}".format(num_q, num_g))
    assert num_q == len(dataset.query)
    assert num_g == len(dataset.gallery)
    # indices of gallery images, nearest first per query row
    indices = np.argsort(distmat, axis=1)
    mkdir_if_missing(save_dir)
    mkdir_if_missing(root_rank + '/all_ranks_' + str(topk))
    count_unmatch = 0
    for q_idx in range(num_q):
        qimg_path = q_img_path[q_idx]
        qimg_path = qimg_path[0]
        qpid = q_pids[q_idx]
        qcamid = q_camids[q_idx]
        qdir = osp.join(save_dir, 'query' + str(q_idx + 1).zfill(5))
        mkdir_if_missing(qdir)
        # _cp_img_to(str(qimg_path), qdir, rank=0, prefix='query')
        ims = [read_im(qimg_path)]  # grid starts with the query image itself
        rank_idx = 1
        not_in_the_first_ranks = 0
        count_g = 0
        miss = False
        g_img_paths = []
        for g_idx in indices[q_idx, :]:
            gimg_path = g_img_path[g_idx]
            gimg_path = gimg_path[0]
            g_img_paths.append(g_img_path[g_idx])
            gpid = g_pids[g_idx]
            gcamid = g_camids[g_idx]
            # NOTE(review): this only skips the exact self-match (same index
            # and same camera), not all same-camera gallery entries --
            # confirm this is the intended evaluation protocol.
            invalid = (q_idx == g_idx) and (qcamid == gcamid)
            count_g = count_g + 1
            if not invalid and rank_idx < topk:
                im = read_im(gimg_path)
                # Add green boundary to true positive, red to false positive
                color = np.array([0, 255, 0]) if (qpid == gpid) else np.array([255, 0, 0])
                im = add_border(im, 10, color)
                ims.append(im)
                rank_idx += 1
                # A wrong rank-1 match marks this query as a "miss".
                if rank_idx == 2 and qpid != gpid:
                    miss = True
            if not invalid and rank_idx >= topk and (qpid == gpid):  # blue cases
                im = read_im(gimg_path)
                color = np.array([0, 0, 255])
                im = add_border(im, 10, color)
                ims.append(im)
                rank_idx += 1
                count_unmatch += 1
        im = make_im_grid(ims, 1, len(ims) + 1, 8, 255)
        if miss == True:  # match doesn't happen in the first rank positions
            # Record the query path and the ranked gallery paths for debugging.
            f = open(root_rank + '/all_ranks_' + str(topk) + '/' + 'desc_' + str(q_idx), "w+")
            f.write(qimg_path)
            for g in g_img_paths:
                f.write(g[0])
            f.close()
            save_im(im, root_rank + '/all_ranks_' + str(topk) + '/', '1_' + str(q_idx))
def re_ranking(q_g_dist, q_q_dist, g_g_dist, k1=20, k2=6, lambda_value=0.3):
    """Re-rank query-gallery distances using k-reciprocal neighbor encoding.

    Appears to follow the k-reciprocal re-ranking scheme of Zhong et al.
    (CVPR 2017) -- confirm against the reference implementation.  A Jaccard
    distance computed over k-reciprocal neighbor sets is blended with the
    original distance via `lambda_value`.

    Args:
        q_g_dist: [num_q, num_g] query-gallery distances.
        q_q_dist: [num_q, num_q] query-query distances.
        g_g_dist: [num_g, num_g] gallery-gallery distances.
        k1, k2: neighborhood sizes for reciprocal sets / query expansion.
        lambda_value: weight of the original distance in the final blend.

    Returns:
        [num_q, num_g] re-ranked distance matrix.
    """
    # The following naming, e.g. gallery_num, is different from outer scope.
    # Don't care about it.
    original_dist = np.concatenate(
        [np.concatenate([q_q_dist, q_g_dist], axis=1),
         np.concatenate([q_g_dist.T, g_g_dist], axis=1)],
        axis=0)
    original_dist = np.power(original_dist, 2).astype(np.float32)
    # Normalize each column by its max, then transpose.
    original_dist = np.transpose(1. * original_dist / np.max(original_dist, axis=0))
    V = np.zeros_like(original_dist).astype(np.float32)
    initial_rank = np.argsort(original_dist).astype(np.int32)
    query_num = q_g_dist.shape[0]
    gallery_num = q_g_dist.shape[0] + q_g_dist.shape[1]
    all_num = gallery_num
    for i in range(all_num):
        # k-reciprocal neighbors: j is kept only if i is also among j's top-k1.
        forward_k_neigh_index = initial_rank[i, :k1 + 1]
        backward_k_neigh_index = initial_rank[forward_k_neigh_index, :k1 + 1]
        fi = np.where(backward_k_neigh_index == i)[0]
        k_reciprocal_index = forward_k_neigh_index[fi]
        k_reciprocal_expansion_index = k_reciprocal_index
        # Expand the set with each candidate's own half-size reciprocal set
        # when the overlap is large enough.
        for j in range(len(k_reciprocal_index)):
            candidate = k_reciprocal_index[j]
            candidate_forward_k_neigh_index = initial_rank[candidate, :int(np.around(k1 / 2.)) + 1]
            candidate_backward_k_neigh_index = initial_rank[candidate_forward_k_neigh_index, :int(np.around(k1 / 2.)) + 1]
            fi_candidate = np.where(candidate_backward_k_neigh_index == candidate)[0]
            candidate_k_reciprocal_index = candidate_forward_k_neigh_index[fi_candidate]
            if len(np.intersect1d(candidate_k_reciprocal_index, k_reciprocal_index)) > 2. / 3 * len(candidate_k_reciprocal_index):
                k_reciprocal_expansion_index = np.append(k_reciprocal_expansion_index, candidate_k_reciprocal_index)
        k_reciprocal_expansion_index = np.unique(k_reciprocal_expansion_index)
        # Gaussian-kernel weights over the expanded reciprocal set.
        weight = np.exp(-original_dist[i, k_reciprocal_expansion_index])
        V[i, k_reciprocal_expansion_index] = 1. * weight / np.sum(weight)
    original_dist = original_dist[:query_num, ]
    if k2 != 1:
        # Local query expansion: average V over the k2 nearest neighbors.
        V_qe = np.zeros_like(V, dtype=np.float32)
        for i in range(all_num):
            V_qe[i, :] = np.mean(V[initial_rank[i, :k2], :], axis=0)
        V = V_qe
        del V_qe
    del initial_rank
    # Inverted index: for every gallery column, which rows touch it.
    invIndex = []
    for i in range(gallery_num):
        invIndex.append(np.where(V[:, i] != 0)[0])
    jaccard_dist = np.zeros_like(original_dist, dtype=np.float32)
    for i in range(query_num):
        temp_min = np.zeros(shape=[1, gallery_num], dtype=np.float32)
        indNonZero = np.where(V[i, :] != 0)[0]
        indImages = []
        indImages = [invIndex[ind] for ind in indNonZero]
        for j in range(len(indNonZero)):
            temp_min[0, indImages[j]] = temp_min[0, indImages[j]] + np.minimum(V[i, indNonZero[j]], V[indImages[j], indNonZero[j]])
        jaccard_dist[i] = 1 - temp_min / (2. - temp_min)
    final_dist = jaccard_dist * (1 - lambda_value) + original_dist * lambda_value
    del original_dist
    del V
    del jaccard_dist
    final_dist = final_dist[:query_num, query_num:]
    return final_dist
|
13,980 | afc57de39cb11b59be949e07f166b9b5bf8e3d85 | from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
class Command(BaseCommand):
    # NOTE(review): `help` promises to send an email, but `handle` is an
    # empty stub -- the command currently does nothing.
    help = "Sends an email through the framework to an address specified on the command line."
    args = "<email email...>"
    can_import_settings = True

    def handle(self, *args, **kwargs):
        # TODO: implement sending (e.g. via django.core.mail) using `args`.
        pass
|
13,981 | aa333354f05b1604a27fd42b618c3db7a746d3d4 | import numpy as np
import matplotlib.pyplot as plt
import sys
import glob
from os import path
# For every benchmark result file matching the command-line glob masks,
# plot each result column against the first column on log-log axes and
# save the figure next to the data file.
for mask in sys.argv[1:]:
    for file in glob.glob(mask):
        fname = path.split(file.split('.')[0])[1]
        data = np.loadtxt(file, skiprows=1)
        # Fixed: the original called open(file).readline() three times and
        # never closed any of the handles; read the header once instead.
        with open(file, 'r') as fh:
            header = fh.readline().split()
        titles = header[1:]
        nums = data[:, 0]
        results = data[:, 1:]
        fig = plt.figure(figsize=(7, 5))
        ax = fig.add_subplot(111, xscale='log', yscale='log')
        for i in range(len(titles)):
            ax.plot(nums, results[:, i], 'o-', label=titles[i])
        ax.legend()
        ax.set_xlabel(header[0], fontsize=15)
        ax.set_ylabel('time, s', fontsize=15)
        ax.set_title(fname, fontsize=20)
        fig.savefig(file.split('.')[0] + '.png', dpi=200)
        fig.clear()
|
13,982 | 1f842c98a6257ddba5c16285daf3e693386fb4d8 | def produce(c):
print("--3、启动生成器,开始执行生成器consumer--")
c.send(None) # 3、启动生成器,开始执行生成器consumer
print("--6、继续往下执行--")
n = 0
while n < 5:
n += 1
print("[Producer]: producing {} ..".format(n))
print("--7、第{}次唤醒生成器,从yield位置继续往下执行!--".format(n + 1))
r = c.send(n) # 第二次唤醒生成器
print('r的值是*******************************************************', r)
print("--9、从第8步往下--")
print("[Producer]: consumer return {} ..".format(r))
c.close()
def consumer():
    """Generator-based consumer: yields its last response, receives the
    next item via send(), and returns when a falsy value arrives."""
    print('--4、开始执行生成器代码--')
    response = None
    while True:
        print('--5、yield,中断,保存上下文--')
        # Step 4: yield suspends here, handing `response` back to the
        # producer and saving this frame until the next send() resumes it.
        n = yield response
        print('--8、获取上下文,继续往下执行--')
        if not n:
            return
        print("[Consumer]: consuming {} ..".format(n))
        response = "ok"
#
# if __name__ == "__main__":
# c = consumer() # 1、定义生成器,consumer并不执行
# produce(c) # 2、运行produce函数
# def pro(c):
# print('启动生成器')
# c.send(None)
# n = 0
# while n < 5:
# n += 1
# print(f'第{n}次唤醒生成器')
# r = c.send(n)
# print('r的值是', r)
# c.close()
#
#
# def cum():
# print('开始执行生成器代码')
# response = None
# while True:
# c = yield response
# print('c的值', c)
# if not c:
# return
#
# response = 'ok'
# if __name__ == '__main__':
# c = cum()
# pro(c)
# def m():
# a, b = 0, 1
# response = None
# while True:
# a, b = b, a + b
# a = yield response
# print(a)
# response = 'ok'
# def mm():
# a, b = 0, 1
# response = 0
# while True:
# c = yield response
# print(c)
# a, b = b, a + b
# # response = response + a
# m = mm()
# print(m.__next__())
# print(m.__next__())
def m(s):
    """Driver: primes generator `s`, then sends 1..5, printing each reply,
    and finally closes the generator."""
    print('启动生成器')
    # Prime the generator (advance it to its first yield).
    s.send(None)
    n = 0
    while n < 5:
        n += 1
        print(f'第{n}次启动生成器')
        r = s.send(n)
        print(r)
    s.close()
def n():
    """Minimal coroutine: prints each value sent in and replies 'ok'."""
    print('开启生成器')
    response = None
    while True:
        print('中断保存上下文')
        # Suspend here; `c` is the value delivered by the driver's send().
        c = yield response
        print(c)
        print('继续执行')
        response = 'ok'
|
13,983 | 4646e19bf3b3a8bd5c42d2e9857406ee4d3687a5 | from scipy.optimize import minimize
from scipy.spatial.distance import directed_hausdorff
import matplotlib.pyplot as plt
import numpy as np
import math
from aeropy.CST_2D import CST
from aeropy.CST_2D.fitting import fitting_shape_coefficients, shape_parameter_study
# Fit CST shape coefficients to an airfoil (NACA 64(1)-212, per the data
# filenames), upper and lower surfaces separately, then plot the fitted
# curves against the raw coordinates.
directory = './'
filename = 'naca641212_upper.txt'
data_upper = np.genfromtxt(directory + filename)
filename = 'naca641212_lower.txt'
data_lower = np.genfromtxt(directory + filename)
# 5th-order fits via a gradient solver minimizing the squared mean error.
deltaz, Al = fitting_shape_coefficients(data_lower, n=5, surface='lower',
                                        solver='gradient',
                                        objective='squared_mean')
deltaz, Au = fitting_shape_coefficients(data_upper, n=5, surface='upper',
                                        solver='gradient',
                                        objective='squared_mean')
print(Au)
print(Al)
# Evaluate the fitted CST curves at the raw x locations for comparison.
y_u = CST(data_upper[:, 0], 1, deltasz=0, Au=Au)
y_l = CST(data_lower[:, 0], 1, deltasz=0, Al=Al)
plt.figure()
plt.scatter(data_upper[:, 0], data_upper[:, 1], label='raw_upper')
plt.scatter(data_lower[:, 0], data_lower[:, 1], label='raw_lower')
plt.plot(data_upper[:, 0], y_u, label='upper')
plt.plot(data_lower[:, 0], y_l, label='lower')
plt.legend()
plt.show()
|
13,984 | 773d49ff3a79f308724d9203316b0d01403271dd | """
ColorHelper utils
Copyright (c) 2015 - 2017 Isaac Muse <isaacmuse@gmail.com>
License: MIT
"""
import re
import decimal
from . import csscolors, pantone, ral
from .rgba import RGBA, round_int, clamp
FLOAT_TRIM_RE = re.compile(r'^(?P<keep>\d+)(?P<trash>\.0+|(?P<keep2>\.\d*[1-9])0+)$')
COLOR_PARTS = {
"percent": r"[+\-]?(?:(?:\d*\.\d+)|\d+)%",
"float": r"[+\-]?(?:(?:\d*\.\d+)|\d+)"
}
COMPLETE = r'''
(?P<hexa>(\#|0x)(?P<hexa_content>[\dA-Fa-f]{8}))\b |
(?P<hex>(\#|0x)(?P<hex_content>[\dA-Fa-f]{6}))\b |
(?P<hexa_compressed>(\#|0x)(?P<hexa_compressed_content>[\dA-Fa-f]{4}))\b |
(?P<hex_compressed>(\#|0x)(?P<hex_compressed_content>[\dA-Fa-f]{3}))\b |
\b(?P<rgb>rgb\(\s*(?P<rgb_content>(?:%(float)s\s*,\s*){2}%(float)s | (?:%(percent)s\s*,\s*){2}%(percent)s)\s*\)) |
\b(?P<rgba>rgba\(\s*(?P<rgba_content>
(?:%(float)s\s*,\s*){3}(?:%(percent)s|%(float)s) | (?:%(percent)s\s*,\s*){3}(?:%(percent)s|%(float)s)
)\s*\)) |
\b(?P<hsl>hsl\(\s*(?P<hsl_content>%(float)s\s*,\s*%(percent)s\s*,\s*%(percent)s)\s*\)) |
\b(?P<hsla>hsla\(\s*(?P<hsla_content>%(float)s\s*,\s*(?:%(percent)s\s*,\s*){2}(?:%(percent)s|%(float)s))\s*\)) |
\b(?P<hwb>hwb\(\s*(?P<hwb_content>%(float)s\s*,\s*%(percent)s\s*,\s*%(percent)s)\s*\)) |
\b(?P<hwba>hwb\(\s*(?P<hwba_content>%(float)s\s*,\s*(?:%(percent)s\s*,\s*){2}(?:%(percent)s|%(float)s))\s*\)) |
\b(?P<gray>gray\(\s*(?P<gray_content>%(float)s|%(percent)s)\s*\)) |
\b(?P<graya>gray\(\s*(?P<graya_content>(?:%(float)s|%(percent)s)\s*,\s*(?:%(percent)s|%(float)s))\s*\)) |
\b(?P<pantone_code>((\d{2}-)?\d{3,5}\s|(black|blue|bright red|cool gray|dark blue|green|magenta|medium purple|orange|pink|process blue|purple|red|reflex blue|rhodamine red|rose gold|silver|violet|warm gray|warm red|yellow)\s(\d{1,5}\s)?|p\s\d{1,3}-\d{1,2}\s)[a-z]{1,3})\b | # noqa: E501
\b(?P<ral_code>RAL\s\d{4})\b
''' % COLOR_PARTS
COLOR_NAMES = r'\b(?P<webcolors>%s)\b(?!\()' % '|'.join([name for name in csscolors.name2hex_map.keys()])
HEX_IS_GRAY_RE = re.compile(r'(?i)^#([0-9a-f]{2})\1\1')
HEX_COMPRESS_RE = re.compile(r'(?i)^#([0-9a-f])\1([0-9a-f])\2([0-9a-f])\3(?:([0-9a-f])\4)?$')
COLOR_RE = re.compile(r'(?x)(?i)(?<![@#$.\-_])(?:%s|%s)(?![@#$.\-_])' % (COMPLETE, COLOR_NAMES))
def fmt_float(f, p=0):
    """Round *f* to *p* decimal places (half-up) and trim redundant trailing zeros."""
    quant = decimal.Decimal('0.' + ('0' * p) if p > 0 else '0')
    text = str(decimal.Decimal(f).quantize(quant, decimal.ROUND_HALF_UP))
    trimmed = FLOAT_TRIM_RE.match(text)
    if trimmed:
        text = trimmed.group('keep') + (trimmed.group('keep2') or '')
    return text
def is_gray(color):
    """Return True when the hex *color* has identical R, G and B channel pairs."""
    return HEX_IS_GRAY_RE.match(color) is not None
def compress_hex(color):
    """Collapse a doubled-digit hex color (#rrggbb[aa]) to its short form (#rgb[a])."""
    match = HEX_COMPRESS_RE.match(color)
    if match is None:
        return color
    short = '#{}{}{}'.format(match.group(1), match.group(2), match.group(3))
    if match.group(4):
        short += match.group(4)
    return short
def alpha_dec_normalize(dec):
    """Normalize a decimal (0.0-1.0) alpha value; return (hex byte, decimal string)."""
    value = float(dec)
    if value < 0.0 or value > 1.0:
        # Out of range: clamp and re-format with 3 decimal places.
        dec = fmt_float(clamp(value, 0.0, 1.0), 3)
    alpha_dec = dec
    alpha = "%02X" % round_int(float(alpha_dec) * 255.0)
    return alpha, alpha_dec
def alpha_percent_normalize(perc):
    """Normalize a percent alpha value ('50%'); return (hex byte, decimal string)."""
    fraction = clamp(float(perc.strip('%')), 0.0, 100.0) / 100.0
    return "%02X" % round_int(fraction * 255.0), fmt_float(fraction, 3)
def _group_text(m, name, decode=False):
    """Return match group *name* as text, decoding from UTF-8 bytes if *decode*."""
    content = m.group(name)
    return content.decode('utf-8') if decode else content


def _group_parts(m, name, decode=False):
    """Return the comma-separated, whitespace-stripped parts of match group *name*."""
    return [x.strip() for x in _group_text(m, name, decode).split(',')]


def _rgb_channels_to_hex(content):
    """Convert the first three r,g,b components (all percents or all 0-255 floats) to '#rrggbb'."""
    if content[0].endswith('%'):
        r = round_int(clamp(float(content[0].strip('%')), 0.0, 255.0) * (255.0 / 100.0))
        g = round_int(clamp(float(content[1].strip('%')), 0.0, 255.0) * (255.0 / 100.0))
        b = round_int(clamp(float(content[2].strip('%')), 0.0, 255.0) * (255.0 / 100.0))
        return "#%02x%02x%02x" % (r, g, b)
    return "#%02x%02x%02x" % (
        clamp(round_int(float(content[0])), 0, 255),
        clamp(round_int(float(content[1])), 0, 255),
        clamp(round_int(float(content[2])), 0, 255)
    )


def _normalize_alpha(part):
    """Normalize one alpha component, accepting either percent or decimal form."""
    if part.endswith('%'):
        return alpha_percent_normalize(part)
    return alpha_dec_normalize(part)


def _hue_to_unit(value):
    """Wrap a degree hue into [0, 360) and scale it to the [0, 1) range."""
    hue = float(value)
    if hue < 0.0 or hue > 360.0:
        hue = hue % 360.0
    return hue / 360.0


def translate_color(m, use_hex_argb=False, decode=False):
    """Translate a COLOR_RE match object to (color, alpha, alpha_dec).

    `color` is a '#rrggbb' string (or None when the match can't be resolved),
    `alpha` is a two-digit hex byte and `alpha_dec` its decimal string form;
    both are None when the matched notation carries no alpha channel.
    `use_hex_argb` treats 4/8-digit hex values as ARGB instead of RGBA and
    `decode` handles matches captured against byte strings.
    """
    color = None
    alpha = None
    alpha_dec = None
    if m.group('hex_compressed'):
        content = _group_text(m, 'hex_compressed_content', decode)
        # #rgb: double each digit to get the full channel value.
        color = "#%02x%02x%02x" % (
            int(content[0:1] * 2, 16), int(content[1:2] * 2, 16), int(content[2:3] * 2, 16)
        )
    elif m.group('hexa_compressed') and use_hex_argb:
        content = _group_text(m, 'hexa_compressed_content', decode)
        # #argb: alpha digit first.
        color = "#%02x%02x%02x" % (
            int(content[1:2] * 2, 16), int(content[2:3] * 2, 16), int(content[3:] * 2, 16)
        )
        alpha = content[0:1]
        alpha_dec = fmt_float(float(int(alpha, 16)) / 255.0, 3)
    elif m.group('hexa_compressed'):
        content = _group_text(m, 'hexa_compressed_content', decode)
        # #rgba: alpha digit last.
        color = "#%02x%02x%02x" % (
            int(content[0:1] * 2, 16), int(content[1:2] * 2, 16), int(content[2:3] * 2, 16)
        )
        alpha = content[3:]
        alpha_dec = fmt_float(float(int(alpha, 16)) / 255.0, 3)
    elif m.group('hex'):
        content = _group_text(m, 'hex_content', decode)
        if len(content) == 6:
            color = "#%02x%02x%02x" % (
                int(content[0:2], 16), int(content[2:4], 16), int(content[4:6], 16)
            )
        else:
            color = "#%02x%02x%02x" % (
                int(content[0:1] * 2, 16), int(content[1:2] * 2, 16), int(content[2:3] * 2, 16)
            )
    elif m.group('hexa') and use_hex_argb:
        content = _group_text(m, 'hexa_content', decode)
        if len(content) == 8:
            color = "#%02x%02x%02x" % (
                int(content[2:4], 16), int(content[4:6], 16), int(content[6:], 16)
            )
            alpha = content[0:2]
            alpha_dec = fmt_float(float(int(alpha, 16)) / 255.0, 3)
        else:
            color = "#%02x%02x%02x" % (
                int(content[1:2] * 2, 16), int(content[2:3] * 2, 16), int(content[3:] * 2, 16)
            )
            alpha = content[0:1]
            alpha_dec = fmt_float(float(int(alpha, 16)) / 255.0, 3)
    elif m.group('hexa'):
        content = _group_text(m, 'hexa_content', decode)
        if len(content) == 8:
            color = "#%02x%02x%02x" % (
                int(content[0:2], 16), int(content[2:4], 16), int(content[4:6], 16)
            )
            alpha = content[6:]
            alpha_dec = fmt_float(float(int(alpha, 16)) / 255.0, 3)
        else:
            color = "#%02x%02x%02x" % (
                int(content[0:1] * 2, 16), int(content[1:2] * 2, 16), int(content[2:3] * 2, 16)
            )
            alpha = content[3:]
            alpha_dec = fmt_float(float(int(alpha, 16)) / 255.0, 3)
    elif m.group('rgb'):
        content = _group_parts(m, 'rgb_content', decode)
        color = _rgb_channels_to_hex(content)
    elif m.group('rgba'):
        content = _group_parts(m, 'rgba_content', decode)
        color = _rgb_channels_to_hex(content)
        alpha, alpha_dec = _normalize_alpha(content[3])
    elif m.group('gray'):
        content = _group_text(m, 'gray_content', decode)
        if content.endswith('%'):
            g = round_int(clamp(float(content.strip('%')), 0.0, 255.0) * (255.0 / 100.0))
        else:
            g = clamp(round_int(float(content)), 0, 255)
        color = "#%02x%02x%02x" % (g, g, g)
    elif m.group('graya'):
        content = _group_parts(m, 'graya_content', decode)
        if content[0].endswith('%'):
            g = round_int(clamp(float(content[0].strip('%')), 0.0, 255.0) * (255.0 / 100.0))
        else:
            g = clamp(round_int(float(content[0])), 0, 255)
        color = "#%02x%02x%02x" % (g, g, g)
        alpha, alpha_dec = _normalize_alpha(content[1])
    elif m.group('hsl'):
        content = _group_parts(m, 'hsl_content', decode)
        rgba = RGBA()
        h = _hue_to_unit(content[0])
        s = clamp(float(content[1].strip('%')), 0.0, 100.0) / 100.0
        lum = clamp(float(content[2].strip('%')), 0.0, 100.0) / 100.0
        rgba.fromhls(h, lum, s)
        color = rgba.get_rgb()
    elif m.group('hsla'):
        content = _group_parts(m, 'hsla_content', decode)
        rgba = RGBA()
        h = _hue_to_unit(content[0])
        s = clamp(float(content[1].strip('%')), 0.0, 100.0) / 100.0
        lum = clamp(float(content[2].strip('%')), 0.0, 100.0) / 100.0
        rgba.fromhls(h, lum, s)
        color = rgba.get_rgb()
        alpha, alpha_dec = _normalize_alpha(content[3])
    elif m.group('hwb'):
        content = _group_parts(m, 'hwb_content', decode)
        rgba = RGBA()
        h = _hue_to_unit(content[0])
        w = clamp(float(content[1].strip('%')), 0.0, 100.0) / 100.0
        b = clamp(float(content[2].strip('%')), 0.0, 100.0) / 100.0
        rgba.fromhwb(h, w, b)
        color = rgba.get_rgb()
    elif m.group('hwba'):
        content = _group_parts(m, 'hwba_content', decode)
        rgba = RGBA()
        h = _hue_to_unit(content[0])
        w = clamp(float(content[1].strip('%')), 0.0, 100.0) / 100.0
        b = clamp(float(content[2].strip('%')), 0.0, 100.0) / 100.0
        rgba.fromhwb(h, w, b)
        color = rgba.get_rgb()
        alpha, alpha_dec = _normalize_alpha(content[3])
    elif m.group('webcolors'):
        try:
            color = csscolors.name2hex(_group_text(m, 'webcolors', decode)).lower()
        except Exception:
            pass
    elif m.group('pantone_code'):
        try:
            color = pantone.code2hex(_group_text(m, 'pantone_code', decode)).lower()
        except Exception:
            pass
    elif m.group('ral_code'):
        try:
            color = ral.code2hex(_group_text(m, 'ral_code', decode)).lower()
        except Exception:
            pass
    return color, alpha, alpha_dec
|
13,985 | 49268ac88699b24de8bad24b4a14f5d270d8b349 | #!/usr/bin/python3.4
# -*- coding=utf-8 -*-
import crypt
import os
def decrypto_hash(hashpassword, passwd_dict_file):
    """Dictionary attack against a crypt(3)-style password hash.

    Prints the matching password and terminates the process when found;
    otherwise reports that no candidate in the dictionary matched.
    """
    hashpassword_list = hashpassword.split('$')
    # Rebuild the salt prefix, e.g. '$1$f6y4$' out of '$1$f6y4$1Lev506HhZ0LE.IR6jRB.0'
    salt = '$'+ hashpassword_list[1] + '$' + hashpassword_list[2] + '$'
    # Fixed: the dictionary file was opened without ever being closed.
    with open(passwd_dict_file, 'r') as f:
        passwds = f.readlines()
    for passwd in passwds:
        # Hash the candidate with the extracted salt and compare results.
        result = crypt.crypt(passwd.strip(), salt)
        if hashpassword == result:
            print('Password found: ' + passwd.strip())  # fixed message typo ("Finded")
            os._exit(1)  # NOTE(review): exit status 1 on success looks odd — confirm intent
    print('Password not found!!!')
if __name__ == '__main__':
    # Interactive entry point: prompts (in Chinese) for the hashed password
    # and the full path of the password dictionary file.
    secret = input('请输入Hash后的密码: ')
    path = input('请输入密码字典完整路径: ')
    decrypto_hash(secret, path)
|
13,986 | d706c14a42f7dc8b14e7811e4b7da8f9dc7e69ad | #This file is based on:
#https://swf.com.tw/?p=1188
#https://stackoverflow.com/questions/12090503/listing-available-com-ports-with-python3
# List every serial device currently attached, then echo whatever the chosen
# port sends, forever (Ctrl-C to stop).
import serial
import serial.tools.list_ports
comlist = serial.tools.list_ports.comports()
connected = []
for element in comlist:
    connected.append(element.device)
print("Connected COM ports: " + str(connected))
COM_PORT = input("Port?:")
BAUD_RATES = 9600  # transmission baud rate
ser = serial.Serial(COM_PORT, BAUD_RATES)  # initialise the serial port
while True:
    while ser.in_waiting:  # when serial data has arrived…
        data_raw = ser.readline()  # read one line
        data = data_raw.decode()  # decode with the default UTF-8
        print('RAW DATA', data_raw)
        print('Decoded Data', data)
|
13,987 | d21587473fa545b82691e8ab6bd2ddf3d7d8bc4c | from tri import *
# Smoke-test tri_bulle (bubble sort, imported from tri) on two sample lists.
T = [7, 5, 1, 2, 4, 3]
print(tri_bulle(T))
T = [3, 7, 1, 2, 2, 3]
print(tri_bulle(T))
|
13,988 | 8e6e4a0350519e35267692f52a717c9d8d424900 | import time
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from sklearn import metrics
# Initialize network parameters.
def init_network(model, method='kaiming', exclude='embedding', seed=123):
    """Initialise model weights in place (xavier/kaiming/normal) and zero the biases.

    Parameters whose name contains *exclude* are left untouched.
    *seed* is accepted for interface compatibility but not used here.
    """
    for name, param in model.named_parameters():
        if exclude in name:
            continue  # e.g. skip embedding layers
        if 'weight' in name:
            if method == 'xavier':
                nn.init.xavier_normal_(param)
            elif method == 'kaiming':
                nn.init.kaiming_normal_(param)
            else:
                nn.init.normal_(param)
        elif 'bias' in name:
            nn.init.constant_(param, 0)
# train the main function.
def train(args, model, train_iter, dev_iter):
    """Train *model* on train_iter, evaluating on dev_iter every 10 batches.

    Saves the best checkpoint (by dev accuracy) to `<args.model>.ckpt`, the
    final weights to `<args.model>final.ckpt`, and the four recorded curves
    (train loss/acc, dev loss/acc) as comma-joined rows in log.csv.
    """
    # Four parameters to be drawn are finally saved in the following four lists.
    train_loss=[]
    train_accu=[]
    dev_loss_list=[]
    dev_acc_list=[]
    start_time = time.time()  # NOTE(review): recorded but never used
    model.train()
    optimizer = torch.optim.Adam(model.parameters(), lr=args.lr)# select Adam optimizer and set the learning rate to 0.001.
    total_batch = 0
    dev_best_acc = 0
    # Start training.
    for epoch in range(args.num_epochs):
        print('Epoch [{}/{}]'.format(epoch + 1, args.num_epochs))
        for i, data in enumerate(train_iter):
            text = data["text"]
            label = data["label"]
            outputs = model(text)
            model.zero_grad()
            loss = F.cross_entropy(outputs, label)
            loss.backward()
            optimizer.step()
            # evaluate every 10 rounds to print the information.
            if total_batch % 10 == 0:
                y_true = label.data.cpu()
                y_pred = torch.max(outputs.data, 1)[1].cpu()
                train_acc = metrics.accuracy_score(y_true, y_pred)# Calculate the training accuracy.
                dev_acc, dev_loss = evaluate(model, dev_iter)# Call the evaluation function for evaluation.
                if dev_acc > dev_best_acc:
                    dev_best_acc = dev_acc
                    torch.save(model.state_dict(),args.model + '.ckpt')
                    print("saved model, best acc on dev: %.4f" % dev_acc)
                msg = 'Iter: {0:>6}, Train Loss: {1:>5.2}, Train Acc: {2:>6.2%}, Val Loss: {3:>5.2}, Val Acc: {4:>6.2%}'
                print(msg.format(total_batch, loss.item(), train_acc, dev_loss, dev_acc))
                train_loss.append(loss.item())
                train_accu.append(train_acc)
                dev_loss_list.append(dev_loss)
                dev_acc_list.append((dev_acc))
                model.train()  # evaluate() put the model in eval mode; switch back
            total_batch += 1
    # Dump the four curves, one comma-joined row per metric.
    with open("log.csv","w",encoding="utf-8") as f:
        m=""
        for each in train_loss:
            m=m+str(each)+","
        f.write(m)
        f.write("\n")
        m = ""
        for each in train_accu:
            m = m + str(each) + ","
        f.write(m)
        f.write("\n")
        m = ""
        for each in dev_loss_list:
            m = m + str(each) + ","
        f.write(m)
        f.write("\n")
        m = ""
        for each in dev_acc_list:
            m = m + str(each) + ","
        f.write(m)
    # save the model.
    torch.save(model.state_dict(), args.model + 'final.ckpt')
# evaluation function.
def evaluate(model, dev_iter):
    """Run *model* over dev_iter; return (accuracy, mean cross-entropy loss)."""
    model.eval()
    total_loss = 0
    all_true = np.array([], dtype=int)
    all_pred = np.array([], dtype=int)
    with torch.no_grad():
        for batch in dev_iter:
            outputs = model(batch["text"])
            labels = batch["label"]
            total_loss += F.cross_entropy(outputs, labels)
            all_true = np.append(all_true, labels.data.cpu().numpy())
            all_pred = np.append(all_pred, torch.max(outputs.data, 1)[1].cpu().numpy())
    return metrics.accuracy_score(all_true, all_pred), total_loss / len(dev_iter)
# Prediction function
def inference(args, model, test_iter):
    """Predict the class index for every sample in test_iter.

    *args* is unused but kept so all entry points share one signature.
    Returns a flat numpy int array of predicted labels.
    """
    model.eval()
    preds = np.array([], dtype=int)
    with torch.no_grad():
        for batch in test_iter:
            logits = model(batch["text"])
            labels = torch.max(logits.data, 1)[1].cpu().numpy()
            preds = np.append(preds, labels)
    return preds
13,989 | e9d9b5c16d47f26faffec3cf56965d97e04997fc | from django.shortcuts import render
from django.views.generic import TemplateView
from ..models import Principal, Shipper
from django.shortcuts import redirect
from django.urls import reverse, reverse_lazy
from django.db.models import Q
from django.contrib import messages
from django.contrib.auth.decorators import login_required
class CustomerAddView(TemplateView):
    """Holds the POST handlers that create customers (Principal) and shippers.

    NOTE(review): both handlers are written as plain function-based views
    (no ``self`` parameter) even though they sit inside a TemplateView
    subclass — presumably they are wired up as ``CustomerAddView.add_customer``
    etc. in the URLconf; confirm against urls.py.
    """
    @login_required(login_url=reverse_lazy('login'))
    def add_customer(request):
        # Create a Principal from the posted form and jump to its detail page;
        # non-POST requests fall back to the customer list.
        if request.method == 'POST':
            customer_name = request.POST['customer_add']
            work_type = request.POST['work_type_add']
            data = {
                'name': customer_name,
                'work_type': work_type
            }
            customer = Principal(**data)
            customer.save()
            return redirect('customer-detail', pk=customer.pk)
        else:
            return redirect('customer-list')
    @login_required(login_url=reverse_lazy('login'))
    def add_shipper(request):
        # Create a Shipper linked to an existing Principal (customer_pk).
        if request.method == 'POST':
            customer_pk = request.POST['customer_pk']
            shipper = request.POST['shipper_add']
            address = request.POST['address_add']
            data = {
                'principal': Principal.objects.get(pk=customer_pk),
                'name': shipper,
                'address': address
            }
            shipper = Shipper(**data)
            shipper.save()
            return redirect('customer-detail', pk=customer_pk)
        else:
            return redirect('customer-list')
|
13,990 | c35f52d5888313ab271f5d780bad6f981f3c754f | salario = float(input("Digite o valor do seu salário: "))
despesas = float(input("Digite o valor das suas despesas: "))
sobra = salario - despesas
poupanca = 1000000 / sobra
num_arredondado = poupanca + 1
anos = num_arredondado / 12
meses = num_arredondado % 12
print(f"Você se tornará um milionário em {anos:.00f} anos e {meses:.00f} meses. ") |
13,991 | f11386ab35818fda8ab812517c26e413c9aea182 | # Generated by Django 2.1 on 2018-09-18 12:08
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: adds the optional 3-letter station_code
    # field to the Stations model.
    dependencies = [
        ('train_ticket', '0004_auto_20180914_2109'),
    ]
    operations = [
        migrations.AddField(
            model_name='stations',
            name='station_code',
            field=models.CharField(blank=True, max_length=3),
        ),
    ]
|
13,992 | b855cd3fb0d07388c15091d9f7f4719f3e5c9085 | mile_distances = [1.0, 6.5, 17.4, 2.4, 9]
# Convert each distance in miles to kilometres (1 mile ≈ 1.6 km).
kilometer_distances = [miles * 1.6 for miles in mile_distances]
print(kilometer_distances)
# [1.6, 10.4, 27.84, 3.84, 14.4]
13,993 | 3b676e2a0d42459808502848ed1ebf452b062d78 | N=int(input())
divisions=[]
# Collect every factor pair (i, N//i); scanning i up to N//2+1 is enough to
# reach all pairs (duplicates for small N don't change the minimum below).
for i in range(1,N//2+2):
    if N % i == 0 :
        divisions.append((i,N//i))
print(divisions)
results=[]
# The answer is the smallest absolute difference between a pair's factors.
for divs in divisions:
    results.append(abs(divs[0]-divs[1]))
print(min(results))
|
13,994 | 91e5e92053f386328d698280a7f30dfa7f2eaa3c | import serial
import time
import datetime
import os
import mysql.connector as mariadb
import array
import struct as struct
import logging
from struct import *
# Track status of each servo (False = retracted/stopped, True = extended/running)
actuatorStatus = False
motorStatus = False
# Serial objects that control serial communication with HASP and the Arduino. To be used as global variables.
haspSerial = None
arduinoSerial = None
# Logger setup: everything is appended to serial.log...
FORMAT = '[%(asctime)-15s] %(name)s.%(levelname)s: %(message)s'
formatter = logging.Formatter(FORMAT)
logging.basicConfig(filename='serial.log', filemode='a', level=logging.INFO, format=FORMAT)
logger = logging.getLogger()
# ...and also mirrored to the console via this handler.
ch = logging.StreamHandler()
ch.setFormatter(formatter)
ch.setLevel(logging.DEBUG)
logger.addHandler(ch)
def rebootPi():
    """Reboot the Raspberry Pi (and thus the attached Arduino) immediately."""
    os.system('sudo shutdown -r now') # Reboot the Pi and consequently the Arduino
# Open connection with a specified port
def connectPort(portName):
    """Open serial port *portName* at 4800 baud with no parity.

    Returns the open serial.Serial object, or None when opening failed
    (the failure is logged rather than raised).
    """
    ser = None
    try:
        ser = serial.Serial(port = portName, baudrate = 4800, parity = serial.PARITY_NONE)
        if ser is not None:  # fixed: use identity check instead of '!= None'
            logging.info('Successfully connected to port \'' + ser.name + '\' with baudrate ' + str(ser.baudrate) + '.')
        else:
            logging.warning('Could not connect to port ' + portName + '.')
    except Exception as e:
        logging.error('Error while attempting to open serial port \'' + portName + '\'.')
        logging.error(e)
    return ser
def connectToHASP():
    """Find the first /dev/*USB* device (the RS232 link) and open it as haspSerial."""
    global haspSerial
    logging.info('Attempting connection with HASP.')
    usbPortName = ''
    # Find all ttyUSB devices (i.e. connection via RS232)
    for entry in os.listdir('/dev/'):
        if 'USB' in entry:
            usbPortName = '/dev/' + entry
            logging.info('Found USB port: ' + str(usbPortName))
            break
    haspSerial = connectPort(usbPortName)
def connectToArduino():
    """Find the first /dev/*ACM* device (the Arduino) and open it as arduinoSerial."""
    global arduinoSerial
    logging.info('Attempting connection with Arduino.')
    acmPortName = ''
    # Find all ttyACM devices (i.e. connection to Arduino)
    for entry in os.listdir('/dev/'):
        if 'ACM' in entry:
            acmPortName = '/dev/' + entry
            logging.info('Found ACM port: ' + str(acmPortName))
            break
    arduinoSerial = connectPort(acmPortName)
# Checks for commands from HASP and sends them to the Arduino
def processCommands():
    """Read pending HASP command bytes and forward valid ones to the Arduino.

    Command byte pairs: 0x11 0x12 toggles the actuator, 0x21 0x22 toggles the
    motor, ASCII 'RR' reboots the Pi.  Interlocks prevent retracting the
    actuator while the motor runs and running the motor while retracted.
    """
    global actuatorStatus
    global motorStatus
    #logging.info('Checking for commands.')
    if haspSerial.in_waiting > 0:
        bufSize = haspSerial.in_waiting
        command = haspSerial.read(bufSize)
        logging.info('Received ['+str(bufSize)+'] bytes from ' + str(haspSerial.name) + ': ' + str(command))
        # The command payload sits in bytes 2-3 of the HASP frame.
        cmdBytes = bytes(command[2:3]) + bytes(command[3:4])
        if cmdBytes == b'\x11\x12':
            if actuatorStatus and motorStatus: # Prevent actuator from retracting while the motor is deployed
                logging.warning('Cannot retract actuator while motor is active.')
            else:
                written = arduinoSerial.write(cmdBytes)
                logging.info('Wrote [' + str(written) + '] bytes to ' + str(arduinoSerial.name) + ': ' + str(cmdBytes))
                if written == 2:  # fixed: 'is 2' compared identity, not value
                    actuatorStatus = not actuatorStatus
                    if actuatorStatus:
                        logging.info('Extending actuator...')
                    else:
                        logging.info('Retracting actuator...')
                else:
                    logging.error('Action not completed!')
                arduinoSerial.flush()
        elif cmdBytes == b'\x21\x22':
            if not actuatorStatus and not motorStatus: # Prevent motor from deploying while actuator is retracted
                logging.warning('Cannot deploy motor while actuator is not extended.')
            else:
                written = arduinoSerial.write(cmdBytes)
                logging.info('Wrote ' + str(written) + ' bytes to ' + str(arduinoSerial.name) + ': ' + str(cmdBytes))
                if written == 2:  # fixed: 'is 2' compared identity, not value
                    motorStatus = not motorStatus
                    if motorStatus:
                        logging.info('Activating motor...')
                    else:
                        logging.info('Stopping motor...')
                else:
                    logging.error('Action not completed!')
                arduinoSerial.flush()
        elif cmdBytes == b'RR':
            logging.info('Rebooting Raspberry Pi.')
            rebootPi()
        #else:
        #    logging.info('No commands found.')
# Used to test the serial connection from the Pi to the Arduino
def testSerialArduino():
    """Interactive helper: read a command from stdin and forward it to the Arduino."""
    command = input('Enter command: ')
    if command == 'AA':  # fixed: 'is' compared object identity — unreliable for strings
        logging.info('Toggling actuator.')
        arduinoSerial.write(b'AA')
        arduinoSerial.flush()
    elif command == 'MM':  # fixed: was 'is', same identity-comparison bug
        logging.info('Toggling motor')
        arduinoSerial.write(b'MM')
        arduinoSerial.flush()
    else:
        logging.warning('Command not recognized: ' + str(command))
#Function for measuring the core temp of the rasppi in farenheit
def measure_temp():
    """Return the Pi's core temperature in Celsius as a bare string, e.g. '48.3'."""
    raw = os.popen("vcgencmd measure_temp").readline()
    stripped = raw.strip('\n')
    return stripped.replace("temp=", "").replace("\'C", "")
def measurePiTemp():
    """Return the Pi's core temperature converted to Fahrenheit, as a string."""
    raw = os.popen("vcgencmd measure_temp").readline()
    celsius_text = raw.replace("temp=", "")
    fahrenheit = float(celsius_text[:-3]) * 1.8 + 32  # drop the trailing "'C\n"
    return str(fahrenheit)
#Receives and stores the arduino data in the string "arduinoData", the string includes commas
def readArduinoData():
    """Read a 400-byte frame from the Arduino and return the text between the
    first '[' and ']' found after byte offset 200.

    Fixed: the original compared ``data[x] == "["`` on a ``bytes`` object,
    which is always False in Python 3 (indexing bytes yields ints), so the
    payload was never extracted.  This version decodes the frame first.
    NOTE(review): the frame layout (bracketed payload in the second half of
    400 bytes) is taken from the original offsets — confirm against the
    Arduino sketch.
    """
    data = arduinoSerial.read(400)
    print(data)
    text = data.decode(errors='replace')
    start = text.find('[', 200)
    if start == -1:
        return ""
    end = text.find(']', start + 1)
    if end == -1:
        return ""
    return text[start + 1:end]
#Removes commas and stores data in variables to be uploaded to the database
def removeCommasFromString(dataString):
    """Split *dataString* on commas, keeping only comma-terminated fields.

    Note: any characters after the final comma are discarded, matching the
    comma-terminated record format produced by the Arduino payload.
    """
    return dataString.split(',')[:-1]
#Stores the same data that is being sent to the database in txt files(backup)
#Also sets the name of the file to the current UTC time
def storeDataInTxtFiles(start_packet, RPI_temp, minipix1_temp, minipix2_temp, ISS_temp, ISS_pressure, ambient_pressure, solar_cells, end_packet):
    """Back up one pipe-delimited telemetry record to a text file named after the current time.

    Fixed: the filename was built as ``str(datetime.now() + ".txt")``, which
    raises TypeError (datetime + str); the ".txt" suffix now goes on the
    string.  The file handle is also closed deterministically now.
    NOTE(review): the timestamp filename contains ':' characters, which is
    fine on Linux but invalid on Windows.
    """
    UTCTimeString = str(datetime.datetime.now()) + ".txt"
    # Write the same pipe-delimited record that is sent to the database.
    with open(UTCTimeString, "w+") as f:
        f.write(start_packet + "|" + RPI_temp + "|" + minipix1_temp + "|" + minipix2_temp + "|" + ISS_temp + "|" + ISS_pressure + "|" + ambient_pressure + "|" + solar_cells + "|" + end_packet)
if __name__ == '__main__':
    try:
        connectToHASP()
        connectToArduino()
        # Main service loop: poll HASP for commands and drain Arduino data.
        while True:
            if haspSerial is not None and arduinoSerial is not None:
                processCommands()
                #testSerialArduino()
                readArduinoData()
                time.sleep(0.5)
            else:  # fixed: this line began with a stray '+' (leftover diff marker) — a SyntaxError
                if haspSerial is None and arduinoSerial is None:
                    raise Exception('Missing serial connection to HASP and Arduino.')
                elif haspSerial is None:
                    raise Exception('Missing serial connection to HASP.')
                elif arduinoSerial is None:
                    raise Exception('Missing serial connection to Arduino.')
    except Exception as e:
        logging.error(e)
    # NOTE(review): the code below only runs after the loop above exits via an
    # exception, and will itself fail if the Arduino was never connected.
    dataString = readArduinoData()
    #dataList = removeCommasFromString(dataString)
    print("dataString")
    print(dataString)
    dataList = removeCommasFromString(dataString)
    print(dataList)
    raspberryTemperature = measurePiTemp()
    #Connects to the mysql database
    mariadb_connection = mariadb.connect(
        host = "localhost",
        user = "user",
        passwd = "password",
        database = "tempdb"
    )
    cursor = mariadb_connection.cursor()
    sql = "INSERT INTO socratesTable (start_packet, RPI_temp, minipix1_temp, minipix2_temp, ISS_temp, ISS_pressure, ambient_pressure, solar_cells, end_packet) VALUES (%s, %s, %s,%s,%s,%s,%s,%s,%s)"
    val = (" ",raspberryTemperature,dataList[0]," "," "," ", " ", " ", " ")
    #storeDataInTxtFiles(" ", raspberryTemperature, " ", " ", " ", " ", " ", " ", " ")
    #Inserts the values from 'val' into their respective columns in 'sql'
    cursor.execute(sql, val)
    mariadb_connection.commit()
    mariadb_connection.close()
|
13,995 | d39c77b9a080ef63061f0e788f815994aa364de7 | with open("small_vocab_en") as xh:
    with open('small_vocab_fr') as yh:
        with open("en_fr","w") as zh:
            #Read first file
            xlines = xh.readlines()
            #Read second file
            ylines = yh.readlines()
            #Combine content of both lists
            #combine = list(zip(ylines,xlines))
            #Write to third file as "french<TAB>english" per line
            #(assumes both inputs have the same number of lines — confirm)
            for i in range(len(xlines)):
                line = ylines[i].strip() + '\t' + xlines[i]
                zh.write(line)
13,996 | 2add66cf1b6e24819653f1f9808aaa5e173dd5ba | from .dbscan import DBSCAN
__all__ = ["DBSCAN"]
|
13,997 | 7909734a02706017f8efc723ad6854ac99e32a88 | # -*- coding: utf-8 -*-
"""
Created on Sun Oct 9 00:17:28 2016
@author: Andres Mendez Vazquez
@ Licencense BSD 3
"""
#import matplotlib.pyplot as plt
import numpy as np
from KFamily import KFamily
def DataGeneration(cov, mean, number):
    """Sample *number* 2-D points from N(mean, cov); return the (x, y) coordinate arrays."""
    samples = np.random.multivariate_normal(mean, cov, number)
    x, y = samples.T
    return x, y
#Initial Values for the clusters and centroids
symbols=['bx','ro','kH','bo']
kclusters = 2
dim = 2
clusters_size = 300
#error: convergence tolerance for the iterative solvers below
error = 0.0000001
#Mean and cov for the clusters (mean3 and cov2 are defined but unused below)
mean1= [0,4]
mean2= [0,-4]
mean3 = [-4, 0]
cov1=0.05*np.identity(2)
cov2=1*np.identity(2)
# Class 1
x1,y1 = DataGeneration(cov1, mean1, clusters_size)
# Class2
x2,y2 = DataGeneration(cov1, mean2, clusters_size)
# Putting the Data Together: one 2 x (2*clusters_size) matrix of coordinates
Data=np.matrix([np.concatenate((x1, x2), axis=0), np.concatenate((y1, y2), axis=0)])
# Build and object for clustering
C1 = KFamily(kclusters,Data)
# Testing the Basic Functions: k-means, k-centers and fuzzy c-means
centroids1, Labels1 = C1.k_means(error,1)
centroids2, Labels2 = C1.k_centers(1)
m_memb = 2.0
centroids3, Labels3 = C1.fuzzyCmeans(error,m_memb)
13,998 | 2d72ae8b619aa69f7d28625292be95e2d1fa1abb | import json
import logging
import sys
from logging.handlers import RotatingFileHandler
from os.path import join, exists
from shutil import rmtree
def get_domain():
    """Return the configured site domain, defaulting to 'company.com'."""
    from .core.models import Configuration
    return Configuration.get('domain', 'company.com')
def get_menu_links():
    """Return the configured menu links as a list."""
    from .core.models import Configuration
    # list() replaces the redundant identity comprehension.
    return list(Configuration.menu_links())
def get_auto_cleanup_run_days():
    """Return the auto-cleanup age threshold in days (default 90)."""
    from .core.models import Configuration
    from .settings import logger
    key = 'auto_cleanup_run_after_days'
    raw = Configuration.get(key, 90)
    try:
        return int(raw)
    except ValueError:
        # Fall back to the default when the stored value is not numeric.
        logger.exception('config key: {} should be integer!'.format(key))
        return 90
def cleanup_run_media(run_id):
    """Delete the media directory for *run_id*; failures are logged, not raised."""
    from .settings import MEDIA_ROOT, logger
    run_media_dir = join(MEDIA_ROOT, 'runs/{}'.format(run_id))
    if exists(run_media_dir):
        try:
            rmtree(run_media_dir)
        except Exception:  # fixed: bare except also swallowed SystemExit/KeyboardInterrupt
            logger.exception('failed to cleanup run media <{}>'.format(run_id))
def read_document(name):
    """Return the markdown text of static/docs/<name>.md, or 'not found.'."""
    from .settings import SETTINGS_DIR
    doc_path = join(SETTINGS_DIR, 'static/docs', name + '.md')
    if not exists(doc_path):
        return 'not found.'
    with open(doc_path) as f:
        return f.read()
def setup_logger(log_dir=None, debug=False):
    """Configure and return the 'testcube' logger.

    Always logs to stdout; when *log_dir* is given, also logs to
    testcube.log there (a single file in debug mode, a 10 MB x 5 rotating
    set otherwise).  Level is DEBUG when *debug*, else INFO.

    NOTE(review): each call adds fresh handlers to the same named logger,
    so calling this twice duplicates every log line.
    """
    logger = logging.getLogger('testcube')
    formatter = logging.Formatter('%(asctime)s %(levelname)-8s: %(message)s')
    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.formatter = formatter
    logger.addHandler(console_handler)
    if log_dir:
        filename = join(log_dir, 'testcube.log')
        if debug:  # use single file when debug
            file_handler = logging.FileHandler(filename)
        else:
            file_handler = RotatingFileHandler(filename=filename,
                                               maxBytes=10 * 1024 * 1024,
                                               backupCount=5)
        # fixed: the rotating handler previously never received the formatter
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)
    logger.setLevel(logging.DEBUG if debug else logging.INFO)
    return logger
def append_json(origin_txt, field, value):
    """Set *field* in the JSON text; existing values get '|*|' + value appended."""
    obj = to_json(origin_txt)
    if field not in obj:
        obj[field] = value
    else:
        obj[field] += '|*|' + value
    return json.dumps(obj)
def to_json(data_text):
    """Parse *data_text* as JSON; log and return {} on failure."""
    try:
        return json.loads(data_text)
    except Exception:  # fixed: bare except also trapped SystemExit/KeyboardInterrupt
        from testcube.settings import logger
        logger.exception('Cannot parse to json: {}'.format(data_text))
        return {}
def object_to_dict(obj):
    """Return obj.__dict__ minus private (underscore-prefixed) attributes."""
    public_items = ((key, val) for key, val in obj.__dict__.items()
                    if not key.startswith('_'))
    return dict(public_items)
def error_detail(e):
    """Return *e* formatted as 'ExceptionType: message'."""
    return '%s: %s' % (type(e).__name__, e)
|
13,999 | 5efd5032e24861e52800fa1f6d419a527cc103dd | #!/usr/bin/env python
# -*- coding:utf-8 -*-
from AllList import ObverseList_285 as ObverseList
from AllList import ReverseList_285 as ReverseList
def mul(a, b):
    """GF(2^8) multiplication via the log/antilog tables (zero absorbs everything)."""
    if a == 0 or b == 0:
        return 0
    log_sum = (ReverseList[a] + ReverseList[b]) % 255
    return ObverseList[log_sum]
def div(a, b):
    """GF(2^8) division via the log/antilog tables.

    NOTE(review): division by zero (b == 0) is not guarded — ReverseList[b]
    would be consulted for b == 0; confirm callers never pass b == 0.
    """
    if a == 0:
        return 0
    c = ObverseList[(ReverseList[a] - ReverseList[b]) % 255]
    return c
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.