text stringlengths 38 1.54M |
|---|
from collections import defaultdict
import numpy as np
from math import floor
from numpy import random
class Rule:
    """A single classifier rule: per-feature upper-bound conditions plus a result.

    A rule matches a data vector when every condition is >= the corresponding
    data value.  Accuracy (correct / matched) and fitness (accuracy squared)
    are cached and recomputed lazily whenever either counter changes.
    """

    def __init__(self, conditions, result):
        self.conditions = conditions  # per-feature upper-bound thresholds
        self.result = result          # class label this rule predicts
        self.numerosity = 1           # how many absorbed duplicates this rule stands for
        self.__match_count = 0
        self.__correct_count = 0
        self.accuracy_value = None    # cached accuracy; None means stale
        self.fitness_value = None     # cached fitness; None means stale

    @property
    def match_count(self):
        return self.__match_count

    @match_count.setter
    def match_count(self, count):
        self.__match_count = count
        self._invalidate()

    @property
    def correct_count(self):
        return self.__correct_count

    @correct_count.setter
    def correct_count(self, count):
        # BUG FIX: previously only match_count invalidated the caches, so
        # bumping correct_count after match_count left a stale accuracy/fitness.
        self.__correct_count = count
        self._invalidate()

    def _invalidate(self):
        """Drop cached accuracy/fitness so they are recomputed on next access."""
        self.accuracy_value = None
        self.fitness_value = None

    def accuracy(self):
        """Fraction of matches that were correct; 0.0 for a never-matched rule."""
        if self.accuracy_value is None:
            # BUG FIX: guard against ZeroDivisionError for a fresh rule.
            if self.__match_count == 0:
                self.accuracy_value = 0.0
            else:
                self.accuracy_value = self.__correct_count / self.__match_count
        return self.accuracy_value

    def fitness(self):
        """Fitness is accuracy squared (emphasises accurate rules)."""
        if self.fitness_value is None:
            self.fitness_value = self.accuracy() ** 2
        return self.fitness_value

    def mutate(self, data, mutation_rate=0.2):
        """Randomly copy data values into this rule's conditions."""
        for i in range(len(data)):
            if random.random() < mutation_rate:
                self.conditions[i] = data[i]

    def matches(self, data):
        """True when every condition is an upper bound for the data vector."""
        return all(self.conditions[i] >= data[i] for i in range(len(data)))

    def subsumes(self, rule):
        """Absorb `rule` (add its numerosity) if self is at least as accurate
        and at least as general; return True when absorbed."""
        if self == rule or rule.accuracy() > self.accuracy():
            return False
        if any(self.conditions[i] < rule.conditions[i] for i in range(len(self.conditions))):
            return False
        self.numerosity += rule.numerosity
        return True
def crossover(rule1: Rule, rule2: Rule):
    """Uniform crossover: each differing condition is swapped with p=0.5."""
    n = len(rule1.conditions)
    for idx in range(n):
        left, right = rule1.conditions[idx], rule2.conditions[idx]
        if left != right and random.random() < 0.5:
            rule1.conditions[idx], rule2.conditions[idx] = right, left
def get_matching_rules(rules, data, answer):
    """Split the rules that match `data` into (correct, incorrect) by result."""
    correct, incorrect = [], []
    for rule in rules:
        if not rule.matches(data):
            continue
        if rule.result == answer:
            correct.append(rule)
        else:
            incorrect.append(rule)
    return correct, incorrect
def cover(data, answer, covering_rate=0.3):
    """Create a covering rule for `data`.

    Roughly `covering_rate` of the conditions keep the data value (factor 1),
    the rest are widened by 1000x; one factor of 1 is guaranteed before the
    mask is shuffled.  (Local renamed from `cover`, which shadowed this
    function's own name.)
    """
    mask = [1 if random.random() < covering_rate else 1000
            for _ in range(len(data) - 1)]
    mask.append(1)
    random.shuffle(mask)
    return Rule(np.multiply(data, mask), answer)
def update_rule_params(correct_match, incorrect_match):
    """Bump match counters; correct matches also gain a correct count."""
    for rule in incorrect_match:
        rule.match_count += 1
    for rule in correct_match:
        rule.match_count += 1
        rule.correct_count += 1
def subsume(rules):
    """Return the rules that no other rule in `rules` subsumes.

    Note: subsumes() mutates the absorber's numerosity as a side effect,
    so the short-circuit on first absorber is preserved.
    """
    survivors = []
    for candidate in rules:
        absorbed = False
        for other in rules:
            if other.subsumes(candidate):
                absorbed = True
                break
        if not absorbed:
            survivors.append(candidate)
    return survivors
def evolve(data, rules, tournament_size=5):
    """Create two offspring via tournament selection, mutation and crossover.

    Returns an empty list when there are too few rules for a tournament
    (callers only .extend() with the result, so [] replaces the old set()).
    Offspring start with one (matched, correct) experience.
    """
    if len(rules) < tournament_size:
        return []
    parent1 = max(random.choice(rules, tournament_size), key=lambda r: r.fitness())
    parent2 = max(random.choice(rules, tournament_size), key=lambda r: r.fitness())
    offspring1 = Rule(list(parent1.conditions), parent1.result)
    offspring2 = Rule(list(parent2.conditions), parent2.result)
    offspring1.mutate(data)
    offspring2.mutate(data)
    crossover(offspring1, offspring2)
    offspring1.mutate(data)
    offspring2.mutate(data)
    # BUG FIX: correct_count must be set before match_count -- the
    # match_count setter caches accuracy/fitness, so the old order froze
    # every offspring's fitness at 0.
    offspring1.correct_count = 1
    offspring1.match_count = 1
    offspring2.correct_count = 1
    offspring2.match_count = 1
    return [offspring1, offspring2]
def deletion(rules, max_count=None):
    """Trim the weakest rules (in place) until total numerosity <= max_count.

    `max_count` defaults to the module-level `max_rules_count` (set in the
    __main__ block) so existing callers keep working; passing it explicitly
    makes the function usable without that global.
    Returns the (mutated) rules list.
    """
    if max_count is None:
        max_count = max_rules_count  # global configured in __main__
    rules.sort(key=lambda r: r.fitness(), reverse=True)
    count = sum(r.numerosity for r in rules)
    while count > max_count:
        # Rules are sorted best-first, so the tail is always the weakest.
        count -= rules[-1].numerosity
        del rules[-1]
    return rules
def train(training_data):
    """Train a rule population on (data, answer) pairs and return it.

    Per entry: match rules, cover when nothing predicts the right answer,
    update counters, subsume duplicates, breed offspring, trim the population.
    """
    rules = []
    count = 0
    for data, answer in training_data:
        count += 1
        if count % 100 == 0:
            print("processed ", count, "entries")
        correct_rules, incorrect_rules = get_matching_rules(rules, data, answer)
        if len(correct_rules) == 0:
            # Covering: no rule predicts the right answer, so synthesize one.
            new_rule = cover(data, answer)
            rules.append(new_rule)
            correct_rules = {new_rule}
        update_rule_params(correct_rules, incorrect_rules)
        rules = subsume(rules)
        rules.extend(evolve(data, correct_rules))
        rules = deletion(rules)
    # Clamp very large condition values; presumably caps conditions inflated
    # by cover()'s 1000x widening factor -- TODO confirm the 5.0/6.0 choice.
    for rule in rules:
        for i in range(len(rule.conditions)):
            if rule.conditions[i] > 5.0:
                rule.conditions[i] = 6.0
    return rules
def test(test_data, rules):
correct_answer_count = 0
count = 0
for data, answer in test_data:
count += 1
matching_rules = list(filter(lambda r: r.matches(data), rules))
result = defaultdict(float)
for rule in matching_rules:
result[rule.result] += rule.numerosity * rule.fitness()
if len(result) == 0:
continue
if max(result.keys(), key=lambda r: result[r]) == answer:
correct_answer_count += 1
if count % 100 == 0:
print("Accuracy", correct_answer_count / count)
return correct_answer_count / count
def read_data(file, training_rate=0.7):
    """Load a comma-separated dataset whose last column is the label.

    Returns (training_data, test_data): lists of (features, label) pairs
    split at floor(len * training_rate).  Rows are NOT shuffled.
    """
    with open(file) as handle:
        rows = [line.strip().split(',') for line in handle]
    features = [[float(value) for value in row[:-1]] for row in rows]
    labels = [row[-1] for row in rows]
    cutoff = floor(len(features) * training_rate)
    training = list(zip(features[:cutoff], labels[:cutoff]))
    testing = list(zip(features[cutoff:], labels[cutoff:]))
    return training, testing
if __name__ == "__main__":
    # Seed numpy's RNG (used throughout this module) for reproducible runs.
    random.seed(1)
    # Global cap on total rule numerosity, read by deletion().
    max_rules_count = 500
    training_data, test_data = read_data('data4.txt')
    rules = train(training_data)
    accuracy = test(test_data, rules)
    print("Accuracy", accuracy)
|
from django.conf import settings
from django.core.mail import send_mail
# Import the Celery application class.
from celery import Celery
# The two lines below are needed on the side that starts the worker:
# they initialise the Django environment the tasks depend on.
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dailyfresh.settings")
# Create the Celery application, using Redis database 5 as the broker.
app = Celery('celery_tasks.tasks', broker='redis://127.0.0.1:6379/5')
# Task definition.
@app.task
def send_register_active_email(to_email, username, token):
    """Send the account-activation email for a newly registered user."""
    # Compose the email content.
    subject = '天天生鲜欢迎信息'
    message = ''
    sender = settings.EMAIL_FROM
    print(sender)  # NOTE(review): debug leftover -- consider logging instead
    receiver = [to_email]
    html_message = '<h1>%s, 欢迎您成为天天生鲜注册会员</h1>请点击以下链接激活您的账户<br/><a href="http://127.0.0.1:8000/user/active/%s">http://127.0.0.1:8000/user/active/%s</a>' % (
        username, token, token)
    # The activation email embeds the activation link: /user/active/<token>
    send_mail(subject, message, sender, receiver, html_message=html_message)
|
from . import result
from .base import Base
from .json_type import JSON
from sqlalchemy import Column, UniqueConstraint
from sqlalchemy import ForeignKey, Integer, Text
from sqlalchemy.orm import relationship, backref
from sqlalchemy.orm.session import object_session
from sqlalchemy.orm.exc import NoResultFound
from ..exceptions import MissingResultError
from ptero_common import nicer_logging
__all__ = ['InputSource']
LOG = nicer_logging.getLogger(__name__)
class InputSource(Base):
    """Edge describing where a task input comes from: maps one output
    property of a source task to one input property of a destination task."""

    __tablename__ = 'input_source'
    __table_args__ = (
        UniqueConstraint('destination_id', 'destination_property'),
    )

    id = Column(Integer, primary_key=True)
    source_id = Column(Integer, ForeignKey('task.id', ondelete='CASCADE'),
                       index=True, nullable=False)
    destination_id = Column(Integer, ForeignKey('task.id', ondelete='CASCADE'),
                            index=True, nullable=False)
    source_property = Column(Text, nullable=False, index=True)
    destination_property = Column(Text, nullable=False, index=True)
    parallel_depths = Column(JSON, nullable=False)
    source_task = relationship('Task', foreign_keys=[source_id])
    destination_task = relationship('Task', backref=backref('input_sources',
                                    passive_deletes='all'),
                                    foreign_keys=[destination_id])
    workflow_id = Column(Integer, ForeignKey('workflow.id', ondelete='CASCADE'),
                         nullable=False, index=True)
    workflow = relationship('Workflow', foreign_keys=[workflow_id],
                            backref=backref('all_input_sources', passive_deletes='all'))

    def parallel_indexes(self, colors, begins):
        """Translate colors/begins into per-depth parallel indexes,
        stopping at the first depth not covered by `colors`."""
        indexes = []
        for depth in self.parallel_depths:
            if depth >= len(colors):
                return indexes
            indexes.append(colors[depth] - begins[depth])
        return indexes

    def _source_result(self, colors):
        """Fetch the unique Result row for this source, translating a bare
        NoResultFound into the project's MissingResultError."""
        s = object_session(self)
        try:
            return s.query(result.Result
                ).filter_by(task=self.source_task, name=self.source_property
                ).filter(result.Result.color.in_(colors)).one()
        except NoResultFound:
            raise MissingResultError("No result found for task (%s:%s) with "
                "name (%s) and color one of %s" % (
                    self.source_task.name, self.source_task.id,
                    self.source_property, str(colors)))

    def get_data(self, colors, begins):
        """Return the source result's data sliced by the parallel indexes."""
        r = self._source_result(colors)
        return r.get_data(self.parallel_indexes(colors, begins))

    def get_size(self, colors, begins):
        """Return the source result's size for the parallel indexes.

        CONSISTENCY FIX: previously this duplicated get_data's query without
        the NoResultFound handling, leaking NoResultFound to callers instead
        of MissingResultError.
        """
        r = self._source_result(colors)
        return r.get_size(self.parallel_indexes(colors, begins))
|
# -*- coding:utf-8 -*-
import sys
from probs import *
class losHelper:
    """Interactive helper: asks for a session cookie, then dispatches the
    chosen problem solver (imported via `from probs import *`)."""

    # Session cookie handed to every problem solver.
    cookie = ""

    def prob_choose(self):
        """Prompt for a problem name and invoke the matching solver.

        The name is looked up in the module globals; an unknown name raises
        KeyError, which is reported and exits the program.
        """
        try:
            print("문제의 이름을 입력하세요. : ")
            prob = sys.stdin.readline().split()
            prob = ''.join(prob)
            globals()[prob](self.cookie)
        except KeyError as e:
            print("없는 문제를 입력하셨습니다.")
            sys.exit()

    def __init__(self):
        # Read the session cookie from stdin, stripping all whitespace.
        print("현재 세션 쿠기 값을 입력하세요. : ")
        cookie_data = sys.stdin.readline().split()
        cookie_data = ''.join(cookie_data)
        # self.headers['User-Agent'] = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/75.0.3770.142 Safari/537.36"
        self.cookie = cookie_data
if __name__ == "__main__":
    # Construct the helper (reads the cookie) and run one problem selection.
    helper = losHelper()
    prob = helper.prob_choose()
|
import testFIXParser
import testFIXSpec
import testFields
import testFIXSession
import testRejects
import testIntermittentGaps
import unittest
def test_suite():
    """Aggregate every test module's suite into a single unittest.TestSuite."""
    modules = (
        testFIXParser,
        testFIXSpec,
        testFields,
        testFIXSession,
        testRejects,
        testIntermittentGaps,
    )
    return unittest.TestSuite(module.test_suite() for module in modules)
if __name__=='__main__':
    # Run the aggregated suite when executed directly.
    unittest.main(defaultTest = 'test_suite' )
|
# Py2/Py3 compatibility: StringIO moved into the io module in Python 3.
try:
    from StringIO import StringIO
except ImportError:
    from io import StringIO

# Pipe-delimited sample table: Id|Name|Divisor|Description.
text = StringIO("""Id|Name|Divisor|Description
1|Cognitive|6|NULL
2|Emotional|6|NULL
3|Physical|8|NULL
4|Financial|5|NULL""")
|
from django.shortcuts import render, redirect
from django.core.urlresolvers import reverse
from .models import Author, Review, Book
from ..login_register .models import User
# Create your views here.
def index(request):
    """Books landing page: current user plus the 3 most recently reviewed books."""
    # NOTE(review): assumes 'user' is always present in the session -- a
    # logged-out visitor would raise KeyError; confirm a login guard exists.
    user_obj = User.objects.get(id=request.session['user'])
    book_obj = Book.objects.all().order_by('-review__created_at')[:3]
    context = {
        'user': user_obj,
        'books': book_obj
    }
    return render(request, 'books/index.html', context)
def show(request, id):
    """Render the detail page for a single book."""
    book = Book.objects.get(id=id)
    return render(request, 'books/show.html', {'book': book})
def edit(request, id):
    # NOTE(review): `id` is accepted but unused and no book is passed to the
    # template -- looks unfinished; confirm intended behaviour.
    return render(request, 'books/edit.html')
def new(request):
    """Render the new-book form with the list of distinct authors."""
    author_list = Author.objects.all().distinct()
    return render(request, 'books/new.html', {'authors': author_list})
def new_review(request):
    """Create a review for the posted book, then return to the index."""
    book_obj = Book.objects.get(id=request.POST['book_id'])
    # validate_review persists the review as a side effect; its return value
    # was previously bound to an unused local.
    Review.objects.validate_review(request, book_obj)
    return redirect(reverse('books:index'))
def process_new_book(request):
    """Validate author, book and review from the POST data.

    Any validation failure (or a non-POST request) redirects back to the
    form; full success redirects to the index.

    BUG FIXES: a non-POST request previously fell through without returning
    an HttpResponse, and the success branch redirected back to 'books:new'
    instead of the index (the final index redirect was unreachable).
    """
    if request.method != "POST":
        return redirect(reverse('books:new'))
    created_author = Author.objects.validate_author(request)
    if not created_author:
        return redirect(reverse('books:new'))
    created_book = Book.objects.validate_book(request, created_author)
    if not created_book:
        return redirect(reverse('books:new'))
    created_review = Review.objects.validate_review(request, created_book)
    if not created_review:
        return redirect(reverse('books:new'))
    return redirect(reverse('books:index'))
def delete_review(request, id):
    """Delete the review with the given id and go back to the index."""
    review = Review.objects.get(id=id)
    review.delete()
    return redirect(reverse('books:index'))
|
# -*- coding: utf-8 -*-
"""Common PageObject actions."""
import hashlib
import io
import logging
import random
import string
import sys
import time
import traceback
import uuid
from collections import defaultdict, namedtuple
from contextlib import contextmanager
from io import BytesIO
from types import FunctionType, ModuleType
from typing import Dict, List, NamedTuple, Union
from urllib.parse import urlparse
import requests
from behave.runner import Context
from retrying import retry
from selenium.common.exceptions import (
ElementClickInterceptedException,
ElementNotInteractableException,
NoSuchElementException,
TimeoutException,
WebDriverException,
)
from selenium.webdriver import ActionChains
from selenium.webdriver.common.by import By
from selenium.webdriver.remote.webdriver import WebDriver
from selenium.webdriver.remote.webelement import WebElement
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
import allure
from directory_tests_shared import URLs
from directory_tests_shared.exceptions import PageLoadTimeout, UnexpectedElementPresent
from directory_tests_shared.settings import (
BARRED_USERS,
BASICAUTH_PASS,
BASICAUTH_USER,
BROWSER,
TAKE_SCREENSHOTS,
TEST_EMAIL_DOMAIN,
)
from directory_tests_shared.utils import access_was_denied, extract_attributes_by_css
from pages import ElementType
from PIL import Image
# Top-level container for per-scenario state; `actors` maps alias -> Actor.
ScenarioData = namedtuple("ScenarioData", ["actors"])
class Actor(NamedTuple):
    """Immutable state for one test persona; fields change via _replace()."""

    alias: str = None
    email: str = None
    password: str = None
    company_name: str = None
    article_category: str = None
    visited_articles: list = None  # initialised to [] and appended to in update_actor()
    case_study_title: str = None
    email_confirmation_link: str = None
    email_confirmation_code: str = None
    registered: str = None
    visited_page: ModuleType = None  # Page Object module, see get_last_visited_page()
    last_tag: str = None
    forms_data: dict = None  # keyed "SERVICE - NAME - TYPE", see update_actor_forms_data()
    saved_progress_link: str = None

    def __str__(self) -> str:
        return self.alias or self.__class__.__name__

    __repr__ = __str__
class Selector(NamedTuple):
    """Declarative description of how to locate one element on a page."""

    by: By
    value: str
    in_desktop: bool = True
    in_mobile: bool = True
    in_horizontal: bool = True
    type: ElementType = None
    is_visible: bool = True  # skip visibility assertions when False
    group_id: str = None
    autocomplete_callback: FunctionType = None
    wait_after_click: bool = True  # wait for a page load after clicking
    next_page: ModuleType = None
    alternative_visibility_check: bool = False  # use location/size heuristic instead
    disabled: bool = False  # element is expected to be disabled
    name: str = None

    def __str__(self) -> str:
        return self.name or self.__class__.__name__

    __repr__ = __str__
def go_to_url(driver: WebDriver, url: str, page_name: str):
    """Go to the specified URL and dismiss the cookie banner.

    (The old docstring claimed a screenshot was taken -- it is not.)
    `page_name` is currently unused; kept for call-site compatibility.
    """
    driver.get(url)
    accept_all_cookies(driver)
def check_url(driver: WebDriver, expected_url: str, *, exact_match: bool = True):
    """Check if current page URL matches the expected one.

    With exact_match=False, a substring relation in either direction counts.
    """
    with assertion_msg(
        f"Expected page URL to be: '{expected_url}' but got '{driver.current_url}'"
    ):
        if exact_match:
            assert driver.current_url == expected_url
        else:
            assert (driver.current_url in expected_url) or (
                expected_url in driver.current_url
            )
    logging.debug(f"Current page URL matches expected '{driver.current_url}'")
def check_title(driver: WebDriver, expected_title: str, *, exact_match: bool = False):
    """Check if current page title matches the expected one.

    Comparison is case-insensitive; by default a substring match suffices.
    """
    with assertion_msg(
        f"Expected page title to be: '{expected_title}' but got '{driver.title}' on {driver.current_url}"
    ):
        if exact_match:
            assert expected_title.lower() == driver.title.lower()
        else:
            assert expected_title.lower() in driver.title.lower()
    logging.debug(
        f"Page title on '{driver.current_url}' matches expected '{expected_title}'"
    )
def check_for_expected_sections_elements(
    driver: WebDriver, sections: Dict[str, Dict[str, Selector]]
):
    """Check if all elements in page sections are visible.

    `sections` maps section name -> {element name -> Selector}; selectors
    flagged is_visible=False are located but not asserted on.
    """
    for section in sections:
        for element_name, selector in sections[section].items():
            if not isinstance(selector, Selector):
                raise TypeError(
                    f"Expected '{selector}' to be a Selector, got {type(selector)}"
                )
            element = find_element(driver, selector, element_name=element_name)
            if not selector.is_visible:
                # Element is expected to exist but not to be displayed.
                logging.debug(f"Skipping '{element_name} as it's marked as invisible'")
                continue
            with assertion_msg(
                f"It looks like '{element_name}' element in '{section}' section is not visible on {driver.current_url}"
            ):
                assert element.is_displayed()
    logging.debug(f"All expected elements are visible on '{driver.current_url}'")
def find_and_click_on_page_element(
    driver: WebDriver, sections: dict, element_name: str, *, wait_for_it: bool = True
):
    """Find page element in any page section selectors and click on it.

    Links with target="_blank" are instead followed in the current tab so
    the driver keeps its window context.  Raises AssertionError when no
    section defines `element_name`.
    """
    found_selector = False
    for section_name, selectors in sections.items():
        if element_name.lower() in selectors:
            found_selector = True
            selector = selectors[element_name.lower()]
            logging.debug(
                f"Found selector for '{element_name}' in '{section_name}' section: "
                f"'{selector}'"
            )
            web_element = find_element(
                driver, selector, element_name=element_name, wait_for_it=wait_for_it
            )
            check_if_element_is_visible(web_element, element_name)
            if web_element.get_attribute("target") == "_blank":
                # Force same-tab navigation instead of opening a new tab.
                logging.debug(
                    f"'{web_element.text}' opens in new tab, but will "
                    f"forcefully open it in the same one"
                )
                with wait_for_page_load_after_action(driver):
                    href = web_element.get_attribute("href")
                    driver.get(href)
            else:
                scroll_to(driver, web_element)
                if selector.wait_after_click:
                    with wait_for_page_load_after_action(driver, timeout=10):
                        with try_alternative_click_on_exception(driver, web_element):
                            web_element.click()
                else:
                    with try_alternative_click_on_exception(driver, web_element):
                        web_element.click()
    with assertion_msg(f"Could not find '{element_name}' in any section"):
        assert found_selector
def initialize_scenario_data() -> ScenarioData:
    """Return a fresh ScenarioData holding an empty actors registry."""
    empty_actors = {}
    return ScenarioData(actors=empty_actors)
def unauthenticated_actor(alias: str) -> Actor:
    """Create an instance of an unauthenticated Actor.

    Will:
     * derive a unique email from the alias + a UUID (dashes/spaces removed)
     * generate a random password (10 letters + 10 digits) for later
       registration or signing-in.
    """
    email = (
        f"test+{alias}{str(uuid.uuid4())}@{TEST_EMAIL_DOMAIN}".replace("-", "")
        .replace(" ", "")
        .lower()
    )
    letters = "".join(random.choice(string.ascii_letters) for _ in range(10))
    digits = "".join(random.choice(string.digits) for _ in range(10))
    password = f"{letters}{digits}"
    return Actor(alias=alias, email=email, password=password, visited_articles=[])
def barred_actor(alias: str) -> Actor:
    """Create an unauthenticated actor whose email is one of the barred users."""
    fresh = unauthenticated_actor(alias)
    return fresh._replace(email=random.choice(BARRED_USERS))
def add_actor(context: Context, actor: Actor):
    """Register `actor` in the scenario data under its alias."""
    assert isinstance(actor, Actor), (
        f"Expected Actor named tuple but got '{type(actor)}' instead"
    )
    context.scenario_data.actors[actor.alias] = actor
    logging.debug(f"Successfully added actor: {actor.alias} to Scenario Data")
def get_actor(context: Context, alias: str) -> Actor:
    """Get actor details from context Scenario Data; None when unknown."""
    return context.scenario_data.actors.get(alias)
def get_last_visited_page(context: Context, actor_alias: str) -> ModuleType:
    """Get the last visited Page Object (a module) from Scenario Data."""
    actor = context.scenario_data.actors.get(actor_alias)
    # NOTE: assert is acceptable here -- this is test-suite plumbing.
    assert actor, f"Check your scenario. There's no such actor as: {actor_alias}"
    return actor.visited_page
def get_full_page_name(page: ModuleType, *, page_sub_type: str = None) -> str:
    """Build the 'SERVICE - NAME [(sub type)] - TYPE' label for a Page Object."""
    middle = f"{page.NAME} ({page_sub_type})" if page_sub_type else page.NAME
    return f"{page.SERVICE.value} - {middle} - {page.TYPE.value}"
def update_actor(context: Context, alias: str, **kwargs):
    """Update Actor's details stored in context.scenario_data.

    Keys that are not Actor fields are silently ignored.  List-valued fields
    are appended to (mutating the existing list) rather than replaced.
    """
    actors = context.scenario_data.actors
    for arg in kwargs:
        if arg in Actor._fields:
            if isinstance(getattr(actors[alias], arg), list):
                logging.debug(f"Appended '{kwargs[arg]}' to '{arg}' for {alias}")
                new_value = getattr(actors[alias], arg)
                new_value.append(kwargs[arg])
            else:
                logging.debug(f"Set '{arg}'='{kwargs[arg]}' for {alias}")
                new_value = kwargs[arg]
            actors[alias] = actors[alias]._replace(**{arg: new_value})
    logging.debug(f"Successfully updated {alias}'s details: {actors[alias]}")
def update_actor_forms_data(context: Context, actor: Actor, form_data: dict):
    """Record the form data submitted on the actor's current page."""
    actor_forms_data = actor.forms_data
    page = actor.visited_page
    if not actor_forms_data:
        # defaultdict() without a factory behaves like a plain dict here.
        actor_forms_data = defaultdict()
    form_data_key = f"{page.SERVICE} - {page.NAME} - {page.TYPE}"
    actor_forms_data[form_data_key] = form_data
    update_actor(context, actor.alias, forms_data=actor_forms_data)
def avoid_browser_stack_idle_timeout_exception(driver: WebDriver):
    """BrowserStack will stop browser session after 90s of inactivity.

    In order to avoid it, this helper will generate random events, like scrolling
    """
    actions = {
        "scroll up": "window.scrollBy(0,-1000);",
        "scroll down": "window.scrollBy(0,1000);",
        "click on body": "document.querySelector('body').click();",
        "scroll to random link": "window.scrollTo(0, document.querySelectorAll('a')[Math.floor(Math.random()*document.querySelectorAll('a').length)].offsetTop);",  # noqa
    }
    # Pick one JS snippet at random and execute it in the page.
    action = random.choice(list(actions.keys()))
    message = f"Trigger '{action}' event to avoid 'Idle Timeout exception'"
    logging.debug(message)
    driver.execute_script(actions[action])
def convert_png_to_jpg(screenshot_png: bytes):
    """Re-encode PNG screenshot bytes as JPEG (quality 90) and return them."""
    rgb_image = Image.open(io.BytesIO(screenshot_png)).convert("RGB")
    with BytesIO() as buffer:
        rgb_image.save(buffer, format="JPEG", quality=90)
        return buffer.getvalue()
def fullpage_screenshot(driver):
    """A fullscreen screenshot workaround for Chrome driver:

    This script uses a simplified version of the one here:
    https://snipt.net/restrada/python-selenium-workaround-for-full-page-screenshot-using-chromedriver-2x/
    It contains the *crucial* correction added in the comments by Jason Coutu.
    SRC: https://stackoverflow.com/q/41721734
    Returns the stitched page as JPEG bytes.
    """
    logging.debug("Starting chrome full page screenshot workaround ...")
    total_width = driver.execute_script("return document.body.offsetWidth")
    total_height = driver.execute_script("return document.body.parentNode.scrollHeight")
    viewport_width = driver.execute_script("return document.body.clientWidth")
    viewport_height = driver.execute_script("return window.innerHeight")
    logging.debug(
        f"Total: ({total_width}, {total_height}), "
        f"Viewport: ({viewport_width},{viewport_height})"
    )
    # Tile the page into viewport-sized rectangles (left, top, right, bottom).
    rectangles = []
    i = 0
    while i < total_height:
        ii = 0
        top_height = i + viewport_height
        if top_height > total_height:
            top_height = total_height
        while ii < total_width:
            top_width = ii + viewport_width
            if top_width > total_width:
                top_width = total_width
            logging.debug(f"Appending rectangle ({ii},{i},{top_width},{top_height})")
            rectangles.append((ii, i, top_width, top_height))
            ii = ii + viewport_width
        i = i + viewport_height
    stitched_image = Image.new("RGB", (total_width, total_height))
    previous = None
    part = 0
    for rectangle in rectangles:
        # Scroll the current tile into view before capturing it (the first
        # tile is already in view, hence the `previous` check).
        if previous is not None:
            driver.execute_script(f"window.scrollTo({rectangle[0]}, {rectangle[1]})")
            logging.debug(f"Scrolled To ({rectangle[0]},{rectangle[1]})")
            time.sleep(0.2)
        screenshot_png = driver.get_screenshot_as_png()
        screenshot = Image.open(io.BytesIO(screenshot_png))
        # The last row scrolls less than a full viewport; paste accordingly.
        if rectangle[1] + viewport_height > total_height:
            offset = (rectangle[0], total_height - viewport_height)
        else:
            offset = (rectangle[0], rectangle[1])
        logging.debug(
            f"Adding to stitched image with offset ({offset[0]}, {offset[1]})"
        )
        stitched_image.paste(screenshot, offset)
        del screenshot
        part = part + 1
        previous = rectangle
    logging.debug("Finishing chrome full page screenshot workaround...")
    with BytesIO() as f:
        stitched_image.save(f, format="JPEG", quality=90)
        return f.getvalue()
@retry(stop_max_attempt_number=3)
def take_screenshot(driver: WebDriver, page_name: str = None):
    """Take a JPEG screenshot of the current page and attach it to the
    Allure report.  No-op unless TAKE_SCREENSHOTS is enabled.

    BUG FIX: with a browser other than firefox/chrome, `screenshot_jpg`
    was referenced without ever being assigned (NameError); such browsers
    now fall back to Selenium's standard viewport screenshot.
    """
    if not TAKE_SCREENSHOTS:
        logging.debug(
            f"Taking screenshots is disabled. In order to turn it on "
            f"please set an environment variable TAKE_SCREENSHOTS=true"
        )
        return
    original_size = None
    if BROWSER == "firefox":
        # Firefox: resize the window to the full document so the <body>
        # screenshot covers the whole page.
        # Ref: https://stackoverflow.com/a/52572919/
        original_size = driver.get_window_size()
        required_width = driver.execute_script(
            "return document.body.parentNode.scrollWidth"
        )
        required_height = driver.execute_script(
            "return document.body.parentNode.scrollHeight"
        )
        driver.set_window_size(required_width, required_height)
        element = driver.find_element_by_tag_name("body")
        screenshot_jpg = convert_png_to_jpg(element.screenshot_as_png)
    elif BROWSER == "chrome":
        screenshot_jpg = fullpage_screenshot(driver)
    else:
        # Fallback for any other browser: plain viewport screenshot.
        screenshot_jpg = convert_png_to_jpg(driver.get_screenshot_as_png())
    if page_name:
        page_name = page_name.lower().replace(" ", "_")[0:200]
    allure.attach(
        screenshot_jpg,
        name=page_name or "screenshot.jpg",
        attachment_type=allure.attachment_type.JPG,
    )
    if original_size is not None:
        # Restore the pre-screenshot window size (firefox path only).
        driver.set_window_size(original_size["width"], original_size["height"])
@contextmanager
def assertion_msg(message: str, *args):
    """This will:
     * print the custom assertion message (with optional %-args applied)
     * print the traceback (stack trace)
     * raise the original AssertionError exception
    """
    try:
        yield
    except AssertionError as e:
        if args:
            message = message % args
        logging.error(message)
        e.args += (message,)
        _, _, tb = sys.exc_info()
        # With a 'shallow' traceback, walk the outer frames to show where
        # the assertion actually originated.
        if len(sys._current_frames()) == 1:
            print(f"Found 'shallow' Traceback, will inspect outer traceback frames")
            import inspect
            for f in inspect.getouterframes(sys._getframe(0)):
                print(f"{f.filename} +{f.lineno} - in {f.function}")
                if "_def.py" in f.filename:
                    break
        traceback.print_tb(tb)
        raise
@contextmanager
def selenium_action(driver: WebDriver, message: str, screenshot: bool = True, *args):
    """This will:
     * print the custom assertion message
     * print the traceback (stack trace)
     * optionally take a screenshot before re-raising

    :raises WebDriverException or NoSuchElementException
    """
    try:
        yield
    except (WebDriverException, NoSuchElementException, TimeoutException) as e:
        # Prefix the message with browser/session details to aid debugging.
        browser = driver.capabilities.get("browserName", "unknown browser")
        version = driver.capabilities.get("version", "unknown version")
        platform = driver.capabilities.get("platform", "unknown platform")
        session_id = driver.session_id
        info = "[{} v:{} os:{} session_id:{}]".format(
            browser, version, platform, session_id
        )
        if args:
            message = message % args
        print(f"{info} - {message}")
        logging.debug(f"{info} - {message}")
        e.args += (message,)
        _, _, tb = sys.exc_info()
        traceback.print_tb(tb)
        if screenshot:
            take_screenshot(driver, message)
        raise
def wait_for_element_visibility(
    driver: WebDriver, element: WebElement, *, time_to_wait: int = 3
):
    """Wait until the given (already located) element is visible."""
    # NOTE(review): element.tag_name triggers a WebDriver round-trip just to
    # build the failure message.
    with selenium_action(
        driver,
        (
            f"({element.tag_name}) Element identified by '{element}' was not visible after waiting "
            f"for {time_to_wait} seconds"
        ),
    ):
        WebDriverWait(driver, time_to_wait).until(
            expected_conditions.visibility_of(element)
        )
def wait_for_visibility(
    driver: WebDriver, selector: Selector, *, time_to_wait: int = 5
):
    """Block until the element located by `selector` becomes visible."""
    locator = (selector.by, selector.value)
    failure_message = (
        f"Element identified by '{selector.value}' was not visible after waiting "
        f"for {time_to_wait} seconds"
    )
    with selenium_action(driver, failure_message):
        waiter = WebDriverWait(driver, time_to_wait)
        waiter.until(expected_conditions.visibility_of_element_located(locator))
def check_if_element_is_not_present(
    driver: WebDriver, selector: Selector, *, element_name: str = ""
):
    """Assert that no element matches the selector."""
    try:
        driver.find_element(by=selector.by, value=selector.value)
        found = True
    except NoSuchElementException:
        found = False
    with assertion_msg(
        f"Expected not to find '{element_name}' element identified by '{selector.value}' on {driver.current_url}"
    ):
        assert not found
def is_element_present(driver: WebDriver, selector: Selector) -> bool:
    """Check if sought element is present.

    CLEANUP: find_elements() returns an empty list rather than raising
    NoSuchElementException, so the old except-branch was dead code;
    presence reduces to a non-empty result.
    """
    elements = driver.find_elements(by=selector.by, value=selector.value)
    if elements:
        logging.debug(f"Found following elements: {elements}")
    return bool(elements)
def check_if_element_is_visible(web_element: WebElement, element_name: str):
    """Assert that the given web element is displayed."""
    error = f"Expected to see '{element_name}' element but it's not visible"
    with assertion_msg(error):
        assert web_element.is_displayed()
def check_if_element_is_not_visible(
    driver: WebDriver,
    selector: Selector,
    *,
    element_name: str = "",
    wait_for_it: bool = True,
    take_screenshot: bool = True,
):
    """Assert the element is either absent or present but not displayed."""
    try:
        element = find_element(
            driver,
            selector,
            element_name=element_name,
            wait_for_it=wait_for_it,
            take_screenshot=take_screenshot,
        )
        with assertion_msg(
            f"Expected not to see '{element_name}' element identified by '{selector.value}' on {driver.current_url}"
        ):
            assert not element.is_displayed()
    except NoSuchElementException:
        # Absence also satisfies "not visible".
        logging.debug(f"As expected '{element_name}' is not present")
        pass
def check_if_element_is_disabled(web_element: WebElement, element_name: str):
    """Assert that the given web element is not enabled (i.e. disabled)."""
    error = f"Expected '{element_name}' element to be disabled but it's not"
    with assertion_msg(error):
        assert not web_element.is_enabled()
    logging.debug(f"As expected '{element_name}' field is disabled.")
def run_alternative_visibility_check(
    driver: WebDriver,
    element_name: str,
    selector: Selector,
    *,
    element: WebElement = None,
    take_screenshot: bool = True,
):
    """Heuristic visibility check: an element with a non-zero location and
    non-zero size dimensions is considered visible.

    BUG FIX: the boolean `take_screenshot` parameter shadows the module-level
    take_screenshot() function, so the old code tried to call a bool
    (TypeError) whenever the check failed; the flag is now honoured and the
    function is resolved via globals().  The parameter name is kept for
    call-site compatibility.
    """
    element = element or find_element(driver, selector)
    location = element.location
    size = element.size
    if not all(location.values()) or not all(size.values()):
        if take_screenshot:
            # The parameter shadows the function -- reach it via globals().
            globals()["take_screenshot"](driver, f"{element_name}_is_not_visible")
    with assertion_msg(
        f"It looks like '{element_name}' element identified by '{selector.by} →"
        f" {selector.value}' selector is not visible on "
        f"{driver.current_url} as it's location is outside viewport: "
        f"{location}"
    ):
        assert all(location.values())
    with assertion_msg(
        f"It looks like '{element_name}' element identified by '{selector.by} →"
        f" {selector.value}' selector is not visible on "
        f"{driver.current_url} is it's size dimensions are zeroed: "
        f"{size}"
    ):
        assert all(size.values())
    logging.debug(
        f"Visibility of '{element_name} → {selector.by} → {selector.value}' "
        f"was confirmed with an alternative check"
    )
def find_element(
    driver: WebDriver,
    selector: Selector,
    *,
    element_name: str = "",
    wait_for_it: bool = True,
    take_screenshot: bool = True,
) -> WebElement:
    """Find element by CSS selector or it's ID.

    Optionally waits for visibility and runs the selector's extra checks.
    """
    with selenium_action(
        driver,
        f"Could not find element called '{element_name}' using selector "
        f"'{selector.value}' on {driver.current_url}",
        screenshot=take_screenshot,
    ):
        element = driver.find_element(by=selector.by, value=selector.value)
        if wait_for_it and selector.is_visible:
            wait_for_visibility(driver, selector)
        if selector.disabled:
            check_if_element_is_disabled(element, element_name)
        elif selector.alternative_visibility_check:
            # NOTE(review): the elif means a `disabled` selector never gets
            # the alternative visibility check -- confirm that's intended.
            run_alternative_visibility_check(
                driver,
                element_name,
                selector,
                element=element,
                take_screenshot=take_screenshot,
            )
    return element
def find_selector_by_name(selectors: dict, name: str) -> Selector:
    """Return the single selector registered under `name` (case-insensitive)
    across all page sections."""
    wanted = name.lower()
    found_selectors = [
        selector
        for section_selectors in selectors.values()
        for selector_name, selector in section_selectors.items()
        if selector_name.lower() == wanted
    ]
    # IMPROVEMENT: fail with context instead of a bare AssertionError.
    assert len(found_selectors) == 1, (
        f"Expected exactly 1 selector named '{name}' but found "
        f"{len(found_selectors)}"
    )
    return found_selectors[0]
def find_elements(driver: WebDriver, selector: Selector) -> List[WebElement]:
    """Locate all elements matching `selector`; may return an empty list."""
    error = f"Couldn't find elements using '{selector.value}'"
    with selenium_action(driver, error):
        found = driver.find_elements(by=selector.by, value=selector.value)
    return found
def check_hash_of_remote_file(expected_hash: str, file_url: str):
    """Check if the md5 hash of the file is the same as expected.

    Fetches the file over HTTP with basic-auth credentials embedded in the
    URL and compares its md5 digest with `expected_hash`.

    CLEANUP: removed the function-local re-import of BASICAUTH_USER/PASS,
    which needlessly shadowed the module-level import of the same names.
    """
    logging.debug("Fetching file: %s", file_url)
    parsed = urlparse(file_url)
    with_creds = f"{parsed.scheme}://{BASICAUTH_USER}:{BASICAUTH_PASS}@{parsed.netloc}{parsed.path}"
    response = requests.get(with_creds)
    logging.debug(f"Got {response.status_code} from {file_url}")
    assert response.status_code == 200
    file_hash = hashlib.md5(response.content).hexdigest()
    with assertion_msg(
        f"Expected hash of file downloaded from {file_url} to be {expected_hash} but got {file_hash}"
    ):
        assert expected_hash == file_hash
@contextmanager
def scroll_to_element_if_not_visible(
    driver: WebDriver, element: WebElement, *, section: str = None, name: str = None
):
    """Scroll to element only if it's not visible.

    Delaying scrolling to every element can save around 100ms per element.
    A TimeoutException from the wrapped block triggers the scroll + recheck.
    """
    try:
        yield
    except TimeoutException as e:
        if section and name:
            logging.debug(f"Scrolling/Moving focus to '{section} → {name}' element")
        else:
            logging.warning(
                f"Element is not visible, will scroll to it & check it's visibility: "
                f"{e.msg}"
            )
        scroll_to(driver, element)
        error = (
            f"Element '{name or element.tag_name}' is not visible even after scrolling "
            f"to it"
        )
        assert element.is_displayed(), error
@contextmanager
def try_alternative_click_on_exception(driver: WebDriver, element: WebElement):
    """Try alternative click methods (JS or ActionChains) if regular way didn't work.

    JS workaround:
        Handle situations when clicking on element triggers:
        selenium.common.exceptions.ElementClickInterceptedException:
            Message: element click intercepted:
            Element <input id="id_terms"> is not clickable at point (714, 1235).
            Other element would receive the click: <label for="id_terms">...</label>
        See: https://stackoverflow.com/a/44916498
    ActionChains workaround:
        Handles situations when clicking on element triggers:
        selenium.common.exceptions.ElementNotInteractableException:
            Message: Element <a href="..."> could not be scrolled into view
        See: https://selenium-python.readthedocs.io/api.html#module-selenium.webdriver.common.action_chains
    """
    try:
        yield
    except ElementClickInterceptedException as e:
        # Dispatch the click straight from JS, bypassing hit-testing.
        logging.warning(
            f"Failed click intercepted. Will try JS workaround for: {e.msg}"
        )
        driver.execute_script("arguments[0].click();", element)
    except ElementNotInteractableException as e:
        # Move to the element first, then click, via ActionChains.
        logging.warning(
            f"Failed click intercepted. Will try ActionChains workaround for: {e.msg}"
        )
        action_chains = ActionChains(driver)
        action_chains.move_to_element(element)
        action_chains.click()
        action_chains.perform()
        logging.warning(f"ActionChains click workaround is done")
class wait_for_page_load_after_action(object):
    """Context manager for waiting the page to load.
    Proved to be a more reliable than wait_for_page_load() ^^^
    src:
    http://www.obeythetestinggoat.com/how-to-get-selenium-to-wait-for-page-load-after-a-click.html
    https://www.develves.net/blogs/asd/2017-03-04-selenium-waiting-for-page-load/
    """

    def __init__(self, driver: WebDriver, *, timeout: int = 10):
        # timeout: maximum number of seconds to wait for the new page
        self.driver = driver
        self.timeout = timeout

    def __enter__(self):
        # Remember the <html> element of the page we are about to leave; a
        # navigation replaces the document and thus this element reference.
        self.old_page = self.driver.find_element_by_tag_name("html")

    def page_has_loaded(self):
        """Return True once the current <html> element differs from the old one."""
        new_page = self.driver.find_element_by_tag_name("html")
        # NOTE(review): compares selenium's internal element ids — presumably a
        # new document yields a new id; confirm against the selenium version used.
        has_loaded = new_page.id != self.old_page.id
        if has_loaded:
            logging.debug(f"Page has loaded. {self.driver.current_url}")
        else:
            logging.debug(f"Waiting for {self.driver.current_url} page to load...")
        return has_loaded

    def __exit__(self, *_):
        # Block until the new page is detected (or PageLoadTimeout is raised).
        self.wait_for(self.page_has_loaded)

    def wait_for(self, condition_function):
        """Poll *condition_function* every 0.5s until truthy or timeout.

        :raises PageLoadTimeout: when the condition stays falsy for `timeout` seconds
        """
        import time

        start_time = time.time()
        while time.time() < start_time + self.timeout:
            if condition_function():
                return True
            else:
                time.sleep(0.5)
        raise PageLoadTimeout(
            f"Timed out after {self.timeout}s of waiting for the new page to load"
        )
def scroll_to(driver: WebDriver, element: WebElement):
    """Bring *element* into view, with a Firefox-specific scrolling path."""
    browser = driver.capabilities["browserName"].lower()
    if "firefox" in browser:
        viewport_height = int(driver.execute_script("return window.innerHeight;"))
        element_y = int(element.location["y"])
        if element_y > viewport_height:
            logging.debug(f"Scrolling to y={element_y}")
            driver.execute_script(f"window.scrollTo(0, {element_y});")
        else:
            logging.debug(
                f"Element is already positioned ({element_y}) within view_port "
                f"({viewport_height})"
            )
        if not element.is_displayed():
            logging.debug(f"Scrolling to element using scrollIntoView: {element}")
            driver.execute_script("arguments[0].scrollIntoView(true);", element)
    else:
        # Non-Firefox browsers: moving the mouse to the element scrolls it in.
        chain = ActionChains(driver)
        chain.move_to_element(element)
        chain.perform()
def check_for_sections(
    driver: WebDriver,
    all_sections: dict,
    sought_sections: List[str],
    *,
    desktop: bool = True,
    mobile: bool = False,
    horizontal: bool = False,
):
    """Assert that every selector of the sought page sections can be found.

    Selectors flagged `is_visible` must also be displayed (scrolling to them
    if needed); otherwise only presence is required, unless the selector asks
    for an alternative visibility check.

    :param all_sections: mapping of lower-case section name -> selectors dict
    :param sought_sections: section names to verify (looked up lower-cased)
    :param desktop: use desktop selectors. NOTE(review): defaults to True, so
        it wins over `mobile`/`horizontal` unless explicitly set to False.
    :param mobile: use mobile selectors
    :param horizontal: use horizontal (mobile) selectors
    :raises KeyError: when all three variant flags are falsy
    """
    for name in sought_sections:
        if desktop:
            selectors = get_desktop_selectors(all_sections[name.lower()])
        elif mobile:
            selectors = get_mobile_selectors(all_sections[name.lower()])
        elif horizontal:
            selectors = get_horizontal_selectors(all_sections[name.lower()])
        else:
            raise KeyError(
                "Please choose from desktop, mobile or horizontal (mobile) selectors"
            )
        for key, selector in selectors.items():
            with selenium_action(
                driver,
                f"Could not find element: '{key} → {selector.by} → {selector.value}'"
                f" on {driver.current_url}",
            ):
                element = driver.find_element(by=selector.by, value=selector.value)
            if selector.is_visible:
                # Scroll only when the plain visibility wait times out.
                with scroll_to_element_if_not_visible(
                    driver, element, section=name, name=key
                ):
                    wait_for_element_visibility(driver, element)
                logging.debug(f"'{key} → {selector.by} → {selector.value}' is visible")
            else:
                if selector.alternative_visibility_check:
                    run_alternative_visibility_check(
                        driver, key, selector, element=element
                    )
                else:
                    logging.debug(
                        f"Skipping visibility check for '{key} → {selector.by} → "
                        f"{selector.value}' as its selector is flagged as not visible"
                    )
def get_desktop_selectors(section: dict) -> Dict[str, Selector]:
    """Return only the selectors from *section* that apply to desktop view."""
    result = {}
    for key, selector in section.items():
        if selector.in_desktop:
            result[key] = selector
    return result
def get_mobile_selectors(section: dict) -> Dict[str, Selector]:
    """Return only the selectors from *section* that apply to mobile view."""
    return dict((key, sel) for key, sel in section.items() if sel.in_mobile)
def get_horizontal_selectors(section: dict) -> Dict[str, Selector]:
    """Return only the selectors flagged for horizontal (mobile) view."""
    horizontal = {key: sel for key, sel in section.items() if sel.in_horizontal}
    return horizontal
def get_selectors(section: dict, element_type: ElementType) -> Dict[str, Selector]:
    """Return only the selectors from *section* whose type is *element_type*."""
    matching = {}
    for key, selector in section.items():
        if selector.type == element_type:
            matching[key] = selector
    return matching
def find_elements_of_type(
    driver: WebDriver, section: dict, element_type: ElementType
) -> defaultdict:
    """Locate every element of *element_type* in *section*, keyed by name."""
    found = defaultdict()
    for name, selector in get_selectors(section, element_type).items():
        found[name] = find_element(
            driver, selector, element_name=name, wait_for_it=False
        )
    return found
def selectors_by_group(
    form_selectors: Dict[str, Selector]
) -> Dict[str, Dict[str, Selector]]:
    """Group form selectors by their ``group_id``.

    Selectors without a group_id land in the "default" group.

    Fixed the return annotation: the function returns a mapping of
    group id -> {selector name -> Selector}, not Dict[str, Selector].
    The nested ``defaultdict(dict)`` factory was never exploited, so a plain
    dict per group is sufficient.
    """
    groups = defaultdict(dict)
    for key, selector in form_selectors.items():
        groups[selector.group_id or "default"][key] = selector
    return groups
def assert_catcha_in_dev_mode(driver: WebDriver):
    """Assert any reCAPTCHA on the current page uses the dev/test site key.

    NOTE(review): the function name misspells "captcha"; renaming would break
    existing callers, so it is kept as-is.

    :raises UnexpectedElementPresent: when a captcha is present but configured
        with a non-dev site key
    """
    # Appears to be the publicly documented reCAPTCHA test site key — confirm.
    dev_site_key = "6LeIxAcTAAAAAJcZVRqyHh71UMIEGNQ_MXjiZKhI"
    try:
        g_recaptcha = find_element(
            driver,
            Selector(By.CSS_SELECTOR, ".g-recaptcha"),
            element_name="captcha",
            wait_for_it=False,
        )
    except NoSuchElementException:
        # No captcha on the page: nothing to verify.
        logging.debug(f"Captcha is not present on {driver.current_url}")
    else:
        scroll_to(driver, g_recaptcha)
        current_site_key = g_recaptcha.get_attribute("data-sitekey")
        logging.debug(f"Current site captcha key: {current_site_key}")
        is_in_dev_mode = current_site_key == dev_site_key
        if not is_in_dev_mode:
            raise UnexpectedElementPresent(
                f"Captcha is not in Dev Mode on {driver.current_url}"
            )
        logging.debug(f"Captcha is in Dev mode on {driver.current_url}")
def tick_captcha_checkbox(driver: WebDriver):
    """Click the "I'm not a robot" checkbox inside the reCAPTCHA iframe.

    Switches into the first iframe on the page, clicks the checkbox, waits
    for the widget to settle and switches back to the parent frame.
    """
    im_not_a_robot = Selector(By.CSS_SELECTOR, "#recaptcha-anchor")
    # NOTE(review): assumes the captcha lives in the FIRST iframe on the page.
    iframe = driver.find_element_by_tag_name("iframe")
    scroll_to(driver, iframe)
    driver.switch_to.frame(iframe)
    captcha = find_element(driver, im_not_a_robot)
    captcha.click()
    # wait 4s after user clicks on the CAPTCHA checkbox
    # otherwise the test might fail
    time.sleep(4)
    driver.switch_to.parent_frame()
def fill_out_input_fields(
    driver: WebDriver, form_selectors: Dict[str, Selector], form_details: dict
):
    """Type form_details values into every INPUT selector of the form.

    Special-case: a bool value never types anything — ``True`` only triggers
    the selector's autocomplete_callback (if any); ``False`` is a no-op.
    Falsy/missing values are skipped. After typing, an autocomplete_callback
    (when present) is invoked with the typed value.
    """
    input_selectors = get_selectors(form_selectors, ElementType.INPUT)
    for key, selector in input_selectors.items():
        value_to_type = form_details.get(key, None)
        if isinstance(value_to_type, bool):
            if value_to_type and selector.autocomplete_callback:
                logging.debug(
                    f"value_to_type=True. Will call autocomplete_callback() for"
                    f" '{key}'"
                )
                selector.autocomplete_callback(driver, value=value_to_type)
        else:
            if not value_to_type:
                logging.debug(f"Skipping '{key}' as there no value for it")
                continue
            logging.debug(
                f"Filling out input field '{key}' with '{value_to_type}' on '{driver.current_url}"
            )
            input_field = find_element(
                driver, selector, element_name=key, wait_for_it=False
            )
            # Hidden fields are located but deliberately left untouched.
            if input_field.is_displayed():
                input_field.clear()
                input_field.send_keys(value_to_type)
            if selector.autocomplete_callback:
                logging.debug(f"Calling autocomplete_callback() for '{key}'")
                selector.autocomplete_callback(driver, value=value_to_type)
def fill_out_textarea_fields(
    driver: WebDriver, form_selectors: Dict[str, Selector], form_details: dict
):
    """Type the matching form_details value into every TEXTAREA selector."""
    for key, selector in get_selectors(form_selectors, ElementType.TEXTAREA).items():
        text = form_details.get(key)
        if not text:
            logging.debug(f"Skipping '{key}' as there no value for it")
            continue
        logging.debug(f"Filling out textarea: {key} with '{text}'")
        field = find_element(driver, selector, element_name=key, wait_for_it=False)
        if not field.is_displayed():
            continue
        field.clear()
        field.send_keys(text)
def check_form_choices(
    driver: WebDriver, form_selectors: Dict[str, Selector], names: List[str]
):
    """Verify a radio selector can be located for every given choice name."""
    radio_selectors = get_selectors(form_selectors, ElementType.RADIO)
    for choice in names:
        find_element(
            driver,
            radio_selectors[choice.lower()],
            element_name=choice,
            wait_for_it=False,
        )
    logging.debug(
        f"All expected form choices: '{names}' are visible on {driver.current_url}"
    )
def get_option_values(
    driver: WebDriver, selector: Selector, *, remove_empty_values: bool = True
) -> List[str]:
    """Return the ``value`` attribute of every <option> in a <select>.

    :param selector: the <select> element; only By.ID or By.CSS_SELECTOR
    :param remove_empty_values: drop options with an empty value (typically
        the "please choose" placeholder)
    :raises AssertionError: for any other selector strategy
    """
    error = f"'{selector.by}' not recognised. Only By.ID, By.CSS_SELECTOR are allowed"
    assert selector.by in (By.ID, By.CSS_SELECTOR), error
    # Build a CSS selector for the options from either strategy.
    if selector.by == By.ID:
        option_selector = f"#{selector.value} option"
    else:
        option_selector = f"{selector.value} option"
    logging.debug(f"Looking for option values using: '{option_selector}'")
    # Parses driver.page_source rather than querying live elements.
    option_values = extract_attributes_by_css(
        driver.page_source, option_selector, attrs=["value"], text=False
    )
    if remove_empty_values:
        option_values = [item for item in option_values if item["value"]]
    logging.debug(f"Available options: {option_values}")
    return [item["value"] for item in option_values]
def pick_option(
    driver: WebDriver, form_selectors: Dict[str, Selector], form_details: dict
):
    """Select an option in every SELECT field of the form.

    The option value comes from form_details; when missing/falsy a random
    option from the live dropdown is picked instead. Selection happens via
    the selector's autocomplete_callback when one is configured, otherwise
    by clicking the matching <option> element.
    """
    select_selectors = get_selectors(form_selectors, ElementType.SELECT)
    for key, selector in select_selectors.items():
        if form_details.get(key, None):
            option = form_details[key]
        else:
            logging.debug(
                f"Picking an option from '{key}' dropdown list using: {selector}"
            )
            values = get_option_values(driver, selector)
            option = random.choice(values)
        logging.debug(f"Will select option: {option}")
        if selector.autocomplete_callback:
            logging.debug(f"Calling autocomplete_callback() for '{key}'")
            selector.autocomplete_callback(driver, value=option)
        else:
            select = find_element(driver, selector, element_name=key, wait_for_it=False)
            option_value_selector = f"option[value='{option}']"
            option_element = select.find_element_by_css_selector(option_value_selector)
            with try_alternative_click_on_exception(driver, option_element):
                option_element.click()
def check_radio(
    driver: WebDriver, form_selectors: Dict[str, Selector], form_details: dict
):
    """Click every radio button whose form_details entry is truthy."""
    radio_selectors = get_selectors(form_selectors, ElementType.RADIO)
    for key, selector in radio_selectors.items():
        assert key in form_details, f"Can't find form detail for '{key}'"
        if not form_details[key]:
            continue
        radio = find_element(driver, selector, element_name=key, wait_for_it=False)
        if radio.get_property("checked"):
            continue
        logging.debug(f"Checking '{key}' radio")
        radio.click()
def check_random_radio(driver: WebDriver, form_selectors: Dict[str, Selector]):
    """Tick one randomly chosen radio option from every radio group."""
    radio_selectors = get_selectors(form_selectors, ElementType.RADIO)
    grouped_selectors = selectors_by_group(radio_selectors)
    for group, selectors in grouped_selectors.items():
        logging.debug(f"Selecting random radio option from group: {group}")
        key = random.choice(list(selectors.keys()))
        selector = radio_selectors[key]
        radio = find_element(driver, selector, element_name=key, wait_for_it=False)
        # Only click when not already checked (clicking a checked radio is a no-op
        # anyway, but this keeps the logs accurate).
        if not radio.get_property("checked"):
            logging.debug(f"Checking '{key}' radio")
            radio.click()
def choose_one_form_option(
    driver: WebDriver, radio_selectors: Dict[str, Selector], name: str
):
    """Tick only the radio whose key equals *name* (case-insensitive)."""
    wanted = name.lower()
    form_details = defaultdict(bool)
    for key in radio_selectors:
        form_details[key] = key == wanted
    logging.debug(f"Form details: {form_details}")
    check_radio(driver, radio_selectors, form_details)
def choose_one_form_option_except(
    driver: WebDriver, radio_selectors: Dict[str, Selector], ignored: List[str]
) -> str:
    """Tick one randomly chosen radio option, excluding *ignored* keys.

    :returns: the key of the option that was selected
    """
    all_keys = list(radio_selectors.keys())
    without_ignored = list(set(all_keys) - set(ignored))
    selected = random.choice(without_ignored)
    form_details = defaultdict(bool)
    # NOTE(review): keys are lower-cased here but choose_one_form_option()
    # uses them verbatim; this only works if selector keys are already
    # lower-case — confirm and unify.
    for key in radio_selectors.keys():
        form_details[key.lower()] = key.lower() == selected
    logging.debug(f"Form details (with ignored: {ignored}): {form_details}")
    check_radio(driver, radio_selectors, form_details)
    return selected
def submit_form(
    driver: WebDriver,
    form_selectors: Dict[str, Selector],
    *,
    wait_for_new_page_to_load: bool = True,
) -> Union[ModuleType, None]:
    """Click the form's single submit element and return its next_page.

    :param wait_for_new_page_to_load: wrap the click in
        wait_for_page_load_after_action (25s timeout)
    :returns: the page module the submit selector points at (or None)
    :raises AssertionError: when the form doesn't have exactly 1 submit element
    """
    submit_selectors = get_selectors(form_selectors, ElementType.SUBMIT)
    error = (
        f"Expected to find exactly 1 submit element in form on {driver.current_url} "
        f"instead we got {len(submit_selectors)}"
    )
    with assertion_msg(error):
        assert len(submit_selectors) == 1
    submit_button_selector = list(submit_selectors.values())[0]
    submit_button = find_element(
        driver, submit_button_selector, element_name="submit button", wait_for_it=False
    )
    take_screenshot(driver, "Before submitting the form")
    if wait_for_new_page_to_load:
        with wait_for_page_load_after_action(driver, timeout=25):
            with try_alternative_click_on_exception(driver, submit_button):
                submit_button.click()
    else:
        with try_alternative_click_on_exception(driver, submit_button):
            submit_button.click()
    take_screenshot(driver, "After submitting the form")
    return submit_button_selector.next_page
def pick_one_option_and_submit(
    driver: WebDriver,
    form_selectors: Dict[str, Selector],
    name: str,
    *,
    submit_button_name: str = "submit",
) -> Union[ModuleType, None]:
    """Select the named radio option, submit the form, return its next_page.

    :param name: radio option key (looked up lower-cased)
    :param submit_button_name: key of the submit button in form_selectors
    :returns: the page module configured on the chosen radio selector
    """
    radio_selectors = get_selectors(form_selectors, ElementType.RADIO)
    selector = radio_selectors[name.lower()]
    choose_one_form_option(driver, radio_selectors, name)
    take_screenshot(driver, f"Before submitting the form - {name}")
    submit_button_selector = form_selectors[submit_button_name]
    button = find_element(
        driver, submit_button_selector, element_name="Submit button", wait_for_it=False
    )
    button.click()
    take_screenshot(driver, f"After submitting the form - {name}")
    return selector.next_page
def tick_checkboxes(
    driver: WebDriver, form_selectors: Dict[str, Selector], form_details: dict
):
    """Tick every CHECKBOX selector whose form_details value is truthy.

    A non-bool truthy value narrows the selector to the checkbox carrying
    that ``value`` attribute; ``True`` uses the selector as-is. Keys missing
    from form_details are skipped; falsy (non-None) values leave the
    checkbox untouched.
    """
    checkbox_selectors = get_selectors(form_selectors, ElementType.CHECKBOX)
    for key, selector in checkbox_selectors.items():
        checkbox_value = form_details.get(key)
        if checkbox_value is None:
            logging.debug(f"Skipping '{key}' as there no value for it")
            continue
        logging.debug(f"Will tick off '{key}' checkbox (if necessary)")
        if checkbox_value:
            if not isinstance(checkbox_value, bool):
                logging.debug(f"Will select checkbox with '{checkbox_value}' value")
                # Quote the attribute value (as pick_option() already does) so
                # values containing spaces or special characters still form a
                # valid CSS selector.
                selector = Selector(
                    By.CSS_SELECTOR, f"{selector.value}[value='{checkbox_value}']"
                )
            checkbox = find_element(
                driver, selector, element_name=key, wait_for_it=False
            )
            if not checkbox.get_property("checked"):
                with try_alternative_click_on_exception(driver, checkbox):
                    checkbox.click()
def tick_checkboxes_by_labels(
    driver: WebDriver, form_selectors: Dict[str, Selector], form_details: dict
):
    """Synchronise every LABEL-selected checkbox with its form_details flag.

    A truthy entry means the checkbox must end up ticked, a falsy one that it
    must end up unticked (despite the "left unchanged" log message, which is
    kept verbatim from the original).

    NOTE(review): unlike tick_checkboxes(), this indexes form_details[key]
    directly and thus raises KeyError for missing keys — kept as-is.
    """
    checkbox_selectors = get_selectors(form_selectors, ElementType.LABEL)
    for key, selector in checkbox_selectors.items():
        should_be_ticked = bool(form_details[key])
        if should_be_ticked:
            logging.debug(f"'{key}' checkbox should be ticked")
        else:
            logging.debug(f"'{key}' checkbox should be left unchanged")
        # De-duplicated: both branches located the element and clicked it
        # whenever its current state differed from the desired one.
        checkbox = find_element(driver, selector, element_name=key, wait_for_it=False)
        if bool(checkbox.get_property("checked")) != should_be_ticked:
            if should_be_ticked:
                logging.debug(f"'{key}' checkbox is not ticked, checking it")
            else:
                logging.debug(f"'{key}' checkbox is ticked, unchecking it")
            with try_alternative_click_on_exception(driver, checkbox):
                checkbox.click()
def untick_selected_checkboxes(driver: WebDriver, selector: Selector):
    """Uncheck every currently-checked checkbox matched by *selector*."""
    checkboxes = find_elements(driver, selector)
    logging.debug(f"There are {len(checkboxes)} checkboxes on {driver.current_url}")
    for box in checkboxes:
        if not box.get_property("checked"):
            continue
        logging.debug(
            f"Unticking checkbox {box.get_attribute('value') or box.get_attribute('id')}"
        )
        with try_alternative_click_on_exception(driver, box):
            box.click()
def accept_all_cookies(driver: WebDriver):
    """Dismiss the cookie banner (if present and visible) by accepting all.

    Safe to call on pages without the banner — it simply logs and returns.
    """
    from pages import common_selectors

    accept = common_selectors.COOKIE_BANNER["cookie banner"]["accept all cookies"]
    banner_selector = common_selectors.COOKIE_BANNER["cookie banner"]["banner"]
    if is_element_present(driver, banner_selector):
        banner = driver.find_element_by_css_selector(banner_selector.value)
        if banner.is_displayed():
            logging.debug("Accepting all cookies")
            button = driver.find_element_by_css_selector(accept.value)
            button.click()
        else:
            logging.debug("Cookie banner is not visible")
    else:
        logging.debug("Cookie banner is not present")
def generic_set_basic_auth_creds(driver: WebDriver, *, service_name: str = None):
    """Authenticate the browser session via the basic-auth test endpoint.

    Visits ``/automated-test-auth`` with credentials embedded in the URL so
    subsequent requests from this driver session are authenticated.

    :param service_name: "ERP" targets the ERP landing host; anything else
        (including None) targets the domestic landing host
    :raises AssertionError: when the endpoint doesn't answer "ok"
    """
    if service_name == "ERP":
        base_url = URLs.ERP_LANDING.absolute
    else:
        base_url = URLs.DOMESTIC_LANDING.absolute
    parsed = urlparse(base_url)
    with_creds = f"{parsed.scheme}://{BASICAUTH_USER}:{BASICAUTH_PASS}@{parsed.netloc}/automated-test-auth"
    logging.debug("Doing basic auth")
    with wait_for_page_load_after_action(driver):
        driver.get(with_creds)
    # Renamed from "assertion_msg": the old local shadowed the assertion_msg()
    # context manager this module uses elsewhere.
    error = f"Access is still denied after authentication attempt → {base_url}"
    with selenium_action(driver, error):
        assert "ok" in driver.page_source
def revisit_page_on_access_denied(driver: WebDriver, page: ModuleType, page_name: str):
    """Re-authenticate and reload *page* if the current source shows Access Denied."""
    if not access_was_denied(driver.page_source):
        return
    logging.debug(
        f"Access Denied. Trying to re-authenticate on '{page_name}' {page.URL}"
    )
    generic_set_basic_auth_creds(driver)
    driver.get(page.URL)
|
from django.contrib import admin
from .models import Merchandise

# Expose the Merchandise model in the Django admin using the default ModelAdmin.
admin.site.register(Merchandise)
from argparse import Namespace
import test
import numpy as np
import torch

# Fix all RNG seeds and disable cuDNN autotuning for reproducible test runs.
np.random.seed(42)
torch.manual_seed(0)
torch.backends.cudnn.deterministic = True
torch.backends.cudnn.benchmark = False

# Which option set / model / checkpoint epochs to evaluate.
opt_v = '01'
model = 'm02'
epoch = 'latest'
epochs_to_test = ['latest', '1', '15', '30', '45', '60', '70']

if opt_v == '01':
    dataset = '1K'
    experiment_name = 'm02_ganilla_noSkip_' + dataset
    # Full option namespace mimicking the project's CLI parser output
    # (GANILLA/CycleGAN-style test options); values are fixed for this run.
    opt_01 = Namespace(aspect_ratio=1.0, batch_size=1, checkpoints_dir='checkpoints/',
                       cityscape_fnames='./datasets/cityscapes_test_file_names.txt', cityscapes=False,
                       dataroot='../datasets/'+dataset, dataset_mode='unaligned', direction='AtoB', display_winsize=256,
                       epoch=epoch, eval=False, fineSize=64, fpn_weights=[1.0, 1.0, 1.0, 1.0], gpu_ids=[0], init_gain=0.02,
                       init_type='normal', input_nc=3, isTrain=False, loadSize=64, max_dataset_size=float("inf"), model=model,
                       n_layers_D=3, name=experiment_name, ndf=64, netD='basic', netG='resnet_fpn', ngf=64,
                       no_dropout=True, no_flip=False, norm='instance', ntest=float("inf"), num_test=751, num_threads=8, output_nc=3,
                       phase='test', resize_or_crop='resize_and_crop', results_dir='./results/', serial_batches=False,
                       suffix='', verbose=False)

# Evaluate every requested checkpoint epoch with the same options.
for e in epochs_to_test:
    opt_01.epoch = e
    test.test(opt_01)
from DoubleLinkedList import Node,DoublyLinkedList
def palindrome(doublyLinkedList):
    """Print whether the doubly linked list reads the same in both directions.

    Walks two pointers from the head and the tail towards each other.

    Bug fix: for even-length lists the pointers cross without ever being
    equal, so the original loop walked off the ends of the list; the loop
    now also terminates when the tail pointer has moved past the head
    pointer (``endPointer.next == startPointer``).

    NOTE(review): an empty list (head is None) still raises AttributeError,
    as in the original — confirm whether that case can occur.
    """
    startPointer = doublyLinkedList.head
    endPointer = doublyLinkedList.head
    while endPointer.next is not None:
        endPointer = endPointer.next
    while True:
        # Pointers met (odd length) or crossed (even length): all compared
        # pairs matched, so the list is a palindrome.
        if startPointer == endPointer or endPointer.next == startPointer:
            print("List is palindrome")
            return
        if startPointer.data == endPointer.data:
            startPointer = startPointer.next
            endPointer = endPointer.previous
        else:
            print("List is not palindrome")
            return
import numpy as np
import pandas as pd
#
# GEOMETRIES
#
def centroid_3d(arr):
    """Return the (x, y, z) centroid of an (N, 3+) array of points."""
    point_count = arr.shape[0]
    column_sums = [np.sum(arr[:, axis]) for axis in range(3)]
    return (
        column_sums[0] / point_count,
        column_sums[1] / point_count,
        column_sums[2] / point_count,
    )
def rescale_3d(X, x_scale, y_scale, z_scale):
    """Return a copy of X with its first three columns scaled per-axis.

    Uses zeros_like so the output keeps X's dtype, as before.
    """
    rescaled = np.zeros_like(X)
    for axis, factor in enumerate((x_scale, y_scale, z_scale)):
        rescaled[:, axis] = X[:, axis] * factor
    return rescaled
def prepare_points3d(img_vol_shape=[100, 100, 100], proj="hunt"):
    """Prepare a set of 3d test points

    Keyword Arguments:
        img_vol_shape {list} -- shape of the image vol to fill with points (default: {[100,100,100]})
        proj {str} -- project code (default: {'hunt'})
    Returns:
        {np.array} -- array of 3d points

    NOTE(review): relies on notebook-style globals — `df2`, `sel_clicks2` and
    `logger` must exist in the calling namespace; when the data frames are
    absent the NameError fallback generates random points instead.
    NOTE(review): the mutable default `img_vol_shape=[100,100,100]` is never
    mutated, so it is harmless, but a tuple would be safer.
    """
    num_bb = 45
    if proj == "hunt":
        try:
            sliceno = 60
            df_zero = df2.loc[df2["subject_metadata_slice"] == sliceno]
            xs = df_zero["T3_x_true_posx"].values
            ys = df_zero["T3_x_true_posy"].values
            # ellipses = [[[x-10, y-10],[x-10,y+10],[x+10,y+10],[x+10,y-10]] for (x, y) in zip(xs,dt.shape[2]-ys)]
            # NOTE(review): this branch builds `points` but never assigns
            # `points3d`, so the logging/return below would raise NameError
            # whenever df2 actually exists — confirm intent.
            points = [[x, y] for (x, y) in zip(xs, ys)]
        except NameError as e:
            # Fallback: num_bb random points scaled to the image volume.
            points3d = np.array(
                list(
                    zip(
                        np.random.random(
                            (num_bb, 1),
                        ),
                        np.random.random(
                            (num_bb, 1),
                        ),
                        np.random.random((num_bb, 1)),
                    )
                )
            ).reshape((num_bb, 3))
            points3d[:, 0] = points3d[:, 0] * img_vol_shape[0]
            points3d[:, 1] = points3d[:, 1] * img_vol_shape[1]
            points3d[:, 2] = points3d[:, 2] * img_vol_shape[2]
    elif proj == "vf":
        try:
            points3d = np.array([[z, x, y] for (z, x, y) in sel_clicks2])
        except NameError as e:
            # Same random fallback as the "hunt" branch.
            points3d = np.array(
                list(
                    zip(
                        np.random.random(
                            (num_bb, 1),
                        ),
                        np.random.random(
                            (num_bb, 1),
                        ),
                        np.random.random((num_bb, 1)),
                    )
                )
            ).reshape((num_bb, 3))
            points3d[:, 0] = points3d[:, 0] * img_vol_shape[0]
            points3d[:, 1] = points3d[:, 1] * img_vol_shape[1]
            points3d[:, 2] = points3d[:, 2] * img_vol_shape[2]
    logger.info("Size of points array: {}".format(points3d.shape))
    return points3d
|
from django.contrib.auth.models import User
from django.db import models
from django.dispatch.dispatcher import receiver
class UserInfo(models.Model):
    """Model for storing extra information related to a user.
    """
    # on_delete is mandatory on Django >= 2.0; CASCADE matches the implicit
    # pre-2.0 default, so the profile is removed together with its user.
    owner = models.OneToOneField(User, related_name='extra_info',
        on_delete=models.CASCADE,
        primary_key=True, help_text="User associated with this instance")
    # Unique contact address used to identify the organization.
    email = models.EmailField(unique=True, null=False,
        help_text="email to identify the organization")

    class Meta:
        verbose_name = 'User Profile'
        verbose_name_plural = 'User Profiles'
@receiver(models.signals.post_save, sender=User)
def create_extra_info(sender, instance, created, **kwargs):
    """ Creates the UserInfo instance for each new user upon creation"""
    # Only on first save (created=True); subsequent saves leave the profile alone.
    if created :
        UserInfo.objects.create(owner=instance)
|
#!/usr/bin/env python
# coding: utf-8
# In[22]:
import json
import pandas as pd
import requests
import numpy as np
from numpy import nan
import geopandas
import matplotlib.pyplot as plt
import folium
import seaborn as sns
import streamlit as st
import plotly.graph_objects as go
import folium
from streamlit_folium import folium_static
import streamlit as st
import plotly.figure_factory as ff
# In[ ]:
# In[16]:
laadpaaldata_dt_4 =pd.read_csv('schone_laadpaaldata.csv')
# In[2]:
key = '91b8563b-459b-4816-9417-2e3860f1f3e4'
url = ' https://api.openchargemap.io/v3/poi/?output=json&countrycode=NL&maxresults=11063&compact=true&verbose=false'
json_data = requests.get(url, params={'key': key}).json()
laadpaallocaties = pd.DataFrame.from_dict(json_data)
laadpaaldata = pd.read_csv('laadpaaldata.csv')
response = requests.get("https://opendata.rdw.nl/resource/m9d7-ebf2.json")
voertuigen_data = pd.DataFrame.from_dict(response.json())
# In[4]:
# NOTE(review): DataFrame.drop() returns a new frame — the result is neither
# assigned nor inplace=True, so this statement has no effect; confirm whether
# `laadpaallocaties = laadpaallocaties.drop(...)` was intended.
laadpaallocaties.drop(['GeneralComments','OperatorsReference','MetadataValues','DateLastConfirmed'], axis = 1)
# In[5]:
# NOTE(review): same here — the dropped-column result is discarded.
voertuigen_data.drop(['aanhangwagen_autonoom_geremd','aanhangwagen_middenas_geremd','maximale_constructiesnelheid_brom_snorfiets','vermogen_brom_snorfiets','europese_voertuigcategorie_toevoeging','europese_uitvoeringcategorie_toevoeging','vervaldatum_tachograaf','type_gasinstallatie','oplegger_geremd'], axis =1)
# In[6]:
variables_of_intrest = ['AddressLine1', 'Town', 'StateOrProvince', 'Postcode', 'Latitude', 'Longitude']
def get_geo_data(dict_, variables_of_intrest):
    """Extract the listed keys from an address dict, using NaN for blanks.

    Missing keys and empty-string values both become NaN so that
    DataFrame.dropna() can later filter out incomplete addresses.

    Fix: ``np.NaN`` was replaced with ``np.nan`` — the upper-case alias was
    removed in NumPy 2.0 (they were the same object before).
    """
    values = []
    for variable in variables_of_intrest:
        if variable in dict_:
            if dict_[variable] == "":
                values.append(np.nan)
            else:
                values.append(dict_[variable])
        else:
            values.append(np.nan)
    return values
geo_data = []
for index, row in laadpaallocaties.iterrows():
values = get_geo_data(row['AddressInfo'], variables_of_intrest=variables_of_intrest)
geo_data.append(values)
geo_df = pd.DataFrame(geo_data, columns = variables_of_intrest)
geo_df = geo_df.dropna()
geo_df
# In[7]:
# Exact spellings (including typos such as 'Nordbraban' and trailing spaces)
# and colour-name casing are preserved from the original if/elif chain.
_PROVINCE_COLORS = {
    'North Brabant': 'goldenrod',
    'Samenwerkingsverband Regio Eindhoven': 'goldenrod',
    'Noord-Brabant': 'goldenrod',
    'Nordbraban': 'goldenrod',
    'Noord Brabant ': 'goldenrod',
    'South Holland': 'Orange',
    'Zuid-Holland': 'Orange',
    'Zuid Holland': 'Orange',
    'ZH': 'Orange',
    'North Holland': 'Yellow',
    'Stadsregio Amsterdam': 'Yellow',
    'Noord-Holland': 'Yellow',
    'Nordholland': 'Yellow',
    'Noord Holand': 'Yellow',
    'Noord Holland': 'yellow',
    'Noord-Hooland': 'yellow',
    'Zeeland': 'aqua',
    'Seeland': 'aqua',
    'Utrecht': 'Navy',
    'UT': 'navy',
    'UTRECHT': 'navy',
    'Limburg': 'red',
}


def color_producer(type):
    """Return the marker colour for a province name, or None when unknown.

    The StateOrProvince field in the OpenChargeMap data is free text, so many
    spelling variants map to the same colour. A lookup table replaces the
    original 22-branch if/elif chain with identical results.
    """
    return _PROVINCE_COLORS.get(type)
# In[8]:
gdf = geopandas.GeoDataFrame(
geo_df, geometry=geopandas.points_from_xy(geo_df.Longitude, geo_df.Latitude))
# In[9]:
key = '91b8563b-459b-4816-9417-2e3860f1f3e4'
url = ' https://api.openchargemap.io/v3/poi/?output=json&countrycode=NL&maxresults=11063&compact=true&verbose=false'
json_data = requests.get(url, params={'key': key}).json()
laadpaallocaties = pd.DataFrame.from_dict(json_data)
laadpaaldata = pd.read_csv('laadpaaldata.csv')
response = requests.get("https://opendata.rdw.nl/resource/m9d7-ebf2.json")
voertuigen_data = pd.DataFrame.from_dict(response.json())
# In[10]:
#st.text("Op deze kaart worden de laadpalen weergegeven uit de dataframe. De laadpalen zijn groepeerd per provincie,
#dankzij de kleuren legenda kan je in één oogopslag zien welke kleur bij welke provincie hoort. Hier is terug te zien dat de meeste laadpalen zich bevinden in de randstad. Dit is ook niet zo gek want het voornaamste gedeelte van de bevolking woont in de randstad.")
m = folium.Map(location=[52.0907374,5.1214209], zoom_start=7.5)
for Town in gdf.iterrows():
row_values = Town[1]
location = [row_values['Latitude'], row_values['Longitude']]
marker = folium.Circle(location=location, popup=row_values['AddressLine1'], color=color_producer(row_values['StateOrProvince']),
fill_color=color_producer(row_values['StateOrProvince'])
)
marker.add_to(m)
# In[23]:
# ik heb een functie gevonden op het internet voor het toevoegen van een categorische legenda:
# (bron: https://stackoverflow.com/questions/65042654/how-to-add-categorical-legend-to-python-folium-map)
def add_categorical_legend(folium_map, title, colors, labels):
    """Attach a fixed categorical colour legend to a folium map.

    Injects HTML/CSS plus a small JS snippet that waits for Leaflet's
    top-right control container to exist and appends the legend to it.
    (Adapted from https://stackoverflow.com/questions/65042654.)

    :param folium_map: folium.Map to decorate (returned for chaining)
    :param title: legend heading text
    :param colors: CSS colour per label, same length as labels
    :param labels: label per colour swatch
    :raises ValueError: when colors and labels differ in length
    """
    if len(colors) != len(labels):
        raise ValueError("colors and labels must have the same length.")
    color_by_label = dict(zip(labels, colors))
    # One <li> with a coloured swatch per label.
    legend_categories = ""
    for label, color in color_by_label.items():
        legend_categories += f"<li><span style='background:{color}'></span>{label}</li>"
    legend_html = f"""
    <div id='maplegend' class='maplegend'>
      <div class='legend-title'>{title}</div>
      <div class='legend-scale'>
        <ul class='legend-labels'>
        {legend_categories}
        </ul>
      </div>
    </div>
    """
    # Polls every 100ms until the Leaflet control container exists, then
    # inserts the legend exactly once.
    script = f"""
    <script type="text/javascript">
    var oneTimeExecution = (function() {{
        var executed = false;
        return function() {{
            if (!executed) {{
                var checkExist = setInterval(function() {{
                    if ((document.getElementsByClassName('leaflet-top leaflet-right').length) || (!executed)) {{
                        document.getElementsByClassName('leaflet-top leaflet-right')[0].style.display = "flex"
                        document.getElementsByClassName('leaflet-top leaflet-right')[0].style.flexDirection = "column"
                        document.getElementsByClassName('leaflet-top leaflet-right')[0].innerHTML += `{legend_html}`;
                        clearInterval(checkExist);
                        executed = true;
                    }}
                }}, 100);
            }}
        }};
    }})();
    oneTimeExecution()
    </script>
    """
    css = """
    <style type='text/css'>
    .maplegend {
        z-index:9999;
        float:right;
        background-color: rgba(255, 255, 255, 1);
        border-radius: 5px;
        border: 2px solid #bbb;
        padding: 10px;
        font-size:12px;
        positon: relative;
    }
    .maplegend .legend-title {
        text-align: left;
        margin-bottom: 5px;
        font-weight: bold;
        font-size: 90%;
    }
    .maplegend .legend-scale ul {
        margin: 0;
        margin-bottom: 5px;
        padding: 0;
        float: left;
        list-style: none;
    }
    .maplegend .legend-scale ul li {
        font-size: 80%;
        list-style: none;
        margin-left: 0;
        line-height: 18px;
        margin-bottom: 2px;
    }
    .maplegend ul.legend-labels li span {
        display: block;
        float: left;
        height: 16px;
        width: 30px;
        margin-right: 5px;
        margin-left: 0;
        border: 0px solid #ccc;
    }
    .maplegend .legend-source {
        font-size: 80%;
        color: #777;
        clear: both;
    }
    .maplegend a {
        color: #777;
    }
    </style>
    """
    folium_map.get_root().header.add_child(folium.Element(script + css))
    return folium_map
# In[26]:
st.text("Op deze kaart worden de laadpalen weergegeven uit de dataframe. De laadpalen zijn groepeerd per provincie, dankzij de kleuren legenda kan je in één oogopslag zien welke kleur bij welke provincie hoort. Hier is terug te zien dat de meeste laadpalen zich bevinden in de randstad. Dit is ook niet zo gek want het voornaamste gedeelte van de bevolking woont hier.")
m = add_categorical_legend(m, 'StateOrProvince',
colors = ['goldenrod', 'Orange', 'yellow', 'aqua', 'navy', 'red'],
labels = ['Noord-Brabant', 'Zuid-Holland', 'Noord-Holland', 'Zeeland', 'Utrecht', 'Limburg'])
folium_static(m)
# In[17]:
#gemiddelde en mediaan berekenen
contimemean = laadpaaldata_dt_4['ConnectedTime'].mean()
chatimemean = laadpaaldata_dt_4['ChargeTime'].mean()
contimemedian = laadpaaldata_dt_4['ConnectedTime'].median()
chatimemedian = laadpaaldata_dt_4['ChargeTime'].median()
#displot creëren
fig = ff.create_distplot([laadpaaldata_dt_4['ConnectedTime'], laadpaaldata_dt_4['ChargeTime']],
group_labels=['Tijd aan de lader', 'Tijd om op te laden'], show_rug=False, curve_type='normal')
#verticale lijnen van gemiddelde en mediaan toevoegen
fig.add_shape(type='line', x0=contimemean, y0=0, x1=contimemean, y1=1, line=dict(color='Blue',), xref='x', yref='paper',
name='Gemiddelde Connected time')
fig.add_shape(type='line', x0=chatimemean, y0=0, x1=chatimemean, y1=1, line=dict(color='Red',), xref='x', yref='paper',
name='Gemiddelde Charge time')
fig.add_shape(type='line', x0=contimemedian, y0=0, x1=contimemedian, y1=1, line=dict(color='Blue',), xref='x', yref='paper',
name='Mediaan Connected time')
fig.add_shape(type='line', x0=chatimemedian, y0=0, x1=chatimemedian, y1=1, line=dict(color='Red',), xref='x', yref='paper',
name='Mediaan Charge time')
#annotations bij de lijnen toevoegen
fig.add_annotation(x=contimemean, y=0.8, yref='paper',
text="Gemiddelde tijd aan de lader",
showarrow=True, ax=120)
fig.add_annotation(x=chatimemean, y=0.9, yref='paper',
text="Gemiddelde tijd om op te laden",
showarrow=True,ax=150, ay=-60)
fig.add_annotation(x=contimemedian, y=0.6, yref='paper',
text="Mediaan tijd aan de lader",
showarrow=True, ax=120)
fig.add_annotation(x=chatimemedian, y=0.8, yref='paper',
text="Mediaan tijd om op te laden",
showarrow=True, ay=-80)
fig.update_layout(barmode='overlay')
fig.update_traces(opacity=0.75)
#titels en astitels
fig.update_layout(title='Oplaadtijd en connectietijd (zonder uitschieters) met kansdichtheidbenadering')
fig.update_xaxes(title='Tijd in uren')
fig.update_yaxes(title='Dichtheid')
fig.show()
# In[ ]:
|
#!/usr/bin/env python
import string
from collections import OrderedDict
# Full letter-tile distribution for the bag (appears to be a Bananagrams /
# Scrabble-style letter game — TODO confirm the exact game and counts).
START_LETTERS = {
    'A': 10,
    'B': 2,
    'C': 2,
    'D': 5,
    'E': 12,
    'F': 2,
    'G': 3,
    'H': 3,
    'I': 9,
    'J': 1,
    'K': 1,
    'L': 4,
    'M': 2,
    'N': 6,
    'O': 7,
    'P': 2,
    'Q': 1,
    'R': 6,
    'S': 5,
    'T': 7,
    'U': 4,
    'V': 2,
    'W': 2,
    'X': 1,
    'Y': 2,
    'Z': 1,
}
if __name__ == "__main__":
    # Load the letters drawn so far; the file holds one run of letters.
    with open("letters.txt") as f:
        raw_letters = f.read().strip()

    # Count every drawn letter, keeping stable A-Z ordering in the output.
    current_letters = OrderedDict()
    for char in string.ascii_uppercase:
        current_letters[char] = 0
    for l in raw_letters.upper():
        current_letters[l] += 1

    # Print the drawn count per letter plus how many remain in the bag.
    # Ported from Python-2-only constructs: dict.iteritems() -> items() and
    # the `print` statement -> print() call (both also valid on Python 2).
    for l, c in current_letters.items():
        remaining = ''
        if c < START_LETTERS[l]:
            remaining = " (%d)" % (START_LETTERS[l] - c)
        print("%s: %2d %s" % (l, c, remaining))
|
from django.contrib.auth import authenticate, login
from django.shortcuts import render, redirect
from django.contrib import messages
from .forms import RegistrationForm
def register(request):
    """Handle user sign-up.

    GET  -> render an empty registration form.
    POST -> validate; on success create the user, log them in and redirect
            to the site root; on failure flash an error and redirect back.
    Any other verb falls back to rendering the empty form (the original
    implicitly returned None, which crashes Django's response cycle).
    """
    if request.method == 'GET':
        form = RegistrationForm()
        return render(request, 'registration/register.html', {'form': form})
    if request.method == 'POST':
        form = RegistrationForm(request.POST)
        if form.is_valid():
            form.save()
            username = request.POST['username']
            password = request.POST['password1']
            user = authenticate(username=username, password=password)
            # authenticate() can return None (e.g. backend mismatch);
            # guard so login() is never called with a null user.
            if user is not None:
                login(request, user)
            return redirect("/")
        messages.error(request, 'Submission rejected. Please ensure you meet form requirements.')
        return redirect(request.path_info)
    form = RegistrationForm()
    return render(request, 'registration/register.html', {'form': form})
from collections import Counter

# Size of the per-code-point counting table used by the original
# implementation; kept for any external references (Counter now handles
# arbitrary characters, not just 8-bit ones).
ASCII_SIZE = 256


def get_max_occuring_character(text):
    """Return the most frequent character in *text*.

    Ties are broken by first occurrence in the string, matching the
    original counting-table implementation.  Returns '' for an empty
    string (the original's initial value).  The parameter was renamed
    from `str`, which shadowed the builtin.
    """
    if not text:
        return ''
    counts = Counter(text)
    # Counter preserves first-seen order, and max() keeps the first key
    # among equal values, so ties resolve to the earliest character.
    return max(counts, key=counts.get)


sample = 'sample striiiiiiiing'
print(get_max_occuring_character(sample))
#!/usr/bin/env python
__author__ = "Fabio Giuseppe Di Benedetto"
import os
import os.path
import zipfile
import glob
# folders (relative to the directory this script is run from)
rcm_folder = "../RCM"
firos_folder = ".."
rcm_src_folder = os.path.join(rcm_folder, "src")
rcm_scripts_folder = os.path.join(rcm_folder, "scripts")
rcm_cfg_folder = os.path.join(rcm_folder, "cfg")
# package types: a platform instance is either the master or a robot node
rcm_platform_master = "master"
rcm_platform_robot = "robot"
# individually packageable components
rcm_driver = "rcm_driver"
robotics_msgs = "robotics_msgs"
firos = "firos"
# installation package (zip archive) names, derived from component names
rcm_driver_zip_name = "%s.zip" % rcm_driver
robotics_msgs_zip_name = "%s.zip" % robotics_msgs
firos_zip_name = "%s.zip" % firos
def create_robotics_pkg(rcm_platform_instance_type=rcm_platform_master, embedded_pkg=False):
    """Build the installation zip for an RCM platform instance.

    :param rcm_platform_instance_type: rcm_platform_master or
        rcm_platform_robot; selects which install scripts and python
        modules go into the archive.
    :param embedded_pkg: when True (master only) the component packages
        (rcm_driver, robotics_msgs, firos) are built and embedded inside
        the platform archive as nested zips.
    :raises ValueError: for an unknown instance type.
    """
    if embedded_pkg:
        zip_path = "robotics.zip"
    else:
        if rcm_platform_instance_type == rcm_platform_master:
            zip_path = "rcm_platform_master.zip"
        elif rcm_platform_instance_type == rcm_platform_robot:
            zip_path = "rcm_platform_robot.zip"
        else:
            raise ValueError("The only valid values for rcm_platform_instance_type are '%s' or '%s'" %
                             (rcm_platform_master, rcm_platform_robot))
    if zip_path:
        # Rebuild from scratch: remove a stale archive before appending.
        if os.path.exists(zip_path):
            os.remove(zip_path)
        zip_file = zipfile.ZipFile(zip_path, "a")
        try:
            # m_name is the top-level folder inside the archive.
            m_name = rcm_platform_master
            if rcm_platform_instance_type == rcm_platform_master:
                m_name = "rcm_platform_%s" % rcm_platform_master
            elif rcm_platform_instance_type == rcm_platform_robot:
                m_name = "rcm_platform_%s" % rcm_platform_robot
            # all .sh, rcmpd and rcmp_n are put in the zip from the current
            # folder
            for file_path in glob.glob("*.sh"):
                if rcm_platform_instance_type == rcm_platform_master:
                    if "install_r.sh" in file_path or "sub_inst_r.sh" in file_path:
                        # these files are only for robots and in case of master
                        # we don't need them
                        pass
                    elif "install_m.sh" in file_path or "sub_inst_m.sh" in file_path:
                        # these files are only for master but we want always
                        # the same names (strip the _m suffix in the archive)
                        new_file_path = file_path.replace("_m.sh", ".sh")
                        zip_file.write(file_path, os.path.join(m_name, new_file_path))
                    else:
                        zip_file.write(file_path, os.path.join(m_name, file_path))
                elif rcm_platform_instance_type == rcm_platform_robot:
                    if "install_r.sh" in file_path or "sub_inst_r.sh" in file_path:
                        # these files are only for robots (strip the _r suffix)
                        new_file_path = file_path.replace("_r.sh", ".sh")
                        zip_file.write(file_path, os.path.join(m_name, new_file_path))
                    elif "install_m.sh" in file_path or "sub_inst_m.sh" in file_path:
                        # these files are only for master and in case of robots
                        # we don't need them
                        pass
                    else:
                        zip_file.write(file_path, os.path.join(m_name, file_path))
            file_list = ["rcmpd", "rcmp_n"]
            for file_path in file_list:
                zip_file.write(os.path.join(rcm_scripts_folder, file_path), os.path.join(m_name, file_path))
            file_list = ["LICENSE.txt", "README.md"]
            for file_path in file_list:
                zip_file.write(os.path.join(rcm_folder, file_path), os.path.join(m_name, file_path))
            # python module rcm_platform is put in the zip from src
            python_module = "rcm_platform"
            for file_path in glob.glob("%s/*.py" % os.path.join(rcm_src_folder, python_module)):
                # in case of rcm robots we don't need rcmp_ext_connector.py and rcmp_robotics_data.py
                if not (rcm_platform_instance_type == rcm_platform_robot and
                        ("rcmp_ext_connector.py" in file_path or "rcmp_robotics_data.py" in file_path)):
                    zip_file.write(file_path, os.path.join(m_name, os.path.relpath(file_path, rcm_src_folder)))
            # the ros configuration file is put in the zip from cfg
            ros_folder = "ros"
            file_name = "rosconsole.config"
            zip_file.write(os.path.join(rcm_cfg_folder, file_name), os.path.join(m_name, ros_folder, file_name))
            if rcm_platform_instance_type == rcm_platform_master and embedded_pkg:
                # in case of rcm master the parameter embedded_pkg is used to
                # specify if we put both the rcm components in the platform
                # package; the nested zips are removed after embedding.
                create_robotics_component_pkg(rcm_driver)
                create_robotics_component_pkg(robotics_msgs)
                create_robotics_component_pkg(firos)
                if os.path.exists(rcm_driver_zip_name):
                    zip_file.write(rcm_driver_zip_name, os.path.join(m_name, rcm_driver_zip_name))
                    os.remove(rcm_driver_zip_name)
                if os.path.exists(robotics_msgs_zip_name):
                    zip_file.write(robotics_msgs_zip_name, os.path.join(m_name, robotics_msgs_zip_name))
                    os.remove(robotics_msgs_zip_name)
                if os.path.exists(firos_zip_name):
                    zip_file.write(firos_zip_name, os.path.join(m_name, firos_zip_name))
                    os.remove(firos_zip_name)
        finally:
            zip_file.close()
def create_robotics_component_pkg(robotics_component):
    """Build the installation zip for a single robotics component.

    :param robotics_component: one of rcm_driver, robotics_msgs or firos.
    :raises ValueError: for an unknown component name.
    :raises TypeError: when no component is provided.
    """
    if robotics_component:
        if robotics_component == rcm_driver:
            zip_path = rcm_driver_zip_name
        elif robotics_component == robotics_msgs:
            zip_path = robotics_msgs_zip_name
        elif robotics_component == firos:
            zip_path = firos_zip_name
        else:
            # Fixed message: the original omitted firos even though it is
            # accepted above.
            raise ValueError("The only valid values for robotics_component are '%s', '%s' or '%s'" %
                             (rcm_driver, robotics_msgs, firos))
        if zip_path:
            # Rebuild from scratch: remove a stale archive before appending.
            if os.path.exists(zip_path):
                os.remove(zip_path)
            zip_file = zipfile.ZipFile(zip_path, "a")
            try:
                # m_name is the source folder (and archive root) of the component.
                m_name = "rcm"
                if robotics_component == rcm_driver:
                    m_name = "rcm"
                elif robotics_component == robotics_msgs:
                    m_name = robotics_msgs
                elif robotics_component == firos:
                    m_name = firos
                if robotics_component == rcm_driver:
                    for file_path in glob.glob("%s/*.*" % os.path.join(rcm_src_folder, m_name)):
                        zip_file.write(file_path, os.path.relpath(file_path, rcm_src_folder))
                    for file_path in glob.glob("%s/*/*.py" % os.path.join(rcm_src_folder, m_name)):
                        zip_file.write(file_path, os.path.relpath(file_path, rcm_src_folder))
                elif robotics_component == robotics_msgs:
                    for file_path in glob.glob("%s/*.*" % os.path.join(rcm_src_folder, m_name)):
                        zip_file.write(file_path, os.path.relpath(file_path, rcm_src_folder))
                    for file_path in glob.glob("%s/*/*.*" % os.path.join(rcm_src_folder, m_name)):
                        zip_file.write(file_path, os.path.relpath(file_path, rcm_src_folder))
                elif robotics_component == firos:
                    for file_path in glob.glob("%s/*.*" % os.path.join(firos_folder, m_name)):
                        zip_file.write(file_path, os.path.relpath(file_path, firos_folder))
                    for file_path in glob.glob("%s/config/*.*" % os.path.join(firos_folder, m_name)):
                        zip_file.write(file_path, os.path.relpath(file_path, firos_folder))
                    for file_path in glob.glob("%s/scripts/core.py" % os.path.join(firos_folder, m_name)):
                        zip_file.write(file_path, os.path.relpath(file_path, firos_folder))
                    for file_path in glob.glob("%s/scripts/mapserver.js" % os.path.join(firos_folder, m_name)):
                        zip_file.write(file_path, os.path.relpath(file_path, firos_folder))
                    # scripts/include
                    for file_path in glob.glob("%s/scripts/*/*.py" % os.path.join(firos_folder, m_name)):
                        zip_file.write(file_path, os.path.relpath(file_path, firos_folder))
                    # scripts/include/genpy, scripts/include/pubsub, scripts/include/rcm,
                    # scripts/include/ros, scripts/include/server
                    for file_path in glob.glob("%s/scripts/*/*/*.py" % os.path.join(firos_folder, m_name)):
                        zip_file.write(file_path, os.path.relpath(file_path, firos_folder))
                    # scripts/include/genpy/msg, scripts/include/pubsub/contextbroker,
                    # scripts/include/ros/dependencies
                    for file_path in glob.glob("%s/scripts/*/*/*/*.py" % os.path.join(firos_folder, m_name)):
                        zip_file.write(file_path, os.path.relpath(file_path, firos_folder))
            finally:
                zip_file.close()
    else:
        raise TypeError("The parameter 'robotics_component' must be provided")
if __name__ == "__main__":
    import sys

    # Dispatch on the single optional command-line argument; with no
    # argument, build the full embedded master package.
    if len(sys.argv) > 1:
        if sys.argv[1] == rcm_platform_master:
            create_robotics_pkg(rcm_platform_master)
        elif sys.argv[1] == rcm_platform_robot:
            create_robotics_pkg(rcm_platform_robot)
        elif sys.argv[1] == rcm_driver:
            create_robotics_component_pkg(rcm_driver)
        elif sys.argv[1] == robotics_msgs:
            create_robotics_component_pkg(robotics_msgs)
        elif sys.argv[1] == firos:
            create_robotics_component_pkg(firos)
        else:
            # Unknown argument: show usage.  print(...) with a single
            # argument is valid in both Python 2 and 3, unlike the
            # original Python-2-only print statements.
            print("Usage:")
            print("%s - create the robotics package with rcm %s and all the components embedded into the package" %
                  (__file__, rcm_platform_master))
            print("%s %s - create the rcm platform package for rcm %s" %
                  (__file__, rcm_platform_master, rcm_platform_master))
            print("%s %s - create the rcm platform package for rcm %s" %
                  (__file__, rcm_platform_robot, rcm_platform_robot))
            print("%s %s - create the rcm driver package" % (__file__, rcm_driver))
            print("%s %s - create the robotics msgs package" % (__file__, robotics_msgs))
            print("%s %s - create the firos package" % (__file__, firos))
    else:
        create_robotics_pkg(rcm_platform_master, True)
|
import hashlib
from django.conf import settings
def click_authorization(click_trans_id, amount, action, sign_time, sign_string, merchant_trans_id,
                        merchant_prepare_id=None, *args, **kwargs):
    """
    Verify a CLICK merchant-API request signature.

    The expected signature is the MD5 hex digest of the concatenated
    transaction fields and the service secret key.

    :param click_trans_id: CLICK-side transaction id
    :param amount: payment amount as sent by CLICK
    :param action: protocol action code
    :param sign_time: timestamp included in the signature
    :param sign_string: MD5 hex signature received from CLICK
    :param merchant_trans_id: merchant-side transaction id
    :param merchant_prepare_id: included in the digest only when non-empty
    :param args: unused, accepted for caller convenience
    :param kwargs: unused, accepted for caller convenience
    :return: True if the signature matches, else False
    """
    import hmac  # stdlib; local import keeps the module import block untouched

    # NOTE(review): assert is stripped under `python -O`; consider raising
    # an explicit configuration error instead.
    assert settings.CLICK_SETTINGS.get('service_id') is not None
    assert settings.CLICK_SETTINGS.get('secret_key') is not None
    assert settings.CLICK_SETTINGS.get('merchant_id') is not None
    service_id = settings.CLICK_SETTINGS['service_id']
    secret_key = settings.CLICK_SETTINGS['secret_key']
    text = f"{click_trans_id}{service_id}{secret_key}{merchant_trans_id}"
    if merchant_prepare_id != "" and merchant_prepare_id is not None:
        text += f"{merchant_prepare_id}"
    text += f"{amount}{action}{sign_time}"
    # Renamed from `hash`, which shadowed the builtin.
    digest = hashlib.md5(text.encode('utf-8')).hexdigest()
    if not isinstance(sign_string, str):
        # A non-string signature could never match the hex digest;
        # preserves the original's False result for such input.
        return False
    # Constant-time comparison: a plain != on untrusted signature input
    # leaks the match prefix length via timing.
    return hmac.compare_digest(digest, sign_string)
|
# Entrada: read one integer coordinate per axis from the user.
eje_x = int(input("Ingrese la primera cordenada en el eje x\n"))
eje_y = int(input("Ingrese la segunda coordenada en el eje y\n"))
coordenada = [eje_x, eje_y]
# Euclidean distance from the origin (0, 0), as the output message states.
# The original computed eje_x / eje_y, which is not a distance and crashed
# with ZeroDivisionError whenever eje_y == 0.
distancia = (eje_x ** 2 + eje_y ** 2) ** 0.5
print("Cordenada: ", coordenada)
print("La distancia de la coordenada [0,0] a la coordenada ingresada es: ", distancia)
# -*- coding: utf-8 -*-
#import visa
import random
import Tool
# Measurement channels exposed by this driver and their units.
param = {'FLOW': 'mbarl/s', 'P1': 'mbar'}


class Instrument(Tool.MeasInstr):
    """Driver for the PL300 instrument (serial link at 19200 baud)."""

    def __init__(self, resource_name, debug=False):
        super(Instrument, self).__init__(resource_name, 'PL300', debug, baud_rate=19200)

    def __del__(self):
        super(Instrument, self).__del__()

    # ------------------------------------------------------------------
    def measure(self, channel='FLOW'):
        """Read one channel ('FLOW' or 'P1') and cache it in last_measure.

        Returns the reading as a float, a random value in debug mode, or
        None for an unknown channel.  dict.has_key() and the print
        statements were Python-2-only; replaced with `in` and print().
        """
        if channel in self.last_measure:
            if not self.debug:
                if channel == 'FLOW':
                    answer = self.ask('*READ:MBAR*L/S?')
                else:
                    answer = self.ask('*MEAS:P1:MBAR?')
                answer = float(answer)
            else:
                # Debug mode: fabricate a plausible reading.
                answer = 100 * random.random()
            self.last_measure[channel] = answer
        else:
            print("you are trying to measure a non existent channel : " + channel)
            print("existing channels :", self.channels)
            answer = None
        return answer

    def get_status(self):
        """Return the raw status string reported by the instrument."""
        return self.ask('STAT?')
|
# Sample nested profile record used by the lookup helpers below:
# top-level characteristics plus a list of friend sub-records that share
# the same shape (name / email / interests).
ramit = {
    'name': 'Ramit',
    'email': 'ramit@gmail.com',
    'interests': ['movies', 'tennis'],
    'friends': [
        {
            'name': 'Jasmine',
            'email': 'jasmine@yahoo.com',
            'interests': ['photography', 'tennis']
        },
        {
            'name': 'Jan',
            'email': 'jan@hotmail.com',
            'interests': ['movies', 'tv']
        }
    ]
}
#Function that gets the email address of Ramit
def get_characteristic(dictionary, characteristic):
return dictionary[characteristic]
#Function that returns the first of Ramit's interest
def get_interest(dictionary, section, number):
result = []
for i in dictionary[section]:
result.append(i)
return result[number]
#Function that gets the email address of Jasmine
def get_friend_characteristic(dictionary, friend, characteristic):
    """Look up *characteristic* of the friend named *friend*.

    Falls through and returns None when no friend matches, exactly like
    the original loop.
    """
    for entry in dictionary["friends"]:
        if entry["name"] == friend:
            return entry[characteristic]
# Fetch one entry of a friend's list-valued characteristic
# (e.g. the second of Jan's two interests).
def get_friend_interest(dictionary, friend, characteristic, number):
    """Return item *number* of *characteristic* for the friend named
    *friend*; None when no friend matches (implicit fall-through)."""
    for entry in dictionary["friends"]:
        if entry["name"] == friend:
            return entry[characteristic][number]
# Demo: second of Jan's two interests -> 'tv'
print(get_friend_interest(ramit, "Jan", "interests", 1))
from setuptools import setup, find_packages
def do_setup():
    """Register the News_Buddy package and its sub-packages with setuptools."""
    setup(name='News_Buddy',
          version="0.0",
          author='Lilian Luong, Ameer Syedibrahim, Jaden Tennis',
          description='News database by topic and named entities',
          platforms=['Windows', 'Linux', 'Mac OS-X', 'Unix'],
          packages=find_packages())


# Standard setup.py entry point.
if __name__ == "__main__":
    do_setup()
|
from flask import (
Blueprint, flash, g, redirect, render_template, request, url_for
)
from werkzeug.exceptions import abort
from flaskr.auth import login_required
from flaskr.db import get_db
bp = Blueprint('blog', __name__)
@bp.route('/')
def index():
    """Show all posts, newest first, with the users who liked each one."""
    db = get_db()
    posts = db.execute(
        'SELECT p.id, title, body, created, author_id, username'
        ' FROM posts p JOIN users u ON p.author_id = u.id'
        ' ORDER BY created DESC'
    ).fetchall()
    # Map post id -> list of liking user ids (one lookup per post).
    likes = {post['id']: get_users_who_like_post(post['id']) for post in posts}
    return render_template('blog/index.html', posts=posts, likes=likes)
@bp.route('/create', methods=('GET', 'POST'))
@login_required
def create():
    """Create a new post: GET renders the form, POST validates and inserts."""
    if request.method == 'POST':
        title = request.form['title']
        body = request.form['body']
        if not title:
            # Only validation rule: a post must have a title.
            flash('Title is required.')
        else:
            db = get_db()
            db.execute(
                'INSERT INTO posts (title, body, author_id)'
                ' VALUES (?, ?, ?)',
                (title, body, g.user['id'])
            )
            db.commit()
            return redirect(url_for('blog.index'))
    return render_template('blog/create.html')
def get_post(id, check_author=True):
    """Fetch a post by id.

    Aborts with 404 when the post is missing, and with 403 when
    *check_author* is set and the current user is not its author.
    """
    row = get_db().execute(
        'SELECT p.id, title, body, created, author_id, username'
        ' FROM posts p JOIN users u ON p.author_id = u.id'
        ' WHERE p.id = ?',
        (id,)
    ).fetchone()
    if row is None:
        abort(404, "Post id {0} doesn't exist.".format(id))
    if check_author and row['author_id'] != g.user['id']:
        abort(403)
    return row
def get_users_who_like_post(id):
    """Return the ids of all users who liked post *id*."""
    db = get_db()
    users_rows = db.execute(
        'SELECT user_id'
        ' FROM likes l JOIN users u ON u.id = l.user_id'
        ' WHERE post_id = ?', (id,)
    ).fetchall()
    # The query selects a single column; read it by name instead of the
    # original list(dict(row).values())[0] round-trip.
    return [row['user_id'] for row in users_rows]
def get_comments_for_post(id):
    """Return all comment rows (with author username) for post *id*."""
    connection = get_db()
    rows = connection.execute(
        'SELECT c.id, user_id, body, username'
        ' FROM comments c JOIN users u ON u.id = c.user_id'
        ' WHERE post_id = ?', (id,)
    ).fetchall()
    return rows
@bp.route('/<int:id>')
def get_single_post(id):
    """Render one post's detail page with its comments (no author check)."""
    return render_template('blog/single_post.html',
                           post=get_post(id, False),
                           comments=get_comments_for_post(id))
@bp.route('/<int:id>/update', methods=('GET', 'POST'))
@login_required
def update(id):
    """Edit a post owned by the current user (get_post enforces 403)."""
    post = get_post(id)
    if request.method == 'POST':
        title = request.form['title']
        body = request.form['body']
        if not title:
            flash('Title is required.')
        else:
            db = get_db()
            db.execute(
                'UPDATE posts SET title = ?, body = ?'
                ' WHERE id = ?',
                (title, body, id)
            )
            db.commit()
            return redirect(url_for('blog.index'))
    return render_template('blog/update.html', post=post)
@bp.route('/<int:id>/delete', methods=('POST',))
@login_required
def delete(id):
    """Delete a post owned by the current user, then return to the index."""
    get_post(id)  # 404 when missing, 403 when not the author
    db = get_db()
    db.execute('DELETE FROM posts WHERE id = ?', (id,))
    db.commit()
    return redirect(url_for('blog.index'))
@bp.route('/<int:user_id>/<int:post_id>/like', methods=('POST',))
@login_required
def like(user_id, post_id):
    """Record that *user_id* likes *post_id*, then return to the index.

    NOTE(review): user_id comes from the URL, not from g.user — any
    logged-in user can create likes on behalf of another user; consider
    using g.user['id'] instead.  Repeated likes insert duplicate rows
    unless the likes table enforces uniqueness — verify the schema.
    """
    get_post(post_id, False)  # 404 if the post does not exist
    db = get_db()
    db.execute(
        'INSERT INTO likes (user_id, post_id)'
        ' VALUES (?, ?)',
        (user_id, post_id)
    )
    db.commit()
    return redirect(url_for('blog.index'))
@bp.route('/<int:user_id>/<int:post_id>/unlike', methods=('POST',))
@login_required
def unlike(user_id, post_id):
    """Remove *user_id*'s like from *post_id*, then return to the index.

    NOTE(review): like like(), this trusts user_id from the URL rather
    than g.user — any logged-in user can remove another user's like.
    """
    get_post(post_id, False)  # 404 if the post does not exist
    db = get_db()
    db.execute(
        'DELETE FROM likes WHERE user_id = ? AND post_id = ?',
        (user_id, post_id)
    )
    db.commit()
    return redirect(url_for('blog.index'))
@bp.route('/<int:user_id>/<int:post_id>/add-comment', methods=('POST',))
@login_required
def add_comment(user_id, post_id):
    """Attach a comment to *post_id*, then show the post's detail page.

    NOTE(review): user_id is taken from the URL rather than g.user —
    confirm that impersonated comments are not possible via this route.
    """
    get_post(post_id, False)  # 404 if the post does not exist
    body = request.form['body']
    error = None
    # The original never assigned `error`, so its validation branch was
    # dead and empty comments were silently inserted; validate the body
    # the same way create()/update() validate titles.
    if not body:
        error = 'Comment body is required.'
    if error is not None:
        flash(error)
    else:
        db = get_db()
        db.execute(
            'INSERT INTO comments (user_id, post_id, body)'
            ' VALUES (?, ?, ?)',
            (user_id, post_id, body)
        )
        db.commit()
    return redirect(url_for('blog.get_single_post', id=post_id))
def get_comment(id, check_author=True):
    """Fetch a comment by id.

    Aborts with 404 when missing, and with 403 when *check_author* is set
    and the current user did not write it.
    """
    row = get_db().execute(
        'SELECT id, user_id, post_id, body'
        ' FROM comments'
        ' WHERE id = ?',
        (id,)
    ).fetchone()
    if row is None:
        abort(404, "Comment id {0} doesn't exist.".format(id))
    if check_author and row['user_id'] != g.user['id']:
        abort(403)
    return row
@bp.route('/<int:id>/update-comment', methods=('GET', 'POST'))
@login_required
def update_comment(id):
    """Edit a comment written by the current user.

    NOTE(review): `error` is never assigned, so the flash branch is dead
    and an empty body is accepted — likely the same missing validation as
    in add_comment; confirm and add a body check.
    """
    comment = get_comment(id)  # 404 when missing, 403 when not the author
    if request.method == 'POST':
        body = request.form['body']
        error = None
        if error is not None:
            flash(error)
        else:
            db = get_db()
            db.execute(
                'UPDATE comments SET body = ?'
                ' WHERE id = ?',
                (body, id)
            )
            db.commit()
            return redirect(url_for('blog.get_single_post', id=comment['post_id']))
    return render_template('blog/update_comment.html', comment=comment)
@bp.route('/<int:id>/delete-comment', methods=('POST',))
@login_required
def delete_comment(id):
    """Delete a comment written by the current user, then show its post."""
    comment = get_comment(id)  # 404 when missing, 403 when not the author
    db = get_db()
    db.execute('DELETE FROM comments WHERE id = ?', (id,))
    db.commit()
    return redirect(url_for('blog.get_single_post', id=comment['post_id']))
from database.utils.JSON import to_json
from database.application.function_mapper import get_db_functions
from database.utils.answer import Answer
def check_super_system_function(name):
    """True when *name* maps to a registered function whose type is
    "system" and subtype is "super"."""
    meta = get_db_functions()[name][1]
    is_super_system = (meta.function_type == "system"
                       and meta.function_subtype == "super")
    return is_super_system
def get_database_name(instruction):
    """Return the database an instruction targets.

    System functions carry the database inside instruction["data"];
    every other function type carries it at the top level.
    """
    meta = get_db_functions()[instruction["function"]][1]
    if meta.function_type == "system":
        return instruction["data"]["database"]
    return instruction["database"]
def check_the_validity_of_the_instruction(instruction):
    """Validation hook; currently accepts every instruction unchanged."""
    return True
def execute_instruction(instruction):
    """Dispatch *instruction* to its registered database function.

    instruction["function"] must name an entry in the function mapper;
    unknown or missing names return an error Answer instead of raising.
    The arguments passed to the target function depend on its declared
    type: "system" gets the data payload, "trigger" adds the trigger
    type, "index" gets the field, and everything else gets the standard
    database/collection/data triple.  Leftover debug prints from the
    original were removed.
    """
    function_mapper = get_db_functions()
    name = instruction["function"]
    if name is None or name not in function_mapper:
        return Answer("Check command {}".format(name)).info
    function, meta = function_mapper[name]
    if meta.function_type == "system":
        return function(instruction["data"])
    if meta.function_type == "trigger":
        return function(instruction["database"], instruction["collection"],
                        instruction["type"], instruction["data"])
    if meta.function_type == "index":
        return function(instruction["database"], instruction["collection"],
                        instruction["field"])
    return function(instruction["database"], instruction["collection"],
                    instruction["data"])
if __name__ == '__main__':
    # Ad-hoc smoke test: build a fake insert instruction and execute it.
    class Ment:
        """Sample record whose __dict__ is inserted into the demo collection."""
        def __init__(self, name="alex", age=15):
            self.name = name
            self.age = age
            self.language = ["greece", "spanish", "english"]

    class DataStructure:
        """Mutable holder mirroring the instruction JSON shape."""
        def __init__(self):
            self.function = None
            self.data = None
            self.database = None
            self.collection = None

    data = DataStructure()
    data.function = 'insert'
    data.database = 'london'
    data.collection = 'people'
    data.data = [Ment("lol", 18).__dict__, Ment("lolita", 27).__dict__]
    execute_instruction(to_json(data.__dict__))
|
import os.path
import random
import torchvision.transforms as transforms
#import torch
from data.base_dataset import BaseDataset
from data.image_folder import make_dataset
from PIL import Image
import mxnet as mx
class AlignedDataset(BaseDataset):
    """Paired-image (A|B) dataset exposed as an MXNet data iterator.

    Each file under <dataroot>/<phase> holds the A and B images side by
    side in one picture; next() yields one random-cropped, optionally
    flipped (A, B) pair per call as an mx.io.DataBatch.
    """

    def initialize(self, opt):
        """Collect the sorted list of AB image paths for opt.phase."""
        self.cur_batch = 0
        self.opt = opt
        self.root = opt.dataroot
        self.dir_AB = os.path.join(opt.dataroot, opt.phase)
        self.AB_paths = sorted(make_dataset(self.dir_AB))
        self.dataset_len = len(self.AB_paths)

    def __iter__(self):
        return self

    def reset(self):
        """Reshuffle the image list and restart iteration."""
        random.shuffle(self.AB_paths)
        self.cur_batch = 0

    def __next__(self):
        # Python 3 iterator protocol delegates to the MXNet-style next().
        return self.next()

    @property
    def provide_data(self):
        # Shapes assume batch size 1 with 256x256 crops — presumably this
        # should track opt.fineSize; TODO confirm against the training code.
        return [mx.io.DataDesc('cond_data', (1, 3, 256, 256)),
                mx.io.DataDesc('data', (1, 3, 256, 256))]

    @property
    def provide_label(self):
        return [mx.io.DataDesc('dloss_label', (1,)),
                mx.io.DataDesc('l1_loss_label', (1, 3, 256, 256))]

    def next(self):
        """Return the next mx.io.DataBatch; raise StopIteration at the end."""
        if self.cur_batch >= self.dataset_len:
            raise StopIteration
        A, B = self.get_image(self.cur_batch)
        self.cur_batch += 1
        label = None  # discriminator label is filled in elsewhere
        return mx.io.DataBatch(data=[A, B], label=[label, B])

    def get_image(self, index):
        """Load AB image *index*; normalize, random-crop and split into (A, B)."""
        AB_path = self.AB_paths[index]
        # Image files must be read in binary mode: the original text-mode
        # open() fails on Python 3 and can corrupt bytes; also close the
        # file handle deterministically.
        with open(AB_path, 'rb') as image_file:
            img = mx.image.imdecode(image_file.read())  # decoded as RGB
        # Resize to loadSize x 2*loadSize.  interp=2 is bicubic — the
        # numeric value of cv2.INTER_CUBIC; the original referenced cv2
        # without importing it, raising NameError at runtime.
        img = mx.image.imresize(img, self.opt.loadSize, self.opt.loadSize * 2, interp=2)
        # Convert to [0, 1] then normalize to mean 0.5 / std 0.5.
        img = img.astype('float32')
        img /= 255.0
        AB = mx.image.color_normalize(img, 0.5, 0.5)
        # Split the width in half: left half = A, right half = B.
        w_total = AB.shape[1]
        w = int(w_total / 2)
        h = AB.shape[0]
        # Random crop offsets shared by A and B so the pair stays aligned.
        w_offset = random.randint(0, max(0, w - self.opt.fineSize - 1))
        h_offset = random.randint(0, max(0, h - self.opt.fineSize - 1))
        tempA = mx.nd.slice_axis(AB, axis=0, begin=h_offset, end=h_offset + self.opt.fineSize)
        A = mx.nd.slice_axis(tempA, axis=1, begin=w_offset, end=w_offset + self.opt.fineSize)
        tempB = mx.nd.slice_axis(AB, axis=0, begin=h_offset, end=h_offset + self.opt.fineSize)
        B = mx.nd.slice_axis(tempB, axis=1, begin=w + w_offset, end=w + w_offset + self.opt.fineSize)
        # Random horizontal flip, applied to both halves together.
        if (not self.opt.no_flip) and random.random() < 0.5:
            A = mx.ndarray.reverse(A, axis=1)
            B = mx.ndarray.reverse(B, axis=1)
        # Layout change to NCHW.  NOTE(review): A uses a rollaxis pair while
        # B uses a swapaxes pair — confirm both really yield CHW; the two
        # transforms are not obviously equivalent.
        A = mx.ndarray.rollaxis(A, 0, 2)
        A = mx.ndarray.rollaxis(A, 1, 2)
        A = mx.ndarray.expand_dims(A, axis=0)
        B = mx.ndarray.swapaxes(B, 0, 2)
        B = mx.ndarray.swapaxes(B, 1, 2)
        B = mx.ndarray.expand_dims(B, axis=0)
        return A, B

    def __len__(self):
        return len(self.AB_paths)

    def name(self):
        return 'AlignedDataset'
|
from __future__ import print_function
import pickle
from game import Board, Game
from mcts_pure import MCTSPlayer as MCTS_Pure
from mcts_alphaZero import MCTSPlayer
from policy_value_net_pytorch import PolicyValueNet, Net
# from policy_value_net_numpy import PolicyValueNetNumpy as PolicyValueNet
import sys
from collections import defaultdict
import torch
N = 5                  # stones in a row needed to win
SIZE = 8               # board width and height
N_GAMES = 1            # games played per evaluation
MODEL_1 = 'best.model' # checkpoint loaded for player 1
# MODEL_2='../starter/models_original_2_24/best.model'
PLAYOUT = 1000         # playout budget for the pure-MCTS benchmark player
MCTS_PURE = True       # benchmark against pure MCTS (see commented code in run())
HUMAN = True           # benchmark against a human opponent
class Human(object):
    """Console-input player: prompts for a "row,col" move until it is valid."""

    def __init__(self):
        self.player = None

    def set_player_ind(self, p):
        """Remember which side this player controls."""
        self.player = p

    def get_action(self, board):
        """Ask for a move; re-prompt recursively on parse errors or
        moves that are not currently available."""
        move = -1
        try:
            raw = input("Your move: ")
            if isinstance(raw, str):  # for python3
                raw = [int(token, 10) for token in raw.split(",")]
            move = board.location_to_move(raw)
        except Exception:
            move = -1
        if move == -1 or move not in board.availables:
            print("invalid move")
            move = self.get_action(board)
        return move

    def __str__(self):
        return "Human {}".format(self.player)
def policy_evaluate(player1, player2, n_games=N_GAMES):
    """Play n_games between the two players, alternating who starts, and
    return player1's win ratio (draws, winner == -1, count as half)."""
    win_cnt = defaultdict(int)
    for game_idx in range(n_games):
        board = Board(width=SIZE, height=SIZE, n_in_row=N)
        winner = Game(board).start_play(player1, player2,
                                        start_player=game_idx % 2,
                                        is_shown=1)
        win_cnt[winner] += 1
    return 1.0 * (win_cnt[1] + 0.5 * win_cnt[-1]) / n_games
def run():
    """Load MODEL_1, wrap it in an MCTS player, play it against a human
    opponent and print player 1's win ratio."""
    n = N  # NOTE(review): n/width/height shadow module constants and n is unused
    width, height = SIZE, SIZE
    # Alternative opponents (pure MCTS or a second model), kept for reference:
    # if MCTS_PURE:
    #     player_1 = MCTS_Pure(c_puct=5, n_playout=PLAYOUT)
    #     print ("Benchmarking the following two models:"+MODEL_1+" Pure MCTS")
    # elif HUMAN:
    #     player_2=Human()
    #     print ("Benchmarking the following two models:"+MODEL_1+" Human")
    # else:
    #     print ("Benchmarking the following two models:"+MODEL_1+" vs "+MODEL_2)
    #     policy_2= PolicyValueNet(width, height, model_file=MODEL_2,state_representation_channel = 4)
    #     player_2 = MCTSPlayer(policy_2.policy_value_fn,c_puct=5,n_playout=400) # set larger n_playout for better performance
    #
    policy_1 = PolicyValueNet(width, height, model_file=MODEL_1, in_channel=11, n_resnet=1)
    player_1 = MCTSPlayer(policy_1.policy_value_fn,
                          c_puct=5,
                          n_playout=400)  # set larger n_playout for better performance
    # player_1 = Human()
    player_2 = Human()
    win_ratio = policy_evaluate(player_1, player_2)
    print("The win ratio for " + MODEL_1 + " is: ", str(100 * win_ratio) + "%")
# Script entry point: play/evaluate as configured by the constants above.
if __name__ == '__main__':
    run()
|
import sys
# Command line: two comma-separated integer lists and a budget, e.g.
#   python script.py 1,2,3 2,3,4 5
# w holds item values ("wartosci"), s the matching costs ("koszty"),
# b the cost budget — see the final report prints below.
w = [int(x) for x in sys.argv[1].split(',')]
s = [int(x) for x in sys.argv[2].split(',')]
b = int(sys.argv[3])
assert len(w) == len(s)
# DP table: rows are items, columns are total values 1..sum(w);
# matrix[i][v-1] is presumably the minimal cost to reach value v using
# items 0..i (None = unreachable) — confirm against the loop below.
matrix = [[None for x in range(sum(w))] for y in range(len(w))]
def dump_matrix():
    """Pretty-print the module-level DP `matrix` with 1-based row and
    column headers, padding every cell to the widest printed value.

    Reads the module globals `matrix` and `w`.  Comparisons with None now
    use identity (`is`/`is not`), the correct idiom, instead of ==/!=.
    """
    # Start from the widest header, then widen for any larger cell value.
    elements_width = max(len(str(sum(w))), len(str(len(w))))
    for row in matrix:
        for cell in row:
            if cell is not None and len(str(cell)) > elements_width:
                elements_width = len(str(cell))
    # Column header line (1-based value indices).
    print(f'{" " * elements_width} ', end='')
    for j in range(sum(w)):
        print(f'{j+1:>{elements_width}} ', end='')
    print()
    # One line per item row, 1-based row index first.
    for i in range(len(matrix)):
        print(f'{i+1:>{elements_width}}', end='; ')
        for cell in matrix[i]:
            if cell is None:
                print(' ' * elements_width, end=' ')
            else:
                print(f'{cell:>{elements_width}}', end=' ')
        print()
# Fill the DP table row by row; each iteration prints its trace and the
# current table state.  NOTE(review): `liczymy` is set to True on every
# path, so the `if liczymy` guard is effectively always taken.
for i in range(len(w)):
    liczymy = True
    print(f'\niteracja {i+1}')
    wartosc = w[i]
    # Taking only item i reaches value w[i] at cost s[i]; keep the cheaper
    # of that and whatever the previous row already achieved.
    if i > 0 and matrix[i-1][wartosc-1] == None:
        matrix[i][wartosc-1] = s[i]
    elif i > 0 and matrix[i-1][wartosc-1] != None:
        if s[i] <= matrix[i-1][wartosc-1]:
            matrix[i][wartosc-1] = s[i]
        else:
            liczymy = True
            matrix[i][wartosc-1] = matrix[i-1][wartosc-1]
    elif i == 0:
        matrix[i][wartosc-1] = s[i]
    if i > 0:
        if liczymy:
            # Extend every reachable value of the previous row by item i,
            # provided the combined cost stays within budget b.
            for j in range(len(matrix[i-1])):
                if matrix[i-1][j] != None:
                    print(f'matrix[{i}][{wartosc+j}] = matrix[{i-1}][{j}] + {s[i]}'
                          f' = {matrix[i-1][j]} + {s[i]} = {matrix[i-1][j] + s[i]}')
                    assert matrix[i-1][j] != None
                    if matrix[i-1][j] + s[i] <= b:
                        # Keep the cheaper of extending vs. the previous row's entry.
                        if matrix[i-1][wartosc+j] != None and matrix[i-1][wartosc+j] < matrix[i-1][j] + s[i]:
                            matrix[i][wartosc+j] = matrix[i-1][wartosc+j]
                        else:
                            matrix[i][wartosc+j] = matrix[i-1][j] + s[i]
        # Carry forward previous-row entries not improved in this row.
        for j in range(len(matrix[i-1])):
            if matrix[i-1][j] != None:
                if matrix[i][j] == None:
                    matrix[i][j] = matrix[i-1][j]
    dump_matrix()
    print()
# rozwiazanie
wiersze = []
koszty = []
wartosci = []
rozmiar = b
szukany = None
for i in reversed(range(len(matrix))):
for j in reversed(range(len(matrix[i]))):
if matrix[i][j] != None:
if szukany != None and matrix[i][j] > szukany:
continue
if i > 0:
if matrix[i][j] == matrix[i-1][j]:
break
elif matrix[i-1][j] != None and matrix[i-1][j] < matrix[i][j]:
raise RuntimeError
wiersze.append(i+1)
koszty.append(s[i])
wartosci.append(w[i])
szukany = matrix[i][j] - s[i]
rozmiar -= s[i]
break
wiersze = list(reversed(wiersze))
koszty = list(reversed(koszty))
wartosci = list(reversed(wartosci))
print('wiersze:', wiersze)
print('koszty:', koszty, '=', sum(koszty))
print('wartosci:', wartosci, '=', sum(wartosci)) |
import standard
class Hunter(standard.Character):
    """Character that backstabs anyone on its hit list on sight."""

    def __init__(self):
        # NOTE(review): standard.Character.__init__ is never invoked;
        # confirm whether super().__init__() should be called here.
        self.hit_list = []

    def on_see_entry(self, char):
        """Attack *char* if its id is on the hit list."""
        if char.id in self.hit_list:
            # Fixed NameError: the original passed the undefined name `ch`.
            attack(char, "backstab")  # `attack` not implemented yet
|
# -*- coding: utf-8 -*-
from flask import Flask, render_template
from utils import gametype, mapname, colour, geoip, trim
import parser
import pylibmc
from cache import Cache
app = Flask(__name__, static_folder="./static")
# Page cache backed by a local memcached instance.
backend = pylibmc.Client(["127.0.0.1"])
cache = Cache(backend)
# @app.route must be the outermost decorator: Flask registers exactly the
# function it receives, so with @cache on top (as originally written) the
# UNcached view was registered and the 180s cache never took effect.
@app.route('/')
@cache("iw4m-html", time=180)
def index():
    """Render the server list page, cached for 180 seconds."""
    servers = parser.load()
    return render_template('index.html', servers=servers, trim=trim, geoip=geoip, colour=colour, gametype=gametype, mapname=mapname, len=len)
# Development entry point; debug=True must not be used in production.
if __name__ == '__main__':
    app.run(host='0.0.0.0', debug=True)
|
from sqlalchemy.orm.exc import FlushError
__author__ = 'ada'
import os
import sys
from sqlalchemy import Column, ForeignKey, Integer, String, Binary
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine
Base = declarative_base()
class Language(Base):
    """A language, keyed by its two-letter code (e.g. 'en')."""
    __tablename__ = "language"
    id = Column(String(2), primary_key=True)
    name = Column(String(255))
class User(Base):
    """An application user with a salted password hash and a preferred language."""
    __tablename__ = "user"
    id = Column(Integer, primary_key=True)
    email = Column(String(225))
    name = Column(String(225))
    password = Column(Binary)
    password_salt = Column(Binary)
    preferred_language_id = Column(String(2), ForeignKey('language.id'))
    language = relationship(Language)
class Word(Base):
    """A vocabulary word with a frequency rank, tied to a language."""
    __tablename__ = "word"
    id = Column(Integer, primary_key=True)
    word = Column(String(225))
    word_rank = Column(Integer)
    starred = Column(Integer)
    language_id = Column(String(2), ForeignKey('language.id'))
    language = relationship(Language)

    def __str__(self):
        return str(self.word)
class Text(Base):
    """A stored text (up to 10000 chars) with a content hash and source url id."""
    __tablename__ = "text"
    id = Column(Integer, primary_key=True)
    content = Column(String(10000))
    content_hash = Column(Binary)
    language_id = Column(String(2), ForeignKey('language.id'))
    url_id = Column(Integer)
    language = relationship(Language)
# NOTE(review): credentials are hard-coded in source; move to configuration.
con_string = 'mysql://{}:{}@{}'.format("zeeguu", "sla2012", "localhost/zeeguu")
engine = create_engine(con_string)
Base.metadata.create_all(engine)
from sqlalchemy.orm import sessionmaker
Base.metadata.bind = engine  # bind the metadata from Base to the engine
DBSession = sessionmaker(engine)  # create the DBSession session factory
# open a new database session (an instance of the DBSession factory)
session = DBSession()
# Demo queries.  print(...) with a single argument works on both Python 2
# and 3, unlike the original print statements.
users = session.query(User).all()
for u in users:
    # NOTE(review): name is printed twice — was one of these meant to be
    # another field?
    print(u.name + ":" + u.name + ":" + u.email)

first_user = session.query(User).first()

english_words = session.query(Word).join(Language).filter(Language.name == "English").all()
for en_word in english_words:
    print(en_word.word)

english_words_rank_1 = session.query(Word).filter(Word.language_id == "en", Word.word_rank == "2").all()
for ranked_en_word in english_words_rank_1:
    print(ranked_en_word)

# The original block here (`if user = session.query(...).one():`) was a
# SyntaxError, and then indexed a result list as if it were a single User;
# rewritten as a working single-match lookup.
matches = session.query(User).filter(User.name == "Anca Lungu").all()
if len(matches) == 1:
    print("Anca Lungu's selected language is:" + matches[0].language.id)

try:
    user = session.query(User).filter(User.name == "Anca Lungu").one()
    print("Anca Lungu's selected language is:" + user.language.id)
except:
    # NOTE(review): bare except hides real errors — narrow this to
    # sqlalchemy.orm.exc.NoResultFound / MultipleResultsFound.
    print("userul nu exista...")
"""new_user = User(name="Anca Lungu", email="bare.fordi@gmail.com")
new_language = Language(id="dn", name="Danish")
session.add(new_language)
new_user.language = new_language
session.add(new_user)
session.commit()"""
try:
new_language = Language(id="dn", name="Danish")
new_user = User(name="Anca Lungu", email="bare.fordi@gmail.com", language = new_language)
session.add(new_language)
session.add(new_user)
session.commit()
except FlushError:
print "probabil languageul exista deja"
"""from sqlalchemy.orm import sessionmaker
DBSession = sessionmaker()
DBSession.bind = engine
session = DBSession()
users = session.query(User).all()
for u in users:
print u.name + " - " + u.language.name"""
|
# Unit tests for num_to_english.  The spelling ('Fourty') intentionally
# matches the implementation under test.
def test_47():
    assert num_to_english(47) == 'Fourty Seven'

def test_27():
    assert num_to_english(27) == 'Twenty Seven'

def test_7():
    assert num_to_english(7) == 'Seven'

def test_17():
    assert num_to_english(17) == 'Seventeen'

def test_33():
    assert num_to_english(33) == 'Thirty Three'

def test_50():
    assert num_to_english(50) == 'Fifty'

def test_10():
    assert num_to_english(10) == 'Ten'

# BUG FIX: a second, identical `test_27` defined here shadowed the first,
# so pytest only ever collected one of them; the duplicate was removed.

def test_99():
    assert num_to_english(99) == 'Ninety Nine'

def test_54():
    assert num_to_english(54) == 'Fifty Four'

def test_32():
    assert num_to_english(32) == 'Thirty Two'

def test_21():
    assert num_to_english(21) == 'Twenty One'

def test_22():
    assert num_to_english(22) == 'Twenty Two'

def test_43():
    assert num_to_english(43) == 'Fourty Three'

def test_56():
    assert num_to_english(56) == 'Fifty Six'

def test_12():
    assert num_to_english(12) == 'Twelve'

def test_13():
    assert num_to_english(13) == 'Thirteen'

def test_19():
    assert num_to_english(19) == 'Nineteen'

def test_90():
    assert num_to_english(90) == 'Ninety'

# BUG FIX: these were misleadingly named test_100/test_101 although they
# exercised 15 and 1; renamed to match the value under test.
def test_15():
    assert num_to_english(15) == 'Fifteen'

def test_1():
    assert num_to_english(1) == 'One'
def num_to_english(N):
    """Return the English words for an integer 0-99, or 'Invalid' otherwise.

    Spelling follows the accompanying test suite (e.g. 'Fourty').

    Fixes over the original version:
    * 9 now returns 'Nine' (the old check ``0 <= N < 9`` excluded it and
      fell through to 'Invalid').
    * Values outside 0-99 return 'Invalid' instead of raising IndexError
      (e.g. 100) or returning a wrong tens word (e.g. -10).
    * The nine copy-pasted per-decade elif branches are collapsed into one
      divmod-based path.
    """
    ones = {0: 'Zero', 1: 'One', 2: 'Two', 3: 'Three', 4: 'Four', 5: 'Five',
            6: 'Six', 7: 'Seven', 8: 'Eight', 9: 'Nine',
            11: 'Eleven', 12: 'Twelve', 13: 'Thirteen', 14: 'Fourteen',
            15: 'Fifteen', 16: 'Sixteen', 17: 'Seventeen', 18: 'Eighteen', 19: 'Nineteen'}
    tens_words = ['Ten', 'Twenty', 'Thirty', 'Fourty', 'Fifty', 'Sixty',
                  'Seventy', 'Eighty', 'Ninety']
    if not 0 <= N <= 99:
        return "Invalid"
    if N <= 9:
        return ones[N]
    tens, below_ten = divmod(N, 10)
    if below_ten == 0:
        return tens_words[tens - 1]   # exact multiples of ten: Ten..Ninety
    if tens == 1:
        return ones[N]                # 11-19 have dedicated words
    return tens_words[tens - 1] + ' ' + ones[below_ten]
|
import tensorflow as tf
import numpy as np
import os
import train
import time
import cv2
MOVING_AVERAGE_DECAY = 0.99
EVAL_INTERVAL_SECS = 10
image_size = 128

# Load the input image, resize it to the network's 128x128 input size and
# add a leading batch dimension.
img = cv2.imread("qqq.jpg")
img0 = cv2.resize(img, (128, 128))
img2 = tf.cast(img0, tf.float32)
img3 = tf.reshape(img2, (1, 128, 128, 3))

# Build the inference graph; maxa is the index of the most probable class.
y = train.inference(img3)
qq = tf.nn.softmax(y)
maxa = tf.argmax(y, 1)
q = qq[0][maxa[0]]

# Restore the exponential-moving-average (shadow) weights from the checkpoint.
variable_averages = tf.train.ExponentialMovingAverage(train.MOVING_AVERAGE_DECAY)
variable_to_restore = variable_averages.variables_to_restore()
saver = tf.train.Saver(variable_to_restore)

# Per-class overlay: (label text, rectangle colour BGR, text colour BGR).
# The five original copy-pasted if/elif rendering branches differed only in
# these three values, so they are collapsed into one table-driven path.
_CLASS_OVERLAYS = {
    0: ("pushing!", (0, 0, 255), (255, 0, 0)),
    1: ("Someone hit people", (0, 0, 255), (255, 0, 0)),
    2: ("shake hands", (0, 255, 0), (255, 0, 0)),
    3: ("The beast embrace", (0, 255, 0), (0, 0, 255)),
}
_DEFAULT_OVERLAY = ("kicking a player", (0, 0, 255), (255, 0, 0))

with tf.Session() as sess:
    tf.local_variables_initializer().run()
    init_op = tf.global_variables_initializer()
    sess.run(init_op)
    ckpt = tf.train.get_checkpoint_state(train.MODEL_SAVE_PATH)
    if ckpt and ckpt.model_checkpoint_path:
        saver.restore(sess, ckpt.model_checkpoint_path)
        # The global step is encoded at the end of the checkpoint file name.
        global_step = ckpt.model_checkpoint_path.split('/')[-1].split('-')[-1]
        # (The original looped `for i in range(1)` -- a single pass.)
        ss = sess.run(maxa)
        label, rect_color, text_color = _CLASS_OVERLAYS.get(int(ss), _DEFAULT_OVERLAY)
        cv2.rectangle(img, (20, 20), (img.shape[1] - 20, img.shape[0] - 20), rect_color, 2)
        cv2.putText(img, label, (40, 40), cv2.FONT_HERSHEY_PLAIN, 2, text_color, 1)
        cv2.imshow("a", img)
        cv2.waitKey(1)
    else:
        print("No checkpoint file found")
|
# -*- coding: utf-8 -*-
# @Organization : insightface.ai
# @Author : Jia Guo
# @Time : 2021-05-04
# @Function :
from __future__ import division
import glob
import os.path as osp
import numpy as np
import onnxruntime
from numpy.linalg import norm
from ..model_zoo import model_zoo
from ..utils import DEFAULT_MP_NAME, ensure_available
from .common import Face
__all__ = ['FaceAnalysis']
class FaceAnalysis:
    """Bundle of ONNX face models: a mandatory detector plus optional
    per-face attribute models (one model kept per task name)."""

    def __init__(self, name=DEFAULT_MP_NAME, root='~/.insightface', allowed_modules=None, **kwargs):
        onnxruntime.set_default_logger_severity(3)
        self.models = {}
        self.model_dir = ensure_available('models', name, root=root)
        # Scan the model pack; first model found per task name wins.
        onnx_files = sorted(glob.glob(osp.join(self.model_dir, '*.onnx')))
        for onnx_file in onnx_files:
            model = model_zoo.get_model(onnx_file, **kwargs)
            if model is None:
                print('model not recognized:', onnx_file)
            elif allowed_modules is not None and model.taskname not in allowed_modules:
                print('model ignore:', onnx_file, model.taskname)
                del model
            elif model.taskname not in self.models and (allowed_modules is None or model.taskname in allowed_modules):
                print('find model:', onnx_file, model.taskname, model.input_shape, model.input_mean, model.input_std)
                self.models[model.taskname] = model
            else:
                print('duplicated model task type, ignore:', onnx_file, model.taskname)
                del model
        assert 'detection' in self.models
        self.det_model = self.models['detection']

    def prepare(self, ctx_id, det_thresh=0.5, det_size=(640, 640)):
        """Prepare every loaded model on device ctx_id.

        det_size and det_thresh apply to the detection model only.
        """
        self.det_thresh = det_thresh
        assert det_size is not None
        print('set det-size:', det_size)
        self.det_size = det_size
        for taskname, model in self.models.items():
            if taskname == 'detection':
                model.prepare(ctx_id, input_size=det_size, det_thresh=det_thresh)
            else:
                model.prepare(ctx_id)

    def get(self, img, max_num=0):
        """Detect faces in *img*, run every auxiliary model on each face.

        Returns a list of Face objects; empty list when nothing is detected.
        """
        bboxes, kpss = self.det_model.detect(img,
                                             max_num=max_num,
                                             metric='default')
        if bboxes.shape[0] == 0:
            return []
        ret = []
        for i in range(bboxes.shape[0]):
            bbox = bboxes[i, 0:4]
            det_score = bboxes[i, 4]
            kps = None
            if kpss is not None:
                kps = kpss[i]
            face = Face(bbox=bbox, kps=kps, det_score=det_score)
            for taskname, model in self.models.items():
                if taskname == 'detection':
                    continue
                model.get(img, face)
            ret.append(face)
        return ret

    def draw_on(self, img, faces):
        """Return a copy of *img* with boxes, keypoints and sex/age drawn on."""
        import cv2
        dimg = img.copy()
        for i in range(len(faces)):
            face = faces[i]
            # BUG FIX: np.int was deprecated in NumPy 1.20 and removed in
            # 1.24; the builtin int is the documented replacement.
            box = face.bbox.astype(int)
            color = (0, 0, 255)
            cv2.rectangle(dimg, (box[0], box[1]), (box[2], box[3]), color, 2)
            if face.kps is not None:
                kps = face.kps.astype(int)
                for l in range(kps.shape[0]):
                    # keypoints 0 and 3 are drawn green -- TODO confirm which
                    # landmarks these indices correspond to
                    color = (0, 255, 0) if l == 0 or l == 3 else (0, 0, 255)
                    cv2.circle(dimg, (kps[l][0], kps[l][1]), 1, color, 2)
            if face.gender is not None and face.age is not None:
                # NOTE(review): the guard checks face.gender but the label
                # prints face.sex -- presumably Face aliases the two; confirm.
                cv2.putText(dimg, '%s,%d' % (face.sex, face.age), (box[0] - 1, box[1] - 4),
                            cv2.FONT_HERSHEY_COMPLEX, 0.7, (0, 255, 0), 1)
        return dimg
|
# -*- coding: utf-8 -*-
"""
@Time : 2020/6/8 15:44
@Author : QDY
@FileName: 234. 回文链表_快慢指针+翻转链表.py
请判断一个链表是否为回文链表。
示例 1:
输入: 1->2
输出: false
示例 2:
输入: 1->2->2->1
输出: true
进阶:
你能否用O(n) 时间复杂度和 O(1) 空间复杂度解决此题?
"""
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:  # O(n) time, O(1) extra space
    def isPalindrome(self, head):
        """Return True if the singly linked list at *head* reads the same
        forwards and backwards (fast/slow pointers + in-place reversal)."""
        # Lists of length 0 or 1 are trivially palindromes.
        if head is None or head.next is None:
            return True
        # Advance a slow/fast pointer pair to locate the middle node.
        mid, runner = head, head
        while runner and runner.next:
            mid = mid.next
            runner = runner.next.next
        # runner is non-None exactly when the list length is ODD
        # (the original comment claimed even); skip the middle node so
        # both halves have equal length.
        if runner:
            mid = mid.next
        # Reverse the second half in place; `rev` ends at its new head.
        rev, node = mid, mid.next
        rev.next = None
        while node:
            after = node.next
            node.next = rev
            rev = node
            node = after
        # Walk from both ends toward the centre, comparing values.
        answer = True
        left, right = head, rev
        while right:
            if left.val != right.val:
                answer = False
                break
            left, right = left.next, right.next
        return answer
|
import pystray
from PIL import Image, ImageDraw
from pystray import Menu, MenuItem
from flask import Flask
import threading
import signal
import os
import subprocess
# Flask app exposing a tiny REST interface for the shutdown helpers below.
app = Flask(__name__)
@app.route('/')
def hello_world():
    # Liveness-check endpoint.
    return 'Hello, World!'
@app.route('/shutdown')
def shutdown():
    # Schedule a machine shutdown in 60 seconds (Windows `shutdown -s`);
    # can be aborted via /cancel_shutdown within that window.
    subprocess.call('shutdown -s -t 60', shell=True)
    return 'Shutting Down'
@app.route('/cancel_shutdown')
def cancel_shutdown():
    # Abort a pending shutdown (`shutdown -a`).
    subprocess.call('shutdown -a', shell=True)
    return 'Canceling shutdown'
def exit_action(icon):
    # Tear everything down: hide and stop the tray icon, then kill our own
    # process with SIGTERM so the Flask server thread dies with it.
    print('start killing app')
    icon.visible = False
    icon.stop()
    print('Killed icon')
    print('killing pid:', os.getpid())
    os.kill(os.getpid(), signal.SIGTERM)
    print('Killed flask')
def init_icon(color=(255, 0, 0)):
    # Build the system-tray icon with its context menu and start its event loop.
    icon = pystray.Icon('mon')
    icon.menu = Menu(
        MenuItem('Shutdown', lambda: shutdown()),
        MenuItem('Cancel Shutdown', lambda: cancel_shutdown()),
        MenuItem('Exit', lambda: exit_action(icon))
    )
    # Prefer a bundled image; otherwise draw a simple power symbol
    # (arc + vertical bar) in the given colour.
    if os.path.exists('shutdown.png'):
        img = Image.open('shutdown.png')
    else:
        img = Image.new('RGBA', (128, 128), (0, 0, 0, 0))
        draw = ImageDraw.Draw(img)
        draw.arc((5 + 10, 5 + 20, 125 - 10, 125),
                 start=295, end=245, fill=color, width=13)
        draw.line(((64, 10), (64, 70)), fill=color, width=13)
    icon.icon = img
    icon.title = 'Shutdown REST'
    icon.run()  # blocks until icon.stop() is called
def init_flask():
    # Serve the REST API on all interfaces; intended to run on a worker thread.
    app.run(host='0.0.0.0', port=5000)
threading.Thread(target=init_flask).start()
init_icon()  # blocks: the pystray event loop must run on the main thread
|
from app.views import *
from django.contrib import admin
from django.urls import path
# URL routes for the user CRUD views; the `name=` values are what templates
# reference via {% url %} / reverse().
urlpatterns = [
    path('admin/', admin.site.urls),
    path('create/',CreateUser.as_view(),name="create"),  # new-user form
    path('users/',ListUser.as_view(),name="list"),  # list all users
    path('users/delete/<int:pk>',DeleteUser.as_view(),name="delete"),
    path('users/edit/<int:pk>',UpdateUser.as_view(),name="update")
]
|
sistema = None  # module-level default; leer() takes its own sistema argument


class lector:
    """Reads the example input file and hands its lines to the given system."""

    @staticmethod
    def leer(sistema, queue1, queue3):
        """Read every line of example.txt and forward them, together with
        the two queues, to sistema.makeProcess.

        BUG FIX: the original opened the file without ever closing it,
        leaking the handle; a context manager now guarantees the close.
        """
        with open("example.txt") as f:
            lines = f.readlines()
        sistema.makeProcess(lines, queue1, queue3)
from setuptools import setup, find_packages
version = '1.6'
# Packaging metadata for the bhr-client library (setuptools).
setup(name='bhr-client',
      version=version,
      description="BHR Client",
      long_description="Client for the BHR Blackhole Router site",
      classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
      keywords='bhr',
      author='Justin Azoff',
      author_email='JAzoff@illinois.edu',
      url='https://github.com/ncsa/bhr-client',
      license='MIT',
      packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
      include_package_data=True,
      zip_safe=False,
      # Hard runtime dependencies; arrow is pinned exactly.
      install_requires=[
          "requests>=2.0",
          "arrow==0.10.0",
      ],
      # `pip install bhr-client[cli]` pulls in Click for the command line.
      extras_require={
          'cli': ['Click'],
      },
      entry_points={
          'console_scripts': [
              'bhr-client = bhr_client.cli:main',
              'bhr-client-run-stdout = bhr_client.run:main',
          ]
      }
      )
|
"""
Neha Bais
Calculates gratuity nd total from gratuity rate and subtotal entered by User !!!
"""
gratuity_rate = eval(input("Enter a gratuity rate in % : "))
subtotal = eval(input("Enter the subtotal : "))
gratuity = (gratuity_rate * subtotal) / 100
total = subtotal + gratuity
print ("The gratuity is", gratuity, "and total is", total) |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt
import netCDF4 as netcdf
from matplotlib import dates as mdates
import EuroSea_toolbox as to
from mpl_toolkits.basemap import Basemap
import pickle
"""
Step 1:
>> Define uCTD sampling strategy: get (time, lon, lat, dep) of each profile
Configuration #3
* MedSea and Atlantic.
>> With the defined uCTD sampling strategy, find (time, lon, lat, dep)
of the ADCP profiles.
>> Save (time, lon, lat, dep) of the uCTD and ADCP profiles
written by Bàrbara Barceló-Llull on 11-02-2021 at IMEDEA (Mallorca, Spain)
"""
def extract_lon_lat_continuous_profiles(dist_pfs_all, dlon_uctd, len_trans, sep_trans,
                                        sep_trans_lat_deg, lat_ini, lon_ini,
                                        lon_fin, length_deg_lon, length_deg_lat,
                                        num_trans):
    '''
    Extract longitude and latitude of profiles that are done continuously
    from top left to top right, turn downward, then continue the 2nd row from
    right to left, etc.
    Valid for uCTD/Seasoar profiles and the associated ADCP profiles.
    This function does this:
    > Which of the profiles in dist_pfs_all is in each transect or separation
    between transects?
    > Then, which are the coordinates of these profiles?
    Input:
    dist_pfs_all: array with distance of each profile from the beginnning
    of the sampling (top left)
    dlon_uctd: horizontal resolution along zonal transect in longitude degrees
    len_trans: length of zonal transect [km]
    sep_trans: separation between zonal transects [km]
    sep_trans_lat_deg: separation between zonal transects [latitude degrees]
    lat_ini: latitude of first profile (beginning of sampling)
    lon_ini: longitude of first profile (beginning of sampling)
    lon_fin: longitude at len_trans from the beginning of sampling
    length_deg_lon: length of a degree of longitude [m]
    length_deg_lat: length of a degree of latitude [m]
    num_trans: number of zonal transects
    Output: lon_uctd_pfs, lat_uctd_pfs
    '''
    # Output arrays, NaN until each profile gets assigned coordinates.
    lon_uctd_pfs = np.ones(len(dist_pfs_all)) * np.nan
    lat_uctd_pfs = np.ones(len(dist_pfs_all)) * np.nan
    for n in np.arange(num_trans):
        # distance of the profiles in each transect
        dist_ini_tran_n = n * (len_trans + sep_trans)
        dist_fin_tran_n = dist_ini_tran_n + len_trans
        cond_tran_n = np.logical_and(dist_pfs_all >= dist_ini_tran_n,
                                     dist_pfs_all <= dist_fin_tran_n)
        dist_pfs_tran_n = dist_pfs_all[cond_tran_n] # save distance
        # Which are the coordinates of these profiles?
        # latitude of profiles in this transect
        lat_uctd_itran = lat_ini - n * sep_trans_lat_deg
        lat_uctd_pfs[cond_tran_n] = lat_uctd_itran
        # longitude of profiles in this transect
        # even-index transects are sampled from WEST to EAST
        if n in np.arange(0, num_trans, 2):
            # distance starting from 0 to 80 of this transect
            rel_dist_tran = dist_pfs_tran_n - dist_ini_tran_n
            # min dist of this transect
            min_dist_km = rel_dist_tran.min()
            min_dist_lon_deg = min_dist_km/(length_deg_lon/1000)
            # longitude of the western profile
            lon_ini_tran = lon_ini + min_dist_lon_deg
            # max dist of this transect
            max_dist_km = rel_dist_tran.max()
            max_dist_lon_deg = max_dist_km/(length_deg_lon/1000)
            # longitude of the eastern profile
            lon_fin_tran = lon_ini + max_dist_lon_deg
            # longitude of all profiles between first and last profile in transet
            lon_uctd_itran = np.arange(lon_ini_tran, lon_fin_tran+0.5*dlon_uctd, dlon_uctd)
            # save longitude of the profiles in this transect
            lon_uctd_pfs[cond_tran_n] = lon_uctd_itran
        # odd-index transects are sampled from EAST to WEST
        elif n in np.arange(1, num_trans, 2):
            # distance starting from 0 to 80 of this transect
            # 0 km is east and 80 km is west (uCTD goes from east to west)
            rel_dist_tran = dist_pfs_tran_n - dist_ini_tran_n
            # min dist of this transect
            min_dist_km = rel_dist_tran.min()
            min_dist_lon_deg = min_dist_km/(length_deg_lon/1000)
            # longitude of the EASTERN profile
            lon_ini_tran = lon_fin - min_dist_lon_deg
            # max dist of this transect
            max_dist_km = rel_dist_tran.max()
            max_dist_lon_deg = max_dist_km/(length_deg_lon/1000)
            # longitude of the WESTERN profile
            lon_fin_tran = lon_fin - max_dist_lon_deg
            # longitude of all profiles between first and last profile in transet
            lon_uctd_itran = np.arange(lon_ini_tran, lon_fin_tran-0.5*dlon_uctd, -dlon_uctd)
            # save longitude of the profiles in this transect
            lon_uctd_pfs[cond_tran_n] = lon_uctd_itran
        # distance of the profiles in each separation between transects
        # (the meridional "turn" segment; skipped after the last transect)
        if n < num_trans-1:
            dist_ini_sep_n = dist_fin_tran_n
            dist_fin_sep_n = dist_ini_sep_n + sep_trans # or (n+1) * (len_trans + sep_trans))
            cond_sep_n = np.logical_and(dist_pfs_all > dist_ini_sep_n,
                                        dist_pfs_all < dist_fin_sep_n)
            dist_pfs_sep_n = dist_pfs_all[cond_sep_n] # save distance
            # Which are the coordinates of these profiles?
            # longitude of profiles in this transect
            # the turn happens at the end of the transect just sampled:
            # east end after an even transect, west end after an odd one
            if n in np.arange(0, num_trans, 2):
                lon_uctd_pfs[cond_sep_n] = lon_fin
            elif n in np.arange(1, num_trans, 2):
                lon_uctd_pfs[cond_sep_n] = lon_ini
            # latitude of profiles in this transect
            # lat_uctd_itran + distance
            dist_sep_pf_km = dist_pfs_sep_n - dist_ini_sep_n
            dist_sep_pf_lat_deg = dist_sep_pf_km / (length_deg_lat/1000)
            lat_uctd_pfs[cond_sep_n] = lat_uctd_itran - dist_sep_pf_lat_deg
    return lon_uctd_pfs, lat_uctd_pfs
def plot_bm_decor_Med(bm, fsize, label_y=1):
    # Decorate a Mediterranean Basemap: coastlines, grey land fill, and a
    # 1-degree lat/lon graticule. label_y toggles the latitude labels.
    bm.drawcoastlines()
    bm.fillcontinents(color='0.7', lake_color='0.7', zorder=5)
    parallels = np.arange(34.,43.,1.)
    bm.drawparallels(parallels,labels=[label_y, 0, 0, 0],fontsize=fsize,
                     linewidth=0.2, zorder=8)
    meridians = np.arange(-6,7.,1.)
    bm.drawmeridians(meridians,labels=[0, 0, 0, 1],fontsize=fsize,
                     linewidth=0.2, zorder=9)
def plot_bm_decor_Atl(bm, fsize, lonmin, lonmax, latmin, latmax, label_y=1,):
    # Decorate an Atlantic Basemap: same styling as the Med helper but with
    # caller-supplied graticule bounds.
    bm.drawcoastlines()
    bm.fillcontinents(color='0.7', lake_color='0.7', zorder=5)
    parallels = np.arange(latmin,latmax,1.)
    bm.drawparallels(parallels,labels=[label_y, 0, 0, 0],fontsize=fsize,
                     linewidth=0.2, zorder=8)
    meridians = np.arange(lonmin,lonmax,1.)
    bm.drawmeridians(meridians,labels=[0, 0, 0, 1],fontsize=fsize,
                     linewidth=0.2, zorder=9)
def plot_bm_decor(bm, tz, lonmin, lonmax, latmin, latmax, label_y=1):
    """Dispatch map decoration to the region-specific helper.

    Relies on the module-level ``region`` global ('Med' or 'Atl') set
    in ``__main__``.

    BUG FIX: ``label_y`` was accepted but hard-coded to 1 in both
    forwarded calls, so callers could never suppress the latitude
    labels; it is now forwarded.
    """
    if region == 'Med':
        plot_bm_decor_Med(bm, tz + 1, label_y=label_y)
    elif region == 'Atl':
        plot_bm_decor_Atl(bm, tz + 1, lonmin, lonmax, latmin, latmax, label_y=label_y)
def make_figure_UCTD(bm, region, lon_topo2d=None, lat_topo2d=None, topo_dom=None,
                     lon_mdt2d=None, lat_mdt2d=None, ug=None, vg=None):
    '''
    Two-panel figure of the uCTD sampling strategy: (left) map with SWOT
    swaths (plus topography and MDT geostrophic velocity for the Med case),
    (right) profile positions numbered/timestamped in lon-lat space.
    NOTE(review): reads several module-level globals set in __main__
    (lonsw*/latsw*, lonnd*/latnd*, lon_uctd_pfs, lat_uctd_pfs, time_uctd_pfs,
    dx_uctd_km, name_scenario, fig_dir, lonmin/lonmax/latmin/latmax).
    '''
    # Project the SWOT swath corner coordinates onto the map.
    xsw1, ysw1 = bm(lonsw1, latsw1)
    xsw2, ysw2 = bm(lonsw2, latsw2)
    xnd1, ynd1 = bm(lonnd1, latnd1)
    xnd2, ynd2 = bm(lonnd2, latnd2)
    tz = 12
    x_casts, y_casts = bm(lon_uctd_pfs, lat_uctd_pfs)
    # xlimmin, ylimmin = bm(1, 39)
    # xlimmax, ylimmax = bm(4, 42)
    fig = plt.figure(figsize=(13,6))
    ax1 = plt.subplot(121)
    plot_bm_decor(bm, tz, lonmin, lonmax, latmin, latmax, label_y=1)
    if region == 'Atl':
        plt.scatter(xsw1.flatten(), ysw1.flatten(), s=10, c='lightskyblue', alpha=0.2)
        plt.scatter(xsw2.flatten(), ysw2.flatten(), s=10, c='lightskyblue', alpha=0.2)
        plt.scatter(xnd1.flatten(), ynd1.flatten(), s=10, c='lightskyblue', alpha=0.2)
        plt.scatter(xnd2.flatten(), ynd2.flatten(), s=10, c='lightskyblue', alpha=0.2)
    elif region == 'Med':
        # Med panel additionally shows bathymetry and MDT-derived currents.
        x_top, y_top = bm(lon_topo2d, lat_topo2d)
        x_mdt, y_mdt = bm(lon_mdt2d, lat_mdt2d)
        ctopf = plt.contourf(x_top, y_top, topo_dom, cmap = plt.cm.YlGnBu_r,#Blues_r, #cmo.haline,
                             levels = np.arange(-3300,1,10), zorder=1, extend='min')
        cs1000 = plt.contour(x_top, y_top, topo_dom, levels=[-1000],#Blues_r, #cmo.haline,
                             colors='w', linewidths = 1, zorder=1000)
        cs500 = plt.contour(x_top, y_top, topo_dom, levels=[-500],#Blues_r, #cmo.haline,
                            colors='b', linewidths = 1, zorder=1000)
        # label the contours
        plt.clabel(cs1000, fmt='%d')
        plt.clabel(cs500, fmt='%d')
        qv = plt.quiver(x_mdt, y_mdt, ug, vg, color='k',scale = 7, zorder=900)
        plt.quiverkey(qv, 0.12, 0.9, 0.25, '0.25 m/s',
                      coordinates='figure', color='k', alpha=1)
        plt.scatter(xsw1.flatten(), ysw1.flatten(), c='lightskyblue',)
        plt.scatter(xsw2.flatten(), ysw2.flatten(), c='lightskyblue')
        plt.scatter(xnd1.flatten(), ynd1.flatten(), c='lightskyblue')
        plt.scatter(xnd2.flatten(), ynd2.flatten(), c='lightskyblue')
    plt.scatter(x_casts, y_casts, c='r', s=10, zorder=1100)
    # plt.xlim(xlimmin, xlimmax)
    # plt.ylim(ylimmin, ylimmax)
    plt.title(name_scenario + ' >> uCTD')
    ax2=plt.subplot(122)
    plt.scatter(lon_uctd_pfs, lat_uctd_pfs, c=np.arange(len(lon_uctd_pfs)), cmap=plt.cm.Spectral_r)
    # Number the casts only for the coarse configuration (labels overlap
    # when profiles are closer than ~5 km).
    if dx_uctd_km > 5:
        for i, txt in enumerate(np.arange(len(lon_uctd_pfs))+1):
            plt.annotate(txt, (lon_uctd_pfs[i]+0.005, lat_uctd_pfs[i]+0.01))
    # for i, txt_time in enumerate(time_casts):
    #     plt.annotate(mdates.num2date(txt_time).strftime("%H:%M %d-%m "),
    #                  (lon_casts[i]-0.5, lat_casts[i]+0.5))
    for j, txt_time in enumerate(time_uctd_pfs):
        # plt.annotate(mdates.num2date(txt_time).strftime("%H:%M %d-%m "),
        #              (lon_casts[j]-0.03, lat_casts[j]+0.03))
        #if np.any(j==np.arange(0, len(time_uctd_pfs), 5)):
        # timestamp only the first and last profile
        if j==0 or j==len(time_uctd_pfs)-1:
            plt.text(lon_uctd_pfs[j]-0.03, lat_uctd_pfs[j]+0.035,
                     mdates.num2date(txt_time).strftime("%H:%M %d-%m "),
                     fontsize=7)
    plt.axis('image')
    plt.ylim(lat_uctd_pfs.min()-0.1, lat_uctd_pfs.max()+0.1)
    plt.title('uCTD')
    plt.tight_layout()
    fig.savefig(fig_dir + name_scenario+'_uCTD.png')
def make_figure_ADCP_uCTD(bm):
    '''
    Two-panel figure of the ADCP profiles overlaid on the uCTD casts:
    (left) map with SWOT swaths and time-coloured ADCP positions, (right)
    ADCP positions with a time colourbar plus uCTD casts as black squares.
    NOTE(review): reads module-level globals set in __main__ (region,
    lonsw*/latsw*, lonnd*/latnd*, lon/lat/time_adcp_pfs, lon/lat_uctd_pfs,
    dx_uctd_km, name_scenario, fig_dir, lonmin/lonmax/latmin/latmax).
    '''
    # Project the SWOT swath corner coordinates onto the map.
    xsw1, ysw1 = bm(lonsw1, latsw1)
    xsw2, ysw2 = bm(lonsw2, latsw2)
    xnd1, ynd1 = bm(lonnd1, latnd1)
    xnd2, ynd2 = bm(lonnd2, latnd2)
    tz = 12
    x_adcp, y_adcp = bm(lon_adcp_pfs, lat_adcp_pfs)
    fig = plt.figure(figsize=(13,6))
    ax1 = plt.subplot(121)
    plot_bm_decor(bm, tz, lonmin, lonmax, latmin, latmax, label_y=1)
    if region == 'Atl':
        plt.scatter(xsw1.flatten(), ysw1.flatten(), s=10, c='lightskyblue', alpha=0.2)
        plt.scatter(xsw2.flatten(), ysw2.flatten(), s=10, c='lightskyblue', alpha=0.2)
        plt.scatter(xnd1.flatten(), ynd1.flatten(), s=10, c='lightskyblue', alpha=0.2)
        plt.scatter(xnd2.flatten(), ynd2.flatten(), s=10, c='lightskyblue', alpha=0.2)
    elif region == 'Med':
        plt.scatter(xsw1.flatten(), ysw1.flatten(), c='lightskyblue',)
        plt.scatter(xsw2.flatten(), ysw2.flatten(), c='lightskyblue')
        plt.scatter(xnd1.flatten(), ynd1.flatten(), c='lightskyblue')
        plt.scatter(xnd2.flatten(), ynd2.flatten(), c='lightskyblue')
    plt.scatter(x_adcp, y_adcp, c=time_adcp_pfs, s=3, cmap=plt.cm.Spectral_r)
    # plt.xlim(xlimmin, xlimmax)
    # plt.ylim(ylimmin, ylimmax)
    plt.title(name_scenario + ' >> ADCP')
    ax2=plt.subplot(122)
    # ADCP
    plt.scatter(lon_adcp_pfs, lat_adcp_pfs, c=time_adcp_pfs, s=8, cmap=plt.cm.Spectral_r)
    plt.colorbar(ticks=mdates.HourLocator(interval=6), #DayLocator(interval=1),
                 format=mdates.DateFormatter('%H:%M \n %b %d'),
                 orientation='horizontal')
    # uCTD casts
    plt.scatter(lon_uctd_pfs, lat_uctd_pfs, c='k', s=8, marker='s')
    # Number the casts only when they are far enough apart to read.
    if dx_uctd_km > 5:
        for i, txt in enumerate(np.arange(len(lon_uctd_pfs))+1):
            plt.annotate(txt, (lon_uctd_pfs[i]+0.005, lat_uctd_pfs[i]+0.01))
    # for i, txt in enumerate(np.arange(len(lon_uctd_pfs))+1):
    #     plt.annotate(txt, (lon_uctd_pfs[i]+0.005, lat_uctd_pfs[i]+0.01))
    plt.axis('image')
    plt.ylim(lat_uctd_pfs.min()-0.1, lat_uctd_pfs.max()+0.1)
    plt.title('uCTD + ADCP')
    plt.tight_layout()
    fig.savefig(fig_dir + name_scenario+'_ADCP.png')
if __name__ == '__main__':
    plt.close('all')
    fig_dir = '/Users/bbarcelo/HOME_SCIENCE/Figures/2020_EuroSea/configurations/'
    dir_dic = '/Users/bbarcelo/HOME_SCIENCE/Data/2020_EuroSea/configurations/'
    ''' Which region? '''
    region = 'Med' # options 'Atl' or 'Med'
    '''
    Which subconfiguration?
    Configuration 3a:
    - Horizontal resolution of uCTD profiles: 6 km
    - Vertical axis: from 5 to 500 m depth, with a vertical spacing of 0.5 m
    Configuration 3b (similar to Calypso):
    - Horizontal resolution of uCTD profiles: 2.5 km
    - Vertical axis: from 5 to 200 m depth, with a vertical spacing of 0.5 m
    '''
    subconf = '3b' # Options: '3a' and '3b'
    '''
    >>>>>> 1) Simulate uCTD/Seasoar sampling strategy <<<<<<
    To obtain lon, lat and time of each uCTD profile.
    Note 1: We assume a regular grid.
    num_trans: number of transects
    sep_trans: separation between transects
    Note 2: Order: Row 1 (top) from left to right, row 2 from right to left, etc.
    Note 3: We assume one vertical profile every dx_uctd_km
    Note 4: We assume constant ship velocity, including in turns. And turns of 90 deg.
    '''
    # define start time of the sampling
    # NOTE(review): `datetime` is not among this module's visible imports --
    # confirm it is imported elsewhere in the file.
    t_samp_ini = mdates.date2num(datetime(2009, 9, 1, 0, 0, 0)) #[python format in num. of days]
    # Parameters to define the uCTD sampling strategy (change as you wish)
    ship_speed_kt = 8 # knots
    len_trans = 80 # km, length of each zonal transect
    sep_trans = 10 # km, separation between zonal transects
    num_trans = 7 # total number of transects
    dep_uctd_min = 5 # m upper depth layer
    dep_uctd_res = 0.5 # m vertical resolution of the data
    if subconf == '3a':
        dx_uctd_km = 6 # [km] resolution of the uCTD profiles
        dep_uctd_max = 500 # [m] maximum depth of each cast
    elif subconf == '3b':
        dx_uctd_km = 2.5 # [km] resolution of the uCTD profiles
        dep_uctd_max = 200 # [m] maximum depth of each cast
    # number and name of this configuration
    name_scenario = region + '_conf_3_dep_'+'%04i'% dep_uctd_max+'m_res_'+'%04.1f'% dx_uctd_km + 'km_Sep'
    # ADCP parameters (change as you wish)
    dt_adcp_min = 5 #one profile every 5 minutes, in PRE-SWOT 468 seconds (7.8 min)
    dep_adcp_min = 20 #m, in PRE-SWOT 19.96 m
    dep_adcp_max = 600 #m, in PRE-SWOT 587.96 M
    dep_adcp_res = 8 #m # bin size of the ADCP data
    #define coordinates of profile 1 (top, left cast)
    if region == 'Med':
        lon_ini = 1.45
        lat_ini = 40.415
    elif region == 'Atl':
        lon_ini = -48.7
        lat_ini = 35.25
    # >>>> Computations needed <<<<<
    # length of a degree of longitude and latitude
    length_deg_lon, length_deg_lat = to.length_lon_lat_degs(lat_ini)
    # beginning of 1st transect: lon_ini
    # end of 1st transect: lon_fin
    len_trans_lon_deg = len_trans/(length_deg_lon/1000)
    lon_fin = lon_ini + len_trans_lon_deg
    # separation between transects in latitude degrees
    sep_trans_lat_deg = sep_trans /(length_deg_lat/1000)
    # ship velocity from knots to m/s
    kt2ms = 0.514444 # 1 knot is 0.514444 m/s
    ship_speed_ms = ship_speed_kt * kt2ms
    # ADCP temporal resolution
    dt_adcp_s = dt_adcp_min * 60 # in seconds
    dt_adcp_fracdays = dt_adcp_min/(60*24)
    # ADCP profile resolution
    dx_adcp_km = (ship_speed_ms*dt_adcp_s)/1000 #in km
    # >>>> Depth axis: uCTD and ADCP (save) <<<<<
    # define depth axis for uCTD profiles
    dep_uctd = np.arange(dep_uctd_min, dep_uctd_max+dep_uctd_res, dep_uctd_res)
    # define depth axis for ADCP profiles
    dep_adcp = np.arange(dep_adcp_min, dep_adcp_max+1, dep_adcp_res)
    # >>>> Distance axis: uCTD <<<<<
    # total distance of the uCTD trajectory
    tot_dist_uCTD = num_trans * len_trans + (num_trans - 1) * sep_trans
    # distance of each profile from the beginning of the uCTD trajectory
    dist_pfs_uctd_all = np.arange(0, tot_dist_uCTD, dx_uctd_km)
    # uCTD profile resolution in longitude degrees
    dlon_uctd = dx_uctd_km /(length_deg_lon/1000)
    # >>>> Time axis: uCTD <<<<<
    # time resolution of uCTD profiles
    dt_uctd_s = dx_uctd_km*1000/ship_speed_ms #[s]
    dt_uctd_min = dt_uctd_s/60 # [min]
    # time resolution of uCTD profiles in days
    dt_uctd_fracdays = dt_uctd_min/(60*24) #as fraction of days
    # time needed for the uCTD sampling
    time_sampling_s = tot_dist_uCTD*1000/ship_speed_ms #[s]
    time_sampling_days = time_sampling_s/(60*60*24)
    print('')
    print('Time needed to do the uCTD sampling (hours)...', time_sampling_s/3600)
    print('')
    # date of each uCTD profile (save)
    time_uctd_pfs = np.arange(t_samp_ini, t_samp_ini+time_sampling_days, dt_uctd_fracdays)
    # >>>> Distance axis: ADCP <<<<<
    # distance of each ADCP profile from the beginning of the uCTD trajectory
    dist_pfs_adcp_all = np.arange(0, tot_dist_uCTD, dx_adcp_km)
    # ADCP profile resolution in longitude degrees
    dlon_adcp = dx_adcp_km /(length_deg_lon/1000)
    # >>>> Time axis: ADCP <<<<<
    # date of each ADCP profile (save)
    time_adcp_pfs = np.arange(t_samp_ini, t_samp_ini+time_sampling_days, dt_adcp_fracdays)
    # -------------------------------------------------------------------
    # Extract (lon, lat) of each uCTD profile
    # Input: dist_pfs_uctd_all, dlon_uctd
    lon_uctd_pfs, lat_uctd_pfs = extract_lon_lat_continuous_profiles(
                                    dist_pfs_uctd_all, dlon_uctd, len_trans, sep_trans,
                                    sep_trans_lat_deg, lat_ini, lon_ini,
                                    lon_fin, length_deg_lon, length_deg_lat,
                                    num_trans)
    # Now we have (time_uctd_pfs, lon_uctd_pfs, lat_uctd_pfs, dep_uctd)
    # for this uCTD/seasoar sampling strategy.
    # Save these data to extract the model data at this location in another code.
    # -------------------------------------------------------------------
    # Extract (lon, lat) of each ADCP profile
    # Input: dist_pfs_adcp_all, dlon_adcp
    lon_adcp_pfs, lat_adcp_pfs = extract_lon_lat_continuous_profiles(
                                    dist_pfs_adcp_all, dlon_adcp, len_trans, sep_trans,
                                    sep_trans_lat_deg, lat_ini, lon_ini,
                                    lon_fin, length_deg_lon, length_deg_lat,
                                    num_trans)
    # Now we have (time_adcp_pfs, lon_adcp_pfs, lat_adcp_pfs, dep_adcp)
    # for this uCTD/seasoar sampling strategy.
    # Save these data to extract the model data at this location in another code.
    # -------------------------------------------------------------------
    ''' Plot CTD sampling strategy + swaths of SWOT '''
    # Coordinates of the swaths of SWOT in the Med Sea or Atlantic
    lonsw1, latsw1, lonsw2, latsw2, lonnd1, latnd1, lonnd2, latnd2 = \
                                            to.coord_SWOT_swath(region)
    # Create basemap for figures
    if region == 'Med':
        lonmin, lonmax, latmin, latmax = 1., 4, 39, 42
        bm = Basemap(projection = 'merc',llcrnrlon = lonmin,
                     urcrnrlon = lonmax,
                     llcrnrlat = latmin,
                     urcrnrlat = latmax,
                     lat_ts = 36,
                     resolution = 'h')
        # file topography
        dir_topo = '/Users/bbarcelo/HOME_SCIENCE/Data/PRE-SWOT_Dades/topography/'
        file_topo = 'usgsCeSrtm30v6_8303_496d_dd25.nc'
        # open topography
        nc = netcdf.Dataset(dir_topo + file_topo, 'r')
        lat_topo = nc.variables['latitude'][:]
        lon_topo = nc.variables['longitude'][:]
        topo = nc.variables['topo'][:] # [m]
        nc.close()
        # limit region (1 degree margin around the map bounds)
        ilont_dom = np.where(np.logical_and(lon_topo>=lonmin-1, lon_topo<=lonmax+1))
        jlatt_dom = np.where(np.logical_and(lat_topo>=latmin-1, lat_topo<=latmax+1))
        lon_topo_dom = lon_topo[ilont_dom]
        lat_topo_dom = lat_topo[jlatt_dom][::-1] # increasing!!
        topo_dom = topo[jlatt_dom,:].squeeze()[:, ilont_dom].squeeze()[::-1, :] #lat axis in increasing order
        lon_topo2d, lat_topo2d = np.meshgrid(lon_topo_dom, lat_topo_dom)
        # open MDT
        dir_mdt = '/Users/bbarcelo/HOME_SCIENCE/Data/2020_EuroSea/Med_MDT/'
        file_mdt = 'SMDT-MED-2014.nc'
        # open MDT
        nc = netcdf.Dataset(dir_mdt + file_mdt, 'r')
        lat_mdt = nc.variables['latitude'][:].data
        lon_mdt = nc.variables['longitude'][:].data
        mdt_mk = nc.variables['MDT'][:] # [m]
        nc.close()
        # remove mask to mdt
        mdt = np.copy(mdt_mk.data)
        mdt[mdt_mk.mask==True] = np.nan
        # compute geostrophic velocity from MDT
        lon_mdt2d, lat_mdt2d, ug, vg = to.compute_vgeo_from_mdt(lon_mdt, lat_mdt, mdt)
        # Make figure
        make_figure_UCTD(bm, region, lon_topo2d, lat_topo2d, topo_dom,
                         lon_mdt2d, lat_mdt2d, ug, vg)
    elif region == 'Atl':
        #lonmin, lonmax, latmin, latmax = -55,-40, 30, 40
        lonmin, lonmax, latmin, latmax = -50,-46, 34, 36
        bm = Basemap(projection = 'merc',llcrnrlon = lonmin,
                     urcrnrlon = lonmax,
                     llcrnrlat = latmin,
                     urcrnrlat = latmax,
                     lat_ts = 36,
                     resolution = 'h')
        make_figure_UCTD(bm, region)
    ''' Plot ADCP sampling strategy + swaths of SWOT '''
    make_figure_ADCP_uCTD(bm)
    ''' Save (time, lon, lat, dep) of the uCTD and ADCP profiles '''
    dic_uctd = {}
    dic_adcp = {}
    # (time_uctd_pfs, lon_uctd_pfs, lat_uctd_pfs, dep_uctd)
    dic_uctd.update({'time_ctd': time_uctd_pfs,
                     'lon_ctd' : lon_uctd_pfs,
                     'lat_ctd' : lat_uctd_pfs,
                     'dep_ctd' : dep_uctd})
    # time_adcp_pf, lon_adcp_pf, lat_adcp_pf, dep_adcp_pf
    dic_adcp.update({'time_adcp': time_adcp_pfs,
                     'lon_adcp' : lon_adcp_pfs,
                     'lat_adcp' : lat_adcp_pfs,
                     'dep_adcp' : dep_adcp})
    # Pickle both dictionaries for the downstream extraction script.
    f_uctd = open(dir_dic + name_scenario + '_uctd.pkl','wb')
    pickle.dump(dic_uctd,f_uctd)
    f_uctd.close()
    f_adcp = open(dir_dic + name_scenario + '_adcp.pkl','wb')
    pickle.dump(dic_adcp,f_adcp)
    f_adcp.close()
|
import torch
import torch.nn as nn
import torch.nn.functional as F
class AlexNet(nn.Module):
    """AlexNet (Krizhevsky et al., 2012) for 227x227 RGB input, 1000 classes.

    forward() returns class probabilities (softmax over dim 1).
    """

    def __init__(self):
        super(AlexNet, self).__init__()
        # --- convolutional feature extractor ---
        self.conv1 = nn.Conv2d(in_channels=3, out_channels=96,
                               kernel_size=11, stride=4, padding=0)
        self.pool1 = nn.MaxPool2d(kernel_size=3, stride=2)
        self.conv2 = nn.Conv2d(in_channels=96, out_channels=256,
                               kernel_size=5, stride=1, padding=2)
        self.pool2 = nn.MaxPool2d(kernel_size=3, stride=2)
        self.conv3 = nn.Conv2d(in_channels=256, out_channels=384,
                               kernel_size=3, stride=1, padding=1)
        self.conv4 = nn.Conv2d(in_channels=384, out_channels=384,
                               kernel_size=3, stride=1, padding=1)
        self.conv5 = nn.Conv2d(in_channels=384, out_channels=256,
                               kernel_size=3, stride=1, padding=1)
        self.pool3 = nn.MaxPool2d(kernel_size=3, stride=2)
        # --- classifier head: 256 * 6 * 6 = 9216 flattened features ---
        self.fc1 = nn.Linear(in_features=9216, out_features=4096)
        self.dropout1 = nn.Dropout(0.5)
        self.fc2 = nn.Linear(in_features=4096, out_features=4096)
        self.dropout2 = nn.Dropout(0.5)
        self.fc3 = nn.Linear(in_features=4096, out_features=1000)

    def forward(self, image):
        """Run the network on a batch shaped (bs, 3, 227, 227); returns
        (bs, 1000) class probabilities."""
        bs, c, h, w = image.size()
        x = self.pool1(F.relu(self.conv1(image)))
        x = self.pool2(F.relu(self.conv2(x)))
        x = F.relu(self.conv3(x))
        x = F.relu(self.conv4(x))
        x = self.pool3(F.relu(self.conv5(x)))
        x = x.view(bs, -1)
        x = self.dropout1(F.relu(self.fc1(x)))
        x = self.dropout2(F.relu(self.fc2(x)))
        # BUG FIX: the original applied ReLU to the final logits before the
        # softmax, zeroing every negative logit and distorting the output
        # distribution; fc3 must feed raw logits to softmax.  Also use the
        # documented `dim` keyword instead of `axis`.
        x = self.fc3(x)
        return torch.softmax(x, dim=1)
from django.shortcuts import render, HttpResponse, redirect
from .models import *
from django.contrib import messages
def index(request):
    """Render the landing page with the login / registration forms."""
    return render(request, 'login_app/index.html')
def processreg(request):
    """Validate a registration POST; log the new user in on success."""
    outcome = User.objects.validate_registration(request.POST)
    if not outcome['status']:
        # Validation failed: surface every validator message, then bounce home.
        for err in outcome['errors']:
            messages.error(request, err)
        return redirect('/')
    request.session['user_id'] = outcome['user_id']
    return redirect('/success')
def processlog(request):
    """Validate a login POST; store the user id in the session on success."""
    outcome = User.objects.validate_login(request.POST)
    if not outcome['status']:
        # Flash every validation error and return to the login form.
        for err in outcome['errors']:
            messages.error(request, err)
        return redirect('/')
    request.session['user_id'] = outcome['user_id']
    return redirect('/success')
def success(request):
    """Dashboard: the current user's created trips, joined trips, and the rest.

    Redirects to the login page when no 'user_id' is in the session.
    """
    if 'user_id' not in request.session:
        return redirect('/')
    # NOTE(review): User.objects.get(id=request.session['user_id']) is
    # evaluated four times below; it could be fetched once into a local.
    context = {
        "me": User.objects.get(id=request.session['user_id']),
        "users": User.objects.all(),
        "all_trips": Trip.objects.all(),
        # Earlier attempts kept for reference:
        # "not_my_trips": Trip.objects.exclude(user_on_trip=request.session['user_id']),
        # "my_trips": Trip.objects.filter(user_on_trip=request.session['user_id'])
        "my_trips": User.objects.get(id=request.session['user_id']).created_trips.all(),
        "joined_trips": User.objects.get(id=request.session['user_id']).trips.all(),
        # Trips neither created by nor joined by the current user.
        "others_trip":Trip.objects.exclude(created_by= User.objects.filter(id=request.session['user_id'])).exclude(user_on_trip=User.objects.get(id=request.session['user_id']))
    }
    # Render the dashboard with the assembled query results.
    return render(request, 'login_app/success.html',context)
def logout(request):
    """Drop every session key (logging the user out) and go back home."""
    request.session.clear()
    return redirect('/')
def add_travel_plan(request):
    """Show the 'new trip' form; logged-in users only."""
    if 'user_id' in request.session:
        return render(request, 'login_app/add.html')
    return redirect('/')
def add(request):
    """Create a trip from the submitted form for the logged-in user.

    On success redirect to the dashboard; on validation errors flash each
    message and return to the form.
    """
    # FIX: guard against anonymous access like the sibling views do — the
    # original raised KeyError on a missing 'user_id' session key.
    if 'user_id' not in request.session:
        return redirect('/')
    result = Trip.objects.add(request.POST, request.session['user_id'])
    if result['status']:
        return redirect('/success')
    for error in result['errors']:
        messages.error(request, error)
    return redirect('/add_travel_plan')
def home(request):
    """Alias route: send visitors straight to the dashboard."""
    return redirect('/success')
def destination(request, trip_id):
    """Trip detail page: the trip plus the users who joined it.

    Logged-in users only; anonymous visitors are redirected home.
    """
    if 'user_id' not in request.session:
        return redirect('/')
    trip = Trip.objects.get(id=trip_id)
    context = {
        # Users on the trip, excluding its creator.
        "other_users" : User.objects.filter(trips=trip_id).exclude(created_trips=trip_id),
        "current_trip": trip
    }
    # FIX: removed the stray debug print(trip_id) left over from development.
    return render(request,'login_app/destination.html', context)
def join(request, trip_id):
    """Add the logged-in user to the trip, then return to the dashboard."""
    # FIX: guard the session access — the original raised KeyError for
    # anonymous visitors, unlike the other views in this module.
    if 'user_id' not in request.session:
        return redirect('/')
    Trip.objects.join(trip_id, request.session['user_id'])
    return redirect('/success')
#def destination(request): #get error if not add trip_id
#print(trip_id)
#return render(request, 'login_app/destination.html')
|
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
# For each test case: among the values occurring exactly once in the list,
# find the smallest value and print the 1-based index of its occurrence;
# print -1 when every value repeats.  `mod` doubles as an infinity sentinel.
mod=1000000007
t=int(input())
for i in range(t):
    n=int(input())
    ans=chk=mod
    l=[int(i) for i in input().split()]
    # w maps value -> index of its only occurrence, or -1 once it repeats.
    w={}
    ans=chk=mod  # NOTE(review): duplicate of the assignment above; harmless.
    for j,k in enumerate(l):
        if k not in w:
            w[k]=j
        else:
            w[k]=-1
    for j in w:
        # Track the smallest unique value (chk) and remember its index (ans).
        if w[j]>-1 and chk>j:
            chk=j
            ans=w[j]
    print(ans+1 if chk!=mod else -1)
|
# coding= utf-8
import socket

# Send the same UDP datagram 256 times to a fixed host:port.
info = b'my name is tony'
ip = "192.168.102.101"
port = 8081
# FIX: the original created a brand-new socket on every iteration (256 leaked
# sockets) and never used its `ip` variable; reuse one socket and close it.
client = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
    for _ in range(256):
        client.sendto(info, (ip, port))
finally:
    client.close()
# Problem Source: LeetCode
# Given a string, find the first non-repeating
# character in it and return it's index. If it doesn't exist, return -1.
# Examples:
# s = "leetcode"
# return 0.
# s = "loveleetcode",
# return 2.
# Note: You may assume the string contain only lowercase letters.
### ### ###
def firstUniqChar(s):
    """Return the index of the first non-repeating character in s, or -1.

    :type s: str
    :rtype: int
    """
    # Count occurrences and remember each character's first index.
    counts = {}
    for i, ch in enumerate(s):
        if ch in counts:
            counts[ch][0] += 1
        else:
            counts[ch] = [1, i]
    # Smallest first-index among characters seen exactly once; `default`
    # covers both the empty string and the all-repeats case.
    # FIX: removed the stray debug print(data) from the original.
    return min((v[1] for v in counts.values() if v[0] == 1), default=-1)


print(firstUniqChar("l"))
import requests_mock
import koji
import pytest
from koji_builder_kube import cli, errors
def test_session_setup_error_type():
    """session_setup with an empty config must raise the package's KojiError."""
    with pytest.raises(errors.KojiError):
        cli.session_setup({})
def test_session_setup_error_auth(fixtures_dir):
    """A config pointing at an unreachable hub fails with KojiError."""
    data = {
        'serverca': f'{fixtures_dir}/ca.pem',
        'cert': f'{fixtures_dir}/cert.pem',
        'server': 'https://koji-hub:8443/koji-hub'
    }
    with pytest.raises(errors.KojiError):
        cli.session_setup(data)
def test_session_setup_success(fixtures_dir, mocker):
    """With filesystem checks and ssl_login mocked out, setup succeeds."""
    data = {
        'server': 'https://koji-hub:8443/kojihub',
        'serverca': f'{fixtures_dir}/ca.pem',
        'cert': f'{fixtures_dir}/cert.pem',
    }
    # Pretend the certificate files exist and are readable.
    mocker.patch('os.access')
    mocker.patch('os.path.exists')
    mocker.patch.object(koji.ClientSession, 'ssl_login', autospec=True)
    cli.session_setup(data)
def test_run_success(fixtures_dir, mocker):
    """End-to-end run in TEST mode with auth and filesystem access mocked."""
    mocker.patch('os.access')
    mocker.patch('os.path.exists')
    mocker.patch.object(koji.ClientSession, 'ssl_login', autospec=True)
    # Force TEST mode so run() does not touch a real builder.
    mocker.patch('koji_builder_kube.cli.mode', cli.Modes.TEST)
    cli.run(['-c', f'{fixtures_dir}/kojid-ssl.conf'])
|
from concurrent.futures import (
ProcessPoolExecutor,
ThreadPoolExecutor,
)
from time import perf_counter as pc
from Crypto.Random import atfork
from Crypto.Util.number import getPrime
# Benchmark: generate `count` primes of `bits` bits using thread vs process
# pools of growing size, timing each configuration.
bits = 2 ** 11           # prime size in bits (2048)
count = 40               # primes generated per configuration
max_count_workers = 20   # pool sizes 1..20 are benchmarked


def get_prime(count_primes):
    # `count_primes` is unused: it only exists so executor.map can feed the
    # function one work item per task.
    atfork()  # re-seed PyCrypto's RNG after fork (needed in worker processes)
    return getPrime(bits)


executors = {
    "Crypto threads": ThreadPoolExecutor,
    "Crypto process": ProcessPoolExecutor
}
if __name__ == "__main__":
    for title, executor_class in executors.items():
        print("#" * 10, title, "#" * 10)
        for part in range(1, max_count_workers + 1):
            t = pc()  # wall-clock start for this pool size
            with executor_class(max_workers=part) as executor:
                result = list(executor.map(get_prime, [5] * count))
            print(
                f"{title} count = {part}.",
                f"Count results: {len(result)}",
                "Time:", pc() - t
            )
print() |
# Lomuto-style quicksort: the element at index high-1 acts as the pivot.
def quick_sort(array, low, high):
    """Recursively sort the half-open range array[low:high] in place."""
    if low >= high:
        return
    split = partition(array, low, high)
    # Elements left of `split` are < pivot; array[split] is the pivot itself.
    quick_sort(array, low, split)
    quick_sort(array, split + 1, high)


def partition(array, low, high):
    """Partition array[low:high] around array[high-1]; return the pivot's final index."""
    pivot_value = array[high - 1]
    boundary = low  # first index of the ">= pivot" region
    for cursor in range(low, high):
        if array[cursor] < pivot_value:
            array[cursor], array[boundary] = array[boundary], array[cursor]
            boundary += 1
    # Put the pivot between the two regions.
    array[high - 1], array[boundary] = array[boundary], array[high - 1]
    return boundary
# Demo runs: reverse-sorted, unsorted, already-sorted, and random input.
arr = [10,9,8,7,6,5,4,3,2,1]
arr_b = [3,8,2,5,1,4,7,6]
quick_sort(arr, 0, len(arr))
quick_sort(arr_b, 0, len(arr_b))
print(arr)
print(arr_b)
arrx = list(range(1,11))
quick_sort(arrx, 0, len(arrx))
print(arrx)
import random
# 100 random values in [1, 10001] as a stress check.
list_rnd = [random.randint(1, 10001) for i in range(100)]
#print(list_rnd)
quick_sort(list_rnd, 0, len(list_rnd))
print(list_rnd)
|
# TODO sections still to implement:
# Cardinal numbers
# Ordinal numbers
# Clock time
# Digital clock
# 24 hr by quarters
# Starts at
# Dates

# Grammatical gender codes -> Czech names
# (masculine animate, masculine inanimate, feminine, neuter).
rod = {
    'mz': 'mužské životné',
    'mn': 'mužské neživotné',
    'f': 'femininum',
    'n': 'neutrum'
}
# The seven Czech grammatical cases keyed by their traditional number.
pád = {
    1: 'nominativ',
    2: 'genitiv',
    3: 'dativ',
    4: 'akusativ',
    5: 'vokativ',
    6: 'lokál',
    7: 'instrumentál'
}
# Grammatical number: singular, plural.
číslo = [ 'singulár', 'plurál']
# Declension of "one": number -> case -> forms, mapped positionally onto the
# gender list stored under the 'rod' key.  Case 5 (vocative) is omitted.
one = {
    'singulár':
    {
        'rod': ['mz', 'mn', 'n', 'f'],
        1: ['jeden', 'jeden', 'jedno', 'jedna'],
        2: ['jednoho'] * 3 + ['jedné'],
        3: ['jednomu'] * 3 + ['jedné'],
        4: ['jednoho', 'jeden', 'jedno', 'jednu'],
        6: ['jednom'] * 3 + ['jedné'],
        7: ['jedním'] * 3 + ['jednou']
    },
    'plurál':
    {
        # NOTE(review): the gender order here ('f' before 'n') differs from
        # the singular table; forms are mapped positionally, so confirm this
        # ordering is intentional.
        'rod': ['mz', 'mn', 'f', 'n'],
        1: ['jedni', 'jedny', 'jedny', 'jedna'],
        2: ['jedněch'] * 4,
        3: ['jedněm'] * 4,
        4: ['jedny'] * 3 + ['jedna'],
        6: ['jedněch'] * 4,
        7: ['jedněmi'] * 4,
    }
}
def decline_one(one_dict):
    """Re-key a declension table from positional gender lists to nested dicts.

    Input:  {number: {'rod': [genders...], case: [forms...], ...}}
    Output: {number: {case: {gender: form}}} — the 'rod' entry is consumed,
    not copied through.
    """
    declined = {}
    for number, table in one_dict.items():
        genders = table['rod']
        cases = {}
        for case, forms in table.items():
            if case == 'rod':
                continue
            # Zip each positional form with the gender at the same position.
            cases[case] = {genders[pos]: form for pos, form in enumerate(forms)}
        declined[number] = cases
    return declined
# Declension of "two": a single table with dual forms, positional by the
# gender list under 'rod'.
two = {
    'rod': ['mz', 'mn', 'n', 'f'],
    1: ['dva'] * 2 + ['dvě'] * 2,
    2: ['dvou'] * 4,
    3: ['dvěma'] * 4,
    4: ['dva'] * 2 + ['dvě'] * 2,
    6: ['dvou'] * 4,
    7: ['dvěma'] * 4,
}
# "three" and "four" do not vary by gender: case -> single form.
three = {
    1: 'tři',
    2: 'tři',
    3: 'třem',
    4: 'tři',
    6: 'třech',
    7: 'třemi',
}
four = {
    1: 'čtyři',
    2: 'čtyř',
    3: 'čtyřem',
    4: 'čtyři',
    6: 'čtyřech',
    7: 'čtyřmi',
}
# Default case suffixes shared by higher numerals.
usual_suffix = {
    1: '',
    2: 'i',
    3: 'i',
    4: '',
    6: 'i',
    7: 'i',
}
# "hundred": forms for singular / plural / dual usage (e.g. "dvě stě").
hundred = {
    'čislo': {'singulár', 'plurál', 'dual'},
    1: ['sto', 'sta', 'stě'],
    2: ['sta', 'set', 'set'],
    3: ['stu', 'stům', 'stům'],
    4: ['sto', 'sta', 'stě'],
    6: ['stu', 'stech', 'stech'],
    7: ['stem', 'sty', 'sty'],
}
# Masculine-inanimate "stroj" paradigm suffixes, used for "tisíc":
# case -> [singular suffix, plural suffix].
stroj = {
    1: ['', 'e'],
    2: ['e', 'ů'],
    3: ['i', 'ům'],
    4: ['', 'e'],
    5: ['i', 'e'],
    6: ['i', 'ich'],
    7: ['em', 'i']
}
# Masculine-inanimate "telefon" paradigm suffixes, used for milion/miliarda.
telefon = {
    1: ['', 'y'],
    2: ['u', 'ů'],
    3: ['u', 'ům'],
    4: ['', 'y'],
    5: ['e', 'y'],
    6: ['u', 'ech'],
    7: ['em', 'y']
}
def construct_numer():
    """Build a full numeral phrase (not yet implemented)."""
    pass


def decline_noun():
    """Decline the counted noun (not yet implemented)."""
    pass


# Smoke run: print the fully expanded declension table for "one".
card_one_declined = decline_one(one)
print(card_one_declined)
|
from service.Generator import Generator
from service.Validator import Validator
# Default input/output paths (currently unused by main below).
OUT_FILE_NAME = "out/out.txt"
IN_FILE_NAME = "out/in.txt"


def main():
    """Run the external scheduling algorithm on every instance size per index."""
    # Instance sizes 50, 100, ..., 500.
    test = list(range(50, 501, 50))
    # Full set of student indices, kept for reference:
    # indexy = ['136774', '136785', '136812', '136815', '132336', '136803', '132639', '136814', '136807', '136798',
    #           '132337', '132321', '136808', '136691']
    indexy = ['136807']
    # Absolute path to the compiled algorithm binary (machine-specific).
    algorithm = "/home/krzysztof/Dokumenty/Studia/semestr7/Szeregowanie/zadanie1_algorytm/cmake-build-debug/zadanie1_algorytm"
    for index in indexy:
        print(f"\n{index}\n")
        for i in test:
            print(f"{i}\t:\t", end='')
            # Validate the algorithm's output for this instance file.
            Validator(
                f"/home/krzysztof/Dokumenty/Studia/semestr7/Szeregowanie/instancje/in_{index}/in_{index}_{i}.txt",
                algorithm).execution()


if __name__ == '__main__':
    main()
|
# -*- coding: utf-8 -*-
# Python 2 script: read three side lengths and classify the triangle
# (equilateral / isosceles / scalene), or report that no triangle exists.
a = float(raw_input('Informe o valor primeiro lado: '))
b = float(raw_input('Informe o valor segundo lado: '))
c = float(raw_input('Informe o valor terceiro lado: '))
# Triangle inequality: each side must be shorter than the sum of the others.
if (a < b + c) and (b < c + a) and (c < b + a):
    print 'É triângulo'
    if a == b and b == c and a == c:
        tipo = 'Triângulo eqüilátero'
    elif (a == b and b != c) or (b == c and c != a) or (a == c and c != b):
        tipo = 'Triângulo isósceles'
    else:
        tipo = 'Triângulo escaleno'
    print tipo
else:
    print 'Não é triângulo'
|
def add(pieces_dict, p_add, c_add, k_add):
    """Insert a new piece unless the title already exists; return the dict."""
    if p_add in pieces_dict:
        print(f"{p_add} is already in the collection!")
        return pieces_dict
    pieces_dict[p_add] = {'composer': c_add, 'key': k_add}
    print(f"{p_add} by {c_add} in {k_add} added to the collection!")
    return pieces_dict
def remove(pieces_dict, p_remove):
    """Delete a piece by title when present; return the (possibly updated) dict."""
    if p_remove not in pieces_dict:
        print(f"Invalid operation! {p_remove} does not exist in the collection.")
        return pieces_dict
    del pieces_dict[p_remove]
    print(f"Successfully removed {p_remove}!")
    return pieces_dict
def change(pieces_dict, p_change, new_k):
    """Update the key of an existing piece; return the dict."""
    if p_change not in pieces_dict:
        print(f"Invalid operation! {p_change} does not exist in the collection.")
        return pieces_dict
    pieces_dict[p_change]['key'] = new_k
    print(f"Changed the key of {p_change} to {new_k}!")
    return pieces_dict
# Read the initial collection: n lines of "piece|composer|key".
n = int(input())
pieces = {}
for _ in range(n):
    data = input().split("|")
    pieces[data[0]] = {'composer': data[1], 'key': data[2]}
# Process commands ("Add|...", "Remove|...", "ChangeKey|...") until "Stop".
command_data = input()
while not command_data == "Stop":
    command_data = command_data.split("|")
    command = command_data[0]
    if command == "Add":
        piece_add, composer_add, key_add = command_data[1:]
        pieces = add(pieces, piece_add, composer_add, key_add)
    elif command == "Remove":
        piece_remove = command_data[1]
        pieces = remove(pieces, piece_remove)
    elif command == "ChangeKey":
        piece_change, new_key = command_data[1:]
        pieces = change(pieces, piece_change, new_key)
    command_data = input()
# Print sorted by title (composer tie-break never fires: titles are dict keys).
sorted_pieces = sorted(pieces.items(), key=lambda x: (x[0], x[1]['composer']))
for piece, value in sorted_pieces:
    print(f"{piece} -> Composer: {value['composer']}, Key: {value['key']}")
|
#!/usr/bin/env python
"""Train a sentence piece model."""
__author__ = 'Erdene-Ochir Tuguldur'
import argparse
import sentencepiece as spm
parser = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("--vocab-size", type=int, default=32000, help='vocabulary size')
parser.add_argument("--control-symbols", type=str, default='[PAD],[CLS],[SEP],[MASK]', help='control symbols')
parser.add_argument("--prefix", type=str, default='mn_cased', help='model prefix')
parser.add_argument("--input", type=str, default='all.txt', help='input text file')
args = parser.parse_args()
print("training sentence piece...")
# SentencePieceTrainer.Train parses a single command-line-style flag string
# rather than keyword arguments, hence the assembled f-string.
command = f'--input={args.input} --model_prefix={args.prefix} --vocab_size={args.vocab_size} ' \
          f'--control_symbols={args.control_symbols} --input_sentence_size=10000000 --shuffle_input_sentence=true '
spm.SentencePieceTrainer.Train(command)
print("done!")
|
# -*- coding:utf-8 -*-
"""网站导航"""
class KgcmsApi(object):
    """KGCMS framework interface for managing site navigation entries.

    The framework injects `kg` (request context: get/post params, site info)
    and `db` (database gateway) as class attributes before invoking the
    instance; __call__ then dispatches delete / submit / sort / list actions.
    """
    kg = db = None  # injected by the framework before __call__

    def __init__(self):
        pass

    def __call__(self):
        from kyger.kgcms import template
        from kyger.utility import date, numeric, html_escape, alert, is_format
        from kyger.upload import Upload
        post_param = self.kg['post']
        get_param = self.kg['get']
        # --- Delete one navigation entry: GET ?action=del&id=<n> ---
        if get_param.get('action', '') == 'del'and get_param.get('id', 0):
            del_id = numeric(get_param.get('id'), 0)
            # An entry that still owns menu links must not be deleted.
            res = self.db.list(table='link', field='id', where='`nid` = %d' % del_id)
            if res: return alert(msg="改导航下存在菜单不可删除", act=3)
            else:
                res = self.db.dele('navigation', del_id, limit=1, log=True)
                if res: return alert(msg="删除成功", act='navig_manage')
                else: return alert(msg="删除失败", act=3)
        # Decide between editing an existing entry and adding a new one.
        act = 'edit' if post_param.get('nav_id', '') else 'add'
        # --- Create/update an entry: POST action=submit ---
        if post_param.get('action') == 'submit':
            if not post_param.get('title', ''): return alert(msg='标题不能为空', act=3)
            img = post_param.get('img')
            if act == 'add':
                # Read the table's AUTO_INCREMENT so the logo file name can
                # embed the id the new row is about to receive.
                next_id = self.db.run_sql('SHOW TABLE STATUS WHERE Name = "%snavigation"' % self.db.cfg["prefix"], log=False)[0]['Auto_increment']
                name = 'logo_%d' % next_id
            else: name = 'logo_%d' % numeric(post_param.get('nav_id', 0), 0)
            aid = html_escape(post_param.get('aid', ''))
            # NOTE(review): aid[0] raises IndexError when aid is empty —
            # confirm the form guarantees a non-empty identifier.
            if len(aid) > 8 or (not aid[0].isalpha()): return alert(msg="识别码必须字母开头的0-8个字符,允许字母数字下划线", act=3)
            insert_data = {
                "title": html_escape(post_param.get('title', '')),
                "aid": aid,
            }
            if not insert_data['title']: return alert(msg="请输入标题", act=3)
            if img:
                # The image arrives wrapped; strip two characters from each
                # end before handing the base64 payload to the uploader.
                img = img[2:-2:1]
                up = Upload(img, self.db, self.kg, base64=True)
                up.path = 0 # do not create a date-based sub-directory
                up.filename = name # fixed file name: logo_<id>
                up.exist_rename = False
                msg = up.image('system/navigation') # save the file
                insert_data['logo'] = msg['url']
            if act == 'edit':
                self.db.edit('navigation', insert_data, where=numeric(post_param.get('nav_id', 0), 0), limit=1)
                return alert(msg="修改成功", act='navig_manage')
            else:
                insert_data['sort'] = 100 # new entries sort last by default
                insert_data['webid'] = numeric(self.kg['web']['id'], 0)
                insert_data['addtime'] = int(self.kg['run_start_time'][1])
                row = self.db.add('navigation', insert_data)
                if row: return alert(msg="添加成功", act='navig_manage')
                else: return alert(msg="添加失败", act=3)
        # --- Re-order entries: POST action=sort with "id,weight" rows ---
        elif post_param.get('action') == 'sort':
            sort_list = post_param.get('sort')
            # One UPDATE using CASE id WHEN ... THEN ... to set all weights.
            sql = 'UPDATE %snavigation SET sort = CASE id ' % self.db.cfg["prefix"]
            id_list = []
            for row in sort_list:
                sort_data = row.split(',')
                id_list.append(str(numeric(sort_data[0], 0)))
                sql += 'WHEN %s THEN %s ' % (numeric(sort_data[0], 0), numeric(sort_data[1], 0))
            self.db.run_sql(sql + 'END WHERE id IN (%s)' % (','.join(id_list)), log=True)
            return alert(act=2)
        # --- Default: render the navigation list for the current site ---
        else:
            data = self.db.list(table='navigation', where="webid=%s" % self.kg['web']['id'], order='`sort` ASC')
            link_data = self.db.list(table='link', field='nid')
            link_list = []
            for row in link_data:
                link_list.append(row['nid'])
            for var in data:
                var['addtime'] = date(var['addtime'], '%Y-%m-%d %H:%M')
                var['menu'] = link_list.count(var['id']) # menu count per entry
            return template(assign={'data': data})
|
from .model import db
from .model import Graph, Vertex, Pipeline, Edge, Track
from .settings import URL, DATABASE, DATABASE_DEBUG
from .model import Database
#创建DAG
#创建图的函数
def create_graph(name, desc=None):
    """Insert a Graph row; return it on success, None on failure (rolled back)."""
    g = Graph()
    g.name = name
    g.desc = desc
    db.session.add(g)
    try:
        db.session.commit()
        return g
    except Exception as e:
        # Log the failure and undo the pending insert; implicitly returns None.
        print(e)
        db.session.rollback()
#为图增加顶点
def add_vertex(graph:Graph, name:str, input=None, script=None):
    """Insert a Vertex into `graph`; return it, or None on failure.

    NOTE(review): the `input` parameter shadows the builtin, but renaming it
    would break callers passing it by keyword.
    """
    v = Vertex()
    v.g_id = graph.id
    v.name = name
    v.input = input
    v.script = script
    db.session.add(v)
    try:
        db.session.commit()
        return v
    except Exception as e:
        # Log the failure and roll back; implicitly returns None.
        print(e)
        db.session.rollback()
#为顶点增加边
def add_edge(tail:Vertex, head:Vertex, graph:Graph):
    """Insert an Edge tail->head in `graph`; return it, or None on failure."""
    e = Edge()
    e.g_id = graph.id
    e.tail = tail.id
    e.head = head.id
    db.session.add(e)
    try:
        db.session.commit()
        return e
    except Exception as exc:
        # FIX: the original bound the exception to `e`, clobbering the Edge
        # instance and silently discarding the error; log it like the sibling
        # helpers (create_graph/add_vertex) do, then roll back.
        print(exc)
        db.session.rollback()
#删除顶点
#删除顶点就要删除这个顶点关联的所有边
def del_vetex(id):
    """Delete vertex `id` and every edge touching it; return the ORM object (or None).

    NOTE(review): the misspelled name and the `id` parameter shadowing the
    builtin are kept for backward compatibility with existing callers.
    """
    query = db.session.query(Vertex).filter(Vertex.id == id)
    v = query.first()
    if v: # once found: remove incident edges first, then the vertex itself
        try:
            db.session.query(Edge).filter((Edge.tail == v.id) | (Edge.head == v.id)).delete()
            query.delete()
            db.session.commit()
        except Exception as e:
            print(e)
            db.session.rollback()
    return v
# NOTE(review): this rebinds the module-global `db` (imported above) to a
# fresh Database instance; the helpers above resolve `db` at call time, so
# they will use this instance, not the imported one. Confirm this is intended.
db = Database(URL, echo=DATABASE_DEBUG)
|
things="Apples Oranges Crows Telephone Light Sugar"
stuff=things.split(" ")
more=["Day", "Night", "Song", "Frisbee", "Corn", "Banana", "Girl", "Boy"]
while len(stuff) !=10:
next=more.pop()
print ("next is %s" % next)
stuff.append(next)
print (stuff)
print (stuff[1])
print (stuff[-1])
print (stuff.pop())
print (' '.join(stuff))
print ('#'.join(stuff[3:5]))
|
from sqlalchemy import Column, Integer, String, Boolean, ForeignKey
from sqlalchemy.orm import relationship
from .declarative_base import Base
class Estudiante(Base):
    """Student record with many-to-many links to courses and teams."""
    __tablename__ = "estudiante"
    idEstudiante = Column(Integer, primary_key=True)
    apellidoPaterno = Column(String)   # paternal surname
    apellidoMaterno = Column(String)   # maternal surname
    nombres = Column(String)           # given names
    elegible = Column(Boolean)         # eligibility flag
    # M2M relations resolved through the association tables named below.
    asignaturas = relationship('Asignatura', secondary='asignatura_estudiante')
    equipos = relationship('Equipo', secondary='estudiante_equipo')
class EstudianteEquipo(Base):
    """Association table linking students to teams (composite primary key)."""
    __tablename__ = 'estudiante_equipo'
    estudiante = Column(Integer, ForeignKey('estudiante.idEstudiante'), primary_key=True)
    equipo = Column(Integer, ForeignKey('equipo.idEquipo'), primary_key=True)
|
# Python 2 solution: for each case, read 2N-1 whitespace-separated rows into
# one flat list and output (sorted) the values that occur an odd number of
# times — the toggle list keeps a value iff it has appeared an odd count.
t = int(raw_input()) # number of test cases
for p in xrange(1, t + 1):
    N=int(raw_input())
    lines=[]
    for x in xrange(1,2*N):
        lines.extend([int(s) for s in raw_input().split(" ")])
    # Toggle membership: in after odd occurrences, out after even.
    solder=[]
    for i in xrange(len(lines)):
        if lines[i] in solder:
            solder.remove(lines[i])
        else:
            solder.append(lines[i])
    solder=sorted(solder)
    solder=[str(s) for s in solder]
    print "Case #{}: {}".format(p," ".join(solder))
|
# Copyright (c) James Percent and Unlock contributors.
# All rights reserved.
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of Unlock nor the names of its contributors may be used
# to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from unlock.controller.controller import *
from unlock.state import HierarchyGridState, FastPadState, ControllerGridState, FrequencyScopeState, \
TimeScopeState, ContinuousVepDiagnosticState, DiscreteVepDiagnosticState, TimedStimulus, \
TimedStimuli, OfflineTrialData, OfflineData, SequentialTimedStimuli, UnlockStateChain, UnlockStateFactory
from unlock.view import GridSpeakView, HierarchyGridView, FastPadView, GridView, FrequencyScopeView, \
TimeScopeView, PygletDynamicTextLabel, PygletTextLabel, SpritePositionComputer, FlickeringPygletSprite, \
UnlockViewFactory
from unlock.bci import UnlockCommandFactory, UnlockDecoder
class UnlockControllerFactory(object):
    """
    UnlockControllerFactory is the entry point for creating any externally accessible component
    of the controller package.
    """
    def __init__(self):
        super(UnlockControllerFactory, self).__init__()

    def create_pyglet_window(self, signal, fullscreen=False, fps=True, vsync=True):
        """Create the top-level PygletWindow bound to the given signal source."""
        return PygletWindow(signal, fullscreen, fps, vsync)

    def create_canvas(self, width, height, xoffset=0, yoffset=0):
        """Create a Canvas with its own pyglet graphics batch."""
        batch = pyglet.graphics.Batch()
        return Canvas(batch, width, height, xoffset, yoffset)

    def create_controller_chain(self, window, stimulation, command_receiver, state, views, name="Nameless",
                                icon='', standalone=False):
        """Assemble a controller chain: a command-connected fragment (stimuli)
        followed by a state/view fragment, wired to one command receiver."""
        fragment = UnlockControllerFragment(state, views, stimulation.canvas.batch)
        command_connected_fragment = self.create_command_connected_fragment(stimulation.canvas, stimulation.stimuli,
                                                                            stimulation.views, command_receiver)
        chain = UnlockControllerChain(window, command_receiver, [command_connected_fragment, fragment], name, icon,
                                      standalone)
        return chain

    def create_command_connected_fragment(self, canvas, stimuli, views, command_receiver):
        """Create the fragment that forwards receiver commands to the stimuli/views."""
        command_connected_fragment = UnlockCommandConnectedFragment(command_receiver, stimuli, views, canvas.batch)
        return command_connected_fragment

    def create_dashboard(self, window, canvas, controllers, command_connected_fragment, views, state, calibrator=None):
        """Build the standalone Dashboard chain and splice its poll_signal so the
        dashboard fragment can intercept polling before the chain sees it."""
        dashboard_fragment = UnlockDashboard(window, state, views, canvas.batch, controllers, calibrator)
        dashboard_chain = UnlockControllerChain(window, command_connected_fragment.command_receiver,
                                                [command_connected_fragment, dashboard_fragment], 'Dashboard', '', standalone=True)
        # Swap: the fragment keeps the chain's original poll_signal and the
        # chain calls the fragment's interceptor instead.
        dashboard_fragment.poll_signal = dashboard_chain.poll_signal
        dashboard_chain.poll_signal = dashboard_fragment.poll_signal_interceptor
        return dashboard_chain
|
import logging
import os
from pprint import pprint as pp, pformat as pf
import click
from click_configfile import matches_section, Param, SectionSchema, ConfigFileReader
# from click_repl import repl
from ubus import Ubus
logging.basicConfig(level=logging.INFO)
_LOGGER = logging.getLogger(__name__)
logging.getLogger('requests.packages.urllib3.connectionpool').setLevel(logging.WARNING)
pass_ubus = click.make_pass_decorator(Ubus)
class ConfigSectionSchema:
    """Declarative schema for the [ubus] section of python-ubus.ini."""
    @matches_section("ubus")
    class Ubus(SectionSchema):
        host = Param(type=str)
        username = Param(type=str)
        password = Param(type=str)
class ConfigFileProcessor(ConfigFileReader):
    """Reads python-ubus.ini from ~/.config/ or the cwd using the schema above."""
    config_files = ["python-ubus.ini"]
    config_searchpath = [os.path.expanduser("~/.config/"), "."]
    config_section_schemas = [ ConfigSectionSchema.Ubus ]
# Click defaults come from the config file; unknown options fall through to
# the free-form `unproc` arguments handled in main().
CONTEXT_SETTINGS = dict(default_map=ConfigFileProcessor.read_config(),
                        ignore_unknown_options=True)
#class UbusCli(click.MultiCommand):
# def list_commands(self, ctx):
# return list(ctx.obj)
@click.group(invoke_without_command=True, context_settings=CONTEXT_SETTINGS)
@click.option('--host', default='localhost')
@click.option('--username', default='')
@click.option('--password', default='')
@click.option('--debug/--no-debug', default=False)
@click.argument('unproc', nargs=-1, type=click.UNPROCESSED)
@click.pass_context
def main(ctx, host, username, password, debug, unproc):
    """Entry point: connect to ubus and dispatch.

    No args -> list all interfaces; one arg -> list that namespace's methods;
    two or more -> call namespace/method with trailing key=value parameters.
    """
    assert isinstance(ctx, click.Context)
    if debug:
        _LOGGER.setLevel(logging.DEBUG)
    ubus = Ubus(host, username, password)
    ctx.obj = ubus
    # No positional arguments: default to the `list` subcommand.
    if len(unproc) == 0:
        ctx.invoke(list)
        return
    # First token names a registered click subcommand: run it directly.
    if unproc[0] in ctx.command.commands.keys():
        ctx.invoke(ctx.command.commands[unproc[0]])
        return
    # TODO cleanup and allow repling of this
    # 1. list interface methods
    #    pyubus dhcp
    if len(unproc) == 1:
        ns = unproc[0]
        click.echo("== %s ==" % ns)
        for m in ubus[ns]:
            click.echo(" * %s" % ubus[ns][m])
        return
    # 2. call method with or without parameters
    #    pyubus dhcp ipv4leases
    #    pyubus iwinfo assoclist device=wlan0
    elif len(unproc) >= 2:
        with ubus:
            ns, cmd, *params = unproc
            m = ubus[ns][cmd]
            # Parse trailing key=value tokens into keyword arguments.
            params_dict = {}
            for param in params:
                k, v = param.split("=")
                params_dict[k] = v
            click.echo("Calling %s with %s" % (m, params_dict))
            res = m(**params_dict)
            click.echo("Result: %s" % pf(res))
            return
#@main.command()
#@pass_ubus
#def repl(ubus):
# repl(click.get_current_context(), prompt_kwargs={})
@main.command()
@pass_ubus
def list(ubus):
    """Print every ubus interface and its methods.

    NOTE(review): the name shadows the builtin `list`, but click derives the
    CLI command name from the function name, so renaming would change the UI.
    """
    click.echo("Printing interfaces..")
    for iface in ubus:
        click.echo("> %s" % iface)
        for method in iface:
            click.echo("\t * %s" % method)
        click.echo()
@main.command()
@pass_ubus
def leases(ubus):
    """Print each associated wifi client with its DHCP hostname, signal, and idle time."""
    with ubus:
        def _norm(mac):
            # Canonical MAC form used for lookups: lowercase, no colons.
            return mac.replace(':', '').lower()

        def get_host_for_mac(mac_to_host, mac):
            # FIX: the original ignored its `mac` parameter and read `assoc`
            # from the enclosing scope instead.
            return mac_to_host.get(_norm(mac), "<unknown>")

        # Hostname lookup table from the DHCP lease database.  Keys are
        # normalized so the on-wire format of lease['mac'] doesn't matter
        # (the original stored raw keys but looked up normalized ones).
        mac_to_host = {}
        for dev in ubus["dhcp"]["ipv4leases"]().values():
            for iface_leases in dev.values():
                for vlist in iface_leases.values():
                    for lease in vlist:
                        mac_to_host[_norm(lease['mac'])] = lease['hostname']
        for device in ubus["iwinfo"]["devices"]():
            for assoc in ubus["iwinfo"]["assoclist"](device=device):
                print("%s (%s) - signal: %s, inactive: %s)" % (
                    get_host_for_mac(mac_to_host, assoc['mac']),
                    assoc['mac'],
                    assoc['signal'],
                    assoc['inactive']))
if __name__ == "__main__":
main()
|
from scipy import integrate
import numpy as np
# just a bunch of IMF functions
def kroupa(M, alpha_1 = 0.3, alpha_2 = 1.3, alpha_3 = 2.3, xi_o = 1.0):
    """Kroupa (2001) broken power-law IMF dN/dM for scalar or array-like M.

    Segments: M <= 0.08, 0.08 < M <= 0.5, M > 0.5 with slopes alpha_1..alpha_3.
    xi_o scales the whole function (default 1.0 matches the old behaviour).
    Returns a scalar for scalar input, an array otherwise.
    """
    M = np.asarray(M, dtype=float)
    scalar_input = False
    if M.ndim == 0:
        M = M[None]
        scalar_input = True
    # FIX: the original indexed dNdM with the *mass values* themselves
    # (dNdM[low_mass] where low_mass = M[M <= 0.08]) — a fancy-indexing bug
    # that crashes or scrambles the output; boolean masks were intended.
    low_mass = (M <= 0.08)
    mid_mass = (M > 0.08) & (M <= 0.5)
    high_mass = (M > 0.5)
    dNdM = np.zeros(M.shape)
    dNdM[low_mass] = M[low_mass]**(-alpha_1)
    dNdM[mid_mass] = M[mid_mass]**(-alpha_2)
    dNdM[high_mass] = M[high_mass]**(-alpha_3)
    # FIX: apply the normalization xi_o (previously accepted but ignored);
    # the default of 1.0 keeps old results unchanged.
    dNdM = dNdM * xi_o
    if scalar_input:
        return np.squeeze(dNdM)
    else:
        return dNdM
def salpeter(M, alpha = 2.35, xi_o=1.0):
    """Salpeter (1955) power-law IMF: dN/dM = xi_o * M**(-alpha)."""
    power_law = M**(-alpha)
    return xi_o * power_law
def sample_IMF(IMF, N=None, M=None, M_min = 1.0, M_max = 100.0, npoints = 1000, **kwargs):
    """Sample stellar masses from `IMF` via an inverse-CDF lookup on a log grid.

    Give exactly one of N (number of stars) or M (total mass to form);
    kwargs are forwarded to the IMF function.
    NOTE(review): Python 2 code — the print statement at the bottom and the
    integer-division assumptions (npoints / 2) in _find_bin.
    """
    # bin IMF in logspace
    dm = np.log10(M_max / M_min) / (1.0*(npoints - 1))
    # m_o: log10 of the smallest grid mass
    m_o = np.log10(M_min)
    i = np.arange(0, npoints)
    # cumulative probability density on the log-spaced mass grid
    m = 10.0**(m_o + i*dm)
    IMF_vals = IMF(m, **kwargs)
    IMF_vals = np.cumsum(IMF_vals)
    IMF_vals = IMF_vals / (IMF_vals[-1] * 1.0)
    def _find_bin(rnum):
        # Binary search for the CDF bin bracketing rnum.
        bin_number = npoints / 2
        width = npoints / 2
        while ( width > 1):
            width = width / 2
            if (rnum > IMF_vals[bin_number]):
                bin_number = bin_number + width
            elif (rnum < IMF_vals[bin_number]):
                bin_number = bin_number - width
            else:
                break
        return bin_number
    if N != None and M == None:
        # Sample a fixed number of stars.
        random_numbers = np.random.rand(N)
        mass_sample = np.zeros(N)
        for i in np.arange(N):
            bin_number = _find_bin(random_numbers[i])
            # NOTE(review): the m_o offset is not added back here, so the
            # mass is only exact when M_min == 1.0 (m_o == 0) — confirm.
            mass_sample[i] = 10.0**(bin_number *dm)
    elif M != None and N == None:
        # make array now rather than appending every time
        # max number of stars is desired mass / min mass
        # NOTE(review): np.zeros(M / M_min) passes a float size — relies on
        # Python 2 / old-numpy behaviour.
        mass_sample = np.zeros(M / M_min)
        i = -1
        total_mass = np.sum(mass_sample)
        while total_mass <= M:
            i = i + 1
            rnum = np.random.rand()
            bin_number = _find_bin(rnum)
            mass_sample[i] = 10.0**(bin_number * dm)
            total_mass = np.sum(mass_sample)
        # does keeping or removing the final star make the total mass closest
        # to desired? Change if needed. Must form at least 1 star though
        if np.size(mass_sample) > 1:
            if np.abs((total_mass - M)) < np.abs( np.sum(mass_sample[:i]) - M):
                mass_sample[i] = 0.0
                i = i - 1
            # now only grab the non-zero star masses
            # NOTE(review): slicing [0:i] excludes index i, so the last kept
            # star appears to be dropped — suspected off-by-one; verify.
            mass_sample = mass_sample[0:i]
    else:
        print "Can only sample by mass or number of stars. Not both"
        return None
    return mass_sample
def integrate_imf(function, a, b):
    """Numerically integrate `function` over [a, b]; return the value only
    (the quadrature error estimate is discarded)."""
    value, _abserr = integrate.quad(function, a, b)
    return value
def prob_SN1a(M_proj, t, t_form, dt,
              t_min = 40.0, t_max = 14.6E3 , N_1a_per_msun = 3.0E-3,
              alpha = 2.35, beta = 1.06, M_min = 1.0, M_max = 100.0):
    """Expected SN Ia count during dt for a population formed at t_form.

    Combines a power-law delay-time distribution (slope beta, normalized to
    N_1a_per_msun over [t_min, t_max]) with the mass formed, derived from
    M_proj and a Salpeter-like slope alpha over [M_min, M_max].
    NOTE(review): time units must match between t/t_form/dt and t_min/t_max —
    confirm against the callers.
    """
    # Age of the population since formation.
    t = t - t_form
    # Normalization of the delay-time distribution over [t_min, t_max].
    norm_DTD = (-beta+1)*(t_max**(-beta + 1.0) - t_min**(-beta+1.0))**(-1.0)*N_1a_per_msun
    # Mass formed, integrating M * M**(-alpha) between M_min and M_max.
    M_SF = M_proj**(alpha) * (M_max**(-alpha+2.0) - M_min**(-alpha+2.0)) / (-alpha + 2.0)
    return norm_DTD * M_SF * t**(-beta) * dt
def DTD(t, beta = 1.2, NSN = 0.00130):
    """SN Ia delay-time distribution: power law in population age `t` (seconds).

    Given in Maoz as t in Gyr and NSN in 1/Msun, yielding SN / yr / 1E10 Msun.
    """
    Gyr = 3.1556E13 * 1.0E3  # seconds per Gyr
    yr = Gyr / 1.0E9         # seconds per year (unused; kept from original)
    age_in_gyr = t / Gyr
    return NSN * age_in_gyr**(-beta) / (1.0E10)
|
import random
import time

# Number-guessing game (Russian prompts): pick a random number in 1..10 and
# prompt until guessed, then report how long the player took.
number = random.randint(1, 10)
attempt = 0
when_things_went_wrong = time.time()  # game start timestamp
# Valid inputs are exactly the strings '1'..'10'.
approved_chars = [str(i) for i in range(1, 11)]
while True:
    if attempt == 0:
        ges = input('Угадай число от 1 до 10: ')
    elif attempt == 1:
        print('Да ладно, с кем не бывает')
        ges = input('Попробуй ещё разок: ')
    else:
        # Russian plural suffix for "раз": bare form for counts 5..21,
        # otherwise append 'a'.
        msg = 'Это ж надо, уже %s раз%s не повезло' % (attempt, '' if attempt in range(5, 22) else 'a')
        print(msg)
        ges = input('Попробуй ещё разок: ')
    attempt += 1
    if ges in approved_chars:
        ges = int(ges)
        if ges < number:
            print('Маловато')
        elif ges > number:
            print('Не, ну это уже перебор')
        elif ges == number:
            print('Ну наконец')
            waste_time = time.time() - when_things_went_wrong
            print('Поздравляю, ты постарел на ' + str(int(waste_time)) + ' сек')
            break
    else:
        # Input outside '1'..'10'.
        print('Так низя')
|
# Модуль для проверки ответов к Заданию № 07
from openpyxl import load_workbook
from openpyxl.styles import Font
import sys
import error_rate as er
import source_codes as sc
import linear_codes as lc
from pprint import pprint as pp
import numpy as np
from random import random
import pytils.translit
import re
def has_numbers(s):
    """Return the re.Match for the first digit in s, or None if s has none."""
    # FIX: use a raw string for the pattern; '\d' in a plain literal is an
    # invalid escape sequence (DeprecationWarning today, an error in future
    # Python versions).
    return re.search(r'\d', s)
def checker(group, student, task_code):
    """Check a student's Huffman-coding answers in their .xlsx workbook.

    Reads '<student>_<task_code>_<group>.xlsx', extracts the alphabet and
    probabilities from the 'Main' sheet and the submitted Huffman codes
    (for single symbols X and for pairs XX) from the 'Check' sheet, then
    verifies them against a freshly built reference code: codeword lengths
    must match and the prefix property must hold (checked via assert).
    Relies on module-level globals m_alphabet, min_p, max_p set in __main__.
    """
    fname = f'{student}_{task_code}_{group}.xlsx'
    print(f'\n')
    print(f'*********************')
    print(f'Чтение файла {fname}')
    try:
        wb = load_workbook(fname)
    except FileNotFoundError:
        print(f'Файл {fname} не найден')
        return
    # (the original re-opened the workbook a second time here, which both
    # re-read the file and bypassed the try/except above — removed)
    ws = wb['Main']
    head_rows = 1  # number of header rows
    trash_rows = 1  # number of "junk" rows
    params = []
    for row in ws.iter_rows(min_row = 1, max_col = m_alphabet + 1, \
        max_row = trash_rows + 2 + 1, values_only = True):
        row = list(filter(None.__ne__, row))  # drop the unwanted None cells
        n_row = len(row)
        if n_row > 0:
            params.append(row)
    params = params[head_rows: ]
    sym_, P_ = params
    assert(sc.all_in_range_incl(P_, min_p, max_p))
    al_ = dict(zip(sym_, P_))
    print(f'Алфавит, {{X, P}}: {al_}')
    print(f'Норма: {sum(al_.values())}')
    wsC = wb['Check']
    head_rows = 3  # number of header rows
    trash_rows = 5  # number of "junk" rows
    params = []
    # upper bound on a codeword length, used to size the column scan
    len_code_limit = 2 * int(np.log2(1. / min(P_)) + 0.5) + 1
    for row in wsC.iter_rows(min_row = 1, max_col = len_code_limit + 1, \
        max_row = trash_rows + 1 + m_alphabet + m_alphabet * m_alphabet, values_only = True):
        row = list(filter(None.__ne__, row))  # drop the unwanted None cells
        n_row = len(row)
        if n_row > 0:
            params.append(row)
    params = params[head_rows: ]
    symbols = []
    code_words = []
    for i in range(m_alphabet):
        symbols.append(params[i].pop(0))
        code_words.append(params[i])
    code_ = dict(zip(symbols, code_words))
    print('Введенный код Хаффмана для символа X:')
    print(code_)
    code = sc.make_huffman_table(al_)
    print('Найденный код Хаффмана (один из множества вариантов):')
    print(code)
    # Validate the submitted single-symbol code:
    # 1) codeword lengths must equal the reference lengths
    l_ = {}
    for k, v in code_.items():
        l_[k] = len(v)
    l = {}
    for k, v in code.items():
        l[k] = len(v)
    assert(l_ == l)
    # 2) the prefix property must hold
    ok = sc.check_prefix(code_)
    assert(ok)
    params = params[-m_alphabet * m_alphabet: ]
    symbols = []
    code_words = []
    for i in range(m_alphabet * m_alphabet):
        symbols.append(params[i].pop(0))
        code_words.append(params[i])
    code_ = dict(zip(symbols, code_words))
    print('Введенный код Хаффмана для символа XX:')
    print(code_)
    # Build the product alphabet XX with pair probabilities v1*v2;
    # sym_match maps the packed index back to the symbol pair string.
    alal = {}
    sym_match = {}
    for k1, v1 in al_.items():
        for k2, v2 in al_.items():
            sym_match[k1 + k2 * m_alphabet] = str(k2) + str(k1)
            alal[k1 + k2 * m_alphabet] = v1 * v2
    code_xx = sc.make_huffman_table(alal)
    code_xx = dict(sorted(code_xx.items()))
    code = {}
    for k, v in code_xx.items():
        code[sym_match[k]] = v
    print('Найденный код Хаффмана (один из множества вариантов):')
    print(code)
    # Same validation for the pair code:
    # 1) codeword lengths must match
    l_ = {}
    for k, v in code_.items():
        l_[k] = len(v)
    l = {}
    for k, v in code.items():
        l[k] = len(v)
    assert(l_ == l)
    # 2) prefix property
    ok = sc.check_prefix(code_)
    assert(ok)
    print('All Ok')
if __name__ == "__main__":
    # Require Python 3.7+ (f-strings and insertion-ordered dicts are used).
    mjr = sys.version_info.major
    mnr = sys.version_info.minor
    if (mjr == 3 and mnr < 7) or mjr < 3:
        print('Требуется Python версии 3.7 и выше!')
        sys.exit()  # sys.exit() instead of the site-provided exit()
    task_code = '07'
    fn = 'list_2020.txt'
    # Alphabet size of X
    m_alphabet = 3
    # Smallest allowed probability
    min_p = 0.05
    # Largest allowed probability
    max_p = 0.95
    # Context manager guarantees the roster file is closed
    # (the original left it open).
    with open(fn, 'r', encoding = 'utf-8') as students_file:
        students = students_file.readlines()
    group = ''
    student = ''
    # The roster alternates group lines (contain digits) and student names;
    # each student line triggers a check against the current group.
    for s in students:
        s = s.strip()
        if '#' in s:
            continue
        if s:
            s_translit = pytils.translit.translify(s)
            if has_numbers(s_translit):
                group = s_translit
            else:
                student = s_translit
                checker(group, student, task_code)
|
import httplib2
from apiclient.discovery import build
from oauth2client.file import Storage
from oauth2client.client import flow_from_clientsecrets
from oauth2client import tools
# Google Calendar API service handle; module-level global, populated in
# main() after the OAuth flow and used by the other helpers.
service = ""
def main():
    """Authorize against the Google Calendar API, then recolor events.

    Reads name -> colorId pairs from config.txt and sets the color of every
    event in the hard-coded calendar whose summary matches a configured name.
    """
    # *** AUTHORIZATION ***
    global service
    flow = flow_from_clientsecrets('client_secrets.json',
                                   scope='https://www.googleapis.com/auth/calendar')
    # Cached credentials live in calendar.dat; re-run the OAuth flow only
    # when they are missing or invalid.
    storage = Storage('calendar.dat')
    credentials = storage.get()
    if credentials is None or credentials.invalid is True:
        credentials = tools.run_flow(flow, storage)
    http = httplib2.Http()
    http = credentials.authorize(http)
    service = build(serviceName='calendar', version='v3', http=http)
    # *** END AUTHORIZATION ***
    # Read in config file to get name/colorID associations
    name_color_dict = read_config('config.txt')
    print(name_color_dict)
    # Choose calendar and get calendar ID
    #cal_id = choose_calendar()
    cal_id = '614i74105o8hdn44cqgts47hlc@group.calendar.google.com'
    # Walk every page of events in the calendar; recolor events whose
    # summary appears in the config mapping.
    page_token = None
    while True:
        events = service.events().list(calendarId=cal_id,
                                       pageToken=page_token).execute()
        for event in events['items']:
            summary = event['summary']
            event_id = event['id']
            # if matches criteria, change color
            if summary in name_color_dict:
                set_event_color(cal_id, event_id, name_color_dict[summary])
        page_token = events.get('nextPageToken')
        if not page_token:
            break
def set_event_color(cal_id, event_id, color_id):
    """Set the colorId of one event via the global Calendar service."""
    events_api = service.events()
    # fetch, patch locally, push back
    event = events_api.get(calendarId=cal_id, eventId=event_id).execute()
    event['colorId'] = color_id
    events_api.update(calendarId=cal_id, eventId=event_id,
                      body=event).execute()
def choose_calendar():
    """List the user's calendars and return the id of the one they pick.

    Note: numbering restarts on every page and only the last page remains
    indexable, exactly as in the original implementation.
    """
    page_token = None
    while True:
        calendar_list = service.calendarList().list(pageToken=page_token).execute()
        for counter, entry in enumerate(calendar_list['items']):
            print(str(counter) + ": ", end="")
            print(entry['summary'])
        page_token = calendar_list.get('nextPageToken')
        if not page_token:
            break
    # Keep prompting until the user types something int()-parsable.
    while True:
        try:
            choice = int(input("Choose which calendar to read: "))
            break
        except ValueError:
            print("Please enter a number")
    return calendar_list['items'][choice]['id']
def read_config(filename):
    """Read the config file and return a dict of event name -> colorId.

    Each line has the form "<event name>,<colorId>". Blank lines and lines
    without a comma are now skipped; the original raised IndexError on them.
    For lines with extra commas only the second field is kept, as before.
    """
    name_color_dict = {}
    with open(filename) as config:
        for line in config:
            config_record = line.rstrip("\n").split(",")
            if len(config_record) >= 2:
                name_color_dict[config_record[0]] = config_record[1]
    return name_color_dict
# Script entry point: run the authorization + recoloring workflow.
if __name__ == "__main__":
    main()
__all__ = ["Listener"]
import logging
from select import select
from typing import (
Any,
Callable,
Optional,
)
from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
from .base import BaseCommuter
from .connector import Connector
logger = logging.getLogger("pgcom")
class Listener(BaseCommuter):
    """Listener on the notification channel.
    This class implements an asynchronous interaction with database
    offered by PostgreSQL commands LISTEN and NOTIFY.
    Notifications are received after trigger is fired, the
    :func:`~pgcom.listener.Listener.poll` method can be used
    to check for the new notifications without wasting resources.
    Methods :func:`~pgcom.listener.Listener.create_notify_function` and
    :func:`~pgcom.listener.Listener.create_trigger` present
    basic query constructors, which can be used to define a new trigger
    and a new function associated with this trigger. Some custom
    definitions can be done using :func:`~pgcom.commuter.Commuter.execute`
    method.
    Args:
        pool_size:
            The maximum amount of connections the pool will support.
        pre_ping:
            If True, the pool will emit a "ping" on the connection to
            test if the connection is alive. If not, the connection will
            be reconnected.
        max_reconnects:
            The maximum amount of reconnects, defaults to 3.
    """
    def __init__(
        self,
        pool_size: int = 20,
        pre_ping: bool = False,
        max_reconnects: int = 3,
        **kwargs: str,
    ) -> None:
        # All connection management is delegated to the pooled Connector.
        super().__init__(Connector(pool_size, pre_ping, max_reconnects, **kwargs))
    def poll(
        self,
        channel: str,
        on_notify: Optional[Callable[[str], None]] = None,
        on_timeout: Optional[Callable] = None,
        on_close: Optional[Callable] = None,
        on_error: Optional[Callable[[Exception], None]] = None,
        timeout: int = 5,
    ) -> None:
        """Listen to the channel and activate callbacks on the notification.
        This function sleeps until awakened when there is some data
        to read on the connection.
        Args:
            channel:
                Name of the notification channel.
            on_notify:
                Callback to be executed when the notification has arrived.
            on_timeout:
                Callback to be executed by timeout.
            on_close:
                Callback to be executed when connection is closed.
            on_error:
                Callback to be executed if error occurs.
            timeout:
                Timeout in seconds.
        """
        with self.connector.open_connection() as conn:
            # Notifications should be received outside a transaction block,
            # hence autocommit on this connection.
            conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
            with conn.cursor() as cur:
                # NOTE(review): `channel` is interpolated as an SQL identifier;
                # it must come from trusted code, not user input.
                cur.execute(f"LISTEN {channel}")
                try:
                    while 1:
                        # select() returns ([], [], []) when nothing became
                        # readable within `timeout` seconds.
                        if select([conn], [], [], timeout) == ([], [], []):
                            self._callback(on_timeout)
                        else:
                            # Data arrived: let psycopg2 read it, then drain
                            # the queued notifications in arrival order.
                            conn.poll()
                            while conn.notifies:
                                notify = conn.notifies.pop(0)
                                self._callback(on_notify, notify.payload)
                except (Exception, KeyboardInterrupt, SystemExit) as e:
                    # Stop listening before dispatching the shutdown/error
                    # callbacks; interrupts map to on_close, errors to on_error.
                    cur.execute(f"UNLISTEN {channel}")
                    if isinstance(e, KeyboardInterrupt) or isinstance(
                        e, SystemExit
                    ):  # noqa: E129
                        self._callback(on_close)
                    else:
                        self._callback(on_error, e)
    def create_notify_function(self, func_name: str, channel: str) -> None:
        """Create a function called by trigger.
        This function generates a notification, which is sending
        to the specified channel when trigger is fired.
        Args:
            func_name:
                Name of the function.
            channel:
                Name of the the channel the notification is sending to.
        """
        _schema, _func_name = self._get_schema(func_name)
        # The payload is the inserted/updated row serialized as JSON text.
        self.execute(
            f"""
            CREATE OR REPLACE FUNCTION {_schema}.{_func_name}()
                RETURNS trigger
                LANGUAGE plpgsql
            AS $function$
            BEGIN
                PERFORM pg_notify('{channel}', row_to_json(NEW)::text);
                RETURN NEW;
            END;
            $function$
            """
        )
    def create_trigger(
        self,
        table_name: str,
        func_name: str,
        trigger_name: Optional[str] = None,
        when: str = "AFTER",
        event: str = "INSERT",
        for_each: str = "STATEMENT",
    ) -> None:
        """Create trigger.
        Creates a new trigger associated with the table and
        executed the specified function when certain events occur.
        Args:
            table_name:
                The name of the table the trigger is for.
            func_name:
                A user-supplied function, which is executed when the
                trigger fires.
            trigger_name:
                The name to give to the new trigger. If not specified, then
                the automatically created name will be assigned.
            when:
                One of "BEFORE", "AFTER", "INSTEAD OF".
                Determines when function is called.
            event:
                One of "INSERT", "UPDATE", "DELETE", "TRUNCATE".
                Use "OR" for event combinations, e.g. "INSERT OR UPDATE".
            for_each:
                One of "ROW", "STATEMENT". This specifies whether the
                trigger should be fired once for every row affected by the
                event, or just once per SQL statement.
        """
        _schema, _table_name = self._get_schema(table_name)
        # Default name: "<table>_<event>" lowercased, spaces -> underscores.
        if trigger_name is None:
            trigger_name = str.lower(_table_name + "_" + event.replace(" ", "_"))
        # Drop-then-create makes the call idempotent.
        self.execute(
            f"""
            DROP TRIGGER IF EXISTS {trigger_name}
            ON {_schema}.{_table_name}
            """
        )
        # NOTE(review): "EXECUTE FUNCTION" syntax requires PostgreSQL 11+ —
        # confirm the minimum supported server version.
        cmd = f"""
            CREATE TRIGGER {trigger_name} {when} {event}
            ON {_schema}.{_table_name}
            FOR EACH {for_each}
            EXECUTE FUNCTION {_schema}.{func_name}()
            """
        self.execute(cmd)
    @staticmethod
    def _callback(callback: Optional[Callable] = None, *args: Any) -> None:
        # Callbacks are optional; exceptions they raise are logged and
        # swallowed so one bad callback cannot kill the poll loop.
        if callback:
            try:
                callback(*args)
            except Exception as e:
                logger.error(f"error from callback {callback}: {e}")
|
###########################################################
# Print student and course information
# nathanLanLab1.py
# 6.17.2020
###########################################################
print("Hello World!")
# Student and course facts that fill the template below.
surname = "Lan"
gmu_id = "G01246656"
syllabus_fact_1 = "Assesments are due Tuesday at 11:59"
syllabus_fact_2 = "Assignments are worth 50% of your grade"
lab_fact_1 = "that source code shouldn't exceed 80 characters per line"
lab_fact_2 = "to use meaningful names taken from the problem domain"
lab_fact_3 = "to incorporate descriptive comments on code functionality"
# A single multiline template avoids "\" continuations or repeated print()s.
template = '''My last name is {0}
My G number is {1}
Two syllabus details are:
1. {2}
2. {3}
Three lab details are:
1. {4}
2. {5}
3. {6}
'''
# Fill the placeholders and print the whole report at once.
print(template.format(surname, gmu_id, syllabus_fact_1, syllabus_fact_2, lab_fact_1, lab_fact_2, lab_fact_3))
#output
'''
Hello World!
My last name is Lan
My G number is G01246656
Two syllabus details are:
1. Assesments are due Tuesday at 11:59
2. Assignments are worth 50% of your grade
Three lab details are:
1. that source code shouldn't exceed 80 characters per line
2. to use meaningful names taken from the problem domain
3. to incorporate descriptive comments on code functionality
'''
|
from __future__ import print_function
import copy
import logging
import numpy as np
import torch
import torch.nn.functional as F
import torch.nn as nn
import torch.utils.data as td
from PIL import Image
from tqdm import tqdm
import trainer
import networks
class Trainer(trainer.GenericTrainer):
    """Task-incremental trainer that penalizes drift from previous tasks
    with a Fisher-weighted quadratic term (EWC-style regularization —
    presumably; confirm against the paper/config this repo targets).
    """
    def __init__(self, model, args, optimizer, evaluator, taskcla):
        super().__init__(model, args, optimizer, evaluator, taskcla)
        # strength of the quadratic regularizer used in criterion()
        self.lamb=args.lamb
    def update_lr(self, epoch, schedule):
        """Multiply the LR by gammas[i] when `epoch` equals schedule[i]."""
        for temp in range(0, len(schedule)):
            if schedule[temp] == epoch:
                for param_group in self.optimizer.param_groups:
                    self.current_lr = param_group['lr']
                    param_group['lr'] = self.current_lr * self.args.gammas[temp]
                    print("Changing learning rate from %0.4f to %0.4f"%(self.current_lr,
                                                                        self.current_lr * self.args.gammas[temp]))
                self.current_lr *= self.args.gammas[temp]
    def setup_training(self, lr):
        """Reset the learning rate on every optimizer param group."""
        for param_group in self.optimizer.param_groups:
            print("Setting LR to %0.4f"%lr)
            param_group['lr'] = lr
            self.current_lr = lr
    def update_frozen_model(self):
        """Snapshot the current model as the frozen reference (no grads)."""
        self.model.eval()
        self.model_fixed = copy.deepcopy(self.model)
        self.model_fixed.eval()
        for param in self.model_fixed.parameters():
            param.requires_grad = False
    def train(self, train_loader, test_loader, t):
        """Train task `t`; for t>0 first freeze the model and refresh Fisher.

        NOTE: requires CUDA — batches are moved with .cuda().
        """
        lr = self.args.lr
        self.setup_training(lr)
        # Do not update self.t
        if t>0:
            self.update_frozen_model()
            self.update_fisher()
        # Now, you can update self.t
        self.t = t
        #kwargs = {'num_workers': 0, 'pin_memory': True}
        kwargs = {'num_workers': 0, 'pin_memory': False}
        self.train_iterator = torch.utils.data.DataLoader(train_loader, batch_size=self.args.batch_size, shuffle=True, **kwargs)
        self.test_iterator = torch.utils.data.DataLoader(test_loader, 100, shuffle=False, **kwargs)
        # smaller batches for the Fisher estimation pass
        self.fisher_iterator = torch.utils.data.DataLoader(train_loader, batch_size=20, shuffle=True, **kwargs)
        for epoch in range(self.args.nepochs):
            self.model.train()
            self.update_lr(epoch, self.args.schedule)
            for samples in tqdm(self.train_iterator):
                data, target = samples
                data, target = data.cuda(), target.cuda()
                batch_size = data.shape[0]
                # per-task output head `t`
                output = self.model(data)[t]
                loss_CE = self.criterion(output,target)
                self.optimizer.zero_grad()
                (loss_CE).backward()
                self.optimizer.step()
            train_loss,train_acc = self.evaluator.evaluate(self.model, self.train_iterator, t)
            num_batch = len(self.train_iterator)
            print('| Epoch {:3d} | Train: loss={:.3f}, acc={:5.1f}% |'.format(epoch+1,train_loss,100*train_acc),end='')
            valid_loss,valid_acc=self.evaluator.evaluate(self.model, self.test_iterator, t)
            print(' Valid: loss={:.3f}, acc={:5.1f}% |'.format(valid_loss,100*valid_acc),end='')
            print()
    def criterion(self,output,targets):
        """Cross-entropy plus lamb-weighted Fisher penalty vs the frozen model."""
        # Regularization for all previous tasks
        loss_reg=0
        if self.t>0:
            for (name,param),(_,param_old) in zip(self.model.named_parameters(),self.model_fixed.named_parameters()):
                loss_reg+=torch.sum(self.fisher[name]*(param_old-param).pow(2))/2
        return self.ce(output,targets)+self.lamb*loss_reg
    def fisher_matrix_diag(self):
        """Estimate the diagonal Fisher information from squared gradients."""
        # Init
        fisher={}
        for n,p in self.model.named_parameters():
            fisher[n]=0*p.data
        # Compute
        self.model.train()
        criterion = torch.nn.CrossEntropyLoss()
        for samples in tqdm(self.fisher_iterator):
            data, target = samples
            data, target = data.cuda(), target.cuda()
            # Forward and backward
            self.model.zero_grad()
            outputs = self.model.forward(data)[self.t]
            loss=self.criterion(outputs, target)
            loss.backward()
            # Get gradients
            for n,p in self.model.named_parameters():
                if p.grad is not None:
                    # NOTE(review): weighted by args.batch_size although
                    # fisher_iterator uses batch_size=20, and normalized by
                    # len(train_iterator) below — verify intended scaling.
                    fisher[n]+=self.args.batch_size*p.grad.data.pow(2)
        # Mean
        with torch.no_grad():
            for n,_ in self.model.named_parameters():
                fisher[n]=fisher[n]/len(self.train_iterator)
        return fisher
    def update_fisher(self):
        """Recompute Fisher and running-average it with previous tasks'."""
        if self.t>0:
            fisher_old={}
            for n,_ in self.model.named_parameters():
                fisher_old[n]=self.fisher[n].clone()
        self.fisher=self.fisher_matrix_diag()
        if self.t>0:
            # equal-weight running mean over the t+1 tasks seen so far
            for n,_ in self.model.named_parameters():
                self.fisher[n]=(self.fisher[n]+fisher_old[n]*self.t)/(self.t+1)
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-03-15 10:21
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: adds two optional CharFields to Host."""
    dependencies = [
        ('cmdb', '0001_initial'),
    ]
    operations = [
        migrations.AddField(
            model_name='host',
            name='application',
            # verbose_name decodes to Chinese "用途" (purpose/use)
            field=models.CharField(blank=True, max_length=100, null=True, verbose_name='\u7528\u9014'),
        ),
        migrations.AddField(
            model_name='host',
            name='principal',
            # verbose_name decodes to Chinese "负责人" (person in charge)
            field=models.CharField(blank=True, max_length=50, null=True, verbose_name='\u8d1f\u8d23\u4eba'),
        ),
    ]
|
#!/usr/bin/python3
def common_elements(set1, set2):
    """Return the elements of set1 that also occur in set2.

    Preserves set1's iteration order (and duplicates, if set1 is a list).
    set2 is converted to a set once so each membership test is O(1)
    instead of O(len(set2)); elements must therefore be hashable.
    """
    lookup = set(set2)
    return [item for item in set1 if item in lookup]
|
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine
# SQLite engine for the blockchain database (relative path db/…).
engine = create_engine('sqlite+pysqlite:///db/blockchaindb.sqlite')
# NOTE(review): declarative_base(bind=...) is legacy API removed in
# SQLAlchemy 2.0 — confirm the pinned SQLAlchemy version.
Base = declarative_base(bind=engine)
# Session factory bound to the engine; call Session() per unit of work.
Session = sessionmaker(bind=engine)
|
import json
import requests
def get_summary(cid):
    """Fetch the PubChem description record for compound *cid* as a dict."""
    url = "https://pubchem.ncbi.nlm.nih.gov/rest/pug/compound/cid/%d/description/json" % cid
    response = requests.get(url)
    return json.loads(response.content)
def parse_summary_for_odor(summary):
    """Extract odor-related sentences from a PubChem description record.

    Splits every Description field on '.' and keeps the stripped sentences
    containing an odor keyword. Returns [] when the record lacks an
    InformationList. (Fix: any() now takes a generator instead of building
    a throwaway list.)
    """
    statements = []
    # keywords should include aroma but exclude aromatic (due to its special meaning in chemistry)
    keywords = ("odor", "odour", "smell", "aroma ", "aroma,", "aroma.", "fragrance")
    if "InformationList" in summary:
        for item in summary["InformationList"]["Information"]:
            if "Description" in item:
                for statement in item["Description"].split("."):
                    if any(x in statement.lower() for x in keywords):
                        statements.append(statement.strip())
    return statements
def get_physical_description(cid):
    """Fetch the PubChem 'Physical Description' record for *cid* as a dict.

    Returns {} when the response cannot be decoded.
    """
    url = (
        "https://pubchem.ncbi.nlm.nih.gov/rest/pug_view/data/compound/%d/JSON?heading="
        "Physical+Description"
        % cid
    )
    response = requests.get(url)
    try:
        return json.loads(response.content)
    except UnicodeDecodeError:
        return {}
def parse_physical_description_for_odor(physical_description):
    """Extract odor-related sentences from a physical-description record.

    Walks the fixed Record/Section nesting produced by
    get_physical_description; returns [] when the structure is missing.
    (Fix: any() now takes a generator instead of building a throwaway list.)
    """
    statements = []
    try:
        strings = [
            x["Value"]["StringWithMarkup"][0]["String"]
            for x in physical_description["Record"]["Section"][0]["Section"][0]["Section"][0][
                "Information"
            ]
        ]
    except KeyError:
        # record does not have the expected shape — nothing to report
        pass
    else:
        # keywords should include aroma but exclude aromatic
        # (due to its special meaning in chemistry)
        keywords = ("odor", "odour", "smell", "aroma ", "aroma,", "aroma.", "fragrance")
        for string in strings:
            for statement in string.split("."):
                if any(x in statement.lower() for x in keywords):
                    statements.append(statement.strip())
    return statements
def get_ghs_classification(cid):
    """Fetch the PubChem GHS-classification record for *cid* as a dict.

    Returns {} when the response cannot be decoded.
    """
    url = (
        "https://pubchem.ncbi.nlm.nih.gov/rest/pug_view/data/compound/%d/JSON?heading=GHS"
        "+Classification"
        % cid
    )
    response = requests.get(url)
    try:
        return json.loads(response.content)
    except UnicodeDecodeError:
        return {}
# GHS hazard-statement code -> human-readable description (H3xx health hazards).
GHS_CODES = {
    "H300": "Fatal if swallowed.",
    "H301": "Toxic if swallowed",
    "H302": "Harmful if swallowed",
    "H303": "May be harmful if swallowed",
    "H304": "May be fatal if swallowed and enters airways",
    "H305": "May be harmful if swallowed and enters airways",
    "H310": "Fatal in contact with skin",
    "H311": "Toxic in contact with skin",
    "H312": "Harmful in contact with skin",
    "H313": "May be harmful in contact with skin",
    "H314": "Causes severe skin burns and eye damage",
    "H315": "Causes skin irritation",
    "H316": "Causes mild skin irritation",
    "H317": "May cause an allergic skin reaction",
    "H318": "Causes serious eye damage",
    "H319": "Causes serious eye irritation",
    "H320": "Causes eye irritation",
    "H330": "Fatal if inhaled",
    "H331": "Toxic if inhaled",
    "H332": "Harmful if inhaled",
    "H333": "May be harmful if inhaled",
    "H334": "May cause allergy or asthma symptoms or breathing difficulties if inhaled",
    "H335": "May cause respiratory irritation",
    "H336": "May cause drowsiness or dizziness",
    "H340": "May cause genetic defects",
    "H341": "Suspected of causing genetic defects",
    "H350": "May cause cancer",
    "H351": "Suspected of causing cancer",
    "H360": "May damage fertility or the unborn child",
    "H361": "Suspected of damaging fertility or the unborn child",
    "H361d": "Suspected of damaging the unborn child",
    "H361e": "May damage the unborn child",
    "H361f": "Suspected of damaging fertility",
    "H361g": "may damage fertility",
    "H362": "May cause harm to breast-fed children",
    "H370": "Causes damage to organs",
    "H371": "May cause damage to organs",
    "H372": "Causes damage to organs through prolonged or repeated exposure",
    "H373": "May cause damage to organs through prolonged or repeated exposure",
}
def parse_ghs_classification_for_odor(
    ghs_info,
    codes=("H330", "H331", "H334", "H340", "H350", "H350i", "H351", "H36", "H37"),
    only_percent=True,
    code_only=True,
):
    """Collect GHS hazard statements matching the given codes.

    With only_percent=True, a code matches only when followed by " ("
    (i.e. a percentage annotation); with code_only=True the statement is
    truncated at the first ':'. The truncation applies immediately, so
    later codes in the same entry are matched against the truncated text
    (same behavior as the original).
    """
    found = []
    if "Record" not in ghs_info:
        return found
    info_blocks = ghs_info["Record"]["Section"][0]["Section"][0]["Section"][0]["Information"]
    for info in info_blocks:
        if info["Name"] != "GHS Hazard Statements":
            continue
        for entry in info["Value"]["StringWithMarkup"]:
            text = entry["String"]
            for code in codes:
                needle = (code + " (") if only_percent else code
                if needle in text:
                    if code_only:
                        text = text.split(":")[0]
                    found.append(text)
    return found
|
from django.db import models
#add blank=True if not possible - This results in value of 0
#add null=True if may be unknown
#use .TextField() for unrestricted length(description)
class Order(models.Model):
    """A rental order: dates plus integer references to customer/car/stores."""
    order_create_date = models.DateField()
    pickup_date = models.DateField()
    return_date = models.DateField()
    # plain integer ids, not ForeignKeys — NOTE(review): consider FK fields
    customer_id = models.IntegerField()
    car_id = models.IntegerField()
    # stores may be unknown when the order is created, hence null=True
    pickup_store_id = models.IntegerField(null=True)
    return_store_id = models.IntegerField(null=True)
class Store(models.Model):
    """A rental-store location with contact details."""
    store_name = models.CharField(max_length=40)
    store_address = models.CharField(max_length=50)
    store_phone = models.CharField(max_length=30)
    store_city = models.CharField(max_length=40)
    store_state = models.CharField(max_length=40)
class Customer(models.Model):
    """A customer record with contact and demographic details."""
    customer_name = models.CharField(max_length=30)
    customer_phone = models.CharField(max_length=30)
    customer_address = models.CharField(max_length=50)
    customer_birthday = models.DateField()
    customer_occupation = models.CharField(max_length=30)
    # single-character gender code stored as "M"/"F"
    SEX_CHOICES = [("M", 'Male'), ('F', 'Female')]
    customer_gender = models.CharField(choices=SEX_CHOICES, max_length=1)
class Vehicle(models.Model):
    """A rentable vehicle and its specification sheet.

    Many spec fields are short CharFields (e.g. engine max_length=4,
    drive_type max_length=3) — presumably codes like "2.0L"/"AWD";
    confirm against the data source.
    """
    make = models.CharField(max_length=30)
    model = models.CharField(max_length=30)
    series = models.CharField(max_length=30)
    year = models.IntegerField()
    price = models.IntegerField()
    engine = models.CharField(max_length=4)
    fuel_type = models.CharField(max_length=30)
    tank_capacity = models.CharField(max_length=8)
    power = models.CharField(max_length=6)
    seats = models.IntegerField()
    transmission = models.CharField(max_length=7)
    body_type = models.CharField(max_length=20)
    drive_type = models.CharField(max_length=3)
    wheelbase = models.CharField(max_length=7)
|
from flask import render_template, request, session, redirect
from qa327 import app
import qa327.backend as bn
import re
"""
This file defines the front-end part of the service.
It elaborates how the services should handle different
http requests from the client (browser) through templating.
The html templates are stored in the 'templates' folder.
"""
@app.route('/register', methods=['GET'])
def register_get():
    """Serve the registration form (templates live in the templates folder)."""
    page = render_template('register.html', message='Register')
    return page
@app.route('/register', methods=['POST'])
def register_post():
    """Handle the registration form.

    Validates email/name/password fields and either registers the user
    (redirect to /login) or re-renders the form with an error message.
    Fix: the email regex is now a raw string — '\\.' and '\\w' in a plain
    literal are invalid string escapes (DeprecationWarning).
    """
    email = request.form.get('email')
    name = request.form.get('name')
    password = request.form.get('password')
    password2 = request.form.get('password2')
    error_message = None
    regex = r'^[a-z0-9]+[\._]?[a-z0-9]+[@]\w+[.]\w{2,3}$'
    if password != password2:
        error_message = "The passwords do not match"
    elif len(email) < 1:
        error_message = "Email format error"
    elif name.startswith((' ', '\t')):
        error_message = "Name format error"
    elif name.endswith((' ', '\t')):
        error_message = "Name format error"
    elif not name.isalnum():
        # NOTE(review): isalnum() also rejects names containing inner
        # spaces — confirm that matches the spec.
        error_message = "Name format error"
    elif len(password) < 1:
        error_message = "Password is not strong enough"
    elif (not re.search(regex, email)):
        error_message = "Email format error"
    elif len(password) < 6:
        error_message = "Password is not strong enough"
    elif not any(x.isupper() for x in password):
        error_message = "Password is not strong enough"
    elif not any(x.islower() for x in password):
        error_message = "Password is not strong enough"
    elif not any(x.isalnum() for x in password):
        # NOTE(review): this passes for any letter/digit; if a special
        # character was intended it should be any(not x.isalnum() ...) —
        # confirm the requirement before changing behavior.
        error_message = "Password is not strong enough"
    elif not (2 < len(name) < 20):
        error_message = "Name format error"
    else:
        user = bn.get_user(email)
        if user:
            error_message = "User exists"
        elif not bn.register_user(email, name, password, password2):
            error_message = "Failed to store user info."
    # if there is any error messages when registering new user
    # at the backend, go back to the register page.
    if error_message:
        return render_template('register.html', message=error_message)
    else:
        return redirect('/login')
@app.route('/login', methods=['GET'])
def login_get():
    """Serve the login form."""
    page = render_template('login.html', message='Please login')
    return page
@app.route('/login', methods=['POST'])
def login_post():
    """Handle the login form.

    Validates the email/password format, then authenticates via the backend.
    Fix: the email regex is now a raw string — '\\.' and '\\w' in a plain
    literal are invalid string escapes (DeprecationWarning).
    NOTE(review): every failure path returns the same 'Email format error'
    message, including password failures — confirm that is intentional.
    """
    email = request.form.get('email')
    password = request.form.get('password')
    regex = r'^[a-z0-9]+[\._]?[a-z0-9]+[@]\w+[.]\w{2,3}$'
    if (not re.search(regex, email)):
        return render_template('login.html', message='Email format error')
    elif len(email) < 1:
        return render_template('login.html', message='Email format error')
    elif len(password) < 1:
        return render_template('login.html', message='Email format error')
    elif len(password) < 6:
        return render_template('login.html', message='Email format error')
    elif not any(x.isupper() for x in password):
        return render_template('login.html', message='Email format error')
    elif not any(x.islower() for x in password):
        return render_template('login.html', message='Email format error')
    elif not any(x.isalnum() for x in password):
        return render_template('login.html', message='Email format error')
    user = bn.login_user(email, password)
    if user:
        session['logged_in'] = user.email
        """
        Session is an object that contains sharing information
        between browser and the end server. Typically it is encrypted
        and stored in the browser cookies. They will be past
        along between every request the browser made to this services.
        Here we store the user object into the session, so we can tell
        if the client has already login in the following sessions.
        """
        # success! go back to the home page
        # code 303 is to force a 'GET' request
        return redirect('/', code=303)
    else:
        return render_template('login.html', message='Email format error')
@app.route('/logout')
def logout():
    """Clear the login marker from the session and go home."""
    # pop with a default is a no-op when the key is absent,
    # equivalent to the original membership check + pop.
    session.pop('logged_in', None)
    return redirect('/')
def authenticate(inner_function):
    """
    :param inner_function: any python function that accepts a user object
    Wrap any python function and check the current session to see if
    the user has logged in. If login, it will call the inner_function
    with the logged in user object.
    To wrap a function, we can put a decoration on that function.
    Example:
    @authenticate
    def home_page(user):
        pass
    """
    def wrapped_inner():
        # check did we store the key in the session
        if 'logged_in' in session:
            email = session['logged_in']
            user = bn.get_user(email)
            if user:
                # if the user exists, call the inner_function
                # with user as parameter
                return inner_function(user)
        # Not logged in, or the session references a non-existent user:
        # redirect to the login page. BUG FIX: the original fell through
        # and returned None when 'logged_in' was absent, which makes
        # Flask raise instead of redirecting.
        return redirect('/login')
    # return the wrapped version of the inner_function:
    return wrapped_inner
@app.route('/', methods=['GET'])
@authenticate
def profile(user):
    """Render the home page for an authenticated user.

    Login checking happens in the @authenticate wrapper above, so this
    body only fetches the ticket list and renders the template.
    """
    all_tickets = bn.get_all_tickets()
    return render_template('index.html', user=user, tickets=all_tickets)
@app.route('/sell', methods=['POST'])
def sell_post():
    """Handle the ticket-sell form.

    Validates name/quantity/price/expiration and either stores the ticket
    or re-renders the home page with sell_message set. BUG FIX: the
    original int()-converted the year/month/day substrings before the
    format was validated, so a malformed date crashed with ValueError;
    parsing now happens only after the length/digit check.
    NOTE(review): int() on the quantity/price form fields still raises on
    non-numeric input, as in the original — confirm desired handling.
    """
    name = request.form.get('sell-name')
    quantity = int(request.form.get('sell-quantity'))
    price = int(request.form.get('sell-price'))
    expiration = request.form.get('sell-expirationDate')
    convertedDate = str(expiration)
    email = session['logged_in']
    user = bn.get_user(email)
    error_message = None
    if name.startswith((' ', '\t')):
        error_message = 'Name format error'
    elif name.endswith((' ', '\t')):
        error_message = 'Name format error'
    elif len(name) > 60:
        error_message = 'Name format error'
    elif quantity < 0 or quantity > 100:
        error_message = 'Quantity error'
    elif price < 10 or price > 100:
        error_message = 'Price error'
    elif len(expiration) != 8 or not convertedDate.isdigit():
        # validate the YYYYMMDD format BEFORE parsing the parts
        error_message = 'Expiration date error'
    else:
        year = int(convertedDate[:4])
        month = int(convertedDate[4:6])
        day = int(convertedDate[6:])
        if year < 2000 or year > 2100:
            error_message = 'Expiration date error'
        elif month < 1 or month > 12:
            error_message = 'Expiration date error'
        elif day < 1 or day > 31:
            error_message = 'Expiration date error'
    if error_message:
        tickets = bn.get_all_tickets()
        return render_template('index.html', tickets=tickets, user=user, sell_message=error_message)
    else:
        bn.add_ticket(name, quantity, price, expiration, email)
        return redirect('/')
@app.route('/buy', methods=['POST'])
def buy_post():
    """Handle the ticket-buy form.

    Validates the requested ticket and quantity, checks the buyer's
    balance, then performs the purchase. BUG FIX: the quantity-range
    message was misspelled 'Qauntity error', inconsistent with the
    'Quantity error' used two branches below and in sell_post.
    """
    name = request.form.get('buy-name')
    quantity = int(request.form.get('buy-quantity'))
    email = session['logged_in']
    user = bn.get_user(email)
    ticket = bn.get_ticket(name)
    error_message = None
    if ticket:
        if name.startswith((' ', '\t')):
            error_message = 'Name format error'
        elif name.endswith((' ', '\t')):
            error_message = 'Name format error'
        elif len(name) > 60:
            error_message = 'Name format error'
        elif quantity > 100 or quantity < 1:
            error_message = 'Quantity error'
        elif quantity > ticket.quantity:
            error_message = 'Quantity error'
        elif 1.4 * ticket.price > user.balance:
            # NOTE(review): 1.4 presumably covers service fee + tax — confirm.
            error_message = 'Insufficient funds'
    else:
        error_message = 'Ticket does not exist'
    if error_message:
        tickets = bn.get_all_tickets()
        return render_template('index.html', tickets=tickets, user=user, buy_message=error_message)
    else:
        bn.buy_ticket(name, quantity, user.email, ticket.price)
        return redirect('/')
@app.route('/update', methods=['POST'])
def update_post():
    """Handle the ticket-update form.

    BUG FIXES: (1) quantity and price arrived as strings from the form and
    were compared against ints ('0 < quantity < 101'), which raises
    TypeError on Python 3 — they are now int()-converted like in
    sell_post; (2) a malformed date string crashed the int() parsing
    before any validation ran — the format is now checked first.
    """
    name = request.form.get('update-name')
    quantity = int(request.form.get('update-quantity'))
    price = int(request.form.get('update-price'))
    expirationDate = request.form.get('update-expirationDate')
    convertedDate = str(expirationDate)
    email = session['logged_in']
    user = bn.get_user(email)
    tickets = bn.get_all_tickets()
    if name.startswith((' ', '\t')):
        return render_template('index.html', user=user, tickets=tickets, update_message = "Name format error")
    elif name.endswith((' ', '\t')):
        return render_template('index.html', user=user, tickets=tickets, update_message = "Name format error")
    elif not name.isalnum():
        return render_template('index.html', user=user, tickets=tickets, update_message = "Name format error")
    elif len(name) > 60:
        return render_template('index.html', user=user, tickets=tickets, update_message = "Name format error")
    elif not (0 < quantity < 101):
        return render_template('index.html', user=user, tickets=tickets, update_message = "Quantity must be between 0 and 100")
    elif not (10 <= price <= 100):
        return render_template('index.html', user=user, tickets=tickets, update_message = "Price must be between 10 and 100")
    elif len(convertedDate) != 8 or not convertedDate.isdigit():
        # guard the YYYYMMDD format before the int() conversions below
        return render_template('index.html', user=user, tickets=tickets, update_message = "Date format error")
    else:
        year = int(convertedDate[:4])
        month = int(convertedDate[4:6])
        day = int(convertedDate[6:])
        if not (2000 < year < 2100):
            return render_template('index.html', user=user, tickets=tickets, update_message = "Date format error")
        elif not (0 < month < 13):
            return render_template('index.html', user=user, tickets=tickets, update_message = "Date format error")
        elif not (0 < day < 32):
            return render_template('index.html', user=user, tickets=tickets, update_message = "Date format error")
        elif not (bn.check_ticket(name, quantity, price, expirationDate)):
            return render_template('index.html', user=user, tickets=tickets, update_message = "Ticket does not exist")
        else:
            return render_template('index.html', user=user, tickets=tickets, update_message = "Success")
|
"""
SYS-611: Buffon's Needle Experiment Example with Antithetic Variables.
This example performs a Monte Carlo simulation of Buffon's Needle Experiment
to estimate the probability of a needle of certain length crossing lines
on a floor with certain spacing. This probability is proportional to the
mathematical constant pi.
@author: Paul T. Grogan <pgrogan@stevens.edu>
"""
# import the python3 behavior for importing, division, and printing in python2
from __future__ import absolute_import, division, print_function
# import the matplotlib pyplot package and refer to it as `plt`
# see http://matplotlib.org/api/pyplot_api.html for documentation
import matplotlib.pyplot as plt
# import the scipy stats package and refer to it as `stats`
# see http://docs.scipy.org/doc/scipy/reference/stats.html for documentation
import scipy.stats as stats
# import the numpy package and refer to it as `np`
# see http://docs.scipy.org/doc/numpy/reference/ for documentation
import numpy as np
# define the line width and needle length for buffon's experiment
line_width = 3.0
needle_length = 2.5
# define a process generator for the event if a needle crosses a line
def drop_needle(line_width=3.0, needle_length=2.5):
    """Simulate one Buffon's-needle drop using antithetic variates.

    Draws two uniform random numbers and evaluates the line-crossing test
    for both the primary and the antithetic sample, returning their mean.
    The geometry is now parameterized (defaults match the module-level
    experiment values), so existing ``drop_needle()`` callers are unaffected.

    Args:
        line_width: spacing between the parallel floor lines.
        needle_length: length of the dropped needle.

    Returns:
        float: average of the two antithetic crossing indicators (0, 0.5 or 1).
    """
    r_1 = np.random.rand()
    r_2 = np.random.rand()
    # distance between needle centroid and nearest line, uniform on
    # [0, line_width/2]; the antithetic partner uses 1 - r_1
    d_1 = r_1*line_width/2
    d_2 = (1-r_1)*line_width/2
    # acute angle between needle and line, uniform on [0, pi/2]
    theta_1 = r_2*np.pi/2
    theta_2 = (1-r_2)*np.pi/2
    # the needle crosses a line iff d < (L/2)*sin(theta)
    x_1 = 1 if d_1 < needle_length/2*np.sin(theta_1) else 0
    x_2 = 1 if d_2 < needle_length/2*np.sin(theta_2) else 0
    # averaging the antithetic pair reduces estimator variance
    return (x_1+x_2)/2.
# set the random number generator seed to 0
np.random.seed(0)
# generate 850 samples
samples = [drop_needle() for i in range(850)]
# compute the lower and upper-bounds using a 95% confidence interval
confidence_level = 0.05
z_crit = stats.norm.ppf(1-confidence_level/2)
print('P(X) = {:.3f} +/- {:.3f} (95% CI)'.format(
np.average(samples),
z_crit*stats.sem(samples)
))
# compute the exact solution, as solved by calculus
solution = 2*needle_length/(line_width*np.pi)
# compute running statistics for mean and confidence interval
mean_estimate = np.array([np.average(samples[0:i]) for i in range(len(samples))])
confidence_int = z_crit*np.array([stats.sem(samples[0:i]) for i in range(len(samples))])
# create a plot to show the mean estimate with 95% confidence interval bounds
plt.figure()
plt.plot(range(len(samples)), mean_estimate,
'b', label='Mean Estimate')
plt.plot(range(len(samples)), mean_estimate-confidence_int,
'g', label='95% CI Lower Bound')
plt.plot(range(len(samples)), mean_estimate+confidence_int,
'r', label='95% CI Upper Bound')
plt.plot([0, len(samples)], [solution, solution],
'-k', label='Analytical Solution')
plt.xlabel('Sample')
plt.ylabel('Estimate of $P(x)$')
plt.legend(loc='best')
#%%
# transform the mean estimate to estimate pi using the solution form
pi_estimate = 2*needle_length/(line_width*mean_estimate)
pi_lower_bound = 2*needle_length/(line_width*(mean_estimate+confidence_int))
pi_upper_bound = 2*needle_length/(line_width*(mean_estimate-confidence_int))
print('pi = {:.3f} +/- {:.3f} (95% CI)'.format(
pi_estimate[-1],
pi_upper_bound[-1] - pi_estimate[-1]
))
# create a plot to show the pi estimate with 95% confidence interval bounds
plt.figure()
plt.plot(range(len(samples)), pi_estimate,
'b', label='Mean Estimate')
plt.plot(range(len(samples)), pi_lower_bound,
'g', label='95% CI Lower Bound')
plt.plot(range(len(samples)), pi_upper_bound,
'r', label='95% CI Upper Bound')
plt.plot([0, len(samples)], [np.pi, np.pi],
'-k', label='Analytical Solution')
plt.xlabel('Sample')
plt.ylabel('Estimate of $\pi$')
plt.legend(loc='best') |
from rest_framework import viewsets
# from models import
from serializers import *
class FileViewSet(viewsets.ModelViewSet):
    """CRUD API for File objects, filterable by generic relation
    (content_type/object_id)."""
    serializer_class = FileSerializer
    # permission_classes = [CustomPermission]
    # search_fields = ('name', 'description')
    model = File
    filter_fields = ('content_type', 'object_id')
    queryset = File.objects.all()
    # def get_queryset(self):
    #     return File.objects.all()
class NoteViewSet(viewsets.ModelViewSet):
    """CRUD API for Note objects; stamps the requesting user on create/update."""
    serializer_class = NoteSerializer
    # permission_classes = [CustomPermission]
    filter_fields = {'content_type':['exact'],'content':['icontains'], 'object_id':['exact']}
    model = Note
    queryset = Note.objects.all()
    # def get_queryset(self):
    #     return Note.objects.all()  # get_all_user_objects(self.request.user, ['view'], Experiment)
    def perform_create(self, serializer):
        # record who created the note
        serializer.save(created_by=self.request.user)
    def perform_update(self, serializer):
        # record who last modified the note
        serializer.save(modified_by=self.request.user)
class URLViewSet(viewsets.ModelViewSet):
    """CRUD API for URL objects, mirroring NoteViewSet's user stamping."""
    serializer_class = URLSerializer
    # permission_classes = [CustomPermission]
    filter_fields = ('content_type', 'object_id')
    model = URL
    queryset = URL.objects.all()
    def perform_create(self, serializer):
        # Bug fix: creation previously saved modified_by, leaving created_by
        # unset — now consistent with NoteViewSet.
        # NOTE(review): assumes the URL model has a created_by field like
        # Note — confirm against the models module.
        serializer.save(created_by=self.request.user)
    def perform_update(self, serializer):
        serializer.save(modified_by=self.request.user)
|
import random
import string
from django.db.models import Q
def get_group_members(message, user):
    """Return the ids of every member of the message's group except *user*."""
    others = message.group.members.filter(~Q(id=user.id))
    return [member.id for member in others]
def get_message_content(message):
    """Build the serializable payload dict for one chat message."""
    sender = message.sender
    return {
        'text': message.text,
        'attachments': message.attachments,
        'sender_id': message.sender_id,
        'sender_name': sender.full_name,
        'sender_image': sender.image,
        'timestamp': str(message.timestamp),
        'thread_id': message.group_id,
        'seen': False,
    }
def get_message_list(message_objs):
    """Serialize an iterable of messages via get_message_content."""
    return [get_message_content(message) for message in message_objs]
def get_thread_content(message, user):
    """Build the thread-preview payload for *message* as seen by *user*.

    For direct threads (type 0) the payload carries the chat partner's
    details; otherwise it carries the group name and member ids.
    """
    details = {
        'text': message.text,
        'attachments': message.attachments,
        'type': message.type,
        'timestamp': str(message.timestamp),
        'thread_id': message.group_id,
        'seen': False,
    }
    if message.type == 0:
        # direct chat: the only other member is the conversation partner
        partner = message.group.members.filter(~Q(id=user.id)).first()
        details.update({
            'partner_name': partner.full_name,
            'partner_image': partner.image,
            'partner_status': partner.is_present,
            'partner_id': partner.id,
        })
    else:
        details.update({
            'group_name': message.group.name,
            'members': get_group_members(message, user)
            # 'group_admin': message.group.admin.full_name,
        })
    return details
def get_rand_str(str_len):
    """Return a random string of uppercase ASCII letters and digits.

    Uses ``random.choices`` (sampling with replacement), so any length is
    supported; the previous ``random.sample`` raised ValueError for
    str_len > 36 and could never repeat a character.

    NOTE(review): ``random`` is not cryptographically secure — use the
    ``secrets`` module if these strings guard anything sensitive.
    """
    alphabet = string.ascii_uppercase + string.digits
    return ''.join(random.choices(alphabet, k=str_len))
|
import pytest
@pytest.fixture(name="flask_live_url")
def _flask_live_url(live_server):
    # Expose the live server's base URL under a friendlier fixture name.
    # NOTE(review): live_server is presumably pytest-flask's fixture — confirm.
    yield live_server.url()
@pytest.fixture()
def app():
    """Build a fresh application configured for testing, backed by an
    in-memory SQLite database so tests never touch real data."""
    from nexxera.nix.app import create_app
    config_updates = {
        "FLASK_TEST": True,
        "SQLALCHEMY_DATABASE_URI": "sqlite:///:memory:",
    }
    return create_app("test_nix", config_updates)
|
# -*- coding: utf-8 -*-
"""
Created on Fri Jun 17 17:09:52 2016
http://stackoverflow.com/questions/9401658/matplotlib-animating-a-scatter-plot
"""
import matplotlib.pyplot as plt
import numpy as np
import matplotlib.animation as animation
def main():
    """Animate a scatter plot by recoloring 10 random points each frame."""
    numframes = 100
    numpoints = 10
    # one row of per-point color values for each animation frame
    color_data = np.random.random((numframes, numpoints))
    x, y, c = np.random.random((3, numpoints))
    fig = plt.figure()
    scat = plt.scatter(x, y, c=c, s=100)
    # keep a reference: FuncAnimation stops if its object is garbage-collected
    ani = animation.FuncAnimation(fig, update_plot, frames=range(numframes),
                                  fargs=(color_data, scat))
    plt.show()
def update_plot(i, data, scat):
    """FuncAnimation callback: recolor the scatter for frame *i*."""
    frame_colors = data[i]
    scat.set_array(frame_colors)
    return (scat,)
main() |
"""Original source: https://github.com/utkuozbulak/pytorch-cnn-visualizations.
Created on Sat Nov 18 23:12:08 2017
@author: Utku Ozbulak - github.com/utkuozbulak
"""
import logomaker
import matplotlib.pyplot as plt
import numpy as np
import os
import pandas as pd
import torch
from torch.nn import Parameter
from torch.optim import Adam
def recreate_logo(im_as_var):
    """Create a logo from a optimized Variable.

    Applies a per-position softmax over the four nucleotide columns and
    renders the probability matrix as a logomaker sequence logo.
    Returns the matplotlib Figure.
    """
    df = pd.DataFrame(
        im_as_var[0].detach().numpy(), columns=["A", "G", "T", "C"]
    )
    # apply softmax to retrieve stochastic vectors
    df = df.apply(lambda x: np.exp(x) / np.sum(np.exp(x)), axis=1)
    fig, ax = plt.subplots(1, 1)
    nn_logo = logomaker.Logo(df, ax=ax)
    nn_logo.style_spines(visible=False)
    nn_logo.style_spines(spines=["left"], visible=True, bounds=[0, 1])
    nn_logo.draw()
    fig.tight_layout()
    return fig
class CNNLayerVisualization:
    """Produce an image that minimizes the loss of a filter on a conv layer."""
    def __init__(self, model, selected_layer, selected_filter):
        """Init with a torch neural network `model`."""
        self.model = model
        self.model.eval()
        self.selected_layer = selected_layer
        self.selected_filter = selected_filter
        self.conv_output = 0
        # Create the folder to export images if not exists
        if not os.path.exists("../generated"):
            os.makedirs("../generated")
    def visualise_layer1D(self, save=True):
        """Plot activations but just for one dimension (four pixels/lettes).

        Runs 99 Adam steps driving the mean activation of the selected
        filter upward (the loss is the negated mean), optionally saving a
        sequence logo every 5 iterations.  Returns the optimized Parameter.
        """
        # Generate a random image
        random_seq = np.uint8(np.random.uniform(0, 4, (100, 4)))
        # TODO: the Parameter here must be initialized with one dummy dimesion
        # emulating th batch size dimension
        var_seq = np.ndarray.astype(
            np.array([np.array(r) for r in random_seq]), "int64"
        )
        var_seq = Parameter(
            torch.LongTensor(var_seq[None, :, :]).float(), requires_grad=True
        )
        # size of resulting images
        plt.rcParams["figure.figsize"] = 16, 4
        # Define optimizer for the image
        optimizer = Adam([var_seq], lr=0.1, weight_decay=1e-6)
        for i in range(1, 100):
            optimizer.zero_grad()
            # Assign create image to a variable to move forward in the model
            x = var_seq
            # NOTE(review): model.modules() yields the root module first, so
            # layer(x) at index 0 runs the whole model — confirm that
            # selected_layer indexing accounts for this.
            for index, layer in enumerate(self.model.modules()):
                # Forward pass layer by layer
                x = layer(x)
                if index == self.selected_layer:
                    # Only need to forward until the selected layer is reached
                    # Now, x is the output of the selected layer
                    break
            self.conv_output = x[0, self.selected_filter]
            # We try to minimize the mean of the output of that specific filter
            loss = -torch.mean(self.conv_output)
            print(
                "Iteration:",
                str(i),
                "Loss:",
                "{0:.2f}".format(loss.data.numpy()),
            )
            # Backward
            loss.backward()
            # Update image
            optimizer.step()
            # Recreate image
            # Save image every 5 iterations
            if i % 5 == 0 and save:
                self.created_image = recreate_logo(var_seq)
                im_path = (
                    "../generated/layer_vis_l"
                    + str(self.selected_layer)
                    + "_f"
                    + str(self.selected_filter)
                    + "_iter"
                    + str(i)
                    + ".jpg"
                )
                self.created_image.savefig(im_path)
                plt.close()  # save memory
        return var_seq
if __name__ == "__main__":
    import sys
    sys.path.append("../models/")
    from conv_LSTM_onehot import convLSTM
    # visualize filter 1 of layer index 0
    cnn_layer = 0
    filter_pos = 1
    # one-hot nucleotide encoding
    nuc_to_ix = {
        "A": [1, 0, 0, 0],
        "G": [0, 1, 0, 0],
        "T": [0, 0, 1, 0],
        "C": [0, 0, 0, 1],
    }
    # NOTE(review): v[0] is 1 only for "A" and 0 for G/T/C, so this collapses
    # to {1: 'A', 0: 'C'} — probably the index of the 1 was intended; confirm.
    class_to_nuc = {v[0]: k for k, v in nuc_to_ix.items()}
    t = 100
    path_model = "../../data/models/convlstm_big.pt"
    pretrained_model = convLSTM(
        input_dim=4,
        out_channels=4,
        stride=5,
        hidden_dim=60,
        hidden_out=140,
        output_dim=2,
        t=t,
    ).cpu()
    # load weights trained elsewhere; map to CPU so no GPU is required
    pretrained_model.load_state_dict(
        torch.load(path_model, map_location=torch.device("cpu"))
    )
    # no-op assignment (likely left over from a removed .cuda() call)
    pretrained_model = pretrained_model
    layer_vis = CNNLayerVisualization(pretrained_model, cnn_layer, filter_pos)
    # Layer visualization with pytorch hooks
    opt_seq = layer_vis.visualise_layer1D().data.numpy()
|
from __future__ import annotations
from slay.entity.unit.base import Unit
class Peasant(Unit):
    """Peasant unit type."""
    # upkeep cost for this unit type; semantics are defined by Unit
    upkeep = 2
|
"""The views that takes care of authantication."""
from django.contrib.auth import authenticate, login
from django.utils.decorators import method_decorator
from django.views.generic.edit import CreateView
from buildservice.utils.decorators import anonymous_user_required
@method_decorator(anonymous_user_required, name='dispatch')  # pylint: disable=too-many-ancestors
class RegisterView(CreateView):
    """
    A view for registering a new user for the app.
    It'll register to the home view upon success.
    """
    def form_valid(self, form):
        """
        This method is called after the form is validated,
        meaning the user is created. Django doesn't authenticate
        him automatically, we have to do it.
        """
        response = super(RegisterView, self).form_valid(form)
        # re-authenticate with the raw password from the form so the session
        # backend receives a fully authenticated user object
        user = authenticate(
            username=self.object.username,
            password=form.cleaned_data['password1']
        )
        if user:  # means user is authenticated
            login(self.request, user)
        return response
|
from typing import List
from soda.sodacl.check_cfg import CheckCfg
class ColumnChecksCfg:
    """Container for the check configurations declared on one column."""
    def __init__(self, column_name: str):
        # name of the column these checks apply to
        self.column_name = column_name
        # populated incrementally via add_check_cfg
        self.check_cfgs: List[CheckCfg] = []
    def add_check_cfg(self, check_cfg: CheckCfg):
        """Append one check configuration for this column."""
        self.check_cfgs.append(check_cfg)
|
#!/usr/bin/env python3
import sys
import time
import numpy as np
from PyQt6.QtCore import QObject, QThread, pyqtSignal
from PyQt6.QtGui import QImage, QPixmap
from PyQt6.QtWidgets import (QApplication, QDialog, QLabel, QProgressBar,
QPushButton, QVBoxLayout, QWidget)
from trainscanner import pass1
def cv2toQImage(cv2image):
    """Convert a BGR OpenCV image to a QImage, swapping channels in place.

    WARNING: mutates *cv2image* (B and R channels are exchanged) — this is
    what the original docstring meant by "It breaks the original image".

    Args:
        cv2image: HxWx3 uint8 array in BGR order — assumed contiguous
            (bytesPerLine is computed as width*3); TODO confirm upstream.

    Returns:
        QImage viewing the (now RGB) pixel buffer.
    """
    height, width = cv2image.shape[0:2]
    # swap B and R in place; the previous zeros_like allocation was dead
    # code, immediately overwritten by the copy on the next line
    tmp = cv2image[:, :, 0].copy()
    cv2image[:, :, 0] = cv2image[:, :, 2]
    cv2image[:, :, 2] = tmp
    return QImage(cv2image.data, width, height, width*3, QImage.Format.Format_RGB888)
class Worker(QObject):
    """Runs trainscanner pass1 on a background thread, emitting Qt signals."""
    frameRendered = pyqtSignal(QImage)  # one signal per rendered frame
    finished = pyqtSignal()             # emitted when processing completes
    progress = pyqtSignal(int)          # percentage 0-100 during before()
    def __init__(self, argv):
        super(Worker, self).__init__()
        # cooperative cancellation flag; cleared by stop()
        self._isRunning = True
        self.pass1 = pass1.Pass1(argv=argv)
    def task(self):
        """Worker entry point; run the preprocessing then the frame loop."""
        # NOTE(review): this re-arms a previously stopped worker so task()
        # can run again — confirm that is intentional.
        if not self._isRunning:
            self._isRunning = True
        #self.pass1.before() is a generator.
        for num,den in self.pass1.before():
            if den:
                self.progress.emit(num*100//den)
        for img in self.pass1.iter():
            if not self._isRunning:
                break
            self.frameRendered.emit(cv2toQImage(img))
        self.pass1.after()
        self.finished.emit()
    def stop(self):
        # request cancellation; checked once per frame in task()
        self._isRunning = False
class MatcherUI(QDialog):
    """Dialog showing pass1 preview frames with a progress bar and Stop button."""
    # emitted once to kick off Worker.task on the worker thread
    thread_invoker = pyqtSignal()
    def __init__(self, argv, terminate=False):
        super(MatcherUI, self).__init__()
        self.btnStop = QPushButton('Stop')
        self.image_pane = QLabel()
        self.progress = QProgressBar(self)
        self.layout = QVBoxLayout()
        self.layout.addWidget(self.btnStop)
        self.layout.addWidget(self.progress)
        self.layout.addWidget(self.image_pane)
        self.setLayout(self.layout)
        # run the worker on its own QThread so the UI stays responsive
        self.thread = QThread()
        self.thread.start()
        self.worker = Worker(argv)
        self.worker.moveToThread(self.thread)
        self.thread_invoker.connect(self.worker.task)
        self.thread_invoker.emit()
        self.worker.frameRendered.connect(self.updatePixmap)
        self.worker.finished.connect(self.finishIt)
        self.worker.progress.connect(self.progress.setValue)
        # when True, pressing Stop exits the whole process
        self.terminate = terminate
        self.btnStop.clicked.connect(lambda: self.worker.stop())
        self.btnStop.clicked.connect(self.terminateIt)
        self.terminated = False
    def updatePixmap(self, image):
        #it is called only when the pixmap is really updated by the thread.
        #resize image in advance.
        #w,h = image.width(), image.height()
        #scaled_image = image.scaled(int(w*self.preview_ratio), int(h*self.preview_ratio))
        pixmap = QPixmap.fromImage(image)
        self.image_pane.setPixmap(pixmap)
        #is it ok here?
        self.update()
    def terminateIt(self):
        self.close()
        if self.terminate:
            sys.exit(1)  #terminated
        self.terminated = True
    def finishIt(self):
        # worker completed normally; just close the dialog
        self.close()
    def closeEvent(self, event):
        # ensure the worker and its thread shut down with the dialog
        self.stop_thread()
    def stop_thread(self):
        self.worker.stop()
        self.thread.quit()
        self.thread.wait()
def main():
    """Launch the matcher preview dialog as a standalone application."""
    app = QApplication(sys.argv)
    match = MatcherUI(sys.argv, True)
    match.setWindowTitle("Matcher Preview")
    match.show()
    match.raise_()
    # Bug fix: PyQt6 removed QApplication.exec_(); exec() is the correct
    # call (exec_ raises AttributeError under the PyQt6 imports above).
    sys.exit(app.exec())
if __name__ == '__main__':
main()
|
# nano pythonS1script1
# ----------
# Push a batch of VLAN definitions to a Cisco device over telnet.
# NOTE(review): the original used C-style '//' comments, Python 2
# raw_input/print, and a missing '+' in a string concatenation — none of
# which parse as Python. Rewritten as valid Python 3; telnetlib requires
# bytes, so all reads/writes are encoded.
import getpass
import sys
import telnetlib

HOST = "192.168.122.71"
user = input("Enter your telnet username: ")
password = getpass.getpass()

tn = telnetlib.Telnet(HOST)
tn.read_until(b"Username: ")  # wait for the username prompt
tn.write(user.encode("ascii") + b"\n")
if password:
    tn.read_until(b"Password: ")
    tn.write(password.encode("ascii") + b"\n")
tn.write(b"configure terminal\n")
for n in range(2, 10):  # originally intended range (2, 21)
    # Bug fix: 'vlan' needs a space before the number, and the name line
    # was missing the '+' between str(n) and "\n" (a syntax error).
    tn.write(b"vlan " + str(n).encode("ascii") + b"\n")
    tn.write(b"name Python_Vlan_" + str(n).encode("ascii") + b"\n")
    tn.write(b"end\n")
tn.write(b"exit\n")
print(tn.read_all().decode("ascii"))  # display what the device echoed back
# ----------
# Manual test procedure:
#   R1# debug telnet        # watch the script telnet in on the device side
#   # python pythonS1script1
#   pv
#   password: cisco
#   sh ip int brief
#   sh run
|
# https://leetcode.com/problems/climbing-stairs/
from typing import Dict
class Solution:
    """LeetCode 70 - Climbing Stairs (top-down memoized recursion)."""

    def climbStairs(self, n: int) -> int:
        """Number of distinct ways to climb n steps taking 1 or 2 at a time."""
        return self.climbStairsWithCache(n, {1: 1, 2: 2})

    def climbStairsWithCache(self, n: int, cache: Dict[int, int]) -> int:
        """Memoized recurrence: ways(n) = ways(n-1) + ways(n-2)."""
        if n not in cache:
            cache[n] = (self.climbStairsWithCache(n - 1, cache)
                        + self.climbStairsWithCache(n - 2, cache))
        return cache[n]
|
"""In this experiment instead of training a CTRNN we construct it directly.
The goal is to demonstrate, that CTRNN can solve algorithmical problems very efficiently"""
from tools.experiment import Experiment
from brain_visualizer.brain_visualizer import BrainVisualizerHandler
from tools.configurations import ExperimentCfg, ContinuousTimeRNNCfg, StandardEpisodeRunnerCfg
from tools.helper import config_from_file
import os
import threading
from brains.continuous_time_rnn import ContinuousTimeRNN
from attr import s
import numpy as np
from tools.helper import transform
cfg_path = os.path.join('configurations', 'reverse_fixed.json')
cfg_exp = config_from_file(cfg_path)
experiment = Experiment(configuration=cfg_exp,
result_path="",
from_checkpoint=None)
@s(auto_attribs=True, frozen=True, slots=True)
class BrainParam:
    """Immutable bundle of CTRNN parameter arrays.

    NOTE(review): the exact role of V/W/T is defined by ContinuousTimeRNN's
    genome layout (see param_to_genom's concatenation order) — confirm there.
    """
    V: np.ndarray
    W: np.ndarray
    T: np.ndarray
    y0: np.ndarray
    clip_min: np.ndarray
    clip_max: np.ndarray
def param_to_genom(param):
    """Flatten a BrainParam into a single genome vector (fixed field order)."""
    pieces = (param.V, param.W, param.T, param.y0, param.clip_min, param.clip_max)
    return np.concatenate([piece.flatten() for piece in pieces])
# hand-constructed network: identity input map V, decaying self-connections
# W, and output map T flattened column-major to match the genome layout
param = BrainParam(
    V=np.array([[1, 0, 0, 0],
                [0, 1, 0, 0],
                [0, 0, 1, 0],
                [0, 0, 0, 1]]),
    W=np.array([[-1, 0, 0, 0],
                [0, -1, 0, 0],
                [0, 0, 0, 0],
                [0, 0, 0, -1]]),
    T=np.array([[0, 0, 1, 0],
                [0, 0, -1, 1],
                [0, 0, -1, 1],
                [2, 2, 2, -3],
                [1, 0, 0, 0],
                [0, 1, 0, 0]]).flatten(order='F'),
    y0=np.array([]), clip_min=np.array([]), clip_max=np.array([]))
ind = param_to_genom(param)
env = experiment.env_template
for i in range(1):
    brain = ContinuousTimeRNN(input_space=experiment.input_space, output_space=experiment.output_space, individual=ind,
                              config=cfg_exp.brain)
    ob = env.reset()
    transformed_ob = transform(ob, coming_from_space=experiment.input_space, is_brain_input=True)
    # fixed input sequence; the target is derived from it by the environment
    env.unwrapped.input_data = [0, 1, 0, 1, 1]
    env.unwrapped.target = env.unwrapped.target_from_input_data(env.unwrapped.input_data)
    env.render()
    done = False
    fitness_current = 0
    while not done:
        brain_output = brain.step(transformed_ob)
        action = transform(brain_output, coming_from_space=experiment.output_space, is_brain_input=False)
        print("act: " + str(action))
        ob, rew, done, info = env.step(action)
        transformed_ob = transform(ob, coming_from_space=experiment.input_space, is_brain_input=True)
        fitness_current += rew
        # a negative reward indicates the network emitted a wrong symbol
        if rew < 0:
            print("error")
        env.render()
    env.render()
    print('score: ' + str(fitness_current))
# todo: use brain_vis to visualize this
t = threading.Thread(target=experiment.visualize,
                     args=[[ind], BrainVisualizerHandler(), 2, False,
                           False])
# t.start()
|
d = {'a':10, 'b':4, 'e':22, 'd':11, 'c':5}
tmp = list()
# append each (value, key) tuple from the dict into the empty list
for k,v in d.items() :
    tmp.append( (v,k) )
# sort by value (first tuple element), largest first
tmp = sorted(tmp, reverse=True)
print(tmp)
# result = [(22, 'e'), (11, 'd'), (10, 'a'), (5, 'c'), (4, 'b')]
for a,b in tmp :
    print(a,b)
# result:
# 22 e
# 11 d
# 10 a
# 5 c
# 4 b
|
from __future__ import print_function
import pytz
import dateutil.parser
import httplib2
from oauth2client import tools
from oauth2client import client
import datetime
import logging
from googleapiclient.discovery import build
from oauth2client.file import Storage
import Settings
import os
try:
import argparse
flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
flags = None
log = logging.getLogger('root')
# If modifying these scopes, delete your previously saved credentials
# at ~/.credentials/calendar-python-quickstart.json
SCOPES = 'https://www.googleapis.com/auth/calendar.readonly'
CLIENT_SECRET_FILE = 'client_secret.json'
APPLICATION_NAME = 'Smart-Alarm'
class AlarmGatherer:
    """Fetch upcoming Google Calendar events to drive alarm scheduling."""

    def __init__(self):
        self.settings = Settings.Settings()
        self.FLOW = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
        # cached OAuth credentials live next to the script
        self.storage = Storage('calendar.dat')
        self.credentials = self.storage.get()
        if not self.checkCredentials():
            log.error("GCal credentials have expired")
            log.warn("Remove calendar.dat and run 'python AlarmGatherer.py' to fix")
            # self.service is intentionally left unset; callers must
            # re-authorize via generateAuth() first
            return
        http = httplib2.Http()
        http = self.credentials.authorize(http)
        self.service = build('calendar', 'v3', http=http)

    def checkCredentials(self):
        """Return True when stored credentials exist and are still valid."""
        return self.credentials is not None and not self.credentials.invalid

    def generateAuth(self):
        """Run the interactive OAuth flow and store fresh credentials."""
        self.credentials = tools.run_flow(self.FLOW, self.storage)

    def getNextEvent(self, today=False):
        """Return the next calendar event (a dict from the Calendar API).

        Args:
            today: when False, skip the rest of today and search from 10am —
                late enough that tonight's night shift is excluded but
                tomorrow's morning shift is found.

        Raises:
            Exception: if credentials are invalid or no upcoming event exists.
        """
        log.debug("Fetching details of next event")
        if not self.checkCredentials():
            log.error("GCal credentials have expired")
            log.warn("Remove calendar.dat and run 'python AlarmGatherer.py' to fix")
            raise Exception("GCal credentials not authorized")
        time = datetime.datetime.now()
        if not today:
            # We want to find events tomorrow, rather than another one today
            log.debug("Skipping events from today")
            time = time.replace(hour=10, minute=0, second=0, microsecond=0)
        result = self.service.events().list(
            calendarId='primary',
            timeMin="%sZ" % (time.isoformat()),
            maxResults=1,
            singleEvents=True,
            orderBy='startTime'
        ).execute()
        events = result.get('items', [])
        if not events:
            # previously an opaque IndexError; fail with a clear message
            raise Exception("No upcoming event found in calendar")
        return events[0]

    def getNextEventTime(self, includeToday=False):
        """Return the start of the next event as a timezone-aware datetime."""
        log.debug("Fetching next event time (including today=%s)" % (includeToday))
        nextEvent = self.getNextEvent(today=includeToday)
        start = dateutil.parser.parse(nextEvent['start']['dateTime'])
        return start

    def getNextEventLocation(self, includeToday=False):
        """Return the next event's location string, or None when unset."""
        log.debug("Fetching next event location (including today=%s)" % (includeToday))
        nextEvent = self.getNextEvent(today=includeToday)
        # Bug fix: the Calendar API omits 'location' entirely when an event
        # has none, so nextEvent['location'] raised KeyError; .get handles it.
        return nextEvent.get('location') or None

    def getDefaultAlarmTime(self):
        """Return tomorrow at the configured default wake time (HHMM)."""
        defaultTime = '0700'
        defaultHour = int(defaultTime[:2])
        defaultMin = int(defaultTime[2:])
        alarm = datetime.datetime.now(pytz.timezone('Africa/Johannesburg'))
        alarm += datetime.timedelta(days=1)  # Move to tomorrow
        alarm = alarm.replace(hour=defaultHour, minute=defaultMin, second=0, microsecond=0)
        return alarm
if __name__ == '__main__':
    print("Running credential check")
    a = AlarmGatherer()
    try:
        if not a.checkCredentials():
            raise Exception("Credential check failed")
    except Exception:
        # narrowed from a bare except: still regenerates auth on any check
        # failure, but no longer swallows KeyboardInterrupt/SystemExit
        print("Credentials not correct, please generate new code")
        a.generateAuth()
        a = AlarmGatherer()
    print(a.getNextEventTime())
    print(a.getNextEventLocation())
|
from parsec import *
from datetime import datetime
from clize import run
from copy import deepcopy
import requests
import json
from bson.json_util import dumps, CANONICAL_JSON_OPTIONS
datasource = 'https://discover.data.vic.gov.au/api/3/action/datastore_search?resource_id=afb52611-6061-4a2b-9110-74c920bede77&limit=10000'
p_time = regex(r'[0-9]+(:[0-9]+)?(am|pm)')
p_from_to_time = sepBy(p_time, string(' - '))
places = ['Coles', 'Woolworth', 'Chemist']
def chain(start, funcs):
    """Apply *funcs* left to right: funcs[-1](...funcs[0](start))."""
    value = start
    for step in funcs:
        value = step(value)
    return value
def get_data(url):
    """Download a CKAN datastore_search response and return its record list."""
    # ascii/'ignore' strips non-ASCII bytes before JSON parsing
    res = requests.get(url).text.encode('ascii', 'ignore').decode()
    res = json.loads(res)['result']['records']
    return res
def timeparse(ff):
    """Convert '5pm' / '5:45pm' style strings to 24-hour 'HH:MM'."""
    # insert ':00' for minute-less inputs so one strptime format fits all
    normalized = ff if ':' in ff else f'{ff[:-2]}:00{ff[-2:]}'
    return datetime.strptime(normalized, '%I:%M%p').strftime('%H:%M')
def map_datetime(doc):
    """Parse 'Exposure_time' (e.g. "5:45pm - 6:35pm") into datetime fields.

    Adds start_datetime/end_datetime/start_time/end_time keys in place and
    returns the doc; documents without a two-part time range are returned
    unchanged.
    """
    time_parsed = p_from_to_time.parse(doc['Exposure_time'])
    if len(time_parsed) < 2:
        return doc
    time_start = timeparse(time_parsed[0])
    time_end = timeparse(time_parsed[1])
    datetime_start = datetime.strptime(f"{doc['Exposure_date']} {time_start}", '%d/%m/%Y %H:%M')
    datetime_end = datetime.strptime(f"{doc['Exposure_date']} {time_end}", '%d/%m/%Y %H:%M')
    doc['start_datetime'] = datetime_start
    doc['end_datetime'] = datetime_end
    doc['start_time'] = time_start
    doc['end_time'] = time_end
    return doc
def map_place(doc):
    """Tag *doc* in place with a normalized place name from its site title."""
    doc['place'] = next(
        (known for known in places if doc['Site_title'].startswith(known)),
        'others',
    )
    return doc
def main():
    """Fetch exposure-site records, enrich each, and dump canonical JSON."""
    docs = get_data(datasource)
    for doc in docs:
        d = chain(doc, [map_datetime, map_place])
        print(dumps(d, json_options=CANONICAL_JSON_OPTIONS))
if __name__ == '__main__':
run(main)
example_entry = {
"Added_date": "07/08/2021",
"Added_date_dtm": "2021-08-07",
"Added_time": "15:00:00",
"Advice_instruction": "Anyone who has visited this location during these times should urgently get tested, then isolate until confirmation of a negative result. Continue to monitor for symptoms, get tested again if symptoms appear.",
"Advice_title": "Tier 2 - Get tested urgently and isolate until you have a negative result",
"Exposure_date": "04/08/2021",
"Exposure_date_dtm": "2021-08-04",
"Exposure_time": "5:45pm - 6:35pm",
"Exposure_time_end_24": "18:35:00",
"Exposure_time_start_24": "17:45:00",
"Notes": "Case attended venue",
"Site_postcode": "3030",
"Site_state": "VIC",
"Site_streetaddress": "Corner Cherry Street & Watton Street ",
"Site_title": "Woolworths Werribee",
"Suburb": "Werribee",
"_id": 51
}
def test_map_datetime():
    # Exposure_date + parsed times should combine into full datetimes
    res = map_datetime(example_entry)
    assert res['start_datetime'] == datetime(2021, 8, 4, 17, 45)
    assert res['end_datetime'] == datetime(2021, 8, 4, 18, 35)
def test_map_place():
    # 'Woolworths Werribee' starts with 'Woolworth', so place is normalized
    res = map_place(example_entry)
    assert res['place'] == 'Woolworth'
|
MOVE_SPEED_CHASE = 15
MOVE_SPEED_RUN = 10
TURN_SPEED = 6
TIME_REFRESH = 0.05
TIME_DURATION = 30
# I need a pull request
FAKE_CONSTANT = 20
# Backslashes are escaped explicitly: the previous "\ " relied on an invalid
# escape sequence (SyntaxWarning on modern Python) to yield a literal
# backslash; the resulting string value is unchanged.
RULE_LINE = "Space - Start\nW \\ I - Up\nS \\ K - Down\nA \\ J - Left\nD \\ L - Right "
|
"""
Ejercicio 1: Hacer un programa que tenga una lista de 8 numeros enteros. Y que haga:
-Recorrer la lista y mostrarla.
-Hacer una funcion que recorra listas de strings y devuelva un string
-Ordenarla y mostrarla
-Mostrar su longitud.
-Buscar algun elemento (Que el usuario pida por teclado).
"""
# Define the list
lista_numeros = [1, 987, 69, 5, 3, 77, 14, 44]

# Part 1: iterate over the list and print each element
print('------- Punto 1: Recorrer y mostrar -------')
for numero in lista_numeros:
    print(numero)

# Part 2: same, but via a function returning one string
print('\n------- Punto 2: Recorrer y mostrar (Con funcion) -------')
def recorrer_lista(lista):
    """Return the list items joined into a single comma-separated string."""
    return ", ".join(str(numero) for numero in lista)
print(recorrer_lista(lista_numeros))

# Part 3: sort and print
print('\n------- Punto 3: Ordenar y mostrar -------')
lista_numeros.sort()
print(lista_numeros)

# Part 4: print the length
print('\n------- Punto 4: Mostrar longitud -------')
print(f"La longitud de la lista es de {len(lista_numeros)} elementos")

# Part 5: search for an element entered by the user
# (header previously mislabelled "Punto 4")
print('\n------- Punto 5: Buscar elemento indicado por usuario -------')
try:
    numero_a_buscar = int(input("Ingrese numero a buscar: "))
    # re-prompt until a positive number is entered; the original isinstance
    # check was always True after a successful int() conversion
    while numero_a_buscar <= 0:
        numero_a_buscar = int(input("Ingrese numero a buscar: "))
    print(f"Has introducido el {numero_a_buscar}")
    busqueda = lista_numeros.index(numero_a_buscar)
    print(f"El número {numero_a_buscar} se encuentra en la lista en el indice {busqueda}")
except ValueError:
    # int() on bad input and .index() on a missing number both raise
    # ValueError; the previous bare except hid KeyboardInterrupt too
    print("Ha ocurrido un error!!")
import numpy as np
if __name__ == "__main__":
    StartTime = '2016-07-26 00:00:00'
    # setdata_cnn.GetOracleDataSample(109.0, 25.0, '2016-07-26 00:00:00', '2016-07-31 00:00:00', 30, 5, 20, 0.5)
    predictlabels = np.loadtxt("data_cnn/" + StartTime + "pred_labels.txt")
    samplelabels = np.loadtxt("data_cnn/" + StartTime + "sample_labels.txt")
    # scale labels by 20 and round; np.where(True, x, 0) was a no-op wrapper
    # around the rounding and has been removed
    prear1 = np.round(predictlabels * 20)
    samplearr = np.round(samplelabels * 20)
    np.savetxt("data_cnn/" + StartTime + "round_pred_labels", prear1, fmt='%d')
    np.savetxt("data_cnn/" + StartTime + "round_sample_labels", samplearr, fmt='%d')
import pymysql
def data_stu_Sclass_update(Sno, Sclass):
    """Update the class (Sclass) of student *Sno* in the Student table.

    Returns:
        int: 0 on success, 1 on failure (transaction rolled back).
    """
    stat = 0
    conn = pymysql.connect(host='192.168.123.209', user='root', passwd='123456',
                           db='kaoqinxitong', charset='utf8')
    try:
        c = conn.cursor()
        # Security fix: parameterized query — the original built SQL by
        # string concatenation, which is vulnerable to SQL injection.
        c.execute("UPDATE Student SET Sclass = %s WHERE Sno = %s", (Sclass, Sno))
        conn.commit()
    except Exception:
        # narrowed from a bare except; any DB error rolls back and flags failure
        conn.rollback()
        stat = 1
    finally:
        conn.close()
    return 1 if stat else 0
def data_stu_Sname_update(Sno, Sname):
    """Update the name (Sname) of student *Sno* in the Student table.

    Returns:
        int: 0 on success, 1 on failure (transaction rolled back).
    """
    stat = 0
    conn = pymysql.connect(host='192.168.123.209', user='root', passwd='123456',
                           db='kaoqinxitong', charset='utf8')
    try:
        c = conn.cursor()
        # Security fix: parameterized query — the original built SQL by
        # string concatenation, which is vulnerable to SQL injection.
        c.execute("UPDATE Student SET Sname = %s WHERE Sno = %s", (Sname, Sno))
        conn.commit()
    except Exception:
        # narrowed from a bare except; any DB error rolls back and flags failure
        conn.rollback()
        stat = 1
    finally:
        conn.close()
    return 1 if stat else 0
# data_stu_Sname_update("0914150225","于乐乐") |
import pandas as pd
from scipy.stats import ttest_ind
# Two-sample t-test comparing the 'values' of cat1 vs cat2.
data = {'Category': ['cat2','cat1','cat2','cat1','cat2','cat1','cat2','cat1','cat1','cat1','cat2'],
        'values': [1,2,3,1,2,3,1,2,3,5,1]}
my_data = pd.DataFrame(data)
print(f'My_Data:\n{my_data}')
print('*************************************************')
my_data_unique = my_data['Category'].unique()
print(f'My Data Unique Values:{my_data_unique}')
print('*************************************************')
my_data_mean = my_data.groupby('Category').mean()
print(f'My_Data_Mean:\n{my_data_mean}')
cat1 = my_data[my_data['Category']=='cat1']
cat2 = my_data[my_data['Category']=='cat2']
# independent two-sample t-test (scipy's ttest_ind, equal variances by default)
t_stat, p_value = ttest_ind(cat1['values'], cat2['values'])
print('*************************************************')
print(f'T-Statistic:{t_stat}')
print('*************************************************')
print(f'p-value:{p_value}')
if p_value < 0.05: # alpha value is 0.05 or 5%
    print(" we are rejecting null hypothesis")
else:
    # NOTE(review): statistically this is "fail to reject", not "accept"
    print("we are accepting null hypothesis")
from Testing import ZopeTestCase
from Products.Extropy.tests import ExtropyTrackingTestCase
from Products.CMFPlone.utils import _createObjectByType
from DateTime import DateTime
from Products.Extropy.browser.managementreports import WeeklyReport
class Dummyhours:
    """Stand-in for a worked-hours record used by report-view tests."""

    def __init__(self, hours):
        # mirror the attribute the reports read from real hour records
        self.workedHours = hours
class TestReportViews(ExtropyTrackingTestCase.ExtropyTrackingTestCase):
    """Integration tests for the WeeklyReport browser view."""
    def afterSetUp(self):
        self.tool = self.portal.extropy_timetracker_tool
        self.request = self.app.REQUEST
    def testInstantiateView(self):
        # constructing the view must not raise
        view = WeeklyReport(self.portal, self.request)
    def testGettingHours(self):
        view = WeeklyReport(self.portal, self.request)
        self.folder.invokeFactory('Folder', 'extropy')
        folder = self.folder.extropy
        _createObjectByType('ExtropyHourGlass',folder, 'hourglass')
        # three hour records dated a few days back; all should be reported
        folder.hourglass.invokeFactory('ExtropyHours','test1', startDate=DateTime()-3.1, endDate=DateTime()-3)
        folder.hourglass.invokeFactory('ExtropyHours','test2', startDate=DateTime()-3.1, endDate=DateTime()-3)
        folder.hourglass.invokeFactory('ExtropyHours','test3', startDate=DateTime()-3.2, endDate=DateTime()-3)
        self.assertEqual(len(view.getHours()),3)
def test_suite():
    """Assemble the unittest suite expected by Zope's test runner."""
    from unittest import TestSuite, makeSuite
    suite = TestSuite()
    suite.addTest(makeSuite(TestReportViews))
    return suite
|
import unittest
import import_ipynb
import pandas as pd
import pandas.testing as pd_testing
class Test(unittest.TestCase):
    """Checks results computed by the Calculating_Descriptive_Statistics notebook."""
    def setUp(self):
        # importing the notebook (via import_ipynb) executes it
        import Calculating_Descriptive_Statistics
        self.exercise = Calculating_Descriptive_Statistics
        self.games = self.exercise.games
    def test_proportion_of_4_5(self):
        # covers points 1 and 5
        boolean_series = self.exercise.games['average_user_rating'] == 4.5
        # mean of a boolean series is the proportion of True values
        true_result = boolean_series.mean()
        user_result = self.exercise.proportion_of_ratings_4_5
        self.assertAlmostEqual(user_result, true_result)
# Run the test suite when this file is executed directly.
if __name__ == '__main__':
    unittest.main()
|
#!/usr/bin/env python
"""Concatenate fasta records in multiple fasta files
"""
import argparse
import logging
from Bio import SeqIO
from collections import defaultdict
__author__ = "Matthew Whiteside"
__copyright__ = "Copyright 2015, Public Health Agency of Canada"
__license__ = "APL"
__version__ = "2.0"
__maintainer__ = "Matthew Whiteside"
__email__ = "matthew.whiteside@phac-aspc.gc.ca"
logger = None
if __name__ == "__main__":
    """Concatenate fasta entries in multiple fasta files
    """
    logging.basicConfig(level=logging.DEBUG)
    logger = logging.getLogger('data.stx.translate')
    # Parse command-line args
    parser = argparse.ArgumentParser()
    parser.add_argument('output', help='Output fasta file')
    parser.add_argument('input', nargs='*', help='Input fasta files')
    options = parser.parse_args()
    # Group sequence strings by full record description, so records that
    # share a description across input files are concatenated together.
    seqs = defaultdict(list)
    for f in options.input:
        for rec in SeqIO.parse(f, 'fasta'):
            seqs[rec.description].append(str(rec.seq))
    # Write one concatenated record per unique description.
    # FIX: dict.iteritems() was Python-2-only and raises AttributeError on
    # Python 3; items() behaves identically here on both versions.
    with open(options.output, 'w') as outfh:
        for header, parts in seqs.items():
            outfh.write('>{}\n{}\n'.format(header, ''.join(parts)))
|
import urllib2, socket, getproxylist, json
# Give slow proxies up to 3 minutes before socket operations time out.
socket.setdefaulttimeout(180)
# NOTE(review): on Python 2, input() eval()s whatever is typed; int(raw_input())
# would be safer and guarantee an integer — confirm intended.
nUsuarios = input("Cuantos usuario deseas crear? ")
def is_bad_proxy(pip):
    """Probe the HTTP proxy ``pip`` ("ip:port") by fetching a test page.

    Returns 0 (falsy) when the fetch succeeds, the HTTP status code on an
    HTTPError, or 1 on any other failure — so a truthy result means "bad".
    """
    try:
        # Route all urllib2 traffic through the candidate proxy.
        proxy_handler = urllib2.ProxyHandler({'http': pip})
        opener = urllib2.build_opener(proxy_handler)
        opener.addheaders = [('User-agent', 'Mozilla/5.0')]
        # NOTE: this installs the opener process-wide, not just for this call.
        urllib2.install_opener(opener)
        req=urllib2.Request('http://www.gnu.org') # The url can be refused after a time
        sock=urllib2.urlopen(req)
    except urllib2.HTTPError, e:
        print 'Error code: ', e.code
        return e.code
    except Exception, detail:
        print "ERROR:", detail
        return 1
    return 0
#my program
#for item in proxyList:
# Evaluate one candidate proxy per iteration, nUsuarios iterations total.
i = 0
while(i<nUsuarios):
    getproxylist.getProxyList(nUsuarios*5)# Fetch a fresh proxy list
    ip = getproxylist.getIp()# Pull out the ip
    port = getproxylist.getPort()# ...and the port
    proxyip= ip+":"+port #ip and port to evaluate
    print("El proxy a evaluar: "+proxyip)
    if is_bad_proxy(proxyip):
        print "Bad Proxy", proxyip
    else:
        print proxyip, "is working"
    i+=1
|
from time import sleep
from page.page_in import PageIn
from tools.get_driver import GetDriver
import page
from tools.get_log import GetLog
# Module-level logger shared by every test in this module.
log = GetLog.get_logger()
class TestMisAudit:
    """MIS-side article-audit flow: log in once, audit an article, verify it."""

    # One-time setup: build the driver and log in before any test runs.
    def setup_class(self):
        # Driver pointed at the MIS entry url.
        driver = GetDriver.get_driver(page.url_mis)
        # Unified page-object entry point.
        self.page_in = PageIn(driver)
        # Perform a successful MIS login (method added on PageMisLogin).
        self.page_in.page_get_PageMisLogin().page_mis_login_success()
        # Page object for the audit screen.
        self.audit = self.page_in.page_get_PageMisAudit()

    # One-time teardown: pause briefly, then release the driver.
    def teardown_class(self):
        sleep(5)
        # Close the driver.
        GetDriver.quit_driver()

    # Article-audit test case.
    def test_article_audit(self, title=page.article_title, channel=page.article_channel):
        # Drive the audit business flow for the given article.
        self.audit.page_mis_audit(title, channel)
        try:
            # FIX: assert success for the article that was actually audited.
            # The original asserted a hard-coded title/channel
            # ("bj-test17-004" / "数据库") that could diverge from the inputs.
            assert self.audit.page_assert_success(title=title, channel=channel)
        except Exception as e:
            # Log the failure...
            log.error(e)
            # ...capture a screenshot for debugging...
            self.audit.base_get_img()
            # ...and re-raise so pytest records the failure.
            raise
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.