index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
998,400 | 76995da8a2afef9be7423b16d89f971eff1bd9a5 | import os
from typing import NamedTuple, Optional
from jsonschema import validate
# ------------------------------
# Fail Faster
# ------------------------------
_CONFIG_SCHEMA = {
"type": "object",
"properties": {
"BATCH_ACCOUNT_NAME": {"type": "string"},
"BATCH_ACCOUNT_KEY": {"type": "string"},
"BATCH_ACCOUNT_ENDPOINT": {"type": "string"},
"STORAGE_ACCOUNT_NAME": {"type": "string"},
"STORAGE_ACCOUNT_KEY": {"type": "string"},
"STORAGE_ACCOUNT_CONNECTION_STRING": {"type": "string"},
"STORAGE_ACCESS_DURATION_HRS": {"type": "number", "minimum": 0, "default": 24,},
"REGISTRY_SERVER": {"type": "string"},
"REGISTRY_USERNAME": {"type": "string"},
"REGISTRY_PASSWORD": {"type": "string"},
"POOL_ID": {"type": "string"},
"POOL_NODE_COUNT": {"type": "number", "minimum": 0},
"POOL_LOW_PRIORITY_NODE_COUNT": {"type": "number", "minimum": 0},
"POOL_VM_SIZE": {"type": "string"},
"JOB_ID": {"type": "string"},
"DELETE_POOL_WHEN_DONE": {"type": "boolean"},
"DELETE_JOB_WHEN_DONE": {"type": "boolean"},
"DELETE_CONTAINER_WHEN_DONE": {"type": "boolean"},
"BLOB_CONTAINER_NAME": {
"type": "string",
"pattern": "^[a-z0-9](-?[a-z0-9]+)$",
"maxLength": 63,
"minLength": 3,
},
"BATCH_DIRECTORY": {"type": "string"},
"DOCKER_IMAGE": {"type": "string"},
},
"required": [
"POOL_ID",
"JOB_ID",
"BLOB_CONTAINER_NAME",
"BATCH_DIRECTORY",
"DOCKER_IMAGE",
"BATCH_ACCOUNT_NAME",
"BATCH_ACCOUNT_KEY",
"BATCH_ACCOUNT_ENDPOINT",
"STORAGE_ACCOUNT_KEY",
"STORAGE_ACCOUNT_CONNECTION_STRING",
"STORAGE_ACCESS_DURATION_HRS",
],
"dependencies": {
"REGISTRY_USERNAME": ["REGISTRY_SERVER", "REGISTRY_PASSWORD"],
"REGISTRY_PASSWORD": ["REGISTRY_SERVER", "REGISTRY_USERNAME"],
"REGISTRY_SERVER": ["REGISTRY_USERNAME", "REGISTRY_PASSWORD"],
}
# to do: missing required properties
}
class _BatchConfig(NamedTuple):
"""
A convenience class for typing the config object
"""
# pylint: disable=too-few-public-methods
POOL_ID: str
JOB_ID: str
BLOB_CONTAINER_NAME: str
BATCH_DIRECTORY: str
DOCKER_IMAGE: str
POOL_VM_SIZE: Optional[str]
POOL_NODE_COUNT: Optional[int] = 0
POOL_LOW_PRIORITY_NODE_COUNT: Optional[int] = 0
DELETE_POOL_WHEN_DONE: bool = False
DELETE_JOB_WHEN_DONE: bool = False
DELETE_CONTAINER_WHEN_DONE: bool = False
BATCH_ACCOUNT_NAME: Optional[str] = None
BATCH_ACCOUNT_KEY: Optional[str] = None
BATCH_ACCOUNT_ENDPOINT: Optional[str] = None
STORAGE_ACCOUNT_KEY: Optional[str] = None
STORAGE_ACCOUNT_CONNECTION_STRING: Optional[str] = None
STORAGE_ACCESS_DURATION_HRS: int = 24
REGISTRY_SERVER: Optional[str] = None
REGISTRY_USERNAME: Optional[str] = None
REGISTRY_PASSWORD: Optional[str] = None
COMMAND_LINE: Optional[str] = None
@property
def clean(self):
"""
get the attributes from this object which don't contain permissions
"""
out = {}
for k in _clean_keys:
try:
v = getattr(self, k)
except AttributeError:
pass
else:
out[k] = v
return out
@property
def BATCH_ACCOUNT_URL(self):
return "https://{}".format(self.BATCH_ACCOUNT_ENDPOINT)
# Configuration fields that are safe to expose in logs/output (no account
# keys, passwords, or registry credentials); consumed by _BatchConfig.clean.
_clean_keys = (
    "POOL_ID",
    "JOB_ID",
    "POOL_VM_SIZE",
    "BLOB_CONTAINER_NAME",
    "BATCH_DIRECTORY",
    "DOCKER_IMAGE",
    "POOL_NODE_COUNT",
    "POOL_LOW_PRIORITY_NODE_COUNT",
    "DELETE_POOL_WHEN_DONE",
    "DELETE_JOB_WHEN_DONE",
    "DELETE_CONTAINER_WHEN_DONE",
    "BATCH_ACCOUNT_NAME",
    "BATCH_ACCOUNT_ENDPOINT",
    "STORAGE_ACCOUNT_CONNECTION_STRING",
    "STORAGE_ACCESS_DURATION_HRS",
    "REGISTRY_SERVER",
    "COMMAND_LINE",
)
def BatchConfig(**kwargs):
    """
    Provides an interface for preparing, running, and pulling down data from an Azure Batch job

    Args:
        BATCH_DIRECTORY (string): Local directory in which input and output files should be placed
        BATCH_ACCOUNT_NAME (string): Batch account name. Taken from the environment when not provided
        BATCH_ACCOUNT_KEY (string): Batch account key. Taken from the environment when not provided
        BATCH_ACCOUNT_ENDPOINT (string): Batch account endpoint. Taken from the environment when not provided
        JOB_ID (string): Name for the Batch Job
        STORAGE_ACCOUNT_NAME (string): Storage Account name
        STORAGE_ACCOUNT_KEY (string): Storage account key. Taken from the environment when not provided
        STORAGE_ACCOUNT_CONNECTION_STRING (string): Storage account access connection string. Taken from the environment when not provided
        STORAGE_ACCESS_DURATION_HRS (int): Time in hours that the generated storage access token will be valid for
        BLOB_CONTAINER_NAME (string): Name for the blob storage container
        POOL_ID (string): Pool Id
        POOL_NODE_COUNT (int): Count for normal priority nodes in the batch pool. Only used when creating a pool. **Ignored if the pool already exists**
        POOL_LOW_PRIORITY_NODE_COUNT (int): Count for low priority nodes in the batch pool. **Ignored if the pool already exists**
        POOL_VM_SIZE (string): VM name (See the FAQ for details)
        DOCKER_IMAGE (string): name of the docker image
        REGISTRY_SERVER (string, optional): Used when the docker image is hosted on a private repository. Taken from the environment when not provided
        REGISTRY_USERNAME (string, optional): Used when the docker image is hosted on a private repository. Taken from the environment when not provided
        REGISTRY_PASSWORD (string, optional): Used when the docker image is hosted on a private repository. Taken from the environment when not provided
        DELETE_POOL_WHEN_DONE (boolean): Should the batch pool be deleted when the job has been completed? Default `False`
        DELETE_JOB_WHEN_DONE (boolean): Should the batch job be deleted when the job has been completed? Default `False`
        DELETE_CONTAINER_WHEN_DONE (boolean): should the blob storage container be deleted when the job has been completed? Default `False`

    Returns:
        _BatchConfig: the validated configuration, with environment defaults merged in.
    """
    # Validation merges environment defaults and checks the JSON schema.
    return _validate(_BatchConfig(**kwargs))
def _validate(x):
    """Merge environment defaults into *x*, schema-check, and return a new config.

    Service-credential fields left unset on *x* are dropped so that values
    from the environment (``_ENV_CONFIG``) can fill them in.
    """
    provided = {
        key: value
        for key, value in x._asdict().items()
        if key not in SERVICE_KEYS or value
    }
    merged = dict(_ENV_CONFIG)
    merged.update(provided)
    validate(merged, _CONFIG_SCHEMA)
    return _BatchConfig(**merged)
SERVICE_KEYS = (
"BATCH_ACCOUNT_NAME",
"BATCH_ACCOUNT_KEY",
"BATCH_ACCOUNT_ENDPOINT",
"STORAGE_ACCOUNT_KEY",
"STORAGE_ACCOUNT_CONNECTION_STRING",
"REGISTRY_SERVER",
"REGISTRY_USERNAME",
"REGISTRY_PASSWORD",
)
_ENV_CONFIG = {}
for key in SERVICE_KEYS:
val = os.getenv(key, None)
if val:
_ENV_CONFIG[key] = val.strip('"')
|
def is_empty(data_structure):
    """Return True if *data_structure* is empty (falsy), else False.

    Works for any container, string, or number, mirroring Python truthiness.
    """
    # Idiomatic form of the original if/else that returned True/False.
    return not data_structure
998,402 | 0a3c624838c70bba9e1538243e80ca9c43cdbcf7 | #backward elimination regression
"""Backward-elimination feature selection on the Telco churn dataset.

Fits a logistic-regression baseline, then repeatedly drops the column with
the weakest OLS significance (judged manually from each summary).
"""
import pandas as pd
from sklearn.preprocessing import LabelEncoder
from sklearn.model_selection import train_test_split
import numpy as np
from sklearn.linear_model import LogisticRegression

dataset = pd.read_csv('Churn.csv')
# Drop rows with a blank TotalCharges before converting the column to numeric.
dataset = dataset[dataset['TotalCharges'] != ' ']
dataset['TotalCharges'] = pd.to_numeric(dataset['TotalCharges'])
# Sanity check: print the distinct values of every non-ID column.
for i in range(1, len(dataset.columns)):
    print(dataset.columns[i], "--", dataset[dataset.columns[i]].unique())
# Positional indices of the categorical columns to label-encode.
labels = [1, 3, 4, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 20]
for i in labels:
    dataset.iloc[:, i] = LabelEncoder().fit_transform(dataset.iloc[:, i])
dataset = dataset.drop(['customerID'], axis=1)
X = dataset.iloc[:, :-1]
y = dataset.iloc[:, -1].values
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.30, random_state=0)
logit = LogisticRegression(random_state=0)
logit.fit(X_train, y_train)
#r_square = logit.score(df_val.iloc[:,:-1], df_val["Churn"])
pred = logit.predict(X_test)

# Fix: OLS lives in statsmodels.api; statsmodels.formula.api no longer
# exposes it under recent statsmodels releases.
import statsmodels.api as sm
# Prepend an intercept column. Use the actual row count instead of the
# hard-coded 7032 so the script survives upstream data changes.
X = np.append(arr=np.ones((X.shape[0], 1)).astype(int), values=X, axis=1)
X_opt = X[:, :]
logit_OLS = sm.OLS(endog=y, exog=X_opt).fit()
logit_OLS.summary()
X_opt = X[:, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 15, 16, 17, 18, 19]]
logit_OLS = sm.OLS(endog=y, exog=X_opt).fit()
logit_OLS.summary()
X_opt = X[:, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 15, 16, 17, 18, 19]]
logit_OLS = sm.OLS(endog=y, exog=X_opt).fit()
logit_OLS.summary()
X_opt = X[:, [0, 1, 2, 4, 5, 6, 7, 8, 9, 10, 11, 12, 15, 16, 17, 18, 19]]
logit_OLS = sm.OLS(endog=y, exog=X_opt).fit()
logit_OLS.summary()
X_opt = X[:, [0, 2, 4, 5, 6, 7, 8, 9, 10, 11, 12, 15, 16, 17, 18, 19]]
logit_OLS = sm.OLS(endog=y, exog=X_opt).fit()
logit_OLS.summary()
X_opt = X[:, [0, 2, 4, 5, 6, 7, 8, 9, 10, 11, 12, 15, 16, 18, 19]]
logit_OLS = sm.OLS(endog=y, exog=X_opt).fit()
logit_OLS.summary()
#---------------------
X_opt = X[:, [0, 2, 5, 6, 7, 8, 9, 10, 11, 12, 15, 16, 18, 19]]
logit_OLS = sm.OLS(endog=y, exog=X_opt).fit()
logit_OLS.summary()
998,403 | d758126f80ae04297642060d440280cce80fb75d | # import requests module
import json
import warnings
warnings.filterwarnings("ignore")
from time import sleep
from bitbucket_resource import Bitbucket
from doc360_resource import Project_Api, Category, Article,Teams,Reader_Group
def run_process():
    # Mirror a Bitbucket repository's README into Document360: delete the
    # existing categories, recreate "Public_README"/"Private_README"
    # categories from the split README content, then create one reader
    # group per category. POC-level flow; sleep(2) calls pace the API.
    """Required External Data"""
    # SECURITY NOTE (review): credentials and an API token are hard-coded
    # below and committed to source. Rotate them and load from environment
    # variables or a secret store before any non-POC use.
    workspace = "sample_workspace_id"
    repo_name = "sample_project"
    username = "bopod97869@fxseller.com"
    password = "P@ssword1"
    document360_token = "RKo/9SkC7rCD8/0Wfe1dts2OCgynxaVzF0/kU5Clg0qZTVSS0XkYoaDerbJdiYM6u0DxgYAU2mLM3HxX8pG7Z2wqBu5MALnsPprohmERARh3er7SCKwPdDNnJy04OkqAky/c4RzDJlifD3oPlvrCvw=="
    """Initiating objects of Document360 which will be needed"""
    project_360 = Project_Api(document360_token)
    category_360 = Category(document360_token)
    article_360 = Article(document360_token)
    teams_360 = Teams(document360_token)
    reader_group_360 = Reader_Group(document360_token)
    project_id_360 = project_360.get_project_id()
    user_id_360 = teams_360.primary_user_id()
    """Create Bitbucket Object"""
    bitbucket_obj = Bitbucket(workspace=workspace, repo_name=repo_name, username=username, password=password)
    """forming list of folders and files in repository"""
    repo_dict = bitbucket_obj.read_repository()
    folder_list, file_list = repo_dict["folders"], repo_dict["files"]
    """
    For Document360,
    Every folder will be treated as Category.
    Every sub-folder will be treated as sub-Category.
    Every file will be treated as article
    """
    """
    Since this is POC, we will keep process simple.
    Upon making changes to the Bitbucket repository,
    the process will delete the existing categories and
    recreate the categoies as per bitbucket repository
    """
    """To delete the categories, we execute the below code"""
    doc360_category_list = project_360.get_category_ID_list()
    if len(doc360_category_list.values()) > 0:
        # list() copy so deletion does not race the underlying mapping.
        for cat_id in list(doc360_category_list.values()):
            category_360.delete_category(cat_id)
            sleep(2)
    """Creating Files as Categories"""
    """Folder and File list"""
    # Re-read after the deletions to work from a fresh snapshot.
    repo_dict = bitbucket_obj.read_repository()
    folder_list, file_list = repo_dict["folders"], repo_dict["files"]
    """Read README file"""
    content_list = []
    for file_name in file_list:
        # "DEVELOPER_SPECIFIC" marker splits public from private content.
        content = bitbucket_obj.read_file_direct(file_name=file_name, separator="DEVELOPER_SPECIFIC")
        if len(content) == 2:
            pub_content, prv_content = content[0], content[1]
            content_list.append((pub_content, prv_content))
        else:
            # NOTE(review): this replaces the accumulated list with the raw
            # content of the current file — confirm that is intended.
            content_list = content
    """________________________________________________________________________________________________________________"""
    """Creating Category based on README.md file"""
    # Only the first file's (public, private) pair is used below.
    cat_file_params = [{
        "name": "Public_README",
        "project_version_id": project_id_360,
        "content": content_list[0][0],
        "category_type": 1,
        "user_id": user_id_360,
    },
        {
            "name": "Private_README",
            "project_version_id": project_id_360,
            "content": content_list[0][1],
            "category_type": 1,
            "user_id": user_id_360,
        }]
    for param in cat_file_params:
        category_360.post_add_category(parameters=param)
        sleep(2)
    """Forming Category List"""
    cat_list = project_360.get_category_ID_list()
    print(cat_list)
    """Public and Private Categories"""
    # Category names are "<Visibility>_README"; split on "_" to classify.
    public_categories = [key for (key, value) in cat_list.items() if key.split("_")[0] == "Public"]
    private_categories = [key for (key, value) in cat_list.items() if key.split("_")[0] == "Private"]
    """Creating Reader Group"""
    # public
    param_1 = {
        "title": public_categories[0],
        # OPTIONAL_PARAMETER
        "description": "Docs for Public Viewing",
        "access_scope": {
            "access_level": 1,
            "categories": [cat_list[public_categories[0]]]
        },
    }
    # private
    param_2 = {
        "title": private_categories[0],
        # OPTIONAL_PARAMETER
        "description": "Docs for Private Viewing",
        "access_scope": {
            "access_level": 1,
            "categories": [cat_list[private_categories[0]]]
        },
    }
    for param in [param_1, param_2]:
        print(reader_group_360.post_add_group(parameters=param))
        sleep(2)
# """Creating Categories in Document360 based on the folders in Bitbucket repository."""
# for folder in folder_list:
# params = {"name":folder, "project_version_id":project_id_360}
# category_360.post_add_category(parameters=params)
# sleep(2)
#
#
# """Creating Articles in their respective Categories in Document360 based on folder and files in Bitbucket"""
# doc360_category_list = project_360.get_category_ID_list()
# sleep(2)
# for folder in folder_list:
# cat_id = doc360_category_list[folder]
# articles_in_cat = bitbucket_obj.category_content(folder)
# if len(articles_in_cat) > 0:
# for article in articles_in_cat:
# parameters = {
# "title": article,
# "category_id": cat_id,
# "project_version_id": project_id_360,
# "user_id": user_id_360,
# "content": bitbucket_obj.read_file(folder, article),
# }
# article_360.post_add_article(parameters)
# sleep(2)
#
#
# """Publishing the added Articles on Document360 website"""
# article_list = project_360.get_article_ID_list()
#
# for folder in folder_list:
# cat_id = doc360_category_list[folder]
# articles_in_cat = bitbucket_obj.category_content(folder)
# if len(articles_in_cat) > 0:
# for article in articles_in_cat:
# parameters = {
# "version_number": 1,
# "user_id": user_id_360,
# }
# article_360.post_publish_article(id=article_list[article],parameters=parameters)
# sleep(2)
|
998,404 | 1a20cb487e65dc716990fbcb9f89b8923bd91107 | from flask import Blueprint
# Create the blueprint object; all routes registered on it live under "/user".
profile_blue = Blueprint("profile", __name__, url_prefix="/user")
# Import the views so their route decorators register against this blueprint.
from . import views
|
998,405 | 98f097be7a383ae4920558c575373ce36558f8cd | import xml.etree.ElementTree as ET
import sys
def reduceExpression(terminalXml, depth):
    """Produce the string variations for a leaf MathML element.

    ``<mo>`` operators are emitted verbatim (blank operators become " * ").
    ``<mi>``/``<mn>`` leaves emit the literal token, plus a wildcard
    ``$Expression$`` variant when ``-3 < depth < 0``.

    Returns (variations, depth - 1).
    """
    variations = []
    # Empty elements have text == None; substitute a space so string
    # operations below are safe. (The original called terminalXml.text.strip()
    # directly for <mo>, crashing with AttributeError on empty elements.)
    terminalXmlText = terminalXml.text if terminalXml.text is not None else ' '
    if terminalXml.tag == 'mo':
        if terminalXmlText.strip() == '':
            variations.append('<mo> * </mo>')
        else:
            variations.append('<mo> ' + terminalXmlText + ' </mo>')
    if terminalXml.tag == 'mi' or terminalXml.tag == 'mn':
        if depth < 0 and depth > -3:
            variations.append('<mi> $Expression$ </mi>')
        variations.append('<' + terminalXml.tag + '> ' + terminalXmlText + ' </' + terminalXml.tag + '>')
    return variations, depth - 1
def genTreeStructureUtil(rawXml, depth):
    """Recursively enumerate structural variations of a MathML subtree.

    Leaves delegate to ``reduceExpression``; interior nodes combine every
    child variation (Cartesian product), wrap them in the node's tag, and
    append a ``$Expression$`` wildcard when the running depth counter is in
    (-3, 0). Returns (variations, depth - 1).
    """
    if len(list(rawXml)) <= 0:
        return reduceExpression(rawXml, 0)
    variations = ['']
    for child in rawXml:
        childVariations, depth2 = genTreeStructureUtil(child, depth)
        # Cartesian combination of everything built so far with the child's
        # variations. (range/comprehensions replace the Python-2-only
        # xrange loops of the original.)
        variations = [
            prefix + ' ' + suffix
            for prefix in variations
            for suffix in childVariations
        ]
    variations = [
        '<' + rawXml.tag + '> ' + variation + ' </' + rawXml.tag + '>'
        for variation in variations
    ]
    if depth2 < 0 and depth2 > -3:
        variations.append('<mi> $Expression$ </mi>')
    return variations, depth2 - 1
def findDepth(rawXml):
    """Return the depth of the element tree rooted at *rawXml* (leaf = 0)."""
    children = list(rawXml)
    if not children:
        return 0
    return 1 + max(findDepth(child) for child in children)
def genTreeStructure():
    """Write structural variants for every equation in MathML.xml.

    Shallow equations (depth < 2) pass through unchanged; deeper ones are
    expanded via genTreeStructureUtil. Meta rows and an index mapping each
    output line back to its source row are written alongside.
    """
    # `with` ensures the meta handle is closed (the original leaked it).
    with open('../../Data/MathMLMeta.xml', 'r') as metaFile:
        meta = metaFile.readlines()
    structureMathML = open('../../Data/StructureMathML.xml', 'w')
    structureMathMLMeta = open('../../Data/StructureMathMLMeta.xml', 'w')
    mathXmlFile = open('../../Data/MathML.xml', 'r')
    structToOrigMap = open('../../Data/StructureToOrig.xml', 'w')
    cnt = -1
    for rawEquation in mathXmlFile:
        cnt += 1
        # Strip namespace prefixes/declarations so ElementTree sees bare tags.
        rawEq = rawEquation.strip('\n').replace('m:', '')
        rawEq = rawEq.replace('xmlns', '')
        rawEq = rawEq.replace(':m', '')
        rawEq = rawEq.replace('="http://www.w3.org/1998/Math/MathML"', '')
        try:
            if findDepth(ET.fromstring(rawEq)) < 2:
                # Too shallow to expand: copy through unchanged.
                # (Dropping the original .encode('utf-8') keeps writes valid
                # on Python 3 text-mode files.)
                structureMathML.write(rawEq + '\n')
                structureMathMLMeta.write(meta[cnt].strip('\n') + '\n')
                structToOrigMap.write(str(cnt) + '\n')
                continue
            variations, depth = genTreeStructureUtil(ET.fromstring(rawEq), 100000)
            variations = variations[:-1]
            for variation in variations:
                structureMathML.write(variation + '\n')
                structureMathMLMeta.write(meta[cnt].strip('\n') + '\n')
                structToOrigMap.write(str(cnt) + '\n')
        except Exception as e:
            # Skip malformed rows but report them. print(e) is valid on both
            # Python 2 and 3; the original `print e` breaks Python 3.
            print(e)
    structureMathML.close()
    structureMathMLMeta.close()
    structToOrigMap.close()
    mathXmlFile.close()
def main():
    """Entry point: build the structure-variant corpus from MathML.xml."""
    genTreeStructure()

if __name__ == '__main__':
    main()
|
998,406 | dcac86405454d4be6c0b87b8e721e6762cdbf8cb | #!/usr/bin/env python
# encoding: utf-8
import xadmin
from .models import Singers
class SingersAdmin(object):
    # xadmin admin options for the Singers model.
    # Columns shown on the change-list page.
    list_display = ["name", "singer_id", "birthday", "gender", "desc", "country", "add_time"]
    # Free-text search over the singer's name.
    search_fields = ['name', ]
    # Sidebar filters for every displayed column.
    list_filter = ["name", "singer_id", "birthday", "gender", "desc", "country", "add_time"]

xadmin.site.register(Singers, SingersAdmin)
|
998,407 | c86a5c059d103f30dcc4898b259098001589a532 | user_input=int(input())
# Read space-separated integers into a set, then apply a sequence of
# pop/remove/discard commands read from stdin and print the resulting sum.
list_of_values = input()
list_of_values1 = set(map(int, list_of_values.split(" ")))
for _ in range(int(input())):
    user_input_commands = input().split(" ")
    if user_input_commands[0] == "pop":
        # Guard: set.pop() on an empty set raises KeyError.
        if list_of_values1:
            list_of_values1.pop()
    elif user_input_commands[0] == "remove":
        # Membership check preserves the original's silent skip of
        # missing values (bare remove() would raise KeyError).
        if int(user_input_commands[1]) in list_of_values1:
            list_of_values1.remove(int(user_input_commands[1]))
    elif user_input_commands[0] == "discard":
        # discard() is already a no-op for missing values.
        list_of_values1.discard(int(user_input_commands[1]))
print(sum(list_of_values1))
998,408 | a059029265c2ac2a0a0176170f224e172eca392e | # Authors: Sebastian Szyller, Buse Gul Atli
# Copyright 2020 Secure Systems Group, Aalto University, https://ssg.aalto.fi
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import configparser
import copy
import datetime
import os
import pickle
import random
from typing import Dict, Any, List, Tuple
import torch
import torch.nn as nn
import torch.nn.utils.prune as prune
import torch.utils.data as data
import torchvision as tv
from tqdm import tqdm
import config_helper
import logger
import models
import score
# Fix the PRNG seed so experiment runs are reproducible.
random.seed(42)
# Module-level logger; every message is prefixed with ">>>".
log = logger.Logger(prefix=">>>")
class SimpleDataset(data.Dataset):
    """Minimal torch Dataset over an in-memory list of (sample, label) pairs."""

    def __init__(self, dataset: List[Tuple[Any, int]]) -> None:
        samples, labels = zip(*dataset)
        self.data = samples
        self.labels = labels
        self.count = len(labels)

    def __getitem__(self, index: int) -> (Any, int):
        """Return the (sample, label) pair stored at *index*."""
        return self.data[index], self.labels[index]

    def __len__(self) -> int:
        """Number of pairs held by the dataset."""
        return self.count
def main(config: configparser.ConfigParser, model_path: str, watermark_path: str) -> None:
    """Run the pruning experiment end-to-end.

    Loads the victim model and test/watermark data, prunes copies of the
    model at several sparsity levels, and pickles the resulting scores to
    the configured save path (suffixed with today's date).
    """
    # Setup model architecture and load model from file.
    model = setup_model(
        config["DEFAULT"]["model_architecture"],
        model_path,
        int(config["DEFAULT"]["number_of_classes"]))
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    model = model.to(device=device)
    # Load test set and transform it.
    test_set = download_test(
        config["DEFAULT"]["dataset_name"],
        config["DEFAULT"]["test_save_path"],
        int(config["DEFAULT"]["input_size"])
    )
    test_set = data.DataLoader(test_set, batch_size=int(config["DEFAULT"]["batch_size"]))
    # Watermark samples were pickled by an earlier run.
    watermark_set = load_file(watermark_path)
    pruning_save_path = config["DEFAULT"]["pruning_save_path"]
    if not os.path.exists(pruning_save_path):
        log.warn(pruning_save_path + " does not exist. Creating...")
        os.makedirs(pruning_save_path)
        log.info(pruning_save_path + " Created.")
    pruning_results = prune_model(model, test_set, watermark_set, int(config["DEFAULT"]["number_of_classes"]))
    date = datetime.datetime.today().strftime('%Y-%m-%d')
    path_body = pruning_save_path + config["DEFAULT"]["model_name"]
    save_scores(
        pruning_results,
        path_body + date)
def download_test(dataset_name: str, victim_data_path: str, input_size: int) -> data.Dataset:
    """Download and return the MNIST or CIFAR10 test split.

    Applies the standard (0.5, 0.5, 0.5) normalization; CIFAR10 is also
    resized/center-cropped to *input_size*. Raises ValueError for any
    other dataset name.
    """
    mean = [0.5, 0.5, 0.5]
    std = [0.5, 0.5, 0.5]
    if dataset_name == "MNIST":
        dataset = tv.datasets.MNIST
        transformations = tv.transforms.Compose([
            tv.transforms.ToTensor(),
            tv.transforms.Normalize(mean, std)
        ])
    elif dataset_name == "CIFAR10":
        dataset = tv.datasets.CIFAR10
        transformations = tv.transforms.Compose([
            tv.transforms.Resize(input_size),
            tv.transforms.CenterCrop(input_size),
            tv.transforms.ToTensor(),
            tv.transforms.Normalize(mean, std)
        ])
    else:
        log.error("MNIST and CIFAR10 are the only supported datasets at the moment. Throwing...")
        raise ValueError(dataset_name)
    test_set = dataset(victim_data_path, train=False, transform=transformations, download=True)
    # Bug fix: the original format string had two placeholders for three
    # arguments, logging the dataset name where the sample count belonged.
    log.info("{}: {} test samples\nSaved in: {}".format(dataset_name, len(test_set), victim_data_path))
    return test_set
def setup_model(model_architecture: str, model_path: str, number_of_classes: int) -> nn.Module:
    """Instantiate *model_architecture* and load its weights from *model_path*.

    Raises ValueError for unknown architecture names.
    """
    available_models = {
        "MNIST_L5": models.MNIST_L5,
        "CIFAR10_BASE": models.CIFAR10_BASE
    }
    # Use .get() so an unknown name reaches the explicit error below; the
    # original indexed the dict directly, raising KeyError first and leaving
    # its None-check unreachable.
    model_class = available_models.get(model_architecture)
    if model_class is None:
        log.error("Incorrect model architecture specified or architecture not available.")
        raise ValueError(model_architecture)
    model = model_class()
    models.load_state(model, model_path)
    return model
def load_file(file_path: str) -> List[Tuple]:
    """Unpickle and return the list of tuples stored at *file_path*."""
    with open(file_path, "rb") as handle:
        return pickle.load(handle)
def prune_model(model: nn.Module, test_set: data.DataLoader, watermark_set: List, number_of_classes: int) -> Dict[float, Dict[str, Any]]:
    # Pruning experiment with multiple pruning levels
    """L1-prune deep copies of *model* at several sparsity levels, scoring each.

    The input model is never modified (each level prunes a deepcopy).
    Returns {level: {"test": (FloatScore, DictScore), "watermark": FloatScore}}.
    """
    pruning_levels = [0.01, 0.05, 0.1, 0.25, 0.4, 0.5, 0.75, 0.9]
    pruning_results = {}
    # Baseline accuracy before any pruning (logged, not stored).
    log.info("Accuracy before pruning:")
    _ = test_model(model, test_set, number_of_classes)
    _ = test_watermark(model, watermark_set)
    for level in pruning_levels:
        model_local = copy.deepcopy(model)
        # parameters_to_prune = model_local.parameters()
        # Apply unstructured L1 pruning to every conv and linear weight.
        for _, module in model_local.named_modules():
            if isinstance(module, torch.nn.Conv2d):
                prune.l1_unstructured(module, name='weight', amount=level)
            elif isinstance(module, torch.nn.Linear):
                prune.l1_unstructured(module, name="weight", amount=level)
        log.info("Testing with pruning level {}.".format(level))
        test_float_score, test_dict_score = test_model(model_local, test_set, number_of_classes)
        watermark_float_score = test_watermark(model_local, watermark_set)
        pruning_results[level] = {
            "test": (test_float_score, test_dict_score),
            "watermark": watermark_float_score
        }
    return pruning_results
def test_model(model: nn.Module, test_set: data.DataLoader, number_of_classes: int) -> Tuple[score.FloatScore, score.DictScore]:
    """Test the model on the test dataset.

    Returns (average accuracy, per-class accuracy). NOTE(review): inputs are
    moved with .cuda() unconditionally, so this requires a CUDA host.
    """
    # model.eval is used for ImageNet models, batchnorm or dropout layers will work in eval mode.
    model.eval()

    def test_average() -> score.FloatScore:
        # Overall accuracy across the whole test set.
        correct = 0
        total = 0
        with torch.set_grad_enabled(False):
            for (inputs, yreal) in tqdm(test_set, unit="images", desc="Testing model (average)", leave=True, ascii=True):
                inputs, yreal = inputs.cuda(), yreal.cuda()
                ypred = model(inputs)
                _, predicted = torch.max(ypred.data, 1)
                total += yreal.size(0)
                correct += (predicted == yreal).sum().item()
        accuracy = 100 * correct / total
        log.info("Accuracy of the network on the {} test images (average): {}".format(total, accuracy))
        # Accuracy is also appended to a plain-text epoch log.
        with open('epoch_logs.txt', 'a+') as file:
            file.write('Test Acc: {}\n'.format(accuracy))
        return score.FloatScore(accuracy)

    def test_per_class() -> score.DictScore:
        # Accuracy broken down by ground-truth class.
        class_correct = list(0. for _ in range(number_of_classes))
        class_total = list(0. for _ in range(number_of_classes))
        total = 0
        with torch.no_grad():
            for (inputs, yreal) in tqdm(test_set, unit="images", desc="Testing model (per class)", leave=True, ascii=True):
                inputs, yreal = inputs.cuda(), yreal.cuda()
                total += yreal.size(0)
                ypred = model(inputs)
                _, predicted = torch.max(ypred, 1)
                c = (predicted == yreal).squeeze()
                for i in range(yreal.shape[0]):
                    label = yreal[i]
                    class_correct[label] += c[i].item()
                    class_total[label] += 1
        log.info("Accuracy of the network on the {} test images (per-class):".format(total))
        per_class_accuracy = {}
        for i in range(number_of_classes):
            # +0.0001 guards against division by zero for absent classes.
            accuracy = 100 * class_correct[i] / (class_total[i] + 0.0001)
            per_class_accuracy[i] = accuracy
            print('Accuracy of %5s : %2d %%' % (
                i, accuracy))
        return score.DictScore(per_class_accuracy)

    return test_average(), test_per_class()
def test_watermark(model: nn.Module, watermark_set: List) -> score.FloatScore:
    """Return average accuracy of *model* on the watermark trigger set.

    NOTE(review): inputs are moved with .cuda() unconditionally, so this
    requires a CUDA host.
    """
    model.eval()
    correct = 0
    total = 0
    with torch.no_grad():
        for (inputs, yreal) in tqdm(watermark_set, unit="images", desc="Testing watermark (average)", leave=True, ascii=True):
            inputs, yreal = inputs.cuda(), yreal.cuda()
            ypred = model(inputs)
            _, predicted = torch.max(ypred.data, 1)
            total += yreal.size(0)
            correct += (predicted == yreal).sum().item()
    accuracy = 100 * correct / total
    log.info("Accuracy of the network on the {} test images (average): {}".format(total, accuracy))
    return score.FloatScore(accuracy)
def save_scores(pruning_results: Dict[float, Dict[str, Any]], file_path: str) -> None:
    """Pickle *pruning_results* to ``<file_path>.pkl``."""
    with open(file_path + '.pkl', 'wb') as handle:
        pickle.dump(pruning_results, handle, pickle.HIGHEST_PROTOCOL)
def handle_args() -> argparse.Namespace:
    """Parse CLI arguments and require all three paths to be present.

    Raises ValueError when --config_file, --watermark, or --model is missing.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--config_file",
        type=str,
        default=None,
        help="Configuration file for the experiment.")
    parser.add_argument(
        "--watermark",
        type=str,
        default=None,
        help="Path to the saved watermark Loader.")
    parser.add_argument(
        "--model",
        type=str,
        default=None,
        help="Path to the saved model.")
    args = parser.parse_args()
    if args.config_file is None:
        raise ValueError("Configuration file must be provided.")
    if args.watermark is None:
        raise ValueError("Watermark path must be provided.")
    # Bug fix: the original re-checked args.config_file here, so a missing
    # --model slipped through and failed later with a confusing error.
    if args.model is None:
        raise ValueError("Model path must be provided.")
    return args
if __name__ == "__main__":
    # CLI entry point: parse arguments, load and echo the config, then run.
    args = handle_args()
    config = config_helper.load_config(args.config_file)
    watermark_path = args.watermark
    model_path = args.model
    config_helper.print_config(config)
    log.info("Model path: {}.".format(model_path))
    log.info("Watermark path: {}".format(watermark_path))
    main(config, model_path, watermark_path)
|
998,409 | eea0ac05a8267f32eec32c54a986553269ec3d03 | from .cov_hos_form_collection import *
from .province_form_collection import *
from .internal_affairs_form_collection import *
from .chief_minister_form_collection import *
from .local_level_form_collection import *
from .collection_six import *
|
998,410 | 0aa15e5dc56ab52cb0f05ee74016930022bafd76 | n=int(input())
temp = n  # remember the original number; the loop below consumes n
am = 0  # running sum of cubed digits
while n != 0:
    rem = int(n % 10)  # current least-significant digit
    am = am + rem ** 3
    n = int(n / 10)  # drop that digit
# NOTE(review): cubing digits is the Armstrong rule for 3-digit numbers only;
# the general rule raises each digit to the digit-count power — confirm the
# intended input range.
if temp == am:
    print("yes")
else:
    print("no")
|
998,411 | 484674f95777b61ceadcbdb13308a661a15e4866 | #!/usr/bin/env python3
import sys
import os
def get_main_c_app():
    """Return the source text of a minimal "echo test" C program."""
    lines = [
        "",
        "#include <stdio.h>",
        "",
        "int main(int argc, char *argv[]){",
        "",
        "    (void)argc; (void)argv;",
        '    printf("test for echo\\n");',
        "",
        "    return 0;",
        "",
        "}",
        "",
    ]
    return "\n".join(lines) + "\n"
def get_c_compiler_flags_debug_gcc():
    """GCC compile flags for debug builds (strict warnings + AddressSanitizer)."""
    return [
        "-g",
        "-Wall",
        "-Wextra",
        "-Werror",
        "-pedantic",
        "-fPIC",
        "-fsanitize=address",
        "-D_GLIBCXX_DEBUG",
    ]
def get_c_compiler_flags_release_gcc():
    """GCC compile flags for optimized release builds."""
    return [
        "-O2",
        "-Wall",
        "-Wextra",
        "-Werror",
        "-pedantic",
        "-fPIC",
        "-DNDEBUG",
    ]
def get_c_compiler_flags_linux_gcc():
    """Linux-specific GCC flags (C18 standard)."""
    return ["-std=c18"]
def get_c_compiler_flags_windows_gcc():
    """Windows-specific GCC flags (C18 standard)."""
    return ["-std=c18"]
def get_c_compiler_flags_macosx_gcc():
    """macOS-specific GCC flags (C18 standard)."""
    return ["-std=c18"]
def get_c_linker_flags_debug_gcc():
    """Linker flags for debug builds (link AddressSanitizer runtime)."""
    return ["-lasan"]
def get_c_linker_flags_release_gcc():
    """Linker flags for release builds (currently none)."""
    return []
def get_clang_version():
    """Pinned clang major version used by the build, as a string."""
    return "16"
def c_compiler_flags_debug_msvc():
    """MSVC compile flags for debug builds (currently none)."""
    return []
def c_compiler_flags_release_msvc():
    """MSVC compile flags for release builds (currently none)."""
    return []
def c_compiler_flags_windows_msvc():
    """Windows-specific MSVC compile flags (currently none)."""
    return []
def c_compiler_standard_windows_msvc():
    """C language standard identifier used with MSVC on Windows."""
    return "stdc17"
|
998,412 | b2f0315175822a883a9f74f9ab697f5b5a8c7765 | '''
@Description:
@Author: 妄想
@Date: 2020-06-25 14:01:59
@LastEditTime: 2020-06-25 16:16:22
@LastEditors: 妄想
'''
class Config:
    """Application configuration (secrets redacted as "xxx" in this copy)."""
    # Redis connection settings.
    ip = "xxx"
    REDIS_PORT = 6379
    REDIS_PASSWORD = "xxx"
    # SQLAlchemy database connection URI.
    SQLALCHEMY_DATABASE_URI = 'xxx'
    # NOTE(review): Flask-SQLAlchemy recommends False here unless the
    # modification-tracking event system is actually used — confirm.
    SQLALCHEMY_TRACK_MODIFICATIONS = True
998,413 | ae105300f5c25b5953b59dced4396239cdb9e896 | import cv2
# Crop a fixed region out of numbered JPEG frames and save 300x300 copies.
input_path = "--PATH--"   # directory containing <i>.jpg source frames
output_path = "--PATH--"  # directory for the cropped results
num = 10000               # highest frame index to try
for i in range(0, num):
    try:
        name = input_path + "/" + str(i) + ".jpg"
        frame = cv2.imread(name)
        if frame is None:
            # imread returns None for missing/unreadable files; skip
            # explicitly instead of relying on the slice below to fail.
            continue
        roi = frame[80:560, 0:480]  # fixed crop: rows 80-560, cols 0-480
        roi = cv2.resize(roi, (300, 300))
        name = output_path + "/" + str(i) + ".jpg"
        cv2.imshow("roi", roi)
        cv2.waitKey(1)
        cv2.imwrite(name, roi)
    except Exception:
        # Best-effort batch job: keep going past any single bad frame.
        # (Narrowed from the original bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit.)
        pass
|
998,414 | b1efdfc5e58138dad61de746f82779be79a2894b | # -*- coding: utf-8 -*-
#
# Copyright 2018-2019 Botswana Harvard Partnership (BHP)
from Products.CMFCore.permissions import View
from archetypes.schemaextender.interfaces import IBrowserLayerAwareExtender
from archetypes.schemaextender.interfaces import ISchemaExtender
from bhp.lims.content import ExtUIDReferenceField
from bhp.lims.interfaces import IBhpLIMS
from bika.lims.interfaces import IAnalysis
from bika.lims.permissions import FieldEditAnalysisResult
from zope.component import adapts
from zope.interface import implements
class AnalysisSchemaExtender(object):
    """Extend Analysis with additional schema fields
    """
    adapts(IAnalysis)
    implements(
        ISchemaExtender,
        IBrowserLayerAwareExtender)
    # Don't do schema extending unless our add-on product is
    # installed on Plone site
    layer = IBhpLIMS
    # Referral Lab where this test is performed
    fields = [
        ExtUIDReferenceField(
            "ReferralLab",
            mode="rw",
            required=0,
            multiValued=0,  # a single referral lab per analysis
            read_permission=View,
            write_permission=FieldEditAnalysisResult,
            allowed_types=('ReferralLab',),
        ),
    ]

    def __init__(self, context):
        # context is the IAnalysis object being extended.
        self.context = context

    def getFields(self):
        """Return the extra schema fields contributed by this extender."""
        return self.fields
|
998,415 | f40fc2e7727bd7ce4553ebf869a7b18554ddea23 | from flask import Flask, render_template, send_from_directory, request
import util
import os
app = Flask(__name__)
# Absolute directory of this module (for resolving relative resources).
app_path = os.path.dirname(os.path.abspath(__file__))
@app.route('/')
def index():
    """Kick off the Spark job, then render the landing page."""
    util.exec_apache_spark_scala()
    return render_template('index.html')
@app.route('/api/get_result')
def get_result_data():
    """Return the parsed contents of the Spark job's result file."""
    filename = 'result.txt'
    return util.parse_result_file(filename)
if __name__ == '__main__':
    # NOTE(review): debug=True and binding 0.0.0.0 expose the Werkzeug
    # debugger to the network — development only, never production.
    app.debug = True
    app.run(host='0.0.0.0')
998,416 | 77ac30fa4cad2e9413a87f04fa7c0060953d94ee | class Solution:
def levelOrder(self, root: TreeNode) -> List[List[int]]:
result = []
if root == None:
return result
max_depth = 0
def dfs(root: TreeNode, child: TreeNode, depth: int):
if child != None:
print(child.val, depth, len(result))
if len(result) <= depth:
result.append([])
result[depth].append(child.val)
dfs(child, child.left, depth + 1)
dfs(child, child.right, depth + 1)
dfs(None, root, 0)
return result
|
998,417 | 73006f84572d1a3a6092a9ff035f8b81c8f9c265 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
...
Created on Mon Nov 26 17:35:38 2018
@author: Simon Schmitt
"""
import numpy as np
from GETINT import getint
from SHAPE import shape
def field(fid2, Px, Py, FREC, NNODE, NELEM, KIND, NODE, X, Y, TEMP, DTDN,
          Exterior, VINF, PhiI):
    """Evaluate the BEM potential and its gradient at FREC field points.

    For each field point (Px[IP], Py[IP]) the boundary integral is accumulated
    element by element using Gauss quadrature (weights/abscissae from getint,
    shape functions from shape).

    Args:
        fid2: Open writable file handle; the solution table is appended to it.
        Px, Py: Field point coordinates (length FREC).
        FREC: Number of field points.
        NNODE, NELEM: Number of boundary nodes / elements.
        KIND: Per-element order indicator; element K has KIND[K]+1 nodes.
        NODE: (nodes-per-element x NELEM) connectivity table.
        X, Y: Boundary node coordinates.
        TEMP: Nodal potential values (row 0 is used).
        DTDN: Nodal normal-derivative values per element.
        Exterior: Problem-type flag; 3 selects the exterior-flow branch where
            a uniform stream VINF*x is superposed.  It also flips the normal
            orientation via (-1)**Exterior.
        VINF: Free-stream value used only when Exterior == 3.
        PhiI: Unused here — TODO confirm it can be dropped from the signature.

    Returns:
        (PhiP, dPhidPX, dPhidPY, QN): potential and its x/y derivatives at the
        field points, plus QN, the last evaluated unit normal (2-vector).
    """
    PhiP = np.zeros(FREC)
    dPhidPX = np.zeros(FREC)
    dPhidPY = np.zeros(FREC)
    PhiPI = np.zeros(FREC)
    DPhidPXI = np.zeros(FREC)
    DPhidPYI = np.zeros(FREC)
    for IP in range(FREC):
        XP = Px[IP]
        YP = Py[IP]
        CP = 0
        FPT = 0
        FPDX = 0
        FPDY = 0
        QN = np.zeros(2)
        if Exterior == 3:
            CP = np.ones(NNODE)
        # ELEMENT LOOP
        for K in range(NELEM):
            KINDI = KIND[K]
            NL = KINDI+1
            # Gather this element's node coordinates and nodal data.
            XQ = np.zeros(NL)
            YQ = np.zeros(NL)
            TEMQ = np.zeros(NL)
            DTDQ = np.zeros(NL)
            for J in range(NL):
                IQ = NODE[J, K]
                XQ[J] = X[int(IQ)]
                YQ[J] = Y[int(IQ)]
                TEMQ[J] = TEMP[0, int(IQ)]
                DTDQ[J] = DTDN[J, K]
            # INTERPOLATION
            # C1 is the 2-D Laplace Green's function prefactor -1/(2*pi).
            C1 = -1/(2*np.pi)
            [NINP, XII, WT] = getint(KINDI)
            for INP in range(NINP):
                [PSI, DPSI] = shape(XII[INP], KINDI)
                # Interpolate geometry and data at the integration point.
                XX = 0
                YY = 0
                DXDS = 0
                DYDS = 0
                TEM = 0
                DTN = 0
                for I in range(NL):
                    XX += XQ[I]*PSI[I]
                    YY += YQ[I]*PSI[I]
                    DXDS += XQ[I]*DPSI[I]
                    DYDS += YQ[I]*DPSI[I]
                    TEM += TEMQ[I]*PSI[I]
                    DTN += DTDQ[I]*PSI[I]
                # Jacobian of the parametric element and outward unit normal;
                # (-1)**Exterior flips orientation for exterior problems.
                DETJ = np.sqrt(DXDS**2+DYDS**2)
                QN[0] = (-1)**(Exterior)*DYDS/DETJ
                QN[1] = -(-1)**(Exterior)*DXDS/DETJ
                # Distance vector from field point to integration point.
                RX = XX-XP
                RY = YY-YP
                R = np.sqrt(RX**2+RY**2)
                DRDN = (QN[0]*RX+QN[1]*RY)/R
                ALOGR = np.log(R)
                # Green's function, its normal derivative, and its x/y
                # derivatives, each premultiplied by the quadrature weight.
                GREEN = C1*ALOGR*DETJ*WT[INP]
                DGDN = C1*DRDN/R*DETJ*WT[INP]
                DRDX = RX/R
                DRDY = RY/R
                DGDX = -C1*DRDX/R*DETJ*WT[INP]
                DGDY = -C1*DRDY/R*DETJ*WT[INP]
                DXDGDN = C1/(R**2)*(2*DRDX*DRDN-QN[0])*DETJ*WT[INP]
                DYDGDN = C1/(R**2)*(2*DRDY*DRDN-QN[1])*DETJ*WT[INP]
                # Accumulate the boundary-integral contributions.
                FPT += GREEN*DTN-TEM*DGDN
                FPDX += DGDX*DTN-TEM*DXDGDN
                FPDY += DGDY*DTN-TEM*DYDGDN
                CP -= DGDN
        if Exterior != 3:
            PhiP[IP] = FPT
            dPhidPX[IP] = FPDX
            dPhidPY[IP] = FPDY
        else:
            # Exterior flow: superpose the uniform stream VINF*x.
            PhiPI[IP] = VINF*XP
            DPhidPXI[IP] = VINF
            DPhidPYI[IP] = 0
            PhiP[IP] = FPT+PhiPI[IP]
            dPhidPX[IP] = FPDX+DPhidPXI[IP]
            dPhidPY[IP] = FPDY+DPhidPYI[IP]
    # Append the per-point solution table to the output file.
    fid2.write('\n {} \n \n'.format('FIELD POINTS COORDINATES AND SOLUTION:'))
    for K in range(FREC):
        fid2.write('{} {:d} \t {} {:3.3f} \t {} {:3.3f} \t {} {:3.3f} \t'
                   ' {} {:3.3f} \t {} {:3.3f}\n'.format('FIELD POINT # ', K+1,
                                                        'X=', float(Px[K]),
                                                        'Y=', float(Py[K]),
                                                        'PHI=', float(PhiP[K]),
                                                        'dPHIdX=',
                                                        float(dPhidPX[K]),
                                                        'dPHIdY=',
                                                        float(dPhidPY[K])))
    return PhiP, dPhidPX, dPhidPY, QN
|
998,418 | 36c95bf2272d02398339c07e84e406198860e3d5 | import sys
import numpy as np
from matplotlib import pyplot as plt
import matplotlib.ticker
import seaborn as sns
import conic_parameters
# Script: plot projected conic head/tail parameters (R90' vs Rc') on a 3x3
# grid of inclinations, one panel per 10-degree step, saved next to the script
# as a PDF with the same basename.
plotfile = sys.argv[0].replace('.py', '.pdf')
sns.set_style('white')
fig, axes = plt.subplots(3, 3, figsize=(9, 9), sharex=True, sharey=True)
# Inclinations 0, 10, ..., 80 degrees — one per panel.
incs_deg = 10.0*np.arange(9)
nbeta = 30
#betas = np.logspace(-5.0, -0.5, nbeta)
# Quadratic spacing concentrates samples at small beta.
betas = np.linspace(0.003, 0.5, nbeta)**2
XI_LIST = [None, 1.0, 0.8, 0.4]
nxi = len(XI_LIST)
# Reference curves: R90 = sqrt(2 Rc - T) for T = 0 and T = 1.
Rc_grid = np.linspace(0.0, 10.0, 2000)
R90_T0_grid = np.sqrt(2*Rc_grid)
R90_T1_grid = np.sqrt(2*Rc_grid - 1.0)
# sqrt of negative values yields NaN; clamp those to zero for plotting.
R90_T1_grid[~np.isfinite(R90_T1_grid)] = 0.0
cols = sns.color_palette('magma', n_colors=nxi)
for ax, inc_deg in zip(axes.flat, incs_deg):
    # Shaded reference bands and unit guide lines, drawn behind the data.
    ax.fill_between(Rc_grid, R90_T1_grid, R90_T0_grid, color='k', alpha=0.2)
    ax.fill_between(Rc_grid, R90_T0_grid, color='k', alpha=0.1)
    ax.plot(Rc_grid, R90_T0_grid, c='k', lw=0.5)
    ax.axhline(1.0, lw=0.5, alpha=0.5, color='k', zorder=-1)
    ax.axvline(1.0, lw=0.5, alpha=0.5, color='k', zorder=-1)
    ax.plot([0.0, 10.0], [0.0, 10.0], lw=0.5, alpha=0.5, color='k', zorder=-1)
    for xi, col in list(zip(XI_LIST, cols)):
        for beta in betas:
            # Fit to head and analytic fit to fit to tail
            ht = conic_parameters.HeadTail(beta, xi=xi, xmin=0.0, method='analytic fit')
            # Parameters for head conic
            T_h = ht.sig_h*ht.tau_h**2
            tilde_Rc_h = ht.A_h
            R0_h = 1.0
            R90_h = ht.R90
            # Parameters for tail conic
            T_t = -ht.tau_t**2
            R0_t = ht.x0_t - ht.a_t
            # Equation E from notes
            tilde_Rc_t = np.abs(T_t)*ht.a_t/R0_t
            R90_t = R0_t * np.sqrt(2*tilde_Rc_t - T_t)
            T_combine = 2*tilde_Rc_h - (R90_t / R0_h)**2
            inc = np.radians(inc_deg)
            # Projected head quantities as functions of inc
            f_h = np.sqrt(1.0 + T_h * np.tan(inc)**2)
            tilde_Rc_h_prime = tilde_Rc_h / (
                np.cos(inc)**2 * f_h * (
                    1.0 + (tilde_Rc_h / T_h) * (f_h - 1.0)
                )
            )
            T_h_prime = T_h / (np.cos(inc)**2 * f_h**2)
            R0_h_prime = R0_h * np.cos(inc) * (
                1.0 + (tilde_Rc_h / T_h) * (f_h - 1.0)
            )
            R90_h_prime = R0_h_prime * np.sqrt(2*tilde_Rc_h_prime - T_h_prime)
            # Projected tail quantities as functions of inc
            f_t = np.sqrt(1.0 + T_t * np.tan(inc)**2)
            # Equation B from notes
            T_t_prime = T_t / f_t**2 / np.cos(inc)**2
            # Equation D from notes
            R0_t_prime = R0_t * np.cos(inc) * (
                1.0 + (tilde_Rc_t / T_t) * (f_t - 1.0)
            )
            # Equation C from notes
            tilde_Rc_t_prime = tilde_Rc_t / (
                np.cos(inc)**2 * f_t * (
                    1.0 + (tilde_Rc_t / T_t) * (f_t - 1.0)
                )
            )
            # Equation A from notes
            R90_t_prime = R0_t_prime * np.sqrt(2*tilde_Rc_t_prime - T_t_prime)
            # Finally, the combined discriminant (equation F from notes)
            T_combine_prime = 2*tilde_Rc_h_prime - (R90_t_prime / R0_h_prime)**2
            if inc_deg < 30.0:
                # Plot the head for low inclinations
                y = R90_h_prime/R0_h_prime
            else:
                # Plot the tail for high inclinations
                y = R90_t_prime/R0_h_prime
            # Marker size scales with the projected/true R0 ratio.
            ax.scatter([tilde_Rc_h_prime], [y],
                       c=col, edgecolors='none',
                       marker='o', s=25*R0_h_prime/R0_h, alpha=0.4)
    # Label each panel with its inclination.
    ax.text(3.0, 0.5, rf'$i = {inc_deg:.0f}^\circ$',
            bbox={'facecolor': 'w', 'alpha': 0.8, 'edgecolor': 'none'})
# Shared axes: configuring the bottom-left panel is enough.
axes[-1, 0].set(
    yscale='linear',
    xscale='linear',
    xlim=[0.0, 5.1],
    ylim=[0.0, 5.1],
    xlabel=r"$\widetilde{R}_{c}{}'$",
    ylabel=r"$\widetilde{R}_{90}{}'$",
)
fig.tight_layout()
fig.savefig(plotfile)
# Emit the output filename (no newline) so callers/Makefiles can capture it.
print(plotfile, end='')
|
998,419 | 9ead53a4b04b5918c895b1977d2047a657a6353d | import bmesh
import bpy
from bpy.types import Armature
from .data import *
from ..helpers import *
class PskInputObjects(object):
    """Container for the scene objects that feed a PSK export.

    Attributes:
        mesh_objects: Mesh objects selected for export (empty until populated).
        armature_object: Shared armature object, or None for a static mesh.
    """

    def __init__(self):
        # Nothing collected yet; get_psk_input_objects() fills these in.
        self.mesh_objects, self.armature_object = [], None
class PskBuildOptions(object):
    """User-configurable settings for building a Psk object."""

    def __init__(self):
        # Mesh and material settings.
        self.use_raw_mesh_data = True
        self.material_names: List[str] = []
        # Bone selection and naming settings.
        self.bone_filter_mode = 'ALL'
        self.bone_group_indices: List[int] = []
        self.should_enforce_bone_name_restrictions = False
def get_psk_input_objects(context) -> PskInputObjects:
    """Collect and validate the selected meshes (and their shared armature).

    Raises:
        RuntimeError: when the selection is empty, contains a non-mesh object,
            contains a mesh without materials, has a mesh with more than one
            armature modifier, or mixes different armature modifiers.
    """
    result = PskInputObjects()

    # Reject any non-mesh object in the current selection up front.
    for candidate in context.view_layer.objects.selected:
        if candidate.type != 'MESH':
            raise RuntimeError(f'Selected object "{candidate.name}" is not a mesh')

    result.mesh_objects = context.view_layer.objects.selected
    if not len(result.mesh_objects):
        raise RuntimeError('At least one mesh must be selected')

    # Every mesh needs at least one material for the PSK face records.
    for mesh in result.mesh_objects:
        if not len(mesh.data.materials):
            raise RuntimeError(f'Mesh "{mesh.name}" must have at least one material')

    # Ensure that there are either no armature modifiers (static mesh)
    # or that there is exactly one armature modifier object shared between
    # all selected meshes.
    armatures = set()
    for mesh in result.mesh_objects:
        armature_modifiers = [m for m in mesh.modifiers if m.type == 'ARMATURE']
        if len(armature_modifiers) > 1:
            raise RuntimeError(f'Mesh "{mesh.name}" must have only one armature modifier')
        if armature_modifiers:
            armatures.add(armature_modifiers[0].object)

    if len(armatures) > 1:
        armature_modifier_names = [x.name for x in armatures]
        raise RuntimeError(f'All selected meshes must have the same armature modifier, encountered {len(armature_modifier_names)} ({", ".join(armature_modifier_names)})')
    if len(armatures) == 1:
        (result.armature_object,) = armatures

    return result
def build_psk(context, options: PskBuildOptions) -> Psk:
    """Build an in-memory Psk from the currently selected meshes.

    Gathers the selection via get_psk_input_objects, then fills in the PSK
    sections in order: bones, materials, then per-mesh points, wedges, faces
    and (if an armature is present) vertex weights.

    Args:
        context: Blender context providing the selection and depsgraph.
        options: Export settings (bone filtering, material order, raw vs
            evaluated mesh data, bone-name checks).

    Returns:
        A fully populated Psk ready to be serialized.
    """
    input_objects = get_psk_input_objects(context)
    armature_object: bpy.types.Object = input_objects.armature_object
    psk = Psk()
    bones = []
    if armature_object is None:
        # If the mesh has no armature object, simply assign it a dummy bone at the root to satisfy the requirement
        # that a PSK file must have at least one bone.
        psk_bone = Psk.Bone()
        psk_bone.name = bytes('root', encoding='windows-1252')
        psk_bone.flags = 0
        psk_bone.children_count = 0
        psk_bone.parent_index = 0
        psk_bone.location = Vector3.zero()
        psk_bone.rotation = Quaternion.identity()
        psk.bones.append(psk_bone)
    else:
        bone_names = get_export_bone_names(armature_object, options.bone_filter_mode, options.bone_group_indices)
        armature_data = typing.cast(Armature, armature_object.data)
        bones = [armature_data.bones[bone_name] for bone_name in bone_names]
        # Check that all bone names are valid.
        if options.should_enforce_bone_name_restrictions:
            check_bone_names(map(lambda x: x.name, bones))
        for bone in bones:
            psk_bone = Psk.Bone()
            psk_bone.name = bytes(bone.name, encoding='windows-1252')
            psk_bone.flags = 0
            psk_bone.children_count = 0
            try:
                # A bone whose parent is not in the export list (or has no
                # parent) raises ValueError and is treated as a root below.
                parent_index = bones.index(bone.parent)
                psk_bone.parent_index = parent_index
                psk.bones[parent_index].children_count += 1
            except ValueError:
                psk_bone.parent_index = 0
            if bone.parent is not None:
                # Bone transform expressed relative to its parent.
                rotation = bone.matrix.to_quaternion().conjugated()
                inverse_parent_rotation = bone.parent.matrix.to_quaternion().inverted()
                parent_head = inverse_parent_rotation @ bone.parent.head
                parent_tail = inverse_parent_rotation @ bone.parent.tail
                location = (parent_tail - parent_head) + bone.head
            else:
                # Root bone: express the transform in armature-local space.
                armature_local_matrix = armature_object.matrix_local
                location = armature_local_matrix @ bone.head
                bone_rotation = bone.matrix.to_quaternion().conjugated()
                local_rotation = armature_local_matrix.to_3x3().to_quaternion().conjugated()
                rotation = bone_rotation @ local_rotation
                rotation.conjugate()
            psk_bone.location.x = location.x
            psk_bone.location.y = location.y
            psk_bone.location.z = location.z
            psk_bone.rotation.w = rotation.w
            psk_bone.rotation.x = rotation.x
            psk_bone.rotation.y = rotation.y
            psk_bone.rotation.z = rotation.z
            psk.bones.append(psk_bone)

    # MATERIALS
    material_names = options.material_names
    for material_name in material_names:
        psk_material = Psk.Material()
        psk_material.name = bytes(material_name, encoding='windows-1252')
        psk_material.texture_index = len(psk.materials)
        psk.materials.append(psk_material)

    for input_mesh_object in input_objects.mesh_objects:
        # MATERIALS
        # Map this mesh's material slots onto the global material order.
        material_indices = [material_names.index(material.name) for material in input_mesh_object.data.materials]
        # MESH DATA
        if options.use_raw_mesh_data:
            mesh_object = input_mesh_object
            mesh_data = input_mesh_object.data
        else:
            # Create a copy of the mesh object after non-armature modifiers are applied.

            # Temporarily force the armature into the rest position.
            # We will undo this later.
            old_pose_position = armature_object.data.pose_position
            armature_object.data.pose_position = 'REST'

            depsgraph = context.evaluated_depsgraph_get()
            bm = bmesh.new()
            bm.from_object(input_mesh_object, depsgraph)
            mesh_data = bpy.data.meshes.new('')
            bm.to_mesh(mesh_data)
            del bm
            mesh_object = bpy.data.objects.new('', mesh_data)
            mesh_object.matrix_world = input_mesh_object.matrix_world

            # Copy the vertex groups
            for vertex_group in input_mesh_object.vertex_groups:
                mesh_object.vertex_groups.new(name=vertex_group.name)

            # Restore the previous pose position on the armature.
            armature_object.data.pose_position = old_pose_position

        # Points from earlier meshes are already in psk.points; offset indices.
        vertex_offset = len(psk.points)

        # VERTICES
        for vertex in mesh_data.vertices:
            point = Vector3()
            # Export points in world space.
            v = mesh_object.matrix_world @ vertex.co
            point.x = v.x
            point.y = v.y
            point.z = v.z
            psk.points.append(point)

        uv_layer = mesh_data.uv_layers.active.data

        # WEDGES
        mesh_data.calc_loop_triangles()

        # Build a list of non-unique wedges.
        wedges = []
        for loop_index, loop in enumerate(mesh_data.loops):
            wedge = Psk.Wedge()
            wedge.point_index = loop.vertex_index + vertex_offset
            wedge.u, wedge.v = uv_layer[loop_index].uv
            # PSK uses a top-left UV origin; Blender's is bottom-left.
            wedge.v = 1.0 - wedge.v
            wedges.append(wedge)

        # Assign material indices to the wedges.
        for triangle in mesh_data.loop_triangles:
            for loop_index in triangle.loops:
                wedges[loop_index].material_index = material_indices[triangle.material_index]

        # Populate the list of wedges with unique wedges & build a look-up table of loop indices to wedge indices
        wedge_indices = {}
        loop_wedge_indices = [-1] * len(mesh_data.loops)
        for loop_index, wedge in enumerate(wedges):
            wedge_hash = hash(wedge)
            if wedge_hash in wedge_indices:
                loop_wedge_indices[loop_index] = wedge_indices[wedge_hash]
            else:
                wedge_index = len(psk.wedges)
                wedge_indices[wedge_hash] = wedge_index
                psk.wedges.append(wedge)
                loop_wedge_indices[loop_index] = wedge_index

        # FACES
        poly_groups, groups = mesh_data.calc_smooth_groups(use_bitflags=True)
        for f in mesh_data.loop_triangles:
            face = Psk.Face()
            face.material_index = material_indices[f.material_index]
            # Wedge order is reversed to flip the triangle winding for PSK.
            face.wedge_indices[0] = loop_wedge_indices[f.loops[2]]
            face.wedge_indices[1] = loop_wedge_indices[f.loops[1]]
            face.wedge_indices[2] = loop_wedge_indices[f.loops[0]]
            face.smoothing_groups = poly_groups[f.polygon_index]
            psk.faces.append(face)

        # WEIGHTS
        if armature_object is not None:
            armature_data = typing.cast(Armature, armature_object.data)
            # Because the vertex groups may contain entries for which there is no matching bone in the armature,
            # we must filter them out and not export any weights for these vertex groups.
            bone_names = [x.name for x in bones]
            vertex_group_names = [x.name for x in mesh_object.vertex_groups]
            vertex_group_bone_indices = dict()
            for vertex_group_index, vertex_group_name in enumerate(vertex_group_names):
                try:
                    vertex_group_bone_indices[vertex_group_index] = bone_names.index(vertex_group_name)
                except ValueError:
                    # The vertex group does not have a matching bone in the list of bones to be exported.
                    # Check to see if there is an associated bone for this vertex group that exists in the armature.
                    # If there is, we can traverse the ancestors of that bone to find an alternate bone to use for
                    # weighting the vertices belonging to this vertex group.
                    if vertex_group_name in armature_data.bones:
                        bone = armature_data.bones[vertex_group_name]
                        while bone is not None:
                            try:
                                bone_index = bone_names.index(bone.name)
                                vertex_group_bone_indices[vertex_group_index] = bone_index
                                break
                            except ValueError:
                                bone = bone.parent
            for vertex_group_index, vertex_group in enumerate(mesh_object.vertex_groups):
                if vertex_group_index not in vertex_group_bone_indices:
                    # Vertex group has no associated bone, skip it.
                    continue
                bone_index = vertex_group_bone_indices[vertex_group_index]
                for vertex_index in range(len(mesh_data.vertices)):
                    try:
                        weight = vertex_group.weight(vertex_index)
                    except RuntimeError:
                        # Vertex is not a member of this group.
                        continue
                    if weight == 0.0:
                        continue
                    w = Psk.Weight()
                    w.bone_index = bone_index
                    w.point_index = vertex_offset + vertex_index
                    w.weight = weight
                    psk.weights.append(w)

        if not options.use_raw_mesh_data:
            # Clean up the temporary evaluated copies created above.
            bpy.data.objects.remove(mesh_object)
            bpy.data.meshes.remove(mesh_data)
            del mesh_data

    return psk
|
998,420 | 1c6a1c95d24baf6599043c7a68250ae360353704 | '''
Using the random and datetime modules in Python, generate a random date between
given start and end dates.
'''
# BUGFIX: the original imported only timedelta, so datetime.strptime raised
# NameError at runtime.
from datetime import datetime, timedelta
import random


def random_date(start, end, fmt="%m/%d/%Y"):
    """Return a random date strictly after *start* and up to *end*.

    Args:
        start: Start date string, e.g. "01/31/2020".
        end: End date string; must be later than *start*.
        fmt: strptime format shared by both inputs.

    Returns:
        A datetime in the half-open interval (start, end].
    """
    d0 = datetime.strptime(start, fmt)
    d1 = datetime.strptime(end, fmt)
    diff = (d1 - d0).days
    # randint(1, diff) keeps the original behaviour: the start date itself
    # is never returned, while the end date can be.
    return d0 + timedelta(days=random.randint(1, diff))


if __name__ == '__main__':
    date1 = input("Enter start date: ")
    date2 = input("Enter end date: ")
    print("{:%m/%d/%Y}".format(random_date(date1, date2)))
998,421 | c8bbd172b54191d0bd8069e5387c1efe1f242f34 | import wagtail.core.blocks as blocks
from wagtail.images.blocks import ImageChooserBlock
from events.models import EventListPage
class PageLinkBlock(blocks.StructBlock):
    """StructBlock wrapping a single internal page chooser."""
    page = blocks.PageChooserBlock()

    class Meta:
        icon = "user"
        template = "home/blocks/page_link_block.html"
class PageLinksBlock(blocks.StructBlock):
    """Optional title plus a stream of PageLinkBlock entries ("buttons")."""
    title = blocks.CharBlock(required=False)
    buttons = blocks.StreamBlock([("button", PageLinkBlock())])

    class Meta:
        template = "home/blocks/page_links_block.html"
class BannerBlock(blocks.StructBlock):
    """Banner with a title, short text, and an optional background image."""
    title = blocks.CharBlock()
    short_text = blocks.CharBlock()
    background_image = ImageChooserBlock(required=False)

    class Meta:
        template = "home/blocks/banner_block.html"
class TextBlock(blocks.StructBlock):
    """Titled rich-text section."""
    title = blocks.CharBlock()
    text = blocks.RichTextBlock()

    class Meta:
        template = "home/blocks/text_block.html"
class FormEmbedBlock(blocks.StructBlock):
    """Embed a form by its publish URL.

    NOTE(review): presumably the publish URL of an externally hosted form —
    confirm against the block template.
    """
    publish_url = blocks.URLBlock()

    class Meta:
        icon = "presentation"
        template = "home/blocks/form_embed_block.html"
class EventLinkBlock(blocks.StructBlock):
    """Chooser restricted to EventListPage pages."""
    event = blocks.PageChooserBlock(page_type=EventListPage)

    class Meta:
        icon = "user"
        template = "home/blocks/event_link_block.html"
class EventListBlock(blocks.StructBlock):
    """Optional title plus a stream of EventLinkBlock entries."""
    title = blocks.CharBlock(required=False)
    events = blocks.StreamBlock([("event", EventLinkBlock())])

    class Meta:
        template = "home/blocks/event_list_block.html"
|
998,422 | ecc7ab0260371c18e1a0330b43fc690826e79ad1 | #------------------Bombermans Team---------------------------------#
#Author : B3mB4m
#Concat : b3mb4m@protonmail.com
#Project : https://github.com/b3mb4m/Shellsploit
#LICENSE : https://github.com/b3mb4m/Shellsploit/blob/master/LICENSE
#------------------------------------------------------------------#
#Greetz : Kara Ayaz
from color import *
def controlset(choice, argv1="None", argv2="None", argv3="None", argv4="None"):
    """Print the option table for the selected shellcode module.

    Args:
        choice: Module path, e.g. "linux86/binsh_spawn".
        argv1..argv4: Current option values; which slot means what depends
            on the module category (see the lists below).

    Side effects:
        Writes a formatted, colorized option table to stdout.
    """
    argv3 = str(argv3)

    # Standalone shellcodes (only encoder/iteration options).
    # NOTE: renamed from 'list' to avoid shadowing the builtin.
    standalone_payloads = [
        "freebsd_x64/binsh_spawn",
        "linux86/binsh_spawn",
        "linux86/bindash_spawn",
        "linux64/binsh_spawn",
        "linux/binsh_spawn",
        "osx64/binsh_spawn",
        "freebsd_x86/binsh_spawn",
        "linux_arm/binsh_spawn",
        "linux_mips/binsh_spawn",
        "solarisx86/binsh_spawn",
        "osx86/binsh_spawn"]

    # Modules that depend on a file name/path.
    file_payloads = [
        "linux/read",
        "FreeBSDx86/read",
        "linux86/read",
        "solarisx86/read",
        "linux86/chmod",
        "linux64/read",
        "linux_arm/chmod",
        "linux86/mkdir",
        "linux_arm/chmod"]

    # TCP bind shells (need a listen port).
    bind_payloads = [
        "linux86/tcp_bind",
        "solarisx86/tcp_bind",
        "linux/tcp_bind",
        "linux64/tcp_bind",
        "osx86/tcp_bind",
        "osx64/tcp_bind",
        "freebsd_x86/tcp_bind"]

    # Reverse TCP shells (need host and port).
    reverse_payloads = [
        "freebsd_x86/reverse_tcp",
        "freebsd_x64/reverse_tcp",
        "freebsd_x86/reverse_tcp2",
        "linux/reverse_tcp",
        "linux86/reverse_tcp",
        "linux64/reverse_tcp",
        "osx64/reverse_tcp",
        "linux_mips/reverse_tcp",
        "osx86/reverse_tcp",
        "solarisx86/reverse_tcp",
    ]

    # Execve payloads.
    # BUGFIX: the original list was missing a comma after "freebsd_x86/exec",
    # so the adjacent string literals concatenated into the bogus entry
    # "freebsd_x86/execlinux86/exec" and neither module ever matched.
    exec_payloads = [
        "linux_arm/exec",
        "freebsd_x86/exec",
        "linux86/exec",
        "windows/exec",
    ]

    def column_width(primary, secondary):
        # Width of the "Current Setting" column: at least 8 characters,
        # otherwise the longer value plus 5 for breathing room (matches
        # the original asymmetric behaviour exactly).
        if len(primary) >= len(secondary):
            return 8 if len(primary) <= 2 else len(primary) + 5
        return len(secondary) + 5

    padd = column_width(argv1, argv2)

    if choice in standalone_payloads:
        print(bcolors.GREEN+"""
Module options ({0}):

\tName\t\t{1}\t\tRequired\tDescription
\t----\t\t{2}\t\t--------\t-----------
\tencoder\t\t{3}\t\tno\t\tEncoder type
\titeration\t{4}\t\tno\t\tIteration times
""".format(choice, "Current Setting".ljust(padd), "---------------".ljust(padd),
           argv1.ljust(padd), argv2.ljust(padd)))
    elif choice in file_payloads:
        print(bcolors.GREEN+"""
Module options ({0}):

\tName\t\t{1}\t\tRequired\tDescription
\t----\t\t{2}\t\t--------\t-----------
\tfile\t\t{3}\t\tyes\t\tFile name&path
\tencoder\t\t{4}\t\tno\t\tEncoder type
\titeration\t{5}\t\tno\t\tIteration times
""".format(choice, "Current Setting".ljust(padd), "---------------".ljust(padd),
           argv1.ljust(padd), argv2.ljust(padd), argv3.ljust(padd)))
    elif choice in bind_payloads:
        print(bcolors.GREEN+"""
Module options ({0}):

\tName\t\t{1}\t\tRequired\tDescription
\t----\t\t{2}\t\t--------\t-----------
\tport\t\t{3}\t\tyes\t\tThe listen port
\tencoder\t\t{4}\t\tno\t\tEncoder type
\titeration\t{5}\t\tno\t\tIteration times
""".format(choice, "Current Setting".ljust(padd), "---------------".ljust(padd),
           argv1.ljust(padd), argv2.ljust(padd), argv3.ljust(padd)))
    elif choice in reverse_payloads:
        # Reverse shells size the column from the host/port values instead.
        padd = column_width(argv2, argv3)
        info_ = "port"
        info__ = "host\t\t"
        infodesc_ = "Connect PORT"
        infodesc__ = "Connect HOST"
        print(bcolors.GREEN+"""
Module options ({0}):

\tName\t\t{1}\t\tRequired\tDescription
\t----\t\t{2}\t\t--------\t-----------
\t{3}\t\t{4}\t\tyes\t\t{5}
\t{6}{7}\t\tyes\t\t{8}
\tencoder\t\t{9}\t\tno\t\tEncoder type
\titeration\t{10}\t\tno\t\tIteration times
""".format(choice, "Current Setting".ljust(padd), "---------------".ljust(padd),
           info_, argv1.ljust(padd), infodesc_, info__, argv2.ljust(padd),
           infodesc__, argv3.ljust(padd), argv4.ljust(padd)))
    elif choice in exec_payloads:
        # Exec payloads size the column from the command value (argv3).
        padd = column_width(argv3, argv2)
        print(bcolors.GREEN+"""
Module options ({0}):

\tName\t\t{1}\t\tRequired\tDescription
\t----\t\t{2}\t\t--------\t-----------
\tcommand\t\t{3}\t\tyes\t\tCommand to execute
\tencoder\t\t{4}\t\tno\t\tEncoder type
\titeration\t{5}\t\tno\t\tIteration times
""".format(choice, "Current Setting".ljust(padd), "---------------".ljust(padd),
           argv3.ljust(padd), argv2.ljust(padd), argv1.ljust(padd)))
    else:
        if choice == "windows/messagebox":
            # BUGFIX: the 'message' row previously carried the copy-pasted
            # description "File name/path to remove".
            print(bcolors.GREEN+"""
Module options ({0}):

\tName\t\tCurrent Setting\t\tRequired\tDescription
\t----\t\t--------------\t\t--------\t-----------
\tmessage\t\t{1}\t\t\tyes\t\tMessage text to display
\tencoder\t\t{2}\t\t\tno\t\tEncoder type
\titeration\t{3}\t\t\tno\t\tIteration times
""".format(choice, argv1, argv2, argv3))
        elif choice == "windows/download&execute":
            print(bcolors.GREEN+"""
Module options ({0}):

\tName\t\tCurrent Setting\t\tRequired\tDescription
\t----\t\t--------------\t\t--------\t-----------
\tlink\t\t{1}\t\t\tyes\t\tSource to download exe
\tfilename\t{2}\t\t\tyes\t\tFile name
\tencoder\t\t{3}\t\t\tno\t\tEncoder type
\titeration\t{4}\t\t\tno\t\tIteration times
""".format(choice, argv3, argv4, argv2, argv1))
        # Will be change
        elif choice == "freebsd_x64/tcp_bind":
            print(bcolors.GREEN+"""
Module options ({0}):

\tName\t\tCurrent Setting\t\tRequired\tDescription
\t----\t\t--------------\t\t--------\t-----------
\tpassword\t{1}\t\t\tyes\t\tPassword for connection
\tPORT\t\t{2}\t\t\tyes\t\tPort to bind connection
\tencoder\t\t{3}\t\t\tno\t\tEncoder type
\titeration\t{4}\t\t\tno\t\tIteration times
""".format(choice, argv4, argv3, argv2, argv1))
|
998,423 | dd42ba4d4ce6e3041dab33683fbdb50e67a331e2 | from minepy.minefield import Minefield
''' Controller class '''
class MinepyGame():
    """Game controller: mediates between UI events and the Minefield model."""

    def __init__(self, minefield):
        self.minefield = minefield
        self.startNewGame()

    # Initializes the start of a game.
    def startNewGame(self):
        self.minefield.startGame()

    # Handles a left mouse button press.
    def pressLeftButton(self, x, y):
        if self.minefield.isGameOver():
            # Any click after game over starts a fresh game.
            self.startNewGame()
            return
        if self.minefield.isFirstStep():
            # First click of the round — presumably lets the field arrange
            # itself around (x, y); confirm in Minefield.setFirstStep.
            self.minefield.setFirstStep(x, y)
        self.minefield.setOpened(x, y)
        self.minefield.isWin()

    # Handles a right mouse button press.
    def pressRightButton(self, x, y):
        self.minefield.toggleFlagged(x, y)

    # Sets the playing-field parameters according to the selected
    # difficulty level.
    def selectMode(self, mode):
        if mode == 'easy':
            self.minefield = Minefield()
            self.startNewGame()
        elif mode == 'middle':
            self.minefield = Minefield(16, 16, 40)
            self.startNewGame()
        elif mode == 'expert':
            self.minefield = Minefield(30, 16, 100)
            self.startNewGame()

    # Reads the requested help text from a file and returns it; also appends
    # the localized window title for that text to the 'name' list.
    def readText(self, command, name):
        rules = ''
        filePath = ""
        if command == "rules":
            filePath = "text/rulesOfGame.txt"
            name.append("Правила игры")
        if command == "advices":
            filePath = "text/advices.txt"
            name.append("Советы")
        with open(filePath, encoding='utf-8') as rfile:
            for line in rfile:
                rules += line.strip() + '\n'
        return rules

    # Returns the minefield object.
    def getMinefield(self):
        return self.minefield
998,424 | 0005564fd8e5db8c9776268340c9cecbeb0e21d4 | import sys
for a in range(1,500):
for b in range(a,500):
c = 1000-a-b
if (a**2+b**2==c**2) and (a+b+c==1000):
sys.exit(str(a*b*c)) |
998,425 | 8bac66053729789408b45431ad0a3db42234e2df | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
DSL structures.
"""
import typing
from forml.io.dsl.struct import frame, kind as kindmod
class Field(typing.NamedTuple):
    """Schema field class."""

    # Value kind (type descriptor) of the field.
    kind: kindmod.Any
    # Optional field name; None for anonymous fields.
    name: typing.Optional[str] = None

    def renamed(self, name: typing.Optional[str]) -> 'Field':
        """Return copy of the field with the new name.

        Args:
            name: New name to be used.

        Returns:
            New Field instance.
        """
        # Avoid allocating a new instance when the name is unchanged.
        return self if name == self.name else Field(self.kind, name)
class Schema(metaclass=frame.Table):  # pylint: disable=invalid-metaclass
    """Base class for table schema definitions. Note the meta class is actually going to turn it into an instance
    of frame.Table.
    """

    # NOTE(review): subclasses presumably declare Field attributes that the
    # frame.Table metaclass consumes — confirm in frame.Table.
|
998,426 | 95a33dee2ae93972cff18d0748c75d35cb751b9c | import cv2
import numpy as np
from lanes.image_debugtools import log, LOG_LEVEL_ERROR, LOG_LEVEL_WARNING
"""
Class that process & analyses a road image in
order to identify the lanes that appear in it
and to compute the offset distance that separates
the driver to the center of the lane.
The image of the road can represent any of the
following scenarios, so the class is well
prepared to fully support them:
* Image with two lanes [RI-A]
* Image with only one lane (either left or right) [RI-B]
* An image without any lanes [RI-C]
The following assumptions on the image are made:
* Lane marks are black
* There are no other black marks in the image
* The environment is light compared to the lanes,
it does not need to be regular or unicolored.
* There is enough contrast between the lane marks
and the environment
"""
MIN_INTERLANE_PX = 30
INTERLANE_PX = 190
class RoadImage:
    def __init__(self, input_image):
        """Store the road image and initialize the lane-geometry slots.

        Args:
            input_image: BGR image array (height x width x channels).
        """
        assert input_image is not None
        # Image to be analysed
        self.image = input_image
        (self.h, self.w, _) = self.image.shape
        # Car position proxy: bottom-center pixel of the frame.
        # NOTE(review): w/2 is a float under Python 3 — confirm callers expect that.
        self.bottom_center = (self.w/2, self.h)
        # Lane endpoints; None until analyse() computes them.
        self.left_lane_top = None
        self.right_lane_top = None
        self.left_lane_bottom = None
        self.right_lane_bottom = None
        self.bisection_bottom = None
    ###### GETTERS #####
    # Cascade Getters that allow access to
    # every single image in our pipeline: each stage feeds on the previous one
    # (raw -> gray -> CLAHE -> gamma -> binarized -> edges).

    def get_image(self):
        # Prevent modifications in the view by making a copy
        return self.image.copy()

    def get_grayscale(self):
        # BGR -> single-channel grayscale.
        return cv2.cvtColor(self.get_image(), cv2.COLOR_BGR2GRAY)

    def get_clahe(self):
        # Contrast-limited adaptive histogram equalization boosts local contrast.
        return cv2.createCLAHE(clipLimit=4.0, tileGridSize=(8,8)).apply(self.get_grayscale())

    def get_gamma(self):
        # Gamma correction (exponent 0.5, via lookup table) brightens mid-tones.
        return cv2.LUT(self.get_clahe(), np.array([((i / 255.0) ** 0.5) * 255 for i in np.arange(0, 256)]).astype("uint8"))

    def get_binarized(self, binarize_threshold=120, binarize_maxval=255):
        # Fixed-threshold binarization; cv2.threshold returns (retval, image),
        # we keep only the image.
        return cv2.threshold(self.get_gamma(), binarize_threshold, binarize_maxval, cv2.THRESH_BINARY)[1]

    def get_edged(self, canny_lower_threshold=50, canny_upper_threshold=150):
        # Canny edge detection on the binarized image.
        return cv2.Canny(self.get_binarized(), canny_lower_threshold, canny_upper_threshold)
    # This method is just for debugging purposes
    '''
    Returns a copy of the raw image in which the line segments
    resulting from the application of the probabilistic Hough
    Transform (on the Canny edge image) have been drawn.
    '''
    def get_hough(self, hough_rho_resolution=1, hough_thetha_resolution=np.pi/100, hough_threshold_votes=30, hough_minLineLength=5,
                  hough_maxLineGap=10):
        hough_image = self.get_image().copy()
        hough_lines = cv2.HoughLinesP(self.get_edged(), hough_rho_resolution, hough_thetha_resolution,
                                      hough_threshold_votes, minLineLength=hough_minLineLength, maxLineGap=hough_maxLineGap)
        if hough_lines is not None:
            # Draw lines found by Hough
            for line in hough_lines:
                # Each entry is [[x1, y1, x2, y2]].
                x1, y1, x2, y2 = line[0]
                # Blue (BGR), 3 px thick.
                cv2.line(hough_image, (x1, y1), (x2, y2), (255, 0, 0), 3)
        else:
            log("[RI-C] No candidate Hough Lines were found", level=LOG_LEVEL_ERROR)
        return hough_image
##### MAIN FUNCTIONALITY #####
'''
Extracts the main relevant points of a raw image. Namely:
- Left lane (Top & Bottom points)
- Intersection point of both lanes
- Right lane (Top & Bottom points)
- Position of the car (center of the photo)
- Midpoint of the lane
Returns: (Bottom point of left lane, Intersection point, Bottom point of right lane,
Position of the car (Bottom center of the photo), Midpoint of the lane)
'''
def analyse(self, hough_rho_resolution=1, hough_thetha_resolution=np.pi/100, hough_threshold_votes=30, hough_minLineLength=5,
hough_maxLineGap=10):
log("Watch out for image size! Less pixels means Less votes")
# ---------- STEP 1: Extract Hough lines ----------
#
## In this step we identify the possible lines that appear in our image.
## In order to be considered as such, the algorithm goes through a voting process
## and each of the candidates must get enough votes to be considered a line.
hough_lines = cv2.HoughLinesP(self.get_edged(), hough_rho_resolution, hough_thetha_resolution,
hough_threshold_votes, minLineLength=hough_minLineLength, maxLineGap=hough_maxLineGap)
## If no candidate lines were identified, we are in [RI-C] and we cannot successfully
## estimate the distance, we don't have enough reference.
if hough_lines is None:
log("[RI-C] No candidate Hough Lines were found", level=LOG_LEVEL_ERROR)
return (None, None, None, None, None)
##
#
# -------------------------------------------------
# ---------- STEP 2: Figure out where lanes are ----------
#
## We now go through every candidate line and keep only the two of them which
## correspond to the lane markings (one for each side)
((left_point, left_point_partner), (right_point, right_point_partner)) = self._compute_nearest_lanes(hough_lines)
## The two lane markings must be separated at least 100px of each other.
## If not, we consider that one of the markings is wrong and drop one of them
## in order to apply the reflection method (to estimate the distance)
lanes_dist = self.compute_distance_points(left_point, right_point)
lanes_dist2 = self.compute_distance_points(left_point, right_point_partner)
if lanes_dist and lanes_dist < MIN_INTERLANE_PX or lanes_dist2 and lanes_dist2 < MIN_INTERLANE_PX:
#print('dropped')
#print(lanes_dist, lanes_dist2)
right_point = None
right_point_partner = None
##
#
# -------------------------------------------------
# ---------- STEP 3: Extend lanes lines so they take up the whole image ----------
#
## We need to extend the lines in order to ensure they pass by the x axis,
## which is the bottom-most point and thus it is less altered by perspective
## so that we can correctly compute the distance.
##
## Up to this point, we need to figure out what lanes where identified correctly
## (left, right or both) (we have cleared out the possibility of both of them being
## incorrectly identified in the previous assertion)
if left_point is not None:
# Left lane was correctly identified: Either [RI-A] or [RI-B]
# Get the parameters the characterize left lane mark before we proceed
left_m = self.compute_slope(left_point[0], left_point[1], left_point_partner[0], left_point_partner[1])
left_n = self.compute_independent(left_point[0], left_point[1], left_point_partner[0], left_point_partner[1])
self.left_lane_top, self.left_lane_bottom = self.compute_line_drawing_points(left_m, left_n)
if right_point is None:
# We are in case [RI-B], only left lane was correctly identified
log("[RI-B] - Only Left lane was correctly identified")
# We reflect the left lane to obtain the right one and separate both
# lines in the picture by the width of a theoretical correct lane.
right_n = self.compute_reflected_independent(left_m, left_point, INTERLANE_PX)
right_m = -left_m
if right_point is not None:
# Right lane was correctly identified: Either [RI-A] or [RI-B]
# Get the parameters the characterize right lane mark before we proceed
right_m = self.compute_slope(right_point[0], right_point[1], right_point_partner[0], right_point_partner[1])
right_n = self.compute_independent(right_point[0], right_point[1], right_point_partner[0], right_point_partner[1])
self.right_lane_top, self.right_lane_bottom = self.compute_line_drawing_points(right_m, right_n)
if left_point is None:
# We are in case [RI-B], only right lane was correctly identified
log("[RI-B] - Only Right lane was correctly identified")
# We reflect the right lane to obtain the left one and separate both
# lines in the picture by the width of a theoretical correct lane.
left_n = self.compute_reflected_independent(right_m, right_point, INTERLANE_PX)
left_m = -right_m
## In order to ensure lines where chosen properly, we assert that each of the bottom
## points fall in each side of the image (only if both points were identified correctly)
if self.left_lane_bottom and self.right_lane_bottom:
# If left bottom point is not on the left side, we discart
# it and use the reflection of the right lane
if self.left_lane_bottom[0] >= self.bottom_center[0]:
left_n = self.compute_reflected_independent(right_m, right_point, INTERLANE_PX)
left_m = -right_m
self.left_lane_bottom = None
# If right bottom point is not on the right side, we discart
# it and use the reflection of the left lane
elif self.right_lane_bottom[0] <= self.bottom_center[0]:
right_n = self.compute_reflected_independent(left_m, left_point, INTERLANE_PX)
right_m = -left_m
self.right_lane_bottom = None
## Now, we have to ensure that both lines intersect. Two lines do not intersect
## if either they are parallel (same slope) or the same (same slope and independent)
if left_m == right_m:
if left_n == right_n:
# Same lines, infinite intersection points, so we just reflect one
# of them as if we had only identified that one.
left_n = self.compute_reflected_independent(right_m, right_point, INTERLANE_PX)
left_m = -right_m
else:
# Parallel lines: We change a the slope of one of them just a little
# bit to transform it into intersecting
left_m += 0.01
## We now obtain the drawing points (upper-most and bottom-most) of both lanes
# Left Lane Line's Points: The upper-most and bottom-most points corresponding
# to the line, in order to draw it
if self.left_lane_bottom is None:
self.left_lane_top, self.left_lane_bottom = self.compute_line_drawing_points(left_m, left_n)
# Right Lane Line's Points: The upper-most and bottom-most points corresponding
# to the line, in order to draw it
if self.right_lane_bottom is None:
self.right_lane_top, self.right_lane_bottom = self.compute_line_drawing_points(right_m, right_n)
##
#
# -------------------------------------------------
# ---------- STEP 4: Compute the intersection of both lines ----------
#
## We need the intersection point between the two lines
## We can do so, by solving a simple equation system with two unknowns:
## ----------------
## | y = mx + n
## | y = m'x + n'
## ----------------
## x = (n - n') / (m' - m)
intersection_x = (left_n - right_n) / (right_m - left_m)
intersection_y = left_m * intersection_x + left_n
intersection = (intersection_x, intersection_y)
##
#
# -------------------------------------------------
# ---------- STEP 5: Compute the position of the car using the bisection of the angle ----------
#
## INTERCENTER
# We compute the intercenter of a triangle formed by the intersection point and two
# arbitrary points of each line, and after that, we get the middle point of the lane.
incenter = self.compute_incenter(intersection, self.left_lane_bottom, self.right_lane_bottom)
m = self.compute_slope(incenter[0], incenter[1], intersection[0], intersection[1])
n = self.compute_independent(incenter[0], incenter[1], intersection[0], intersection[1])
_, self.bisection_bottom = self.compute_line_drawing_points(m, n)
##
#
# -------------------------------------------------
# Points are returned from left lane in a clockwise direction
return (self.left_lane_bottom, intersection, self.right_lane_bottom, self.bisection_bottom, self.bottom_center)
'''
Computes the distance (in px) between the car & the center
of the lane
'''
    def center_offset(self):
        """Return the pixel distance between the car position (bottom center
        of the photo) and the middle of the lane (bisection bottom point),
        or None if either point is missing."""
        return self.compute_distance_points(self.bottom_center, self.bisection_bottom)
###### AUXILIARY METHODS #####
'''
Obtains the two lines that are closest to the midpoint of the
image (one on the left and another on the right). In order to do so,
it iterates over the whole set of lines detected by Hough
(points) and calculates their distances to the midpoint.
'''
    def _compute_nearest_lanes(self, hough_lines):
        """Select, among all Hough segment endpoints, the point closest to the
        bottom center on each side of the image.

        Returns ((left_point, left_point_partner),
                 (right_point, right_point_partner)) where the partner is the
        other endpoint of the same segment; any element may be None when no
        endpoint fell on that side.
        """
        # Bottom-most left line, and its distance to the bottom-center
        left_point = None
        left_point_partner = None
        left_point_distance = None
        # Bottom-most right line, and its distance to the bottom-center
        right_point = None
        right_point_partner = None
        right_point_distance = None
        # Iterate through all points in the image to
        # get the 2 bottom-most left and right points
        # Don't need to check for hough_lines as this function
        # won't be called if it were invalid.
        for line in hough_lines:
            x1, y1, x2, y2 = line[0]
            # The two points that compose the selected segment
            points_line = [(x1, y1), (x2, y2)]
            # Counter (1-based after increment, so i % len picks the OTHER endpoint)
            i = 0
            for point in points_line:
                i += 1
                # Vector that joins the point and the bottom center
                d_center_point = (point[0] - self.bottom_center[0], point[1] - self.bottom_center[1])
                distance = np.linalg.norm(d_center_point) # Distance = Modulus
                if self.bottom_center[0] >= point[0]:
                    # Point is on the left side: keep the nearest one so far
                    if left_point is None or left_point_distance > distance:
                        left_point = point
                        left_point_partner = points_line[(i%len(points_line))]
                        left_point_distance = distance
                else:
                    # Point is on the right side: keep the nearest one so far
                    if right_point is None or right_point_distance > distance:
                        right_point = point
                        right_point_partner = points_line[(i%len(points_line))]
                        right_point_distance = distance
        return ((left_point, left_point_partner), (right_point, right_point_partner))
'''
Computes the slope of two given points.
If the slope function is not defined (that is,
when x2 = x1), we substract one pixel from
either x2 or x1 to keep it bounded.
'''
def compute_slope(self, x1, y1, x2, y2):
# We must prevent the slope
# from being 0 or inf
if y1 == y2:
# Slope that produces an almost
# flat line without being zero
return 0.001
if x1 == x2:
# Slope that produces an almost
# straight line without being inf
return 999
return (y2-y1)/(x2-x1)
'''
Computes the independent term of a line that two given
points create.
For a line expressed as 'y = mx + n', the independent term
corresponds to 'n'.
If the slope function is not defined (that is,
when x2 = x1), we substract one pixel from
either x2 or x1 to keep it bounded.
'''
def compute_independent(self, x1, y1, x2, y2):
return (x1*(y1-y2))/(x2-x1) + y1 if x2 != x1 else x1*(y1-y2) + y1
'''
Given a line represented by its slope and independent term,
computes the upper-most and bottom-most points in order
to draw it
'''
    def compute_line_drawing_points(self, m, n):
        """Return the top (y = 0) and bottom (y = self.h) points of the line
        y = mx + n, with x floor-divided and cast to int for drawing.

        NOTE(review): `.astype(int)` implies m/n are numpy scalars — plain
        Python floats have no `astype`; confirm callers always pass numpy
        values (Hough output coordinates are numpy types).
        """
        # The upper-most point is in y = 0
        # y = mx + n <--> x = y - n/m
        x = -n // m # y = 0
        top_point = (x.astype(int), 0)
        # The bottom-most point is in y = height
        x = (self.h - n) // m # y = height
        bottom_point = (x.astype(int), self.h)
        return top_point, bottom_point
'''
Computes the incenter of a triangle given its three vertices
'''
def compute_incenter(self, a, b, c):
d_ab = [b[0] - a[0], b[1] - a[1]]
d_ab_module = np.linalg.norm(d_ab)
d_ac = [c[0] - a[0], c[1] - a[1]]
d_ac_module = np.linalg.norm(d_ac)
d_bc = [b[0] - c[0], b[1] - c[1]]
d_bc_module = np.linalg.norm(d_bc)
sum_modules = d_ab_module + d_ac_module + d_bc_module
x_i = (a[0]*d_bc_module + c[0]*d_ab_module + b[0]*d_ac_module) / sum_modules
y_i = (a[1]*d_bc_module + c[1]*d_ab_module + b[1]*d_ac_module) / sum_modules
return (x_i, y_i)
'''
Computes the distance between two points
'''
def compute_distance_points(self, pointA, pointB):
res = None
if pointA and pointB:
vector = [pointB[0] - pointA[0], pointB[1] - pointA[1]]
res = np.linalg.norm(vector)
return res
'''
Computes the independent coefficient of a line with slope the inverse
to the original, separated to one point by a given distance.
(x, y) & (x + dist, y)
y = mx+n <-> n' = y + m'(x + dist)
'''
def compute_reflected_independent(self, original_slope, point, px_distance):
return point[1] + original_slope*(point[0] + px_distance)
def int_point(self, point):
return (int(point[0]), int(point[1]))
|
998,427 | fdba015f1cce0262da992e91430c24260773fdf9 | # -*- coding: utf-8 -*-
'''
单元测试:
作用:用来对一个函数、一个类或者一个模块来进行正确性校验工作
结果:
1.单元测试通过,说明我们测试的函数功能正常
2.单元测试不通过,说明函数功能有BUG,要么测试条件输入有误
'''
def mySum(x,y):
    """Return the sum of x and y."""
    total = x + y
    return total
def mySub(x,y):
    """Return x minus y."""
    difference = x - y
    return difference
|
998,428 | 5775a6ed5ca27786fa571593a536c3ce0d45cb82 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
import datetime
from functions import eventfunc
import config
# Create a one-day test event starting on 2015-05-17.
# NOTE: `month=05` was a legacy leading-zero literal (value 5); writing it
# as plain 5 keeps the exact same date and stays valid on Python 3 too.
startDate = datetime.date(year=2015, month=5, day=17)
endDate = startDate + datetime.timedelta(days=1)
eventfunc(startDate, endDate, "Test", 'test@gmail.com')
|
998,429 | de0f489241815498bce86a1c9e6cf4a9da89024d | from tkinter import *
import sqlite3
import tkinter.messagebox
class Professor:
    """Professor console window.

    A Tkinter form for inserting student grade records into the
    SCHOOL_DATABASE.db SQLite database; the STUDENT_RECORDS table is
    created on first use.
    """

    def __init__(self,prof_window):
        # Make sure the records table exists before the form is used.
        connection_newDB = sqlite3.connect('SCHOOL_DATABASE.db')
        # create cursor
        cursor_newDB = connection_newDB.cursor()
        # create table for student database
        cursor_newDB.execute("""CREATE TABLE IF NOT EXISTS STUDENT_RECORDS (
                    STUDENT_NUMBER varchar(20),
                    STUDENT_NAME text,
                    COURSE_NUMBER varchar(20),
                    COURSE_NAME text,
                    PROFESSOR_NAME text,
                    GRADE int,
                    PRIMARY KEY (STUDENT_NUMBER)
                    )""")
        connection_newDB.commit()
        connection_newDB.close()
        # --- Window setup ---
        self.professor_window = prof_window
        self.professor_window.geometry("450x400+350+100")
        self.professor_window.title("PROFESSOR")
        self.professor_window_Frame1 = Frame(self.professor_window)
        self.professor_window_Frame2 = Frame(self.professor_window)
        self.professor_window_Frame1.pack()
        self.professor_window_Frame2.pack()
        # --- Header ---
        self.Frame1_LabelHead = Label(self.professor_window_Frame1, text="PROFESSOR CONSOLE",font=("Times new Roman", 18))
        self.Frame1_LabelHead.pack()
        # --- Form fields (label + entry per column of STUDENT_RECORDS) ---
        self.prof_student_number_label = Label(self.professor_window_Frame2, text="STUDENT NUMBER")
        self.prof_student_number = Entry(self.professor_window_Frame2, width=30)
        self.prof_student_name_label = Label(self.professor_window_Frame2, text="STUDENT NAME")
        self.prof_student_name = Entry(self.professor_window_Frame2, width=30)
        self.prof_course_number_label = Label(self.professor_window_Frame2, text="COURSE NUMBER")
        self.prof_course_number = Entry(self.professor_window_Frame2, width=30)
        self.prof_course_name_label = Label(self.professor_window_Frame2, text="COURSE NAME")
        self.prof_course_name = Entry(self.professor_window_Frame2, width=30)
        self.prof_professor_name_label = Label(self.professor_window_Frame2, text="PROFESSOR NAME")
        self.prof_professor_name = Entry(self.professor_window_Frame2, width=30)
        self.prof_grade_label = Label(self.professor_window_Frame2, text="GRADES")
        self.prof_grade = Entry(self.professor_window_Frame2, width=30)
        # --- Action buttons ---
        self.prof_insert_button = Button(self.professor_window_Frame2, text="ADD NEW RECORD", command=self.newRecord)
        self.prof_student_record_reset_button = Button(self.professor_window_Frame2, text="RESET",command=self.reset)
        self.prof_exit_button = Button(self.professor_window_Frame2, text="LOG OUT", command=self.professor_window.destroy)
        # --- Grid layout ---
        self.prof_student_number_label.grid(row=0, column=0)
        self.prof_student_number.grid(row=0, column=1, padx=20)
        self.prof_student_name_label.grid(row=1, column=0)
        self.prof_student_name.grid(row=1, column=1, padx=20)
        self.prof_course_number_label.grid(row=2, column=0)
        self.prof_course_number.grid(row=2, column=1, padx=20)
        self.prof_course_name_label.grid(row=3, column=0)
        self.prof_course_name.grid(row=3, column=1, padx=20)
        self.prof_professor_name_label.grid(row=4, column=0)
        self.prof_professor_name.grid(row=4, column=1, padx=20)
        self.prof_grade_label.grid(row=5, column=0)
        self.prof_grade.grid(row=5, column=1, padx=20)
        self.prof_insert_button.grid(row=6, column=0, columnspan=2, pady=10, padx=10, ipadx=140)
        self.prof_student_record_reset_button.grid(row=10, column=0, columnspan=2, pady=10, ipadx=175)
        self.prof_exit_button.grid(row=12, column=0, columnspan=2, pady=10, ipadx=165)

    def newRecord( self ):
        """Validate the form and insert a new row into STUDENT_RECORDS.

        Bug fix: the original condition was
        ``(len(number) != 0 or len(name)) != 0 and len(grade) != 0`` — the
        misplaced parenthesis accepted records with an empty student number
        or name.  All three required fields are now checked individually.
        A non-numeric grade no longer raises ValueError; it is reported as
        invalid instead.
        """
        student_number = self.prof_student_number.get()
        student_name = self.prof_student_name.get()
        grade_text = self.prof_grade.get()
        if len(student_number) != 0 and len(student_name) != 0 and len(grade_text) != 0:
            try:
                grade = int(grade_text)
            except ValueError:
                # Previously int() crashed the handler on non-numeric input.
                print("Invalid Grades, OUT OF LIMIT")
                return
            if 0 <= grade <= 100:
                #create Connection
                con_insert = sqlite3.connect('SCHOOL_DATABASE.db')
                cursor_insert = con_insert.cursor()
                cursor_insert.execute(
                    "INSERT INTO STUDENT_RECORDS VALUES (:stu_id, :stu_name, :course_id, :course_name, :prof_name, :grade)",
                    {
                        'stu_id': student_number,
                        'stu_name': student_name,
                        'course_id': self.prof_course_number.get(),
                        'course_name': self.prof_course_name.get(),
                        'prof_name': self.prof_professor_name.get(),
                        'grade': grade_text
                    })
                con_insert.commit()
                con_insert.close()
                # Clear the form after a successful insert.
                self.reset()
            else:
                print("Invalid Grades, OUT OF LIMIT")
        else:
            print("Empty fields not allowed")

    def reset( self ):
        """Clear every entry field of the form."""
        self.prof_student_number.delete(0, END)
        self.prof_student_name.delete(0, END)
        self.prof_course_number.delete(0, END)
        self.prof_course_name.delete(0, END)
        self.prof_professor_name.delete(0, END)
        self.prof_grade.delete(0, END)
|
998,430 | 08b9da32570c1a122b4ff7676bfab74892428004 | from django.views.generic.list import ListView
from .models import Course
from django.core.urlresolvers import reverse_lazy
from django.views.generic.edit import CreateView, UpdateView, \
DeleteView
from braces.views import LoginRequiredMixin, \
PermissionRequiredMixin
from django.views.generic.base import TemplateResponseMixin, View
from .forms import ModuleFormSet
from django.shortcuts import get_object_or_404, redirect
from django.apps import apps
from django.forms.models import modelform_factory
from .models import Module, Content
from django.http import HttpResponseNotFound
from django.db.models import Count
from .models import Subject
from django.views.generic.detail import DetailView
from students.forms import CourseEnrollForm
# A user only sees their own objects
class OwnerMixin(object):
    """Queryset mixin: restrict objects to those owned by the requesting user."""
    def get_queryset(self):
        qs = super(OwnerMixin, self).get_queryset()
        return qs.filter(owner=self.request.user)
# On form validation the instance is assigned an owner
# Validate -> save -> the requesting user becomes the owner
class OwnerEditMixin(object):
    """Form mixin: on a valid submission, make the requesting user the owner."""
    def form_valid(self, form):
        form.instance.owner = self.request.user
        return super(OwnerEditMixin, self).form_valid(form)
# The model in question is Course
# A user only sees the courses they own
class OwnerCourseMixin(OwnerMixin, LoginRequiredMixin):
    """Login-required base for Course views; owners only see their own courses."""
    model = Course
    fields = ['subject', 'title', 'slug', 'overview']
    success_url = reverse_lazy('manage_course_list')
# View - courses
# On save, the user becomes the owner of what they created
class OwnerCourseEditMixin(OwnerCourseMixin, OwnerEditMixin):
    """Course create/update base: owner-only queryset plus owner assignment."""
    fields = ['subject','title','slug','overview']
    # reverse_lazy because the URL comes from urls.py
    # (maps view -> url)
    success_url = reverse_lazy('manage_course_list')
    template_name = 'courses/manage/course/form.html'
# Shows only the user's own courses.
class ManageCourseListView(OwnerCourseMixin, ListView):
    """Management list of the courses owned by the current user."""
    template_name = 'courses/manage/course/list.html'
class CourseCreateView(PermissionRequiredMixin,
                       OwnerCourseEditMixin,
                       CreateView):
    """Create a new course; requires the 'courses.add_course' permission."""
    #template_name = 'courses/manage/course/form.html'
    permission_required = 'courses.add_course'
class CourseUpdateView(PermissionRequiredMixin,
                       OwnerCourseEditMixin,
                       UpdateView):
    """Edit an owned course; requires the 'courses.change_course' permission."""
    template_name = 'courses/manage/course/form.html'
    permission_required = 'courses.change_course'
class CourseDeleteView( PermissionRequiredMixin,
                        OwnerCourseMixin,
                        DeleteView):
    """Delete an owned course; requires the 'courses.delete_course' permission."""
    template_name = 'courses/manage/course/delete.html'
    success_url = reverse_lazy('manage_course_list')
    permission_required = 'courses.delete_course'
class CourseModuleUpdateView(TemplateResponseMixin, View):
    """Edit the set of modules of a single course via a ModuleFormSet."""
    template_name = 'courses/manage/module/formset.html'
    course = None
    # Build the formset bound to the cached course (optionally with POST data)
    def get_formset(self, data=None):
        return ModuleFormSet(instance=self.course,
                             data=data)
    # The view must know which course it operates on.  The request arrives
    # once with the course pk, so we resolve and cache the course (only if
    # owned by the requesting user) here; dispatch then forwards to
    # get() / post(), which need it to know which modules to display.
    def dispatch(self, request, pk):
        self.course = get_object_or_404(Course,
                                        id=pk,
                                        owner=request.user)
        return super(CourseModuleUpdateView,self).dispatch(request, pk)
    # Context passed to the template; self.course was cached in dispatch()
    def get(self, request, *args, **kwargs):
        formset = self.get_formset()
        return self.render_to_response({'course': self.course,
                                        'formset': formset})
    def post(self, request, *args, **kwargs):
        formset = self.get_formset(data=request.POST)
        if formset.is_valid():
            formset.save()
            return redirect('manage_course_list')
        return self.render_to_response({'course': self.course,
                                        'formset': formset})
class ContentCreateUpdateView(TemplateResponseMixin, View):
    """Create or update a content item (text/video/image/file) of a module."""
    module = None
    model = None
    obj = None
    template_name = 'courses/manage/content/form.html'
    # Resolve the content model class from its name; None if not recognised.
    def get_model(self, model_name):
        if model_name in ['text','video','image','file']:
            return apps.get_model(app_label='courses',
                                  model_name=model_name)
        return None
    # Build the model form for the view, based on model.
    # Common bookkeeping fields are excluded (filled automatically).
    def get_form(self, model, *args, **kwargs):
        Form = modelform_factory(model, exclude=['owner',
                                                 'order',
                                                 'created',
                                                 'updated'])
        return Form(*args,**kwargs)
    # Cache the module, content model class and (when updating) the existing
    # object before forwarding to get()/post().
    def dispatch(self, request, module_id, model_name, id=None):
        print(module_id)  # NOTE(review): debug output — consider removing
        print(model_name)  # NOTE(review): debug output — consider removing
        self.module = get_object_or_404(Module,
                                        id=module_id)
        self.model = self.get_model(model_name)
        if id:
            self.obj = get_object_or_404(self.model,
                                         id=id,
                                         owner=request.user)
        return super(ContentCreateUpdateView,self).dispatch(request,module_id,model_name, id)
    def get(self, request, module_id, model_name, id=None):
        form = self.get_form(self.model, instance=self.obj)
        return self.render_to_response({'form' : form,
                                        'object': self.obj})
    def post(self, request, module_id, model_name, id=None):
        form = self.get_form(self.model,
                             instance=self.obj,
                             data=request.POST,
                             files=request.FILES)
        if form.is_valid():
            obj = form.save(commit=False)
            obj.owner = request.user
            obj.save()
            if not id:
                # new content: link the saved item to the module
                Content.objects.create(module=self.module,item=obj)
            return redirect('module_content_list', self.module.id)
        return self.render_to_response({'form': form,
                                        'object': self.obj})
# Delete content from module
# HTTP 405 received
class ContentDeleteView(View):
    """Delete a content item; only allowed right after viewing the module
    content list (which sets 'delete_allowed' in the session)."""
    def post(self, request, id):
        # TODO: not working — previously returned HTTP 405
        if request.session.get('delete_allowed') is None:
            return HttpResponseNotFound("Cannot delete view directly")
        content = get_object_or_404(Content,
                                    id=id,
                                    module__course__owner=request.user)
        module = content.module
        # Delete the concrete item first, then the generic Content row.
        content.item.delete()
        content.delete()
        del request.session['delete_allowed']
        return redirect('module_content_list', module.id)
# Load all content for modules
class ModuleContentListView(TemplateResponseMixin, View):
    """Show all content of a module owned by the requesting user."""
    template_name = 'courses/manage/module/content_list.html'
    def get(self, request, module_id):
        module = get_object_or_404(Module,
                                   id=module_id,
                                   course__owner=request.user)
        # set delete_allowed variable in session (consumed by ContentDeleteView)
        request.session['delete_allowed'] = 'true'
        return self.render_to_response({'module': module})
# View to display all courses or filtered (based on subject)
class CourseListView(TemplateResponseMixin, View):
    """Public course catalogue, optionally filtered by subject slug."""
    model = Course
    template_name = 'courses/course/list.html'
    def get(self, request, subject=None):
        # Annotate counts so the template can show totals without extra queries.
        subjects = Subject.objects.annotate(
            total_courses=Count('courses'))
        courses = Course.objects.annotate(
            total_modules=Count('modules'))
        if subject:
            subject = get_object_or_404(Subject, slug=subject)
            courses = courses.filter(subject=subject)
        return self.render_to_response({'subjects': subjects,
                                        'subject': subject,
                                        'courses': courses})
# View to display single course
class CourseDetailView(DetailView):
    """Single-course page with an enrollment form pre-bound to the course."""
    model = Course
    template_name = 'courses/course/detail.html'
    def get_context_data(self, **kwargs):
        context = super(CourseDetailView,
                        self).get_context_data(**kwargs)
        context['enroll_form'] = CourseEnrollForm(initial={'course':self.object})
        return context
|
998,431 | 7eb5461884c979c8055ab549363f378d3f456770 | from src.tally import Tally
from src.suffix_array import SuffixArray
class FmIndex():
    ''' O(m) size FM Index, where checkpoints and suffix array samples are
        spaced O(1) elements apart. Queries like count() and range() are
        O(n) where n is the length of the query. Finding all k
        occurrences of a length-n query string takes O(n + k) time.
        Note: The spacings in the suffix array sample and checkpoints can
        be chosen differently to achieve different bounds. '''

    def __init__(self, t, checkpoint_interval=4):
        """Build the index for text t.

        Fixes: removed a stray debug print of the whole BWT, and replaced
        the `c not in dict.keys()` counting loop with dict.get.
        """
        SA = SuffixArray(t)
        self.bwt = SA.bwt
        self.bwt_len = len(self.bwt)
        self.ssa = SA.ssa  # sampled suffix array: BWM row -> text offset
        # Rank checkpoints every checkpoint_interval BWT positions
        self.checkpoints = Tally(self.bwt, checkpoint_interval)
        # Count occurrences of each character in the BWT
        total_occurrences = {}
        for c in self.bwt:
            total_occurrences[c] = total_occurrences.get(c, 0) + 1
        # Concise first column: char -> index of its first BWM row
        self.first_column = {}
        total_cnt = 0
        for c, count in sorted(total_occurrences.items()):
            self.first_column[c] = total_cnt
            total_cnt += count

    def range(self, p):
        ''' Return range of BWM rows having p as a prefix '''
        l, r = 0, self.bwt_len - 1  # closed (inclusive) interval
        for i in range(len(p) - 1, -1, -1):  # backward search, right to left
            # Fix: a query character absent from the text used to raise
            # KeyError; it simply has no occurrences.
            if p[i] not in self.first_column:
                return 0, 0
            l = self.checkpoints.rank(self.bwt, p[i], l - 1) + self.first_column[p[i]]
            r = self.checkpoints.rank(self.bwt, p[i], r) + self.first_column[p[i]] - 1
            if r < l:
                break
        return l, r + 1

    def resolve(self, row):
        ''' Given BWM row, return its offset w/r/t T '''
        nsteps = 0
        # Walk backwards via LF-mapping until a sampled row is reached
        while row not in self.ssa:
            if row >= self.bwt_len:
                return 0
            c = self.bwt[row]
            row = self.checkpoints.rank(self.bwt, c, row - 1) + self.first_column[c]
            nsteps += 1
        return self.ssa[row] + nsteps

    def hasSubstring(self, p):
        ''' Return true if and only if p is substring of indexed text '''
        l, r = self.range(p)
        return r > l

    def hasSuffix(self, p):
        ''' Return true if and only if p is suffix of indexed text '''
        l, r = self.range(p)
        off = self.resolve(l)
        if off == 0:
            return False
        return r > l and off + len(p) == self.bwt_len - 1

    def occurrences(self, p):
        ''' Return offsets for all occurrences of p, in no particular order '''
        l, r = self.range(p)
        occ = [self.resolve(x) for x in range(l, r)]
        return sorted(occ)
998,432 | bb367cbe95990dc093caa49d2ce94ba8a5f99a16 | #!/usr/bin/env python
import cv2
import numpy as np
import sys
import os
import time
#get the root path for model inputing
current_path = os.path.dirname(os.path.abspath(__file__))
root_path = os.path.dirname(current_path)
sys.path.append(root_path)
from Driver.RealsenseController import RealsenseController
DEBUG = False
class Segment(object):
    """Object-detection helpers for a fixed camera over a workspace.

    workspace is [xmin, ymin, xmax, ymax] in pixels; the color-based
    detectors crop the frame to it before thresholding.
    """
    def __init__(self,workspace = [380,100,1050,650]):
        # NOTE(review): mutable default argument — the same list is shared by
        # every instance created with the default; safe only if never mutated.
        self.ws = workspace
    def DiffGround(self,groundImg,currrentImg):
        """Background subtraction against a reference 'ground' image.

        Returns a list of (x, y, w, h) bounding rects for changed regions
        whose contour area lies in [50*50, 200*200].
        """
        groundImg_gray = cv2.cvtColor(groundImg,cv2.COLOR_BGR2GRAY)
        groundBlur = cv2.GaussianBlur(groundImg_gray,(5,5),1)
        # int16 so the subtraction below can go negative without wrapping
        groundBlur.dtype = 'int16'
        currrentImg_gray = cv2.cvtColor(currrentImg,cv2.COLOR_BGR2GRAY)
        currrentImgBlur = cv2.GaussianBlur(currrentImg_gray,(5,5),1)
        currrentImgBlur.dtype = 'int16'
        dGrayBlur = abs(groundBlur-currrentImgBlur)
        dGrayBlur.dtype = 'uint8'
        dGrayMidBlur=cv2.medianBlur(dGrayBlur,5)
        ret,thresh=cv2.threshold(dGrayMidBlur,10,255,cv2.THRESH_BINARY)
        print(thresh.dtype)  # NOTE(review): debug output — consider removing
        # im2, contours, hierarchy = cv2.findContours(thresh, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)
        contours, hierarchy = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)
        rect = []
        for i in range(len(contours)):
            area = cv2.contourArea(contours[i])
            # Keep only plausibly object-sized regions
            if area < 50*50 or area > 200*200:
                continue
            else:
                temp = cv2.boundingRect(contours[i])
                rect.append(temp)
                # x,y,w,h
        return rect
    def MOG2(self,groundImg,currrentImg,history = 1,kernel = 16):
        """Experimental MOG2 background subtraction (incomplete).

        NOTE(review): ignores both arguments and reads hard-coded image
        files; returns nothing — clearly unfinished, see TODO below.
        """
        #TODO: complete this function for avi and a set of picture
        a = cv2.imread('image_C_0002.jpg',1)
        #histroy: use how many image to build the model
        #kernel: use how many gauss function to build the model
        fgbg = cv2.createBackgroundSubtractorMOG2(history,kernel,False)
        # 1 means add this picture to model
        fgmask = fgbg.apply(a)
        b = cv2.imread('image_C_0003.jpg',1)
        fgmask = fgbg.apply(b,1)
        b = cv2.imread('image_C_0001.jpg',1)
        # 0 means don't add this picture to model
        fgmask = fgbg.apply(b,fgmask,0)
    # x1,y1 ------
    # |          |
    # |          |
    # |          |
    # --------x2,y2
    def ColorFilter(self,currrentImg,lower = np.array([10, 80, 0]),upper = np.array([80, 180, 80])):
        """In-range color segmentation inside the workspace crop.

        Returns (rects, areas): rects are [ymin, xmin, ymax, xmax] boxes
        normalised to the full-frame size, areas the matching contour areas.
        """
        crop_img = currrentImg[self.ws[1]:self.ws[3],self.ws[0]:self.ws[2],:] #[ymin:ymax,xmin:xmax]
        mask = cv2.inRange(crop_img, lower, upper)
        img_medianBlur=cv2.medianBlur(mask,5)
        # Invert: objects are the pixels OUTSIDE the given color range
        mask = cv2.bitwise_not(img_medianBlur)
        width = currrentImg.shape[1]
        heigh = currrentImg.shape[0]
        if(DEBUG):
            cv2.imshow('mask',mask)
            cv2.waitKey()
        contours, hierarchy = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)
        area = []
        # # contour: if hierarchy[0][i][3] == -1, it means there are contours inside
        # # len(contours[i] is the num of the contour
        # if (len(contours[i]) < self.min or len(contours[i]) > self.max or cv2.contourArea(contours[i]) < self.min_area): #or hierarchy[0][i][3] == -1
        #     continue
        rect = [] #ymin,xmin,ymax,xmax
        for i in range(len(contours)):
            area_temp = cv2.contourArea(contours[i])
            if area_temp < 20*20 or area_temp > 200*200:
                continue
            else:
                temp = cv2.boundingRect(contours[i])
                # Convert crop-relative (x, y, w, h) to full-frame normalised box
                box_rate = np.zeros(4)
                box_rate[1] = (temp[0] + self.ws[0])/float(width)
                box_rate[0] = (temp[1] + self.ws[1])/float(heigh)
                box_rate[3] = (temp[2] + temp[0] + self.ws[0]) / float(width)
                box_rate[2] = (temp[3] + temp[1] + self.ws[1]) / float(heigh)
                rect.append(box_rate)
                area.append(area_temp)
        return rect,area
    def ColorFilter_minRect(self,currrentImg,lower = np.array([10, 80, 0]),upper = np.array([80, 180, 80])):
        """Like ColorFilter, but returns the 4 corner points of the minimum
        area (rotated) rectangle per detection, in full-frame pixels."""
        crop_img = currrentImg[self.ws[1]:self.ws[3],self.ws[0]:self.ws[2],:] #[ymin:ymax,xmin:xmax]
        mask = cv2.inRange(crop_img, lower, upper)
        img_medianBlur=cv2.medianBlur(mask,5)
        mask = cv2.bitwise_not(img_medianBlur)
        width = currrentImg.shape[1]
        heigh = currrentImg.shape[0]
        if(DEBUG):
            cv2.imshow('mask',mask)
            cv2.waitKey()
        contours, hierarchy = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)
        rect = [] #4 points of a rectangle
        for i in range(len(contours)):
            area = cv2.contourArea(contours[i])
            if area < 20*20 or area > 200*200:
                continue
            else:
                min_Box = cv2.minAreaRect(contours[i])
                box = cv2.boxPoints(min_Box)
                box = np.intp(box) #np.intp: Integer used for indexing (same as C ssize_t; normally either int32 or int64)
                # Shift corner points from crop coordinates to full frame
                temp = box + [self.ws[0],self.ws[1]]
                # temp = temp/[float(width),float(heigh)]
                # temp = cv2.boundingRect(box)
                # box_rate = np.zeros(4)
                # box_rate[1] = temp[0]/float(width)
                # box_rate[0] = temp[1]/float(heigh)
                # box_rate[3] = (temp[2] + temp[0]) / float(width)
                # box_rate[2] = (temp[3] + temp[1]) / float(heigh)
                rect.append(temp)
        return rect
if __name__ == '__main__':
    # Smoke test: grab one frame from the Realsense camera, run the color
    # segmentation and draw the detected boxes (boxes are normalised, so we
    # scale by the assumed 1280x720 frame size before drawing).
    camera_controller = RealsenseController()
    time.sleep(2)
    a,_,_,_ = camera_controller.getImage()
    # a = cv2.imread('../Data/BoundingBox1.png')
    # b = cv2.imread('../Data/BoundingBox2.png')
    # print('shape:', a.shape)
    # exit()
    rect,area = Segment().ColorFilter(a,np.array([80, 80, 80]),np.array([180, 180, 180]))
    # rect = Segment().ColorFilter_minRect(a,np.array([80, 80, 80]),np.array([180, 180, 180]))
    print(rect)
    showed_image = a.copy()
    for i in range(len(rect)):
        cv2.rectangle(showed_image,(int(rect[i][1]*1280),int(rect[i][0]*720)),(int(rect[i][3]*1280),int(rect[i][2]*720)),(0,255,0),2)
        # cv2.drawContours(showed_image, [rect[i]], -1, (255,0,0),2)
        # x,y = (rect[i][0]+rect[i][1]+rect[i][2]+rect[i][3])/4.0
        # cv2.circle(showed_image, (int(x), int(y)), 5, (255,0,0), 4)
    cv2.imshow('contours',showed_image)
    cv2.waitKey()
    cv2.destroyAllWindows()
|
998,433 | 2cd3cba62e645e8eb095b4627ccef7aab768b7f9 | from __future__ import print_function
from flask import Flask, jsonify, request
from flask import Flask, request, redirect, url_for, render_template, send_from_directory
from werkzeug.utils import secure_filename
import numpy as np
#from sklearn.externals import joblib
import pandas as pd
import numpy as np
from tensorflow.keras import backend
from tensorflow.keras.models import load_model
from bs4 import BeautifulSoup
import re
#from sklearn.feature_extraction.text import CountVectorizer
import tensorflow as tf
#tf.disable_v2_behavior()
from sklearn.metrics import roc_auc_score
from tensorflow.keras import backend as K
import base64
# https://www.tutorialspoint.com/flask
import flask,os
# Resolve upload/download folders relative to this file's location.
uppath = lambda _path, n: os.sep.join(_path.split(os.sep)[:-n])
UPLOAD_FOLDER = os.path.dirname(os.path.join(os.path.abspath(uppath(__file__, 1)),'uploads'))
print('='*20,UPLOAD_FOLDER)
DOWNLOAD_FOLDER = os.path.dirname(os.path.abspath(__file__)) + '/downloads/'
# Flask application serving /static directly.
app = Flask(__name__,static_url_path="/static")
DIR_PATH = os.path.dirname(os.path.realpath(__file__))
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
app.config['DOWNLOAD_FOLDER'] = DOWNLOAD_FOLDER
#app.run(debug=True)
from tensorflow.keras.preprocessing import image
###################################################
def preprocess_input(x, data_format=None, version=1):
    '''
    VGGFace-style preprocessing: copy the image array, reverse the channel
    order (RGB -> BGR) and subtract the per-channel training means so only
    the face-relevant signal remains.

    version selects the mean set (1: VGGFace, 2: VGGFace2); data_format is
    'channels_first' or 'channels_last' (defaults to the Keras setting).
    '''
    if data_format is None:
        data_format = K.image_data_format()
    assert data_format in {'channels_last', 'channels_first'}
    if version == 1:
        channel_means = (93.5940, 104.7624, 129.1863)
    elif version == 2:
        channel_means = (91.4953, 103.8827, 131.0912)
    else:
        raise NotImplementedError
    x_temp = np.copy(x)
    if data_format == 'channels_first':
        # NCHW layout: reverse and shift along axis 1
        x_temp = x_temp[:, ::-1, ...]
        for channel, mean in enumerate(channel_means):
            x_temp[:, channel, :, :] -= mean
    else:
        # NHWC layout: reverse and shift along the last axis
        x_temp = x_temp[..., ::-1]
        for channel, mean in enumerate(channel_means):
            x_temp[..., channel] -= mean
    return x_temp
def read_img(path):
    """Load the image at *path*, resize it to 224x224 and normalise it.

    Delegates pixel normalisation to preprocess_input (VGGFace2 means,
    version=2) and returns a float numpy array ready for the model.
    """
    img = image.load_img(path, target_size=(224, 224))
    # BUG FIX: np.float was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin float keeps the original float64 semantics.
    img = np.array(img).astype(float)
    return preprocess_input(img, version=2)
def chunker(seq, size=32):
    """Split *seq* lazily into consecutive slices of at most *size* items."""
    starts = range(0, len(seq), size)
    return (seq[i:i + size] for i in starts)
def auc(y_true, y_pred):
    # ROC-AUC as a Keras-compatible metric: tf.py_func wraps the sklearn
    # implementation into the graph.
    # NOTE(review): tf.py_func and tf.Session below are TF1-only APIs; this
    # file is written against TF1-style graph/session execution throughout.
    return tf.py_func(roc_auc_score, (y_true, y_pred), tf.double)
# One shared graph/session pair so predictions triggered from Flask request
# handlers all run against the same loaded model state.
myGraph = tf.Graph()
session= tf.Session(graph = myGraph)
def predict_images(img1,img2):
    """Return the model's similarity/kinship score for an image-path pair.

    Builds a one-row DataFrame in the '<path1>-<path2>' format used by the
    training pipeline, reads and preprocesses both images, and evaluates
    the globally loaded model inside the shared TF1 session.

    NOTE(review): depends on module globals `model3` (loaded elsewhere) and
    `backend`; only `K` is imported at the top of this file, so the bare
    name `backend` may be unresolved — confirm. `pd` is likewise not
    imported in the visible header.
    """
    global submission,model3,graph
    d = {'img_pair': img1+"-"+img2}
    submission = pd.DataFrame(data=d,index=[0])
    predictions = []
    test_path=''
    for batch in (chunker(submission.img_pair.values)):
        # Split each pair string back into the two image paths.
        X1 = [x.split("-")[0] for x in batch]
        X1 = np.array([read_img(test_path + x) for x in X1])
        X2 = [x.split("-")[1] for x in batch]
        X2 = np.array([read_img(test_path + x) for x in X2])
        print("=="*9,X1.shape, X2.shape)
        #with graph.as_default():
        #session = tf.Session(graph=tf.Graph())
        # Run the prediction inside the module-level session's graph so the
        # model weights loaded there are visible.
        with session.graph.as_default():
            backend.set_session(session)
            pred3 = model3.predict([X1, X2])
            print("=="*9,pred3)
        print("=="*9,X1.shape, X2.shape)
        #pred1 = [0.20 * e for e in model1.predict([X1, X2]).ravel().tolist()]
        #pred2 = [0.20 * e for e in model2.predict([X1, X2]).ravel().tolist()]
        #pred3 = [0.20 * e for e in model3.predict([X1, X2]).ravel().tolist()]
        #pred4 = [0.20 * e for e in model4.predict([X1, X2]).ravel().tolist()]
        #pred5 = [0.20 * e for e in model5.predict([X1, X2]).ravel().tolist()]
        #pred = [sum(x) for x in zip(pred1, pred2, pred3, pred4, pred5)] #list( map(add, pred1, pred2, pred3, pred4, pred5) )
        pred=pred3[0]
    return pred
###################################################
@app.route('/')
def hello_world():
    """Liveness check: plain-text greeting at the site root."""
    return 'Hello World!'
# Fixed on-disk name for the first uploaded picture.
filename1="filename1.PNG"
@app.route('/savea', methods=['POST'])
def savea():
    """Persist the first profile picture upload as uploads/filename1.PNG.

    The upload is always written to the same fixed name, so each new upload
    overwrites the previous one. NOTE(review): `global filename1` has no
    effect here because the reassignment is commented out; `request` and
    `secure_filename` are not visibly imported in this file — confirm.
    """
    global filename1
    print("===== save a called ===========")
    print(type(request.files))
    print(list(request.files.keys()))
    file = request.files['userprofile_picturea']
    print("===== file ===========", file)
    # Sanitised name is computed but only logged; the fixed name is used below.
    filename = secure_filename(file.filename)
    #filename1 = filename
    print("===== filename ===========", filename)
    file.save(os.path.join(app.config['UPLOAD_FOLDER'],'uploads', "filename1.PNG"))
    return ''
# Fixed on-disk name for the second uploaded picture.
filename2 = "filename2.PNG"
@app.route('/saveb', methods=['POST'])
def saveb():
    """Persist the second profile picture upload as uploads/filename2.PNG.

    Mirrors savea(): the upload always overwrites the same fixed file name.
    """
    global filename2
    # BUG FIX: the trace message said "save a called" (copy-paste from savea).
    print("===== save b called ===========")
    file = request.files['userprofile_pictureb']
    print("===== file ===========", file)
    # Sanitised name is computed but only logged; the fixed name is used below.
    filename = secure_filename(file.filename)
    #filename2 = filename
    print("===== filename ===========", filename)
    file.save(os.path.join(app.config['UPLOAD_FOLDER'], 'uploads', "filename2.PNG"))
    return ''
@app.route('/index')
def index():
    """Serve the main upload/compare page."""
    return flask.render_template('index.html')
# Request counter used by predict() to step through its stubbed predictions.
i =0
@app.route('/type')
def type_r():
    """Alias route: serves the same template as /index."""
    return flask.render_template('index.html')
'''
with session.graph.as_default():
backend.set_session(session)
model3 = load_model('model-3',custom_objects={ 'auc': auc })'''
import time
'''
with graph.as_default():
model3 = load_model('model-3',custom_objects={ 'auc': auc })'''
@app.route('/predict', methods=['POST'])
def predict():
    """Render the similarity result page for the two uploaded images.

    The model call is currently stubbed out with a canned score; the real
    pipeline is kept in the commented-out predict_images() call.
    """
    global i, submission, model3, filename1, filename2
    #pred = predict_images(os.path.join(UPLOAD_FOLDER,'uploads', filename1),os.path.join(UPLOAD_FOLDER,'uploads' , filename2))
    pred = ['0.9644774']
    # BUG FIX: `i` grows on every request while `pred` has a single element,
    # so the second POST raised IndexError; wrap around instead.
    prediction = float(pred[i % len(pred)])
    i = i + 1
    #time.sleep(4)
    #prediction=0.2542367329820991
    return flask.render_template('result.html', message="{0:.2f}".format(round(prediction, 2)), message2=prediction)
if __name__ == '__main__':
    # Development server on all interfaces; debug=True must not reach
    # production (it enables the interactive debugger on exceptions).
    app.run(host='0.0.0.0', port=8080,debug=True)
    #predict()
|
998,434 | 7d34b95a4a93890e96367cdc0052344abfe95026 | from requests import get
import socket
from main_application.connect_to_asterisk.asterisk_manager import ConnectAsteriskManager
import os
import sys
# Changing SIP Trunk settings
# Changing SIP Trunk settings
class SettingsToPjsip:
    """Rewrites a pjsip.conf file in place for the current network and SIP
    provider, then restarts Asterisk to apply the new trunk settings.

    The modify_* methods scan ``self.data`` (the config lines) sequentially
    and each returns the index where the next section scan should resume.
    """

    def __init__(self, config_location, pjsip_port, provider_address, provider_port, provider_ip_addresses):
        # When frozen (e.g. PyInstaller), resolve paths next to the executable.
        if getattr(sys, 'frozen', False):
            self.application_path = os.path.dirname(sys.executable)
        else:
            self.application_path = os.path.dirname(__file__)
        self.config_location = os.path.join(self.application_path, config_location)
        self.pjsip_port = pjsip_port                        # local UDP bind port (string)
        self.provider_address = provider_address            # provider SIP host
        self.provider_port = provider_port                  # provider SIP port (string)
        self.provider_ip_addresses = provider_ip_addresses  # provider IPs for identify matching

    def modify_transport_udp_nat(self, start):
        """Rewrite bind/local_net/external_* keys from line *start* onwards;
        returns the index just past the last line visited."""
        end = start
        # Public address for NAT signalling/media, fetched from api.ipify.org.
        public_ip = get('https://api.ipify.org').text
        for line in range(start, len(self.data)):
            end += 1
            if "bind" in self.data[line]:
                self.data[line] = 'bind=0.0.0.0:' + self.pjsip_port + '\n'
            elif "local_net" in self.data[line]:
                self.data[line] = 'local_net=' + self.local_ip + '\n'
            elif "external_media_address" in self.data[line]:
                self.data[line] = 'external_media_address=' + public_ip + '\n'
            elif "external_signaling_address" in self.data[line]:
                self.data[line] = 'external_signaling_address=' + public_ip + '\n'
                # external_signaling_address is the last key of interest;
                # stop scanning here.
                break
        return end

    def modify_provider_aors(self, start):
        """Point the provider AOR contact at the configured address:port."""
        end = start
        for line in range(start, len(self.data)):
            end += 1
            if "contact=sip:" in self.data[line]:
                self.data[line] = 'contact=sip:' + self.provider_address + ':' + self.provider_port + '\n'
                break
        return end

    def modify_provider_ident(self, start):
        """Replace every existing match= line in the identify section with one
        match= entry per provider IP; append them if none existed."""
        match_index = 0
        line = start
        while line < len(self.data):
            if "match" in self.data[line]:
                if match_index == 0:
                    # Remember (and blank) the first match line; the new
                    # entries are concatenated onto it below.
                    self.data[line] = ""
                    match_index = line
                else:
                    # Drop duplicate match lines, compensating the index.
                    del self.data[line]
                    line -= 1
            elif not match_index == 0:
                # Past the end of the contiguous match block: stop.
                break
            line += 1
        if not match_index == 0:
            for ip_address in self.provider_ip_addresses:
                self.data[match_index] += 'match=' + ip_address + '\n'
        else:
            # No match lines found: append them at the end of the file,
            # reusing a trailing blank line if there is one.
            i = 0
            for ip_address in self.provider_ip_addresses:
                if i == 0:
                    if not self.data[-1].strip():
                        self.data[-1] += 'match=' + ip_address + '\n'
                    else:
                        self.data.append('match=' + ip_address + '\n')
                    i += 1
                else:
                    self.data[-1] += 'match=' + ip_address + '\n'

    # Restarting Asterisk - easiest way to reload PJSIP totally, instantly
    def reload_pjsip(self):
        manager = ConnectAsteriskManager.connect_to_asterisk_manager()
        try:
            manager.command('core restart now')
        except:
            # NOTE(review): bare except — presumably the restart drops the
            # AMI connection mid-command, but this also hides real errors;
            # consider narrowing to the manager's exception type.
            manager.close()

    def create_config(self):
        """Rewrite the config end-to-end and restart Asterisk."""
        # Determine the LAN IP by opening a UDP "connection" to 8.8.8.8
        # (no packet is actually sent for SOCK_DGRAM connect).
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        s.connect(("8.8.8.8", 80))
        self.local_ip = s.getsockname()[0]
        s.close()
        with open(self.config_location, 'r') as file:
            self.data = file.readlines()
            file.close()  # redundant inside `with`; kept as-is
        data_index = self.modify_transport_udp_nat(0)
        data_index = self.modify_provider_aors(data_index)
        self.modify_provider_ident(data_index)
        with open(self.config_location, 'w') as file:
            file.writelines(self.data)
            file.close()  # redundant inside `with`; kept as-is
        self.reload_pjsip()
|
998,435 | 8b4670ea7919cd353ccfebcdc26afff48522b76b | import pytest
import mkl
from qiskit import *
from qiskit.compiler import transpile, assemble
mkl.set_num_threads(1)
backend = Aer.get_backend('statevector_simulator')
def run_bench(benchmark, nqubits, gate, locs=(1, )):
    """Dispatch the gate benchmark to the code path matching the installed
    qiskit-terra version (0.9.0 changed the transpile/assemble kwargs).

    NOTE(review): the bare name `qiskit` is only available here if
    `from qiskit import *` re-exports it (or an explicit `import qiskit`
    exists elsewhere) — confirm, otherwise this raises NameError.
    """
    if qiskit.__qiskit_version__['qiskit-terra'] == '0.9.0':
        run_bench_new(benchmark, nqubits, gate, locs)
    else:
        run_bench_old(benchmark, nqubits, gate, locs)
def run_bench_new(benchmark, nqubits, gate, locs=(1, )):
    """Benchmark a single *gate* on *nqubits* via the terra-0.9 API.

    Builds a circuit with the gate applied at qubit indices *locs*,
    transpiles and assembles it with explicit defaults, then times
    ``backend.run`` with pytest-benchmark.
    """
    qubits = QuantumRegister(nqubits)
    circuit = QuantumCircuit(qubits)
    locs = [qubits[k] for k in locs]
    getattr(circuit, gate)(*locs)
    experiments = circuit
    basis_gates = None
    coupling_map = None  # circuit transpile options
    backend_properties = None
    # BUG FIX: several of the assignments below ended in a stray trailing
    # comma, silently turning the values into 1-tuples such as (None,),
    # (1024,) and ('avg',) that were then passed as keyword arguments.
    # run_bench_old shows the intended plain values.
    initial_layout = None
    seed_transpiler = None
    optimization_level = None
    pass_manager = None
    qobj_id = None
    qobj_header = None
    shots = 1024  # common run options
    memory = False
    max_credits = 10
    seed_simulator = None
    default_qubit_los = None
    default_meas_los = None  # schedule run options
    schedule_los = None
    meas_level = 2
    meas_return = 'avg'
    memory_slots = None
    memory_slot_size = 100
    rep_time = None
    parameter_binds = None
    experiments = transpile(experiments,
                            basis_gates=basis_gates,
                            coupling_map=coupling_map,
                            backend_properties=backend_properties,
                            initial_layout=initial_layout,
                            seed_transpiler=seed_transpiler,
                            optimization_level=optimization_level,
                            backend=backend,
                            pass_manager=pass_manager,
                            )
    run_config = {}
    # assembling the circuits into a qobj to be run on the backend
    qobj = assemble(experiments,
                    qobj_id=qobj_id,
                    qobj_header=qobj_header,
                    shots=shots,
                    memory=memory,
                    max_credits=max_credits,
                    seed_simulator=seed_simulator,
                    default_qubit_los=default_qubit_los,
                    default_meas_los=default_meas_los,
                    schedule_los=schedule_los,
                    meas_level=meas_level,
                    meas_return=meas_return,
                    memory_slots=memory_slots,
                    memory_slot_size=memory_slot_size,
                    rep_time=rep_time,
                    parameter_binds=parameter_binds,
                    backend=backend,
                    run_config=run_config
                    )
    benchmark(backend.run, qobj, **run_config)
def run_bench_old(benchmark, nqubits, gate, locs=(1, )):
    """Benchmark a single *gate* on *nqubits* via the pre-0.9 terra API
    (which still accepted the deprecated seed/seed_mapper/config kwargs)."""
    qubits = QuantumRegister(nqubits)
    circuit = QuantumCircuit(qubits)
    locs = [qubits[k] for k in locs]
    getattr(circuit, gate)(*locs)
    experiments = circuit
    # Explicit defaults for every transpile/assemble option, spelled out so
    # the benchmark exercises the same call shape as real workloads.
    basis_gates = None
    coupling_map = None  # circuit transpile options
    backend_properties = None
    initial_layout = None
    seed_transpiler = None
    optimization_level = None
    pass_manager = None
    qobj_id = None
    qobj_header = None
    shots = 1024  # common run options
    memory = False
    max_credits = 10
    seed_simulator = None
    default_qubit_los = None
    default_meas_los = None  # schedule run options
    schedule_los = None
    meas_level = 2
    meas_return = 'avg'
    memory_slots = None
    memory_slot_size = 100
    rep_time = None
    parameter_binds = None
    seed = None
    seed_mapper = None  # deprecated
    config = None
    circuits = None
    run_config = {}
    experiments = transpile(experiments,
                            basis_gates=basis_gates,
                            coupling_map=coupling_map,
                            backend_properties=backend_properties,
                            initial_layout=initial_layout,
                            seed_transpiler=seed_transpiler,
                            optimization_level=optimization_level,
                            backend=backend,
                            pass_manager=pass_manager,
                            seed_mapper=seed_mapper,  # deprecated
                            )
    qobj = assemble(experiments,
                    qobj_id=qobj_id,
                    qobj_header=qobj_header,
                    shots=shots,
                    memory=memory,
                    max_credits=max_credits,
                    seed_simulator=seed_simulator,
                    default_qubit_los=default_qubit_los,
                    default_meas_los=default_meas_los,
                    schedule_los=schedule_los,
                    meas_level=meas_level,
                    meas_return=meas_return,
                    memory_slots=memory_slots,
                    memory_slot_size=memory_slot_size,
                    rep_time=rep_time,
                    parameter_binds=parameter_binds,
                    backend=backend,
                    config=config,  # deprecated
                    seed=seed,  # deprecated
                    run_config=run_config
                    )
    # Only the backend execution itself is timed.
    benchmark(backend.run, qobj, **run_config)
def first_rotation(circuit, qubits):
    """Open the QCBM ansatz: apply Rx(1.0) then Rz(1.0) to every qubit."""
    for qubit in qubits:
        for apply_gate in (circuit.rx, circuit.rz):
            apply_gate(1.0, qubit)
    return circuit
def mid_rotation(circuit, qubits):
    """Middle QCBM layer: apply Rz(1.0), Rx(1.0), Rz(1.0) to every qubit."""
    for qubit in qubits:
        for apply_gate in (circuit.rz, circuit.rx, circuit.rz):
            apply_gate(1.0, qubit)
    return circuit
def last_rotation(circuit, qubits):
    """Close the QCBM ansatz: apply Rz(1.0) then Rx(1.0) to every qubit."""
    for qubit in qubits:
        for apply_gate in (circuit.rz, circuit.rx):
            apply_gate(1.0, qubit)
    return circuit
def entangler(circuit, qubits, pairs):
    """Append one CNOT per (control, target) index pair in *pairs*."""
    for ctrl, tgt in pairs:
        circuit.cx(qubits[ctrl], qubits[tgt])
    return circuit
def generate_qcbm_circuit(n, depth, pairs):
    """Build a depth-*depth* QCBM ansatz on *n* qubits.

    Layer structure: opening Rx/Rz rotations, then (depth-1) repetitions of
    entangling CNOTs along *pairs* followed by Rz/Rx/Rz rotations, with a
    final entangler + closing Rz/Rx layer.
    """
    qubits = QuantumRegister(n)
    circuit = QuantumCircuit(qubits)
    circuit = first_rotation(circuit, qubits)
    circuit = entangler(circuit, qubits, pairs)
    for k in range(depth-1):
        circuit = mid_rotation(circuit, qubits)
        circuit = entangler(circuit, qubits, pairs)
    circuit = last_rotation(circuit, qubits)
    return circuit
# Qubit counts swept by the single-gate benchmarks below.
nbit_list = range(4,18)
@pytest.mark.parametrize('nqubits', nbit_list)
def test_X(benchmark, nqubits):
    benchmark.group = "X"
    run_bench(benchmark, nqubits, 'x')
@pytest.mark.parametrize('nqubits', nbit_list)
def test_H(benchmark, nqubits):
    benchmark.group = "H"
    run_bench(benchmark, nqubits, 'h')
@pytest.mark.parametrize('nqubits', nbit_list)
def test_T(benchmark, nqubits):
    benchmark.group = "T"
    run_bench(benchmark, nqubits, 't')
@pytest.mark.parametrize('nqubits', nbit_list)
def test_CX(benchmark, nqubits):
    benchmark.group = "CNOT"
    run_bench(benchmark, nqubits, 'cx', (1, 2))
# qiskit doesn't support?
# @pytest.mark.parametrize('nqubits', nbit_list)
# def test_CY(benchmark, nqubits):
#     benchmark.group = "C-Rx(0.5)"
#     run_bench(benchmark, nqubits, '', (2, 3))
@pytest.mark.parametrize('nqubits', nbit_list)
def test_Toffoli(benchmark, nqubits):
    benchmark.group = "Toffoli"
    run_bench(benchmark, nqubits, 'ccx', (2, 3, 0))
# Whole-circuit benchmark: depth-9 QCBM ansatz on a ring of qubits.
@pytest.mark.parametrize('nqubits', range(4, 16))
def test_qcbm(benchmark, nqubits):
    benchmark.group = "QCBM"
    circuit = generate_qcbm_circuit(nqubits, 9,
                                    [(i, (i+1)%nqubits) for i in range(nqubits)])
    benchmark(execute, circuit, backend)
|
998,436 | 445e136be0e44744df5802a847c11aaa2b5a44c7 | # -*- coding: utf-8 -*-
# Read N and A, then print N*N - A clamped below at zero.
N = int(input())
A = int(input())
print(max(N * N - A, 0))
|
998,437 | 29f1d94cbca249e566ef29dd5330681eeaa7f914 | ###---------Imports---------###
import math
from kivy.app import App
from kivy.clock import Clock
from kivy.config import Config
from kivy.graphics import Color, Ellipse
from kivy.uix.label import Label
from kivy.uix.popup import Popup
from kivy.core.window import Window
from Pretty_quatre.RBF import *
from kivy.uix.boxlayout import BoxLayout
from kivy.graphics.vertex_instructions import Rectangle, Line, Point
###---------PreConfig---------###
Window.size = (900, 600)
Config.set('input', 'mouse', 'mouse,multitouch_on_demand')
class MainMenu(BoxLayout):
    """Root widget of the RBF trainer: plots a target function, the training
    error curve and (eventually) the RBF approximation, animated per epoch.

    NOTE(review): these are class attributes, so entry_x / wish_y lists are
    shared across all MainMenu instances — acceptable for a single-window app.
    """
    max_epochs = 100     # default number of epochs
    entry_x = []         # training inputs
    wish_y =[]           # desired labels / classes
    anima = False        # animation running flag
    vuelta = 0           # current animation frame index
    n_gauss = 10         # default number of gaussians
    desired_error = 0.01 # default target error
    rbf = None           # the RBF network object
    # Predefined functions:
    # f1 = 2*sin(x)*cos(x)+cos(x)
    # f2 = 3*sin(x)^4+5*sin(x)^2+2*cos(x)^5
    # f3 = e+2*cos(x)^5+sin(x)
    # f4 = cos(x)

    # Clears the plane and the entry_x / wish_y training data.
    def reset(self):
        self.soft_reset()
        self.set_lines()
        self.entry_x.clear()
        self.wish_y.clear()
        self.rbf = None
        self.anima = False

    def reset_graph(self):
        with self.canvas:
            # NOTE(review): mode='rbg' looks like a typo for 'rgb' — confirm
            # what Kivy does with an unknown mode string.
            Color(1, 1, 1, 1, mode='rbg')
            Rectangle(pos=(40, 21), size=(371, 108), source="Img/back2.jpg")
        # NOTE(review): set_other_lines is not defined on this class, so
        # calling reset_graph raises AttributeError — likely meant set_lines.
        self.set_other_lines()

    def changeRange(self, oxu, oxl, nxu, nxl, v):
        # Linearly remap v from [oxl, oxu] into [nxl, nxu], rounded to 1 dp.
        OldRangex = oxu - oxl
        NewRangex = nxu - nxl
        return round((((v - oxl) * NewRangex) / OldRangex) + nxl, 1)

    def soft_reset(self):
        # Only repaints the two plot backgrounds; no data is cleared.
        graph = self.ids.functionGraph
        with graph.canvas:
            Color(0.78, 0.54, 0.64, 1, mode='rgb')
            Rectangle(pos=(graph.pos), size =(graph.size))
        graph = self.ids.errorGraph
        with graph.canvas:
            Color(0.78, 0.54, 0.84, 1, mode='rgb')
            Rectangle(pos=(graph.pos), size=(graph.size))

    # Plots the selected target function as dots over x in [0, 50).
    def graphFunction(self, f):
        graph = self.ids.functionGraph
        with graph.canvas:
            x = 0
            while x < 50:
                # Dispatch on the function's display string.
                if f == 'cos(x)':
                    y = math.cos(x)
                elif f == '2*sin(x)*cos(x)+cos(x)':
                    y = 2*math.sin(x)*math.cos(x)+math.cos(x)
                elif f == '3*sin(x)^4+5*sin(x)^2+2*cos(x)^5':
                    y = 3 * math.pow(math.sin(x), 4) + 5 * math.pow(math.sin(x), 2) + 2 * math.pow(math.cos(x), 5)
                else:
                    y = math.e + 2 * math.pow(math.cos(x), 5) + math.sin(x)
                d = 2
                xi = graph.pos[0]
                xf = graph.pos[0] + graph.width
                yi = graph.pos[1]
                yf = graph.pos[1] + graph.height
                # Map data coords (x in [0,50], y in [-10,10]) into widget px.
                NewValuex = self.changeRange(50, 0, xf, xi, x)
                NewValuey = self.changeRange(10, -10, yf, yi, y)
                Color(0.2, 0, 0.8, 1, mode='rgb')
                Ellipse(pos=(NewValuex - d / 2, NewValuey - d / 2), size=(d, d))
                x += 0.01

    # Plots one error sample e at epoch t (of n epochs) on the error graph.
    def draw_error(self, e, t, n):
        graph = self.ids.errorGraph
        with graph.canvas:
            Color(0.2, 0, 0.8, 1, mode='rgb')
            x = t
            y = e
            d = 2
            xi = graph.pos[0]
            xf = graph.pos[0] + graph.width
            yi = graph.pos[1]
            yf = graph.pos[1] + graph.height
            NewValuex = self.changeRange((n-1), 0, xf, xi, x)
            NewValuey = self.changeRange(self.rbf.highestError, 0, yf, yi, y)
            Ellipse(pos=(NewValuex - d/2, NewValuey - d/2), size=(d, d))

    ########################################
    ##------------Animation---------------##
    def aminacion(self, *args):
        # One animation tick (meant to be scheduled via Clock): draws the
        # weight/error state for frame `vuelta`.
        # NOTE(review): the name is misspelled — RBFApp's commented scheduler
        # calls `menu.animacion`; also `draw_w` is not defined on this class.
        # Confirm both before enabling the animation.
        if self.anima:
            tam = len(self.rbf.time_weights)
            if tam > 0:
                #TODO how are the gaussians going to be returned?
                self.draw_aprox(self.rbf.time_weights[0], [1, 0, 0])
            if 0 < self.vuelta < (tam - 1):
                self.draw_w(self.rbf.time_weights[self.vuelta], [0, 0, 1])
            elif self.vuelta >= tam:
                # Final frame: draw in green and stop animating.
                self.draw_w(self.rbf.time_weights[len(self.rbf.time_weights) - 1], [0, 1, 0])
                self.anima = False
            # plot the error for this frame
            if self.vuelta < len(self.rbf.time_errors):
                self.draw_error(self.rbf.time_errors[self.vuelta], self.vuelta, len(self.rbf.time_errors))
            self.vuelta += 1
    ########################################

    def draw_aprox(self, w, c):
        # Draws the line implied by weight vector w, clipped to [-5, 5] in
        # both axes, in RGB colour c.
        with self.canvas:
            Color(c[0], c[1], c[2], 1.0, mode='rgb')
            m = -(w[0]/w[2])/(w[0]/w[1])
            b = w[0]/w[2]
            xi = -5
            yi = m * xi + b
            if yi > 5:
                yi = 5
                xi = (b-yi) / (-m)
            elif yi < -5:
                yi = -5
                xi = (b-yi) / (-m)
            xf = 5
            yf = m*xf+b
            if yf > 5:
                yf = 5
                xf = (b-yf) / (-m)
            elif yf < -5:
                yf = -5
                xf = (b-yf) / (-m)
            # Hard-coded widget geometry (370px plane anchored at (42, 180)).
            NewValuexi = self.changeRange(5, -5, 412, (412 - 370), xi)
            NewValueyi = self.changeRange(5, -5, (180 + 370), 180, yi)
            NewValuexf = self.changeRange(5, -5, 412, (412 - 370), xf)
            NewValueyf = self.changeRange(5, -5, (180 + 370), 180, yf)
            Line(points=(NewValuexi, NewValueyi, NewValuexf, NewValueyf), width=.7)

    # (Draws the grid lines of both planes.)
    def set_lines(self):
        self.soft_reset()
        graph = self.ids.functionGraph
        with graph.canvas:
            x, y = graph.pos[0], graph.pos[1]
            aumx = graph.width/50
            aumy = graph.height/20
            Color(0.9, .8, 1, 1, mode='rgb')
            while x < graph.pos[0]+graph.width:
                Line(points=(x, graph.pos[1], x, graph.pos[1]+graph.height), width=1)
                x += aumx
            while y < graph.pos[1]+graph.height:
                Line(points=(graph.pos[0], y, graph.pos[0]+graph.width, y), width=1)
                y += aumy
            # drawing the axes
            Color(0, 0, 0, .5, mode='rgb')
            x = graph.pos[0]
            y = graph.pos[1] + graph.height/2
            Line(points=(graph.pos[0], y, graph.pos[0]+graph.width, y), width=1.2)
            Line(points=(x, graph.pos[1], x, graph.pos[1]+graph.height), width=1.2)
        # error-graph grid lines
        graph = self.ids.errorGraph
        with graph.canvas:
            x, y = graph.pos[0], graph.pos[1]
            nlin = 20
            maxe = 10
            if self.rbf is not None:
                # Scale the error axis to the worst error seen in training.
                maxe = self.rbf.highestError
                nlin = maxe
                self.ids.maxError.text = str(maxe)
                self.ids.epochsg.text = str(self.rbf.reachedEpochs)
            aumy = graph.height / nlin
            Color(0.9, .8, 1, 1, mode='rgb')
            while y < graph.pos[1] + graph.height:
                Line(points=(graph.pos[0], y, graph.pos[0] + graph.width, y), width=1)
                y += aumy
            # drawing the axes
            Color(0, 0, 0, .5, mode='rgb')
            x = graph.pos[0]
            y = graph.pos[1]
            Line(points=(graph.pos[0], y, graph.pos[0] + graph.width, y), width=1.2)
            Line(points=(x, graph.pos[1], x, graph.pos[1] + graph.height), width=1.2)

    # Updates the selected function and redraws it.
    def changeFunction(self, button, selection):
        button.text = selection
        self.set_lines()
        self.graphFunction(selection)

    # Invoked when the user clicks "train".
    def start_training(self, me, ng, de, f):
        self.reset_graph()
        #TODO hook the RBF training in here
        self.vuelta = 0

    # Reads and validates the user-entered training parameters.
    def getData(self, mx, ngss, de, func, reachedEp, reachedEr, functionGraph, errorGraph):
        # clear existing lines, if any
        self.soft_reset()
        self.set_lines()
        self.graphFunction(func)
        if(len(self.entry_x) > 0):
            # Fall back to the class defaults when a field is empty/invalid.
            me = self.max_epochs
            ng = self.n_gauss
            des = self.desired_error
            f = func
            if mx.text != "":
                try:
                    me = int(mx.text)
                    if me < 1:
                        me = 100
                    mx.text = str(me)
                except ValueError:
                    print("Not Integer")
            if ngss.text != "":
                try:
                    ng = int(ngss.text)
                    if ng < 1:
                        ng = 10
                    ngss.text = str(ng)
                except ValueError:
                    print("Not Integer")
            if de.text != "":
                try:
                    des = float(de.text)
                    # the error only ranges over [0, 1]
                    if des < 0.0 or des > 1.0:
                        des = 0.1
                    de.text = str(des)
                except ValueError:
                    print("Not Float")
            # NOTE(review): this forwards the widget `de`, not the parsed
            # float `des` — confirm which one start_training should receive.
            self.start_training(me, ng, de, f)
            #reachedEp.text = "Reached Epochs: " + str(rbf.epochs)
            #reachedEr.text = "Reached Error: " + str(rbf.error)
            #TODO draw the fitted function with its gaussians
        else:
            popup = Popup(title='¡Error!',
                          content=Label(text='You need at least one entry to train '),
                          size_hint=(None, None), size=(300, 100))
            popup.open()
class RBFApp(App):
    """Kivy application shell that mounts the MainMenu widget."""
    def build(self):
        menu = MainMenu()
        # NOTE(review): the animation scheduler is disabled; it also refers
        # to `animacion` while the method is spelled `aminacion` — fix the
        # name before re-enabling.
        #Clock.schedule_interval(menu.animacion, 0.05)
        return menu
# Module-level entry point: running this file starts the GUI immediately.
RBFApp().run()
998,438 | 76653fbcb71db8db83202b1d05d88efffd824ce4 | #!/usr/bin/env python3
from time import sleep
from notify_engine import (
NotifyManager, EventLoop, CommonEvent
)
def handler(event):
    """Debug subscriber: print the event's message, its raw attribute dict
    and its serialized wire form, in that order."""
    for piece in (event.message, event.__dict__, event.serialize()):
        print(piece)
def start_task(task, event):
    # Dispatch hook handed to NotifyManager: runs the subscriber callback
    # synchronously in-process (no thread or queue indirection).
    task(event)
def nm_factory():
    """Build a NotifyManager backed by the local Redis DB 9 event loop."""
    evl = EventLoop('redis://localhost:6379/9')
    nm = NotifyManager(evl, start_task)
    return nm
if __name__ == '__main__':
    # Demo driver: subscribe the debug handler and poll the event loop
    # forever, sleeping 1s between iterations.
    nm = nm_factory()
    nm.subscribe(CommonEvent, handler)
    while True:
        nm.run()
        sleep(1)
|
998,439 | 2082c813b56bd910d9210e6aa12d93d9f5cc41c3 | # Generated by Django 2.1.7 on 2019-03-10 07:04
from django.db import migrations, models
class Migration(migrations.Migration):
    # Adds two monetary columns (12 digits, 2 decimal places) to the trxs
    # model, defaulting existing rows to 0.00. Auto-generated by Django;
    # do not edit the operations by hand.

    dependencies = [
        ('trxs', '0004_auto_20190310_0628'),
    ]

    operations = [
        migrations.AddField(
            model_name='trxs',
            name='sale_dollars',
            field=models.DecimalField(decimal_places=2, default=0.0, max_digits=12),
        ),
        migrations.AddField(
            model_name='trxs',
            name='tax_dollars',
            field=models.DecimalField(decimal_places=2, default=0.0, max_digits=12),
        ),
    ]
|
998,440 | 9e6dcf9bb1290d3f44d83f4920b6e16dfc970139 | import time
from uuid import uuid4
import os
def PathAndRename(path):
    """Build an upload-to callable for file fields.

    The returned wrapper strftime-expands *path* (so e.g. '%Y/%m' buckets
    uploads by date) and replaces the original file name with a
    collision-free one: '<pk>-<uuid>.<ext>' for saved instances,
    '<uuid>.<ext>' otherwise.
    """
    def wrapper(instance, filename):
        ext = filename.split('.')[-1]
        if instance.pk:
            filename = '{}-{}.{}'.format(instance.pk, uuid4().hex, ext)
        else:
            # Unsaved instance: no pk yet, fall back to a purely random name.
            filename = '{}.{}'.format(uuid4().hex, ext)
        # FIX: the original computed time.strftime('%y-%j') into an unused
        # local (TimeStr); only the strftime expansion of *path* is needed.
        return os.path.join(time.strftime(path), filename)
    return wrapper
|
998,441 | 6d0aa4257a69fb6b3f3debe1393f973b86eb54ad | # Ejerecicio nro 05
# Introducir dos numeros donde la suma de sus resultados sean perfectas
# Exercise 05: read two numbers and report every perfect number in the
# inclusive range [num1, num2] (a number equal to the sum of its proper
# divisors, e.g. 6 = 1 + 2 + 3).
num1 = int(input("Introduce un numero:"))
num2 = int(input("Introduce un numero:"))
for i in range(num1, num2 + 1):
    suma = 0
    # Proper divisors of i never exceed i // 2.
    for j in range(1, (i // 2) + 1):
        if i % j == 0:
            suma += j
    # FIX: the flattened original left the print's placement ambiguous; it
    # must run only when the divisor sum equals the number itself.
    if suma == i:
        print("El numero", i, " es perfecto porque la suma es:", suma)
998,442 | 894257dac871085ba6b17966f058e3f82f43ed61 | import time
import sys
import heapq
# Get max price of items in bag
def get_max_bag_price(items: list, bag: int) -> float:
    """Fractional knapsack: maximum total value of (divisible) item parts
    that fit into a bag of capacity *bag*.

    *items* is a list of (price, volume) pairs; any fraction of an item may
    be taken, with value proportional to the fraction.
    """
    # Negated unit price turns Python's min-heap into a max-by-value heap.
    heap = [(-price / volume, volume) for price, volume in items]
    heapq.heapify(heap)
    total = 0
    while heap and bag:
        neg_unit_price, volume = heapq.heappop(heap)
        taken = min(bag, volume)
        total += -neg_unit_price * taken
        bag -= taken
    return total
# Первая строка содержит количество предметов 1 <= n <= 10^3
# и вместимость рюкзака 0 <= W <= 2 * 10^6. Каждая из следующих nn строк
# задаёт стоимость 0 <= Ci <= 2 * 10^6 и объём 0 < Wi <= 2 * 10^6
# предмета (n, W, Ci, Wi — целые числа). Выведите максимальную стоимость частей
# предметов (от каждого предмета можно отделить любую часть, стоимость и объём при
# этом пропорционально уменьшатся), помещающихся в данный рюкзак, с точностью не менее
# трёх знаков после запятой.
# Run: python3 bag.py < test_data/bag.txt (file mode)
# Input:
# 3 50
# 60 20
# 100 50
# 120 30
# Output:
# 180.000
# Time: 0.32701897621154785 s
def test():
    """Check known fractional-knapsack cases, then time 1000 random runs."""
    assert get_max_bag_price([(60, 20)], 0) == 0.0
    assert get_max_bag_price([(60, 20)], 25) == 60.0
    assert get_max_bag_price([(60, 20), (0, 100)], 25) == 60.0
    assert get_max_bag_price([(60, 20), (50, 50)], 25) == 60.0 + 5.0
    assert get_max_bag_price([(60, 20), (100, 50), (120, 30)], 50) == 180.0
    from random import randint
    # Get start time
    start = time.time()
    # Stress/timing loop at the problem's stated limits (n <= 10^3,
    # values/volumes <= 2*10^6); results are discarded.
    for _ in range(1000):
        n = randint(1, 1000)
        bag = randint(0, 2 * 10**6)
        items = []
        for i in range(n):
            items.append((randint(0, 2 * 10**6), randint(1, 2 * 10**6)))
        get_max_bag_price(items, bag)
    # Show execution time
    print('Time for 1000 tests: ', time.time() - start, 's')
def main():
    """Read the problem input from stdin, solve it and print the answer
    with three decimal places (file mode: python3 bag.py < input.txt)."""
    # Get data from file Input
    reader = (tuple(map(int, line.split())) for line in sys.stdin)
    # First line: item count and bag capacity; remaining lines: items.
    items_length, bag = next(reader)
    items = list(reader)
    # Check correct items length
    assert len(items) == items_length
    # Get start time
    start = time.time()
    # Execute function
    result_gold = get_max_bag_price(items, bag)
    # Print result
    print('{:.3f}'.format(result_gold))
    # Show execution time
    print('Time: ', time.time() - start, 's')
if __name__ == "__main__":
    # main() is disabled in favour of the self-test harness.
    # main()
    test()
|
998,443 | 45cc856b9823d753b78e5b94bbfd6b8aa7c1f17e | class Animal:
name=''
def Eat(self):
print("Ням-ням");
def setName(self,newName):
self.name=newName
def getName(self):
return self.name
def makeNoize(self):
print(self.name+' говорит Гррр')
def __init__(self,newName):
self.name=newName
print('Родилось животное ',self.name)
# Demo: create an animal, rename it and exercise its behaviours.
Tiger=Animal('Ричард')
print(Tiger.getName())
Tiger.setName('Рик')
print(Tiger.getName())
Tiger.Eat()
Tiger.makeNoize()
998,444 | 170a92c5d63bcd1ab884e448fa527cd8c9769503 | #!/usr/bin/env python
# Command:
# ./rele_turn.py pin on/off rpiv
#
# where:
# - pin is the GPIO pin number
# - on and off means normally closed and opened (because cables
# were mounted to be normally closed as default)
# - rpiv is the last 8-bit IP number (e.g. 36 for parents)
#
# Example:
# rele_turn.py 4 1 # turn on p-supply rpi-cam + tinesyne-amp
# rele_turn.py 17 1 # turn on p-supply rpi-studio
# rele_turn.py 4 0 36 # turn off p-supply rpi-cam + tinesyne-amp
# rele_turn.py 17 0 36 # turn off p-supply rpi-studio
# this is useful to poweroff the rpiv before to turn off the power supply:
#from subprocess import call
import RPi.GPIO as GPIO
import time
import sys
import os
GPIO.setwarnings(False)
# Relay pin comes from argv[1] (BCM numbering).
PIN_RELE = int(sys.argv[1])
GPIO.setmode(GPIO.BCM) # referring to the pins by the "Broadcom SOC channel" number, these are the numbers after "GPIO"
#GPIO.setmode(GPIO.BOARD)# referring to the pins by the number of the pin the the plug
# PARs
PAUSE_1 = 2 # sec
PAUSE_2 = 15# sec  -- long pause so the rpi can finish powering off
IP = 'pi@192.168.1.' + sys.argv[3]
CMD = 'ssh ' + IP + ' sudo poweroff &'
# Set GPIO to output
# initial=True drives the pin HIGH (relay contact open) at setup.
GPIO.setup(PIN_RELE, GPIO.OUT, initial=True)
#GPIO.setup(PIN_RELE,GPIO.OUT)
# NOTE(review): per the header comment the relays are wired normally
# closed, so argv[2]==0 ("on") drives the pin LOW to close the contact;
# nonzero powers the remote rpi off over SSH first, then opens the contact.
if int(sys.argv[2])==0:
    print "powering on the rpiv: " + IP
    GPIO.output(PIN_RELE,GPIO.LOW)
    time.sleep( PAUSE_1 )
    print "Rele :: closed contact"
else:
    print "powering off the rpiv: " + IP
    #call(['ssh',IP,'sudo poweroff'])
    # Graceful shutdown of the remote host before cutting its supply.
    x = os.system( CMD )
    time.sleep( PAUSE_2 )
    GPIO.output(PIN_RELE,GPIO.HIGH)
    print "Rele :: opened contact"
998,445 | ea32f1979644a9d825bdd4864d5637fa275bf1ad | """
Uncipher Cisco type 7 ciphered passwords
Usage: python uncipher.py <pass> where <pass> is the text of the type 7 password
Example:
$ python uncipher.py 094F4F1D1A0403
catcat
"""
import fileinput
import sys
# XOR key used by Cisco's reversible "type 7" password obfuscation.
# (The module-level `global key` statement of the original was a no-op
# and has been dropped.)
key = [0x64, 0x73, 0x66, 0x64, 0x3b, 0x6b, 0x66, 0x6f, 0x41,
       0x2c, 0x2e, 0x69, 0x79, 0x65, 0x77, 0x72, 0x6b, 0x6c,
       0x64, 0x4a, 0x4b, 0x44, 0x48, 0x53, 0x55, 0x42]
def uncipher(ciphertext):
    """Decode a Cisco type 7 password and return the plaintext.

    The first two characters of *ciphertext* are the decimal key offset;
    each following hex byte is XORed with the rolling key. Returns '' when
    the offset prefix is not valid hex.
    """
    try:
        # BUG FIX: the original read the global `pw` here instead of the
        # `ciphertext` argument, so the function only worked by accident
        # when the caller happened to use a global named `pw`.
        index = int(ciphertext[:2], 16)
    except ValueError:
        return ''
    pw_text = ciphertext[2:].rstrip()
    pw_hex_values = [pw_text[start:start+2] for start in range(0, len(pw_text), 2)]
    pw_chars = [chr(key[index+i] ^ int(pw_hex_values[i], 16)) for i in range(0, len(pw_hex_values))]
    pw_plaintext = ''.join(pw_chars)
    return pw_plaintext
# Prefer piped/file input (one password per line); fall back to argv[1].
# NOTE(review): the bare except also masks real errors (e.g. a missing
# argument raises IndexError here instead of a usage message).
try:
    for pw in fileinput.input():
        print uncipher(pw)
except:
    pw = sys.argv[1]
    print uncipher(pw)
|
998,446 | 9c03329ef135b754cda99f86fd66331524f5deb0 | from django.db import models
from django.core.urlresolvers import reverse
from apps.visualizations.models import Visualization
from apps.dashboards.models import Dashboard
from apps.accounts.models import User
import uuid
class SharedVisualization(models.Model):
    """Token-based anonymous share link for a visualization.

    A random UUID token gates access; `valid_until` (optional) bounds the
    link lifetime and `is_active` allows revocation without deletion.
    """
    created_at = models.DateTimeField(auto_now_add=True)
    is_active = models.BooleanField(default=True)
    visualization = models.ForeignKey(Visualization)
    created_by = models.ForeignKey(User, null=True)
    token = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)
    valid_until = models.DateTimeField(null=True)
    label = models.CharField(max_length=255, null=True)
    def generate_url(self, request):
        # Absolute URL embedding the share token so anonymous viewers can
        # open the visualization without authentication.
        return request.build_absolute_uri(reverse('visualizations_show_anonymous', kwargs=dict(visualization_id=self.visualization.id, token=str(self.token))))
class SharedDashboard(models.Model):
    """Token-based anonymous share link for a dashboard (mirrors
    SharedVisualization)."""
    created_at = models.DateTimeField(auto_now_add=True)
    is_active = models.BooleanField(default=True)
    dashboard = models.ForeignKey(Dashboard)
    created_by = models.ForeignKey(User, null=True)
    token = models.UUIDField(default=uuid.uuid4, editable=False, unique=True)
    valid_until = models.DateTimeField(null=True)
    label = models.CharField(max_length=255, null=True)
    def generate_url(self, request):
        # Absolute anonymous-play URL including the share token.
        return request.build_absolute_uri(reverse('dashboards_play_anonymous', kwargs=dict(dashboard_id=self.dashboard.id, token=str(self.token))))
    def generate_path(self):
        # Path-only variant (no scheme/host) of the anonymous-play URL.
        return reverse('dashboards_play_anonymous', kwargs=dict(dashboard_id=self.dashboard.id, token=str(self.token)))
|
998,447 | 9c5c937a671489014c28a10928b056c45b43be9d | # for module compile
Import('env')
Import('RTT_ROOT')
Import('rtconfig')
# build each components
# Accumulate the build artifacts of each component, gated on rtconfig flags.
# NOTE(review): objs starts as '' but SConscript() usually returns a node
# list; confirm the '+=' concatenation type matches this build's setup.
objs = ''
objs += SConscript(RTT_ROOT + '/drivers/SConscript')
if rtconfig.CROSS_TOOL == 'gcc':
    # The GCC toolchain builds against the bundled minimal libc.
    rtconfig.RT_USING_MINILIBC = True
    objs = objs + SConscript('minilibc/SConscript')
# Each optional subsystem compiles only when its RT_USING_* flag is both
# present on rtconfig and truthy.
if 'RT_USING_SPI' in dir(rtconfig) and rtconfig.RT_USING_SPI:
    objs = objs + SConscript('drivers/SConscript')
if 'RT_USING_FINSH' in dir(rtconfig) and rtconfig.RT_USING_FINSH:
    objs = objs + SConscript('finsh/SConscript')
if 'RT_USING_DFS' in dir(rtconfig) and rtconfig.RT_USING_DFS:
    objs = objs + SConscript('filesystem/SConscript')
if 'RT_USING_LWIP' in dir(rtconfig) and rtconfig.RT_USING_LWIP:
    objs = objs + SConscript('lwip/SConscript')
if 'RT_USING_RTGUI' in dir(rtconfig) and rtconfig.RT_USING_RTGUI:
    objs = objs + SConscript('gui/SConscript')
Return('objs')
|
998,448 | 7b5e0b849db8cbd2c1865042926023ddc256d8de | import logging
logger = logging.getLogger(__name__)
def func():
    # NOTE(review): this mixes the root logger (logging.error) with the
    # module logger (logger.info); the two go through different handler
    # configurations, and the INFO record is dropped unless the module
    # logger's effective level allows it. Confirm the root call is
    # deliberate (it looks like a demo of the difference).
    logging.error("aaaaaaaaaa")
    logger.info("a2_aaaaaaaaaaaaaa")
998,449 | 89f12a326c81154cee7f0b126647acf2b78e3b96 | from requests_html import HTMLSession
import time
import datetime
s = HTMLSession()
headers = {
'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
'accept-encoding': 'gzip, deflate, br',
'accept-language': 'en-GB,en-US;q=0.9,en;q=0.8',
'cache-control': 'max-age=0',
'cookie': 'ncbi_sid=324F04060EA7D083_9471SID; _ga=GA1.2.2054965354.1625982931; pmc.article.report=; pmc_labs_version=""; _gid=GA1.2.732976300.1633866554; WebEnv=1ncJSF8xxGLn_nGzKWJq-Ojyv_hBiRcSAQi8bQV-YEWQd%40324F04060EA7D083_9471SID; _gat_ncbiSg=1; QSI_SI_4ZW5tSQNmEIzIvY_intercept=true; _gat_dap=1; ncbi_pinger=N4IgDgTgpgbg+mAFgSwCYgFwgCIAYBC2AzAKwCCAnBUUfgOwCMDJuruRAYnUQGwAsuOuwYAmAHQMxAWzgiQAGhABXAHYAbAPYBDVCqgAPAC6ZQcrGCkBjALQAzCBpWGoK9IqKZwVhSD6etEIbIlmpQPiSePiK4nniEpJTUtIzMbOxcvAJCRKIS0rJRDJ4WNvaOzq4YJRgBQSFQGAByAPKNAKJRZiAA7r1iKpYARsj9alL9yIhiAOYaMFEUngy8PD5EMVh0PAAca0VYyzyr7l10IrvuHli2WmoAzmHufliGEEqPIES7WGuLWHwkCgkC6+Da+bY7GKKPhXEC4MQ5CQ+PjPZTqbS6AzGaERLBQkAkWHbPhFRQkVEkHbhVabbYgkh0Tz4nj7EA8IEgAC+nKAA===',
'referer':'https://www.ncbi.nlm.nih.gov/labs/pmc',
'sec-ch-ua': '"Chromium";v="94", "Google Chrome";v="94", ";Not A Brand";v="99"',
'sec-ch-ua-mobile': '?0',
'sec-ch-ua-platform': '"Windows"',
'sec-fetch-dest': 'document',
'sec-fetch-mode': 'navigate',
'sec-fetch-site': 'none',
'sec-fetch-user': '?1',
'upgrade-insecure-requests': '1',
'user-agent': 'Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.71 Safari/537.36',
'connection':'keep-alive'
}
print('starting to extract...')
start_time = datetime.datetime.now()
# BUG FIX: end_time was only assigned inside the inner loop, so the summary
# print below raised NameError whenever no PDF link was found (or every
# request failed). Default it up front.
end_time = start_time
# Context-manage the input file instead of leaving it open for the run.
with open('inputfile.txt', 'r') as input_file:
    urls = input_file.readlines()
for url in urls:
    urllist = url.strip()
    try:
        r = s.get(urllist, headers=headers)
        pdfs = r.html.find('#main-content > aside > section:nth-child(1) > ul > li.pdf-link.other_item')
        for item in pdfs:
            pmcpdf = 'https://www.ncbi.nlm.nih.gov' + item.find('a', first=True).attrs['href']
            pmcid = item.find('a', first=True).attrs['href'].split('/')[-3]
            # Throttle so NCBI does not block the scraper.
            time.sleep(3)
            # Append-and-close per hit so partial progress survives a crash.
            with open('output_pdf_urls.txt', 'a') as output:
                output.write(pmcid + '\t' + pmcpdf + '\n')
            end_time = datetime.datetime.now()
    except ConnectionError:
        # Skip URLs that fail at the network level and keep going.
        pass
print('File downloaded')
print('Time taken to task complete:', end_time - start_time)
998,450 | 40adc18a8e2bb587c7be206d2101b20f5e1efaa0 | import ee
import geemap
# Create an interactive map centered at (lat, lon).
Map = geemap.Map(center=[40, -100], zoom=4)
# Load a Landsat 8 collection, restricted to scenes covering Berkeley, CA
# during 2014, ordered by cloud cover (least cloudy first).
collection = ee.ImageCollection('LANDSAT/LC08/C01/T1') \
    .filterBounds(ee.Geometry.Point(-122.262, 37.8719)) \
    .filterDate('2014-01-01', '2014-12-31') \
    .sort('CLOUD_COVER')
# Compute the median of each pixel for each band of the 5 least cloudy scenes.
median = collection.limit(5).reduce(ee.Reducer.median())
# Visualization parameters: false-color composite (NIR/red/green medians).
vizParams = {'bands': ['B5_median', 'B4_median', 'B3_median'],
             'min': 5000, 'max': 15000, 'gamma': 1.3}
Map.setCenter(-122.262, 37.8719, 10)
Map.addLayer(median, vizParams, 'Median image')
# Display the map (notebook display; the bare expression renders the widget).
Map
|
998,451 | c09e5aae89e97e73aadcaf4b9720c60389daa408 | import numpy as np
from random import randint
from src import data
xtrain, xtest, ytrain, ytest = data.load_spambase_test_train()
# In the final version of this module we shouldn't be tightly coupled to the
# features of the classifier that we are attacking. The decoupling would
# require an invertible mapping between feature vectors and objects (e.g. email
# messages) which would make matching feature column names and order
# irrelevant. We don't have that mapping yet so we rely on our features
# having the same order and names as the classifier we attack.
SPAMBASE_FEATURE_SPECS = [
    {
        "name": c,
        # pandas removed `.ix` (deprecated in 0.20, dropped in 1.0);
        # `.iloc[0]` is the positional equivalent. isinstance replaces the
        # exact-type comparison `type(...) == np.int64`.
        "type": int if isinstance(xtrain[c].iloc[0], np.int64) else float,
        "max": max(xtrain[c]),
        "min": min(xtrain[c]),
    }
    for c in xtrain.columns if c != "spam"
]
def create_greater_than_constraint(
        x,
        column_name,
        column_index,
        greater_than,
        upper_bound
):
    """Create a constraint that requires a certain feature to be greater than
    a specified value.

    The feature constraints are used by the anticlassifier when generating
    vectors. The constraints are used in two roles:

        - when generating the initial guess of the feature vector
        - in the scipy.optimize routines that seek the optimal vector

    These roles require the constraints to be expressed in a certain format.
    The initial guess generation requires that the "init" field be a callable
    (see anticlassifier.py for details). The scipy.optimize routines require
    that the constraints be a list of dicts, with "type" set and "fun" a
    callable returning a non-negative value when the constraint is satisfied.

    For details, refer to:
    http://docs.scipy.org/doc/scipy/reference/generated/scipy.optimize.minimize.html

    :param x: dataframe whose ``columns`` are used to sanity-check the index.
    :param column_name: expected name of the constrained feature.
    :param column_index: position of the feature in the vector.
    :param greater_than: lower bound the feature must exceed.
    :param upper_bound: inclusive upper bound used when seeding a guess.
    :returns: constraint dict with "name", "type", "fun" and "init" keys.
    """
    assert x.columns[column_index] == column_name

    def _slack(vec):
        # "ineq" constraints are satisfied when this is >= 0.
        return vec[column_index] - greater_than

    def _init(vec):
        # Seed the initial guess with a random feasible value; randint's
        # bounds are inclusive. (The original used a lambda that shadowed
        # the outer `x` and called __setitem__ directly.)
        vec[column_index] = randint(greater_than, upper_bound)

    return {
        "name": "{0}_gt_{1}".format(column_name, greater_than),
        "type": "ineq",
        "fun": _slack,
        "init": _init,
    }
# Constraints handed to the minimizer; their positional indices must match
# the order of the feature specs above.
SPAMBASE_CONSTRAINTS = [
    create_greater_than_constraint(
        xtrain, "capital_run_length_average", 0, 10, 25),
    create_greater_than_constraint(
        xtrain, "capital_run_length_longest", 1, 5, 10),
]
|
998,452 | a0209c444ba14442e40acc7df905fa5a1f86f4cc | n, x = map(int, input().split())
# Per-child candy requirements (n and the limit x are read on the line above).
l = list(map(int, input().split()))
# count starts at 1: the distributor themself is counted even before any
# candies are handed out — presumably per the problem statement; confirm.
count = 1
d = 0  # running total of candies handed out so far
for lx in l:
    d += lx
    if d <= x:
        count += 1
    else: break
print(count) |
998,453 | 6efd46dda40a0c11dd04d0457bfba54002b65007 | #!/usr/bin/env python3
import json,requests
import pandas as pd
# Base URL for the NSE "live analysis" JSON feeds.
_LIVE_ANALYSIS = "https://www.nseindia.com/live_market/dynaContent/live_analysis"

url_gainers_nifty50 = _LIVE_ANALYSIS + "/gainers/niftyGainers1.json"
url_gainers_nifty_next50 = _LIVE_ANALYSIS + "/gainers/jrNiftyGainers1.json"
url_fno_gainers = _LIVE_ANALYSIS + "/gainers/fnoGainers1.json"
url_losers_nifty50 = _LIVE_ANALYSIS + "/losers/niftyLosers1.json"
url_losers_nifty_next50 = _LIVE_ANALYSIS + "/losers/jrNiftyLosers1.json"
url_fno_losers = _LIVE_ANALYSIS + "/losers/fnoLosers1.json"
url_volume_gainers = _LIVE_ANALYSIS + "/volume_spurts/volume_spurts.json"
url_oi_spurts = _LIVE_ANALYSIS + "/oi_spurts/topPositiveOIChangeData.json"


def _fetch_symbols(url, key="symbol"):
    """Download an NSE JSON feed and return its list of ticker symbols.

    :param url: feed URL; the payload is expected to carry a "data" list.
    :param key: per-row field holding the symbol ("sym" for the volume feed).
    """
    payload = json.loads(requests.get(url).text)
    return [row[key] for row in payload["data"]]


# One fetch per feed (the original repeated the request/parse/extract
# boilerplate eight times).
gainers_nifty50 = _fetch_symbols(url_gainers_nifty50)
gainers_nifty_next50 = _fetch_symbols(url_gainers_nifty_next50)
fno_gainers = _fetch_symbols(url_fno_gainers)
fno_losers = _fetch_symbols(url_fno_losers)
losers_nifty50 = _fetch_symbols(url_losers_nifty50)
losers_nifty_next50 = _fetch_symbols(url_losers_nifty_next50)
volume_gainers = _fetch_symbols(url_volume_gainers, key="sym")
oi_spurts = _fetch_symbols(url_oi_spurts)

gainers = gainers_nifty50 + gainers_nifty_next50
losers = losers_nifty50 + losers_nifty_next50
gainers_losers = gainers + losers
fno_gainers_fno_losers = fno_gainers + fno_losers

# Intersections between the cash, F&O, volume and open-interest lists.
common_vol_oi = list(set(volume_gainers).intersection(oi_spurts))
common_cash_fno = list(set(gainers_losers).intersection(fno_gainers_fno_losers))
common_cash_fno_vol_oi = list(set(common_vol_oi).intersection(common_cash_fno))
common_cash_fno_oi = list(set(common_cash_fno).intersection(oi_spurts))
common_cash_fno_vol = list(set(common_cash_fno).intersection(volume_gainers))
common_cash_volume_gainers = list(set(gainers_losers).intersection(volume_gainers))
common_cash_oi_spurts = list(set(gainers_losers).intersection(oi_spurts))
common_fno_gainers_fno_losers_vol = list(set(fno_gainers_fno_losers).intersection(volume_gainers))
common_fno_gainers_fno_losers_oi = list(set(fno_gainers_fno_losers).intersection(oi_spurts))
common_gainers_fno_gainers = list(set(gainers).intersection(fno_gainers))
common_losers_fno_losers = list(set(losers).intersection(fno_losers))

print("\n----------\n")
print("CASH & FnO")
print("{}".format("=" * 6))
for stock in common_cash_fno:
    print(stock)
print("\n----------\n")
|
998,454 | a4b90a381a7dbe2f37a6abd8880e8c9c116aa6c4 | import logging
from typing import Optional
from datetime import datetime, timedelta
import uuid
from fastapi import APIRouter, HTTPException, Depends, status
from fastapi.security import OAuth2PasswordBearer, OAuth2PasswordRequestForm
from jose import JWTError, jwt
from pydantic import BaseModel
from api.conf import config
router = APIRouter()
logger = logging.getLogger(__name__)

# RS256 is asymmetric: tokens are signed with the private key and verified
# with the public key, so services that only verify never see the secret.
SECRET_KEY = config["JWT_PRIVATE_KEY"]
PUBLIC_KEY = config["JWT_PUBLIC_KEY"]
ALGORITHM = "RS256"
ACCESS_TOKEN_EXPIRE_MINUTES = 30   # short-lived access token
REFRESH_TOKEN_EXPIRE_DAYS = 120    # long-lived refresh token
def example_user_validator(username: str, password: str):
    """Demo credential check: accept only test/password.

    Returns the user record dict on success, ``False`` otherwise. Replace
    with a real user store in production.
    """
    if username != "test" or password != "password":
        return False
    return {"username": username, "fullname": "Test User", "groups": ["Admins", "Testuser"]}
class AccessRefreshToken(BaseModel):
    """Response body of /login: a freshly issued access + refresh token pair."""
    access_token: str
    refresh_token: str
class AccessToken(BaseModel):
    """Response body of /refresh: a freshly issued access token."""
    access_token: str
# Extracts the bearer token from the Authorization header; tokenUrl is only
# advertised in the OpenAPI docs.
oauth2_scheme = OAuth2PasswordBearer(tokenUrl="auth/login")


async def get_jwt_payload(token: str = Depends(oauth2_scheme)):
    """Dependency: decode and verify the bearer JWT, returning its claims.

    Raises HTTP 401 when the signature is invalid, the token is expired,
    or the "sub" claim is missing.
    """
    credentials_exception = HTTPException(
        status_code=status.HTTP_401_UNAUTHORIZED,
        detail="Could not validate credentials",
        headers={"WWW-Authenticate": "Bearer"},
    )
    try:
        # jose verifies signature and the "exp" claim during decode.
        payload = jwt.decode(token, PUBLIC_KEY, algorithms=ALGORITHM)
        if payload.get("sub") is None:
            raise credentials_exception
    except JWTError:
        raise credentials_exception
    return payload
# Template of the claims carried by every token issued here.
# NOTE(review): dict.copy() on this template is a *shallow* copy — the
# mutable "user_claims" dict would be shared by every copied token.
jwt_claims = {
    "sub": "",  # the username
    "user_claims": {},  # everything user related
    "exp": 0,  # expiry datetime
    "type": "access",  # access or refresh
    "jti": ""  # unique token identifier to revoke tokens. Generated with uuid.uuid4()
}
@router.post("/login", response_model=AccessRefreshToken)
async def login_for_access_token(form_data: OAuth2PasswordRequestForm = Depends()):
"""
Login to get an access and refresh token for later authentication.
"""
user = example_user_validator(form_data.username, form_data.password)
if not user:
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="Incorrect username or password",
headers={"WWW-Authenticate": "Bearer"},
)
access_token_data = jwt_claims.copy()
access_token_data["sub"] = user["username"]
access_token_data["exp"] = datetime.utcnow() + timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
access_token_data["jti"] = str(uuid.uuid4())
refresh_token_data = jwt_claims.copy()
refresh_token_data["sub"] = user["username"]
refresh_token_data["exp"] = datetime.utcnow() + timedelta(days=REFRESH_TOKEN_EXPIRE_DAYS)
refresh_token_data["type"] = "refresh"
refresh_token_data["jti"] = str(uuid.uuid4())
return AccessRefreshToken(
access_token=jwt.encode(access_token_data, SECRET_KEY, algorithm=ALGORITHM),
refresh_token=jwt.encode(refresh_token_data, SECRET_KEY, algorithm=ALGORITHM)
)
@router.get("/refresh", response_model=AccessToken)
async def generate_new_refesh_key(payload: dict = Depends(get_jwt_payload)):
"""
Get a new access token with a valid refresh token.
"""
if payload["type"] != "refresh":
raise HTTPException(
status_code=status.HTTP_401_UNAUTHORIZED,
detail="You gave the access key, but we need the refresh key",
headers={"WWW-Authenticate": "Bearer"},
)
# <- Your token revocation code should be here!
access_token_data = jwt_claims.copy()
access_token_data["sub"] = payload["sub"]
access_token_data["exp"] = datetime.utcnow() + timedelta(minutes=ACCESS_TOKEN_EXPIRE_MINUTES)
access_token_data["jti"] = str(uuid.uuid4())
return AccessToken(access_token=jwt.encode(access_token_data, SECRET_KEY, algorithm=ALGORITHM))
@router.get("/check_auth", response_model=dict)
async def check_token_for_username(payload: dict = Depends(get_jwt_payload)):
"""
Get all jwt user claims. That's how a protected endpoint should look like.
"""
return payload
|
998,455 | c74bab0c8b272c36a6ba3af45f29a3aa0f2b4c1c | j=list(input())
# j (read on the line above) is the input string split into characters;
# sort them and print the result with no separators or trailing newline.
j.sort()
for d in j:
    print(d, end="")
|
998,456 | fb86fc48d16b33686083b5b6fb38b8200d076412 | import logging
from colab_ssh._command import run_command as _run_command
import os
import sys
import importlib
import requests
from .get_tunnel_config import get_tunnel_config
import re
from urllib.parse import quote
from .utils import show_hint_message
from colab_ssh.utils.logger import get_logger
logger= get_logger()
def add_folder_to_sys_path(folder_path):
    """Prepend *folder_path* to the module search path so it is importable."""
    sys.path[:0] = [folder_path]
def parse_cloning_output(array):
    """Callback for the `git clone` command output.

    A single output line means the clone succeeded; the cloned folder is
    then added to sys.path. Otherwise the output is treated as an error
    and surfaced through the git logger plus a hint message.
    """
    git_logger = get_logger("git")
    # Successfully cloned: git prints "Cloning into '<folder>'..." — the
    # quoted name is extracted below.
    # NOTE(review): re.search returns None if git's message format changes;
    # this would raise AttributeError here — confirm acceptable.
    if len(array) == 1:
        folder_path = "./" + re.search("'(.*?)'", array[0]).groups(1)[0]
        print('''Successfully cloned the repository in {}'''.format(folder_path))
        return add_folder_to_sys_path(folder_path)
    # Error occured in the cloning
    git_logger.debug(array)
    info, error, *rest = array
    git_logger.error(error)
    show_hint_message(error)
def init_git(repositoryUrl,
             branch="",
             personal_token="",
             email=None,
             username=None,
             verbose=False):
    """Clone a GitHub repository into the Colab VM and wire it up for SSH use.

    Handles token/password authentication, configures git identity, installs
    the repo's .colab_ssh/authorized_keys, and prints a VSCode remote link.

    :param repositoryUrl: HTTPS GitHub URL (with or without ".git").
    :param branch: optional branch to clone.
    :param personal_token: optional GitHub personal access token.
    :param email: optional git user.email to configure globally.
    :param username: optional git user.name / login name.
    :param verbose: print exceptions from the VSCode-link step.
    """
    # Add the Personal access token if available
    full_url = repositoryUrl.replace("github.com", personal_token+"@github.com") if personal_token else repositoryUrl
    if not personal_token:
        # Probe the URL: a 200 means the repository is public and needs no auth.
        response = requests.get(full_url)
        if response.status_code == 200:
            logger.info("✔️ Public repository")
        else:
            # Get username if not passed
            username_input = input("Enter your username: (leave it empty if it's '{}')\n".format(username))
            username = quote(username_input or username)
            # Get password
            from getpass import getpass
            password = quote(getpass('Enter your password: \n'))
            if password:
                full_url = repositoryUrl.replace("github.com", "{}:{}@github.com".format(username, password))
    # Clone the repository then add the folder to the sys.path
    _run_command(
        "git clone {} {}".format(
            # Branch argument
            "--branch {}".format(branch) if branch else "",
            # Url argument
            full_url),
        callback=parse_cloning_output
    )
    repo_name = os.path.basename(repositoryUrl)
    repo_name, _ = os.path.splitext(repo_name)
    # Checkout the branch
    # NOTE(review): os.system spawns a subshell, so this `cd` does NOT change
    # this process's working directory — likely a no-op; confirm intent.
    os.system(f'cd {repo_name}')
    # Make sure that even if the repository is public, the personal token is still in the origin remote url
    if personal_token:
        os.system("git remote set-url origin {}".format(full_url))
    # Add the email and username
    if email: os.system('git config --global user.email "{}"'.format(email))
    if username: os.system('git config --global user.name "{}"'.format(username))
    # Bring the public key from the repository and paste it in the authorized_keys
    os.system("mkdir -p ~/.ssh && curl -s -L -f {} {}/{}/.colab_ssh/authorized_keys >> ~/.ssh/authorized_keys".format(
        ("-H 'Authorization: token {}'".format(personal_token)) if personal_token else "",
        repositoryUrl.split(".git")[0].replace("github.com", "raw.githubusercontent.com"),
        branch))
    # Print the VSCode direct link
    try:
        output = get_tunnel_config()
        link = f"vscode://vscode-remote/ssh-remote+root@{output['domain']}:{output['port']}{os.getcwd()}/{repo_name}"
        # Rich HTML link inside notebooks, plain URL on a terminal.
        if importlib.util.find_spec("IPython") and 'ipykernel' in sys.modules:
            from IPython.display import HTML, display
            display(
                HTML(
                    f"""[Optional] You can open the cloned folder using VSCode, by clicking
                    <a href='{link}'>{repo_name}</a>
                    """
                )
            )
        else:
            # Support for terminal
            print(f"[Optional] You can open the cloned folder using VSCode, by going to this url:\n{link}")
    except Exception as e:
        # Best-effort convenience feature: never fail init_git over it.
        if verbose:
            print(e)
|
998,457 | bd2fa77a9520ba81c96e54ff9293ec6086ad3d90 | '''
print('第一题')
def mysum(*args):
return sum(args)
print(mysum(1,3,36,3))
'''
print('第二题')
#方法一
def mymax(*args):
    """Return the largest element.

    Called with a single argument, that argument is treated as an iterable
    (list, string, ...); called with several arguments, the largest of the
    arguments themselves is returned.
    """
    if len(args) == 1:
        ordered = sorted(*args)
        return ordered[-1]
    return max(args)
#方法二
def mymax2(a, *args):
    """Return the largest element by hand-rolled linear scan.

    With extra positional arguments, compares *a* and *args* directly;
    with a single argument, scans the elements of *a*.
    """
    if args:
        # Several scalars: a is the first candidate.
        best = a
        for candidate in args:
            if candidate > best:
                best = candidate
        return best
    # Single sequence: start from its first element.
    best = a[0]
    for candidate in a[1:]:
        if candidate > best:
            best = candidate
    return best
#方法三:
def mymax3(a, *args):
    """Return the largest element, delegating both call shapes to one helper."""
    def _largest(*values):
        # Linear scan for the maximum of the given values.
        best = values[0]
        for value in values[1:]:
            if value > best:
                best = value
        return best
    if not args:
        # Single argument: treat it as an iterable of candidates.
        return _largest(*a)
    return _largest(a, *args)
print(mymax([1,2,3]))
#print(mymax(100,200))
#print(mymax('abc'))
#print(mymax('c','f','g'))
#print(mymax([1,23,4],[3,4,5]))
#print(mymax(1,[2,3]))
'''
print('第三题')
def min_max(*args):
t = (min(args),max(args))
return t
print(min_max(1,2,3,4))
'''
|
998,458 | d81541f0bfae3d2a5323f517300afa9501bdf685 | import math
def troco(T):
moedas = {
1: 0,
5: 0,
10: 0,
25: 0,
50: 0,
100: 0
}
while(T > 0): # Criterio guloso: escolher maior moeda possivel. Colocar quantas der dela
if T >= 100:
moedas[100] = int(math.floor(T/100))
T = T % 100
elif T >= 50:
moedas[50] = int(math.floor(T/50))
T = T % 50
elif T >= 25:
moedas[25] = int(math.floor(T/25))
T = T % 25
elif T >= 10:
moedas[10] = int(math.floor(T/10))
T = T % 10
elif T >= 5:
moedas[5] = int(math.floor(T/5))
T = T % 5
elif T >= 1:
moedas[1] = T
T = 0
return moedas
def main():
    """Read an amount and print its greedy coin decomposition.

    Replaces six copy-pasted if/else blocks with one loop over the
    denominations; the printed output is byte-identical.
    """
    T = int(input("T = "))
    moedas = troco(T)
    print("{} pode ser representado por:".format(T))
    # One line per denomination actually used, with singular/plural wording.
    for valor in (1, 5, 10, 25, 50, 100):
        quantidade = moedas[valor]
        if quantidade == 1:
            print("1 moeda de {}".format(valor))
        elif quantidade != 0:
            print("{} moedas de {}".format(quantidade, valor))
if __name__ == '__main__':
main() |
998,459 | 394b0ed71ca4595080fbe9e5da79eec56527f1b4 | import zmq
import time
import pickle
def serve():
    """Run a blocking ZeroMQ REP server on port 5555.

    Each request is expected to be a pickled dict with a 'foo' key; the
    server prints that value and replies with "world".

    SECURITY: pickle.loads on data received from the network executes
    arbitrary code from a malicious peer — only use on trusted networks.
    """
    context = zmq.Context()
    socket = context.socket(zmq.REP)
    socket.bind('tcp://*:5555')
    while True:
        message = socket.recv()
        # print() call form and the bytes literal work on both Python 2 and
        # 3; the original Python-2 print statement is a SyntaxError on 3.
        print(pickle.loads(message)['foo'])
        socket.send(b"world")
if __name__ == '__main__':
serve()
|
998,460 | 698f0a391873d54323796003131e28e3d6fb588f | from .ArangodServer import ArangodServer
class DBServer(ArangodServer):
    """An arangod instance configured for the cluster DBSERVER role."""

    def __init__(self, environment):
        super().__init__(environment)
        environment.register_dbserver(self)

    def collect_parameters(self, param):
        """Fill *param* with the command-line options for a DB server."""
        self._environment.insert_cluster_agency_endpoints(param)
        param.update({
            "--cluster.my-address": self.get_endpoint(),
            "--cluster.my-role": "DBSERVER",
            "--javascript.startup-directory": "js",  # yolo
            "--http.trusted-origin": "all",
            "--database.check-version": "false",
            "--database.upgrade-check": "false",
        })
        super().collect_parameters(param)
|
998,461 | ded94b0e1248a8d4ecf96b5f403d06e7a31e9f80 | # Copyright 2013-2018 Adam Karpierz
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# <AK> added
#
from __future__ import absolute_import
from . import common
from jpype import JPackage, JClass, JClassUtil
class JClassUtilTestCase(common.JPypeTestCase):
    """Tests for JClassUtil.isInterface."""

    def testIsInterface(self):
        """isInterface is True for Java interfaces, False for classes and
        for non-class arguments (strings, ints)."""
        itf1 = JPackage("jpype").jclassutil.TestInterface1
        itf2 = JClass("jpype.jclassutil.TestInterface2")
        itf3 = JPackage("java.lang").Cloneable
        itf4 = JClass("java.io.Serializable")
        cls1 = JPackage("java.lang").Integer
        cls2 = JClass("java.math.BigInteger")
        self.assertTrue(JClassUtil.isInterface(itf1))
        self.assertTrue(JClassUtil.isInterface(itf2))
        self.assertTrue(JClassUtil.isInterface(itf3))
        self.assertTrue(JClassUtil.isInterface(itf4))
        self.assertFalse(JClassUtil.isInterface(cls1))
        self.assertFalse(JClassUtil.isInterface(cls2))
        # Non-class inputs must be handled gracefully, not raise.
        self.assertFalse(JClassUtil.isInterface("skdffr"))
        self.assertFalse(JClassUtil.isInterface(12))
|
998,462 | 2b2ee63a6595c6ab63edb8aa997ca7d47e4b402d | from __future__ import print_function
import midi
import sys
from music_model import MusicModel
import random
from math import log
import numpy as np
import data
from util import compose
from util import play_midi
from util import plot_midi
from os import listdir, mkdir
import matplotlib.pyplot as plt
def read_pitchs(midi_file):
    """Return the pitch sequence of the melody track of *midi_file*.

    Prefers the second MIDI track when present (track 0 usually carries
    tempo/meta events) and falls back to the first track otherwise.
    """
    song = midi.read_midifile(midi_file)
    song.make_ticks_abs()
    # tracks holds [ticks0, pitches0, ticks1, pitches1, ...].
    tracks = []
    for track in song:
        notes = [note for note in track if note.name == 'Note On']
        pitch = [note.pitch for note in notes]
        tick = [note.tick for note in notes]
        tracks += [tick, pitch]
    try:
        pitchs = tracks[3]
    except IndexError:
        # Single-track file (original used a bare `except:`, which also
        # swallowed KeyboardInterrupt/SystemExit).
        pitchs = tracks[1]
    return pitchs
if __name__ == "__main__":
# midi_dir = './collection/midi/'
# midi_dir = './collection/midi-learn/'
# midi_dir = './collection/midi_collection1/'
# midi_dir = './collection/zhou/'
midi_dir = './collection/scale_chords_small/midi/'
midi_files_list = [f for f in listdir(midi_dir) if f.endswith('.mid')]
# print(midi_files_list)
for midi_file in midi_files_list:
melody_root = midi_file.split('/')[-1][:-4]
corpus = [data.read_pitchs(midi_dir+midi_file)]
# model.fit_corpus(corpus)
plot_midi(midi_dir+midi_file,show=False,save=True,save_dir=midi_dir)
# plt.savefig(midi_dir+melody_root+'.png') |
998,463 | 70edf6c4f1877f42db88615ad48c0a105a44861b | a, p = map(int, input().split())
print(int((int((3*a)+p)/2)))
|
998,464 | 7cd62f499de4394322d6a2e5dd15e7618301fff0 | import plotly.graph_objects as go
import plotly.express as px
import numpy as np
from random_walk import RandomWalk
# Generate one random walk and render it as a Plotly scatter, coloring
# each point by its position in the walk (light -> dark blue).
rw = RandomWalk()
rw.fill_walk()
fig = go.Figure(data=go.Scatter(
    x=rw.x_values,
    y=rw.y_values,
    mode='markers',
    name='Random Walk',
    marker=dict(color=np.arange(rw.num_points),  # sequence index as color
                size=8,
                colorscale='Blues',
                showscale=True
                )
))
fig.show()
|
998,465 | 5a7d79fa6fcf9dc1eee829fb6639422b57a87dbb | listapesos = []
# Read five weights into listapesos (created just above) and report extremes.
for c in range(1, 6):
    peso = float(input(f'Informe o peso da {c}° pessoa: '))
    listapesos.append(peso)
print(f'O menor peso informado foi {min(listapesos)}kg')
print(f'O maior peso informado foi {max(listapesos)}kg')
|
998,466 | 9b917dbbd95a33e2f2d1f6ef10a1aaef28508cdf | import json
from enum import Enum
from typing import Any, Type
from core.domain import ConsumerLifestageType, GenderType, ProductCategory
from scraping.spiders._base import SETTINGS
def enum_has_value(enum: Type[Enum], value: Any) -> bool:
    """Return True when *value* is a valid member value of *enum*."""
    try:
        enum(value)
        return True
    except ValueError:
        return False
def test_startjob() -> None:
    """Validate the shape of every spider's SETTINGS entry.

    Each setting must carry start_urls (str or list of str) and a category
    that maps onto ProductCategory; gender, consumer_lifestage and meta_data
    are optional but type-checked when present.
    """
    for merchant, settings in SETTINGS.items():
        for setting in settings:
            assert "start_urls" in setting
            assert "category" in setting
            start_urls = setting["start_urls"]
            category = setting["category"]
            gender = setting.get("gender")
            consumer_lifestage = setting.get("consumer_lifestage")
            meta_data = setting.get("meta_data")
            if isinstance(start_urls, list):
                assert all([isinstance(start_url, str) for start_url in start_urls])
            else:
                assert isinstance(start_urls, str)
            # A category may be a (value, extra-meta) pair.
            if isinstance(category, tuple):
                assert len(category) == 2
                category, category_meta_data = category
                assert isinstance(category_meta_data, dict)
            assert enum_has_value(ProductCategory, category)
            if gender is not None:
                assert enum_has_value(GenderType, gender)
            if consumer_lifestage is not None:
                assert enum_has_value(ConsumerLifestageType, consumer_lifestage)
            if meta_data is not None:
                # meta_data may be given as a JSON string or a dict.
                if isinstance(meta_data, str):
                    meta_data = json.loads(meta_data)
                assert isinstance(meta_data, dict)
|
998,467 | 0d577d7874709ab946d138ed134bb264ce3833a5 | banner = """\u001b[36;1m
█████▄ ██▀███ ▄▄▄ ▄████ ▒█████ ███▄ ▄███ ▄███ ▓▓▄ █
▒██ ██▌▓██ ▒ ██▒▒████▄ ██▒ ▀█▒▒██ ██▒ ██▒▀█▀█ █ ▒██ ██ ▄ ██ ▀█ █
░██ █▌▓██ ▄█ ▒▒██ ▀█▄ ▒██░▄▄▄░▒██░ ██ ▓██ ▓██▒ ██ ▀█▄▓ ██ ▀█ ██▒
░▓█▄ ▌▒██▀▀█▄ ░██▄▄▄▄██ ░▓█ ██▓▒██ ██░▒██ ▒██ ░██▄▄▄▄██ ▓██▒ ▐▌██▒
░▒████▓ ░██▓ ▒██▒ ▓█ ▓██▒░▒▓███▀▒░ ████▓▒░▒██▒ ░██▒ ▓█ ▓██▒▒██░ ▓██░
▒▒▓ ▒ ░ ▒▓ ░▒▓░ ▒▒ ▒█░ ░▒ ▒ ░ ▒░▒░▒░ ░ ▒░ ░ ░ ▒▒ ▓▒█░░ ▒░ ▒ ▒
░ ▒ ▒ ░▒ ░ ▒░ ▒ ▒▒ ░ ░ ░ ░ ▒ ▒░ ░ ░ ░ ▒ ▒▒ ░░ ░░ ░ ▒░
░ ░ ░ ░░ ░ ░ ▒ ░ ░ ░ ░ ░ ░ ▒ ░ ░ ░ ▒ ░ ░ ░
░ ░ ░ ░ ░ ░ ░ ░ ░ ░ ░
\u001b[32;1m
--Presented with <3 by Shivanshu Sharma
\u001b[35 ____ __ _ _
| __ ) __ _ ___ ___ / /_ | || |
| _ \ / _` / __|/ _ \ | '_ \| || |_
| |_) | (_| \__ \ __/ | (_) |__ _|
|____/ \__,_|___/\___| \___/ |_|
\u001b[32;1m \u001b[0m """
print(banner)
menu=""" \u001b[33;1m
------------------------------------------
LIST OF DECODERS |
------------------------------------------
[+] ASCII CONVERTER -- Ascii.py |
[+] ATBASH DECODER -- Atbash.py |
[+] CAESER DECODER -- Caeser.py |
[+] BACON DECODER -- Bacon.py |
[+] BASE32 DECODER -- Base32.py |
[+] BASE64 DECODER -- Base64.py |
[+] BASE85 DECODER -- Base85.py |
[+] DNA DECODER -- Dna.py |
[+] MORSE DECODER -- Morse.py |
[+] NUMBER SYSTEM -- Num.py |
[!] BINARY TO TEXT |
[!] HEX TO TEXT |
[!] OCTAL TO TEXT |
[+] RAILFENCE DECODER -- Rail.py |
[+] REVERSE CIPHER -- Reverse.py |
[+] ROTn DECODER -- Rot.py |
[+] TRANSPOSITION CIPHER -- Trans.py |
[+] VIGNERE DECODER -- Vignere.py |
------------------------------------------s
\u001b[32;1m"""
print(menu)
import base64
# Prompt for a Base64 string and print its decoded text.
base64_message = input("Enter Base64 string: ")
base64_bytes = base64_message.encode('ascii')
message_bytes = base64.b64decode(base64_bytes)
# NOTE(review): decoding as ASCII raises UnicodeDecodeError for payloads
# containing non-ASCII bytes — confirm whether utf-8 was intended.
message = message_bytes.decode('ascii')
print(message)
|
998,468 | f33cc78b8e1cc2eef2b440ef2c39253bde1e6953 | # Generated by Django 2.2.7 on 2020-02-27 16:13
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: adjusts Scholarship file/image fields
    (upload paths and a default scholarship image). Do not hand-edit the
    operations; generate a new migration instead."""

    dependencies = [
        ('homepage', '0003_auto_20200113_1928'),
    ]

    operations = [
        migrations.AlterField(
            model_name='scholarship',
            name='document',
            field=models.FileField(upload_to='documents/scholarship_broucher/'),
        ),
        migrations.AlterField(
            model_name='scholarship',
            name='img',
            field=models.ImageField(default='pics/scholarship_pics/default.png', upload_to='pics'),
        ),
    ]
|
998,469 | 03d016e739a9bca9d3690473b34a1b712dda0e5c | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'tweetifier.ui'
#
# Created by: PyQt5 UI code generator 5.5.1
#
# WARNING! All changes made in this file will be lost!
import sys
from classifier import Classify
from preprocessed_tweets import first_level
from fetch_tweet import TweetFetch
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtWidgets import QApplication
from PyQt5.QtCore import *
class Ui_MainWindow(object):
    """PyQt5 UI for the tweet classifier window.

    setupUi/retranslateUi were generated by pyuic5 (see the header warning:
    regeneration discards edits); fromfile/fromtwitter/cl are hand-added
    button handlers.
    """

    def fromfile(self):
        """Handler: load a tweet from the local input file into the UI."""
        # NOTE(review): this rebinds self.fromfile from the method to a
        # TweetFetch instance on first click — confirm intentional.
        self.fromfile = TweetFetch()
        f = self.fromfile.fetchFromFile()
        self.textEdit.setText(f)
        self.textEdit_3.setText("Tweet Fetched from file")

    def fromtwitter(self):
        """Handler: fetch a live tweet from Twitter into the UI."""
        self.fromtwitter = TweetFetch()
        self.fromtwitter.status()
        f = self.fromtwitter.fetchFromTwitter()
        self.textEdit.setText(f)
        self.textEdit_3.setText("Tweet Fetched from Twitter")

    def cl(self):
        """Handler: run the two-level classification and show the result."""
        self.cl = Classify()
        a = self.cl.classify_r1()
        self.textEdit_2.setText(a + ' >>')
        b = self.cl.classify_r2(a)
        self.textEdit_2.append(b)
        self.textEdit_3.setText("Tweet Classfied")

    def setupUi(self, MainWindow):
        """Build the widget tree and connect the button handlers (generated)."""
        MainWindow.setObjectName("MainWindow")
        MainWindow.resize(579, 458)
        self.centralwidget = QtWidgets.QWidget(MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.pushButton = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton.setGeometry(QtCore.QRect(30, 140, 151, 27))
        self.pushButton.setObjectName("pushButton")
        self.pushButton.clicked.connect(self.fromfile)
        self.pushButton_2 = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton_2.setGeometry(QtCore.QRect(90, 300, 85, 27))
        self.pushButton_2.setObjectName("pushButton_2")
        self.pushButton_2.clicked.connect(self.cl)
        self.textEdit = QtWidgets.QTextEdit(self.centralwidget)
        self.textEdit.setGeometry(QtCore.QRect(220, 130, 301, 101))
        self.textEdit.setObjectName("textEdit")
        self.textEdit.setReadOnly(True)
        self.textEdit_2 = QtWidgets.QTextEdit(self.centralwidget)
        self.textEdit_2.setGeometry(QtCore.QRect(220, 250, 301, 171))
        self.textEdit_2.setObjectName("textEdit_2")
        self.textEdit_2.setReadOnly(True)
        self.line = QtWidgets.QFrame(self.centralwidget)
        self.line.setGeometry(QtCore.QRect(0, 230, 611, 16))
        self.line.setFrameShape(QtWidgets.QFrame.HLine)
        self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line.setObjectName("line")
        self.label = QtWidgets.QLabel(self.centralwidget)
        self.label.setGeometry(QtCore.QRect(20, 10, 541, 20))
        font = QtGui.QFont()
        font.setPointSize(10)
        self.label.setFont(font)
        self.label.setObjectName("label")
        self.textEdit_3 = QtWidgets.QTextEdit(self.centralwidget)
        self.textEdit_3.setGeometry(QtCore.QRect(220, 60, 301, 41))
        self.textEdit_3.setObjectName("textEdit_3")
        self.textEdit_3.setReadOnly(True)
        self.label_2 = QtWidgets.QLabel(self.centralwidget)
        self.label_2.setGeometry(QtCore.QRect(10, 70, 181, 20))
        font = QtGui.QFont()
        font.setPointSize(10)
        font.setItalic(False)
        self.label_2.setFont(font)
        self.label_2.setObjectName("label_2")
        self.line_2 = QtWidgets.QFrame(self.centralwidget)
        self.line_2.setGeometry(QtCore.QRect(0, 110, 601, 16))
        self.line_2.setFrameShape(QtWidgets.QFrame.HLine)
        self.line_2.setFrameShadow(QtWidgets.QFrame.Sunken)
        self.line_2.setObjectName("line_2")
        self.pushButton_3 = QtWidgets.QPushButton(self.centralwidget)
        self.pushButton_3.setGeometry(QtCore.QRect(30, 200, 151, 27))
        self.pushButton_3.setObjectName("pushButton_3")
        self.pushButton_3.clicked.connect(self.fromtwitter)
        MainWindow.setCentralWidget(self.centralwidget)
        self.statusbar = QtWidgets.QStatusBar(MainWindow)
        self.statusbar.setObjectName("statusbar")
        MainWindow.setStatusBar(self.statusbar)
        self.retranslateUi(MainWindow)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)

    def retranslateUi(self, MainWindow):
        """Install the user-visible strings (generated; translation hook)."""
        _translate = QtCore.QCoreApplication.translate
        MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
        self.pushButton.setText(_translate("MainWindow", "Fetch Tweet from File"))
        self.pushButton_2.setText(_translate("MainWindow", "Classify"))
        self.label.setText(_translate("MainWindow", "Welcome to Twitter Classifier! This system classifies tweet into 3 level deep cateorization"))
        self.label_2.setText(_translate("MainWindow", "Status of Processing Request"))
        self.pushButton_3.setText(_translate("MainWindow", "Fetch Tweet from Twitter"))
if __name__ == "__main__":
import sys
app = QtWidgets.QApplication(sys.argv)
MainWindow = QtWidgets.QMainWindow()
ui = Ui_MainWindow()
ui.setupUi(MainWindow)
MainWindow.show()
sys.exit(app.exec_())
|
998,470 | 8583e174d5651a30952a04c2f4b7f6097e7f4789 | from helpers.read_input import *
import copy
def parse_input(lines):
    """Parse program lines of the form "op ±arg" into instruction dicts.

    Each dict carries the opcode ('instruct'), its integer argument
    ('arg'), and a 'seen' flag used later to detect infinite loops.
    (The original iterated with enumerate but never used the index.)
    """
    program = []
    for line in lines:
        op, arg = line.split(' ')
        program.append({
            'instruct': op,
            'arg': int(arg),
            'seen': False,
        })
    return program
def is_repeated(instructions, accum):
    """Try flipping each jmp<->nop once; return the accumulator of the first
    variant whose program terminates (runs past the last instruction).

    Each candidate runs on a deep copy so the 'seen' flags and the flipped
    opcode never leak between attempts. Leftover debug prints and the dead
    ``accum = 0`` reset were removed; behavior is otherwise unchanged.
    """
    repeats = True
    i = 0
    while i < len(instructions) and repeats:
        trial = copy.deepcopy(instructions)
        op = trial[i]['instruct']
        if op == 'jmp':
            trial[i]['instruct'] = 'nop'
        elif op == 'nop':
            trial[i]['instruct'] = 'jmp'
        else:
            # 'acc' instructions are never flipped.
            i += 1
            continue
        accum, repeats = loop_accumulator(trial, 0)
        i += 1
    return accum
def loop_accumulator(visited, accum):
    """Execute the program until it terminates or revisits an instruction.

    Mutates the 'seen' flags in *visited*. Returns (accumulator, looped):
    looped is True when execution stopped because an instruction was about
    to run a second time, False when it ran past the end.
    """
    pc = 0
    while pc < len(visited) and not visited[pc]['seen']:
        visited[pc]['seen'] = True
        op = visited[pc]['instruct']
        if op == 'acc':
            accum += visited[pc]['arg']
            pc += 1
        elif op == 'jmp':
            pc += visited[pc]['arg']
        else:
            # 'nop': just advance.
            pc += 1
    if pc >= len(visited):
        return accum, False
    return accum, visited[pc]['seen']
def test():
    """Sanity-check part 2 against the AoC day-8 sample (expected accum 8)."""
    lines = read_line_input("d08", 'test')
    instructions = parse_input(lines)
    accum = is_repeated(instructions, 0)
    assert(accum == 8)
    print("done with assert", accum)
def run():
    """Run the sample check, then solve part 2 on the real puzzle input."""
    test()
    lines = read_line_input("d08")
    instructions = parse_input(lines)
    # print(loop_accumulator(instructions, 0))
    print(is_repeated(instructions, 0))
|
998,471 | a4635e31239839b936a6ad3572c654f5d1b8fd3a | # Copyright (C) 2020 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Mixins for entities."""
# pylint: disable=too-few-public-methods
class Reviewable(object):
  """A mixin for reviewable objects."""

  def update_review(self, new_review):
    """Refresh this object's review dict and status from *new_review*."""
    review_dict = new_review.convert_review_to_dict()
    self.review["status"] = review_dict["status"]
    reviewer = review_dict["last_reviewed_by"]
    if reviewer:
      self.review["last_reviewed_by"] = reviewer
    self._upd_reviewers(review_dict["reviewers"])
    self.update_attrs(review_status=review_dict["status"])

  def _upd_reviewers(self, new_reviewers):
    """Merge *new_reviewers* into the review's reviewer list if needed."""
    current = self.review["reviewers"]
    if not current:
      self.review["reviewers"] = new_reviewers
      return
    if new_reviewers and new_reviewers[0] not in current:
      self.review["reviewers"] = current + new_reviewers
|
998,472 | 1ba650b9e35b3eb1e10707e2b3b475b16c6ad614 | #!/usr/bin/env python3
import fileinput
R = 6
C = 50

# The 6x50 pixel grid; True means the pixel is lit.
screen = [[False for _ in range(C)] for _ in range(R)]


def print_():
    """Render the screen ('#' = lit, '.' = dark) followed by a blank line."""
    rendered = []
    for row in screen:
        rendered.append(''.join('#' if lit else '.' for lit in row))
    print('\n'.join(rendered))
    print()
def rect(A, B):
    """Light every pixel in the A-wide, B-tall top-left rectangle."""
    for y in range(B):
        screen[y][:A] = [True] * A
def rotate_row(A, B):
    """Rotate row A right by B positions, wrapping around."""
    width = len(screen[A])
    screen[A] = [screen[A][(col - B) % width] for col in range(width)]
def rotate_col(A, B):
    """Rotate column A down by B positions, wrapping around."""
    height = len(screen)
    shifted = [screen[(row - B) % height][A] for row in range(height)]
    for row, value in enumerate(shifted):
        screen[row][A] = value
# Apply each instruction (from stdin or the files named in argv) to the screen.
for line in fileinput.input():
    parts = line.strip().split()
    if parts[0] == 'rect':
        # e.g. "rect 3x2" -> A=3 wide, B=2 tall
        A, B = map(int, parts[1].split('x'))
        rect(A, B)
    elif parts[1] == 'row':
        # e.g. "rotate row y=0 by 4"
        A = int(parts[2][2:])  # strip the "y=" prefix
        B = int(parts[-1])
        rotate_row(A, B)
    else:
        # e.g. "rotate column x=1 by 1"
        A = int(parts[2][2:])  # strip the "x=" prefix
        B = int(parts[-1])
        rotate_col(A, B)
    print(line.strip())
    print_()
# Part 1 answer: how many pixels are lit after all instructions.
print(sum(sum(1 if b else 0 for b in r) for r in screen))
|
998,473 | 80933852d4db3533a085dcde57dc436d5180acbc | import random
from typing import Tuple
import numpy as np
import pytest
from fleet import astar
from fleet.serializable import Map
@pytest.mark.parametrize(
    argnames=("from_pos", "to_pos", "expected_n_moves", "obstacles",
              "obstructed"),
    argvalues=(
        # Test single obstacles
        ((0, 0, 0), (4, 0, 1), 6, [[1, 0, 0]], False),
        ((0, 0, 0), (4, 0, 1), 8, [[1, 0, 0], [0, 0, 1]], False),
        # Test cornered against 0 grid (the path should work, and travel
        # into the negative direction!)
        ((0, 0, 0), (4, 0, 1), 8, [[1, 0, 0], [0, 0, 1], [0, 1, 0]], False),
        # Test being completely blocked leads to no path
        ((0, 0, 0), (4, 4, 4), 13,
         [[1, 0, 0], [0, 0, 1], [0, 1, 0], [0, -1, 0], [-1, 0, 0],
          [0, 0, -1]], True),
        ((2, 2, 2), (-4, -4, -4), 19,
         [[3, 2, 2], [2, 2, 3], [2, 3, 2], [2, 1, 2], [1, 2, 2],
          [2, 2, 1]], True),
        # Test when the end goal itself is completely enveloped
        ((5, 5, 5), (0, 0, 0), 16,
         [[1, 0, 0], [0, 0, 1], [0, 1, 0], [0, -1, 0], [-1, 0, 0],
          [0, 0, -1]], True),
        # Test cases with no obstacles
        ((0, 0, 0), (4, 4, 4), 13, [], False),
        ((0, 0, 0), (4, 0, 0), 5, [], False),
        ((0, 0, 0), (0, 0, 4), 5, [], False),
        ((0, 0, 0), (0, 4, 4), 9, [], False),
        # Test cases that will have from/to points outside of the obstacle
        # min/mx
        ((-5, -4, -3), (30, 30, 30), 103, [[-4, -4, -3], [-3, -4, -3]],
         False),
        # Test when the end-goal is an obstacle a path is still yielded
        ((0, 0, 0), (1, 0, 0), 2, [[1, 0, 0]], True),
        ((0, 0, 0), (1, 1, 1), 4, [[1, 1, 1]], True),
        # Test something super far away doesn't take long, because of
        # e_admissibility (it used to never complete!)
        ((0, 0, 0), (1000, 3000, 2500), 6501,
         [[i, i, i] for i in range(1, 1000)] +
         [[i - 1, i - 1, i - 1] for i in range(2, 1000)],
         False),
    )
)
def test_astar_3d_as_2d(from_pos: Tuple[int],
                        to_pos: Tuple[int],
                        expected_n_moves: int,
                        obstacles: Tuple[Tuple[int]],
                        obstructed: bool):
    """This is a more opinionated test for validating astar3D with exact
    parameters"""
    # NOTE(review): `map` shadows the builtin; kept for consistency with
    # the astar(..., map=...) keyword below.
    map = Map(
        position=from_pos,
        direction=0,
        obstacles=obstacles
    )

    path, path_obstructed = astar(
        from_pos=map.position,
        to_pos=to_pos,
        map=map,
        obstacle_cost=10,
        e_admissibility=2
    )
    assert obstructed == path_obstructed

    # Ensure the path actually gets you to the end location
    if len(path):
        from_pos = np.array(from_pos)
        to_pos = np.array(to_pos)
        # Replay the moves: each step shifts from_pos onto the next waypoint
        # (from_pos - (from_pos - point) == point), so after the loop
        # from_pos must equal the goal.
        for point in path:
            direction = from_pos - np.array(point)
            from_pos -= direction
        assert (to_pos == from_pos).all()
    assert len(path) == expected_n_moves
@pytest.mark.parametrize(
    argnames=(
        "block_center", "radius", "from_pos", "to_pos", "expected_n_moves",
        "obstacle_cost", "obstructed"),
    argvalues=[
        # Test that with an obstacle cost of 1, the straightest path is taken
        ((0, 0, 0), 1, (-2, 0, 0), (2, 0, 0), 5, 1, True),
        # Test that with an obstacle cost of less than or equal to 1, going
        # through obstacles is totally A-OK!
        ((0, 0, 0), 1, (-2, -2, -2), (2, 2, 2), 13, 0, True),
        ((0, 0, 0), 1, (-2, -2, -2), (2, 2, 2), 13, 1, True),
        ((0, 0, 0), 1, (-2, -2, -2), (2, 2, 2), 13, 1.1, False),
        # Test that with an obstacle cost of 2 or below, the path routes
        # through the blockage, and at over 4, it goes direct
        ((0, 0, 0), 1, (-2, 0, 0), (2, 0, 0), 5, 2, True),
        ((0, 0, 0), 1, (-2, 0, 0), (2, 0, 0), 9, 2.01, False),
        # Here we have a block of 13 in diameter.
        ((0, 0, 0), 6, (-14, 0, 0), (14, 0, 0), 29, 1, True),
        ((0, 0, 0), 6, (-14, 0, 0), (14, 0, 0), 29, 1.999, True),
        ((0, 0, 0), 6, (-14, 0, 0), (14, 0, 0), 43, 2, False),
        ((0, 0, 0), 6, (-14, 0, 0), (14, 0, 0), 43, 2.001, False),
    ]
)
def test_obstacle_cost(
        block_center: Tuple[int],
        radius: int,
        from_pos: Tuple[int],
        to_pos: Tuple[int],
        expected_n_moves: int,
        obstacle_cost: int,
        obstructed: bool):
    """Validate how astar's obstacle_cost trades path length vs. going
    through a solid cube of obstacles."""
    obstacles = generate_obstacle_block(
        center=block_center,
        radius=radius)

    # NOTE(review): `map` shadows the builtin; kept to match astar's kwarg.
    map = Map(
        position=from_pos,
        direction=0,
        obstacles=obstacles
    )
    path, path_obstructed = astar(
        from_pos=map.position,
        to_pos=to_pos,
        map=map,
        obstacle_cost=obstacle_cost,
        e_admissibility=1
    )
    assert path_obstructed == obstructed
    assert expected_n_moves == len(path)
def generate_obstacle_block(center, radius, include_center=True):
    """Generate a cube of obstacle coordinates surrounding a center.

    The original docstring promised "optionally excluding the center" but no
    such option existed; `include_center` adds it backward-compatibly.

    :param center: (x, y, z) center of the cube
    :param radius: how many cells the cube extends in each axis direction
    :param include_center: when False, omit `center` itself from the result
    :return: list of [x, y, z] obstacle coordinates
    """
    obstacles = []
    for x in range(center[0] - radius, center[0] + radius + 1):
        for y in range(center[1] - radius, center[1] + radius + 1):
            for z in range(center[2] - radius, center[2] + radius + 1):
                if not include_center and [x, y, z] == list(center):
                    continue
                obstacles.append([x, y, z])
    # Sanity check: a full cube has (2r+1)^3 cells, minus one if the center
    # was excluded.
    expected = (radius * 2 + 1) ** 3 - (0 if include_center else 1)
    assert len(obstacles) == expected
    return obstacles
|
998,474 | 898096bf09e8dcd6eed490a1e0726e5d9bfce5b2 | # Copyright (C) 2022 CVAT.ai Corporation
#
# SPDX-License-Identifier: MIT
from __future__ import annotations
import json
from time import sleep
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from cvat_sdk.core.client import Client
def create_git_repo(
    client: Client,
    *,
    task_id: int,
    repo_url: str,
    status_check_period: int | None = None,
    use_lfs: bool = True,
):
    """Attach a git dataset repository to a task and wait until it is created.

    :param client: authenticated CVAT client
    :param task_id: id of the task to attach the repository to
    :param repo_url: URL of the git repository to attach
    :param status_check_period: seconds between status polls; defaults to the
        client's configured status_check_period
    :param use_lfs: whether annotations should be stored with git-lfs
    """
    if status_check_period is None:
        status_check_period = client.config.status_check_period

    common_headers = client.api_client.get_common_headers()

    response = client.api_client.rest_client.POST(
        client.api_map.git_create(task_id),
        post_params={"path": repo_url, "lfs": use_lfs, "tid": task_id},
        headers=common_headers,
    )
    response_json = json.loads(response.data)
    rq_id = response_json["rq_id"]
    client.logger.info(f"Create RQ ID: {rq_id}")

    client.logger.debug("Awaiting a dataset repository to be created for the task %s...", task_id)
    check_url = client.api_map.git_check(rq_id)
    status = None
    # Poll the request-queue endpoint until the server reports completion.
    while status != "finished":
        sleep(status_check_period)
        response = client.api_client.rest_client.GET(check_url, headers=common_headers)
        response_json = json.loads(response.data)
        status = response_json["status"]
        if status in ("failed", "unknown"):
            # BUG FIX: the original implicit string concatenation
            # "...failed" "with status %s." logged "failedwith status".
            client.logger.error(
                "Dataset repository creation request for task %s failed with status %s.",
                task_id,
                status,
            )
            break

        client.logger.debug(
            "Awaiting a dataset repository to be created for the task %s. Response status: %s",
            task_id,
            status,
        )

    client.logger.debug("Dataset repository creation completed with status: %s.", status)
|
998,475 | 71c67db0c7a5d29a4c6618cf7262aa42a326846d | import ship, game_board, sprites
from random import randint
import time
from typing import List, Tuple, Optional
class Computer:
    """ A computer player"""

    def __init__(self):
        # list of ship objects
        self._my_ships: List[ship.Ship] = []
        # list of (row,col) coordinates
        self._my_misses: List[Tuple[int, int]] = []
        # list of (row,col) coordinates
        self._my_hits: List[Tuple[int, int]] = []
        # list of ship objects
        self._sunk_ships: List[ship.Ship] = []
        # list of (row,col) coordinates
        self._their_misses: List[Tuple[int, int]] = []
        # list of (row,col) coordinates
        self._their_hits: List[Tuple[int, int]] = []
        # the board matrix is a 10x10 structure with
        # pointers to ship objects. Initialize to all
        # None values- no ships are on the board
        self._board_matrix: List[List[Optional[ship.Ship]]] = [[None] * 10 for _ in range(10)]
        # set to True if all opponent's ships are sunk
        self.complete: bool = False

    def initialize(self):
        """ Create a valid ship layout
        This function populates
        _my_ships and _board_matrix
        Ship Type  | Length
        -----------|-------
        Carrier    | 5
        Battleship | 4
        Cruiser    | 3
        Submarine  | 3
        Destroyer  | 2
        * the ship type is just FYI, it is not used in the game *
        """
        # --------- BEGIN YOUR CODE ----------
        # This is exactly the same as Human.initialize, just copy the code over
        # --------- END YOUR CODE ----------
        pass

    def guess(self, row, col) -> Tuple[int, Optional[ship.Ship]]:
        """
        Tell the other player whether a row and column guess is a hit or miss.
        Record the (row,col) guess in either self._their_hits or self._their_misses
        If a space is guessed twice do not hit the ship twice. That's cheating :)
        Returns a tuple of (status, ship) where
          status = 0: miss
                 = 1: hit
                 = 2: sunk
          ship   = None if a hit or miss
                 = ship object if sunk
        """
        # None here means the guessed space holds no ship (a miss).
        my_ship: Optional[ship.Ship] = self._board_matrix[row][col]

        # if my_ship is None the guess is a miss, otherwise its a hit
        # --------- BEGIN YOUR CODE ----------
        # This is exactly the same as Human.guess, just copy the code over
        # --------- END YOUR CODE ----------
        # NOTE(review): returns None until the assignment code is filled in.

    def take_turn(self, opponent):
        """
        Guess a new row,col space. This may be random or use a more sophisticated AI.
        Updates self._my_hits, self._my_misses, and self._sunk_ships
        """
        # --------- BEGIN YOUR CODE ----------
        # 1.) Guess a random space that has not been guessed (or be more clever!)
        # Steps 2-4 are the same as Human.take_turn
        # 2.) Call opponent.guess() to check whether the guess is a hit or miss
        # 3.) Update my_hits, my_misses, and sunk_ships accordingly
        # 4.) If the sunk_ships array has 5 ships in it set self.complete to True
        # --------- END YOUR CODE ----------

        # enforce a short delay to make the computer appear to "think" about its guess
        time.sleep(0.5)

    def print_board(self):
        """
        Print the player's board as text, useful for debugging
        """
        print("=" * 10)
        for row in self._board_matrix:
            for entry in row:
                if entry is None:
                    print("_", end="")
                else:
                    # occupied cells are shown as the occupying ship's length
                    print(entry.length, end="")
            print("")
        print("=" * 10)

    def draw(self,
             my_board: game_board.GameBoard,
             their_board: game_board.GameBoard):
        """ Add sprites to the game board's to indicate
        ship positions, guesses, hits, etc """
        # own board: ships plus the opponent's guesses against us
        for my_ship in self._my_ships:
            my_ship.draw(my_board)
        for miss in self._their_misses:
            my_board.add_sprite(sprites.miss, miss)
        for hit in self._their_hits:
            my_board.add_sprite(sprites.hit, hit)

        # draw hit indicators on their board
        for miss in self._my_misses:
            their_board.add_sprite(sprites.miss, miss)
        for their_ship in self._sunk_ships:
            their_ship.draw(their_board)
        for hit in self._my_hits:
            their_board.add_sprite(sprites.hit, hit)
|
998,476 | 018ba1b46497a9866925fb958e626a41fa29487e | import aoi
from libs.conversions import discord_number_emojis as dne
class Game:
    """Base class for an interactive game bound to a command context."""

    def __init__(self, ctx: aoi.AoiContext):
        self.ctx = ctx

    async def play(self):
        """Run the game loop; subclasses must override."""
        raise NotImplementedError()

    def score(self, aoi_score: int, player_score: int):
        """Format the player-vs-bot scoreboard as Discord emoji text."""
        player_line = f":person_red_hair: {dne(player_score)}\n"
        bot_line = f"<:aoi:760338479841935372> {dne(aoi_score)}\n"
        return player_line + bot_line
|
998,477 | 119e87cd39cbd5e6f715856cc9e7f8eafd05fa19 |
def find_duplicates(my_list):
    """Return the values that appear more than once in `my_list`.

    Each duplicated value appears exactly once in the result, ordered by its
    first occurrence. Values must be hashable. Runs in O(n) instead of the
    original O(n^2) pairwise scan.
    """
    from collections import Counter

    counts = Counter(my_list)
    seen = set()
    duplicates = []
    for value in my_list:
        if counts[value] > 1 and value not in seen:
            seen.add(value)
            duplicates.append(value)
    return duplicates
# Quick manual check: 'a', 'b' and 'n' each appear twice in this list.
my_test_list =['f','a','b','c','b','d','m','n','n','a']
print(find_duplicates(my_test_list))
#another solution
def yet_again_another_method(my_list):
    """Return each value that occurs more than once, in first-seen order.

    Same contract as find_duplicates. Fixes two defects in the original:
    the dead statement `list(my_list)` whose result was discarded, and the
    O(n^2) behavior of calling my_list.count() inside the loop.
    """
    from collections import Counter

    counts = Counter(my_list)
    returned_list = []
    for value in my_list:
        if counts[value] > 1 and value not in returned_list:
            returned_list.append(value)
    return returned_list
print(yet_again_another_method(my_test_list)) |
998,478 | eda3ca3df3c54a9f0f2511580590857b0e3e93dd | from heapq import heappush, heappop
from collections import namedtuple, defaultdict
from .nearest import BruteForceNeighbors, KDNeighbors
from .primitives import default_weights, get_embed_fn, get_distance_fn
from .utils import INF, elapsed_time, get_pairs, default_selector, irange, \
merge_dicts, compute_path_cost, get_length, is_path, flatten
import time
import numpy as np
# Distance metric description: a p-norm plus per-dimension weights.
Metric = namedtuple('Metric', ['p_norm', 'weights'])
# Search-tree entry: cost-to-come `g` and back-pointer `parent`.
Node = namedtuple('Node', ['g', 'parent'])
# Bundle returned by lazy_prm: the path plus roadmap diagnostics.
Solution = namedtuple('PRMSolution', ['path', 'samples', 'edges', 'colliding_vertices', 'colliding_edges'])

unit_cost_fn = lambda v1, v2: 1.
zero_heuristic_fn = lambda v: 0

# Large weight used to make unchecked ("lazy") edges ordinally worse than
# any checked edge in the meta heuristic of lazy_prm.
ORDINAL = 1e3
# All edges are treated as traversable in both directions.
REVERSIBLE = True
ROADMAPS = [] # TODO: not ideal
def sample_until(sample_fn, num_samples, max_time=INF):
    """Draw configurations from sample_fn until num_samples are collected
    or max_time (seconds) elapses, returning the list collected so far."""
    start_time = time.time()
    samples = []
    while len(samples) < num_samples:
        if elapsed_time(start_time) >= max_time:
            break
        samples.append(sample_fn())
    return samples
def retrace_path(visited, vertex):
    """Follow parent pointers from `vertex` back to the root and return the
    root-to-vertex sequence ([] when vertex is None)."""
    path = []
    while vertex is not None:
        path.append(vertex)
        vertex = visited[vertex].parent
    path.reverse()
    return path
def dijkstra(start_v, neighbors_fn, cost_fn=unit_cost_fn):
    """Uniform-cost search from start_v over the implicit graph defined by
    neighbors_fn; returns {vertex: Node(g, parent)} for every reached vertex."""
    visited = {start_v: Node(0., None)}
    queue = [(0., start_v)]
    while queue:
        g, v = heappop(queue)
        if g > visited[v].g:
            continue  # stale queue entry; a cheaper path was already recorded
        for successor in neighbors_fn(v):
            candidate = g + cost_fn(v, successor)
            if successor not in visited or candidate < visited[successor].g:
                visited[successor] = Node(candidate, v)
                heappush(queue, (candidate, successor))
    return visited
def get_priority_fn(w=1.):
    """Build a weighted-A* priority: g when w=0, (h, g) greedy ordering when
    w=INF, and g + w*h otherwise."""
    assert 0. <= w <= INF

    def priority_fn(g, h):
        if w == INF:
            return (h, g)
        if w == 0.:
            return g
        return g + w*h
    return priority_fn
def wastar_search(start_v, end_v, neighbors_fn, cost_fn=unit_cost_fn,
                  heuristic_fn=zero_heuristic_fn, max_cost=INF, max_time=INF, **kwargs):
    """Weighted A* from start_v to end_v over the implicit graph given by
    neighbors_fn; returns the vertex path or None on timeout/exhaustion.
    `kwargs` are forwarded to get_priority_fn (i.e. the weight `w`)."""
    # TODO: lazy wastar to get different paths
    # TODO: multi-start / multi-goal
    # TODO: use previous search tree as heuristic
    #heuristic_fn = lambda v: cost_fn(v, end_v)
    priority_fn = get_priority_fn(**kwargs)
    goal_test = lambda v: v == end_v

    start_time = time.time()
    start_g = 0.
    start_h = heuristic_fn(start_v)
    visited = {start_v: Node(start_g, None)}
    queue = [(priority_fn(start_g, start_h), start_g, start_v)]
    while queue and (elapsed_time(start_time) < max_time):
        _, current_g, current_v = heappop(queue)
        # Skip stale queue entries: a cheaper path to current_v was recorded
        # after this tuple was pushed.
        if visited[current_v].g < current_g:
            continue
        if goal_test(current_v):
            return retrace_path(visited, current_v)
        for next_v in neighbors_fn(current_v): # TODO: lazily compute neighbors
            next_g = current_g + cost_fn(current_v, next_v)
            if (next_v not in visited) or (next_g < visited[next_v].g):
                visited[next_v] = Node(next_g, current_v)
                next_h = heuristic_fn(next_v)
                # Prune states that cannot beat max_cost.
                if (next_g + next_h) < max_cost: # Assumes admissible
                    next_p = priority_fn(next_g, next_h)
                    heappush(queue, (next_p, next_g, next_v))
    return None
##################################################
class Roadmap(object):
    """A lazily collision-checked PRM roadmap over sampled configurations.

    Vertices are indices into the nearest-neighbor store; edges, edge costs,
    intermediate extension paths, and collision results are all cached and
    computed on demand.
    """

    def __init__(self, extend_fn, weights=None, distance_fn=None, cost_fn=None,
                 p_norm=2, max_degree=5, max_distance=INF, approximate_eps=0., **kwargs):
        # TODO: custom cost_fn
        assert (weights is not None) or (distance_fn is not None)
        self.distance_fn = distance_fn
        self.extend_fn = extend_fn
        self.weights = weights
        self.cost_fn = cost_fn
        self.p_norm = p_norm
        self.max_degree = max_degree
        self.max_distance = max_distance
        self.approximate_eps = approximate_eps
        # Brute force when only a custom distance is available; a KD-tree
        # over the weighted embedding otherwise.
        if self.weights is None:
            self.nearest = BruteForceNeighbors(self.distance_fn)
        else:
            self.nearest = KDNeighbors(embed_fn=get_embed_fn(self.weights), **kwargs)
            #self.nearest = BruteForceNeighbors(get_distance_fn(weights, p_norm=p_norm))
        self.edges = set()
        self.outgoing_from_vertex = defaultdict(set)
        self.edge_costs = {}
        self.edge_paths = {}
        self.colliding_vertices = {}
        self.colliding_edges = {}
        self.colliding_intermediates = {}

    @property
    def samples(self):
        return self.nearest.data

    @property
    def vertices(self):
        return list(range(len(self.samples))) # TODO: inefficient

    def index(self, x):
        """Return the vertex id whose stored sample is `x` (identity match)."""
        #return self.samples.index(x)
        for i, q in enumerate(self.samples):
            if x is q: #x == q:
                return i
        # BUG FIX: the original `return ValueError(x)` handed the exception
        # object back to the caller instead of raising it.
        raise ValueError(x)

    def __iter__(self):
        return iter([self.samples, self.vertices, self.edges])

    def add_edge(self, v1, v2):
        """Record an undirected edge between v1 and v2; returns both directions."""
        assert REVERSIBLE # TODO
        edges = {(v1, v2), (v2, v1)}
        self.edges.update(edges)
        self.outgoing_from_vertex[v1].add(v2)
        self.outgoing_from_vertex[v2].add(v1)
        return edges

    def add_samples(self, samples):
        """Insert samples and connect each to its nearest neighbors;
        returns the set of newly created edges."""
        new_edges = set()
        for v1, sample in self.nearest.add_data(samples):
            #for v1, sample in enumerate(self.vertices):
            # TODO: could dynamically compute distances
            # if len(self.outgoing_from_vertex[v1]) >= self.max_degree:
            #     raise NotImplementedError()
            # +1 because the query returns the sample itself as a neighbor.
            max_degree = min(self.max_degree + 1, len(self.samples))
            for d, v2, _ in self.nearest.query_neighbors(sample, k=max_degree, eps=self.approximate_eps,
                                                         p=self.p_norm, distance_upper_bound=self.max_distance):
                if (v1 != v2): # and (d <= self.max_distance):
                    new_edges.update(self.add_edge(v1, v2))
        return new_edges

    def is_colliding(self, v1, v2):
        """True if either endpoint or the edge is known to collide."""
        edge = (v1, v2)
        return self.colliding_vertices.get(v1, False) or \
               self.colliding_vertices.get(v2, False) or \
               self.colliding_edges.get(edge, False)

    def is_safe(self, v1, v2):
        """True only if the edge has been checked and found collision-free."""
        edge = (v1, v2)
        return not self.colliding_edges.get(edge, True)

    def neighbors_fn(self, v1):
        """Yield adjacent vertices not yet known to be colliding."""
        for v2 in self.outgoing_from_vertex[v1]:
            if not self.is_colliding(v1, v2):
                yield v2

    def check_vertex(self, v, collision_fn):
        """Collision-check vertex v (cached); True when collision-free."""
        x = self.samples[v]
        if v not in self.colliding_vertices:
            # TODO: could update the colliding adjacent edges as well
            self.colliding_vertices[v] = collision_fn(x)
        return not self.colliding_vertices[v]

    def check_intermediate(self, v1, v2, index, collision_fn):
        """Collision-check one intermediate configuration of edge (v1, v2)
        (cached); a colliding intermediate marks the whole edge colliding."""
        if (v1, v2, index) not in self.colliding_intermediates:
            x = self.get_path(v1, v2)[index]
            self.colliding_intermediates[v1, v2, index] = collision_fn(x)
            if self.colliding_intermediates[v1, v2, index]:
                # TODO: record when all safe
                self.colliding_edges[v1, v2] = self.colliding_intermediates[v1, v2, index]
                if REVERSIBLE:
                    self.colliding_edges[v2, v1] = self.colliding_edges[v1, v2]
        return not self.colliding_intermediates[v1, v2, index]

    def check_edge(self, v1, v2, collision_fn):
        """Collision-check the full extension path of edge (v1, v2) (cached)."""
        if (v1, v2) not in self.colliding_edges:
            segment = default_selector(self.get_path(v1, v2)) # TODO: check_intermediate
            self.colliding_edges[v1, v2] = any(map(collision_fn, segment))
            if REVERSIBLE:
                self.colliding_edges[v2, v1] = self.colliding_edges[v1, v2]
        return not self.colliding_edges[v1, v2]

    def check_path(self, path, collision_fn):
        """Check a vertex path: vertices first, then all edge intermediates
        in selector order; True if the whole path is collision-free."""
        for v in default_selector(path):
            if not self.check_vertex(v, collision_fn):
                return False
        # for v1, v2 in default_selector(get_pairs(path)):
        #     if not self.check_edge(v1, v2, collision_fn):
        #         return False
        # return True
        intermediates = []
        for v1, v2 in get_pairs(path):
            intermediates.extend((v1, v2, index) for index in range(len(self.get_path(v1, v2))))
        for v1, v2, index in default_selector(intermediates):
            if not self.check_intermediate(v1, v2, index, collision_fn):
                return False
        return True

    def check_roadmap(self, collision_fn):
        """Eagerly collision-check every vertex and edge."""
        for vertex in self.vertices:
            self.check_vertex(vertex, collision_fn)
        for vertex1, vertex2 in self.edges:
            self.check_edge(vertex1, vertex2, collision_fn)

    def get_cost(self, v1, v2):
        """Cached cost of edge (v1, v2) via cost_fn on the samples."""
        edge = (v1, v2)
        if edge not in self.edge_costs:
            self.edge_costs[edge] = self.cost_fn(self.samples[v1], self.samples[v2])
            if REVERSIBLE:
                self.edge_costs[edge[::-1]] = self.edge_costs[edge]
        return self.edge_costs[edge]

    def get_path(self, v1, v2):
        """Cached extension path (intermediate confs) of edge (v1, v2)."""
        edge = (v1, v2)
        if edge not in self.edge_paths:
            path = list(self.extend_fn(self.samples[v1], self.samples[v2]))
            self.edge_paths[edge] = path
            if REVERSIBLE:
                self.edge_paths[edge[::-1]] = path[::-1]
        return self.edge_paths[edge]

    def augment(self, sample_fn, num_samples=100):
        """Grow the roadmap until it holds at least num_samples samples."""
        n = len(self.samples)
        if n >= num_samples:
            return self
        samples = sample_until(sample_fn, num_samples=num_samples - n)
        self.add_samples(samples)
        return self
##################################################
def get_metrics(conf, weights=None, p_norm=2, distance_fn=None, cost_fn=None):
    """Resolve the (weights, distance_fn, cost_fn) triple, deriving defaults
    from the sample configuration `conf` when neither is supplied."""
    # TODO: can embed pose and/or points on the robot for other distances
    if weights is None and distance_fn is None:
        weights = default_weights(conf, weights=weights)
        #distance_fn = distance_fn_from_extend_fn(extend_fn)
    if cost_fn is None:
        if distance_fn is not None:
            cost_fn = distance_fn
        else:
            cost_fn = get_distance_fn(weights, p_norm=p_norm) # TODO: additive cost, acceleration cost
    return weights, distance_fn, cost_fn
def lazy_prm(start, goal, sample_fn, extend_fn, collision_fn, distance_fn=None, cost_fn=None, roadmap=None, num_samples=100,
             weights=None, circular={}, p_norm=2, lazy=True, max_cost=INF, max_time=INF, w=1., meta=False, verbose=True, **kwargs): #, max_paths=INF):
    """
    :param start: Start configuration - conf
    :param goal: End configuration - conf
    :param sample_fn: Sample function - sample_fn()->conf
    :param extend_fn: Extension function - extend_fn(q1, q2)->[q', ..., q"]
    :param collision_fn: Collision function - collision_fn(q)->bool
    :param roadmap: Optional pre-built Roadmap to reuse (grown as needed)
    :param num_samples: Target number of roadmap samples
    :param lazy: Defer collision checking until a candidate path is found
    :param w: Weighted-A* weight passed to the search
    :param meta: Bias the search toward already-verified (safe) edges
    :param max_time: Maximum runtime - float
    :param kwargs: Keyword arguments
    :return: Path [q', ..., q"] or None if unable to find a solution
    """
    # NOTE(review): `circular={}` is a mutable default argument; it appears
    # to be passed through unmodified, but verify Roadmap/KDNeighbors never
    # mutate it.
    # TODO: compute hyperparameters using start, goal, and sample_fn statistics
    # TODO: scale default parameters based on
    # TODO: precompute and store roadmap offline
    # TODO: multiple collision functions to allow partial reuse
    # TODO: multi-query motion planning
    start_time = time.time()
    weights, distance_fn, cost_fn = get_metrics(start, weights=weights, p_norm=p_norm, distance_fn=distance_fn, cost_fn=cost_fn)
    if roadmap is None:
        roadmap = Roadmap(extend_fn, weights=weights, distance_fn=distance_fn, cost_fn=cost_fn, circular=circular)
                          #leafsize=10, compact_nodes=True, copy_data=False, balanced_tree=True, boxsize=None, **kwargs)
        roadmap.add_samples([start, goal] + sample_until(sample_fn, num_samples))
    roadmap = roadmap.augment(sample_fn, num_samples=num_samples)
    samples, vertices, edges = roadmap
    start_vertex = roadmap.index(start)
    end_vertex = roadmap.index(goal)
    degree = np.average(list(map(len, roadmap.outgoing_from_vertex.values())))

    # TODO: update collision occupancy based on proximity to existing colliding (for diversity as well)
    # TODO: minimize the maximum distance to colliding
    if not lazy:
        roadmap.check_roadmap(collision_fn)

    weight_fn = roadmap.get_cost
    if meta:
        # Penalize edges not yet verified safe by an ordinal amount so the
        # search prefers already-checked edges.
        lazy_fn = lambda v1, v2: int(not roadmap.is_safe(v1, v2)) # TODO: score by length
        #weight_fn = lazy_fn
        #weight_fn = lambda v1, v2: (lazy_fn(v1, v2), cost_fn(samples[v1], samples[v2])) # TODO:
        weight_fn = lambda v1, v2: ORDINAL*lazy_fn(v1, v2) + roadmap.get_cost(v1, v2)

    # Backward Dijkstra from the goal supplies an exact heuristic.
    visited = dijkstra(end_vertex, roadmap.neighbors_fn, weight_fn)
    heuristic_fn = lambda v: visited[v].g if (v in visited) else INF # TODO: lazily apply costs
    #heuristic_fn = zero_heuristic_fn
    #heuristic_fn = lambda v: weight_fn(v, end_vertex)
    path = None
    # Repeatedly find a candidate path and collision-check it; colliding
    # edges are pruned from the graph for the next attempt.
    while (elapsed_time(start_time) < max_time) and (path is None): # TODO: max_attempts
        lazy_path = wastar_search(start_vertex, end_vertex, neighbors_fn=roadmap.neighbors_fn,
                                  cost_fn=weight_fn, heuristic_fn=heuristic_fn,
                                  max_cost=max_cost, max_time=max_time-elapsed_time(start_time), w=w)
        if lazy_path is None:
            break
        if verbose:
            print('Candidate | Length: {} | Cost: {:.3f} | Vertices: {} | Samples: {} | Degree: {:.3f} | Time: {:.3f}'.format(
                len(lazy_path), compute_path_cost(lazy_path, cost_fn=weight_fn),
                len(roadmap.colliding_vertices), len(roadmap.colliding_intermediates),
                degree, elapsed_time(start_time)))
        if roadmap.check_path(lazy_path, collision_fn):
            path = lazy_path

    if path is None:
        forward_visited = set(dijkstra(start_vertex, roadmap.neighbors_fn))
        backward_visited = set(dijkstra(end_vertex, roadmap.neighbors_fn))
        # for v in roadmap.vertices:
        #     if not roadmap.colliding_vertices.get(v, False):
        #         # TODO: add edges if the collision-free degree drops
        #         num_colliding = sum(not roadmap.colliding_vertices.get(v2, False) for v2 in roadmap.outgoing_from_vertex[v])
        if verbose:
            print('Failure | Forward: {} | Backward: {} | Vertices: {} | Samples: {} | Degree: {:.3f} | Time: {:.3f}'.format(
                len(forward_visited), len(backward_visited),
                len(roadmap.colliding_vertices), len(roadmap.colliding_intermediates),
                degree, elapsed_time(start_time)))
        return Solution(path, samples, edges, roadmap.colliding_vertices, roadmap.colliding_edges)
    if verbose:
        print('Solution | Length: {} | Cost: {:.3f} | Vertices: {} | Samples: {} | Degree: {:.3f} | Time: {:.3f}'.format(
            len(path), compute_path_cost(path, cost_fn=weight_fn),
            len(roadmap.colliding_vertices), len(roadmap.colliding_intermediates),
            degree, elapsed_time(start_time)))
    #waypoints = [samples[v] for v in path]
    #solution = [start] + refine_waypoints(waypoints, extend_fn)
    # Stitch the per-edge extension paths into a full configuration path.
    solution = [start] + list(flatten(roadmap.get_path(v1, v2) for v1, v2 in get_pairs(path)))
    return Solution(solution, samples, edges, roadmap.colliding_vertices, roadmap.colliding_edges)
##################################################
def create_param_sequence(initial_samples=100, step_samples=100, **kwargs):
    """Yield parameter dicts whose 'num_samples' grows without bound,
    starting at initial_samples and increasing by step_samples."""
    # TODO: iteratively increase the parameters
    # TODO: generalize to degree, distance, cost
    for num_samples in irange(start=initial_samples, stop=INF, step=step_samples):
        yield merge_dicts(kwargs, {'num_samples': num_samples})
def lazy_prm_star(start, goal, sample_fn, extend_fn, collision_fn, distance_fn=None, cost_fn=None, max_cost=INF, success_cost=INF,
                  param_sequence=None, resuse=True, weights=None, circular={}, p_norm=2, max_time=INF, verbose=False, **kwargs):
    """Anytime wrapper around lazy_prm: repeatedly replan with growing
    sample counts, keeping the cheapest path found; stops early once a path
    cheaper than success_cost is found or max_time elapses.

    NOTE(review): the `resuse` parameter name is a typo for "reuse" but is
    part of the public interface, so it is kept. `circular={}` is a mutable
    default argument; it appears to only be passed through -- verify.
    """
    # TODO: bias to stay near the (past/hypothetical) path
    # TODO: proximity pessimistic collision checking
    # TODO: roadmap reuse in general
    start_time = time.time()
    weights, distance_fn, cost_fn = get_metrics(start, weights=weights, p_norm=p_norm, distance_fn=distance_fn, cost_fn=cost_fn)
    #print(weights, distance_fn, cost_fn)
    #input()
    if param_sequence is None:
        param_sequence = create_param_sequence()
    roadmap = None
    best_path = None
    best_cost = max_cost
    for i, params in enumerate(param_sequence):
        remaining_time = (max_time - elapsed_time(start_time))
        if remaining_time <= 0.:
            break
        if verbose:
            print('\nIteration: {} | Cost: {:.3f} | Elapsed: {:.3f} | Remaining: {:.3f} | Params: {}'.format(
                i, best_cost, elapsed_time(start_time), remaining_time, params))
        # Either grow the existing roadmap or build a fresh one per iteration.
        if (roadmap is None) or not resuse:
            roadmap = Roadmap(extend_fn, weights=weights, distance_fn=distance_fn, cost_fn=cost_fn, circular=circular)
            roadmap.add_samples([start, goal] + sample_until(sample_fn, params['num_samples']))
        # best_cost doubles as a pruning bound for the inner search.
        new_path = lazy_prm(start, goal, sample_fn, extend_fn, collision_fn, roadmap=roadmap,
                            cost_fn=cost_fn, weights=weights, circular=circular, p_norm=p_norm,
                            max_time=remaining_time, max_cost=best_cost,
                            verbose=verbose, **params, **kwargs)[0]
        new_cost = compute_path_cost(new_path, cost_fn=cost_fn)
        if verbose:
            print('Path: {} | Cost: {:.3f} | Length: {}'.format(is_path(new_path), new_cost, get_length(new_path)))
        if new_cost < best_cost:
            best_path = new_path
            best_cost = new_cost
            if best_cost < success_cost:
                break
    if roadmap is not None:
        ROADMAPS.append(roadmap)
    return best_path
|
998,479 | c27c96edc47bb00a910e53c89c43af124689891f | """empty message
Revision ID: 0257_letter_branding_migration
Revises: 0256_set_postage_tmplt_hstr
"""
# revision identifiers, used by Alembic.
# Alembic reads these module attributes to order migrations in the chain.
revision = "0257_letter_branding_migration"
down_revision = "0256_set_postage_tmplt_hstr"
from alembic import op
def upgrade():
    """Copy DVLA organisation branding into the new letter_branding tables."""
    # Seed letter_branding with one row per dvla_organisation, generating
    # a random UUID primary key in SQL; domain is intentionally null.
    op.execute(
        """INSERT INTO letter_branding (id, name, filename, domain)
        SELECT uuid_in(md5(random()::text)::cstring), name, filename, null
        from dvla_organisation"""
    )
    # Link each service to its branding by the shared filename. Rows for
    # organisation '001' are skipped -- presumably the default branding;
    # verify against the application code.
    op.execute(
        """INSERT INTO service_letter_branding (service_id, letter_branding_id)
        SELECT S.id, LB.id
        FROM services s
        JOIN dvla_organisation d on (s.dvla_organisation_id = d.id)
        JOIN letter_branding lb on (lb.filename = d.filename)
        WHERE d.id != '001'
        """
    )
def downgrade():
    """Remove all rows created by upgrade (children first to satisfy FKs)."""
    op.execute("delete from service_letter_branding")
    op.execute("delete from letter_branding")
|
998,480 | 4d2b5ae31a1268bf267ba70697f8c983190d9fee | # !/usr/bin/env python
# -*- coding:utf-8 -*-
"""
Copyright 2020 Tianshu AI Platform. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=============================================================
"""
import codecs
import sched
import sys
import logging
import time
from program.exec.text_classification import classify_by_textcnn as classify
from abc import ABC
from program.abstract.algorithm import Algorithm
logging.basicConfig(format='%(asctime)s - %(pathname)s[line:%(lineno)d] - %(levelname)s: %(message)s',
level=logging.DEBUG)
schedule = sched.scheduler(time.time, time.sleep)
sys.stdout = codecs.getwriter("utf-8")(sys.stdout.detach())
delayId = ""
class Text_classification(Algorithm, ABC):
    """Text-classification annotation algorithm backed by a TextCNN model."""

    def __init__(self):
        pass

    @staticmethod
    def execute(task):
        """Entry point: run text classification for one task payload.

        Decorated with @staticmethod (like _init) so the existing
        class-level call style keeps working and instance calls no longer
        pass a spurious `self` as `task`.
        """
        return Text_classification.textClassificationExecutor(task)

    @staticmethod
    def textClassificationExecutor(jsonObject):
        """Classify the texts of one annotation task.

        Args:
            jsonObject: task payload with 'files' (each carrying 'url' and
                'id'), 'labels', and 'reTaskId'.

        Returns:
            (finished_json, True) on success, where finished_json holds the
            task id and per-file classifications. On error the exception is
            printed and None is returned implicitly -- NOTE(review): callers
            that unpack the result will then fail; consider re-raising.
        """
        global delayId
        result = True
        print('-------------process one-----------------')
        try:
            text_path_list = []
            id_list = []
            label_list = jsonObject['labels']
            for fileObject in jsonObject['files']:
                text_path_list.append(fileObject['url'])
                id_list.append(fileObject['id'])
            print(text_path_list)
            print(id_list)
            print(label_list)
            classifications = Text_classification._classification(text_path_list, id_list, label_list)  # --------------
            finished_json = {"reTaskId": jsonObject['reTaskId'], "classifications": classifications}
            return finished_json, result
        except Exception as e:
            print(e)

    @staticmethod
    def _init():
        """Create the module-level TextCNN classifier instance."""
        print('init classify_obj')
        global classify_obj
        classify_obj = classify.TextCNNClassifier()  # label_log

    @staticmethod
    def _classification(text_path_list, id_list, label_list):
        """Perform automatic text classification task.

        Pads the input lists up to a BATCH_SIZE multiple (repeating the
        first element) and trims the padding from the returned annotations.
        """
        textnum = len(text_path_list)
        batched_num = ((textnum - 1) // classify.BATCH_SIZE + 1) * classify.BATCH_SIZE
        for i in range(batched_num - textnum):
            text_path_list.append(text_path_list[0])
            id_list.append(id_list[0])
        annotations = classify_obj.inference(text_path_list, id_list, label_list)
        return annotations[0:textnum]
|
998,481 | d23d5387c590f52b5f1d0989d9d0f25a1428fa29 | # Generated by Django 3.1.2 on 2021-04-01 05:46
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add contact e-mail, OpenSea URL, and profile description to User."""

    dependencies = [
        ('gallery', '0004_dummypost_activedummy'),
    ]

    operations = [
        # All fields are nullable so existing rows need no default.
        migrations.AddField(
            model_name='user',
            name='contactgmail',
            field=models.CharField(max_length=256, null=True),
        ),
        migrations.AddField(
            model_name='user',
            name='opensea_url',
            field=models.URLField(max_length=256, null=True),
        ),
        migrations.AddField(
            model_name='user',
            name='profile_des',
            field=models.TextField(null=True),
        ),
    ]
|
998,482 | 4200fcd543e7b325a9d12e509398d848f314d4b9 | from django.urls import path, include
from .views import search, get_remaining, OpenAiEndpoint
from .routers import router, collection_router
from django.conf.urls import url
urlpatterns = [
    path('search/', search),
    path('get-remaining/', get_remaining),
    # Router-generated CRUD endpoints for the registered viewsets.
    path('', include(router.urls)),
    path('', include(collection_router.urls)),
    # Q&A endpoints: the extra-kwargs dict tells OpenAiEndpoint which media
    # type the <media_id> refers to.
    path('collection/<int:collection_id>/video/<int:media_id>/question_ans/', OpenAiEndpoint.as_view(), {'media': 'video'} ),
    path('collection/<int:collection_id>/audio/<int:media_id>/question_ans/', OpenAiEndpoint.as_view(), {'media': 'audio'}),
    path('collection/<int:collection_id>/text/<int:media_id>/question_ans/', OpenAiEndpoint.as_view(), {'media': 'text'}),
    #The following is for nested url test, now use DRF-Nested-Routers instead
    #url(r'^collection/(?P<collection_pk>\d+)/video/?$',
    #    VideoViewSet.as_view({'get': 'list'}), name='collection-video-list'),
    #url(r'^collection/(?P<collection_pk>\d+)/video/(?P<pk>\d+)/?$',
    #    VideoViewSet.as_view({'get': 'retrieve'}), name='collection-video-detail'),
]
|
998,483 | d8dc50a5d4fc79d29a946c4fbdf05bb179b30634 | from django.urls import path
from . import views
urlpatterns = [
    # Post listing / creation.
    path('posts/', views.posts, name='posts'),
    path('post/', views.create_post, name='create_post'),
    # Bare integer path edits an existing post (note: no trailing slash).
    path('<int:pk>', views.update_post, name='update_post'),
    path('upload/<int:pk>', views.upload_file, name='upload'),
    # Root serves the login page.
    path('', views.login, name='login'),
    path('logout/', views.logout, name='logout'),
]
|
998,484 | d0c15881ee838de157666ce8f6c9f8b84d159eca | import peewee
from Peewee import Peewee
# create the database
database = Peewee().create_database("Database.db")

# create the class Example with fields id and level
# NOTE(review): 'Peewee' appears to be a project-local wrapper around the
# peewee ORM whose fields() returns a mapping of field factories -- confirm
# its API against the Peewee module, it is not the standard peewee package.
class Example(peewee.Model):
    id, level = Peewee().fields()["autofield"](), Peewee().fields()["int"]()

    class Meta:
        # Bind the model to the database created above.
        database = database

# initiate the table
Peewee().create_table(Example)
# create some examples
Example(level=100).save()
Example(level=120).save()
Example(level=200).save()
# select the objects created
print(list(Example.select()))
# select the objects created where the level is over 110
print(list(Example.select().where(Example.level > 110)))
# drop the table
Peewee().drop_table(Example)
|
998,485 | 10c07f4ddcb1dd73e7771464a488761aa0d23815 | import os.path
import time
import magic
from io import SEEK_END
from flask import request, jsonify, abort
from .. import app
from ..helpers import verify_token, gen_hash
from ..models import UploadedFile
@app.route('/api/1/upload/check', methods={'POST'})
def check_uploaded():
    """Tell the client whether a file with the given checksum already exists.

    Prefers a lookup by 'content_checksum' when the form supplies one,
    otherwise falls back to the upload 'checksum'. On a hit, returns the
    stored checksums and size so the client can skip re-uploading.
    Requires a valid auth token (403 otherwise).
    """
    user = verify_token()
    if not user:
        abort(403)
    if 'content_checksum' in request.form:
        file = UploadedFile.objects(content_checksum=request.form['content_checksum']).first()
    else:
        file = UploadedFile.objects(checksum=request.form['checksum']).first()
    if file:
        return jsonify(result=True, checksum=file.checksum, filesize=file.filesize, vp_checksum=file.vp_checksum)
    else:
        return jsonify(result=False)
@app.route('/api/1/upload/file', methods={'POST'})
def upload_file():
    """Accept a file upload after verifying its declared SHA-256 checksum.

    Flow: authenticate, verify the client-supplied checksum against the
    stream, reject blacklisted MIME types, persist an UploadedFile record,
    then write the bytes under FILE_STORAGE. Checksum/MIME rejections answer
    with result=False plus a reason instead of an HTTP error.
    """
    user = verify_token()
    if not user:
        abort(403)
    if 'checksum' not in request.form or 'file' not in request.files:
        abort(400)
    file = request.files['file']
    checksum = request.form['checksum']
    # gen_hash consumes the stream, so every later read must re-seek first.
    if gen_hash(file.stream, 'sha256') != checksum:
        return jsonify(result=False, reason='checksum')
    file.seek(0)
    # Sniff the real content type from the first bytes; don't trust the client.
    mime = magic.from_buffer(file.read(1024), mime=True)
    if mime in app.config['MIME_BLACKLIST']:
        return jsonify(result=False, reason='invalid mime')
    # Seek to the end so tell() yields the total size for the record.
    file.seek(0, SEEK_END)
    record = UploadedFile(expires=time.time() + 24 * 60 * 60,  # 24h retention
                          checksum=checksum,
                          content_checksum=request.form.get('content_checksum'),
                          vp_checksum=request.form.get('vp_checksum'),
                          filesize=file.tell())
    # Map the sniffed MIME to an extension; other types leave file_ext unset
    # (presumably handled by the model's default -- TODO confirm).
    if mime in ('image/jpg', 'image/jpeg'):
        record.file_ext = 'jpg'
    elif mime == 'image/png':
        record.file_ext = 'png'
    elif mime == 'image/x-ms-bmp':
        record.file_ext = 'bmp'
    record.gen_filename()
    record.save()
    # normpath guards against traversal in the generated name.
    full_path = os.path.join(app.config['FILE_STORAGE'], os.path.normpath(record.filename))
    os.makedirs(os.path.dirname(full_path), exist_ok=True)
    file.seek(0)
    file.save(full_path)
    return jsonify(result=True)
|
998,486 | c8d65985804fa6e90747b602fd2a53f53e95f0a2 | try:
from jsonpath_ng.ext import parse # noqa: F401
from redis.commands.json.path import Path # noqa: F401
from ._json_mixin import JSONCommandsMixin, JSONObject # noqa: F401
except ImportError as e:
if e.name == "fakeredis.stack._json_mixin":
raise e
class JSONCommandsMixin: # type: ignore
pass
|
998,487 | 13a3e3adfc50ee722c03d98f2369d545bff6c424 | #!/usr/bin/env python
# 箱ひげ図の見方 データの分布を「箱」と「ひげ」で表したグラフで、データがどのあたりの値に集中しているかをひと目で捉えることができます。
# 参考 https://bellcurve.jp/statistics/course/5220.html
import matplotlib.pyplot as plt
def boxPlot(numList):
    """Draw a box plot of the given scores (y-axis fixed to 0-100) and show it."""
    figure, axes = plt.subplots()
    axes.boxplot(numList)
    # axes.set_xticklabels(['math', 'literature'])  # optional category labels
    plt.title('Box plot')
    plt.xlabel('exams')
    plt.ylabel('point')
    plt.ylim([0, 100])  # fixed score range on the y-axis
    plt.grid()
    plt.show()
def main():
    """Entry point: plot a sample list of exam scores."""
    scores = [74, 65, 40, 62, 85, 67, 82, 71, 60, 99]
    boxPlot(scores)


if __name__ == "__main__":
    main()
|
998,488 | 1c030f2518fbb3b5a98a12c0b0ac18792920540a | __all__ = ['animedreaming', 'animeflavor', 'animehaven', 'dubcrazy',
'dubbedcrazy', 'hentai2w', 'hentaicraving', 'hentaiseriesonline',
'jamo', 'javon', 'kiss', 'lovemyanime', 'myhentai',
'watchhentaistream']
|
998,489 | 2e526062b456be8e4396044eb3fdb6420e416a8d | #%% Sampling std
import numpy as np
n = 100
std=5.6
sampling_std=std/np.sqrt(n)
#%%
#Tüm olası örneklem ortalamasının (Ẋ) %68’i ±1 z skor aralığındadır.
#Tüm olası örneklem ortalamasının (Ẋ) %95’i ±2 z skor aralığındadır.
#Tüm olası örneklem ortalamasının (Ẋ) %99’u ±3 z skor aralığındadır.
# confidence interval two tailed z test
from scipy.stats import norm
# define probability
p = 0.975
# retrieve value <= probability
value = norm.ppf(p)
print(value)
x=np.array([5,10,15,20])
mean=60000
std=3000
# confirm with cdf
# populasyon std biliniyor ve n>100
upper_conf_interval = mean +value * (std/np.sqrt(len(x)))
lower_conf_interval = mean -value * (std/np.sqrt(len(x)))
print(lower_conf_interval,upper_conf_interval)
#%% populasyon std bilinmiyor ve n>100
upper_conf_interval = mean +value * (std/np.sqrt(len(x)-1))
lower_conf_interval = mean -value * (std/np.sqrt(len(x)-1))
print(lower_conf_interval,upper_conf_interval)
#%% confidence z interval for proportion
from scipy.stats import norm
# define probability
p = 0.975
# retrieve value <= probability
value = norm.ppf(p)
print(value)
count = 60
total = 200
ps = count/total
ps = 0.4
pu = 0.5
upper_conf_interval = ps +value * (np.sqrt((pu*(1-pu))/len(x)))
lower_conf_interval = ps -value * (np.sqrt((pu*(1-pu))/len(x)))
print(lower_conf_interval,upper_conf_interval)
#%% Ortalamanın Two tailed hypotesis testing
#H0 mean = aranan_ort
#H1 mean != aranan_ort
# define probability
p = 0.975
# retrieve value <= probability
value = norm.ppf(p)
print(value)
mean = 60000
std= 3000
N= 1200
aranan_ort = 25000
z = (aranan_ort - mean) /(std/np.sqrt(N))
if (z > value) | (z< -value):
print("H0 reddedildi")
else:
print("H0 red edilemedi")
#%% Ortalamanın One tailed hypotesis testing
#H0 mean => aranan_ort
#H1 mean < aranan_ort
# One-tailed z test of a mean, alpha = 0.05 (critical value from ppf(0.95)).
p = 0.95
# retrieve value <= probability
value = norm.ppf(p)
print(value)
mean = 4
std= 1.2
N= 120
aranan_ort = 4.5
z = (aranan_ort - mean) /(std/np.sqrt(N))
# NOTE(review): the hypotheses above are one-tailed (H1: mean < target) but
# the rejection rule below is two-sided; a left-tailed test should reject
# only when z < -value -- confirm the intended direction before relying on it.
if (z > value) | (z< -value):
    print("H0 reddedildi")
else:
    print("H0 red edilemedi")
#%% Two tailed hypotesis testing for proportion
#H0 ps = pu
#H1 ps != pu
count = 53
total = 122
ps = count/total
pu = 0.39
p = 0.975
# retrieve value <= probability
value = norm.ppf(p)
print(value)
z = (ps -pu)/np.sqrt((pu*(1-pu)/total))
if (z > value) | (z< -value):
print("H0 reddedildi")
else:
print("H0 red edilemedi")
#%%
#%% Ortalamanın tek örneklem testi n<=30
#%%Serbestlik derecesi = N1 - 1
#H0 mean = aranan_ort
#H1 mean != aranan_ort
value = 2.756
mean=2.5
aranan_ort = 2.78
std=1.23
n=30
t=(aranan_ort-mean)/(std/(np.sqrt(n-1)))
if (t > value) | (t< -value):
print("H0 reddedildi")
else:
print("H0 red edilemedi")
#%% Ortalamanın İki örneklem testi büyük örneklem > 30
#H0 mean1 = mean2
#H1 mean1 != mean2
mean_1=2.37
mean2=2.78
std_1=0.63
std_2=0.95
n1=42
n2=37
std_birlesik = np.sqrt((std_1**2)/(n1-1)+((std_2**2)/(n2-1)))
z = (mean_1 - mean2)/std_birlesik
# define probability
p = 0.975
# retrieve value <= probability
value = norm.ppf(p)
print(value)
if (z > value) | (z< -value):
print("H0 reddedildi")
else:
print("H0 red edilemedi")
#%% Ortalamanın İki örneklem testi büyük örneklem <= 30
#H0 mean1 = mean2
#H1 mean1 != mean2
#Serbestlik derecesi = N1 + N2 - 2
value = -2.28
mean_1=2.37
mean2=2.78
std_1=0.63
std_2=0.95
n1=42
n2=37
std_birlesik = np.sqrt(((n1*std_1**2)+(n2*std_2**2))/(n1+n2-2)) * np.sqrt((n1+n2)/(n1*n2))
t = (mean_1 - mean2)/std_birlesik
if (t > value) | (t< -value):
print("H0 reddedildi")
else:
print("H0 red edilemedi")
#%% Orantı İçin iki örneklem Hipotez Testi n>30
# Two-sample z test for proportions (large samples), two-sided at alpha=0.05.
count1 = 53
total1 = 83
ps1 = count1/total1
ps1 = 0.34
count2 = 53
total2 = 103
ps2 = count2/total2
ps2=0.25
p = 0.975
# retrieve value <= probability
value = norm.ppf(p)
print(value)
# Pooled proportion of the two samples.
pu_toplam = ((total1*ps1) + (total2*ps2))/(total1+total2)
# BUG FIX: the pooled standard error must use this cell's sample sizes
# (total1/total2); the old code used stale n1/n2 left over from an earlier cell.
std_p_p = np.sqrt(pu_toplam*(1-pu_toplam)) * np.sqrt((total1 + total2)/(total1*total2))
z = (ps1-ps2) / std_p_p
# BUG FIX: test the z statistic computed above; the old code compared the
# stale variable t from a previous cell (NameError when run standalone).
if (z > value) | (z < -value):
    print("H0 reddedildi")
else:
    print("H0 red edilemedi")
|
998,490 | a194baca305478a625a27ee3e028cf661214ec00 | for i in range (2000 , 3200):
if i % 7 ==0 and i %5 != 0:
print("Requested Numbers are : " , i) |
998,491 | fb362127c4830ffe196d91f7818e310d22b0e96a | from django.apps import AppConfig
class ClinicaProfissionalConfig(AppConfig):
    """Django AppConfig registering the clinica_profissional application."""
    name = 'clinica_profissional'
|
998,492 | 74a9871e5f40e0095b4c504da2e8f2003a82e0eb | from linebot.models import (
TextSendMessage, CarouselTemplate , BubbleContainer,
BoxComponent, TextComponent, FlexSendMessage, ImageComponent,
ButtonComponent, MessageAction, CarouselContainer, SeparatorComponent, IconComponent
)
# 食物清單
def food():
    """Build the Q1 Flex bubble asking which food the wine will accompany."""
    options = ['紅肉', '白肉', '海鮮', '甜點']
    buttons = []
    for position, option in enumerate(options):
        extra = {} if position == 0 else {'margin': 'xl'}
        buttons.append(ButtonComponent(
            style='secondary',
            color='#FFEE99',
            height='sm',
            action=MessageAction(label=option, text=option),
            **extra
        ))
    bubble = BubbleContainer(
        header=BoxComponent(
            layout='baseline',
            contents=[
                TextComponent(
                    text='Q1:搭配食物',
                    weight='bold',
                    size='xxl',
                    flex=2,
                    align='center'
                )
            ]
        ),
        body=BoxComponent(layout='vertical', spacing='xs', contents=buttons)
    )
    return FlexSendMessage(alt_text="hello", contents=bubble)
# 香氣清單
def aroma():
    """Build the Q2 Flex bubble asking which aroma family the user prefers."""
    # (button label with an example, message text sent on tap)
    choices = [
        ('花香 (ex:玫瑰花...)', '花香'),
        ('漿果 (ex:櫻桃...)', '漿果'),
        ('柑橘 (ex:檸檬...)', '柑橘'),
        ('熱帶水果 (ex:鳳梨...)', '熱帶水果'),
        ('淺色水果 (ex:杏桃...)', '淺色水果'),
        ('香料 (ex:胡椒...)', '香料'),
        ('土木 (ex:橡木桶...)', '土木'),
    ]
    buttons = []
    for position, (label, reply) in enumerate(choices):
        extra = {} if position == 0 else {'margin': 'xl'}
        buttons.append(ButtonComponent(
            style='secondary',
            color='#FFEE99',
            height='sm',
            action=MessageAction(label=label, text=reply),
            **extra
        ))
    bubble = BubbleContainer(
        header=BoxComponent(
            layout='baseline',
            contents=[
                TextComponent(
                    text='Q2:喜歡的香氣',
                    weight='bold',
                    size='xxl',
                    flex=2,
                    align='center'
                )
            ]
        ),
        body=BoxComponent(layout='vertical', spacing='xs', contents=buttons)
    )
    return FlexSendMessage(alt_text="hello", contents=bubble)
# 口感清單
def taste():
    """Build the Q3 Flex bubble asking which mouthfeel the user wants."""
    options = ['清香', '甜', '圓潤', '酸', '濃厚']
    buttons = []
    for position, option in enumerate(options):
        extra = {} if position == 0 else {'margin': 'xl'}
        buttons.append(ButtonComponent(
            style='secondary',
            color='#FFEE99',
            height='sm',
            action=MessageAction(label=option, text=option),
            **extra
        ))
    bubble = BubbleContainer(
        header=BoxComponent(
            layout='baseline',
            contents=[
                TextComponent(
                    text='Q3:想要的口感',
                    weight='bold',
                    size='xxl',
                    flex=2,
                    align='center'
                )
            ]
        ),
        body=BoxComponent(layout='vertical', spacing='xs', contents=buttons)
    )
    return FlexSendMessage(alt_text="hello", contents=bubble)
#通路
def chain():
    """Build the Q4 Flex bubble asking which retail chain to buy from."""
    options = ['家樂福', '全聯', 'Costco']
    buttons = []
    for position, option in enumerate(options):
        extra = {} if position == 0 else {'margin': 'xl'}
        buttons.append(ButtonComponent(
            style='secondary',
            color='#FFEE99',
            height='sm',
            action=MessageAction(label=option, text=option),
            **extra
        ))
    bubble = BubbleContainer(
        header=BoxComponent(
            layout='baseline',
            contents=[
                TextComponent(
                    text='Q4:想購買的通路',
                    weight='bold',
                    size='xxl',
                    flex=2,
                    align='center'
                )
            ]
        ),
        body=BoxComponent(layout='vertical', spacing='xs', contents=buttons)
    )
    return FlexSendMessage(alt_text="hello", contents=bubble)
#價格清單
def price():
    """Build the Q5 Flex bubble asking for the price budget."""
    options = ['100~350元', '351~600元', '600元以上']
    buttons = []
    for position, option in enumerate(options):
        extra = {} if position == 0 else {'margin': 'xl'}
        buttons.append(ButtonComponent(
            style='secondary',
            color='#FFEE99',
            height='sm',
            action=MessageAction(label=option, text=option),
            **extra
        ))
    bubble = BubbleContainer(
        header=BoxComponent(
            layout='baseline',
            contents=[
                TextComponent(
                    text='Q5:價格預算',
                    weight='bold',
                    size='xxl',
                    flex=2,
                    align='center'
                )
            ]
        ),
        body=BoxComponent(layout='vertical', spacing='xs', contents=buttons)
    )
    return FlexSendMessage(alt_text="hello", contents=bubble)
#選擇清單:
def model(mes):
    """Advance the quiz: map the user's last answer to the next question menu."""
    next_step = {'Go': food}
    for answer in ['紅肉', '白肉', '海鮮', '甜點']:
        next_step[answer] = aroma
    for answer in ['花香', '漿果', '柑橘', '熱帶水果', '淺色水果', '香料', '土木']:
        next_step[answer] = taste
    for answer in ['清香', '甜', '圓潤', '酸', '濃厚']:
        next_step[answer] = chain
    for answer in ['家樂福', '全聯', 'Costco']:
        next_step[answer] = price
    builder = next_step.get(mes)
    if builder is not None:
        return builder()
    return TextSendMessage(text="None img")
#圖片按鈕
def showimg():
    """Build a single-button bubble that triggers the 'showImg' command."""
    trigger = ButtonComponent(
        style='secondary',
        color='#FFEE99',
        height='sm',
        action=MessageAction(label='ShowImg', text='showImg')
    )
    bubble = BubbleContainer(
        body=BoxComponent(
            layout='vertical',
            spacing='xs',
            contents=[trigger]
        )
    )
    return FlexSendMessage(alt_text="hello", contents=bubble)
#我的最愛
def mylove(n,ulist):
    """Build the "favorites" carousel for user *n*.

    Each favorite renders one row with a "See:<name>" button and a "刪除"
    button whose message is "re:<name>". An empty list renders a single
    placeholder row labelled 'None' whose delete action is 're:x' (same
    output as before).

    Fixes vs. the previous version: the first entry and the rest shared
    duplicated layout code, and an empty *ulist* was mutated in place
    (appending 'None' to the caller's list). The caller's list is now
    left untouched.
    """
    # (name shown on the row, key used by the delete action)
    entries = [(name, name) for name in ulist] or [('None', 'x')]
    contents = []
    for name, delete_key in entries:
        contents.append(BoxComponent(
            margin='md',
            layout='vertical',
            spacing='xs',
            contents=[
                BoxComponent(
                    margin='xs',
                    layout='horizontal',
                    spacing='xs',
                    contents=[
                        ButtonComponent(
                            style='secondary',
                            color='#84C1FF',
                            flex=8,
                            height='sm',
                            action=MessageAction(label=name, text='See:' + name)
                        ),
                        ButtonComponent(
                            style='secondary',
                            color='#ECFFFF',
                            flex=4,
                            height='sm',
                            action=MessageAction(label='刪除', text='re:' + delete_key)
                        )
                    ]
                )
            ]
        ))
    Carousel = CarouselContainer(
        contents=[
            BubbleContainer(
                header=BoxComponent(
                    layout='baseline',
                    contents=[
                        TextComponent(
                            text=n + "的最愛",
                            weight='bold',
                            size='xl',
                            align='center'
                        )
                    ]
                ),
                body=BoxComponent(layout='vertical', contents=contents)
            )
        ]
    )
    return FlexSendMessage(alt_text="hello", contents=Carousel)
#顯示圖片
def tenMod(dset):
    """Build an image carousel of wine results.

    *dset* maps image URL -> wine name (inferred from how the keys feed
    ImageComponent.url -- TODO confirm with the caller). Returns a text
    fallback message when there are no results.
    """
    if not dset:
        return TextSendMessage(
            text='無符合您輸入的葡萄酒' + chr(0x100010) + "\n" + "請重新選擇" + chr(0x10008D))
    bubbles = []
    for image_url, wine_name in dset.items():
        bubbles.append(BubbleContainer(
            hero=ImageComponent(
                url=image_url,
                size='full',
                align='center',
                aspect_ratio='13:13',
                aspect_mode='cover'
            ),
            body=BoxComponent(
                layout='vertical',
                contents=[
                    TextComponent(
                        margin='md',
                        text=wine_name,
                        weight='bold',
                        size='md',
                        align='center'
                    ),
                    SeparatorComponent(margin='xl', color='#0000FF'),
                    BoxComponent(
                        margin='md',
                        layout='horizontal',
                        spacing='xs',
                        contents=[
                            ButtonComponent(
                                style='link',
                                color='#84C1FF',
                                flex=1,
                                height='sm',
                                action=MessageAction(label="加入最愛", text='MyLove:' + wine_name)
                            )
                        ]
                    )
                ]
            )
        ))
    return FlexSendMessage(alt_text="hello", contents=CarouselContainer(contents=bubbles))
#顯示單張圖片
def single(set1):
    """Build one bubble for the first entry of *set1* (name -> image URL).

    Note the key/value orientation is the opposite of tenMod's dict: here
    the key is the displayed name and the value is the image URL. Returns a
    plain "None img" text message when *set1* is empty.
    """
    if not set1:
        return TextSendMessage(text="None img")
    name = next(iter(set1))
    bubble = BubbleContainer(
        hero=ImageComponent(
            url=set1[name],
            size='full',
            align='center',
            aspect_ratio='13:13',
            aspect_mode='cover'
        ),
        body=BoxComponent(
            layout='vertical',
            contents=[
                TextComponent(
                    margin='md',
                    text=name,
                    weight='bold',
                    size='md',
                    align='center'
                ),
                SeparatorComponent(margin='xl', color='#0000FF'),
                BoxComponent(
                    margin='md',
                    layout='horizontal',
                    spacing='xs',
                    contents=[
                        ButtonComponent(
                            style='link',
                            color='#84C1FF',
                            flex=1,
                            height='sm',
                            action=MessageAction(label="我的最愛", text='SeeMyLove')
                        )
                    ]
                )
            ]
        )
    )
    return FlexSendMessage(alt_text="hello", contents=bubble)
#Ukey 選項確認
def ckey(mes):
    """Classify a quiz reply.

    Returns 'ck' for any Q1-Q4 choice (food/aroma/taste/chain), 'pk' for a
    Q5 price choice, and 'none' for anything unrecognized.
    """
    price_options = ('100~350元', '351~600元', '600元以上')
    question_options = (
        '紅肉', '白肉', '海鮮', '甜點',                                # Q1 food
        '花香', '漿果', '柑橘', '熱帶水果', '淺色水果', '香料', '土木',  # Q2 aroma
        '清香', '甜', '圓潤', '酸', '濃厚',                            # Q3 taste
        '家樂福', '全聯', 'Costco',                                    # Q4 chain
    )
    if mes in question_options:
        return "ck"
    if mes in price_options:
        return "pk"
    return "none"
|
998,493 | 0026d918c60eecb2ae4ec1b952a7373bceab533d | import pymysql
db=pymysql.connect("localhost","root","root","attendance")
cursor=db.cursor()
sid=int(raw_input("Enter id"))
sname=raw_input("Enter student name")
email=raw_input("Enter email_id")
mobile=int(raw_input("Enter contact number"))
address =raw_input("Enter address")
sql="insert into students values(%d,'%s','%s','%d','%s')" % (sid,sname,email,mobile,address)
res=cursor.execute(sql)
try:
if res>0:
print "Details added successfully"
except Exception as e:
print e
print "Failed"
db.close()
|
998,494 | b26a12fbc91f15173a3c673f188262efbe887db2 | import numpy as np
from scipy.io import wavfile
def wave_to_file(wav, wav2=None, fname="temp.wav", amp=0.1, sample_rate=44100):
    """Scale float waveform(s) by *amp* into int16 PCM and write a WAV file.

    With *wav2* given, the two channels are written as stereo (samples
    stacked column-wise); otherwise the output is mono.
    """
    scale = amp * (2 ** 15 - 1)
    samples = np.int16(np.array(wav) * scale)
    if wav2 is not None:
        right = np.int16(np.array(wav2) * scale)
        samples = np.stack([samples, right]).T
    wavfile.write(fname, sample_rate, samples)
998,495 | 4e9eb151ec136e900752b8101884c7b4ee97836c | # Imports the monkeyrunner modules used by this program
from com.android.monkeyrunner import MonkeyRunner, MonkeyDevice
# Smoke-test script for monkeyrunner (Jython, not CPython): install an APK,
# launch its main activity, edit two input fields via key events, then tap a
# screen position repeatedly and capture a screenshot.
# sets a variable with the apk name
apk='/home/nour/APKS/App_Lambda_App.apk'
# sets a variable with the name of an Activity in the package
activity ='com.core.lambdaapp.MainActivity'
#set the package name
package = 'com.core.lambdaapp'
# Connects to the current device, returning a MonkeyDevice object
print("connecting device...")
device = MonkeyRunner.waitForConnection()
# Installs the Android package. Notice that this method returns a boolean, so you can test
# to see if the installation worked.
print("installing app...")
print("give APK path")
device.installPackage(apk)
print("launching app...")
#print("give package name")
#p=raw_input()
#print("give MainActivity name")
#a=raw_input()
# sets the name of the component to start
runComponent = package +"/"+ activity
# Runs the component
device.startActivity(component=runComponent)
print('touch button')
# Presses the Menu button
device.press('KEYCODE_MENU', MonkeyDevice.DOWN_AND_UP)
######
#PRESS SOME KEYS
# Clear the focused field (jump to end, delete 3 chars) and type a value.
device.press('KEYCODE_MOVE_END', MonkeyDevice.DOWN_AND_UP)
device.press('KEYCODE_DEL', MonkeyDevice.DOWN_AND_UP)
device.press('KEYCODE_DEL', MonkeyDevice.DOWN_AND_UP)
device.press('KEYCODE_DEL', MonkeyDevice.DOWN_AND_UP)
device.type('12.2')
# Move to the next field and replace its last character with '1'.
device.press('KEYCODE_TAB', MonkeyDevice.DOWN_AND_UP)
device.press('KEYCODE_MOVE_END', MonkeyDevice.DOWN_AND_UP)
device.press('KEYCODE_DEL', MonkeyDevice.DOWN_AND_UP)
device.type('1')
# Tap the same screen coordinate 999 times (stress/tap-spam test).
for num in range(1,1000):
    device.touch(560,400, 'DOWN_AND_UP')
# Takes a screenshot
result = device.takeSnapshot()
print('taking screenshot ...')
# Writes the screenshot to a file
result.writeToFile('/home/nour/Downloads/shot.png','png')
print('screenshot done ...')
|
998,496 | 86180b563db183fb6c3c05195d02690177d36e78 | # for cascade rcnn
import torch
from icecream import ic
# Adapt a COCO-pretrained detector checkpoint to a 15-class (+1 background)
# dataset by truncating the classifier head, then save the modified weights.
model_name = "../pretrained/faster_rcnn_r50_fpn_2x_coco_bbox_mAP-0.384_20200504_210434-a5d8aa15.pth"
model = torch.load(model_name)
# weight
num_classes = 15 + 1  # 15 foreground classes + background
# for faster model
# NOTE(review): this keeps only the first num_classes rows of fc_cls, which
# assumes the weight is laid out (classes, features) with our target classes
# first -- confirm against the target mmdetection config before training.
model["state_dict"]["roi_head.bbox_head.fc_cls.weight"] = model["state_dict"]["roi_head.bbox_head.fc_cls.weight"][
    :num_classes, :]
model["state_dict"]["roi_head.bbox_head.fc_cls.bias"] = model["state_dict"]["roi_head.bbox_head.fc_cls.bias"][
    :num_classes]
# for cascade model (three bbox heads instead of one); kept for reference.
# model["state_dict"]["roi_head.bbox_head.0.fc_cls.weight"] = model["state_dict"]["roi_head.bbox_head.0.fc_cls.weight"][
#     :num_classes, :]
# model["state_dict"]["roi_head.bbox_head.1.fc_cls.weight"] = model["state_dict"]["roi_head.bbox_head.1.fc_cls.weight"][
#     :num_classes, :]
# model["state_dict"]["roi_head.bbox_head.2.fc_cls.weight"] = model["state_dict"]["roi_head.bbox_head.2.fc_cls.weight"][
#     :num_classes, :]
# # bias
# model["state_dict"]["roi_head.bbox_head.0.fc_cls.bias"] = model["state_dict"]["roi_head.bbox_head.0.fc_cls.bias"][
#     :num_classes]
# model["state_dict"]["roi_head.bbox_head.1.fc_cls.bias"] = model["state_dict"]["roi_head.bbox_head.1.fc_cls.bias"][
#     :num_classes]
# model["state_dict"]["roi_head.bbox_head.2.fc_cls.bias"] = model["state_dict"]["roi_head.bbox_head.2.fc_cls.bias"][
#     :num_classes]
# save new model
torch.save(model, "../pretrained/modified_c16_faster_rcnn_r50_fpn_2x_coco.pth")
|
998,497 | 31864975bad0bd0fb6f4397900357aa1052db9c5 | keerdaf=input()
print(keerdaf.title())
|
998,498 | 4bccbb2b0eb7e4d7a2c0e33173d8569448ac2c15 | print('Hello World')
# Reformat comma-separated address records into '*'-separated records.
in_path = 'C:\\Richard\\RichardMinorTextFiles\\TestFile2.txt'
out_path = 'C:\\Richard\\RichardMinorTextFiles2\\TestFile2New.txt'

# 'with' closes both files even if a record is malformed; the previous
# version leaked the handles on any exception.
with open(in_path, 'r') as file_in, open(out_path, 'w') as file_out:
    for line in file_in:
        # strip() both sides of every field. The old code only lstrip()-ed,
        # so the zip field kept its trailing newline -- which was the only
        # thing terminating output lines (and the final record could end up
        # without one). It also called redundant .__str__() on each field.
        fields = [field.strip() for field in line.split(',')]
        fullname, streetaddr, cityaddr, staddr, zipaddr = fields[:5]
        record = '*'.join([fullname, streetaddr, cityaddr, staddr, zipaddr])
        print(record)
        file_out.write(record + '\n')
998,499 | ad5199421e384e866a45d9bf60fe7993d7f56c18 | __author__ = 'alex'
STATUS_LOCK_ID = 'status_lock_id' |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.