blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 257
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e6f63f4947ed7d33320717bfd1a45ef9236c87e7 | 69233358e19c2fae446a28859658585c1e384edf | /deep_depth_transfer/utils/__init__.py | f9f9c5a926feb809041c341668cfe8b3999c758b | [] | no_license | MisterMap/deep-depth-transfer | ef6842d5b604e50aab3676322aea405dc5d2591c | 70397953097b9c5fca54ca45480eb609ce209b3b | refs/heads/master | 2023-01-09T11:18:42.502151 | 2020-10-28T08:44:19 | 2020-10-28T08:44:19 | 292,852,689 | 1 | 4 | null | 2020-10-28T08:44:20 | 2020-09-04T13:16:48 | Jupyter Notebook | UTF-8 | Python | false | false | 172 | py | from .logger_collection import LoggerCollection
from .tensor_board_logger import TensorBoardLogger
from .mlflow_logger import MLFlowLogger
from .metrics import DepthMetric
| [
"kurenkov1995@mail.ru"
] | kurenkov1995@mail.ru |
73ec753be9462f1e9d9b4fa780ea5358bfec5360 | 7da92607b350108efc3b1c081dee47b8f54f01e3 | /userProfile/serializer.py | 3053c285477a4f7a0e7ae47fc99df6d6687d5922 | [] | no_license | igorsbrito01/computer_store | 6fab362db904bf457d0bf6a83195fe6ea6ca1624 | 74daf5d819ec6ecb048c0210ab26873bbfc578c6 | refs/heads/master | 2022-05-13T19:16:58.864766 | 2019-07-15T17:30:41 | 2019-07-15T17:30:41 | 196,866,605 | 0 | 0 | null | 2022-04-22T21:53:04 | 2019-07-14T17:46:08 | Python | UTF-8 | Python | false | false | 194 | py | from rest_framework import serializers
from .models import User
class UserSerializer(serializers.ModelSerializer):
    """DRF serializer exposing only a User's public fields."""
    class Meta:
        # Serialize the local User model (imported from .models above).
        model = User
        # Only name and e-mail are exposed; other columns stay private.
        fields = ['full_name', 'email']
| [
"igorsbrito93@gmail.com"
] | igorsbrito93@gmail.com |
6145ed2868b600edc75e936a3607c93d50309798 | 00785a0c5d671522cd89a68c2193fe2415954393 | /ex34.py | f9fe49737594cac33ac8f91f98e696122f6e185f | [] | no_license | LikeABird-CX/Learn-python-the-hard--way | 33334040084b8b67e7a6c89fc298ea03bc7a4434 | 327002c32947c4f2cf5f844ebbe5f67b068f4031 | refs/heads/master | 2021-01-19T20:23:09.248062 | 2014-11-09T16:55:03 | 2014-11-09T16:55:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 177 | py | animals = ['bear', 'python', 'peacock', 'kangaroo', 'whale', 'platypus']
# Exercise on list indexing: pick animals out of the list defined above.
python = animals[1]    # second element ('python')
kangaroo = animals[3]  # fourth element ('kangaroo')
peacock = animals[2]   # third element ('peacock')
whale = animals[4]     # fifth element ('whale')
platypus = animals[-1]  # negative index: last element ('platypus')
"seanseany2014@gmail.com"
] | seanseany2014@gmail.com |
8880c733d385f621150bc797e7cc5cdba130e10a | 40d9422dd81966848f2862d8e716b28c78aea707 | /04_geosearch.py | a4b675d28476bdf7a1547145cc7fa2b52e83f759 | [] | no_license | jakobzhao/pe03 | 8a385d0d4c721fa36248d307586949e3152f850e | ac2ae7ab741a6c15dfae98b8c95eb4f09e77dd4c | refs/heads/main | 2023-04-13T07:57:25.279559 | 2021-04-21T08:17:05 | 2021-04-21T08:17:05 | 358,016,556 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,129 | py | # created on Dec 24, 2020
# modified on April 14, 2021
# @author: Bo Zhao
# @email: zhaobo@uw.edu
# @website: https://hgis.uw.edu
# @organization: Department of Geography, University of Washington, Seattle
# @description: Search existing tweets
import tweepy, json, time, csv
# Create a csv file to store the structured data after processing.
csvfile = open("assets/searched_tweets.csv", "w", newline='', encoding="utf-8") # mode a, r, w
# All the fields of each data entry that I want to collect.
fieldnames = ['username', 'userid', 'profile_location', 'created_at', 'text', 'retweet_count', 'source', 'coordinates']
# Create a writer to write the structured data to the csv file.
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
# Write the header to the csv file
writer.writeheader()
# Apply for your own Twitter API keys at https://developer.twitter.com/en/apply-for-access
consumer_key = "your_consumer_key"
consumer_secret = "your_consumer_secret"
access_token = "your_access_token"
access_token_secret = "your_access_token_secret"
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth, wait_on_rate_limit=True)
# Define the search term and the date_since date as variables
search_words = "#BLM"
location = "47.6138893,-122.3107869,100mi"
# read the Twitter API document to look for other ways to customize your queries.
# refer to https://developer.twitter.com/en/docs/twitter-api/v1/rules-and-filtering/search-operators
# for example: you can ignore all the retweets by #wildfires -filter:retweets
# Geolocalization: the search operator “near” isn’t available in the API, but there is a more precise way to restrict
# your query by a given location using the geocode parameter specified with the template “latitude,longitude,radius”,
# for example, “47.6138893,-122.3107869,10mi” (capitol hill at Seattle). When conducting geo searches, the search API will first attempt to find Tweets、
# which have lat/long within the queried geocode, and in case of not having success, it will attempt to find Tweets created
# by users whose profile location can be reverse geocoded into a lat/long within the queried geocode, meaning that is possible
# to receive Tweets which do not include lat/long information.
date_since = "2020-10-16"
# Collect tweets
# tweets = tweepy.Cursor(api.search, q=search_words, lang="en", since=date_since).items(100)
tweets = tweepy.Cursor(api.search, q=search_words, geocode=location, lang="en", since=date_since).items(100)
# Iterate and print tweets
for tweet in tweets:
row = {
'username': tweet.author.name,
'userid': tweet.author.id,
'profile_location': tweet.author.location,
'created_at': str(tweet.author.created_at),
'text': tweet.text,
'retweet_count': tweet.retweet_count,
'source': tweet.source,
'coordinates': tweet.coordinates
}
writer.writerow(row)
print(row)
csvfile.close()
# notify the completion of the program in the console.
print("finished")
| [
"zhaobo@uw.edu"
] | zhaobo@uw.edu |
022fe9db18a7bfac1546592098e38dd4228037d4 | c0832b70cd3e01b8f7795ae4ce977e6852c27e9a | /problem3.py | a10d6abefc7b59aa3ac458ecfcd3b6a3620c38b2 | [] | no_license | leahpd/Lesson-7- | 597ddb4f050ec6ba74d143842ee7d93a0abf5073 | 9982f3ca560a808eae7858f841aed1b0f20031bd | refs/heads/master | 2020-04-12T17:00:44.463255 | 2018-12-20T21:09:28 | 2018-12-20T21:09:28 | 162,631,568 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 310 | py | print('-'*65)
print ('100th Birthday Program:')
print()
print('Description: This program asks you for your current age and tells you the year that you will turn 100.')
# Prompt returns a string; convert to int before doing arithmetic.
age = input('What is your age today? ')
age = int(age)
# Years remaining until the user's 100th birthday.
yearsleft= 100 - age
yearsleft = int(yearsleft)
# NOTE(review): the current year is hard-coded; the result is only correct
# when run in 2018 — consider datetime.date.today().year.
year = 2018 + yearsleft
print(year)
| [
"noreply@github.com"
] | leahpd.noreply@github.com |
a3d9d48aaac6d26112f1a443eb7a52e11c83b86d | 31f6ef3a340f46dee74e1e69b87307d96082672e | /mysite/wsgi.py | 97b69fc74305c3cc0a474c8cb11abaa056063a9b | [] | no_license | SravaniKV/mdc-assign1p2 | 9fcf8cdc8e31c75b5ca35a7eafa65304141d7beb | de22e41794f154df1da109271d25abe6fc173907 | refs/heads/master | 2021-05-08T11:08:29.601312 | 2018-02-01T19:11:29 | 2018-02-01T19:11:29 | 119,882,409 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 562 | py | """
WSGI config for mysite project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
# Point Django at the project settings before building the WSGI application.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mysite.settings")
application = get_wsgi_application()
# Wrap the app so WhiteNoise serves static files directly from WSGI.
# NOTE(review): DjangoWhiteNoise is deprecated in newer WhiteNoise releases
# in favor of the WHITENOISE middleware — confirm the installed version.
application = DjangoWhiteNoise(application)
| [
"venkatasravanikaka@unomaha.edu"
] | venkatasravanikaka@unomaha.edu |
0fe2a3ee5bcbf151df129e38ff5051b969889aca | d9dcbd9f4dc60ab752670d2b7025c2da05f6d69d | /study_day12_flask01/15_hook.py | 61dfd8866cf5d20895859f744a5ca530dfbbfdc9 | [] | no_license | chenliang15405/python-learning | 14c7e60c794026b1f2dadbbbe82f63e4745b0c23 | 6de32a1c9b729e9528b45c080e861b3da352f858 | refs/heads/master | 2020-08-28T12:21:07.544254 | 2020-01-04T10:49:50 | 2020-01-04T10:49:50 | 217,696,366 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 763 | py | """
Request hooks (Flask request-lifecycle callbacks).
"""
from flask import Flask, request
app = Flask(__name__)
# NOTE(review): every function below is named `index`; the route view works
# because Flask binds it at decoration time, but the shared name hurts
# readability and tracebacks — consider distinct names.
@app.route("/hello")
def index():
    print("hello 执行")
    return "login success"
# Runs once: before the first request handled after the app starts.
@app.before_first_request
def index():
    print("第一次请求处理前执行")
# Runs before every request.
@app.before_request
def index():
    print("每次请求之前都被执行")
# Runs after every request unless an unhandled exception occurred.
# NOTE(review): Flask passes the response object to after_request hooks and
# expects it returned — this zero-argument version will fail at request time;
# confirm against the Flask docs.
@app.after_request
def index():
    print("每次请求之后都执行,出现异常则不执行")
# Runs after every request, even on errors.
@app.teardown_request
def index():
    # Executes even when the requested route does not exist.
    print(request.path)
    print("每次请求之后执行,无论是否出现异常,都执行")
if __name__ == '__main__':
    app.run()
| [
"1165243776@qq.com"
] | 1165243776@qq.com |
4d3956aca08cd84cabb584e92a9c96a95ef34502 | 11cf40946c55b47886cfe8777916a17db82c2309 | /ex8.py | 717dbf1941b429dd3f54602f98d5e4661d89267e | [] | no_license | dalalsunil1986/python_the_hard_way_exercises | fc669bf2f823a4886f0de717d5f1ca0d0233f6af | bc329999490dedad842e23e8447623fd0321ffe0 | refs/heads/master | 2023-05-03T01:35:24.097087 | 2021-05-16T00:43:56 | 2021-05-16T00:43:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 281 | py | formatter = "{} {} {} {}"
print(formatter.format(1, 2, 3, 4))
print(formatter.format("one", "two", "three", "four"))
print(formatter.format(formatter, formatter, formatter, formatter))
print(formatter.format(
"Try your",
"Own text",
"Maybe a",
"Or some bs"
))
| [
"mathiasgreg@gmail.com"
] | mathiasgreg@gmail.com |
f07f05c1470de23f58415c893c0c116f943ce367 | 5aa7bc2a6c8660235cf4124054d288a9ec025ba6 | /misc/SCAN/vocab.py | 4afadf56fbadede66344b50972e6e12f696577f9 | [
"Apache-2.0",
"MIT"
] | permissive | YuanEZhou/Grounded-Image-Captioning | 6635b1d8e47cbbde42e4b2f56d6d554f61a350da | 044685a0fb049b7d49a930bb3defc3bc92ecf363 | refs/heads/master | 2022-03-07T16:45:33.752706 | 2022-01-05T12:47:01 | 2022-01-05T12:47:01 | 251,247,888 | 60 | 8 | null | null | null | null | UTF-8 | Python | false | false | 3,337 | py | # -----------------------------------------------------------
# Stacked Cross Attention Network implementation based on
# https://arxiv.org/abs/1803.08024.
# "Stacked Cross Attention for Image-Text Matching"
# Kuang-Huei Lee, Xi Chen, Gang Hua, Houdong Hu, Xiaodong He
#
# Writen by Kuang-Huei Lee, 2018
# ---------------------------------------------------------------
"""Vocabulary wrapper"""
import nltk
from collections import Counter
import argparse
import os
import json
# Caption files (relative to <data_path>/<data_name>/) used to build each
# dataset's vocabulary.
annotations = {
    'coco_precomp': ['train_caps.txt', 'dev_caps.txt'],
    'f30k_precomp': ['train_caps.txt', 'dev_caps.txt'],
}
class Vocabulary(object):
    """Bidirectional word <-> index mapping with an ``<unk>`` fallback."""

    def __init__(self):
        # word -> index, index -> word, and the next free index.
        self.word2idx = {}
        self.idx2word = {}
        self.idx = 0

    def add_word(self, word):
        """Register *word* if unseen; repeated additions are no-ops."""
        if word in self.word2idx:
            return
        slot = self.idx
        self.word2idx[word] = slot
        self.idx2word[slot] = word
        self.idx = slot + 1

    def __call__(self, word):
        """Return the index of *word*, or the index of ``<unk>`` if unknown."""
        try:
            return self.word2idx[word]
        except KeyError:
            return self.word2idx['<unk>']

    def __len__(self):
        """Number of distinct words registered."""
        return len(self.word2idx)
def serialize_vocab(vocab, dest):
    """Write a Vocabulary's state (word2idx, idx2word, idx) to *dest* as JSON."""
    payload = {
        'word2idx': vocab.word2idx,
        'idx2word': vocab.idx2word,
        'idx': vocab.idx,
    }
    with open(dest, "w") as sink:
        json.dump(payload, sink)
def deserialize_vocab(src):
    """Rebuild a Vocabulary from a JSON file written by serialize_vocab()."""
    with open(src) as handle:
        state = json.load(handle)
    vocab = Vocabulary()
    vocab.word2idx = state['word2idx']
    vocab.idx2word = state['idx2word']
    vocab.idx = state['idx']
    return vocab
def from_txt(txt):
    """Read *txt* in binary mode and return its lines stripped of whitespace."""
    with open(txt, 'rb') as handle:
        return [line.strip() for line in handle]
def build_vocab(data_path, data_name, caption_file, threshold):
    """Build a Vocabulary from caption files, keeping words seen >= *threshold* times.

    Special tokens <pad>/<start>/<end>/<unk> always occupy indices 0-3.
    """
    counter = Counter()
    dataset_dir = os.path.join(data_path, data_name)
    for name in caption_file[data_name]:
        captions = from_txt(os.path.join(dataset_dir, name))
        total = len(captions)
        for i, caption in enumerate(captions):
            # Captions are raw bytes; decode before tokenizing.
            tokens = nltk.tokenize.word_tokenize(
                caption.lower().decode('utf-8'))
            counter.update(tokens)
            if i % 1000 == 0:
                print("[%d/%d] tokenized the captions." % (i, total))

    # Discard words rarer than the threshold; insertion order is preserved.
    frequent = [word for word, count in counter.items() if count >= threshold]

    vocab = Vocabulary()
    for special in ('<pad>', '<start>', '<end>', '<unk>'):
        vocab.add_word(special)
    for word in frequent:
        vocab.add_word(word)
    return vocab
def main(data_path, data_name):
    """Build the vocabulary for *data_name* and save it under ./vocab/."""
    vocab = build_vocab(data_path, data_name, caption_file=annotations, threshold=4)
    target = './vocab/%s_vocab.json' % data_name
    serialize_vocab(vocab, target)
    print("Saved vocabulary file to ", target)
if __name__ == '__main__':
    # Command-line entry point: choose the dataset and its root directory.
    parser = argparse.ArgumentParser()
    parser.add_argument('--data_path', default='data')
    parser.add_argument('--data_name', default='coco_precomp',
                        help='{coco,f30k}_precomp')
    opt = parser.parse_args()
    main(opt.data_path, opt.data_name)
| [
"1977212346@qq.com"
] | 1977212346@qq.com |
db720367282065a82bce1e65eedd0955fcb4d72b | 621185192ba26cf426ce5f97f9a94198580123a6 | /core/api.py | 1109222f30ba63353eedf4e39925b4275cbc4c59 | [] | no_license | DmitriyKarpovskiy1/simple_messenger | 2bbacaa09ffcd00fd10d39374a2183834c3dc854 | 8aa77e31c4afb662301066edc94b04117e6785cd | refs/heads/master | 2023-01-27T19:47:24.032873 | 2020-12-05T14:55:23 | 2020-12-05T14:55:23 | 314,788,638 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,744 | py | from . import response
from .data_accessor import Data_accessor
data_accessor = Data_accessor()
def main():
    """Root endpoint: return the API banner."""
    banner = "Server API for Simple Messenger"
    return response.response(200, banner)
# Login/password: up to 60 characters
def create_user(name, password):
    """Register a new account; both fields are required and names must be unique."""
    if name is None or password is None:
        return response.response(400, "Need user and password")
    if data_accessor.user_exist(name):
        return response.response(400, f"User with name {name} already exist")
    created = data_accessor.create_user(name, password)
    if not created:
        return response.response(500, f"Failed to create a user with the name {name}")
    return response.response(200, f"User with name {name} created")
def create_chat(user_name, chat_name):
    """Create a chat owned by *user_name* and join the owner to it.

    On success the new chat id is returned in the `sensible` payload.
    """
    if user_name is None or chat_name is None:
        return response.response(400, "Need user and chat_number")
    new_chat_id = data_accessor.create_chat(user_name, chat_name)
    if new_chat_id is None:
        return response.response(500, f"Failed to create a chat with the name {chat_name}")
    joined = data_accessor.join_chat(new_chat_id, user_name)
    if not joined:
        return response.response(500, f"Failed join to chat with the name {chat_name}")
    return response.response(200, f"Chat with name {chat_name} created", sensible=new_chat_id)
# NOTE: users who already joined the chat can still be invited
def invite_user(user_name, chat_number):
    """Create a chat-invitation notification for *user_name*."""
    if user_name is None or chat_number is None:
        return response.response(400, "Need user and chat_number")
    invited = data_accessor.create_chat_notifications(user_name, chat_number)
    if not invited:
        return response.response(
            500,
            f"Failed to invite a user with the name {user_name}."
            " User already invited or user/chat don't exist.")
    return response.response(200, f"User {user_name} invited")
def get_chat_notifications(user_name):
    """Return the pending chat invitations for *user_name*."""
    if user_name is None:
        return response.response(400, "Need user")
    notifications = data_accessor.get_chat_notifications(user_name)
    # None signals a data-layer failure; an empty collection is a valid answer.
    if notifications is None:
        return response.response(500, f"Failed to get information for a user with the name {user_name}")
    return response.response(200, f"Chats information for a user with the name {user_name}", sensible=notifications)
def delete_chat_notification(user_name, chat_number):
    """Remove *user_name*'s invitation to chat *chat_number*."""
    if user_name is None or chat_number is None:
        return response.response(400, "Need user and chat_number")
    deleted = data_accessor.delete_chat_notification(user_name, chat_number)
    if not deleted:
        return response.response(500, f"Failed to delete notification for a user with the name {user_name}")
    return response.response(200, f"Notification delete for a user with the name {user_name}")
# TODO: may need a check that the invitation notification actually exists
def join_chat(user_name, chat_number):
    """Consume the invitation notification, then add the user to the chat."""
    if user_name is None or chat_number is None:
        return response.response(400, "Need user and chat_number")
    cleared = data_accessor.delete_chat_notification(user_name, chat_number)
    if not cleared:
        return response.response(500, f"Failed to delete notification for a user with the name {user_name}")
    joined = data_accessor.join_chat(chat_number, user_name)
    if not joined:
        return response.response(500, f"Failed join to chat with id {chat_number}")
    return response.response(200, f"Join to chat with id {chat_number}")
def chat_owner(chat_number):
    """Return the owner of chat *chat_number* in the `sensible` payload."""
    if chat_number is None:
        return response.response(400, "Need chat_number")
    owner = data_accessor.chat_owner(chat_number)
    if owner is None:
        return response.response(400, f"Chat {chat_number} not exist")
    return response.response(200, f"Owner for chat {chat_number}", sensible=owner)
def leave_chat(user_name, chat_number):
    """Remove *user_name* from chat *chat_number*."""
    if user_name is None or chat_number is None:
        return response.response(400, "Need user and chat_number")
    left = data_accessor.leave_chat(user_name, chat_number)
    if not left:
        return response.response(500, f"Failed leave chat with id {chat_number}")
    return response.response(200, f"Leave from chat {chat_number}")
def chat_name(chat_number):
    """Look up the display name of chat *chat_number*."""
    if chat_number is None:
        return response.response(400, "Need chat_number")
    name = data_accessor.chat_name(chat_number)
    if name is None:
        return response.response(400, f"Chat with number {chat_number} don't exist")
    return response.response(200, f"Chat with number {chat_number} exist", sensible=name)
def delete_chat(user_name, chat_number):
    """Delete a chat; only its owner may do so."""
    if user_name is None or chat_number is None:
        return response.response(400, "Need user and chat_number")
    owner = data_accessor.chat_owner(chat_number)
    if owner != user_name:
        return response.response(400, f"User with the name {user_name} are not chat owner for {chat_number}")
    removed = data_accessor.delete_chat(chat_number)
    if not removed:
        return response.response(500, f"Failed to delete chat with id {chat_number}")
    return response.response(200, f"Delete chat {chat_number}")
def check_password(user_name, password):
    """Verify *password* for *user_name*.

    Returns 200 with sensible=True on a match, 400 with sensible=False
    otherwise.  Fix: the user-facing messages were misspelled
    ("coorect"/"incoorect") — corrected to "correct"/"incorrect".
    """
    if user_name is None or password is None:
        return response.response(400, "Need user and password")
    if data_accessor.check_password(user_name, password):
        return response.response(200, f"Password for user {user_name} correct", sensible=True)
    return response.response(400, f"Password for user {user_name} incorrect", sensible=False)
# NOTE: relies on the data layer reporting a missing user; see the 500 branch
def change_password(user_name, password):
    """Set a new password for *user_name*."""
    if user_name is None or password is None:
        return response.response(400, "Need user and password")
    changed = data_accessor.change_password(user_name, password)
    if not changed:
        return response.response(500, f"User with name {user_name} not exist")
    return response.response(200, f"Password for user {user_name} changed")
# NOTE: a nonexistent chat yields an empty list, not an error
def all_users_in_chat(chat_number):
    """List every member of chat *chat_number*."""
    if chat_number is None:
        return response.response(400, "Need chat_number")
    members = data_accessor.all_users_in_chat(chat_number)
    if members is None:
        return response.response(500, f"Failed take list all users for chat with id {chat_number}")
    return response.response(200, f"List all users for chat with id {chat_number}", sensible=members)
def all_chats_for_user(user_name):
    """List every chat *user_name* belongs to."""
    if user_name is None:
        return response.response(400, "Need user")
    chats = data_accessor.all_chats_for_user(user_name)
    if chats is None:
        return response.response(500, f"Failed take list all chats for user with name {user_name}")
    return response.response(200, f"List all chats for user with name {user_name}", sensible=chats)
def send_message(user_name, chat_number, message):
    """Store a chat message and notify every other chat member."""
    if user_name is None or chat_number is None or message is None:
        return response.response(400, "Need user, chat_number, message")
    if not data_accessor.send_message(user_name, chat_number, message):
        return response.response(400, f"Message from {user_name} not sended")
    recipients = data_accessor.all_users_in_chat(chat_number)
    if recipients is None:
        return response.response(500, f"Failed take list all users for chat with id {chat_number}")
    # The sender must not receive a notification about their own message.
    recipients.remove(user_name)
    data_accessor.create_message_notification(chat_number, recipients)
    return response.response(200, f"Message from {user_name} sended")
def list_messages(chat_number):
    """Return every message stored for chat *chat_number*."""
    if chat_number is None:
        return response.response(400, "Need chat_number")
    messages = data_accessor.list_messages(chat_number)
    if messages is None:
        return response.response(500, f"Failed get list to the chat with id {chat_number}")
    return response.response(200, f"List all messages for the chat with id {chat_number}", sensible=messages)
def get_messages_notifications(user_name):
    """Return the unread-message notifications for *user_name*.

    Mirrors get_chat_notifications(): only a *None* result from the data
    layer signals failure.  Bug fix: the original tested ``if result:``,
    so a user with zero notifications (an empty, falsy collection) was
    wrongly reported as a 500 failure.
    """
    if user_name is None:
        return response.response(400, "Need user")
    result = data_accessor.get_messages_notifications(user_name)
    if result is not None:
        return response.response(200, f"Chats information for a user with the name {user_name}", sensible=result)
    return response.response(500, f"Failed to get information for a user with the name {user_name}")
def delete_message_notification(user_name, chat_number):
    """Clear *user_name*'s unread-message marker for chat *chat_number*."""
    if user_name is None or chat_number is None:
        return response.response(400, "Need user and chat_number")
    cleared = data_accessor.delete_message_notification(user_name, chat_number)
    if not cleared:
        return response.response(500, f"Failed to delete notification for a user with the name {user_name}")
    return response.response(200, f"Notification delete for a user with the name {user_name}")
| [
"dmitriy.karpovskiy@fruct.org"
] | dmitriy.karpovskiy@fruct.org |
bf8de6800a12c7d01677b1d90a2bcfdcc875683f | c68c841c67f03ab8794027ff8d64d29356e21bf1 | /Sort Letters by Case.py | df083f821551643b58d7653b6921c678258e3104 | [] | no_license | jke-zq/my_lintcode | 430e482bae5b18b59eb0e9b5b577606e93c4c961 | 64ce451a7f7be9ec42474f0b1164243838077a6f | refs/heads/master | 2020-05-21T20:29:11.236967 | 2018-06-14T15:14:55 | 2018-06-14T15:14:55 | 37,583,264 | 8 | 2 | null | null | null | null | UTF-8 | Python | false | false | 593 | py | class Solution:
"""
@param chars: The letters array you should sort.
"""
def sortLetters(self, chars):
# write your code here
if not chars:
return None
length = len(chars)
left, pivot, right = 0, 0, length - 1
while left <= right:
if chars[left].islower():
chars[pivot], chars[left] = chars[left], chars[pivot]
pivot += 1
left += 1
else:
chars[left], chars[right] = chars[right], chars[left]
right -= 1
| [
"jke0zq@gmail.com"
] | jke0zq@gmail.com |
79a2af8f5ef1a19f13157706bf8291518bde5ce6 | 66d273b51b0be55c083764d1e2fd9b19182aacdc | /TourismShop/settings/prod.py | e56a554f9bf638f3c486504e0757a288a5531460 | [] | no_license | RainLeave/TourWeb | b48412d0aac1544c8f2171923daaacc4a438b74c | 6947ecbd41b5078fb5fa3979a208ec15df910338 | refs/heads/master | 2022-12-10T10:36:15.572610 | 2019-10-12T12:48:14 | 2019-10-12T12:48:14 | 213,925,647 | 1 | 0 | null | 2022-12-08T06:41:55 | 2019-10-09T13:25:51 | Python | UTF-8 | Python | false | false | 345 | py | from TourismShop.settings.base import * # NOQA (ignore all errors on this line)
import os
# Production database: PostgreSQL, with the database name injected via the
# DATABASE_NAME environment variable.  The commented sqlite lines are the
# development fallback kept for reference.
DATABASES = {
    'default': {
        # 'ENGINE': 'django.db.backends.sqlite3',
        # 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': os.environ.get('DATABASE_NAME')
    }
}
"1171039932l@sina.com"
] | 1171039932l@sina.com |
998ddf92010b3cdd98dc09f3ae0e3b34477f25f0 | 0e5505b281f444a69ad5fa55f1b12c0467a8cafd | /documentation/homework1/analysis/distance_analyzer.py | 212bd24ee290ed8cc4fc6e906175c0313be69268 | [] | no_license | drobilc/MatchMaker | 8e8627d57072b6104fab10d80b380f029d103567 | 8b0212a0206ea38f770e1dbfc2ce5c16741c3a14 | refs/heads/master | 2021-03-11T14:08:14.871443 | 2020-10-22T20:46:16 | 2020-10-22T20:46:16 | 250,272,423 | 2 | 0 | null | 2020-07-21T12:27:04 | 2020-03-26T13:54:49 | Python | UTF-8 | Python | false | false | 2,413 | py | #!/usr/bin/env python
import glob
# Camera angles analysed; rows of main_results follow this order.
# NOTE(review): `angles` itself is never used below — kept for documentation.
angles = ["0deg", "30deg", "45deg", "60deg"]
# main_results[row][col]: row = angle (0/30/45/60 deg), col = one of the 12
# distance buckets (nearest .. farthest); each cell accumulates detections.
main_results = [[0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0],[0,0,0,0,0,0,0,0,0,0,0,0]]
# open file for analysis results
result_file_name = "distance_results.txt"
result_file = open(result_file_name, mode="a")
for file in glob.glob("data/*.txt"):
    data_file = open(file)
    lines = data_file.readlines()
    # First 8 lines are header, last line trailer — presumably; TODO confirm
    # the file layout that makes `len(lines) - 9` the frame count.
    frames = len(lines) - 9
    # Each of the 12 buckets covers an equal share of the frames.
    fraction = frames / 12
    # Per-bucket true-positive counters, from 6.0 m down to 0.5 m.
    TP60 = 0
    TP55 = 0
    TP50 = 0
    TP45 = 0
    TP40 = 0
    TP35 = 0
    TP30 = 0
    TP25 = 0
    TP20 = 0
    TP15 = 0
    TP10 = 0
    TP05 = 0
    distances = []
    for i in range(1, frames + 1):
        # j counts frames from the end; index skips the 8-line header.
        j = frames + 1 - i
        index = i + 7
        # A non-trivial line (more than 2 chars) marks a detection.
        if len(lines[index].strip()) > 2:
            if j > fraction * 11:
                TP60 += 1
            elif j > fraction * 10:
                TP55 += 1
            elif j > fraction * 9:
                TP50 += 1
            elif j > fraction * 8:
                TP45 += 1
            elif j > fraction * 7:
                TP40 += 1
            elif j > fraction * 6:
                TP35 += 1
            elif j > fraction * 5:
                TP30 += 1
            elif j > fraction * 4:
                TP25 += 1
            elif j > fraction * 3:
                TP20 += 1
            elif j > fraction * 2:
                TP15 += 1
            elif j > fraction * 1:
                TP10 += 1
            else:
                TP05 += 1
    distances = [TP05, TP10, TP15, TP20, TP25, TP30, TP35, TP40, TP45, TP50, TP55, TP60]
    # Route this file's bucket counts to the row for its camera angle,
    # inferred from the file name ("60"/"45"/"30", otherwise 0 deg).
    if "60" in file:
        # add distances to the matching row
        for idx in range(0,12):
            main_results[3][idx] += distances[idx]
    elif "45" in file:
        # add distances to the matching row
        for idx in range(0,12):
            main_results[2][idx] += distances[idx]
    elif "30" in file:
        # add distances to the matching row
        for idx in range(0,12):
            main_results[1][idx] += distances[idx]
    else:
        # add distances to the matching row
        for idx in range(0,12):
            main_results[0][idx] += distances[idx]
    # close file
    data_file.close()
# write to file
result_file.write("{}\n".format(main_results))
result_file.close()
"ajda.frankovic@gmail.com"
] | ajda.frankovic@gmail.com |
21de9d0d22f3db780d95013e08100bd5fcfd894f | 4fd16f40c4f396d91e9a426145827f7ef024135d | /src/test/python/dfr_test/TestBitTruncatedFinder.py | 4377ef3c8780badfbf825b7811ea1100c116827b | [] | no_license | rfalke/decreasefileredundency | 2240bdb8f201e517fd631da25ae37b8ca1557ca8 | 63704615726cb43e0a4c91400ef0a895dcd9ac51 | refs/heads/master | 2021-01-10T19:23:08.417516 | 2015-05-27T14:22:46 | 2015-05-27T14:22:46 | 7,412,847 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,702 | py |
from tempdir import TempDir
import os
from os.path import join
import unittest
from dfr import db
from dfr_test.utils import TestCase, DO_NOT_MATCH_RE
from dfr.bit_indexer import BitIndexer
from dfr.bit_truncated_finder import BitTruncatedFinder
def write_chunked(filename, *chunks):
    """Create *filename* from (size, value) chunks.

    Each chunk is a ``(size, char)`` pair: *size* copies of the byte
    ``char % 256`` are written.  Asserts afterwards that the file size
    equals the sum of the chunk sizes.

    Bug fix: the original wrote ``chr(...)`` (a str) to a file opened in
    binary mode, which raises TypeError on Python 3; binary files need
    bytes.  Also uses a context manager so the handle is always closed.
    """
    with open(filename, "wb") as out:
        for size, char in chunks:
            # Build the whole chunk in one allocation instead of per-byte writes.
            out.write(bytes([char % 256]) * size)
    assert os.path.getsize(filename) == sum(chunk[0] for chunk in chunks)
class Test(TestCase):
    """End-to-end check of BitTruncatedFinder: index a directory of files,
    then verify it reports every (larger, truncated-prefix) file pair."""

    def test_simple(self):
        with TempDir() as tmpdir:
            datadir = tmpdir.create_dir('data')
            # Files 1-8 share a long zero prefix with small tail variations;
            # 9/10 are an unrelated pair.  2 and 10 are removed after
            # indexing so only on-disk files can appear as results.
            write_chunked(join(datadir, 'input_1'), (9000, 0))
            write_chunked(join(datadir, 'input_2'), (9000, 0), (1, 1))
            write_chunked(join(datadir, 'input_3'), (9000, 0), (1, 2))
            write_chunked(join(datadir, 'input_4'), (9000, 0), (1, 1), (1, 2))
            write_chunked(join(datadir, 'input_5'), (9000, 0), (1, 2))
            write_chunked(join(datadir, 'input_6'), (9000, 0), (1, 2))
            write_chunked(join(datadir, 'input_7'), (5000, 0), (3999, 1))
            write_chunked(join(datadir, 'input_8'), (8999, 0))
            write_chunked(join(datadir, 'input_9'), (4000, 10))
            write_chunked(join(datadir, 'input_10'), (4001, 10))

            # Index everything into a fresh sqlite database.
            db_fn = join(tmpdir.name, 'files.sdb')
            the_db = db.Database(db_fn, verbose=0)
            indexer = BitIndexer(the_db, DO_NOT_MATCH_RE, DO_NOT_MATCH_RE, verbose_progress=0)
            indexer.run([datadir])

            # Deleted after indexing: stale index entries must not surface.
            os.remove(join(datadir, 'input_2'))
            os.remove(join(datadir, 'input_10'))

            finder = BitTruncatedFinder(db.Database(db_fn, verbose=0), [datadir])
            items = list(finder.find())
            # Normalize result objects to comparable tuples.
            items = [(x.large_size, x.large_path, x.small_size, x.small_path) for x in items]
            # Expected: every surviving longer file paired with each of its
            # truncated prefixes, ordered by (large_size desc, small_size desc).
            self.assertEqual(items, [
                (9002, join(datadir, 'input_4'), 9000, join(datadir, 'input_1')),
                (9002, join(datadir, 'input_4'), 8999, join(datadir, 'input_8')),
                (9001, join(datadir, 'input_3'), 9000, join(datadir, 'input_1')),
                (9001, join(datadir, 'input_5'), 9000, join(datadir, 'input_1')),
                (9001, join(datadir, 'input_6'), 9000, join(datadir, 'input_1')),
                (9001, join(datadir, 'input_3'), 8999, join(datadir, 'input_8')),
                (9001, join(datadir, 'input_5'), 8999, join(datadir, 'input_8')),
                (9001, join(datadir, 'input_6'), 8999, join(datadir, 'input_8')),
                (9000, join(datadir, 'input_1'), 8999, join(datadir, 'input_8'))])
if __name__ == '__main__':
unittest.main()
| [
"i-git-stone@rf.risimo.net"
] | i-git-stone@rf.risimo.net |
0361b61925da8971443975df1032048b6f2dafc7 | 54c617189556e30202984f11d81102fc2d0bf0d3 | /bysjapp/models.py | 5f5aca47af373704e929ecbb521389b63b239048 | [] | no_license | tzdsqhw/PF | b188b08dc263da39a024d8e5e4c0f2cb4626222a | e64c7041473682b8e88d1dd80aa33295975d84fc | refs/heads/master | 2021-02-15T02:38:40.039990 | 2020-04-15T04:50:03 | 2020-04-15T04:50:03 | 244,856,828 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,995 | py | from django.db import models
# Create your models here.
class Users(models.Model):
    """Site account: credentials, profile data and unread-event state."""
    userid = models.AutoField(primary_key=True)
    username=models.CharField(max_length=20, verbose_name='用户名')  # login / display name
    # NOTE(review): password is stored as plain text — consider hashing.
    password=models.CharField(max_length=20, verbose_name='密码')
    birth=models.DateField(verbose_name='生日')
    sex=models.CharField(max_length=6,verbose_name='性别',choices=(('男','男'),('女','女')),default='男')
    mnum = models.IntegerField(verbose_name='消息数', default=0)  # unread-notification counter
    head=models.CharField(max_length=40, verbose_name='头像',default='nohead.png')  # avatar file name
    # JSON-encoded inbox of pending like/comment/follow events.
    newmessage=models.TextField(verbose_name='新信息',default='{"like":[],"comment":[],"follow":[]}')
    def __str__(self):
        return str(self.userid)
    class Meta:
        verbose_name = '用户'
        verbose_name_plural = verbose_name
class Pets(models.Model):
    """A pet listed for sale (dog/cat/other) with its sale state."""
    petid= models.AutoField(primary_key=True)
    petprice=models.FloatField(verbose_name='宠物价格',default=0)  # sale price
    # Sale status: sold ('已出售') or not yet sold ('未出售').
    pett=models.CharField(max_length=6,verbose_name='状态',choices=(('已出售','已出售'),('未出售','未出售')),default='未出售')
    petname=models.CharField(max_length=20, verbose_name='宠物名')
    petsex=models.CharField(max_length=6,verbose_name='性别',choices=(('公','公'),('母','母')),default='公')
    petbirth=models.DateField(verbose_name='宠物生日')
    speci=models.CharField(verbose_name='物种',choices=(('狗','狗'),('猫','猫'),('其他','其他')),max_length=10,default="狗")  # species
    vnum=models.IntegerField(verbose_name='访问人数',default=0)  # page-view counter
    create_time = models.DateTimeField(verbose_name='创建时间', auto_now_add=True)
    def __str__(self):
        return str(self.petid)
    class Meta:
        verbose_name = '宠物'
        verbose_name_plural = verbose_name
class Goods(models.Model):
    """A shop product: price, stock level, description and category."""
    goodsid=models.AutoField(primary_key=True)
    goodname=models.CharField(max_length=20, verbose_name='商品名')
    goodprice=models.FloatField(verbose_name='商品单价',default=0)  # unit price
    goodnum=models.IntegerField(verbose_name='商品存货数',default=0)  # units in stock
    goodtext=models.TextField(verbose_name='商品详情')  # free-form description
    # Category: food / toys / cleaning / medicine / clothing.
    goodtype=models.CharField(verbose_name='商品类型',choices=(('宠物食品','宠物食品'),('宠物玩具','宠物玩具'),('清洁用品','清洁用品'),('药品','药品'),('宠物衣物','宠物衣物')),max_length=10,default="宠物食品")
    def __str__(self):
        return str(self.goodsid)
    class Meta:
        verbose_name = '商品'
        verbose_name_plural = verbose_name
class Developments(models.Model):
    """User-posted feed item ("moment") with a like counter."""
    dmentid= models.AutoField(primary_key=True)
    user=models.ForeignKey(Users,verbose_name='用户',on_delete=models.CASCADE)
    # NOTE(review): duplicates user.username -- denormalized copy; keep in sync with the FK.
    username=models.CharField(max_length=20, verbose_name='用户名')
    likenum = models.IntegerField(verbose_name='点赞数', default=0)
    dmenttext = models.TextField(verbose_name='动态详情')
    create_time = models.DateTimeField(verbose_name='创建时间', auto_now_add=True)
    create_date = models.DateField(verbose_name='创建日期', auto_now_add=True)
    def __str__(self):
        return str(self.dmentid)
    class Meta:
        verbose_name = '动态'
        verbose_name_plural = verbose_name
class WhosCar(models.Model):
    """Maps a user to their cart table name plus default shipping info."""
    user = models.ForeignKey(Users, verbose_name='用户',on_delete=models.CASCADE)
    carname=models.CharField(max_length=20, verbose_name='购物车表名')
    useradress = models.CharField(verbose_name='收货地址', max_length=35)
    phone = models.CharField(verbose_name='收货电话', max_length=11)
    def __str__(self):
        return self.carname
    class Meta:
        verbose_name = '谁的车'
        verbose_name_plural = verbose_name
class Likes(models.Model):
    """A like placed by a user on one feed item."""
    dmentid = models.ForeignKey(Developments,verbose_name='动态',on_delete=models.CASCADE)
    userid = models.ForeignKey(Users,verbose_name='用户',on_delete=models.CASCADE)
    time = models.DateTimeField(verbose_name='时间',auto_now_add=True)
    def __str__(self):
        return str(self.dmentid)
    class Meta:
        verbose_name = '点赞'
        verbose_name_plural = verbose_name
class Orders(models.Model):
    """Purchase order (goods or pets).

    NOTE(review): the 'oder*' field names look like typos of 'order*';
    renaming would require a schema migration, so they are kept as-is.
    """
    oderid=models.CharField(max_length=30,verbose_name='订单号')
    oderitem=models.TextField(verbose_name='订单详情')
    time = models.DateTimeField(verbose_name='时间', auto_now_add=True)
    userid = models.ForeignKey(Users,verbose_name='用户',on_delete=models.CASCADE)
    useradress=models.CharField(verbose_name='收货地址',max_length=35)
    phone = models.CharField(verbose_name='收货电话',max_length=11)
    totalprice=models.FloatField(verbose_name='总价',default=0)
    type=models.CharField(max_length=6, verbose_name='种类', choices=(('商品', '商品'), ('宠物', '宠物')), default='商品')
    oders = models.CharField(max_length=6, verbose_name='状态', choices=(('未发货', '未发货'), ('已发货', '已发货')), default='未发货')
    def __str__(self):
        return self.oderid
    class Meta:
        verbose_name = '订单'
        verbose_name_plural = verbose_name
class Follow(models.Model):
    """Follow edge: `usertof` follows the user whose id is in `usergetfid`."""
    usertof = models.ForeignKey(Users,verbose_name='关注用户',on_delete=models.CASCADE)
    # NOTE(review): stored as a plain CharField rather than a ForeignKey,
    # so there is no referential integrity on the followed user.
    usergetfid=models.CharField(max_length=20, verbose_name='被关注用户id')
    def __str__(self):
        return str(self.usertof)
    class Meta:
        verbose_name = '关注'
        verbose_name_plural = verbose_name
class Comment(models.Model):
    """Comment left by a user on a feed item."""
    cid= models.AutoField(primary_key=True)
    user=models.ForeignKey(Users,verbose_name='用户',on_delete=models.CASCADE)
    dment=models.ForeignKey(Developments,verbose_name='动态',on_delete=models.CASCADE)
    ctext = models.TextField(verbose_name='评论详情')
    create_time = models.DateTimeField(verbose_name='创建时间', auto_now_add=True)
    def __str__(self):
        return str(self.cid)
    class Meta:
        verbose_name = '评论'
        verbose_name_plural = verbose_name
| [
"taozhuodong@gmail.com"
] | taozhuodong@gmail.com |
4f3327b8280fae9a290b656015cdb0b39a93b10e | dd869217296fcea9762d3c6c7928ecbe4e3d43d6 | /FT_CARSS/python/tag_layer.py | 425dfa7b00c12f2cb019617497797918686ec2b7 | [] | no_license | iljoobaek/FT_DNN_Checkpointing | e098c3617237b93a7496d7a61a3b65665e44c81e | cd8fedfac3256bde7a47fcead9836d0ec153e88f | refs/heads/master | 2023-01-29T19:13:52.400401 | 2020-12-05T03:52:09 | 2020-12-05T03:52:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,387 | py | import os
import sys
import functools
from ctypes import cdll, c_uint, c_int, c_void_p, c_char_p, c_ulonglong, c_double, c_longlong, c_bool
libc = cdll.LoadLibrary('libc.so.6')  # raw libc handle, used only for the gettid syscall
libpytag = cdll.LoadLibrary("libcarss.so")  # CARSS tagging library (C side)
# Modify the res and argtypes of *MetaJob interface
libpytag.CreateMetaJob.restype = c_void_p
libpytag.CreateMetaJob.argtypes = [c_uint, c_char_p,
                                   c_ulonglong, c_ulonglong, c_ulonglong,
                                   c_longlong, c_double, c_longlong, c_longlong,
                                   c_uint]
libpytag.DestroyMetaJob.restype = None
libpytag.DestroyMetaJob.argtypes = [c_void_p]
# Modify the res and argtypes of the TagState_* interface
libpytag.CreateTagStateObj.restype = c_void_p
libpytag.CreateTagStateObj.argtypes = [c_void_p]
libpytag.DestroyTagStateObj.restype = None
libpytag.DestroyTagStateObj.argtypes = [c_void_p]
libpytag.TagState_acquire_gpu.restype = c_int
libpytag.TagState_acquire_gpu.argtypes = [c_void_p, c_int, c_longlong,
                                          c_bool, c_bool]
libpytag.TagState_release_gpu.restype = c_int
libpytag.TagState_release_gpu.argtypes = [c_void_p, c_int]
libpytag.TagState_get_wc_exec_time_for_tid.restype = c_longlong
libpytag.TagState_get_wc_exec_time_for_tid.argtypes = [c_void_p, c_int]
libpytag.TagState_get_max_wc_exec_time.restype = c_longlong
libpytag.TagState_get_max_wc_exec_time.argtypes = [c_void_p]
libpytag.TagState_get_required_mem_for_tid.restype = c_ulonglong
libpytag.TagState_get_required_mem_for_tid.argtypes = [c_void_p, c_int]
libpytag.TagState_get_best_exec_time_for_tid.restype = c_longlong
libpytag.TagState_get_best_exec_time_for_tid.argtypes = [c_void_p, c_int]
libpytag.TagState_get_worst_exec_time_for_tid.restype = c_longlong
libpytag.TagState_get_worst_exec_time_for_tid.argtypes = [c_void_p, c_int]
libpytag.TagState_get_last_exec_time_for_tid.restype = c_longlong
libpytag.TagState_get_last_exec_time_for_tid.argtypes = [c_void_p, c_int]
libpytag.TagState_get_worst_last_exec_time.restype = c_longlong
libpytag.TagState_get_worst_last_exec_time.argtypes = [c_void_p]
libpytag.TagState_get_avg_exec_time_for_tid.restype = c_double
libpytag.TagState_get_avg_exec_time_for_tid.argtypes = [c_void_p, c_int]
libpytag.TagState_get_overall_best_exec_time.restype = c_longlong
libpytag.TagState_get_overall_best_exec_time.argtypes = [c_void_p]
libpytag.TagState_get_overall_worst_exec_time.restype = c_longlong
libpytag.TagState_get_overall_worst_exec_time.argtypes = [c_void_p]
libpytag.TagState_get_overall_avg_exec_time.restype = c_double
libpytag.TagState_get_overall_avg_exec_time.argtypes = [c_void_p]
libpytag.TagState_print_exec_stats.restype = None
libpytag.TagState_print_exec_stats.argtypes = [c_void_p]
def gettid():
    """Return the calling thread's Linux TID via the gettid syscall."""
    # 186 is SYS_gettid on x86-64 Linux.
    return libc.syscall(186)
class MetaJobStruct(object):
    """Owns a C-side MetaJob handle and frees it on garbage collection."""

    def __init__(self, tid, job_name,
                 lpm=0, apm=0, wpm=0,
                 let=0, aet=0.0, wet=0, bet=0,
                 run_count=0):
        # The C interface expects a bytes name, so encode the str first.
        encoded_name = c_char_p(job_name.encode('utf-8'))
        handle = libpytag.CreateMetaJob(tid, encoded_name,
                                        lpm, apm, wpm,
                                        let, aet, wet, bet,
                                        run_count)
        self.mj = c_void_p(handle)

    def __del__(self):
        libpytag.DestroyMetaJob(self.mj)
class TagStateStruct(object):
    """Wrapper over the C TagState object.

    Owns the TagState's lifetime and exposes the GPU acquire/release calls
    plus the per-thread / overall execution-time statistics queries.
    """
    def __init__(self, init_meta_job_struct):
        self.ts = c_void_p(libpytag.CreateTagStateObj(init_meta_job_struct.mj))
        # Must hold a pointer to mj so the memory isn't free'd!
        self.mj = init_meta_job_struct
    def __del__(self):
        libpytag.DestroyTagStateObj(self.ts)
        # Removes reference to self.mj, so it can garbage collected now
    def acquire_gpu(self, tid, slacktime, noslack_flag, shareable_flag):
        # Returns the C call's int result; callers treat < 0 as failure.
        tid = c_int(tid)
        slacktime = c_longlong(slacktime)
        noslack_flag = c_bool(noslack_flag)
        shareable_flag = c_bool(shareable_flag)
        res = libpytag.TagState_acquire_gpu(self.ts, tid, slacktime, noslack_flag, shareable_flag)
        return c_int(res).value
    def release_gpu(self, tid):
        tid = c_int(tid)
        return c_int(libpytag.TagState_release_gpu(self.ts, tid)).value
    def get_wc_exec_time_for_tid(self, tid):
        tid = c_int(tid)
        return c_longlong(libpytag.TagState_get_wc_exec_time_for_tid(self.ts, tid)).value
    def get_max_wc_exec_time(self):
        return c_longlong(libpytag.TagState_get_max_wc_exec_time(self.ts)).value
    def get_required_mem_for_tid(self, tid):
        tid = c_int(tid)
        return c_ulonglong(libpytag.TagState_get_required_mem_for_tid(self.ts, tid)).value
    def get_best_exec_time_for_tid(self, tid):
        tid = c_int(tid)
        return c_longlong(libpytag.TagState_get_best_exec_time_for_tid(self.ts, tid)).value
    def get_worst_exec_time_for_tid(self, tid):
        tid = c_int(tid)
        return c_longlong(libpytag.TagState_get_worst_exec_time_for_tid(self.ts, tid)).value
    def get_last_exec_time_for_tid(self, tid):
        tid = c_int(tid)
        return c_longlong(libpytag.TagState_get_last_exec_time_for_tid(self.ts, tid)).value
    def get_worst_last_exec_time(self):
        return c_longlong(libpytag.TagState_get_worst_last_exec_time(self.ts)).value
    def get_avg_exec_time_for_tid(self, tid):
        tid = c_int(tid)
        return c_double(libpytag.TagState_get_avg_exec_time_for_tid(self.ts, tid)).value
    def get_overall_best_exec_time(self):
        return c_longlong(libpytag.TagState_get_overall_best_exec_time(self.ts)).value
    def get_overall_worst_exec_time(self):
        return c_longlong(libpytag.TagState_get_overall_worst_exec_time(self.ts)).value
    def get_overall_avg_exec_time(self):
        return c_double(libpytag.TagState_get_overall_avg_exec_time(self.ts)).value
    def print_exec_stats(self):
        libpytag.TagState_print_exec_stats(self.ts)
def excl_tag_fn(fn, name):
    """
    Wraps a function (with given name for job) in simple, exclusive tagging:
    noslack_flag will always be set
    shareable_flag will always be unset
    """
    noslack_flag = True
    shareable_flag = False
    slacktime = 0  # plain int (was the Py2-only literal 0L); same value for c_longlong
    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        tid = gettid()
        if wrapper.ts.acquire_gpu(tid, slacktime,
                                  noslack_flag, shareable_flag) < 0:
            print("Aborting job, couldn't acquire gpu!")
            raise Exception("Couldn't acquire gpu! Job too big!")
        # Release the GPU even when the wrapped function raises; otherwise an
        # exception would leak the exclusive GPU hold for this thread.
        try:
            res = wrapper.fn(*args, **kwargs)
        finally:
            wrapper.ts.release_gpu(tid)
        return res
    # One MetaJob/TagState pair per wrapped function collects its exec stats.
    mjs = MetaJobStruct(gettid(), name, 0, 0, 0, 0, 0, 0, 0)
    ts = TagStateStruct(mjs)  # ts will hold a ref to mjs
    wrapper.ts = ts
    wrapper.fn = fn
    print("Installed tagging routine for fn (name: %s): %s" % (name, fn.__name__))
    return wrapper
def tag_tf_layer(layer_obj):
    """Disabled TF layer tagger: raises immediately; the body below is unreachable."""
    raise NotImplementedError()
    # NOTE(review): dead code from here on (kept for reference).
    import tensorflow as tf
    import types
    # Manually decorate the layer_obj's __call__ function with tagging routine
    assert(isinstance(layer_obj, tf.keras.layers.Layer))
    print("Wrapping layer (%s) obj's __call__ fn with tagging!" % layer_obj.name)
    wrapped_call = excl_tag_fn(layer_obj.__call__, layer_obj.__name__ + "__call__")
    layer_obj.__call__ = types.MethodType(wrapped_call, layer_obj)
    return layer_obj
def tag_all_tf_layers(enable=True):
    """Disabled: raises immediately; everything after the raise is unreachable."""
    # Dynamically tag all tf layers by overriding Keras/TF-slim's base
    # Layer object's __init__ function. Then, manually decorate the
    # object's __call__ (aliased by obj's 'apply()' method using excl_tag_fn wrapper
    raise NotImplementedError()
    if enable:
        import tensorflow as tf
        TFBaseLayer = tf.keras.layers.Layer
        orig_init = TFBaseLayer.__init__
        def override_init(self, *args, **kwargs):
            orig_init(self, *args, **kwargs)
            print("Manually decorating layer (%s) __call__ with tag library!" % self.name)
            self = tag_tf_layer(self)
        TFBaseLayer.__init__ = override_init
        return True
    else:
        return False
def tag_pytorch_nn_layer(pt_module_obj):
    """
    Monkey-patching function called within __init__
    to every object of (inherited) type torch.nn.Module
    Installs tagging routine to pt_module_obj around 'forward()' fn
    only if obj is a 'leaf' Module class, i.e. it has no children Modules.
    Returns None
    Throws assertion exception if obj is not of type torch.nn.Module
    """
    import torch as pt
    import types
    # Manually decorate the layer_obj's 'forward()' function with tagging routine
    assert(isinstance(pt_module_obj, pt.nn.Module))
    # Only tag 'leaf' Modules
    n_children = sum(1 for _ in pt_module_obj.children())
    if n_children > 0:
        # Function is no-op for non-leaf Modules
        return
    elif hasattr(pt_module_obj, "__tagged"):
        # Already wrapped
        print("Already wrapped Pytorch module (%s) obj's forward fn with tagging!" % pt_module_obj.__class__)
        return
    else:
        print("Wrapping Pytorch module (%s) obj's forward fn with tagging!" % pt_module_obj.__class__)
        wrapped_call = excl_tag_fn(pt_module_obj.forward, str(pt_module_obj.__class__) +".forward")
        #pt_module_obj.forward = types.MethodType(wrapped_call, pt_module_obj)
        # NOTE(review): forward is replaced with a plain function attribute,
        # not rebound as a method (the MethodType variant above is disabled).
        pt_module_obj.forward = wrapped_call
        # Mark this object as tagged
        pt_module_obj.__tagged = True
        return
def tag_all_pt_submodules(cls, enable=True):
    """Patch cls.__init__ so every constructed instance auto-tags its leaf submodules."""
    # Dynamically tag all Pytorch layers by overriding nn's base
    # Module subclass' __init__ function. Then, manually decorate the
    # object's forward (called within __call__) method using excl_tag_fn wrapper
    if enable:
        orig_init = cls.__init__
        def override_init(self, *args, **kwargs):
            res = orig_init(self, *args, **kwargs)
            # Monkey-patch forward after original __init__
            self.apply(tag_pytorch_nn_layer)
            return res
        cls.__init__ = override_init
        return True
    else:
        return False
def tag_pytorch_nn_functions_list(fn_names, enable=True):
    """Replace each named torch.nn.functional op with a tagged wrapper."""
    # Dynamically tag all designated Pytorch functional operations
    # So that all forward passes through layers tag these designated laye
    # operations
    # i.e. ["conv2d", "batch_norm", "avg_pool2d", "linear", "relu"]
    if enable:
        import importlib
        torch_fn_module = importlib.import_module("torch.nn.functional")
        for fn_name in fn_names:
            orig_fn = getattr(torch_fn_module, fn_name)
            tagged_fn = excl_tag_fn(orig_fn, fn_name)
            setattr(torch_fn_module, fn_name, tagged_fn)
        print("Tagged torch.nn.functional." + str(fn_names) + " successfully")
        return True
    else:
        return False
if __name__ == "__main__":
    # Testing
    # NOTE: this module targets Python 2 -- note the print *statement* below.
    print("Testing TagState from python")
    def work(msg):
        print "Work: " + msg
    work = excl_tag_fn(work, "pywork1")
    work("Hello world!")
    work("Hello world!")
    work("Hello world!")
    work("Hello world!")
    work("Hello world!")
    # The wrapped function has a TagState object of its own, which collects
    # the execution stats and can print these in a summary
    print ("Best exec time:", work.ts.get_overall_best_exec_time())
    work.ts.print_exec_stats()
| [
"souravp@andrew.cmu.edu"
] | souravp@andrew.cmu.edu |
dd49338f6077fbdc6af2c2563c7902feaa268e11 | 40f82989a290ba2f6df6fa286d2588cd1e096c8e | /class/lab5.py | 6d7a8e37633a7983e5db9802a5b0f0a0fbec5b9d | [] | no_license | euntae-bae/study-python3 | f027ab416377f94f62f5589c6e4349580ee615c7 | cdb624a20fc3877b6461e3b6f636d7194b9e7168 | refs/heads/main | 2023-06-02T14:59:19.950936 | 2021-06-24T06:23:49 | 2021-06-24T06:23:49 | 378,348,216 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 968 | py | # 예제5: 참조 카운트
class MyClass:
    """Demo class that reports construction and destruction to stdout."""

    def __init__(self, name, value):
        self.name = name
        self.value = value
        print("{0}.__init__()".format(name))

    def __del__(self):
        print("{0}.__del__()".format(self.name))

    def printInfo(self):
        # Same output shape as the original: "<name>.value: <value>"
        print("{0}.value:".format(self.name), self.value)
obj1 = MyClass('obj1', 100)
obj2 = MyClass('obj2', 200)
obj2cp = obj2 # copies the reference, not the object: obj2 and obj2cp point at the same instance
print()
obj1.printInfo() # obj1.value: 100
obj2.printInfo() # obj2.value: 200
obj2cp.printInfo() # obj2.value: 200
print()
obj2.value = 10000
obj2.printInfo() # obj2.value: 10000
obj2cp.printInfo() # obj2.value: 10000 (same object as obj2)
print()
del obj1 # obj1's destructor runs here (refcount drops to zero)
del obj2 # only decrements the refcount by 1; obj2cp still holds the object
print()
print("end of program")
# obj2cp's destructor runs at interpreter shutdown, after "end of program" prints
# end of program이 호출된 후 프로그램이 종료할 때 obj2cp의 소멸자 호출 | [
"euntae471@gmail.com"
] | euntae471@gmail.com |
ba3b045b4a4d1d1e79c2f9ae180a892d07e88351 | 41f417bcc9773b7a88e3285a95a1de8115fc3721 | /ryan/hw2/hw2.py | f26c008ec73887154c854f2625e23aae79f27867 | [] | no_license | ExtraOOmegaPPanDDa/Data-Mining-2018 | bf16336d3978bad1e218ea3455bc164a828f2635 | 60d02e296d38f7ebe6d9cb69597945a276722a09 | refs/heads/master | 2021-04-09T14:18:32.785678 | 2018-06-13T01:56:28 | 2018-06-13T01:56:28 | 125,489,821 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,574 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed May 9 16:26:09 2018
@author: HSIN
"""
import time
import numpy as np
import openpyxl
import string
import pickle
from collections import Counter
import pyfpgrowth
#from pymining import itemmining, assocrules, perftesting
import openpyxl
import sys
import jieba
stime = time.time()
filter_words = set()
f = open('filter_words.txt')
for line in f:
filter_words.add(line.rstrip())
f.close()
punct_chs = string.punctuation + ",;。、:「」『』《》()【】!?%12345678900123456789*-/."
translator = str.maketrans(punct_chs, ' ' * len(punct_chs))
def term_valid_check(ustr):
    """Return True when *ustr* contains no space and no digit characters."""
    return not any(ch == ' ' or ch.isdigit() for ch in ustr)
def short_eng_check(term):
    """Return True when *term* has at most 2 chars and all are ASCII letters."""
    if len(term) > 2:
        return False
    # \u0041-\u005a is A-Z, \u0061-\u007a is a-z (same ranges as the original).
    return all(('A' <= ch <= 'Z') or ('a' <= ch <= 'z') for ch in term)
# Pass 1: scan the spreadsheet, build character n-gram term-frequency (tf)
# and document-frequency (df) counters over the keyword-selected articles.
load_path = './dataset.xlsx'
wb = openpyxl.load_workbook(load_path)
ws = wb['all']
ws_max_row = ws.max_row
ws_max_col = ws.max_column
n_grams_n = [2,3,4,5,6,7]
keywords = ['鴻海', 'Apple', '選舉']
stockwords = ['台股','個股','類股','收盤','上漲','下跌','漲幅']
N_docs = 0
tf_counter = Counter()
df_counter = Counter()
the_row_idx2the_tf_counter = {}
memory_release_step = 1500
tf_thresh_min = 1
tf_thresh_max = 30
to_run = ws_max_row
for i in range(to_run):
    the_row_idx = i + 1
    if the_row_idx == 1:
        continue
    # Periodically prune rare terms so the counters don't blow up memory;
    # the pruning threshold ramps linearly from tf_thresh_min to tf_thresh_max.
    if the_row_idx % memory_release_step == 0:
        print('\n')
        print('----------------------------------')
        print('Time Taken:', time.time() - stime)
        print('Release Memory...')
        origin_size = len(df_counter)
        tf_thresh = int(tf_thresh_min + ((tf_thresh_max - tf_thresh_min)/to_run) * the_row_idx)
        print('TF Thresh:', tf_thresh)
        tf_counter = Counter({k:tf_counter[k] for k in tf_counter if tf_counter[k] > tf_thresh})
        df_counter = tf_counter & df_counter
        adjusted_size = len(df_counter)
        print('Origin Size', origin_size)
        print('Adjusted Size', adjusted_size)
        print('----------------------------------')
        print('\n')
    the_title = ws['D' + str(the_row_idx)].value
    the_content = ws['E' + str(the_row_idx)].value
    # Skip stock-market articles and weather-bureau articles.
    stock_check = False
    for stockword in stockwords:
        if stockword in the_title or stockword in the_content:
            stock_check = True
            break
    if stock_check:
        continue
    if '中央氣象局' in the_title:
        continue
    the_doc_text = the_title + ' ' + the_content
    the_doc_text = the_doc_text.translate(translator)
    # Keep only documents mentioning one of the target keywords.
    selected = False
    for keyword in keywords:
        if keyword in the_doc_text:
            selected = True
            break
    if selected:
        N_docs += 1
    else:
        continue
    the_tf_counter = Counter()
    # NOTE(review): the inner loop variable `i` shadows the outer row loop's
    # `i`; harmless here only because the outer `i` is re-read after the
    # inner loop completes.
    for n in n_grams_n:
        for i in range(len(the_doc_text)-n+1):
            term = the_doc_text[i:i+n]
            if term_valid_check(term):
                the_tf_counter[term] += 1
    for term in the_tf_counter:
        tf_counter[term] += the_tf_counter[term]
        df_counter[term] += 1
    the_row_idx2the_tf_counter[the_row_idx] = the_tf_counter
print('\n')
print('----------------------------------')
print('Time Taken:', time.time() - stime)
print('Release Memory...')
origin_size = len(tf_counter)
tf_thresh = tf_thresh_max
# (the duplicated `tf_counter =` below is a harmless typo in the original)
tf_counter = tf_counter = Counter({k:tf_counter[k] for k in tf_counter if tf_counter[k] > tf_thresh})
df_counter = tf_counter & df_counter
adjusted_size = len(tf_counter)
print('Origin Size', origin_size)
print('Adjusted Size', adjusted_size)
print('----------------------------------')
print('\n')
print('\n')
print('----------------------------------')
print('Time Taken:', time.time() - stime)
print('SE Score Filtering...')
# Significance-estimation filtering: drop sub-terms `a`/`b` whose counts are
# dominated by their super-term `c` (so only the longer collocation survives).
origin_size = len(tf_counter)
term_list = sorted(tf_counter, key = len, reverse = True)
del_term_set = set()
se_thresh1 = 0.6
se_thresh2 = 0.6
eps = 1e-5
for term in term_list:
    if short_eng_check(term):
        del_term_set.add(term)
    if len(term) == max(n_grams_n):
        del_term_set.add(term)
    if len(term) > 2:
        c = term
        a = term[:-1]
        b = term[1:]
        se_score = tf_counter[c]/(tf_counter[a] + tf_counter[b] - tf_counter[c] + eps)
        if se_score > se_thresh1:
            del_term_set.add(a)
            del_term_set.add(b)
        # NOTE(review): tf_counter[a] / tf_counter[b] can be 0 after the
        # pruning passes above, which would raise ZeroDivisionError here --
        # TODO confirm against real data or add `+ eps` to these denominators.
        if tf_counter[c]/tf_counter[a] > se_thresh2:
            del_term_set.add(a)
        if tf_counter[c]/tf_counter[b] > se_thresh2:
            del_term_set.add(b)
for term in del_term_set:
    del tf_counter[term]
adjusted_size = len(tf_counter)
df_counter = tf_counter & df_counter
print('Origin Size', origin_size)
print('Adjusted Size', adjusted_size)
print('----------------------------------')
print('\n')
# Pass 2: feed the surviving vocabulary to jieba, re-segment every selected
# document into a term set, then mine frequent patterns / association rules
# and cluster the co-occurrence matrix.
for term in tf_counter:
    jieba.add_word(term, tf_counter[term] * 100000)
the_term_sets = []
for i in range(ws_max_row):
    the_row_idx = i + 1
    if the_row_idx == 1:
        continue
    the_title = ws['D' + str(the_row_idx)].value
    the_content = ws['E' + str(the_row_idx)].value
    stock_check = False
    for stockword in stockwords:
        if stockword in the_title or stockword in the_content:
            stock_check = True
            break
    if stock_check:
        continue
    if '中央氣象局' in the_title:
        continue
    the_doc_text = ws['D' + str(the_row_idx)].value + ' ' + ws['E' + str(the_row_idx)].value
    the_doc_text = the_doc_text.translate(translator)
    selected = False
    for keyword in keywords:
        if keyword in the_doc_text:
            selected = True
            break
    if not selected:
        continue
    the_seg_list = jieba.cut(the_doc_text)
    the_seg_list = list(filter(lambda a: a != ' ', the_seg_list))
    # Keep only known vocabulary terms that are not stop words.
    the_term_set = set()
    for the_term in the_seg_list:
        if the_term in tf_counter and the_term not in filter_words:
            the_term_set.add(the_term)
    the_term_sets.append(list(the_term_set))
print('frequent pattern mining...')
the_input = the_term_sets[:]
sup_min = 50
conf_min = 0.6
print('count', len(the_input))
print('sup_min:', sup_min)
print('conf_min:', conf_min)
patterns = pyfpgrowth.find_frequent_patterns(the_input, sup_min)
print('PATTERN')
# Only patterns longer than 3 items are reported (the list is sorted by
# length descending, so `break` skips all the short ones at once).
nodes = set()
for the_key in sorted(patterns, key = len, reverse = True):
    if len(the_key) <= 3:
        break
    the_result = patterns[the_key]
    print(the_key, the_result)
    for item in the_key:
        nodes.add(item)
nodes = sorted(list(nodes))
# Build a support-weighted co-occurrence matrix over the pattern vocabulary,
# then row-normalize it.
adjacency_mat = np.zeros((len(nodes),len(nodes)))
for the_key in sorted(patterns, key = len, reverse = True):
    if len(the_key) <= 3:
        break
    the_result = patterns[the_key]
    for item in the_key:
        for item2 in the_key:
            adjacency_mat[nodes.index(item), nodes.index(item2)] += the_result
for i in range(len(adjacency_mat)):
    adjacency_mat[i,:] /= sum(adjacency_mat[i,:])
print('\n\n')
rules = pyfpgrowth.generate_association_rules(patterns, conf_min)
print('RULE')
for the_key in sorted(rules, key = len, reverse = True):
    the_result = rules[the_key]
    print(the_key, the_result)
#relim_input = itemmining.get_relim_input(the_input)
#report = itemmining.relim(relim_input, min_support = 20)
import matplotlib.pyplot as plt
from sklearn.manifold import TSNE
#from sklearn.decomposition import PCA
#from sklearn.decomposition import NMF
from sklearn.cluster import KMeans
from adjustText import adjust_text
# Allow CJK glyphs and the minus sign in matplotlib output.
plt.rcParams['font.sans-serif']=['SimHei']
plt.rcParams['axes.unicode_minus'] = False
# Cluster the rows of the co-occurrence matrix, then project to 2-D with t-SNE.
kmeans_result = KMeans(n_clusters = 10, random_state = 0).fit(adjacency_mat)
decomposition_model = TSNE(n_components = 2)
np.set_printoptions(suppress = True)
vis_data = decomposition_model.fit_transform(adjacency_mat)
vis_x = vis_data[:,0]
vis_y = vis_data[:,1]
#plt.figure(figsize=(16, 9))
#plt.scatter(vis_x, vis_y, c = kmeans_result.labels_)
#for label, x, y in zip(nodes, vis_x, vis_y):
#    plt.annotate(label, xy=(x, y), xytext=(0, 0), textcoords='offset points')
#
#
#
#plt.savefig('draw.png', dpi = 1000)
#plt.show()
def plot_scatter(adjust, xvalue, yvalue, label, color):
    """Scatter-plot the 2-D embedding, annotate each point, save to draw.png.

    adjust -- when True, run adjustText to de-overlap the point labels.
    """
    plt.clf()
    plt.figure(figsize=(16, 9))
    plt.scatter(xvalue, yvalue, s = 15, c = color, edgecolors = 'None', alpha = 0.5)
    texts = []
    for x, y, s in zip(xvalue, yvalue, label):
        texts.append(plt.text(x, y, s, size=7))
    if adjust:
        adjust_text(texts, arrowprops = dict(arrowstyle = "-", color = 'k', lw = 0.5))
    plt.savefig('draw.png', dpi = 1000)
    plt.show()
plot_scatter(adjust = True, xvalue = vis_x, yvalue = vis_y, label = nodes, color = kmeans_result.labels_)
| [
"r06725048@ntu.edu.tw"
] | r06725048@ntu.edu.tw |
acfc868e3f1ccfbe7ceec5070060748636f7c893 | 3c5c9eb27b3bc704f9bcce5dbc4c02e462a82b66 | /tests/orion/models/test_configuration.py | 62c8b8650b0390cd8f1696706f54fd568a1f767a | [
"Apache-2.0"
] | permissive | Clearcover/prefect | 188ec89263977bd08cd9847ac5542b5b9df643b9 | 355d5de4b29720d9a81c12fd77ef734fc2c1733b | refs/heads/master | 2023-02-05T11:13:35.667093 | 2023-01-15T15:24:14 | 2023-01-15T15:24:14 | 204,846,946 | 1 | 2 | Apache-2.0 | 2023-01-23T13:13:48 | 2019-08-28T04:19:14 | Python | UTF-8 | Python | false | false | 1,729 | py | from prefect.orion import models, schemas
async def test_write_and_read_new_configuration(session):
    """Writing a configuration then reading it back returns the same key/value."""
    new_config = schemas.core.Configuration(key="foo", value={"foo": "bar"})
    await models.configuration.write_configuration(
        session=session, configuration=new_config
    )
    # read the configuration
    read_config = await models.configuration.read_configuration(
        session=session, key="foo"
    )
    assert isinstance(read_config, schemas.core.Configuration)
    assert read_config.key == "foo"
    assert read_config.value == {"foo": "bar"}
    # read it again to ensure any caching behaves
    read_config2 = await models.configuration.read_configuration(
        # passing session=None ensures the session can't be used
        session=None,
        key="foo",
    )
    assert isinstance(read_config2, schemas.core.Configuration)
    assert read_config2.key == "foo"
    assert read_config2.value == {"foo": "bar"}
async def test_write_configuration_multiple_times(session):
    """Re-writing the same key overwrites the stored value (last write wins)."""
    # write a config
    new_config = schemas.core.Configuration(key="foo", value={"foo": "bar"})
    await models.configuration.write_configuration(
        session=session, configuration=new_config
    )
    # write another config for the same key
    new_config2 = schemas.core.Configuration(key="foo", value={"bar": "bar"})
    await models.configuration.write_configuration(
        session=session, configuration=new_config2
    )
    # read the configuration
    read_config = await models.configuration.read_configuration(
        session=session, key="foo"
    )
    assert isinstance(read_config, schemas.core.Configuration)
    assert read_config.key == "foo"
    assert read_config.value == {"bar": "bar"}
| [
"noreply@github.com"
] | Clearcover.noreply@github.com |
afc091a19c14a0e9a7d3d187c8587a7b5de1c186 | b59f1458a746f0ef31702ab718a8f1fb0766e4b5 | /STR_code/test_DeltaBW.py | be5a00ed3e51f1fbef858c2de2d08ecaaaba33a6 | [] | no_license | mperignon/STR_python | 32edd814f7c4c51a9c6f645141c62530f6272af6 | 4637c6e85f32ba06301f8353d660965b99e203e1 | refs/heads/master | 2021-01-21T13:40:52.427392 | 2016-05-24T18:37:19 | 2016-05-24T18:37:19 | 43,464,921 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,469 | py | import unittest
from DeltaBW import DeltaBW
class test_DeltaBW(unittest.TestCase):
    """Regression tests for DeltaBW, pinning mass-balance and geometry outputs."""

    @staticmethod
    def _make_model():
        """Build the DeltaBW configuration shared by every test.

        The two tests previously duplicated this identical 28-keyword
        constructor call inline; it is extracted here so the configuration
        only has to be maintained in one place.
        """
        return DeltaBW(friction = "Chezy",
                       flood_discharge = 6.,
                       flood_intermittency = 1.,
                       bedload_discharge_input = 0.001,
                       grain_size = 0.5,
                       coeff_Chezy = 15.,
                       exponent_load_relation = 2.5,
                       critical_Shields_stress = 0.,
                       foreset_elev__top__init = 3.,
                       foreset_elev__base__init = 0.,
                       bed_slope__init = 0.00025,
                       basin_slope = 0.,
                       domain_length = 10000.,
                       domain_length_max = 500000.,
                       foreset_slope = 0.2,
                       bed_porosity = 0.4,
                       coeff_Manning_ks = 0.3,
                       coeff_bedload = 7.2,
                       coeff_Manning = 8.1,
                       dx = 500.,
                       dt__days = 0.182625,
                       num_iterations = 10000,
                       print_step = 5000,
                       density_sediment = 2650.,
                       density_water = 1000.,
                       basin_water_surface_elevation = 8.5,
                       verbose = False,
                       save_output = False)

    def test_mass_balance(self):
        """Sediment volumes and mass-balance error after run() + finalize()."""
        f = self._make_model()
        f.run()
        f.finalize()
        self.assertEqual(f.sed_volume__init, 26338.5)
        self.assertEqual(f.sed_volume__final, 190680.45389578832)
        self.assertEqual(f.sed_volume__feed, 157788.00000000003)
        self.assertEqual(f.mass_balance_error, 0.034371398650908352)

    def test_stats(self):
        """Domain geometry and bed-elevation profile after run()."""
        f = self._make_model()
        f.run()
        self.assertEqual(f.domain_length, 34481.071182531079)
        self.assertEqual(f.bed_length, 34500.977813788544)
        self.assertEqual(f.eta_upstream, 15.154555352651583)
        self.assertEqual(f.eta[-2], 3.9813262514556356)
        self.assertEqual(f.eta[-1], 0.0)
if __name__ == '__main__':
unittest.main() | [
"mariela@mac1-16-220-dhcp.int.colorado.edu"
] | mariela@mac1-16-220-dhcp.int.colorado.edu |
787c2274a31c79dfdceb6628ae8aab2f7590a368 | 630e5fa4fec4cee4b6936eec74a726550406c11f | /test/functional/rpc_bip38.py | 93dfb7a50712e55599f4be7defa4821459115c3a | [
"MIT"
] | permissive | crypTuron/PengolinCoin-Core | 4d815d25de927d42dc890379d15738ee728c525e | 3d6c66dd930110075ff44ee6f5a4364c533becd7 | refs/heads/master | 2022-11-24T21:17:56.271853 | 2020-07-23T13:49:52 | 2020-07-23T13:49:52 | 282,408,670 | 0 | 0 | MIT | 2020-07-25T09:04:22 | 2020-07-25T09:04:21 | null | UTF-8 | Python | false | false | 1,016 | py | #!/usr/bin/env python3
# Copyright (c) 2018 The PENGOLINCOIN developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test RPC commands for BIP38 encrypting and decrypting addresses."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
class Bip38Test(BitcoinTestFramework):
    """Round-trip test: bip38encrypt a key on node0, bip38decrypt it on node1."""
    def set_test_params(self):
        self.setup_clean_chain = True
        self.num_nodes = 2
    def run_test(self):
        password = 'test'
        address = self.nodes[0].getnewaddress()
        # NOTE(review): privkey is fetched but never used below.
        privkey = self.nodes[0].dumpprivkey(address)
        self.log.info('encrypt address %s' % (address))
        bip38key = self.nodes[0].bip38encrypt(address, password)['Encrypted Key']
        self.log.info('decrypt bip38 key %s' % (bip38key))
        # Decrypting on a different node must recover the original address.
        assert_equal(self.nodes[1].bip38decrypt(bip38key, password)['Address'], address)
if __name__ == '__main__':
Bip38Test().main()
| [
"alonewolf2ksk@gmail.com"
] | alonewolf2ksk@gmail.com |
cedff5daa81fe70ede0a08826900f6f4efb6fb31 | 096a4889f50e24b5eee5531c3b5b0959f26f9957 | /webprintapplicaion/webprint/manage.py | 2d7273f37919eda56e3a12f7b7b69a2549f37908 | [] | no_license | arunkumarpadmaneri/WebSilentPrintExploreKnwlodege | 5308b1984b8b806daf603ca1e9f51193bac431a7 | e5c36922ac6e57d8831951662203b26dfad79076 | refs/heads/master | 2022-07-21T16:15:13.527976 | 2020-05-21T10:42:01 | 2020-05-21T10:42:01 | 265,821,292 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 540 | py | #!/usr/bin/env python
import os
import sys
if __name__ == '__main__':
    # Standard Django management entry point: point Django at the settings
    # module, then dispatch the command-line arguments.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'webprint.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)
| [
"arunkumar1311195@gmail.com"
] | arunkumar1311195@gmail.com |
eac488008052eb823f01a6d288781c2d364ce92c | 7adbb5e76fe9f4e6850e79bda7951ebd6cbbc6be | /rowapp/rowapp/wsgi.py | 99637c55952d633561e7ef33159d8d540ee4d4a3 | [] | no_license | jasminedevv/Rowan-s-Entries | 6a640dd7fb9d0163f6a3442aabf34a27ab2b03c1 | fae5b5cbecf92043d99da9c89d320ab44c624492 | refs/heads/master | 2021-09-07T11:07:59.847862 | 2018-02-22T02:38:38 | 2018-02-22T02:38:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 390 | py | """
WSGI config for rowapp project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at the project settings before constructing the WSGI callable.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "rowapp.settings")
application = get_wsgi_application()
| [
"jasmine.yhumbert@gmail.com"
] | jasmine.yhumbert@gmail.com |
023ec9ec440834589bf01a74d48555f830421265 | 0c514ef069ee7b5308837305a899993dbc1f9ff9 | /more/more_lambda.py | 0774187c1c15357f3eaaceb2cc0fcc2ca2ff4fb7 | [] | no_license | jiaojinda/Python | fe4d07311c77a2e1600020f13dea1e086b011906 | 2eb2cc2d4fc74a2e38b8f0129392593a9b1ee274 | refs/heads/master | 2022-10-25T08:22:26.200153 | 2018-03-05T02:49:13 | 2018-03-05T02:49:13 | 123,852,769 | 0 | 1 | null | 2022-10-05T23:49:55 | 2018-03-05T02:21:52 | Python | UTF-8 | Python | false | false | 106 | py | points = [{'x':2, 'y':3},
{'x':4, 'y':1}]
points. sort( key=lambda i: i['y'] )
print( points)
| [
"jiaojinda@163.COM"
] | jiaojinda@163.COM |
2c62e3a116b8f3154284950f9d5e0579e5a364d3 | e346597620d79df3cc437a5ddc32b8b6027513fa | /video_yolov5_trt.py | 9271dce45fa4dbaffd54ef53f59d61ea5d8e836c | [] | no_license | RocaPiedra/tensorrt-yolov5 | 12790e595a210f7f3199fb25b8350a6f08cc57ee | 8a1ae52020a5da6c778fb774b484d6b5ffc8d959 | refs/heads/main | 2023-07-19T04:32:46.466565 | 2021-09-02T12:27:18 | 2021-09-02T12:27:18 | 402,041,176 | 0 | 1 | null | 2021-09-01T12:13:43 | 2021-09-01T11:48:25 | null | UTF-8 | Python | false | false | 14,310 | py | """
An example that uses TensorRT's Python api to make inferences.
"""
import ctypes
import os
import random
import sys
import threading
import time
import cv2
import numpy as np
import pycuda.autoinit
import pycuda.driver as cuda
import tensorrt as trt
import torch
import torchvision
MODEL_SIZE = 's'
INPUT_W = 608
INPUT_H = 608
CONF_THRESH = 0.7
IOU_THRESHOLD = 0.4
FPS = 0
def plot_one_box(x, img, color=None, label=None, line_thickness=None):
    """
    description: Draw one bounding box (and an optional text label) on an image;
                 adapted from the YOLOv5 project.
    param:
        x: a box likes [x1,y1,x2,y2]
        img: a opencv image object, modified in place
        color: BGR color to draw rectangle, such as (0,255,0); random when omitted
        label: str drawn above the box
        line_thickness: int; derived from the image size when omitted
    return:
        no return
    """
    # Scale the line width with the image size unless the caller fixed it.
    if line_thickness:
        tl = line_thickness
    else:
        tl = round(0.002 * (img.shape[0] + img.shape[1]) / 2) + 1
    if not color:
        color = [random.randint(0, 255) for _ in range(3)]
    top_left = (int(x[0]), int(x[1]))
    bottom_right = (int(x[2]), int(x[3]))
    cv2.rectangle(img, top_left, bottom_right, color, thickness=tl, lineType=cv2.LINE_AA)
    if label:
        tf = max(tl - 1, 1)  # font thickness
        text_w, text_h = cv2.getTextSize(label, 0, fontScale=tl / 3, thickness=tf)[0]
        label_corner = (top_left[0] + text_w, top_left[1] - text_h - 3)
        # Filled background rectangle so the text stays readable.
        cv2.rectangle(img, top_left, label_corner, color, -1, cv2.LINE_AA)
        cv2.putText(
            img,
            label,
            (top_left[0], top_left[1] - 2),
            0,
            tl / 3,
            [225, 255, 255],
            thickness=tf,
            lineType=cv2.LINE_AA,
        )
def average_frame_rate(fps_vector):
    """Return the arithmetic mean of the collected per-frame FPS samples."""
    return np.mean(fps_vector)
class YoLov5TRT(object):
    """
    description: A YOLOv5 class that wraps TensorRT ops, preprocess and postprocess ops.
    """
    def __init__(self, engine_file_path):
        # Create a Context on this device,
        self.cfx = cuda.Device(0).make_context()
        stream = cuda.Stream()
        TRT_LOGGER = trt.Logger(trt.Logger.INFO)
        runtime = trt.Runtime(TRT_LOGGER)
        # Deserialize the engine from file
        with open(engine_file_path, "rb") as f:
            engine = runtime.deserialize_cuda_engine(f.read())
        context = engine.create_execution_context()
        # One host buffer + one device buffer is allocated per engine binding.
        host_inputs = []
        cuda_inputs = []
        host_outputs = []
        cuda_outputs = []
        bindings = []
        for binding in engine:
            size = trt.volume(engine.get_binding_shape(binding)) * engine.max_batch_size
            dtype = trt.nptype(engine.get_binding_dtype(binding))
            # Allocate host and device buffers
            host_mem = cuda.pagelocked_empty(size, dtype)
            cuda_mem = cuda.mem_alloc(host_mem.nbytes)
            # Append the device buffer to device bindings.
            bindings.append(int(cuda_mem))
            # Append to the appropriate list.
            if engine.binding_is_input(binding):
                host_inputs.append(host_mem)
                cuda_inputs.append(cuda_mem)
            else:
                host_outputs.append(host_mem)
                cuda_outputs.append(cuda_mem)
        # Store everything needed by infer() on the instance.
        self.stream = stream
        self.context = context
        self.engine = engine
        self.host_inputs = host_inputs
        self.cuda_inputs = cuda_inputs
        self.host_outputs = host_outputs
        self.cuda_outputs = cuda_outputs
        self.bindings = bindings
    def infer(self, image2process, video=False):
        # Run one image through the engine and draw detections on it.
        # When video=False the annotated image is written next to the global
        # `input_image_path`; when video=True it is shown in an OpenCV window.
        global FPS
        # NOTE(review): self is not a threading.Thread, so this __init__ call
        # looks like a leftover from a threaded version — confirm before removing.
        threading.Thread.__init__(self)
        # Make self the active context, pushing it on top of the context stack.
        self.cfx.push()
        # Restore
        stream = self.stream
        context = self.context
        engine = self.engine
        host_inputs = self.host_inputs
        cuda_inputs = self.cuda_inputs
        host_outputs = self.host_outputs
        cuda_outputs = self.cuda_outputs
        bindings = self.bindings
        # Do image preprocess
        input_image, image_raw, origin_h, origin_w = self.preprocess_image(
            image2process
        )
        # Copy input image to host buffer
        np.copyto(host_inputs[0], input_image.ravel())
        # Transfer input data to the GPU.
        cuda.memcpy_htod_async(cuda_inputs[0], host_inputs[0], stream)
        # Run inference.
        context.execute_async(bindings=bindings, stream_handle=stream.handle)
        # Transfer predictions back from the GPU.
        cuda.memcpy_dtoh_async(host_outputs[0], cuda_outputs[0], stream)
        # Synchronize the stream
        stream.synchronize()
        # Remove any context from the top of the context stack, deactivating it.
        self.cfx.pop()
        # Here we use the first row of output in that batch_size = 1
        output = host_outputs[0]
        # Do postprocess
        result_boxes, result_scores, result_classid = self.post_process(
            output, origin_h, origin_w
        )
        # Draw rectangles and labels on the original image
        for i in range(len(result_boxes)):
            box = result_boxes[i]
            plot_one_box(
                box,
                image_raw,
                label="{}:{:.2f}".format(
                    categories[int(result_classid[i])], result_scores[i]
                ),
            )
        # Overlay the most recent FPS value (module-level global set by the caller).
        frames = str(int(round(FPS)))
        # NOTE(review): `frames` is a str, so `frames != 0` is always True;
        # probably meant `frames != '0'` — confirm intent.
        if frames != 0:
            print('number of frames is', frames)
            frame_text = 'FPS:' + frames
            cv2.putText(image_raw,frame_text,(20, 30),0,0.8,[0, 255, 255],thickness=1,lineType=cv2.LINE_AA)
        if not video:
            # NOTE(review): relies on the module-level `input_image_path` set by
            # the __main__ loop; the `image2process` argument path is not reused here.
            parent, filename = os.path.split(input_image_path)
            save_name = os.path.join(parent, "output/output_" + filename)
            # Save image
            cv2.imwrite(save_name, image_raw)
        else:
            try:
                cv2.imshow('Frame',image_raw)
                cv2.waitKey(1)
            except:
                # Headless environments (no display) land here.
                print('opencv cant display a window')
        return image_raw
    def destroy(self):
        # Remove any context from the top of the context stack, deactivating it.
        self.cfx.pop()
    def preprocess_image(self, image_raw):
        """
        description: Convert a BGR image to RGB, letterbox-resize and pad it to
                     the network input size, normalize to [0,1], and
                     transform to NCHW format.
        param:
            image_raw: the original OpenCV (BGR, HWC) image array
        return:
            image: the processed image
            image_raw: the original image
            h: original height
            w: original width
        """
        # print('PREPROCESS: image to process is',np.shape(image2process), video)
        h, w, c = image_raw.shape
        image = cv2.cvtColor(image_raw, cv2.COLOR_BGR2RGB)
        # Calculate width and height and paddings for aspect-preserving resize.
        r_w = INPUT_W / w
        r_h = INPUT_H / h
        if r_h > r_w:
            tw = INPUT_W
            th = int(r_w * h)
            tx1 = tx2 = 0
            ty1 = int((INPUT_H - th) / 2)
            ty2 = INPUT_H - th - ty1
        else:
            tw = int(r_h * w)
            th = INPUT_H
            tx1 = int((INPUT_W - tw) / 2)
            tx2 = INPUT_W - tw - tx1
            ty1 = ty2 = 0
        # Resize the image with long side while maintaining ratio
        image = cv2.resize(image, (tw, th))
        # Pad the short side with (128,128,128)
        image = cv2.copyMakeBorder(
            image, ty1, ty2, tx1, tx2, cv2.BORDER_CONSTANT, (128, 128, 128)
        )
        image = image.astype(np.float32)
        # Normalize to [0,1]
        image /= 255.0
        # HWC to CHW format:
        image = np.transpose(image, [2, 0, 1])
        # CHW to NCHW format
        image = np.expand_dims(image, axis=0)
        # Convert the image to row-major order, also known as "C order":
        image = np.ascontiguousarray(image)
        return image, image_raw, h, w
    def xywh2xyxy(self, origin_h, origin_w, x):
        """
        description: Convert nx4 boxes from [x, y, w, h] to [x1, y1, x2, y2] where xy1=top-left, xy2=bottom-right
        param:
            origin_h: height of original image
            origin_w: width of original image
            x: A boxes tensor, each row is a box [center_x, center_y, w, h]
        return:
            y: A boxes tensor, each row is a box [x1, y1, x2, y2]
        """
        # Works on either a torch tensor or a numpy array.
        y = torch.zeros_like(x) if isinstance(x, torch.Tensor) else np.zeros_like(x)
        r_w = INPUT_W / origin_w
        r_h = INPUT_H / origin_h
        # Undo the letterbox padding applied in preprocess_image, then rescale
        # back to original-image pixel coordinates.
        if r_h > r_w:
            y[:, 0] = x[:, 0] - x[:, 2] / 2
            y[:, 2] = x[:, 0] + x[:, 2] / 2
            y[:, 1] = x[:, 1] - x[:, 3] / 2 - (INPUT_H - r_w * origin_h) / 2
            y[:, 3] = x[:, 1] + x[:, 3] / 2 - (INPUT_H - r_w * origin_h) / 2
            y /= r_w
        else:
            y[:, 0] = x[:, 0] - x[:, 2] / 2 - (INPUT_W - r_h * origin_w) / 2
            y[:, 2] = x[:, 0] + x[:, 2] / 2 - (INPUT_W - r_h * origin_w) / 2
            y[:, 1] = x[:, 1] - x[:, 3] / 2
            y[:, 3] = x[:, 1] + x[:, 3] / 2
            y /= r_h
        return y
    def post_process(self, output, origin_h, origin_w):
        """
        description: postprocess the prediction
        param:
            output: A tensor likes [num_boxes,cx,cy,w,h,conf,cls_id, cx,cy,w,h,conf,cls_id, ...]
            origin_h: height of original image
            origin_w: width of original image
        return:
            result_boxes: finally boxes, a boxes tensor, each row is a box [x1, y1, x2, y2]
            result_scores: finally scores, a tensor, each element is the score correspoing to box
            result_classid: finally classid, a tensor, each element is the classid correspoing to box
        """
        # Get the num of boxes detected
        num = int(output[0])
        # Reshape to a two dimentional ndarray
        pred = np.reshape(output[1:], (-1, 6))[:num, :]
        # to a torch Tensor
        pred = torch.Tensor(pred).cuda()
        # Get the boxes
        boxes = pred[:, :4]
        # Get the scores
        scores = pred[:, 4]
        # Get the classid
        classid = pred[:, 5]
        # Choose those boxes that score > CONF_THRESH
        si = scores > CONF_THRESH
        boxes = boxes[si, :]
        scores = scores[si]
        classid = classid[si]
        # Trandform bbox from [center_x, center_y, w, h] to [x1, y1, x2, y2]
        boxes = self.xywh2xyxy(origin_h, origin_w, boxes)
        # Do nms, then move the surviving detections back to the CPU.
        indices = torchvision.ops.nms(boxes, scores, iou_threshold=IOU_THRESHOLD).cpu()
        result_boxes = boxes[indices, :].cpu()
        result_scores = scores[indices].cpu()
        result_classid = classid[indices].cpu()
        return result_boxes, result_scores, result_classid
class myThread(threading.Thread):
    """Thread that invokes a stored callable with a fixed argument tuple."""

    def __init__(self, func, args):
        # Let Thread set up its own state before we attach the payload.
        super().__init__()
        self.func = func
        self.args = args

    def run(self):
        # Called by start(); forward the saved arguments to the callable.
        self.func(*self.args)
if __name__ == "__main__":
# load custom plugins
PLUGIN_LIBRARY = "build/libmyplugins.so"
ctypes.CDLL(PLUGIN_LIBRARY)
engine_file_path = "build/yolov5{0}.engine".format(MODEL_SIZE)
fps_vector = []
# load coco labels
categories = ["person", "bicycle", "car", "motorcycle", "airplane", "bus", "train", "truck", "boat", "traffic light",
"fire hydrant", "stop sign", "parking meter", "bench", "bird", "cat", "dog", "horse", "sheep", "cow",
"elephant", "bear", "zebra", "giraffe", "backpack", "umbrella", "handbag", "tie", "suitcase", "frisbee",
"skis", "snowboard", "sports ball", "kite", "baseball bat", "baseball glove", "skateboard", "surfboard",
"tennis racket", "bottle", "wine glass", "cup", "fork", "knife", "spoon", "bowl", "banana", "apple",
"sandwich", "orange", "broccoli", "carrot", "hot dog", "pizza", "donut", "cake", "chair", "couch",
"potted plant", "bed", "dining table", "toilet", "tv", "laptop", "mouse", "remote", "keyboard", "cell phone",
"microwave", "oven", "toaster", "sink", "refrigerator", "book", "clock", "vase", "scissors", "teddy bear",
"hair drier", "toothbrush"]
# a YoLov5TRT instance
yolov5_wrapper = YoLov5TRT(engine_file_path)
# from https://github.com/ultralytics/yolov5/tree/master/inference/images
input_image_paths = ["video_2.mp4"]
for input_image_path in input_image_paths:
# create a new thread to do inference
input_path = "test/" + input_image_path
if not input_path.endswith('.mp4'):
input = cv2.imread(input_path)
# print('input print is',np.shape(input))
thread1 = myThread(yolov5_wrapper.infer, [input])
# thread1.start()
# thread1.join()
else:
print('is a video, starting video thread')
# thread1 = myThread(yolov5_wrapper.infer, [input, True])
capture = cv2.VideoCapture(input_path)
print('capture started')
if not(capture.isOpened()):
print('error launching video input')
# break
ret1, frame1 = capture.read()
if ret1:
thread1 = myThread(yolov5_wrapper.infer, [frame1, True])
while capture.isOpened():
# print('capture is opened')
tic = time.perf_counter()
ret, frame = capture.read()
if ret:
# print('passing image to thread to do inference',ret)
# cv2.imshow('input',frame)
# cv2.waitKey(1)
output = yolov5_wrapper.infer(frame, True)
# print('image passed to thread')
else:
print('frame not passed to thread, next input')
break
tac = time.perf_counter()-tic
FPS = 1/tac
fps_vector.append(1/tac)
# print(FPS)
thread1.start()
thread1.join()
meanfps = average_frame_rate(fps_vector)
print('average fps achieved is:',meanfps)
# destroy the instance
yolov5_wrapper.destroy()
| [
"pablo_rocasg97@hotmail.com"
] | pablo_rocasg97@hotmail.com |
3b6c02dbdc9ba90c6adb1970f62502b3217780da | 81e9c017fd29228a6024ec7811304c6806354c96 | /plane.py | 054ca0c91d41f003688016a2a77e63d5dea9e5ed | [] | no_license | gchan5/udacity-linear-algebra-refresher | 736e6895ce29b462d923d29a71173c051a537b9a | fa2f178a692805af7837527764dddc2fb8b0a003 | refs/heads/master | 2020-03-30T15:10:16.004527 | 2018-11-01T09:35:22 | 2018-11-01T09:35:22 | 151,196,364 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,282 | py | from decimal import Decimal, getcontext
from vector import Vector
from line import Line
getcontext().prec = 30
class Plane(object):
    # A 3-D plane in standard form: n1*x_1 + n2*x_2 + n3*x_3 = constant_term.
    NO_NONZERO_ELTS_FOUND_MSG = 'No nonzero elements found'
    def __init__(self, normal_vector=None, constant_term=None):
        """Create a plane from a normal Vector and a constant term.

        A falsy normal defaults to the zero vector; a falsy constant to Decimal(0).
        """
        self.dimension = 3
        if not normal_vector:
            all_zeros = [0]*self.dimension
            normal_vector = Vector(all_zeros)
        self.normal_vector = normal_vector
        if not constant_term:
            constant_term = Decimal(0)
        self.constant_term = constant_term
        self.set_basepoint()
    def set_basepoint(self):
        # Compute one concrete point on the plane by solving for the first
        # coordinate whose normal component is nonzero; None for a zero normal.
        try:
            n = self.normal_vector
            c = self.constant_term
            basepoint_coords = [0]*self.dimension
            initial_index = Plane.first_nonzero_index(n.coordinates)
            initial_coefficient = n.coordinates[initial_index]
            basepoint_coords[initial_index] = c/initial_coefficient
            self.basepoint = Vector(basepoint_coords)
        except Exception as e:
            # Zero normal vector: there is no basepoint to compute.
            if str(e) == Plane.NO_NONZERO_ELTS_FOUND_MSG:
                self.basepoint = None
            else:
                raise e
    def __str__(self):
        """Render the plane as e.g. '2x_1 - 3x_2 + x_3 = 4' (3 decimal places)."""
        num_decimal_places = 3
        def write_coefficient(coefficient, is_initial_term=False):
            # Format sign and magnitude; unit coefficients print without a number.
            coefficient = round(coefficient, num_decimal_places)
            if coefficient % 1 == 0:
                coefficient = int(coefficient)
            output = ''
            if coefficient < 0:
                output += '-'
            if coefficient > 0 and not is_initial_term:
                output += '+'
            if not is_initial_term:
                output += ' '
            if abs(coefficient) != 1:
                output += '{}'.format(abs(coefficient))
            return output
        n = self.normal_vector
        try:
            initial_index = Plane.first_nonzero_index(n.coordinates)
            terms = [write_coefficient(n.coordinates[i], is_initial_term=(i==initial_index)) + 'x_{}'.format(i+1)
                     for i in range(self.dimension) if round(n.coordinates[i], num_decimal_places) != 0]
            output = ' '.join(terms)
        except Exception as e:
            if str(e) == self.NO_NONZERO_ELTS_FOUND_MSG:
                output = '0'
            else:
                raise e
        constant = round(self.constant_term, num_decimal_places)
        if constant % 1 == 0:
            constant = int(constant)
        output += ' = {}'.format(constant)
        return output
    @staticmethod
    def first_nonzero_index(iterable):
        # Index of the first element whose magnitude exceeds the near-zero
        # tolerance; raises when every element is (numerically) zero.
        for k, item in enumerate(iterable):
            if not MyDecimal(item).is_near_zero():
                return k
        raise Exception(Plane.NO_NONZERO_ELTS_FOUND_MSG)
    def is_parallel(self, plane):
        # Planes are parallel exactly when their normal vectors are parallel.
        return self.normal_vector.parallel(plane.normal_vector)
    def __eq__(self, plane):
        """True when the two objects describe the same plane."""
        # Degenerate case: zero normal vectors compare via constant terms only.
        if self.normal_vector.is_zero():
            if not plane.normal_vector.is_zero():
                return False
            else:
                diff = self.constant_term - plane.constant_term
                return MyDecimal(Decimal(diff)).is_near_zero()
        elif plane.normal_vector.is_zero():
            return False
        parallel = self.normal_vector.parallel(plane.normal_vector)
        if parallel:
            # Same plane iff the vector between basepoints lies in the plane,
            # i.e. is orthogonal to the (shared) normal direction.
            connecting_vector = self.basepoint.subtract(plane.basepoint)
            return connecting_vector.orthogonal(self.normal_vector)
        else:
            return False
class MyDecimal(Decimal):
    """Decimal with a tolerance-based test for being (numerically) zero."""

    def is_near_zero(self, eps=1e-10):
        # Magnitudes below eps are treated as zero to absorb round-off noise.
        if abs(self) < eps:
            return True
        return False
if __name__ == '__main__':
    # Smoke test: for three pairs of planes, report whether the planes are
    # the same plane (equality) and whether they are parallel.
    plane1 = Plane(Vector([-0.412, 3.806, 0.728]), -3.46)
    plane2 = Plane(Vector([1.03, -9.515, -1.82]), 8.65)
    print(plane1 == plane2)
    print(plane1.is_parallel(plane2))
    print('--------------------')
    plane1 = Plane(Vector([2.611, 5.528, 0.283]), 4.6)
    plane2 = Plane(Vector([7.715, 8.306, 5.342]), 3.76)
    print(plane1 == plane2)
    # Bug fix: a stray trailing backslash here joined this statement with the
    # following print call, making the whole module a SyntaxError.
    print(plane1.is_parallel(plane2))
    print('--------------------')
    plane1 = Plane(Vector([-7.926, 8.625, -7.212]), -7.952)
    plane2 = Plane(Vector([-2.642, 2.875, -2.404]), 3.76)
    print(plane1 == plane2)
    print(plane1.is_parallel(plane2))
| [
"unusgilbert@gmail.com"
] | unusgilbert@gmail.com |
8c24d77f85ac64da80cb0bd41fae46738e54e830 | b085f79c4dbc07557e4c82f71628f44d1a2a3c55 | /accounts/migrations/0003_auto_20160922_0338.py | 0027cb116306145f49ab57259bd0c0c132400216 | [] | no_license | HugOoOguH/ctlTwynco | 6c1fd856871243cacc5668cd2ff619130d82c460 | 6d88b4a1a35ce585f6ee2df0372aac948a6b2955 | refs/heads/master | 2020-09-27T16:59:36.790096 | 2016-09-22T05:25:04 | 2016-09-22T05:25:04 | 66,982,996 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 596 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-09-22 03:38
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0002_auto_20160920_2010'),
]
operations = [
migrations.RemoveField(
model_name='profile',
name='sexM',
),
migrations.AlterField(
model_name='profile',
name='sexH',
field=models.BooleanField(choices=[(True, 'Man'), (False, 'Woman')], default=True),
),
]
| [
"hugo-ensc@outlook.com"
] | hugo-ensc@outlook.com |
f740ad09fb45a89955eead364d2738e2c5cfd61f | 0e3e194b5b13e97b6d6eb4b5c88bce1c2ab5a977 | /test/firstnuron.py | 0ba66e1bdcf5f1c2d163b938d51d9029bcc290eb | [] | no_license | Sunil1997/MNIST_Nural | 60d8e0dc01e8c190034bf46760a6b0035d135e3e | d9781d2be4d141bfcaa8c966c889901a00fa96b6 | refs/heads/master | 2020-03-27T04:25:47.613983 | 2018-08-24T03:45:51 | 2018-08-24T03:45:51 | 145,938,160 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,218 | py | import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
def add_layer(inputs, in_size, out_size, activation_function=None):
    """Build one fully connected layer and return its output tensor.

    inputs: 2-D tensor of shape (batch, in_size).
    in_size / out_size: number of input and output units.
    activation_function: optional op applied to W*x + b; identity when None.
    """
    with tf.name_scope("layer"):
        with tf.name_scope("WEIGHTS"):
            # Randomly initialised weight matrix, one column per output unit.
            weights = tf.Variable(tf.random_normal([in_size, out_size]), name='w')
        with tf.name_scope("biases"):
            # Small positive bias start (0.1) rather than zeros.
            biases = tf.Variable(tf.zeros([1, out_size]) + 0.1, name='b')
        with tf.name_scope("inputs"):
            linear_out = tf.add(tf.matmul(inputs, weights), biases)
        if activation_function is None:
            return linear_out
        return activation_function(linear_out)
# make up some real data: y = x^2 - 0.5 plus gaussian noise, 300 samples.
x_data = np.linspace(-1,1,300)[:,np.newaxis]
noise = np.random.normal(0,0.05,x_data.shape)
y_data = np.square(x_data) - 0.5 + noise
# plt.scatter(x_data,y_data)
# plt.show()
# define placeholders for inputs to the network (TF1 graph mode)
with tf.name_scope("input"):
    xs = tf.placeholder(tf.float32, [None,1],name='x_input')
    ys = tf.placeholder(tf.float32, [None,1],name='y_input')
# add hidden layer: 1 -> 10 units with ReLU
l1 = add_layer(xs, 1, 10, activation_function=tf.nn.relu)
# add output layer: 10 -> 1 unit, linear
prediction = add_layer(l1, 10, 1, activation_function=None)
# error between prediction and real data (mean squared error)
with tf.name_scope("loss"):
    loss = tf.reduce_mean(tf.reduce_sum(tf.square(ys-prediction), reduction_indices=[1]))
with tf.name_scope("train"):
    train_step = tf.train.GradientDescentOptimizer(0.1).minimize(loss)
# important step: create the session, dump the graph for TensorBoard,
# and initialise all variables before training.
sess = tf.Session()
writer = tf.summary.FileWriter("logs/", sess.graph)
init = tf.initialize_all_variables()
sess.run(init)
# plot the training data once; the fitted curve is redrawn during training.
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
ax.scatter(x_data, y_data)
plt.ion()
plt.show()
for i in range(1000):
    # one full-batch gradient-descent step
    sess.run(train_step,feed_dict={xs:x_data,ys:y_data})
    if i%50 == 0:
        # to see step improvement
        # print(sess.run(loss,feed_dict={xs:x_data,ys:y_data}))
        # remove the previously drawn fit line, if any (first pass has none)
        try:
            ax.lines.remove(lines[0])
        except Exception:
            pass
        prediction_value = sess.run(prediction,feed_dict={xs:x_data})
        # plot the current prediction as a red line
        lines = ax.plot(x_data, prediction_value, 'r-', lw=5)
        plt.pause(1)
| [
"sunil@localhost.localdomain"
] | sunil@localhost.localdomain |
c9a09744c14a4aca18236c5fcb9f65b330800de4 | 4db86e0e887c28aae03368c3eed870e4803ccd2d | /library_website/controllers/controllers.py | d009e5c1d4a412ec927f6118b2265e85f121bd61 | [] | no_license | michaelZhong08/Oodoo-Demo-Code | 604686d4708483ccb0628d95c6addaf9fe7dd0bf | 9563301e8fcd0b847a068ed10c71999103b151c4 | refs/heads/master | 2020-08-13T13:11:54.123813 | 2019-10-14T08:07:00 | 2019-10-14T08:07:00 | 214,973,588 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 818 | py | # -*- coding: utf-8 -*-
from odoo import http
# class LibraryWebsite(http.Controller):
# @http.route('/library_website/library_website/', auth='public')
# def index(self, **kw):
# return "Hello, world"
# @http.route('/library_website/library_website/objects/', auth='public')
# def list(self, **kw):
# return http.request.render('library_website.listing', {
# 'root': '/library_website/library_website',
# 'objects': http.request.env['library_website.library_website'].search([]),
# })
# @http.route('/library_website/library_website/objects/<model("library_website.library_website"):obj>/', auth='public')
# def object(self, obj, **kw):
# return http.request.render('library_website.object', {
# 'object': obj
# }) | [
"micheal_zhong@outlook.com"
] | micheal_zhong@outlook.com |
665167de52b93ffc4a32935b37c45148f4d1cb62 | 2aeccf93bb57b22dc895de9022413a0128497257 | /fw/FaceSwap/add_image.py | b34e857f891712f3eacccb7ec2f09161e2baed4c | [
"MIT"
] | permissive | Huang-chi/face_classification | 72b42774e904dd8f97813bc55a4a5a64e7bcf8ee | 9be2b51871008c97f3e71e4ceb2d4d031a0ca363 | refs/heads/master | 2022-12-05T05:13:13.181571 | 2019-05-01T07:32:55 | 2019-05-01T07:32:55 | 177,799,345 | 0 | 0 | MIT | 2022-11-21T21:16:15 | 2019-03-26T13:58:11 | Jupyter Notebook | UTF-8 | Python | false | false | 771 | py | import cv2 as cv
import numpy as np
def Add_image(image_array, emotion_icon=None):
    """Paste `emotion_icon` onto `image_array` at a fixed anchor and return the frame.

    `image_array` is modified in place and also returned.
    NOTE(review): the default emotion_icon=None crashes on `.shape` below, so
    callers must always pass an icon despite the keyword default — confirm.
    """
    # Fixed anchor for the icon overlay (icon occupies a top strip at x=520).
    x_offset = 520
    y_offset = 0
    # Destination rectangle sized to the icon.
    y1, y2 = y_offset, y_offset + emotion_icon.shape[0]
    x1, x2 = x_offset, x_offset + emotion_icon.shape[1]
    # Black out the destination region first.
    cv.rectangle(image_array, (x1, y1), (x2, y2), (0, 0, 0), -1)
    width = emotion_icon.shape[0]
    height = emotion_icon.shape[1]
    # Append a constant, fully opaque alpha channel to the icon.
    alpha_value = np.ones((width, height, 1))*255
    emotion_icon1 = np.c_[emotion_icon, alpha_value]
    # NOTE(review): the synthesized alpha is always 255, so alpha_s is 1.0
    # everywhere and the blend below degenerates to a full overwrite of the
    # region with the icon pixels — confirm whether a real alpha was intended.
    alpha_s = emotion_icon1[:, :, 3] / 255.0
    alpha_l = 1.0 - alpha_s
    for c in range(0, 3):
        image_array[y1:y2, x1:x2, c] = (alpha_s * emotion_icon[:, :, c] +
                                        alpha_l * image_array[y1:y2, x1:x2, c])
    return image_array
| [
"a0983080692@gmail.com"
] | a0983080692@gmail.com |
e59cd1e05315ae0cca56b62175892102c42e1912 | 6b5dfd3b325e857fd037ecb694cc88058a07bce8 | /Snow.py | eb8e1962d059fa8920c8cc9a51415bdc7cb6270b | [] | no_license | BloodyPig/LittleThing | 76df9b3dcd78c8709868ac377dd9b9288c175c11 | eda02027da60bf44dbd4e71194246c5f1a578553 | refs/heads/master | 2020-04-14T08:40:28.769756 | 2019-03-21T05:13:16 | 2019-03-21T05:13:16 | 163,740,357 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 979 | py | # 科赫曲线
# 正整数n代表科赫曲线的阶数,表示生成科赫曲线过程的操作次数。
# 科赫曲线初始化阶数为0,表示一个长度为L的直线。
# 对于直线L将其等分为3段,中间一段用边长为L/3的等边三角形的两个边替代,
# 得到1阶科赫曲线,它包含4条线段。进一步对每条线段重复同样的操作后得到的2阶科赫曲线。
# 重复操作N次可以得到N阶科赫曲线。
import turtle
from turtle import *
def koch(size, n):
    """Draw an order-n Koch curve of total length `size` with the turtle pen.

    Order 0 is a straight segment; each higher order replaces every segment
    with four segments of one third the length, turning 60/-120/60 degrees
    between them.
    """
    if n == 0:
        turtle.fd(size)
        return
    for turn in (0, 60, -120, 60):
        turtle.left(turn)
        koch(size / 3, n - 1)
def main():
    """Draw a level-5 Koch snowflake: three Koch curves joined by 120-degree right turns."""
    turtle.setup(720, 720)
    # speed(0) = fastest drawing animation.
    turtle.speed(0)
    # Move to the starting corner without drawing.
    turtle.penup()
    turtle.goto(-200, 100)
    turtle.pendown()
    turtle.pensize(1)
    # Recursion depth of each Koch curve (edge of the snowflake).
    level = 5
    koch(400, level)
    turtle.right(120)
    koch(400, level)
    turtle.right(120)
    koch(400, level)
    turtle.hideturtle()
    # done() (from `from turtle import *`) keeps the window open.
    done()
main()
| [
"ximuning213@163.com"
] | ximuning213@163.com |
15e5789932b0355c04ac2b725eab7e72e658eead | 768932af57174cfa8c7f593be71baed1a95697e4 | /Fraction-defective-analysis-Pilot/MLP.py | 787c03d722f49ac785476b30c34f2c5a7ff4c8e1 | [] | no_license | INFIA-FOUNDATION/machine-running | e010db8bc6fadfe807b847428d54d52c064511f0 | afb9fd01061cb9bf7a9cebb8395834593544fde5 | refs/heads/master | 2020-04-10T06:09:35.256106 | 2018-12-12T19:01:17 | 2018-12-12T19:01:17 | 160,847,016 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,417 | py | # -*- coding: utf-8 -*-
"""
Created on Sat Dec 1 21:15:00 2018
@author: INFIA Protocol
"""
#import random
#import sys
#import scipy
import numpy as np
import pandas as pd
from sklearn.neural_network import MLPClassifier
# Load the clustered train/test CSVs; `buffer` keeps an untouched copy of the
# test set so predictions can be appended to it for export at the end.
train = pd.read_csv('cluster_train.csv')
#train = train.dropna()
# Feature matrix: everything except the timestamp and the two categorical columns.
train1= train.drop(['datetime','conds','dire'], axis=1)
#train1 = train1.dropna()
train1 = train1.values
test = pd.read_csv('cluster_test.csv')
buffer = pd.read_csv('cluster_test.csv')
#test = test.dropna()
test1= test.drop(['datetime','conds','dire'], axis=1)
#test1 = test1.dropna()
test1 = test1.values
'''
mat = scipy.io.loadmat('clusterMLP_distribution.mat')
# 학습 데이터를 받아와서 저장
for i in mat['clusterMLP_distribution']:
train_data.append([i[0], i[1], i[2], i[3], i[4]])
for i in mat['clusterMLP_distribution']:
test_data.append([i[0], i[1], i[2], i[3], i[4]])
# 쿼리로 받은 데이터를 고객 ID별로 묶어준다
whole_data =[]
for line in train_data:
whole_data.append([line[0], line[1], line[2]])
'''
'''
defective_x = []
not_defective_x=[]
defective_label = 0
not_defective_label = 1
for line in whole_data:
for n in range(len(line) ,2,-1):
if n == len(line):
defective_x.append(line[n-10000:n] )
else:
not_defective_x.append(line[n-10000:n])
random.shuffle(not_defective_x)
not_defective_x = not_defective_x[0:len(defective_x)]
X = []
X.extend(defective_x)
X.extend(not_defective_x)
Y=[]
Y.extend([defective_label] * len(defective_x))
Y.extend([not_defective_label] * len(not_defective_x))
'''
accuracy_list = []
# NOTE(review): the defective_*_label constants below are never referenced in
# this script (the class ids 1..40 are written as literals later) — candidates
# for removal once confirmed nothing imports this module.
defective_001_label = 1
defective_002_label = 2
defective_003_label = 3
defective_004_label = 4
defective_005_label = 5
defective_006_label = 6
defective_007_label = 7
defective_008_label = 8
defective_009_label = 9
defective_010_label = 10
defective_011_label = 11
defective_012_label = 12
defective_013_label = 13
defective_014_label = 14
defective_015_label = 15
defective_016_label = 16
defective_017_label = 17
defective_018_label = 18
defective_019_label = 19
defective_020_label = 20
defective_021_label = 21
defective_022_label = 22
defective_023_label = 23
defective_024_label = 24
defective_025_label = 25
defective_026_label = 26
defective_027_label = 27
defective_028_label = 28
defective_029_label = 29
defective_030_label = 30
defective_031_label = 31
defective_032_label = 32
defective_033_label = 33
defective_034_label = 34
defective_035_label = 35
defective_036_label = 36
defective_037_label = 37
defective_038_label = 38
defective_039_label = 39
defective_040_label = 40
#X = []
#X.append(train2)
# Training feature matrix (numpy array built above).
X = train1
#Y = []
# Raw 'conds' (weather condition) columns, as numpy object arrays of strings/0.
train_yy = train['conds']
#train_y = train_y.dropna()
train_yy = train_yy.values
test_yy = test['conds']
#train_y = train_y.dropna()
test_yy = test_yy.values
#train_yy = np.array([])
#train_yy = np.zeros((1,80000))
#train_yy = train_y
#np.sort(buffer_yy, axis=None, kind='quicksort')
# The 'conds' column mixes 39 weather-condition strings with the number 0 for
# missing values.  Encode each to an integer class id (1..40) using a single
# lookup table instead of 80 hand-written assignments — same behavior, and the
# train/test encodings can no longer drift apart.
WEATHER_CONDS = [
    'Blowing Sand', 'Clear', 'Drizzle', 'Fog', 'Funnel Cloud', 'Haze',
    'Heavy Fog', 'Heavy Rain', 'Heavy Thunderstorms and Rain',
    'Heavy Thunderstorms with Hail', 'Light Drizzle', 'Light Fog',
    'Light Freezing Rain', 'Light Hail Showers', 'Light Haze', 'Light Rain',
    'Light Rain Showers', 'Light Sandstorm', 'Light Thunderstorm',
    'Light Thunderstorms and Rain', 'Mist', 'Mostly Cloudy', 'Overcast',
    'Partial Fog', 'Partly Cloudy', 'Patches of Fog', 'Rain', 'Rain Showers',
    'Sandstorm', 'Scattered Clouds', 'Shallow Fog', 'Smoke', 'Squalls',
    'Thunderstorm', 'Thunderstorms and Rain', 'Thunderstorms with Hail',
    'Unknown', 'Volcanic Ash', 'Widespread Dust', 0,  # 0 (missing) -> id 40
]
print('print start-->')
for class_id, cond in enumerate(WEATHER_CONDS, start=1):
    train_yy[train_yy == cond] = class_id
print('print end-->')
print('print start-->')
for class_id, cond in enumerate(WEATHER_CONDS, start=1):
    test_yy[test_yy == cond] = class_id
print('print end-->')
#Y = np.array([])
#Y = np.zeros((1,80000))
#Y = np.vstack([Y, train_yy])
# Encoded labels; cast from object arrays of python ints to a proper int dtype
# so scikit-learn accepts them.
Y = train_yy
Y = Y.astype('int')
test_yy = test_yy.astype('int')
# Five hidden layers, L-BFGS solver, fixed seed for reproducible training.
clf = MLPClassifier(solver='lbfgs', alpha=1e-5, hidden_layer_sizes=(500,200,100,24,12), random_state=0)
clf.fit(X,Y)
print('done clf !!!!!!!!!!!!!')
'''
id_data = []
tmp_id=[]
for line in test_data:
whole_data.append([line[0], line[1], line[2]])
test_x = []
tmp = []
for line in whole_data:
for n in range(len(line) ,2,-1):
if n == len(line):
tmp.append(line[n-10000:n])
break
'''
test_x = test1
#predicted_y = clf.predict(test_x)
predicted_yy = clf.predict(test_x)
# Fraction of test rows whose predicted class id matches the encoded label.
accuracy = len( [p for p, y in zip(predicted_yy, test_yy) if p == y] ) / len(predicted_yy) *100
# NOTE(review): the first placeholder interpolates the whole feature matrix
# `test_x` into the log string — probably a dataset/file name was intended.
accuracy_list.append( '{}''s MLP result accuracy is {:.2f}%'.format(test_x, accuracy) )
print('Accuracy is {}'.format(accuracy))
#predicted_yy = np.array([])
#predicted_yy = np.zeros((1,80000))
#predicted_yy = pd.DataFrame(predicted_y)
# Object dtype so integer class ids can be replaced by strings in place.
predicted_yy = predicted_yy.astype('object')
# Decode integer class ids (1..40) back to condition names via one lookup
# table instead of 40 hand-written assignments — identical result.  Id 40
# maps to the STRING '0' (the original missing marker was the number 0, so
# this round trip intentionally mirrors the original script's behavior).
PREDICTED_COND_NAMES = [
    'Blowing Sand', 'Clear', 'Drizzle', 'Fog', 'Funnel Cloud', 'Haze',
    'Heavy Fog', 'Heavy Rain', 'Heavy Thunderstorms and Rain',
    'Heavy Thunderstorms with Hail', 'Light Drizzle', 'Light Fog',
    'Light Freezing Rain', 'Light Hail Showers', 'Light Haze', 'Light Rain',
    'Light Rain Showers', 'Light Sandstorm', 'Light Thunderstorm',
    'Light Thunderstorms and Rain', 'Mist', 'Mostly Cloudy', 'Overcast',
    'Partial Fog', 'Partly Cloudy', 'Patches of Fog', 'Rain', 'Rain Showers',
    'Sandstorm', 'Scattered Clouds', 'Shallow Fog', 'Smoke', 'Squalls',
    'Thunderstorm', 'Thunderstorms and Rain', 'Thunderstorms with Hail',
    'Unknown', 'Volcanic Ash', 'Widespread Dust', '0',
]
for class_id, cond_name in enumerate(PREDICTED_COND_NAMES, start=1):
    predicted_yy[predicted_yy == class_id] = cond_name
#predicted_yyy = np.array([])
#predicted_yyy = np.zeros((1,80000))
#predicted_yyy = np.vstack([predicted_yyy, predicted_yy])
# Attach the decoded predictions to the untouched test copy and export.
buffer['Predict_Defective'] = predicted_yy
buffer.to_csv('MLP_distribution.csv', header=True, index=False)
"noreply@github.com"
] | INFIA-FOUNDATION.noreply@github.com |
885dc21c225ed220511379ff73cb7b7d0bf7d591 | 89d2d34a9e4e68f99d0396e371e6fa07d04b78a2 | /app/Models/test_models.py | decfd7d429442879a72519bdce374f77bcde9b32 | [] | no_license | ysy950803/MyServer | d44a445f39fac4ea323edaab02bed1486aab605f | c4e37ca78cac2e8a5efc0a4003f43d35f5ce5558 | refs/heads/master | 2021-01-10T01:09:14.017942 | 2016-03-01T06:54:39 | 2016-03-01T06:54:39 | 51,681,662 | 1 | 0 | null | 2016-02-14T05:45:07 | 2016-02-14T05:45:07 | null | UTF-8 | Python | false | false | 42 | py | # -*- coding: utf-8 -*-
from . import *
| [
"harold@HarolddeMacBook-Pro.local"
] | harold@HarolddeMacBook-Pro.local |
030e52259d44d34774c35c42d1d85544d7bbead2 | 0f7e18a483a44352dfac27137b8d351416f1d1bb | /tools/extract_gif.py | 2954d57a2760b14fc8ed5596e03e11684e3c682c | [] | no_license | rinoshinme/slim_finetune | b5ec4ed53a2d6c15dfa5b4cfb73677ccb58a4aa6 | 1e465e3faff668e65cc873828057365114d4cfb1 | refs/heads/master | 2022-11-07T21:02:38.253001 | 2022-11-02T14:48:45 | 2022-11-02T14:48:45 | 199,089,723 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 676 | py | """
Extract image frames from gif files.
"""
import cv2
import os
def read_gif(gif_file, write_folder, name_prefix):
capture = cv2.VideoCapture(gif_file)
cnt = 1
while True:
ret, frame = capture.read()
if not ret:
break
save_name = os.path.join(write_folder, '%s_%06d.jpg' % (name_prefix, cnt))
cv2.imwrite(save_name, frame)
cnt += 1
if __name__ == '__main__':
gif_folder = r'D:\data\21cn_baokong\bad_format'
fnames = os.listdir(gif_folder)
for name in fnames:
gif_path = os.path.join(gif_folder, name)
prefix = name.split('.')[0]
read_gif(gif_path, gif_folder, prefix)
| [
"rinoshinme@163.com"
] | rinoshinme@163.com |
d0bf9c5380c412bd8792dcbe9fbfe16b6a0eff54 | 294b310982869369f398cc961e7a4b10ec4a37ab | /InformationHiding/real_User/models.py | 73387c3a03085b6aefe8a8a3ff3e64db7938d47f | [] | no_license | hasrat97/InformationHiding---Copy | 1858fff8515287f134e11e280c9c864f5685dbcb | c7b40d894809474508418e94003302655c3b8bdd | refs/heads/main | 2022-12-25T01:59:11.286785 | 2020-10-03T13:41:03 | 2020-10-03T13:41:03 | 300,889,316 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 913 | py | from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Profile(models.Model):
#eta profile er model/table.. aro kichu add koris..add image
#userName = models.CharField(max_length=100, blank= False, unique= True) ei username ta hobe kina sure na ami.. tui jkhn profile kaaj korbi tkhn to dekhbii.
FirstName= models.CharField(max_length=100, blank= True)
LastName = models.CharField(max_length=100, blank= True)
Phone_No = models.IntegerField(blank=True, null= True)
EmailID = models.EmailField(max_length=100, unique= True)
Occupation = models.CharField(max_length=100, blank= True)
City = models.CharField(max_length= 100, blank= True)
Country = models.CharField(max_length=100,blank=True)
user = models.ForeignKey(User, default= None, on_delete=models.CASCADE)
def __str__(self):
return self.user.username
| [
"68140564+hasrat97@users.noreply.github.com"
] | 68140564+hasrat97@users.noreply.github.com |
2319403ffee6db7a854ac6e5126ce4367d053e43 | b28c8ef3a6bbca98ecb1ae2e5487c297b561d8e4 | /music/urls.py | 77de32e95b9a25bbced65a31070910152c6b4316 | [] | no_license | deepak-891/first_django_app | 2973269394d47fd4899bdc37ba080453f5e54f4f | 14186c9ba805c87adbdaa890e54f79eb55fff743 | refs/heads/master | 2021-06-23T21:32:23.761355 | 2019-08-17T16:08:33 | 2019-08-17T16:08:33 | 182,037,989 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 256 | py | from django.urls import path
from . import views
# URL namespace: templates/views reverse these routes as 'music:index'
# and 'music:detail'.
app_name = 'music'
urlpatterns = [
    # /music/
    path('', views.IndexView.as_view(), name='index'),
    # /music/<album_id>/
    path('<int:pk>', views.DetailView.as_view(), name='detail'),
]
| [
"noreply@github.com"
] | deepak-891.noreply@github.com |
be9a0907c9036e8fdd48e08104e662b540b981c3 | 089861b716767719b268c5e2c4718bc56b1b37b1 | /tiles.py | 9c6baed97ea127f845729f2b2dd31cec4a855cfe | [] | no_license | ajw4g13/Tiles | 413eab93f014ef0b5aa9242e4f8f41331338943c | fa49a4b0f3d0af0ec555950efa4c2b5245e244c4 | refs/heads/master | 2021-01-10T14:58:13.302435 | 2016-02-09T14:40:05 | 2016-02-09T14:40:05 | 51,127,037 | 0 | 0 | null | 2016-02-06T15:04:27 | 2016-02-05T05:01:36 | Python | UTF-8 | Python | false | false | 8,360 | py |
### This code uses inputted dimensions for a rectangular room and a
### rectangular tile, and outputs the number of tiles needed to tile the
### room optimally. It then outputs instructions on how to tile your room,
### a diagram of the final room, the number of tiles needed etc...
## This section takes inputted x and y dimensions for the room and tile and
## gives the x and y dimensions in millimetres.
import sys
allowed_units = ['metre', 'metres', 'm', 1000.0, 'centimetre', 'centimetres', 'cm', 10.0, 'milimetre', 'milimetres', 'mm', 1.0, 'yard', 'yards', 'yd', 914.4, 'foot', 'feet', 'ft', 304.8, 'inch', 'inches', 'in', 25.4]
ilx, ily, tunits = float(input("Width of tile: ")), float(input("Length of tile: ")), raw_input("type units: ")
# ilx and ily is the inputted width
# and height of the tiles, with units
# tunits.
lx, ly, Lx, Ly = 0, 0, 0, 0
for i in range(len(allowed_units)/4):
if tunits == allowed_units[4*i] or tunits == allowed_units[4*i + 1] or tunits == allowed_units[4*i + 2]:
lx, ly = allowed_units[4*i + 3] * ilx, allowed_units[4*i + 3] * ily
# This code assigns the units that
# final tile measurements are given
# in, based on inputted tile units,
# while simultaneously defining new
# values for the length and width of
# the tile, lx and ly, in millimetres.
if lx == 0 or ly == 0:
sys.exit('Invalid dimensions or units entered.')
iLx, iLy, runits = float(input("Width of room: ")), float(input("Length of room: ")), raw_input("type units: ")
for i in range(6):
if runits == allowed_units[4*i] or runits == allowed_units[4*i + 1] or runits == allowed_units[4*i + 2]:
Lx, Ly = allowed_units[4*i + 3] * iLx, allowed_units[4*i + 3] * iLy
# Repeat of above, for room.
if Lx == 0 or Ly == 0:
sys.exit('Invalid dimensions or units entered.')
## This section takes the x and y dimensions in millimetres, and gives the
## number of tiles needed in both directions, along with the total number of
## tiles needed, for both alignments. It then gives the dimensions for the
## side tiles, in millimetres.
nxa = int(Lx / lx - 1.0)
nya = int(Ly / ly - 1.0) # Number of tiles needed in x and y
# directions.
Na = (nxa + 2) * (nya + 2) # Total number of tiles needed.
nxb = int(Lx / ly - 1.0)
nyb = int(Ly / lx - 1.0)
Nb = int(nxb + 2.0) * int(nyb + 2.0) # Repeat for opposite alignment
# of tiles.
if Nb < Na:
N = Nb
temp = lx
lx = ly
ly = temp
nx = nxb
ny = nyb
else:
N = Na
nx = nxa
ny = nya
# Choose optimal alignment. If
# opposite alignment is optimal, lx
# and ly switch. The number of tiles
# needed in each direction are
# defined as nx and ny.
Rx = 0.5*(Lx-(nx*lx))
Ry = 0.5*(Ly-(ny*ly))
# Remainder of tiles in millimetres
# (float).
print (" ")
## This section uses the inputted x and y dimensions for the room and tile
## to give an image of the tiled room.
from PIL import Image
import numpy as np
X, Y, x, y, rx, ry = int(round(Lx)), int(round(Ly)), int(round(lx)), int(round(ly)), int(round(Rx)), int(round(Ry))
# Returns integer values for all the
# lengths in previous section, in
# order to assign pixels.
rimg = np.empty((X,Y),np.uint32) # Uses the room dimensions to create
# an empty array, to be used for the
# room image.
rimg.shape=Y,X # Sets the size of the array to be
# X by Y.
rimg[0:Y:,0:X]=0xFFFFFFFF # Turns the empty pixels into white
# pixels. The eight values are
# hexidecimal numbers in the format
# 0xAABBGGRR, where B, G, and R are
# blue, green and red respectively,
# while A is transparency. FF is the
# highest possible value (255), so
# FFFFFFFF gives a solid white pixel.
rimg[ry:(Y-ry),0:rx]=0xFF00FF00
rimg[ry:(Y-ry),(X-rx):X]=0xFF00FF00
rimg[0:ry,rx:(X-rx)]=0xFFFF0000
rimg[(Y-ry):Y,rx:(X-rx)]=0xFFFF0000
rimg[0:ry,0:rx]=0xFF0000FF
rimg[0:ry,(X-rx):X]=0xFF0000FF
rimg[(Y-ry):Y,0:rx]=0xFF0000FF
rimg[(Y-ry):Y,(X-rx):X]=0xFF0000FF
# Creates coloured tiles around edges
# of the image.
rimg[0:9, 0:X] = 0xFF000000
rimg[(Y-9):Y, 0:X] = 0xFF000000
rimg[0:Y, 0:9] = 0xFF000000
rimg[0:Y, (X-9):X] = 0xFF000000
# Creates a black border around the
# image.
for i in range(ny + 1):
rimg[((ry + y*i) - 4):((ry + y*i) + 5), 0:X] = 0xFF000000
for i in range(nx + 1):
rimg[0:Y, (rx + x*i):(rx + x*i) + 1] = 0xFF000000
# Creates black lines at intervals,
# representing the tiles.
img = Image.frombuffer('RGBA',(X,Y),rimg,'raw','RGBA',0,1)
img.save("room_layout.png","PNG")
img.show()
print N, 'tiles needed'
print ' '
print 'The side tiles (green) will be', ilx*(Rx/lx), tunits, 'by', ily, tunits
print 'The side tiles (blue) will be', ilx, tunits, 'by', ily*(Ry/ly), tunits
print 'The corner tiles (red) will be', ilx*(Rx/lx), tunits, 'by', ily*(Ry/ly), tunits, '.'
print ' '
if (nx % 2 != 0) and (ny % 2 != 0):
print "In order to lay your floor, mark the centre point of the room. Place a tile so that it's centre lines up with the centre of the centre of the room. Then place your tiles so that they all line up with this central tile."
elif (nx % 2 == 0) and (ny % 2 == 0):
print "In order to lay your floor, mark the centre point of the room. Place four tiles so that each has a corner on the centre point of the room. Then place your tiles so that they all line up with these central tiles."
elif (nx % 2 != 0) and (ny % 2 == 0) and Lx < Ly:
print "In order to lay your floor, mark the centre point of the room. Place a tile either side of the point so that the tiles line up with the long side of the room. Then place your tiles so that they all line up with this central tile."
elif (nx % 2 == 0) and (ny % 2 != 0) and Lx > Ly:
print "In order to lay your floor, mark the centre point of the room. Place a tile either side of the point so that the tiles line up with the long side of the room. Then place your tiles so that they all line up with this central tile."
elif (nx % 2 != 0) and (ny % 2 == 0) and Lx > Ly:
print "In order to lay your floor, mark the centre point of the room. Place a tile either side of the point so that the tiles line up with the short side of the room. Then place your tiles so that they all line up with this central tile."
elif (nx % 2 == 0) and (ny % 2 != 0) and Lx < Ly:
print "In order to lay your floor, mark the centre point of the room. Place a tile either side of the point so that the tiles line up with the short side of the room. Then place your tiles so that they all line up with this central tile."
raw_input("press enter to exit")
| [
"awhitehead1@hotmail.co.uk"
] | awhitehead1@hotmail.co.uk |
9a303d5a18b1917d5e0ff10d6430ce6a4b6bd086 | 3b9b4049a8e7d38b49e07bb752780b2f1d792851 | /src/tools/perf/page_sets/blink_memory_mobile.py | 40b5f134886c79490da62c0fdd3e8fdc1c01ea0c | [
"BSD-3-Clause",
"Apache-2.0",
"LGPL-2.0-or-later",
"MIT",
"GPL-2.0-only",
"LicenseRef-scancode-unknown",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | webosce/chromium53 | f8e745e91363586aee9620c609aacf15b3261540 | 9171447efcf0bb393d41d1dc877c7c13c46d8e38 | refs/heads/webosce | 2020-03-26T23:08:14.416858 | 2018-08-23T08:35:17 | 2018-09-20T14:25:18 | 145,513,343 | 0 | 2 | Apache-2.0 | 2019-08-21T22:44:55 | 2018-08-21T05:52:31 | null | UTF-8 | Python | false | false | 5,080 | py | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
from telemetry import story
from page_sets.login_helpers import google_login
DUMP_WAIT_TIME = 3
class BlinkMemoryMobilePage(page_module.Page):
def __init__(self, url, page_set, name):
super(BlinkMemoryMobilePage, self).__init__(
url=url, page_set=page_set, name=name,
shared_page_state_class=shared_page_state.SharedMobilePageState,
credentials_path='data/credentials.json')
self.archive_data_file = 'data/blink_memory_mobile.json'
def _DumpMemory(self, action_runner, phase):
with action_runner.CreateInteraction(phase):
action_runner.Wait(DUMP_WAIT_TIME)
action_runner.ForceGarbageCollection()
action_runner.SimulateMemoryPressureNotification('critical')
action_runner.Wait(DUMP_WAIT_TIME)
if not action_runner.tab.browser.DumpMemory():
logging.error('Unable to get a memory dump for %s.', self.name)
def RunPageInteractions(self, action_runner):
action_runner.ScrollPage()
self._DumpMemory(action_runner, 'scrolled')
class TheVergePage(BlinkMemoryMobilePage):
COMMENT_LINK_SELECTOR = '.show_comments_link'
def __init__(self, page_set):
super(TheVergePage, self).__init__(
'http://www.theverge.com/2015/8/11/9133883/taylor-swift-spotify-discover-weekly-what-is-going-on',
page_set=page_set,
name='TheVerge')
def RunPageInteractions(self, action_runner):
action_runner.WaitForElement(selector=TheVergePage.COMMENT_LINK_SELECTOR)
action_runner.ExecuteJavaScript(
'window.location.hash = "comments"')
action_runner.TapElement(
selector=TheVergePage.COMMENT_LINK_SELECTOR)
action_runner.WaitForJavaScriptCondition(
'window.Chorus.Comments.collection.length > 0')
super(TheVergePage, self).RunPageInteractions(action_runner)
class FacebookPage(BlinkMemoryMobilePage):
def __init__(self, page_set):
super(FacebookPage, self).__init__(
'https://facebook.com/barackobama',
page_set=page_set,
name='Facebook')
def RunNavigateSteps(self, action_runner):
super(FacebookPage, self).RunNavigateSteps(action_runner)
action_runner.WaitForJavaScriptCondition(
'document.getElementById("u_0_c") !== null &&'
'document.body.scrollHeight > window.innerHeight')
class GmailPage(BlinkMemoryMobilePage):
def __init__(self, page_set):
super(GmailPage, self).__init__(
'https://mail.google.com/mail/',
page_set=page_set,
name='Gmail')
def RunNavigateSteps(self, action_runner):
google_login.LoginGoogleAccount(action_runner, 'google',
self.credentials_path)
super(GmailPage, self).RunNavigateSteps(action_runner)
# Needs to wait for navigation to handle redirects.
action_runner.WaitForNavigate()
action_runner.WaitForElement(selector='#apploadingdiv')
action_runner.WaitForJavaScriptCondition(
'document.querySelector("#apploadingdiv").style.opacity == "0"')
class BlinkMemoryMobilePageSet(story.StorySet):
"""Key mobile sites for Blink memory reduction."""
def __init__(self):
super(BlinkMemoryMobilePageSet, self).__init__(
archive_data_file='data/blink_memory_mobile.json',
cloud_storage_bucket=story.PARTNER_BUCKET)
# Why: High rate of Blink's memory consumption rate.
self.AddStory(BlinkMemoryMobilePage(
'https://www.pinterest.com',
page_set=self,
name='Pinterest'))
self.AddStory(FacebookPage(self))
# TODO(bashi): Enable TheVergePage. http://crbug.com/522381
# self.AddStory(TheVergePage(self))
# Why: High rate of Blink's memory comsumption rate on low-RAM devices.
self.AddStory(BlinkMemoryMobilePage(
'http://en.m.wikipedia.org/wiki/Wikipedia',
page_set=self,
name='Wikipedia (1 tab) - delayed scroll start',))
self.AddStory(BlinkMemoryMobilePage(
url='http://www.reddit.com/r/programming/comments/1g96ve',
page_set=self,
name='Reddit'))
self.AddStory(BlinkMemoryMobilePage(
'https://en.blog.wordpress.com/2012/09/04/freshly-pressed-editors-picks-for-august-2012/',
page_set=self,
name='Wordpress'))
# Why: Renderer memory usage is high.
self.AddStory(BlinkMemoryMobilePage(
'http://worldjournal.com/',
page_set=self,
name='Worldjournal'))
# Why: Key products.
self.AddStory(GmailPage(page_set=self))
self.AddStory(BlinkMemoryMobilePage(
'http://googlewebmastercentral.blogspot.com/2015/04/rolling-out-mobile-friendly-update.html?m=1',
page_set=self,
name='Blogger'))
self.AddStory(BlinkMemoryMobilePage(
'https://plus.google.com/app/basic/110031535020051778989/posts?source=apppromo',
page_set=self,
name='GooglePlus'))
| [
"changhyeok.bae@lge.com"
] | changhyeok.bae@lge.com |
78d2b8b47a7eee961d5859141f27596799c74d15 | 1bac0cd9812e33724664dc0467976948b8628521 | /DataHack_2018/NLP_project/news/news/items.py | 00bcbad309d0e3febf657f932e523bdb474325e7 | [
"MIT"
] | permissive | atalyaalon/anyway_projects | a1509fffb61e6781ff645db288be448cc47bd524 | cc76f238f116e70a663e55d2a3fe0c7c5cd3cdf6 | refs/heads/master | 2021-06-28T21:43:28.655495 | 2020-09-17T21:34:35 | 2020-09-17T21:34:35 | 151,638,491 | 0 | 0 | MIT | 2018-10-04T21:32:42 | 2018-10-04T21:32:42 | null | UTF-8 | Python | false | false | 284 | py | # -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# https://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class NewsItem(scrapy.Item):
    """Container for one scraped news record; fields still to be declared."""
    # define the fields for your item here like:
    # name = scrapy.Field()
    pass
| [
"litmanovitziv@gmail.com"
] | litmanovitziv@gmail.com |
c3d90ec27fcc681bfa880229fd4e3c3d7269f3d8 | 9de3cd1f8d66e223b7f193776376147136321b30 | /tests/test_cookie_storage.py | 80d9dbd52236af389425acd1c41c63ce277620ad | [
"Apache-2.0"
] | permissive | kolko/aiohttp_session | 223b5eaf8c291407f289e20efe692a0afef31bca | 8df78b3d7bd5623f82ff8171c9d947107421f4ce | refs/heads/master | 2021-01-18T09:43:10.928082 | 2015-08-06T16:15:51 | 2015-08-06T16:15:51 | 40,311,233 | 0 | 0 | null | 2015-08-06T15:06:58 | 2015-08-06T15:06:58 | null | UTF-8 | Python | false | false | 5,099 | py | import asyncio
import json
import socket
import unittest
from aiohttp import web, request
from aiohttp_session import (Session, session_middleware,
get_session, SimpleCookieStorage)
class TestSimleCookieStorage(unittest.TestCase):
    """Integration tests for SimpleCookieStorage: each test spins up a real
    aiohttp server on a private event loop and drives it with an HTTP client.
    NOTE(review): 'Simle' in the class name is a typo for 'Simple'; kept
    unchanged so existing test-selection filters keep matching."""
    def setUp(self):
        # Private event loop so tests do not interfere with the default
        # loop or with each other.
        self.loop = asyncio.new_event_loop()
        asyncio.set_event_loop(None)
        self.srv = None
        self.handler = None
    def tearDown(self):
        # Drain open connections before closing the server and the loop.
        self.loop.run_until_complete(self.handler.finish_connections())
        self.srv.close()
        self.loop.close()
    def find_unused_port(self):
        # Bind to port 0 so the OS picks a free port, then release it.
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.bind(('127.0.0.1', 0))
        port = s.getsockname()[1]
        s.close()
        return port
    @asyncio.coroutine
    def create_server(self, method, path, handler=None):
        # Build an app wrapped in the session middleware under test and
        # serve it on localhost; returns (app, server, url).
        middleware = session_middleware(SimpleCookieStorage())
        app = web.Application(middlewares=[middleware], loop=self.loop)
        if handler:
            app.router.add_route(method, path, handler)
        port = self.find_unused_port()
        handler = app.make_handler()
        srv = yield from self.loop.create_server(
            handler, '127.0.0.1', port)
        url = "http://127.0.0.1:{}".format(port) + path
        self.handler = handler
        self.srv = srv
        return app, srv, url
    def make_cookie(self, data):
        # SimpleCookieStorage stores the session as plain JSON in the cookie.
        value = json.dumps(data)
        return {'AIOHTTP_SESSION': value}
    def test_create_new_sesssion(self):
        # A request without a session cookie yields a new, empty session.
        @asyncio.coroutine
        def handler(request):
            session = yield from get_session(request)
            self.assertIsInstance(session, Session)
            self.assertTrue(session.new)
            self.assertFalse(session._changed)
            self.assertEqual({}, session)
            return web.Response(body=b'OK')
        @asyncio.coroutine
        def go():
            _, _, url = yield from self.create_server('GET', '/', handler)
            resp = yield from request('GET', url, loop=self.loop)
            self.assertEqual(200, resp.status)
        self.loop.run_until_complete(go())
    def test_load_existing_sesssion(self):
        # A valid session cookie is deserialized into a non-new session.
        @asyncio.coroutine
        def handler(request):
            session = yield from get_session(request)
            self.assertIsInstance(session, Session)
            self.assertFalse(session.new)
            self.assertFalse(session._changed)
            self.assertEqual({'a': 1, 'b': 2}, session)
            return web.Response(body=b'OK')
        @asyncio.coroutine
        def go():
            _, _, url = yield from self.create_server('GET', '/', handler)
            resp = yield from request(
                'GET', url,
                cookies=self.make_cookie({'a': 1, 'b': 2}),
                loop=self.loop)
            self.assertEqual(200, resp.status)
        self.loop.run_until_complete(go())
    def test_change_sesssion(self):
        # Mutating the session must be reflected in the Set-Cookie header.
        @asyncio.coroutine
        def handler(request):
            session = yield from get_session(request)
            session['c'] = 3
            return web.Response(body=b'OK')
        @asyncio.coroutine
        def go():
            _, _, url = yield from self.create_server('GET', '/', handler)
            resp = yield from request(
                'GET', url,
                cookies=self.make_cookie({'a': 1, 'b': 2}),
                loop=self.loop)
            self.assertEqual(200, resp.status)
            morsel = resp.cookies['AIOHTTP_SESSION']
            self.assertEqual({'a': 1, 'b': 2, 'c': 3}, eval(morsel.value))
            self.assertTrue(morsel['httponly'])
            self.assertEqual('/', morsel['path'])
        self.loop.run_until_complete(go())
    def test_clear_cookie_on_sesssion_invalidation(self):
        # Invalidating the session empties the cookie value.
        @asyncio.coroutine
        def handler(request):
            session = yield from get_session(request)
            session.invalidate()
            return web.Response(body=b'OK')
        @asyncio.coroutine
        def go():
            _, _, url = yield from self.create_server('GET', '/', handler)
            resp = yield from request(
                'GET', url,
                cookies=self.make_cookie({'a': 1, 'b': 2}),
                loop=self.loop)
            self.assertEqual(200, resp.status)
            self.assertEqual(
                'Set-Cookie: AIOHTTP_SESSION="{}"; httponly; Path=/'.upper(),
                resp.cookies['AIOHTTP_SESSION'].output().upper())
        self.loop.run_until_complete(go())
    def test_dont_save_not_requested_session(self):
        # Handlers that never touch the session must not emit a cookie.
        @asyncio.coroutine
        def handler(request):
            return web.Response(body=b'OK')
        @asyncio.coroutine
        def go():
            _, _, url = yield from self.create_server('GET', '/', handler)
            resp = yield from request(
                'GET', url,
                cookies=self.make_cookie({'a': 1, 'b': 2}),
                loop=self.loop)
            self.assertEqual(200, resp.status)
            self.assertNotIn('AIOHTTP_SESSION', resp.cookies)
        self.loop.run_until_complete(go())
| [
"andrew.svetlov@gmail.com"
] | andrew.svetlov@gmail.com |
64010d201f6a37108148ac511448e0bfea8f5f95 | 2d9a17e2b896d2f6a90913a4ba02d41f0ede5dd0 | /_gsinfo/qiyecxb/test.py | 8290bb75e875e96e55b8b937147c849a9ba5f19c | [] | no_license | wolfwhoami/xxxxx | 1cf2ed2c8ed78048d87cccf2953ca86c0871a783 | 670787ec71127bc05c1645cc3d8ef7c3a91fe84b | refs/heads/master | 2020-03-30T00:44:55.864817 | 2016-12-16T01:45:03 | 2016-12-16T01:45:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,842 | py | #!/usr/bin/env python
# -*- coding:utf8 -*-
import os
import sys
sys.path.append(sys.path[0]+"/..")
from spider.spider import Spider
from spider.httpreq import SessionRequests, CurlReq
import spider.util
import pycurl
import cutil
import json
import abc
from spider.runtime import Log
import time
import base64
from Crypto.Cipher import AES
class CCIQ_AES:
    """AES-CBC helper for the qiyecxb API payloads: PKCS#7-style padding,
    an all-zero IV and base64 as the wire encoding."""
    def __init__(self, key=None):
        # Fall back to the application's hard-coded key when none is given.
        self.key = "9BF70E91907CA25135B36B5328AF36C4" if key is None else key
        BS = 16
        # pad: append N copies of chr(N) so the length is a multiple of 16.
        self.pad = lambda s: s + (BS - len(s) % BS) * chr(BS - len(s) % BS)
        # unpad: the last char's ordinal says how many pad chars to strip.
        self.unpad = lambda s: s[:-ord(s[len(s) - 1:])]
    def encrypt(self, raw):
        # Pad, encrypt under CBC with a zero IV, then base64-encode.
        cipher = AES.new(self.key, AES.MODE_CBC, "\0" * 16)
        return base64.b64encode(cipher.encrypt(self.pad(raw)))
    def decrypt(self, enc):
        # Base64-decode, decrypt with the same zero IV, then strip padding.
        cipher = AES.new(self.key, AES.MODE_CBC, "\0" * 16)
        return self.unpad(cipher.decrypt(base64.b64decode(enc)))
#req = {"encryptedJson":"ZlWT15DsXFm0Y4QnYoK2ufXYi39Plo9\/yhwguqs9FWAHRqkKsKobDI+ai8+GR4NTJNeaHC7hDsivmsbOkOQ\/0lHsES3Wl5kF+pLW98YratGzlf4Tc5qnXiNDVUrc0WaqJD8obqeFhJLQsocfxB8REE6XpIbzthyB+CHX3TQpcJskJEZkJOyPxRdg9PTsCjTLPmgNHuWq3fSNyd3DpR6RIl\/AJNb+Ex70Uf0QDarg3koMErtDXwvcnEtxblp3kaMu2QmXxnDbkClaGASOP6ZsuKgVu6LXdW\/KOHk6cP+\/tEQ=","extJson":"Hoi6oX70l9whauZmjq8jVAmoe3UspXXhX9mPG+KAeqs1rKZVr\/uapICH92P\/CrrycI\/OjobbzuafHXthwGM38\/RMXUoOjROK+Psk7SCSv2\/vBYNK3RYrJk26Fgu1HxLDg9LWqdILYeoDE2G3IezMHPYyzrU1yEoFGenXS1U8gvc="}
req = {'encryptedJson': 'j4kzTnVqwSoHC8BLgjy0n5nZl1zDkMKA3NY4NVwFfL+65BzsXkmfBvobIYdT04u/Gnyzr5ku9XUk0lJLny6uOx9z0pYqJ7cqA+KQPlb5sjJP1aBtfQPKAwFe1KEe35yy3uk8dChwBgt2Gaefubx1qb3dNP7DIyiplJ2q3WEOuvqAmIZRKet6MSds5NRzb5Kugp0IUN2kRqO10jdv+ILkem8pU9ZNxRFaqlpG+d/x5c0ouiCroEudVE50RN03eXxI/+HSudcalTJ82iWGtK/3LC8vHYl8hqz+jUai3L37/VDN65BRAEezpd7zxibNjwMf', 'extJson': 'O3wNDhI0rBOcBnMa0PfzrrQ9CVUtEGW4+4ryKQpX3Q3LSTt2psf0V8gzYq5CObmvolnCfTl8Amfk6qUIRulQqbX7Vi/hzCI44sUJE/vx8XmDT2whbjcnJFx9mmf3lAkTmWe9lQO6AfC+pxWodDf8Og=='}
vv = {"c":"yfqNKTQ0trAStqJeTkG3hBc5zyK8fgp/q+Zmwq8EDUbFcom7Xy7UdSIVX/rClF/ZmsA3I4nJUHA+5o6iURC33vNfUj/tG1pdSESwmP7fSgcG62JDjw0c7qOWU0pNr8oJy5+Q+GziteEoJpASp9G439W4WqC6bwfDgP8Serd2KGoIV95jht1cqr8GofvWI9ipWXpKj6g3YW40jTZECZpO5pVE65y5akOlEjdqE35CUKoqhFFZkVooDWPLLQC8YlsAHp17NOW3aNJHtyRd35XGsQWLA79wqorrUIjCwQ6orJqPnuthJdtF37/a8rp7D1tGJUxteInnl4Dp4Gmqdkc5oobNuRg4MrE3g8ZMuWeq7xUGGCThKoDLdP2eDrMmuFaTNHwDktEfn/gixjyA7xUbUN21A9JdakvIfNQA9x6FVg6zZdH8zExWDhq6WGFGEFexcgJZYUZUclCGy4TkQELnMQbg0ZxPcu0YKkyf3FOaKN7815wln96Vht1G3B8F7TutV53S/9E4YpAiNR5vWp7kWfbU19jmEzWtBMv2YTUZQla4XHq12FMr8XvqkPKrh4KNCfOXkziwZ3sMHpr2658wRVSTgap9WPWf0DdZsaGXmO5wHyrVt6TXG5i4ao/ArWnTRANItUJziL7NOFEpffg7MYUXNJUQF8EUH1cRV5DW5mNS3lTj7v5Rb+OufvrB2DYKiQWrTjpZHAif2cLy1ZHPKqW3VZhjcwWT+VaJUGJqPHtp/qmZr/iwl3VJdfYxnabl9TrpZ7IcE6O6JRDRuMxfpptvNnxzucWlABITre1CzPr3cQvYatv7UOFY91TEHb852lIjFafRw/jhdP66jiAOKJF1Juz/A+2RIyaiIKNnAzBHmxy5vg7OfPnoM3e5Yd7/NvGuBt7oa4+T6+d9RJgCSsaJJRMN5+Uw/uZJ1aNNZeQCz8BeVGIdyp46Ub0aeyoybTl+pZFeuULqZSO/wwSNR2SQImpsJVLfveMcR96Zuv34eJwwrv6F0e1ono5G640uoxZXjDo0G78pkcyD1QrKxxSpTg06/uMr9ZFcmdb+wvrC78hQkyWLSOG3Llce+q25ps6iCsIALndu3gJxukokDi5ILp3FD2u+9UYeVV+1kep3PuK31/em4+bzKTFFZzG1Lw5wvKpkUfeZwB2GgnnS+YCX4bCK6JPRmCIC3J+DVf7NBNk25vfHNVmr3Erw7h0KWSwuRz6gVSHj3g9BJJuwd19Pc1oLJCsHY4U8fF0mrL2v4BqwZfubv75tqLYN9HtUb++HzgWso6RPvhFpxww2pvv91o23OgKQ7RtXPn4NL1WhUmnSP16StFAR4ifyqRQLcFVGxvzFRAX+PFyIXKdYvbwjhid1oLxUk3grandJAtdd8Dd03r5SK7GmEXyLQODxE5hCnIFq1qGhqAVYXJ6GRJHYepTDRQ8pYUC8QbXs/5J5ntSmZHzYJNTIrRriwzCDUI6HazJr8iTKqeliWdGh1WypgjClvpBeYyuC9ryk74twXtKyB5E0OwtDfKPekMJ8oeH3VKiWtNFHDkwWDZMoi546JHCok/ynzqXyIB73Gq9XHiXra6nZMvYVO2HMIljoC4lfAnzDprhZ1kMgkKxSJHoXJMle2Z2Hfmn25iMprfbIsIXNdz4SRrBmWqw0qkY/pOLDvACltpOrOkL//3wXBwQN2AP6zk6E6G0qMK0MBfFPQXJ1cdaZtTXRnbgEBPqNddBwO5xnb6o8vwp2veuZ0nTxCg+X5PrTQiGqJ47JqgbgaTCfo6NDftz2Ut3N9BgRRtPBY1oWDnISZp+5qFMQYno/PYhoQSac0E7Qv1Ls9hPAhea/l1preojLXZIzbtDi+2t8kd//KbgZDcajNejtGoaxdsnR/ZNuPGvv0c8ijWE+CRV47XWrMKKrFIu/ffqTfmbANXt6GOIWzKRomYD79kOGMeSreHbauf7QfwJ8jTcqk2g8sZFMYhZhLgiqQKTNhzpvN
VfzWv5clCpLOVWWft0xujuXI5YXCBvuVLAKn1DYJXcoBq6RfaQ2ZVVXaSCTKxyIPRR/26HqzSDoVx/8CHTtrtXOtsl8U/h1Qo1arncUKUfLX8htYZX+Y/APDWPd+vfVAlTB14VP56H5pPLwBNYPJ2ayqeq8tFCI2NK4eqiZ4Ts70RayYbt8brIQim1UoRLMH7tqXqfIbJqnfs6XlXioevRYYvWcStue4C2yzJZlFdksX5iYYHZ3T3y8K2LO8OZnqo7qKprd2E88IpILF2c26dpCFQg/YIhRLSm4faWwll7Y9QIkQ8PqXFjv/g2pQdux1l5qihtftbsjRh2xCtLsBMctGvtro+HtbRoEiEAomy/UC63pKnYQJgIsyIniuw1AJhjrbFs1Etc+RrYHtqysnmO2NJ+r8nY24RUL0KFt4RzjdbLUSkcpxpOAuenKB93f+qYpAsXmW01mNVo+u1bWl9OVI+WaYBJBdhrR7OQymvtF7bhJJUl+2oqEfsnSpMURm6btYPjNsCAHIsW/Erc3jUSnIGg99T2EYLfLfD2SqDcmuKwQSS1p9KUTx6B3T/vpG/4a6gdfmsyoBei7D7SVk7M4YMQH+HT3UqdCbv5Rikovvyz/U6UapYrsmRC/zMZJjnzpwi+iC0veSq1r+g5kOAoAJgwleLLpeRVes3jBql/tEws6F8zpS1TVTmqUp+Q++covQwwc/60PLPvfo5sQJYwEwNKsDEDhU9EOECyjgrEQjuDGrZApNOv9YXtS4JEpSwKb4Gayhg6m9iiOwVFPBJysugp5WQjgxFuSotX8NErqRsiWJ2Tub9jPcWd0ldjwDqRhnHEcIfmlS/jo3sjczUImB5uP4L6xtslTnq8LWEu+pSpUT4QGcxB1ynaOdGHXVSazesfdTIvz5MdBvIJXKenRUd5I8k4hkde8A3ejINiGcH0G7zIeDB/Aa0HQKvAsKmPZv4UQW4occ9zNkSBIYchDQJAG7jj+GJSDguYIslJwfRysBTZJK02bpEPccu6BF0hC0n2c83MB6ph9AYRrdoyYxDiHpS+J2sm0u5khpUhOQ3esJgQjPZ63pNnv5vPlSHeNtgbdNwdSBCncssSldU7TpBJm3ym5y7+D18DuOnOPj17nLfBwtWwXdfqsAnYwwETIZoJJi1NntU9HZ8BWKgQitxaAf6su/FgX+Khu26lzZFlcpAnHd9oCdec84kgMZiy0pM73AzqS8GJTpqYR6Qr1pPvCssu31ZlGZiJndctnRWZfZNR23IGrgsPZwYtgWtKpxNserPDCLuDayhc5T4Pm96J8EsgFC3chuTnVcxHiMewGFUNN9H2dQthrzH6NQZ6jiinqJxhqqnWR8p6E56c6CnW/qlTWlShdkkkgfBX6H11PIAAkkzTO41sToFtgiIzvZqAATpBxlTOKrVKFT9ui49bAGOqE7EZQRZJvI8jHWPzfGezVU9dH5cmxkvYH2t8Hp68YWrWIROdx9rieSstv9+uzVt1k0s69PUbCq7cB/RiPssdGlq367qNxAPq9SdV0nXb+iaT2dtMtMX/BjLP+72dJuL+iZUrMCNTxZxSKe63G036ScNtdVU6jfu8o6J9wz9WwQh2bH6xDu32CIrleLeCKVglqNEatE/k8xVSF6VPkXpo6h/rqsRqWOdZ4j/LudfabKOejIuCMCffifSePMF+YEhVtk+wxp7zYEWh4jg3prKtuQATCMzUxNty6JxN9CySxZPmi0ddVrQaakvnCFD7sznDb14xUi/I5RjqorCG6SrzoJXFEnFsrQOEJyhDwXC6XX7eWHWDf8Q4QlNU4yhq6uVufGMxySYQQTAutww2sU03ejJziYiBV0tv4upQF00MRlOtWlJGCLFj193R2PgEUaJW9I+CNK0C45rGrQ5TOGtO2BConxlAsaW4M8oo9gZHVYGOtWvVQGlgXxEQcfBrzBfh+UvUKVI8O3Jj3gJOkzf29NHYmyzHETYVGk6/XXAyYqcPmP8xWI
W6PRwZtj7IQhxAoXfStOBZwwR4akiM5B4ToIBxN8IN2twfPUHUo5girbW70AN3PCiz3ECTfprjBTNNsxBu93AsKBDulnQH0WPsdtvRqwWsVO427uZ+XGS4NKreYzAU7gRo+P7S/8msPboInAyRmhyi80UuUdZaKvUMx/6RZ1TolDtiyJNIuC5ufqLyFBNi1GaZYAzprc2jJxMgt07DbLJorEVADgpgULSgZOj2d42bxF6VnARnz/j0TaCKTPez4wkaORUHCY6y5Wxwdh4LLzYhvipWKD+668/vNpQLkoSHiVryB619uhMb2DO7OTPmzgvRSpz15OvlVGC2zeCv47jA260p8uygRBtzS4ZqmCxzsLnEhlUr3VSj4ospvCa0NGdwnWOx5FdifzmTpNQQd1bwZE9u2Tbznhpb6VHwAa4Jcwnkz9VzlkF/TYj9XLj6LQUBXI118ISkz+2q8jxF05jorsE8Ly/3jh2PBBQl+vNw/x7X4O5T9obEe0fldVyhEXcd2Wk1F91LSzKtpurtMZfwHDXO9xp7dY9NkQliHbGAj22/uvsfWtscPcEFjMESQJ+DAL42McZiBMU7P1yc404FJkTOTCLjJZEQHhiIH1E1O6Xv12rCiovf+ZUBKfHqwqc54Z/rVoy0j3TtZk/N2i9ftWeFE96bnrIo4za7CL9luZmJ7qDs522L798AAMk2wdCMjK2Vxjc0PMwtNIW6Y/SQBhPmf8dWOsa/bg8XOFpbUl6JgWgETzDDIqYojXJf8YXpE+X5GYHQlV4+HV34uotjHytlqO1HNyRgUNR567mDG27zZKvGuUxHqKRYV35iLzyX6VRYzcIvZSPeB5eDs6tibO+KJ70L3NNrD402+CwPmJjak1BLINEw8ZCYYD3TI8OEDhRIXuR480QoFFc9iDeXFfJ06f2RrR8uSGuqksC39yrZ5lb3dJPtxmCL/ioAzkeRt7MKfEp4ZLopep+48YVvc34Sz3r47I8mQdQ6BVh+numD7zHeVHegHC5YvjZw/7EwtQmym9uxDzuSHGJQa+Ryw03iPU2qO7D0D85dDIEN0Yw86Qed90pcYp+2M66DPK3SZkVWuZU9hbD0E7w3I/Ha8BEJN3A4czeOxsqxFCw0ZOPW6DARXQoOg4DjE8XhNhI6/ETHs1gQP3p/9q9HiNAFvba7YKSf29sBhLzPE4y6U4l4XPTwLF+ern5ntZgoMQg+oCs/GmAJYgteiw01BJ0EWeLCNNg3AsUJyJ3G9VlvwWFNX2CSodCupr8PT8h+6O37wZENI62xjz8wJgZMf+ixpgmtoMSOxhTjFbPCRJPSweap1j3dJ58xVaUT0yBW/Kxpxw7gVoN/KnNoRWxBc1TOMWbjZ4N6GkixZWtZDCrWeN2PUTWiJAbikRYEtN0QgBKoMs7ke6PTMXge/SnE56THF/dL7irrTC0c0Jz58di6zKfo3FZDp2M6UaosOZIPqJGS8rsCCCn6yUpQyFfzXUfXEVH7mmVIYYffqG7uciVYE8KOSvLvtsh0BM5niVsFdkPiaZ4fGKLakXiRH+RaHXWsR9LxLtNqjp5vubUoW5ZvV/27+CSrbTTRx1+ut8PN06pLJuWcUQrAZAXAh0sJGbOicll1GpCO3UK9o+QZBB2mHc7j37STuWQQ+3upgVuV3Qp4jogXQdmqrR/vIXlovIiGMRkRV5s6KqwADNrXLX9PtRdQgd7E3pHylHts55NEmap9mNFO1yBx+17Zrew+e9aHzInUM1oA5kt0SqiQzO2SUgm/e0mQW6z7cxZZVIMi96v8a8GBRhwRCcJ/1865O324gCFhkQLNyLiBVdKcM+IuxM4fKTuDA+nLjx/noTruN0RKaDXaJutTbrW4C0+A/xoy7XnOS6ne2RRCDFX9qNP6lAdSFobq6J3n3/89HZZB3wDlOQcyRT95WJmAFyCkGRKwGHtar+Fs0bYClVusJvG99FDfljsI2SJM6WrXoF/dkmlBX15QA/oI3KXoQc
BXZjDjA7DN3/2Fk+h9WteBp6xlmP11Idry4dS9ijgysH1befw9AbERSHLqGkzFNEUuuKJIYWQunNhjP/IJtDawMU8BFJE7XgUjofXkozgPfvZrsECq9hd9NQjXaUxsJfE/DrBIXPXBpZ+JcvLjUpQuRD49P+nFJIwRkWCUdNRtVOD3aSQtTElnFsbKXfdEDQP9nZwNpLJ7/FeVR3FakaS2TZV4rSB3PKD5X6lsaKjAoa4bZP7EJgziCHDV36l1HcPzJVlUiux71wASFNiBiPPvOWmN7wK5IadaODXXzqq59U51W+XnpHhrfluX9cYUmiZ5Om2ST2YH3x8R3WhwBrWUO/Zl2p6+Yjpzmq1zP27TKi2Uoaf6JeNA+HQaZrKlhVZvFZ/ZuXuT/99ID7pLQkX/WOx6lJGp8JYP/ndoZY8rwYRAec2zFAsp8iOc3v1xIfzJ58s95WckXE1i9ATxZpAeOXFD5QNILqj3eqaB0xL+NsJRlsQ8nX6/+qdD2Ow86MMU8knCE7oYGmCcA0C4WN/TXruiSQOmFm4lvDcv9FiF9wshvmua6zP3Gvz/y0cOScyn+WyCNpVrkGy766NYfR/jW93Gg61xZuw6PTBOXnZtHGGZsnr/+WFmLetAn30DYWdMM2fg7LagO7OZrLKvMXkpF6Q831lT0SL93Ts8rgq3Tng+livAiDHkFRRcUOU0sgPt823LjKRD2PSCjjNw6xYytOk2EQfscuKmMvV0GI5sPq1Oq11ljsbBap9qR2CyPrcuSyLoDsZPQsPQ603YBTDIQ9unqFLkXXoezs8q3OvdV82PsKTpOXdEtWH6twlcpen+6Fe789Fdck4G0glqbrEpas9fxS77qEN+GPAazBloFDFv5GMJU+LXJchzo9cQ8aFMHryVeXnhedToM81nIP1apsDRFghYddsu8LWK3FU7QTg1iNQ+wiU7aheCxDoOVH5/BEgasY5EGXG5bmurVYFCQFUlVjapUtisWcL7fcIuP/GNVaV+sp/K1CT02BRUyNJ4BideKVHTrTLHkgDF1K2GaXR8wOWow5aNfBMXdi2ucdNjhWirmvpm9bm5JNoAHXcG7m+sKVBj1jNM3HkMnuO8WVin7rwxi9QUa3g0JIBfsXi7/4sikvR2lwujLYkicrfLSBW5h87n7n4jLOvZ1LtQ9dD+C+Q8Ahmb1JqDF6ztZBZvZrNpWoe2ohLiJLNf6ailhpXVdvt2mYMdLfA6nkmETKMQ4h6PbVZsew9BBnZ1MXMc7k4zqDddgCxQb1WCs6a1XhcTDSFhjRcEqdTirxIjEiFCAzHL/uPr0k2GdleuJ4gREQ9bKejfBsGWPejoTW6SnLZMZXrUJNFS7dhxMBUw0btFXSHc7+IusWQAk5DbKqRufJpVn4md0qsqgVp5y7mLIeR0v3KJ+yzntddMihSsrIGowf6SeERXDiw6In0Z9pvw2XcM83ympShUL34iAKiVbWzI4v4i7mN1ieFyKr+pufIE/l/Cns/kycneSYHOMIQD+FGEMGDXBBMI4ygQdVKP0R8xuByEs+/Cw7vS0zdbEtSS2cNrzkmhSkzQrQdC+T1oWt727tebRk6Yf+bwL7bQuA4muw/jOfnTFXBPu1Z5m0pH9ns2agEq/Ik8CyftP7PIuAQx3r2KT9ohyBm7c5tMarPTF6KHFP8w8iandB/Mujf18oX/KBkp4QORAwKcRPP/ymIgQZDfs1PUtJo9huswe+XV0ujFdr6r+C6WmvWGGb3m2PZ40KJ2ad1OB0CTMkPQ6FiROQbiqyrO7WNFkdFGi0WQ55BWzGNNDs6wJiFDTAf7WnzI0m7EUmlBegPYiFqXYPAjqUutcFUWiukl9w+5Org/rYMaTGTr2fXrSMzjKg9NSku+/lj4PikjYMASoYAKCbYSFjKeXlKmKo0yoANHiPuNYvZOBndIuOGcam9dnkGWzKAI6n9EaRh6CTPCmNCAUSmb/O7LlKoUTUaJPIkxtBb1JU8++0bYm3
Bv2SGHcnVLKpWmxRkIfiFOfdiNl4qdNbm++iEs9fT2YDP9dcNgPXrJe4b0P+60Gj0FtfqnEC5bL46zMOzb3FyH5193g32yeb9SEoUFUzxTarR18Gv3FhNaDqlRKhrqdQrgB38FN/hjWw4gHhG8g/pxz+TVWMuq6G8zdj9jdW0Htf9oUQQ1XGF6vIKju+dJeiO+0Jn0g5Lry9ocByr+2C3ZHQmZ3uPcgAJRmZgZ4HItOD+Mb7ySLOFgqf/78Sl27b+q827HCdCzSrA4N51Scwvz4nUDaSA9bgaZfqTW1a1co3pkRQJVx20NUWZ5H3dPQ4oLzlSQR28NirMkDPQJlTIyT3+7Tax+FuUU4SNe5Xayb5agqPlXfJ3y05ru3fiXbY0J0uEdE+clPJkPcs5rm9jp6f5G5bs0aNsJf854jHW/8MihrZqswB/wPKsvLSfn07slOwS5ioMZ2Mil8fPIkGp/XAU0cO0hFQDgPsscalkmfdWKL5QjmG/7Oge1kKfs0fbMkXSi/O30d7NDW3tDBKJqhRspHCxIAfs2lfF4raiELZTnhPoWBq17P6sF/fNXejq5+0gEZSmSn6n1er5mo3c5nX+5xw91bPidwI+hURhTt3BHfsfL7Qc73wgVtHmFIrVqih2YfuztVgZsucIkml91oUTuZlVXOClqKMgWPZ276uHpLI6elBaSeS901BV8U3oTtdD50mWrk9REbBqxuLwcQ6WtYAy7sgELz1igHACAbOOQtuEgEaLJ53gIjAnyUcWwrdmOOGr0B+Yv47gDTX0xN+r9Jrkz9XQtPPVUicMw5vk9S6J8SS3vyIvFV+3qwqzs/S0g4Oh3GDGKRAtvnsgBdrI+Stg3fW1kpyXPTcKEOk9AA/0mJBznUAZPHzpUsK1YVH9e84rfv1eAsrqSpyrkOU58jHI0gFo9l+0nS3PUaowHqdAIGBmo1bQgtPP6vACsjy5/a/edQNacrp87wlTxbj60RyRMKt4p8NlJNFqPRSjJ91lsgv5kYZWNtrDuA77RQTz8IvVPOgxvSx3qCj0/ktJoMyJC/Wg6WsuQG768ikq8SBkZ1BoQB1aLTUDWcJCg64nVWDoFL0oelpE82us3BAJn5wOX7jLOcVTXb3fT6UrhvAqc40xaHFNhTH1rtcvpWpbBl4tvXgAgeWmxqP1JEzhGXd2Sr34Beg+hKoq25dvidX6ThR8QPzppG/PjYzSJsuWPGlMnLw1ev1zn3vUdoWjtm8ZPVlIodpEq8e0Y5dyKi6/r1ee0O/78yYkKXr5WIz4u037aTmQ2Ow7iW8B+jKwzTB9xJkgpnnNxc6kF1Cij0zyrzjURxDu3dp7CKDxGu0RqJXdn9wFGHm0aLjF77h4Wy+UCn6PUJzxbooVP1VlMoaDUlrq11JSB/wlYSA6O1JCWpXsACaQ8OuBqBzuKbuBlCSVHlzMiuUlbGjbC+8gdw6yknPiaXuKQ1txIgv5RsXr6M0SaNUZpfkVB8smOIw2o63hp6iVKFHyBZm70peR7MAIHfQls/mBlkVKWoDS5gm5WuUyFLYVbUdYyuIWOp4MtGclJ6SIguPP5M5WfN9JjvZasS4MMhkU5NGZZyO5EHJEffJgXI5Ee+oWNYRNp/B7W6hCKFrilxxZNQTiiah2ew3iX9qpxjtOjKq25kEqEli6AgCIB49AMK6c7slZU+ef3FaR0X+vbKbxXV/RBI337LRQSd1qyvt+4gBgRKozL6TAkkI3utWZLpqKRQfTVNp33yWts8l2zwSYYX++XoCVsAVrSfK61eRETiV+JSSEiD5kxk+omaMBWheynEkBZbeoPiEBRKjWR1Onqt9caQeLmgTBtJgqfqCOBmWSEmGeF7zFTMjtu1EGF0vQPQCSMSt6gpttLoiOUsu8Dtm9UqBGiJefsEhwnLSk3mEVIOtdt8rCyGteOgQviRMYsXz7P7FvLWjTOfOE6puxOquC51gtZYPFQppOCdFwWhk3r52I/hpF82aoZoisYoO
IjVQxiAe0BKTw8xcm/SmI5p7wy9NHmUlb5gVmJmOvc50CvRkJiADUXZITSNO6EhXMG0Ua+j02/V78B7LiK4FYXeegAYcpzVutBuFkkF2RJbL1WGh4BeO3/PzPi8U866haQAaeZ9tkxKprNSS1D3Hl4XJp6AUQXDB4FPYWUpQIIy3ojjDLakIYRcEzUIrsKKFWClPHOjKQefmfXP+BZ9v3RizjPjWPr4ydTJ3NrCGZgDnsjE1efpWK1rTVkMpmceOb+HPt/WnBiE+Bf2YjEAu+zokJFcgAbJrgWsCC0kVz0u8draXGzXmbWcvkh/BBezejXsNMok7MOkN3/ueeKzcljifXiJtZhbp6+hCY6vIWBVM/p+6x2oZzn4eGkpzMb15bGT8/EpcK3Vo9P1vviuJgnR+PkTR9OqiHofQ6EQeU9NG/LgwzR658+1Eai3+Xz22QLAcRAk2VQMgB05YG7hTbhwGaOZVuJ6d3sGKVFfGKL6hC9DC1w//m+eK8knSaKWb3f+DZZXWN31UUOWLVR0pikCq2T0+X9aZ8blhXJs6l+VkbNoiLvm/tcioolt7Mel/HNYZAZZ1Q5E3MSMQgfEjuOF78AgHMmvJozmWut2y04ut/7arLeFEIGQt75pK5oRcLT+Wthe2/3mvwap7425TwNCIDuNreDNvGBHEz3biAuzyS9Zixbrvdn0KoadprQlt6jUVBtcMs+nRQ1HTVcM5Q12cTvvWAAembi/B3s1MtqrbBoK+zb5sej0Eygz8AaIr+Sy673WaPsRwxCyLklKTVa0EPlxiPIOXYbbWESjbS5KJooYetyXTp3OOoII32iH9mmMk0X8Xtx96JPRC9EPsf2nTRpxDuB9DT5EwY0DxmAQbpx/dtzfQsxD2ZKG6j2wALTPIOvm/L9G2sul01QfMsqps2NIq2CCPCtLse8qdVLITxGpfGsos7FLGTzTw6/mQ0vhGlIKYLgKMaBnOMlBAKutvHjwBZcGPisl6UNT5LIRy5r6tNvBBQogjr98iTL2ERRipbgE0KZiVqhYx4aeasNl7NqxnzlHGeRabW/aDLhxtLOS+zCm3X9Po7ePHBaYXDrc0eWUeU+GAHA5nw56iACVKxd7tyuGQfgLeOnvlZg0LDio8ApjqgZTGtfkCDfuWOXdCsf6jr7X+eyMWMn1X0QQztrbIPO9Zc2bEG3ubAjVQjN/mPZaerPof65/oe/IYe0Fqd9nFuZUIbLIIzSaug0qMkbDo+4VlvgLJtAmAgKdX3yBWHajorH7GqQOTo5X0gmENbn0JUS8kyfRU61+BDQmO3U4Ma9+382c7kqhNHuFd3WE2iafYW/DjSz17Tny2i31fXo9hLwL84pCqiuBQjvbxj5rQTGwsgDDdNW55KuAIcby0d8IFMIFsLDKqrvpVKPxwsRdpDqlkoQF6WsseUNIte2/yjn+o+oGx5+sJQsjQCYu+9msVN7SILOGuElcxYS5RyjEqkSyEoWF1xCaasEYGbGZ/yzpg4L/saKyv5jnQ065FQkcjW40WuYaVAxM/sLBfW1WM8mOATFOgwur/XjF+Bv5Cv0IvKeQLt1Gbw0EbQnOGvFsEoDudHauzWM0Io+figqs65yaD+F3EjRVu6NB82k0RAt2DGImYHZYCTl32Fmm5ChdAsGJtBZNuc7mbKSUAoBldDnm1Cc0nvg3/J1TTZxB7skYn/MS0rEC9x0LO7sd5H8kLvIM+X0TSwX4li506UQh3nuPgvQLjYCiDV13PJ/1bNuWrUeetxilSxDz5Kp7A1mviNLbZeGcm794OoeE24QyHEKA0V4FAakR/OIqhCH2MVZ7BCLStcUnRl10ctDXwDx9s1VG65us9v7mkhCbInL9pm2wOGItMu1ANgpnwQZ/4rlShBpzDPTG/4fjPB6owi58V4erJAPJu46xhNaN6QO8r1h0KH4bO6DbU10brVfG9O+7VgZ0r2B7OUZ1Fojw/e6Wsx4eX70ECJwkVGKOPHZBJSoiUEMRm
A5Aydzp57tpGs0+hyPIplZvUAdsG9vB/NQDhszTY9ovMuriKk7lWxoiFplZfqgD1Tgd7fQYgfa9ezXcxsTf2ge0520pEopdOvG1vQZarUNL46T2en8DJg6RBcI53fZ+n2es4Yq1oA2Vm7RLKhb/RYw8/XOCw3GQ+dPq9YR1S8AGrSuhSzKy23G19q8fBXnef2JW7mzUomwonKSIy5Ee5CndoCwJA0dRQvKAi9pRqkyp+hAACTbQBGKvJm/aUvtGsABkmSKh2YAzpB7JjU1zfWitQNy/6r4ERf9u18WTx5dr7BHsd2l9P2PEjpo7k1dO+ViK9X91vIZWFpDxfhSfh5q47c4fFUefKbQdFksFzfPx0AZ/HS5e+o7SVs1UKzO0FXi4V0kG7zjAJBCOyt9dJhC3xbLYEkx7x5fySIOCPzK7eFpn3Z/Y9g3e+mMaYP2pDH2CRVjZWIdxKrvdNY84+y4zYbuLv+bPKdor+jxhK9uJoAemNW4L0tcJHG8DdNu1izNmIRuHVxQN419AVQMSqzoU6jtH4JLVy9BW6TIOJONnrzwVcvt0O3rAb6CefgDHQLq6+WndWa0VLlk68/venjDe54pVXgMqLdzMjFKk/GG8jrZLRq9l9M2QDbHh7DjUGtDf02P80TDo/78bxH5AOt56mmurAWdZSgBtjnWUxBfrrP7ygB5TEoGU4Di1yet7/TLOWAXC8php2UYFt9ujRKH/RaQkWw7e1pj2us8O7kZI00+Eb/vvrmBdVsyBEr7iuaMB6sxCkuYW+gWC1hWnIWaXK9Kf+4zV0vI9k+RsT2wPzJbEKxYJt18HSgUWCeQWnRqHQu7w/rh5P+Qh/a+3D6om2LmsnGG8roMHcag3YP1z0jaOxMoA97B5VYS+ROGsNkQVPyZdaOo9B0p5Kjr87EA7Vm59JEK/b4BUQVoQouIz1FcnABZdyU8i6bfc0jMIWXzjgGggdzM5dehUFB3VX7dAWpGqB0fJcqeDRBDrXQa1qTRGC4xYX0fwnUgrewb7YeIhuE4eDMbwKLioNYdx5vXhFpp02pb06bP+xQiFaZsb81HUdmgw9+Han0IEKRSqkMKjWV3LFxBH8FA49QXEXV6a0zlxoZxCdY3mnIFRSAo+0N1tDw45dHVf650YuF+1dxHH1Er3STEBFGgEUmqFehilFFTxl0SuYjDD4KRk6Kj4smrXL3mDPXiOrR3239fr22ExwWJ6v3fds56LuB1p5zefN2Hy53tYynzn7ub9yjrIaldDeBpvu/JV8k4cwZKeiET7WFKKjP59vTcz0FcjZSW5ADdvLh+HMn90tQKP5lfUspX/lNMAvCJgv16yNsjTfTsefYLboKe4WAC2pGSEUCxiyw/UR7SF0VWHJi3j0v0trp1Y0vuZ6xn8a8Azrx3gI6SS/iNuiBA6SOA045UFtwGxsuzCx3viCFIKUmujhAtv/cgKqgp1DfYDILUvISr+Jn4I60fDHYMw4A4W26CWTyf1FdaTdm3qa0/i44pBgZI0YL6CQ0hRctnQd7NNC15Dm+uDIX8SFgBqZ7T6AcqD+2HQ7ctN4hgDo3TsvnDAC4uYBxLMB0baY5tSKbSSOIggGTNKNXZ8LzC1xhV8hJ7UIAY4KDDSad6M3N/QdHVsuIsmq3y/Bsnz1k4y2C1LeKilxX1ifSeOdp5PcOuj7EuBNSrsu2LuwpbEyfi0rJMf9MXXEcn2HLLZS/oJFkj7fa80vwgBh7afy6WtVTrcNorBG7yuj6oqcpF7Zgq2OESTfzNR91hqN1YafLvVe9QHjohBPUBT9nMUgie8cB0jOFWcgb9TlfxHwy3C2Me/z2uhV7qJ1KmNml6c8MNeUZHoYN/CkMwYDogUyEhtEgaDnC/qojxr1QgjNVA5lYZEoOTboAFyC67cNfJvDRqDXfN2gIPZXmrsDpcxI1uvD2fIU1OKrAVQP+xcW3xb+4lfV3xCe9ZS2ZxxtxDT47HmeNUl5qXTV81hc1xjx55wajlU2
K9ozNaQS6ImSLfzU2lJB8p4ZimnULPf5j0jbq1zXdAHEkOKO2LCELBWNeBKqYdV8R1+zd38Qnh6ilq3h9/rkcwlXPwAUvgaKjk9iux7AtRFvTR+5JJwMCJzv4cDeqnmqtZt2O3/z6coz6zveWxlziO2PY0MS7ZSfYGvl4WqA9jQpinujjdE6Lh7BKl1dCWX3YfqffozfH3DjjIgAxyl2fh5NYUAJVYxge4x171maX9G4XyQAsbYZXyrIL3KdL4m9rC5R14OH9nQj9SpXBLp8p/KcewuMCXpV55OHW+udLSnEqFO1A0/wcOxWr4342t9id6aZvRvPKbs2blTxS0CZg5tDc8Q4s/HwTeQOnTUsG8pVRC4DF/r9156Sl6yyemRUfYq25xAgwcs+TYHbK2lN17CIYpVk2R5TBCUFrsqW7Bdd+7IdcWECI0heY8Z2JcQIzJVNOwGkJK6yrue1HV5RJTTCWsG6CeZUF1/TcwMnDvAennA9XO8x114tqDuDOHSqwzqBEwbVgJl5NcAHDtFcSa/C2K9fg0D7Mo1mJHflQpCpNhNLJdV/GQ1eZxXz5RM2o6kf5b05C9VVSVXluQMXnPbFFJo0fuMpvzuv3eulA/L5Cv1mkxG2j7EoRIYXXeVd0f8cm7FdQWiC7Tl5gDxr4e7yht/nRuR5FyeztnwhrqFWxQ1eroXdJVC+MUyCTG42zpWO4E=","v":"m2JSqrBtIXdQ4Vq6LTC1kw==","m":"BsRioKDdMi3oPdq2Klc/Bw=="}
if __name__ == '__main__':
# for n, v in req.items():
# print n, "=", CCIQ_AES().decrypt(v)
# print '----------------------------------'
# for n in ["v", "m"]: #, "c"
# o = CCIQ_AES().decrypt(vv[n])
# print n, "=", len(o), o
# print '----------------------------------'
# o = CCIQ_AES("BB1856A312580D41256311147089E0CC").decrypt(vv['c'])
# print len(o), o
encryptedJson = {
"pagesize" : "20",
"page" : "1",
"od_orderBy" : "0",
"sh_searchType" : "一般搜索",
"od_statusFilter" : "0",
"v1" : "QZOrgV004",
"oc_name" : "腾讯",
"sh_u_uid" : "",
"sh_u_name" : ""
}
extJson = {
"cl_screenSize" : "640x960",
"cl_cookieId" : "16923697-D73E-485A-BDCF-68FAD456AC02",
"Org_iOS_Version" : "2.0.1"
}
param = {"encryptedJson": CCIQ_AES().encrypt(encryptedJson.__str__()), "extJson": CCIQ_AES().encrypt(extJson.__str__())}
print param | [
"jianghao@ipin.com"
] | jianghao@ipin.com |
9d4365826dd9af614ce9b518bc6de82921605311 | 621a865f772ccbab32471fb388e20b620ebba6b0 | /compile/gameserver/data/scripts/quests/378_MagnificentFeast/__init__.py | b971073fd517a5f3915abefa119d21a16ab52a08 | [] | no_license | BloodyDawn/Scions_of_Destiny | 5257de035cdb7fe5ef92bc991119b464cba6790c | e03ef8117b57a1188ba80a381faff6f2e97d6c41 | refs/heads/master | 2021-01-12T00:02:47.744333 | 2017-04-24T06:30:39 | 2017-04-24T06:30:39 | 78,662,280 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,714 | py | # Magnificent Feast - v0.1 by DrLecter (adapted for L2JLisvus by roko91)
import sys
from net.sf.l2j.gameserver.model.quest import State
from net.sf.l2j.gameserver.model.quest import QuestState
from net.sf.l2j.gameserver.model.quest.jython import QuestJython as JQuest
qn = "378_MagnificentFeast"
#NPC
RANSPO = 7594
#ITEMS
WINE_15,WINE_30,WINE_60 = range(5956,5959)
SCORE = 4421
RP_SALAD,RP_SAUCE,RP_STEAK = range(1455,1458)
RP_DESSERT = 5959
#REWARDS
REWARDS={
9:[847,1,5700],
10:[846,2,0],
12:[909,1,25400],
17:[846,2,1200],
18:[879,1,6900],
20:[890,2,8500],
33:[879,1,8100],
34:[910,1,0],
36:[848,1,2200],
}
class Quest (JQuest) :
    """Quest 378 - Magnificent Feast.

    The player picks one wine, fetches the musical score, then picks one
    recipe; each choice adds a distinct power-of-two-ish value to "score",
    so the final score uniquely identifies the chosen combination and
    selects the reward from REWARDS.
    """

    def __init__(self, id, name, descr):
        JQuest.__init__(self, id, name, descr)

    def onEvent(self, event, st):
        """Handle dialogue events; returns the html page to show."""
        htmltext = event
        score = st.getInt("score")
        cond = st.getInt("cond")
        # event page -> (required item, score value it contributes)
        wines = {
            "7594-4a.htm": (WINE_15, 1),
            "7594-4b.htm": (WINE_30, 2),
            "7594-4c.htm": (WINE_60, 4),
        }
        recipes = {
            "7594-8a.htm": (RP_SALAD, 8),
            "7594-8b.htm": (RP_SAUCE, 16),
            "7594-8c.htm": (RP_STEAK, 32),
        }
        if event == "7594-2.htm" and cond == 0:
            # Quest accepted.
            st.set("cond", "1")
            st.setState(STARTED)
            st.playSound("ItemSound.quest_accept")
        elif event in wines:
            item, points = wines[event]
            if st.getQuestItemsCount(item) and cond == 1:
                st.takeItems(item, 1)
                st.set("cond", "2")
                st.set("score", str(score + points))
            else:
                htmltext = "7594-4.htm"
        elif event == "7594-6.htm":
            if st.getQuestItemsCount(SCORE) and cond == 2:
                st.takeItems(SCORE, 1)
                st.set("cond", "3")
            else:
                htmltext = "7594-5.htm"
        elif event in recipes:
            item, points = recipes[event]
            if st.getQuestItemsCount(item) and cond == 3:
                st.takeItems(item, 1)
                st.set("cond", "4")
                st.set("score", str(score + points))
            else:
                htmltext = "7594-8.htm"
        return htmltext

    def onTalk(self, npc, st):
        """Handle talking to Ranspo at each quest stage."""
        htmltext = "no-quest.htm"
        cond = st.getInt("cond")
        if cond == 0:
            if st.getPlayer().getLevel() >= 20:
                htmltext = "7594-1.htm"
            else:
                # Too low level: refuse and drop the quest.
                htmltext = "7594-0.htm"
                st.exitQuest(1)
        elif cond == 1:
            htmltext = "7594-3.htm"
        elif cond == 2:
            if st.getQuestItemsCount(SCORE):
                htmltext = "7594-5a.htm"
            else:
                htmltext = "7594-5.htm"
        elif cond == 3:
            htmltext = "7594-7.htm"
        elif cond == 4:
            score = st.getInt("score")
            if st.getQuestItemsCount(RP_DESSERT) and score in REWARDS.keys():
                item, qty, adena = REWARDS[score]
                st.giveItems(item, qty)
                if adena:
                    # 57 is the adena (currency) item id.
                    st.giveItems(57, adena)
                st.takeItems(RP_DESSERT, 1)
                st.playSound("ItemSound.quest_finish")
                htmltext = "7594-10.htm"
                st.exitQuest(1)
            else:
                htmltext = "7594-9.htm"
        return htmltext
# Instantiate quest 378 and register its states and NPC bindings.
QUEST = Quest(378,qn,"Magnificent Feast")
CREATED = State('Start', QUEST)
STARTED = State('Started', QUEST)
QUEST.setInitialState(CREATED)
# Ranspo both starts the quest and handles all of its dialogue.
QUEST.addStartNpc(RANSPO)
QUEST.addTalkId(RANSPO)
"psv71@yandex.ru"
] | psv71@yandex.ru |
a536535cdfecee48f5b8cb141595826e7bde7ba0 | 36c1db035d186f34a42a5a5ebda0ac25b6aafa0a | /drawing_with_variables.py | ada8b096c11ede3b680f8c5c5023f172b1326b33 | [] | no_license | ShaanSavani/classwork | a19c96b76e666ba3ea6ac9503d541eafd047565d | d14facfd19bbf76fddec66e99bcfd5fe14daee3d | refs/heads/master | 2020-04-22T22:34:04.377480 | 2019-02-26T16:04:38 | 2019-02-26T16:04:38 | 170,712,777 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 461 | py | import arcade
WIDTH = 640
HEIGHT = 480
x = int(input("Please enter the x value of the shape: "))
y = int(input("Please enter the y value of the shape: "))
radius = int(input("Please enter the radius: "))
arcade.open_window(WIDTH, HEIGHT, "My Drawing")
arcade.set_background_color(arcade.color.WHITE)
arcade.start_render()
# Begin drawing
arcade.draw_circle_filled(x, y, radius, arcade.color.COCOA_BROWN)
# End drawing
arcade.finish_render()
arcade.run() | [
"noreply@github.com"
] | ShaanSavani.noreply@github.com |
b8e37ddd6a647d5e6b5d91fd3f74e56d213ea176 | 8d3e6464ad779ffc3e2864fee3aaf948b8514fc2 | /cgnenhancer/tests/test_general.py | adb62b825591676768a7dd4af8b16345977d9aba | [] | no_license | danilofo/cgnenhancer | ea82586c7b65343be940404382fe817f9d3a5e8b | 0c8f938b24ec0c997629d319eafa79d10070d04f | refs/heads/master | 2022-07-18T22:26:21.579547 | 2018-06-23T18:44:40 | 2018-06-23T18:44:40 | 89,143,023 | 0 | 2 | null | 2022-04-06T18:44:48 | 2017-04-23T13:15:02 | Jupyter Notebook | UTF-8 | Python | false | false | 34 | py | def fail_test():
assert False
| [
"danilo.forastiere@outlook.com"
] | danilo.forastiere@outlook.com |
ad1bf68a660e4ab9ea3514eb7a9d1c009dd6e56d | e6dab5aa1754ff13755a1f74a28a201681ab7e1c | /.parts/lib/django-1.3/django/contrib/gis/gdal/base.py | 642a7c541e681e266343d47f00fbee2db2507210 | [] | no_license | ronkagan/Euler_1 | 67679203a9510147320f7c6513eefd391630703e | 022633cc298475c4f3fd0c6e2bde4f4728713995 | refs/heads/master | 2021-01-06T20:45:52.901025 | 2014-09-06T22:34:16 | 2014-09-06T22:34:16 | 23,744,842 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 97 | py | /home/action/.parts/packages/googleappengine/1.9.4/lib/django-1.3/django/contrib/gis/gdal/base.py | [
"ron.y.kagan@gmail.com"
] | ron.y.kagan@gmail.com |
97dcbef55e8a7a8d97e9e199539cbb5686065123 | 3839c7388496f4904ac5c86157191a0be8ad327e | /longhornbrewing/pagestyle/migrations/0002_pagestyle_active.py | 83ea194bc0ab6ec6d0d30f926b4462c578286049 | [] | no_license | UTAlan/WoodcreekBrewing | 748e58b39ae70c4309da099a0c3b9664acba264a | 6e731a829ee53d224106d0aab2240b580e4e69f0 | refs/heads/master | 2021-01-18T11:23:21.018073 | 2015-09-01T19:56:45 | 2015-09-01T19:56:45 | 29,046,088 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 432 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add the boolean 'active' flag (default False) to PageStyle."""

    dependencies = [
        ('pagestyle', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='pagestyle',
            name='active',
            field=models.BooleanField(default=False),
            preserve_default=True,
        ),
    ]
| [
"alan@alanbeam.net"
] | alan@alanbeam.net |
3ee86c8d0bff10d72f0e812e2906e5151dae4b24 | c21e65d2937102d776391b9ebad7f20064740638 | /semaine5/app.py | 646a6e60cc3350617c870dd0aa73caf5b3cd7110 | [
"MIT"
] | permissive | aviau/INF3005_LAB | 948aed4650a2c182066ec7f5372be4f83aedf0cf | 5b451c833318a4ba222282d7be74f5a265160aa0 | refs/heads/master | 2021-05-09T04:32:14.508460 | 2018-02-04T02:56:34 | 2018-02-04T02:56:34 | 119,280,085 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,062 | py | from flask import Flask
from flask import render_template
from flask import request
from flask import redirect
app = Flask(__name__)
@app.route("/")
def index():
    """Render the signup form."""
    return render_template("index.html")
@app.route("/success")
def success():
    """Render the post-signup confirmation page."""
    return render_template("success.html")
@app.route("/submit", methods=["POST", "GET"])
def submit():
    """Validate the signup form and append the entry to signups.txt.

    Re-renders the form with an error message when a field is missing,
    otherwise records the signup and redirects to the success page.
    """
    import csv

    name = request.form.get("name", None)
    gender = request.form.get("gender", None)
    thing = request.form.get("thing", None)

    # First missing field wins, in the original order, with the original
    # "<Label> is required" messages.
    error = None
    for label, value in (("Name", name), ("Gender", gender), ("Thing", thing)):
        if value is None or value == "":
            error = "{} is required".format(label)
            break

    if error is not None:
        return render_template("index.html", error=error)

    # csv.writer escapes commas/quotes inside user input (the previous raw
    # string formatting silently corrupted the file for such input); plain
    # values still produce the exact same "name,gender,thing\n" line.
    with open("signups.txt", "a") as f:
        csv.writer(f, lineterminator="\n").writerow([name, gender, thing])

    return redirect("/success")
| [
"alexandre@alexandreviau.net"
] | alexandre@alexandreviau.net |
7ce2fb4443b6fe30f3ac0b008c3c21364fe3f79b | c18ca2d47a3efa74cef7504d9dd2d7e715d6a88b | /users/forms.py | ca97c9bf83ff3f24983cb4b655d995cfd461136d | [] | no_license | cqh6666/SuppliedMS | d180a16de6df12c772cd947e58e959b60f7dfb8c | e74420f7bc836439f5686fa8653cf5616f62df6e | refs/heads/master | 2020-06-01T16:55:37.077790 | 2019-06-25T10:48:00 | 2019-06-25T10:48:00 | 190,856,794 | 1 | 0 | null | 2019-06-09T06:34:08 | 2019-06-08T07:05:12 | Python | UTF-8 | Python | false | false | 858 | py | # -*- coding: utf-8 -*-
"""
-------------------------------------------------
File Name: forms
Description :
Author : chen
date: 2019/6/8
-------------------------------------------------
Change Activity:
2019/6/8:
-------------------------------------------------
"""
from django import forms
from .models import UserProfile
class LoginForm(forms.Form):
    """Credential form for signing in; verification happens in the view."""
    username = forms.CharField(required=True)
    password = forms.CharField(required=True, min_length=5)
class RegisterForm(forms.ModelForm):
    """Sign-up form backed by the UserProfile model."""

    class Meta:
        model = UserProfile
        # NOTE(review): 'password' renders as a plain CharField here --
        # presumably the view hashes it before saving; confirm.
        fields = ['first_name', 'college', 'mobile', 'username',
                  'organization', 'position', 'password']
class ModifyForm(forms.ModelForm):
    """Profile-edit form; excludes the credential fields."""

    class Meta:
        model = UserProfile
        fields = ['first_name', 'college', 'mobile', 'organization',
                  'position']
"2018ch@m.scnu.edu.cn"
] | 2018ch@m.scnu.edu.cn |
11d80d981f0eb530daca06e154200e0096e36887 | 9a8577bf60595a4e5f0ad86fa64967405a8567ea | /projects/ex48-done/tests/lexicon_tests.py | f14357f89ac088fd5cec110be46d35443c4a1101 | [] | no_license | arutters/learn_python_the_hard_way | 1d5d9225afd694c3de58c6c54cee9410b1883e68 | 499cc5519592713b5906d823db407d342ee99b25 | refs/heads/master | 2023-07-13T00:03:17.054272 | 2021-08-23T22:20:53 | 2021-08-23T22:20:53 | 387,937,303 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,634 | py | from nose.tools import *
from ex48 import lexicon
def test_directions():
    """Direction words are tagged as ('direction', word)."""
    assert_equal(lexicon.scan("north"), [('direction', 'north')])
    expected = [('direction', w) for w in ("north", "south", "east")]
    assert_equal(lexicon.scan("north south east"), expected)
    assert_equal(lexicon.scan("up"), [('direction', 'up')])
def test_verbs():
    """Verb words are tagged as ('verb', word)."""
    assert_equal(lexicon.scan("go"), [('verb', 'go')])
    expected = [('verb', w) for w in ("go", "kill", "eat")]
    assert_equal(lexicon.scan("go kill eat"), expected)
def test_stops():
    """Stop words are tagged as ('stop', word)."""
    assert_equal(lexicon.scan("the"), [('stop', 'the')])
    expected = [('stop', w) for w in ("the", "in", "of")]
    assert_equal(lexicon.scan("the in of"), expected)
def test_nouns():
    """Noun words are tagged as ('noun', word)."""
    assert_equal(lexicon.scan("bear"), [('noun', 'bear')])
    assert_equal(lexicon.scan("bear princess"),
                 [('noun', 'bear'), ('noun', 'princess')])
def test_numbers():
    """Digit strings are tagged as ('number', int_value)."""
    assert_equal(lexicon.scan("1234"), [('number', 1234)])
    assert_equal(lexicon.scan("3 91234"),
                 [('number', 3), ('number', 91234)])
def test_errors():
    """Unknown words are tagged as ('error', word), even mid-sentence."""
    assert_equal(lexicon.scan("ASDFGHJKL"), [('error', 'ASDFGHJKL')])
    assert_equal(lexicon.scan("bear IAS princess"),
                 [('noun', 'bear'), ('error', 'IAS'), ('noun', 'princess')])
| [
"annabelle4086@gmail.com"
] | annabelle4086@gmail.com |
551fbca4a6614d91516633237c0818a95fb45e7d | 986630b72263dc5db7acb2d617d989111bc23649 | /urbanizze/map/migrations/0006_terreno_setor.py | 29d7b9ccb761a532bcf7f9d2fab2d7b746bb9e42 | [] | no_license | marcellobenigno/urbanizze | afbef6d45077ed93e0edd3abf21d167561659914 | ca11fa55846030a75e8d4815e13dcd1df89ff421 | refs/heads/master | 2022-08-13T03:55:32.269185 | 2019-08-08T10:33:42 | 2019-08-08T10:33:42 | 201,235,674 | 1 | 2 | null | 2021-12-13T20:07:21 | 2019-08-08T10:31:30 | JavaScript | UTF-8 | Python | false | false | 510 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-20 01:55
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the 'setor' char field to Terreno, backfilling 'Setor 04'."""

    dependencies = [
        ('map', '0005_setor'),
    ]

    operations = [
        migrations.AddField(
            model_name='terreno',
            name='setor',
            field=models.CharField(default='Setor 04', max_length=50,
                                   verbose_name='setor'),
            preserve_default=False,
        ),
    ]
| [
"benigno.marcello@gmail.com"
] | benigno.marcello@gmail.com |
1fe7616ebc4c9786ecf6bf6b454fa0ec33f01f13 | a51fb5bcf1e8309721b07ecb0164cda373cefb13 | /task010.py | d446e7dcc8a304de9f87d172ed398c4da3a92ca6 | [] | no_license | laqie/euler | e0193b54008f303d800f237038867fa076de6eaf | 24136a95bf62a208d374f89c04390bc81952c6b7 | refs/heads/master | 2016-08-02T23:42:18.262509 | 2013-08-28T03:33:16 | 2013-08-28T03:33:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 371 | py | from math import sqrt
def is_prime(num):
    """Return True if num is prime, False otherwise.

    Generalized to accept any integer: values below 2 (including
    negatives, which previously raised a math domain error via sqrt)
    simply return False.  Only odd candidates up to sqrt(num) are tried.
    """
    if num < 2:
        return False
    if num == 2:
        return True
    if num % 2 == 0:
        return False
    for x in range(3, int(sqrt(num)) + 1, 2):
        if num % x == 0:
            return False
    return True
result = 0
for x in xrange(1, 2000000):
if is_prime(x):
result += x
print result
# print is_prime(2)
| [
"mail@laqie.net"
] | mail@laqie.net |
9bacdad171082f06867c0997a01c9326fc87fa95 | bb45fe63307a8176325845c543c43b9fb3ce8da2 | /plugins/modules/tag_info.py | d894d57e50ec20199cc6952ba9a47cab408b2d8d | [
"Apache-2.0"
] | permissive | lego963/ansible-collection-quay | 67f342847f205c25a6245d63e4b8a671329d8732 | e64026a5ba67dee1d42e8d06b27be0d60e77335b | refs/heads/master | 2023-07-08T21:20:06.324922 | 2021-08-11T10:58:14 | 2021-08-11T10:58:14 | 394,558,399 | 0 | 0 | Apache-2.0 | 2021-08-11T10:58:06 | 2021-08-10T07:08:55 | Python | UTF-8 | Python | false | false | 4,502 | py | #!/usr/bin/python
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible_collections.lego963.quay.plugins.module_utils.quay import QuayBase
DOCUMENTATION = '''
module: tag_info
short_description: Query Quay Tags info.
extends_documentation_fragment: lego963.quay.quay
version_added: "0.0.1"
author: "Rodion Gyrbu (@lego963)"
description:
- This interface is used to query Quay Tags info based on search criteria.
options:
repository:
description: The full path of the repository. e.g. namespace/name
type: str
required: True
only_active_tags:
description: Filter to only active tags.
type: bool
default: 'yes'
page:
description: Page index for the results.
type: int
default: 1
limit:
description: Limit to the number of results to return per page.
type: int
specific_tag:
description: Filters the tags to the specific tag.
type: str
'''
RETURN = '''
quay_tags:
description: Sorted quay tag list of the repository.
type: complex
returned: success
contains:
name:
description:
- Specifies the tag name.
type: str
sample: "latest"
reversion:
description:
- Specifies the revision status.
type: bool
sample: false
start_ts:
description:
- Specifies the start timestamp.
type: int
sample: 1617711789
image_id:
description:
- Specifies the image ID.
type: str
sample: "0b9fa0b7f59414a64fbee47c7542217909614ebd1046520657e54c34f4af3b47"
last_modified:
description:
- Specifies the instance ID.
type: str
sample: "Tue, 06 Apr 2021 13:46:29 -0000"
manifest_digest:
description:
- Specifies the manifest digest.
type: str
sample: "sha256:b5557b4f77e7382b3203b940aaa050286e8f201d13520c169fdd2cab5bc3b88a"
docker_image_id:
description:
- Specifies the docker image ID.
type: str
sample: "0b9fa0b7f59414a64fbee45c7542517909614ebd1046520657e54c34f4af3b47"
is_manifest_list:
description:
- Specifies the manifest list status.
type: bool
sample: false
size:
description:
- Specifies the image size.
type: int
sample: 293412157
'''
EXAMPLES = '''
# Get all Quay Tags
- lego963.quay.tag_info:
repository: "opentelekomcloud/apimon"
register: quay_tags
# Get only active Quay Tags
- lego963.quay.tag_info:
repository: "opentelekomcloud/apimon"
only_active_tags: true
register: filtered_quay_tags
'''
class TagModule(QuayBase):
argument_spec = dict(
repository=dict(type='str', required=True),
only_active_tags=dict(type='bool', default=False, required=False),
page=dict(type='int', defaul=1, required=False),
limit=dict(type='int', required=False),
specific_tag=dict(type='str', required=False),
)
module_kwargs = dict(
supports_check_mode=True
)
def run(self):
changed = False
repository = self.params['repository']
query = {}
only_active_tags = self.params['only_active_tags']
page = self.params['page']
limit = self.params['limit']
specific_tag = self.params['specific_tag']
if only_active_tags:
query.update({'onlyActiveTags': only_active_tags})
if page:
query.update({'page': page})
if limit:
query.update({'page': page})
if specific_tag:
query.update({'specificTag': specific_tag})
tag_info = self.get_tag_info(repository, query)
if tag_info is None:
self.fail_json(
msg=f'Cannot fetch repository tags for {repository}',
errors=self.errors
)
if len(tag_info['tags']) == 0:
sorted_tags = []
else:
sorted_tags = sorted(tag_info['tags'], key=lambda item: item['start_ts'], reverse=True)
if len(self.errors) == 0:
self.exit_json(
changed=changed,
quay_tags=sorted_tags
)
else:
self.fail_json(
changed=changed,
msg='Failures occured',
errors=self.errors,
)
def main():
module = TagModule()
module()
if __name__ == '__main__':
main()
| [
"noreply@github.com"
] | lego963.noreply@github.com |
56104be7b888315ad95f9167b9242fd41724b8e4 | c93c88d7e45cfcf05822c701a1c1dafee8153347 | /projects/cs102/circle_of_squares.py | fd97d939c1fb1f0196448c0b8b062ffd70ff92aa | [] | no_license | timisenman/python | b7c09f6377e9a28787fce7b0ade6cab499691524 | 9ea6b6605bd78b11b981ca26a4b9b43abe449713 | refs/heads/master | 2020-05-21T22:46:50.502488 | 2017-01-18T22:54:23 | 2017-01-18T22:54:23 | 63,548,164 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 983 | py | #A Circle of Squares
#Either creates a draw square function,
#and rotate it 10 degrees 36 times.
import turtle
def draw_square(a_turtle):
for i in range(1,5):
a_turtle.forward(100)
a_turtle.right(90)
def draw_art():
window = turtle.Screen()
window.bgcolor("blue")
brad = turtle.Turtle()
brad.shape("turtle")
brad.color("white")
brad.speed(10)
for i in range(1,37):
draw_square(brad)
brad.right(10)
brad.right(630)
print("Square done")
## angie = turtle.Turtle()
## angie.shape("turtle")
## angie.color("yellow")
## angie.speed(2)
## angie.circle(100)
## print("Circle drawn")
##
## bro = turtle.Turtle()
## bro.shape("turtle")
## bro.color("green")
## bro.speed(2)
## bro.left(90)
## bro.forward(100)
## bro.left(90)
## bro.forward(100)
## bro.left(135)
## bro.forward(141.42)
## print("Triangle done")
window.exitonclick()
draw_art()
| [
"tim.isenman@gmail.com"
] | tim.isenman@gmail.com |
0d0fe379e53f12b38a5a7bc0f6e267ac8e508779 | ff55c558787128104336601348ec95886d6474a1 | /2 week/work_2.4.py | 117e1dc343934488c02f1d14e1a780dd87026278 | [] | no_license | makar-81/HSE_Python | ff51fa363761af09fd15c8710322fbfaa69c4373 | 02313bc221ff4b605216576ca1c47eb57baf597f | refs/heads/master | 2023-01-25T02:49:21.152819 | 2023-01-14T10:13:55 | 2023-01-14T10:13:55 | 252,149,510 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 106 | py | a = int(input())
if (a % 4 == 0 and a % 100 != 0) or a % 400 == 0:
print("YES")
else:
print("NO")
| [
"makar-81@mail.ru"
] | makar-81@mail.ru |
0bfd14fba7b6da0fffddd04bedfa1a6df56b0812 | 1c9918c4c98a15c4889370ecaa72649658321186 | /activation.py | 1a58561fd1f4026b9520d96bb4c3f5d09587cf08 | [] | no_license | R1nlz3r/LibML | 2eef4d86332ef3c625f6981f09c9c35f58d34a4c | a11308116dfad7d7c0aeefd4772cabff698dd288 | refs/heads/master | 2020-05-03T10:48:09.258621 | 2019-03-30T17:09:18 | 2019-03-30T17:09:18 | 178,587,806 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 376 | py | from numba import jit
import numpy as np
def identity(x):
return x
def identity_gradient(x):
return 1
@jit
def sigmoid(x):
return 1 / (1 + np.exp(-x))
@jit
def sigmoid_gradient(x):
return sigmoid(x) * (1 - sigmoid(x))
@jit
def tanh(x):
return np.tanh(x)
@jit
def tanh_gradient(x):
return 1.0 - tanh(x) ** 2
@jit
def softmax(x):
return np.exp(x) / sum(np.exp(x))
| [
"mapandel@student.42.fr"
] | mapandel@student.42.fr |
a14918e8e344b9e079594ff6e61671c697f4b86c | 1b82fd5af3a00378ed9fab0f898d0a09f993db12 | /game/vision.py | cb2ca3516c1511f0496461a2cb372741e41c70a5 | [] | no_license | Adames4/NFS_AI | b25e7b16bd555c8f420b525030009228aebc9f18 | 1bb4125f334f34f9e46f3d814af9c78136c94982 | refs/heads/main | 2023-02-25T00:26:04.562929 | 2021-01-30T10:23:35 | 2021-01-30T10:23:35 | 333,165,667 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,725 | py | from math import sin, cos, pi
import pygame
from .constants import BACKGROUND, GRASS, WIN, RED
class Vision():
'''
Vision object represent vision for every single Car object
'''
def __init__(self):
self.visible = False
def make_points(self, center, angle):
'''
determinate distances from front of car to sides of road in 5
different angles (-90, -45, 0, 45, 90) and coordinates of sides
of road
center: (x, y) of Car object
angle: angle of Car object
'''
self.points = list()
self.distances = list()
for a in range(-90, 91, 45):
angle_of_line = angle + a
x, y = center
step = 0
while BACKGROUND.get_at((int(x), int(y))) != GRASS and step < 250:
step += 1
x -= cos(angle_of_line / 180 * pi)
y += sin(angle_of_line / 180 * pi)
self.distances.append(step)
self.points.append((x, y))
if self.visible:
self.draw(center)
def draw(self, center):
'''
draw 5 lines from front of car to sides of road
'''
for point in self.points:
pygame.draw.line(WIN, RED, center, point)
def visible_on(self):
'''
turn vision on
'''
self.visible = True
def visible_off(self):
'''
turn vision off
'''
self.visible = False
def get_vision(self):
'''
return: distance from front of car to the side of road in 5 different
directions
'''
return self.distances
| [
"noreply@github.com"
] | Adames4.noreply@github.com |
b7922b170721e4cabe8d030551301d301feb2b7c | cf6edacfbcef73e499c42688e6ef54a3d277a0eb | /program.py | ca5ef1f80cad8ea36b0e46d5a26c29517085adbb | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | jackcenter/Localization_for_Differential_Drive | 1ebd19445550780924c00a286f6665886b639088 | ccdb9f7d2c2e90ce1ba2bc5e0d037ed009d3cab8 | refs/heads/master | 2022-04-28T09:47:54.953691 | 2020-05-01T15:06:19 | 2020-05-01T15:06:19 | 257,748,995 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,994 | py | import os
def main():
working = True
while working:
print_header()
cmd = get_user_input()
working = interpret_command(cmd)
def print_header():
print('---------------------------------------------------')
print(' COHRINT')
print(' Particle Filter Localization ')
print(' Jack Center ')
print('---------------------------------------------------')
print()
def get_user_input():
print('Select from the following programs:')
print(' [1]: Static Simulation')
# print(' [2]: Linear Dynamics Simulation')
# print(' [3]: Nonlinear Dynamics Simulation')
print(' [q]: Quit')
print()
print(' NOTE: parameters for landmarks and dynamics models can be changed the configuration file.')
print()
cmd = input(' Select an exercise would you like to run: ')
print()
cmd = cmd.strip().lower()
return cmd
def interpret_command(cmd):
if cmd == '1': # path planning
status = os.system("python UI_static_simulation.py")
elif cmd == '2':
print(" Sorry, this section is not functional at this time")
# status = os.system("python benchmarking/UI_static_simulation.py")
elif cmd == '3':
print(" Sorry, this section is not functional at this time")
# status = os.system("python decentralized_data_fusion/UI_static_simulation.py")
elif cmd == '4':
print(" Sorry, this section is not functional at this time")
# status = os.system("python target_search/UI_static_simulation.py")
elif cmd == 'q':
print(" closing program ... goodbye!")
return False
else:
print(' ERROR: unexpected command...')
run_again = input(' Would you like to run another program?[y/n]: ')
print()
if run_again != 'y':
print(" closing program ... goodbye!")
return False
return True
if __name__ == '__main__':
main()
| [
"jace3373@gmail.com"
] | jace3373@gmail.com |
ad834e9742ef760877d17971fb5cc046a03d2d57 | a7003c87524ed26876a6e8c2a7b41bac5486d3c9 | /addD.py | 195157d78a162fd957299bf7685d408b25bb36d2 | [] | no_license | asmaa13295/catalog-project | e2a3f5e2ca753e02d6d37b08ccc446bcc55da8bd | 77ec06db575d41d80be686f1371cb8d7c256c71c | refs/heads/master | 2021-01-20T09:14:11.292631 | 2017-08-29T21:38:52 | 2017-08-29T21:38:52 | 101,585,966 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,993 | py | from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from dataBase_setup import Cat, Base, Item
engine = create_engine('sqlite:///catalog.db')
# Bind the engine to the metadata of the Base class so that the
# declaratives can be accessed through a DBSession instance
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
# A DBSession() instance establishes all conversations with the database
# and represents a "staging zone" for all the objects loaded into the
# database session object. Any change made against the objects in the
# session won't be persisted into the database until you call
# session.commit(). If you're not happy about the changes, you can
# revert all of them back to the last commit by calling
# session.rollback()
session = DBSession()
# Menu for Cupcake
Cupcake = Cat(name="Cupcake")
session.add(Cupcake)
session.commit()
item1 = Item(name="Apple Cinnamon", description="Enjoy a mouthful of tiny bits of apple topped with our delicious brown -sugar cream cheese icing and sprinkled cinnamon.", price="$7.50", cat_id=1)
session.add(item1)
session.commit()
item2 = Item(name="Birthday Cupcake", description=" Celebrate your birthday with our vanilla rainbow cake topped with milk chocolate and maltesers.", price="$8.50", cat_id=1)
session.add(item2)
session.commit()
item3 = Item(name="Blackberry Chunks", description="Add flavor to your day with this one- baked with our vanilla batter and topped with buttercream with cheese and blackberries.", price="$7", cat_id=1)
session.add(item3)
session.commit()
item4 = Item(name="Chocolate", description="Enjoy 100% Belgian chocolate cupcake as it melts in your mouth.", price="$9", cat_id=1)
session.add(item4)
session.commit()
item5 = Item(name="Red Velvet", description="the perfect combination of our moist vanilla and chocolate batter, topped with cream cheese icing.", price="$15", cat_id=1)
session.add(item5)
session.commit()
item6 = Item(name="Tiramisu", description="For all the coffee lovers, moistened with coffee and topped with imported Italian mascarpone cheese icing.", price="$9", cat_id=1)
session.add(item6)
session.commit()
item7 = Item(name="Vanilla", description="Classic vanilla cake topped with buttercream icing.", price="6", cat_id=1)
session.add(item7)
session.commit()
item8 = Item(name="Gluten Free-Almond", description="Gluten Free almond cupcakes are delicious to enjoy even with your special diet. Order 25 hours in advance.", price="$13", cat_id=1)
session.add(item8)
session.commit()
# Menu for Cronuts
Cronuts = Cat(name="Cronuts")
session.add(Cronuts)
session.commit()
item9 = Item(name="Blueberry", description="Treat yourself to our croissant-doughnut pastry filled with blueberry and topped with sugar.", price="$12", cat_id=2)
session.add(item9)
session.commit()
item10 = Item(name="Cream Cheese", description="Delight in our croissant-doughnut pastry filled with your famous cream cheese filling, topped with sugar, and crushed walnuts.", price="$11.5", cat_id=2)
session.add(item10)
session.commit()
item11 = Item(name="Nutella", description="Scrumptiously prepared croissant-doughnut pastry filled with Nutella, topped with Nutella and crushed hazelnuts.", price="$17", cat_id=2)
session.add(item11)
session.commit()
item12 = Item(name="Vanilla", description="Baked to perfection, croissant-doughnut pastry filled with vanilla and topped with pink pastry cream.", price="$9.5", cat_id=2)
session.add(item12)
session.commit()
# Menu for Cake Pops
CakePops = Cat(name="CakePops")
session.add(CakePops)
session.commit()
item13 = Item(name="Chocolate Cake Pops ", description="The chocolate shell gives way with a little snap to a moist and soft inside, like a brownie that melts in your mouth", price="$9", cat_id=3)
session.add(item13)
session.commit()
item14 = Item(name="Rainbow Cake Pops ", description="Rainbow cake pops are a huge hit with the kids covered in rainbow sprinkles and full of color and flavor in the center.", price="$8.5", cat_id=3)
session.add(item14)
session.commit()
item15 = Item(name="Oreo Cake Pops ", description="Bite into Oreo Cake Pops for a concentrated combination of chocolate vanilla and Oreos that melt in your mouth at once.", price="$11", cat_id=3)
session.add(item15)
session.commit()
# Menu for Bakery
Bakery = Cat(name="Bakery")
session.add(Bakery)
session.commit()
item16 = Item(name="Plain Croissant ", description="healthy and delisious baked croissant", price="$13.5", cat_id=4)
session.add(item16)
session.commit()
item17 = Item(name="Zaatar Croissant ", description="healthy and delisious baked croissant", price="$15", cat_id=4)
session.add(item17)
session.commit()
item18 = Item(name="Almond Croissant", description="healthy and delisious baked croissant", price="$12", cat_id=4)
session.add(item18)
session.commit()
item19 = Item(name="Paprika Pate", description="healthy and delisious baked croissant", price="$12", cat_id=4)
session.add(item19)
session.commit() | [
"asmaa.mohamed.elshaer@hotmail.com"
] | asmaa.mohamed.elshaer@hotmail.com |
4c4b658d1fd4262f483b4b62d5a424b7b5f92cf3 | 051790c87c5c8cf3d46b6490254f4d535d10bf62 | /keymaker/settings.py | 862b4a738a0da20c07608246356f6217edfda155 | [] | no_license | ImLordImpaler/keyMaker | 41c3a22893f5351208c9d338816fe48e5b47dc52 | 7708f9bc823a248227bbce90a6ff66f54f77323d | refs/heads/main | 2022-12-30T17:28:20.905680 | 2020-10-27T04:04:52 | 2020-10-27T04:04:52 | 307,581,564 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,126 | py | """
Django settings for keymaker project.
Generated by 'django-admin startproject' using Django 3.0.5.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '9n_x^^096fpd^*2n32t&v8b$+bd01bgog635dtrzn+cj^n6eu#'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'app'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'keymaker.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'keymaker.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
| [
"rachit.saxena42@gmail.com"
] | rachit.saxena42@gmail.com |
410def9279b1fe218b8508f66e6eaeceaa99efa1 | 2370f989c0bac502a8ecca2ef75c8772584fb111 | /accessingAWS.py | d30c6c670d94847186d8605b6ea7e86fddd7bd59 | [] | no_license | maraolo/python | 6859ed835f0e3b7fdf755adfe9445ccf09ec7337 | 1eb34209f2ca9c2fa434c0a17334004284e07336 | refs/heads/master | 2020-12-05T09:56:24.497753 | 2020-01-06T10:07:39 | 2020-01-06T10:07:39 | 232,074,293 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,734 | py | import boto3
# The calls to AWS STS AssumeRole must be signed with the access key ID
# and secret access key of an existing IAM user or by using existing temporary
# credentials such as those from another role. (You cannot call AssumeRole
# with the access key for the root account.) The credentials can be in
# environment variables or in a configuration file and will be discovered
# automatically by the boto3.client() function. For more information, see the
# Python SDK documentation:
# http://boto3.readthedocs.io/en/latest/reference/services/sts.html#client
# https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_use_switch-role-api.html
# create an STS client object that represents a live connection to the
# STS service
# sts_client = boto3.client('sts')
# Call the assume_role method of the STSConnection object and pass the role
# ARN and a role session name.
# assumed_role_object=sts_client.assume_role(
# RoleArn="arn:aws:iam::account-of-role-to-assume:role/name-of-role",
# RoleSessionName="AssumeRoleSession1"
#)
# From the response that contains the assumed role, get the temporary
# credentials that can be used to make subsequent API calls
# credentials=assumed_role_object['Credentials']
# Use the temporary credentials that AssumeRole returns to make a
# connection to Amazon S3
# s3_resource=boto3.resource(
# 's3',
# aws_access_key_id=credentials['AccessKeyId'],
# aws_secret_access_key=credentials['SecretAccessKey']
# aws_session_token=credentials['SessionToken'],
#)
s3 = boto3.resource('s3')
# Use the Amazon S3 resource object that is now configured with the
# credentials to access your S3 buckets.
for bucket in s3_resource.buckets.all():
print(bucket.name) | [
"paolo@epost.ch"
] | paolo@epost.ch |
d40aa39fcc7c40aa27d51e9298fce5d00831e296 | 1b0d876158c9aa7f34f416254f0df625d1233b68 | /privacy/linear.py | c152f05ae61aa6152c5d5dd0c6b7498dd84bebf0 | [] | no_license | SCccc21/mi | 42f7d4e4d12f8b7c7378e57c3bafe399134e2efb | 9616109e2d705df668046ed03acf8df01ef5b46b | refs/heads/master | 2023-03-21T12:55:06.070111 | 2020-11-14T23:18:45 | 2020-11-14T23:18:45 | 266,634,797 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,489 | py | from work import load_iwpc, extract_target, inver, inver_continuous, engine
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeRegressor
from sklearn.metrics import mean_squared_error
from sklearn import linear_model
from math import sqrt
import numpy as np
import models, os
# [cyp2c9, vkorc1]
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
target_str = "vkorc1"
data_folder = 'data'
# [reg, vib]
model_name = 'reg'
random_seed = 2
# min age
# age 0
# height 9
# weight 16
val_list = [0, 9, 16]
sen_list = [2, 3, 4, 5, 6, 7, 13, 14, 15]
# [-1, 1]
def trans_project(y):
min_y, max_y = np.min(y[:]), np.max(y[:])
y = (y - min_y) / (max_y - min_y)
y = (y - 0.5) * 2
return y
if __name__ == "__main__":
# [0, 9, 16]
x, y, featnames = load_iwpc(data_folder)
y = trans_project(y)
'''
for target_col in val_list:
min_val, max_val = np.min(x[:, target_col]), np.max(x[:, target_col])
x[:, target_col] = (x[:, target_col] - min_val) / (max_val - min_val)
min_val, max_val = np.min(x[:, target_col]), np.max(x[:, target_col])
'''
t, target_cols = extract_target(x, target_str, featnames)
if target_str == "special":
cnt = np.zeros(8)
tot = x.shape[0]
train_list, test_list = [], []
for i in range(tot):
for j in range(2, 8):
if x[i, j] == 1:
idx = j
if cnt[idx] < 300 and idx in [2, 3, 4]:
cnt[idx] += 1
train_list.append(i)
else:
test_list.append(i)
train_x, test_x = x[train_list, :], x[test_list, :]
train_y, test_y = y[train_list], y[test_list]
train_t, test_t = t[train_list], t[test_list]
else:
train_x, test_x, train_y, test_y, train_t, test_t = train_test_split(x, y, t, random_state=random_seed, test_size=0.25)
if model_name == "reg" or model_name == "def" or model_name == "dp":
model = models.MLP(input_dim=x.shape[1]).cuda()
train_error, test_error = engine(model, model_name, train_x, test_x, train_y, test_y, train_t, target_cols)
elif model_name == "vib":
model = models.MLP_vib(input_dim=x.shape[1]).cuda()
train_error, test_error = engine(model, model_name, train_x, test_x, train_y, test_y)
elif model_name == "sen":
model = models.MLP_sen(input_dim=x.shape[1]).cuda()
train_error, test_error = engine(model, model_name, train_x, test_x, train_y, test_y)
elif model_name == "sklearn":
model = linear_model.Ridge(alpha=0.5)
model.fit(train_x, train_y)
train_error = sqrt(mean_squared_error(train_y, model.predict(train_x)))
test_error = sqrt(mean_squared_error(test_y, model.predict(test_x)))
else:
print("Model does not exist")
exit()
if target_str in ["height", "weight", "age"]:
mae = inver_continuous(model, model_name, train_x, train_y, train_t, target_cols, min_val, max_val)
print("Model name:{}\tTarget Str:{}\tTrain Error:{:.4f}\tTest Error:{:.4f}\tAttack MAE:{:.2f}".format(
model_name, target_str, train_error, test_error, mae))
else:
attack_acc = inver(model, model_name, train_x, train_y, train_t, target_cols)
print("Model name:{}\tTarget Str:{}\tTrain Error:{:.4f}\tTest Error:{:.4f}\tAttack Acc:{:.2f}".format(
model_name, target_str, train_error, test_error, attack_acc * 100))
| [
"chensi@vt.edu"
] | chensi@vt.edu |
15140eac6cb392859e8c0fdf1faaa7210cf6fbb3 | 08e7d062cc11c597a4f3ca9fbe100f2c81d6da0c | /PYTB1L2MyFirstOperations/operators.py | 0a48bfff39c90998c8b6de751f7b9e600f32d33b | [] | no_license | 4SchoolZero/-F1M1PYT | 079651104964075b8b6ef2f9c7e1fa906f12bf4d | 1f3c6980a35d3f6ac95196d38a1ecc27ef4c22da | refs/heads/main | 2023-08-21T16:11:44.907336 | 2021-10-15T08:09:41 | 2021-10-15T08:09:41 | 407,456,322 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 567 | py | toekennen = 1
optellen = 1+2
aftrekken = 1-2
keer = 5*5
delen = 52/5
restwaarde = 56%5
delen += keer
toekennen /= aftrekken
print("alle sommen kunt u zien in de code \n")
print("toekennen (assignment) = " + str(toekennen))
print("optellen (addition) + " + str(optellen))
print("aftrekken (subtraction) - " + str(aftrekken))
print("keer (multiply) * " + str(keer))
print("delen (divide) / " + str(delen))
print("restwaarde (modulo) % " + str(restwaarde))
print("optellen bij waarde van variabele +=, zie code")
print("delen door waarde van variabele /=, zie code")
| [
"34179@ma-web.nl"
] | 34179@ma-web.nl |
2234ffacb596c341c8a8eb97867b5220163a962f | b8464b455ac8a8266ffbc3b7c7cff7a232e17d8e | /game_engines/game_versions/GameEngine_v012.py | 856e32a6cf4b6d9c5f16906f2411546b60df5845 | [] | no_license | dxcv/TradingGame | 0732ed5bfb37485ec42beb23875fa73fbb667310 | f882bca4145e6c47c37b6c71289cab9d314fd009 | refs/heads/master | 2020-06-03T06:52:49.659788 | 2018-11-18T15:27:27 | 2018-11-18T15:27:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 21,940 | py | from random import randint
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.pyplot import plot, draw, show
import matplotlib.animation as animation
import matplotlib.image as mpimg
from matplotlib.widgets import Button, TextBox
import sys
sys.setrecursionlimit(10000) # 10000 is an example, try with different values
# Game engine v002
# .astype(np.uint8)
# np.set_printoptions(threshold=np.nan, linewidth=300)
# everything needs to be uint
# HAS ETH
class PlayGame(object):
def __init__(self):
# LOAD DATA
dateParse = lambda x: pd.datetime.strptime(x, "%Y-%m-%d %I-%p")
self.training_df_BTC = pd.read_csv("/home/andras/PycharmProjects/TradingGame/crypto/Gdax_BTCUSD_1h_close_train.csv", parse_dates=["Date"], date_parser=dateParse, index_col=0)
self.eval_df_BTC = pd.read_csv("/home/andras/PycharmProjects/TradingGame/crypto/Gdax_BTCUSD_1h_close_eval.csv", parse_dates=["Date"], date_parser=dateParse, index_col=0)
self.training_df_ETH = pd.read_csv("/home/andras/PycharmProjects/TradingGame/crypto/Gdax_ETHUSD_1h_close_train.csv", parse_dates=["Date"], date_parser=dateParse, index_col=0)
self.eval_df_ETH = pd.read_csv("/home/andras/PycharmProjects/TradingGame/crypto/Gdax_ETHUSD_1h_close_eval.csv", parse_dates=["Date"], date_parser=dateParse, index_col=0)
self.latest_df_BTC = pd.read_csv("/home/andras/PycharmProjects/TradingGame/crypto/latest_BTC_close.csv", parse_dates=["Date"], date_parser=dateParse, index_col=0)
self.latest_df_ETH = pd.read_csv("/home/andras/PycharmProjects/TradingGame/crypto/latest_ETH_close.csv", parse_dates=["Date"], date_parser=dateParse, index_col=0)
self.prediction = False
self.gameStep = 0
self.rewardList = []
self.rewardSum = 0
self.trainLogName = "/home/andras/PycharmProjects/TradingGame/logs/trainLog_040.csv"
self.evalLogName = "/home/andras/PycharmProjects/TradingGame/logs/evalLog_040.csv"
self.trainLogFile = pd.DataFrame(columns=["rewardSum", "profit", "guessedRightCnt", "guessedWrongCnt", "guessUpCnt", "guessDownCnt", "guessSkipCnt", "guessCnt"])
self.evalLogFile = pd.DataFrame(columns=["rewardSum", "profit", "guessedRightCnt", "guessedWrongCnt", "guessUpCnt", "guessDownCnt", "guessSkipCnt", "guessCnt"])
self.profitLogFile = pd.DataFrame(columns=["profit"])
self.profitCnt = 0
self.guessedRightCnt = 0
self.guessedWrongCnt = 0
self.guessUpCnt = 0
self.guessDownCnt = 0
self.guessSkipCnt = 0
self.guessCnt = 0
self.eLogCnt = 0
self.tLogCnt = 0
self.badGuess = 0
def startGame(self, evaluation):
self.evaluation = evaluation
if self.evaluation == True:
self.df_BTC = self.eval_df_BTC
self.df_ETH = self.eval_df_ETH
else:
self.df_BTC = self.training_df_BTC
self.df_ETH = self.training_df_ETH
self.gameLength = 168 # How long the game should go on
self.timeFrame = 84 # How many data increment should be shown as history. Could be hours, months
self.timeStepSize = "H" # Does nothing atm
self.amountToSpend = 500 # How much to purchase crypto for
self.initialBalance = 100000 # Starting Money
self.cashBalance = self.initialBalance
self.BTC_Balance = 0 # BTC to start with
self.actionTaken = 0
self.BTCToTrade = 0.2
if self.timeStepSize == "D":
self.df_BTC = self.df_BTC.resample("D").mean()
self.dataSize = len(self.df_BTC.index)
# GET RANDOM SEGMENT FROM DATA
self.startDate, self.endDate, self.startIndex, self.endIndex = self.randomChart()
if self.evaluation == True:
self.df_segment_BTC = self.df_BTC.loc[self.endDate: self.startDate]
self.df_segment_ETH = self.df_ETH.loc[self.endDate: self.startDate]
else:
self.df_segment_BTC = self.df_BTC.loc[self.startDate: self.endDate]
self.df_segment_ETH = self.df_ETH.loc[self.startDate: self.endDate]
# print("Random Chart:", self.startIndex, " - ", self.endIndex)
# print("Random Chart:", self.startDate, " - ", self.endDate)
self.currentBTCPrice = 0
self.previousBTCPrice = 0
self.fullBalance = self.cashBalance
self.prevFullBalance = self.fullBalance
self.getInitBTCPrice()
self.done = False
self.cnt = 1
self.reward = 0
self.profit = 0
self.previousProfit = 0
self.firstPurchase = True
def getInitBTCPrice(self):
endIndex = self.endIndex
endDate = self.df_BTC.index[endIndex]
nextRow = self.df_BTC.loc[[endDate]]
self.currentBTCPrice = nextRow["Close"][0]
def randomChart(self):
if self.timeStepSize == "H":
startIndex = randint((self.timeFrame + self.gameLength), (self.dataSize - 1))
endIndex = startIndex - self.timeFrame + 1
if self.timeStepSize == "D":
startIndex = randint((self.timeFrame + self.gameLength), (self.dataSize - 1))
endIndex = startIndex - self.timeFrame
startDate = self.df_BTC.index[startIndex]
endDate = self.df_BTC.index[endIndex]
if self.timeStepSize == "H":
startDateStr = startDate.strftime("%Y-%m-%d %H:%M:%S")
endDateStr = endDate.strftime("%Y-%m-%d %H:%M:%S")
if self.timeStepSize == "D":
startDateStr = startDate.strftime("%Y-%m-%d")
endDateStr = endDate.strftime("%Y-%m-%d")
return startDateStr, endDateStr, startIndex, endIndex
def nextStep(self, action):
#print("\n")
self.gameStep += 1
self.cnt = self.cnt + 1
self.reward = 0
self.BTCPercentChange = 0
terminal_life_lost = False
self.previousProfit = self.profit
self.previousBTCPrice = self.currentBTCPrice
self.endIndex = self.endIndex - 1
self.endDate = self.df_BTC.index[self.endIndex]
self.nextRow_BTC = self.df_BTC.loc[[self.endDate]]
self.df_segment_BTC = pd.concat([self.nextRow_BTC, self.df_segment_BTC])
self.df_segment_BTC = self.df_segment_BTC.drop(self.df_segment_BTC.index[len(self.df_segment_BTC) - 1])
self.nextRow_ETH = self.df_ETH.loc[[self.endDate]]
self.df_segment_ETH = pd.concat([self.nextRow_ETH, self.df_segment_ETH])
self.df_segment_ETH = self.df_segment_ETH.drop(self.df_segment_ETH.index[len(self.df_segment_ETH) - 1])
self.currentBTCPrice = self.nextRow_BTC["Close"][0]
#print(self.previousBTCPrice, "-->", self.currentBTCPrice)
#print("profit",self.profit)
#print("full bal", self.fullBalance)
if action == 1: #1:
#print("Guess: Increase")
self.actionTaken = 1
if self.firstPurchase == True:
self.firstPurchase = False
self.BTC_Balance = 0.1
self.cashBalance = self.cashBalance - (0.1 * self.currentBTCPrice)
if self.firstPurchase == False:
tradeCost = self.BTCToTrade * self.currentBTCPrice
if tradeCost <= self.cashBalance:
self.cashBalance = self.cashBalance - tradeCost
self.BTC_Balance = round((self.BTC_Balance + self.BTCToTrade), 5)
#print("BOUGHT", self.BTCToTrade, "BTC for", tradeCost)
else:
#print("RAN OUT OF MONEY!!!")
moneyEnoughForThisBTC = self.cashBalance / self.currentBTCPrice
self.cashBalance = self.cashBalance - moneyEnoughForThisBTC
self.BTC_Balance = round((self.BTC_Balance + moneyEnoughForThisBTC), 5)
#print("BOUGHT", moneyEnoughForThisBTC, "BTC for", self.cashBalance)
if action == 2: #2:
#print("Guess: Decrease")
self.actionTaken = 2
leftOverBTC = self.BTC_Balance
self.BTC_Balance = self.BTC_Balance - leftOverBTC
self.cashBalance = self.cashBalance + (leftOverBTC * self.currentBTCPrice)
#print("SOLD", leftOverBTC, "BTC for", (leftOverBTC * self.currentBTCPrice))
if action == 0 or action == 3:
####print("Skipped")
self.actionTaken = 3
self.cashBalance = round((self.cashBalance), 0)
self.BTC_Balance = round((self.BTC_Balance), 5)
self.fullBalance = round((self.cashBalance + (self.BTC_Balance * self.currentBTCPrice)), 0)
self.profit = self.fullBalance - self.initialBalance
# - REWARDING SYSTEM -
BTCPercentGainLoss = (self.currentBTCPrice / self.previousBTCPrice)
self.BTCPercentChange = -1 * (np.round((100 - (BTCPercentGainLoss * 100)), 2))
#print(self.previousBTCPrice, "-->", self.currentBTCPrice)
#print("changed by:", self.BTCPercentChange)
# WHEN BTC WENT UP
if self.currentBTCPrice > self.previousBTCPrice:
if self.actionTaken == 1:
self.reward = self.BTCPercentChange
self.guessedRightCnt += 1
#print("Guessed Right - reward =", self.reward)
if self.actionTaken == 2:
self.reward = self.BTCPercentChange * -1
self.guessedWrongCnt += 1
#print("Guessed Wrong - reward =", self.reward)
if self.actionTaken == 3 or self.actionTaken == 0:
#print("Guessed Skipped - reward =", self.reward)
self.reward = 0
# WHEN BTC DROPPED
if self.currentBTCPrice < self.previousBTCPrice:
if self.actionTaken == 1:
self.reward = self.BTCPercentChange
self.guessedWrongCnt += 1
#print("Guessed Wrong - reward =", self.reward)
if self.actionTaken == 2:
self.reward = self.BTCPercentChange * -1
self.guessedRightCnt += 1
#print("Guessed Right - reward =", self.reward)
if self.actionTaken == 3 or self.actionTaken == 0:
#print("Guessed Skipped - reward =", self.reward)
self.reward = 0
if self.reward < 0.15 and self.reward > -0.3:
self.reward = 0
self.guessCnt += 1
self.previousBTCPrice = self.currentBTCPrice
if self.cnt == self.gameLength:
self.done = True
if self.guessedWrongCnt == 10:
self.done = True
if self.done == True:
terminal_life_lost = True
self.gameStep = 0
self.profit = 0
image = self.getChartImage(self.timeFrame)
if self.actionTaken == 1:
self.guessUpCnt += 1
if self.actionTaken == 2:
self.guessDownCnt +=1
self.profitLogFile.loc[self.profitCnt] = self.previousProfit
self.profitCnt += 1
self.profitLogFile.to_csv("profit.csv", index=True)
#print("profit", self.previousProfit)
self.initialBalance = self.fullBalance
if self.actionTaken == 3:
self.guessSkipCnt +=1
# WRITE EVALUATION LOG
if self.evaluation == False:
self.rewardSum = self.rewardSum + self.reward
if self.done == True:
self.trainLogFile.loc[self.tLogCnt] = self.rewardSum, self.profit, self.guessedRightCnt, self.guessedWrongCnt, self.guessUpCnt, self.guessDownCnt, self.guessSkipCnt, self.guessCnt
self.tLogCnt += 1
self.trainLogFile.to_csv(self.trainLogName, index=True)
self.rewardSum = 0
self.guessUpCnt = 0
self.guessDownCnt = 0
self.guessedRightCnt = 0
self.guessedWrongCnt = 0
self.guessSkipCnt = 0
self.guessCnt = 0
else:
self.rewardSum = self.rewardSum + self.reward
if self.done == True:
self.evalLogFile.loc[self.eLogCnt] = self.rewardSum, self.profit, self.guessedRightCnt, self.guessedWrongCnt, self.guessUpCnt, self.guessDownCnt, self.guessSkipCnt, self.guessCnt
self.eLogCnt += 1
self.evalLogFile.to_csv(self.evalLogName, index=True)
self.rewardSum = 0
self.guessUpCnt = 0
self.guessDownCnt = 0
self.guessedRightCnt = 0
self.guessedWrongCnt = 0
self.guessSkipCnt = 0
self.guessCnt = 0
return image, self.reward, self.done
    def getBTCPercentChange(self):
        """Return the percent change of the BTC price over the last step."""
        return self.BTCPercentChange

    def getActionTaken(self):
        """Return the last action code: 1=buy, 2=sell, 3=skip (see nextStep)."""
        return self.actionTaken

    def getProfit(self):
        """Return profit relative to the balance at the start of the round."""
        return (self.fullBalance - self.initialBalance)

    def getChartData(self):
        """Render and return the current stacked BTC/ETH chart image."""
        image = self.getChartImage(self.timeFrame)
        return image

    def getCash(self):
        """Return the current cash (dollar) balance."""
        return self.cashBalance

    def getBTC(self):
        """Return the current BTC holdings."""
        return self.BTC_Balance
# --------------------------------- CHART IMAGE GENERATION ---------------------------------
    def getChartImage(self, timeFrame):
        """Render the last *timeFrame* BTC and ETH closes as one stacked image.

        Each market is drawn into a (timeFrame/2 x timeFrame) matrix: closes
        are scaled to the pixel height, the close pixel per column is set to
        255, and the vertical gap to the previous close is filled with value
        80 or 180 depending on the direction of the move.  The BTC panel is
        stacked on top of the ETH panel and returned as a numpy array.
        """
        def scale_list(x, to_min, to_max):
            # Linearly rescale every value of x into [to_min, to_max].
            def scale_number(unscaled, to_min, to_max, from_min, from_max):
                return (to_max - to_min) * (unscaled - from_min) / (from_max - from_min) + to_min
            if len(set(x)) == 1:
                # Degenerate flat series: avoid division by zero, draw a mid-line.
                print("SET(X) == 1")
                return [np.floor((to_max + to_min) / 2)] * len(x)
            else:
                return [scale_number(i, to_min, to_max, min(x), max(x)) for i in x]
        timeFrame = timeFrame
        PRICE_RANGE = timeFrame
        half_scale_size = int(PRICE_RANGE / 2)
        #half_scale_size = int(PRICE_RANGE)
        closes_BTC = self.df_segment_BTC["Close"]
        roundedCloses = ['%.2f' % elem for elem in closes_BTC]
        closes_BTC = closes_BTC[::-1]  # reverse before drawing (series presumably newest-first — TODO confirm)
        close_data_together_BTC = list(np.round(scale_list(closes_BTC[timeFrame - timeFrame: timeFrame], 0, half_scale_size - 1), 0))
        graph_close_BTC = close_data_together_BTC[0:PRICE_RANGE]
        #print(df_segment_ETH)
        closes_ETH = self.df_segment_ETH["Close"]
        roundedCloses = ['%.2f' % elem for elem in closes_ETH]
        closes_ETH = closes_ETH[::-1]
        close_data_together_ETH = list(np.round(scale_list(closes_ETH[timeFrame - timeFrame: timeFrame], 0, half_scale_size - 1), 0))
        graph_close_ETH = close_data_together_ETH[0:PRICE_RANGE]
        def graphRender(data):
            # Paint one market's scaled closes into a fresh pixel matrix.
            blank_matrix_close = np.zeros(shape=(half_scale_size, timeFrame))
            x_ind = 0
            previous_pixel = 0
            for next_pixel in data:
                blank_matrix_close[int(next_pixel), x_ind] = 255
                plus = True
                if x_ind == 0:
                    previous_pixel = next_pixel
                difference = int((previous_pixel - next_pixel))
                absDifference = abs(difference)
                previous_pixel = next_pixel
                # Fill the vertical gap between consecutive closes so the
                # plotted line is continuous; shade encodes the direction.
                for diff in range(absDifference):
                    if difference >= 0:
                        next_pixel = (next_pixel + 1).astype(np.uint8)
                        blank_matrix_close[next_pixel, x_ind] = 80
                    if difference < 0:
                        next_pixel = (next_pixel - 1).astype(np.uint8)
                        blank_matrix_close[next_pixel, x_ind] = 180
                x_ind += 1
            blank_matrix_close = blank_matrix_close[::-1]  # flip vertically for display
            return blank_matrix_close
        BTC = graphRender(graph_close_BTC)
        ETH = graphRender(graph_close_ETH)
        stackedCharts = np.vstack([BTC, ETH])
        return stackedCharts
# --- Module-level state for the interactive (human-played) session ---
df = pd.DataFrame(columns=['profit'])  # per-game profit log, written to Human_Trader_Log.csv
cnt = 0           # number of finished games logged so far
terminal = False  # True once the current game has ended
restart = False   # True when a fresh game should be drawn on the next call
def HourLater(action):
    """Advance the interactive game one step and redraw the matplotlib UI.

    *action* is the player's choice (1=up/buy, 2=down/sell, 3=skip).
    Depending on module state this either draws the first frame of a new
    game (restart), shows the end screen and logs the result (terminal),
    or feeds the action to the environment and redraws chart + buttons.
    """
    global restart
    global terminal
    global cnt
    plt.close()
    if restart == True:
        # A new game was requested by the previous end screen: draw its first frame.
        restart = False
        plt.style.use('seaborn')
        df_segment_BTC = test.getChartData()
        plt.imshow(df_segment_BTC, cmap='hot')
        buyCom = plt.axes([0.9, 0.2, 0.1, 0.075])
        buyButt = Button(buyCom, 'UP', color='red', hovercolor='green')
        buyButt.on_clicked(_buy)
        sellCom = plt.axes([0.9, 0.1, 0.1, 0.075])
        sellButt = Button(sellCom, 'DOWN', color='red', hovercolor='green')
        sellButt.on_clicked(_sell)
        skipCom = plt.axes([0.9, 0.0, 0.1, 0.075])
        skipButt = Button(skipCom, 'SKIP', color='red', hovercolor='green')
        skipButt.on_clicked(_skip)
        dollMeter = plt.axes([0.9, 0.7, 0.1, 0.075])
        dollText = TextBox(dollMeter, 'Dollar', color='grey', initial=test.getCash())
        btcMeter = plt.axes([0.9, 0.6, 0.1, 0.075])
        btcMeter = TextBox(btcMeter, 'BTC', color='grey', initial=test.getBTC())
        profitMeter = plt.axes([0.9, 0.5, 0.1, 0.075])
        profitMeter = TextBox(profitMeter, 'Profit', color='grey', initial=test.getProfit())
        plt.show()
    if terminal == True:
        # Game over: log profit, show the WON/LOST screen, then queue a restart.
        plt.style.use('dark_background')
        df.loc[cnt] = test.profit
        cnt += 1
        df.to_csv("Human_Trader_Log.csv", index=True)
        df_segment_BTC = test.getChartData()
        plt.imshow(df_segment_BTC, cmap='hot')
        dollMeter = plt.axes([0.9, 0.7, 0.1, 0.075])
        dollText = TextBox(dollMeter, 'Dollar', color='grey', initial=test.getCash())
        btcMeter = plt.axes([0.9, 0.6, 0.1, 0.075])
        btcMeter = TextBox(btcMeter, 'BTC', color='grey', initial=test.getBTC())
        profitMeter = plt.axes([0.9, 0.5, 0.1, 0.075])
        profitMeter = TextBox(profitMeter, 'Profit', color='grey', initial=test.getProfit())
        endMeter = plt.axes([0.9, 0.4, 0.1, 0.075])
        if test.getProfit() < 0:
            endMeter = TextBox(endMeter, '', color='red', initial="LOST!")
        else:
            endMeter = TextBox(endMeter, '', color='green', initial="WON!")
        plt.show()
        profit = 0
        terminal = False
        restart = True
        test.startGame(True)
        HourLater(1)  # recurse to draw the first frame of the new game
    else:
        # Normal step: apply the player's action and redraw chart + buttons.
        chart, r_t, terminal = test.nextStep(action)
        #printBalances()
        plt.imshow(chart, cmap='hot')
        buyCom = plt.axes([0.9, 0.2, 0.1, 0.075])
        buyButt = Button(buyCom, 'UP', color='red', hovercolor='green')
        buyButt.on_clicked(_buy)
        sellCom = plt.axes([0.9, 0.1, 0.1, 0.075])
        sellButt = Button(sellCom, 'DOWN', color='red', hovercolor='green')
        sellButt.on_clicked(_sell)
        skipCom = plt.axes([0.9, 0.0, 0.1, 0.075])
        skipButt = Button(skipCom, 'SKIP', color='red', hovercolor='green')
        skipButt.on_clicked(_skip)
        dollMeter = plt.axes([0.9, 0.7, 0.1, 0.075])
        dollText = TextBox(dollMeter, 'Dollar', color='grey', initial=test.getCash())
        btcMeter = plt.axes([0.9, 0.6, 0.1, 0.075])
        btcMeter = TextBox(btcMeter, 'BTC', color='grey', initial=test.getBTC())
        profitMeter = plt.axes([0.9, 0.5, 0.1, 0.075])
        profitMeter = TextBox(profitMeter, 'Profit', color='grey', initial=test.getProfit())
        plt.show()
def printEndReason():
    """Print the end-of-session banner: BANKRUPT on a loss, otherwise a win."""
    lost = test.getProfit() < 0
    if lost:
        print("----------------------------------------------------------------------------")
        print("-----------------------------                  ------------------------------------")
        print("-----------------------------    BANKRUPT      ------------------------------------")
        print("-----------------------------                  ------------------------------------")
        print("----------------------------------------------------------------------------")
        return
    print("----------------------------------------------------------------------------")
    print("-----------------------------                  ------------------------------------")
    print("---------------------------   YOU MADE MONEY  ---------------------------------")
    print("-----------------------------                  ------------------------------------")
    print("----------------------------------------------------------------------------")
def printBalances():
    """Print the current dollar, BTC and profit figures, one per line."""
    print("Dollar = ${0}".format(test.getCash()))
    print("BTC = {0} BTC".format(test.getBTC()))
    print("PROFIT = {0}".format(test.getProfit()))
    print()  # trailing blank line, as before
def _buy(event):
    """Matplotlib button callback: play an 'up/buy' (action 1) step."""
    global terminal  # declared but never assigned here; HourLater sets it
    HourLater(1)
def _sell(event):
    """Matplotlib button callback: play a 'down/sell' (action 2) step."""
    global terminal  # declared but never assigned here; HourLater sets it
    HourLater(2)
def _skip(event):
    """Matplotlib button callback: skip this step (action 3)."""
    global terminal  # declared but never assigned here; HourLater sets it
    HourLater(3)
def newGame():
    """Start the game environment and draw the initial chart with controls."""
    test.startGame(True)
    df_segment_BTC = test.getChartData()
    #print(df_segment_BTC)
    #printBalances()
    plt.imshow(df_segment_BTC, cmap='hot')
    # UP / DOWN / SKIP buttons along the right edge of the figure.
    buyCom = plt.axes([0.9, 0.2, 0.1, 0.075])
    buyButt = Button(buyCom, 'UP', color='red', hovercolor='green')
    buyButt.on_clicked(_buy)
    sellCom = plt.axes([0.9, 0.1, 0.1, 0.075])
    sellButt = Button(sellCom, 'DOWN', color='red', hovercolor='green')
    sellButt.on_clicked(_sell)
    skipCom = plt.axes([0.9, 0.0, 0.1, 0.075])
    skipButt = Button(skipCom, 'SKIP', color='red', hovercolor='green')
    skipButt.on_clicked(_skip)
    # Read-only meters for cash, BTC and profit.
    dollMeter = plt.axes([0.9, 0.7, 0.1, 0.075])
    dollText = TextBox(dollMeter, 'Dollar', color='grey', initial=test.getCash())
    btcMeter = plt.axes([0.9, 0.6, 0.1, 0.075])
    btcMeter = TextBox(btcMeter, 'BTC', color='grey', initial=test.getBTC())
    profitMeter = plt.axes([0.9, 0.5, 0.1, 0.075])
    profitMeter = TextBox(profitMeter, 'Profit', color='grey', initial=test.getProfit())
    plt.show()
if __name__ == "__main__":
    # Entry point: build the trading environment and launch the interactive UI.
    test = PlayGame()
    newGame()
| [
"andrasormos@gmail.com"
] | andrasormos@gmail.com |
42037aa7142d275e0165763bd30886a51a333cc8 | ece5984159985f419cb4d6cd89eb3342c86d2282 | /examples/DGL-KE/hotfix/partition.py | c0618595fd84fd03a78fb3992c8588b26e9dff13 | [
"Apache-2.0"
] | permissive | ryantd/dgl-operator | 9c235ac68f14960458141e4170288d334cc29ee0 | 2fba3585294c5cb03bf61e9adfe66f678ed677dd | refs/heads/master | 2023-08-21T16:05:50.086062 | 2021-09-01T05:57:09 | 2021-09-01T05:57:09 | 384,377,660 | 1 | 0 | Apache-2.0 | 2021-07-09T08:49:23 | 2021-07-09T08:49:22 | null | UTF-8 | Python | false | false | 6,156 | py | # -*- coding: utf-8 -*-
#
# partition.py
#
# Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import scipy as sp
import numpy as np
import argparse
import os
import dgl
from dgl import backend as F
from dgl.data.utils import load_graphs, save_graphs
from .dataloader import get_dataset
def write_txt_graph(path, file_name, part_dict, total_nodes, total_relations):
    """Dump each METIS partition of a knowledge graph to plain-text files.

    For every partition id in *part_dict* this writes, under ``path + str(id)``:
      - *file_name*:         one "src\\trel\\tdst" triple per edge
      - local_to_global.txt: the parent (global) node id of each local node
      - partition_book.txt:  the owning partition id for every global node
      - relation_count.txt:  the total number of relation types

    Parameters
    ----------
    path : str
        Directory prefix; the partition id is appended to it.
    file_name : str
        Name of the triple file inside each partition directory.
    part_dict : dict
        Partition id -> DGLGraph subgraph (with 'tid' edge data).
    total_nodes : int
        Number of nodes in the full parent graph.
    total_relations : int
        Number of relation types in the dataset.
    """
    partition_book = [0] * total_nodes
    for part_id in part_dict:
        print('write graph %d...' % part_id)
        partition_path = path + str(part_id)
        if not os.path.exists(partition_path):
            os.makedirs(partition_path)
        graph = part_dict[part_id]
        # Get (h,r,t) triples
        src, dst = graph.all_edges(form='uv', order='eid')
        rel = graph.edata['tid']
        assert len(src) == len(rel)
        src = F.asnumpy(src)
        dst = F.asnumpy(dst)
        rel = F.asnumpy(rel)
        # 'with' guarantees the handle is closed even if a write fails
        # (the original left files open on error).
        triple_file = os.path.join(partition_path, file_name)
        with open(triple_file, 'w') as f:
            for h, r, t in zip(src, rel, dst):
                f.write(str(h) + '\t' + str(r) + '\t' + str(t) + '\n')
        # Map each local node id back to its id in the parent graph.
        pid = F.asnumpy(graph.parent_nid)
        l2g_file = os.path.join(partition_path, 'local_to_global.txt')
        with open(l2g_file, 'w') as f:
            for global_id in pid:
                f.write(str(global_id) + '\n')
        # Record which partition owns each global node.
        partition = F.asnumpy(graph.ndata['part_id'])
        for i in range(len(pid)):
            partition_book[pid[i]] = partition[i]
    # The partition book and relation count are identical for all partitions.
    for part_id in part_dict:
        partition_path = path + str(part_id)
        pb_file = os.path.join(partition_path, 'partition_book.txt')
        with open(pb_file, 'w') as f:
            for owner in partition_book:
                f.write(str(owner) + '\n')
        rel_count_file = os.path.join(partition_path, 'relation_count.txt')
        with open(rel_count_file, 'w') as f:
            f.write(str(total_relations) + '\n')
def main():
    """CLI entry point: load a KG dataset, METIS-partition it, dump to text."""
    parser = argparse.ArgumentParser(description='Partition a knowledge graph')
    parser.add_argument('--data_path', type=str, default='data',
                        help='The path of the directory where DGL-KE loads knowledge graph data.')
    parser.add_argument('--dataset', type=str, default='FB15k',
                        help='dataset name, under data_path')
    parser.add_argument('--data_files', type=str, default=None, nargs='+',
                        help='A list of data file names. This is used if users want to train KGE'\
                        'on their own datasets. If the format is raw_udd_{htr},'\
                        'users need to provide train_file [valid_file] [test_file].'\
                        'If the format is udd_{htr}, users need to provide'\
                        'entity_file relation_file train_file [valid_file] [test_file].'\
                        'In both cases, valid_file and test_file are optional.')
    parser.add_argument('--delimiter', type=str, default='\t',
                        help='Delimiter used in data files. Note all files should use the same delimiter.')
    parser.add_argument('--format', type=str, default='built_in',
                        help='The format of the dataset. For builtin knowledge graphs,'\
                        'the foramt should be built_in. For users own knowledge graphs,'\
                        'it needs to be raw_udd_{htr} or udd_{htr}.')
    parser.add_argument('-k', '--num-parts', required=True, type=int,
                        help='The number of partitions')
    args = parser.parse_args()
    num_parts = args.num_parts
    print('load dataset..')
    # load dataset and samplers
    dataset = get_dataset(args.data_path,
                          args.dataset,
                          args.format,
                          args.delimiter,
                          args.data_files)
    print('construct graph...')
    # Build one read-only DGLGraph from the training triples; relation type
    # ids are stored as edge data under 'tid'.
    src, etype_id, dst = dataset.train
    coo = sp.sparse.coo_matrix((np.ones(len(src)), (src, dst)),
                               shape=[dataset.n_entities, dataset.n_entities])
    g = dgl.DGLGraph(coo, readonly=True, multigraph=True, sort_csr=True)
    g.edata['tid'] = F.tensor(etype_id, F.int64)
    print('partition graph...')
    # METIS split into num_parts parts (third arg presumably the number of
    # halo hops — confirm against the DGL metis_partition API).
    part_dict = dgl.transform.metis_partition(g, num_parts, 1)
    tot_num_inner_edges = 0
    for part_id in part_dict:
        part = part_dict[part_id]
        num_inner_nodes = len(np.nonzero(F.asnumpy(part.ndata['inner_node']))[0])
        num_inner_edges = len(np.nonzero(F.asnumpy(part.edata['inner_edge']))[0])
        print('part {} has {} nodes and {} edges. {} nodes and {} edges are inside the partition'.format(
            part_id, part.number_of_nodes(), part.number_of_edges(),
            num_inner_nodes, num_inner_edges))
        tot_num_inner_edges += num_inner_edges
        part.copy_from_parent()  # pull node/edge features down from the parent graph
    print('write graph to txt file...')
    txt_file_graph = os.path.join(args.data_path, args.dataset)
    txt_file_graph = os.path.join(txt_file_graph, 'partition_')
    write_txt_graph(txt_file_graph, 'train.txt', part_dict, g.number_of_nodes(), dataset.n_relations)
    print('there are {} edges in the graph and {} edge cuts for {} partitions.'.format(
        g.number_of_edges(), g.number_of_edges() - tot_num_inner_edges, len(part_dict)))
if __name__ == '__main__':
    # Script entry point.
    main()
"qihoo@360.cn"
] | qihoo@360.cn |
a8428249fdde3e7aee112e751354d15c3c72b3fa | bba5ef3efab68481a1ac835fc57ec0bef62edaee | /my_site/activities/admin.py | 356fe86304b541725c085b2c9d0451cfd27e7250 | [] | no_license | diegosalas00/diego_site_family_center | 53e50ed5b5eec43f66dd50ad613112e92ff55a2c | a6cb1c6e6ee08cc263c87037e01934d1cb8c584b | refs/heads/master | 2021-01-17T14:51:35.167391 | 2017-06-30T02:40:26 | 2017-06-30T02:40:26 | 95,391,097 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 370 | py | from django.contrib import admin
from .models import Activity, PartyPackage, Available
# Register your models here.
class PackageInline(admin.StackedInline):
    # Inline editor so PartyPackage rows can be edited on the Activity page.
    model = PartyPackage
class ActivityAdmin(admin.ModelAdmin):
    # Activity admin form with its party packages editable inline.
    inlines = [PackageInline, ]
# Expose the models in the Django admin; Activity uses the customised admin class.
admin.site.register(Activity, ActivityAdmin)
admin.site.register(PartyPackage)
admin.site.register(Available)
| [
"noreply@github.com"
] | diegosalas00.noreply@github.com |
78782747d55e2d731b4ecbc7339466b4ab9fe2ee | 3b883e16b3eb5307433c5ffdffa8f0efcdfdb106 | /pack/is_valid.py | 15b634b246c638508251ebdf782e8fe625c1af3c | [
"MIT"
] | permissive | hasanhammad/SUDOKU-SOLVER | 73eca0d82332f5e41953e7e2a444ffd5625520fe | 56990ee143d698a8480cd53e9b8556ffc76c20fc | refs/heads/master | 2022-11-28T12:56:59.372769 | 2020-07-31T16:38:11 | 2020-07-31T16:38:11 | 284,076,895 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,435 | py | import os
def read_from_file(filename, table):
    """Append one list of ints per whitespace-separated line of *filename* onto *table* (in place)."""
    with open(filename, 'r') as source:
        for row_text in source:
            table.append([int(token) for token in row_text.split()])
def in_col(col, num, sudoku):
    """Return True if *num* already appears anywhere in column *col*."""
    return any(sudoku[r][col] == num for r in range(9))
def in_row(row, num, sudoku):
    """Return True if *num* already appears anywhere in row *row*."""
    return any(sudoku[row][c] == num for c in range(9))
def in_box(row, col, num, sudoku):
    """Return True if *num* already appears in the 3x3 box containing (row, col)."""
    base_row = row - row % 3
    base_col = col - col % 3
    for r in range(base_row, base_row + 3):
        for c in range(base_col, base_col + 3):
            if sudoku[r][c] == num:
                return True
    return False
def is_safe(row, col, num, sudoku):
    """Return True when placing *num* at (row, col) violates no sudoku rule."""
    return not (in_row(row, num, sudoku)
                or in_col(col, num, sudoku)
                or in_box(row, col, num, sudoku))
def find_empty_cell(row_col, sudoku):
    """Find the first empty (0) cell in row-major order.

    On success, store its coordinates in row_col[0]/row_col[1] and return
    True; return False when the grid is full.
    """
    for r in range(9):
        for c in range(9):
            if sudoku[r][c] == 0:
                row_col[0] = r
                row_col[1] = c
                return True
    return False
def notInRow(arr, row):
    """Return True if row *row* contains no duplicate non-zero value (0 = empty)."""
    filled = [arr[row][c] for c in range(9) if arr[row][c] != 0]
    return len(filled) == len(set(filled))
def notInCol(arr, col):
    """Return True if column *col* contains no duplicate non-zero value (0 = empty)."""
    filled = [arr[r][col] for r in range(9) if arr[r][col] != 0]
    return len(filled) == len(set(filled))
def notInBox(arr, startRow, startCol):
    """Return True if the 3x3 box at (startRow, startCol) has no duplicate non-zero value."""
    filled = []
    for dr in range(3):
        for dc in range(3):
            value = arr[startRow + dr][startCol + dc]
            if value != 0:
                filled.append(value)
    return len(filled) == len(set(filled))
def isValid(arr, row, col):
    """Return True when the row, column and 3x3 box of (row, col) are all duplicate-free."""
    box_row = row - row % 3
    box_col = col - col % 3
    return notInRow(arr, row) and notInCol(arr, col) and notInBox(arr, box_row, box_col)
def isValidConfig(arr, n):
    """Return True when every cell of the n x n grid passes the sudoku validity check."""
    return all(isValid(arr, r, c) for r in range(n) for c in range(n))
| [
"noreply@github.com"
] | hasanhammad.noreply@github.com |
bc3d42179512c48e51520f9779b54c1f443c8622 | 08b567b30da87aae515d7990f82f058d0a6cb630 | /BlumeFit.py | 1066b1736a2af9feebf8f368f93b8376075628ac | [] | no_license | ParasKoundal/icecube-scripts | 49e151b2c518e646101df0b462c39483a8b67034 | fa3ae5c4e2462aff16c3157a712f94e745169977 | refs/heads/master | 2021-05-31T06:47:31.805953 | 2016-05-24T15:16:31 | 2016-05-24T15:16:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,465 | py | #!/usr/bin/env python
import icecube
from icecube import icetray, dataio, dataclasses, millipede
from icecube.icetray import traysegment
import numpy, os, sys, math
#Things stolen from IgelFit
#from icecube.igelfit.igel import *
import recos
from recos import gulliver_commons
from I3Tray import *
# Detector geometry comes from the GCD file given as the first CLI argument.
geofile = dataio.I3File(sys.argv[1])
gFrame = geofile.pop_frame()
geometry_ = gFrame["I3Geometry"]
#List of keys to remove
delete_keys = []
#Number of minimums to find BlumeFits for
num_mins = 3
#Vertex Seed for the BlumeFit (default is IgelFit)
vert_seed = "Monopod_Clast"
#Tuple of variations. Make square around original seed for now
#BlumeFit zenith and azimuth variations (degrees, added to the seed direction)
#pt_tup = [(0,0),(0,10),(10,0),(math.sqrt(200),math.sqrt(200)),(0,-10),(-10,0),(math.sqrt(200),-math.sqrt(200)),(-math.sqrt(200),-math.sqrt(200)),(-math.sqrt(200),math.sqrt(200))]
#Super BlumeFit zenith and azimuth variations
pt_tup = [(0,0),(0,5),(5,0),(0,-5),(-5,0),(0,-20),(-20,0),(0,10),(10,0),(20,0),(0,20),(math.sqrt(800),math.sqrt(800)),(-math.sqrt(800),math.sqrt(800)),(math.sqrt(800),-math.sqrt(800)),(-math.sqrt(800),-math.sqrt(800)),(math.sqrt(50),math.sqrt(50)),(-math.sqrt(50),math.sqrt(50)),(math.sqrt(50),-math.sqrt(50)),(-math.sqrt(50),-math.sqrt(50)),(math.sqrt(200),math.sqrt(200)),(0,-10),(-10,0),(math.sqrt(200),-math.sqrt(200)),(-math.sqrt(200),-math.sqrt(200)),(-math.sqrt(200),math.sqrt(200))]
#Millipede Constants
PhotonsPerBin = 1
Boundary = 800
MuonSpacing = 0
ShowerSpacing = 2
Pulses = "newSRT_Cleaned_WavedeformPulses"
BadDOMsList = 'BadDomsList'
MuonPhotonicsService = 'MuonSplinePhotonicsService'
CascadePhotonicsService = 'CascadeSplinePhotonicsService'
def makeSeed(vertSeed, zen, azi):
    """Build an infinite-track seed particle.

    Position, time and length are copied from *vertSeed*; the direction is
    replaced by the given (zen, azi).
    """
    new_direction = dataclasses.I3Direction(zen, azi)
    track_shape = dataclasses.I3Particle.ParticleShape.InfiniteTrack
    return dataclasses.I3Particle(vertSeed.pos, new_direction,
                                  vertSeed.time, track_shape, vertSeed.length)
def enum(**enums):
    """Create a simple ad-hoc enum: a class whose attributes are the given keywords."""
    members = dict(enums)
    return type('Enum', (), members)
tray = I3Tray()
# Spline photonics tables: cascades from a local copy, zero-length muons
# from the shared simulation area.
topleveldir = '/data/sim/sim-new/ice/photonics-mie-ZeroLengthMuons/'
topleveldir1 = '/data/user/mntobin/IceRec/Tables/'
spline_tables = enum(
    PhotoSplineAmplitudeTableCscd = topleveldir1 + '/ems_mie_z20_a10_150.abs.fits',
    PhotoSplineTimingTableCscd = topleveldir1 + '/ems_mie_z20_a10_150.prob.fits',
    PhotoSplineAmplitudeTableMuon = topleveldir + '/ZeroLengthMieMuons_150_z20_a10.abs.fits',
    PhotoSplineTimingTableMuon = topleveldir + '/ZeroLengthMieMuons_150_z20_a10.prob.fits')
# Register the gulliver services (likelihoods, minimizers) on the tray.
gulliver_commons(tray, spline_tables, photonics=False)
#Start tray
def GetKinVals(frame, place):
    """Return (energy, zenith, azimuth) of the *place*-th best Igelfit in *frame*.

    Scans all Igelfit results (one per direction in the hard-coded seed grid),
    ranks them by rlogl, and returns the kinematics of the fit with the
    place-th smallest rlogl (place=1 is the best).  Energy is summed over all
    millipede track bins.  Returns (0, 0, 0) when no Igelfit is present.
    """
    # Vertex offsets are fixed at zero: only the direction grid is varied.
    dx = 0
    dy = 0
    dz = 0
    # Grid of (zenith, azimuth) seed directions, in degrees.
    direction_seedmap = [ (2.895, 326.508), (15.672, 162.173), (16.767, 87.737), (18.129, 229.531), (19.722, 26.743), (21.298, 330.356), (23.698, 281.026), (28.827, 125.710), (31.061, 196.084), (34.125, 359.745), (34.318, 161.840), (34.513, 74.688), (36.617, 228.496), (36.623, 42.977), (39.431, 320.570), (39.878, 258.007), (41.991, 286.490), (43.931, 107.588), (45.736, 139.059), (47.193, 19.992), (47.461, 181.905), (48.162, 207.024), (49.125, 343.152), (50.498, 60.924), (51.610, 84.793), (54.236, 305.186), (54.818, 159.996), (55.054, 229.122), (57.401, 39.527), (57.517, 273.028), (57.653, 251.102), (58.547, 122.045), (59.240, 2.276), (61.120, 325.560), (63.448, 101.669), (64.816, 141.974), (65.174, 197.997), (65.622, 177.720), (66.515, 21.331), (67.513, 290.510), (68.553, 56.038), (68.612, 344.275), (70.256, 82.903), (70.463, 217.280), (71.816, 237.675), (72.318, 309.569), (73.205, 159.667), (75.461, 256.591), (75.848, 38.162), (76.894, 124.772), (77.745, 1.153), (78.929, 275.381), (79.323, 328.637), (81.836, 99.225), (82.525, 68.660), (82.990, 203.303), (83.230, 142.442), (83.619, 183.084), (84.748, 18.512), (85.156, 295.863), (86.650, 226.503), (87.640, 345.340), (89.007, 51.396), (89.967, 244.626), (90.305, 313.727), (91.619, 159.032), (92.353, 114.548), (92.595, 84.163), (92.953, 263.092), (94.871, 33.936), (96.016, 3.708), (96.424, 281.216), (98.532, 330.262), (98.538, 132.156), (98.858, 212.697), (98.880, 193.912), (100.627, 65.667), (100.745, 175.297), (103.470, 99.489), (104.156, 232.650), (106.136, 347.694), (107.120, 47.948), (107.186, 149.017), (107.497, 251.486), (108.294, 309.776), (110.604, 117.281), (110.972, 81.944), (111.262, 25.152), (111.858, 270.743), (112.577, 290.622), (114.551, 5.272), (116.000, 186.422), (116.676, 218.117), (117.133, 327.713), (117.249, 165.794), (119.897, 63.479), (120.573, 134.659), (121.799, 238.553), (124.231, 40.068), (124.687, 347.503), (126.739, 306.960), (127.666, 91.105), (127.826, 259.967), (129.131, 114.754), (129.206, 201.995), (129.770, 17.963), (130.005, 283.609), (132.239, 152.431), (133.257, 177.853), (136.254, 328.457), (137.161, 224.991), (138.137, 70.215), (141.720, 357.397), (142.669, 41.947), (144.044, 130.683), (144.611, 300.635), (145.476, 268.121), (145.549, 98.300), (147.597, 199.267), (149.391, 163.157), (154.675, 235.391), (154.903, 331.643), (156.532, 66.760), (157.074, 17.965), (161.884, 119.272), (162.990, 283.154), (165.667, 188.203), (175.463, 26.215) ]
    #Save the llh for all Igelfits in the file
    content_map = []
    for zen, azi in direction_seedmap:
        millipede_id = 'Igelfit_0000' + '_%im_%im_%im_%ideg_%ideg' % (dx, dy, dz, zen, azi)
        if not frame.Has(millipede_id):
            print "WARNING: Skipping igelfit in event %s" % millipede_id
            continue
        millipede = frame[millipede_id]
        millipede_fitparams = frame[millipede_id + 'FitParams']
        rlogl = millipede_fitparams.rlogl
        content_map.append([rlogl, millipede_id])
    if(len(content_map)==0):
        return 0,0,0
    #Find the num_mins values of energy,zenith and azimuth
    if(place==1):
        fit = min(content_map, key= lambda content_map : content_map[0])
    else:
        # Drop the (place-1) best fits, then take the minimum of the rest.
        for i in xrange(place-1):
            content_map.remove(min(content_map, key= lambda content_map : content_map[0]))
        fit = min(content_map, key= lambda content_map : content_map[0])
    fit_rlogl = fit[0]
    fit_id = fit[1]
    track_fit = frame[fit_id]
    track_fit_fitparams = frame[fit_id + 'FitParams']
    # Direction comes from the first bin; energy is the sum over all bins.
    zenith, azimuth, energy = track_fit[0].dir.zenith, track_fit[0].dir.azimuth, 0.
    for bin in track_fit : energy += bin.energy
    return energy,zenith,azimuth
def Seeder(frame):
    """Put a single BlumeFit seed particle ('Seed') into the physics frame.

    Only one seed along the vertex-seed direction is created; the
    commented-out code varied the direction over pt_tup and the num_mins
    best Igelfit minima.
    """
    global pt_tup
    igel_seed = frame[vert_seed]
    zen = igel_seed.dir.zenith
    azi = igel_seed.dir.azimuth
    # for i in xrange(len(pt_tup)):
    #     igel_seed = frame[vert_seed]
    #     for j in xrange(num_mins):
    #         #Set values of energy, zenith and azimuth from the current llh minimum
    #         energy,zenith,azimuth = GetKinVals(frame,j+1)
    #         igel_seed.energy = energy
    #         igel_seed.dir = dataclasses.I3Direction(zenith,azimuth)
    #         zen = igel_seed.dir.zenith + (numpy.pi/180)*pt_tup[i][0]
    #         azi = igel_seed.dir.azimuth + (numpy.pi/180)*pt_tup[i][1]
    #         #Fix edge cases to keep values between 0 and 2pi. Almost undoubtedly a better way to do this. So, so lazy
    #         if(zen>(numpy.pi)): zen = 2*numpy.pi - zen
    #         if(zen<0): zen = -1*zen
    #         if(azi>(2*numpy.pi)): azi = azi - (2*numpy.pi)*math.floor(azi/(2*numpy.pi))
    #         if(azi<0): azi = azi + (2*numpy.pi)*(1+math.floor(-1*azi/(2*numpy.pi)))
    #         #Get igelfit seed, modify it, put it in the frame
    #         seed_id = 'Seed'+'%i%i' % (i,j)
    seed_id = 'Seed'
    frame[seed_id] = makeSeed(igel_seed,zen,azi)
#Find best fit value (mostly stolen from igelfit)
def BestFit(frame):
    """Select the lowest-rlogl millipede result and store it as 'BlumeFit'.

    The winning fit's direction is combined with the vertex seed's position
    and time into a new infinite-track particle written to the frame.
    """
    global pt_tup, delete_keys
    content_map = []
    igel_seed = frame[vert_seed]
    # for i in xrange(len(pt_tup)):
    #     for j in xrange(num_mins):
    seed_id = 'Seed'
    millipede_id = 'Milli_'
    millipede = frame[millipede_id]
    millipede_fitparams = frame[millipede_id + 'FitParams']
    rlogl = millipede_fitparams.rlogl
    content_map.append([rlogl, millipede_id])
    # With the loop above disabled content_map has one entry; min() still
    # picks the smallest rlogl if more are added again.
    fit = min(content_map, key= lambda content_map : content_map[0])
    fit_rlogl = fit[0]
    fit_id = fit[1]
    print 'Looking for the best fit',fit_id
    track_fit = frame[fit_id]
    track_fit_fitparams = frame[fit_id + 'FitParams']
    zenith, azimuth = track_fit[0].dir.zenith, track_fit[0].dir.azimuth
    # Forge a new track: best-fit direction, seed vertex and time.
    forged_particle = dataclasses.I3Particle()
    forged_particle.energy = 0
    forged_particle.dir = dataclasses.I3Direction(zenith, azimuth)
    forged_particle.pos.x = igel_seed.pos.x
    forged_particle.pos.y = igel_seed.pos.y
    forged_particle.pos.z = igel_seed.pos.z
    forged_particle.time = igel_seed.time
    forged_particle.speed = 0.299792  # speed of light, m/ns
    forged_particle.shape = igel_seed.shape.InfiniteTrack
    frame["BlumeFit"]=forged_particle
#Run tray and make tray segments
tray.Add("I3Reader",Filenamelist=[sys.argv[1],sys.argv[2]])  # GCD file + data file
tray.Add(Seeder)  # inject the 'Seed' particle into each physics frame
@traysegment
def Blumefit(tray, name,
             If=lambda frame: True):
    """Tray segment running MuMillipede on the single 'Seed' particle.

    (The commented loop previously added one MuMillipede instance per
    direction variation in pt_tup and per llh minimum.)
    """
    # for i in xrange(len(pt_tup)):
    #     for j in xrange(num_mins):
    #         #Get igelfit seed, modify it, put it in the frame
    #         seed_id = 'Seed'+'%i%i' % (i,j)
    seed_id = 'Seed'
    #Run MuMillipede for seed particle
    millipede_id = 'Milli_'
    # NOTE(review): 'If and (lambda ...)' replaces any truthy caller-supplied
    # If predicate with the seed-presence check — confirm this is intended.
    tray.Add("MuMillipede", 'MuBlume',
             MuonPhotonicsService= MuonPhotonicsService,
             CascadePhotonicsService= CascadePhotonicsService,
             PhotonsPerBin= PhotonsPerBin,
             MuonSpacing= MuonSpacing,
             ShowerSpacing= ShowerSpacing,
             Boundary= Boundary,
             Pulses= Pulses,
             ExcludedDOMs= [BadDOMsList],
             SeedTrack= seed_id,
             Output= millipede_id,
             If= If and (lambda frame: seed_id in frame))
# Run the millipede fit segment on physics frames only.
tray.AddSegment(Blumefit, 'MuBlumefit',
                If = lambda frame : frame.Stop == frame.Physics)
@traysegment
def DeleteAll(tray, name,
              If=lambda frame: True):
    """Queue a Delete module removing every per-variation seed and millipede output."""
    global pt_tup, delete_keys
    for var_idx in range(len(pt_tup)):
        for min_idx in range(num_mins):
            tag = '%i%i' % (var_idx, min_idx)
            delete_keys.extend(['Seed' + tag,
                                'Milli_' + tag,
                                'Milli_' + tag + 'FitParams'])
    tray.Add('Delete', 'All_delete', Keys=delete_keys)
tray.Add(BestFit)  # pick the lowest-rlogl millipede result -> 'BlumeFit'
#tray.Add(DeleteAll)
tray.Add("I3Writer",
         DropOrphanStreams = [icetray.I3Frame.DAQ],
         filename=sys.argv[3])  # output i3 file (third CLI argument)
tray.Execute()
| [
"mamday@gmail.com"
] | mamday@gmail.com |
5840f5a44ec2b7e1d647341efaad3c9b25397fb8 | e660f7324473a1761df385d20cea9f2e21ec8e09 | /modules/trivia2.py | abfad3aa20f799e94bb8d12059eef7cd3bf105b2 | [
"MIT"
] | permissive | relet/phenny-games | ef659d2327da288af43a23c19bed492e667a583b | 68c037b5d5ab0708e44e88270bd4ba190e42b826 | refs/heads/master | 2021-01-22T09:04:13.600199 | 2015-03-26T08:36:33 | 2015-03-26T08:36:33 | 1,606,698 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 10,032 | py | #!/usr/bin/env python
"""
trivia.py - Phenny Wikipedia Trivia Module
Copyleft 2008, Thomas Hirsch, http://relet.net
Licensed under (among possibly others) the GPLv2
"""
from urllib2 import urlopen, Request
from operator import itemgetter
from time import time, sleep
import re, traceback, random
import yaml, bz2
useragent = "Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.0.4) Gecko/2008111317 Ubuntu/8.04 (hardy) Firefox/3.0.4"
url = "http://en.wikipedia.org/wiki/Special:Random"  # redirects to a random article
# NOTE(review): the header key contains a trailing ':' — likely should be
# 'User-Agent'; confirm before relying on it being sent correctly.
headers = {'User-Agent:': useragent }
config = "/home/relet.net/.phenny/trivia.yaml"  # persisted trivia state/scores
# Match "'''Title''' ... is/was/... <definition>" up to the first full stop.
re_trivial = "'''+(.*?)'''+[^\.\n]*? (is|was|are|were|will be|can be|consists of) (.*?)\."
cp_trivial = re.compile(re_trivial)
# Same, but capture through to the end of the paragraph instead.
re_paragraph = "'''+(.*?)'''+[^\.\n]*? (is|was|are|were|will be|can be|consists of) (.*?)\n"
cp_paragraph = re.compile(re_paragraph)
# Pre-load popular article titles (second column of the bzipped word list).
whitelist = bz2.BZ2File("wordlist/top100k.bz2", "r")
pages = []
for line in whitelist:
    pages.append(line.split()[1])
def setup(self):
    """Initialise the bot's trivia state, restoring it from *config* if possible.

    Falls back to a fresh state dict when the YAML file is missing, unreadable
    or does not contain a mapping.
    """
    try:
        # 'with' guarantees the handle is closed even if YAML parsing fails
        # (the original leaked the file object).
        with open(config, 'r') as yamldata:
            self.trivia = yaml.load(yamldata.read())
        if not isinstance(self.trivia, dict):
            # e.g. an empty file yields None; treat it like a corrupt file.
            raise yaml.YAMLError('unexpected YAML payload')
    except (IOError, yaml.YAMLError):
        # Was a bare 'except:', which also swallowed KeyboardInterrupt etc.;
        # only the expected failure modes are caught now.
        self.trivia = {
            'scores': {},
            'mode': 'paragraph',
            'source': 'random',
            'count': 0,
            'round': {},
        }
    self.trivia['lastaction'] = time()
    reset_trivia(self)
def reset_trivia(phenny):
    """Clear the per-question state (clue, solution, hidden answer, hint level)."""
    phenny.trivia.update(clue=None, solution=None, hidden=None, level=0)
def trivia(phenny, input):
if phenny.trivia['clue']:
numq = phenny.trivia['startcount']-phenny.trivia['count']
phenny.say(str(numq)+": "+phenny.trivia['clue'])
return
put = input[8:]
try:
number = int(put)
if number<1: number = 10
if number>1000: number = 1000
except:
number = 10
phenny.trivia['count']=number
phenny.trivia['startcount']=number
phenny.trivia['round'] = {}
new_trivia(phenny)
def stop(phenny, input):
phenny.trivia['count'] = 0
reset_trivia(phenny)
phenny.say("Trivia is over - for now.")
scores(phenny, input)
stop.commands=['strivia','stop']
stop.priority='high'
stop.thread=False
def new_trivia(phenny):
count = phenny.trivia['count']
if count < 1:
stop(phenny, ".stop")
return
#wait for a few seconds.
sleep(3)
if phenny.trivia['source']=='random':
#fetch Special:Random
request = Request(url, None, headers)
data = urlopen(request)
#see where it has redirected us
pageurl = data.geturl()
pagetitle = pageurl[29:]
else:
#choose one page from the wordlist
pagetitle = random.choice(pages)
phenny.trivia['lasttitle'] = pagetitle
#refetch that page as raw wiki code
try:
rawurl = "http://en.wikipedia.org/w/index.php?title="+pagetitle+"&action=raw"
request2 = Request(rawurl, None, headers)
data2 = urlopen(request2)
content = data2.read()
except:
phenny.say("URL not found: "+rawurl)
new_trivia(phenny)
return
#strip content from all possible wiki tags
rawdata = content
abbreviations = ['appr','ca']
for i in range(0,100): #a while true with an escape path
oldrawdata = rawdata
#strip dots, at least in wikilinks
rawdata = re.sub('(?P<one>\[\[[^\]]*)\.+(?P<two>[^\]]*\]\])','\g<one>\g<two>', rawdata)
#FIXME: currently, the dot in f.e. an abbr. is not distinguishable from a full stop.
#strip <ref..>..</ref> tags.
rawdata = re.sub('<ref.*?>.*?</ref>','', rawdata)
rawdata = re.sub('<ref.*?/>','', rawdata)
rawdata = re.sub('\{\{.*?\}\}','', rawdata) #rm all templates. all.
if oldrawdata == rawdata:
break
for abbr in abbreviations:
rawdata = re.sub(" "+abbr+"\.", " "+abbr, rawdata)
rawdata = re.sub('\[\[([^\]]*?\|)?(?P<text>.*?)\]\]','\g<text>', rawdata)
rawdata = re.sub(' ','', rawdata) #str replace would do fine
if phenny.trivia['mode'] == 'paragraph':
findings = re.findall(cp_paragraph, rawdata)
else:
findings = re.findall(cp_trivial, rawdata)
if len(findings)>0:
count = count - 1
phenny.trivia['count'] = count
solution = findings[0][0]
clue = findings[0][2]
clue = clue.replace('\'','')
clue = clue.replace('*','')
clue = clue[0:400] #character limit. extend by ...
if len(clue)==400:
if clue.rfind(" ")>-1:
clue = clue[0:clue.rfind(" ")]+"..."
#create the "hidden" version of the solution
hidden = re.sub('[A-Za-z0-9]','_', solution)
#replace any words of the solution with ~~~
words = solution.split()
for word in words:
if len(word)>3:
clue = re.sub("(?i)"+re.escape(word), '~~~', clue)
phenny.trivia['clue'] = clue
phenny.trivia['solution'] = solution
phenny.trivia['lastclue'] = clue
phenny.trivia['lastsolution'] = solution
phenny.trivia['hidden'] = hidden
numq = phenny.trivia['startcount']-phenny.trivia['count']
phenny.say(str(numq)+": "+clue)
phenny.trivia['lastaction']=time()
elif content.find('may refer to')>-1:
phenny.say(pagetitle+" seems to be a disambiguation page, sorry.")
new_trivia(phenny)
return
elif content.find('ist of')>-1:
phenny.say(pagetitle+" seems to be one of those annoying lists of everything and nothing, sorry.")
new_trivia(phenny)
return
else: #a default fallback still has to be implemented
logs = open('fails.log','aw')
logs.write(pagetitle+"\n")
logs.write(rawdata+"\n")
logs.write("----\n")
logs.close()
phenny.say("I was not yet able to parse the page "+pagetitle+", sorry.")
new_trivia(phenny)
return
trivia.commands = ['trivia']
trivia.priority = 'high'
trivia.thread=False
def check_timer(phenny):
if phenny.trivia['solution']:
last = phenny.trivia['lastaction']
if time()>last + 5.0:
hint(phenny, "...")
def build_hint(hidden, solution, level, phenny, input):
nruter = ""
count = 0
spare = 0
for i in range(0,len(solution)):
if solution[i]==' ':
count = 0
if count < level:
nruter += solution[i]
else:
nruter += hidden[i]
spare += 1
count+=1
if spare<3:
solve(phenny, input)
return ""
else:
return nruter + " ("+str(max(1, 10 - 2 * level))+" points)"
def hint(phenny, input):
if phenny.trivia['hidden']:
phenny.trivia['level'] += 1
level = phenny.trivia['level']
hidden = phenny.trivia['hidden']
solution = phenny.trivia['solution']
phenny.say(build_hint(hidden, solution, level, phenny, input))
phenny.trivia['lastaction']=time()
else:
phenny.say("Try .trivia first.")
#hint.commands = ['hint','wtf','what']
#hint.priority = 'low'
#hint.thread=False
def mode(phenny, input):
if input == ".mode":
phenny.say("Currently in "+phenny.trivia['mode']+" mode.")
elif input == ".mode paragraph":
phenny.trivia['mode'] = 'paragraph'
phenny.say("Ok. We're now in paragraph mode.")
else:
phenny.trivia['mode'] = 'line'
phenny.say("Ok. We're now in line mode.")
mode.commands = ['mode']
mode.priority = 'low'
mode.thread=False
def source(phenny, input):
if input == ".source":
phenny.say("Currently using "+phenny.trivia['source']+" source.")
elif input == ".source random":
phenny.trivia['source'] = 'random'
phenny.say("Ok. Using Special:Random as trivia input.")
else:
phenny.trivia['source'] = 'list'
phenny.say("Ok. We're now using the wordlist.")
source.commands = ['source']
source.priority = 'low'
source.thread=False
def solve(phenny, input):
if phenny.trivia['solution']:
phenny.say("A: "+phenny.trivia['solution'])
reset_trivia(phenny)
new_trivia(phenny) #check if there's some more
else:
phenny.say("Try .trivia first.")
solve.commands = ['solve']
solve.priority = 'low'
solve.thread=False
def scores(phenny, input):
scores = phenny.trivia['round']
ordered = sorted(scores.items(), key=itemgetter(1), reverse=True)
msg = "This round: "
for pair in ordered:
msg += pair[0]+": "+str(pair[1])+"; "
phenny.say(msg)
scores.commands = ['scores', 'score']
scores.priority = 'low'
scores.thread=False
def hof(phenny, input):
scores = phenny.trivia['scores']
ordered = sorted(scores.items(), key=itemgetter(1), reverse=True)
msg = "Total trivia scores: "
for pair in ordered[0:10]:
msg += pair[0]+": "+str(pair[1])+"; "
phenny.say(msg)
hof.commands = ['hof', 'top']
hof.priority = 'low'
hof.thread=False
def canonical(string):
canon = re.sub('[^a-zA-Z0-9]','',string).lower()
canon = re.sub('s\b','',canon)
return canon
def answer(phenny, input):
if phenny.trivia['solution']:
if canonical(phenny.trivia['solution'])==canonical(input):
scores = phenny.trivia['scores']
thisround = phenny.trivia['round']
nick = input.nick
thisturn = max(1, 10 - 2 * (phenny.trivia['level']))
if nick in scores:
scores[nick] += thisturn
else:
scores[nick] = thisturn
if nick in thisround:
thisround[nick] += thisturn
else:
thisround[nick] = thisturn
phenny.trivia['scores']=scores
yamldump = open(config,'w') #save teh permanent scores
yamldump.write(yaml.dump(phenny.trivia))
yamldump.close()
phenny.trivia['round']=thisround
phenny.say("Yay! "+nick+" is right and gains "+str(thisturn)+" points! This game: "+str(thisround[nick])+". Total: "+str(scores[nick]))
reset_trivia(phenny)
new_trivia(phenny) #check if there's some more
else:
check_timer(phenny)
answer.rule=".*"
answer.priority='high'
answer.thread=False
def report(phenny, input):
logs = open('reports.log','aw')
logs.write(input+"\n")
logs.write(phenny.trivia['lasttitle']+"\n")
logs.write(phenny.trivia['lastclue']+"\n")
logs.write(phenny.trivia['lastsolution']+"\n")
logs.write("---\n")
logs.close()
phenny.say("Thank you for your report. The question and your comment have been recorded.")
report.commands = ['report', 'r']
report.priority = 'low'
report.thread=False
if __name__ == '__main__':
print __doc__.strip()
| [
"relet.net@h1353430.stratoserver.net"
] | relet.net@h1353430.stratoserver.net |
012d85c16aa3834cd7e943c22a12b73f34fb5ca3 | 887e917333ed5711722285a9b8c03005b8315bb2 | /landing/admin.py | 778f0014a209e1acbdd5f0161cb9feaa6dbeb9ab | [] | no_license | AlexNavidu/LandingTest | 6204c9f71e7758003e84c1be0b326d85643defe8 | 69bc3e3d50eeded2dca2feb2f4c2994b4c23fefd | refs/heads/master | 2021-01-20T08:43:40.050444 | 2017-05-03T19:43:44 | 2017-05-03T19:43:44 | 90,186,894 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 422 | py | from django.contrib import admin
from .models import Subscriber
class SubscriberAdmin(admin.ModelAdmin):
# list_display = ["name", "email"]
list_display =[field.name for field in Subscriber._meta.fields]
# exclude =["email"]
fields = ["email"]
list_filter = ["name"]
search_fields = ["email","name",]
class Meta:
models = Subscriber
admin.site.register(Subscriber, SubscriberAdmin)
| [
"alexnavidu@gmail.com"
] | alexnavidu@gmail.com |
a0c7286ba5f4f12c116f1556cd289a9a1f0f65ff | 8f7d864d154e333bf8fa7986288d37134b8578b7 | /nosqlbiosets/pathways/index_metabolic_networks.py | 340204cdb90b39c90103cf20951c62283fd0ffc9 | [
"MIT"
] | permissive | uludag/nosql-biosets | 9ac1077d8232fa0f99c77d5d46049e4bfa8a8dcb | b3148efcccdcaa21f684237bb375c1f513bdcee8 | refs/heads/master | 2023-04-29T04:28:13.433903 | 2023-04-24T13:10:46 | 2023-04-24T13:10:46 | 87,645,482 | 16 | 3 | null | null | null | null | UTF-8 | Python | false | false | 6,641 | py | #!/usr/bin/env python
"""Index metabolic network files, current/initial version is limited
to SBML files and psamm-model-collection project yaml files """
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import json
import logging
import os
import cobra
from psamm.datasource import native, sbml
from psamm.expression.boolean import ParseError
from pymongo import IndexModel
from nosqlbiosets.dbutils import DBconnection
logger = logging.getLogger(__name__)
INDEX = 'biosets'
DOCTYPE = 'metabolic_network'
def psamm_yaml_to_sbml(inf):
reader = native.ModelReader.reader_from_path(inf)
print(reader.name)
m = reader.create_model()
writer = sbml.SBMLWriter(cobra_flux_bounds=True)
tempsbml = inf + ".sbml"
print(tempsbml)
try:
writer.write_model(tempsbml, m, pretty=True)
return tempsbml
except TypeError as e:
print("Type error while saving %s in SBML: %s" % (inf, e))
except ParseError as e:
print("Parser error while saving %s in SBML: %s" % (inf, e))
return None
def sbml_to_cobra_json(inf):
c = cobra.io.read_sbml_model(inf)
r = cobra.io.model_to_dict(c)
return r
class SBMLIndexer(DBconnection):
def __init__(self, db, index=INDEX, doctype=DOCTYPE, host=None, port=None):
self.index = index
self.doctype = doctype
self.db = db
es_indexsettings = {
"index.mapping.total_fields.limit": 8000,
"number_of_replicas": 0}
super(SBMLIndexer, self).__init__(db, index, host, port,
recreateindex=True,
es_indexsettings=es_indexsettings)
if db != "Elasticsearch":
print(doctype)
self.mcl = self.mdbi[doctype]
# Read and index metabolic network files, PSAMM yaml or sbml
def read_and_index_model_files(self, infile):
if os.path.isdir(infile):
for child in os.listdir(infile):
c = os.path.join(infile, child)
if os.path.isdir(c) and os.path.exists(
os.path.join(c, "model.yaml")):
c = os.path.join(c, "model.yaml")
self.read_and_index_model_file(c)
else:
self.read_and_index_model_file(infile)
if self.db == 'Elasticsearch':
self.es.indices.refresh(index=self.index)
else:
index = IndexModel([("name", "text")])
self.mdbi[self.doctype].create_indexes([index])
# Read PSAMM yaml or SBML file, index using the database selected earlier
def read_and_index_model_file(self, infile):
print("Reading/indexing %s " % infile)
if not os.path.exists(infile):
print("Input file not found")
raise FileNotFoundError(infile)
if infile.endswith(".yaml"):
try:
infile_ = psamm_yaml_to_sbml(infile)
if infile_ is not None:
self.read_and_index_sbml_file(infile_)
else:
print("Unable to process PSAMM yaml file: %s" % infile)
except Exception as e:
print("Error while processing PSAMM yaml file: %s, %s" %
(infile, e))
elif infile.endswith(".xml") or infile.endswith(".sbml"):
try:
self.read_and_index_sbml_file(infile)
except Exception as e:
print("Error while processing SBML file: %s, %s" %
(infile, e))
else:
print(
"Only .xml, .sbml (for SBML) and .yaml (for PSAMM)"
" files are supported")
return
# Read sbml file, index using the function indexf
def read_and_index_sbml_file(self, infile):
if os.path.exists(infile):
r = sbml_to_cobra_json(infile)
# Changes to COBRAby json, see readme file in this folder
if r is not None:
for react in r['reactions']:
ml = [{"id": mid, "st": react['metabolites'][mid]}
for mid in react['metabolites']]
react['metabolites'] = ml
if r['id'] is None:
del (r['id'])
if self.db == "Elasticsearch":
self.es_index_sbml(1, r)
else: # "MongoDB"
self.mongodb_index_sbml(1, r)
else:
print("SBML file not found")
# Index metabolic network model with Elasticsearch
def es_index_sbml(self, _, model):
docid = model['name'] if 'name' in model else model['id']
try:
self.es.index(index=self.index, doc_type=self.doctype,
id=docid, body=json.dumps(model))
return True
except Exception as e:
print(e)
return False
# Index metabolic network model with MongoDB
def mongodb_index_sbml(self, _, model):
docid = model['name'] if 'name' in model else model['id']
spec = {"_id": docid}
try:
self.mcl.update(spec, model, upsert=True)
return True
except Exception as e:
print(e)
return False
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Index metabolic network files (SBML, or PSAMM yaml)'
' with Elasticsearch or MongoDB')
parser.add_argument('-infile', '--infile',
help='Input file or folder name'
' with Metabolic network file(s) in PSAMM .yaml'
' or SBML .xml formats')
parser.add_argument('--index',
default=INDEX,
help='Name of the Elasticsearch index'
' or MongoDB database')
parser.add_argument('--doctype',
default=DOCTYPE,
help='Name for the Elasticsearch document type or '
'MongoDB collection')
parser.add_argument('--host',
help='Elasticsearch or MongoDB server hostname')
parser.add_argument('--port',
help="Elasticsearch or MongoDB server port number")
parser.add_argument('--db', default='Elasticsearch',
help="Database: 'Elasticsearch' or 'MongoDB'")
args = parser.parse_args()
indxr = SBMLIndexer(args.db, args.index, args.doctype, args.host, args.port)
indxr.read_and_index_model_files(args.infile)
| [
"mahmut.uludag@kaust.edu.sa"
] | mahmut.uludag@kaust.edu.sa |
b041e4d0442deaa0295edf79b28d9af49a57e475 | a5756dfd61e3cd9cf60d562be156a8ee891950cc | /api.py | 9cbcebb0bb3ee32793ce97c04c3ec77f0b1b268a | [] | no_license | SuperHQC/CastingAgencyBackend | 732a35dbdd4ea63d1899325be65bdbcd2e814968 | 83d0813549cf1cec9ea8f169aa1b2c44797fade2 | refs/heads/master | 2023-02-09T02:14:35.218563 | 2020-02-29T03:03:53 | 2020-02-29T03:03:53 | 243,426,565 | 0 | 0 | null | 2023-02-02T06:17:33 | 2020-02-27T04:00:43 | Python | UTF-8 | Python | false | false | 6,231 | py | import os
from flask import Flask, request, jsonify, abort
from sqlalchemy import exc
import json
from flask_cors import CORS
from models import db_drop_and_create_all, setup_db, Actor, Movie
from auth import AuthError, requires_auth
app = Flask(__name__)
setup_db(app)
CORS(app)
'''
!! NOTE uncomment the following line to initialize the datbase
!! NOTE THIS WILL DROP ALL RECORDS AND START YOUR DB FROM SCRATCH
!! NOTE THIS MUST BE UNCOMMENTED ON FIRST RUN
'''
# db_drop_and_create_all()
# ROUTES
@app.route("/actors", methods=["GET"])
@requires_auth("get:actors")
def get_actors(payload):
'''
get actors
receive get request, return a list of actors
'''
actors = [a.format() for a in Actor.query.all()]
# print(actors)
return jsonify({"success": True, "actors": actors, "total": len(actors)})
@app.route("/actors", methods=["POST"])
@requires_auth("add:actor")
def create_actor(payload):
'''
create_actor
receive post request, return the new actor id and a list of actors
'''
body = request.get_json()
if not body:
abort(422)
new_name = body.get("name", None)
new_age = body.get("age", None)
new_gender = body.get("gender", None)
if not new_name:
abort(422)
new_actor = Actor(name=new_name, age=new_age, gender=new_gender,)
new_actor.insert()
actors = [a.format() for a in Actor.query.all()]
# print(actors)
return jsonify({"success": True, "new_actor_id": new_actor.id, "actors": actors})
@app.route("/actors/<int:id>", methods=["DELETE"])
@requires_auth("delete:actor")
def delete_actor(payload, id):
'''
delete actor
Receive delete request, then return deleted id and list of actors
'''
actor = Actor.query.filter(Actor.id == id).one_or_none()
if not actor:
abort(404)
actor.delete()
actors = [a.format() for a in Actor.query.all()]
return jsonify({"success": True, "deleted_id": id, "actors": actors})
@app.route("/actors/<int:id>", methods=["PATCH"])
@requires_auth("modify:actor")
def update_actor(payload, id):
'''
update actor
Receive patch request to update actor, then return list of actors
'''
# print(id)
actor = Actor.query.filter(Actor.id == id).one_or_none()
if not actor:
abort(404)
body = request.get_json()
if not body:
abort(422)
new_name = body.get("name", None)
new_age = body.get("age", None)
new_gender = body.get("gender", None)
if new_name is None:
new_name = actor.name
if new_age is None:
new_age = actor.age
if new_gender is None:
new_gender = actor.gender
actor.name = new_name
actor.age = new_age
actor.gender = new_gender
actor.update()
actors = [a.format() for a in Actor.query.all()]
return jsonify({"success": True, "updated_id": id, "actors": actors})
"""
/movies
"""
@app.route("/movies", methods=["GET"])
@requires_auth("get:movies")
def get_movies(payload):
'''
get movies
receive get request, return a list of movies
'''
movies = [m.format() for m in Movie.query.all()]
return jsonify({"success": True, "movies": movies, "total": len(movies)})
@app.route("/movies", methods=["POST"])
@requires_auth("add:movie")
def create_movie(payload):
'''
create movie
recieve post request, then return the new movie id, and list of movies
'''
body = request.get_json()
if not body:
abort(422)
new_title = body.get("title", None)
new_release = body.get("release", None)
if not new_title:
abort(422)
new_movie = Movie(title=new_title, release=new_release)
new_movie.insert()
movies = [m.format() for m in Movie.query.all()]
return jsonify({"success": True, "new_movie_id": new_movie.id, "movies": movies})
@app.route("/movies/<int:id>", methods=["DELETE"])
@requires_auth("delete:movie")
def delete_movie(payload, id):
'''
delete movie
Receive delete request, then return deleted id and list of movies
'''
movie = Movie.query.filter(Movie.id == id).one_or_none()
if not movie:
abort(404)
movie.delete()
movies = [m.format() for m in Movie.query.all()]
return jsonify({"success": True, "deleted_id": id, "movies": movies})
@app.route("/movies/<int:id>", methods=["PATCH"])
@requires_auth("modify:movie")
def update_moive(payload, id):
'''
update movie
Receive patch request to update movies, then return list of movies
'''
movie = Movie.query.filter(Movie.id == id).one_or_none()
if not movie:
abort(404)
body = request.get_json()
if not body:
abort(422)
new_title = body.get("title", None)
new_release = body.get("release", None)
if new_title is None:
new_title = movie.title
if new_release is None:
new_release = movie.release
movie.title = new_title
movie.release = new_release
movie.update()
movies = [m.format() for m in Movie.query.all()]
return jsonify({"success": True, "updated_id": id, "movies": movies})
# Error Handling
@app.errorhandler(422)
def unprocessable(error):
'''
error handling for unprocessable entity
'''
return jsonify({
"success": False,
"error": 422,
"message": "unprocessable"
}), 422
@app.errorhandler(404)
def not_found(error):
'''
error handler for 404
'''
return jsonify({
"success": False,
"error": 404,
"message": "Resource Not Found"
}), 404
@app.errorhandler(400)
def bad_request(error):
'''
error for bad request
'''
return jsonify({
"success": False,
"error": 400,
"Message": "Bad Request"
}), 400
@app.errorhandler(405)
def method_not_allowed(error):
'''
error for unallowed method
'''
return jsonify({
"success": False,
"error": 405,
"message": "Method not allowed"
}), 405
@app.errorhandler(AuthError)
def not_auth(AuthError):
'''
error handler for AuthError
'''
return jsonify({
"success": False,
"error": AuthError.error['code'],
"message": AuthError.error['description']
}), 401
| [
"hqc@Erwins-MacBook-Pro.local"
] | hqc@Erwins-MacBook-Pro.local |
d7af08baecfb386bf96937f7c257974459261bd8 | a3191e8edc7e1a8bc710861163bcf8a7990bd60b | /tinderforeduapp/migrations/0006_profile_college.py | 3c574ce5c9b5b96e9715a731d599a1e5ba2c504f | [] | no_license | KtdevDesktop/matchandlearn | 989d318fe6fbfc94d88603bc021155ecb41ca210 | fcd89f1586b3667b8f62746a4d7b5e52ba374c69 | refs/heads/master | 2020-12-12T07:35:19.899152 | 2020-07-16T20:11:13 | 2020-07-16T20:11:13 | 234,079,642 | 2 | 2 | null | 2020-07-16T20:11:15 | 2020-01-15T12:49:48 | JavaScript | UTF-8 | Python | false | false | 396 | py | # Generated by Django 3.0.2 on 2020-01-29 15:13
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('tinderforeduapp', '0005_profile'),
]
operations = [
migrations.AddField(
model_name='profile',
name='college',
field=models.CharField(blank=True, max_length=100),
),
]
| [
"s6101012630160@email.kmutnb.ac.th"
] | s6101012630160@email.kmutnb.ac.th |
36ec74d2153acbd86ddc37f139f312281ae8f9ce | d32a817b0b3351bb3df46e288398f93097aae4b9 | /03_Pandas/G-Group.py | 42799977bede74738c0f4c640cd6137dfdf726a1 | [] | no_license | 2019-a-gr1-python/Py-Olmedo-Velez-Veronica-Elizabeth | 5538bc2f42c88533ff62fa46f35dea8b447969e8 | 900c28ed64d22f0f27213df3abfa1a6c7a49936a | refs/heads/master | 2020-05-04T16:56:16.394382 | 2019-07-31T16:01:33 | 2019-07-31T16:01:33 | 179,292,793 | 0 | 1 | null | 2019-07-31T16:02:32 | 2019-04-03T13:10:53 | Jupyter Notebook | UTF-8 | Python | false | false | 3,175 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed May 29 08:22:55 2019
@author: VeronicaOlmedo
"""
#Agrupamiento
import pandas as pd
import numpy as np
import math
path_guardado= '/Users/VeronicaOlmedo/Documents/GitHub/Py-Olmedo-Velez-Veronica-Elizabeth/03_Pandas/Data/csv/artwork_data.pickle'
df = pd.read_pickle(path_guardado)
#Saca una copy con los 2 artistas
seccion_df = df.iloc[49980:50019,:].copy()
df_agrupado_ay = seccion_df.groupby('acquisitionYear')
df_agrupado_ay = seccion_df.groupby('artist')
type(df_agrupado_ay)
for acquisitionYear, registros in df_agrupado_ay:
print(acquisitionYear)
#print(registros)
#Llenar valores vacios
def llenar_valores_vacios(series):
valores = series.values_counts()
if (valores.empty):
return series
"""
# 1) iterar y sumar los valores
sumatoria = 0
numero_nans = 0
for valor in series:
print(valor)
print(type(valor))
if type(valor) == str:
sumatoria = sumatoria + int(valor)
if type(valor) == float:
numero_nans = numero_nans + 1
print(sumatoria)
# 2) Dividir para el numero de valores
division = series.size - numero_nans
valor_mas_utilizado = sumatoria / division
print(valor_mas_utilizado)
"""
nuevo_valor = series.fillna(valores.index[0])
return nuevo_valor
#Agrupar df por el artistas en una lista
#Guardar los dataframe, creamos otro y lo devolvemos
def transformar_df(df):
df_artist = df.groupby('artist')
arreglo_df_grupo = []
for nombre_artista, registros_agrupados in df_artist:
copia=registros_agrupados.copy()
serie_medium = registros_agrupados['medium']
serie_units = registros_agrupados['units']
copia.loc[:,'medium'] = llenar_valores_vacios(serie_medium) #Material en el que fue echo la obra de arte
copia.loc[:,'units'] = llenar_valores_vacios(serie_units)
arreglo_df_grupo.append(copia)
nuevo_df_transformado = pd.concat(arreglo_df_grupo)
return nuevo_df_transformado
#Para tener los titulos
seccion_df_t = transformar_df(seccion_df)
df_agrupado_titulo = df.groupby('title')
#size devuelve la serie
print(df_agrupado_titulo.size())
print(type(df_agrupado_titulo.size()))
#Titulos ordenados
serie_titulos = df_agrupado_titulo.size().sort_values()
#Titulos ordenados acendentemente
serie_titulos = df_agrupado_titulo.size().sort_values(ascending=False)
#Filtrado de Dataframe
df_filtrado = df.filter(items=["artis","title"])
condicion = lambda x: len(x.index) > 1
#Se obtiene el dataframe sin los titulos duplicados
df_titulos_duplicados = df_agrupado_titulo.filter(condicion)
#Ordenados por titulos duplicados
resultado = df_titulos_duplicados.sort_values('title')
resultado = df_titulos_duplicados.sort_values('title',inplace=True)
#Agrupado por año
df_agrupado_ano= df.groupby('year')
ano = df_agrupado_ano.size()
| [
"veronica_1998825@hotmail.com"
] | veronica_1998825@hotmail.com |
7dcdf535c808a5c36e494aa9a1cf757ec49afd7f | 5052845ed84d0c67445938df82912984e0fdd58a | /algorithm/heap_3.py | cd8d722971c4bd929ce1b07bae251d65896e4ce5 | [] | no_license | devwon/TIL | 8797d808c6a81eb927ea78aff2dd2f7cc6de000e | 4623df7c2cf55601f2f0ab1414c929188eb72db0 | refs/heads/master | 2023-07-22T00:34:20.414631 | 2021-08-10T13:04:31 | 2021-08-10T13:04:31 | 117,534,500 | 0 | 0 | null | 2021-07-01T14:45:28 | 2018-01-15T11:01:26 | Python | UTF-8 | Python | false | false | 1,438 | py | # https://programmers.co.kr/learn/courses/30/lessons/42628?language=python3
# v1은 온전히 나의 힘으로 푼 코드!!
import heapq
import re
def solution(operations):
tasks = []
q = []
for i in range(len(operations)):
heapq.heappush(tasks, [i, operations[i]])
while len(tasks) > 0:
# v1
# operation = heapq.heappop(tasks)[1]
# if operation.startswith('I'):
# # 숫자 삽입
# heapq.heappush(q, int(''.join(map(str, re.findall('[+-]?\d+', operation)))))
# elif operation == 'D 1' and len(q) > 0:
# # 최대값 삭제
# q = heapq.nlargest(len(q), q)[1:]
# heapq.heapify(q)
# elif operation == 'D -1' and len(q) > 0:
# # 최솟값 삭제
# heapq.heappop(q)
# v2
operation = heapq.heappop(tasks)[1]
command = operation[0]
value = int(operation[2:])
if command == 'I':
# 숫자 삽입
heapq.heappush(q, int(''.join(map(str, re.findall('[+-]?\d+', operation)))))
elif len(q) != 0:
if value > 0:
# 최대값 삭제
q = heapq.nlargest(len(q), q)[1:]
heapq.heapify(q)
else:
# 최솟값 삭제
heapq.heappop(q)
answer = [heapq.nlargest(1, q)[0], q[0]] if len(q) > 0 else [0, 0]
return answer | [
"wonhyerin96@gmail.com"
] | wonhyerin96@gmail.com |
e6539a96dc9a4f98e1fbe662239ea27dfeab7839 | b53f5b2cfd20e4fac32a7957231a153941c54819 | /projects/migrations/0002_auto_20210106_1414.py | cf23b200fa823bb5d6d2d4d11b7f1b7988f21dbf | [] | no_license | JL-stefan/kanban | d7d047e58d130cfeeb3a1b6232cc47e5cd71aa38 | fc0718eb271559f73c8b2df33ecac228f2673d49 | refs/heads/main | 2023-02-17T04:11:01.956347 | 2021-01-08T10:35:39 | 2021-01-08T10:35:39 | 326,596,292 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,516 | py | # Generated by Django 3.1.4 on 2021-01-06 14:14
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('projects', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Task',
fields=[
('createAt', models.DateTimeField(auto_now_add=True, verbose_name='创建时间')),
('createBy', models.IntegerField(verbose_name='创建人员')),
('updateAt', models.DateTimeField(auto_now=True, verbose_name='更新时间')),
('updateBy', models.IntegerField(verbose_name='更新人员')),
('id', models.AutoField(primary_key=True, serialize=False)),
('title', models.CharField(max_length=128, verbose_name='标题')),
('user', models.IntegerField(verbose_name='执行者')),
('tag', models.CharField(max_length=64, verbose_name='标签')),
('desc', models.CharField(max_length=1024, verbose_name='描述')),
('projectID', models.IntegerField(verbose_name='项目ID')),
('iterationID', models.IntegerField(verbose_name='迭代ID')),
],
options={
'verbose_name': '任务',
'verbose_name_plural': '任务信息',
},
),
migrations.RenameField(
model_name='iteration',
old_name='description',
new_name='desc',
),
]
| [
"zeyongma@163.com"
] | zeyongma@163.com |
ddcdd315cc4c16e70a0e7fa66863bed197df8296 | 75a1dd845a25ba08b8fb50853ca0647a7995197d | /src/subimage/src/SuironIO.py | 373b981681d161a4b8e68a60fac89bab6800a9a1 | [] | no_license | lbaitemple/templeracecar | 2d45698b7fb6d67449c55fc2c03bbeb49f833ccf | f2bb433a38effa5f27f5d5dbc372976dc9fa9a62 | refs/heads/master | 2021-04-09T04:51:59.919006 | 2020-10-16T23:59:34 | 2020-10-16T23:59:34 | 248,840,121 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,970 | py | import time
import random
import numpy as np
import pandas as pd
import cv2, os, csv
import matplotlib.pyplot as plt
from functions import cnn_to_raw
from img_serializer import serialize_image
from file_finder import get_new_filename
import clock
import subprocess
import re
class SuironIO(clock.Action):
"""
Class which handles input output aspect of the suiron
- Reads inputs from webcam and normalizes them
- Also reads serial input and write them to file
"""
# Constructor
def __init__(self, id=1, width=72, height=48, depth=3, baudrate=57600):
# Image settings
self.width = int(width)
self.height = int(height)
self.depth = int(depth)
self.sz=self.width *self.height *self.depth
self.locked = False
# Video IO
# self.cap = cv2.VideoCapture(id) # Use first capture device
# Serial IO
self.outfile = None
self.header= False
# In-memory variable to record data
# to prevent too much I/O
self.frame_results = []
self.servo_results = []
self.motorspeed_results = []
""" Functions below are used for inputs (recording data) """
# Initialize settings before saving
def init_saving(self, folder='data', filename='output_', extension='.csv'):
#
fileoutname = get_new_filename(folder=folder, filename=filename, extension=extension)
# Filename to save serial data and image data
# Output file
print(fileoutname)
self.imagefolder =fileoutname[0:fileoutname.find(".")]
number = int(re.search(r'\d+', self.imagefolder).group())
number = str(number)
number1 = (str(number)).zfill(4)
self.imagefolder =self.imagefolder.replace(number, number1)
self.imagefolder =self.imagefolder.replace("/", "/images/")##
self.imagecount=0
if not os.path.exists(self.imagefolder) and (os.environ['FORMAT']=="img"):
subprocess.check_output(['mkdir', '-p', self.imagefolder])
#os.mkdir(self.imagefolder)
print("Directory " , self.imagefolder , " Created ")
# else:
# print("Directory " , self.imagefolder , " already exists")
outfile = open(fileoutname, 'w') # Truncate file first
self.outfile = open(fileoutname, 'a')
self.df = pd.DataFrame([], columns=['image', 'servo', 'motor'])
#self.df.to_csv(self.outfile) # Prevents data to be written in .csv file
self.header = True
def start(self, period):
thread=clock.Clock(self, period)
thread.start()
return thread
def run(self):
time.sleep(0.01)
# Saves both inputs
def lock(self, locked = True):
self.locked = locked
def unlock(self):
self.locked = False
def check_lock(self):
return self.locked
def record_inputs(self, s_inputs, frame, dataFormat):
# Frame is just a numpy array
if (not self.check_lock()):
# frame = self.get_frame()
self.lock()
# Serial inputs is a dict with key 'servo', and 'motor'
# If its not in manual mode then proceed
# print("yeah")
# print("helllo {}".format(s_inputs))
if s_inputs:
servo = s_inputs['servo']
motor = s_inputs['motor']
# Rounds the servo and motor data into 2 digits and make it a whole number
servo = round(servo, 2)
servo = servo*100
servo = int(servo)
motor = round(motor, 2)
motor = motor*100
motor = int(motor)
# Append to memory
# tolist so it actually appends the entire thing
if (dataFormat=="c6sv"):
frame=self.normalize_frame(frame)
dat=serialize_image(frame)
print(frame.shape, len(dat))
self.frame_results.append(dat)
if(dataFormat=="img"):
## Converts data into strings and save them
imagecount = str(self.imagecount)
servo = str(servo)
motor = str(motor)
## Save data based on values
filename=self.imagefolder+"/"+imagecount.zfill(7)+"_s"+servo+"_m"+motor+".jpg"
print(filename)
cv2.imwrite(filename, frame)
self.frame_results.append(filename)
self.imagecount=self.imagecount+1
self.servo_results.append(servo)
self.motorspeed_results.append(motor)
self.append_inputs()
self.unlock()
# Gets frame
def get_frame(self):
ret, frame = self.cap.read()
# If we get a frame, save it
if not ret:
raise IOError('No image found!')
frame = self.normalize_frame(frame)
return frame
# Gets frame
    def get_camframe(self):
        """Return the raw camera frame with no processing.

        NOTE(review): the read status is ignored, so this can return None
        when the capture fails -- callers must handle that.
        """
        ret, frame = self.cap.read()
        return(frame)
# Gets frame for prediction
    def get_frame_prediction(self):
        """Read a frame prepared for model inference: RGB, resized, uint8.

        Unlike normalize_frame() the result keeps its 2-D shape (it is not
        flattened).  Raises IOError when the capture fails.
        """
        ret, frame = self.cap.read()
        # if we get a frame
        if not ret:
            raise IOError('No image found!')
        frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
        frame = cv2.resize(frame, (self.width, self.height), interpolation=cv2.INTER_CUBIC)
        frame = frame.astype('uint8')
        return frame
# Normalizes inputs so we don't have to worry about weird
# characters e.g. \r\n
def normalize_serial(self, line):
# Assuming that it receives
# servo, motor
# 'error' basically means that
# its in manual mode
try:
line = line.replace('\n', '').split(',')
line_dict = {'servo': int(line[0]), 'motor': int(line[1])}
return line_dict
except:
return None
# Normalizes frame so we don't have BGR as opposed to RGB
    def normalize_frame(self, frame):
        """Convert a BGR capture frame to a flat uint8 RGB vector.

        Resizes to (self.width, self.height) then flattens, so the result
        is 1-D of length width*height*3.
        """
        frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
        frame = cv2.resize(frame, (self.width, self.height), interpolation=cv2.INTER_CUBIC)
        frame = frame.flatten()
        frame = frame.astype('uint8')
        return frame
# Saves files
    def save_inputs(self):
        """Snapshot the in-memory results into a dict.

        NOTE(review): both write-out lines are commented, so this is
        currently a no-op apart from building ``raw_data``.
        """
        raw_data = {
            'image': self.frame_results,
            'servo': self.servo_results,
            'motor': self.motorspeed_results
        }
        #df = pd.DataFrame(raw_data, columns=['image', 'servo', 'motor'])
        #df.to_csv(self.outfile)
# Saves files
    def append_inputs(self):
        """Flush the buffered (image, servo, motor) rows to the CSV log.

        NOTE(review): the ``self.header`` flag looks inverted -- when True
        the rows are appended with header=False and the buffers cleared;
        when False a full overwrite happens once and the flag is set to
        True.  Confirm against the __init__ default before changing.
        """
        raw_data = {
            'image': self.frame_results,
            'servo': self.servo_results,
            'motor': self.motorspeed_results
        }
        # df = pd.DataFrame(raw_data, columns=['image', 'servo', 'motor'])
        # df.to_csv(self.outfile)
        if (self.header):
            self.df = pd.DataFrame(raw_data, columns=['image', 'servo', 'motor'])
            self.df.to_csv(self.outfile, mode='a', header=False) # Saves data into .csv
            # Reset the in-memory buffers now that they are on disk.
            self.frame_results = []
            self.servo_results = []
            self.motorspeed_results = []
        else:
            self.df = pd.DataFrame(raw_data, columns=['image', 'servo', 'motor'])
            self.df.to_csv(self.outfile)
            self.header=True
""" Functions below are used for ouputs (controlling servo/motor) """
# Controls the servo given the numpy array outputted by
# the neural network
    def servo_write(self, np_y):
        """Convert the network output *np_y* to a servo angle and send it.

        The angle is scaled away from the 90-degree centre (x0.85 below,
        x1.15 above) to exaggerate steering.  NOTE(review): assumes
        cnn_to_raw returns roughly [0, 180] degrees -- confirm.
        """
        servo_out = cnn_to_raw(np_y)
        if (servo_out < 90):
            servo_out *= 0.85
        elif (servo_out > 90):
            servo_out *= 1.15
        self.ser.write('steer,' + str(servo_out) + '\n')
        # Short pause so the microcontroller can drain its serial buffer.
        time.sleep(0.02)
# Sets the motor at a fixed speed
    def motor_write_fixed(self):
        """Command a fixed forward speed (protocol value 80)."""
        self.ser.write('motor,80\n')
        time.sleep(0.02)
# Stops motors
    def motor_stop(self):
        """Stop the drive motor (90 is the neutral protocol value)."""
        self.ser.write('motor,90\n')
        time.sleep(0.02)
# Staightens servos
    def servo_straighten(self):
        """Centre the steering servo at 90 degrees.

        NOTE(review): unlike the other commands this one sends no trailing
        '\\n'; confirm whether the firmware needs the newline terminator.
        """
        self.ser.write('steer,90')
        time.sleep(0.02)
def __del__(self):
if self.outfile:
self.outfile.close()
| [
"noreply@github.com"
] | lbaitemple.noreply@github.com |
8d15840a70249fe8bcf37a74a9731998bebb5901 | 60f98479ab8d157c15271fb58b9a1e68e74234bd | /graphing/stackbar_from_csv.py | 90274464e306e2735fc98b5c422c07abf3ecb7e0 | [] | no_license | seed-good/mycode | 66af8f2bb1029466c4d8763ab57f212e16626dc3 | 3f56cf24a38f508f83415f8254c632be3d5b624f | refs/heads/main | 2023-05-06T22:33:06.446932 | 2021-05-28T18:56:54 | 2021-05-28T18:56:54 | 361,773,493 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,753 | py | #!/usr/bin/python3
# from python std library
import csv
# python3 -m pip install np
import numpy as np
# python3 -m pip install matplotlib
import matplotlib
matplotlib.use('Agg')
# sudo apt install python3-tk
import matplotlib.pyplot as plt
def parsecsvdata(path="/home/student/mycode/graphing/2018summary.csv"):
    """Parse quarterly downtime CSV rows into a list of int 4-tuples.

    path -- CSV file to read; defaults to the original hard-coded location
            so existing callers keep working.

    Each row must contain four integer columns (Q1..Q4 outage minutes);
    row [0] is the LAN series and row [1] the WAN series.
    """
    summary = []  # list that will contain [(LAN), (WAN)]
    # open csv data
    with open(path, "r") as downtime:
        # parse csv data with csv.reader
        downdata = csv.reader(downtime, delimiter=",")
        for row in downdata:
            rowdat = (int(row[0]), int(row[1]), int(row[2]), int(row[3]))
            summary.append(rowdat)  # add tuple to list
    print(summary)
    return summary
def main():
    """Render the 2018 LAN/WAN downtime data as a stacked bar chart.

    Reads the CSV via parsecsvdata() and writes the chart to two
    hard-coded PNG paths (a local copy and the web-served ~/static copy).
    """
    N = 4
    ## grab our data
    summary = parsecsvdata() # grab our data
    localnetMeans = summary[0] # LAN data
    wanMeans = summary[1] # WAN data
    ind = np.arange(N) # the x locations for the groups
    # the width of the bars: can also be len(x) sequence
    width = 0.35
    # describe where to display p1
    p1 = plt.bar(ind, localnetMeans, width)
    # stack p2 on top of p1
    p2 = plt.bar(ind, wanMeans, width, bottom=localnetMeans)
    # Describe the table metadata
    plt.ylabel("Length of Outage (mins)")
    plt.title("2018 Network Summary")
    plt.xticks(ind, ("Q1", "Q2", "Q3", "Q4"))
    plt.yticks(np.arange(0, 81, 10))
    plt.legend((p1[0], p2[0]), ("LAN", "WAN"))
    # SAVE the graph locally
    plt.savefig("/home/student/mycode/graphing/2018summaryv2.png")
    # Save to "~/static"
    plt.savefig("/home/student/static/2018summaryv2.png")
    print("Graph created.")
if __name__ == "__main__":
main()
| [
"vasanti.seed@stellantis.com"
] | vasanti.seed@stellantis.com |
5b181828b6da1f9a3e5538bc19bc0fe51599f8f8 | 5683778e04094617d5c9b11c9c8552c429018698 | /Guia2 Funciones/Ejercicio11.py | 28c88ebd26cc227b3d97491f4261a786fc887df3 | [] | no_license | pablilloab/Python | 72166a4beb3af2231dbb03adfd5b4ae44e74bbbd | fb18c7bb3a2ba27b95920c32b1f58f7d31b8b17c | refs/heads/main | 2023-07-12T02:09:08.307627 | 2021-08-21T15:53:38 | 2021-08-21T15:53:38 | 391,510,524 | 0 | 0 | null | 2021-08-21T15:53:39 | 2021-08-01T03:02:20 | Python | UTF-8 | Python | false | false | 232 | py | def s2hms (segs):
mins,segs = divmod(segs,60)
hrs,mins = divmod(mins,60)
print(f"{hrs} {mins} {segs}")
def hms2s(hrs, mins, segs):
    """Print the total number of seconds in hrs:mins:segs."""
    total = segs + 60 * (mins + 60 * hrs)
    print(f"{total}")
s2hms (8000)
hms2s(2,13,20) | [
"pab.a.bari@gmail.com"
] | pab.a.bari@gmail.com |
ab6a73ecbd0d595a81760ffaeefa5a2113771679 | f2129efe1662f9ddc627dd21eae0d038bf0061e6 | /.metadata/.plugins/org.eclipse.core.resources/.history/51/20ec6d211ec600171d34bdf158f64b41 | 9b8d96c8eb03c5ac8f896ae84ecd61b451896dc7 | [] | no_license | AKL-FIRE/Optimization | 73bbbdb286b127acfa803bf41906a6e7b79f38a3 | 4c826c09cfff1248697a319923ddde983156668b | refs/heads/master | 2021-08-23T20:43:34.085677 | 2017-12-06T13:05:07 | 2017-12-06T13:05:07 | 111,691,138 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 556 | #!/usr/local/bin/python2.7
#-*- coding: UTF-8 -*-
import numpy as np
import os
import matplotlib.pyplot as plt
##############读取文件内容并显示################
data = np.loadtxt('./ex1data1.txt')
plt.figure(1)
plt.scatter(data[:,0],data[:,1])
plt.title('The data Distribution')
#plt.show()
print('The matrix dimension is:',data.shape)
#############初始化拟合直线##############
theta = np.random.rand(1,1) * 10
bias = np.random.rand(1,1)
x = np.linspace(0, 22, 50).reshape(1,50)
y = theta * x + bias
plt.figure(2)
plt.plot(x,y)
plt.show()
| [
"apple@AppledeMacBook-Pro.local"
] | apple@AppledeMacBook-Pro.local | |
5d02379f98ac637617a3f89c3ac250ecf7787fbd | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/knapsack_20200708153620.py | dd426fbe9706b3c9e39d8c8e2e1af773190273a4 | [] | no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 656 | py | def Knap(a,b,w):
# declare an empty dictionary
newArr = []
for i,j in zip(a,b):
smallArr = []
smallArr.append(i)
smallArr.append(j)
newArr.append(smallArr)
i = 0
# at position 0 is the weight and at position 1 is the value
# goal is to find highest value but not greater than W
while i < len(newArr):
if (newArr[i][0] + newArr[i+1][0]) <= w:
value = newArr[i][1] + newArr[i+1][1]
print('value',value ,'>',)
if value > newArr[i][1] + newArr[i+1][1]:
print(value)
i +=1
Knap([10,20,30],[60,100,120],220) | [
"mary.jereh@gmail.com"
] | mary.jereh@gmail.com |
161bf505525f66e8b71df3145f2a6963c9287247 | 9347b4a46456b679beb15ba6200a64f895b2eced | /Alphapose_skeleton.py | 598d6847f3341a28130ea58c57e0ebd63bee0298 | [] | no_license | jethrotan/ActionDetectionInVideos | 35332fca74781f8e7a1c29e7b55de1f44a7cf1a8 | 85c107f44e48167a04c4b75e0b3f70ec9de976be | refs/heads/master | 2020-04-18T06:46:25.274931 | 2019-01-22T08:08:58 | 2019-01-22T08:08:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,574 | py | import torch
from torch.autograd import Variable
import torch.nn.functional as F
import torchvision.transforms as transforms
import torch.nn as nn
import torch.utils.data
import numpy as np
import matplotlib.pyplot as plt
from PIL import Image
import math
import copy
import os, sys
def add_path(path):
    """Prepend *path* to sys.path unless it is already present."""
    if path in sys.path:
        return
    sys.path.insert(0, path)
this_dir = os.path.dirname(__file__)
lib_path = os.path.join(this_dir, 'AlphaPose')
add_path(lib_path)
from opt import opt
# from dataloader import Image_loader, VideoDetectionLoader, DataWriter, crop_from_dets, Mscoco, DetectionLoader
from dataloader import *
from yolo.util import write_results, dynamic_write_results
from SPPE.src.main_fast_inference import *
from SPPE.src.utils.eval import getPrediction_batch
from SPPE.src.utils.img import load_image
import os
import time
from fn import getTime
import cv2
import random
from pPose_nms import pose_nms, write_json
import json
from yolo.darknet import Darknet
args = opt
args.dataset = 'coco'
class Alphapose_skeleton:
    """YOLO person detector + AlphaPose pose estimator wrapper.

    Loads both networks onto the given CUDA device at construction and
    exposes run() to extract per-frame keypoint lists.
    """

    def __init__(self, cuda_id=0, fast_yolo=False):
        """cuda_id -- GPU index; fast_yolo -- use the tiny YOLOv3 variant."""
        self.time_det = 0.0     # accumulated pose-estimation seconds
        self.time_run = 0.0     # accumulated total run() seconds
        self.cuda_id = cuda_id
        # COCO keypoint indices of interest (shoulders/elbows/wrists).
        self.target_kps = [5, 6, 7, 8, 9, 10]
        # Load yolo detection model
        print('Loading YOLO model..')
        if fast_yolo:
            self.det_model = Darknet('./AlphaPose/yolo/cfg/yolov3-tiny.cfg', self.cuda_id)
            self.det_model.load_weights('./AlphaPose/models/yolo/yolov3-tiny.weights')
        else:
            self.det_model = Darknet('./AlphaPose/yolo/cfg/yolov3.cfg', self.cuda_id)
            self.det_model.load_weights('./AlphaPose/models/yolo/yolov3.weights')
        self.det_model.cuda(self.cuda_id)
        self.det_model.eval()
        # Load pose model
        print('Loading Alphapose pose model..')
        pose_dataset = Mscoco()
        if args.fast_inference:
            self.pose_model = InferenNet_fast(4 * 1 + 1, pose_dataset)
        else:
            self.pose_model = InferenNet(4 * 1 + 1, pose_dataset)
        self.pose_model.cuda(self.cuda_id)
        self.pose_model.eval()

    def run(self, folder_or_imglist, sample_rate):
        """Detect people and estimate poses over a folder path or image list.

        folder_or_imglist -- directory of .jpg files, or a list of frames
        sample_rate       -- process every sample_rate-th frame; skipped
                             frames reuse the previous frame's result

        Returns a list, one entry per frame, of
        [image_name, x, y, score, x, y, score, ...] rows.
        """
        time_run_start = time.time()
        # BUG FIX: the original compared ``type(...) == 'str'`` (a type
        # object against a string literal), which is always False, so the
        # folder branch was unreachable.  isinstance() restores the intent.
        if isinstance(folder_or_imglist, str):
            inputpath = folder_or_imglist
            print(inputpath)
            args.inputpath = inputpath
            # Load input images
            im_names = [img for img in sorted(os.listdir(inputpath)) if img.endswith('jpg')]
            N = len(im_names)
            dataset = Image_loader(im_names, format='yolo')
        else:
            N = len(folder_or_imglist)
            # Keep only every sample_rate-th frame for detection.
            imglist = [img for i, img in enumerate(folder_or_imglist) if i % sample_rate == 0]
            dataset = Image_loader_from_images(imglist, format='yolo')
        # Load detection loader
        test_loader = DetectionLoader(dataset, self.det_model, self.cuda_id).start()

        skeleton_result_list = []
        for i in range(len(dataset)):
            with torch.no_grad():
                (inp, orig_img, im_name, boxes, scores) = test_loader.read()
                if boxes is None or boxes.nelement() == 0:
                    # No person detected in this frame.
                    skeleton_result = None
                else:
                    # Pose Estimation
                    time_det_start = time.time()
                    inps, pt1, pt2 = crop_from_dets(inp, boxes)
                    inps = Variable(inps.cuda(self.cuda_id))
                    hm = self.pose_model(inps)
                    hm_data = hm.cpu().data
                    preds_hm, preds_img, preds_scores = getPrediction(
                        hm_data, pt1, pt2, args.inputResH, args.inputResW, args.outputResH, args.outputResW)
                    skeleton_result = pose_nms(boxes, scores, preds_img, preds_scores)
                    self.time_det += (time.time() - time_det_start)
                skeleton_result_list.append(skeleton_result)

        # Expand the sampled results back to one row per original frame.
        skeleton_list = []
        j = 0
        for i in range(N):
            im_name = 'image_{:05d}.jpg'.format(i+1)
            if (i == sample_rate * (1+j)):
                j += 1
            skeleton_result = skeleton_result_list[j]
            skeleton_list.append([im_name.split('/')[-1]])
            if skeleton_result is not None:
                for human in skeleton_result:
                    kp_preds = human['keypoints']
                    kp_scores = human['kp_score']
                    # ## remove small hand
                    # if float(kp_scores[9]) < 0.2 and float(kp_scores[10]) < 0.2:
                    #     continue
                    for n in range(kp_scores.shape[0]):
                        skeleton_list[-1].append(int(kp_preds[n, 0]))
                        skeleton_list[-1].append(int(kp_preds[n, 1]))
                        skeleton_list[-1].append(round(float(kp_scores[n]), 2))
        self.time_run += (time.time() - time_run_start)
        return skeleton_list

    def runtime(self):
        """Return (pose-estimation seconds, total run seconds) so far."""
        return self.time_det, self.time_run

    def save_skeleton(self, skeleton_list, outputpath):
        """Write one space-separated line per frame to outputpath/skeleton.txt."""
        if not os.path.exists(outputpath):
            os.mkdir(outputpath)
        # ``with`` guarantees the file is closed even if a write fails
        # (the original left the handle open on error).
        with open(os.path.join(outputpath, 'skeleton.txt'), 'w') as out_file:
            for skeleton in skeleton_list:
                out_file.write(' '.join(str(x) for x in skeleton))
                out_file.write('\n')
if __name__ == "__main__":
base_folder = '/media/qcxu/qcxuDisk/Dataset/scratch_dataset/'
__action__ = ['others', 'pick', 'scratch']
# base_folder = '/media/qcxu/qcxuDisk/windows_datasets_all/clips/'
# __action__ = ['normal', 'clean', 'pick', 'scratch']
# get skeleton
skeleton_det = Alphapose_skeleton()
time1 = time.time()
for act in __action__:
base_in_clip_folder = base_folder + act + '/clips/'
base_skeleton_folder = base_folder + act + '/skeletons/'
for sub_id, sub in enumerate(os.listdir(base_in_clip_folder)):
if act != 'pick' or sub != 'Video_12_4_1':
continue
in_clip_folder = base_in_clip_folder + sub
skeleton_folder = base_skeleton_folder + sub
imglist = []
for im_name in os.listdir(in_clip_folder):
if im_name.endswith('jpg'):
imglist.append(cv2.imread(os.path.join(in_clip_folder, im_name)))
skeleton_list = skeleton_det.run(imglist, sample_rate=1)
# skeleton_det.save_skeleton(skeleton_list, skeleton_folder)
print(time.time() - time1) | [
"969915370@sjtu.edu.cn"
] | 969915370@sjtu.edu.cn |
27f38e0cf34c7a458c77862ed021f69b1081d219 | 9adea4131921ae4b8c94e6e20c8dcd5efa8f5f4a | /src/group_by.py | 585b07ad5374f384b3db8715b75f8af318fac5fe | [
"BSD-3-Clause"
] | permissive | ferhatcicek/minifold | 33f447133601c299c9ddf6e7bfaa888f43c999fd | 00c5e912e18a713b0496bcb869f5f6af4f3d40c9 | refs/heads/master | 2022-12-15T05:58:04.541226 | 2020-09-25T16:55:52 | 2020-09-25T16:55:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,689 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# This file is part of the minifold project.
# https://github.com/nokia/minifold
__author__ = "Marc-Olivier Buob"
__maintainer__ = "Marc-Olivier Buob"
__email__ = "marc-olivier.buob@nokia-bell-labs.com"
__copyright__ = "Copyright (C) 2018, Nokia"
__license__ = "BSD-3"
from .connector import Connector
from .hash import to_hashable
from .query import Query, ACTION_READ
from .values_from_dict import ValuesFromDictFonctor
def group_by_impl(fonctor: ValuesFromDictFonctor, entries: list) -> dict:
    """Bucket *entries* into a dict keyed by the value(s) *fonctor* extracts.

    A single-valued key is unwrapped from its tuple; keys are made
    hashable via to_hashable() before being used as dict keys.
    """
    buckets = dict()
    for entry in entries:
        key = fonctor(entry)
        if len(key) == 1:
            (key,) = key
        hashable_key = to_hashable(key)
        buckets.setdefault(hashable_key, list()).append(entry)
    return buckets
def group_by(attributes: list, entries: list) -> dict:
    """Group *entries* (dicts) by the values of the given attribute names.

    attributes -- attribute names used to build the grouping key
    entries    -- entries to bucket

    Returns a dict mapping each key to the list of matching entries.
    (The return annotation previously said ``list`` but the function has
    always returned the dict built by group_by_impl.)
    """
    fonctor = ValuesFromDictFonctor(attributes)
    return group_by_impl(fonctor, entries)
class GroupByConnector(Connector):
    """Minifold connector that groups its child's query results.

    Wraps another connector and buckets the entries it returns by the
    given attribute names (see group_by_impl).
    """
    def __init__(self, attributes :list, child):
        # attributes -- names to group by; child -- wrapped connector.
        super().__init__()
        self.m_fonctor = ValuesFromDictFonctor(attributes)
        self.m_child = child
    @property
    def child(self):
        """The wrapped child connector."""
        return self.m_child
    def attributes(self, object :str):
        """Return the attribute names this connector exposes for *object*."""
        return set(self.m_fonctor.attributes)
    def query(self, q :Query) -> list:
        """Run *q* on the child and answer with its entries grouped by key."""
        super().query(q)
        return self.answer(
            q,
            group_by_impl(
                self.m_fonctor,
                self.m_child.query(q)
            )
        )
    def __str__(self) -> str:
        return "GROUP BY %s" % ", ".join(self.m_fonctor.attributes)
| [
"marc-olivier.buob@nokia-bell-labs.com"
] | marc-olivier.buob@nokia-bell-labs.com |
d202ffe04150297400e04709fcc09d24982baf93 | 8d402df39c18eba7e1c86c762f205c944357c5df | /www/gallery/sidebar.py | 40830ea74ad141050c9cf4d414e8d9b5a387a0f1 | [
"BSD-3-Clause"
] | permissive | brython-dev/brython | 87cc023e25550dec9ce459ba68774189f33712b6 | b33958bff0e8c7a280babc30232dc389a2500a7a | refs/heads/master | 2023-09-04T04:49:29.156209 | 2023-09-01T06:36:08 | 2023-09-01T06:36:08 | 24,046,239 | 6,569 | 625 | BSD-3-Clause | 2023-07-05T06:13:32 | 2014-09-15T06:58:21 | Python | UTF-8 | Python | false | false | 236 | py | x=0
from browser.html import A,B,BR
print('A',A)
from os import *
def menu(title, links):
    """Build a sidebar menu: a bold *title* followed by one link per line.

    links -- iterable of (name, href) tuples
    """
    widget = B(title)
    for label, target in links:
        widget += BR() + A(label, href=target)
    return widget
| [
"pierre.quentel@gmail.com"
] | pierre.quentel@gmail.com |
62b751f46b3df27ff08a0ff7f2fd7ec87b4ba081 | cd5dd04b439faf09b09e1441d922ce39bcb56c21 | /Employee_project/wsgi.py | 5b8a953913381515339972cc243ddfc72fc1c41f | [] | no_license | Primalr3d/my-first-blog | 4ef8d138d624b1ec46f14f69af86e6aca7755900 | ee29b26bf9ceb4d98fd974f093099299c213754c | refs/heads/master | 2022-12-18T13:29:02.286046 | 2019-08-01T10:58:05 | 2019-08-01T10:58:05 | 200,034,170 | 0 | 0 | null | 2022-12-08T05:57:42 | 2019-08-01T11:00:29 | Python | UTF-8 | Python | false | false | 504 | py | """
WSGI config for Employee_project project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# from whitenoise.django import DjangoWhiteNoise
# Point Django at the project settings before building the application.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Employee_project.settings')
# Module-level WSGI callable imported by servers (gunicorn, mod_wsgi, ...).
application = get_wsgi_application()
# application = DjangoWhiteNoise(application)
| [
"f2016139@goa.bits-pilani.ac.in"
] | f2016139@goa.bits-pilani.ac.in |
bcac628ae19f47ccd92cb2f34735ac402c0d84d3 | bdede50e3376c9bebad48d295eb544fad101c82b | /ev/ev/views.py | 8cf9286d4208e8197f95bc796e2049ed59318642 | [
"MIT"
] | permissive | cnu-ev/ev-backend | affa14a7519835b76a132713e1893f119f6f187a | e692ed6747bd2ceaea1cba9479a96206f8e4314f | refs/heads/master | 2021-06-20T14:59:08.219972 | 2019-08-15T11:55:03 | 2019-08-15T11:55:03 | 194,825,793 | 0 | 3 | MIT | 2021-06-10T21:40:37 | 2019-07-02T08:51:43 | Python | UTF-8 | Python | false | false | 602 | py | from django.views.generic.base import TemplateView
from django.views.generic.edit import CreateView
from django.contrib.auth.forms import UserCreationForm
from django.urls import reverse_lazy
#-- HomeView: sets the template for the home page
class HomeView(TemplateView):
    """Render the static home page."""
    template_name = 'home.html'
#-- User Creation: views handling user registration
class UserCreateView(CreateView):
    """Sign-up form backed by Django's stock UserCreationForm."""
    template_name = 'registration/register.html'
    form_class = UserCreationForm
    success_url = reverse_lazy('register_done')
class UserCreateDoneTV(TemplateView):
    """Confirmation page shown after a successful registration."""
    template_name = 'registration/register_done.html'
"eocjstnals12@gmail.com"
] | eocjstnals12@gmail.com |
1415e5f0e7f73c5d28e6c2cab69b59d5b367240a | 27f7a4a6cec7a4aac822cf3849d5b25be0df5fed | /Tinder.py | 60f96cb9dda57f9b2ec5a995b3d4e6801466742b | [] | no_license | nellyds/lefter | d28dae4ce1125258f25d81548b79e77f70749a0e | 491c8330afa3260e6056f18416a3242fd3ef7c26 | refs/heads/master | 2020-08-08T07:01:01.823753 | 2019-10-21T01:30:53 | 2019-10-21T01:30:53 | 213,769,512 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 648 | py | from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.action_chains import ActionChains
import time
def tinderLogin(driver):
    """Click through Tinder's login popup, then wait for the page to settle.

    NOTE(review): relies on a hard-coded absolute XPath, so any markup
    change on the site breaks this.
    """
    driver.find_element_by_xpath("//*[@id=\"modal-manager\"]/div/div/div[2]/div/div[3]/div/button/span").click()
    time.sleep(3)
def tinderSwipe(driver):
    """Repeatedly click the Like button until it is no longer present.

    The loop ends when find_element_by_xpath raises
    NoSuchElementException (the button disappeared).
    """
    actions = ActionChains(driver)
    try:
        while (driver.find_element_by_xpath("//div[contains(@aria-label, 'Like')]")):
            time.sleep(.5)
            actions.move_to_element(driver.find_element_by_xpath("//div[contains(@aria-label, 'Like')]")).click().perform()
    except NoSuchElementException:
        # The original used a bare ``except:`` whose body was the bare
        # name ``NoSuchElementException`` (a no-op expression), silently
        # swallowing *every* error; catch only the expected exception.
        pass
| [
"ndsilva822@gmail.com"
] | ndsilva822@gmail.com |
046bd5cbb97925fc7bf0e7bba36bb81c56dc798f | 079f2163a4bd00b998ef7ec01a1e6232313b0d4e | /ESPNet/train/DataSet.py | 6ca2a6410c9ecc182218618ef07decc03202d8b2 | [] | no_license | ryotanji/ESPNET | 91636011263e5251ae0b7d8829e5114f90ad5053 | cd71d1803ce25a15558244030c90d2e7164c8836 | refs/heads/master | 2023-01-12T12:19:42.746801 | 2020-11-01T12:48:17 | 2020-11-01T12:48:17 | 257,574,928 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,307 | py | import torch
import cv2
import torch.utils.data
import numpy as np
from PIL import Image
__author__ = "Sachin Mehta"
class MyDataset(torch.utils.data.Dataset):
    '''
    Class to load the dataset
    '''
    def __init__(self, imList, labelList, transform=None):
        '''
        :param imList: image list (Note that these lists have been processed and pickled using the loadData.py)
        :param labelList: label list (Note that these lists have been processed and pickled using the loadData.py)
        :param transform: Type of transformation. See Transforms.py for supported transformations
        '''
        self.imList = imList
        self.labelList = labelList
        self.transform = transform
    def __len__(self):
        # Dataset size == number of image paths.
        return len(self.imList)
    def __getitem__(self, idx):
        '''
        :param idx: Index of the image file
        :return: returns the image and corresponding label file.
        '''
        image_name = self.imList[idx]
        label_name = self.labelList[idx]
        # cv2.imread returns a BGR ndarray -- NOTE(review): it returns
        # None (no exception) when the path is missing; no check here.
        image = cv2.imread(image_name)
        label = Image.open(label_name)
        label = np.array(label)
        # Remap the 255 "ignore" label value to class 0 before training.
        index = np.where(label == 255)
        label[index] = 0
        if self.transform:
            [image, label] = self.transform(image, label)
        return (image, label)
| [
"noreply@github.com"
] | ryotanji.noreply@github.com |
53e62f8d8703c493130f1759cf7ee0c205c4f15c | d396aa3495407069935ebab3faf870ad87025014 | /python-leetcode/剑指offer/tree/剑指 Offer 55 - I. 二叉树的深度.py | 36b475c95dd52bc934243fe9103e8b69f9f96341 | [] | no_license | qwjaskzxl/For-OJ | 4d3b2f873cf11c5f9ddb74c4b91b4b78cccc3afa | 1fe6e5099d95fff2dcfc503951ff6e311202919b | refs/heads/master | 2021-12-25T21:17:44.232567 | 2021-08-05T14:01:59 | 2021-08-05T14:01:59 | 175,966,016 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,156 | py | pass
'''
输入一棵二叉树的根节点,求该树的深度。从根节点到叶节点依次经过的节点(含根、叶节点)形成树的一条路径,最长路径的长度为树的深度。
例如:
给定二叉树 [3,9,20,null,null,15,7],
3
/ \
9 20
/ \
15 7
返回它的最大深度 3 。
提示:
节点总数 <= 10000
'''
# Definition for a binary tree node.
class TreeNode:
    # Binary tree node: value plus left/right child links.
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None
class Solution:
    def maxDepth(self, root: TreeNode) -> int:
        """Return the depth of the binary tree rooted at *root* (0 if empty).

        Iterative level-by-level traversal instead of the recursive DFS:
        each pass consumes one level, so the number of passes is the depth.
        """
        if not root:
            return 0
        depth = 0
        level = [root]
        while level:
            depth += 1
            level = [child
                     for node in level
                     for child in (node.left, node.right)
                     if child]
        return depth
return MAX
class Solution_666:
    def maxDepth(self, root: TreeNode) -> int:
        """Depth = 1 + max depth of the two subtrees (0 for an empty tree)."""
        if root is None:
            return 0
        left_depth = self.maxDepth(root.left)
        right_depth = self.maxDepth(root.right)
        return 1 + max(left_depth, right_depth)
| [
"870384605@qq.com"
] | 870384605@qq.com |
2b9e92e6220567d947887c1532abe81e555fe74a | bdc6119c5910093d2417830e15f2389a7e5e74b7 | /sols/projecteuler/071-OrderedFractions.py | 38c5f1417fe3f25268f0d03d17e33657219b8748 | [] | no_license | danscu/onlinejudge | 0d10d4902cb152dd6401d3624ebb7bc6c44bba38 | a981794d868dd8366d8f50931f10704066fbab96 | refs/heads/master | 2021-01-15T15:33:47.893332 | 2016-08-31T00:50:50 | 2016-08-31T00:50:58 | 17,789,216 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 559 | py | def findLess(n, frac):
l = 0
r = n
while r - l > 1:
m = (l + r) / 2
if 1.0 * m / n < frac:
l = m
else:
r = m
return l
def gcd(a, b):
    """Greatest common divisor by Euclid's algorithm."""
    while b:
        a, b = b, a % b
    return a
def closest_numerator(N, frac):
    """Return the reduced numerator of the largest fraction l/d < frac, d <= N.

    N    -- largest denominator considered
    frac -- exclusive upper bound, a float in (0, 1)

    Replaces the original per-denominator binary search with direct
    arithmetic (floor(frac*d) is the candidate numerator), and inlines
    Euclid's algorithm so the function is self-contained.
    """
    dist = 2            # larger than any possible gap frac - l/d
    ans = 0
    for d in range(1, N + 1):
        l = int(frac * d)
        # Correct for float rounding so l is exactly the largest value in
        # [0, d) with l/d strictly below frac (same contract as the old
        # binary search).
        while (l + 1) < d and 1.0 * (l + 1) / d < frac:
            l += 1
        while l > 0 and 1.0 * l / d >= frac:
            l -= 1
        gap = frac - 1.0 * l / d
        if gap < dist:
            dist = gap
            # Reduce l/d to lowest terms; keep only the numerator.
            x, y = l, d
            while y:
                x, y = y, x % y
            ans = l // x
    return ans
if __name__ == '__main__':
print closest_numerator(1000000, 3.0/7)
| [
"danke.xie@gmail.com"
] | danke.xie@gmail.com |
203c2787740212d5fef64565dc25997cf58c2ccb | 4a12c77f287d605d858aa88002c8e8cb8a93a592 | /manage.py | 9f78952855195271836995dafaf9bf1035b62f51 | [] | no_license | vineetmadeshia/MovieRating | 045f77c86d7e2df6e1e4c65e91b90146aadc36ee | 34f5c8c9fd0d4b7cc7c4f1386f3dae1054992340 | refs/heads/main | 2023-06-01T07:46:32.706272 | 2021-05-19T09:23:06 | 2021-05-19T09:23:06 | 368,789,855 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 543 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "MovieRating.settings")
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
| [
"vineetmadeshia@gmail.com"
] | vineetmadeshia@gmail.com |
6950bb2514c10a37f69d8268d2a1bcae8b0f65e0 | f0257428fed2a5f10950ee52e88a0f6811120323 | /book_study/book_chapter4/tuple.py | 8b984479ddcedf9425ce141e5f6f5ccf6b3c92c8 | [] | no_license | tata-LY/python | 454d42cc8f6db9a1450966aba4af6894e1b59b78 | 55d13b7f61cbb87ff3f272f596cd5b8c53b807c5 | refs/heads/main | 2023-04-08T19:31:57.945506 | 2021-04-19T05:39:17 | 2021-04-19T05:39:17 | 328,880,464 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 284 | py | #!/usr/bin/env python3
# _*_coding:utf-8_*_
# by liuyang
tuple1 = (1,100)
print(tuple1)
print(tuple1[0])
#tuple1[0] = 10  # tuples do not support item assignment (TypeError)
#print(tuple1)
for i in tuple1:
    print(i)
tuple1 = ('liuyang', 'zhangjuan') # items can't be modified, but the name can be rebound
print(tuple1)
| [
"ainiyang20@qq.com"
] | ainiyang20@qq.com |
22f0a214ee1dca65b2464ec11b94e429d66e79cc | 78eb766321c7ed3236fb87bb6ac8547c99d0d1a4 | /oneYou2/pages/migrations/0001_initial.py | 8aaa7b7e731d48ae75dd6fe64a29f8b282817fdc | [] | no_license | danmorley/nhs-example | 9d7be76116ed962248e1f7e287355a6870534f5d | ae4b5f395d3518ee17ef89348ed756c817e0c08c | refs/heads/master | 2022-12-13T02:13:18.484448 | 2019-02-28T11:05:31 | 2019-02-28T11:05:31 | 203,353,840 | 1 | 0 | null | 2022-12-07T04:29:46 | 2019-08-20T10:30:15 | Python | UTF-8 | Python | false | false | 1,087 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2018-02-06 14:59
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import wagtail.core.blocks
import wagtail.core.fields
import wagtail.images.blocks
class Migration(migrations.Migration):
    """Initial schema for the pages app: creates OneYou2Page on top of
    wagtailcore.Page with a StreamField body (heading/paragraph/image).

    NOTE: auto-generated by Django; avoid hand-editing once applied.
    """
    initial = True
    dependencies = [
        ('wagtailcore', '0040_page_draft_title'),
    ]
    operations = [
        migrations.CreateModel(
            name='OneYou2Page',
            fields=[
                ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
                ('body', wagtail.core.fields.StreamField((('heading', wagtail.core.blocks.CharBlock(classname='full title')), ('paragraph', wagtail.core.blocks.RichTextBlock()), ('image', wagtail.images.blocks.ImageChooserBlock())))),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
    ]
| [
"andrewkenyon123@gmail.com"
] | andrewkenyon123@gmail.com |
227da66e9de18e3788780a9e941542e77cf692c7 | 983309797dd79e6cf02c30e8f824f6e6f9af40dc | /venv/bin/wamp | 10dbf35ce2790f56bf5fbb221093fb6d00878937 | [] | no_license | ErmShadow/DjangoProject | 184a7cdc74e46453262d4effb904884ad66e7401 | 2e3475bd2be14c6251be182387481a42f1a56bca | refs/heads/master | 2023-05-31T14:20:18.950151 | 2021-06-17T15:22:08 | 2021-06-17T15:22:08 | 377,874,926 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 262 | #!/home/eavuser/PycharmProjects/djangoProject/venv/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from autobahn.__main__ import _main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(_main())
| [
"shadow0touch@gmail.com"
] | shadow0touch@gmail.com | |
97e25af591a27b65414cb6af1325caf83381bf30 | ac7435b0b3faa6b6cf51d0d6b43984b77b70a37c | /nova/tests/unit/network/test_neutronv2.py | 7424cbb63fba3e40671c004d21b540ff5b69b930 | [
"Apache-2.0"
] | permissive | gokrokvertskhov/nova-mesos-driver | 04688cd51cad9790cf5460b44ba527b51080760d | fdb9c8468f6a8680c19095a81bf77884ae61e170 | refs/heads/master | 2021-01-10T10:51:07.096729 | 2016-03-25T01:45:10 | 2016-03-25T01:45:10 | 54,685,199 | 0 | 1 | Apache-2.0 | 2020-07-24T01:00:58 | 2016-03-25T01:22:06 | Python | UTF-8 | Python | false | false | 181,927 | py | # Copyright 2012 OpenStack Foundation
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import collections
import contextlib
import copy
import uuid
import mock
from mox3 import mox
from neutronclient.common import exceptions
from neutronclient.v2_0 import client
from oslo_config import cfg
from oslo_serialization import jsonutils
from oslo_utils import timeutils
import six
from six.moves import range
from nova.compute import flavors
from nova import context
from nova import exception
from nova.network import model
from nova.network.neutronv2 import api as neutronapi
from nova.network.neutronv2 import constants
from nova import objects
from nova.openstack.common import policy as common_policy
from nova.pci import manager as pci_manager
from nova.pci import whitelist as pci_whitelist
from nova import policy
from nova import test
from nova.tests.unit import fake_instance
CONF = cfg.CONF
# NOTE: Neutron client raises Exception which is discouraged by HACKING.
# We set this variable here and use it for assertions below to avoid
# the hacking checks until we can make neutron client throw a custom
# exception class instead.
NEUTRON_CLIENT_EXCEPTION = Exception
fake_info_cache = {
'created_at': None,
'updated_at': None,
'deleted_at': None,
'deleted': False,
'instance_uuid': 'fake-uuid',
'network_info': '[]',
}
class MyComparator(mox.Comparator):
    """Mox comparator doing a deep structural comparison against ``lhs``.

    Dicts are compared key-by-key (recursively); lists and tuples are
    compared by length plus membership.  NOTE(review): the list/tuple
    check is order-insensitive, asymmetric (every lhs element must appear
    in rhs) and does not account for duplicates.
    """
    def __init__(self, lhs):
        self.lhs = lhs
    def _com_dict(self, lhs, rhs):
        # Equal size and every lhs key present in rhs with a matching value.
        if len(lhs) != len(rhs):
            return False
        for key, value in six.iteritems(lhs):
            if key not in rhs:
                return False
            rhs_value = rhs[key]
            if not self._com(value, rhs_value):
                return False
        return True
    def _com_list(self, lhs, rhs):
        # Same length and every lhs element contained somewhere in rhs.
        if len(lhs) != len(rhs):
            return False
        for lhs_value in lhs:
            if lhs_value not in rhs:
                return False
        return True
    def _com(self, lhs, rhs):
        # Dispatch on the lhs type; falls back to == for scalars.
        if lhs is None:
            return rhs is None
        if isinstance(lhs, dict):
            if not isinstance(rhs, dict):
                return False
            return self._com_dict(lhs, rhs)
        if isinstance(lhs, list):
            if not isinstance(rhs, list):
                return False
            return self._com_list(lhs, rhs)
        if isinstance(lhs, tuple):
            if not isinstance(rhs, tuple):
                return False
            return self._com_list(lhs, rhs)
        return lhs == rhs
    def equals(self, rhs):
        # Entry point used by mox when matching recorded call arguments.
        return self._com(self.lhs, rhs)
    def __repr__(self):
        return str(self.lhs)
class TestNeutronClient(test.NoDBTestCase):
    """Tests for neutronapi.get_client auth/token handling."""
    def setUp(self):
        super(TestNeutronClient, self).setUp()
        # Clear module-level cached admin auth between tests.
        neutronapi.reset_state()
    def test_withtoken(self):
        # A context carrying a token yields a client bound to that token,
        # the configured endpoint and timeout.
        self.flags(url='http://anyhost/', group='neutron')
        self.flags(timeout=30, group='neutron')
        my_context = context.RequestContext('userid',
                                            'my_tenantid',
                                            auth_token='token')
        cl = neutronapi.get_client(my_context)
        self.assertEqual(CONF.neutron.url, cl.httpclient.endpoint_override)
        self.assertEqual(my_context.auth_token,
                         cl.httpclient.auth.auth_token)
        self.assertEqual(CONF.neutron.timeout, cl.httpclient.session.timeout)
    def test_withouttoken(self):
        # No token on the context -> Unauthorized.
        my_context = context.RequestContext('userid', 'my_tenantid')
        self.assertRaises(exceptions.Unauthorized,
                          neutronapi.get_client,
                          my_context)
    def test_withtoken_context_is_admin(self):
        # Admin contexts with a token behave the same as regular ones.
        self.flags(url='http://anyhost/', group='neutron')
        self.flags(timeout=30, group='neutron')
        my_context = context.RequestContext('userid',
                                            'my_tenantid',
                                            auth_token='token',
                                            is_admin=True)
        cl = neutronapi.get_client(my_context)
        self.assertEqual(CONF.neutron.url, cl.httpclient.endpoint_override)
        self.assertEqual(my_context.auth_token,
                         cl.httpclient.auth.auth_token)
        self.assertEqual(CONF.neutron.timeout, cl.httpclient.session.timeout)
    def test_withouttoken_keystone_connection_error(self):
        # Token-less context with keystone auth strategy surfaces the
        # client error instead of silently building a client.
        self.flags(auth_strategy='keystone', group='neutron')
        self.flags(url='http://anyhost/', group='neutron')
        my_context = context.RequestContext('userid', 'my_tenantid')
        self.assertRaises(NEUTRON_CLIENT_EXCEPTION,
                          neutronapi.get_client,
                          my_context)
    @mock.patch('nova.network.neutronv2.api._ADMIN_AUTH')
    @mock.patch.object(client.Client, "list_networks", new=mock.Mock())
    def test_reuse_admin_token(self, m):
        # Each admin client picks up a fresh token from the auth plugin.
        self.flags(url='http://anyhost/', group='neutron')
        my_context = context.RequestContext('userid', 'my_tenantid',
                                            auth_token='token')
        tokens = ['new_token2', 'new_token1']
        def token_vals(*args, **kwargs):
            return tokens.pop()
        m.get_token.side_effect = token_vals
        client1 = neutronapi.get_client(my_context, True)
        client1.list_networks(retrieve_all=False)
        self.assertEqual('new_token1', client1.httpclient.auth.get_token(None))
        client1 = neutronapi.get_client(my_context, True)
        client1.list_networks(retrieve_all=False)
        self.assertEqual('new_token2', client1.httpclient.auth.get_token(None))
class TestNeutronv2Base(test.TestCase):
    """Shared fixtures and mox scaffolding for neutronv2 API tests.

    setUp() builds canned networks, ports, subnets and floating IPs; the
    _stub_*/_fake_* helpers record the neutron client calls subclasses
    expect.  NOTE(review): the mox expectations are order-sensitive, so
    statement order in these helpers is load-bearing.
    """
    def setUp(self):
        super(TestNeutronv2Base, self).setUp()
        self.context = context.RequestContext('userid', 'my_tenantid')
        setattr(self.context,
                'auth_token',
                'bff4a5a6b9eb4ea2a6efec6eefb77936')
        self.tenant_id = '9d049e4b60b64716978ab415e6fbd5c0'
        # Two fake instances; tests attach ports to one or the other.
        self.instance = {'project_id': self.tenant_id,
                         'uuid': str(uuid.uuid4()),
                         'display_name': 'test_instance',
                         'availability_zone': 'nova',
                         'host': 'some_host',
                         'info_cache': {'network_info': []},
                         'security_groups': []}
        self.instance2 = {'project_id': self.tenant_id,
                          'uuid': str(uuid.uuid4()),
                          'display_name': 'test_instance2',
                          'availability_zone': 'nova',
                          'info_cache': {'network_info': []},
                          'security_groups': []}
        # Network fixtures; _stub_allocate_for_instance() selects one by
        # 1-based index (net_idx) from self.nets below.
        self.nets1 = [{'id': 'my_netid1',
                       'name': 'my_netname1',
                       'subnets': ['mysubnid1'],
                       'tenant_id': 'my_tenantid'}]
        self.nets2 = []
        self.nets2.append(self.nets1[0])
        self.nets2.append({'id': 'my_netid2',
                           'name': 'my_netname2',
                           'subnets': ['mysubnid2'],
                           'tenant_id': 'my_tenantid'})
        self.nets3 = self.nets2 + [{'id': 'my_netid3',
                                    'name': 'my_netname3',
                                    'tenant_id': 'my_tenantid'}]
        self.nets4 = [{'id': 'his_netid4',
                       'name': 'his_netname4',
                       'tenant_id': 'his_tenantid'}]
        # A network request with external networks
        self.nets5 = self.nets1 + [{'id': 'the-external-one',
                                    'name': 'out-of-this-world',
                                    'router:external': True,
                                    'tenant_id': 'should-be-an-admin'}]
        # A network request with a duplicate
        self.nets6 = []
        self.nets6.append(self.nets1[0])
        self.nets6.append(self.nets1[0])
        # A network request with a combo
        self.nets7 = []
        self.nets7.append(self.nets2[1])
        self.nets7.append(self.nets1[0])
        self.nets7.append(self.nets2[1])
        self.nets7.append(self.nets1[0])
        # A network request with only external network
        self.nets8 = [self.nets5[1]]
        # An empty network
        self.nets9 = []
        # A network that is both shared and external
        self.nets10 = [{'id': 'net_id', 'name': 'net_name',
                        'router:external': True, 'shared': True}]
        self.nets = [self.nets1, self.nets2, self.nets3, self.nets4,
                     self.nets5, self.nets6, self.nets7, self.nets8,
                     self.nets9, self.nets10]
        # Port / floating-IP / subnet fixtures paired with the networks.
        self.port_address = '10.0.1.2'
        self.port_data1 = [{'network_id': 'my_netid1',
                            'device_id': self.instance2['uuid'],
                            'tenant_id': self.tenant_id,
                            'device_owner': 'compute:nova',
                            'id': 'my_portid1',
                            'binding:vnic_type': model.VNIC_TYPE_NORMAL,
                            'status': 'DOWN',
                            'admin_state_up': True,
                            'fixed_ips': [{'ip_address': self.port_address,
                                           'subnet_id': 'my_subid1'}],
                            'mac_address': 'my_mac1', }]
        self.float_data1 = [{'port_id': 'my_portid1',
                             'fixed_ip_address': self.port_address,
                             'floating_ip_address': '172.0.1.2'}]
        self.dhcp_port_data1 = [{'fixed_ips': [{'ip_address': '10.0.1.9',
                                                'subnet_id': 'my_subid1'}],
                                 'status': 'ACTIVE',
                                 'admin_state_up': True}]
        self.port_address2 = '10.0.2.2'
        self.port_data2 = []
        self.port_data2.append(self.port_data1[0])
        self.port_data2.append({'network_id': 'my_netid2',
                                'device_id': self.instance['uuid'],
                                'tenant_id': self.tenant_id,
                                'admin_state_up': True,
                                'status': 'ACTIVE',
                                'device_owner': 'compute:nova',
                                'id': 'my_portid2',
                                'binding:vnic_type': model.VNIC_TYPE_NORMAL,
                                'fixed_ips':
                                    [{'ip_address': self.port_address2,
                                      'subnet_id': 'my_subid2'}],
                                'mac_address': 'my_mac2', })
        self.float_data2 = []
        self.float_data2.append(self.float_data1[0])
        self.float_data2.append({'port_id': 'my_portid2',
                                 'fixed_ip_address': '10.0.2.2',
                                 'floating_ip_address': '172.0.2.2'})
        self.port_data3 = [{'network_id': 'my_netid1',
                            'device_id': 'device_id3',
                            'tenant_id': self.tenant_id,
                            'status': 'DOWN',
                            'admin_state_up': True,
                            'device_owner': 'compute:nova',
                            'id': 'my_portid3',
                            'binding:vnic_type': model.VNIC_TYPE_NORMAL,
                            'fixed_ips': [],  # no fixed ip
                            'mac_address': 'my_mac3', }]
        self.subnet_data1 = [{'id': 'my_subid1',
                              'cidr': '10.0.1.0/24',
                              'network_id': 'my_netid1',
                              'gateway_ip': '10.0.1.1',
                              'dns_nameservers': ['8.8.1.1', '8.8.1.2']}]
        self.subnet_data2 = []
        self.subnet_data_n = [{'id': 'my_subid1',
                               'cidr': '10.0.1.0/24',
                               'network_id': 'my_netid1',
                               'gateway_ip': '10.0.1.1',
                               'dns_nameservers': ['8.8.1.1', '8.8.1.2']},
                              {'id': 'my_subid2',
                               'cidr': '20.0.1.0/24',
                               'network_id': 'my_netid2',
                               'gateway_ip': '20.0.1.1',
                               'dns_nameservers': ['8.8.1.1', '8.8.1.2']}]
        self.subnet_data2.append({'id': 'my_subid2',
                                  'cidr': '10.0.2.0/24',
                                  'network_id': 'my_netid2',
                                  'gateway_ip': '10.0.2.1',
                                  'dns_nameservers': ['8.8.2.1', '8.8.2.2']})
        # Floating-IP pools and sample floating IPs.
        self.fip_pool = {'id': '4fdbfd74-eaf8-4884-90d9-00bd6f10c2d3',
                         'name': 'ext_net',
                         'router:external': True,
                         'tenant_id': 'admin_tenantid'}
        self.fip_pool_nova = {'id': '435e20c3-d9f1-4f1b-bee5-4611a1dd07db',
                              'name': 'nova',
                              'router:external': True,
                              'tenant_id': 'admin_tenantid'}
        self.fip_unassociated = {'tenant_id': 'my_tenantid',
                                 'id': 'fip_id1',
                                 'floating_ip_address': '172.24.4.227',
                                 'floating_network_id': self.fip_pool['id'],
                                 'port_id': None,
                                 'fixed_ip_address': None,
                                 'router_id': None}
        fixed_ip_address = self.port_data2[1]['fixed_ips'][0]['ip_address']
        self.fip_associated = {'tenant_id': 'my_tenantid',
                               'id': 'fip_id2',
                               'floating_ip_address': '172.24.4.228',
                               'floating_network_id': self.fip_pool['id'],
                               'port_id': self.port_data2[1]['id'],
                               'fixed_ip_address': fixed_ip_address,
                               'router_id': 'router_id1'}
        # Value returned from the stubbed get_instance_nw_info() call.
        self._returned_nw_info = []
        self.mox.StubOutWithMock(neutronapi, 'get_client')
        self.moxed_client = self.mox.CreateMock(client.Client)
        self.addCleanup(CONF.reset)
        self.addCleanup(self.mox.VerifyAll)
        self.addCleanup(self.mox.UnsetStubs)
        self.addCleanup(self.stubs.UnsetAll)
    def _fake_instance_object(self, instance):
        """Wrap an instance dict in a fake Instance object."""
        return fake_instance.fake_instance_obj(self.context, **instance)
    def _fake_instance_info_cache(self, nw_info, instance_uuid=None):
        """Build an info_cache dict whose network_info hydrates *nw_info*."""
        info_cache = {}
        if instance_uuid is None:
            info_cache['instance_uuid'] = str(uuid.uuid4())
        else:
            info_cache['instance_uuid'] = instance_uuid
        info_cache['deleted'] = False
        info_cache['created_at'] = timeutils.utcnow()
        info_cache['deleted_at'] = timeutils.utcnow()
        info_cache['updated_at'] = timeutils.utcnow()
        info_cache['network_info'] = model.NetworkInfo.hydrate(six.text_type(
                                jsonutils.dumps(nw_info)))
        return info_cache
    def _fake_instance_object_with_info_cache(self, instance):
        """Build an Instance object carrying its info_cache attribute."""
        expected_attrs = ['info_cache']
        instance = objects.Instance._from_db_object(self.context,
               objects.Instance(), fake_instance.fake_db_instance(**instance),
               expected_attrs=expected_attrs)
        return instance
    def _stub_allocate_for_instance(self, net_idx=1, **kwargs):
        """Record the mox expectations for an allocate_for_instance() run.

        Returns the API object with all client calls pre-recorded; the
        '_break' kwarg stops recording early at a named checkpoint so a
        test can assert on a failure partway through.
        """
        self.instance = self._fake_instance_object(self.instance)
        self.instance2 = self._fake_instance_object(self.instance2)
        api = neutronapi.API()
        self.mox.StubOutWithMock(api, 'get_instance_nw_info')
        has_portbinding = False
        has_extra_dhcp_opts = False
        dhcp_options = kwargs.get('dhcp_options')
        if dhcp_options is not None:
            has_extra_dhcp_opts = True
        if kwargs.get('portbinding'):
            has_portbinding = True
            api.extensions[constants.PORTBINDING_EXT] = 1
            self.mox.StubOutWithMock(api, '_refresh_neutron_extensions_cache')
            neutronapi.get_client(mox.IgnoreArg()).AndReturn(
                self.moxed_client)
            neutronapi.get_client(
                mox.IgnoreArg(), admin=True).AndReturn(
                self.moxed_client)
            api._refresh_neutron_extensions_cache(mox.IgnoreArg(),
                neutron=self.moxed_client)
            self.mox.StubOutWithMock(api, '_has_port_binding_extension')
            api._has_port_binding_extension(mox.IgnoreArg(),
                neutron=self.moxed_client,
                refresh_cache=True).AndReturn(has_portbinding)
        else:
            self.mox.StubOutWithMock(api, '_refresh_neutron_extensions_cache')
            api._refresh_neutron_extensions_cache(mox.IgnoreArg(),
                neutron=self.moxed_client)
            self.mox.StubOutWithMock(api, '_populate_neutron_extension_values')
        # Net idx is 1-based for compatibility with existing unit tests
        nets = self.nets[net_idx - 1]
        ports = {}
        fixed_ips = {}
        macs = kwargs.get('macs')
        if macs:
            macs = set(macs)
        req_net_ids = []
        ordered_networks = []
        if 'requested_networks' in kwargs:
            for request in kwargs['requested_networks']:
                if request.port_id:
                    if request.port_id == 'my_portid3':
                        self.moxed_client.show_port(request.port_id
                        ).AndReturn(
                            {'port': {'id': 'my_portid3',
                                      'network_id': 'my_netid1',
                                      'tenant_id': self.tenant_id,
                                      'mac_address': 'my_mac1',
                                      'device_id': kwargs.get('_device') and
                                                   self.instance2.uuid or
                                                   ''}})
                        ports['my_netid1'] = [self.port_data1[0],
                                              self.port_data3[0]]
                        ports[request.port_id] = self.port_data3[0]
                        request.network_id = 'my_netid1'
                        if macs is not None:
                            macs.discard('my_mac1')
                    elif request.port_id == 'invalid_id':
                        PortNotFound = exceptions.PortNotFoundClient(
                            status_code=404)
                        self.moxed_client.show_port(request.port_id
                        ).AndRaise(PortNotFound)
                    else:
                        self.moxed_client.show_port(request.port_id).AndReturn(
                            {'port': {'id': 'my_portid1',
                                      'network_id': 'my_netid1',
                                      'tenant_id': self.tenant_id,
                                      'mac_address': 'my_mac1',
                                      'device_id': kwargs.get('_device') and
                                                   self.instance2.uuid or
                                                   ''}})
                        ports[request.port_id] = self.port_data1[0]
                        request.network_id = 'my_netid1'
                        if macs is not None:
                            macs.discard('my_mac1')
                else:
                    fixed_ips[request.network_id] = request.address
                req_net_ids.append(request.network_id)
                ordered_networks.append(request)
        else:
            for n in nets:
                ordered_networks.append(
                    objects.NetworkRequest(network_id=n['id']))
        if kwargs.get('_break') == 'pre_list_networks':
            self.mox.ReplayAll()
            return api
        # search all req_net_ids as in api.py
        search_ids = req_net_ids
        if search_ids:
            mox_list_params = {'id': mox.SameElementsAs(search_ids)}
            self.moxed_client.list_networks(
                **mox_list_params).AndReturn({'networks': nets})
        else:
            mox_list_params = {'tenant_id': self.instance.project_id,
                               'shared': False}
            self.moxed_client.list_networks(
                **mox_list_params).AndReturn({'networks': nets})
            mox_list_params = {'shared': True}
            self.moxed_client.list_networks(
                **mox_list_params).AndReturn({'networks': []})
        if kwargs.get('_break') == 'post_list_networks':
            self.mox.ReplayAll()
            return api
        if (('requested_networks' not in kwargs or
             kwargs['requested_networks'].as_tuples() == [(None, None, None)])
            and len(nets) > 1):
            self.mox.ReplayAll()
            return api
        preexisting_port_ids = []
        ports_in_requested_net_order = []
        nets_in_requested_net_order = []
        for request in ordered_networks:
            port_req_body = {
                'port': {
                    'device_id': self.instance.uuid,
                    'device_owner': 'compute:nova',
                },
            }
            # Network lookup for available network_id
            network = None
            for net in nets:
                if net['id'] == request.network_id:
                    network = net
                    break
            # if net_id did not pass validate_networks() and not available
            # here then skip it safely not continuing with a None Network
            else:
                continue
            if has_portbinding:
                port_req_body['port']['binding:host_id'] = (
                    self.instance.get('host'))
            if not has_portbinding:
                api._populate_neutron_extension_values(mox.IgnoreArg(),
                    self.instance, mox.IgnoreArg(),
                    mox.IgnoreArg(), neutron=self.moxed_client).AndReturn(None)
            else:
                # since _populate_neutron_extension_values() will call
                # _has_port_binding_extension()
                api._has_port_binding_extension(mox.IgnoreArg(),
                    neutron=self.moxed_client).\
                    AndReturn(has_portbinding)
            if request.port_id:
                port = ports[request.port_id]
                self.moxed_client.update_port(request.port_id,
                                              MyComparator(port_req_body)
                                              ).AndReturn(
                                                  {'port': port})
                ports_in_requested_net_order.append(request.port_id)
                preexisting_port_ids.append(request.port_id)
            else:
                request.address = fixed_ips.get(request.network_id)
                if request.address:
                    port_req_body['port']['fixed_ips'] = [
                        {'ip_address': str(request.address)}]
                port_req_body['port']['network_id'] = request.network_id
                port_req_body['port']['admin_state_up'] = True
                port_req_body['port']['tenant_id'] = \
                    self.instance.project_id
                if macs:
                    port_req_body['port']['mac_address'] = macs.pop()
                if has_portbinding:
                    port_req_body['port']['binding:host_id'] = (
                        self.instance.get('host'))
                res_port = {'port': {'id': 'fake'}}
                if has_extra_dhcp_opts:
                    port_req_body['port']['extra_dhcp_opts'] = dhcp_options
                if kwargs.get('_break') == 'mac' + request.network_id:
                    self.mox.ReplayAll()
                    return api
                self.moxed_client.create_port(
                    MyComparator(port_req_body)).AndReturn(res_port)
                ports_in_requested_net_order.append(res_port['port']['id'])
            nets_in_requested_net_order.append(network)
        api.get_instance_nw_info(mox.IgnoreArg(),
                                 self.instance,
                                 networks=nets_in_requested_net_order,
                                 port_ids=ports_in_requested_net_order,
                                 admin_client=None,
                                 preexisting_port_ids=preexisting_port_ids
                                 ).AndReturn(self._returned_nw_info)
        self.mox.ReplayAll()
        return api
    def _verify_nw_info(self, nw_inf, index=0):
        """Assert the nw_info entry at *index* matches the canned fixtures."""
        id_suffix = index + 1
        self.assertEqual('10.0.%s.2' % id_suffix,
                         nw_inf.fixed_ips()[index]['address'])
        self.assertEqual('172.0.%s.2' % id_suffix,
                         nw_inf.fixed_ips()[index].floating_ip_addresses()[0])
        self.assertEqual('my_netname%s' % id_suffix,
                         nw_inf[index]['network']['label'])
        self.assertEqual('my_portid%s' % id_suffix, nw_inf[index]['id'])
        self.assertEqual('my_mac%s' % id_suffix, nw_inf[index]['address'])
        self.assertEqual('10.0.%s.0/24' % id_suffix,
                         nw_inf[index]['network']['subnets'][0]['cidr'])
        ip_addr = model.IP(address='8.8.%s.1' % id_suffix,
                           version=4, type='dns')
        self.assertIn(ip_addr, nw_inf[index]['network']['subnets'][0]['dns'])
    def _get_instance_nw_info(self, number):
        """Record expectations, call get_instance_nw_info and verify *number*
        ports/networks are returned.
        """
        api = neutronapi.API()
        self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
        api.db.instance_info_cache_update(mox.IgnoreArg(),
                                          self.instance['uuid'],
                                          mox.IgnoreArg()).AndReturn(
                                              fake_info_cache)
        port_data = number == 1 and self.port_data1 or self.port_data2
        net_info_cache = []
        for port in port_data:
            net_info_cache.append({"network": {"id": port['network_id']},
                                   "id": port['id']})
        self.moxed_client.list_ports(
            tenant_id=self.instance['project_id'],
            device_id=self.instance['uuid']).AndReturn(
                {'ports': port_data})
        net_ids = [port['network_id'] for port in port_data]
        nets = number == 1 and self.nets1 or self.nets2
        self.moxed_client.list_networks(
            id=net_ids).AndReturn({'networks': nets})
        for i in range(1, number + 1):
            float_data = number == 1 and self.float_data1 or self.float_data2
            for ip in port_data[i - 1]['fixed_ips']:
                float_data = [x for x in float_data
                              if x['fixed_ip_address'] == ip['ip_address']]
                self.moxed_client.list_floatingips(
                    fixed_ip_address=ip['ip_address'],
                    port_id=port_data[i - 1]['id']).AndReturn(
                        {'floatingips': float_data})
            subnet_data = i == 1 and self.subnet_data1 or self.subnet_data2
            self.moxed_client.list_subnets(
                id=mox.SameElementsAs(['my_subid%s' % i])).AndReturn(
                    {'subnets': subnet_data})
            self.moxed_client.list_ports(
                network_id=subnet_data[0]['network_id'],
                device_owner='network:dhcp').AndReturn(
                    {'ports': []})
        self.instance['info_cache'] = self._fake_instance_info_cache(
            net_info_cache, self.instance['uuid'])
        self.mox.StubOutWithMock(api.db, 'instance_info_cache_get')
        api.db.instance_info_cache_get(mox.IgnoreArg(),
                                       self.instance['uuid']).AndReturn(
                                           self.instance['info_cache'])
        self.mox.ReplayAll()
        instance = self._fake_instance_object_with_info_cache(self.instance)
        nw_inf = api.get_instance_nw_info(self.context, instance)
        for i in range(0, number):
            self._verify_nw_info(nw_inf, i)
    def _allocate_for_instance(self, net_idx=1, **kwargs):
        """Stub expectations for net_idx, then run allocate_for_instance()."""
        api = self._stub_allocate_for_instance(net_idx, **kwargs)
        return api.allocate_for_instance(self.context, self.instance, **kwargs)
class TestNeutronv2(TestNeutronv2Base):
    """Tests for the neutronv2 API built on the shared base fixtures."""
    def setUp(self):
        super(TestNeutronv2, self).setUp()
        # All tests in this class talk to the same mocked neutron client.
        neutronapi.get_client(mox.IgnoreArg()).MultipleTimes().AndReturn(
            self.moxed_client)
    def test_get_instance_nw_info_1(self):
        # Test to get one port in one network and subnet.
        neutronapi.get_client(mox.IgnoreArg(),
                              admin=True).MultipleTimes().AndReturn(
            self.moxed_client)
        self._get_instance_nw_info(1)
    def test_get_instance_nw_info_2(self):
        # Test to get one port in each of two networks and subnets.
        neutronapi.get_client(mox.IgnoreArg(),
                              admin=True).MultipleTimes().AndReturn(
            self.moxed_client)
        self._get_instance_nw_info(2)
    def test_get_instance_nw_info_with_nets_add_interface(self):
        # This tests that adding an interface to an instance does not
        # remove the first instance from the instance.
        network_model = model.Network(id='network_id',
                                      bridge='br-int',
                                      injected='injected',
                                      label='fake_network',
                                      tenant_id='fake_tenant')
        # Cache already holds the first port; the helper adds the second.
        network_cache = {'info_cache': {
            'network_info': [{'id': self.port_data2[0]['id'],
                              'address': 'mac_address',
                              'network': network_model,
                              'type': 'ovs',
                              'ovs_interfaceid': 'ovs_interfaceid',
                              'devname': 'devname'}]}}
        self._fake_get_instance_nw_info_helper(network_cache,
                                               self.port_data2,
                                               self.nets2,
                                               [self.port_data2[1]['id']])
    def test_get_instance_nw_info_remove_ports_from_neutron(self):
        # This tests that when a port is removed in neutron it
        # is also removed from the nova.
        network_model = model.Network(id=self.port_data2[0]['network_id'],
                                      bridge='br-int',
                                      injected='injected',
                                      label='fake_network',
                                      tenant_id='fake_tenant')
        network_cache = {'info_cache': {
            'network_info': [{'id': 'network_id',
                              'address': 'mac_address',
                              'network': network_model,
                              'type': 'ovs',
                              'ovs_interfaceid': 'ovs_interfaceid',
                              'devname': 'devname'}]}}
        self._fake_get_instance_nw_info_helper(network_cache,
                                               self.port_data2,
                                               None,
                                               None)
    def test_get_instance_nw_info_ignores_neutron_ports(self):
        # Tests that only ports in the network_cache are updated
        # and ports returned from neutron that match the same
        # instance_id/device_id are ignored.
        port_data2 = copy.copy(self.port_data2)
        # set device_id on the ports to be the same.
        port_data2[1]['device_id'] = port_data2[0]['device_id']
        network_model = model.Network(id='network_id',
                                      bridge='br-int',
                                      injected='injected',
                                      label='fake_network',
                                      tenant_id='fake_tenant')
        network_cache = {'info_cache': {
            'network_info': [{'id': 'network_id',
                              'address': 'mac_address',
                              'network': network_model,
                              'type': 'ovs',
                              'ovs_interfaceid': 'ovs_interfaceid',
                              'devname': 'devname'}]}}
        self._fake_get_instance_nw_info_helper(network_cache,
                                               port_data2,
                                               None,
                                               None)
    def _fake_get_instance_nw_info_helper(self, network_cache,
                                          current_neutron_ports,
                                          networks=None, port_ids=None):
        """Helper function to test get_instance_nw_info.

        :param network_cache - data already in the nova network cache.
        :param current_neutron_ports - updated list of ports from neutron.
        :param networks - networks of ports being added to instance.
        :param port_ids - new ports being added to instance.
        """
        # keep a copy of the original ports/networks to pass to
        # get_instance_nw_info() as the code below changes them.
        original_port_ids = copy.copy(port_ids)
        original_networks = copy.copy(networks)
        api = neutronapi.API()
        self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
        api.db.instance_info_cache_update(
            mox.IgnoreArg(),
            self.instance['uuid'], mox.IgnoreArg()).AndReturn(fake_info_cache)
        neutronapi.get_client(mox.IgnoreArg(),
                              admin=True).MultipleTimes().AndReturn(
            self.moxed_client)
        self.moxed_client.list_ports(
            tenant_id=self.instance['project_id'],
            device_id=self.instance['uuid']).AndReturn(
                {'ports': current_neutron_ports})
        ifaces = network_cache['info_cache']['network_info']
        # When no explicit port_ids/networks are given, derive them from
        # the cached interfaces; otherwise prepend the cached ones.
        if port_ids is None:
            port_ids = [iface['id'] for iface in ifaces]
            net_ids = [iface['network']['id'] for iface in ifaces]
            nets = [{'id': iface['network']['id'],
                     'name': iface['network']['label'],
                     'tenant_id': iface['network']['meta']['tenant_id']}
                    for iface in ifaces]
        if networks is None:
            self.moxed_client.list_networks(
                id=net_ids).AndReturn({'networks': nets})
        else:
            networks = networks + [
                dict(id=iface['network']['id'],
                     name=iface['network']['label'],
                     tenant_id=iface['network']['meta']['tenant_id'])
                for iface in ifaces]
            port_ids = [iface['id'] for iface in ifaces] + port_ids
        index = 0
        # Expect floating-IP/subnet/DHCP-port lookups only for ports that
        # still exist in neutron; stale cached ports are dropped.
        current_neutron_port_map = {}
        for current_neutron_port in current_neutron_ports:
            current_neutron_port_map[current_neutron_port['id']] = (
                current_neutron_port)
        for port_id in port_ids:
            current_neutron_port = current_neutron_port_map.get(port_id)
            if current_neutron_port:
                for ip in current_neutron_port['fixed_ips']:
                    self.moxed_client.list_floatingips(
                        fixed_ip_address=ip['ip_address'],
                        port_id=current_neutron_port['id']).AndReturn(
                            {'floatingips': [self.float_data2[index]]})
                    self.moxed_client.list_subnets(
                        id=mox.SameElementsAs([ip['subnet_id']])
                        ).AndReturn(
                            {'subnets': [self.subnet_data_n[index]]})
                    self.moxed_client.list_ports(
                        network_id=current_neutron_port['network_id'],
                        device_owner='network:dhcp').AndReturn(
                            {'ports': self.dhcp_port_data1})
                index += 1
        self.instance['info_cache'] = self._fake_instance_info_cache(
            network_cache['info_cache']['network_info'], self.instance['uuid'])
        self.mox.StubOutWithMock(api.db, 'instance_info_cache_get')
        api.db.instance_info_cache_get(
            mox.IgnoreArg(),
            self.instance['uuid']).MultipleTimes().AndReturn(
                self.instance['info_cache'])
        self.mox.ReplayAll()
        instance = self._fake_instance_object_with_info_cache(self.instance)
        nw_infs = api.get_instance_nw_info(self.context,
                                           instance,
                                           networks=original_networks,
                                           port_ids=original_port_ids)
        self.assertEqual(index, len(nw_infs))
        # ensure that nic ordering is preserved
        for iface_index in range(index):
            self.assertEqual(nw_infs[iface_index]['id'],
                             port_ids[iface_index])
    def test_get_instance_nw_info_without_subnet(self):
        # Test get instance_nw_info for a port without subnet.
        api = neutronapi.API()
        self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
        api.db.instance_info_cache_update(
            mox.IgnoreArg(),
            self.instance['uuid'], mox.IgnoreArg()).AndReturn(fake_info_cache)
        # port_data3 has an empty fixed_ips list, so no floating-IP or
        # subnet lookups are expected.
        self.moxed_client.list_ports(
            tenant_id=self.instance['project_id'],
            device_id=self.instance['uuid']).AndReturn(
                {'ports': self.port_data3})
        self.moxed_client.list_networks(
            id=[self.port_data1[0]['network_id']]).AndReturn(
                {'networks': self.nets1})
        neutronapi.get_client(mox.IgnoreArg(),
                              admin=True).MultipleTimes().AndReturn(
            self.moxed_client)
        net_info_cache = []
        for port in self.port_data3:
            net_info_cache.append({"network": {"id": port['network_id']},
                                   "id": port['id']})
        self.instance['info_cache'] = self._fake_instance_info_cache(
            net_info_cache, self.instance['uuid'])
        self.mox.StubOutWithMock(api.db, 'instance_info_cache_get')
        api.db.instance_info_cache_get(
            mox.IgnoreArg(),
            self.instance['uuid']).AndReturn(self.instance['info_cache'])
        self.mox.ReplayAll()
        instance = self._fake_instance_object_with_info_cache(self.instance)
        nw_inf = api.get_instance_nw_info(self.context,
                                          instance)
        id_suffix = 3
        self.assertEqual(0, len(nw_inf.fixed_ips()))
        self.assertEqual('my_netname1', nw_inf[0]['network']['label'])
        self.assertEqual('my_portid%s' % id_suffix, nw_inf[0]['id'])
        self.assertEqual('my_mac%s' % id_suffix, nw_inf[0]['address'])
        self.assertEqual(0, len(nw_inf[0]['network']['subnets']))
    def test_refresh_neutron_extensions_cache(self):
        """Refreshing the cache stores extensions keyed by name."""
        api = neutronapi.API()
        # Note: Don't want the default get_client from setUp()
        self.mox.ResetAll()
        neutronapi.get_client(mox.IgnoreArg()).AndReturn(
            self.moxed_client)
        self.moxed_client.list_extensions().AndReturn(
            {'extensions': [{'name': constants.QOS_QUEUE}]})
        self.mox.ReplayAll()
        api._refresh_neutron_extensions_cache(mox.IgnoreArg())
        self.assertEqual(
            {constants.QOS_QUEUE: {'name': constants.QOS_QUEUE}},
            api.extensions)
def test_populate_neutron_extension_values_rxtx_factor(self):
api = neutronapi.API()
# Note: Don't want the default get_client from setUp()
self.mox.ResetAll()
neutronapi.get_client(mox.IgnoreArg()).AndReturn(
self.moxed_client)
self.moxed_client.list_extensions().AndReturn(
{'extensions': [{'name': constants.QOS_QUEUE}]})
self.mox.ReplayAll()
flavor = flavors.get_default_flavor()
flavor['rxtx_factor'] = 1
instance = objects.Instance(system_metadata={})
with mock.patch.object(instance, 'save'):
instance.set_flavor(flavor)
port_req_body = {'port': {}}
api._populate_neutron_extension_values(self.context, instance,
None, port_req_body)
self.assertEqual(port_req_body['port']['rxtx_factor'], 1)
    def test_allocate_for_instance_1(self):
        # Allocate one port in one network env.
        self._allocate_for_instance(1)
    def test_allocate_for_instance_2(self):
        # Allocate one port in two networks env.
        # With no explicit request and two candidate networks the
        # allocation is ambiguous and must fail.
        api = self._stub_allocate_for_instance(net_idx=2)
        self.assertRaises(exception.NetworkAmbiguous,
                          api.allocate_for_instance,
                          self.context, self.instance)
    def test_allocate_for_instance_accepts_macs_kwargs_None(self):
        # The macs kwarg should be accepted as None.
        self._allocate_for_instance(1, macs=None)
    def test_allocate_for_instance_accepts_macs_kwargs_set(self):
        # The macs kwarg should be accepted, as a set, the
        # _allocate_for_instance helper checks that the mac is used to create a
        # port.
        self._allocate_for_instance(1, macs=set(['ab:cd:ef:01:23:45']))
    def test_allocate_for_instance_accepts_only_portid(self):
        # Make sure allocate_for_instance works when only a portid is provided
        self._returned_nw_info = self.port_data1
        result = self._allocate_for_instance(
            requested_networks=objects.NetworkRequestList(
                objects=[objects.NetworkRequest(port_id='my_portid1')]))
        self.assertEqual(self.port_data1, result)
    @mock.patch('nova.network.neutronv2.api.API._unbind_ports')
    def test_allocate_for_instance_not_enough_macs_via_ports(self,
                                                             mock_unbind):
        # using a hypervisor MAC via a pre-created port will stop it being
        # used to dynamically create a port on a network. We put the network
        # first in requested_networks so that if the code were to not pre-check
        # requested ports, it would incorrectly assign the mac and not fail.
        requested_networks = objects.NetworkRequestList(
            objects = [
                objects.NetworkRequest(network_id=self.nets2[1]['id']),
                objects.NetworkRequest(port_id='my_portid1')])
        api = self._stub_allocate_for_instance(
            net_idx=2, requested_networks=requested_networks,
            macs=set(['my_mac1']),
            _break='mac' + self.nets2[1]['id'])
        self.assertRaises(exception.PortNotFree,
                          api.allocate_for_instance, self.context,
                          self.instance, requested_networks=requested_networks,
                          macs=set(['my_mac1']))
        mock_unbind.assert_called_once_with(self.context, [],
                                            self.moxed_client, mock.ANY)
    @mock.patch('nova.network.neutronv2.api.API._unbind_ports')
    def test_allocate_for_instance_not_enough_macs(self, mock_unbind):
        # If not enough MAC addresses are available to allocate to networks, an
        # error should be raised.
        # We could pass in macs=set(), but that wouldn't tell us that
        # allocate_for_instance tracks used macs properly, so we pass in one
        # mac, and ask for two networks.
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id=self.nets2[1]['id']),
                     objects.NetworkRequest(network_id=self.nets2[0]['id'])])
        api = self._stub_allocate_for_instance(
            net_idx=2, requested_networks=requested_networks,
            macs=set(['my_mac2']),
            _break='mac' + self.nets2[0]['id'])
        with mock.patch.object(api, '_delete_ports'):
            self.assertRaises(exception.PortNotFree,
                              api.allocate_for_instance, self.context,
                              self.instance,
                              requested_networks=requested_networks,
                              macs=set(['my_mac2']))
        mock_unbind.assert_called_once_with(self.context, [],
                                            self.moxed_client, mock.ANY)
    def test_allocate_for_instance_two_macs_two_networks(self):
        # If two MACs are available and two networks requested, two new ports
        # get made and no exceptions raised.
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id=self.nets2[1]['id']),
                     objects.NetworkRequest(network_id=self.nets2[0]['id'])])
        self._allocate_for_instance(
            net_idx=2, requested_networks=requested_networks,
            macs=set(['my_mac2', 'my_mac1']))
    def test_allocate_for_instance_mac_conflicting_requested_port(self):
        # A pre-created port whose MAC is not in the allowed set must be
        # rejected before any networks are listed.
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(port_id='my_portid1')])
        api = self._stub_allocate_for_instance(
            net_idx=1, requested_networks=requested_networks,
            macs=set(['unknown:mac']),
            _break='pre_list_networks')
        self.assertRaises(exception.PortNotUsable,
                          api.allocate_for_instance, self.context,
                          self.instance, requested_networks=requested_networks,
                          macs=set(['unknown:mac']))
    def test_allocate_for_instance_without_requested_networks(self):
        # Three candidate networks and no explicit request is ambiguous.
        api = self._stub_allocate_for_instance(net_idx=3)
        self.assertRaises(exception.NetworkAmbiguous,
                          api.allocate_for_instance,
                          self.context, self.instance)
    def test_allocate_for_instance_with_requested_non_available_network(self):
        """verify that a non available network is ignored.
        self.nets2 (net_idx=2) is composed of self.nets3[0] and self.nets3[1]
        Do not create a port on a non available network self.nets3[2].
        """
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id=net['id'])
                     for net in (self.nets3[0], self.nets3[2], self.nets3[1])])
        self._allocate_for_instance(net_idx=2,
                                    requested_networks=requested_networks)
    def test_allocate_for_instance_with_requested_networks(self):
        # specify only first and last network
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id=net['id'])
                     for net in (self.nets3[1], self.nets3[0], self.nets3[2])])
        self._allocate_for_instance(net_idx=3,
                                    requested_networks=requested_networks)
    def test_allocate_for_instance_with_invalid_network_id(self):
        # Requesting an unknown network id raises NetworkNotFound once the
        # network listing comes back empty (net_idx=9 is self.nets9 == []).
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id='invalid_id')])
        api = self._stub_allocate_for_instance(net_idx=9,
            requested_networks=requested_networks,
            _break='post_list_networks')
        self.assertRaises(exception.NetworkNotFound,
                          api.allocate_for_instance,
                          self.context, self.instance,
                          requested_networks=requested_networks)
    def test_allocate_for_instance_with_requested_networks_with_fixedip(self):
        # specify only first and last network
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id=self.nets1[0]['id'],
                                            address='10.0.1.0')])
        self._allocate_for_instance(net_idx=1,
                                    requested_networks=requested_networks)
    def test_allocate_for_instance_with_requested_networks_with_port(self):
        # Allocation driven purely by a pre-created port id.
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(port_id='my_portid1')])
        self._allocate_for_instance(net_idx=1,
                                    requested_networks=requested_networks)
def test_allocate_for_instance_no_networks(self):
"""verify the exception thrown when there are no networks defined."""
self.instance = fake_instance.fake_instance_obj(self.context,
**self.instance)
api = neutronapi.API()
self.moxed_client.list_extensions().AndReturn({'extensions': []})
self.moxed_client.list_networks(
tenant_id=self.instance.project_id,
shared=False).AndReturn(
{'networks': model.NetworkInfo([])})
self.moxed_client.list_networks(shared=True).AndReturn(
{'networks': model.NetworkInfo([])})
self.mox.ReplayAll()
nwinfo = api.allocate_for_instance(self.context, self.instance)
self.assertEqual(len(nwinfo), 0)
    @mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
    @mock.patch('nova.network.neutronv2.api.API._unbind_ports')
    def test_allocate_for_instance_ex1(self,
                                       mock_unbind,
                                       mock_preexisting):
        """verify we will delete created ports
        if we fail to allocate all net resources.
        Mox to raise exception when creating a second port.
        In this case, the code should delete the first created port.
        """
        self.instance = fake_instance.fake_instance_obj(self.context,
                                                        **self.instance)
        mock_preexisting.return_value = []
        api = neutronapi.API()
        self.mox.StubOutWithMock(api, '_populate_neutron_extension_values')
        self.mox.StubOutWithMock(api, '_has_port_binding_extension')
        api._has_port_binding_extension(mox.IgnoreArg(),
                                        neutron=self.moxed_client,
                                        refresh_cache=True).AndReturn(False)
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id=net['id'])
                     for net in (self.nets2[0], self.nets2[1])])
        self.moxed_client.list_networks(
            id=['my_netid1', 'my_netid2']).AndReturn({'networks': self.nets2})
        index = 0
        for network in self.nets2:
            binding_port_req_body = {
                'port': {
                    'device_id': self.instance.uuid,
                    'device_owner': 'compute:nova',
                },
            }
            port_req_body = {
                'port': {
                    'network_id': network['id'],
                    'admin_state_up': True,
                    'tenant_id': self.instance.project_id,
                },
            }
            port_req_body['port'].update(binding_port_req_body['port'])
            port = {'id': 'portid_' + network['id']}
            api._populate_neutron_extension_values(self.context,
                self.instance, None, binding_port_req_body,
                neutron=self.moxed_client).AndReturn(None)
            # First create succeeds; second raises the quota error.
            if index == 0:
                self.moxed_client.create_port(
                    MyComparator(port_req_body)).AndReturn({'port': port})
            else:
                NeutronOverQuota = exceptions.OverQuotaClient()
                self.moxed_client.create_port(
                    MyComparator(port_req_body)).AndRaise(NeutronOverQuota)
            index += 1
        # The successfully created first port must be cleaned up.
        self.moxed_client.delete_port('portid_' + self.nets2[0]['id'])
        self.mox.ReplayAll()
        self.assertRaises(exception.PortLimitExceeded,
                          api.allocate_for_instance,
                          self.context, self.instance,
                          requested_networks=requested_networks)
        mock_unbind.assert_called_once_with(self.context, [],
                                            self.moxed_client, mock.ANY)
    def test_allocate_for_instance_ex2(self):
        """Verify no port is deleted when the very first allocation fails.

        Mox raises an exception when creating the first port.
        In this case, the code should not delete any ports, because
        none were successfully created.
        """
        self.instance = fake_instance.fake_instance_obj(self.context,
                                                        **self.instance)
        api = neutronapi.API()
        self.mox.StubOutWithMock(api, '_populate_neutron_extension_values')
        self.mox.StubOutWithMock(api, '_has_port_binding_extension')
        api._has_port_binding_extension(mox.IgnoreArg(),
                                        neutron=self.moxed_client,
                                        refresh_cache=True).AndReturn(False)
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id=net['id'])
                     for net in (self.nets2[0], self.nets2[1])])
        self.moxed_client.list_networks(
            id=['my_netid1', 'my_netid2']).AndReturn({'networks': self.nets2})
        binding_port_req_body = {
            'port': {
                'device_id': self.instance.uuid,
                'device_owner': 'compute:nova',
            },
        }
        port_req_body = {
            'port': {
                'network_id': self.nets2[0]['id'],
                'admin_state_up': True,
                'device_id': self.instance.uuid,
                'tenant_id': self.instance.project_id,
            },
        }
        api._populate_neutron_extension_values(self.context,
            self.instance, None, binding_port_req_body,
            neutron=self.moxed_client).AndReturn(None)
        # First (and only expected) create_port call fails; no
        # delete_port expectation is recorded, so mox verification
        # proves no cleanup deletion happens.
        self.moxed_client.create_port(
            MyComparator(port_req_body)).AndRaise(
                Exception("fail to create port"))
        self.mox.ReplayAll()
        self.assertRaises(NEUTRON_CLIENT_EXCEPTION, api.allocate_for_instance,
                          self.context, self.instance,
                          requested_networks=requested_networks)
    def test_allocate_for_instance_no_port_or_network(self):
        """A request with neither port nor network id must still consult
        _get_available_networks; bail out via a sentinel exception once
        that call is made.
        """
        class BailOutEarly(Exception):
            pass
        self.instance = fake_instance.fake_instance_obj(self.context,
                                                        **self.instance)
        api = neutronapi.API()
        self.moxed_client.list_extensions().AndReturn({'extensions': []})
        self.mox.StubOutWithMock(api, '_get_available_networks')
        # Make sure we get an empty list and then bail out of the rest
        # of the function
        api._get_available_networks(self.context, self.instance.project_id,
                                    [],
                                    neutron=self.moxed_client).\
            AndRaise(BailOutEarly)
        self.mox.ReplayAll()
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest()])
        self.assertRaises(BailOutEarly,
                          api.allocate_for_instance,
                          self.context, self.instance,
                          requested_networks=requested_networks)
    def test_allocate_for_instance_second_time(self):
        """Allocation must return only the ports created in this run."""
        # Make sure that allocate_for_instance only returns ports that it
        # allocated during _that_ run.
        new_port = {'id': 'fake'}
        self._returned_nw_info = self.port_data1 + [new_port]
        nw_info = self._allocate_for_instance()
        self.assertEqual(nw_info, [new_port])
    def test_allocate_for_instance_port_in_use(self):
        """A requested port with a device already attached raises
        PortInUse.
        """
        # If a port is already in use, an exception should be raised.
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(port_id='my_portid1')])
        api = self._stub_allocate_for_instance(
            requested_networks=requested_networks,
            _break='pre_list_networks',
            _device=True)
        self.assertRaises(exception.PortInUse,
                          api.allocate_for_instance, self.context,
                          self.instance, requested_networks=requested_networks)
    def test_allocate_for_instance_port_not_found(self):
        """A nonexistent requested port id raises PortNotFound."""
        # If a port is not found, an exception should be raised.
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(port_id='invalid_id')])
        api = self._stub_allocate_for_instance(
            requested_networks=requested_networks,
            _break='pre_list_networks')
        self.assertRaises(exception.PortNotFound,
                          api.allocate_for_instance, self.context,
                          self.instance, requested_networks=requested_networks)
    def test_allocate_for_instance_port_invalid_tenantid(self):
        """A port owned by a different tenant raises PortNotUsable."""
        self.tenant_id = 'invalid_id'
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(port_id='my_portid1')])
        api = self._stub_allocate_for_instance(
            requested_networks=requested_networks,
            _break='pre_list_networks')
        self.assertRaises(exception.PortNotUsable,
                          api.allocate_for_instance, self.context,
                          self.instance, requested_networks=requested_networks)
    def test_allocate_for_instance_with_externalnet_forbidden(self):
        """Only one network is available, it's external, and the client
        is unauthorized to use it.
        """
        self.instance = fake_instance.fake_instance_obj(self.context,
                                                        **self.instance)
        self.moxed_client.list_extensions().AndReturn({'extensions': []})
        # no networks in the tenant
        self.moxed_client.list_networks(
            tenant_id=self.instance.project_id,
            shared=False).AndReturn(
                {'networks': model.NetworkInfo([])})
        # external network is shared
        self.moxed_client.list_networks(shared=True).AndReturn(
            {'networks': self.nets8})
        self.mox.ReplayAll()
        api = neutronapi.API()
        self.assertRaises(exception.ExternalNetworkAttachForbidden,
                          api.allocate_for_instance,
                          self.context, self.instance)
    def test_allocate_for_instance_with_externalnet_multiple(self):
        """Multiple networks are available, one the client is authorized
        to use, and an external one the client is unauthorized to use.
        """
        self.instance = fake_instance.fake_instance_obj(self.context,
                                                        **self.instance)
        self.moxed_client.list_extensions().AndReturn({'extensions': []})
        # network found in the tenant
        self.moxed_client.list_networks(
            tenant_id=self.instance.project_id,
            shared=False).AndReturn(
                {'networks': self.nets1})
        # external network is shared
        self.moxed_client.list_networks(shared=True).AndReturn(
            {'networks': self.nets8})
        self.mox.ReplayAll()
        api = neutronapi.API()
        # With no network requested explicitly, two candidates is
        # ambiguous regardless of authorization.
        self.assertRaises(
            exception.NetworkAmbiguous,
            api.allocate_for_instance,
            self.context, self.instance)
    def test_allocate_for_instance_with_externalnet_admin_ctx(self):
        """Only one network is available, it's external, and the client
        is authorized.
        """
        admin_ctx = context.RequestContext('userid', 'my_tenantid',
                                           is_admin=True)
        api = self._stub_allocate_for_instance(net_idx=8)
        api.allocate_for_instance(admin_ctx, self.instance)
    def test_allocate_for_instance_with_external_shared_net(self):
        """Only one network is available, it's external and shared."""
        ctx = context.RequestContext('userid', 'my_tenantid')
        api = self._stub_allocate_for_instance(net_idx=10)
        api.allocate_for_instance(ctx, self.instance)
def _deallocate_for_instance(self, number, requested_networks=None):
# TODO(mriedem): Remove this conversion when all neutronv2 APIs are
# converted to handling instance objects.
self.instance = fake_instance.fake_instance_obj(self.context,
**self.instance)
api = neutronapi.API()
port_data = number == 1 and self.port_data1 or self.port_data2
ports = {port['id'] for port in port_data}
ret_data = copy.deepcopy(port_data)
if requested_networks:
if isinstance(requested_networks, objects.NetworkRequestList):
# NOTE(danms): Temporary and transitional
with mock.patch('nova.utils.is_neutron', return_value=True):
requested_networks = requested_networks.as_tuples()
for net, fip, port, request_id in requested_networks:
ret_data.append({'network_id': net,
'device_id': self.instance.uuid,
'device_owner': 'compute:nova',
'id': port,
'status': 'DOWN',
'admin_state_up': True,
'fixed_ips': [],
'mac_address': 'fake_mac', })
self.moxed_client.list_ports(
device_id=self.instance.uuid).AndReturn(
{'ports': ret_data})
self.moxed_client.list_extensions().AndReturn({'extensions': []})
if requested_networks:
for net, fip, port, request_id in requested_networks:
self.moxed_client.update_port(port)
for port in ports:
self.moxed_client.delete_port(port)
self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
api.db.instance_info_cache_update(self.context,
self.instance.uuid,
{'network_info': '[]'}).AndReturn(
fake_info_cache)
self.mox.ReplayAll()
api = neutronapi.API()
api.deallocate_for_instance(self.context, self.instance,
requested_networks=requested_networks)
    @mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
    def test_deallocate_for_instance_1_with_requested(self, mock_preexisting):
        """Deallocate one port plus a requested (preexisting) port."""
        mock_preexisting.return_value = []
        requested = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id='fake-net',
                                            address='1.2.3.4',
                                            port_id='fake-port')])
        # Test to deallocate in one port env.
        self._deallocate_for_instance(1, requested_networks=requested)
    @mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
    def test_deallocate_for_instance_2_with_requested(self, mock_preexisting):
        """Deallocate two ports plus a requested (preexisting) port."""
        mock_preexisting.return_value = []
        requested = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id='fake-net',
                                            address='1.2.3.4',
                                            port_id='fake-port')])
        # Test to deallocate in one port env.
        self._deallocate_for_instance(2, requested_networks=requested)
    @mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
    def test_deallocate_for_instance_1(self, mock_preexisting):
        """Deallocate with a single allocated port."""
        mock_preexisting.return_value = []
        # Test to deallocate in one port env.
        self._deallocate_for_instance(1)
    @mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
    def test_deallocate_for_instance_2(self, mock_preexisting):
        """Deallocate with two allocated ports."""
        mock_preexisting.return_value = []
        # Test to deallocate in two ports env.
        self._deallocate_for_instance(2)
    @mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
    def test_deallocate_for_instance_port_not_found(self,
                                                    mock_preexisting):
        """A 404 from delete_port must be tolerated (port already gone)."""
        # TODO(mriedem): Remove this conversion when all neutronv2 APIs are
        # converted to handling instance objects.
        self.instance = fake_instance.fake_instance_obj(self.context,
                                                        **self.instance)
        mock_preexisting.return_value = []
        port_data = self.port_data1
        self.moxed_client.list_ports(
            device_id=self.instance.uuid).AndReturn(
                {'ports': port_data})
        self.moxed_client.list_extensions().AndReturn({'extensions': []})
        NeutronNotFound = exceptions.NeutronClientException(status_code=404)
        for port in reversed(port_data):
            self.moxed_client.delete_port(port['id']).AndRaise(
                                                        NeutronNotFound)
        self.mox.ReplayAll()
        api = neutronapi.API()
        # Must not raise despite every delete_port returning 404.
        api.deallocate_for_instance(self.context, self.instance)
def _test_deallocate_port_for_instance(self, number):
port_data = number == 1 and self.port_data1 or self.port_data2
nets = number == 1 and self.nets1 or self.nets2
self.moxed_client.delete_port(port_data[0]['id'])
net_info_cache = []
for port in port_data:
net_info_cache.append({"network": {"id": port['network_id']},
"id": port['id']})
self.instance['info_cache'] = self._fake_instance_info_cache(
net_info_cache, self.instance['uuid'])
api = neutronapi.API()
neutronapi.get_client(mox.IgnoreArg(), admin=True).AndReturn(
self.moxed_client)
self.moxed_client.list_ports(
tenant_id=self.instance['project_id'],
device_id=self.instance['uuid']).AndReturn(
{'ports': port_data[1:]})
neutronapi.get_client(mox.IgnoreArg()).MultipleTimes().AndReturn(
self.moxed_client)
net_ids = [port['network_id'] for port in port_data]
self.moxed_client.list_networks(id=net_ids).AndReturn(
{'networks': nets})
float_data = number == 1 and self.float_data1 or self.float_data2
for data in port_data[1:]:
for ip in data['fixed_ips']:
self.moxed_client.list_floatingips(
fixed_ip_address=ip['ip_address'],
port_id=data['id']).AndReturn(
{'floatingips': float_data[1:]})
for port in port_data[1:]:
self.moxed_client.list_subnets(id=['my_subid2']).AndReturn({})
self.mox.StubOutWithMock(api.db, 'instance_info_cache_get')
api.db.instance_info_cache_get(mox.IgnoreArg(),
self.instance['uuid']).AndReturn(
self.instance['info_cache'])
self.mox.ReplayAll()
instance = self._fake_instance_object_with_info_cache(self.instance)
nwinfo = api.deallocate_port_for_instance(self.context, instance,
port_data[0]['id'])
self.assertEqual(len(nwinfo), len(port_data[1:]))
if len(port_data) > 1:
self.assertEqual(nwinfo[0]['network']['id'], 'my_netid2')
def test_deallocate_port_for_instance_1(self):
# Test to deallocate the first and only port
self._test_deallocate_port_for_instance(1)
def test_deallocate_port_for_instance_2(self):
# Test to deallocate the first port of two
self._test_deallocate_port_for_instance(2)
    def test_list_ports(self):
        """list_ports passes the search options straight to the client."""
        search_opts = {'parm': 'value'}
        self.moxed_client.list_ports(**search_opts)
        self.mox.ReplayAll()
        neutronapi.API().list_ports(self.context, **search_opts)
    def test_show_port(self):
        """show_port delegates to the client's show_port."""
        self.moxed_client.show_port('foo').AndReturn(
            {'port': self.port_data1[0]})
        self.mox.ReplayAll()
        neutronapi.API().show_port(self.context, 'foo')
    def test_validate_networks(self):
        """Both requested networks exist and port quota allows allocation."""
        requested_networks = [('my_netid1', None, None, None),
                              ('my_netid2', None, None, None)]
        ids = ['my_netid1', 'my_netid2']
        self.moxed_client.list_networks(
            id=mox.SameElementsAs(ids)).AndReturn(
                {'networks': self.nets2})
        self.moxed_client.list_ports(tenant_id='my_tenantid').AndReturn(
                    {'ports': []})
        self.moxed_client.show_quota(
            tenant_id='my_tenantid').AndReturn(
                    {'quota': {'port': 50}})
        self.mox.ReplayAll()
        api = neutronapi.API()
        api.validate_networks(self.context, requested_networks, 1)
    def test_validate_networks_without_port_quota_on_network_side(self):
        """Validation succeeds when Neutron reports no 'port' quota key."""
        requested_networks = [('my_netid1', None, None, None),
                              ('my_netid2', None, None, None)]
        ids = ['my_netid1', 'my_netid2']
        self.moxed_client.list_networks(
            id=mox.SameElementsAs(ids)).AndReturn(
                {'networks': self.nets2})
        self.moxed_client.list_ports(tenant_id='my_tenantid').AndReturn(
                    {'ports': []})
        # Empty quota dict: no port quota enforced on the network side.
        self.moxed_client.show_quota(
            tenant_id='my_tenantid').AndReturn(
                    {'quota': {}})
        self.mox.ReplayAll()
        api = neutronapi.API()
        api.validate_networks(self.context, requested_networks, 1)
    def test_validate_networks_ex_1(self):
        """Validate a single requested network that does exist."""
        requested_networks = [('my_netid1', None, None, None)]
        self.moxed_client.list_networks(
            id=mox.SameElementsAs(['my_netid1'])).AndReturn(
                {'networks': self.nets1})
        self.moxed_client.list_ports(tenant_id='my_tenantid').AndReturn(
                    {'ports': []})
        self.moxed_client.show_quota(
            tenant_id='my_tenantid').AndReturn(
                    {'quota': {'port': 50}})
        self.mox.ReplayAll()
        api = neutronapi.API()
        # NOTE(review): only my_netid1 is requested and it is present in
        # nets1, so this except clause looks unreachable - the test
        # effectively asserts the success path. Consider removing the
        # try/except or asserting no exception explicitly.
        try:
            api.validate_networks(self.context, requested_networks, 1)
        except exception.NetworkNotFound as ex:
            self.assertIn("my_netid2", six.text_type(ex))
def test_validate_networks_ex_2(self):
requested_networks = [('my_netid1', None, None, None),
('my_netid2', None, None, None),
('my_netid3', None, None, None)]
ids = ['my_netid1', 'my_netid2', 'my_netid3']
self.moxed_client.list_networks(
id=mox.SameElementsAs(ids)).AndReturn(
{'networks': self.nets1})
self.mox.ReplayAll()
api = neutronapi.API()
try:
api.validate_networks(self.context, requested_networks, 1)
except exception.NetworkNotFound as ex:
self.assertIn("my_netid2", six.text_type(ex))
self.assertIn("my_netid3", six.text_type(ex))
    def test_validate_networks_duplicate_enable(self):
        """Duplicate network ids in a request are accepted."""
        # Verify that no duplicateNetworks exception is thrown when duplicate
        # network ids are passed to validate_networks.
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id='my_netid1'),
                     objects.NetworkRequest(network_id='my_netid1')])
        ids = ['my_netid1', 'my_netid1']
        self.moxed_client.list_networks(
            id=mox.SameElementsAs(ids)).AndReturn(
                 {'networks': self.nets1})
        self.moxed_client.list_ports(tenant_id='my_tenantid').AndReturn(
                    {'ports': []})
        self.moxed_client.show_quota(
            tenant_id='my_tenantid').AndReturn(
                    {'quota': {'port': 50}})
        self.mox.ReplayAll()
        api = neutronapi.API()
        api.validate_networks(self.context, requested_networks, 1)
    def test_allocate_for_instance_with_requested_networks_duplicates(self):
        """Allocation succeeds with the same network requested twice."""
        # specify a duplicate network to allocate to instance
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id=net['id'])
                     for net in (self.nets6[0], self.nets6[1])])
        self._allocate_for_instance(net_idx=6,
                                    requested_networks=requested_networks)
    def test_allocate_for_instance_requested_networks_duplicates_port(self):
        """Allocation succeeds with two requested ports on one network."""
        # specify first port and last port that are in same network
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(port_id=port['id'])
                     for port in (self.port_data1[0], self.port_data3[0])])
        self._allocate_for_instance(net_idx=6,
                                    requested_networks=requested_networks)
    def test_allocate_for_instance_requested_networks_duplicates_combo(self):
        """Allocation succeeds with a mix of duplicate networks and ports."""
        # specify a combo net_idx=7 : net2, port in net1, net2, port in net1
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id='my_netid2'),
                     objects.NetworkRequest(port_id=self.port_data1[0]['id']),
                     objects.NetworkRequest(network_id='my_netid2'),
                     objects.NetworkRequest(port_id=self.port_data3[0]['id'])])
        self._allocate_for_instance(net_idx=7,
                                    requested_networks=requested_networks)
    def test_validate_networks_not_specified(self):
        """With no networks requested and more than one available,
        validation raises NetworkAmbiguous.
        """
        requested_networks = objects.NetworkRequestList(objects=[])
        self.moxed_client.list_networks(
            tenant_id=self.context.project_id,
            shared=False).AndReturn(
                {'networks': self.nets1})
        self.moxed_client.list_networks(
            shared=True).AndReturn(
                {'networks': self.nets2})
        self.mox.ReplayAll()
        api = neutronapi.API()
        self.assertRaises(exception.NetworkAmbiguous,
                          api.validate_networks,
                          self.context, requested_networks, 1)
    def test_validate_networks_port_not_found(self):
        # Verify that the correct exception is thrown when a non existent
        # port is passed to validate_networks.
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(
                network_id='my_netid1',
                port_id='3123-ad34-bc43-32332ca33e')])
        # Client-level 404 is translated to nova's PortNotFound.
        PortNotFound = exceptions.PortNotFoundClient()
        self.moxed_client.show_port(requested_networks[0].port_id).AndRaise(
            PortNotFound)
        self.mox.ReplayAll()
        # Expected call from setUp.
        neutronapi.get_client(None)
        api = neutronapi.API()
        self.assertRaises(exception.PortNotFound,
                          api.validate_networks,
                          self.context, requested_networks, 1)
    def test_validate_networks_port_show_raises_non404(self):
        # Verify that the correct exception is thrown when a non existent
        # port is passed to validate_networks.
        fake_port_id = '3123-ad34-bc43-32332ca33e'
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(
                network_id='my_netid1',
                port_id=fake_port_id)])
        # status_code=0: an unexpected (non-404) client failure, which
        # must surface as a generic NovaException, not PortNotFound.
        NeutronNotFound = exceptions.NeutronClientException(status_code=0)
        self.moxed_client.show_port(requested_networks[0].port_id).AndRaise(
            NeutronNotFound)
        self.mox.ReplayAll()
        # Expected call from setUp.
        neutronapi.get_client(None)
        api = neutronapi.API()
        exc = self.assertRaises(exception.NovaException,
                                api.validate_networks,
                                self.context, requested_networks, 1)
        expected_exception_message = ('Failed to access port %(port_id)s: '
                                      'An unknown exception occurred.' %
                                      {'port_id': fake_port_id})
        self.assertEqual(expected_exception_message, str(exc))
    def test_validate_networks_port_in_use(self):
        """A port that already has a device attached raises PortInUse."""
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(port_id=self.port_data3[0]['id'])])
        self.moxed_client.show_port(self.port_data3[0]['id']).\
            AndReturn({'port': self.port_data3[0]})
        self.mox.ReplayAll()
        api = neutronapi.API()
        self.assertRaises(exception.PortInUse,
                          api.validate_networks,
                          self.context, requested_networks, 1)
    def test_validate_networks_port_no_subnet_id(self):
        """A free port without fixed IPs raises PortRequiresFixedIP."""
        port_a = self.port_data3[0]
        port_a['device_id'] = None
        port_a['device_owner'] = None
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(port_id=port_a['id'])])
        self.moxed_client.show_port(port_a['id']).AndReturn({'port': port_a})
        self.mox.ReplayAll()
        api = neutronapi.API()
        self.assertRaises(exception.PortRequiresFixedIP,
                          api.validate_networks,
                          self.context, requested_networks, 1)
    def test_validate_networks_no_subnet_id(self):
        """A network without subnets raises NetworkRequiresSubnet."""
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id='his_netid4')])
        ids = ['his_netid4']
        self.moxed_client.list_networks(
            id=mox.SameElementsAs(ids)).AndReturn(
                {'networks': self.nets4})
        self.mox.ReplayAll()
        api = neutronapi.API()
        self.assertRaises(exception.NetworkRequiresSubnet,
                          api.validate_networks,
                          self.context, requested_networks, 1)
    def test_validate_networks_ports_in_same_network_enable(self):
        # Verify that duplicateNetworks exception is not thrown when ports
        # on same duplicate network are passed to validate_networks.
        port_a = self.port_data3[0]
        port_a['fixed_ips'] = {'ip_address': '10.0.0.2',
                               'subnet_id': 'subnet_id'}
        port_b = self.port_data1[0]
        self.assertEqual(port_a['network_id'], port_b['network_id'])
        # Free both ports so they are usable by the instance.
        for port in [port_a, port_b]:
            port['device_id'] = None
            port['device_owner'] = None
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(port_id=port_a['id']),
                     objects.NetworkRequest(port_id=port_b['id'])])
        self.moxed_client.show_port(port_a['id']).AndReturn(
                                                 {'port': port_a})
        self.moxed_client.show_port(port_b['id']).AndReturn(
                                                 {'port': port_b})
        self.mox.ReplayAll()
        api = neutronapi.API()
        api.validate_networks(self.context, requested_networks, 1)
    def test_validate_networks_ports_not_in_same_network(self):
        """Two free ports on different networks validate fine."""
        port_a = self.port_data3[0]
        port_a['fixed_ips'] = {'ip_address': '10.0.0.2',
                               'subnet_id': 'subnet_id'}
        port_b = self.port_data2[1]
        self.assertNotEqual(port_a['network_id'], port_b['network_id'])
        for port in [port_a, port_b]:
            port['device_id'] = None
            port['device_owner'] = None
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(port_id=port_a['id']),
                     objects.NetworkRequest(port_id=port_b['id'])])
        self.moxed_client.show_port(port_a['id']).AndReturn({'port': port_a})
        self.moxed_client.show_port(port_b['id']).AndReturn({'port': port_b})
        self.mox.ReplayAll()
        api = neutronapi.API()
        api.validate_networks(self.context, requested_networks, 1)
    def test_validate_networks_no_quota(self):
        # Test validation for a request for one instance needing
        # two ports, where the quota is 2 and 2 ports are in use
        # => instances which can be created = 0
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id='my_netid1'),
                     objects.NetworkRequest(network_id='my_netid2')])
        ids = ['my_netid1', 'my_netid2']
        self.moxed_client.list_networks(
            id=mox.SameElementsAs(ids)).AndReturn(
                {'networks': self.nets2})
        self.moxed_client.list_ports(tenant_id='my_tenantid').AndReturn(
                    {'ports': self.port_data2})
        self.moxed_client.show_quota(
            tenant_id='my_tenantid').AndReturn(
                    {'quota': {'port': 2}})
        self.mox.ReplayAll()
        api = neutronapi.API()
        max_count = api.validate_networks(self.context,
                                          requested_networks, 1)
        self.assertEqual(max_count, 0)
    def test_validate_networks_with_ports_and_networks(self):
        # Test validation for a request for one instance needing
        # one port allocated via nova with another port being passed in.
        port_b = self.port_data2[1]
        port_b['device_id'] = None
        port_b['device_owner'] = None
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id='my_netid1'),
                     objects.NetworkRequest(port_id=port_b['id'])])
        self.moxed_client.show_port(port_b['id']).AndReturn({'port': port_b})
        ids = ['my_netid1']
        self.moxed_client.list_networks(
            id=mox.SameElementsAs(ids)).AndReturn(
                {'networks': self.nets1})
        self.moxed_client.list_ports(tenant_id='my_tenantid').AndReturn(
                    {'ports': self.port_data2})
        self.moxed_client.show_quota(
            tenant_id='my_tenantid').AndReturn(
                    {'quota': {'port': 5}})
        self.mox.ReplayAll()
        api = neutronapi.API()
        max_count = api.validate_networks(self.context,
                                          requested_networks, 1)
        self.assertEqual(max_count, 1)
    def test_validate_networks_one_port_and_no_networks(self):
        # Test that show quota is not called if no networks are
        # passed in and only ports.
        port_b = self.port_data2[1]
        port_b['device_id'] = None
        port_b['device_owner'] = None
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(port_id=port_b['id'])])
        self.moxed_client.show_port(port_b['id']).AndReturn({'port': port_b})
        self.mox.ReplayAll()
        api = neutronapi.API()
        max_count = api.validate_networks(self.context,
                                          requested_networks, 1)
        self.assertEqual(max_count, 1)
    def test_validate_networks_some_quota(self):
        # Test validation for a request for two instance needing
        # two ports each, where the quota is 5 and 2 ports are in use
        # => instances which can be created = 1
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id='my_netid1'),
                     objects.NetworkRequest(network_id='my_netid2')])
        ids = ['my_netid1', 'my_netid2']
        self.moxed_client.list_networks(
            id=mox.SameElementsAs(ids)).AndReturn(
                {'networks': self.nets2})
        self.moxed_client.list_ports(tenant_id='my_tenantid').AndReturn(
                    {'ports': self.port_data2})
        self.moxed_client.show_quota(
            tenant_id='my_tenantid').AndReturn(
                    {'quota': {'port': 5}})
        self.mox.ReplayAll()
        api = neutronapi.API()
        max_count = api.validate_networks(self.context,
                                          requested_networks, 2)
        self.assertEqual(max_count, 1)
    def test_validate_networks_unlimited_quota(self):
        # Test validation for a request for two instance needing
        # two ports each, where the quota is -1 (unlimited)
        # => instances which can be created = 1
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id='my_netid1'),
                     objects.NetworkRequest(network_id='my_netid2')])
        ids = ['my_netid1', 'my_netid2']
        self.moxed_client.list_networks(
            id=mox.SameElementsAs(ids)).AndReturn(
                {'networks': self.nets2})
        self.moxed_client.list_ports(tenant_id='my_tenantid').AndReturn(
                    {'ports': self.port_data2})
        # -1 means unlimited, so the requested count is returned as-is.
        self.moxed_client.show_quota(
            tenant_id='my_tenantid').AndReturn(
                    {'quota': {'port': -1}})
        self.mox.ReplayAll()
        api = neutronapi.API()
        max_count = api.validate_networks(self.context,
                                          requested_networks, 2)
        self.assertEqual(max_count, 2)
    def test_validate_networks_no_quota_but_ports_supplied(self):
        """Quota is never consulted when only preexisting ports are
        requested (no show_quota expectation is recorded).
        """
        port_a = self.port_data3[0]
        port_a['fixed_ips'] = {'ip_address': '10.0.0.2',
                               'subnet_id': 'subnet_id'}
        port_b = self.port_data2[1]
        self.assertNotEqual(port_a['network_id'], port_b['network_id'])
        for port in [port_a, port_b]:
            port['device_id'] = None
            port['device_owner'] = None
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(port_id=port_a['id']),
                     objects.NetworkRequest(port_id=port_b['id'])])
        self.moxed_client.show_port(port_a['id']).AndReturn({'port': port_a})
        self.moxed_client.show_port(port_b['id']).AndReturn({'port': port_b})
        self.mox.ReplayAll()
        api = neutronapi.API()
        max_count = api.validate_networks(self.context,
                                          requested_networks, 1)
        self.assertEqual(max_count, 1)
    def _mock_list_ports(self, port_data=None):
        """Record a list_ports expectation keyed on self.port_address and
        return that address; defaults to the two-port fixture.
        """
        if port_data is None:
            port_data = self.port_data2
        address = self.port_address
        self.moxed_client.list_ports(
            fixed_ips=MyComparator('ip_address=%s' % address)).AndReturn(
                {'ports': port_data})
        self.mox.ReplayAll()
        return address
    def test_get_fixed_ip_by_address_fails_for_no_ports(self):
        """No matching ports -> FixedIpNotFoundForAddress."""
        address = self._mock_list_ports(port_data=[])
        api = neutronapi.API()
        self.assertRaises(exception.FixedIpNotFoundForAddress,
                          api.get_fixed_ip_by_address,
                          self.context, address)
    def test_get_fixed_ip_by_address_succeeds_for_1_port(self):
        """Exactly one matching port -> its instance uuid is returned."""
        address = self._mock_list_ports(port_data=self.port_data1)
        api = neutronapi.API()
        result = api.get_fixed_ip_by_address(self.context, address)
        self.assertEqual(self.instance2['uuid'], result['instance_uuid'])
    def test_get_fixed_ip_by_address_fails_for_more_than_1_port(self):
        """Multiple matching ports -> FixedIpAssociatedWithMultipleInstances."""
        address = self._mock_list_ports()
        api = neutronapi.API()
        self.assertRaises(exception.FixedIpAssociatedWithMultipleInstances,
                          api.get_fixed_ip_by_address,
                          self.context, address)
    def _get_available_networks(self, prv_nets, pub_nets,
                                req_ids=None, context=None):
        """Verify _get_available_networks returns prv_nets + pub_nets.

        With req_ids a single filtered list_networks call is expected;
        otherwise separate tenant (shared=False) and shared=True lookups
        are recorded.
        """
        api = neutronapi.API()
        nets = prv_nets + pub_nets
        if req_ids:
            mox_list_params = {'id': req_ids}
            self.moxed_client.list_networks(
                **mox_list_params).AndReturn({'networks': nets})
        else:
            mox_list_params = {'tenant_id': self.instance['project_id'],
                               'shared': False}
            self.moxed_client.list_networks(
                **mox_list_params).AndReturn({'networks': prv_nets})
            mox_list_params = {'shared': True}
            self.moxed_client.list_networks(
                **mox_list_params).AndReturn({'networks': pub_nets})
        self.mox.ReplayAll()
        rets = api._get_available_networks(
            context if context else self.context,
            self.instance['project_id'],
            req_ids)
        self.assertEqual(rets, nets)
    def test_get_available_networks_all_private(self):
        self._get_available_networks(prv_nets=self.nets2, pub_nets=[])
    def test_get_available_networks_all_public(self):
        self._get_available_networks(prv_nets=[], pub_nets=self.nets2)
    def test_get_available_networks_private_and_public(self):
        self._get_available_networks(prv_nets=self.nets1, pub_nets=self.nets4)
    def test_get_available_networks_with_network_ids(self):
        prv_nets = [self.nets3[0]]
        pub_nets = [self.nets3[-1]]
        # specify only first and last network
        req_ids = [net['id'] for net in (self.nets3[0], self.nets3[-1])]
        self._get_available_networks(prv_nets, pub_nets, req_ids)
    def test_get_available_networks_with_custom_policy(self):
        """An empty attach-external-network rule permits external nets."""
        rules = {'network:attach_external_network':
                 common_policy.parse_rule('')}
        policy.set_rules(rules)
        req_ids = [net['id'] for net in self.nets5]
        self._get_available_networks(self.nets5, pub_nets=[], req_ids=req_ids)
    def test_get_floating_ip_pools(self):
        """Pools are the names of external (router:external) networks."""
        api = neutronapi.API()
        search_opts = {'router:external': True}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool, self.fip_pool_nova]})
        self.mox.ReplayAll()
        pools = api.get_floating_ip_pools(self.context)
        expected = [self.fip_pool['name'], self.fip_pool_nova['name']]
        self.assertEqual(expected, pools)
def _get_expected_fip_model(self, fip_data, idx=0):
expected = {'id': fip_data['id'],
'address': fip_data['floating_ip_address'],
'pool': self.fip_pool['name'],
'project_id': fip_data['tenant_id'],
'fixed_ip_id': fip_data['port_id'],
'fixed_ip':
{'address': fip_data['fixed_ip_address']},
'instance': ({'uuid': self.port_data2[idx]['device_id']}
if fip_data['port_id']
else None)}
if expected['instance'] is not None:
expected['fixed_ip']['instance_uuid'] = \
expected['instance']['uuid']
return expected
    def _test_get_floating_ip(self, fip_data, idx=0, by_address=False):
        """Verify floating-ip lookup (by id or by address) returns the
        expected model for *fip_data*.
        """
        api = neutronapi.API()
        fip_id = fip_data['id']
        net_id = fip_data['floating_network_id']
        address = fip_data['floating_ip_address']
        if by_address:
            self.moxed_client.list_floatingips(floating_ip_address=address).\
                AndReturn({'floatingips': [fip_data]})
        else:
            self.moxed_client.show_floatingip(fip_id).\
                AndReturn({'floatingip': fip_data})
        self.moxed_client.show_network(net_id).\
            AndReturn({'network': self.fip_pool})
        if fip_data['port_id']:
            # Associated floating IPs require a port lookup too.
            self.moxed_client.show_port(fip_data['port_id']).\
                AndReturn({'port': self.port_data2[idx]})
        self.mox.ReplayAll()
        expected = self._get_expected_fip_model(fip_data, idx)
        if by_address:
            fip = api.get_floating_ip_by_address(self.context, address)
        else:
            fip = api.get_floating_ip(self.context, fip_id)
        self.assertEqual(expected, fip)
    def test_get_floating_ip_unassociated(self):
        self._test_get_floating_ip(self.fip_unassociated, idx=0)
    def test_get_floating_ip_associated(self):
        self._test_get_floating_ip(self.fip_associated, idx=1)
    def test_get_floating_ip_by_address(self):
        self._test_get_floating_ip(self.fip_unassociated, idx=0,
                                   by_address=True)
    def test_get_floating_ip_by_address_associated(self):
        self._test_get_floating_ip(self.fip_associated, idx=1,
                                   by_address=True)
    def test_get_floating_ip_by_address_not_found(self):
        """No floating IP for the address -> FloatingIpNotFoundForAddress."""
        api = neutronapi.API()
        address = self.fip_unassociated['floating_ip_address']
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': []})
        self.mox.ReplayAll()
        self.assertRaises(exception.FloatingIpNotFoundForAddress,
                          api.get_floating_ip_by_address,
                          self.context, address)
    def test_get_floating_ip_by_id_not_found(self):
        """Client 404 is translated to nova's FloatingIpNotFound."""
        api = neutronapi.API()
        NeutronNotFound = exceptions.NeutronClientException(status_code=404)
        floating_ip_id = self.fip_unassociated['id']
        self.moxed_client.show_floatingip(floating_ip_id).\
            AndRaise(NeutronNotFound)
        self.mox.ReplayAll()
        self.assertRaises(exception.FloatingIpNotFound,
                          api.get_floating_ip,
                          self.context, floating_ip_id)
    def test_get_floating_ip_raises_non404(self):
        """Non-404 client errors propagate unchanged."""
        api = neutronapi.API()
        NeutronNotFound = exceptions.NeutronClientException(status_code=0)
        floating_ip_id = self.fip_unassociated['id']
        self.moxed_client.show_floatingip(floating_ip_id).\
            AndRaise(NeutronNotFound)
        self.mox.ReplayAll()
        self.assertRaises(exceptions.NeutronClientException,
                          api.get_floating_ip,
                          self.context, floating_ip_id)
    def test_get_floating_ip_by_address_multiple_found(self):
        """Duplicate matches -> FloatingIpMultipleFoundForAddress."""
        api = neutronapi.API()
        address = self.fip_unassociated['floating_ip_address']
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_unassociated] * 2})
        self.mox.ReplayAll()
        self.assertRaises(exception.FloatingIpMultipleFoundForAddress,
                          api.get_floating_ip_by_address,
                          self.context, address)
    def test_get_floating_ips_by_project(self):
        """All of a project's floating IPs are listed and translated."""
        api = neutronapi.API()
        project_id = self.context.project_id
        self.moxed_client.list_floatingips(tenant_id=project_id).\
            AndReturn({'floatingips': [self.fip_unassociated,
                                       self.fip_associated]})
        # Pools come from external networks; ports resolve associations.
        search_opts = {'router:external': True}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool, self.fip_pool_nova]})
        self.moxed_client.list_ports(tenant_id=project_id).\
            AndReturn({'ports': self.port_data2})
        self.mox.ReplayAll()
        expected = [self._get_expected_fip_model(self.fip_unassociated),
                    self._get_expected_fip_model(self.fip_associated, idx=1)]
        fips = api.get_floating_ips_by_project(self.context)
        self.assertEqual(expected, fips)
    def _test_get_instance_id_by_floating_address(self, fip_data,
                                                  associated=False):
        """Helper: resolve a floating address to the owning instance id.

        If ``associated`` the port lookup yields the device_id; otherwise
        the expected result is None.
        """
        api = neutronapi.API()
        address = fip_data['floating_ip_address']
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [fip_data]})
        if associated:
            self.moxed_client.show_port(fip_data['port_id']).\
                AndReturn({'port': self.port_data2[1]})
        self.mox.ReplayAll()
        if associated:
            expected = self.port_data2[1]['device_id']
        else:
            expected = None
        fip = api.get_instance_id_by_floating_address(self.context, address)
        self.assertEqual(expected, fip)
    def test_get_instance_id_by_floating_address(self):
        """Unassociated floating IP resolves to no instance (None)."""
        self._test_get_instance_id_by_floating_address(self.fip_unassociated)
    def test_get_instance_id_by_floating_address_associated(self):
        """Associated floating IP resolves to the port's device_id."""
        self._test_get_instance_id_by_floating_address(self.fip_associated,
                                                       associated=True)
    def test_allocate_floating_ip(self):
        """Allocating from a named pool creates a floating IP on that
        external network and returns the new address.
        """
        api = neutronapi.API()
        pool_name = self.fip_pool['name']
        pool_id = self.fip_pool['id']
        # Pool name is resolved to the external network's id first.
        search_opts = {'router:external': True,
                       'fields': 'id',
                       'name': pool_name}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool]})
        self.moxed_client.create_floatingip(
            {'floatingip': {'floating_network_id': pool_id}}).\
            AndReturn({'floatingip': self.fip_unassociated})
        self.mox.ReplayAll()
        fip = api.allocate_floating_ip(self.context, 'ext_net')
        self.assertEqual(fip, self.fip_unassociated['floating_ip_address'])
    def test_allocate_floating_ip_addr_gen_fail(self):
        """IpAddressGenerationFailureClient maps to NoMoreFloatingIps."""
        api = neutronapi.API()
        pool_name = self.fip_pool['name']
        pool_id = self.fip_pool['id']
        search_opts = {'router:external': True,
                       'fields': 'id',
                       'name': pool_name}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool]})
        self.moxed_client.create_floatingip(
            {'floatingip': {'floating_network_id': pool_id}}).\
            AndRaise(exceptions.IpAddressGenerationFailureClient)
        self.mox.ReplayAll()
        self.assertRaises(exception.NoMoreFloatingIps,
                          api.allocate_floating_ip, self.context, 'ext_net')
    def test_allocate_floating_ip_exhausted_fail(self):
        """ExternalIpAddressExhaustedClient maps to NoMoreFloatingIps."""
        api = neutronapi.API()
        pool_name = self.fip_pool['name']
        pool_id = self.fip_pool['id']
        search_opts = {'router:external': True,
                       'fields': 'id',
                       'name': pool_name}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool]})
        self.moxed_client.create_floatingip(
            {'floatingip': {'floating_network_id': pool_id}}).\
            AndRaise(exceptions.ExternalIpAddressExhaustedClient)
        self.mox.ReplayAll()
        self.assertRaises(exception.NoMoreFloatingIps,
                          api.allocate_floating_ip, self.context, 'ext_net')
    def test_allocate_floating_ip_with_pool_id(self):
        """The pool may also be given by network id instead of name."""
        api = neutronapi.API()
        pool_id = self.fip_pool['id']
        # Lookup filters by 'id' rather than 'name'.
        search_opts = {'router:external': True,
                       'fields': 'id',
                       'id': pool_id}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool]})
        self.moxed_client.create_floatingip(
            {'floatingip': {'floating_network_id': pool_id}}).\
            AndReturn({'floatingip': self.fip_unassociated})
        self.mox.ReplayAll()
        fip = api.allocate_floating_ip(self.context, pool_id)
        self.assertEqual(fip, self.fip_unassociated['floating_ip_address'])
    def test_allocate_floating_ip_with_default_pool(self):
        """With no pool argument, the configured default pool is used."""
        api = neutronapi.API()
        pool_name = self.fip_pool_nova['name']
        pool_id = self.fip_pool_nova['id']
        search_opts = {'router:external': True,
                       'fields': 'id',
                       'name': pool_name}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool_nova]})
        self.moxed_client.create_floatingip(
            {'floatingip': {'floating_network_id': pool_id}}).\
            AndReturn({'floatingip': self.fip_unassociated})
        self.mox.ReplayAll()
        fip = api.allocate_floating_ip(self.context)
        self.assertEqual(fip, self.fip_unassociated['floating_ip_address'])
    def test_release_floating_ip(self):
        """Releasing an unassociated floating IP deletes it in Neutron."""
        api = neutronapi.API()
        address = self.fip_unassociated['floating_ip_address']
        fip_id = self.fip_unassociated['id']
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_unassociated]})
        self.moxed_client.delete_floatingip(fip_id)
        self.mox.ReplayAll()
        api.release_floating_ip(self.context, address)
    def test_disassociate_and_release_floating_ip(self):
        """disassociate_and_release deletes the (unassociated) floating IP;
        no update_floatingip call is expected for it.
        """
        api = neutronapi.API()
        address = self.fip_unassociated['floating_ip_address']
        fip_id = self.fip_unassociated['id']
        floating_ip = {'address': address}
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_unassociated]})
        self.moxed_client.delete_floatingip(fip_id)
        self.mox.ReplayAll()
        api.disassociate_and_release_floating_ip(self.context, None,
                                                 floating_ip)
    def test_release_floating_ip_associated(self):
        """Releasing a floating IP that is still associated raises
        FloatingIpAssociated instead of deleting it.
        """
        api = neutronapi.API()
        address = self.fip_associated['floating_ip_address']
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_associated]})
        self.mox.ReplayAll()
        self.assertRaises(exception.FloatingIpAssociated,
                          api.release_floating_ip, self.context, address)
    def _setup_mock_for_refresh_cache(self, api, instances):
        """Stub nw-info retrieval and the info-cache DB update for each
        instance so cache refreshes succeed without touching Neutron/DB.
        """
        nw_info = model.NetworkInfo()
        self.mox.StubOutWithMock(api, '_get_instance_nw_info')
        self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
        for instance in instances:
            api._get_instance_nw_info(mox.IgnoreArg(), instance).\
                AndReturn(nw_info)
            api.db.instance_info_cache_update(mox.IgnoreArg(),
                                              instance['uuid'],
                                              mox.IgnoreArg()).AndReturn(
                                                  fake_info_cache)
    def test_associate_floating_ip(self):
        """Associating a floating IP updates it with the instance port id
        and fixed address, then refreshes the instance's network cache.
        """
        api = neutronapi.API()
        address = self.fip_unassociated['floating_ip_address']
        fixed_address = self.port_address2
        fip_id = self.fip_unassociated['id']
        instance = self._fake_instance_object(self.instance)
        # The instance's compute port supplies the target port_id.
        search_opts = {'device_owner': 'compute:nova',
                       'device_id': instance.uuid}
        self.moxed_client.list_ports(**search_opts).\
            AndReturn({'ports': [self.port_data2[1]]})
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_unassociated]})
        self.moxed_client.update_floatingip(
            fip_id, {'floatingip': {'port_id': self.fip_associated['port_id'],
                                    'fixed_ip_address': fixed_address}})
        self._setup_mock_for_refresh_cache(api, [instance])

        self.mox.ReplayAll()
        api.associate_floating_ip(self.context, instance,
                                  address, fixed_address)
    @mock.patch('nova.objects.Instance.get_by_uuid')
    def test_reassociate_floating_ip(self, mock_get):
        """Re-associating an already associated floating IP moves it to a
        new port and refreshes the caches of both instances involved.
        """
        api = neutronapi.API()
        address = self.fip_associated['floating_ip_address']
        new_fixed_address = self.port_address
        fip_id = self.fip_associated['id']
        search_opts = {'device_owner': 'compute:nova',
                       'device_id': self.instance2['uuid']}
        self.moxed_client.list_ports(**search_opts).\
            AndReturn({'ports': [self.port_data2[0]]})
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_associated]})
        self.moxed_client.update_floatingip(
            fip_id, {'floatingip': {'port_id': 'my_portid1',
                                    'fixed_ip_address': new_fixed_address}})
        # show_port identifies the previous owner so its cache is refreshed.
        self.moxed_client.show_port(self.fip_associated['port_id']).\
            AndReturn({'port': self.port_data2[1]})

        mock_get.return_value = fake_instance.fake_instance_obj(
            self.context, **self.instance)
        instance2 = self._fake_instance_object(self.instance2)
        self._setup_mock_for_refresh_cache(api, [mock_get.return_value,
                                                 instance2])

        self.mox.ReplayAll()
        api.associate_floating_ip(self.context, instance2,
                                  address, new_fixed_address)
    def test_associate_floating_ip_not_found_fixed_ip(self):
        """A fixed address not present on the instance's port raises
        FixedIpNotFoundForAddress.
        """
        instance = self._fake_instance_object(self.instance)
        api = neutronapi.API()
        address = self.fip_associated['floating_ip_address']
        fixed_address = self.fip_associated['fixed_ip_address']
        search_opts = {'device_owner': 'compute:nova',
                       'device_id': self.instance['uuid']}
        self.moxed_client.list_ports(**search_opts).\
            AndReturn({'ports': [self.port_data2[0]]})

        self.mox.ReplayAll()
        self.assertRaises(exception.FixedIpNotFoundForAddress,
                          api.associate_floating_ip, self.context,
                          instance, address, fixed_address)
    def test_disassociate_floating_ip(self):
        """Disassociation clears the floating IP's port_id and refreshes
        the instance's network cache.
        """
        instance = self._fake_instance_object(self.instance)
        api = neutronapi.API()
        address = self.fip_associated['floating_ip_address']
        fip_id = self.fip_associated['id']

        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_associated]})
        self.moxed_client.update_floatingip(
            fip_id, {'floatingip': {'port_id': None}})
        self._setup_mock_for_refresh_cache(api, [instance])

        self.mox.ReplayAll()
        api.disassociate_floating_ip(self.context, instance, address)
    def test_add_fixed_ip_to_instance(self):
        """Adding a fixed IP appends another fixed_ips entry (same subnet)
        to the instance's port via update_port.
        """
        instance = self._fake_instance_object(self.instance)
        api = neutronapi.API()
        self._setup_mock_for_refresh_cache(api, [instance])
        network_id = 'my_netid1'
        search_opts = {'network_id': network_id}
        self.moxed_client.list_subnets(
            **search_opts).AndReturn({'subnets': self.subnet_data_n})

        search_opts = {'device_id': instance.uuid,
                       'device_owner': 'compute:nova',
                       'network_id': network_id}
        self.moxed_client.list_ports(
            **search_opts).AndReturn({'ports': self.port_data1})
        # The port ends up with two fixed_ips on the same subnet.
        port_req_body = {
            'port': {
                'fixed_ips': [{'subnet_id': 'my_subid1'},
                              {'subnet_id': 'my_subid1'}],
            },
        }
        port = self.port_data1[0]
        port['fixed_ips'] = [{'subnet_id': 'my_subid1'}]
        self.moxed_client.update_port('my_portid1',
                                      MyComparator(port_req_body)).AndReturn({'port': port})

        self.mox.ReplayAll()
        api.add_fixed_ip_to_instance(self.context,
                                     instance,
                                     network_id)
    def test_remove_fixed_ip_from_instance(self):
        """Removing a fixed IP clears the matching fixed_ips entry from the
        instance's port via update_port.
        """
        instance = self._fake_instance_object(self.instance)
        api = neutronapi.API()
        self._setup_mock_for_refresh_cache(api, [instance])
        address = '10.0.0.3'
        zone = 'compute:%s' % self.instance['availability_zone']
        search_opts = {'device_id': self.instance['uuid'],
                       'device_owner': zone,
                       'fixed_ips': 'ip_address=%s' % address}
        self.moxed_client.list_ports(
            **search_opts).AndReturn({'ports': self.port_data1})
        port_req_body = {
            'port': {
                'fixed_ips': [],
            },
        }
        port = self.port_data1[0]
        port['fixed_ips'] = []
        self.moxed_client.update_port('my_portid1',
                                      MyComparator(port_req_body)).AndReturn({'port': port})

        self.mox.ReplayAll()
        api.remove_fixed_ip_from_instance(self.context, instance,
                                          address)
    def test_list_floating_ips_without_l3_support(self):
        """A 404 from list_floatingips (no L3 extension) yields an empty
        list instead of an error.
        """
        api = neutronapi.API()
        NeutronNotFound = exceptions.NeutronClientException(
            status_code=404)
        self.moxed_client.list_floatingips(
            fixed_ip_address='1.1.1.1', port_id=1).AndRaise(NeutronNotFound)
        self.mox.ReplayAll()
        neutronapi.get_client('fake')
        floatingips = api._get_floating_ips_by_fixed_and_port(
            self.moxed_client, '1.1.1.1', 1)
        self.assertEqual(floatingips, [])
    def test_nw_info_get_ips(self):
        """_nw_info_get_ips returns fixed IPs with their floating IPs
        attached.
        """
        fake_port = {
            'fixed_ips': [
                {'ip_address': '1.1.1.1'}],
            'id': 'port-id',
            }
        api = neutronapi.API()
        self.mox.StubOutWithMock(api, '_get_floating_ips_by_fixed_and_port')
        api._get_floating_ips_by_fixed_and_port(
            self.moxed_client, '1.1.1.1', 'port-id').AndReturn(
                [{'floating_ip_address': '10.0.0.1'}])
        self.mox.ReplayAll()
        neutronapi.get_client('fake')
        result = api._nw_info_get_ips(self.moxed_client, fake_port)
        self.assertEqual(len(result), 1)
        self.assertEqual(result[0]['address'], '1.1.1.1')
        self.assertEqual(result[0]['floating_ips'][0]['address'], '10.0.0.1')
    def test_nw_info_get_subnets(self):
        """Only fixed IPs inside a subnet's CIDR are attached to it;
        2.2.2.2 falls outside 1.0.0.0/8 and is dropped.
        """
        fake_port = {
            'fixed_ips': [
                {'ip_address': '1.1.1.1'},
                {'ip_address': '2.2.2.2'}],
            'id': 'port-id',
            }
        fake_subnet = model.Subnet(cidr='1.0.0.0/8')
        fake_ips = [model.IP(x['ip_address']) for x in fake_port['fixed_ips']]
        api = neutronapi.API()
        self.mox.StubOutWithMock(api, '_get_subnets_from_port')
        api._get_subnets_from_port(self.context, fake_port).AndReturn(
            [fake_subnet])
        self.mox.ReplayAll()
        neutronapi.get_client('fake')
        subnets = api._nw_info_get_subnets(self.context, fake_port, fake_ips)
        self.assertEqual(len(subnets), 1)
        self.assertEqual(len(subnets[0]['ips']), 1)
        self.assertEqual(subnets[0]['ips'][0]['address'], '1.1.1.1')
    def _test_nw_info_build_network(self, vif_type):
        """Helper: build a network model for a port of ``vif_type``.

        Returns the (net, interface-id) pair so callers can assert
        vif-type-specific fields (bridge name, ovs interface id, ...).
        """
        fake_port = {
            'fixed_ips': [{'ip_address': '1.1.1.1'}],
            'id': 'port-id',
            'network_id': 'net-id',
            'binding:vif_type': vif_type,
            }
        fake_subnets = [model.Subnet(cidr='1.0.0.0/8')]
        fake_nets = [{'id': 'net-id', 'name': 'foo', 'tenant_id': 'tenant'}]
        api = neutronapi.API()
        self.mox.ReplayAll()
        neutronapi.get_client('fake')
        net, iid = api._nw_info_build_network(fake_port, fake_nets,
                                              fake_subnets)
        # Common fields are identical for every vif type.
        self.assertEqual(net['subnets'], fake_subnets)
        self.assertEqual(net['id'], 'net-id')
        self.assertEqual(net['label'], 'foo')
        self.assertEqual(net.get_meta('tenant_id'), 'tenant')
        self.assertEqual(net.get_meta('injected'), CONF.flat_injected)
        return net, iid
    def test_nw_info_build_network_ovs(self):
        """OVS vifs use the configured OVS bridge and keep the port id."""
        net, iid = self._test_nw_info_build_network(model.VIF_TYPE_OVS)
        self.assertEqual(net['bridge'], CONF.neutron.ovs_bridge)
        self.assertNotIn('should_create_bridge', net)
        self.assertEqual(iid, 'port-id')
    def test_nw_info_build_network_dvs(self):
        """DVS vifs use the network id as bridge and have no ovs fields."""
        net, iid = self._test_nw_info_build_network(model.VIF_TYPE_DVS)
        self.assertEqual('net-id', net['bridge'])
        self.assertNotIn('should_create_bridge', net)
        self.assertNotIn('ovs_interfaceid', net)
        self.assertIsNone(iid)
    def test_nw_info_build_network_bridge(self):
        """Linux-bridge vifs get a 'brq<net-id>' bridge that Nova creates."""
        net, iid = self._test_nw_info_build_network(model.VIF_TYPE_BRIDGE)
        self.assertEqual(net['bridge'], 'brqnet-id')
        self.assertTrue(net['should_create_bridge'])
        self.assertIsNone(iid)
    def test_nw_info_build_network_tap(self):
        """Tap vifs have neither a bridge nor an interface id."""
        net, iid = self._test_nw_info_build_network(model.VIF_TYPE_TAP)
        self.assertIsNone(net['bridge'])
        self.assertNotIn('should_create_bridge', net)
        self.assertIsNone(iid)
    def test_nw_info_build_network_other(self):
        """An unknown vif type yields no bridge and no interface id."""
        net, iid = self._test_nw_info_build_network(None)
        self.assertIsNone(net['bridge'])
        self.assertNotIn('should_create_bridge', net)
        self.assertIsNone(iid)
def test_nw_info_build_no_match(self):
fake_port = {
'fixed_ips': [{'ip_address': '1.1.1.1'}],
'id': 'port-id',
'network_id': 'net-id1',
'tenant_id': 'tenant',
'binding:vif_type': model.VIF_TYPE_OVS,
}
fake_subnets = [model.Subnet(cidr='1.0.0.0/8')]
fake_nets = [{'id': 'net-id2', 'name': 'foo', 'tenant_id': 'tenant'}]
api = neutronapi.API()
self.mox.ReplayAll()
neutronapi.get_client('fake')
net, iid = api._nw_info_build_network(fake_port, fake_nets,
fake_subnets)
self.assertEqual(fake_subnets, net['subnets'])
self.assertEqual('net-id1', net['id'])
self.assertEqual('net-id1', net['id'])
self.assertEqual('tenant', net['meta']['tenant_id'])
    def test_nw_info_build_network_vhostuser(self):
        """vhost-user vifs with OVS plug enabled behave like OVS vifs:
        configured OVS bridge and the port id as interface id.
        """
        fake_port = {
            'fixed_ips': [{'ip_address': '1.1.1.1'}],
            'id': 'port-id',
            'network_id': 'net-id',
            'binding:vif_type': model.VIF_TYPE_VHOSTUSER,
            'binding:vif_details': {
                    model.VIF_DETAILS_VHOSTUSER_OVS_PLUG: True
                                    }
            }
        fake_subnets = [model.Subnet(cidr='1.0.0.0/8')]
        fake_nets = [{'id': 'net-id', 'name': 'foo', 'tenant_id': 'tenant'}]
        api = neutronapi.API()
        self.mox.ReplayAll()
        neutronapi.get_client('fake')
        net, iid = api._nw_info_build_network(fake_port, fake_nets,
                                              fake_subnets)
        self.assertEqual(net['subnets'], fake_subnets)
        self.assertEqual(net['id'], 'net-id')
        self.assertEqual(net['label'], 'foo')
        self.assertEqual(net.get_meta('tenant_id'), 'tenant')
        self.assertEqual(net.get_meta('injected'), CONF.flat_injected)
        self.assertEqual(net['bridge'], CONF.neutron.ovs_bridge)
        self.assertNotIn('should_create_bridge', net)
        self.assertEqual(iid, 'port-id')
def test_build_network_info_model(self):
api = neutronapi.API()
fake_inst = objects.Instance()
fake_inst.project_id = 'fake'
fake_inst.uuid = 'uuid'
fake_inst.info_cache = objects.InstanceInfoCache()
fake_inst.info_cache.network_info = model.NetworkInfo()
fake_ports = [
# admin_state_up=True and status='ACTIVE' thus vif.active=True
{'id': 'port1',
'network_id': 'net-id',
'admin_state_up': True,
'status': 'ACTIVE',
'fixed_ips': [{'ip_address': '1.1.1.1'}],
'mac_address': 'de:ad:be:ef:00:01',
'binding:vif_type': model.VIF_TYPE_BRIDGE,
'binding:vnic_type': model.VNIC_TYPE_NORMAL,
'binding:vif_details': {},
},
# admin_state_up=False and status='DOWN' thus vif.active=True
{'id': 'port2',
'network_id': 'net-id',
'admin_state_up': False,
'status': 'DOWN',
'fixed_ips': [{'ip_address': '1.1.1.1'}],
'mac_address': 'de:ad:be:ef:00:02',
'binding:vif_type': model.VIF_TYPE_BRIDGE,
'binding:vnic_type': model.VNIC_TYPE_NORMAL,
'binding:vif_details': {},
},
# admin_state_up=True and status='DOWN' thus vif.active=False
{'id': 'port0',
'network_id': 'net-id',
'admin_state_up': True,
'status': 'DOWN',
'fixed_ips': [{'ip_address': '1.1.1.1'}],
'mac_address': 'de:ad:be:ef:00:03',
'binding:vif_type': model.VIF_TYPE_BRIDGE,
'binding:vnic_type': model.VNIC_TYPE_NORMAL,
'binding:vif_details': {},
},
# admin_state_up=True and status='ACTIVE' thus vif.active=True
{'id': 'port3',
'network_id': 'net-id',
'admin_state_up': True,
'status': 'ACTIVE',
'fixed_ips': [{'ip_address': '1.1.1.1'}],
'mac_address': 'de:ad:be:ef:00:04',
'binding:vif_type': model.VIF_TYPE_HW_VEB,
'binding:vnic_type': model.VNIC_TYPE_DIRECT,
'binding:profile': {'pci_vendor_info': '1137:0047',
'pci_slot': '0000:0a:00.1',
'physical_network': 'phynet1'},
'binding:vif_details': {model.VIF_DETAILS_PROFILEID: 'pfid'},
},
# admin_state_up=True and status='ACTIVE' thus vif.active=True
{'id': 'port4',
'network_id': 'net-id',
'admin_state_up': True,
'status': 'ACTIVE',
'fixed_ips': [{'ip_address': '1.1.1.1'}],
'mac_address': 'de:ad:be:ef:00:05',
'binding:vif_type': model.VIF_TYPE_802_QBH,
'binding:vnic_type': model.VNIC_TYPE_MACVTAP,
'binding:profile': {'pci_vendor_info': '1137:0047',
'pci_slot': '0000:0a:00.2',
'physical_network': 'phynet1'},
'binding:vif_details': {model.VIF_DETAILS_PROFILEID: 'pfid'},
},
# admin_state_up=True and status='ACTIVE' thus vif.active=True
# This port has no binding:vnic_type to verify default is assumed
{'id': 'port5',
'network_id': 'net-id',
'admin_state_up': True,
'status': 'ACTIVE',
'fixed_ips': [{'ip_address': '1.1.1.1'}],
'mac_address': 'de:ad:be:ef:00:06',
'binding:vif_type': model.VIF_TYPE_BRIDGE,
# No binding:vnic_type
'binding:vif_details': {},
},
# This does not match the networks we provide below,
# so it should be ignored (and is here to verify that)
{'id': 'port6',
'network_id': 'other-net-id',
'admin_state_up': True,
'status': 'DOWN',
'binding:vnic_type': model.VNIC_TYPE_NORMAL,
},
]
fake_subnets = [model.Subnet(cidr='1.0.0.0/8')]
fake_nets = [
{'id': 'net-id',
'name': 'foo',
'tenant_id': 'fake',
}
]
neutronapi.get_client(mox.IgnoreArg(), admin=True).MultipleTimes(
).AndReturn(self.moxed_client)
self.moxed_client.list_ports(
tenant_id='fake', device_id='uuid').AndReturn(
{'ports': fake_ports})
self.mox.StubOutWithMock(api, '_get_floating_ips_by_fixed_and_port')
self.mox.StubOutWithMock(api, '_get_subnets_from_port')
requested_ports = [fake_ports[2], fake_ports[0], fake_ports[1],
fake_ports[3], fake_ports[4], fake_ports[5]]
for requested_port in requested_ports:
api._get_floating_ips_by_fixed_and_port(
self.moxed_client, '1.1.1.1', requested_port['id']).AndReturn(
[{'floating_ip_address': '10.0.0.1'}])
for requested_port in requested_ports:
api._get_subnets_from_port(self.context, requested_port
).AndReturn(fake_subnets)
self.mox.StubOutWithMock(api, '_get_preexisting_port_ids')
api._get_preexisting_port_ids(fake_inst).AndReturn(['port5'])
self.mox.ReplayAll()
neutronapi.get_client('fake')
fake_inst.info_cache = objects.InstanceInfoCache.new(
self.context, 'fake-uuid')
fake_inst.info_cache.network_info = model.NetworkInfo.hydrate([])
nw_infos = api._build_network_info_model(
self.context, fake_inst,
fake_nets,
[fake_ports[2]['id'],
fake_ports[0]['id'],
fake_ports[1]['id'],
fake_ports[3]['id'],
fake_ports[4]['id'],
fake_ports[5]['id']],
preexisting_port_ids=['port3'])
self.assertEqual(len(nw_infos), 6)
index = 0
for nw_info in nw_infos:
self.assertEqual(nw_info['address'],
requested_ports[index]['mac_address'])
self.assertEqual(nw_info['devname'], 'tapport' + str(index))
self.assertIsNone(nw_info['ovs_interfaceid'])
self.assertEqual(nw_info['type'],
requested_ports[index]['binding:vif_type'])
if nw_info['type'] == model.VIF_TYPE_BRIDGE:
self.assertEqual(nw_info['network']['bridge'], 'brqnet-id')
self.assertEqual(nw_info['vnic_type'],
requested_ports[index].get('binding:vnic_type',
model.VNIC_TYPE_NORMAL))
self.assertEqual(nw_info.get('details'),
requested_ports[index].get('binding:vif_details'))
self.assertEqual(nw_info.get('profile'),
requested_ports[index].get('binding:profile'))
index += 1
self.assertEqual(nw_infos[0]['active'], False)
self.assertEqual(nw_infos[1]['active'], True)
self.assertEqual(nw_infos[2]['active'], True)
self.assertEqual(nw_infos[3]['active'], True)
self.assertEqual(nw_infos[4]['active'], True)
self.assertEqual(nw_infos[5]['active'], True)
self.assertEqual(nw_infos[0]['id'], 'port0')
self.assertEqual(nw_infos[1]['id'], 'port1')
self.assertEqual(nw_infos[2]['id'], 'port2')
self.assertEqual(nw_infos[3]['id'], 'port3')
self.assertEqual(nw_infos[4]['id'], 'port4')
self.assertEqual(nw_infos[5]['id'], 'port5')
self.assertFalse(nw_infos[0]['preserve_on_delete'])
self.assertFalse(nw_infos[1]['preserve_on_delete'])
self.assertFalse(nw_infos[2]['preserve_on_delete'])
self.assertTrue(nw_infos[3]['preserve_on_delete'])
self.assertFalse(nw_infos[4]['preserve_on_delete'])
self.assertTrue(nw_infos[5]['preserve_on_delete'])
    @mock.patch('nova.network.neutronv2.api.API._nw_info_get_subnets')
    @mock.patch('nova.network.neutronv2.api.API._nw_info_get_ips')
    @mock.patch('nova.network.neutronv2.api.API._nw_info_build_network')
    @mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
    @mock.patch('nova.network.neutronv2.api.API._gather_port_ids_and_networks')
    def test_build_network_info_model_empty(
            self, mock_gather_port_ids_and_networks,
            mock_get_preexisting_port_ids,
            mock_nw_info_build_network,
            mock_nw_info_get_ips,
            mock_nw_info_get_subnets):
        """With networks/port_ids omitted, the model is built from the
        instance's own ports (here: a single active bridge port).
        """
        api = neutronapi.API()
        fake_inst = objects.Instance()
        fake_inst.project_id = 'fake'
        fake_inst.uuid = 'uuid'
        fake_inst.info_cache = objects.InstanceInfoCache()
        fake_inst.info_cache.network_info = model.NetworkInfo()
        fake_ports = [
            # admin_state_up=True and status='ACTIVE' thus vif.active=True
            {'id': 'port1',
             'network_id': 'net-id',
             'admin_state_up': True,
             'status': 'ACTIVE',
             'fixed_ips': [{'ip_address': '1.1.1.1'}],
             'mac_address': 'de:ad:be:ef:00:01',
             'binding:vif_type': model.VIF_TYPE_BRIDGE,
             'binding:vnic_type': model.VNIC_TYPE_NORMAL,
             'binding:vif_details': {},
             },
            ]
        fake_subnets = [model.Subnet(cidr='1.0.0.0/8')]

        neutronapi.get_client(mox.IgnoreArg(), admin=True).MultipleTimes(
            ).AndReturn(self.moxed_client)
        self.moxed_client.list_ports(
            tenant_id='fake', device_id='uuid').AndReturn(
                {'ports': fake_ports})

        mock_gather_port_ids_and_networks.return_value = (None, None)
        mock_get_preexisting_port_ids.return_value = []
        mock_nw_info_build_network.return_value = (None, None)
        mock_nw_info_get_ips.return_value = []
        mock_nw_info_get_subnets.return_value = fake_subnets

        self.mox.ReplayAll()
        neutronapi.get_client('fake')

        nw_infos = api._build_network_info_model(
            self.context, fake_inst)
        self.assertEqual(len(nw_infos), 1)
    def test_get_subnets_from_port(self):
        """Subnet host_routes are translated into the subnet model's
        route list (cidr + gateway address).
        """
        api = neutronapi.API()

        port_data = copy.copy(self.port_data1[0])
        subnet_data1 = copy.copy(self.subnet_data1)
        subnet_data1[0]['host_routes'] = [
            {'destination': '192.168.0.0/24', 'nexthop': '1.0.0.10'}
        ]

        self.moxed_client.list_subnets(
            id=[port_data['fixed_ips'][0]['subnet_id']]
        ).AndReturn({'subnets': subnet_data1})
        # No DHCP ports on the network.
        self.moxed_client.list_ports(
            network_id=subnet_data1[0]['network_id'],
            device_owner='network:dhcp').AndReturn({'ports': []})
        self.mox.ReplayAll()

        subnets = api._get_subnets_from_port(self.context, port_data)

        self.assertEqual(len(subnets), 1)
        self.assertEqual(len(subnets[0]['routes']), 1)
        self.assertEqual(subnets[0]['routes'][0]['cidr'],
                         subnet_data1[0]['host_routes'][0]['destination'])
        self.assertEqual(subnets[0]['routes'][0]['gateway']['address'],
                         subnet_data1[0]['host_routes'][0]['nexthop'])
    def test_get_all_empty_list_networks(self):
        """No Neutron networks yields an empty NetworkList object."""
        api = neutronapi.API()
        self.moxed_client.list_networks().AndReturn({'networks': []})
        self.mox.ReplayAll()
        networks = api.get_all(self.context)
        self.assertIsInstance(networks, objects.NetworkList)
        self.assertEqual(0, len(networks))
    @mock.patch.object(neutronapi, 'get_client', return_value=mock.Mock())
    def test_get_port_vnic_info_1(self, mock_get_client):
        """A direct-vnic port yields its vnic type and the network's
        physical network name.
        """
        api = neutronapi.API()
        self.mox.ResetAll()
        test_port = {
            'port': {'id': 'my_port_id1',
                     'network_id': 'net-id',
                     'binding:vnic_type': model.VNIC_TYPE_DIRECT,
                     },
            }
        test_net = {'network': {'provider:physical_network': 'phynet1'}}

        mock_client = mock_get_client()
        mock_client.show_port.return_value = test_port
        mock_client.show_network.return_value = test_net
        vnic_type, phynet_name = api._get_port_vnic_info(
            self.context, mock_client, test_port['port']['id'])

        mock_client.show_port.assert_called_once_with(test_port['port']['id'],
            fields=['binding:vnic_type', 'network_id'])
        mock_client.show_network.assert_called_once_with(
            test_port['port']['network_id'],
            fields='provider:physical_network')
        self.assertEqual(model.VNIC_TYPE_DIRECT, vnic_type)
        self.assertEqual(phynet_name, 'phynet1')
    def _test_get_port_vnic_info(self, mock_get_client,
                                 binding_vnic_type=None):
        """Helper: non-SR-IOV ports resolve to VNIC_TYPE_NORMAL and no
        physical network (show_network is never consulted).
        """
        api = neutronapi.API()
        self.mox.ResetAll()
        test_port = {
            'port': {'id': 'my_port_id2',
                     'network_id': 'net-id',
                     },
            }
        if binding_vnic_type:
            test_port['port']['binding:vnic_type'] = binding_vnic_type

        mock_get_client.reset_mock()
        mock_client = mock_get_client()
        mock_client.show_port.return_value = test_port
        vnic_type, phynet_name = api._get_port_vnic_info(
            self.context, mock_client, test_port['port']['id'])

        mock_client.show_port.assert_called_once_with(test_port['port']['id'],
            fields=['binding:vnic_type', 'network_id'])
        self.assertEqual(model.VNIC_TYPE_NORMAL, vnic_type)
        self.assertFalse(phynet_name)
    @mock.patch.object(neutronapi, 'get_client', return_value=mock.Mock())
    def test_get_port_vnic_info_2(self, mock_get_client):
        """Explicit VNIC_TYPE_NORMAL binding behaves like the default."""
        self._test_get_port_vnic_info(mock_get_client,
                                      binding_vnic_type=model.VNIC_TYPE_NORMAL)
    @mock.patch.object(neutronapi, 'get_client', return_value=mock.Mock())
    def test_get_port_vnic_info_3(self, mock_get_client):
        """A missing binding:vnic_type defaults to VNIC_TYPE_NORMAL."""
        self._test_get_port_vnic_info(mock_get_client)
    @mock.patch.object(neutronapi.API, "_get_port_vnic_info")
    @mock.patch.object(neutronapi, 'get_client', return_value=mock.Mock())
    def test_create_pci_requests_for_sriov_ports(self, mock_get_client,
                                                 mock_get_port_vnic_info):
        """Only SR-IOV ports (direct/macvtap vnic types) generate PCI
        requests; the normal port and the plain network request do not.
        """
        api = neutronapi.API()
        self.mox.ResetAll()
        requested_networks = objects.NetworkRequestList(
            objects = [
                objects.NetworkRequest(port_id='my_portid1'),
                objects.NetworkRequest(network_id='net1'),
                objects.NetworkRequest(port_id='my_portid2'),
                objects.NetworkRequest(port_id='my_portid3'),
                objects.NetworkRequest(port_id='my_portid4')])
        pci_requests = objects.InstancePCIRequests(requests=[])
        # One vnic-type/physnet pair per port request, in order.
        mock_get_port_vnic_info.side_effect = [
                (model.VNIC_TYPE_DIRECT, 'phynet1'),
                (model.VNIC_TYPE_NORMAL, ''),
                (model.VNIC_TYPE_MACVTAP, 'phynet1'),
                (model.VNIC_TYPE_MACVTAP, 'phynet2')
            ]
        api.create_pci_requests_for_sriov_ports(
            None, pci_requests, requested_networks)
        self.assertEqual(3, len(pci_requests.requests))
        has_pci_request_id = [net.pci_request_id is not None for net in
                              requested_networks.objects]
        expected_results = [True, False, False, True, True]
        self.assertEqual(expected_results, has_pci_request_id)
class TestNeutronv2WithMock(test.TestCase):
"""Used to test Neutron V2 API with mock."""
    def setUp(self):
        """Create the API under test and a request context fixture."""
        super(TestNeutronv2WithMock, self).setUp()
        self.api = neutronapi.API()
        self.context = context.RequestContext(
            'fake-user', 'fake-project',
            auth_token='bff4a5a6b9eb4ea2a6efec6eefb77936')
    @mock.patch('oslo_concurrency.lockutils.lock')
    def test_get_instance_nw_info_locks_per_instance(self, mock_lock):
        """get_instance_nw_info takes a per-instance refresh_cache lock."""
        instance = objects.Instance(uuid=uuid.uuid4())
        api = neutronapi.API()
        # Raising from the lock proves it is acquired before any work.
        mock_lock.side_effect = test.TestingException
        self.assertRaises(test.TestingException,
                          api.get_instance_nw_info, 'context', instance)
        mock_lock.assert_called_once_with('refresh_cache-%s' % instance.uuid)
    @mock.patch('nova.network.neutronv2.api.LOG')
    def test_get_instance_nw_info_verify_duplicates_ignored(self, mock_log):
        """test that the returned networks & port_ids from
        _gather_port_ids_and_networks doesn't contain any duplicates

        The test fakes an instance with two ports connected to two networks.
        The _gather_port_ids_and_networks method will be called with the
        instance and a list of port ids of which one port id is configured
        already to the instance (== duplicate #1) and a list of
        networks that already contains a network to which an instance port
        is connected (== duplicate #2).

        All-in-all, we expect the resulting port ids list to contain 3 items
        (["instance_port_1", "port_1", "port_2"]) and the resulting networks
        list to contain 3 items (["net_1", "net_2", "instance_network_1"])
        while the warning message for duplicate items was executed twice
        (due to "duplicate #1" & "duplicate #2")
        """
        networks = [model.Network(id="net_1"),
                    model.Network(id="net_2")]
        # "port_1" duplicates a port already in the instance's info cache.
        port_ids = ["port_1", "port_2"]

        instance_networks = [{"id": "instance_network_1",
                              "name": "fake_network",
                              "tenant_id": "fake_tenant_id"}]
        instance_port_ids = ["instance_port_1"]

        network_info = model.NetworkInfo(
            [{'id': port_ids[0],
              'network': networks[0]},
             {'id': instance_port_ids[0],
              'network': model.Network(
                  id=instance_networks[0]["id"],
                  label=instance_networks[0]["name"],
                  meta={"tenant_id": instance_networks[0]["tenant_id"]})}]
        )

        instance_uuid = uuid.uuid4()
        instance = objects.Instance(uuid=instance_uuid,
                                    info_cache=objects.InstanceInfoCache(
                                        context=self.context,
                                        instance_uuid=instance_uuid,
                                        network_info=network_info))

        new_networks, new_port_ids = self.api._gather_port_ids_and_networks(
            self.context, instance, networks, port_ids)

        self.assertEqual(new_networks, networks + instance_networks)
        self.assertEqual(new_port_ids, instance_port_ids + port_ids)
        # One warning per detected duplicate (port_1 and net_1).
        self.assertEqual(2, mock_log.warning.call_count)
    @mock.patch('oslo_concurrency.lockutils.lock')
    @mock.patch.object(neutronapi.API, '_get_instance_nw_info')
    @mock.patch('nova.network.base_api.update_instance_cache_with_nw_info')
    def test_get_instance_nw_info(self, mock_update, mock_get, mock_lock):
        """get_instance_nw_info delegates to _get_instance_nw_info and
        updates the instance cache (without propagating to cells).
        """
        fake_result = mock.sentinel.get_nw_info_result
        mock_get.return_value = fake_result
        instance = fake_instance.fake_instance_obj(self.context)
        result = self.api.get_instance_nw_info(self.context, instance)
        mock_get.assert_called_once_with(self.context, instance)
        mock_update.assert_called_once_with(self.api, self.context, instance,
                                            nw_info=fake_result,
                                            update_cells=False)
        self.assertEqual(fake_result, result)
    def _test_validate_networks_fixed_ip_no_dup(self, nets, requested_networks,
                                                ids, list_port_values):
        """Helper: validate_networks succeeds when the requested fixed IPs
        are not already in use.

        ``list_port_values`` is a list of (search_opts, response) pairs the
        fake list_ports answers with; any unexpected call fails the test.
        """

        def _fake_list_ports(**search_opts):
            for args, return_value in list_port_values:
                if args == search_opts:
                    return return_value
            self.fail('Unexpected call to list_ports %s' % search_opts)

        with contextlib.nested(
            mock.patch.object(client.Client, 'list_ports',
                              side_effect=_fake_list_ports),
            mock.patch.object(client.Client, 'list_networks',
                              return_value={'networks': nets}),
            mock.patch.object(client.Client, 'show_quota',
                              return_value={'quota': {'port': 50}})) as (
                list_ports_mock, list_networks_mock, show_quota_mock):

            self.api.validate_networks(self.context, requested_networks, 1)

            self.assertEqual(len(list_port_values),
                             len(list_ports_mock.call_args_list))
            list_networks_mock.assert_called_once_with(id=ids)
            show_quota_mock.assert_called_once_with(tenant_id='fake-project')
def test_validate_networks_over_limit_quota(self):
    """Test validates that a relevant exception is being raised when
    there are more ports defined, than there is a quota for it.
    """
    requested_networks = [('my_netid1', '10.0.1.2', None, None),
                          ('my_netid2', '10.0.1.3', None, None)]

    # Canned neutron responses: no conflicting fixed IPs, but the tenant
    # already owns 5 ports while the quota (below) allows only 1.
    list_port_values = [({'network_id': 'my_netid1',
                          'fixed_ips': 'ip_address=10.0.1.2',
                          'fields': 'device_id'},
                         {'ports': []}),
                        ({'network_id': 'my_netid2',
                          'fixed_ips': 'ip_address=10.0.1.3',
                          'fields': 'device_id'},
                         {'ports': []}),
                        ({'tenant_id': 'fake-project'},
                         {'ports': [1, 2, 3, 4, 5]})]

    nets = [{'subnets': '1'}, {'subnets': '2'}]

    def _fake_list_ports(**search_opts):
        for args, return_value in list_port_values:
            if args == search_opts:
                return return_value

    with contextlib.nested(
        mock.patch.object(self.api, '_get_available_networks',
                          return_value=nets),
        mock.patch.object(client.Client, 'list_ports',
                          side_effect=_fake_list_ports),
        mock.patch.object(client.Client, 'show_quota',
                          return_value={'quota': {'port': 1}})):
        exc = self.assertRaises(exception.PortLimitExceeded,
                                self.api.validate_networks,
                                self.context, requested_networks, 1)
        expected_exception_msg = ('The number of defined ports: '
                                  '%(ports)d is over the limit: '
                                  '%(quota)d' %
                                  {'ports': 5,
                                   'quota': 1})
        self.assertEqual(expected_exception_msg, str(exc))
def test_validate_networks_fixed_ip_no_dup1(self):
    # Test validation for a request for a network with a
    # fixed ip that is not already in use because no fixed ips in use
    nets1 = [{'id': 'my_netid1',
              'name': 'my_netname1',
              'subnets': ['mysubnid1'],
              'tenant_id': 'fake-project'}]

    requested_networks = [('my_netid1', '10.0.1.2', None, None)]
    ids = ['my_netid1']
    # Empty 'ports' responses: no one holds 10.0.1.2 yet and the tenant
    # has no ports counted against quota.
    list_port_values = [({'network_id': 'my_netid1',
                          'fixed_ips': 'ip_address=10.0.1.2',
                          'fields': 'device_id'},
                         {'ports': []}),
                        ({'tenant_id': 'fake-project'},
                         {'ports': []})]
    self._test_validate_networks_fixed_ip_no_dup(nets1, requested_networks,
                                                 ids, list_port_values)

def test_validate_networks_fixed_ip_no_dup2(self):
    # Test validation for a request for a network with a
    # fixed ip that is not already in use because not used on this net id
    nets2 = [{'id': 'my_netid1',
              'name': 'my_netname1',
              'subnets': ['mysubnid1'],
              'tenant_id': 'fake-project'},
             {'id': 'my_netid2',
              'name': 'my_netname2',
              'subnets': ['mysubnid2'],
              'tenant_id': 'fake-project'}]

    requested_networks = [('my_netid1', '10.0.1.2', None, None),
                          ('my_netid2', '10.0.1.3', None, None)]
    ids = ['my_netid1', 'my_netid2']
    # Per-network fixed-IP probes return no hits for either address.
    list_port_values = [({'network_id': 'my_netid1',
                          'fixed_ips': 'ip_address=10.0.1.2',
                          'fields': 'device_id'},
                         {'ports': []}),
                        ({'network_id': 'my_netid2',
                          'fixed_ips': 'ip_address=10.0.1.3',
                          'fields': 'device_id'},
                         {'ports': []}),
                        ({'tenant_id': 'fake-project'},
                         {'ports': []})]

    self._test_validate_networks_fixed_ip_no_dup(nets2, requested_networks,
                                                 ids, list_port_values)
def test_validate_networks_fixed_ip_dup(self):
    # Test validation for a request for a network with a
    # fixed ip that is already in use
    requested_networks = [('my_netid1', '10.0.1.2', None, None)]
    list_port_mock_params = {'network_id': 'my_netid1',
                             'fixed_ips': 'ip_address=10.0.1.2',
                             'fields': 'device_id'}
    # A non-empty result here means another device already owns the IP.
    list_port_mock_return = {'ports': [({'device_id': 'my_deviceid'})]}

    with mock.patch.object(client.Client, 'list_ports',
                           return_value=list_port_mock_return) as (
        list_ports_mock):
        self.assertRaises(exception.FixedIpAlreadyInUse,
                          self.api.validate_networks,
                          self.context, requested_networks, 1)
        list_ports_mock.assert_called_once_with(**list_port_mock_params)
def test_allocate_floating_ip_exceed_limit(self):
    # Verify that the correct exception is thrown when quota exceed
    pool_name = 'dummy'
    api = neutronapi.API()
    with contextlib.nested(
        mock.patch.object(client.Client, 'create_floatingip'),
        mock.patch.object(api,
                          '_get_floating_ip_pool_id_by_name_or_id')) as (
        create_mock, get_mock):
        # Neutron's over-quota error must be translated to nova's
        # FloatingIpLimitExceeded.
        create_mock.side_effect = exceptions.OverQuotaClient()

        self.assertRaises(exception.FloatingIpLimitExceeded,
                          api.allocate_floating_ip,
                          self.context, pool_name)

def test_allocate_floating_ip_no_ipv4_subnet(self):
    """A BadRequest from neutron (no IPv4 subnet on the external net)
    must surface as FloatingIpBadRequest.
    """
    api = neutronapi.API()
    net_id = uuid.uuid4()
    error_msg = ('Bad floatingip request: Network %s does not contain '
                 'any IPv4 subnet' % net_id)
    with contextlib.nested(
        mock.patch.object(client.Client, 'create_floatingip'),
        mock.patch.object(api,
                          '_get_floating_ip_pool_id_by_name_or_id')) as (
        create_mock, get_mock):
        create_mock.side_effect = exceptions.BadRequest(error_msg)

        self.assertRaises(exception.FloatingIpBadRequest,
                          api.allocate_floating_ip, self.context,
                          'ext_net')
def test_create_port_for_instance_no_more_ip(self):
    """An IP-address-generation failure from neutron's create_port must
    be translated to nova's NoMoreFixedIps.
    """
    instance = fake_instance.fake_instance_obj(self.context)
    net = {'id': 'my_netid1',
           'name': 'my_netname1',
           'subnets': ['mysubnid1'],
           'tenant_id': instance['project_id']}

    with mock.patch.object(client.Client, 'create_port',
        side_effect=exceptions.IpAddressGenerationFailureClient()) as (
        create_port_mock):
        zone = 'compute:%s' % instance['availability_zone']
        port_req_body = {'port': {'device_id': instance['uuid'],
                                  'device_owner': zone}}
        self.assertRaises(exception.NoMoreFixedIps,
                          self.api._create_port,
                          neutronapi.get_client(self.context),
                          instance, net['id'], port_req_body)
        create_port_mock.assert_called_once_with(port_req_body)
@mock.patch.object(client.Client, 'create_port',
                   side_effect=exceptions.MacAddressInUseClient())
def test_create_port_for_instance_mac_address_in_use(self,
                                                     create_port_mock):
    """A MAC-address conflict from neutron must raise nova's PortInUse."""
    # Create fake data.
    instance = fake_instance.fake_instance_obj(self.context)
    net = {'id': 'my_netid1',
           'name': 'my_netname1',
           'subnets': ['mysubnid1'],
           'tenant_id': instance['project_id']}
    zone = 'compute:%s' % instance['availability_zone']
    port_req_body = {'port': {'device_id': instance['uuid'],
                              'device_owner': zone,
                              'mac_address': 'XX:XX:XX:XX:XX:XX'}}
    available_macs = set(['XX:XX:XX:XX:XX:XX'])
    # Run the code.
    self.assertRaises(exception.PortInUse,
                      self.api._create_port,
                      neutronapi.get_client(self.context),
                      instance, net['id'], port_req_body,
                      available_macs=available_macs)
    # Assert the calls.
    create_port_mock.assert_called_once_with(port_req_body)

@mock.patch.object(client.Client, 'create_port',
                   side_effect=exceptions.IpAddressInUseClient())
def test_create_port_for_fixed_ip_in_use(self, create_port_mock):
    """A fixed-IP conflict from neutron must raise FixedIpAlreadyInUse."""
    # Create fake data.
    instance = fake_instance.fake_instance_obj(self.context)
    net = {'id': 'my_netid1',
           'name': 'my_netname1',
           'subnets': ['mysubnid1'],
           'tenant_id': instance['project_id']}
    zone = 'compute:%s' % instance['availability_zone']
    port_req_body = {'port': {'device_id': instance['uuid'],
                              'device_owner': zone,
                              'mac_address': 'XX:XX:XX:XX:XX:XX'}}
    fake_ip = '1.1.1.1'
    # Run the code.
    self.assertRaises(exception.FixedIpAlreadyInUse,
                      self.api._create_port,
                      neutronapi.get_client(self.context),
                      instance, net['id'], port_req_body,
                      fixed_ip=fake_ip)
    # Assert the calls.
    create_port_mock.assert_called_once_with(port_req_body)
@mock.patch.object(client.Client, 'create_port',
                   side_effect=exceptions.InvalidIpForNetworkClient())
def test_create_port_with_invalid_ip_for_network(self, create_port_mock):
    """An invalid-IP-for-network error must raise InvalidInput with a
    message naming the offending IP and network.
    """
    # Create fake data.
    instance = fake_instance.fake_instance_obj(self.context)
    net = {'id': 'my_netid1',
           'name': 'my_netname1',
           'subnets': ['mysubnid1'],
           'tenant_id': instance['project_id']}
    zone = 'compute:%s' % instance['availability_zone']
    port_req_body = {'port': {'device_id': instance['uuid'],
                              'device_owner': zone,
                              'mac_address': 'XX:XX:XX:XX:XX:XX'}}
    fake_ip = '1.1.1.1'
    # Run the code.
    exc = self.assertRaises(exception.InvalidInput,
                            self.api._create_port,
                            neutronapi.get_client(self.context),
                            instance, net['id'], port_req_body,
                            fixed_ip=fake_ip)
    # Assert the exception message
    expected_exception_msg = ('Invalid input received: Fixed IP %(ip)s is '
                              'not a valid ip address for network '
                              '%(net_id)s.' %
                              {'ip': fake_ip, 'net_id': net['id']})
    self.assertEqual(expected_exception_msg, str(exc))
    # Assert the calls.
    create_port_mock.assert_called_once_with(port_req_body)
def test_get_network_detail_not_found(self):
    """show_network raising NetworkNotFoundClient must surface as
    nova's NetworkNotFound.
    """
    api = neutronapi.API()
    expected_exc = exceptions.NetworkNotFoundClient()
    network_uuid = '02cacbca-7d48-4a2c-8011-43eecf8a9786'
    with mock.patch.object(client.Client, 'show_network',
                           side_effect=expected_exc) as (
        fake_show_network):
        self.assertRaises(exception.NetworkNotFound,
                          api.get,
                          self.context,
                          network_uuid)
        fake_show_network.assert_called_once_with(network_uuid)
@mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
@mock.patch('nova.network.neutronv2.api.API.'
            '_refresh_neutron_extensions_cache')
def test_deallocate_for_instance_uses_delete_helper(self,
                                                    mock_refresh,
                                                    mock_preexisting):
    """deallocate_for_instance looks up the instance's ports and hands
    them to _delete_ports with raise_if_fail=True.
    """
    # setup fake data
    instance = fake_instance.fake_instance_obj(self.context)
    mock_preexisting.return_value = []
    port_data = {'ports': [{'id': str(uuid.uuid4())}]}
    ports = set([port['id'] for port in port_data.get('ports')])
    api = neutronapi.API()
    # setup mocks
    mock_client = mock.Mock()
    mock_client.list_ports.return_value = port_data
    with contextlib.nested(
        mock.patch.object(neutronapi, 'get_client',
                          return_value=mock_client),
        mock.patch.object(api, '_delete_ports')
    ) as (
        mock_get_client, mock_delete
    ):
        # run the code
        api.deallocate_for_instance(self.context, instance)
        # assert the calls
        mock_client.list_ports.assert_called_once_with(
            device_id=instance.uuid)
        mock_delete.assert_called_once_with(
            mock_client, instance, ports, raise_if_fail=True)

def _test_delete_ports(self, expect_raise):
    """Helper: first port deletion fails; verify the failure is either
    propagated or swallowed depending on raise_if_fail.
    """
    results = [exceptions.NeutronClientException, None]
    mock_client = mock.Mock()
    with mock.patch.object(mock_client, 'delete_port',
                           side_effect=results):
        api = neutronapi.API()
        api._delete_ports(mock_client, {'uuid': 'foo'}, ['port1', 'port2'],
                          raise_if_fail=expect_raise)

def test_delete_ports_raise(self):
    self.assertRaises(exceptions.NeutronClientException,
                      self._test_delete_ports, True)

def test_delete_ports_no_raise(self):
    self._test_delete_ports(False)

def test_delete_ports_never_raise_404(self):
    # A port that is already gone is not an error, even with
    # raise_if_fail=True.
    mock_client = mock.Mock()
    mock_client.delete_port.side_effect = exceptions.PortNotFoundClient
    api = neutronapi.API()
    api._delete_ports(mock_client, {'uuid': 'foo'}, ['port1'],
                      raise_if_fail=True)
    mock_client.delete_port.assert_called_once_with('port1')
@mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
def test_deallocate_port_for_instance_fails(self, mock_preexisting):
    """If port deletion fails, the error propagates and the network info
    cache must not be refreshed.
    """
    mock_preexisting.return_value = []
    mock_client = mock.Mock()
    api = neutronapi.API()
    with contextlib.nested(
        mock.patch.object(neutronapi, 'get_client',
                          return_value=mock_client),
        mock.patch.object(api, '_delete_ports',
                          side_effect=exceptions.Unauthorized),
        mock.patch.object(api, 'get_instance_nw_info')
    ) as (
        get_client, delete_ports, get_nw_info
    ):
        self.assertRaises(exceptions.Unauthorized,
                          api.deallocate_port_for_instance,
                          self.context, instance={'uuid': 'fake'},
                          port_id='fake')
    # make sure that we didn't try to reload nw info
    self.assertFalse(get_nw_info.called)

@mock.patch.object(neutronapi, 'get_client', return_value=mock.Mock())
def _test_show_port_exceptions(self, client_exc, expected_nova_exc,
                               get_client_mock):
    """Helper: show_port raising ``client_exc`` must surface as
    ``expected_nova_exc``.
    """
    show_port_mock = mock.Mock(side_effect=client_exc)
    get_client_mock.return_value.show_port = show_port_mock
    self.assertRaises(expected_nova_exc, self.api.show_port,
                      self.context, 'fake_port_id')

def test_show_port_not_found(self):
    self._test_show_port_exceptions(exceptions.PortNotFoundClient,
                                    exception.PortNotFound)

def test_show_port_forbidden(self):
    self._test_show_port_exceptions(exceptions.Unauthorized,
                                    exception.Forbidden)

def test_show_port_unknown_exception(self):
    self._test_show_port_exceptions(exceptions.NeutronClientException,
                                    exception.NovaException)
def test_get_network(self):
    """API.get() wraps neutron's show_network result in a Network object
    whose label mirrors the neutron network name.
    """
    fake_network = {'id': 'fake-uuid', 'name': 'fake-network'}
    api = neutronapi.API()
    with mock.patch.object(client.Client, 'show_network',
                           return_value={'network': fake_network}):
        result = api.get(self.context, 'fake-uuid')
    self.assertEqual('fake-network', result.label)
    self.assertEqual('fake-network', result.name)
    self.assertEqual('fake-uuid', result.uuid)
def test_get_all_networks(self):
    """API.get_all() returns a NetworkList mirroring list_networks()."""
    listing = {'networks': [
        {'id': 'fake-uuid1', 'name': 'fake-network1'},
        {'id': 'fake-uuid2', 'name': 'fake-network2'},
    ]}
    api = neutronapi.API()
    with mock.patch.object(client.Client, 'list_networks',
                           return_value=listing):
        nets = api.get_all(self.context)
    self.assertIsInstance(nets, objects.NetworkList)
    self.assertEqual(2, len(nets))
    expected = [('fake-uuid1', 'fake-network1'),
                ('fake-uuid2', 'fake-network2')]
    for index, (net_uuid, net_name) in enumerate(expected):
        self.assertEqual(net_uuid, nets[index].uuid)
        self.assertEqual(net_name, nets[index].name)
@mock.patch.object(neutronapi.API, "_refresh_neutron_extensions_cache")
@mock.patch.object(neutronapi, 'get_client', return_value=mock.Mock())
def test_update_instance_vnic_index(self, mock_get_client,
                                    mock_refresh_extensions):
    """With the vnic-index extension enabled, update_instance_vnic_index
    pushes the index to the port via update_port.
    """
    api = neutronapi.API()
    api.extensions = set([constants.VNIC_INDEX_EXT])
    mock_client = mock_get_client()
    mock_client.update_port.return_value = 'port'

    instance = {'project_id': '9d049e4b60b64716978ab415e6fbd5c0',
                'uuid': str(uuid.uuid4()),
                'display_name': 'test_instance',
                'availability_zone': 'nova',
                'host': 'some_host'}
    instance = objects.Instance(**instance)
    vif = {'id': 'fake-port-id'}
    api.update_instance_vnic_index(self.context, instance, vif, 7)
    port_req_body = {'port': {'vnic_index': 7}}
    mock_client.update_port.assert_called_once_with('fake-port-id',
                                                    port_req_body)

@mock.patch.object(neutronapi, 'get_client', return_value=mock.Mock())
def test_update_port_bindings_for_instance_same_host(self,
                                                     get_client_mock):
    """Only ports whose binding:host_id differs from the target host are
    updated during a binding refresh.
    """
    instance = fake_instance.fake_instance_obj(self.context)
    self.api._has_port_binding_extension = mock.Mock(return_value=True)

    # We test two ports, one with the same host as the host passed in and
    # one where binding:host_id isn't set, so we update that port.
    fake_ports = {'ports': [
                    {'id': 'fake-port-1',
                     'binding:host_id': instance.host},
                    {'id': 'fake-port-2'}]}
    list_ports_mock = mock.Mock(return_value=fake_ports)
    get_client_mock.return_value.list_ports = list_ports_mock

    update_port_mock = mock.Mock()
    get_client_mock.return_value.update_port = update_port_mock

    self.api._update_port_binding_for_instance(self.context, instance,
                                               instance.host)
    # Assert that update_port was only called on the port without a host.
    update_port_mock.assert_called_once_with(
        'fake-port-2', {'port': {'binding:host_id': instance.host}})
@mock.patch('nova.network.neutronv2.api.compute_utils')
def test_get_preexisting_port_ids(self, mocked_comp_utils):
    """Only VIFs flagged preserve_on_delete count as pre-existing ports."""
    mocked_comp_utils.get_nw_info_for_instance.return_value = [model.VIF(
        id='1', preserve_on_delete=False), model.VIF(
        id='2', preserve_on_delete=True), model.VIF(
        id='3', preserve_on_delete=True)]
    result = self.api._get_preexisting_port_ids(None)
    self.assertEqual(['2', '3'], result, "Invalid preexisting ports")
def _test_unbind_ports_get_client(self, mock_neutron,
                                  mock_has_ext, has_ext=False):
    """Helper: an admin client is fetched only when the port-binding
    extension is available; otherwise no client lookup happens.
    """
    mock_ctx = mock.Mock(is_admin=False)
    mock_has_ext.return_value = has_ext
    ports = ["1", "2", "3"]

    self.api._unbind_ports(mock_ctx, ports, mock_neutron)

    get_client_calls = []
    get_client_calls.append(mock.call(mock_ctx)
                            if not has_ext else
                            mock.call(mock_ctx, admin=True))

    if has_ext:
        self.assertEqual(1, mock_neutron.call_count)
        mock_neutron.assert_has_calls(get_client_calls, True)
    else:
        self.assertEqual(0, mock_neutron.call_count)

@mock.patch('nova.network.neutronv2.api.API._has_port_binding_extension')
@mock.patch('nova.network.neutronv2.api.get_client')
def test_unbind_ports_get_client_binding_extension(self,
                                                   mock_neutron,
                                                   mock_has_ext):
    self._test_unbind_ports_get_client(mock_neutron, mock_has_ext, True)

@mock.patch('nova.network.neutronv2.api.API._has_port_binding_extension')
@mock.patch('nova.network.neutronv2.api.get_client')
def test_unbind_ports_get_client(self, mock_neutron, mock_has_ext):
    self._test_unbind_ports_get_client(mock_neutron, mock_has_ext)

def _test_unbind_ports(self, mock_neutron, mock_has_ext, has_ext=False):
    """Helper: each port gets its device fields cleared; binding:host_id
    is reset to None only when the binding extension is present.
    """
    mock_client = mock.Mock()
    mock_update_port = mock.Mock()
    mock_client.update_port = mock_update_port
    mock_ctx = mock.Mock(is_admin=False)
    mock_has_ext.return_value = has_ext
    mock_neutron.return_value = mock_client
    ports = ["1", "2", "3"]
    api = neutronapi.API()
    api._unbind_ports(mock_ctx, ports, mock_client)

    body = {'port': {'device_id': '', 'device_owner': ''}}
    if has_ext:
        body['port']['binding:host_id'] = None
    update_port_calls = []
    for p in ports:
        update_port_calls.append(mock.call(p, body))

    self.assertEqual(3, mock_update_port.call_count)
    mock_update_port.assert_has_calls(update_port_calls)

@mock.patch('nova.network.neutronv2.api.API._has_port_binding_extension')
@mock.patch('nova.network.neutronv2.api.get_client')
def test_unbind_ports_binding_ext(self, mock_neutron, mock_has_ext):
    self._test_unbind_ports(mock_neutron, mock_has_ext, True)

@mock.patch('nova.network.neutronv2.api.API._has_port_binding_extension')
@mock.patch('nova.network.neutronv2.api.get_client')
def test_unbind_ports(self, mock_neutron, mock_has_ext):
    self._test_unbind_ports(mock_neutron, mock_has_ext, False)

@mock.patch('nova.network.neutronv2.api.API._has_port_binding_extension')
def test_unbind_ports_no_port_ids(self, mock_has_ext):
    # Tests that None entries in the ports list are filtered out.
    mock_client = mock.Mock()
    mock_update_port = mock.Mock()
    mock_client.update_port = mock_update_port
    mock_ctx = mock.Mock(is_admin=False)
    mock_has_ext.return_value = True

    api = neutronapi.API()
    api._unbind_ports(mock_ctx, [None], mock_client, mock_client)

    self.assertFalse(mock_update_port.called)
@mock.patch('nova.network.neutronv2.api.API.get_instance_nw_info')
@mock.patch('nova.network.neutronv2.api.excutils')
@mock.patch('nova.network.neutronv2.api.API._delete_ports')
@mock.patch('nova.network.neutronv2.api.API.'
            '_check_external_network_attach')
@mock.patch('nova.network.neutronv2.api.LOG')
@mock.patch('nova.network.neutronv2.api.API._unbind_ports')
@mock.patch('nova.network.neutronv2.api.API._has_port_binding_extension')
@mock.patch('nova.network.neutronv2.api.API.'
            '_populate_neutron_extension_values')
@mock.patch('nova.network.neutronv2.api.API._get_available_networks')
@mock.patch('nova.network.neutronv2.api.get_client')
def test_allocate_for_instance_unbind(self, mock_ntrn,
                                      mock_avail_nets,
                                      mock_ext_vals,
                                      mock_has_pbe,
                                      mock_unbind,
                                      mock_log,
                                      mock_cena,
                                      mock_del_ports,
                                      mock_exeu,
                                      mock_giwn):
    """If binding the last of several requested ports fails, the ports
    that were already bound must be unbound again.
    """
    mock_nc = mock.Mock()

    def show_port(port_id):
        # All requested ports live on the same network and tenant.
        return {'port': {'network_id': 'net-1', 'id': port_id,
                         'tenant_id': 'proj-1'}}

    mock_nc.show_port = show_port
    mock_ntrn.return_value = mock_nc
    # The third update_port call (for 'fail-port') blows up.
    mock_nc.update_port.side_effect = [True, True, Exception]
    mock_inst = mock.Mock(project_id="proj-1",
                          availability_zone='zone-1',
                          uuid='inst-1')
    mock_has_pbe.return_value = False
    nw_req = objects.NetworkRequestList(
        objects = [objects.NetworkRequest(port_id='fake-port1'),
                   objects.NetworkRequest(port_id='fake-port2'),
                   objects.NetworkRequest(port_id='fail-port')])
    mock_avail_nets.return_value = [{'id': 'net-1'}]

    self.api.allocate_for_instance(mock.sentinel.ctx,
                                   mock_inst,
                                   requested_networks=nw_req)

    # Only the two successfully-bound ports are rolled back.
    mock_unbind.assert_called_once_with(mock.sentinel.ctx,
                                        ['fake-port1', 'fake-port2'],
                                        mock.ANY,
                                        mock.ANY)
@mock.patch('nova.objects.network_request.utils')
@mock.patch('nova.network.neutronv2.api.LOG')
@mock.patch('nova.network.neutronv2.api.base_api')
@mock.patch('nova.network.neutronv2.api.API._delete_ports')
@mock.patch('nova.network.neutronv2.api.API._unbind_ports')
@mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
@mock.patch('nova.network.neutronv2.api.get_client')
def test_preexisting_deallocate_for_instance(self, mock_ntrn,
                                             mock_gppids,
                                             mock_unbind,
                                             mock_deletep,
                                             mock_baseapi,
                                             mock_log,
                                             req_utils):
    """Pre-existing (user-supplied) ports are unbound but never deleted;
    nova-created ports are deleted.
    """
    req_utils.is_neutron.return_value = True
    mock_inst = mock.Mock(project_id="proj-1",
                          availability_zone='zone-1',
                          uuid='inst-1')
    mock_nc = mock.Mock()
    mock_ntrn.return_value = mock_nc
    mock_nc.list_ports.return_value = {'ports': [
        {'id': 'port-1'}, {'id': 'port-2'}, {'id': 'port-3'}
    ]}
    nw_req = objects.NetworkRequestList(
        objects = [objects.NetworkRequest(network_id='net-1',
                                          address='192.168.0.3',
                                          port_id='port-1',
                                          pci_request_id='pci-1')])
    mock_gppids.return_value = ['port-3']

    self.api.deallocate_for_instance(mock.sentinel.ctx, mock_inst,
                                requested_networks=nw_req)

    # port-1 (requested) and port-3 (preexisting) are only unbound;
    # port-2 is nova-owned and therefore deleted.
    mock_unbind.assert_called_once_with(mock.sentinel.ctx,
                                        set(['port-1', 'port-3']),
                                        mock.ANY)
    mock_deletep.assert_called_once_with(mock_nc,
                                         mock_inst,
                                         set(['port-2']),
                                         raise_if_fail=True)

@mock.patch('nova.network.neutronv2.api.API.get_instance_nw_info')
@mock.patch('nova.network.neutronv2.api.API._unbind_ports')
@mock.patch('nova.network.neutronv2.api.compute_utils')
@mock.patch('nova.network.neutronv2.api.get_client')
def test_preexisting_deallocate_port_for_instance(self,
                                                  mock_ntrn,
                                                  mock_comp_utils,
                                                  mock_unbind,
                                                  mock_netinfo):
    """Detaching a pre-existing port unbinds it instead of deleting it."""
    mock_comp_utils.get_nw_info_for_instance.return_value = [model.VIF(
        id='1', preserve_on_delete=False), model.VIF(
        id='2', preserve_on_delete=True), model.VIF(
        id='3', preserve_on_delete=True)]
    mock_inst = mock.Mock(project_id="proj-1",
                          availability_zone='zone-1',
                          uuid='inst-1')
    mock_client = mock.Mock()
    mock_ntrn.return_value = mock_client
    self.api.deallocate_port_for_instance(mock.sentinel.ctx,
                                          mock_inst, '2')
    mock_unbind.assert_called_once_with(mock.sentinel.ctx, ['2'],
                                        mock_client)
@mock.patch('nova.network.neutronv2.api.API.'
            '_check_external_network_attach')
@mock.patch('nova.network.neutronv2.api.API._has_port_binding_extension')
@mock.patch('nova.network.neutronv2.api.API.'
            '_populate_neutron_extension_values')
@mock.patch('nova.network.neutronv2.api.API._get_available_networks')
@mock.patch('nova.network.neutronv2.api.get_client')
def test_port_binding_failed_created_port(self, mock_ntrn,
                                          mock_avail_nets,
                                          mock_ext_vals,
                                          mock_has_pbe,
                                          mock_cena):
    """A nova-created port that comes back with vif_type
    'binding_failed' raises PortBindingFailed and is cleaned up.
    """
    mock_has_pbe.return_value = True
    mock_nc = mock.Mock()
    mock_ntrn.return_value = mock_nc
    mock_inst = mock.Mock(project_id="proj-1",
                          availability_zone='zone-1',
                          uuid='inst-1')
    mock_avail_nets.return_value = [{'id': 'net-1'}]
    mock_nc.create_port.return_value = {'port': {'id': 'fake_id',
                                    'tenant_id': mock_inst.project_id,
                                    'binding:vif_type': 'binding_failed'}}

    self.assertRaises(exception.PortBindingFailed,
                      self.api.allocate_for_instance,
                      mock.sentinel.ctx,
                      mock_inst)
    # The failed port must not be leaked.
    mock_nc.delete_port.assert_called_once_with('fake_id')

@mock.patch('nova.network.neutronv2.api.API._show_port')
@mock.patch('nova.network.neutronv2.api.API._has_port_binding_extension')
@mock.patch('nova.network.neutronv2.api.get_client')
def test_port_binding_failed_with_request(self, mock_ntrn,
                                          mock_has_pbe,
                                          mock_show_port):
    """A user-requested port already in 'binding_failed' state causes
    allocation to fail with PortBindingFailed.
    """
    mock_has_pbe.return_value = True
    mock_nc = mock.Mock()
    mock_ntrn.return_value = mock_nc
    mock_inst = mock.Mock(project_id="proj-1",
                          availability_zone='zone-1',
                          uuid='inst-1')
    mock_show_port.return_value = {
                        'tenant_id': mock_inst.project_id,
                        'binding:vif_type': 'binding_failed'}
    nw_req = objects.NetworkRequestList(
        objects = [objects.NetworkRequest(port_id='fake_id')])

    self.assertRaises(exception.PortBindingFailed,
                      self.api.allocate_for_instance,
                      mock.sentinel.ctx, mock_inst,
                      requested_networks=nw_req)
class TestNeutronv2ModuleMethods(test.NoDBTestCase):
    """Tests for module-level helper functions in neutronv2.api."""

    def test_gather_port_ids_and_networks_wrong_params(self):
        """networks and port_ids must be given together or not at all."""
        api = neutronapi.API()

        # Test with networks not None and port_ids is None
        self.assertRaises(exception.NovaException,
                          api._gather_port_ids_and_networks,
                          'fake_context', 'fake_instance',
                          [{'network': {'name': 'foo'}}], None)

        # Test with networks is None and port_ids not None
        self.assertRaises(exception.NovaException,
                          api._gather_port_ids_and_networks,
                          'fake_context', 'fake_instance',
                          None, ['list', 'of', 'port_ids'])

    def test_ensure_requested_network_ordering_no_preference_ids(self):
        """Without a preferred ordering the id list stays untouched."""
        networks = [1, 2, 3]

        neutronapi._ensure_requested_network_ordering(
            lambda x: x,
            networks,
            None)

        # Bug fix: previously nothing was asserted here, so a helper that
        # reordered the list would still have passed this test.
        self.assertEqual([1, 2, 3], networks)

    def test_ensure_requested_network_ordering_no_preference_hashes(self):
        """Without a preferred ordering dicts keep their original order."""
        networks = [{'id': 3}, {'id': 1}, {'id': 2}]

        neutronapi._ensure_requested_network_ordering(
            lambda x: x['id'],
            networks,
            None)

        self.assertEqual(networks, [{'id': 3}, {'id': 1}, {'id': 2}])

    def test_ensure_requested_network_ordering_with_preference(self):
        """A preferred id ordering is applied in place."""
        networks = [{'id': 3}, {'id': 1}, {'id': 2}]

        neutronapi._ensure_requested_network_ordering(
            lambda x: x['id'],
            networks,
            [1, 2, 3])

        self.assertEqual(networks, [{'id': 1}, {'id': 2}, {'id': 3}])
class TestNeutronv2Portbinding(TestNeutronv2Base):
    """Tests for handling of the neutron port-binding extension."""

    def test_allocate_for_instance_portbinding(self):
        self._allocate_for_instance(1, portbinding=True)

    def test_populate_neutron_extension_values_binding(self):
        """With the binding extension present, binding:host_id is set from
        the instance's host and no binding:profile is added.
        """
        api = neutronapi.API()
        neutronapi.get_client(mox.IgnoreArg()).AndReturn(
            self.moxed_client)
        self.moxed_client.list_extensions().AndReturn(
            {'extensions': [{'name': constants.PORTBINDING_EXT}]})
        self.mox.ReplayAll()
        host_id = 'my_host_id'
        instance = {'host': host_id}
        port_req_body = {'port': {}}
        api._populate_neutron_extension_values(self.context, instance,
                                               None, port_req_body)
        self.assertEqual(port_req_body['port']['binding:host_id'], host_id)
        self.assertFalse(port_req_body['port'].get('binding:profile'))

    @mock.patch.object(pci_whitelist, 'get_pci_device_devspec')
    @mock.patch.object(pci_manager, 'get_instance_pci_devs')
    def test_populate_neutron_extension_values_binding_sriov(self,
                                         mock_get_instance_pci_devs,
                                         mock_get_pci_device_devspec):
        """For SR-IOV ports the binding:profile is populated from the
        allocated PCI device and its whitelist tags.
        """
        api = neutronapi.API()
        host_id = 'my_host_id'
        instance = {'host': host_id}
        port_req_body = {'port': {}}
        pci_req_id = 'my_req_id'

        pci_dev = {'vendor_id': '1377',
                   'product_id': '0047',
                   'address': '0000:0a:00.1',
                  }
        PciDevice = collections.namedtuple('PciDevice',
                                           ['vendor_id', 'product_id',
                                            'address'])
        mydev = PciDevice(**pci_dev)
        profile = {'pci_vendor_info': '1377:0047',
                   'pci_slot': '0000:0a:00.1',
                   'physical_network': 'phynet1',
                  }

        mock_get_instance_pci_devs.return_value = [mydev]
        devspec = mock.Mock()
        devspec.get_tags.return_value = {'physical_network': 'phynet1'}
        mock_get_pci_device_devspec.return_value = devspec
        api._populate_neutron_binding_profile(instance,
                                              pci_req_id, port_req_body)

        self.assertEqual(port_req_body['port']['binding:profile'], profile)

    def _test_update_port_binding_false(self, func_name, *args):
        """Helper: without the binding extension the given API call must
        not touch neutron ports at all.
        """
        api = neutronapi.API()
        func = getattr(api, func_name)
        self.mox.StubOutWithMock(api, '_has_port_binding_extension')
        api._has_port_binding_extension(mox.IgnoreArg(),
                                        refresh_cache=True).AndReturn(False)
        self.mox.ReplayAll()
        func(*args)

    def _test_update_port_binding_true(self, expected_bind_host,
                                       func_name, *args):
        """Helper: with the binding extension present the given API call
        must rebind the instance's ports to expected_bind_host.
        """
        api = neutronapi.API()
        func = getattr(api, func_name)
        self.mox.StubOutWithMock(api, '_has_port_binding_extension')
        api._has_port_binding_extension(mox.IgnoreArg(),
                                        refresh_cache=True).AndReturn(True)
        neutronapi.get_client(mox.IgnoreArg(), admin=True).AndReturn(
            self.moxed_client)
        search_opts = {'device_id': self.instance['uuid'],
                       'tenant_id': self.instance['project_id']}
        ports = {'ports': [{'id': 'test1'}]}
        self.moxed_client.list_ports(**search_opts).AndReturn(ports)
        port_req_body = {'port':
                         {'binding:host_id': expected_bind_host}}
        self.moxed_client.update_port('test1',
                                      port_req_body).AndReturn(None)
        self.mox.ReplayAll()
        func(*args)

    def _test_update_port_true_exception(self, expected_bind_host,
                                         func_name, *args):
        """Helper: a failing update_port during rebinding must surface as
        a neutron client exception to the caller.
        """
        api = neutronapi.API()
        func = getattr(api, func_name)
        self.mox.StubOutWithMock(api, '_has_port_binding_extension')
        api._has_port_binding_extension(mox.IgnoreArg(),
                                        refresh_cache=True).AndReturn(True)
        neutronapi.get_client(mox.IgnoreArg(), admin=True).AndReturn(
            self.moxed_client)
        search_opts = {'device_id': self.instance['uuid'],
                       'tenant_id': self.instance['project_id']}
        ports = {'ports': [{'id': 'test1'}]}
        self.moxed_client.list_ports(**search_opts).AndReturn(ports)
        port_req_body = {'port':
                         {'binding:host_id': expected_bind_host}}
        self.moxed_client.update_port('test1',
                                      port_req_body).AndRaise(
            Exception("fail to update port"))
        self.mox.ReplayAll()
        self.assertRaises(NEUTRON_CLIENT_EXCEPTION,
                          func,
                          *args)

    def test_migrate_instance_finish_binding_false(self):
        self._test_update_port_binding_false('migrate_instance_finish',
                                             self.context, None,
                                             {'dest_compute': 'fake'})

    def test_migrate_instance_finish_binding_true(self):
        migration = {'source_compute': self.instance.get('host'),
                     'dest_compute': 'dest_host'}
        instance = self._fake_instance_object(self.instance)
        self._test_update_port_binding_true('dest_host',
                                            'migrate_instance_finish',
                                            self.context,
                                            instance,
                                            migration)

    def test_migrate_instance_finish_binding_true_exception(self):
        migration = {'source_compute': self.instance.get('host'),
                     'dest_compute': 'dest_host'}
        instance = self._fake_instance_object(self.instance)
        self._test_update_port_true_exception('dest_host',
                                              'migrate_instance_finish',
                                              self.context,
                                              instance,
                                              migration)

    def test_setup_instance_network_on_host_false(self):
        self._test_update_port_binding_false(
            'setup_instance_network_on_host', self.context, None,
            'fake_host')

    def test_setup_instance_network_on_host_true(self):
        instance = self._fake_instance_object(self.instance)
        self._test_update_port_binding_true('fake_host',
                                            'setup_instance_network_on_host',
                                            self.context,
                                            instance,
                                            'fake_host')

    def test_setup_instance_network_on_host_exception(self):
        instance = self._fake_instance_object(self.instance)
        self._test_update_port_true_exception(
            'fake_host', 'setup_instance_network_on_host',
            self.context, instance, 'fake_host')

    def test_associate_not_implemented(self):
        # Associating a network is a nova-network concept; neutron raises.
        api = neutronapi.API()
        self.assertRaises(NotImplementedError,
                          api.associate,
                          self.context, 'id')
class TestNeutronv2ExtraDhcpOpts(TestNeutronv2Base):
    """Tests for passing extra DHCP options through to port creation."""

    def setUp(self):
        super(TestNeutronv2ExtraDhcpOpts, self).setUp()
        neutronapi.get_client(mox.IgnoreArg()).MultipleTimes().AndReturn(
            self.moxed_client)

    def test_allocate_for_instance_1_with_extra_dhcp_opts_turned_off(self):
        self._allocate_for_instance(1, extra_dhcp_opts=False)

    def test_allocate_for_instance_extradhcpopts(self):
        # PXE boot options that neutron should receive verbatim in the
        # port create request.
        dhcp_opts = [{'opt_name': 'bootfile-name',
                      'opt_value': 'pxelinux.0'},
                     {'opt_name': 'tftp-server',
                      'opt_value': '123.123.123.123'},
                     {'opt_name': 'server-ip-address',
                      'opt_value': '123.123.123.456'}]

        self._allocate_for_instance(1, dhcp_options=dhcp_opts)
class TestNeutronClientForAdminScenarios(test.NoDBTestCase):
@mock.patch('keystoneclient.auth.identity.v2.Password.get_token')
def _test_get_client_for_admin(self, auth_mock,
use_id=False, admin_context=False):
token_value = uuid.uuid4().hex
auth_mock.return_value = token_value
self.flags(auth_strategy=None, group='neutron')
self.flags(url='http://anyhost/', group='neutron')
self.flags(timeout=30, group='neutron')
if use_id:
self.flags(admin_tenant_id='admin_tenant_id', group='neutron')
self.flags(admin_user_id='admin_user_id', group='neutron')
if admin_context:
my_context = context.get_admin_context()
else:
my_context = context.RequestContext('userid', 'my_tenantid',
auth_token='token')
# clean global
neutronapi.reset_state()
if admin_context:
# Note that the context does not contain a token but is
# an admin context which will force an elevation to admin
# credentials.
context_client = neutronapi.get_client(my_context)
else:
# Note that the context is not elevated, but the True is passed in
# which will force an elevation to admin credentials even though
# the context has an auth_token.
context_client = neutronapi.get_client(my_context, True)
admin_auth = neutronapi._ADMIN_AUTH
self.assertEqual(CONF.neutron.admin_auth_url, admin_auth.auth_url)
self.assertEqual(CONF.neutron.admin_password, admin_auth.password)
if use_id:
self.assertEqual(CONF.neutron.admin_tenant_id,
admin_auth.tenant_id)
self.assertEqual(CONF.neutron.admin_user_id, admin_auth.user_id)
self.assertIsNone(admin_auth.tenant_name)
self.assertIsNone(admin_auth.username)
else:
self.assertEqual(CONF.neutron.admin_tenant_name,
admin_auth.tenant_name)
self.assertEqual(CONF.neutron.admin_username, admin_auth.username)
self.assertIsNone(admin_auth.tenant_id)
self.assertIsNone(admin_auth.user_id)
self.assertEqual(CONF.neutron.timeout, neutronapi._SESSION.timeout)
self.assertEqual(token_value, context_client.httpclient.auth.token)
self.assertEqual(CONF.neutron.url,
context_client.httpclient.auth.endpoint)
def test_get_client_for_admin(self):
self._test_get_client_for_admin()
def test_get_client_for_admin_with_id(self):
self._test_get_client_for_admin(use_id=True)
def test_get_client_for_admin_context(self):
self._test_get_client_for_admin(admin_context=True)
    def test_get_client_for_admin_context_with_id(self):
        # Combination of the two variants above: id-based admin credentials
        # plus an admin request context.
        self._test_get_client_for_admin(use_id=True, admin_context=True)
| [
"gokrokvertskhov@mirantis.com"
] | gokrokvertskhov@mirantis.com |
532a15865360bfb4a5e5ff44b0d84b1d0a8fbe76 | 62111918133bcee3d047a6a53ade17c1a02b25d7 | /login_app/routing.py | ca1ea489c2130543d072af843f51e2ae62a56575 | [] | no_license | SuperLalka/Webim-test | c5cf1b7858edb7546351f7cf89f165d8c35cf6af | c7d4ebfe21a8cd1ebbc290287573a4d491042ad0 | refs/heads/master | 2023-01-14T02:35:35.169705 | 2020-11-23T18:54:26 | 2020-11-23T19:39:00 | 312,047,405 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 155 | py | from django.conf.urls import url
from login_app import consumers
# WebSocket routes consumed by the project's Channels ProtocolTypeRouter.
websocket_urlpatterns = [
    # 'ws/index' -> NumberConsumer; .as_asgi() is the Channels 3+ ASGI entry point.
    url(r'ws/index$', consumers.NumberConsumer.as_asgi()),
]
| [
"oomarkizoo@mail.ru"
] | oomarkizoo@mail.ru |
6148f29660770166eb2b88bd95268e8ebb855c58 | eff5cd25fa442b70491262bada0584eaaf8add46 | /tfx/tools/cli/testdata/test_pipeline_airflow_2.py | 324d6b0cfbc2ed928677e8a5bbc8c1e5c375d2ac | [
"Apache-2.0"
] | permissive | fsx950223/tfx | c58e58a85e6de6e9abcb8790acbf36424b5b2029 | 527fe2bab6e4f62febfe1a2029358fabe55f418c | refs/heads/master | 2021-01-04T12:12:51.010090 | 2020-01-26T04:43:14 | 2020-01-26T04:43:14 | 240,543,231 | 1 | 0 | Apache-2.0 | 2020-02-14T15:48:12 | 2020-02-14T15:48:11 | null | UTF-8 | Python | false | false | 3,007 | py | # Lint as: python2, python3
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Pipeline for testing CLI."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import datetime
import os
from tfx.components.example_gen.csv_example_gen.component import CsvExampleGen
from tfx.components.schema_gen.component import SchemaGen
from tfx.components.statistics_gen.component import StatisticsGen
from tfx.orchestration import pipeline
from tfx.orchestration.airflow.airflow_dag_runner import AirflowDagRunner
from tfx.orchestration.airflow.airflow_dag_runner import AirflowPipelineConfig
from tfx.utils.dsl_utils import csv_input
# This example assumes that the taxi data is stored in ~/taxi/data and the
# taxi utility function is in ~/taxi. Feel free to customize this as needed.
_taxi_root = os.path.join(os.environ['HOME'], 'taxi')
_data_root = os.path.join(_taxi_root, 'data/simple')
# Directory and data locations. This example assumes all of the chicago taxi
# example code and metadata library is relative to $HOME, but you can store
# these files anywhere on your local filesystem.
_tfx_root = os.path.join(os.environ['HOME'], 'tfx')
_pipeline_root = os.path.join(_tfx_root, 'pipelines')
_metadata_db_root = os.path.join(_tfx_root, 'metadata')
_log_root = os.path.join(_tfx_root, 'logs')
# Airflow-specific configs; these will be passed directly to airflow
_airflow_config = {
    # schedule_interval=None: the DAG only runs when triggered manually.
    'schedule_interval': None,
    'start_date': datetime.datetime(2019, 1, 1),
}
def _create_pipeline():
  """Builds the simple Chicago-taxi TFX pipeline.

  Three stages: CSV ingestion -> statistics generation -> schema inference.
  """
  # Ingest the CSV training data into the pipeline.
  csv_examples = csv_input(_data_root)
  ingest = CsvExampleGen(input=csv_examples)
  # Compute statistics over the ingested examples (also used for visualization
  # and example validation), then infer a schema from those statistics.
  stats = StatisticsGen(examples=ingest.outputs['examples'])
  schema = SchemaGen(statistics=stats.outputs['statistics'])
  pipeline_components = [ingest, stats, schema]
  return pipeline.Pipeline(
      pipeline_name='chicago_taxi_simple',
      pipeline_root=_pipeline_root,
      components=pipeline_components,
      enable_cache=True,
      metadata_db_root=_metadata_db_root,
  )
# Airflow checks 'DAG' keyword for finding the dag.
# Module-level on purpose: Airflow's DagBag imports this file and scans globals.
airflow_pipeline = AirflowDagRunner(AirflowPipelineConfig(_airflow_config)).run(
    _create_pipeline())
| [
"tensorflow-extended-team@google.com"
] | tensorflow-extended-team@google.com |
626404a165a73ccad1b33be70aadbd60a6ae0090 | e7bac28bdc58481f283fc840f93abb27b971a60e | /calc/urls.py | f96212bf362f3e753d0a4e7b0f1539607021b661 | [] | no_license | RameesRoshanck/python-sample | 87d810973a773a74764ceacff3327cd8c4b7dbab | 21c183dfa78b20730810dcb861244ed02a80388d | refs/heads/main | 2023-08-27T23:22:55.303716 | 2021-10-14T15:03:24 | 2021-10-14T15:03:24 | 417,175,698 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 194 | py | from django.urls import path
from .import views
# URL routes for the calc app: '' is the login page, 'home' the main page,
# 'logout' ends the session.
urlpatterns = [
    path('home', views.home, name='home'),
    path('', views.login, name='login'),
    path('logout', views.logout, name='logout'),
]
"ckmhdroshan@gmail.com"
] | ckmhdroshan@gmail.com |
a703dc2eb5ad305e5bd2196c3484053910cf36c6 | a838d4bed14d5df5314000b41f8318c4ebe0974e | /sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_07_01/aio/operations/_network_security_groups_operations.py | 55371021d9e09323425a87dad705ef9be90f1746 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | scbedd/azure-sdk-for-python | ee7cbd6a8725ddd4a6edfde5f40a2a589808daea | cc8bdfceb23e5ae9f78323edc2a4e66e348bb17a | refs/heads/master | 2023-09-01T08:38:56.188954 | 2021-06-17T22:52:28 | 2021-06-17T22:52:28 | 159,568,218 | 2 | 0 | MIT | 2019-08-11T21:16:01 | 2018-11-28T21:34:49 | Python | UTF-8 | Python | false | false | 27,723 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class NetworkSecurityGroupsOperations:
    """NetworkSecurityGroupsOperations async operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2020_07_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    # NOTE: generated by AutoRest (see file header); regenerate from the service
    # specification rather than hand-editing.
    models = _models

    def __init__(self, client, config, serializer, deserializer) -> None:
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    async def _delete_initial(
        self,
        resource_group_name: str,
        network_security_group_name: str,
        **kwargs
    ) -> None:
        # Issues the initial DELETE request; polling to completion is done by begin_delete.
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-07-01"
        accept = "application/json"

        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200/202 = accepted/in progress, 204 = already deleted.
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}'}  # type: ignore

    async def begin_delete(
        self,
        resource_group_name: str,
        network_security_group_name: str,
        **kwargs
    ) -> AsyncLROPoller[None]:
        """Deletes the specified network security group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param network_security_group_name: The name of the network security group.
        :type network_security_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
         False for no polling, or your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only issue the initial request when not resuming from a saved poller state.
        if cont_token is None:
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                network_security_group_name=network_security_group_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        # Delete polls its final state via the Location header.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}'}  # type: ignore

    async def get(
        self,
        resource_group_name: str,
        network_security_group_name: str,
        expand: Optional[str] = None,
        **kwargs
    ) -> "_models.NetworkSecurityGroup":
        """Gets the specified network security group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param network_security_group_name: The name of the network security group.
        :type network_security_group_name: str
        :param expand: Expands referenced resources.
        :type expand: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: NetworkSecurityGroup, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2020_07_01.models.NetworkSecurityGroup
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.NetworkSecurityGroup"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-07-01"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # $expand is only sent when the caller asked for it.
        if expand is not None:
            query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('NetworkSecurityGroup', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}'}  # type: ignore

    async def _create_or_update_initial(
        self,
        resource_group_name: str,
        network_security_group_name: str,
        parameters: "_models.NetworkSecurityGroup",
        **kwargs
    ) -> "_models.NetworkSecurityGroup":
        # Issues the initial PUT request; polling is handled by begin_create_or_update.
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.NetworkSecurityGroup"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-07-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'NetworkSecurityGroup')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200 = updated existing resource, 201 = created new one; same body either way.
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if response.status_code == 200:
            deserialized = self._deserialize('NetworkSecurityGroup', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('NetworkSecurityGroup', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}'}  # type: ignore

    async def begin_create_or_update(
        self,
        resource_group_name: str,
        network_security_group_name: str,
        parameters: "_models.NetworkSecurityGroup",
        **kwargs
    ) -> AsyncLROPoller["_models.NetworkSecurityGroup"]:
        """Creates or updates a network security group in the specified resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param network_security_group_name: The name of the network security group.
        :type network_security_group_name: str
        :param parameters: Parameters supplied to the create or update network security group
         operation.
        :type parameters: ~azure.mgmt.network.v2020_07_01.models.NetworkSecurityGroup
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: Pass in True if you'd like the AsyncARMPolling polling method,
         False for no polling, or your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either NetworkSecurityGroup or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_07_01.models.NetworkSecurityGroup]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.NetworkSecurityGroup"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only issue the initial request when not resuming from a saved poller state.
        if cont_token is None:
            raw_result = await self._create_or_update_initial(
                resource_group_name=resource_group_name,
                network_security_group_name=network_security_group_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            deserialized = self._deserialize('NetworkSecurityGroup', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        # Create/update polls its final state via the Azure-AsyncOperation header.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)

    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}'}  # type: ignore

    async def update_tags(
        self,
        resource_group_name: str,
        network_security_group_name: str,
        parameters: "_models.TagsObject",
        **kwargs
    ) -> "_models.NetworkSecurityGroup":
        """Updates a network security group tags.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param network_security_group_name: The name of the network security group.
        :type network_security_group_name: str
        :param parameters: Parameters supplied to update network security group tags.
        :type parameters: ~azure.mgmt.network.v2020_07_01.models.TagsObject
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: NetworkSecurityGroup, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2020_07_01.models.NetworkSecurityGroup
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.NetworkSecurityGroup"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-07-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self.update_tags.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'networkSecurityGroupName': self._serialize.url("network_security_group_name", network_security_group_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'TagsObject')
        body_content_kwargs['content'] = body_content
        # PATCH: tags-only update, not a long-running operation.
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('NetworkSecurityGroup', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups/{networkSecurityGroupName}'}  # type: ignore

    def list_all(
        self,
        **kwargs
    ) -> AsyncIterable["_models.NetworkSecurityGroupListResult"]:
        """Gets all network security groups in a subscription.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either NetworkSecurityGroupListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_07_01.models.NetworkSecurityGroupListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.NetworkSecurityGroupListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-07-01"
        accept = "application/json"

        # Builds either the first-page request or a follow-up request from next_link.
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list_all.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link already contains all query parameters.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            deserialized = self._deserialize('NetworkSecurityGroupListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/networkSecurityGroups'}  # type: ignore

    def list(
        self,
        resource_group_name: str,
        **kwargs
    ) -> AsyncIterable["_models.NetworkSecurityGroupListResult"]:
        """Gets all network security groups in a resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either NetworkSecurityGroupListResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_07_01.models.NetworkSecurityGroupListResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.NetworkSecurityGroupListResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-07-01"
        accept = "application/json"

        # Same paging shape as list_all, scoped to one resource group.
        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list.metadata['url']  # type: ignore
                path_format_arguments = {
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # next_link already contains all query parameters.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        async def extract_data(pipeline_response):
            deserialized = self._deserialize('NetworkSecurityGroupListResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkSecurityGroups'}  # type: ignore
| [
"noreply@github.com"
] | scbedd.noreply@github.com |
90f6f0f5d079484f4200d377926011f1ba77383e | 8a9733c29f930c499c837ecbde8d7f6fabb95347 | /hasAlternatingBits.py | 6170608b4735086466116a72da7ebdbac1196d90 | [] | no_license | 0ff5ec/Programming | 6c6911095d18a57adc7eafe369ad609ae973c709 | 31374f76b21a2ede9544846382234c87153e4fa8 | refs/heads/master | 2022-03-14T18:16:04.172132 | 2019-12-18T03:31:23 | 2019-12-18T03:31:23 | 125,090,470 | 8 | 2 | null | null | null | null | UTF-8 | Python | false | false | 969 | py | # _*_ coding = utf-8 _*_
'''
Given a positive integer, check whether it has alternating bits: namely, if two adjacent bits will always have different values.
Example 1:
Input: 5
Output: True
Explanation:
The binary representation of 5 is: 101
Example 2:
Input: 7
Output: False
Explanation:
The binary representation of 7 is: 111.
Example 3:
Input: 11
Output: False
Explanation:
The binary representation of 11 is: 1011.
Example 4:
Input: 10
Output: True
Explanation:
The binary representation of 10 is: 1010.
'''
"""
:type n: int
:rtype: bool
"""
class Solution(object):
    def hasAlternatingBits(self, n):
        """Return True iff the binary representation of positive integer n
        strictly alternates between 1 and 0 bits (e.g. 5 = 101, 10 = 1010).

        :type n: int
        :rtype: bool

        Uses an O(1)-per-word bit trick instead of looping over every bit:
        if the bits of n alternate, then n ^ (n >> 1) is a contiguous run of
        1 bits (e.g. 101 ^ 010 = 111), and x is such a run exactly when
        x & (x + 1) == 0.
        """
        x = n ^ (n >> 1)
        return x & (x + 1) == 0
if __name__ == "__main__":
    # Interactive driver: prompt for a number and print the result.
    sol = Solution()
    # NOTE: raw_input is Python 2 only; under Python 3 this would be input().
    print(sol.hasAlternatingBits(int(raw_input('Enter the number: '))))
| [
"offsec.akash@gmail.com"
] | offsec.akash@gmail.com |
8651372e4563b7c115a7c05e662c90f9ec13ca72 | a0c70282989d255718b3fb68690887a271f7f929 | /scotstartup/views.py | dccdfa374048c97aec1dc8d6f85cdb572b3aa264 | [] | no_license | martygrant/scotstartup | 0c4a49a8dc0d10d4d1925a65f928a25a8c7c7d81 | 85052681a3d3f1f75b2b2e4858382063a79f7614 | refs/heads/master | 2021-03-27T13:01:52.813184 | 2017-12-26T15:55:24 | 2017-12-26T15:55:24 | 101,567,852 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,595 | py | from django.shortcuts import render
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.db.models import Q
from scotstartup.models import Company, Event
from scotstartup.forms import CompanyForm, UserForm, UserProfileForm, EventForm
def index(request):
    """Render the home page.

    Context: every company, the five most recently created companies, up to
    five featured companies, and the five most recently created events.
    """
    context = {
        'companies': Company.objects.all(),
        'recentCompanies': Company.objects.order_by("-created")[:5],
        'featuredCompanies': Company.objects.filter(featured=True)[:5],
        'events': Event.objects.order_by("-created")[:5],
    }
    return render(request, 'scotstartup/index.html', context)
def companies(request):
    """List every company on the companies page."""
    all_companies = Company.objects.all()
    return render(request, 'scotstartup/companies.html',
                  {'companies': all_companies})
def company(request, company_name_slug):
context_dict = {}
try:
company = Company.objects.get(slug=company_name_slug)
context_dict['company_name'] = company.name
context_dict['company_description'] = company.description
context_dict['company'] = company
except Company.DoesNotExist:
pass
return render(request, 'scotstartup/company.html', context_dict)
def add_company(request):
if request.method == 'POST':
form = CompanyForm(request.POST)
if form.is_valid():
form.save(commit=True)
return index(request)
else:
print form.errors
else:
form = CompanyForm()
return render(request, 'scotstartup/add_company.html', {'form': form})
def event(request, event_name_slug):
context_dict = {}
try:
event = Event.objects.get(slug=event_name_slug)
context_dict['event_name'] = event.name
context_dict['event_description'] = event.description
context_dict['event'] = event
except event.DoesNotExist:
pass
return render(request, 'scotstartup/event.html', context_dict)
def events(request):
event_list = Event.objects.all()
context_dict = {}
context_dict['events'] = event_list
return render(request, 'scotstartup/events.html', context_dict)
def add_event(request):
if request.method == 'POST':
form = EventForm(request.POST)
if form.is_valid():
form.save(commit=True)
return index(request)
else:
print form.errors
else:
form = EventForm()
return render(request, 'scotstartup/add_event.html', {'form': form})
def about(request):
return render(request, 'scotstartup/about.html')
def news(request):
return render(request, 'scotstartup/news.html')
def search(request):
context_dict = {}
if 'q' in request.GET:
q = request.GET.get('q')
context_dict['query'] = q
try:
companies = Company.objects.filter(Q(name__contains=q))
context_dict['companies'] = companies
events = Event.objects.filter(Q(name__contains=q))
context_dict['events'] = events
except:
pass
return render(request, 'scotstartup/search.html', context_dict)
def profile(request):
    """Render the user profile page."""
    # Fix: removed the " | [" extraction residue that was fused onto the
    # end of this return statement and made the line a syntax error.
    return render(request, 'scotstartup/profile.html')
"martingrant@outlook.com"
] | martingrant@outlook.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.